28 #include "../include/Timeline.h"
30 using namespace openshot;
34 is_open(false), auto_map_clips(true)
76 apply_mapper_to_clip(clip);
79 clips.push_back(clip);
89 effects.push_back(effect);
98 effects.remove(effect);
108 void Timeline::apply_mapper_to_clip(
Clip* clip)
115 if (clip->
Reader()->Name() ==
"FrameMapper")
131 clip->
Reader(clip_reader);
141 list<Clip*>::iterator clip_itr;
142 for (clip_itr=clips.begin(); clip_itr != clips.end(); ++clip_itr)
145 Clip *clip = (*clip_itr);
148 apply_mapper_to_clip(clip);
153 double Timeline::calculate_time(int64_t number,
Fraction rate)
156 double raw_fps = rate.
ToFloat();
159 return double(number - 1) / raw_fps;
163 std::shared_ptr<Frame> Timeline::apply_effects(std::shared_ptr<Frame> frame, int64_t timeline_frame_number,
int layer)
166 ZmqLogger::Instance()->
AppendDebugMethod(
"Timeline::apply_effects",
"frame->number", frame->number,
"timeline_frame_number", timeline_frame_number,
"layer", layer,
"", -1,
"", -1,
"", -1);
169 list<EffectBase*>::iterator effect_itr;
170 for (effect_itr=effects.begin(); effect_itr != effects.end(); ++effect_itr)
179 bool does_effect_intersect = (effect_start_position <= timeline_frame_number && effect_end_position >= timeline_frame_number && effect->
Layer() == layer);
182 ZmqLogger::Instance()->
AppendDebugMethod(
"Timeline::apply_effects (Does effect intersect)",
"effect->Position()", effect->
Position(),
"does_effect_intersect", does_effect_intersect,
"timeline_frame_number", timeline_frame_number,
"layer", layer,
"", -1,
"", -1);
185 if (does_effect_intersect)
189 long effect_frame_number = timeline_frame_number - effect_start_position + effect_start_frame;
192 ZmqLogger::Instance()->
AppendDebugMethod(
"Timeline::apply_effects (Process Effect)",
"effect_frame_number", effect_frame_number,
"does_effect_intersect", does_effect_intersect,
"", -1,
"", -1,
"", -1,
"", -1);
195 frame = effect->
GetFrame(frame, effect_frame_number);
205 std::shared_ptr<Frame> Timeline::GetOrCreateFrame(
Clip* clip, int64_t number)
207 std::shared_ptr<Frame> new_frame;
214 ZmqLogger::Instance()->
AppendDebugMethod(
"Timeline::GetOrCreateFrame (from reader)",
"number", number,
"samples_in_frame", samples_in_frame,
"", -1,
"", -1,
"", -1,
"", -1);
217 #pragma omp critical (T_GetOtCreateFrame)
218 new_frame = std::shared_ptr<Frame>(clip->
GetFrame(number));
232 ZmqLogger::Instance()->
AppendDebugMethod(
"Timeline::GetOrCreateFrame (create blank)",
"number", number,
"samples_in_frame", samples_in_frame,
"", -1,
"", -1,
"", -1,
"", -1);
236 #pragma omp critical (T_GetOtCreateFrame)
245 void Timeline::add_layer(std::shared_ptr<Frame> new_frame,
Clip* source_clip, int64_t clip_frame_number, int64_t timeline_frame_number,
bool is_top_clip,
float max_volume)
248 std::shared_ptr<Frame> source_frame;
249 #pragma omp critical (T_addLayer)
250 source_frame = GetOrCreateFrame(source_clip, clip_frame_number);
257 ZmqLogger::Instance()->
AppendDebugMethod(
"Timeline::add_layer",
"new_frame->number", new_frame->number,
"clip_frame_number", clip_frame_number,
"timeline_frame_number", timeline_frame_number,
"", -1,
"", -1,
"", -1);
263 ZmqLogger::Instance()->
AppendDebugMethod(
"Timeline::add_layer (Generate Waveform Image)",
"source_frame->number", source_frame->number,
"source_clip->Waveform()", source_clip->
Waveform(),
"clip_frame_number", clip_frame_number,
"", -1,
"", -1,
"", -1);
272 std::shared_ptr<QImage> source_image;
273 #pragma omp critical (T_addLayer)
275 source_frame->AddImage(std::shared_ptr<QImage>(source_image));
280 if (is_top_clip && source_frame)
281 #pragma omp critical (T_addLayer)
282 source_frame = apply_effects(source_frame, timeline_frame_number, source_clip->
Layer());
285 std::shared_ptr<QImage> source_image;
288 if (source_clip->
Reader()->info.has_audio) {
290 ZmqLogger::Instance()->
AppendDebugMethod(
"Timeline::add_layer (Copy Audio)",
"source_clip->Reader()->info.has_audio", source_clip->
Reader()->info.has_audio,
"source_frame->GetAudioChannelsCount()", source_frame->GetAudioChannelsCount(),
"info.channels",
info.
channels,
"clip_frame_number", clip_frame_number,
"timeline_frame_number", timeline_frame_number,
"", -1);
293 for (
int channel = 0; channel < source_frame->GetAudioChannelsCount(); channel++)
296 float previous_volume = source_clip->
volume.
GetValue(clip_frame_number - 1);
304 previous_volume = previous_volume / max_volume;
305 volume = volume / max_volume;
309 previous_volume = previous_volume * 0.77;
310 volume = volume * 0.77;
314 if (channel_filter != -1 && channel_filter != channel)
318 if (previous_volume == 0.0 && volume == 0.0)
322 if (channel_mapping == -1)
323 channel_mapping = channel;
326 if (!isEqual(previous_volume, 1.0) || !isEqual(volume, 1.0))
327 source_frame->ApplyGainRamp(channel_mapping, 0, source_frame->GetAudioSamplesCount(), previous_volume, volume);
333 if (new_frame->GetAudioSamplesCount() != source_frame->GetAudioSamplesCount())
335 #pragma omp critical (T_addLayer)
340 #pragma omp critical (T_addLayer)
341 new_frame->AddAudio(
false, channel_mapping, 0, source_frame->GetAudioSamples(channel), source_frame->GetAudioSamplesCount(), 1.0);
346 ZmqLogger::Instance()->
AppendDebugMethod(
"Timeline::add_layer (No Audio Copied - Wrong # of Channels)",
"source_clip->Reader()->info.has_audio", source_clip->
Reader()->info.has_audio,
"source_frame->GetAudioChannelsCount()", source_frame->GetAudioChannelsCount(),
"info.channels",
info.
channels,
"clip_frame_number", clip_frame_number,
"timeline_frame_number", timeline_frame_number,
"", -1);
351 if (!source_clip->
Waveform() && !source_clip->
Reader()->info.has_video)
356 ZmqLogger::Instance()->
AppendDebugMethod(
"Timeline::add_layer (Get Source Image)",
"source_frame->number", source_frame->number,
"source_clip->Waveform()", source_clip->
Waveform(),
"clip_frame_number", clip_frame_number,
"", -1,
"", -1,
"", -1);
359 source_image = source_frame->GetImage();
364 float alpha = source_clip->
alpha.
GetValue(clip_frame_number);
367 unsigned char *pixels = (
unsigned char *) source_image->bits();
370 for (
int pixel = 0, byte_index=0; pixel < source_image->width() * source_image->height(); pixel++, byte_index+=4)
373 int A = pixels[byte_index + 3];
376 pixels[byte_index + 3] *= alpha;
380 ZmqLogger::Instance()->
AppendDebugMethod(
"Timeline::add_layer (Set Alpha & Opacity)",
"alpha", alpha,
"source_frame->number", source_frame->number,
"clip_frame_number", clip_frame_number,
"", -1,
"", -1,
"", -1);
384 QSize source_size = source_image->size();
385 switch (source_clip->
scale)
392 ZmqLogger::Instance()->
AppendDebugMethod(
"Timeline::add_layer (Scale: SCALE_FIT)",
"source_frame->number", source_frame->number,
"source_width", source_size.width(),
"source_height", source_size.height(),
"", -1,
"", -1,
"", -1);
400 ZmqLogger::Instance()->
AppendDebugMethod(
"Timeline::add_layer (Scale: SCALE_STRETCH)",
"source_frame->number", source_frame->number,
"source_width", source_size.width(),
"source_height", source_size.height(),
"", -1,
"", -1,
"", -1);
409 source_size.scale(width_size.width(), width_size.height(), Qt::KeepAspectRatio);
411 source_size.scale(height_size.width(), height_size.height(), Qt::KeepAspectRatio);
414 ZmqLogger::Instance()->
AppendDebugMethod(
"Timeline::add_layer (Scale: SCALE_CROP)",
"source_frame->number", source_frame->number,
"source_width", source_size.width(),
"source_height", source_size.height(),
"", -1,
"", -1,
"", -1);
421 float source_width_ratio = source_size.width() / float(
info.
width);
422 float source_height_ratio = source_size.height() / float(
info.
height);
426 ZmqLogger::Instance()->
AppendDebugMethod(
"Timeline::add_layer (Scale: SCALE_NONE)",
"source_frame->number", source_frame->number,
"source_width", source_size.width(),
"source_height", source_size.height(),
"", -1,
"", -1,
"", -1);
438 float scaled_source_width = source_size.width() * sx;
439 float scaled_source_height = source_size.height() * sy;
474 ZmqLogger::Instance()->
AppendDebugMethod(
"Timeline::add_layer (Gravity)",
"source_frame->number", source_frame->number,
"source_clip->gravity", source_clip->
gravity,
"info.width",
info.
width,
"scaled_source_width", scaled_source_width,
"info.height",
info.
height,
"scaled_source_height", scaled_source_height);
483 bool transformed =
false;
484 QTransform transform;
487 ZmqLogger::Instance()->
AppendDebugMethod(
"Timeline::add_layer (Build QTransform - if needed)",
"source_frame->number", source_frame->number,
"x", x,
"y", y,
"r", r,
"sx", sx,
"sy", sy);
489 if (!isEqual(r, 0)) {
491 float origin_x = x + (scaled_source_width / 2.0);
492 float origin_y = y + (scaled_source_height / 2.0);
493 transform.translate(origin_x, origin_y);
495 transform.translate(-origin_x,-origin_y);
499 if (!isEqual(x, 0) || !isEqual(y, 0)) {
501 transform.translate(x, y);
506 float source_width_scale = (float(source_size.width()) /
float(source_image->width())) * sx;
507 float source_height_scale = (float(source_size.height()) /
float(source_image->height())) * sy;
509 if (!isEqual(source_width_scale, 1.0) || !isEqual(source_height_scale, 1.0)) {
510 transform.scale(source_width_scale, source_height_scale);
514 if (!isEqual(shear_x, 0) || !isEqual(shear_y, 0)) {
516 transform.shear(shear_x, shear_y);
521 ZmqLogger::Instance()->
AppendDebugMethod(
"Timeline::add_layer (Transform: Composite Image Layer: Prepare)",
"source_frame->number", source_frame->number,
"new_frame->GetImage()->width()", new_frame->GetImage()->width(),
"transformed", transformed,
"", -1,
"", -1,
"", -1);
524 std::shared_ptr<QImage> new_image;
525 #pragma omp critical (T_addLayer)
526 new_image = new_frame->GetImage();
529 QPainter painter(new_image.get());
530 painter.setRenderHints(QPainter::Antialiasing | QPainter::SmoothPixmapTransform | QPainter::TextAntialiasing,
true);
534 painter.setTransform(transform);
537 painter.setCompositionMode(QPainter::CompositionMode_SourceOver);
538 painter.drawImage(0, 0, *source_image);
542 stringstream frame_number_str;
546 frame_number_str << clip_frame_number;
550 frame_number_str << timeline_frame_number;
554 frame_number_str << timeline_frame_number <<
" (" << clip_frame_number <<
")";
559 painter.setPen(QColor(
"#ffffff"));
560 painter.drawText(20, 20, QString(frame_number_str.str().c_str()));
566 ZmqLogger::Instance()->
AppendDebugMethod(
"Timeline::add_layer (Transform: Composite Image Layer: Completed)",
"source_frame->number", source_frame->number,
"new_frame->GetImage()->width()", new_frame->GetImage()->width(),
"transformed", transformed,
"", -1,
"", -1,
"", -1);
570 void Timeline::update_open_clips(
Clip *clip,
bool does_clip_intersect)
572 ZmqLogger::Instance()->
AppendDebugMethod(
"Timeline::update_open_clips (before)",
"does_clip_intersect", does_clip_intersect,
"closing_clips.size()", closing_clips.size(),
"open_clips.size()", open_clips.size(),
"", -1,
"", -1,
"", -1);
575 bool clip_found = open_clips.count(clip);
577 if (clip_found && !does_clip_intersect)
580 open_clips.erase(clip);
585 else if (!clip_found && does_clip_intersect)
588 open_clips[clip] = clip;
600 ZmqLogger::Instance()->
AppendDebugMethod(
"Timeline::update_open_clips (after)",
"does_clip_intersect", does_clip_intersect,
"clip_found", clip_found,
"closing_clips.size()", closing_clips.size(),
"open_clips.size()", open_clips.size(),
"", -1,
"", -1);
604 void Timeline::sort_clips()
607 ZmqLogger::Instance()->
AppendDebugMethod(
"Timeline::SortClips",
"clips.size()", clips.size(),
"", -1,
"", -1,
"", -1,
"", -1,
"", -1);
614 void Timeline::sort_effects()
623 ZmqLogger::Instance()->
AppendDebugMethod(
"Timeline::Close",
"", -1,
"", -1,
"", -1,
"", -1,
"", -1,
"", -1);
626 list<Clip*>::iterator clip_itr;
627 for (clip_itr=clips.begin(); clip_itr != clips.end(); ++clip_itr)
630 Clip *clip = (*clip_itr);
633 update_open_clips(clip,
false);
640 final_cache->
Clear();
650 bool Timeline::isEqual(
double a,
double b)
652 return fabs(a - b) < 0.000001;
659 if (requested_frame < 1)
663 std::shared_ptr<Frame> frame;
664 #pragma omp critical (T_GetFrame)
665 frame = final_cache->
GetFrame(requested_frame);
668 ZmqLogger::Instance()->
AppendDebugMethod(
"Timeline::GetFrame (Cached frame found)",
"requested_frame", requested_frame,
"", -1,
"", -1,
"", -1,
"", -1,
"", -1);
680 throw ReaderClosed(
"The Timeline is closed. Call Open() before calling this method.",
"");
683 #pragma omp critical (T_GetFrame)
684 frame = final_cache->
GetFrame(requested_frame);
687 ZmqLogger::Instance()->
AppendDebugMethod(
"Timeline::GetFrame (Cached frame found on 2nd look)",
"requested_frame", requested_frame,
"", -1,
"", -1,
"", -1,
"", -1,
"", -1);
698 vector<Clip*> nearby_clips;
699 #pragma omp critical (T_GetFrame)
700 nearby_clips = find_intersecting_clips(requested_frame, minimum_frames,
true);
704 omp_set_nested(
true);
707 ZmqLogger::Instance()->
AppendDebugMethod(
"Timeline::GetFrame",
"requested_frame", requested_frame,
"minimum_frames", minimum_frames,
"OPEN_MP_NUM_PROCESSORS",
OPEN_MP_NUM_PROCESSORS,
"", -1,
"", -1,
"", -1);
711 for (int64_t frame_number = requested_frame; frame_number < requested_frame + minimum_frames; frame_number++)
714 for (
int clip_index = 0; clip_index < nearby_clips.size(); clip_index++)
717 Clip *clip = nearby_clips[clip_index];
721 bool does_clip_intersect = (clip_start_position <= frame_number && clip_end_position >= frame_number);
722 if (does_clip_intersect)
726 long clip_frame_number = frame_number - clip_start_position + clip_start_frame;
736 #pragma omp for ordered firstprivate(nearby_clips, requested_frame, minimum_frames) schedule(static,1)
737 for (int64_t frame_number = requested_frame; frame_number < requested_frame + minimum_frames; frame_number++)
740 ZmqLogger::Instance()->
AppendDebugMethod(
"Timeline::GetFrame (processing frame)",
"frame_number", frame_number,
"omp_get_thread_num()", omp_get_thread_num(),
"", -1,
"", -1,
"", -1,
"", -1);
747 #pragma omp critical (T_GetFrame)
749 new_frame->AddAudioSilence(samples_in_frame);
755 ZmqLogger::Instance()->
AppendDebugMethod(
"Timeline::GetFrame (Adding solid color)",
"frame_number", frame_number,
"info.width",
info.
width,
"info.height",
info.
height,
"", -1,
"", -1,
"", -1);
763 ZmqLogger::Instance()->
AppendDebugMethod(
"Timeline::GetFrame (Loop through clips)",
"frame_number", frame_number,
"clips.size()", clips.size(),
"nearby_clips.size()", nearby_clips.size(),
"", -1,
"", -1,
"", -1);
766 for (
int clip_index = 0; clip_index < nearby_clips.size(); clip_index++)
769 Clip *clip = nearby_clips[clip_index];
773 bool does_clip_intersect = (clip_start_position <= frame_number && clip_end_position >= frame_number);
776 ZmqLogger::Instance()->
AppendDebugMethod(
"Timeline::GetFrame (Does clip intersect)",
"frame_number", frame_number,
"clip->Position()", clip->
Position(),
"clip->Duration()", clip->
Duration(),
"does_clip_intersect", does_clip_intersect,
"", -1,
"", -1);
779 if (does_clip_intersect)
782 bool is_top_clip =
true;
783 float max_volume = 0.0;
784 for (
int top_clip_index = 0; top_clip_index < nearby_clips.size(); top_clip_index++)
786 Clip *nearby_clip = nearby_clips[top_clip_index];
790 long nearby_clip_frame_number = frame_number - nearby_clip_start_position + nearby_clip_start_frame;
793 if (clip->
Id() != nearby_clip->
Id() && clip->
Layer() == nearby_clip->
Layer() &&
794 nearby_clip_start_position <= frame_number && nearby_clip_end_position >= frame_number &&
795 nearby_clip_start_position > clip_start_position && is_top_clip ==
true) {
800 if (nearby_clip->
Reader() && nearby_clip->
Reader()->info.has_audio &&
802 nearby_clip_start_position <= frame_number && nearby_clip_end_position >= frame_number) {
803 max_volume += nearby_clip->
volume.
GetValue(nearby_clip_frame_number);
809 long clip_frame_number = frame_number - clip_start_position + clip_start_frame;
812 ZmqLogger::Instance()->
AppendDebugMethod(
"Timeline::GetFrame (Calculate clip's frame #)",
"clip->Position()", clip->
Position(),
"clip->Start()", clip->
Start(),
"info.fps.ToFloat()",
info.
fps.
ToFloat(),
"clip_frame_number", clip_frame_number,
"", -1,
"", -1);
815 add_layer(new_frame, clip, clip_frame_number, frame_number, is_top_clip, max_volume);
819 ZmqLogger::Instance()->
AppendDebugMethod(
"Timeline::GetFrame (clip does not intersect)",
"frame_number", frame_number,
"does_clip_intersect", does_clip_intersect,
"", -1,
"", -1,
"", -1,
"", -1);
824 ZmqLogger::Instance()->
AppendDebugMethod(
"Timeline::GetFrame (Add frame to cache)",
"frame_number", frame_number,
"info.width",
info.
width,
"info.height",
info.
height,
"", -1,
"", -1,
"", -1);
829 new_frame->SetFrameNumber(frame_number);
832 final_cache->
Add(new_frame);
839 ZmqLogger::Instance()->
AppendDebugMethod(
"Timeline::GetFrame (end parallel region)",
"requested_frame", requested_frame,
"omp_get_thread_num()", omp_get_thread_num(),
"", -1,
"", -1,
"", -1,
"", -1);
842 return final_cache->
GetFrame(requested_frame);
848 vector<Clip*> Timeline::find_intersecting_clips(int64_t requested_frame,
int number_of_frames,
bool include)
851 vector<Clip*> matching_clips;
854 float min_requested_frame = requested_frame;
855 float max_requested_frame = requested_frame + (number_of_frames - 1);
861 list<Clip*>::iterator clip_itr;
862 for (clip_itr=clips.begin(); clip_itr != clips.end(); ++clip_itr)
865 Clip *clip = (*clip_itr);
871 bool does_clip_intersect =
872 (clip_start_position <= min_requested_frame || clip_start_position <= max_requested_frame) &&
873 (clip_end_position >= min_requested_frame || clip_end_position >= max_requested_frame);
876 ZmqLogger::Instance()->
AppendDebugMethod(
"Timeline::find_intersecting_clips (Is clip near or intersecting)",
"requested_frame", requested_frame,
"min_requested_frame", min_requested_frame,
"max_requested_frame", max_requested_frame,
"clip->Position()", clip->
Position(),
"does_clip_intersect", does_clip_intersect,
"", -1);
879 #pragma omp critical (reader_lock)
880 update_open_clips(clip, does_clip_intersect);
883 if (does_clip_intersect && include)
885 matching_clips.push_back(clip);
887 else if (!does_clip_intersect && !include)
889 matching_clips.push_back(clip);
894 return matching_clips;
900 final_cache = new_cache;
915 root[
"type"] =
"Timeline";
922 root[
"clips"] = Json::Value(Json::arrayValue);
925 list<Clip*>::iterator clip_itr;
926 for (clip_itr=clips.begin(); clip_itr != clips.end(); ++clip_itr)
929 Clip *existing_clip = (*clip_itr);
930 root[
"clips"].append(existing_clip->
JsonValue());
934 root[
"effects"] = Json::Value(Json::arrayValue);
937 list<EffectBase*>::iterator effect_itr;
938 for (effect_itr=effects.begin(); effect_itr != effects.end(); ++effect_itr)
942 root[
"effects"].append(existing_effect->
JsonValue());
958 bool success = reader.parse( value, root );
961 throw InvalidJSON(
"JSON could not be parsed (or is invalid)",
"");
971 throw InvalidJSON(
"JSON is invalid (missing keys or invalid data types)",
"");
979 bool was_open = is_open;
985 if (!root[
"clips"].isNull()) {
990 for (
int x = 0; x < root[
"clips"].size(); x++) {
992 Json::Value existing_clip = root[
"clips"][x];
1005 if (!root[
"effects"].isNull()) {
1010 for (
int x = 0; x < root[
"effects"].size(); x++) {
1012 Json::Value existing_effect = root[
"effects"][x];
1017 if (!existing_effect[
"type"].isNull()) {
1031 if (!root[
"duration"].isNull()) {
1050 Json::Reader reader;
1051 bool success = reader.parse( value, root );
1052 if (!success || !root.isArray())
1054 throw InvalidJSON(
"JSON could not be parsed (or is invalid).",
"");
1059 for (
int x = 0; x < root.size(); x++) {
1061 Json::Value change = root[x];
1062 string root_key = change[
"key"][(uint)0].asString();
1065 if (root_key ==
"clips")
1067 apply_json_to_clips(change);
1069 else if (root_key ==
"effects")
1071 apply_json_to_effects(change);
1075 apply_json_to_timeline(change);
1082 throw InvalidJSON(
"JSON is invalid (missing keys or invalid data types)",
"");
1087 void Timeline::apply_json_to_clips(Json::Value change) {
1090 string change_type = change[
"type"].asString();
1091 string clip_id =
"";
1092 Clip *existing_clip = NULL;
1095 for (
int x = 0; x < change[
"key"].size(); x++) {
1097 Json::Value key_part = change[
"key"][x];
1099 if (key_part.isObject()) {
1101 if (!key_part[
"id"].isNull()) {
1103 clip_id = key_part[
"id"].asString();
1106 list<Clip*>::iterator clip_itr;
1107 for (clip_itr=clips.begin(); clip_itr != clips.end(); ++clip_itr)
1110 Clip *c = (*clip_itr);
1111 if (c->
Id() == clip_id) {
1123 if (existing_clip && change[
"key"].size() == 4 && change[
"key"][2] ==
"effects")
1126 Json::Value key_part = change[
"key"][3];
1128 if (key_part.isObject()) {
1130 if (!key_part[
"id"].isNull())
1133 string effect_id = key_part[
"id"].asString();
1136 list<EffectBase*> effect_list = existing_clip->
Effects();
1137 list<EffectBase*>::iterator effect_itr;
1138 for (effect_itr=effect_list.begin(); effect_itr != effect_list.end(); ++effect_itr)
1142 if (e->
Id() == effect_id) {
1144 apply_json_to_effects(change, e);
1149 final_cache->
Remove(new_starting_frame - 8, new_ending_frame + 8);
1159 if (!change[
"value"].isArray() && !change[
"value"][
"position"].isNull()) {
1160 int64_t new_starting_frame = (change[
"value"][
"position"].asDouble() *
info.
fps.
ToDouble()) + 1;
1161 int64_t new_ending_frame = ((change[
"value"][
"position"].asDouble() + change[
"value"][
"end"].asDouble() - change[
"value"][
"start"].asDouble()) *
info.
fps.
ToDouble()) + 1;
1162 final_cache->
Remove(new_starting_frame - 8, new_ending_frame + 8);
1166 if (change_type ==
"insert") {
1174 apply_mapper_to_clip(clip);
1176 }
else if (change_type ==
"update") {
1179 if (existing_clip) {
1184 final_cache->
Remove(old_starting_frame - 8, old_ending_frame + 8);
1187 if (existing_clip->
Reader() && existing_clip->
Reader()->GetCache())
1188 existing_clip->
Reader()->GetCache()->Remove(old_starting_frame - 8, old_ending_frame + 8);
1194 apply_mapper_to_clip(existing_clip);
1197 }
else if (change_type ==
"delete") {
1200 if (existing_clip) {
1205 final_cache->
Remove(old_starting_frame - 8, old_ending_frame + 8);
1216 void Timeline::apply_json_to_effects(Json::Value change) {
1219 string change_type = change[
"type"].asString();
1223 for (
int x = 0; x < change[
"key"].size(); x++) {
1225 Json::Value key_part = change[
"key"][x];
1227 if (key_part.isObject()) {
1229 if (!key_part[
"id"].isNull())
1232 string effect_id = key_part[
"id"].asString();
1235 list<EffectBase*>::iterator effect_itr;
1236 for (effect_itr=effects.begin(); effect_itr != effects.end(); ++effect_itr)
1240 if (e->
Id() == effect_id) {
1241 existing_effect =
e;
1251 if (existing_effect || change_type ==
"insert")
1253 apply_json_to_effects(change, existing_effect);
1257 void Timeline::apply_json_to_effects(Json::Value change,
EffectBase* existing_effect) {
1260 string change_type = change[
"type"].asString();
1263 if (!change[
"value"].isArray() && !change[
"value"][
"position"].isNull()) {
1264 int64_t new_starting_frame = (change[
"value"][
"position"].asDouble() *
info.
fps.
ToDouble()) + 1;
1265 int64_t new_ending_frame = ((change[
"value"][
"position"].asDouble() + change[
"value"][
"end"].asDouble() - change[
"value"][
"start"].asDouble()) *
info.
fps.
ToDouble()) + 1;
1266 final_cache->
Remove(new_starting_frame - 8, new_ending_frame + 8);
1270 if (change_type ==
"insert") {
1273 string effect_type = change[
"value"][
"type"].asString();
1279 if (e =
EffectInfo().CreateEffect(effect_type)) {
1288 }
else if (change_type ==
"update") {
1291 if (existing_effect) {
1296 final_cache->
Remove(old_starting_frame - 8, old_ending_frame + 8);
1302 }
else if (change_type ==
"delete") {
1305 if (existing_effect) {
1310 final_cache->
Remove(old_starting_frame - 8, old_ending_frame + 8);
1320 void Timeline::apply_json_to_timeline(Json::Value change) {
1323 string change_type = change[
"type"].asString();
1324 string root_key = change[
"key"][(uint)0].asString();
1325 string sub_key =
"";
1326 if (change[
"key"].size() >= 2)
1327 sub_key = change[
"key"][(uint)1].asString();
1330 final_cache->
Clear();
1333 if (change_type ==
"insert" || change_type ==
"update") {
1337 if (root_key ==
"color")
1340 else if (root_key ==
"viewport_scale")
1343 else if (root_key ==
"viewport_x")
1346 else if (root_key ==
"viewport_y")
1349 else if (root_key ==
"duration") {
1354 else if (root_key ==
"width")
1357 else if (root_key ==
"height")
1360 else if (root_key ==
"fps" && sub_key ==
"" && change[
"value"].isObject()) {
1362 if (!change[
"value"][
"num"].isNull())
1363 info.
fps.
num = change[
"value"][
"num"].asInt();
1364 if (!change[
"value"][
"den"].isNull())
1365 info.
fps.
den = change[
"value"][
"den"].asInt();
1367 else if (root_key ==
"fps" && sub_key ==
"num")
1370 else if (root_key ==
"fps" && sub_key ==
"den")
1373 else if (root_key ==
"sample_rate")
1376 else if (root_key ==
"channels")
1379 else if (root_key ==
"channel_layout")
1386 throw InvalidJSONKey(
"JSON change key is invalid", change.toStyledString());
1389 }
else if (change[
"type"].asString() ==
"delete") {
1393 if (root_key ==
"color") {
1399 else if (root_key ==
"viewport_scale")
1401 else if (root_key ==
"viewport_x")
1403 else if (root_key ==
"viewport_y")
1407 throw InvalidJSONKey(
"JSON change key is invalid", change.toStyledString());
1420 final_cache->
Clear();
1423 list<Clip*>::iterator clip_itr;
1424 for (clip_itr=clips.begin(); clip_itr != clips.end(); ++clip_itr)
1427 Clip *clip = (*clip_itr);
1430 clip->
Reader()->GetCache()->Clear();
1433 if (clip->
Reader()->Name() ==
"FrameMapper") {
void SetJsonValue(Json::Value root)
Load Json::JsonValue into this object.
Display the timeline's frame number.
void Close()
Close the internal reader.
string Json()
Get and Set JSON methods.
int MAX_HEIGHT
Maximum height for image data (useful for optimizing for a smaller preview or render) ...
Json::Value JsonValue()
Generate Json::JsonValue for this object.
int num
Numerator for the fraction.
Keyframe scale_y
Curve representing the vertical scaling in percent (0 to 1)
ReaderBase * Reader()
Get the current reader.
CriticalSection getFrameCriticalSection
Section lock for multiple threads.
This abstract class is the base class, used by all effects in libopenshot.
EffectBase * CreateEffect(string effect_type)
Align clip to the right of its parent (middle aligned)
Keyframe green
Curve representing the green value (0 - 255)
Keyframe viewport_scale
Curve representing the scale of the viewport (0 to 100)
Align clip to the bottom right of its parent.
void SetCache(CacheBase *new_cache)
Set the cache object used by this reader.
Json::Value JsonValue()
Generate Json::JsonValue for this object.
ChannelLayout channel_layout
The channel layout (mono, stereo, 5 point surround, etc...)
GravityType gravity
The gravity of a clip determines where it snaps to its parent.
Keyframe alpha
Curve representing the alpha value (0 - 255)
int width
The width of the video (in pixels)
Keyframe volume
Curve representing the volume (0 to 1)
virtual std::shared_ptr< Frame > GetFrame(std::shared_ptr< Frame > frame, int64_t frame_number)=0
This method is required for all derived classes of EffectBase, and returns a modified openshot::Frame...
float ToFloat()
Return this fraction as a float (i.e. 1/2 = 0.5)
Keyframe red
Curve representing the red value (0 - 255)
float duration
Length of time (in seconds)
string GetColorHex(int64_t frame_number)
Get the HEX value of a color at a specific frame.
Json::Value JsonValue()
Generate Json::JsonValue for this object.
void SetMaxBytesFromInfo(int64_t number_of_frames, int width, int height, int sample_rate, int channels)
Set maximum bytes to a different amount based on a ReaderInfo struct.
Scale the clip until both height and width fill the canvas (cropping the overlap) ...
Evenly divide the overlapping clips' volume keyframes, so that the sum does not exceed 100%...
Keyframe viewport_y
Curve representing the y coordinate for the viewport.
Fraction Reciprocal()
Return the reciprocal as a Fraction.
This abstract class is the base class, used by all readers in libopenshot.
int Layer()
Get layer of clip on timeline (lower number is covered by higher numbers)
#define OPEN_MP_NUM_PROCESSORS
Keyframe has_audio
Override has_video and has_audio properties of clip (and their readers)
Exception when a reader is closed, and a frame is requested.
bool has_video
Determines if this file has a video stream.
Do not display the frame number.
std::shared_ptr< Frame > GetFrame(int64_t requested_frame)
Get an openshot::Frame object for a specific frame number of this timeline.
Color wave_color
Curve representing the color of the audio wave form.
Align clip to the top right of its parent.
virtual Json::Value JsonValue()=0
Generate Json::JsonValue for this object.
Align clip to the bottom left of its parent.
void SetJsonValue(Json::Value root)
Load Json::JsonValue into this object.
void SetJsonValue(Json::Value root)
Load Json::JsonValue into this object.
Exception for missing JSON Change key.
Keyframe location_x
Curve representing the relative X position in percent based on the gravity (-1 to 1) ...
Keyframe location_y
Curve representing the relative Y position in percent based on the gravity (-1 to 1) ...
bool has_audio
Determines if this file has an audio stream.
This class represents a clip (used to arrange readers on the timeline)
void ChangeMapping(Fraction target_fps, PulldownType pulldown, int target_sample_rate, int target_channels, ChannelLayout target_channel_layout)
Change frame rate or audio mapping details.
int MAX_WIDTH
Maximum width for image data (useful for optimizing for a smaller preview or render) ...
Keyframe blue
Curve representing the blue value (0 - 255)
virtual std::shared_ptr< Frame > GetFrame(int64_t frame_number)=0
Get a frame from the cache.
Keyframe shear_x
Curve representing X shear angle in degrees (-45.0=left, 45.0=right)
bool Waveform()
Waveform property.
int64_t video_length
The number of frames in the video stream.
ScaleType scale
The scale determines how a clip should be resized to fit its parent.
int height
The height of the video (in pixels)
Align clip to the bottom center of its parent.
virtual void Remove(int64_t frame_number)=0
Remove a specific frame.
void SetMaxSize(int width, int height)
Exception for files that can not be found or opened.
string Id()
Get basic properties.
Keyframe channel_filter
Audio channel filter and mappings.
void ClearAllCache()
Clear all cache for this timeline instance, and all clips, mappers, and readers under it...
float Position()
Get position on timeline (in seconds)
static CrashHandler * Instance()
void ApplyMapperToClips()
Apply the timeline's framerate and samplerate to all clips.
void Reader(ReaderBase *new_reader)
Set the current reader.
list< EffectBase * > Effects()
Return the list of effects on the timeline.
void AppendDebugMethod(string method_name, string arg1_name, float arg1_value, string arg2_name, float arg2_value, string arg3_name, float arg3_value, string arg4_name, float arg4_value, string arg5_name, float arg5_value, string arg6_name, float arg6_value)
Append debug information.
FrameDisplayType display
The format to display the frame number (if any)
std::shared_ptr< Frame > GetFrame(int64_t requested_frame)
This class represents a fraction.
All cache managers in libopenshot are based on this CacheBase class.
Keyframe channel_mapping
A number representing an audio channel to output (only works when filtering a channel) ...
virtual void Add(std::shared_ptr< Frame > frame)=0
Add a Frame to the cache.
ChannelLayout
This enumeration determines the audio channel layout (such as stereo, mono, 5 point surround...
Align clip to the left of its parent (middle aligned)
void AddClip(Clip *clip)
Add an openshot::Clip to the timeline.
virtual Json::Value JsonValue()=0
Generate Json::JsonValue for this object.
virtual void SetJsonValue(Json::Value root)=0
Load Json::JsonValue into this object.
void Close()
Close the timeline reader (and any resources it was consuming)
int GetInt(int64_t index)
Get the rounded INT value at a specific index.
Keyframe rotation
Curve representing the rotation (0 to 360)
virtual void SetJsonValue(Json::Value root)=0
Load Json::JsonValue into this object.
Scale the clip until both height and width fill the canvas (distort to fit)
Display the clip's internal frame number.
vector< Point > Points
Vector of all Points.
ReaderInfo info
Information about the current media file.
Keyframe shear_y
Curve representing Y shear angle in degrees (-45.0=down, 45.0=up)
Fraction fps
Frames per second, as a fraction (i.e. 24/1 = 24 fps)
Fraction video_timebase
The video timebase determines how long each frame stays on the screen.
Exception for frames that are out of bounds.
This class creates a mapping between 2 different frame rates, applying a specific pull-down technique...
void Open()
Open the internal reader.
This class represents a color (used on the timeline and clips)
static ZmqLogger * Instance()
Create or get an instance of this logger singleton (invoke the class with this method) ...
Reduce volume by about 25%, and then mix (louder, but could cause pops if the sum exceeds 100%) ...
Align clip to the center of its parent (middle aligned)
void Open()
Open the reader (and start consuming resources)
void ApplyJsonDiff(string value)
Apply a special formatted JSON object, which represents a change to the timeline (add, update, delete) This is primarily designed to keep the timeline (and its child objects... such as clips and effects) in sync with another application... such as OpenShot Video Editor (http://www.openshot.org).
double GetValue(int64_t index)
Get the value at a specific index.
Display both the clip's and timeline's frame number.
Do not apply pull-down techniques, just repeat or skip entire frames.
virtual void Clear()=0
Clear the cache of all frames.
void RemoveClip(Clip *clip)
Remove an openshot::Clip from the timeline.
void RemoveEffect(EffectBase *effect)
Remove an effect from the timeline.
Exception for invalid JSON.
Keyframe alpha
Curve representing the alpha (1 to 0)
virtual CacheBase * GetCache()=0
Get the cache object used by this reader (note: not all readers use cache)
Keyframe viewport_x
Curve representing the x coordinate for the viewport.
void SetJsonValue(Json::Value root)
Load Json::JsonValue into this object.
Keyframe scale_x
Curve representing the horizontal scaling in percent (0 to 1)
static Settings * Instance()
Create or get an instance of this settings singleton (invoke the class with this method) ...
VolumeMixType mixing
What strategy should be followed when mixing audio with other clips.
Color color
Background color of timeline canvas.
Timeline(int width, int height, Fraction fps, int sample_rate, int channels, ChannelLayout channel_layout)
Default Constructor for the timeline (which sets the canvas width and height and FPS) ...
This class returns a listing of all effects supported by libopenshot.
Align clip to the top center of its parent.
void SetJson(string value)
Load JSON string into this object.
int den
Denominator for the fraction.
int channels
The number of audio channels used in the audio stream.
A Keyframe is a collection of Point instances, which is used to vary a number or property over time...
Scale the clip until either height or width fills the canvas (with no cropping)
void AddEffect(EffectBase *effect)
Add an effect to the timeline.
int GetSamplesPerFrame(Fraction fps, int sample_rate, int channels)
Calculate the # of samples per video frame (for the current frame number)
Json::Value JsonValue()
Generate Json::JsonValue for this object.
float Duration()
Get the length of this clip (in seconds)
This class is a memory-based cache manager for Frame objects.
float Start()
Get start position (in seconds) of clip (trim start of video)
double ToDouble()
Return this fraction as a double (i.e. 1/2 = 0.5)
int sample_rate
The number of audio samples per second (44100 is a common sample rate)
Exception when too many seek attempts happen.