// OpenShot Library | libopenshot 0.3.1
// Timeline.cpp
// Copyright (c) 2008-2019 OpenShot Studios, LLC
//
// SPDX-License-Identifier: LGPL-3.0-or-later

#include "Timeline.h"

#include "CacheBase.h"
#include "CacheDisk.h"
#include "CacheMemory.h"
#include "CrashHandler.h"
#include "FrameMapper.h"
#include "Exceptions.h"

#include <QDir>
#include <QFile>
#include <QFileInfo>
#include <QRegularExpression>

using namespace openshot;

// Default Constructor for the timeline (which sets the canvas width and height)
Timeline::Timeline(int width, int height, Fraction fps, int sample_rate, int channels, ChannelLayout channel_layout) :
    is_open(false), auto_map_clips(true), managed_cache(true), path(""),
    max_concurrent_frames(OPEN_MP_NUM_PROCESSORS), max_time(0.0)
{
    // Create CrashHandler and Attach (in case of errors)
    CrashHandler::Instance();

    // Init viewport size (curve based, because it can be animated)
    viewport_scale = Keyframe(100.0);
    viewport_x = Keyframe(0.0);
    viewport_y = Keyframe(0.0);

    // Init background color
    color.red = Keyframe(0.0);
    color.green = Keyframe(0.0);
    color.blue = Keyframe(0.0);

    // Init FileInfo struct (clear all values)
    info.width = width;
    info.height = height;
    preview_width = info.width;
    preview_height = info.height;
    info.fps = fps;
    info.sample_rate = sample_rate;
    info.channels = channels;
    info.channel_layout = channel_layout;
    info.video_timebase = fps.Reciprocal();
    info.duration = 60 * 30; // 30 minute default duration
    info.has_audio = true;
    info.has_video = true;
    info.video_length = info.fps.ToFloat() * info.duration;
    info.display_ratio = openshot::Fraction(width, height);
    info.display_ratio.Reduce();
    info.pixel_ratio = openshot::Fraction(1, 1);
    info.acodec = "openshot::timeline";
    info.vcodec = "openshot::timeline";

    // Init max image size
    SetMaxSize(info.width, info.height);

    // Init cache
    final_cache = new CacheMemory();
    final_cache->SetMaxBytesFromInfo(max_concurrent_frames * 4, info.width, info.height, info.sample_rate, info.channels);
}
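
// Illustrative usage (not part of the original file): a minimal sketch of
// constructing an in-memory timeline with this constructor; the chosen
// resolution, 30/1 frame rate, and stereo layout are example values.
//
//     openshot::Timeline t(1920, 1080, openshot::Fraction(30, 1),
//                          44100, 2, openshot::LAYOUT_STEREO);
//     t.Open();                   // start consuming resources
//     auto frame = t.GetFrame(1); // blank/black frame until clips are added
//     t.Close();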

// Delegating constructor that copies parameters from a provided ReaderInfo
Timeline::Timeline(const ReaderInfo info) : Timeline::Timeline(
    info.width, info.height, info.fps, info.sample_rate,
    info.channels, info.channel_layout) {}

// Constructor for the timeline (which loads a JSON structure from a file path, and initializes a timeline)
Timeline::Timeline(const std::string& projectPath, bool convert_absolute_paths) :
    is_open(false), auto_map_clips(true), managed_cache(true), path(projectPath),
    max_concurrent_frames(OPEN_MP_NUM_PROCESSORS), max_time(0.0) {

    // Create CrashHandler and Attach (in case of errors)
    CrashHandler::Instance();

    // Init final cache as NULL (will be created after loading json)
    final_cache = NULL;

    // Init viewport size (curve based, because it can be animated)
    viewport_scale = Keyframe(100.0);
    viewport_x = Keyframe(0.0);
    viewport_y = Keyframe(0.0);

    // Init background color
    color.red = Keyframe(0.0);
    color.green = Keyframe(0.0);
    color.blue = Keyframe(0.0);

    // Check if path exists
    QFileInfo filePath(QString::fromStdString(path));
    if (!filePath.exists()) {
        throw InvalidFile("File could not be opened.", path);
    }

    // Check OpenShot Install Path exists
    Settings *s = Settings::Instance();
    QDir openshotPath(QString::fromStdString(s->PATH_OPENSHOT_INSTALL));
    if (!openshotPath.exists()) {
        throw InvalidFile("PATH_OPENSHOT_INSTALL could not be found.", s->PATH_OPENSHOT_INSTALL);
    }
    QDir openshotTransPath(openshotPath.filePath("transitions"));
    if (!openshotTransPath.exists()) {
        throw InvalidFile("PATH_OPENSHOT_INSTALL/transitions could not be found.", openshotTransPath.path().toStdString());
    }

    // Determine asset path
    QString asset_name = filePath.baseName().left(30) + "_assets";
    QDir asset_folder(filePath.dir().filePath(asset_name));
    if (!asset_folder.exists()) {
        // Create directory if needed
        asset_folder.mkpath(".");
    }

    // Load UTF-8 project file into QString
    QFile projectFile(QString::fromStdString(path));
    projectFile.open(QFile::ReadOnly);
    QString projectContents = QString::fromUtf8(projectFile.readAll());

    // Convert all relative paths into absolute paths (if requested)
    if (convert_absolute_paths) {

        // Find all "image" or "path" references in the JSON (using a regex). We must loop
        // through the match results ourselves, because this kind of path matching is not
        // possible with the QString::replace() function alone.
        QRegularExpression allPathsRegex(QStringLiteral("\"(image|path)\":.*?\"(.*?)\""));
        std::vector<QRegularExpressionMatch> matchedPositions;
        QRegularExpressionMatchIterator i = allPathsRegex.globalMatch(projectContents);
        while (i.hasNext()) {
            QRegularExpressionMatch match = i.next();
            if (match.hasMatch()) {
                // Push all match objects into a vector (so we can reverse them later)
                matchedPositions.push_back(match);
            }
        }

        // Reverse the matches (bottom of file to top, so our replacements don't break our match positions)
        std::vector<QRegularExpressionMatch>::reverse_iterator itr;
        for (itr = matchedPositions.rbegin(); itr != matchedPositions.rend(); itr++) {
            QRegularExpressionMatch match = *itr;
            QString relativeKey = match.captured(1); // image or path
            QString relativePath = match.captured(2); // relative file path
            QString absolutePath = "";

            // Find absolute path of each path/image reference (including special replacements of @assets and @transitions)
            if (relativePath.startsWith("@assets")) {
                absolutePath = QFileInfo(asset_folder.absoluteFilePath(relativePath.replace("@assets", "."))).canonicalFilePath();
            } else if (relativePath.startsWith("@transitions")) {
                absolutePath = QFileInfo(openshotTransPath.absoluteFilePath(relativePath.replace("@transitions", "."))).canonicalFilePath();
            } else {
                absolutePath = QFileInfo(filePath.absoluteDir().absoluteFilePath(relativePath)).canonicalFilePath();
            }

            // Replace path in JSON content, if an absolute path was successfully found
            if (!absolutePath.isEmpty()) {
                projectContents.replace(match.capturedStart(0), match.capturedLength(0), "\"" + relativeKey + "\": \"" + absolutePath + "\"");
            }
        }
        // Clear matches
        matchedPositions.clear();
    }

    // Set JSON of project
    SetJson(projectContents.toStdString());

    // Calculate valid duration and set has_audio and has_video
    // based on content inside this Timeline's clips.
    float calculated_duration = 0.0;
    for (auto clip : clips)
    {
        float clip_last_frame = clip->Position() + clip->Duration();
        if (clip_last_frame > calculated_duration)
            calculated_duration = clip_last_frame;
        if (clip->Reader() && clip->Reader()->info.has_audio)
            info.has_audio = true;
        if (clip->Reader() && clip->Reader()->info.has_video)
            info.has_video = true;
    }
    info.video_length = calculated_duration * info.fps.ToFloat();
    info.duration = calculated_duration;

    // Init FileInfo settings
    info.acodec = "openshot::timeline";
    info.vcodec = "openshot::timeline";
    info.has_video = true;
    info.has_audio = true;

    // Init max image size
    SetMaxSize(info.width, info.height);

    // Init cache
    final_cache = new CacheMemory();
    final_cache->SetMaxBytesFromInfo(max_concurrent_frames * 4, info.width, info.height, info.sample_rate, info.channels);
}
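
// Illustrative usage (not part of the original file): loading an existing
// OpenShot project; the file path is hypothetical. With the second argument
// true, relative "@assets"/"@transitions" references are rewritten to
// absolute paths before the JSON is parsed.
//
//     openshot::Timeline t("/home/user/project.osp", true);
//     t.Open();
//     double seconds = t.GetMaxTime(); // end of the latest clip/effect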

Timeline::~Timeline() {
    if (is_open) {
        // Auto Close if not already
        Close();
    }

    // Remove all clips, effects, and frame mappers
    Clear();

    // Destroy previous cache (if managed by timeline)
    if (managed_cache && final_cache) {
        delete final_cache;
        final_cache = NULL;
    }
}

// Add a pointer to a tracked object (i.e. TrackedObjectBBox) to the tracked_objects map
void Timeline::AddTrackedObject(std::shared_ptr<openshot::TrackedObjectBase> trackedObject){

    // Search for the tracked object on the map
    auto iterator = tracked_objects.find(trackedObject->Id());

    if (iterator != tracked_objects.end()){
        // Tracked object's id is already present on the map; overwrite it
        iterator->second = trackedObject;
    }
    else{
        // Tracked object's id is not present -> insert it on the map
        tracked_objects[trackedObject->Id()] = trackedObject;
    }

    return;
}

// Return a tracked object pointer by its ID
std::shared_ptr<openshot::TrackedObjectBase> Timeline::GetTrackedObject(std::string id) const{

    // Search for the tracked object on the map
    auto iterator = tracked_objects.find(id);

    if (iterator != tracked_objects.end()){
        // Id found, return the pointer to the tracked object
        std::shared_ptr<openshot::TrackedObjectBase> trackedObject = iterator->second;
        return trackedObject;
    }
    else {
        // Id not found, return a null pointer
        return nullptr;
    }
}

// Return the IDs of the tracked objects as a list of strings
std::list<std::string> Timeline::GetTrackedObjectsIds() const{

    // Create a list of strings
    std::list<std::string> trackedObjects_ids;

    // Iterate through the tracked_objects map
    for (auto const& it: tracked_objects){
        // Add the IDs to the list
        trackedObjects_ids.push_back(it.first);
    }

    return trackedObjects_ids;
}

#ifdef USE_OPENCV
// Return the trackedObject's properties as a JSON string
std::string Timeline::GetTrackedObjectValues(std::string id, int64_t frame_number) const {

    // Initialize the JSON object
    Json::Value trackedObjectJson;

    // Search for the tracked object on the map
    auto iterator = tracked_objects.find(id);

    if (iterator != tracked_objects.end())
    {
        // Id found; get the object pointer and cast it as a TrackedObjectBBox
        std::shared_ptr<TrackedObjectBBox> trackedObject = std::static_pointer_cast<TrackedObjectBBox>(iterator->second);

        if (trackedObject->ExactlyContains(frame_number)){
            // Get the trackedObject values for the requested frame
            BBox box = trackedObject->GetBox(frame_number);
            float x1 = box.cx - (box.width/2);
            float y1 = box.cy - (box.height/2);
            float x2 = box.cx + (box.width/2);
            float y2 = box.cy + (box.height/2);
            float rotation = box.angle;

            trackedObjectJson["x1"] = x1;
            trackedObjectJson["y1"] = y1;
            trackedObjectJson["x2"] = x2;
            trackedObjectJson["y2"] = y2;
            trackedObjectJson["rotation"] = rotation;

        } else {
            // Fall back to the trackedObject's first frame
            BBox box = trackedObject->BoxVec.begin()->second;
            float x1 = box.cx - (box.width/2);
            float y1 = box.cy - (box.height/2);
            float x2 = box.cx + (box.width/2);
            float y2 = box.cy + (box.height/2);
            float rotation = box.angle;

            trackedObjectJson["x1"] = x1;
            trackedObjectJson["y1"] = y1;
            trackedObjectJson["x2"] = x2;
            trackedObjectJson["y2"] = y2;
            trackedObjectJson["rotation"] = rotation;
        }

    }
    else {
        // Id not found, return all 0 values
        trackedObjectJson["x1"] = 0;
        trackedObjectJson["y1"] = 0;
        trackedObjectJson["x2"] = 0;
        trackedObjectJson["y2"] = 0;
        trackedObjectJson["rotation"] = 0;
    }

    return trackedObjectJson.toStyledString();
}
#endif
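
// Illustrative output (not part of the original file): the JSON string
// returned above has this shape, with corner coordinates derived from the
// box center/size; the numbers are hypothetical.
//
//     {"x1": 0.25, "y1": 0.30, "x2": 0.75, "y2": 0.80, "rotation": 0.0}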

// Add an openshot::Clip to the timeline
void Timeline::AddClip(Clip* clip)
{
    // Get lock (prevent getting frames while this happens)
    const std::lock_guard<std::recursive_mutex> guard(getFrameMutex);

    // Assign timeline to clip
    clip->ParentTimeline(this);

    // Clear cache of clip and nested reader (if any)
    if (clip->Reader() && clip->Reader()->GetCache())
        clip->Reader()->GetCache()->Clear();

    // All clips should be converted to the frame rate of this timeline
    if (auto_map_clips) {
        // Apply framemapper (or update existing framemapper)
        apply_mapper_to_clip(clip);
    }

    // Add clip to list
    clips.push_back(clip);

    // Sort clips
    sort_clips();
}
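
// Illustrative usage (not part of the original file): adding a media clip to
// an open timeline 't'; the file path is hypothetical. Because auto_map_clips
// defaults to true, the clip is wrapped in a FrameMapper matching the
// timeline's FPS and audio settings.
//
//     openshot::Clip* c = new openshot::Clip("/home/user/video.mp4");
//     c->Position(2.0); // seconds from the start of the timeline
//     c->Layer(1);      // higher layers are composited on top
//     t.AddClip(c);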

// Add an effect to the timeline
void Timeline::AddEffect(EffectBase* effect)
{
    // Assign timeline to effect
    effect->ParentTimeline(this);

    // Add effect to list
    effects.push_back(effect);

    // Sort effects
    sort_effects();
}

// Remove an effect from the timeline
void Timeline::RemoveEffect(EffectBase* effect)
{
    effects.remove(effect);

    // Delete effect object (if timeline allocated it)
    bool allocated = allocated_effects.count(effect);
    if (allocated) {
        allocated_effects.erase(effect);
        delete effect;
        effect = NULL;
    }

    // Sort effects
    sort_effects();
}

// Remove an openshot::Clip from the timeline
void Timeline::RemoveClip(Clip* clip)
{
    // Get lock (prevent getting frames while this happens)
    const std::lock_guard<std::recursive_mutex> guard(getFrameMutex);

    clips.remove(clip);

    // Delete clip object (if timeline allocated it)
    bool allocated = allocated_clips.count(clip);
    if (allocated) {
        allocated_clips.erase(clip);
        delete clip;
        clip = NULL;
    }

    // Sort clips
    sort_clips();
}

// Look up a clip
openshot::Clip* Timeline::GetClip(const std::string& id)
{
    // Find the matching clip (if any)
    for (const auto& clip : clips) {
        if (clip->Id() == id) {
            return clip;
        }
    }
    return nullptr;
}

// Look up a timeline effect
openshot::EffectBase* Timeline::GetEffect(const std::string& id)
{
    // Find the matching effect (if any)
    for (const auto& effect : effects) {
        if (effect->Id() == id) {
            return effect;
        }
    }
    return nullptr;
}

// Look up an effect attached to any clip
openshot::EffectBase* Timeline::GetClipEffect(const std::string& id)
{
    // Search all clips for matching effect ID
    for (const auto& clip : clips) {
        const auto e = clip->GetEffect(id);
        if (e != nullptr) {
            return e;
        }
    }
    return nullptr;
}

// Return the list of effects on all clips
std::list<openshot::EffectBase*> Timeline::ClipEffects() const {

    // Initialize the list
    std::list<EffectBase*> timelineEffectsList;

    // Loop through all clips
    for (const auto& clip : clips) {

        // Get the clip's list of effects
        std::list<EffectBase*> clipEffectsList = clip->Effects();

        // Append the clip's effects to the list
        timelineEffectsList.insert(timelineEffectsList.end(), clipEffectsList.begin(), clipEffectsList.end());
    }

    return timelineEffectsList;
}

// Compute the end time of the latest timeline element
double Timeline::GetMaxTime() {
    // Return cached max_time variable (threadsafe)
    return max_time;
}

// Compute the highest frame# based on the latest time and FPS
int64_t Timeline::GetMaxFrame() {
    double fps = info.fps.ToDouble();
    auto max_time = GetMaxTime();
    return std::round(max_time * fps) + 1;
}
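
// Illustrative arithmetic (not part of the original file): at 30 fps, a
// timeline whose latest element ends at max_time = 10.0 seconds yields
// GetMaxFrame() = round(10.0 * 30) + 1 = 301, since frame numbers are 1-based.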

// Apply a FrameMapper to a clip which matches the settings of this timeline
void Timeline::apply_mapper_to_clip(Clip* clip)
{
    // Determine type of reader
    ReaderBase* clip_reader = NULL;
    if (clip->Reader()->Name() == "FrameMapper")
    {
        // Get the existing reader
        clip_reader = (ReaderBase*) clip->Reader();

        // Update the mapping
        FrameMapper* clip_mapped_reader = (FrameMapper*) clip_reader;
        clip_mapped_reader->ChangeMapping(info.fps, PULLDOWN_NONE, info.sample_rate, info.channels, info.channel_layout);

    } else {

        // Create a new FrameMapper to wrap the current reader
        FrameMapper* mapper = new FrameMapper(clip->Reader(), info.fps, PULLDOWN_NONE, info.sample_rate, info.channels, info.channel_layout);
        allocated_frame_mappers.insert(mapper);
        clip_reader = (ReaderBase*) mapper;
    }

    // Update clip reader
    clip->Reader(clip_reader);
}

// Apply the timeline's framerate and samplerate to all clips
void Timeline::ApplyMapperToClips()
{
    // Clear all cached frames
    ClearAllCache();

    // Loop through all clips
    for (auto clip : clips)
    {
        // Apply framemapper (or update existing framemapper)
        apply_mapper_to_clip(clip);
    }
}

// Calculate time of a frame number, based on a framerate
double Timeline::calculate_time(int64_t number, Fraction rate)
{
    // Get float version of fps fraction
    double raw_fps = rate.ToFloat();

    // Return the time (in seconds) of this frame
    return double(number - 1) / raw_fps;
}
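
// Illustrative arithmetic (not part of the original file): with rate = 30/1,
// calculate_time(31, rate) = (31 - 1) / 30.0 = 1.0 second, i.e. frame 1 maps
// to t = 0.0 and each subsequent frame advances by 1/30 s.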

// Apply effects to the source frame (if any)
std::shared_ptr<Frame> Timeline::apply_effects(std::shared_ptr<Frame> frame, int64_t timeline_frame_number, int layer)
{
    // Debug output
    ZmqLogger::Instance()->AppendDebugMethod(
        "Timeline::apply_effects",
        "frame->number", frame->number,
        "timeline_frame_number", timeline_frame_number,
        "layer", layer);

    // Find Effects at this position and layer
    for (auto effect : effects)
    {
        // Does the effect intersect the current requested time?
        long effect_start_position = round(effect->Position() * info.fps.ToDouble()) + 1;
        long effect_end_position = round((effect->Position() + (effect->Duration())) * info.fps.ToDouble());

        bool does_effect_intersect = (effect_start_position <= timeline_frame_number && effect_end_position >= timeline_frame_number && effect->Layer() == layer);

        // Debug output
        ZmqLogger::Instance()->AppendDebugMethod(
            "Timeline::apply_effects (Does effect intersect)",
            "effect->Position()", effect->Position(),
            "does_effect_intersect", does_effect_intersect,
            "timeline_frame_number", timeline_frame_number,
            "layer", layer);

        // Effect is visible
        if (does_effect_intersect)
        {
            // Determine the frame needed for this effect (based on the position on the timeline)
            long effect_start_frame = (effect->Start() * info.fps.ToDouble()) + 1;
            long effect_frame_number = timeline_frame_number - effect_start_position + effect_start_frame;

            // Debug output
            ZmqLogger::Instance()->AppendDebugMethod(
                "Timeline::apply_effects (Process Effect)",
                "effect_frame_number", effect_frame_number,
                "does_effect_intersect", does_effect_intersect);

            // Apply the effect to this frame
            frame = effect->GetFrame(frame, effect_frame_number);
        }

    } // end effect loop

    // Return modified frame
    return frame;
}

// Get or generate a blank frame
std::shared_ptr<Frame> Timeline::GetOrCreateFrame(std::shared_ptr<Frame> background_frame, Clip* clip, int64_t number, openshot::TimelineInfoStruct* options)
{
    std::shared_ptr<Frame> new_frame;

    // Init some basic properties about this frame
    int samples_in_frame = Frame::GetSamplesPerFrame(number, info.fps, info.sample_rate, info.channels);

    try {
        // Debug output
        ZmqLogger::Instance()->AppendDebugMethod(
            "Timeline::GetOrCreateFrame (from reader)",
            "number", number,
            "samples_in_frame", samples_in_frame);

        // Attempt to get a frame (but this could fail if a reader has just been closed)
        new_frame = std::shared_ptr<Frame>(clip->GetFrame(background_frame, number, options));

        // Return real frame
        return new_frame;

    } catch (const ReaderClosed & e) {
        // ...
    } catch (const OutOfBoundsFrame & e) {
        // ...
    }

    // Debug output
    ZmqLogger::Instance()->AppendDebugMethod(
        "Timeline::GetOrCreateFrame (create blank)",
        "number", number,
        "samples_in_frame", samples_in_frame);

    // Create blank frame
    return new_frame;
}

// Process a new layer of video or audio
void Timeline::add_layer(std::shared_ptr<Frame> new_frame, Clip* source_clip, int64_t clip_frame_number, bool is_top_clip, float max_volume)
{
    // Create timeline options (with details about this current frame request)
    TimelineInfoStruct* options = new TimelineInfoStruct();
    options->is_top_clip = is_top_clip;

    // Get the clip's frame, composited on top of the current timeline frame
    std::shared_ptr<Frame> source_frame;
    source_frame = GetOrCreateFrame(new_frame, source_clip, clip_frame_number, options);
    delete options;

    // No frame found... so bail
    if (!source_frame)
        return;

    // Debug output
    ZmqLogger::Instance()->AppendDebugMethod(
        "Timeline::add_layer",
        "new_frame->number", new_frame->number,
        "clip_frame_number", clip_frame_number);

    /* COPY AUDIO - with correct volume */
    if (source_clip->Reader()->info.has_audio) {
        // Debug output
        ZmqLogger::Instance()->AppendDebugMethod(
            "Timeline::add_layer (Copy Audio)",
            "source_clip->Reader()->info.has_audio", source_clip->Reader()->info.has_audio,
            "source_frame->GetAudioChannelsCount()", source_frame->GetAudioChannelsCount(),
            "info.channels", info.channels,
            "clip_frame_number", clip_frame_number);

        if (source_frame->GetAudioChannelsCount() == info.channels && source_clip->has_audio.GetInt(clip_frame_number) != 0)
            for (int channel = 0; channel < source_frame->GetAudioChannelsCount(); channel++)
            {
                // Get volume from previous frame and this frame
                float previous_volume = source_clip->volume.GetValue(clip_frame_number - 1);
                float volume = source_clip->volume.GetValue(clip_frame_number);
                int channel_filter = source_clip->channel_filter.GetInt(clip_frame_number); // optional channel to filter (if not -1)
                int channel_mapping = source_clip->channel_mapping.GetInt(clip_frame_number); // optional channel to map this channel to (if not -1)

                // Apply volume mixing strategy
                if (source_clip->mixing == VOLUME_MIX_AVERAGE && max_volume > 1.0) {
                    // Don't allow this clip to exceed 100% (divide volume equally between all overlapping clips with volume)
                    previous_volume = previous_volume / max_volume;
                    volume = volume / max_volume;
                }
                else if (source_clip->mixing == VOLUME_MIX_REDUCE && max_volume > 1.0) {
                    // Reduce clip volume by a bit, hoping it will prevent exceeding 100% (but it is very possible it will)
                    previous_volume = previous_volume * 0.77;
                    volume = volume * 0.77;
                }

                // If channel filter enabled, check for correct channel (and skip non-matching channels)
                if (channel_filter != -1 && channel_filter != channel)
                    continue; // skip to next channel

                // If no volume on this frame or previous frame, do nothing
                if (previous_volume == 0.0 && volume == 0.0)
                    continue; // skip to next channel

                // If channel mapping disabled, just use the current channel
                if (channel_mapping == -1)
                    channel_mapping = channel;

                // Apply ramp to source frame (if needed)
                if (!isEqual(previous_volume, 1.0) || !isEqual(volume, 1.0))
                    source_frame->ApplyGainRamp(channel_mapping, 0, source_frame->GetAudioSamplesCount(), previous_volume, volume);

                // TODO: Improve FrameMapper (or Timeline) to always get the correct number of samples per frame.
                // Currently, the ResampleContext sometimes leaves behind a few samples for the next call, and the
                // number of samples returned is variable... and does not match the number expected.
                // This is a crude solution at best. =)
                if (new_frame->GetAudioSamplesCount() != source_frame->GetAudioSamplesCount()){
                    // Force timeline frame to match the source frame
                    new_frame->ResizeAudio(info.channels, source_frame->GetAudioSamplesCount(), info.sample_rate, info.channel_layout);
                }
                // Copy audio samples (and set initial volume). Mix samples with existing audio samples. The gains are added
                // together, so set the gains carefully to keep the sum from exceeding 1.0 (or audio distortion will happen).
                new_frame->AddAudio(false, channel_mapping, 0, source_frame->GetAudioSamples(channel), source_frame->GetAudioSamplesCount(), 1.0);
            }
        else
            // Debug output
            ZmqLogger::Instance()->AppendDebugMethod(
                "Timeline::add_layer (No Audio Copied - Wrong # of Channels)",
                "source_clip->Reader()->info.has_audio",
                source_clip->Reader()->info.has_audio,
                "source_frame->GetAudioChannelsCount()",
                source_frame->GetAudioChannelsCount(),
                "info.channels", info.channels,
                "clip_frame_number", clip_frame_number);
    }

    // Debug output
    ZmqLogger::Instance()->AppendDebugMethod(
        "Timeline::add_layer (Transform: Composite Image Layer: Completed)",
        "source_frame->number", source_frame->number,
        "new_frame->GetImage()->width()", new_frame->GetImage()->width(),
        "new_frame->GetImage()->height()", new_frame->GetImage()->height());
}
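
// Illustrative arithmetic (not part of the original file): with two fully
// overlapping clips at volume 1.0 each, max_volume = 2.0. Under
// VOLUME_MIX_AVERAGE each clip is scaled to 1.0 / 2.0 = 0.5, so the mixed sum
// stays at 100%; under VOLUME_MIX_REDUCE each is scaled by 0.77 (sum = 1.54),
// which reduces, but does not rule out, clipping.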

// Update the list of 'opened' clips
void Timeline::update_open_clips(Clip *clip, bool does_clip_intersect)
{
    // Get lock (prevent getting frames while this happens)
    const std::lock_guard<std::recursive_mutex> guard(getFrameMutex);

    // Debug output
    ZmqLogger::Instance()->AppendDebugMethod(
        "Timeline::update_open_clips (before)",
        "does_clip_intersect", does_clip_intersect,
        "closing_clips.size()", closing_clips.size(),
        "open_clips.size()", open_clips.size());

    // Is clip already in the list?
    bool clip_found = open_clips.count(clip);

    if (clip_found && !does_clip_intersect)
    {
        // Remove clip from 'opened' list, because it's closed now
        open_clips.erase(clip);

        // Close clip
        clip->Close();
    }
    else if (!clip_found && does_clip_intersect)
    {
        // Add clip to 'opened' list, because it's missing
        open_clips[clip] = clip;

        try {
            // Open the clip
            clip->Open();

        } catch (const InvalidFile & e) {
            // ...
        }
    }

    // Debug output
    ZmqLogger::Instance()->AppendDebugMethod(
        "Timeline::update_open_clips (after)",
        "does_clip_intersect", does_clip_intersect,
        "clip_found", clip_found,
        "closing_clips.size()", closing_clips.size(),
        "open_clips.size()", open_clips.size());
}

// Calculate the max duration (in seconds) of the timeline, based on all the clips, and cache the value
void Timeline::calculate_max_duration() {
    double last_clip = 0.0;
    double last_effect = 0.0;

    if (!clips.empty()) {
        const auto max_clip = std::max_element(
            clips.begin(), clips.end(), CompareClipEndFrames());
        last_clip = (*max_clip)->Position() + (*max_clip)->Duration();
    }
    if (!effects.empty()) {
        const auto max_effect = std::max_element(
            effects.begin(), effects.end(), CompareEffectEndFrames());
        last_effect = (*max_effect)->Position() + (*max_effect)->Duration();
    }
    max_time = std::max(last_clip, last_effect);
}

// Sort clips by position on the timeline
void Timeline::sort_clips()
{
    // Get lock (prevent getting frames while this happens)
    const std::lock_guard<std::recursive_mutex> guard(getFrameMutex);

    // Debug output
    ZmqLogger::Instance()->AppendDebugMethod(
        "Timeline::SortClips",
        "clips.size()", clips.size());

    // Sort clips
    clips.sort(CompareClips());

    // Calculate max timeline duration
    calculate_max_duration();
}

// Sort effects by position on the timeline
void Timeline::sort_effects()
{
    // Get lock (prevent getting frames while this happens)
    const std::lock_guard<std::recursive_mutex> guard(getFrameMutex);

    // Sort effects
    effects.sort(CompareEffects());

    // Calculate max timeline duration
    calculate_max_duration();
}

// Clear all clips from timeline
void Timeline::Clear()
{
    ZmqLogger::Instance()->AppendDebugMethod("Timeline::Clear");

    // Get lock (prevent getting frames while this happens)
    const std::lock_guard<std::recursive_mutex> guard(getFrameMutex);

    // Close all open clips
    for (auto clip : clips)
    {
        update_open_clips(clip, false);

        // Delete clip object (if timeline allocated it)
        bool allocated = allocated_clips.count(clip);
        if (allocated) {
            delete clip;
        }
    }
    // Clear all clips
    clips.clear();
    allocated_clips.clear();

    // Close all effects
    for (auto effect : effects)
    {
        // Delete effect object (if timeline allocated it)
        bool allocated = allocated_effects.count(effect);
        if (allocated) {
            delete effect;
        }
    }
    // Clear all effects
    effects.clear();
    allocated_effects.clear();

    // Delete all FrameMappers
    for (auto mapper : allocated_frame_mappers)
    {
        mapper->Reader(NULL);
        mapper->Close();
        delete mapper;
    }
    allocated_frame_mappers.clear();
}

// Close the reader (and any resources it was consuming)
void Timeline::Close()
{
    ZmqLogger::Instance()->AppendDebugMethod("Timeline::Close");

    // Get lock (prevent getting frames while this happens)
    const std::lock_guard<std::recursive_mutex> guard(getFrameMutex);

    // Close all open clips
    for (auto clip : clips)
    {
        // Open or Close this clip, based on if it's intersecting or not
        update_open_clips(clip, false);
    }

    // Mark timeline as closed
    is_open = false;

    // Clear all cache (deep clear, including nested Readers)
    ClearAllCache(true);
}

// Open the reader (and start consuming resources)
void Timeline::Open()
{
    is_open = true;
}

// Compare 2 floating point numbers for equality
bool Timeline::isEqual(double a, double b)
{
    return fabs(a - b) < 0.000001;
}

// Get an openshot::Frame object for a specific frame number of this reader.
std::shared_ptr<Frame> Timeline::GetFrame(int64_t requested_frame)
{
    // Adjust out of bounds frame number
    if (requested_frame < 1)
        requested_frame = 1;

    // Check cache
    std::shared_ptr<Frame> frame;
    frame = final_cache->GetFrame(requested_frame);
    if (frame) {
        // Debug output
        ZmqLogger::Instance()->AppendDebugMethod(
            "Timeline::GetFrame (Cached frame found)",
            "requested_frame", requested_frame);

        // Return cached frame
        return frame;
    }
    else
    {
        // Prevent async calls to the following code
        const std::lock_guard<std::recursive_mutex> lock(getFrameMutex);

        // Check cache a 2nd time (it might have been filled while waiting for the lock)
        std::shared_ptr<Frame> frame;
        frame = final_cache->GetFrame(requested_frame);
        if (frame) {
            // Debug output
            ZmqLogger::Instance()->AppendDebugMethod(
                "Timeline::GetFrame (Cached frame found on 2nd check)",
                "requested_frame", requested_frame);

            // Return cached frame
            return frame;
        } else {
            // Get a list of clips that intersect with the requested section of timeline
            // This also opens the readers for intersecting clips, and marks non-intersecting clips as 'needs closing'
            std::vector<Clip *> nearby_clips;
            nearby_clips = find_intersecting_clips(requested_frame, 1, true);

            // Debug output
            ZmqLogger::Instance()->AppendDebugMethod(
                "Timeline::GetFrame (processing frame)",
                "requested_frame", requested_frame,
                "omp_get_thread_num()", omp_get_thread_num());

            // Init some basic properties about this frame
            int samples_in_frame = Frame::GetSamplesPerFrame(requested_frame, info.fps, info.sample_rate, info.channels);

            // Create blank frame (which will become the requested frame)
            std::shared_ptr<Frame> new_frame(std::make_shared<Frame>(requested_frame, preview_width, preview_height, "#000000", samples_in_frame, info.channels));
            new_frame->AddAudioSilence(samples_in_frame);
            new_frame->SampleRate(info.sample_rate);
            new_frame->ChannelsLayout(info.channel_layout);

            // Debug output
            ZmqLogger::Instance()->AppendDebugMethod(
                "Timeline::GetFrame (Adding solid color)",
                "requested_frame", requested_frame,
                "info.width", info.width,
                "info.height", info.height);

            // Add Background Color to 1st layer (if animated or not black)
            if ((color.red.GetCount() > 1 || color.green.GetCount() > 1 || color.blue.GetCount() > 1) ||
                (color.red.GetValue(requested_frame) != 0.0 || color.green.GetValue(requested_frame) != 0.0 ||
                 color.blue.GetValue(requested_frame) != 0.0))
                new_frame->AddColor(preview_width, preview_height, color.GetColorHex(requested_frame));

            // Debug output
            ZmqLogger::Instance()->AppendDebugMethod(
                "Timeline::GetFrame (Loop through clips)",
                "requested_frame", requested_frame,
                "clips.size()", clips.size(),
                "nearby_clips.size()", nearby_clips.size());

            // Find Clips near this time
            for (auto clip : nearby_clips) {
                long clip_start_position = round(clip->Position() * info.fps.ToDouble()) + 1;
                long clip_end_position = round((clip->Position() + clip->Duration()) * info.fps.ToDouble());
                bool does_clip_intersect = (clip_start_position <= requested_frame && clip_end_position >= requested_frame);

                // Debug output
                ZmqLogger::Instance()->AppendDebugMethod(
                    "Timeline::GetFrame (Does clip intersect)",
                    "requested_frame", requested_frame,
                    "clip->Position()", clip->Position(),
                    "clip->Duration()", clip->Duration(),
                    "does_clip_intersect", does_clip_intersect);

                // Clip is visible
                if (does_clip_intersect) {
                    // Determine if clip is "top" clip on this layer (only happens when multiple clips are overlapping)
                    bool is_top_clip = true;
                    float max_volume = 0.0;
                    for (auto nearby_clip : nearby_clips) {
                        long nearby_clip_start_position = round(nearby_clip->Position() * info.fps.ToDouble()) + 1;
                        long nearby_clip_end_position = round((nearby_clip->Position() + nearby_clip->Duration()) * info.fps.ToDouble()) + 1;
                        long nearby_clip_start_frame = (nearby_clip->Start() * info.fps.ToDouble()) + 1;
                        long nearby_clip_frame_number = requested_frame - nearby_clip_start_position + nearby_clip_start_frame;

                        // Determine if top clip
                        if (clip->Id() != nearby_clip->Id() && clip->Layer() == nearby_clip->Layer() &&
                            nearby_clip_start_position <= requested_frame && nearby_clip_end_position >= requested_frame &&
                            nearby_clip_start_position > clip_start_position && is_top_clip == true) {
                            is_top_clip = false;
                        }

                        // Determine max volume of overlapping clips
                        if (nearby_clip->Reader() && nearby_clip->Reader()->info.has_audio &&
                            nearby_clip->has_audio.GetInt(nearby_clip_frame_number) != 0 &&
                            nearby_clip_start_position <= requested_frame && nearby_clip_end_position >= requested_frame) {
                            max_volume += nearby_clip->volume.GetValue(nearby_clip_frame_number);
                        }
                    }

                    // Determine the frame needed for this clip (based on the position on the timeline)
                    long clip_start_frame = (clip->Start() * info.fps.ToDouble()) + 1;
                    long clip_frame_number = requested_frame - clip_start_position + clip_start_frame;

                    // Debug output
                    ZmqLogger::Instance()->AppendDebugMethod(
                        "Timeline::GetFrame (Calculate clip's frame #)",
                        "clip->Position()", clip->Position(),
                        "clip->Start()", clip->Start(),
                        "info.fps.ToFloat()", info.fps.ToFloat(),
                        "clip_frame_number", clip_frame_number);

                    // Add clip's frame as layer
                    add_layer(new_frame, clip, clip_frame_number, is_top_clip, max_volume);

                } else {
                    // Debug output
                    ZmqLogger::Instance()->AppendDebugMethod(
                        "Timeline::GetFrame (clip does not intersect)",
                        "requested_frame", requested_frame,
                        "does_clip_intersect", does_clip_intersect);
                }

            } // end clip loop

            // Debug output
            ZmqLogger::Instance()->AppendDebugMethod(
                "Timeline::GetFrame (Add frame to cache)",
                "requested_frame", requested_frame,
                "info.width", info.width,
                "info.height", info.height);

            // Set frame # on mapped frame
            new_frame->SetFrameNumber(requested_frame);

            // Add final frame to cache
            final_cache->Add(new_frame);

            // Return frame (or blank frame)
            return new_frame;
        }
    }
}
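
// Illustrative usage (not part of the original file): rendering every timeline
// frame in order; the output path is hypothetical, and Frame::Save's scale
// argument is shown per libopenshot's Frame API.
//
//     for (int64_t n = 1; n <= t.GetMaxFrame(); ++n) {
//         auto f = t.GetFrame(n); // composited, then cached in final_cache
//         f->Save("/tmp/frame_" + std::to_string(n) + ".png", 1.0);
//     }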

// Find intersecting clips (or non intersecting clips)
std::vector<Clip*> Timeline::find_intersecting_clips(int64_t requested_frame, int number_of_frames, bool include)
{
    // Find matching clips
    std::vector<Clip*> matching_clips;

    // Calculate the frame range being requested
    float min_requested_frame = requested_frame;
    float max_requested_frame = requested_frame + (number_of_frames - 1);

    // Find Clips at this time
    for (auto clip : clips)
    {
        // Does clip intersect the current requested time?
        long clip_start_position = round(clip->Position() * info.fps.ToDouble()) + 1;
        long clip_end_position = round((clip->Position() + clip->Duration()) * info.fps.ToDouble()) + 1;

        bool does_clip_intersect =
            (clip_start_position <= min_requested_frame || clip_start_position <= max_requested_frame) &&
            (clip_end_position >= min_requested_frame || clip_end_position >= max_requested_frame);

        // Debug output
        ZmqLogger::Instance()->AppendDebugMethod(
            "Timeline::find_intersecting_clips (Is clip near or intersecting)",
            "requested_frame", requested_frame,
            "min_requested_frame", min_requested_frame,
            "max_requested_frame", max_requested_frame,
            "clip->Position()", clip->Position(),
            "does_clip_intersect", does_clip_intersect);

        // Open (or schedule for closing) this clip, based on if it's intersecting or not
        update_open_clips(clip, does_clip_intersect);

        // Clip is visible
        if (does_clip_intersect && include)
            // Add the intersecting clip
            matching_clips.push_back(clip);

        else if (!does_clip_intersect && !include)
            // Add the non-intersecting clip
            matching_clips.push_back(clip);

    } // end clip loop

    // Return list
    return matching_clips;
}

// Set the cache object used by this reader
void Timeline::SetCache(CacheBase* new_cache) {
    // Destroy previous cache (if managed by timeline)
    if (managed_cache && final_cache) {
        delete final_cache;
        final_cache = NULL;
        managed_cache = false;
    }

    // Set new cache
    final_cache = new_cache;
}
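
// Illustrative usage (not part of the original file): swapping in a
// disk-backed cache; the constructor arguments shown (folder, image format,
// quality, scale) are a sketch of CacheDisk's typical use, and the folder
// path is hypothetical. Once set, the timeline no longer owns or deletes the
// cache object (managed_cache becomes false).
//
//     auto* disk_cache = new openshot::CacheDisk("/tmp/ts-cache", "PNG", 1.0, 0.25);
//     t.SetCache(disk_cache);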

// Generate JSON string of this object
std::string Timeline::Json() const {

    // Return formatted string
    return JsonValue().toStyledString();
}

// Generate Json::Value for this object
Json::Value Timeline::JsonValue() const {

    // Create root json object
    Json::Value root = ReaderBase::JsonValue(); // get parent properties
    root["type"] = "Timeline";
    root["viewport_scale"] = viewport_scale.JsonValue();
    root["viewport_x"] = viewport_x.JsonValue();
    root["viewport_y"] = viewport_y.JsonValue();
    root["color"] = color.JsonValue();
    root["path"] = path;

    // Add array of clips
    root["clips"] = Json::Value(Json::arrayValue);

    // Loop through clips
    for (const auto existing_clip : clips)
    {
        root["clips"].append(existing_clip->JsonValue());
    }

    // Add array of effects
    root["effects"] = Json::Value(Json::arrayValue);

    // Loop through effects
    for (const auto existing_effect: effects)
    {
        root["effects"].append(existing_effect->JsonValue());
    }

    // Return JsonValue
    return root;
}

// Load JSON string into this object
void Timeline::SetJson(const std::string value) {

    // Get lock (prevent getting frames while this happens)
    const std::lock_guard<std::recursive_mutex> lock(getFrameMutex);

    // Parse JSON string into JSON objects
    try
    {
        const Json::Value root = openshot::stringToJson(value);
        // Set all values that match
        SetJsonValue(root);
    }
    catch (const std::exception& e)
    {
        // Error parsing JSON (or missing keys)
        throw InvalidJSON("JSON is invalid (missing keys or invalid data types)");
    }
}

// Load Json::Value into this object
void Timeline::SetJsonValue(const Json::Value root) {

    // Get lock (prevent getting frames while this happens)
    const std::lock_guard<std::recursive_mutex> lock(getFrameMutex);

    // Close timeline before we do anything (this closes all clips)
    bool was_open = is_open;
    Close();

    // Set parent data
    ReaderBase::SetJsonValue(root);

    // Set data from Json (if key is found)
    if (!root["path"].isNull())
        path = root["path"].asString();

    if (!root["clips"].isNull()) {
        // Clear existing clips
        clips.clear();

        // Loop through clips
        for (const Json::Value existing_clip : root["clips"]) {
            // Create Clip
            Clip *c = new Clip();

            // Keep track of allocated clip objects
            allocated_clips.insert(c);

            // When a clip is attached to an object, it searches for the object
            // on its parent timeline. Setting the parent timeline of the clip here
            // allows attaching it to an object when exporting the project (because
            // the exporter script initializes the clip and its effects
            // before setting its parent timeline).
            c->ParentTimeline(this);

            // Load Json into Clip
            c->SetJsonValue(existing_clip);

            // Add Clip to Timeline
            AddClip(c);
        }
    }

    if (!root["effects"].isNull()) {
        // Clear existing effects
        effects.clear();

        // Loop through effects
        for (const Json::Value existing_effect : root["effects"]) {
            // Create Effect
            EffectBase *e = NULL;

            if (!existing_effect["type"].isNull()) {
                // Create instance of effect
                if ( (e = EffectInfo().CreateEffect(existing_effect["type"].asString())) ) {

                    // Keep track of allocated effect objects
                    allocated_effects.insert(e);

                    // Load Json into Effect
                    e->SetJsonValue(existing_effect);

                    // Add Effect to Timeline
                    AddEffect(e);
                }
            }
        }
    }

    if (!root["duration"].isNull()) {
        // Update duration of timeline
        info.duration = root["duration"].asDouble();
        info.video_length = info.fps.ToFloat() * info.duration;
    }

    // Update preview settings
    preview_width = info.width;
    preview_height = info.height;

    // Re-open if needed
    if (was_open)
        Open();
}

// Apply a special formatted JSON object, which represents a change to the timeline (insert, update, delete)
void Timeline::ApplyJsonDiff(std::string value) {

    // Get lock (prevent getting frames while this happens)
    const std::lock_guard<std::recursive_mutex> lock(getFrameMutex);

    // Parse JSON string into JSON objects
    try
    {
        const Json::Value root = openshot::stringToJson(value);
        // Process the JSON change array, loop through each item
        for (const Json::Value change : root) {
            std::string change_key = change["key"][(uint)0].asString();

            // Process each type of change
            if (change_key == "clips")
                // Apply to CLIPS
                apply_json_to_clips(change);

            else if (change_key == "effects")
                // Apply to EFFECTS
                apply_json_to_effects(change);

            else
                // Apply to TIMELINE
                apply_json_to_timeline(change);

        }
    }
    catch (const std::exception& e)
    {
        // Error parsing JSON (or missing keys)
        throw InvalidJSON("JSON is invalid (missing keys or invalid data types)");
    }
}
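
// Illustrative payload (not part of the original file): a JSON diff array as
// consumed by ApplyJsonDiff; the clip id and property values are hypothetical.
//
//     [{"type": "update",
//       "key": ["clips", {"id": "CLIP_1"}],
//       "value": {"id": "CLIP_1", "position": 4.5, "start": 0.0, "end": 10.0}}]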

// Apply JSON diff to clips
void Timeline::apply_json_to_clips(Json::Value change) {

    // Get key and type of change
    std::string change_type = change["type"].asString();
    std::string clip_id = "";
    Clip *existing_clip = NULL;

    // Find id of clip (if any)
    for (auto key_part : change["key"]) {
        // Get each change
        if (key_part.isObject()) {
            // Check for id
            if (!key_part["id"].isNull()) {
                // Set the id
                clip_id = key_part["id"].asString();

                // Find matching clip in timeline (if any)
                for (auto c : clips)
                {
                    if (c->Id() == clip_id) {
                        existing_clip = c;
                        break; // clip found, exit loop
                    }
                }
                break; // id found, exit loop
            }
        }
    }

    // Check for a more specific key (targeting this clip's effects)
    // For example: ["clips", {"id": "123"}, "effects", {"id": "432"}]
    if (existing_clip && change["key"].size() == 4 && change["key"][2] == "effects")
    {
        // This change is actually targeting a specific effect under a clip (and not the clip)
        Json::Value key_part = change["key"][3];

        if (key_part.isObject()) {
            // Check for id
            if (!key_part["id"].isNull())
            {
                // Set the id
                std::string effect_id = key_part["id"].asString();

                // Find matching effect in timeline (if any)
                std::list<EffectBase*> effect_list = existing_clip->Effects();
                for (auto e : effect_list)
                {
                    if (e->Id() == effect_id) {
                        // Apply the change to the effect directly
                        apply_json_to_effects(change, e);

                        // Calculate start and end frames that this impacts, and remove those frames from the cache
                        int64_t new_starting_frame = (existing_clip->Position() * info.fps.ToDouble()) + 1;
                        int64_t new_ending_frame = ((existing_clip->Position() + existing_clip->Duration()) * info.fps.ToDouble()) + 1;
                        final_cache->Remove(new_starting_frame - 8, new_ending_frame + 8);

                        return; // effect found, don't update clip
                    }
                }
            }
        }
    }

    // Calculate start and end frames that this impacts, and remove those frames from the cache
    if (!change["value"].isArray() && !change["value"]["position"].isNull()) {
        int64_t new_starting_frame = (change["value"]["position"].asDouble() * info.fps.ToDouble()) + 1;
        int64_t new_ending_frame = ((change["value"]["position"].asDouble() + change["value"]["end"].asDouble() - change["value"]["start"].asDouble()) * info.fps.ToDouble()) + 1;
        final_cache->Remove(new_starting_frame - 8, new_ending_frame + 8);
    }

    // Determine type of change operation
    if (change_type == "insert") {

        // Create clip
        Clip *clip = new Clip();

        // Keep track of allocated clip objects
        allocated_clips.insert(clip);

        // Set properties of clip from JSON
        clip->SetJsonValue(change["value"]);

        // Add clip to timeline
        AddClip(clip);

    } else if (change_type == "update") {

        // Update existing clip
        if (existing_clip) {

            // Calculate start and end frames that this impacts, and remove those frames from the cache
            int64_t old_starting_frame = (existing_clip->Position() * info.fps.ToDouble()) + 1;
            int64_t old_ending_frame = ((existing_clip->Position() + existing_clip->Duration()) * info.fps.ToDouble()) + 1;
            final_cache->Remove(old_starting_frame - 8, old_ending_frame + 8);

            // Remove cache on clip's Reader (if found)
            if (existing_clip->Reader() && existing_clip->Reader()->GetCache())
                existing_clip->Reader()->GetCache()->Remove(old_starting_frame - 8, old_ending_frame + 8);

            // Update clip properties from JSON
            existing_clip->SetJsonValue(change["value"]);

            // Apply framemapper (or update existing framemapper)
            if (auto_map_clips) {
                apply_mapper_to_clip(existing_clip);
            }
        }

    } else if (change_type == "delete") {

        // Remove existing clip
        if (existing_clip) {

            // Calculate start and end frames that this impacts, and remove those frames from the cache
            int64_t old_starting_frame = (existing_clip->Position() * info.fps.ToDouble()) + 1;
            int64_t old_ending_frame = ((existing_clip->Position() + existing_clip->Duration()) * info.fps.ToDouble()) + 1;
            final_cache->Remove(old_starting_frame - 8, old_ending_frame + 8);

            // Remove clip from timeline
            RemoveClip(existing_clip);
        }

    }

    // Re-Sort Clips (since they likely changed)
    sort_clips();
}

// Apply JSON diff to effects
void Timeline::apply_json_to_effects(Json::Value change) {

    // Get key and type of change
    std::string change_type = change["type"].asString();
    EffectBase *existing_effect = NULL;

    // Find id of an effect (if any)
    for (auto key_part : change["key"]) {

        if (key_part.isObject()) {
            // Check for id
            if (!key_part["id"].isNull())
            {
                // Set the id
                std::string effect_id = key_part["id"].asString();

                // Find matching effect in timeline (if any)
                for (auto e : effects)
                {
                    if (e->Id() == effect_id) {
                        existing_effect = e;
                        break; // effect found, exit loop
                    }
                }
                break; // id found, exit loop
            }
        }
    }

    // Now that we found the effect, apply the change to it
    if (existing_effect || change_type == "insert") {
        // Apply change to effect
        apply_json_to_effects(change, existing_effect);
    }
}

// Apply JSON diff to effects (if you already know which effect needs to be updated)
void Timeline::apply_json_to_effects(Json::Value change, EffectBase* existing_effect) {

    // Get key and type of change
    std::string change_type = change["type"].asString();

    // Calculate start and end frames that this impacts, and remove those frames from the cache
    if (!change["value"].isArray() && !change["value"]["position"].isNull()) {
        int64_t new_starting_frame = (change["value"]["position"].asDouble() * info.fps.ToDouble()) + 1;
        int64_t new_ending_frame = ((change["value"]["position"].asDouble() + change["value"]["end"].asDouble() - change["value"]["start"].asDouble()) * info.fps.ToDouble()) + 1;
        final_cache->Remove(new_starting_frame - 8, new_ending_frame + 8);
    }

    // Determine type of change operation
    if (change_type == "insert") {

        // Determine type of effect
        std::string effect_type = change["value"]["type"].asString();

        // Create Effect
        EffectBase *e = NULL;

        // Init the matching effect object
        if ( (e = EffectInfo().CreateEffect(effect_type)) ) {

            // Keep track of allocated effect objects
            allocated_effects.insert(e);

            // Load Json into Effect
            e->SetJsonValue(change["value"]);

            // Add Effect to Timeline
            AddEffect(e);
        }

    } else if (change_type == "update") {

        // Update existing effect
        if (existing_effect) {

            // Calculate start and end frames that this impacts, and remove those frames from the cache
            int64_t old_starting_frame = (existing_effect->Position() * info.fps.ToDouble()) + 1;
            int64_t old_ending_frame = ((existing_effect->Position() + existing_effect->Duration()) * info.fps.ToDouble()) + 1;
            final_cache->Remove(old_starting_frame - 8, old_ending_frame + 8);

            // Update effect properties from JSON
            existing_effect->SetJsonValue(change["value"]);
        }

    } else if (change_type == "delete") {

        // Remove existing effect
        if (existing_effect) {

            // Calculate start and end frames that this impacts, and remove those frames from the cache
            int64_t old_starting_frame = (existing_effect->Position() * info.fps.ToDouble()) + 1;
            int64_t old_ending_frame = ((existing_effect->Position() + existing_effect->Duration()) * info.fps.ToDouble()) + 1;
            final_cache->Remove(old_starting_frame - 8, old_ending_frame + 8);

            // Remove effect from timeline
            RemoveEffect(existing_effect);
        }

    }

    // Re-Sort Effects (since they likely changed)
    sort_effects();
}

// Apply JSON diff to timeline properties
void Timeline::apply_json_to_timeline(Json::Value change) {
    bool cache_dirty = true;

    // Get key and type of change
    std::string change_type = change["type"].asString();
    std::string root_key = change["key"][(uint)0].asString();
    std::string sub_key = "";
    if (change["key"].size() >= 2)
        sub_key = change["key"][(uint)1].asString();

    // Determine type of change operation
    if (change_type == "insert" || change_type == "update") {

        // INSERT / UPDATE
        // Check for valid property
        if (root_key == "color")
            // Set color
            color.SetJsonValue(change["value"]);
        else if (root_key == "viewport_scale")
            // Set viewport scale
            viewport_scale.SetJsonValue(change["value"]);
        else if (root_key == "viewport_x")
            // Set viewport x offset
            viewport_x.SetJsonValue(change["value"]);
        else if (root_key == "viewport_y")
            // Set viewport y offset
            viewport_y.SetJsonValue(change["value"]);
        else if (root_key == "duration") {
            // Update duration of timeline
            info.duration = change["value"].asDouble();
            info.video_length = info.fps.ToFloat() * info.duration;

            // We don't want to clear cache for duration adjustments
            cache_dirty = false;
        }
        else if (root_key == "width") {
            // Set width
            info.width = change["value"].asInt();
            SetMaxSize(info.width, info.height);
        }
        else if (root_key == "height") {
            // Set height
            info.height = change["value"].asInt();
            SetMaxSize(info.width, info.height);
        }
        else if (root_key == "fps" && sub_key == "" && change["value"].isObject()) {
            // Set fps fraction
            if (!change["value"]["num"].isNull())
                info.fps.num = change["value"]["num"].asInt();
            if (!change["value"]["den"].isNull())
                info.fps.den = change["value"]["den"].asInt();
        }
        else if (root_key == "fps" && sub_key == "num")
            // Set fps.num
            info.fps.num = change["value"].asInt();
        else if (root_key == "fps" && sub_key == "den")
            // Set fps.den
            info.fps.den = change["value"].asInt();
        else if (root_key == "display_ratio" && sub_key == "" && change["value"].isObject()) {
            // Set display_ratio fraction
            if (!change["value"]["num"].isNull())
                info.display_ratio.num = change["value"]["num"].asInt();
            if (!change["value"]["den"].isNull())
                info.display_ratio.den = change["value"]["den"].asInt();
        }
        else if (root_key == "display_ratio" && sub_key == "num")
            // Set display_ratio.num
            info.display_ratio.num = change["value"].asInt();
        else if (root_key == "display_ratio" && sub_key == "den")
            // Set display_ratio.den
            info.display_ratio.den = change["value"].asInt();
        else if (root_key == "pixel_ratio" && sub_key == "" && change["value"].isObject()) {
            // Set pixel_ratio fraction
            if (!change["value"]["num"].isNull())
                info.pixel_ratio.num = change["value"]["num"].asInt();
            if (!change["value"]["den"].isNull())
                info.pixel_ratio.den = change["value"]["den"].asInt();
        }
        else if (root_key == "pixel_ratio" && sub_key == "num")
            // Set pixel_ratio.num
            info.pixel_ratio.num = change["value"].asInt();
        else if (root_key == "pixel_ratio" && sub_key == "den")
            // Set pixel_ratio.den
            info.pixel_ratio.den = change["value"].asInt();

        else if (root_key == "sample_rate")
            // Set sample rate
            info.sample_rate = change["value"].asInt();
        else if (root_key == "channels")
            // Set channels
            info.channels = change["value"].asInt();
        else if (root_key == "channel_layout")
            // Set channel layout
            info.channel_layout = (ChannelLayout) change["value"].asInt();
        else
            // Error parsing JSON (or missing keys)
            throw InvalidJSONKey("JSON change key is invalid", change.toStyledString());


    } else if (change["type"].asString() == "delete") {

        // DELETE / RESET
        // Reset the following properties (since we can't delete them)
        if (root_key == "color") {
            color = Color();
            color.red = Keyframe(0.0);
            color.green = Keyframe(0.0);
            color.blue = Keyframe(0.0);
        }
        else if (root_key == "viewport_scale")
            viewport_scale = Keyframe(1.0);
        else if (root_key == "viewport_x")
            viewport_x = Keyframe(0.0);
        else if (root_key == "viewport_y")
            viewport_y = Keyframe(0.0);
        else
            // Error parsing JSON (or missing keys)
            throw InvalidJSONKey("JSON change key is invalid", change.toStyledString());

    }

    if (cache_dirty) {
        // Clear entire cache
        ClearAllCache();
    }
}

// Clear all caches
void Timeline::ClearAllCache(bool deep) {

    // Clear primary cache
    if (final_cache) {
        final_cache->Clear();
    }

    // Loop through all clips
    try {
        for (const auto clip : clips) {
            // Clear cache on clip's reader
            clip->Reader()->GetCache()->Clear();

            // Clear nested Reader (if deep clear requested)
            if (deep && clip->Reader()->Name() == "FrameMapper") {
                FrameMapper *nested_reader = static_cast<FrameMapper *>(clip->Reader());
                if (nested_reader->Reader() && nested_reader->Reader()->GetCache())
                    nested_reader->Reader()->GetCache()->Clear();
            }

            // Clear clip cache
            clip->GetCache()->Clear();
        }
    } catch (const ReaderClosed & e) {
        // ...
    }
}
1683 
1684 // Set Max Image Size (used for performance optimization). Convenience function for setting
1685 // Settings::Instance()->MAX_WIDTH and Settings::Instance()->MAX_HEIGHT.
1686 void Timeline::SetMaxSize(int width, int height) {
1687  // Maintain aspect ratio regardless of what size is passed in
1688  QSize display_ratio_size = QSize(info.width, info.height);
1689  QSize proposed_size = QSize(std::min(width, info.width), std::min(height, info.height));
1690 
1691  // Scale QSize up to proposed size
1692  display_ratio_size.scale(proposed_size, Qt::KeepAspectRatio);
1693 
1694  // Update preview settings
1695  preview_width = display_ratio_size.width();
1696  preview_height = display_ratio_size.height();
1697 }
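To make the scaling above concrete, here is a standalone sketch with assumed values (a 1920x1080 timeline and a requested max size of 1280x1280); it reproduces only the QSize math from SetMaxSize():

#include <QSize>
#include <algorithm>

int main() {
    QSize display_ratio_size(1920, 1080);         // info.width x info.height
    QSize proposed_size(std::min(1280, 1920),     // clamp to timeline width
                        std::min(1280, 1080));    // clamp to timeline height -> 1280x1080
    display_ratio_size.scale(proposed_size, Qt::KeepAspectRatio);
    // Result: 1280x720, the largest 16:9 size that fits inside 1280x1080,
    // i.e. preview_width = 1280, preview_height = 720.
    return 0;
}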
void ApplyJsonDiff(std::string value)
Apply a special formatted JSON object, which represents a change to the timeline (add, update, delete) This is primarily designed to keep the timeline (and its child objects... such as clips and effects) in sync with another application... such as OpenShot Video Editor (http://www.openshot.org).
Definition: Timeline.cpp:1257
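The JSON-diff format handled above is easiest to see with a concrete call. A minimal sketch follows; the diff schema (an array of {"type", "key", "value"} objects) is inferred from the update/delete branches in this listing, and all values are hypothetical.

#include "Timeline.h"
#include <string>
using namespace openshot;

int main() {
    Timeline timeline(1920, 1080, Fraction(30, 1), 44100, 2, LAYOUT_STEREO);
    // One "update" (set pixel_ratio to 1/1) and one "delete"
    // (reset viewport_x back to its default keyframe).
    std::string diff = R"([
        {"type": "update", "key": ["pixel_ratio"], "value": {"num": 1, "den": 1}},
        {"type": "delete", "key": ["viewport_x"]}
    ])";
    timeline.ApplyJsonDiff(diff);  // clears the frame cache when needed
    return 0;
}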
void SetJsonValue(const Json::Value root)
Load Json::Value into this object.
Definition: Color.cpp:117
openshot::Color color
Background color of timeline canvas.
Definition: Timeline.h:319
This class contains settings used by libopenshot (and can be safely toggled at any point) ...
Definition: Settings.h:26
void SetJsonValue(const Json::Value root) override
Load Json::Value into this object.
Definition: Clip.cpp:965
int num
Numerator for the fraction.
Definition: Fraction.h:32
ReaderBase * Reader()
Get the current reader.
Definition: FrameMapper.cpp:64
This abstract class is the base class, used by all effects in libopenshot.
Definition: EffectBase.h:52
bool is_top_clip
Is clip on top (if overlapping another clip)
Definition: TimelineBase.h:34
std::shared_ptr< openshot::Frame > GetFrame(int64_t requested_frame) override
Definition: Timeline.cpp:889
std::string Id() const
Get the Id of this clip object.
Definition: ClipBase.h:85
float Start() const
Get start position (in seconds) of clip (trim start of video)
Definition: ClipBase.h:88
virtual void SetJsonValue(const Json::Value root)
Load Json::Value into this object.
Definition: EffectBase.cpp:112
int width
The width of the video (in pixels)
Definition: ReaderBase.h:46
float cy
y-coordinate of the bounding box center
void ClearAllCache(bool deep=false)
Definition: Timeline.cpp:1656
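Usage is a one-liner; the shallow/deep distinction below mirrors the FrameMapper handling in the listing (the `timeline` object is assumed to exist):

timeline.ClearAllCache();      // shallow: timeline cache + each clip/reader cache
timeline.ClearAllCache(true);  // deep: also clears readers nested inside FrameMappers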
int preview_width
Optional preview width of timeline image. If your preview window is smaller than the timeline...
Definition: TimelineBase.h:43
float ToFloat()
Return this fraction as a float (i.e. 1/2 = 0.5)
Definition: Fraction.cpp:35
float duration
Length of time (in seconds)
Definition: ReaderBase.h:43
float height
bounding box height
void Close() override
Close the internal reader.
Definition: Clip.cpp:339
double ToDouble() const
Return this fraction as a double (i.e. 1/2 = 0.5)
Definition: Fraction.cpp:40
void Reduce()
Reduce this fraction (i.e. 640/480 = 4/3)
Definition: Fraction.cpp:65
This struct contains info about the current Timeline clip instance.
Definition: TimelineBase.h:32
void SetMaxBytesFromInfo(int64_t number_of_frames, int width, int height, int sample_rate, int channels)
Set maximum bytes to a different amount based on a ReaderInfo struct.
Definition: CacheBase.cpp:30
Evenly divide the overlapping clips volume keyframes, so that the sum does not exceed 100%...
Definition: Enums.h:63
openshot::Clip * GetClip(const std::string &id)
Look up a single clip by ID.
Definition: Timeline.cpp:408
void SetJsonValue(const Json::Value root) override
Load Json::Value into this object.
Definition: Timeline.cpp:1172
const Json::Value stringToJson(const std::string value)
Definition: Json.cpp:16
This abstract class is the base class, used by all readers in libopenshot.
Definition: ReaderBase.h:75
#define OPEN_MP_NUM_PROCESSORS
openshot::Keyframe volume
Curve representing the volume (0 to 1)
Definition: Clip.h:315
Exception when a reader is closed, and a frame is requested.
Definition: Exceptions.h:363
bool has_video
Determines if this file has a video stream.
Definition: ReaderBase.h:40
void Open() override
Open the internal reader.
Definition: Clip.cpp:318
float angle
bounding box rotation angle [degrees]
Header file for CacheMemory class.
void Close() override
Close the timeline reader (and any resources it was consuming)
Definition: Timeline.cpp:855
Header file for Timeline class.
std::list< openshot::EffectBase * > Effects()
Return the list of effects on the timeline.
Definition: Clip.h:227
int GetInt(int64_t index) const
Get the rounded INT value at a specific index.
Definition: KeyFrame.cpp:282
void AppendDebugMethod(std::string method_name, std::string arg1_name="", float arg1_value=-1.0, std::string arg2_name="", float arg2_value=-1.0, std::string arg3_name="", float arg3_value=-1.0, std::string arg4_name="", float arg4_value=-1.0, std::string arg5_name="", float arg5_value=-1.0, std::string arg6_name="", float arg6_value=-1.0)
Append debug information.
Definition: ZmqLogger.cpp:178
openshot::Keyframe blue
Curve representing the blue value (0 - 255)
Definition: Color.h:32
void SetCache(openshot::CacheBase *new_cache)
Definition: Timeline.cpp:1098
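A hedged sketch of swapping in a different final cache; the byte budget is arbitrary, and whether the Timeline takes ownership of the pointer is an assumption worth verifying:

#include "CacheMemory.h"
// Replace the default final cache with a larger memory cache (~512 MB).
openshot::CacheMemory* big_cache = new openshot::CacheMemory(512 * 1024 * 1024);
timeline.SetCache(big_cache);  // assumption: the caller may retain ownership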
Exception for missing JSON Change key.
Definition: Exceptions.h:262
std::list< std::string > GetTrackedObjectsIds() const
Return the IDs of the tracked objects as a list of strings.
Definition: Timeline.cpp:258
Header file for CacheBase class.
int GetSamplesPerFrame(openshot::Fraction fps, int sample_rate, int channels)
Calculate the # of samples per video frame (for the current frame number)
Definition: Frame.cpp:480
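As a quick worked example: with fps = 24/1 and sample_rate = 44100, each frame covers 44100 × (1/24) = 1837.5 samples. Sample counts must be whole numbers, so frames alternate between 1837 and 1838 samples, which is why the calculation is done per frame number.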
Header file for all Exception classes.
double GetMaxTime()
Look up the end time of the latest timeline element.
Definition: Timeline.cpp:463
void SetJson(const std::string value) override
Load JSON string into this object.
Definition: Timeline.cpp:1152
virtual openshot::CacheBase * GetCache()=0
Get the cache object used by this reader (note: not all readers use cache)
bool has_audio
Determines if this file has an audio stream.
Definition: ReaderBase.h:41
virtual Json::Value JsonValue() const =0
Generate Json::Value for this object.
Definition: ReaderBase.cpp:107
This class represents a clip (used to arrange readers on the timeline)
Definition: Clip.h:91
void ChangeMapping(Fraction target_fps, PulldownType pulldown, int target_sample_rate, int target_channels, ChannelLayout target_channel_layout)
Change frame rate or audio mapping details.
virtual openshot::TimelineBase * ParentTimeline()
Get the associated Timeline pointer (if any)
Definition: ClipBase.h:91
int64_t video_length
The number of frames in the video stream.
Definition: ReaderBase.h:53
openshot::Keyframe channel_filter
A number representing an audio channel to filter (clears all other channels)
Definition: Clip.h:331
openshot::Keyframe green
Curve representing the green value (0 - 255)
Definition: Color.h:31
openshot::TimelineBase * ParentTimeline() override
Get the associated Timeline pointer (if any)
Definition: Clip.h:278
int height
The height of the video (in pixels)
Definition: ReaderBase.h:45
virtual void Remove(int64_t frame_number)=0
Remove a specific frame.
void SetMaxSize(int width, int height)
Definition: Timeline.cpp:1686
openshot::Fraction video_timebase
The video timebase determines how long each frame stays on the screen.
Definition: ReaderBase.h:55
Exception for files that cannot be found or opened.
Definition: Exceptions.h:187
std::recursive_mutex getFrameMutex
Mutex for multiple threads.
Definition: ReaderBase.h:79
openshot::EffectBase * GetClipEffect(const std::string &id)
Look up a clip effect by ID.
Definition: Timeline.cpp:431
static CrashHandler * Instance()
Json::Value JsonValue() const override
Generate Json::Value for this object.
Definition: Timeline.cpp:1118
void AddClip(openshot::Clip *clip)
Add an openshot::Clip to the timeline.
Definition: Timeline.cpp:332
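Putting the pieces together, a minimal (hypothetical) arrangement looks like this; the media path, position, layer, and frame number are placeholders:

#include "Clip.h"
openshot::Clip* clip = new openshot::Clip("video.mp4");  // hypothetical path
clip->Position(2.0);             // place 2 seconds into the timeline
clip->Layer(1);                  // higher layers cover lower ones
timeline.AddClip(clip);          // frame/sample rates are auto-mapped when enabled
timeline.Open();
auto frame = timeline.GetFrame(1);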
void ApplyMapperToClips()
Apply the timeline's frame rate and sample rate to all clips.
Definition: Timeline.cpp:502
float width
bounding box width
std::list< openshot::EffectBase * > ClipEffects() const
Return the list of effects on all clips.
Definition: Timeline.cpp:444
This class represents a fraction.
Definition: Fraction.h:30
Header file for the FrameMapper class.
std::string GetColorHex(int64_t frame_number)
Get the HEX value of a color at a specific frame.
Definition: Color.cpp:47
openshot::Keyframe has_audio
An optional override to determine if this clip has audio (-1=undefined, 0=no, 1=yes) ...
Definition: Clip.h:335
std::shared_ptr< openshot::Frame > GetFrame(int64_t clip_frame_number) override
Get an openshot::Frame object for a specific frame number of this clip. The image size and number of ...
Definition: Clip.cpp:389
virtual std::shared_ptr< openshot::Frame > GetFrame(int64_t frame_number)=0
Get a frame from the cache.
This struct contains info about a media file, such as height, width, frames per second, etc...
Definition: ReaderBase.h:38
openshot::ChannelLayout channel_layout
The channel layout (mono, stereo, 5 point surround, etc...)
Definition: ReaderBase.h:62
All cache managers in libopenshot are based on this CacheBase class.
Definition: CacheBase.h:34
ChannelLayout
This enumeration determines the audio channel layout (such as stereo, mono, 5 point surround...
std::string GetTrackedObjectValues(std::string id, int64_t frame_number) const
Return the trackedObject's properties as a JSON string.
Definition: Timeline.cpp:274
Fraction Reciprocal() const
Return the reciprocal as a Fraction.
Definition: Fraction.cpp:78
openshot::Keyframe channel_mapping
A number representing an audio channel to output (only works when filtering a channel) ...
Definition: Clip.h:332
openshot::Keyframe viewport_y
Curve representing the y coordinate for the viewport.
Definition: Timeline.h:316
This struct holds the information of a bounding-box.
void Clear()
Clear the cache of all frames.
virtual void SetJsonValue(const Json::Value root)=0
Load Json::Value into this object.
Definition: ReaderBase.cpp:162
openshot::EffectBase * GetEffect(const std::string &id)
Look up a timeline effect by ID.
Definition: Timeline.cpp:420
openshot::Keyframe viewport_x
Curve representing the x coordinate for the viewport.
Definition: Timeline.h:315
openshot::ReaderInfo info
Information about the current media file.
Definition: ReaderBase.h:88
std::string Json() const override
Generate JSON string of this object.
Definition: Timeline.cpp:1111
Exception for frames that are out of bounds.
Definition: Exceptions.h:300
This class creates a mapping between 2 different frame rates, applying a specific pull-down technique...
Definition: FrameMapper.h:201
This class represents a color (used on the timeline and clips)
Definition: Color.h:27
static ZmqLogger * Instance()
Create or get an instance of this logger singleton (invoke the class with this method) ...
Definition: ZmqLogger.cpp:35
Reduce volume by about 25%, then mix (louder, but could cause pops if the sum exceeds 100%) ...
Definition: Enums.h:64
std::shared_ptr< openshot::TrackedObjectBase > GetTrackedObject(std::string id) const
Return a tracked object pointer by its id.
Definition: Timeline.cpp:241
float Duration() const
Get the length of this clip (in seconds)
Definition: ClipBase.h:90
std::string vcodec
The name of the video codec used to encode / decode the video stream.
Definition: ReaderBase.h:52
openshot::CacheMemory * GetCache() override
Get the cache object (always return NULL for this reader)
Definition: Clip.h:194
void SetJsonValue(const Json::Value root)
Load Json::Value into this object.
Definition: KeyFrame.cpp:372
void AddTrackedObject(std::shared_ptr< openshot::TrackedObjectBase > trackedObject)
Add to the tracked_objects map a pointer to a tracked object (TrackedObjectBBox)
Definition: Timeline.cpp:223
openshot::ClipBase * clip
Pointer to the parent clip instance (if any)
Definition: ReaderBase.h:80
std::string PATH_OPENSHOT_INSTALL
Definition: Settings.h:108
This namespace is the default namespace for all code in the openshot library.
Definition: Compressor.h:28
float cx
x-coordinate of the bounding box center
Do not apply pull-down techniques, just repeat or skip entire frames.
Definition: FrameMapper.h:46
Json::Value JsonValue() const
Generate Json::Value for this object.
Definition: KeyFrame.cpp:339
virtual void Clear()=0
Clear the cache of all frames.
void Open() override
Open the reader (and start consuming resources)
Definition: Timeline.cpp:877
int64_t GetMaxFrame()
Look up the end frame number of the latest element on the timeline.
Definition: Timeline.cpp:469
openshot::Keyframe viewport_scale
Curve representing the scale of the viewport (0 to 100)
Definition: Timeline.h:314
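Since the viewport properties are Keyframe curves, they can be animated; a brief sketch with hypothetical frame numbers and values:

timeline.viewport_scale.AddPoint(1, 100.0);   // full scale at frame 1
timeline.viewport_scale.AddPoint(300, 50.0);  // zoom out to 50% by frame 300
timeline.viewport_x.AddPoint(300, -100.0);    // pan horizontally while zooming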
Exception for invalid JSON.
Definition: Exceptions.h:217
int64_t GetCount() const
Get the number of points in this keyframe curve.
Definition: KeyFrame.cpp:417
double GetValue(int64_t index) const
Get the value at a specific index.
Definition: KeyFrame.cpp:258
openshot::Fraction display_ratio
The ratio of width to height of the video stream (i.e. 640x480 has a ratio of 4/3) ...
Definition: ReaderBase.h:51
void Reader(openshot::ReaderBase *new_reader)
Set the current reader.
Definition: Clip.cpp:272
std::shared_ptr< openshot::Frame > apply_effects(std::shared_ptr< openshot::Frame > frame, int64_t timeline_frame_number, int layer)
Apply global/timeline effects to the source frame (if any)
Definition: Timeline.cpp:526
openshot::Keyframe red
Curve representing the red value (0 - 255)
Definition: Color.h:30
Timeline(int width, int height, openshot::Fraction fps, int sample_rate, int channels, openshot::ChannelLayout channel_layout)
Constructor for the timeline (which configures the default frame properties)
Definition: Timeline.cpp:28
static Settings * Instance()
Create or get an instance of this settings singleton (invoke the class with this method) ...
Definition: Settings.cpp:23
Header file for CacheDisk class.
Header file for CrashHandler class.
virtual void Add(std::shared_ptr< openshot::Frame > frame)=0
Add a Frame to the cache.
openshot::Fraction pixel_ratio
The pixel ratio of the video stream as a fraction (i.e. some pixels are not square) ...
Definition: ReaderBase.h:50
Like CompareClipEndFrames, but for effects.
Definition: Timeline.h:76
int preview_height
Optional preview height of timeline image. If your preview window is smaller than the timeline...
Definition: TimelineBase.h:44
float Position() const
Get position on timeline (in seconds)
Definition: ClipBase.h:86
void AddEffect(openshot::EffectBase *effect)
Add an effect to the timeline.
Definition: Timeline.cpp:358
void Clear()
Clear all clips, effects, and frame mappers from timeline (and free memory)
Definition: Timeline.cpp:809
void RemoveEffect(openshot::EffectBase *effect)
Remove an effect from the timeline.
Definition: Timeline.cpp:371
This class returns a listing of all effects supported by libopenshot.
Definition: EffectInfo.h:28
int den
Denominator for the fraction.
Definition: Fraction.h:33
int channels
The number of audio channels used in the audio stream.
Definition: ReaderBase.h:61
A Keyframe is a collection of Point instances, which is used to vary a number or property over time...
Definition: KeyFrame.h:53
openshot::VolumeMixType mixing
What strategy should be followed when mixing audio with other clips.
Definition: Clip.h:171
This class contains the properties of a tracked object and functions to manipulate it...
virtual ~Timeline()
Definition: Timeline.cpp:206
This class is a memory-based cache manager for Frame objects.
Definition: CacheMemory.h:29
openshot::Fraction fps
Frames per second, as a fraction (i.e. 24/1 = 24 fps)
Definition: ReaderBase.h:48
void RemoveClip(openshot::Clip *clip)
Remove an openshot::Clip from the timeline.
Definition: Timeline.cpp:388
std::string acodec
The name of the audio codec used to encode / decode the audio stream.
Definition: ReaderBase.h:58
int Layer() const
Get layer of clip on timeline (lower number is covered by higher numbers)
Definition: ClipBase.h:87
int sample_rate
The number of audio samples per second (44100 is a common sample rate)
Definition: ReaderBase.h:60
Json::Value JsonValue() const
Generate Json::Value for this object.
Definition: Color.cpp:86
This class represents a timeline.
Definition: Timeline.h:150