src/lib/ffmpeg_content.cc
/*
    Copyright (C) 2013-2021 Carl Hetherington <cth@carlh.net>

    This file is part of DCP-o-matic.

    DCP-o-matic is free software; you can redistribute it and/or modify
    it under the terms of the GNU General Public License as published by
    the Free Software Foundation; either version 2 of the License, or
    (at your option) any later version.

    DCP-o-matic is distributed in the hope that it will be useful,
    but WITHOUT ANY WARRANTY; without even the implied warranty of
    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
    GNU General Public License for more details.

    You should have received a copy of the GNU General Public License
    along with DCP-o-matic.  If not, see <http://www.gnu.org/licenses/>.

*/

#include "ffmpeg_content.h"
#include "video_content.h"
#include "audio_content.h"
#include "ffmpeg_examiner.h"
#include "ffmpeg_subtitle_stream.h"
#include "ffmpeg_audio_stream.h"
#include "compose.hpp"
#include "job.h"
#include "util.h"
#include "filter.h"
#include "film.h"
#include "log.h"
#include "config.h"
#include "exceptions.h"
#include "frame_rate_change.h"
#include "text_content.h"
#include <dcp/raw_convert.h>
#include <libcxml/cxml.h>
extern "C" {
#include <libavformat/avformat.h>
#include <libavutil/pixdesc.h>
}
#include <libxml++/libxml++.h>
#include <iostream>

#include "i18n.h"

using std::string;
using std::vector;
using std::list;
using std::cout;
using std::pair;
using std::make_pair;
using std::max;
using std::make_shared;
using std::shared_ptr;
using std::dynamic_pointer_cast;
using boost::optional;
using dcp::raw_convert;
using namespace dcpomatic;

int const FFmpegContentProperty::SUBTITLE_STREAMS = 100;
int const FFmpegContentProperty::SUBTITLE_STREAM = 101;
int const FFmpegContentProperty::FILTERS = 102;
int const FFmpegContentProperty::KDM = 103;

FFmpegContent::FFmpegContent (boost::filesystem::path p)
        : Content (p)
{

}

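/** Read an optional numeric child of @p node called @p name, returning it cast to the
 *  enum type T, or an empty optional if the child is not present.
 */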
template <class T>
optional<T>
get_optional_enum (cxml::ConstNodePtr node, string name)
{
        auto const v = node->optional_number_child<int>(name);
        if (!v) {
                return optional<T>();
        }
        return static_cast<T>(*v);
}

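/** Construct an FFmpegContent from a saved XML description, adding a note to @p notes
 *  if a previously-chosen filter is no longer available.
 */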
FFmpegContent::FFmpegContent (cxml::ConstNodePtr node, int version, list<string>& notes)
        : Content (node)
{
        video = VideoContent::from_xml (this, node, version);
        audio = AudioContent::from_xml (this, node, version);
        text = TextContent::from_xml (this, node, version);

        for (auto i: node->node_children("SubtitleStream")) {
                _subtitle_streams.push_back (make_shared<FFmpegSubtitleStream>(i, version));
                if (i->optional_number_child<int>("Selected")) {
                        _subtitle_stream = _subtitle_streams.back ();
                }
        }

        for (auto i: node->node_children("AudioStream")) {
                auto as = make_shared<FFmpegAudioStream>(i, version);
                audio->add_stream (as);
                if (version < 11 && !i->optional_node_child ("Selected")) {
                        /* This is an old file and this stream is not selected, so un-map it */
                        as->set_mapping (AudioMapping (as->channels (), MAX_DCP_AUDIO_CHANNELS));
                }
        }

        for (auto i: node->node_children("Filter")) {
                Filter const * f = Filter::from_id(i->content());
                if (f) {
                        _filters.push_back (f);
                } else {
                        notes.push_back (String::compose (_("DCP-o-matic no longer supports the `%1' filter, so it has been turned off."), i->content()));
                }
        }

        auto const f = node->optional_number_child<ContentTime::Type> ("FirstVideo");
        if (f) {
                _first_video = ContentTime (f.get ());
        }

        _color_range = get_optional_enum<AVColorRange>(node, "ColorRange");
        _color_primaries = get_optional_enum<AVColorPrimaries>(node, "ColorPrimaries");
        _color_trc = get_optional_enum<AVColorTransferCharacteristic>(node, "ColorTransferCharacteristic");
        _colorspace = get_optional_enum<AVColorSpace>(node, "Colorspace");
        _bits_per_pixel = node->optional_number_child<int> ("BitsPerPixel");
}

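/** Construct an FFmpegContent by joining several other pieces of content end-to-end.
 *  The pieces must agree on whether they have video, audio and text, and any subtitles
 *  that are in use must come from the same subtitle stream.
 */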
FFmpegContent::FFmpegContent (vector<shared_ptr<Content> > c)
        : Content (c)
{
        auto i = c.begin ();

        bool need_video = false;
        bool need_audio = false;
        bool need_text = false;

        if (i != c.end ()) {
                need_video = static_cast<bool> ((*i)->video);
                need_audio = static_cast<bool> ((*i)->audio);
                need_text = !(*i)->text.empty();
        }

        while (i != c.end ()) {
                if (need_video != static_cast<bool> ((*i)->video)) {
                        throw JoinError (_("Content to be joined must all have or not have video"));
                }
                if (need_audio != static_cast<bool> ((*i)->audio)) {
                        throw JoinError (_("Content to be joined must all have or not have audio"));
                }
                if (need_text != !(*i)->text.empty()) {
                        throw JoinError (_("Content to be joined must all have or not have subtitles or captions"));
                }
                ++i;
        }

        if (need_video) {
                video = make_shared<VideoContent>(this, c);
        }
        if (need_audio) {
                audio = make_shared<AudioContent>(this, c);
        }
        if (need_text) {
                text.push_back (make_shared<TextContent>(this, c));
        }

        auto ref = dynamic_pointer_cast<FFmpegContent> (c[0]);
        DCPOMATIC_ASSERT (ref);

        for (size_t i = 0; i < c.size(); ++i) {
                auto fc = dynamic_pointer_cast<FFmpegContent>(c[i]);
                if (fc->only_text() && fc->only_text()->use() && *(fc->_subtitle_stream.get()) != *(ref->_subtitle_stream.get())) {
                        throw JoinError (_("Content to be joined must use the same subtitle stream."));
                }
        }

        /* XXX: should probably check that more of the stuff below is the same in *this and ref */

        _subtitle_streams = ref->subtitle_streams ();
        _subtitle_stream = ref->subtitle_stream ();
        _first_video = ref->_first_video;
        _filters = ref->_filters;
        _color_range = ref->_color_range;
        _color_primaries = ref->_color_primaries;
        _color_trc = ref->_color_trc;
        _colorspace = ref->_colorspace;
        _bits_per_pixel = ref->_bits_per_pixel;
}

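/** Write an XML description of this content to a node.
 *  @param node Node to write to.
 *  @param with_paths True to include the paths to the content files.
 */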
void
FFmpegContent::as_xml (xmlpp::Node* node, bool with_paths) const
{
        node->add_child("Type")->add_child_text ("FFmpeg");
        Content::as_xml (node, with_paths);

        if (video) {
                video->as_xml (node);
        }

        if (audio) {
                audio->as_xml (node);

                for (auto i: audio->streams ()) {
                        auto f = dynamic_pointer_cast<FFmpegAudioStream> (i);
                        DCPOMATIC_ASSERT (f);
                        f->as_xml (node->add_child("AudioStream"));
                }
        }

        if (only_text()) {
                only_text()->as_xml (node);
        }

        boost::mutex::scoped_lock lm (_mutex);

        for (auto i: _subtitle_streams) {
                auto t = node->add_child("SubtitleStream");
                if (_subtitle_stream && i == _subtitle_stream) {
                        t->add_child("Selected")->add_child_text("1");
                }
                i->as_xml (t);
        }

        for (auto i: _filters) {
                node->add_child("Filter")->add_child_text(i->id());
        }

        if (_first_video) {
                node->add_child("FirstVideo")->add_child_text (raw_convert<string> (_first_video.get().get()));
        }

        if (_color_range) {
                node->add_child("ColorRange")->add_child_text (raw_convert<string> (static_cast<int> (*_color_range)));
        }
        if (_color_primaries) {
                node->add_child("ColorPrimaries")->add_child_text (raw_convert<string> (static_cast<int> (*_color_primaries)));
        }
        if (_color_trc) {
                node->add_child("ColorTransferCharacteristic")->add_child_text (raw_convert<string> (static_cast<int> (*_color_trc)));
        }
        if (_colorspace) {
                node->add_child("Colorspace")->add_child_text (raw_convert<string> (static_cast<int> (*_colorspace)));
        }
        if (_bits_per_pixel) {
                node->add_child("BitsPerPixel")->add_child_text (raw_convert<string> (*_bits_per_pixel));
        }
}

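/** Examine our content file(s) with an FFmpegExaminer and set up video, audio and text
 *  parts, colour metadata, rotation filters and subtitle streams from what it reports.
 */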
void
FFmpegContent::examine (shared_ptr<const Film> film, shared_ptr<Job> job)
{
        ChangeSignaller<Content> cc1 (this, FFmpegContentProperty::SUBTITLE_STREAMS);
        ChangeSignaller<Content> cc2 (this, FFmpegContentProperty::SUBTITLE_STREAM);

        if (job) {
                job->set_progress_unknown ();
        }

        Content::examine (film, job);

        auto examiner = make_shared<FFmpegExaminer>(shared_from_this (), job);

        if (examiner->has_video ()) {
                video.reset (new VideoContent (this));
                video->take_from_examiner (examiner);
        }

        auto first_path = path (0);

        {
                boost::mutex::scoped_lock lm (_mutex);

                if (examiner->has_video ()) {
                        _first_video = examiner->first_video ();
                        _color_range = examiner->color_range ();
                        _color_primaries = examiner->color_primaries ();
                        _color_trc = examiner->color_trc ();
                        _colorspace = examiner->colorspace ();
                        _bits_per_pixel = examiner->bits_per_pixel ();

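                        /* Express any rotation reported by the examiner as the equivalent video filter(s) */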
                        if (examiner->rotation()) {
                                auto rot = *examiner->rotation ();
                                if (fabs (rot - 180) < 1.0) {
                                        _filters.push_back (Filter::from_id ("vflip"));
                                        _filters.push_back (Filter::from_id ("hflip"));
                                } else if (fabs (rot - 90) < 1.0) {
                                        _filters.push_back (Filter::from_id ("90clock"));
                                } else if (fabs (rot - 270) < 1.0) {
                                        _filters.push_back (Filter::from_id ("90anticlock"));
                                }
                        }
                }

                if (!examiner->audio_streams().empty ()) {
                        audio = make_shared<AudioContent>(this);

                        for (auto i: examiner->audio_streams()) {
                                audio->add_stream (i);
                        }

                        auto as = audio->streams().front();
                        auto m = as->mapping ();
                        m.make_default (film ? film->audio_processor() : 0, first_path);
                        as->set_mapping (m);
                }

                _subtitle_streams = examiner->subtitle_streams ();
                if (!_subtitle_streams.empty ()) {
                        text.clear ();
                        text.push_back (make_shared<TextContent>(this, TEXT_OPEN_SUBTITLE, TEXT_UNKNOWN));
                        _subtitle_stream = _subtitle_streams.front ();
                }
        }

        if (examiner->has_video ()) {
                set_default_colour_conversion ();
        }

        if (examiner->has_video() && examiner->pulldown() && video_frame_rate() && fabs(*video_frame_rate() - 29.97) < 0.001) {
                /* FFmpeg has detected this file as 29.97 and the examiner thinks it is using "soft" 2:3 pulldown (telecine).
                 * This means we can treat it as a 23.976fps file.
                 */
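                /* Removing 2:3 pulldown from 30000/1001 fps gives (4/5) * 30000/1001 = 24000/1001 ≈ 23.976 fps,
                 * so the frame count is scaled by the same factor of 24/30 = 4/5.
                 */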
                set_video_frame_rate (24000.0 / 1001);
                video->set_length (video->length() * 24.0 / 30);
        }
}

string
FFmpegContent::summary () const
{
        if (video && audio) {
                return String::compose (_("%1 [movie]"), path_summary ());
        } else if (video) {
                return String::compose (_("%1 [video]"), path_summary ());
        } else if (audio) {
                return String::compose (_("%1 [audio]"), path_summary ());
        }

        return path_summary ();
}

string
FFmpegContent::technical_summary () const
{
        string as = "";
        for (auto i: ffmpeg_audio_streams ()) {
                as += i->technical_summary () + " ";
        }

        if (as.empty ()) {
                as = "none";
        }

        string ss = "none";
        if (_subtitle_stream) {
                ss = _subtitle_stream->technical_summary ();
        }

        auto filt = Filter::ffmpeg_string (_filters);

        auto s = Content::technical_summary ();

        if (video) {
                s += " - " + video->technical_summary ();
        }

        if (audio) {
                s += " - " + audio->technical_summary ();
        }

        return s + String::compose (
                "ffmpeg: audio %1 subtitle %2 filters %3", as, ss, filt
                );
}

void
FFmpegContent::set_subtitle_stream (shared_ptr<FFmpegSubtitleStream> s)
{
        ChangeSignaller<Content> cc (this, FFmpegContentProperty::SUBTITLE_STREAM);

        {
                boost::mutex::scoped_lock lm (_mutex);
                _subtitle_stream = s;
        }
}

bool
operator== (FFmpegStream const & a, FFmpegStream const & b)
{
        return a._id == b._id;
}

bool
operator!= (FFmpegStream const & a, FFmpegStream const & b)
{
        return a._id != b._id;
}

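/** @return the length this content will occupy in a DCP made by @p film, based on the
 *  video length if we have video, otherwise on the longest of our audio streams.
 */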
DCPTime
FFmpegContent::full_length (shared_ptr<const Film> film) const
{
        FrameRateChange const frc (film, shared_from_this());
        if (video) {
                return DCPTime::from_frames (llrint (video->length_after_3d_combine() * frc.factor()), film->video_frame_rate());
        }

        if (audio) {
                DCPTime longest;
                for (auto i: audio->streams()) {
                        longest = max (longest, DCPTime::from_frames(llrint(i->length() / frc.speed_up), i->frame_rate()));
                }
                return longest;
        }

        /* XXX: subtitle content? */

        return DCPTime();
}

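/** @return a rough length for this content, expressed at a nominal 24 frames per second
 *  rather than using a particular film's frame rate.
 */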
DCPTime
FFmpegContent::approximate_length () const
{
        if (video) {
                return DCPTime::from_frames (video->length_after_3d_combine(), 24);
        }

        DCPOMATIC_ASSERT (audio);

        Frame longest = 0;
        for (auto i: audio->streams()) {
                longest = max (longest, Frame(llrint(i->length())));
        }

        return DCPTime::from_frames (longest, 24);
}

void
FFmpegContent::set_filters (vector<Filter const *> const & filters)
{
        ChangeSignaller<Content> cc (this, FFmpegContentProperty::FILTERS);

        {
                boost::mutex::scoped_lock lm (_mutex);
                _filters = filters;
        }
}

string
FFmpegContent::identifier () const
{
        string s = Content::identifier();

        if (video) {
                s += "_" + video->identifier();
        }

        if (only_text() && only_text()->use() && only_text()->burn()) {
                s += "_" + only_text()->identifier();
        }

        boost::mutex::scoped_lock lm (_mutex);

        if (_subtitle_stream) {
                s += "_" + _subtitle_stream->identifier ();
        }

        for (auto i: _filters) {
                s += "_" + i->id();
        }

        return s;
}

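/** Give our video a default colour conversion based on its colourspace, guessing from
 *  the picture size when the colourspace is unspecified or unrecognised.
 */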
474 FFmpegContent::set_default_colour_conversion ()
475 {
476         DCPOMATIC_ASSERT (video);
477
478         auto const s = video->size ();
479
480         boost::mutex::scoped_lock lm (_mutex);
481
482         switch (_colorspace.get_value_or(AVCOL_SPC_UNSPECIFIED)) {
483         case AVCOL_SPC_RGB:
484                 video->set_colour_conversion (PresetColourConversion::from_id ("srgb").conversion);
485                 break;
486         case AVCOL_SPC_BT709:
487                 video->set_colour_conversion (PresetColourConversion::from_id ("rec709").conversion);
488                 break;
489         case AVCOL_SPC_BT470BG:
490         case AVCOL_SPC_SMPTE170M:
491         case AVCOL_SPC_SMPTE240M:
492                 video->set_colour_conversion (PresetColourConversion::from_id ("rec601").conversion);
493                 break;
494         case AVCOL_SPC_BT2020_CL:
495         case AVCOL_SPC_BT2020_NCL:
496                 video->set_colour_conversion (PresetColourConversion::from_id ("rec2020").conversion);
497                 break;
498         default:
499                 if (s.width < 1080) {
500                         video->set_colour_conversion (PresetColourConversion::from_id ("rec601").conversion);
501                 } else {
502                         video->set_colour_conversion (PresetColourConversion::from_id ("rec709").conversion);
503                 }
504                 break;
505         }
506 }
507
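/** Add user-visible properties (colour range, primaries, transfer characteristic,
 *  colourspace and bits per pixel) to @p p, along with those of our video and audio parts.
 */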
void
FFmpegContent::add_properties (shared_ptr<const Film> film, list<UserProperty>& p) const
{
        Content::add_properties (film, p);

        if (video) {
                video->add_properties (p);

                if (_bits_per_pixel) {
                        /* Assuming there are three components, bits per component is _bits_per_pixel / 3 */
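                        /* For example, 24 bits per pixel (8 per component) gives lim_start = 2^(8-4) = 16,
                         * lim_end = 235 * 2^(8-8) = 235 (the familiar 16-235 "video" range) and total = 2^8 = 256.
                         */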
                        int const lim_start = pow(2, _bits_per_pixel.get() / 3 - 4);
                        int const lim_end = 235 * pow(2, _bits_per_pixel.get() / 3 - 8);
                        int const total = pow(2, _bits_per_pixel.get() / 3);

                        switch (_color_range.get_value_or(AVCOL_RANGE_UNSPECIFIED)) {
                        case AVCOL_RANGE_UNSPECIFIED:
                                /// TRANSLATORS: this means that the range of pixel values used in this
                                /// file is unknown (not specified in the file).
                                p.push_back (UserProperty (UserProperty::VIDEO, _("Colour range"), _("Unspecified")));
                                break;
                        case AVCOL_RANGE_MPEG:
                                /// TRANSLATORS: this means that the range of pixel values used in this
                                /// file is limited, so that not all possible values are valid.
                                p.push_back (
                                        UserProperty (
                                                UserProperty::VIDEO, _("Colour range"), String::compose(_("Limited (%1-%2)"), lim_start, lim_end)
                                                )
                                        );
                                break;
                        case AVCOL_RANGE_JPEG:
                                /// TRANSLATORS: this means that the range of pixel values used in this
                                /// file is full, so that all possible pixel values are valid.
                                p.push_back (UserProperty (UserProperty::VIDEO, _("Colour range"), String::compose (_("Full (0-%1)"), total)));
                                break;
                        default:
                                DCPOMATIC_ASSERT (false);
                        }
                } else {
                        switch (_color_range.get_value_or(AVCOL_RANGE_UNSPECIFIED)) {
                        case AVCOL_RANGE_UNSPECIFIED:
                                /// TRANSLATORS: this means that the range of pixel values used in this
                                /// file is unknown (not specified in the file).
                                p.push_back (UserProperty (UserProperty::VIDEO, _("Colour range"), _("Unspecified")));
                                break;
                        case AVCOL_RANGE_MPEG:
                                /// TRANSLATORS: this means that the range of pixel values used in this
                                /// file is limited, so that not all possible values are valid.
                                p.push_back (UserProperty (UserProperty::VIDEO, _("Colour range"), _("Limited")));
                                break;
                        case AVCOL_RANGE_JPEG:
                                /// TRANSLATORS: this means that the range of pixel values used in this
                                /// file is full, so that all possible pixel values are valid.
                                p.push_back (UserProperty (UserProperty::VIDEO, _("Colour range"), _("Full")));
                                break;
                        default:
                                DCPOMATIC_ASSERT (false);
                        }
                }

                char const * primaries[] = {
                        _("Unspecified"),
                        _("BT709"),
                        _("Unspecified"),
                        _("Unspecified"),
                        _("BT470M"),
                        _("BT470BG"),
                        _("SMPTE 170M (BT601)"),
                        _("SMPTE 240M"),
                        _("Film"),
                        _("BT2020"),
                        _("SMPTE ST 428-1 (CIE 1931 XYZ)"),
                        _("SMPTE ST 431-2 (2011)"),
                        _("SMPTE ST 432-1 D65 (2010)"), // 12
                        "", // 13
                        "", // 14
                        "", // 15
                        "", // 16
                        "", // 17
                        "", // 18
                        "", // 19
                        "", // 20
                        "", // 21
                        _("JEDEC P22")
                };

                DCPOMATIC_ASSERT (AVCOL_PRI_NB <= 23);
                p.push_back (UserProperty (UserProperty::VIDEO, _("Colour primaries"), primaries[_color_primaries.get_value_or(AVCOL_PRI_UNSPECIFIED)]));

                char const * transfers[] = {
                        _("Unspecified"),
                        _("BT709"),
                        _("Unspecified"),
                        _("Unspecified"),
                        _("Gamma 22 (BT470M)"),
                        _("Gamma 28 (BT470BG)"),
                        _("SMPTE 170M (BT601)"),
                        _("SMPTE 240M"),
                        _("Linear"),
                        _("Logarithmic (100:1 range)"),
                        _("Logarithmic (316:1 range)"),
                        _("IEC61966-2-4"),
                        _("BT1361 extended colour gamut"),
                        _("IEC61966-2-1 (sRGB or sYCC)"),
                        _("BT2020 for a 10-bit system"),
                        _("BT2020 for a 12-bit system"),
                        _("SMPTE ST 2084 for 10, 12, 14 and 16 bit systems"),
                        _("SMPTE ST 428-1"),
                        _("ARIB STD-B67 ('Hybrid log-gamma')")
                };

                DCPOMATIC_ASSERT (AVCOL_TRC_NB <= 19);
                p.push_back (UserProperty (UserProperty::VIDEO, _("Colour transfer characteristic"), transfers[_color_trc.get_value_or(AVCOL_TRC_UNSPECIFIED)]));

                char const * spaces[] = {
                        _("RGB / sRGB (IEC61966-2-1)"),
                        _("BT709"),
                        _("Unspecified"),
                        _("Unspecified"),
                        _("FCC"),
                        _("BT470BG (BT601-6)"),
                        _("SMPTE 170M (BT601-6)"),
                        _("SMPTE 240M"),
                        _("YCOCG"),
                        _("BT2020 non-constant luminance"),
                        _("BT2020 constant luminance"),
                        _("SMPTE 2085, Y'D'zD'x"),
                        _("Chroma-derived non-constant luminance"),
                        _("Chroma-derived constant luminance"),
                        _("BT2100")
                };

                DCPOMATIC_ASSERT (AVCOL_SPC_NB == 15);
                p.push_back (UserProperty (UserProperty::VIDEO, _("Colourspace"), spaces[_colorspace.get_value_or(AVCOL_SPC_UNSPECIFIED)]));

                if (_bits_per_pixel) {
                        p.push_back (UserProperty (UserProperty::VIDEO, _("Bits per pixel"), *_bits_per_pixel));
                }
        }

        if (audio) {
                audio->add_properties (film, p);
        }
}

/** Our subtitle streams have colour maps, which can be changed, but
 *  they have no way of signalling that change.  As a hack, we have this
 *  method which callers can use when they've modified one of our subtitle
 *  streams.
 */
void
FFmpegContent::signal_subtitle_stream_changed ()
{
        /* XXX: this is too late; really it should be before the change */
        ChangeSignaller<Content> cc (this, FFmpegContentProperty::SUBTITLE_STREAM);
}

vector<shared_ptr<FFmpegAudioStream> >
FFmpegContent::ffmpeg_audio_streams () const
{
        vector<shared_ptr<FFmpegAudioStream> > fa;

        if (audio) {
                for (auto i: audio->streams()) {
                        fa.push_back (dynamic_pointer_cast<FFmpegAudioStream> (i));
                }
        }

        return fa;
}

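/** Copy user settings from @p c if it is also FFmpegContent; in addition to what
 *  Content::take_settings_from copies, this takes the video filters.
 */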
void
FFmpegContent::take_settings_from (shared_ptr<const Content> c)
{
        auto fc = dynamic_pointer_cast<const FFmpegContent> (c);
        if (!fc) {
                return;
        }

        Content::take_settings_from (c);
        _filters = fc->_filters;
}