/*
    Copyright (C) 2013-2021 Carl Hetherington <cth@carlh.net>

    This file is part of DCP-o-matic.

    DCP-o-matic is free software; you can redistribute it and/or modify
    it under the terms of the GNU General Public License as published by
    the Free Software Foundation; either version 2 of the License, or
    (at your option) any later version.

    DCP-o-matic is distributed in the hope that it will be useful,
    but WITHOUT ANY WARRANTY; without even the implied warranty of
    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
    GNU General Public License for more details.

    You should have received a copy of the GNU General Public License
    along with DCP-o-matic.  If not, see <http://www.gnu.org/licenses/>.


*/

#include "ffmpeg_content.h"
#include "video_content.h"
#include "audio_content.h"
#include "ffmpeg_examiner.h"
#include "ffmpeg_subtitle_stream.h"
#include "ffmpeg_audio_stream.h"
#include "compose.hpp"
#include "job.h"
#include "util.h"
#include "filter.h"
#include "film.h"
#include "log.h"
#include "config.h"
#include "exceptions.h"
#include "frame_rate_change.h"
#include "text_content.h"
#include <dcp/raw_convert.h>
#include <libcxml/cxml.h>
extern "C" {
#include <libavformat/avformat.h>
#include <libavutil/pixdesc.h>
}
#include <libxml++/libxml++.h>
#include <iostream>

#include "i18n.h"


using std::string;
using std::vector;
using std::list;
using std::cout;
using std::pair;
using std::make_pair;
using std::max;
using std::make_shared;
using std::shared_ptr;
using std::dynamic_pointer_cast;
using boost::optional;
using dcp::raw_convert;
using namespace dcpomatic;


int const FFmpegContentProperty::SUBTITLE_STREAMS = 100;
int const FFmpegContentProperty::SUBTITLE_STREAM = 101;
int const FFmpegContentProperty::FILTERS = 102;
int const FFmpegContentProperty::KDM = 103;


FFmpegContent::FFmpegContent (boost::filesystem::path p)
        : Content (p)
{

}


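/** @return the value of an optional numeric XML child node, cast to the enum type T, if the node is present */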
template <class T>
optional<T>
get_optional_enum (cxml::ConstNodePtr node, string name)
{
        auto const v = node->optional_number_child<int>(name);
        if (!v) {
                return optional<T>();
        }
        return static_cast<T>(*v);
}


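/** Construct a FFmpegContent from an XML metadata node.
 *  @param notes List to which a note is added if a previously-chosen filter is no longer supported.
 */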
FFmpegContent::FFmpegContent (cxml::ConstNodePtr node, int version, list<string>& notes)
        : Content (node)
{
        video = VideoContent::from_xml (this, node, version);
        audio = AudioContent::from_xml (this, node, version);
        text = TextContent::from_xml (this, node, version);

        for (auto i: node->node_children("SubtitleStream")) {
                _subtitle_streams.push_back (make_shared<FFmpegSubtitleStream>(i, version));
                if (i->optional_number_child<int>("Selected")) {
                        _subtitle_stream = _subtitle_streams.back ();
                }
        }

        for (auto i: node->node_children("AudioStream")) {
                auto as = make_shared<FFmpegAudioStream>(i, version);
                audio->add_stream (as);
                if (version < 11 && !i->optional_node_child ("Selected")) {
                        /* This is an old file and this stream is not selected, so un-map it */
                        as->set_mapping (AudioMapping (as->channels (), MAX_DCP_AUDIO_CHANNELS));
                }
        }

        for (auto i: node->node_children("Filter")) {
                Filter const * f = Filter::from_id(i->content());
                if (f) {
                        _filters.push_back (f);
                } else {
                        notes.push_back (String::compose (_("DCP-o-matic no longer supports the `%1' filter, so it has been turned off."), i->content()));
                }
        }

        auto const f = node->optional_number_child<ContentTime::Type> ("FirstVideo");
        if (f) {
                _first_video = ContentTime (f.get ());
        }

        _color_range = get_optional_enum<AVColorRange>(node, "ColorRange");
        _color_primaries = get_optional_enum<AVColorPrimaries>(node, "ColorPrimaries");
        _color_trc = get_optional_enum<AVColorTransferCharacteristic>(node, "ColorTransferCharacteristic");
        _colorspace = get_optional_enum<AVColorSpace>(node, "Colorspace");
        _bits_per_pixel = node->optional_number_child<int> ("BitsPerPixel");
}


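/** Construct a FFmpegContent by joining several other pieces of content end-to-end.
 *  The pieces must agree on the presence of video, audio and text, and must use the
 *  same subtitle stream.
 */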
FFmpegContent::FFmpegContent (vector<shared_ptr<Content>> c)
        : Content (c)
{
        auto i = c.begin ();

        bool need_video = false;
        bool need_audio = false;
        bool need_text = false;

        if (i != c.end ()) {
                need_video = static_cast<bool> ((*i)->video);
                need_audio = static_cast<bool> ((*i)->audio);
                need_text = !(*i)->text.empty();
        }

        while (i != c.end ()) {
                if (need_video != static_cast<bool> ((*i)->video)) {
                        throw JoinError (_("Content to be joined must all have or not have video"));
                }
                if (need_audio != static_cast<bool> ((*i)->audio)) {
                        throw JoinError (_("Content to be joined must all have or not have audio"));
                }
                if (need_text != !(*i)->text.empty()) {
                        throw JoinError (_("Content to be joined must all have or not have subtitles or captions"));
                }
                ++i;
        }

        if (need_video) {
                video = make_shared<VideoContent>(this, c);
        }
        if (need_audio) {
                audio = make_shared<AudioContent>(this, c);
        }
        if (need_text) {
                text.push_back (make_shared<TextContent>(this, c));
        }

        auto ref = dynamic_pointer_cast<FFmpegContent> (c[0]);
        DCPOMATIC_ASSERT (ref);

        for (size_t i = 0; i < c.size(); ++i) {
                auto fc = dynamic_pointer_cast<FFmpegContent>(c[i]);
                if (fc->only_text() && fc->only_text()->use() && *(fc->_subtitle_stream.get()) != *(ref->_subtitle_stream.get())) {
                        throw JoinError (_("Content to be joined must use the same subtitle stream."));
                }
        }

        /* XXX: should probably check that more of the stuff below is the same in *this and ref */

        _subtitle_streams = ref->subtitle_streams ();
        _subtitle_stream = ref->subtitle_stream ();
        _first_video = ref->_first_video;
        _filters = ref->_filters;
        _color_range = ref->_color_range;
        _color_primaries = ref->_color_primaries;
        _color_trc = ref->_color_trc;
        _colorspace = ref->_colorspace;
        _bits_per_pixel = ref->_bits_per_pixel;
}


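/** Write this content's details and settings to an XML node */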
void
FFmpegContent::as_xml (xmlpp::Node* node, bool with_paths) const
{
        node->add_child("Type")->add_child_text("FFmpeg");
        Content::as_xml (node, with_paths);

        if (video) {
                video->as_xml (node);
        }

        if (audio) {
                audio->as_xml (node);

                for (auto i: audio->streams()) {
                        auto f = dynamic_pointer_cast<FFmpegAudioStream> (i);
                        DCPOMATIC_ASSERT (f);
                        f->as_xml (node->add_child("AudioStream"));
                }
        }

        if (only_text()) {
                only_text()->as_xml (node);
        }

        boost::mutex::scoped_lock lm (_mutex);

        for (auto i: _subtitle_streams) {
                auto t = node->add_child("SubtitleStream");
                if (_subtitle_stream && i == _subtitle_stream) {
                        t->add_child("Selected")->add_child_text("1");
                }
                i->as_xml (t);
        }

        for (auto i: _filters) {
                node->add_child("Filter")->add_child_text(i->id());
        }

        if (_first_video) {
                node->add_child("FirstVideo")->add_child_text(raw_convert<string>(_first_video.get().get()));
        }

        if (_color_range) {
                node->add_child("ColorRange")->add_child_text(raw_convert<string>(static_cast<int>(*_color_range)));
        }
        if (_color_primaries) {
                node->add_child("ColorPrimaries")->add_child_text(raw_convert<string>(static_cast<int>(*_color_primaries)));
        }
        if (_color_trc) {
                node->add_child("ColorTransferCharacteristic")->add_child_text(raw_convert<string>(static_cast<int>(*_color_trc)));
        }
        if (_colorspace) {
                node->add_child("Colorspace")->add_child_text(raw_convert<string>(static_cast<int>(*_colorspace)));
        }
        if (_bits_per_pixel) {
                node->add_child("BitsPerPixel")->add_child_text(raw_convert<string>(*_bits_per_pixel));
        }
}


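/** Examine the content with a FFmpegExaminer and store the results: video and audio
 *  details, any subtitle streams, filters to correct rotation, and an adjustment for
 *  soft 2:3 pulldown if it is detected.
 */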
void
FFmpegContent::examine (shared_ptr<const Film> film, shared_ptr<Job> job)
{
        ContentChangeSignaller cc1 (this, FFmpegContentProperty::SUBTITLE_STREAMS);
        ContentChangeSignaller cc2 (this, FFmpegContentProperty::SUBTITLE_STREAM);

        if (job) {
                job->set_progress_unknown ();
        }

        Content::examine (film, job);

        auto examiner = make_shared<FFmpegExaminer>(shared_from_this (), job);

        if (examiner->has_video ()) {
                video = make_shared<VideoContent>(this);
                video->take_from_examiner (examiner);
        }

        auto first_path = path (0);

        {
                boost::mutex::scoped_lock lm (_mutex);

                if (examiner->has_video ()) {
                        _first_video = examiner->first_video ();
                        _color_range = examiner->color_range ();
                        _color_primaries = examiner->color_primaries ();
                        _color_trc = examiner->color_trc ();
                        _colorspace = examiner->colorspace ();
                        _bits_per_pixel = examiner->bits_per_pixel ();

                        if (examiner->rotation()) {
                                auto rot = *examiner->rotation ();
                                if (fabs (rot - 180) < 1.0) {
                                        _filters.push_back (Filter::from_id ("vflip"));
                                        _filters.push_back (Filter::from_id ("hflip"));
                                } else if (fabs (rot - 90) < 1.0) {
                                        _filters.push_back (Filter::from_id ("90clock"));
                                } else if (fabs (rot - 270) < 1.0) {
                                        _filters.push_back (Filter::from_id ("90anticlock"));
                                }
                        }
                }

                if (!examiner->audio_streams().empty()) {
                        audio = make_shared<AudioContent>(this);

                        for (auto i: examiner->audio_streams()) {
                                audio->add_stream (i);
                        }

                        auto as = audio->streams().front();
                        auto m = as->mapping ();
                        m.make_default (film ? film->audio_processor() : nullptr, first_path);
                        as->set_mapping (m);
                }

                _subtitle_streams = examiner->subtitle_streams ();
                if (!_subtitle_streams.empty ()) {
                        text.clear ();
                        text.push_back (make_shared<TextContent>(this, TextType::OPEN_SUBTITLE, TextType::UNKNOWN));
                        _subtitle_stream = _subtitle_streams.front ();
                }
        }

        if (examiner->has_video ()) {
                set_default_colour_conversion ();
        }

        if (examiner->has_video() && examiner->pulldown() && video_frame_rate() && fabs(*video_frame_rate() - 29.97) < 0.001) {
                /* FFmpeg has detected this file as 29.97 and the examiner thinks it is using "soft" 2:3 pulldown (telecine).
                 * This means we can treat it as a 23.976fps file.
                 */
                set_video_frame_rate (24000.0 / 1001);
                video->set_length (video->length() * 24.0 / 30);
        }
}


string
FFmpegContent::summary () const
{
        if (video && audio) {
                return String::compose (_("%1 [movie]"), path_summary());
        } else if (video) {
                return String::compose (_("%1 [video]"), path_summary());
        } else if (audio) {
                return String::compose (_("%1 [audio]"), path_summary());
        }

        return path_summary ();
}


string
FFmpegContent::technical_summary () const
{
        string as = "";
        for (auto i: ffmpeg_audio_streams ()) {
                as += i->technical_summary () + " ";
        }

        if (as.empty ()) {
                as = "none";
        }

        string ss = "none";
        if (_subtitle_stream) {
                ss = _subtitle_stream->technical_summary ();
        }

        auto filt = Filter::ffmpeg_string (_filters);

        auto s = Content::technical_summary ();

        if (video) {
                s += " - " + video->technical_summary ();
        }

        if (audio) {
                s += " - " + audio->technical_summary ();
        }

        return s + String::compose (
                "ffmpeg: audio %1 subtitle %2 filters %3", as, ss, filt
                );
}


void
FFmpegContent::set_subtitle_stream (shared_ptr<FFmpegSubtitleStream> s)
{
        ContentChangeSignaller cc (this, FFmpegContentProperty::SUBTITLE_STREAM);

        {
                boost::mutex::scoped_lock lm (_mutex);
                _subtitle_stream = s;
        }
}


bool
operator== (FFmpegStream const & a, FFmpegStream const & b)
{
        return a._id == b._id;
}


bool
operator!= (FFmpegStream const & a, FFmpegStream const & b)
{
        return a._id != b._id;
}


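/** @return the length that this content will run for in the film, taking the frame rate
 *  change into account; if there is no video, the longest audio stream is used.
 */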
DCPTime
FFmpegContent::full_length (shared_ptr<const Film> film) const
{
        FrameRateChange const frc (film, shared_from_this());
        if (video) {
                return DCPTime::from_frames (llrint (video->length_after_3d_combine() * frc.factor()), film->video_frame_rate());
        }

        if (audio) {
                DCPTime longest;
                for (auto i: audio->streams()) {
                        longest = max (longest, DCPTime::from_frames(llrint(i->length() / frc.speed_up), i->frame_rate()));
                }
                return longest;
        }

        /* XXX: subtitle content? */

        return {};
}


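/** @return an approximate length, treating the video (or longest audio stream) length as a frame count at 24fps */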
DCPTime
FFmpegContent::approximate_length () const
{
        if (video) {
                return DCPTime::from_frames (video->length_after_3d_combine(), 24);
        }

        DCPOMATIC_ASSERT (audio);

        Frame longest = 0;
        for (auto i: audio->streams()) {
                longest = max (longest, Frame(llrint(i->length())));
        }

        return DCPTime::from_frames (longest, 24);
}


void
FFmpegContent::set_filters (vector<Filter const *> const & filters)
{
        ContentChangeSignaller cc (this, FFmpegContentProperty::FILTERS);

        {
                boost::mutex::scoped_lock lm (_mutex);
                _filters = filters;
        }
}


string
FFmpegContent::identifier () const
{
        string s = Content::identifier();

        if (video) {
                s += "_" + video->identifier();
        }

        if (only_text() && only_text()->use() && only_text()->burn()) {
                s += "_" + only_text()->identifier();
        }

        boost::mutex::scoped_lock lm (_mutex);

        if (_subtitle_stream) {
                s += "_" + _subtitle_stream->identifier ();
        }

        for (auto i: _filters) {
                s += "_" + i->id();
        }

        return s;
}


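/** Choose a default colour conversion for the video from the colourspace that FFmpeg
 *  reported, falling back to a guess based on the picture width (Rec. 601 below 1080
 *  pixels wide, otherwise Rec. 709).
 */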
void
FFmpegContent::set_default_colour_conversion ()
{
        DCPOMATIC_ASSERT (video);

        auto const s = video->size ();

        boost::mutex::scoped_lock lm (_mutex);

        switch (_colorspace.get_value_or(AVCOL_SPC_UNSPECIFIED)) {
        case AVCOL_SPC_RGB:
                video->set_colour_conversion (PresetColourConversion::from_id ("srgb").conversion);
                break;
        case AVCOL_SPC_BT709:
                video->set_colour_conversion (PresetColourConversion::from_id ("rec709").conversion);
                break;
        case AVCOL_SPC_BT470BG:
        case AVCOL_SPC_SMPTE170M:
        case AVCOL_SPC_SMPTE240M:
                video->set_colour_conversion (PresetColourConversion::from_id ("rec601").conversion);
                break;
        case AVCOL_SPC_BT2020_CL:
        case AVCOL_SPC_BT2020_NCL:
                video->set_colour_conversion (PresetColourConversion::from_id ("rec2020").conversion);
                break;
        default:
                if (s.width < 1080) {
                        video->set_colour_conversion (PresetColourConversion::from_id ("rec601").conversion);
                } else {
                        video->set_colour_conversion (PresetColourConversion::from_id ("rec709").conversion);
                }
                break;
        }
}


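/** Add user-visible properties of this content (colour range, primaries, transfer
 *  characteristic, colourspace and bit depth) to a list for presentation to the user.
 */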
void
FFmpegContent::add_properties (shared_ptr<const Film> film, list<UserProperty>& p) const
{
        Content::add_properties (film, p);

        if (video) {
                video->add_properties (p);

                if (_bits_per_pixel) {
                        /* Assuming there are three components, bits per component is _bits_per_pixel / 3 */
                        int const lim_start = pow(2, _bits_per_pixel.get() / 3 - 4);
                        int const lim_end = 235 * pow(2, _bits_per_pixel.get() / 3 - 8);
                        int const total = pow(2, _bits_per_pixel.get() / 3);

                        switch (_color_range.get_value_or(AVCOL_RANGE_UNSPECIFIED)) {
                        case AVCOL_RANGE_UNSPECIFIED:
                                /// TRANSLATORS: this means that the range of pixel values used in this
                                /// file is unknown (not specified in the file).
                                p.push_back (UserProperty (UserProperty::VIDEO, _("Colour range"), _("Unspecified")));
                                break;
                        case AVCOL_RANGE_MPEG:
                                /// TRANSLATORS: this means that the range of pixel values used in this
                                /// file is limited, so that not all possible values are valid.
                                p.push_back (
                                        UserProperty (
                                                UserProperty::VIDEO, _("Colour range"), String::compose(_("Limited (%1-%2)"), lim_start, lim_end)
                                                )
                                        );
                                break;
                        case AVCOL_RANGE_JPEG:
                                /// TRANSLATORS: this means that the range of pixel values used in this
                                /// file is full, so that all possible pixel values are valid.
                                p.push_back (UserProperty (UserProperty::VIDEO, _("Colour range"), String::compose (_("Full (0-%1)"), total)));
                                break;
                        default:
                                DCPOMATIC_ASSERT (false);
                        }
                } else {
                        switch (_color_range.get_value_or(AVCOL_RANGE_UNSPECIFIED)) {
                        case AVCOL_RANGE_UNSPECIFIED:
                                /// TRANSLATORS: this means that the range of pixel values used in this
                                /// file is unknown (not specified in the file).
                                p.push_back (UserProperty (UserProperty::VIDEO, _("Colour range"), _("Unspecified")));
                                break;
                        case AVCOL_RANGE_MPEG:
                                /// TRANSLATORS: this means that the range of pixel values used in this
                                /// file is limited, so that not all possible values are valid.
                                p.push_back (UserProperty (UserProperty::VIDEO, _("Colour range"), _("Limited")));
                                break;
                        case AVCOL_RANGE_JPEG:
                                /// TRANSLATORS: this means that the range of pixel values used in this
                                /// file is full, so that all possible pixel values are valid.
                                p.push_back (UserProperty (UserProperty::VIDEO, _("Colour range"), _("Full")));
                                break;
                        default:
                                DCPOMATIC_ASSERT (false);
                        }
                }

                char const * primaries[] = {
                        _("Unspecified"),
                        _("BT709"),
                        _("Unspecified"),
                        _("Unspecified"),
                        _("BT470M"),
                        _("BT470BG"),
                        _("SMPTE 170M (BT601)"),
                        _("SMPTE 240M"),
                        _("Film"),
                        _("BT2020"),
                        _("SMPTE ST 428-1 (CIE 1931 XYZ)"),
                        _("SMPTE ST 431-2 (2011)"),
                        _("SMPTE ST 432-1 D65 (2010)"), // 12
                        "", // 13
                        "", // 14
                        "", // 15
                        "", // 16
                        "", // 17
                        "", // 18
                        "", // 19
                        "", // 20
                        "", // 21
                        _("JEDEC P22")
                };

                DCPOMATIC_ASSERT (AVCOL_PRI_NB <= 23);
                p.push_back (UserProperty (UserProperty::VIDEO, _("Colour primaries"), primaries[_color_primaries.get_value_or(AVCOL_PRI_UNSPECIFIED)]));

                char const * transfers[] = {
                        _("Unspecified"),
                        _("BT709"),
                        _("Unspecified"),
                        _("Unspecified"),
                        _("Gamma 22 (BT470M)"),
                        _("Gamma 28 (BT470BG)"),
                        _("SMPTE 170M (BT601)"),
                        _("SMPTE 240M"),
                        _("Linear"),
                        _("Logarithmic (100:1 range)"),
                        _("Logarithmic (316:1 range)"),
                        _("IEC61966-2-4"),
                        _("BT1361 extended colour gamut"),
                        _("IEC61966-2-1 (sRGB or sYCC)"),
                        _("BT2020 for a 10-bit system"),
                        _("BT2020 for a 12-bit system"),
                        _("SMPTE ST 2084 for 10, 12, 14 and 16 bit systems"),
                        _("SMPTE ST 428-1"),
                        _("ARIB STD-B67 ('Hybrid log-gamma')")
                };

                DCPOMATIC_ASSERT (AVCOL_TRC_NB <= 19);
                p.push_back (UserProperty (UserProperty::VIDEO, _("Colour transfer characteristic"), transfers[_color_trc.get_value_or(AVCOL_TRC_UNSPECIFIED)]));

                char const * spaces[] = {
                        _("RGB / sRGB (IEC61966-2-1)"),
                        _("BT709"),
                        _("Unspecified"),
                        _("Unspecified"),
                        _("FCC"),
                        _("BT470BG (BT601-6)"),
                        _("SMPTE 170M (BT601-6)"),
                        _("SMPTE 240M"),
                        _("YCOCG"),
                        _("BT2020 non-constant luminance"),
                        _("BT2020 constant luminance"),
                        _("SMPTE 2085, Y'D'zD'x"),
                        _("Chroma-derived non-constant luminance"),
                        _("Chroma-derived constant luminance"),
                        _("BT2100")
                };

                DCPOMATIC_ASSERT (AVCOL_SPC_NB == 15);
                p.push_back (UserProperty (UserProperty::VIDEO, _("Colourspace"), spaces[_colorspace.get_value_or(AVCOL_SPC_UNSPECIFIED)]));

                if (_bits_per_pixel) {
                        p.push_back (UserProperty (UserProperty::VIDEO, _("Bits per pixel"), *_bits_per_pixel));
                }
        }

        if (audio) {
                audio->add_properties (film, p);
        }
}


/** Our subtitle streams have colour maps, which can be changed, but
 *  they have no way of signalling that change.  As a hack, we have this
 *  method which callers can use when they've modified one of our subtitle
 *  streams.
 */
void
FFmpegContent::signal_subtitle_stream_changed ()
{
        /* XXX: this is too late; really it should be before the change */
        ContentChangeSignaller cc (this, FFmpegContentProperty::SUBTITLE_STREAM);
}


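/** @return our audio streams, downcast to FFmpegAudioStream */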
vector<shared_ptr<FFmpegAudioStream>>
FFmpegContent::ffmpeg_audio_streams () const
{
        vector<shared_ptr<FFmpegAudioStream>> fa;

        if (audio) {
                for (auto i: audio->streams()) {
                        fa.push_back (dynamic_pointer_cast<FFmpegAudioStream>(i));
                }
        }

        return fa;
}


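/** Copy settings from another piece of content, if it is also FFmpegContent: the base
 *  Content settings and our filters.
 */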
void
FFmpegContent::take_settings_from (shared_ptr<const Content> c)
{
        auto fc = dynamic_pointer_cast<const FFmpegContent> (c);
        if (!fc) {
                return;
        }

        Content::take_settings_from (c);
        _filters = fc->_filters;
}