src/lib/ffmpeg_content.cc
/*
    Copyright (C) 2013-2021 Carl Hetherington <cth@carlh.net>

    This file is part of DCP-o-matic.

    DCP-o-matic is free software; you can redistribute it and/or modify
    it under the terms of the GNU General Public License as published by
    the Free Software Foundation; either version 2 of the License, or
    (at your option) any later version.

    DCP-o-matic is distributed in the hope that it will be useful,
    but WITHOUT ANY WARRANTY; without even the implied warranty of
    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
    GNU General Public License for more details.

    You should have received a copy of the GNU General Public License
    along with DCP-o-matic.  If not, see <http://www.gnu.org/licenses/>.


*/

#include "audio_content.h"
#include "compose.hpp"
#include "config.h"
#include "constants.h"
#include "exceptions.h"
#include "ffmpeg_audio_stream.h"
#include "ffmpeg_content.h"
#include "ffmpeg_examiner.h"
#include "ffmpeg_subtitle_stream.h"
#include "film.h"
#include "filter.h"
#include "frame_rate_change.h"
#include "job.h"
#include "log.h"
#include "text_content.h"
#include "video_content.h"
#include <dcp/raw_convert.h>
#include <libcxml/cxml.h>
extern "C" {
#include <libavformat/avformat.h>
#include <libavutil/pixdesc.h>
}
#include <libxml++/libxml++.h>
#include <iostream>

#include "i18n.h"


using std::string;
using std::vector;
using std::list;
using std::cout;
using std::pair;
using std::make_pair;
using std::max;
using std::make_shared;
using std::shared_ptr;
using std::dynamic_pointer_cast;
using boost::optional;
using dcp::raw_convert;
using namespace dcpomatic;


int const FFmpegContentProperty::SUBTITLE_STREAMS = 100;
int const FFmpegContentProperty::SUBTITLE_STREAM = 101;
int const FFmpegContentProperty::FILTERS = 102;
int const FFmpegContentProperty::KDM = 103;


FFmpegContent::FFmpegContent (boost::filesystem::path p)
        : Content (p)
{

}


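/** Read the optional numeric child <name> of an XML node and cast it to the
 *  enum type T, returning an empty optional if the child is not present.
 */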
template <class T>
optional<T>
get_optional_enum (cxml::ConstNodePtr node, string name)
{
        auto const v = node->optional_number_child<int>(name);
        if (!v) {
                return optional<T>();
        }
        return static_cast<T>(*v);
}


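/** Construct a FFmpegContent from an XML node written by an earlier save,
 *  restoring its video/audio/text parts, streams, filters and colour metadata.
 *  Any problems found (such as filters that are no longer supported) are
 *  reported via @p notes.
 */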
FFmpegContent::FFmpegContent (cxml::ConstNodePtr node, int version, list<string>& notes)
        : Content (node)
{
        _color_range = get_optional_enum<AVColorRange>(node, "ColorRange");

        VideoRange const video_range_hint = (_color_range && *_color_range == AVCOL_RANGE_JPEG) ? VideoRange::FULL : VideoRange::VIDEO;

        video = VideoContent::from_xml (this, node, version, video_range_hint);
        audio = AudioContent::from_xml (this, node, version);
        text = TextContent::from_xml (this, node, version, notes);

        for (auto i: node->node_children("SubtitleStream")) {
                _subtitle_streams.push_back (make_shared<FFmpegSubtitleStream>(i, version));
                if (i->optional_number_child<int>("Selected")) {
                        _subtitle_stream = _subtitle_streams.back ();
                }
        }

        for (auto i: node->node_children("AudioStream")) {
                auto as = make_shared<FFmpegAudioStream>(i, version);
                audio->add_stream (as);
                if (version < 11 && !i->optional_node_child ("Selected")) {
                        /* This is an old file and this stream is not selected, so un-map it */
                        as->set_mapping (AudioMapping (as->channels (), MAX_DCP_AUDIO_CHANNELS));
                }
        }

        for (auto i: node->node_children("Filter")) {
                if (auto filter = Filter::from_id(i->content())) {
                        _filters.push_back(*filter);
                } else {
                        notes.push_back (String::compose (_("DCP-o-matic no longer supports the `%1' filter, so it has been turned off."), i->content()));
                }
        }

        auto const f = node->optional_number_child<ContentTime::Type> ("FirstVideo");
        if (f) {
                _first_video = ContentTime (f.get ());
        }

        _color_primaries = get_optional_enum<AVColorPrimaries>(node, "ColorPrimaries");
        _color_trc = get_optional_enum<AVColorTransferCharacteristic>(node, "ColorTransferCharacteristic");
        _colorspace = get_optional_enum<AVColorSpace>(node, "Colorspace");
        _bits_per_pixel = node->optional_number_child<int> ("BitsPerPixel");
}


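/** Construct a FFmpegContent by joining several pieces of content end-to-end.
 *  They must either all have or all lack video, audio and text, and must all
 *  use the same subtitle stream.
 */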
FFmpegContent::FFmpegContent (vector<shared_ptr<Content>> c)
        : Content (c)
{
        auto i = c.begin ();

        bool need_video = false;
        bool need_audio = false;
        bool need_text = false;

        if (i != c.end ()) {
                need_video = static_cast<bool> ((*i)->video);
                need_audio = static_cast<bool> ((*i)->audio);
                need_text = !(*i)->text.empty();
        }

        while (i != c.end ()) {
                if (need_video != static_cast<bool> ((*i)->video)) {
                        throw JoinError (_("Content to be joined must all have or not have video"));
                }
                if (need_audio != static_cast<bool> ((*i)->audio)) {
                        throw JoinError (_("Content to be joined must all have or not have audio"));
                }
                if (need_text != !(*i)->text.empty()) {
                        throw JoinError (_("Content to be joined must all have or not have subtitles or captions"));
                }
                ++i;
        }

        if (need_video) {
                video = make_shared<VideoContent>(this, c);
        }
        if (need_audio) {
                audio = make_shared<AudioContent>(this, c);
        }
        if (need_text) {
                text.push_back (make_shared<TextContent>(this, c));
        }

        auto ref = dynamic_pointer_cast<FFmpegContent> (c[0]);
        DCPOMATIC_ASSERT (ref);

        for (size_t i = 0; i < c.size(); ++i) {
                auto fc = dynamic_pointer_cast<FFmpegContent>(c[i]);
                if (fc->only_text() && fc->only_text()->use() && *(fc->_subtitle_stream.get()) != *(ref->_subtitle_stream.get())) {
                        throw JoinError (_("Content to be joined must use the same subtitle stream."));
                }
        }

        /* XXX: should probably check that more of the stuff below is the same in *this and ref */

        _subtitle_streams = ref->subtitle_streams ();
        _subtitle_stream = ref->subtitle_stream ();
        _first_video = ref->_first_video;
        _filters = ref->_filters;
        _color_range = ref->_color_range;
        _color_primaries = ref->_color_primaries;
        _color_trc = ref->_color_trc;
        _colorspace = ref->_colorspace;
        _bits_per_pixel = ref->_bits_per_pixel;
}


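/** Write this content's settings to an XML node, as read back by the
 *  cxml::ConstNodePtr constructor above.
 */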
void
FFmpegContent::as_xml (xmlpp::Node* node, bool with_paths) const
{
        node->add_child("Type")->add_child_text("FFmpeg");
        Content::as_xml (node, with_paths);

        if (video) {
                video->as_xml (node);
        }

        if (audio) {
                audio->as_xml (node);

                for (auto i: audio->streams()) {
                        auto f = dynamic_pointer_cast<FFmpegAudioStream> (i);
                        DCPOMATIC_ASSERT (f);
                        f->as_xml (node->add_child("AudioStream"));
                }
        }

        if (only_text()) {
                only_text()->as_xml (node);
        }

        boost::mutex::scoped_lock lm (_mutex);

        for (auto i: _subtitle_streams) {
                auto t = node->add_child("SubtitleStream");
                if (_subtitle_stream && i == _subtitle_stream) {
                        t->add_child("Selected")->add_child_text("1");
                }
                i->as_xml (t);
        }

        for (auto i: _filters) {
                node->add_child("Filter")->add_child_text(i.id());
        }

        if (_first_video) {
                node->add_child("FirstVideo")->add_child_text(raw_convert<string>(_first_video.get().get()));
        }

        if (_color_range) {
                node->add_child("ColorRange")->add_child_text(raw_convert<string>(static_cast<int>(*_color_range)));
        }
        if (_color_primaries) {
                node->add_child("ColorPrimaries")->add_child_text(raw_convert<string>(static_cast<int>(*_color_primaries)));
        }
        if (_color_trc) {
                node->add_child("ColorTransferCharacteristic")->add_child_text(raw_convert<string>(static_cast<int>(*_color_trc)));
        }
        if (_colorspace) {
                node->add_child("Colorspace")->add_child_text(raw_convert<string>(static_cast<int>(*_colorspace)));
        }
        if (_bits_per_pixel) {
                node->add_child("BitsPerPixel")->add_child_text(raw_convert<string>(*_bits_per_pixel));
        }
}


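/** Examine our file(s) with FFmpegExaminer and set up our video, audio and
 *  text parts, stream lists and colour metadata from what it finds.
 */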
void
FFmpegContent::examine (shared_ptr<const Film> film, shared_ptr<Job> job)
{
        ContentChangeSignaller cc1 (this, FFmpegContentProperty::SUBTITLE_STREAMS);
        ContentChangeSignaller cc2 (this, FFmpegContentProperty::SUBTITLE_STREAM);

        if (job) {
                job->set_progress_unknown ();
        }

        Content::examine (film, job);

        auto examiner = make_shared<FFmpegExaminer>(shared_from_this (), job);

        if (examiner->has_video ()) {
                video.reset (new VideoContent (this));
                video->take_from_examiner(film, examiner);
        }

        auto first_path = path (0);

        {
                boost::mutex::scoped_lock lm (_mutex);

                if (examiner->has_video ()) {
                        _first_video = examiner->first_video ();
                        _color_range = examiner->color_range ();
                        _color_primaries = examiner->color_primaries ();
                        _color_trc = examiner->color_trc ();
                        _colorspace = examiner->colorspace ();
                        _bits_per_pixel = examiner->bits_per_pixel ();

                        if (examiner->rotation()) {
                                auto rot = *examiner->rotation ();
                                if (fabs (rot - 180) < 1.0) {
                                        _filters.push_back(*Filter::from_id("vflip"));
                                        _filters.push_back(*Filter::from_id("hflip"));
                                } else if (fabs (rot - 90) < 1.0) {
                                        _filters.push_back(*Filter::from_id("90clock"));
                                } else if (fabs (rot - 270) < 1.0) {
                                        _filters.push_back(*Filter::from_id("90anticlock"));
                                }
                        }
                        if (examiner->has_alpha()) {
                                _filters.push_back(*Filter::from_id("premultiply"));
                        }
                }

                if (!examiner->audio_streams().empty()) {
                        audio = make_shared<AudioContent>(this);

                        for (auto i: examiner->audio_streams()) {
                                audio->add_stream (i);
                        }

                        auto as = audio->streams().front();
                        auto m = as->mapping ();
                        m.make_default (film ? film->audio_processor() : 0, first_path);
                        as->set_mapping (m);
                }

                _subtitle_streams = examiner->subtitle_streams ();
                if (!_subtitle_streams.empty ()) {
                        text.clear ();
                        text.push_back (make_shared<TextContent>(this, TextType::OPEN_SUBTITLE, TextType::UNKNOWN));
                        _subtitle_stream = _subtitle_streams.front ();
                        text.front()->add_font(make_shared<dcpomatic::Font>(""));
                }
        }

        if (examiner->has_video ()) {
                set_default_colour_conversion ();
        }

        if (examiner->has_video() && examiner->pulldown() && video_frame_rate() && fabs(*video_frame_rate() - 29.97) < 0.001) {
                /* FFmpeg has detected this file as 29.97 and the examiner thinks it is using "soft" 2:3 pulldown (telecine).
                 * This means we can treat it as a 23.976fps file.
                 */
                set_video_frame_rate(film, 24000.0 / 1001);
                video->set_length (video->length() * 24.0 / 30);
        }
}


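/** @return a short description of this content: its path summary plus a tag
 *  showing whether it contains video and/or audio.
 */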
string
FFmpegContent::summary () const
{
        if (video && audio) {
                return String::compose (_("%1 [movie]"), path_summary());
        } else if (video) {
                return String::compose (_("%1 [video]"), path_summary());
        } else if (audio) {
                return String::compose (_("%1 [audio]"), path_summary());
        }

        return path_summary ();
}


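/** @return a detailed technical summary of this content, including its audio
 *  and subtitle streams and FFmpeg filter string.
 */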
string
FFmpegContent::technical_summary () const
{
        string as = "";
        for (auto i: ffmpeg_audio_streams ()) {
                as += i->technical_summary () + " " ;
        }

        if (as.empty ()) {
                as = "none";
        }

        string ss = "none";
        if (_subtitle_stream) {
                ss = _subtitle_stream->technical_summary ();
        }

        auto filt = Filter::ffmpeg_string (_filters);

        auto s = Content::technical_summary ();

        if (video) {
                s += " - " + video->technical_summary ();
        }

        if (audio) {
                s += " - " + audio->technical_summary ();
        }

        return s + String::compose (
                "ffmpeg: audio %1 subtitle %2 filters %3", as, ss, filt
                );
}


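/** Set the subtitle stream that will be used, signalling a change to
 *  FFmpegContentProperty::SUBTITLE_STREAM.
 */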
void
FFmpegContent::set_subtitle_stream (shared_ptr<FFmpegSubtitleStream> s)
{
        ContentChangeSignaller cc (this, FFmpegContentProperty::SUBTITLE_STREAM);

        {
                boost::mutex::scoped_lock lm (_mutex);
                _subtitle_stream = s;
        }
}


bool
operator== (FFmpegStream const & a, FFmpegStream const & b)
{
        return a._id == b._id;
}


bool
operator!= (FFmpegStream const & a, FFmpegStream const & b)
{
        return a._id != b._id;
}


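/** @return the full length of this content in @p film, based on its video
 *  length (or, failing that, its longest audio stream) adjusted for the
 *  frame rate change between content and film.
 */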
DCPTime
FFmpegContent::full_length (shared_ptr<const Film> film) const
{
        FrameRateChange const frc (film, shared_from_this());
        if (video) {
                return DCPTime::from_frames (llrint (video->length_after_3d_combine() * frc.factor()), film->video_frame_rate());
        }

        if (audio) {
                DCPTime longest;
                for (auto i: audio->streams()) {
                        longest = max (longest, DCPTime::from_frames(llrint(i->length() / frc.speed_up), i->frame_rate()));
                }
                return longest;
        }

        /* XXX: subtitle content? */

        return {};
}


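/** @return a rough length for this content, assuming 24fps, for use when an
 *  exact figure is not required.
 */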
DCPTime
FFmpegContent::approximate_length () const
{
        if (video) {
                return DCPTime::from_frames (video->length_after_3d_combine(), 24);
        }

        DCPOMATIC_ASSERT (audio);

        Frame longest = 0;
        for (auto i: audio->streams()) {
                longest = max (longest, Frame(llrint(i->length())));
        }

        return DCPTime::from_frames (longest, 24);
}


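/** Replace the list of FFmpeg filters applied to this content, signalling a
 *  change to FFmpegContentProperty::FILTERS.
 */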
void
FFmpegContent::set_filters(vector<Filter> const& filters)
{
        ContentChangeSignaller cc (this, FFmpegContentProperty::FILTERS);

        {
                boost::mutex::scoped_lock lm (_mutex);
                _filters = filters;
        }
}


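/** @return a string built from the identifiers of our video, burnt-in text,
 *  selected subtitle stream and filters, which changes when those settings
 *  change.
 */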
string
FFmpegContent::identifier () const
{
        string s = Content::identifier();

        if (video) {
                s += "_" + video->identifier();
        }

        if (only_text() && only_text()->use() && only_text()->burn()) {
                s += "_" + only_text()->identifier();
        }

        boost::mutex::scoped_lock lm (_mutex);

        if (_subtitle_stream) {
                s += "_" + _subtitle_stream->identifier ();
        }

        for (auto i: _filters) {
                s += "_" + i.id();
        }

        return s;
}


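/** Choose a default colour conversion preset for the video, based on its
 *  colourspace metadata or, if that is unspecified, on its resolution.
 */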
void
FFmpegContent::set_default_colour_conversion ()
{
        DCPOMATIC_ASSERT (video);

        auto const s = video->size ();

        boost::mutex::scoped_lock lm (_mutex);

        switch (_colorspace.get_value_or(AVCOL_SPC_UNSPECIFIED)) {
        case AVCOL_SPC_RGB:
                video->set_colour_conversion (PresetColourConversion::from_id ("srgb").conversion);
                break;
        case AVCOL_SPC_BT709:
                video->set_colour_conversion (PresetColourConversion::from_id ("rec709").conversion);
                break;
        case AVCOL_SPC_BT470BG:
        case AVCOL_SPC_SMPTE170M:
        case AVCOL_SPC_SMPTE240M:
                video->set_colour_conversion (PresetColourConversion::from_id ("rec601").conversion);
                break;
        case AVCOL_SPC_BT2020_CL:
        case AVCOL_SPC_BT2020_NCL:
                video->set_colour_conversion (PresetColourConversion::from_id ("rec2020").conversion);
                break;
        default:
                if (s && s->width < 1080) {
                        video->set_colour_conversion (PresetColourConversion::from_id ("rec601").conversion);
                } else {
                        video->set_colour_conversion (PresetColourConversion::from_id ("rec709").conversion);
                }
                break;
        }
}


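/** Add our user-visible properties (colour range, primaries, transfer
 *  characteristic, colourspace and bits per pixel) to @p p, along with those
 *  of our video and audio parts.
 */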
void
FFmpegContent::add_properties (shared_ptr<const Film> film, list<UserProperty>& p) const
{
        Content::add_properties (film, p);

        if (video) {
                video->add_properties (p);

                if (_bits_per_pixel) {
                        auto pixel_quanta_product = video->pixel_quanta().x * video->pixel_quanta().y;
                        auto bits_per_main_pixel = pixel_quanta_product * _bits_per_pixel.get() / (pixel_quanta_product + 2);

                        int const lim_start = pow(2, bits_per_main_pixel - 4);
                        int const lim_end = 235 * pow(2, bits_per_main_pixel - 8);
                        int const total = pow(2, bits_per_main_pixel);

                        switch (_color_range.get_value_or(AVCOL_RANGE_UNSPECIFIED)) {
                        case AVCOL_RANGE_UNSPECIFIED:
                                /// TRANSLATORS: this means that the range of pixel values used in this
                                /// file is unknown (not specified in the file).
                                p.push_back (UserProperty (UserProperty::VIDEO, _("Colour range"), _("Unspecified")));
                                break;
                        case AVCOL_RANGE_MPEG:
                                /// TRANSLATORS: this means that the range of pixel values used in this
                                /// file is limited, so that not all possible values are valid.
                                p.push_back (
                                        UserProperty (
                                                UserProperty::VIDEO, _("Colour range"), String::compose(_("Limited / video (%1-%2)"), lim_start, lim_end)
                                                )
                                        );
                                break;
                        case AVCOL_RANGE_JPEG:
                                /// TRANSLATORS: this means that the range of pixel values used in this
                                /// file is full, so that all possible pixel values are valid.
                                p.push_back(UserProperty(UserProperty::VIDEO, _("Colour range"), String::compose(_("Full (0-%1)"), total - 1)));
                                break;
                        default:
                                DCPOMATIC_ASSERT (false);
                        }
                } else {
                        switch (_color_range.get_value_or(AVCOL_RANGE_UNSPECIFIED)) {
                        case AVCOL_RANGE_UNSPECIFIED:
                                /// TRANSLATORS: this means that the range of pixel values used in this
                                /// file is unknown (not specified in the file).
                                p.push_back (UserProperty (UserProperty::VIDEO, _("Colour range"), _("Unspecified")));
                                break;
                        case AVCOL_RANGE_MPEG:
                                /// TRANSLATORS: this means that the range of pixel values used in this
                                /// file is limited, so that not all possible values are valid.
                                p.push_back (UserProperty (UserProperty::VIDEO, _("Colour range"), _("Limited")));
                                break;
                        case AVCOL_RANGE_JPEG:
                                /// TRANSLATORS: this means that the range of pixel values used in this
                                /// file is full, so that all possible pixel values are valid.
                                p.push_back (UserProperty (UserProperty::VIDEO, _("Colour range"), _("Full")));
                                break;
                        default:
                                DCPOMATIC_ASSERT (false);
                        }
                }

                char const * primaries[] = {
                        _("Unspecified"),
                        _("BT709"),
                        _("Unspecified"),
                        _("Unspecified"),
                        _("BT470M"),
                        _("BT470BG"),
                        _("SMPTE 170M (BT601)"),
                        _("SMPTE 240M"),
                        _("Film"),
                        _("BT2020"),
                        _("SMPTE ST 428-1 (CIE 1931 XYZ)"),
                        _("SMPTE ST 431-2 (2011)"),
                        _("SMPTE ST 432-1 D65 (2010)"), // 12
                        "", // 13
                        "", // 14
                        "", // 15
                        "", // 16
                        "", // 17
                        "", // 18
                        "", // 19
                        "", // 20
                        "", // 21
                        _("JEDEC P22")
                };

                DCPOMATIC_ASSERT (AVCOL_PRI_NB <= 23);
                p.push_back (UserProperty (UserProperty::VIDEO, _("Colour primaries"), primaries[_color_primaries.get_value_or(AVCOL_PRI_UNSPECIFIED)]));

                char const * transfers[] = {
                        _("Unspecified"),
                        _("BT709"),
                        _("Unspecified"),
                        _("Unspecified"),
                        _("Gamma 22 (BT470M)"),
                        _("Gamma 28 (BT470BG)"),
                        _("SMPTE 170M (BT601)"),
                        _("SMPTE 240M"),
                        _("Linear"),
                        _("Logarithmic (100:1 range)"),
                        _("Logarithmic (316:1 range)"),
                        _("IEC61966-2-4"),
                        _("BT1361 extended colour gamut"),
                        _("IEC61966-2-1 (sRGB or sYCC)"),
                        _("BT2020 for a 10-bit system"),
                        _("BT2020 for a 12-bit system"),
                        _("SMPTE ST 2084 for 10, 12, 14 and 16 bit systems"),
                        _("SMPTE ST 428-1"),
                        _("ARIB STD-B67 ('Hybrid log-gamma')")
                };

                DCPOMATIC_ASSERT (AVCOL_TRC_NB <= 19);
                p.push_back (UserProperty (UserProperty::VIDEO, _("Colour transfer characteristic"), transfers[_color_trc.get_value_or(AVCOL_TRC_UNSPECIFIED)]));

                char const * spaces[] = {
                        _("RGB / sRGB (IEC61966-2-1)"),
                        _("BT709"),
                        _("Unspecified"),
                        _("Unspecified"),
                        _("FCC"),
                        _("BT470BG (BT601-6)"),
                        _("SMPTE 170M (BT601-6)"),
                        _("SMPTE 240M"),
                        _("YCOCG"),
                        _("BT2020 non-constant luminance"),
                        _("BT2020 constant luminance"),
                        _("SMPTE 2085, Y'D'zD'x"),
                        _("Chroma-derived non-constant luminance"),
                        _("Chroma-derived constant luminance"),
                        _("BT2100")
                };

                DCPOMATIC_ASSERT (AVCOL_SPC_NB == 15);
                p.push_back (UserProperty (UserProperty::VIDEO, _("Colourspace"), spaces[_colorspace.get_value_or(AVCOL_SPC_UNSPECIFIED)]));

                if (_bits_per_pixel) {
                        p.push_back (UserProperty (UserProperty::VIDEO, _("Bits per pixel"), *_bits_per_pixel));
                }
        }

        if (audio) {
                audio->add_properties (film, p);
        }
}


/** Our subtitle streams have colour maps, which can be changed, but
 *  they have no way of signalling that change.  As a hack, we have this
 *  method which callers can use when they've modified one of our subtitle
 *  streams.
 */
void
FFmpegContent::signal_subtitle_stream_changed ()
{
        /* XXX: this is too late; really it should be before the change */
        ContentChangeSignaller cc (this, FFmpegContentProperty::SUBTITLE_STREAM);
}


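/** @return our audio streams, downcast to FFmpegAudioStream. */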
vector<shared_ptr<FFmpegAudioStream>>
FFmpegContent::ffmpeg_audio_streams () const
{
        vector<shared_ptr<FFmpegAudioStream>> fa;

        if (audio) {
                for (auto i: audio->streams()) {
                        fa.push_back (dynamic_pointer_cast<FFmpegAudioStream>(i));
                }
        }

        return fa;
}


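/** Copy settings from @p c if it is also FFmpegContent; the base Content
 *  settings and our filter list are taken.
 */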
void
FFmpegContent::take_settings_from (shared_ptr<const Content> c)
{
        auto fc = dynamic_pointer_cast<const FFmpegContent> (c);
        if (!fc) {
                return;
        }

        Content::take_settings_from (c);
        _filters = fc->_filters;
}