Fix missing content properties when using translations.
src/lib/ffmpeg_content.cc
/*
    Copyright (C) 2013-2016 Carl Hetherington <cth@carlh.net>

    This file is part of DCP-o-matic.

    DCP-o-matic is free software; you can redistribute it and/or modify
    it under the terms of the GNU General Public License as published by
    the Free Software Foundation; either version 2 of the License, or
    (at your option) any later version.

    DCP-o-matic is distributed in the hope that it will be useful,
    but WITHOUT ANY WARRANTY; without even the implied warranty of
    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
    GNU General Public License for more details.

    You should have received a copy of the GNU General Public License
    along with DCP-o-matic.  If not, see <http://www.gnu.org/licenses/>.

*/

#include "ffmpeg_content.h"
#include "video_content.h"
#include "audio_content.h"
#include "ffmpeg_examiner.h"
#include "ffmpeg_subtitle_stream.h"
#include "ffmpeg_audio_stream.h"
#include "compose.hpp"
#include "job.h"
#include "util.h"
#include "filter.h"
#include "film.h"
#include "log.h"
#include "exceptions.h"
#include "frame_rate_change.h"
#include "safe_stringstream.h"
#include "raw_convert.h"
#include "subtitle_content.h"
#include <libcxml/cxml.h>
extern "C" {
#include <libavformat/avformat.h>
#include <libavutil/pixdesc.h>
}
#include <libxml++/libxml++.h>
#include <boost/foreach.hpp>
#include <iostream>

#include "i18n.h"

#define LOG_GENERAL(...) film->log()->log (String::compose (__VA_ARGS__), LogEntry::TYPE_GENERAL);

using std::string;
using std::vector;
using std::list;
using std::cout;
using std::pair;
using std::make_pair;
using std::max;
using boost::shared_ptr;
using boost::dynamic_pointer_cast;
using boost::optional;

int const FFmpegContentProperty::SUBTITLE_STREAMS = 100;
int const FFmpegContentProperty::SUBTITLE_STREAM = 101;
int const FFmpegContentProperty::FILTERS = 102;

FFmpegContent::FFmpegContent (shared_ptr<const Film> film, boost::filesystem::path p)
        : Content (film, p)
{

}

FFmpegContent::FFmpegContent (shared_ptr<const Film> film, cxml::ConstNodePtr node, int version, list<string>& notes)
        : Content (film, node)
{
        video = VideoContent::from_xml (this, node, version);
        audio = AudioContent::from_xml (this, node);
        subtitle = SubtitleContent::from_xml (this, node, version);

        list<cxml::NodePtr> c = node->node_children ("SubtitleStream");
        for (list<cxml::NodePtr>::const_iterator i = c.begin(); i != c.end(); ++i) {
                _subtitle_streams.push_back (shared_ptr<FFmpegSubtitleStream> (new FFmpegSubtitleStream (*i, version)));
                if ((*i)->optional_number_child<int> ("Selected")) {
                        _subtitle_stream = _subtitle_streams.back ();
                }
        }

        c = node->node_children ("AudioStream");
        for (list<cxml::NodePtr>::const_iterator i = c.begin(); i != c.end(); ++i) {
                shared_ptr<FFmpegAudioStream> as (new FFmpegAudioStream (*i, version));
                audio->add_stream (as);
                if (version < 11 && !(*i)->optional_node_child ("Selected")) {
                        /* This is an old file and this stream is not selected, so un-map it */
                        as->set_mapping (AudioMapping (as->channels (), MAX_DCP_AUDIO_CHANNELS));
                }
        }

        c = node->node_children ("Filter");
        for (list<cxml::NodePtr>::iterator i = c.begin(); i != c.end(); ++i) {
                Filter const * f = Filter::from_id ((*i)->content ());
                if (f) {
                        _filters.push_back (f);
                } else {
                        notes.push_back (String::compose (_("DCP-o-matic no longer supports the `%1' filter, so it has been turned off."), (*i)->content()));
                }
        }

        optional<ContentTime::Type> const f = node->optional_number_child<ContentTime::Type> ("FirstVideo");
        if (f) {
                _first_video = ContentTime (f.get ());
        }

        _color_range = static_cast<AVColorRange> (node->optional_number_child<int>("ColorRange").get_value_or (AVCOL_RANGE_UNSPECIFIED));
        _color_primaries = static_cast<AVColorPrimaries> (node->optional_number_child<int>("ColorPrimaries").get_value_or (AVCOL_PRI_UNSPECIFIED));
        _color_trc = static_cast<AVColorTransferCharacteristic> (
                node->optional_number_child<int>("ColorTransferCharacteristic").get_value_or (AVCOL_TRC_UNSPECIFIED)
                );
        _colorspace = static_cast<AVColorSpace> (node->optional_number_child<int>("Colorspace").get_value_or (AVCOL_SPC_UNSPECIFIED));
        _bits_per_pixel = node->optional_number_child<int> ("BitsPerPixel");

}

FFmpegContent::FFmpegContent (shared_ptr<const Film> film, vector<boost::shared_ptr<Content> > c)
        : Content (film, c)
{
        /* XXX: this should look at c to decide which of video/audio/subtitle
           get created.
        */
        video.reset (new VideoContent (this, c));
        audio.reset (new AudioContent (this, c));
        subtitle.reset (new SubtitleContent (this, c));

        shared_ptr<FFmpegContent> ref = dynamic_pointer_cast<FFmpegContent> (c[0]);
        DCPOMATIC_ASSERT (ref);

        for (size_t i = 0; i < c.size(); ++i) {
                shared_ptr<FFmpegContent> fc = dynamic_pointer_cast<FFmpegContent> (c[i]);
                if (fc->subtitle->use() && *(fc->_subtitle_stream.get()) != *(ref->_subtitle_stream.get())) {
                        throw JoinError (_("Content to be joined must use the same subtitle stream."));
                }
        }

        /* XXX: should probably check that more of the stuff below is the same in *this and ref */

        _subtitle_streams = ref->subtitle_streams ();
        _subtitle_stream = ref->subtitle_stream ();
        _first_video = ref->_first_video;
        _filters = ref->_filters;
        _color_range = ref->_color_range;
        _color_primaries = ref->_color_primaries;
        _color_trc = ref->_color_trc;
        _colorspace = ref->_colorspace;
        _bits_per_pixel = ref->_bits_per_pixel;
}

void
FFmpegContent::as_xml (xmlpp::Node* node) const
{
        node->add_child("Type")->add_child_text ("FFmpeg");
        Content::as_xml (node);

        if (video) {
                video->as_xml (node);
        }

        if (audio) {
                audio->as_xml (node);

                BOOST_FOREACH (AudioStreamPtr i, audio->streams ()) {
                        shared_ptr<FFmpegAudioStream> f = dynamic_pointer_cast<FFmpegAudioStream> (i);
                        DCPOMATIC_ASSERT (f);
                        f->as_xml (node->add_child("AudioStream"));
                }
        }

        if (subtitle) {
                subtitle->as_xml (node);
        }

        boost::mutex::scoped_lock lm (_mutex);

        for (vector<shared_ptr<FFmpegSubtitleStream> >::const_iterator i = _subtitle_streams.begin(); i != _subtitle_streams.end(); ++i) {
                xmlpp::Node* t = node->add_child("SubtitleStream");
                if (_subtitle_stream && *i == _subtitle_stream) {
                        t->add_child("Selected")->add_child_text("1");
                }
                (*i)->as_xml (t);
        }

        for (vector<Filter const *>::const_iterator i = _filters.begin(); i != _filters.end(); ++i) {
                node->add_child("Filter")->add_child_text ((*i)->id ());
        }

        if (_first_video) {
                node->add_child("FirstVideo")->add_child_text (raw_convert<string> (_first_video.get().get()));
        }

        node->add_child("ColorRange")->add_child_text (raw_convert<string> (_color_range));
        node->add_child("ColorPrimaries")->add_child_text (raw_convert<string> (_color_primaries));
        node->add_child("ColorTransferCharacteristic")->add_child_text (raw_convert<string> (_color_trc));
        node->add_child("Colorspace")->add_child_text (raw_convert<string> (_colorspace));
        if (_bits_per_pixel) {
                node->add_child("BitsPerPixel")->add_child_text (raw_convert<string> (_bits_per_pixel.get ()));
        }
}

void
FFmpegContent::examine (shared_ptr<Job> job)
{
        job->set_progress_unknown ();

        Content::examine (job);

        shared_ptr<FFmpegExaminer> examiner (new FFmpegExaminer (shared_from_this (), job));

        if (examiner->has_video ()) {
                video.reset (new VideoContent (this));
                video->take_from_examiner (examiner);
                set_default_colour_conversion ();
        }

        {
                boost::mutex::scoped_lock lm (_mutex);

                if (examiner->has_video ()) {
                        _first_video = examiner->first_video ();
                        _color_range = examiner->color_range ();
                        _color_primaries = examiner->color_primaries ();
                        _color_trc = examiner->color_trc ();
                        _colorspace = examiner->colorspace ();
                        _bits_per_pixel = examiner->bits_per_pixel ();
                }

                if (!examiner->audio_streams().empty ()) {
                        audio.reset (new AudioContent (this));

                        BOOST_FOREACH (shared_ptr<FFmpegAudioStream> i, examiner->audio_streams ()) {
                                audio->add_stream (i);
                        }

                        AudioStreamPtr as = audio->streams().front();
                        AudioMapping m = as->mapping ();
                        film()->make_audio_mapping_default (m);
                        as->set_mapping (m);
                }

                _subtitle_streams = examiner->subtitle_streams ();
                if (!_subtitle_streams.empty ()) {
                        subtitle.reset (new SubtitleContent (this));
                        _subtitle_stream = _subtitle_streams.front ();
                }

        }

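        /* Emit these outside the scoped lock, as handlers may call back into this object */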
        signal_changed (FFmpegContentProperty::SUBTITLE_STREAMS);
        signal_changed (FFmpegContentProperty::SUBTITLE_STREAM);
}

string
FFmpegContent::summary () const
{
        if (video && audio) {
                return String::compose (_("%1 [movie]"), path_summary ());
        } else if (video) {
                return String::compose (_("%1 [video]"), path_summary ());
        } else if (audio) {
                return String::compose (_("%1 [audio]"), path_summary ());
        }

        return path_summary ();
}

string
FFmpegContent::technical_summary () const
{
        string as = "";
        BOOST_FOREACH (shared_ptr<FFmpegAudioStream> i, ffmpeg_audio_streams ()) {
                as += i->technical_summary () + " " ;
        }

        if (as.empty ()) {
                as = "none";
        }

        string ss = "none";
        if (_subtitle_stream) {
                ss = _subtitle_stream->technical_summary ();
        }

        string filt = Filter::ffmpeg_string (_filters);

        string s = Content::technical_summary ();

        if (video) {
                s += " - " + video->technical_summary ();
        }

        if (audio) {
                s += " - " + audio->technical_summary ();
        }

        return s + String::compose (
                "ffmpeg: audio %1 subtitle %2 filters %3", as, ss, filt
                );
}

void
FFmpegContent::set_subtitle_stream (shared_ptr<FFmpegSubtitleStream> s)
{
        {
                boost::mutex::scoped_lock lm (_mutex);
                _subtitle_stream = s;
        }

        signal_changed (FFmpegContentProperty::SUBTITLE_STREAM);
}

bool
operator== (FFmpegStream const & a, FFmpegStream const & b)
{
        return a._id == b._id;
}

bool
operator!= (FFmpegStream const & a, FFmpegStream const & b)
{
        return a._id != b._id;
}

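/** @return the full length of this content: for video, the video length scaled by the
 *  frame rate conversion factor; for audio-only content, the length of the longest
 *  audio stream.
 */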
DCPTime
FFmpegContent::full_length () const
{
        FrameRateChange const frc (active_video_frame_rate (), film()->video_frame_rate ());
        if (video) {
                return DCPTime::from_frames (llrint (video->length_after_3d_combine() * frc.factor()), film()->video_frame_rate());
        }

        DCPOMATIC_ASSERT (audio);

        DCPTime longest;
        BOOST_FOREACH (AudioStreamPtr i, audio->streams ()) {
                longest = max (longest, DCPTime::from_frames (llrint (i->length() / frc.speed_up), i->frame_rate()));
        }

        return longest;
}

void
FFmpegContent::set_filters (vector<Filter const *> const & filters)
{
        {
                boost::mutex::scoped_lock lm (_mutex);
                _filters = filters;
        }

        signal_changed (FFmpegContentProperty::FILTERS);
}

string
FFmpegContent::identifier () const
{
        SafeStringStream s;

        s << Content::identifier();

        if (video) {
                s << "_" << video->identifier();
        }

        if (subtitle) {
                s << "_" << subtitle->identifier();
        }

        boost::mutex::scoped_lock lm (_mutex);

        if (_subtitle_stream) {
                s << "_" << _subtitle_stream->identifier ();
        }

        for (vector<Filter const *>::const_iterator i = _filters.begin(); i != _filters.end(); ++i) {
                s << "_" << (*i)->id ();
        }

        return s.str ();
}

list<ContentTimePeriod>
FFmpegContent::image_subtitles_during (ContentTimePeriod period, bool starting) const
{
        shared_ptr<FFmpegSubtitleStream> stream = subtitle_stream ();
        if (!stream) {
                return list<ContentTimePeriod> ();
        }

        return stream->image_subtitles_during (period, starting);
}

list<ContentTimePeriod>
FFmpegContent::text_subtitles_during (ContentTimePeriod period, bool starting) const
{
        shared_ptr<FFmpegSubtitleStream> stream = subtitle_stream ();
        if (!stream) {
                return list<ContentTimePeriod> ();
        }

        return stream->text_subtitles_during (period, starting);
}

void
FFmpegContent::set_default_colour_conversion ()
{
        DCPOMATIC_ASSERT (video);

        dcp::Size const s = video->size ();

        boost::mutex::scoped_lock lm (_mutex);

        if (s.width < 1080) {
                video->set_colour_conversion (PresetColourConversion::from_id ("rec601").conversion);
        } else {
                video->set_colour_conversion (PresetColourConversion::from_id ("rec709").conversion);
        }
}

void
FFmpegContent::add_properties (list<UserProperty>& p) const
{
        Content::add_properties (p);

        if (video) {
                video->add_properties (p);

                if (_bits_per_pixel) {
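                        /* sub is the number of code values spanned by the limited ("video")
                           range at this bit depth; total is the total number of code values.
                        */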
                        int const sub = 219 * pow (2, _bits_per_pixel.get() - 8);
                        int const total = pow (2, _bits_per_pixel.get());

                        switch (_color_range) {
                        case AVCOL_RANGE_UNSPECIFIED:
                                /// TRANSLATORS: this means that the range of pixel values used in this
                                /// file is unknown (not specified in the file).
                                p.push_back (UserProperty (UserProperty::VIDEO, _("Colour range"), _("Unspecified")));
                                break;
                        case AVCOL_RANGE_MPEG:
                                /// TRANSLATORS: this means that the range of pixel values used in this
                                /// file is limited, so that not all possible values are valid.
                                p.push_back (
                                        UserProperty (
                                                UserProperty::VIDEO, _("Colour range"), String::compose (_("Limited (%1-%2)"), (total - sub) / 2, (total + sub) / 2)
                                                )
                                        );
                                break;
                        case AVCOL_RANGE_JPEG:
                                /// TRANSLATORS: this means that the range of pixel values used in this
                                /// file is full, so that all possible pixel values are valid.
                                p.push_back (UserProperty (UserProperty::VIDEO, _("Colour range"), String::compose (_("Full (0-%1)"), total)));
                                break;
                        default:
                                DCPOMATIC_ASSERT (false);
                        }
                } else {
                        switch (_color_range) {
                        case AVCOL_RANGE_UNSPECIFIED:
                                /// TRANSLATORS: this means that the range of pixel values used in this
                                /// file is unknown (not specified in the file).
                                p.push_back (UserProperty (UserProperty::VIDEO, _("Colour range"), _("Unspecified")));
                                break;
                        case AVCOL_RANGE_MPEG:
                                /// TRANSLATORS: this means that the range of pixel values used in this
                                /// file is limited, so that not all possible values are valid.
                                p.push_back (UserProperty (UserProperty::VIDEO, _("Colour range"), _("Limited")));
                                break;
                        case AVCOL_RANGE_JPEG:
                                /// TRANSLATORS: this means that the range of pixel values used in this
                                /// file is full, so that all possible pixel values are valid.
                                p.push_back (UserProperty (UserProperty::VIDEO, _("Colour range"), _("Full")));
                                break;
                        default:
                                DCPOMATIC_ASSERT (false);
                        }
                }

                char const * primaries[] = {
                        _("Unspecified"),
                        _("BT709"),
                        _("Unspecified"),
                        _("Unspecified"),
                        _("BT470M"),
                        _("BT470BG"),
                        _("SMPTE 170M (BT601)"),
                        _("SMPTE 240M"),
                        _("Film"),
                        _("BT2020"),
                        _("SMPTE ST 428-1 (CIE 1931 XYZ)")
                };

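                /* These tables need one entry per value of the corresponding FFmpeg enum;
                   the asserts are a reminder to update them if new values appear.
                */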
                DCPOMATIC_ASSERT (AVCOL_PRI_NB <= 11);
                p.push_back (UserProperty (UserProperty::VIDEO, _("Colour primaries"), primaries[_color_primaries]));

                char const * transfers[] = {
                        _("Unspecified"),
                        _("BT709"),
                        _("Unspecified"),
                        _("Unspecified"),
                        _("Gamma 22 (BT470M)"),
                        _("Gamma 28 (BT470BG)"),
                        _("SMPTE 170M (BT601)"),
                        _("SMPTE 240M"),
                        _("Linear"),
                        _("Logarithmic (100:1 range)"),
                        _("Logarithmic (316:1 range)"),
                        _("IEC61966-2-4"),
                        _("BT1361 extended colour gamut"),
                        _("IEC61966-2-1 (sRGB or sYCC)"),
                        _("BT2020 for a 10-bit system"),
                        _("BT2020 for a 12-bit system"),
                        _("SMPTE ST 2084 for 10, 12, 14 and 16 bit systems"),
                        _("SMPTE ST 428-1")
                };

                DCPOMATIC_ASSERT (AVCOL_TRC_NB <= 18);
                p.push_back (UserProperty (UserProperty::VIDEO, _("Colour transfer characteristic"), transfers[_color_trc]));

                char const * spaces[] = {
                        _("RGB / sRGB (IEC61966-2-1)"),
                        _("BT709"),
                        _("Unspecified"),
                        _("Unspecified"),
                        _("FCC"),
                        _("BT470BG (BT601-6)"),
                        _("SMPTE 170M (BT601-6)"),
                        _("SMPTE 240M"),
                        _("YCOCG"),
                        _("BT2020 non-constant luminance"),
                        _("BT2020 constant luminance"),
                };

                DCPOMATIC_ASSERT (AVCOL_SPC_NB == 11);
                p.push_back (UserProperty (UserProperty::VIDEO, _("Colourspace"), spaces[_colorspace]));

                if (_bits_per_pixel) {
                        p.push_back (UserProperty (UserProperty::VIDEO, _("Bits per pixel"), raw_convert<string> (_bits_per_pixel.get ())));
                }
        }

        if (audio) {
                audio->add_properties (p);
        }
}

/** Our subtitle streams have colour maps, which can be changed, but
 *  they have no way of signalling that change.  As a hack, we have this
 *  method which callers can use when they've modified one of our subtitle
 *  streams.
 */
void
FFmpegContent::signal_subtitle_stream_changed ()
{
        signal_changed (FFmpegContentProperty::SUBTITLE_STREAM);
}

vector<shared_ptr<FFmpegAudioStream> >
FFmpegContent::ffmpeg_audio_streams () const
{
        vector<shared_ptr<FFmpegAudioStream> > fa;

        if (audio) {
                BOOST_FOREACH (AudioStreamPtr i, audio->streams()) {
                        fa.push_back (dynamic_pointer_cast<FFmpegAudioStream> (i));
                }
        }

        return fa;
}