Remove all use of stringstream in an attempt to fix
dcpomatic.git: src/lib/ffmpeg_content.cc
/*
    Copyright (C) 2013-2016 Carl Hetherington <cth@carlh.net>

    This file is part of DCP-o-matic.

    DCP-o-matic is free software; you can redistribute it and/or modify
    it under the terms of the GNU General Public License as published by
    the Free Software Foundation; either version 2 of the License, or
    (at your option) any later version.

    DCP-o-matic is distributed in the hope that it will be useful,
    but WITHOUT ANY WARRANTY; without even the implied warranty of
    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
    GNU General Public License for more details.

    You should have received a copy of the GNU General Public License
    along with DCP-o-matic.  If not, see <http://www.gnu.org/licenses/>.

*/

#include "ffmpeg_content.h"
#include "video_content.h"
#include "audio_content.h"
#include "ffmpeg_examiner.h"
#include "ffmpeg_subtitle_stream.h"
#include "ffmpeg_audio_stream.h"
#include "compose.hpp"
#include "job.h"
#include "util.h"
#include "filter.h"
#include "film.h"
#include "log.h"
#include "exceptions.h"
#include "frame_rate_change.h"
#include "raw_convert.h"
#include "subtitle_content.h"
#include <libcxml/cxml.h>
extern "C" {
#include <libavformat/avformat.h>
#include <libavutil/pixdesc.h>
}
#include <libxml++/libxml++.h>
#include <boost/foreach.hpp>
#include <iostream>

#include "i18n.h"

#define LOG_GENERAL(...) film->log()->log (String::compose (__VA_ARGS__), LogEntry::TYPE_GENERAL);

using std::string;
using std::vector;
using std::list;
using std::cout;
using std::pair;
using std::make_pair;
using std::max;
using boost::shared_ptr;
using boost::dynamic_pointer_cast;
using boost::optional;

int const FFmpegContentProperty::SUBTITLE_STREAMS = 100;
int const FFmpegContentProperty::SUBTITLE_STREAM = 101;
int const FFmpegContentProperty::FILTERS = 102;

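/** Construct an FFmpegContent for the file at @p p.  Nothing is known about the
 *  file's streams until examine() is called.
 */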
FFmpegContent::FFmpegContent (shared_ptr<const Film> film, boost::filesystem::path p)
        : Content (film, p)
{

}

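/** Construct an FFmpegContent from an XML <Content> node written by as_xml().
 *  @param version State file version.
 *  @param notes List to which notes for the user (e.g. about filters which are no
 *  longer supported) will be added.
 */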
FFmpegContent::FFmpegContent (shared_ptr<const Film> film, cxml::ConstNodePtr node, int version, list<string>& notes)
        : Content (film, node)
{
        video = VideoContent::from_xml (this, node, version);
        audio = AudioContent::from_xml (this, node, version);
        subtitle = SubtitleContent::from_xml (this, node, version);

        list<cxml::NodePtr> c = node->node_children ("SubtitleStream");
        for (list<cxml::NodePtr>::const_iterator i = c.begin(); i != c.end(); ++i) {
                _subtitle_streams.push_back (shared_ptr<FFmpegSubtitleStream> (new FFmpegSubtitleStream (*i, version)));
                if ((*i)->optional_number_child<int> ("Selected")) {
                        _subtitle_stream = _subtitle_streams.back ();
                }
        }

        c = node->node_children ("AudioStream");
        for (list<cxml::NodePtr>::const_iterator i = c.begin(); i != c.end(); ++i) {
                shared_ptr<FFmpegAudioStream> as (new FFmpegAudioStream (*i, version));
                audio->add_stream (as);
                if (version < 11 && !(*i)->optional_node_child ("Selected")) {
                        /* This is an old file and this stream is not selected, so un-map it */
                        as->set_mapping (AudioMapping (as->channels (), MAX_DCP_AUDIO_CHANNELS));
                }
        }

        c = node->node_children ("Filter");
        for (list<cxml::NodePtr>::iterator i = c.begin(); i != c.end(); ++i) {
                Filter const * f = Filter::from_id ((*i)->content ());
                if (f) {
                        _filters.push_back (f);
                } else {
                        notes.push_back (String::compose (_("DCP-o-matic no longer supports the `%1' filter, so it has been turned off."), (*i)->content()));
                }
        }

        optional<ContentTime::Type> const f = node->optional_number_child<ContentTime::Type> ("FirstVideo");
        if (f) {
                _first_video = ContentTime (f.get ());
        }

        _color_range = static_cast<AVColorRange> (node->optional_number_child<int>("ColorRange").get_value_or (AVCOL_RANGE_UNSPECIFIED));
        _color_primaries = static_cast<AVColorPrimaries> (node->optional_number_child<int>("ColorPrimaries").get_value_or (AVCOL_PRI_UNSPECIFIED));
        _color_trc = static_cast<AVColorTransferCharacteristic> (
                node->optional_number_child<int>("ColorTransferCharacteristic").get_value_or (AVCOL_TRC_UNSPECIFIED)
                );
        _colorspace = static_cast<AVColorSpace> (node->optional_number_child<int>("Colorspace").get_value_or (AVCOL_SPC_UNSPECIFIED));
        _bits_per_pixel = node->optional_number_child<int> ("BitsPerPixel");

}

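/** Construct an FFmpegContent by joining several existing pieces of content end-to-end.
 *  Every piece must have the same combination of video, audio and subtitles, and any
 *  pieces which use subtitles must use the same subtitle stream; otherwise JoinError
 *  is thrown.
 */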
FFmpegContent::FFmpegContent (shared_ptr<const Film> film, vector<shared_ptr<Content> > c)
        : Content (film, c)
{
        vector<shared_ptr<Content> >::const_iterator i = c.begin ();

        bool need_video = false;
        bool need_audio = false;
        bool need_subtitle = false;

        if (i != c.end ()) {
                need_video = static_cast<bool> ((*i)->video);
                need_audio = static_cast<bool> ((*i)->audio);
                need_subtitle = static_cast<bool> ((*i)->subtitle);
        }

        while (i != c.end ()) {
                if (need_video != static_cast<bool> ((*i)->video)) {
                        throw JoinError (_("Content to be joined must all have or not have video"));
                }
                if (need_audio != static_cast<bool> ((*i)->audio)) {
                        throw JoinError (_("Content to be joined must all have or not have audio"));
                }
                if (need_subtitle != static_cast<bool> ((*i)->subtitle)) {
                        throw JoinError (_("Content to be joined must all have or not have subtitles"));
                }
                ++i;
        }

        if (need_video) {
                video.reset (new VideoContent (this, c));
        }
        if (need_audio) {
                audio.reset (new AudioContent (this, c));
        }
        if (need_subtitle) {
                subtitle.reset (new SubtitleContent (this, c));
        }

        shared_ptr<FFmpegContent> ref = dynamic_pointer_cast<FFmpegContent> (c[0]);
        DCPOMATIC_ASSERT (ref);

        for (size_t i = 0; i < c.size(); ++i) {
                shared_ptr<FFmpegContent> fc = dynamic_pointer_cast<FFmpegContent> (c[i]);
                if (fc->subtitle && fc->subtitle->use() && *(fc->_subtitle_stream.get()) != *(ref->_subtitle_stream.get())) {
                        throw JoinError (_("Content to be joined must use the same subtitle stream."));
                }
        }

        /* XXX: should probably check that more of the stuff below is the same in *this and ref */

        _subtitle_streams = ref->subtitle_streams ();
        _subtitle_stream = ref->subtitle_stream ();
        _first_video = ref->_first_video;
        _filters = ref->_filters;
        _color_range = ref->_color_range;
        _color_primaries = ref->_color_primaries;
        _color_trc = ref->_color_trc;
        _colorspace = ref->_colorspace;
        _bits_per_pixel = ref->_bits_per_pixel;
}

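/** Write our state (streams, filters and colour metadata) to @p node so that it can
 *  be re-created later by the XML constructor.
 */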
void
FFmpegContent::as_xml (xmlpp::Node* node) const
{
        node->add_child("Type")->add_child_text ("FFmpeg");
        Content::as_xml (node);

        if (video) {
                video->as_xml (node);
        }

        if (audio) {
                audio->as_xml (node);

                BOOST_FOREACH (AudioStreamPtr i, audio->streams ()) {
                        shared_ptr<FFmpegAudioStream> f = dynamic_pointer_cast<FFmpegAudioStream> (i);
                        DCPOMATIC_ASSERT (f);
                        f->as_xml (node->add_child("AudioStream"));
                }
        }

        if (subtitle) {
                subtitle->as_xml (node);
        }

        boost::mutex::scoped_lock lm (_mutex);

        for (vector<shared_ptr<FFmpegSubtitleStream> >::const_iterator i = _subtitle_streams.begin(); i != _subtitle_streams.end(); ++i) {
                xmlpp::Node* t = node->add_child("SubtitleStream");
                if (_subtitle_stream && *i == _subtitle_stream) {
                        t->add_child("Selected")->add_child_text("1");
                }
                (*i)->as_xml (t);
        }

        for (vector<Filter const *>::const_iterator i = _filters.begin(); i != _filters.end(); ++i) {
                node->add_child("Filter")->add_child_text ((*i)->id ());
        }

        if (_first_video) {
                node->add_child("FirstVideo")->add_child_text (raw_convert<string> (_first_video.get().get()));
        }

        node->add_child("ColorRange")->add_child_text (raw_convert<string> (_color_range));
        node->add_child("ColorPrimaries")->add_child_text (raw_convert<string> (_color_primaries));
        node->add_child("ColorTransferCharacteristic")->add_child_text (raw_convert<string> (_color_trc));
        node->add_child("Colorspace")->add_child_text (raw_convert<string> (_colorspace));
        if (_bits_per_pixel) {
                node->add_child("BitsPerPixel")->add_child_text (raw_convert<string> (_bits_per_pixel.get ()));
        }
}

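/** Examine our content file with an FFmpegExaminer and set up our video, audio and
 *  subtitle parts from what it finds.
 *  @param job Job that this examination is happening in, used for progress reporting.
 */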
void
FFmpegContent::examine (shared_ptr<Job> job)
{
        job->set_progress_unknown ();

        Content::examine (job);

        shared_ptr<FFmpegExaminer> examiner (new FFmpegExaminer (shared_from_this (), job));

        if (examiner->has_video ()) {
                video.reset (new VideoContent (this));
                video->take_from_examiner (examiner);
                set_default_colour_conversion ();
        }

        boost::filesystem::path first_path = path (0);

        {
                boost::mutex::scoped_lock lm (_mutex);

                if (examiner->has_video ()) {
                        _first_video = examiner->first_video ();
                        _color_range = examiner->color_range ();
                        _color_primaries = examiner->color_primaries ();
                        _color_trc = examiner->color_trc ();
                        _colorspace = examiner->colorspace ();
                        _bits_per_pixel = examiner->bits_per_pixel ();
                }

                if (!examiner->audio_streams().empty ()) {
                        audio.reset (new AudioContent (this));

                        BOOST_FOREACH (shared_ptr<FFmpegAudioStream> i, examiner->audio_streams ()) {
                                audio->add_stream (i);
                        }

                        AudioStreamPtr as = audio->streams().front();
                        AudioMapping m = as->mapping ();
                        film()->make_audio_mapping_default (m, first_path);
                        as->set_mapping (m);
                }

                _subtitle_streams = examiner->subtitle_streams ();
                if (!_subtitle_streams.empty ()) {
                        subtitle.reset (new SubtitleContent (this));
                        _subtitle_stream = _subtitle_streams.front ();
                }

        }

        signal_changed (FFmpegContentProperty::SUBTITLE_STREAMS);
        signal_changed (FFmpegContentProperty::SUBTITLE_STREAM);
}

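/** @return A short summary of this content ([movie], [video] or [audio] depending on
 *  what it contains) for display in the user interface.
 */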
string
FFmpegContent::summary () const
{
        if (video && audio) {
                return String::compose (_("%1 [movie]"), path_summary ());
        } else if (video) {
                return String::compose (_("%1 [video]"), path_summary ());
        } else if (audio) {
                return String::compose (_("%1 [audio]"), path_summary ());
        }

        return path_summary ();
}

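/** @return A technical summary of this content (audio streams, selected subtitle
 *  stream and filters), mainly used for logging.
 */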
string
FFmpegContent::technical_summary () const
{
        string as = "";
        BOOST_FOREACH (shared_ptr<FFmpegAudioStream> i, ffmpeg_audio_streams ()) {
                as += i->technical_summary () + " " ;
        }

        if (as.empty ()) {
                as = "none";
        }

        string ss = "none";
        if (_subtitle_stream) {
                ss = _subtitle_stream->technical_summary ();
        }

        string filt = Filter::ffmpeg_string (_filters);

        string s = Content::technical_summary ();

        if (video) {
                s += " - " + video->technical_summary ();
        }

        if (audio) {
                s += " - " + audio->technical_summary ();
        }

        return s + String::compose (
                "ffmpeg: audio %1 subtitle %2 filters %3", as, ss, filt
                );
}

void
FFmpegContent::set_subtitle_stream (shared_ptr<FFmpegSubtitleStream> s)
{
        {
                boost::mutex::scoped_lock lm (_mutex);
                _subtitle_stream = s;
        }

        signal_changed (FFmpegContentProperty::SUBTITLE_STREAM);
}

bool
operator== (FFmpegStream const & a, FFmpegStream const & b)
{
        return a._id == b._id;
}

bool
operator!= (FFmpegStream const & a, FFmpegStream const & b)
{
        return a._id != b._id;
}

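/** @return The full length of this content at the film's video frame rate: the video
 *  length if there is any video, otherwise the length of the longest audio stream.
 */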
DCPTime
FFmpegContent::full_length () const
{
        FrameRateChange const frc (active_video_frame_rate (), film()->video_frame_rate ());
        if (video) {
                return DCPTime::from_frames (llrint (video->length_after_3d_combine() * frc.factor()), film()->video_frame_rate());
        }

        DCPOMATIC_ASSERT (audio);

        DCPTime longest;
        BOOST_FOREACH (AudioStreamPtr i, audio->streams ()) {
                longest = max (longest, DCPTime::from_frames (llrint (i->length() / frc.speed_up), i->frame_rate()));
        }

        return longest;
}

void
FFmpegContent::set_filters (vector<Filter const *> const & filters)
{
        {
                boost::mutex::scoped_lock lm (_mutex);
                _filters = filters;
        }

        signal_changed (FFmpegContentProperty::FILTERS);
}

string
FFmpegContent::identifier () const
{
        string s = Content::identifier();

        if (video) {
                s += "_" + video->identifier();
        }

        if (subtitle) {
                s += "_" + subtitle->identifier();
        }

        boost::mutex::scoped_lock lm (_mutex);

        if (_subtitle_stream) {
                s += "_" + _subtitle_stream->identifier ();
        }

        for (vector<Filter const *>::const_iterator i = _filters.begin(); i != _filters.end(); ++i) {
                s += "_" + (*i)->id ();
        }

        return s;
}

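/** @return Periods of image subtitles in the selected subtitle stream during
 *  @p period, or an empty list if no subtitle stream is selected.
 */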
list<ContentTimePeriod>
FFmpegContent::image_subtitles_during (ContentTimePeriod period, bool starting) const
{
        shared_ptr<FFmpegSubtitleStream> stream = subtitle_stream ();
        if (!stream) {
                return list<ContentTimePeriod> ();
        }

        return stream->image_subtitles_during (period, starting);
}

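/** As image_subtitles_during(), but for text subtitles. */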
list<ContentTimePeriod>
FFmpegContent::text_subtitles_during (ContentTimePeriod period, bool starting) const
{
        shared_ptr<FFmpegSubtitleStream> stream = subtitle_stream ();
        if (!stream) {
                return list<ContentTimePeriod> ();
        }

        return stream->text_subtitles_during (period, starting);
}

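/** Choose a default colour conversion for our video based on its size: Rec. 601 for
 *  material less than 1080 pixels wide, otherwise Rec. 709.
 */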
void
FFmpegContent::set_default_colour_conversion ()
{
        DCPOMATIC_ASSERT (video);

        dcp::Size const s = video->size ();

        boost::mutex::scoped_lock lm (_mutex);

        if (s.width < 1080) {
                video->set_colour_conversion (PresetColourConversion::from_id ("rec601").conversion);
        } else {
                video->set_colour_conversion (PresetColourConversion::from_id ("rec709").conversion);
        }
}

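/** Add user-visible properties of this content (colour range, primaries, transfer
 *  characteristic, colourspace and bits per pixel) to @p p.
 */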
void
FFmpegContent::add_properties (list<UserProperty>& p) const
{
        Content::add_properties (p);

        if (video) {
                video->add_properties (p);

                if (_bits_per_pixel) {
                        int const sub = 219 * pow (2, _bits_per_pixel.get() - 8);
                        int const total = pow (2, _bits_per_pixel.get());

                        switch (_color_range) {
                        case AVCOL_RANGE_UNSPECIFIED:
                                /// TRANSLATORS: this means that the range of pixel values used in this
                                /// file is unknown (not specified in the file).
                                p.push_back (UserProperty (UserProperty::VIDEO, _("Colour range"), _("Unspecified")));
                                break;
                        case AVCOL_RANGE_MPEG:
                                /// TRANSLATORS: this means that the range of pixel values used in this
                                /// file is limited, so that not all possible values are valid.
                                p.push_back (
                                        UserProperty (
                                                UserProperty::VIDEO, _("Colour range"), String::compose (_("Limited (%1-%2)"), (total - sub) / 2, (total + sub) / 2)
                                                )
                                        );
                                break;
                        case AVCOL_RANGE_JPEG:
                                /// TRANSLATORS: this means that the range of pixel values used in this
                                /// file is full, so that all possible pixel values are valid.
                                p.push_back (UserProperty (UserProperty::VIDEO, _("Colour range"), String::compose (_("Full (0-%1)"), total)));
                                break;
                        default:
                                DCPOMATIC_ASSERT (false);
                        }
                } else {
                        switch (_color_range) {
                        case AVCOL_RANGE_UNSPECIFIED:
                                /// TRANSLATORS: this means that the range of pixel values used in this
                                /// file is unknown (not specified in the file).
                                p.push_back (UserProperty (UserProperty::VIDEO, _("Colour range"), _("Unspecified")));
                                break;
                        case AVCOL_RANGE_MPEG:
                                /// TRANSLATORS: this means that the range of pixel values used in this
                                /// file is limited, so that not all possible values are valid.
                                p.push_back (UserProperty (UserProperty::VIDEO, _("Colour range"), _("Limited")));
                                break;
                        case AVCOL_RANGE_JPEG:
                                /// TRANSLATORS: this means that the range of pixel values used in this
                                /// file is full, so that all possible pixel values are valid.
                                p.push_back (UserProperty (UserProperty::VIDEO, _("Colour range"), _("Full")));
                                break;
                        default:
                                DCPOMATIC_ASSERT (false);
                        }
                }

                char const * primaries[] = {
                        _("Unspecified"),
                        _("BT709"),
                        _("Unspecified"),
                        _("Unspecified"),
                        _("BT470M"),
                        _("BT470BG"),
                        _("SMPTE 170M (BT601)"),
                        _("SMPTE 240M"),
                        _("Film"),
                        _("BT2020"),
                        _("SMPTE ST 428-1 (CIE 1931 XYZ)")
                };

                DCPOMATIC_ASSERT (AVCOL_PRI_NB <= 11);
                p.push_back (UserProperty (UserProperty::VIDEO, _("Colour primaries"), primaries[_color_primaries]));

                char const * transfers[] = {
                        _("Unspecified"),
                        _("BT709"),
                        _("Unspecified"),
                        _("Unspecified"),
                        _("Gamma 22 (BT470M)"),
                        _("Gamma 28 (BT470BG)"),
                        _("SMPTE 170M (BT601)"),
                        _("SMPTE 240M"),
                        _("Linear"),
                        _("Logarithmic (100:1 range)"),
                        _("Logarithmic (316:1 range)"),
                        _("IEC61966-2-4"),
                        _("BT1361 extended colour gamut"),
                        _("IEC61966-2-1 (sRGB or sYCC)"),
                        _("BT2020 for a 10-bit system"),
                        _("BT2020 for a 12-bit system"),
                        _("SMPTE ST 2084 for 10, 12, 14 and 16 bit systems"),
                        _("SMPTE ST 428-1"),
                        _("ARIB STD-B67 ('Hybrid log-gamma')")
                };

                DCPOMATIC_ASSERT (AVCOL_TRC_NB <= 19);
                p.push_back (UserProperty (UserProperty::VIDEO, _("Colour transfer characteristic"), transfers[_color_trc]));

                char const * spaces[] = {
                        _("RGB / sRGB (IEC61966-2-1)"),
                        _("BT709"),
                        _("Unspecified"),
                        _("Unspecified"),
                        _("FCC"),
                        _("BT470BG (BT601-6)"),
                        _("SMPTE 170M (BT601-6)"),
                        _("SMPTE 240M"),
                        _("YCOCG"),
                        _("BT2020 non-constant luminance"),
                        _("BT2020 constant luminance"),
                };

                DCPOMATIC_ASSERT (AVCOL_SPC_NB == 11);
                p.push_back (UserProperty (UserProperty::VIDEO, _("Colourspace"), spaces[_colorspace]));

                if (_bits_per_pixel) {
                        p.push_back (UserProperty (UserProperty::VIDEO, _("Bits per pixel"), raw_convert<string> (_bits_per_pixel.get ())));
                }
        }

        if (audio) {
                audio->add_properties (p);
        }
}

/** Our subtitle streams have colour maps, which can be changed, but
 *  they have no way of signalling that change.  As a hack, we have this
 *  method which callers can use when they've modified one of our subtitle
 *  streams.
 */
void
FFmpegContent::signal_subtitle_stream_changed ()
{
        signal_changed (FFmpegContentProperty::SUBTITLE_STREAM);
}

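/** @return All of our audio streams, downcast to FFmpegAudioStream. */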
vector<shared_ptr<FFmpegAudioStream> >
FFmpegContent::ffmpeg_audio_streams () const
{
        vector<shared_ptr<FFmpegAudioStream> > fa;

        if (audio) {
                BOOST_FOREACH (AudioStreamPtr i, audio->streams()) {
                        fa.push_back (dynamic_pointer_cast<FFmpegAudioStream> (i));
                }
        }

        return fa;
}