2 Copyright (C) 2013-2015 Carl Hetherington <cth@carlh.net>
4 This program is free software; you can redistribute it and/or modify
5 it under the terms of the GNU General Public License as published by
6 the Free Software Foundation; either version 2 of the License, or
7 (at your option) any later version.
9 This program is distributed in the hope that it will be useful,
10 but WITHOUT ANY WARRANTY; without even the implied warranty of
11 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 GNU General Public License for more details.
14 You should have received a copy of the GNU General Public License
15 along with this program; if not, write to the Free Software
16 Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
21 #include <libavcodec/avcodec.h>
22 #include <libavformat/avformat.h>
23 #include <libavutil/pixfmt.h>
24 #include <libavutil/pixdesc.h>
26 #include "ffmpeg_examiner.h"
27 #include "ffmpeg_content.h"
29 #include "ffmpeg_audio_stream.h"
30 #include "ffmpeg_subtitle_stream.h"
32 #include "safe_stringstream.h"
33 #include <boost/foreach.hpp>
41 using boost::shared_ptr;
42 using boost::optional;
/* NOTE(review): this extract appears to be a sampled copy of the original
   file -- stray source line numbers remain on each line and many interleaved
   lines (closing braces, the packet-read loop header, etc.) are absent.
   Code is left byte-identical; comments only. */
44 /** @param job job that the examiner is operating in, or 0 */
45 FFmpegExaminer::FFmpegExaminer (shared_ptr<const FFmpegContent> c, shared_ptr<Job> job)
48 , _need_video_length (false)
/* Walk every stream in the container, collecting audio and subtitle
   streams into _audio_streams / _subtitle_streams. */
50 /* Find audio and subtitle streams */
52 for (uint32_t i = 0; i < _format_context->nb_streams; ++i) {
53 AVStream* s = _format_context->streams[i];
54 if (s->codec->codec_type == AVMEDIA_TYPE_AUDIO) {
56 /* This is a hack; sometimes it seems that _audio_codec_context->channel_layout isn't set up,
57 so bodge it here. No idea why we should have to do this.
/* Fall back to FFmpeg's default layout for this channel count when the
   container did not provide one. */
60 if (s->codec->channel_layout == 0) {
61 s->codec->channel_layout = av_get_default_channel_layout (s->codec->channels);
64 _audio_streams.push_back (
65 shared_ptr<FFmpegAudioStream> (
66 new FFmpegAudioStream (audio_stream_name (s), s->id, s->codec->sample_rate, s->codec->channels)
70 } else if (s->codec->codec_type == AVMEDIA_TYPE_SUBTITLE) {
71 _subtitle_streams.push_back (shared_ptr<FFmpegSubtitleStream> (new FFmpegSubtitleStream (subtitle_stream_name (s), s->id)));
75 /* See if the header has duration information in it */
76 _need_video_length = _format_context->duration == AV_NOPTS_VALUE;
77 if (!_need_video_length) {
/* Header gave a duration in AV_TIME_BASE units; convert to seconds and
   then to a frame count at the detected frame rate. */
78 _video_length = (double (_format_context->duration) / AV_TIME_BASE) * video_frame_rate().get ();
/* If we must count frames to find the length the scan below does more
   work, so give the job an accurate sub-task name. */
82 if (_need_video_length) {
83 job->sub (_("Finding length and subtitles"));
85 job->sub (_("Finding subtitles"));
89 /* Run through until we find:
91 * - the first audio for each stream.
92 * - the subtitle periods for each stream.
94 * We have to note subtitle periods as otherwise we have no way of knowing
95 * where we should look for subtitles (video and audio are always present,
/* Total byte length of the content; used below to report progress as a
   fraction of bytes read (via the format context's pb->pos). */
99 int64_t const len = _file_group.length ();
/* Packet-read loop: the loop header is not visible in this extract --
   presumably loops until av_read_frame fails; confirm against full source. */
101 int r = av_read_frame (_format_context, &_packet);
108 job->set_progress (float (_format_context->pb->pos) / len);
110 job->set_progress_unknown ();
/* Dispatch the packet to the matching per-type handler below. */
114 AVCodecContext* context = _format_context->streams[_packet.stream_index]->codec;
116 if (_packet.stream_index == _video_stream) {
117 video_packet (context);
/* got_all_audio becomes true once every audio stream has yielded its
   first decoded frame. */
120 bool got_all_audio = true;
122 for (size_t i = 0; i < _audio_streams.size(); ++i) {
123 if (_audio_streams[i]->uses_index (_format_context, _packet.stream_index)) {
124 audio_packet (context, _audio_streams[i]);
126 if (!_audio_streams[i]->first_audio) {
127 got_all_audio = false;
131 for (size_t i = 0; i < _subtitle_streams.size(); ++i) {
132 if (_subtitle_streams[i]->uses_index (_format_context, _packet.stream_index)) {
133 subtitle_packet (context, _subtitle_streams[i]);
137 av_free_packet (&_packet);
/* Presumably exits the scan early once first video and all first audio
   have been seen and there are no subtitle streams to trace -- the exit
   statement itself is not visible in this extract; confirm. */
139 if (_first_video && got_all_audio && _subtitle_streams.empty ()) {
/* Close any subtitle still open at end-of-file by extending it to the
   end of the video. */
145 for (LastSubtitleMap::const_iterator i = _last_subtitle_start.begin(); i != _last_subtitle_start.end(); ++i) {
147 i->first->add_subtitle (
150 ContentTime::from_frames (video_length(), video_frame_rate().get_value_or (24))
156 /* We just added subtitles to our streams without taking the PTS offset into account;
157 this is because we might not know the PTS offset when the first subtitle is seen.
158 Now we know the PTS offset so we can apply it to those subtitles.
160 if (video_frame_rate()) {
161 BOOST_FOREACH (shared_ptr<FFmpegSubtitleStream> i, _subtitle_streams) {
162 i->add_offset (pts_offset (_audio_streams, _first_video, video_frame_rate().get()));
/* Decode the video packet currently in _packet.  Establishes the time of
   the first video frame and, when the header gave no duration
   (_need_video_length), tracks the running frame count in _video_length. */
168 FFmpegExaminer::video_packet (AVCodecContext* context)
/* Nothing left to learn from video if we already have the first frame's
   time and do not need to count frames (early return presumably follows --
   not visible in this extract). */
170 if (_first_video && !_need_video_length) {
/* NOTE(review): the declaration of frame_finished is not visible here. */
175 if (avcodec_decode_video2 (context, _frame, &frame_finished, &_packet) >= 0 && frame_finished) {
177 _first_video = frame_time (_format_context->streams[_video_stream]);
179 if (_need_video_length) {
/* Running length: timestamp of the most recent decoded frame, rounded
   to whole frames at the detected frame rate. */
180 _video_length = frame_time (
181 _format_context->streams[_video_stream]
182 ).get_value_or (ContentTime ()).frames_round (video_frame_rate().get ());
/* Decode the audio packet currently in _packet for the given stream and
   record the time of its first decoded frame, if not yet known. */
188 FFmpegExaminer::audio_packet (AVCodecContext* context, shared_ptr<FFmpegAudioStream> stream)
/* First-audio time already known for this stream; nothing more to do
   (early return presumably follows -- not visible in this extract). */
190 if (stream->first_audio) {
/* NOTE(review): the declaration of frame_finished is not visible here. */
195 if (avcodec_decode_audio4 (context, _frame, &frame_finished, &_packet) >= 0 && frame_finished) {
196 stream->first_audio = frame_time (stream->stream (_format_context));
/* Decode the subtitle packet currently in _packet and update the stream's
   subtitle periods.  _last_subtitle_start tracks, per stream, a subtitle
   whose start has been seen but whose end has not: a packet with no rects
   closes it, a packet with one rect closes it and opens a new one. */
201 FFmpegExaminer::subtitle_packet (AVCodecContext* context, shared_ptr<FFmpegSubtitleStream> stream)
/* NOTE(review): the declarations of sub and frame_finished are not
   visible in this extract. */
205 if (avcodec_decode_subtitle2 (context, &sub, &frame_finished, &_packet) >= 0 && frame_finished) {
206 FFmpegSubtitlePeriod const period = subtitle_period (sub);
207 LastSubtitleMap::iterator last = _last_subtitle_start.find (stream);
208 if (last != _last_subtitle_start.end() && last->second) {
209 /* We have seen the start of a subtitle but not yet the end. Whatever this is
210 finishes the previous subtitle, so add it */
211 stream->add_subtitle (ContentTimePeriod (last->second.get (), period.from));
212 if (sub.num_rects == 0) {
213 /* This is a `proper' end-of-subtitle */
214 _last_subtitle_start[stream] = optional<ContentTime> ();
216 /* This is just another subtitle, so we start again */
217 _last_subtitle_start[stream] = period.from;
/* No subtitle currently open: one rect means a new subtitle.  If its end
   time is known (period.to) the complete period can be added at once;
   otherwise remember the start for later. */
219 } else if (sub.num_rects == 1) {
221 stream->add_subtitle (ContentTimePeriod (period.from, period.to.get ()));
223 _last_subtitle_start[stream] = period.from;
/* Release the rects etc. allocated by avcodec_decode_subtitle2. */
226 avsubtitle_free (&sub);
/* @param s stream that the last-decoded _frame belongs to.
   @return time of _frame, from its best-effort timestamp scaled by the
   stream's time base; empty if the timestamp is unknown (AV_NOPTS_VALUE). */
230 optional<ContentTime>
231 FFmpegExaminer::frame_time (AVStream* s) const
233 optional<ContentTime> t;
235 int64_t const bet = av_frame_get_best_effort_timestamp (_frame);
236 if (bet != AV_NOPTS_VALUE) {
237 t = ContentTime::from_seconds (bet * av_q2d (s->time_base));
/* @return frame rate of the video stream, in frames per second,
   taken from the stream's r_frame_rate. */
244 FFmpegExaminer::video_frame_rate () const
246 /* This use of r_frame_rate is debateable; there's a few different
247 * frame rates in the format context, but this one seems to be the most
250 return av_q2d (av_stream_get_r_frame_rate (_format_context->streams[_video_stream]));
/* @return video frame size in pixels, from the video codec context. */
254 FFmpegExaminer::video_size () const
256 return dcp::Size (video_codec_context()->width, video_codec_context()->height);
259 /** @return Length according to our content's header */
261 FFmpegExaminer::video_length () const
/* Clamp to at least one frame so callers never see a zero length. */
263 return max (Frame (1), _video_length);
/* @return sample (pixel) aspect ratio of the video stream as guessed by
   FFmpeg, or empty if it is unknown. */
267 FFmpegExaminer::sample_aspect_ratio () const
269 AVRational sar = av_guess_sample_aspect_ratio (_format_context, _format_context->streams[_video_stream], 0);
/* NOTE(review): the guard condition for this early return (presumably a
   test of sar.num / sar.den against 0) is not visible in this extract. */
271 /* I assume this means that we don't know */
272 return optional<double> ();
274 return double (sar.num) / sar.den;
/* @return human-readable name for an audio stream: the generic stream
   name (language/title metadata), followed by its channel count.
   NOTE(review): the stream declaration of n and the separator handling
   between the two parts are not visible in this extract. */
278 FFmpegExaminer::audio_stream_name (AVStream* s) const
282 n << stream_name (s);
284 if (!n.str().empty()) {
288 n << s->codec->channels << " channels";
/* @return human-readable name for a subtitle stream, based on its
   language/title metadata; a fallback for unnamed streams presumably
   follows the empty-string check but is not visible in this extract. */
294 FFmpegExaminer::subtitle_stream_name (AVStream* s) const
298 n << stream_name (s);
300 if (n.str().empty()) {
/* @return name for a stream built from its "language" and "title"
   metadata dictionary entries; may be empty if neither is set.
   NOTE(review): the lines appending lang/title to n are not visible in
   this extract. */
308 FFmpegExaminer::stream_name (AVStream* s) const
313 AVDictionaryEntry const * lang = av_dict_get (s->metadata, "language", 0, 0);
318 AVDictionaryEntry const * title = av_dict_get (s->metadata, "title", 0, 0);
320 if (!n.str().empty()) {
/* @return bits per pixel of the video stream's pixel format, from its
   libavutil pixel-format descriptor. */
331 FFmpegExaminer::bits_per_pixel () const
333 return av_get_bits_per_pixel (av_pix_fmt_desc_get (video_codec_context()->pix_fmt));