#include "ffmpeg_subtitle_stream.h"
#include "util.h"
#include "safe_stringstream.h"
+#include <boost/foreach.hpp>
+#include <iostream>
#include "i18n.h"
_need_video_length = _format_context->duration == AV_NOPTS_VALUE;
if (!_need_video_length) {
_video_length = (double (_format_context->duration) / AV_TIME_BASE) * video_frame_rate().get ();
- } else if (job) {
- job->sub (_("Finding length"));
- job->set_progress_unknown ();
}
if (job) {
- job->sub (_("Finding subtitles"));
+ if (_need_video_length) {
+ job->sub (_("Finding length and subtitles"));
+ } else {
+ job->sub (_("Finding subtitles"));
+ }
}
/* Run through until we find:
* so they are ok).
*/
+ int64_t const len = _file_group.length ();
while (true) {
int r = av_read_frame (_format_context, &_packet);
if (r < 0) {
}
if (job) {
- job->set_progress_unknown ();
+ if (len > 0) {
+ job->set_progress (float (_format_context->pb->pos) / len);
+ } else {
+ job->set_progress_unknown ();
+ }
}
AVCodecContext* context = _format_context->streams[_packet.stream_index]->codec;
break;
}
}
+
+ /* Finish off any hanging subtitles at the end */
+ for (LastSubtitleMap::const_iterator i = _last_subtitle_start.begin(); i != _last_subtitle_start.end(); ++i) {
+ if (i->second) {
+ i->first->add_subtitle (
+ i->second->id,
+ ContentTimePeriod (
+ i->second->time,
+ ContentTime::from_frames (video_length(), video_frame_rate().get_value_or (24))
+ )
+ );
+ }
+ }
+
+ /* We just added subtitles to our streams without taking the PTS offset into account;
+ this is because we might not know the PTS offset when the first subtitle is seen.
+ Now we know the PTS offset so we can apply it to those subtitles.
+ */
+ if (video_frame_rate()) {
+ BOOST_FOREACH (shared_ptr<FFmpegSubtitleStream> i, _subtitle_streams) {
+ i->add_offset (pts_offset (_audio_streams, _first_video, video_frame_rate().get()));
+ }
+ }
}
void
int frame_finished;
AVSubtitle sub;
if (avcodec_decode_subtitle2 (context, &sub, &frame_finished, &_packet) >= 0 && frame_finished) {
+ string id = subtitle_id (sub);
FFmpegSubtitlePeriod const period = subtitle_period (sub);
- if (sub.num_rects <= 0 && _last_subtitle_start) {
- stream->add_subtitle (ContentTimePeriod (_last_subtitle_start.get (), period.from));
- _last_subtitle_start = optional<ContentTime> ();
+ LastSubtitleMap::iterator last = _last_subtitle_start.find (stream);
+ if (last != _last_subtitle_start.end() && last->second) {
+ /* We have seen the start of a subtitle but not yet the end. Whatever this is
+ finishes the previous subtitle, so add it */
+ stream->add_subtitle (last->second->id, ContentTimePeriod (last->second->time, period.from));
+ if (sub.num_rects == 0) {
+ /* This is a `proper' end-of-subtitle */
+ _last_subtitle_start[stream] = optional<SubtitleStart> ();
+ } else {
+ /* This is just another subtitle, so we start again */
+ _last_subtitle_start[stream] = SubtitleStart (id, period.from);
+ }
} else if (sub.num_rects == 1) {
if (period.to) {
- stream->add_subtitle (ContentTimePeriod (period.from, period.to.get ()));
+ stream->add_subtitle (id, ContentTimePeriod (period.from, period.to.get ()));
} else {
- _last_subtitle_start = period.from;
+ _last_subtitle_start[stream] = SubtitleStart (id, period.from);
}
}
avsubtitle_free (&sub);
{
return av_get_bits_per_pixel (av_pix_fmt_desc_get (video_codec_context()->pix_fmt));
}
+
+/** @return true if the video's pixel format (as reported by the codec
+ *  context) is one of the YUV / YUVA / NV luma-chroma formats listed
+ *  below; false for anything else (RGB, grey, paletted, etc.).
+ */
+bool
+FFmpegExaminer::yuv () const
+{
+	/* Enumerate the YUV-family members of AVPixelFormat explicitly
+	   rather than testing a flag, so the set of formats treated as
+	   YUV is fixed regardless of FFmpeg/libav version differences.
+	*/
+	switch (video_codec_context()->pix_fmt) {
+	case AV_PIX_FMT_YUV420P:
+	case AV_PIX_FMT_YUYV422:
+	case AV_PIX_FMT_YUV422P:
+	case AV_PIX_FMT_YUV444P:
+	case AV_PIX_FMT_YUV410P:
+	case AV_PIX_FMT_YUV411P:
+	case AV_PIX_FMT_YUVJ420P:
+	case AV_PIX_FMT_YUVJ422P:
+	case AV_PIX_FMT_YUVJ444P:
+	case AV_PIX_FMT_UYVY422:
+	case AV_PIX_FMT_UYYVYY411:
+	case AV_PIX_FMT_NV12:
+	case AV_PIX_FMT_NV21:
+	case AV_PIX_FMT_YUV440P:
+	case AV_PIX_FMT_YUVJ440P:
+	case AV_PIX_FMT_YUVA420P:
+	case AV_PIX_FMT_YUV420P16LE:
+	case AV_PIX_FMT_YUV420P16BE:
+	case AV_PIX_FMT_YUV422P16LE:
+	case AV_PIX_FMT_YUV422P16BE:
+	case AV_PIX_FMT_YUV444P16LE:
+	case AV_PIX_FMT_YUV444P16BE:
+	case AV_PIX_FMT_YUV420P9BE:
+	case AV_PIX_FMT_YUV420P9LE:
+	case AV_PIX_FMT_YUV420P10BE:
+	case AV_PIX_FMT_YUV420P10LE:
+	case AV_PIX_FMT_YUV422P10BE:
+	case AV_PIX_FMT_YUV422P10LE:
+	case AV_PIX_FMT_YUV444P9BE:
+	case AV_PIX_FMT_YUV444P9LE:
+	case AV_PIX_FMT_YUV444P10BE:
+	case AV_PIX_FMT_YUV444P10LE:
+	case AV_PIX_FMT_YUV422P9BE:
+	case AV_PIX_FMT_YUV422P9LE:
+	/* NOTE(review): the _LIBAV suffix presumably names a libav-specific
+	   alias for YUVA422P/YUVA444P that differs in value from FFmpeg's —
+	   confirm against the project's pixfmt compatibility header. */
+	case AV_PIX_FMT_YUVA422P_LIBAV:
+	case AV_PIX_FMT_YUVA444P_LIBAV:
+	case AV_PIX_FMT_YUVA420P9BE:
+	case AV_PIX_FMT_YUVA420P9LE:
+	case AV_PIX_FMT_YUVA422P9BE:
+	case AV_PIX_FMT_YUVA422P9LE:
+	case AV_PIX_FMT_YUVA444P9BE:
+	case AV_PIX_FMT_YUVA444P9LE:
+	case AV_PIX_FMT_YUVA420P10BE:
+	case AV_PIX_FMT_YUVA420P10LE:
+	case AV_PIX_FMT_YUVA422P10BE:
+	case AV_PIX_FMT_YUVA422P10LE:
+	case AV_PIX_FMT_YUVA444P10BE:
+	case AV_PIX_FMT_YUVA444P10LE:
+	case AV_PIX_FMT_YUVA420P16BE:
+	case AV_PIX_FMT_YUVA420P16LE:
+	case AV_PIX_FMT_YUVA422P16BE:
+	case AV_PIX_FMT_YUVA422P16LE:
+	case AV_PIX_FMT_YUVA444P16BE:
+	case AV_PIX_FMT_YUVA444P16LE:
+	case AV_PIX_FMT_NV16:
+	case AV_PIX_FMT_NV20LE:
+	case AV_PIX_FMT_NV20BE:
+	case AV_PIX_FMT_YVYU422:
+	case AV_PIX_FMT_YUVA444P:
+	case AV_PIX_FMT_YUVA422P:
+	case AV_PIX_FMT_YUV420P12BE:
+	case AV_PIX_FMT_YUV420P12LE:
+	case AV_PIX_FMT_YUV420P14BE:
+	case AV_PIX_FMT_YUV420P14LE:
+	case AV_PIX_FMT_YUV422P12BE:
+	case AV_PIX_FMT_YUV422P12LE:
+	case AV_PIX_FMT_YUV422P14BE:
+	case AV_PIX_FMT_YUV422P14LE:
+	case AV_PIX_FMT_YUV444P12BE:
+	case AV_PIX_FMT_YUV444P12LE:
+	case AV_PIX_FMT_YUV444P14BE:
+	case AV_PIX_FMT_YUV444P14LE:
+	case AV_PIX_FMT_YUVJ411P:
+		return true;
+	default:
+		return false;
+	}
+}