#include "content_audio.h"
#include "content_text.h"
#include "content_video.h"
+#include "dcp_text_track.h"
#include "empty.h"
#include "enum_indexed_vector.h"
-#include "film.h"
+#include "film_property.h"
#include "image.h"
#include "player_text.h"
#include "position_image.h"
class AtmosContent;
class AudioBuffers;
class Content;
+class Film;
class PlayerVideo;
class Playlist;
class ReferencedReelAsset;
// NOTE(review): this region is a unified-diff fragment ('+' added lines,
// '-' removed lines, unprefixed context) of a C++ header, not compilable
// source as-is.  The comments below annotate the declarations and the intent
// of each hunk; confirm against the real player.h before relying on them.
/** @class Player
 * @brief A class which can play a Playlist.
 */
// Hunk: Player no longer derives from enable_shared_from_this, i.e. the class
// apparently stops handing out shared_ptrs to itself.
-class Player : public std::enable_shared_from_this<Player>
+class Player
{
public:
// Construct from the film to play plus the subtitle alignment to render with.
Player (std::shared_ptr<const Film>, Image::Alignment subtitle_alignment);
void construct ();
void connect();
void setup_pieces ();
// Hunk: the property enum moves from nested Film::Property to a standalone
// FilmProperty type (matches the film.h -> film_property.h include change above).
- void film_change (ChangeType, Film::Property);
+ void film_change(ChangeType, FilmProperty);
void playlist_change (ChangeType);
void playlist_content_change (ChangeType, int, bool);
// Conversions between DCP-wide time and per-piece content frames/times.
Frame dcp_to_content_video (std::shared_ptr<const Piece> piece, dcpomatic::DCPTime t) const;
dcpomatic::ContentTime dcp_to_content_time (std::shared_ptr<const Piece> piece, dcpomatic::DCPTime t) const;
dcpomatic::DCPTime content_time_to_dcp (std::shared_ptr<const Piece> piece, dcpomatic::ContentTime t) const;
std::shared_ptr<PlayerVideo> black_player_video_frame (Eyes eyes) const;
// Hunk: new entry points — flush buffered video up to 'time', and insert a
// frame covering [time, end).
+ void emit_video_until(dcpomatic::DCPTime time);
+ void insert_video(std::shared_ptr<PlayerVideo> pv, dcpomatic::DCPTime time, dcpomatic::DCPTime end);
void video (std::weak_ptr<Piece>, ContentVideo);
void audio (std::weak_ptr<Piece>, AudioStreamPtr, ContentAudio);
// NOTE(review): the next two lines are the tail of a declaration whose opening
// line(s) fall outside this chunk — the signature is incomplete here.
std::shared_ptr<const AudioBuffers> audio, dcpomatic::DCPTime time, dcpomatic::DCPTime discard_to
) const;
boost::optional<PositionImage> open_subtitles_for_frame (dcpomatic::DCPTime time) const;
// Hunk: do_emit_video is replaced by use_video, which takes an explicit
// end time instead of inferring the frame's duration.
- void emit_video (std::shared_ptr<PlayerVideo> pv, dcpomatic::DCPTime time);
- void do_emit_video (std::shared_ptr<PlayerVideo> pv, dcpomatic::DCPTime time);
+ void emit_video(std::shared_ptr<PlayerVideo> pv, dcpomatic::DCPTime time);
+ void use_video(std::shared_ptr<PlayerVideo> pv, dcpomatic::DCPTime time, dcpomatic::DCPTime end);
void emit_audio (std::shared_ptr<AudioBuffers> data, dcpomatic::DCPTime time);
std::shared_ptr<const Playlist> playlist () const;
/** > 0 if we are suspended (i.e. pass() and seek() do nothing) */
boost::atomic<int> _suspended;
// Hunk: pieces move from std::list to std::vector (contiguous, indexable,
// cache-friendly storage).
- std::list<std::shared_ptr<Piece>> _pieces;
+ std::vector<std::shared_ptr<Piece>> _pieces;
/** Size of the image we are rendering to; this may be the DCP frame size, or
* the size of preview in a window.
// NOTE(review): the doc comment above is unterminated in this chunk — its
// closing '*' '/' and the member it documents lie outside the visible lines.
/** Time of the next video that we will emit, or the time of the last accurate seek */
boost::optional<dcpomatic::DCPTime> _next_video_time;
// Hunk: _next_video_eyes is removed; per-eye bookkeeping appears to move into
// the Eyes-indexed _last_video below — TODO confirm in the .cc file.
- /** Eyes of the next video that we will emit */
- boost::optional<Eyes> _next_video_eyes;
/** Time of the next audio that we will emit, or the time of the last accurate seek */
boost::optional<dcpomatic::DCPTime> _next_audio_time;
boost::atomic<boost::optional<int>> _dcp_decode_reduction;
// Hunk: last-video tracking changes from a weak_ptr<Piece>-keyed map
// (owner_less comparator) to a per-eye (frame, time) pair indexed by Eyes.
- typedef std::map<std::weak_ptr<Piece>, std::shared_ptr<PlayerVideo>, std::owner_less<std::weak_ptr<Piece>>> LastVideoMap;
- LastVideoMap _last_video;
+ EnumIndexedVector<std::pair<std::shared_ptr<PlayerVideo>, dcpomatic::DCPTime>, Eyes> _last_video;
AudioMerger _audio_merger;
std::unique_ptr<Shuffler> _shuffler;
public:
// NOTE(review): from here down this is the body of a nested struct —
// StreamState, judging by the constructor names — whose opening line is
// outside this chunk.
StreamState () {}
// Hunk: the constructor drops the explicit last_push_end argument (and gains
// 'explicit'); last_push_end becomes optional, i.e. unset until first push.
- StreamState (std::shared_ptr<Piece> p, dcpomatic::DCPTime l)
+ explicit StreamState(std::shared_ptr<Piece> p)
: piece(p)
- , last_push_end(l)
{}
std::shared_ptr<Piece> piece;
- dcpomatic::DCPTime last_push_end;
+ boost::optional<dcpomatic::DCPTime> last_push_end;
};
// Per-audio-stream playback state, keyed by stream.
std::map<AudioStreamPtr, StreamState> _stream_states;