#ifndef OPENSHOT_FFMPEG_READER_H
#define OPENSHOT_FFMPEG_READER_H

// ...

    AVFormatContext *pFormatCtx;
    int i, videoStream, audioStream;
    AVCodecContext *pCodecCtx, *aCodecCtx;
#if (LIBAVFORMAT_VERSION_MAJOR >= 57)
    AVBufferRef *hw_device_ctx = NULL;
#endif
    AVStream *pStream, *aStream;
    bool is_duration_known;   ///< Is the container's duration valid and known?
    bool check_interlace;     ///< Should this reader check for interlaced video?
    bool has_missing_frames;  ///< Have missing frames been detected in this stream?
    // Bookkeeping maps for frames in progress, finished frames, missing frames
    // (with their source frames), and per-frame check counts.
    std::map<int64_t, int64_t> processing_video_frames;
    std::multimap<int64_t, int64_t> processing_audio_frames;
    std::map<int64_t, int64_t> processed_video_frames;
    std::map<int64_t, int64_t> processed_audio_frames;
    std::multimap<int64_t, int64_t> missing_video_frames;
    std::multimap<int64_t, int64_t> missing_video_frames_source;
    std::multimap<int64_t, int64_t> missing_audio_frames;
    std::multimap<int64_t, int64_t> missing_audio_frames_source;
    std::map<int64_t, int> checked_frames;
    int64_t num_packets_since_video_frame;  ///< Packets read since the last video frame
    int64_t num_checks_since_final;         ///< Checks performed since the last frame was finalized
    std::shared_ptr<openshot::Frame> last_video_frame;  ///< Most recently decoded video frame

    int64_t seeking_frame;           ///< Frame number targeted by the current seek
    int64_t seek_audio_frame_found;  ///< Audio frame found while verifying a seek
    int64_t seek_video_frame_found;  ///< Video frame found while verifying a seek

    int64_t audio_pts_offset;  ///< Offset applied when converting audio PTS to frame numbers
    int64_t video_pts_offset;  ///< Offset applied when converting video PTS to frame numbers

    int64_t largest_frame_processed;  ///< Largest frame number processed so far
    int64_t current_video_frame;      ///< Video frame currently being decoded

    // Hardware-decoding state (see the sketch after this listing)
    int hw_de_supported = 0;
    AVPixelFormat hw_de_av_pix_fmt = AV_PIX_FMT_NONE;
    AVHWDeviceType hw_de_av_device_type = AV_HWDEVICE_TYPE_NONE;
    /// Check whether hardware-accelerated decoding is supported for the given codec id
    int IsHardwareDecodeSupported(int codecid);
    /// Check the current seek position and determine whether we seeked far enough
    bool CheckSeek(bool is_video);
    /// Check whether a frame is missing and, if possible, substitute a nearby frame for it
    bool CheckMissingFrame(int64_t requested_frame);
    /// Check the working queue and move any finished frames to the final cache
    void CheckWorkingFrames(bool end_of_stream, int64_t requested_frame);
    /// Convert a frame number into an audio PTS value
    int64_t ConvertFrameToAudioPTS(int64_t frame_number);
    /// Convert a frame number into a video PTS value
    int64_t ConvertFrameToVideoPTS(int64_t frame_number);
    /// Convert a video PTS value into a frame number
    int64_t ConvertVideoPTStoFrame(int64_t pts);
    /// Create a new Frame (or return an existing one) and add it to the working queue
    std::shared_ptr<openshot::Frame> CreateFrame(int64_t requested_frame);
    /// Get the smallest video frame number that is still being processed
    int64_t GetSmallestVideoFrame();
    /// Get the smallest audio frame number that is still being processed
    int64_t GetSmallestAudioFrame();
    /// Get the PTS of the current video packet
    int64_t GetVideoPTS();
    /// Check whether the requested frame is only partially complete
    bool IsPartialFrame(int64_t requested_frame);
    /// Process a video packet into the requested frame
    void ProcessVideoPacket(int64_t requested_frame);
    /// Process an audio packet into the target frame, starting at the given sample
    void ProcessAudioPacket(int64_t requested_frame, int64_t target_frame, int starting_sample);
    /// Read the stream until the requested frame is found
    std::shared_ptr<openshot::Frame> ReadStream(int64_t requested_frame);
    /// Remove an AVFrame and deallocate its memory
    void RemoveAVFrame(AVFrame *);
    /// Remove an AVPacket and deallocate its memory
    void RemoveAVPacket(AVPacket *);
    /// Seek to a specific frame (not always frame-accurate on every codec)
    void Seek(int64_t requested_frame);
    /// Update the PTS offset (if any) for the video or audio stream
    void UpdatePTSOffset(bool is_video);
    /// Update file info for the audio stream
    void UpdateAudioInfo();
    /// Update file info for the video stream
    void UpdateVideoInfo();
    // ...

    /// Get a shared pointer to an openshot::Frame object for a specific frame number
    std::shared_ptr<openshot::Frame> GetFrame(int64_t requested_frame);
    /// Return the type name of this class
    std::string Name() { return "FFmpegReader"; };
    /// Load a JSON string into this object
    void SetJson(std::string value);
    /// Generate a Json::Value for this object
    Json::Value JsonValue();
    /// Load a Json::Value into this object
    void SetJsonValue(Json::Value root);
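
For context, here is a minimal usage sketch of the public interface declared above. It assumes the FFmpegReader(std::string path) constructor and the Open()/Close() methods that this excerpt omits (they come from the surrounding class and the ReaderBase interface), so treat it as an illustration rather than a snippet from the library's own documentation.

#include <memory>
#include "FFmpegReader.h"  // include path may differ depending on how libopenshot is installed

int main() {
    // Path is illustrative only.
    openshot::FFmpegReader reader("/home/user/video.mp4");
    reader.Open();

    // GetFrame() returns a shared pointer to an openshot::Frame.
    std::shared_ptr<openshot::Frame> frame = reader.GetFrame(1);

    // The reader can also round-trip its configuration as JSON.
    Json::Value state = reader.JsonValue();
    reader.SetJsonValue(state);

    reader.Close();
    return 0;
}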
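
The hardware-decoding members above (hw_device_ctx, hw_de_supported, hw_de_av_pix_fmt, hw_de_av_device_type, and IsHardwareDecodeSupported()) follow a common FFmpeg pattern. The sketch below is not the library's implementation; it only illustrates, assuming FFmpeg 4.0 or newer, how a decoder's hardware configurations are typically enumerated and how a device context of the kind hw_device_ctx holds is usually created.

extern "C" {
#include <libavcodec/avcodec.h>
#include <libavutil/hwcontext.h>
}

// Find the hardware pixel format a decoder exposes for a given device type,
// or AV_PIX_FMT_NONE if the codec has no matching hardware configuration.
static AVPixelFormat FindHWPixelFormat(const AVCodec *codec, AVHWDeviceType type) {
    for (int i = 0;; i++) {
        const AVCodecHWConfig *config = avcodec_get_hw_config(codec, i);
        if (!config)
            return AV_PIX_FMT_NONE;  // no more configurations to inspect
        if ((config->methods & AV_CODEC_HW_CONFIG_METHOD_HW_DEVICE_CTX) &&
            config->device_type == type)
            return config->pix_fmt;  // e.g. AV_PIX_FMT_VAAPI or AV_PIX_FMT_CUDA
    }
}

// Create a shared hardware device context; returns nullptr when the
// requested device type is unavailable on this machine.
static AVBufferRef *CreateHWDevice(AVHWDeviceType type) {
    AVBufferRef *ctx = nullptr;
    if (av_hwdevice_ctx_create(&ctx, type, nullptr, nullptr, 0) < 0)
        return nullptr;
    return ctx;
}

A successful check of this kind is presumably what would let hw_de_supported be set and hw_de_av_pix_fmt be remembered for later decode calls, though the reader's actual logic may differ.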