[DEV] have next/previous/play/pause fully functional

This commit is contained in:
Edouard DUPIN 2016-11-18 00:32:03 +01:00
parent 0f2da05591
commit 7d471b0af5
6 changed files with 171 additions and 80 deletions

View File

@ -15,6 +15,15 @@
#include <etk/tool.hpp>
#include <egami/egami.hpp>
/**
 * @brief Register all ffmpeg muxers/demuxers/codecs exactly once.
 *
 * The previous implementation used a hand-rolled `static bool isInit` flag,
 * which races if two threads call this at the same time. A function-local
 * static with a lambda initializer is guaranteed to be initialized exactly
 * once and thread-safely (C++11 "magic static").
 */
static void init_ffmpeg() {
	static const bool registered = []() {
		av_register_all();
		return true;
	}();
	(void)registered; // silence unused-variable warnings
}
static void unPlanar(void* _bufferOut, const void* _bufferIn, int32_t _nbSample, audio::format _format, int32_t _channelId, int32_t _nbChannel) {
switch(_format) {
case audio::format_int8: {
@ -119,6 +128,7 @@ void appl::BufferElementAudio::configure(audio::format _format, uint32_t _sample
}
appl::MediaDecoder::MediaDecoder() {
init_ffmpeg();
m_formatContext = nullptr;
m_videoDecoderContext = nullptr;
m_audioDecoderContext = nullptr;
@ -136,10 +146,12 @@ appl::MediaDecoder::MediaDecoder() {
m_convertContext = nullptr;
m_audioPresent = false;
m_audioFormat = audio::format_unknow;
// Enable or disable frame reference counting.
// You are not supposed to support both paths in your application but pick the one most appropriate to your needs.
// Look for the use of refcount in this example to see what are the differences of API usage between them.
m_refCount = false;
m_isInit = false;
m_stopRequested = false;
}
// Destructor: tear down decoder state. uninit() is a no-op when init() was
// never called or teardown already happened (guarded by m_isInit).
appl::MediaDecoder::~MediaDecoder() {
	uninit();
}
int appl::MediaDecoder::decode_packet(int *_gotFrame, int _cached) {
@ -258,9 +270,6 @@ int appl::MediaDecoder::decode_packet(int *_gotFrame, int _cached) {
}
}
}
// If we use frame reference counting, we own the data and need to de-reference it when we don't use it anymore
if (*_gotFrame && m_refCount)
av_frame_unref(m_frame);
return decoded;
}
@ -286,7 +295,7 @@ int appl::MediaDecoder::open_codec_context(int *_streamId, AVFormatContext *_for
return AVERROR(EINVAL);
}
// Init the decoders, with or without reference counting
av_dict_set(&opts, "refcounted_frames", m_refCount ? "1" : "0", 0);
av_dict_set(&opts, "refcounted_frames", "0", 0);
//av_dict_set(&opts, "threads", "auto", 0);
if ((ret = avcodec_open2(dec_ctx, dec, &opts)) < 0) {
APPL_ERROR("Failed to open " << av_get_media_type_string(_type) << " codec");
@ -302,15 +311,8 @@ double appl::MediaDecoder::getFps(AVCodecContext *_avctx) {
}
void appl::MediaDecoder::init(const std::string& _filename) {
int ret = 0;
// Enable or disable refcount:
if (false) {
m_refCount = true;
}
m_updateVideoTimeStampAfterSeek = false;
m_sourceFilename = _filename;
// register all formats and codecs
av_register_all();
// open input file, and allocate format context
if (avformat_open_input(&m_formatContext, m_sourceFilename.c_str(), nullptr, nullptr) < 0) {
APPL_ERROR("Could not open source file " << m_sourceFilename);
@ -319,7 +321,8 @@ void appl::MediaDecoder::init(const std::string& _filename) {
// retrieve stream information
if (avformat_find_stream_info(m_formatContext, nullptr) < 0) {
APPL_ERROR("Could not find stream information");
exit(1);
// TODO : check this, this will create a memory leak
return;;
}
m_duration = echrono::Duration(double(m_formatContext->duration)/double(AV_TIME_BASE));
APPL_INFO("Stream duration : " << m_duration);
@ -390,24 +393,30 @@ void appl::MediaDecoder::init(const std::string& _filename) {
APPL_PRINT("Audio configuration : " << m_audioMap << " " << m_audioFormat << " sampleRate=" <<m_audioSampleRate);
}
// dump input information to stderr
av_dump_format(m_formatContext, 0, m_sourceFilename.c_str(), 0);
if (!m_audioStream && !m_videoStream) {
// For test only: av_dump_format(m_formatContext, 0, m_sourceFilename.c_str(), 0);
if ( m_audioStream == nullptr
&& m_videoStream == nullptr) {
APPL_ERROR("Could not find audio or video stream in the input, aborting");
ret = 1;
return; // TODO : An error occured ... !!!!!
}
m_frame = av_frame_alloc();
if (!m_frame) {
APPL_ERROR("Could not allocate frame");
ret = AVERROR(ENOMEM);
int ret = AVERROR(ENOMEM);
APPL_ERROR("Could not allocate frame ret=" << ret);
return; // TODO : An error occured ... !!!!!
}
// initialize packet, set data to nullptr, let the demuxer fill it
av_init_packet(&m_packet);
m_packet.data = nullptr;
m_packet.size = 0;
m_isInit = true;
}
bool appl::MediaDecoder::onThreadCall() {
if (m_stopRequested == true) {
return true;
}
if (m_seek >= echrono::Duration(0)) {
// seek requested (create a copy to permit to update it in background):
echrono::Duration tmpSeek = m_seek;
@ -443,18 +452,19 @@ bool appl::MediaDecoder::onThreadCall() {
return (ret < 0);
}
/**
 * @brief Release all decoder resources allocated by init().
 *
 * Guarded by m_isInit so it is safe to call from both stop paths and the
 * destructor without double-freeing the ffmpeg contexts.
 *
 * Note: the rendered diff interleaved the removed "flush cached frames"
 * loop with this new guarded version; this is the clean post-commit body.
 */
void appl::MediaDecoder::uninit() {
	if (m_isInit == false) {
		// Nothing was allocated (or already released) ==> nothing to do.
		return;
	}
	APPL_PRINT("Demuxing & Decoding succeeded...");
	avcodec_close(m_videoDecoderContext);
	avcodec_close(m_audioDecoderContext);
	avformat_close_input(&m_formatContext);
	av_frame_free(&m_frame);
	m_isInit = false;
}
// Request the decode thread to exit: onThreadCall() polls this flag at the
// top of each iteration and returns true (thread done) when it is set.
// NOTE(review): the flag is a plain bool written from another thread —
// consider std::atomic<bool>; confirm gale::Thread's memory-visibility rules.
void appl::MediaDecoder::stop() {
	m_stopRequested = true;
}
void appl::MediaDecoder::flushBuffer() {

View File

@ -54,6 +54,7 @@ namespace appl {
};
class MediaDecoder : public gale::Thread {
bool m_stopRequested;
public:
echrono::Duration m_seekApply;
private:
@ -94,13 +95,10 @@ namespace appl {
// output format convertion:
SwsContext* m_convertContext;
// Enable or disable frame reference counting.
// You are not supposed to support both paths in your application but pick the one most appropriate to your needs.
// Look for the use of refcount in this example to see what are the differences of API usage between them.
bool m_refCount;
bool m_isInit;
public:
MediaDecoder();
~MediaDecoder();
int decode_packet(int *_gotFrame, int _cached);
int open_codec_context(int *_streamId, AVFormatContext *_formatContext, enum AVMediaType _type);
@ -129,6 +127,8 @@ namespace appl {
m_seek = _time;
}
void flushBuffer();
void stop() override;
};
}

View File

@ -28,10 +28,9 @@ void appl::Windows::init() {
composition += " previous\n";
composition += " </label>\n";
composition += " </button>\n";
composition += " <button name='bt-play'>\n";
composition += " <label>\n";
composition += " play/pause\n";
composition += " </label>\n";
composition += " <button name='bt-play' toggle='true'>\n";
composition += " <label>play</label>\n";
composition += " <label>pause</label>\n";
composition += " </button>\n";
composition += " <button name='bt-next'>\n";
composition += " <label>\n";
@ -53,7 +52,7 @@ void appl::Windows::init() {
m_composer->loadFromString(composition);
setSubWidget(m_composer);
subBind(ewol::widget::Button, "bt-previous", signalPressed, sharedFromThis(), &appl::Windows::onCallbackPrevious);
subBind(ewol::widget::Button, "bt-play", signalPressed, sharedFromThis(), &appl::Windows::onCallbackPlay);
subBind(ewol::widget::Button, "bt-play", signalValue, sharedFromThis(), &appl::Windows::onCallbackPlay);
subBind(ewol::widget::Button, "bt-next", signalPressed, sharedFromThis(), &appl::Windows::onCallbackNext);
subBind(appl::widget::VideoDisplay, "displayer", signalFps, sharedFromThis(), &appl::Windows::onCallbackFPS);
subBind(appl::widget::VideoDisplay, "displayer", signalPosition, sharedFromThis(), &appl::Windows::onCallbackPosition);
@ -67,12 +66,11 @@ void appl::Windows::onCallbackPrevious() {
if (m_id < 0) {
m_id = m_list.size()-1;
}
onCallbackPlay();
}
void appl::Windows::onCallbackPlay() {
ememory::SharedPtr<appl::widget::VideoDisplay> tmpDisp = ememory::dynamicPointerCast<appl::widget::VideoDisplay>(getSubObjectNamed("displayer"));
if (tmpDisp != nullptr) {
// stop previous (if needed)
tmpDisp->stop();
// Set new file:
tmpDisp->setFile(m_list[m_id]);
tmpDisp->play();
echrono::Duration time = tmpDisp->getDuration();
@ -82,12 +80,35 @@ void appl::Windows::onCallbackPlay() {
}
}
// Toggle-button callback: forward the pressed state to the video displayer.
// _isPressed == true  ==> resume/start playback
// _isPressed == false ==> pause playback
void appl::Windows::onCallbackPlay(const bool& _isPressed) {
	auto display = ememory::dynamicPointerCast<appl::widget::VideoDisplay>(getSubObjectNamed("displayer"));
	if (display == nullptr) {
		// No displayer widget available ==> nothing to drive.
		return;
	}
	if (_isPressed == false) {
		display->pause();
	} else {
		display->play();
	}
}
/**
 * @brief "next" button callback: advance to the next play-list entry
 * (wrapping to the start) and play it on the displayer.
 *
 * Fix: the rendered diff left a stale call to the removed zero-argument
 * onCallbackPlay() overload in this body; with the new
 * onCallbackPlay(const bool&) signature that call no longer compiles and
 * is dropped — the displayer is driven directly instead.
 */
void appl::Windows::onCallbackNext() {
	m_id++;
	if (m_id >= m_list.size()) {
		// Past the end ==> wrap around to the first entry.
		m_id = 0;
	}
	ememory::SharedPtr<appl::widget::VideoDisplay> tmpDisp = ememory::dynamicPointerCast<appl::widget::VideoDisplay>(getSubObjectNamed("displayer"));
	if (tmpDisp != nullptr) {
		// stop previous (if needed)
		tmpDisp->stop();
		// Set new file:
		tmpDisp->setFile(m_list[m_id]);
		tmpDisp->play();
		echrono::Duration time = tmpDisp->getDuration();
		APPL_DEBUG("duration = " << time << " " << etk::to_string(time.toSeconds()));
		// Re-scale the progress bar to the new media duration.
		propertySetOnWidgetNamed("progress-bar", "value", "0");
		propertySetOnWidgetNamed("progress-bar", "max", etk::to_string(time.toSeconds()));
	}
}
@ -99,6 +120,17 @@ void appl::Windows::onCallbackFPS(const int32_t& _fps) {
// Append a media file to the play-list. The very first file added is
// pre-loaded on the displayer so the progress bar reflects its duration
// (playback itself is not started here).
void appl::Windows::addFile(const std::string& _file) {
	APPL_DEBUG("Add file : " << _file);
	m_list.push_back(_file);
	if (m_list.size() != 1) {
		// Not the first entry ==> just keep it in the list.
		return;
	}
	m_id = 0;
	auto display = ememory::dynamicPointerCast<appl::widget::VideoDisplay>(getSubObjectNamed("displayer"));
	if (display == nullptr) {
		return;
	}
	display->setFile(m_list[m_id]);
	echrono::Duration time = display->getDuration();
	APPL_DEBUG("duration = " << time << " " << etk::to_string(time.toSeconds()));
	propertySetOnWidgetNamed("progress-bar", "value", "0");
	propertySetOnWidgetNamed("progress-bar", "max", etk::to_string(time.toSeconds()));
}
void appl::Windows::onCallbackPosition(const echrono::Duration& _time) {

View File

@ -23,7 +23,7 @@ namespace appl {
DECLARE_FACTORY(Windows);
public: // callback functions
void onCallbackPrevious();
void onCallbackPlay();
void onCallbackPlay(const bool& _isPressed);
void onCallbackNext();
void onCallbackFPS(const int32_t& _fps);
void onCallbackPosition(const echrono::Duration& _time);

View File

@ -74,22 +74,17 @@ void appl::widget::VideoDisplay::loadProgram() {
}
}
void appl::widget::VideoDisplay::setFile(const std::string& _filename) {
m_decoder.init(_filename);
if (m_decoder.haveAudio() == true) {
//Get the generic input:
m_audioInterface = m_audioManager->createOutput(m_decoder.audioGetSampleRate(),
m_decoder.audioGetChannelMap(),
m_decoder.audioGetFormat(),
"speaker");
if(m_audioInterface == nullptr) {
APPL_ERROR("Can not creata Audio interface");
}
m_audioInterface->setReadwrite();
m_audioInterface->start();
// Stop playing in all case...
stop();
// Clear the old interface
m_decoder.reset();
// Create a new interface
m_decoder = ememory::makeShared<appl::MediaDecoder>();
if (m_decoder == nullptr) {
APPL_ERROR("Can not create sharedPtr on decoder ...");
return;
}
m_decoder.start();
m_audioManager->generateDotAll("out/local_player_flow.dot");
m_decoder->init(_filename);
markToRedraw();
}
@ -98,13 +93,56 @@ bool appl::widget::VideoDisplay::isPlaying() {
}
/**
 * @brief Start (or resume) playback of the currently configured media.
 *
 * If the decoder thread is already running, only the playing flag is set
 * (resume from pause). Otherwise the audio output is created when the
 * stream has audio, and the decoder thread is started.
 *
 * Fix: when m_audioManager->createOutput() returned nullptr the original
 * logged an error but fell through and dereferenced the null interface
 * (setReadwrite()/start()); the audio setup is now guarded.
 */
void appl::widget::VideoDisplay::play() {
	if (m_decoder == nullptr) {
		APPL_WARNING("Request play with no associated decoder");
		return;
	}
	m_isPalying = true;
	if (m_decoder->getState() != gale::Thread::state::stop) {
		// The thread is already active ==> then it is just in pause ...
		APPL_DEBUG("Already started");
		return;
	}
	if (m_decoder->haveAudio() == true) {
		m_audioInterface = m_audioManager->createOutput(m_decoder->audioGetSampleRate(),
		                                                m_decoder->audioGetChannelMap(),
		                                                m_decoder->audioGetFormat(),
		                                                "speaker");
		if (m_audioInterface == nullptr) {
			// (message typo fixed: "creata" -> "create")
			APPL_ERROR("Can not create Audio interface");
		} else {
			m_audioInterface->setReadwrite();
			m_audioInterface->start();
		}
	}
	// Start decoder, this is maybe not the good point, but if we configure a decoder, it is to use it ...
	m_decoder->start();
	//TODO: Set an option to river to auto-generate dot: m_audioManager->generateDotAll("out/local_player_flow.dot");
}
// Pause playback: periodicEvent() only advances m_currentTime while
// m_isPalying is true, so clearing it freezes the presented position.
// NOTE(review): member name "m_isPalying" is a typo for "m_isPlaying";
// renaming requires a header change outside this block.
void appl::widget::VideoDisplay::pause() {
	m_isPalying = false;
}
// Stop playback completely (unlike pause()): rewind the decoder to 0,
// stop its thread, and tear down the audio output interface.
void appl::widget::VideoDisplay::stop() {
	m_isPalying = false;
	if ( m_decoder != nullptr
	  && m_decoder->getState() != gale::Thread::state::stop) {
		// NOTE(review): informational trace logged at ERROR level — consider APPL_DEBUG.
		APPL_ERROR("Stop Decoder");
		// stop it ... and request seek at 0 position ...
		m_decoder->seek(echrono::Duration(0));
		m_decoder->stop();
	}
	if (m_audioInterface != nullptr) {
		// NOTE(review): informational trace logged at ERROR level — consider APPL_DEBUG.
		APPL_ERROR("Stop audio interface");
		// Stop audio interface
		m_audioInterface->stop();
		// wait a little to be sure it is done correctly:
		// TODO : Set this in an asynchronous loop ...
		m_audioInterface.reset();
	}
}
void appl::widget::VideoDisplay::onDraw() {
if (m_VBO->bufferSize(m_vboIdCoord) <= 0) {
APPL_WARNING("Nothink to draw...");
@ -118,9 +156,7 @@ void appl::widget::VideoDisplay::onDraw() {
APPL_ERROR("No shader ...");
return;
}
//APPL_WARNING("Display image : " << m_VBO->bufferSize(m_vboIdCoord));
gale::openGL::disable(gale::openGL::flag_depthTest);
// set Matrix : translation/positionMatrix
mat4 tmpMatrix = gale::openGL::getMatrix()*m_matrixApply;
m_GLprogram->use();
m_GLprogram->uniformMatrix(m_GLMatrix, tmpMatrix);
@ -199,34 +235,39 @@ void appl::widget::VideoDisplay::periodicEvent(const ewol::event::Time& _event)
if (m_isPalying == true) {
m_currentTime += _event.getDeltaCallDuration();
}
if (m_decoder.m_seekApply >= echrono::Duration(0)) {
m_currentTime = m_decoder.m_seekApply;
m_decoder.m_seekApply = echrono::Duration(-1);
if (m_decoder == nullptr) {
return;
}
if (m_decoder->m_seekApply >= echrono::Duration(0)) {
m_currentTime = m_decoder->m_seekApply;
m_decoder->m_seekApply = echrono::Duration(-1);
if (m_audioInterface != nullptr) {
m_audioInterface->clearInternalBuffer();
}
}
// SET AUDIO:
int32_t idSlot = m_decoder.audioGetOlderSlot();
int32_t idSlot = m_decoder->audioGetOlderSlot();
if ( idSlot != -1
&& m_currentTime > m_decoder.m_audioPool[idSlot].m_time) {
int32_t nbSample = m_decoder.m_audioPool[idSlot].m_buffer.size()
/ audio::getFormatBytes(m_decoder.m_audioPool[idSlot].m_format)
/ m_decoder.m_audioPool[idSlot].m_map.size();
m_audioInterface->write(&m_decoder.m_audioPool[idSlot].m_buffer[0], nbSample);
m_decoder.m_audioPool[idSlot].m_isUsed = false;
&& m_currentTime > m_decoder->m_audioPool[idSlot].m_time) {
if (m_audioInterface != nullptr) {
int32_t nbSample = m_decoder->m_audioPool[idSlot].m_buffer.size()
/ audio::getFormatBytes(m_decoder->m_audioPool[idSlot].m_format)
/ m_decoder->m_audioPool[idSlot].m_map.size();
m_audioInterface->write(&m_decoder->m_audioPool[idSlot].m_buffer[0], nbSample);
}
m_decoder->m_audioPool[idSlot].m_isUsed = false;
}
// SET VIDEO:
idSlot = m_decoder.videoGetOlderSlot();
idSlot = m_decoder->videoGetOlderSlot();
// check the slot is valid and check display time of the element:
if ( idSlot != -1
&& m_currentTime > m_decoder.m_videoPool[idSlot].m_time) {
m_resource->get().swap(m_decoder.m_videoPool[idSlot].m_image);
&& m_currentTime > m_decoder->m_videoPool[idSlot].m_time) {
m_resource->get().swap(m_decoder->m_videoPool[idSlot].m_image);
m_imageSize = m_resource->get().getSize();
ivec2 tmpSize = m_decoder.m_videoPool[idSlot].m_imagerealSize;
m_decoder.m_videoPool[idSlot].m_imagerealSize = m_videoSize;
ivec2 tmpSize = m_decoder->m_videoPool[idSlot].m_imagerealSize;
m_decoder->m_videoPool[idSlot].m_imagerealSize = m_videoSize;
m_videoSize = tmpSize;
m_decoder.m_videoPool[idSlot].m_isUsed = false;
m_decoder->m_videoPool[idSlot].m_isUsed = false;
m_resource->flush();
m_nbFramePushed++;
}
@ -238,10 +279,14 @@ void appl::widget::VideoDisplay::periodicEvent(const ewol::event::Time& _event)
m_nbFramePushed = 0;
}
signalPosition.emit(m_currentTime);
// TODO : Chek if this is needed, the display configuration not change too much ...
markToRedraw();
}
/**
 * @brief Forward a seek request to the decoder (no-op when no decoder is set).
 * @param[in] _time Target position in the stream.
 *
 * Fix: the rendered diff left the stale pre-refactor line
 * `m_decoder.seek(_time);` alongside the new shared-pointer access; with
 * m_decoder now an ememory::SharedPtr that line does not compile and is
 * removed — only the null-checked arrow access remains.
 */
void appl::widget::VideoDisplay::seek(const echrono::Duration& _time) {
	APPL_PRINT("seek request = " << _time);
	if (m_decoder == nullptr) {
		return;
	}
	m_decoder->seek(_time);
}

View File

@ -26,7 +26,7 @@ namespace appl {
esignal::Signal<echrono::Duration> signalPosition; //!< signal the current duration of the video duration
private:
mat4 m_matrixApply;
appl::MediaDecoder m_decoder;
ememory::SharedPtr<appl::MediaDecoder> m_decoder;
ivec2 m_videoSize;
ivec2 m_imageSize;
echrono::Duration m_LastResetCounter;
@ -68,6 +68,7 @@ namespace appl {
bool isPlaying();
void play();
void pause();
void stop();
public:
void periodicEvent(const ewol::event::Time& _event);
private:
@ -78,7 +79,10 @@ namespace appl {
ememory::SharedPtr<audio::river::Interface> m_audioInterface; //!< Play audio interface
public:
/**
 * @brief Total duration of the current media.
 * @return The decoder's stream duration, or a zero Duration when no
 *         decoder is attached yet.
 *
 * Fix: the rendered diff left the stale pre-refactor line
 * `return m_decoder.getDuration();` (dot access on what is now an
 * ememory::SharedPtr) alongside the new guarded version; only the
 * null-checked arrow access is kept.
 */
echrono::Duration getDuration() {
	if (m_decoder != nullptr) {
		return m_decoder->getDuration();
	}
	return echrono::Duration(0);
}
void seek(const echrono::Duration& _time);
};