
  1. /*
  2. * An example showing how to play a stream sync'd to video, using ffmpeg.
  3. *
  4. * Requires C++11.
  5. */
  6. #include <condition_variable>
  7. #include <functional>
  8. #include <algorithm>
  9. #include <iostream>
  10. #include <iomanip>
  11. #include <cstring>
  12. #include <limits>
  13. #include <thread>
  14. #include <chrono>
  15. #include <atomic>
  16. #include <mutex>
  17. #include <deque>
  18. #include <array>
  19. extern "C" {
  20. #include "libavcodec/avcodec.h"
  21. #include "libavformat/avformat.h"
  22. #include "libavformat/avio.h"
  23. #include "libavutil/time.h"
  24. #include "libavutil/pixfmt.h"
  25. #include "libavutil/avstring.h"
  26. #include "libavutil/channel_layout.h"
  27. #include "libswscale/swscale.h"
  28. #include "libswresample/swresample.h"
  29. }
  30. #include "SDL.h"
  31. #include "AL/alc.h"
  32. #include "AL/al.h"
  33. #include "AL/alext.h"
  34. namespace
  35. {
  36. static const std::string AppName("alffplay");
  37. static bool do_direct_out = false;
  38. static bool has_latency_check = false;
  39. static LPALGETSOURCEDVSOFT alGetSourcedvSOFT;
  40. #define AUDIO_BUFFER_TIME 100 /* In milliseconds, per-buffer */
  41. #define AUDIO_BUFFER_QUEUE_SIZE 8 /* Number of buffers to queue */
  42. #define MAX_QUEUE_SIZE (15 * 1024 * 1024) /* Bytes of compressed data to keep queued */
  43. #define AV_SYNC_THRESHOLD 0.01
  44. #define AV_NOSYNC_THRESHOLD 10.0
  45. #define SAMPLE_CORRECTION_MAX_DIFF 0.05
  46. #define AUDIO_DIFF_AVG_NB 20
  47. #define VIDEO_PICTURE_QUEUE_SIZE 16
  48. enum {
  49. FF_UPDATE_EVENT = SDL_USEREVENT,
  50. FF_REFRESH_EVENT,
  51. FF_MOVIE_DONE_EVENT
  52. };
  53. enum {
  54. AV_SYNC_AUDIO_MASTER,
  55. AV_SYNC_VIDEO_MASTER,
  56. AV_SYNC_EXTERNAL_MASTER,
  57. DEFAULT_AV_SYNC_TYPE = AV_SYNC_EXTERNAL_MASTER
  58. };
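/* A simple thread-safe FIFO of demuxed packets. The parser thread put()s
 * packets and finish()es the queue at end-of-stream; the decoder threads
 * peek()/pop() them, blocking on the condition variable while the queue is
 * empty. */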
  59. struct PacketQueue {
  60. std::deque<AVPacket> mPackets;
  61. std::atomic<int> mTotalSize;
  62. std::atomic<bool> mFinished;
  63. std::mutex mMutex;
  64. std::condition_variable mCond;
  65. PacketQueue() : mTotalSize(0), mFinished(false)
  66. { }
  67. ~PacketQueue()
  68. { clear(); }
  69. int put(const AVPacket *pkt);
  70. int peek(AVPacket *pkt, std::atomic<bool> &quit_var);
  71. void pop();
  72. void clear();
  73. void finish();
  74. };
  75. struct MovieState;
  76. struct AudioState {
  77. MovieState *mMovie;
  78. AVStream *mStream;
  79. AVCodecContext *mCodecCtx;
  80. PacketQueue mQueue;
  81. /* Used for clock difference average computation */
  82. struct {
  83. std::atomic<int> Clocks; /* In microseconds */
  84. double Accum;
  85. double AvgCoeff;
  86. double Threshold;
  87. int AvgCount;
  88. } mDiff;
  89. /* Time (in seconds) of the next sample to be buffered */
  90. double mCurrentPts;
  91. /* Decompressed sample frame, and swresample context for conversion */
  92. AVFrame *mDecodedFrame;
  93. struct SwrContext *mSwresCtx;
  94. /* Conversion format, for what gets fed to OpenAL */
  95. int mDstChanLayout;
  96. enum AVSampleFormat mDstSampleFmt;
  97. /* Storage of converted samples */
  98. uint8_t *mSamples;
  99. int mSamplesLen; /* In samples */
  100. int mSamplesPos;
  101. int mSamplesMax;
  102. /* OpenAL format */
  103. ALenum mFormat;
  104. ALsizei mFrameSize;
  105. std::recursive_mutex mSrcMutex;
  106. ALuint mSource;
  107. ALuint mBuffers[AUDIO_BUFFER_QUEUE_SIZE];
  108. ALsizei mBufferIdx;
  109. AudioState(MovieState *movie)
  110. : mMovie(movie), mStream(nullptr), mCodecCtx(nullptr)
  111. , mDiff{{0}, 0.0, 0.0, 0.0, 0}, mCurrentPts(0.0), mDecodedFrame(nullptr)
  112. , mSwresCtx(nullptr), mDstChanLayout(0), mDstSampleFmt(AV_SAMPLE_FMT_NONE)
  113. , mSamples(nullptr), mSamplesLen(0), mSamplesPos(0), mSamplesMax(0)
  114. , mFormat(AL_NONE), mFrameSize(0), mSource(0), mBufferIdx(0)
  115. {
  116. for(auto &buf : mBuffers)
  117. buf = 0;
  118. }
  119. ~AudioState()
  120. {
  121. if(mSource)
  122. alDeleteSources(1, &mSource);
  123. alDeleteBuffers(AUDIO_BUFFER_QUEUE_SIZE, mBuffers);
  124. av_frame_free(&mDecodedFrame);
  125. swr_free(&mSwresCtx);
  126. av_freep(&mSamples);
  127. avcodec_free_context(&mCodecCtx);
  128. }
  129. double getClock();
  130. int getSync();
  131. int decodeFrame();
  132. int readAudio(uint8_t *samples, int length);
  133. int handler();
  134. };
  135. struct VideoState {
  136. MovieState *mMovie;
  137. AVStream *mStream;
  138. AVCodecContext *mCodecCtx;
  139. PacketQueue mQueue;
  140. double mClock;
  141. double mFrameTimer;
  142. double mFrameLastPts;
  143. double mFrameLastDelay;
  144. double mCurrentPts;
  145. /* time (av_gettime) at which we updated mCurrentPts - used to have running video pts */
  146. int64_t mCurrentPtsTime;
  147. /* Decompressed video frame, and swscale context for conversion */
  148. AVFrame *mDecodedFrame;
  149. struct SwsContext *mSwscaleCtx;
  150. struct Picture {
  151. SDL_Texture *mImage;
  152. int mWidth, mHeight; /* Logical image size (actual size may be larger) */
  153. std::atomic<bool> mUpdated;
  154. double mPts;
  155. Picture()
  156. : mImage(nullptr), mWidth(0), mHeight(0), mUpdated(false), mPts(0.0)
  157. { }
  158. ~Picture()
  159. {
  160. if(mImage)
  161. SDL_DestroyTexture(mImage);
  162. mImage = nullptr;
  163. }
  164. };
  165. std::array<Picture,VIDEO_PICTURE_QUEUE_SIZE> mPictQ;
  166. size_t mPictQSize, mPictQRead, mPictQWrite;
  167. std::mutex mPictQMutex;
  168. std::condition_variable mPictQCond;
  169. bool mFirstUpdate;
  170. std::atomic<bool> mEOS;
  171. std::atomic<bool> mFinalUpdate;
  172. VideoState(MovieState *movie)
  173. : mMovie(movie), mStream(nullptr), mCodecCtx(nullptr), mClock(0.0)
  174. , mFrameTimer(0.0), mFrameLastPts(0.0), mFrameLastDelay(0.0)
  175. , mCurrentPts(0.0), mCurrentPtsTime(0), mDecodedFrame(nullptr)
  176. , mSwscaleCtx(nullptr), mPictQSize(0), mPictQRead(0), mPictQWrite(0)
  177. , mFirstUpdate(true), mEOS(false), mFinalUpdate(false)
  178. { }
  179. ~VideoState()
  180. {
  181. sws_freeContext(mSwscaleCtx);
  182. mSwscaleCtx = nullptr;
  183. av_frame_free(&mDecodedFrame);
  184. avcodec_free_context(&mCodecCtx);
  185. }
  186. double getClock();
  187. static Uint32 SDLCALL sdl_refresh_timer_cb(Uint32 interval, void *opaque);
  188. void schedRefresh(int delay);
  189. void display(SDL_Window *screen, SDL_Renderer *renderer);
  190. void refreshTimer(SDL_Window *screen, SDL_Renderer *renderer);
  191. void updatePicture(SDL_Window *screen, SDL_Renderer *renderer);
  192. int queuePicture(double pts);
  193. double synchronize(double pts);
  194. int handler();
  195. };
  196. struct MovieState {
  197. AVFormatContext *mFormatCtx;
  198. int mVideoStream, mAudioStream;
  199. int mAVSyncType;
  200. int64_t mExternalClockBase;
  201. std::atomic<bool> mQuit;
  202. AudioState mAudio;
  203. VideoState mVideo;
  204. std::thread mParseThread;
  205. std::thread mAudioThread;
  206. std::thread mVideoThread;
  207. std::string mFilename;
  208. MovieState(std::string fname)
  209. : mFormatCtx(nullptr), mVideoStream(0), mAudioStream(0)
  210. , mAVSyncType(DEFAULT_AV_SYNC_TYPE), mExternalClockBase(0), mQuit(false)
  211. , mAudio(this), mVideo(this), mFilename(std::move(fname))
  212. { }
  213. ~MovieState()
  214. {
  215. mQuit = true;
  216. if(mParseThread.joinable())
  217. mParseThread.join();
  218. avformat_close_input(&mFormatCtx);
  219. }
  220. static int decode_interrupt_cb(void *ctx);
  221. bool prepare();
  222. void setTitle(SDL_Window *window);
  223. double getClock();
  224. double getMasterClock();
  225. int streamComponentOpen(int stream_index);
  226. int parse_handler();
  227. };
  228. int PacketQueue::put(const AVPacket *pkt)
  229. {
  230. std::unique_lock<std::mutex> lock(mMutex);
  231. mPackets.push_back(AVPacket{});
  232. if(av_packet_ref(&mPackets.back(), pkt) != 0)
  233. {
  234. mPackets.pop_back();
  235. return -1;
  236. }
  237. mTotalSize += mPackets.back().size;
  238. lock.unlock();
  239. mCond.notify_one();
  240. return 0;
  241. }
  242. int PacketQueue::peek(AVPacket *pkt, std::atomic<bool> &quit_var)
  243. {
  244. std::unique_lock<std::mutex> lock(mMutex);
  245. while(!quit_var.load())
  246. {
  247. if(!mPackets.empty())
  248. {
  249. if(av_packet_ref(pkt, &mPackets.front()) != 0)
  250. return -1;
  251. return 1;
  252. }
  253. if(mFinished.load())
  254. return 0;
  255. mCond.wait(lock);
  256. }
  257. return -1;
  258. }
  259. void PacketQueue::pop()
  260. {
  261. std::unique_lock<std::mutex> lock(mMutex);
  262. AVPacket *pkt = &mPackets.front();
  263. mTotalSize -= pkt->size;
  264. av_packet_unref(pkt);
  265. mPackets.pop_front();
  266. }
  267. void PacketQueue::clear()
  268. {
  269. std::unique_lock<std::mutex> lock(mMutex);
  270. std::for_each(mPackets.begin(), mPackets.end(),
  271. [](AVPacket &pkt) { av_packet_unref(&pkt); }
  272. );
  273. mPackets.clear();
  274. mTotalSize = 0;
  275. }
  276. void PacketQueue::finish()
  277. {
  278. std::unique_lock<std::mutex> lock(mMutex);
  279. mFinished = true;
  280. lock.unlock();
  281. mCond.notify_all();
  282. }
  283. double AudioState::getClock()
  284. {
  285. double pts;
  286. std::unique_lock<std::recursive_mutex> lock(mSrcMutex);
  287. /* The audio clock is the timestamp of the sample currently being heard.
  288. * It's based on 4 components:
  289. * 1 - The timestamp of the next sample to buffer (mCurrentPts)
  290. * 2 - The length of the source's buffer queue
  291. * 3 - The offset OpenAL is currently at in the source (the first value
  292. * from AL_SEC_OFFSET_LATENCY_SOFT)
  293. * 4 - The latency between OpenAL and the DAC (the second value from
  294. * AL_SEC_OFFSET_LATENCY_SOFT)
  295. *
  296. * Subtracting the length of the source queue from the next sample's
  297. * timestamp gives the timestamp of the sample at start of the source
  298. * queue. Adding the source offset to that results in the timestamp for
  299. * OpenAL's current position, and subtracting the source latency from that
  300. * gives the timestamp of the sample currently at the DAC.
  301. */
  302. pts = mCurrentPts;
  303. if(mSource)
  304. {
  305. ALdouble offset[2];
  306. ALint queue_size;
  307. ALint status;
  308. /* NOTE: The source state must be checked last, in case an underrun
  309. * occurs and the source stops between retrieving the offset+latency
  310. * and getting the state. */
  311. if(has_latency_check)
  312. {
  313. alGetSourcedvSOFT(mSource, AL_SEC_OFFSET_LATENCY_SOFT, offset);
  314. alGetSourcei(mSource, AL_BUFFERS_QUEUED, &queue_size);
  315. }
  316. else
  317. {
  318. ALint ioffset;
  319. alGetSourcei(mSource, AL_SAMPLE_OFFSET, &ioffset);
  320. alGetSourcei(mSource, AL_BUFFERS_QUEUED, &queue_size);
  321. offset[0] = (double)ioffset / (double)mCodecCtx->sample_rate;
  322. offset[1] = 0.0;
  323. }
  324. alGetSourcei(mSource, AL_SOURCE_STATE, &status);
  325. /* If the source is AL_STOPPED, then there was an underrun and all
  326. * buffers are processed, so ignore the source queue. The audio thread
  327. * will put the source into an AL_INITIAL state and clear the queue
  328. * when it starts recovery. */
  329. if(status != AL_STOPPED)
  330. pts -= queue_size*((double)AUDIO_BUFFER_TIME/1000.0) - offset[0];
  331. if(status == AL_PLAYING)
  332. pts -= offset[1];
  333. }
  334. lock.unlock();
  335. return std::max(pts, 0.0);
  336. }
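/* Returns the number of sample frames to skip (positive) or duplicate
 * (negative) so the audio clock converges on the master clock. Small
 * differences are smoothed with an exponential average and ignored while the
 * average stays below mDiff.Threshold; the per-update correction is clamped
 * to SAMPLE_CORRECTION_MAX_DIFF seconds. */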
  337. int AudioState::getSync()
  338. {
  339. double diff, avg_diff, ref_clock;
  340. if(mMovie->mAVSyncType == AV_SYNC_AUDIO_MASTER)
  341. return 0;
  342. ref_clock = mMovie->getMasterClock();
  343. diff = ref_clock - getClock();
  344. if(!(fabs(diff) < AV_NOSYNC_THRESHOLD))
  345. {
  346. /* Difference is TOO big; reset diff stuff */
  347. mDiff.Accum = 0.0;
  348. return 0;
  349. }
  350. /* Accumulate the diffs */
  351. mDiff.Accum = mDiff.Accum*mDiff.AvgCoeff + diff;
  352. avg_diff = mDiff.Accum*(1.0 - mDiff.AvgCoeff);
  353. if(fabs(avg_diff) < mDiff.Threshold)
  354. return 0;
  355. /* Constrain the per-update difference to avoid exceedingly large skips */
  356. if(!(diff <= SAMPLE_CORRECTION_MAX_DIFF))
  357. diff = SAMPLE_CORRECTION_MAX_DIFF;
  358. else if(!(diff >= -SAMPLE_CORRECTION_MAX_DIFF))
  359. diff = -SAMPLE_CORRECTION_MAX_DIFF;
  360. return (int)(diff*mCodecCtx->sample_rate);
  361. }
  362. int AudioState::decodeFrame()
  363. {
  364. while(!mMovie->mQuit.load())
  365. {
  366. while(!mMovie->mQuit.load())
  367. {
  368. /* Get the next packet */
  369. AVPacket pkt{};
  370. if(mQueue.peek(&pkt, mMovie->mQuit) <= 0)
  371. return -1;
  372. int ret = avcodec_send_packet(mCodecCtx, &pkt);
  373. if(ret != AVERROR(EAGAIN))
  374. {
  375. if(ret < 0)
  376. std::cerr<< "Failed to send encoded packet: 0x"<<std::hex<<ret<<std::dec <<std::endl;
  377. mQueue.pop();
  378. }
  379. av_packet_unref(&pkt);
  380. if(ret == 0 || ret == AVERROR(EAGAIN))
  381. break;
  382. }
  383. int ret = avcodec_receive_frame(mCodecCtx, mDecodedFrame);
  384. if(ret == AVERROR(EAGAIN))
  385. continue;
  386. if(ret == AVERROR_EOF || ret < 0)
  387. {
  388. std::cerr<< "Failed to decode frame: "<<ret <<std::endl;
  389. return 0;
  390. }
  391. if(mDecodedFrame->nb_samples <= 0)
  392. {
  393. av_frame_unref(mDecodedFrame);
  394. continue;
  395. }
  396. /* If provided, update w/ pts */
  397. int64_t pts = av_frame_get_best_effort_timestamp(mDecodedFrame);
  398. if(pts != AV_NOPTS_VALUE)
  399. mCurrentPts = av_q2d(mStream->time_base)*pts;
  400. if(mDecodedFrame->nb_samples > mSamplesMax)
  401. {
  402. av_freep(&mSamples);
  403. av_samples_alloc(
  404. &mSamples, nullptr, mCodecCtx->channels,
  405. mDecodedFrame->nb_samples, mDstSampleFmt, 0
  406. );
  407. mSamplesMax = mDecodedFrame->nb_samples;
  408. }
  409. /* Return the number of sample frames converted */
  410. int data_size = swr_convert(mSwresCtx, &mSamples, mDecodedFrame->nb_samples,
  411. (const uint8_t**)mDecodedFrame->data, mDecodedFrame->nb_samples
  412. );
  413. av_frame_unref(mDecodedFrame);
  414. return data_size;
  415. }
  416. return 0;
  417. }
  418. /* Duplicates the sample frame at 'in' to 'out', 'count' times. The frame
  419. * size is a multiple of the template type size.
  420. */
  421. template<typename T>
  422. static void sample_dup(uint8_t *out, const uint8_t *in, int count, int frame_size)
  423. {
  424. const T *sample = reinterpret_cast<const T*>(in);
  425. T *dst = reinterpret_cast<T*>(out);
  426. if(frame_size == sizeof(T))
  427. std::fill_n(dst, count, *sample);
  428. else
  429. {
  430. /* NOTE: frame_size is a multiple of sizeof(T). */
  431. int type_mult = frame_size / sizeof(T);
  432. int i = 0;
  433. std::generate_n(dst, count*type_mult,
  434. [sample,type_mult,&i]() -> T
  435. {
  436. T ret = sample[i];
  437. i = (i+1)%type_mult;
  438. return ret;
  439. }
  440. );
  441. }
  442. }
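/* Fills 'samples' with up to 'length' bytes of converted audio, decoding more
 * input as needed and skipping or duplicating sample frames as directed by
 * getSync(). A final short read is padded with silence. Returns the number of
 * bytes written. */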
  443. int AudioState::readAudio(uint8_t *samples, int length)
  444. {
  445. int sample_skip = getSync();
  446. int audio_size = 0;
  447. /* Decode data as needed and fill the provided buffer with converted
  448. * samples. */
  449. length /= mFrameSize;
  450. while(audio_size < length)
  451. {
  452. if(mSamplesLen <= 0 || mSamplesPos >= mSamplesLen)
  453. {
  454. int frame_len = decodeFrame();
  455. if(frame_len <= 0) break;
  456. mSamplesLen = frame_len;
  457. mSamplesPos = std::min(mSamplesLen, sample_skip);
  458. sample_skip -= mSamplesPos;
  459. mCurrentPts += (double)mSamplesPos / (double)mCodecCtx->sample_rate;
  460. continue;
  461. }
  462. int rem = length - audio_size;
  463. if(mSamplesPos >= 0)
  464. {
  465. int len = mSamplesLen - mSamplesPos;
  466. if(rem > len) rem = len;
  467. memcpy(samples, mSamples + mSamplesPos*mFrameSize, rem*mFrameSize);
  468. }
  469. else
  470. {
  471. rem = std::min(rem, -mSamplesPos);
  472. /* Add samples by copying the first sample */
  473. if((mFrameSize&7) == 0)
  474. sample_dup<uint64_t>(samples, mSamples, rem, mFrameSize);
  475. else if((mFrameSize&3) == 0)
  476. sample_dup<uint32_t>(samples, mSamples, rem, mFrameSize);
  477. else if((mFrameSize&1) == 0)
  478. sample_dup<uint16_t>(samples, mSamples, rem, mFrameSize);
  479. else
  480. sample_dup<uint8_t>(samples, mSamples, rem, mFrameSize);
  481. }
  482. mSamplesPos += rem;
  483. mCurrentPts += (double)rem / mCodecCtx->sample_rate;
  484. samples += rem*mFrameSize;
  485. audio_size += rem;
  486. }
  487. if(audio_size < length && audio_size > 0)
  488. {
  489. int rem = length - audio_size;
  490. std::fill_n(samples, rem*mFrameSize,
  491. (mDstSampleFmt == AV_SAMPLE_FMT_U8) ? 0x80 : 0x00);
  492. mCurrentPts += (double)rem / mCodecCtx->sample_rate;
  493. audio_size += rem;
  494. }
  495. return audio_size * mFrameSize;
  496. }
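/* Audio thread entry point. Picks an OpenAL buffer format matching the
 * decoded stream, sets up swresample for the conversion, then loops
 * unqueueing processed buffers and queueing freshly decoded ones, restarting
 * the source if an underrun stops it. */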
  497. int AudioState::handler()
  498. {
  499. std::unique_lock<std::recursive_mutex> lock(mSrcMutex);
  500. ALenum fmt;
  501. /* Find a suitable format for OpenAL. */
  502. mDstChanLayout = 0;
  503. if(mCodecCtx->sample_fmt == AV_SAMPLE_FMT_U8 || mCodecCtx->sample_fmt == AV_SAMPLE_FMT_U8P)
  504. {
  505. mDstSampleFmt = AV_SAMPLE_FMT_U8;
  506. mFrameSize = 1;
  507. if(mCodecCtx->channel_layout == AV_CH_LAYOUT_7POINT1 &&
  508. alIsExtensionPresent("AL_EXT_MCFORMATS") &&
  509. (fmt=alGetEnumValue("AL_FORMAT_71CHN8")) != AL_NONE && fmt != -1)
  510. {
  511. mDstChanLayout = mCodecCtx->channel_layout;
  512. mFrameSize *= 8;
  513. mFormat = fmt;
  514. }
  515. if((mCodecCtx->channel_layout == AV_CH_LAYOUT_5POINT1 ||
  516. mCodecCtx->channel_layout == AV_CH_LAYOUT_5POINT1_BACK) &&
  517. alIsExtensionPresent("AL_EXT_MCFORMATS") &&
  518. (fmt=alGetEnumValue("AL_FORMAT_51CHN8")) != AL_NONE && fmt != -1)
  519. {
  520. mDstChanLayout = mCodecCtx->channel_layout;
  521. mFrameSize *= 6;
  522. mFormat = fmt;
  523. }
  524. if(mCodecCtx->channel_layout == AV_CH_LAYOUT_MONO)
  525. {
  526. mDstChanLayout = mCodecCtx->channel_layout;
  527. mFrameSize *= 1;
  528. mFormat = AL_FORMAT_MONO8;
  529. }
  530. if(!mDstChanLayout)
  531. {
  532. mDstChanLayout = AV_CH_LAYOUT_STEREO;
  533. mFrameSize *= 2;
  534. mFormat = AL_FORMAT_STEREO8;
  535. }
  536. }
  537. if((mCodecCtx->sample_fmt == AV_SAMPLE_FMT_FLT || mCodecCtx->sample_fmt == AV_SAMPLE_FMT_FLTP) &&
  538. alIsExtensionPresent("AL_EXT_FLOAT32"))
  539. {
  540. mDstSampleFmt = AV_SAMPLE_FMT_FLT;
  541. mFrameSize = 4;
  542. if(mCodecCtx->channel_layout == AV_CH_LAYOUT_7POINT1 &&
  543. alIsExtensionPresent("AL_EXT_MCFORMATS") &&
  544. (fmt=alGetEnumValue("AL_FORMAT_71CHN32")) != AL_NONE && fmt != -1)
  545. {
  546. mDstChanLayout = mCodecCtx->channel_layout;
  547. mFrameSize *= 8;
  548. mFormat = fmt;
  549. }
  550. if((mCodecCtx->channel_layout == AV_CH_LAYOUT_5POINT1 ||
  551. mCodecCtx->channel_layout == AV_CH_LAYOUT_5POINT1_BACK) &&
  552. alIsExtensionPresent("AL_EXT_MCFORMATS") &&
  553. (fmt=alGetEnumValue("AL_FORMAT_51CHN32")) != AL_NONE && fmt != -1)
  554. {
  555. mDstChanLayout = mCodecCtx->channel_layout;
  556. mFrameSize *= 6;
  557. mFormat = fmt;
  558. }
  559. if(mCodecCtx->channel_layout == AV_CH_LAYOUT_MONO)
  560. {
  561. mDstChanLayout = mCodecCtx->channel_layout;
  562. mFrameSize *= 1;
  563. mFormat = AL_FORMAT_MONO_FLOAT32;
  564. }
  565. if(!mDstChanLayout)
  566. {
  567. mDstChanLayout = AV_CH_LAYOUT_STEREO;
  568. mFrameSize *= 2;
  569. mFormat = AL_FORMAT_STEREO_FLOAT32;
  570. }
  571. }
  572. if(!mDstChanLayout)
  573. {
  574. mDstSampleFmt = AV_SAMPLE_FMT_S16;
  575. mFrameSize = 2;
  576. if(mCodecCtx->channel_layout == AV_CH_LAYOUT_7POINT1 &&
  577. alIsExtensionPresent("AL_EXT_MCFORMATS") &&
  578. (fmt=alGetEnumValue("AL_FORMAT_71CHN16")) != AL_NONE && fmt != -1)
  579. {
  580. mDstChanLayout = mCodecCtx->channel_layout;
  581. mFrameSize *= 8;
  582. mFormat = fmt;
  583. }
  584. if((mCodecCtx->channel_layout == AV_CH_LAYOUT_5POINT1 ||
  585. mCodecCtx->channel_layout == AV_CH_LAYOUT_5POINT1_BACK) &&
  586. alIsExtensionPresent("AL_EXT_MCFORMATS") &&
  587. (fmt=alGetEnumValue("AL_FORMAT_51CHN16")) != AL_NONE && fmt != -1)
  588. {
  589. mDstChanLayout = mCodecCtx->channel_layout;
  590. mFrameSize *= 6;
  591. mFormat = fmt;
  592. }
  593. if(mCodecCtx->channel_layout == AV_CH_LAYOUT_MONO)
  594. {
  595. mDstChanLayout = mCodecCtx->channel_layout;
  596. mFrameSize *= 1;
  597. mFormat = AL_FORMAT_MONO16;
  598. }
  599. if(!mDstChanLayout)
  600. {
  601. mDstChanLayout = AV_CH_LAYOUT_STEREO;
  602. mFrameSize *= 2;
  603. mFormat = AL_FORMAT_STEREO16;
  604. }
  605. }
  606. ALsizei buffer_len = mCodecCtx->sample_rate * AUDIO_BUFFER_TIME / 1000 *
  607. mFrameSize;
  608. void *samples = av_malloc(buffer_len);
  609. mSamples = nullptr;
  610. mSamplesMax = 0;
  611. mSamplesPos = 0;
  612. mSamplesLen = 0;
  613. if(!(mDecodedFrame=av_frame_alloc()))
  614. {
  615. std::cerr<< "Failed to allocate audio frame" <<std::endl;
  616. goto finish;
  617. }
  618. mSwresCtx = swr_alloc_set_opts(nullptr,
  619. mDstChanLayout, mDstSampleFmt, mCodecCtx->sample_rate,
  620. mCodecCtx->channel_layout ? mCodecCtx->channel_layout :
  621. (uint64_t)av_get_default_channel_layout(mCodecCtx->channels),
  622. mCodecCtx->sample_fmt, mCodecCtx->sample_rate,
  623. 0, nullptr
  624. );
  625. if(!mSwresCtx || swr_init(mSwresCtx) != 0)
  626. {
  627. std::cerr<< "Failed to initialize audio converter" <<std::endl;
  628. goto finish;
  629. }
  630. alGenBuffers(AUDIO_BUFFER_QUEUE_SIZE, mBuffers);
  631. alGenSources(1, &mSource);
  632. if(do_direct_out)
  633. {
  634. if(!alIsExtensionPresent("AL_SOFT_direct_channels"))
  635. std::cerr<< "AL_SOFT_direct_channels not supported for direct output" <<std::endl;
  636. else
  637. {
  638. alSourcei(mSource, AL_DIRECT_CHANNELS_SOFT, AL_TRUE);
  639. std::cout<< "Direct out enabled" <<std::endl;
  640. }
  641. }
  642. while(alGetError() == AL_NO_ERROR && !mMovie->mQuit.load())
  643. {
  644. /* First remove any processed buffers. */
  645. ALint processed;
  646. alGetSourcei(mSource, AL_BUFFERS_PROCESSED, &processed);
  647. if(processed > 0)
  648. {
  649. std::array<ALuint,AUDIO_BUFFER_QUEUE_SIZE> tmp;
  650. alSourceUnqueueBuffers(mSource, processed, tmp.data());
  651. }
  652. /* Refill the buffer queue. */
  653. ALint queued;
  654. alGetSourcei(mSource, AL_BUFFERS_QUEUED, &queued);
  655. while(queued < AUDIO_BUFFER_QUEUE_SIZE)
  656. {
  657. int audio_size;
  658. /* Read the next chunk of data, fill the buffer, and queue it on
  659. * the source */
  660. audio_size = readAudio(reinterpret_cast<uint8_t*>(samples), buffer_len);
  661. if(audio_size <= 0) break;
  662. ALuint bufid = mBuffers[mBufferIdx++];
  663. mBufferIdx %= AUDIO_BUFFER_QUEUE_SIZE;
  664. alBufferData(bufid, mFormat, samples, audio_size, mCodecCtx->sample_rate);
  665. alSourceQueueBuffers(mSource, 1, &bufid);
  666. queued++;
  667. }
  668. if(queued == 0)
  669. break;
  670. /* Check that the source is playing. */
  671. ALint state;
  672. alGetSourcei(mSource, AL_SOURCE_STATE, &state);
  673. if(state == AL_STOPPED)
  674. {
  675. /* AL_STOPPED means there was an underrun. Rewind the source to get
  676. * it back into an AL_INITIAL state.
  677. */
  678. alSourceRewind(mSource);
  679. continue;
  680. }
  681. lock.unlock();
  682. /* (re)start the source if needed, and wait for a buffer to finish */
  683. if(state != AL_PLAYING && state != AL_PAUSED)
  684. alSourcePlay(mSource);
  685. SDL_Delay(AUDIO_BUFFER_TIME / 3);
  686. lock.lock();
  687. }
  688. finish:
  689. alSourceRewind(mSource);
  690. alSourcei(mSource, AL_BUFFER, 0);
  691. av_frame_free(&mDecodedFrame);
  692. swr_free(&mSwresCtx);
  693. av_freep(&samples);
  694. return 0;
  695. }
  696. double VideoState::getClock()
  697. {
  698. double delta = (av_gettime() - mCurrentPtsTime) / 1000000.0;
  699. return mCurrentPts + delta;
  700. }
  701. Uint32 SDLCALL VideoState::sdl_refresh_timer_cb(Uint32 /*interval*/, void *opaque)
  702. {
  703. SDL_Event evt{};
  704. evt.user.type = FF_REFRESH_EVENT;
  705. evt.user.data1 = opaque;
  706. SDL_PushEvent(&evt);
  707. return 0; /* 0 means stop timer */
  708. }
  709. /* Schedules an FF_REFRESH_EVENT event to occur in 'delay' ms. */
  710. void VideoState::schedRefresh(int delay)
  711. {
  712. SDL_AddTimer(delay, sdl_refresh_timer_cb, this);
  713. }
  714. /* Called by VideoState::refreshTimer to display the next video frame. */
  715. void VideoState::display(SDL_Window *screen, SDL_Renderer *renderer)
  716. {
  717. Picture *vp = &mPictQ[mPictQRead];
  718. if(!vp->mImage)
  719. return;
  720. float aspect_ratio;
  721. int win_w, win_h;
  722. int w, h, x, y;
  723. if(mCodecCtx->sample_aspect_ratio.num == 0)
  724. aspect_ratio = 0.0f;
  725. else
  726. {
  727. aspect_ratio = av_q2d(mCodecCtx->sample_aspect_ratio) * mCodecCtx->width /
  728. mCodecCtx->height;
  729. }
  730. if(aspect_ratio <= 0.0f)
  731. aspect_ratio = (float)mCodecCtx->width / (float)mCodecCtx->height;
  732. SDL_GetWindowSize(screen, &win_w, &win_h);
  733. h = win_h;
  734. w = ((int)rint(h * aspect_ratio) + 3) & ~3;
  735. if(w > win_w)
  736. {
  737. w = win_w;
  738. h = ((int)rint(w / aspect_ratio) + 3) & ~3;
  739. }
  740. x = (win_w - w) / 2;
  741. y = (win_h - h) / 2;
  742. SDL_Rect src_rect{ 0, 0, vp->mWidth, vp->mHeight };
  743. SDL_Rect dst_rect{ x, y, w, h };
  744. SDL_RenderCopy(renderer, vp->mImage, &src_rect, &dst_rect);
  745. SDL_RenderPresent(renderer);
  746. }
  747. /* FF_REFRESH_EVENT handler called on the main thread where the SDL_Renderer
  748. * was created. It handles the display of the next decoded video frame (if not
  749. * falling behind), and sets up the timer for the following video frame.
  750. */
  751. void VideoState::refreshTimer(SDL_Window *screen, SDL_Renderer *renderer)
  752. {
  753. if(!mStream)
  754. {
  755. if(mEOS)
  756. {
  757. mFinalUpdate = true;
  758. std::unique_lock<std::mutex>(mPictQMutex).unlock();
  759. mPictQCond.notify_all();
  760. return;
  761. }
  762. schedRefresh(100);
  763. return;
  764. }
  765. std::unique_lock<std::mutex> lock(mPictQMutex);
  766. retry:
  767. if(mPictQSize == 0)
  768. {
  769. if(mEOS)
  770. mFinalUpdate = true;
  771. else
  772. schedRefresh(1);
  773. lock.unlock();
  774. mPictQCond.notify_all();
  775. return;
  776. }
  777. Picture *vp = &mPictQ[mPictQRead];
  778. mCurrentPts = vp->mPts;
  779. mCurrentPtsTime = av_gettime();
  780. /* Get delay using the frame pts and the pts from last frame. */
  781. double delay = vp->mPts - mFrameLastPts;
  782. if(delay <= 0 || delay >= 1.0)
  783. {
  784. /* If incorrect delay, use previous one. */
  785. delay = mFrameLastDelay;
  786. }
  787. /* Save for next frame. */
  788. mFrameLastDelay = delay;
  789. mFrameLastPts = vp->mPts;
  790. /* Update delay to sync to clock if not master source. */
  791. if(mMovie->mAVSyncType != AV_SYNC_VIDEO_MASTER)
  792. {
  793. double ref_clock = mMovie->getMasterClock();
  794. double diff = vp->mPts - ref_clock;
  795. /* Skip or repeat the frame. Take delay into account. */
  796. double sync_threshold = std::min(delay, AV_SYNC_THRESHOLD);
  797. if(fabs(diff) < AV_NOSYNC_THRESHOLD)
  798. {
  799. if(diff <= -sync_threshold)
  800. delay = 0;
  801. else if(diff >= sync_threshold)
  802. delay *= 2.0;
  803. }
  804. }
  805. mFrameTimer += delay;
  806. /* Compute the REAL delay. */
  807. double actual_delay = mFrameTimer - (av_gettime() / 1000000.0);
  808. if(!(actual_delay >= 0.010))
  809. {
  810. /* We don't have time to handle this picture, just skip to the next one. */
  811. mPictQRead = (mPictQRead+1)%mPictQ.size();
  812. mPictQSize--;
  813. goto retry;
  814. }
  815. schedRefresh((int)(actual_delay*1000.0 + 0.5));
  816. /* Show the picture! */
  817. display(screen, renderer);
  818. /* Update queue for next picture. */
  819. mPictQRead = (mPictQRead+1)%mPictQ.size();
  820. mPictQSize--;
  821. lock.unlock();
  822. mPictQCond.notify_all();
  823. }
  824. /* FF_UPDATE_EVENT handler, updates the picture's texture. It's called on the
  825. * main thread where the renderer was created.
  826. */
  827. void VideoState::updatePicture(SDL_Window *screen, SDL_Renderer *renderer)
  828. {
  829. Picture *vp = &mPictQ[mPictQWrite];
  830. bool fmt_updated = false;
  831. /* allocate or resize the buffer! */
  832. if(!vp->mImage || vp->mWidth != mCodecCtx->width || vp->mHeight != mCodecCtx->height)
  833. {
  834. fmt_updated = true;
  835. if(vp->mImage)
  836. SDL_DestroyTexture(vp->mImage);
  837. vp->mImage = SDL_CreateTexture(
  838. renderer, SDL_PIXELFORMAT_IYUV, SDL_TEXTUREACCESS_STREAMING,
  839. mCodecCtx->coded_width, mCodecCtx->coded_height
  840. );
  841. if(!vp->mImage)
  842. std::cerr<< "Failed to create IYUV texture!" <<std::endl;
  843. vp->mWidth = mCodecCtx->width;
  844. vp->mHeight = mCodecCtx->height;
  845. if(mFirstUpdate && vp->mWidth > 0 && vp->mHeight > 0)
  846. {
  847. /* For the first update, set the window size to the video size. */
  848. mFirstUpdate = false;
  849. int w = vp->mWidth;
  850. int h = vp->mHeight;
  851. if(mCodecCtx->sample_aspect_ratio.den != 0)
  852. {
  853. double aspect_ratio = av_q2d(mCodecCtx->sample_aspect_ratio);
  854. if(aspect_ratio >= 1.0)
  855. w = (int)(w*aspect_ratio + 0.5);
  856. else if(aspect_ratio > 0.0)
  857. h = (int)(h/aspect_ratio + 0.5);
  858. }
  859. SDL_SetWindowSize(screen, w, h);
  860. }
  861. }
  862. if(vp->mImage)
  863. {
  864. AVFrame *frame = mDecodedFrame;
  865. void *pixels = nullptr;
  866. int pitch = 0;
  867. if(mCodecCtx->pix_fmt == AV_PIX_FMT_YUV420P)
  868. SDL_UpdateYUVTexture(vp->mImage, nullptr,
  869. frame->data[0], frame->linesize[0],
  870. frame->data[1], frame->linesize[1],
  871. frame->data[2], frame->linesize[2]
  872. );
  873. else if(SDL_LockTexture(vp->mImage, nullptr, &pixels, &pitch) != 0)
  874. std::cerr<< "Failed to lock texture" <<std::endl;
  875. else
  876. {
  877. // Convert the image into YUV format that SDL uses
  878. int coded_w = mCodecCtx->coded_width;
  879. int coded_h = mCodecCtx->coded_height;
  880. int w = mCodecCtx->width;
  881. int h = mCodecCtx->height;
  882. if(!mSwscaleCtx || fmt_updated)
  883. {
  884. sws_freeContext(mSwscaleCtx);
  885. mSwscaleCtx = sws_getContext(
  886. w, h, mCodecCtx->pix_fmt,
  887. w, h, AV_PIX_FMT_YUV420P, 0,
  888. nullptr, nullptr, nullptr
  889. );
  890. }
  891. /* point pict at the queue */
  892. uint8_t *pict_data[3];
  893. pict_data[0] = reinterpret_cast<uint8_t*>(pixels);
  894. pict_data[1] = pict_data[0] + coded_w*coded_h;
  895. pict_data[2] = pict_data[1] + coded_w*coded_h/4;
  896. int pict_linesize[3];
  897. pict_linesize[0] = pitch;
  898. pict_linesize[1] = pitch / 2;
  899. pict_linesize[2] = pitch / 2;
  900. sws_scale(mSwscaleCtx, (const uint8_t**)frame->data,
  901. frame->linesize, 0, h, pict_data, pict_linesize);
  902. SDL_UnlockTexture(vp->mImage);
  903. }
  904. }
  905. std::unique_lock<std::mutex> lock(mPictQMutex);
  906. vp->mUpdated = true;
  907. lock.unlock();
  908. mPictQCond.notify_one();
  909. }
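/* Called from the video thread: waits for space in the picture queue, asks
 * the main thread (via FF_UPDATE_EVENT) to upload the frame into the
 * picture's texture, then stores the pts and advances the write index. */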
  910. int VideoState::queuePicture(double pts)
  911. {
  912. /* Wait until we have space for a new pic */
  913. std::unique_lock<std::mutex> lock(mPictQMutex);
  914. while(mPictQSize >= mPictQ.size() && !mMovie->mQuit.load())
  915. mPictQCond.wait(lock);
  916. lock.unlock();
  917. if(mMovie->mQuit.load())
  918. return -1;
  919. Picture *vp = &mPictQ[mPictQWrite];
  920. /* We have to create/update the picture in the main thread */
  921. vp->mUpdated = false;
  922. SDL_Event evt{};
  923. evt.user.type = FF_UPDATE_EVENT;
  924. evt.user.data1 = this;
  925. SDL_PushEvent(&evt);
  926. /* Wait until the picture is updated. */
  927. lock.lock();
  928. while(!vp->mUpdated && !mMovie->mQuit.load())
  929. mPictQCond.wait(lock);
  930. if(mMovie->mQuit.load())
  931. return -1;
  932. vp->mPts = pts;
  933. mPictQWrite = (mPictQWrite+1)%mPictQ.size();
  934. mPictQSize++;
  935. lock.unlock();
  936. return 0;
  937. }
  938. double VideoState::synchronize(double pts)
  939. {
  940. double frame_delay;
  941. if(pts == 0.0) /* if we aren't given a pts, set it to the clock */
  942. pts = mClock;
  943. else /* if we have pts, set video clock to it */
  944. mClock = pts;
  945. /* update the video clock */
  946. frame_delay = av_q2d(mCodecCtx->time_base);
  947. /* if we are repeating a frame, adjust clock accordingly */
  948. frame_delay += mDecodedFrame->repeat_pict * (frame_delay * 0.5);
  949. mClock += frame_delay;
  950. return pts;
  951. }
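/* Video thread entry point: feeds packets to the decoder, timestamps each
 * decoded frame via synchronize(), and hands it to queuePicture() until the
 * stream ends or playback is aborted. */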
  952. int VideoState::handler()
  953. {
  954. mDecodedFrame = av_frame_alloc();
  955. while(!mMovie->mQuit)
  956. {
  957. while(!mMovie->mQuit)
  958. {
  959. AVPacket packet{};
  960. if(mQueue.peek(&packet, mMovie->mQuit) <= 0)
  961. goto finish;
  962. int ret = avcodec_send_packet(mCodecCtx, &packet);
  963. if(ret != AVERROR(EAGAIN))
  964. {
  965. if(ret < 0)
  966. std::cerr<< "Failed to send encoded packet: 0x"<<std::hex<<ret<<std::dec <<std::endl;
  967. mQueue.pop();
  968. }
  969. av_packet_unref(&packet);
  970. if(ret == 0 || ret == AVERROR(EAGAIN))
  971. break;
  972. }
  973. /* Decode video frame */
  974. int ret = avcodec_receive_frame(mCodecCtx, mDecodedFrame);
  975. if(ret == AVERROR(EAGAIN))
  976. continue;
  977. if(ret < 0)
  978. {
  979. std::cerr<< "Failed to decode frame: "<<ret <<std::endl;
  980. break;
  981. }
  982. double pts = synchronize(
  983. av_q2d(mStream->time_base) * av_frame_get_best_effort_timestamp(mDecodedFrame)
  984. );
  985. if(queuePicture(pts) < 0)
  986. break;
  987. av_frame_unref(mDecodedFrame);
  988. }
  989. finish:
  990. mEOS = true;
  991. av_frame_free(&mDecodedFrame);
  992. std::unique_lock<std::mutex> lock(mPictQMutex);
  993. if(mMovie->mQuit)
  994. {
  995. mPictQRead = 0;
  996. mPictQWrite = 0;
  997. mPictQSize = 0;
  998. }
  999. while(!mFinalUpdate)
  1000. mPictQCond.wait(lock);
  1001. return 0;
  1002. }
  1003. int MovieState::decode_interrupt_cb(void *ctx)
  1004. {
  1005. return reinterpret_cast<MovieState*>(ctx)->mQuit;
  1006. }
  1007. bool MovieState::prepare()
  1008. {
  1009. mFormatCtx = avformat_alloc_context();
  1010. mFormatCtx->interrupt_callback.callback = decode_interrupt_cb;
  1011. mFormatCtx->interrupt_callback.opaque = this;
  1012. if(avio_open2(&mFormatCtx->pb, mFilename.c_str(), AVIO_FLAG_READ,
  1013. &mFormatCtx->interrupt_callback, nullptr))
  1014. {
  1015. std::cerr<< "Failed to open "<<mFilename <<std::endl;
  1016. return false;
  1017. }
  1018. /* Open movie file */
  1019. if(avformat_open_input(&mFormatCtx, mFilename.c_str(), nullptr, nullptr) != 0)
  1020. {
  1021. std::cerr<< "Failed to open "<<mFilename <<std::endl;
  1022. return false;
  1023. }
  1024. /* Retrieve stream information */
  1025. if(avformat_find_stream_info(mFormatCtx, nullptr) < 0)
  1026. {
  1027. std::cerr<< mFilename<<": failed to find stream info" <<std::endl;
  1028. return false;
  1029. }
  1030. mVideo.schedRefresh(40);
  1031. mParseThread = std::thread(std::mem_fn(&MovieState::parse_handler), this);
  1032. return true;
  1033. }
  1034. void MovieState::setTitle(SDL_Window *window)
  1035. {
  1036. auto pos1 = mFilename.rfind('/');
  1037. auto pos2 = mFilename.rfind('\\');
  1038. auto fpos = ((pos1 == std::string::npos) ? pos2 :
  1039. (pos2 == std::string::npos) ? pos1 :
  1040. std::max(pos1, pos2)) + 1;
  1041. SDL_SetWindowTitle(window, (mFilename.substr(fpos)+" - "+AppName).c_str());
  1042. }
  1043. double MovieState::getClock()
  1044. {
  1045. return (av_gettime()-mExternalClockBase) / 1000000.0;
  1046. }
  1047. double MovieState::getMasterClock()
  1048. {
  1049. if(mAVSyncType == AV_SYNC_VIDEO_MASTER)
  1050. return mVideo.getClock();
  1051. if(mAVSyncType == AV_SYNC_AUDIO_MASTER)
  1052. return mAudio.getClock();
  1053. return getClock();
  1054. }
  1055. int MovieState::streamComponentOpen(int stream_index)
  1056. {
  1057. if(stream_index < 0 || (unsigned int)stream_index >= mFormatCtx->nb_streams)
  1058. return -1;
  1059. /* Get a pointer to the codec context for the stream, and open the
  1060. * associated codec.
  1061. */
  1062. AVCodecContext *avctx = avcodec_alloc_context3(nullptr);
  1063. if(!avctx) return -1;
  1064. if(avcodec_parameters_to_context(avctx, mFormatCtx->streams[stream_index]->codecpar))
  1065. {
  1066. avcodec_free_context(&avctx);
  1067. return -1;
  1068. }
  1069. AVCodec *codec = avcodec_find_decoder(avctx->codec_id);
  1070. if(!codec || avcodec_open2(avctx, codec, nullptr) < 0)
  1071. {
  1072. std::cerr<< "Unsupported codec: "<<avcodec_get_name(avctx->codec_id)
  1073. << " (0x"<<std::hex<<avctx->codec_id<<std::dec<<")" <<std::endl;
  1074. avcodec_free_context(&avctx);
  1075. return -1;
  1076. }
  1077. /* Initialize and start the media type handler */
  1078. switch(avctx->codec_type)
  1079. {
  1080. case AVMEDIA_TYPE_AUDIO:
  1081. mAudioStream = stream_index;
  1082. mAudio.mStream = mFormatCtx->streams[stream_index];
  1083. mAudio.mCodecCtx = avctx;
  1084. /* Averaging filter for audio sync */
  1085. mAudio.mDiff.AvgCoeff = exp(log(0.01) / AUDIO_DIFF_AVG_NB);
  1086. /* Correct audio only if larger error than this */
  1087. mAudio.mDiff.Threshold = 0.050/* 50 ms */;
  1088. mAudioThread = std::thread(std::mem_fn(&AudioState::handler), &mAudio);
  1089. break;
  1090. case AVMEDIA_TYPE_VIDEO:
  1091. mVideoStream = stream_index;
  1092. mVideo.mStream = mFormatCtx->streams[stream_index];
  1093. mVideo.mCodecCtx = avctx;
  1094. mVideo.mCurrentPtsTime = av_gettime();
  1095. mVideo.mFrameTimer = (double)mVideo.mCurrentPtsTime / 1000000.0;
  1096. mVideo.mFrameLastDelay = 40e-3;
  1097. mVideoThread = std::thread(std::mem_fn(&VideoState::handler), &mVideo);
  1098. break;
  1099. default:
  1100. avcodec_free_context(&avctx);
  1101. break;
  1102. }
  1103. return 0;
  1104. }
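/* Demuxer thread: picks the first audio and video streams, starts their
 * handler threads, and feeds their packet queues until EOF or quit, keeping
 * at most MAX_QUEUE_SIZE bytes buffered. Finishes by waiting for the decoder
 * threads and posting FF_MOVIE_DONE_EVENT. */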
  1105. int MovieState::parse_handler()
  1106. {
  1107. int video_index = -1;
  1108. int audio_index = -1;
  1109. mVideoStream = -1;
  1110. mAudioStream = -1;
  1111. /* Dump information about file onto standard error */
  1112. av_dump_format(mFormatCtx, 0, mFilename.c_str(), 0);
  1113. /* Find the first video and audio streams */
  1114. for(unsigned int i = 0;i < mFormatCtx->nb_streams;i++)
  1115. {
  1116. if(mFormatCtx->streams[i]->codecpar->codec_type == AVMEDIA_TYPE_VIDEO && video_index < 0)
  1117. video_index = i;
  1118. else if(mFormatCtx->streams[i]->codecpar->codec_type == AVMEDIA_TYPE_AUDIO && audio_index < 0)
  1119. audio_index = i;
  1120. }
  1121. /* Start the external clock in 50ms, to give the audio and video
  1122. * components time to start without needing to skip ahead.
  1123. */
  1124. mExternalClockBase = av_gettime() + 50000;
  1125. if(audio_index >= 0)
  1126. streamComponentOpen(audio_index);
  1127. if(video_index >= 0)
  1128. streamComponentOpen(video_index);
  1129. if(mVideoStream < 0 && mAudioStream < 0)
  1130. {
  1131. std::cerr<< mFilename<<": could not open codecs" <<std::endl;
  1132. mQuit = true;
  1133. }
  1134. /* Main packet handling loop */
  1135. while(!mQuit.load())
  1136. {
  1137. if(mAudio.mQueue.mTotalSize + mVideo.mQueue.mTotalSize >= MAX_QUEUE_SIZE)
  1138. {
  1139. std::this_thread::sleep_for(std::chrono::milliseconds(10));
  1140. continue;
  1141. }
  1142. AVPacket packet;
  1143. if(av_read_frame(mFormatCtx, &packet) < 0)
  1144. break;
  1145. /* Copy the packet into the queue it's meant for. */
  1146. if(packet.stream_index == mVideoStream)
  1147. mVideo.mQueue.put(&packet);
  1148. else if(packet.stream_index == mAudioStream)
  1149. mAudio.mQueue.put(&packet);
  1150. av_packet_unref(&packet);
  1151. }
  1152. mVideo.mQueue.finish();
  1153. mAudio.mQueue.finish();
  1154. /* all done - wait for it */
  1155. if(mVideoThread.joinable())
  1156. mVideoThread.join();
  1157. if(mAudioThread.joinable())
  1158. mAudioThread.join();
  1159. mVideo.mEOS = true;
  1160. std::unique_lock<std::mutex> lock(mVideo.mPictQMutex);
  1161. while(!mVideo.mFinalUpdate)
  1162. mVideo.mPictQCond.wait(lock);
  1163. lock.unlock();
  1164. SDL_Event evt{};
  1165. evt.user.type = FF_MOVIE_DONE_EVENT;
  1166. SDL_PushEvent(&evt);
  1167. return 0;
  1168. }
  1169. } // namespace
  1170. int main(int argc, char *argv[])
  1171. {
  1172. std::unique_ptr<MovieState> movState;
  1173. if(argc < 2)
  1174. {
  1175. std::cerr<< "Usage: "<<argv[0]<<" [-device <device name>] [-direct] <files...>" <<std::endl;
  1176. return 1;
  1177. }
  1178. /* Register all formats and codecs */
  1179. av_register_all();
  1180. /* Initialize networking protocols */
  1181. avformat_network_init();
  1182. if(SDL_Init(SDL_INIT_VIDEO | SDL_INIT_TIMER))
  1183. {
  1184. std::cerr<< "Could not initialize SDL - "<<SDL_GetError() <<std::endl;
  1185. return 1;
  1186. }
  1187. /* Make a window to put our video */
  1188. SDL_Window *screen = SDL_CreateWindow(AppName.c_str(), 0, 0, 640, 480, SDL_WINDOW_RESIZABLE);
  1189. if(!screen)
  1190. {
  1191. std::cerr<< "SDL: could not set video mode - exiting" <<std::endl;
  1192. return 1;
  1193. }
  1194. /* Make a renderer to handle the texture image surface and rendering. */
  1195. SDL_Renderer *renderer = SDL_CreateRenderer(screen, -1, SDL_RENDERER_ACCELERATED);
  1196. if(renderer)
  1197. {
  1198. SDL_RendererInfo rinf{};
  1199. bool ok = false;
  1200. /* Make sure the renderer supports IYUV textures. If not, fallback to a
  1201. * software renderer. */
  1202. if(SDL_GetRendererInfo(renderer, &rinf) == 0)
  1203. {
  1204. for(Uint32 i = 0;!ok && i < rinf.num_texture_formats;i++)
  1205. ok = (rinf.texture_formats[i] == SDL_PIXELFORMAT_IYUV);
  1206. }
  1207. if(!ok)
  1208. {
  1209. std::cerr<< "IYUV pixelformat textures not supported on renderer "<<rinf.name <<std::endl;
  1210. SDL_DestroyRenderer(renderer);
  1211. renderer = nullptr;
  1212. }
  1213. }
  1214. if(!renderer)
  1215. renderer = SDL_CreateRenderer(screen, -1, SDL_RENDERER_SOFTWARE);
  1216. if(!renderer)
  1217. {
  1218. std::cerr<< "SDL: could not create renderer - exiting" <<std::endl;
  1219. return 1;
  1220. }
  1221. SDL_SetRenderDrawColor(renderer, 0, 0, 0, 255);
  1222. SDL_RenderFillRect(renderer, nullptr);
  1223. SDL_RenderPresent(renderer);
  1224. /* Open an audio device */
  1225. int fileidx = 1;
  1226. ALCdevice *device = [argc,argv,&fileidx]() -> ALCdevice*
  1227. {
  1228. ALCdevice *dev = nullptr;
  1229. if(argc > 3 && strcmp(argv[1], "-device") == 0)
  1230. {
  1231. fileidx = 3;
  1232. dev = alcOpenDevice(argv[2]);
  1233. if(dev) return dev;
  1234. std::cerr<< "Failed to open \""<<argv[2]<<"\" - trying default" <<std::endl;
  1235. }
  1236. return alcOpenDevice(nullptr);
  1237. }();
  1238. ALCcontext *context = alcCreateContext(device, nullptr);
  1239. if(!context || alcMakeContextCurrent(context) == ALC_FALSE)
  1240. {
  1241. std::cerr<< "Failed to set up audio device" <<std::endl;
  1242. if(context)
  1243. alcDestroyContext(context);
  1244. return 1;
  1245. }
  1246. const ALCchar *name = nullptr;
  1247. if(alcIsExtensionPresent(device, "ALC_ENUMERATE_ALL_EXT"))
  1248. name = alcGetString(device, ALC_ALL_DEVICES_SPECIFIER);
  1249. if(!name || alcGetError(device) != ALC_NO_ERROR)
  1250. name = alcGetString(device, ALC_DEVICE_SPECIFIER);
  1251. std::cout<< "Opened \""<<name<<"\"" <<std::endl;
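/* A minimal sketch of wiring up AL_SOFT_source_latency: without something
 * like this, has_latency_check stays false and alGetSourcedvSOFT is never
 * loaded, so AudioState::getClock() always falls back to the plain
 * AL_SAMPLE_OFFSET path. Assumes the extension declarations from alext.h
 * (already included above). */
if(alIsExtensionPresent("AL_SOFT_source_latency"))
{
    std::cout<< "Found AL_SOFT_source_latency" <<std::endl;
    alGetSourcedvSOFT = reinterpret_cast<LPALGETSOURCEDVSOFT>(
        alGetProcAddress("alGetSourcedvSOFT")
    );
    has_latency_check = true;
}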
  1252. if(fileidx < argc && strcmp(argv[fileidx], "-direct") == 0)
  1253. {
  1254. ++fileidx;
  1255. do_direct_out = true;
  1256. }
  1257. while(fileidx < argc && !movState)
  1258. {
  1259. movState = std::unique_ptr<MovieState>(new MovieState(argv[fileidx++]));
  1260. if(!movState->prepare()) movState = nullptr;
  1261. }
  1262. if(!movState)
  1263. {
  1264. std::cerr<< "Could not start a video" <<std::endl;
  1265. return 1;
  1266. }
  1267. movState->setTitle(screen);
  1268. /* Default to going to the next movie at the end of one. */
  1269. enum class EomAction {
  1270. Next, Quit
  1271. } eom_action = EomAction::Next;
  1272. SDL_Event event;
  1273. while(SDL_WaitEvent(&event) == 1)
  1274. {
  1275. switch(event.type)
  1276. {
  1277. case SDL_KEYDOWN:
  1278. switch(event.key.keysym.sym)
  1279. {
  1280. case SDLK_ESCAPE:
  1281. movState->mQuit = true;
  1282. eom_action = EomAction::Quit;
  1283. break;
  1284. case SDLK_n:
  1285. movState->mQuit = true;
  1286. eom_action = EomAction::Next;
  1287. break;
  1288. default:
  1289. break;
  1290. }
  1291. break;
  1292. case SDL_WINDOWEVENT:
  1293. switch(event.window.event)
  1294. {
  1295. case SDL_WINDOWEVENT_RESIZED:
  1296. SDL_SetRenderDrawColor(renderer, 0, 0, 0, 255);
  1297. SDL_RenderFillRect(renderer, nullptr);
  1298. break;
  1299. default:
  1300. break;
  1301. }
  1302. break;
  1303. case SDL_QUIT:
  1304. movState->mQuit = true;
  1305. eom_action = EomAction::Quit;
  1306. break;
  1307. case FF_UPDATE_EVENT:
  1308. reinterpret_cast<VideoState*>(event.user.data1)->updatePicture(
  1309. screen, renderer
  1310. );
  1311. break;
  1312. case FF_REFRESH_EVENT:
  1313. reinterpret_cast<VideoState*>(event.user.data1)->refreshTimer(
  1314. screen, renderer
  1315. );
  1316. break;
  1317. case FF_MOVIE_DONE_EVENT:
  1318. if(eom_action != EomAction::Quit)
  1319. {
  1320. movState = nullptr;
  1321. while(fileidx < argc && !movState)
  1322. {
  1323. movState = std::unique_ptr<MovieState>(new MovieState(argv[fileidx++]));
  1324. if(!movState->prepare()) movState = nullptr;
  1325. }
  1326. if(movState)
  1327. {
  1328. movState->setTitle(screen);
  1329. break;
  1330. }
  1331. }
  1332. /* Nothing more to play. Shut everything down and quit. */
  1333. movState = nullptr;
  1334. alcMakeContextCurrent(nullptr);
  1335. alcDestroyContext(context);
  1336. alcCloseDevice(device);
  1337. SDL_DestroyRenderer(renderer);
  1338. renderer = nullptr;
  1339. SDL_DestroyWindow(screen);
  1340. screen = nullptr;
  1341. SDL_Quit();
  1342. exit(0);
  1343. default:
  1344. break;
  1345. }
  1346. }
  1347. std::cerr<< "SDL_WaitEvent error - "<<SDL_GetError() <<std::endl;
  1348. return 1;
  1349. }