alffplay.cpp

  1. /*
  2. * An example showing how to play an audio stream sync'd to video, using ffmpeg and OpenAL.
  3. *
  4. * Requires C++11.
  5. */
  6. #include <condition_variable>
  7. #include <functional>
  8. #include <algorithm>
  9. #include <iostream>
  10. #include <iomanip>
  11. #include <cstring>
  12. #include <limits>
  13. #include <thread>
  14. #include <chrono>
  15. #include <atomic>
  16. #include <vector>
  17. #include <mutex>
  18. #include <deque>
  19. #include <array>
  20. #include <cmath>
  21. #include <string>
  22. extern "C" {
  23. #include "libavcodec/avcodec.h"
  24. #include "libavformat/avformat.h"
  25. #include "libavformat/avio.h"
  26. #include "libavutil/time.h"
  27. #include "libavutil/pixfmt.h"
  28. #include "libavutil/avstring.h"
  29. #include "libavutil/channel_layout.h"
  30. #include "libswscale/swscale.h"
  31. #include "libswresample/swresample.h"
  32. }
  33. #include "SDL.h"
  34. #include "AL/alc.h"
  35. #include "AL/al.h"
  36. #include "AL/alext.h"
  37. extern "C" {
  38. #ifndef AL_SOFT_map_buffer
  39. #define AL_SOFT_map_buffer 1
  40. typedef unsigned int ALbitfieldSOFT;
  41. #define AL_MAP_READ_BIT_SOFT 0x00000001
  42. #define AL_MAP_WRITE_BIT_SOFT 0x00000002
  43. #define AL_MAP_PERSISTENT_BIT_SOFT 0x00000004
  44. #define AL_PRESERVE_DATA_BIT_SOFT 0x00000008
  45. typedef void (AL_APIENTRY*LPALBUFFERSTORAGESOFT)(ALuint buffer, ALenum format, const ALvoid *data, ALsizei size, ALsizei freq, ALbitfieldSOFT flags);
  46. typedef void* (AL_APIENTRY*LPALMAPBUFFERSOFT)(ALuint buffer, ALsizei offset, ALsizei length, ALbitfieldSOFT access);
  47. typedef void (AL_APIENTRY*LPALUNMAPBUFFERSOFT)(ALuint buffer);
  48. typedef void (AL_APIENTRY*LPALFLUSHMAPPEDBUFFERSOFT)(ALuint buffer, ALsizei offset, ALsizei length);
  49. #endif
  50. #ifndef AL_SOFT_events
  51. #define AL_SOFT_events 1
  52. #define AL_EVENT_CALLBACK_FUNCTION_SOFT 0x1220
  53. #define AL_EVENT_CALLBACK_USER_PARAM_SOFT 0x1221
  54. #define AL_EVENT_TYPE_BUFFER_COMPLETED_SOFT 0x1222
  55. #define AL_EVENT_TYPE_SOURCE_STATE_CHANGED_SOFT 0x1223
  56. #define AL_EVENT_TYPE_ERROR_SOFT 0x1224
  57. #define AL_EVENT_TYPE_PERFORMANCE_SOFT 0x1225
  58. #define AL_EVENT_TYPE_DEPRECATED_SOFT 0x1226
  59. #define AL_EVENT_TYPE_DISCONNECTED_SOFT 0x1227
  60. typedef void (AL_APIENTRY*ALEVENTPROCSOFT)(ALenum eventType, ALuint object, ALuint param,
  61. ALsizei length, const ALchar *message,
  62. void *userParam);
  63. typedef void (AL_APIENTRY*LPALEVENTCONTROLSOFT)(ALsizei count, const ALenum *types, ALboolean enable);
  64. typedef void (AL_APIENTRY*LPALEVENTCALLBACKSOFT)(ALEVENTPROCSOFT callback, void *userParam);
  65. typedef void* (AL_APIENTRY*LPALGETPOINTERSOFT)(ALenum pname);
  66. typedef void (AL_APIENTRY*LPALGETPOINTERVSOFT)(ALenum pname, void **values);
  67. #endif
  68. }
  69. namespace {
  70. using nanoseconds = std::chrono::nanoseconds;
  71. using microseconds = std::chrono::microseconds;
  72. using milliseconds = std::chrono::milliseconds;
  73. using seconds = std::chrono::seconds;
  74. using seconds_d64 = std::chrono::duration<double>;
  75. const std::string AppName("alffplay");
  76. bool EnableDirectOut = false;
  77. LPALGETSOURCEI64VSOFT alGetSourcei64vSOFT;
  78. LPALCGETINTEGER64VSOFT alcGetInteger64vSOFT;
  79. LPALBUFFERSTORAGESOFT alBufferStorageSOFT;
  80. LPALMAPBUFFERSOFT alMapBufferSOFT;
  81. LPALUNMAPBUFFERSOFT alUnmapBufferSOFT;
  82. LPALEVENTCONTROLSOFT alEventControlSOFT;
  83. LPALEVENTCALLBACKSOFT alEventCallbackSOFT;
  84. const seconds AVNoSyncThreshold(10);
  85. const milliseconds VideoSyncThreshold(10);
  86. #define VIDEO_PICTURE_QUEUE_SIZE 16
  87. const seconds_d64 AudioSyncThreshold(0.03);
  88. const milliseconds AudioSampleCorrectionMax(50);
  89. /* Averaging filter coefficient for audio sync. */
  90. #define AUDIO_DIFF_AVG_NB 20
  91. const double AudioAvgFilterCoeff = std::pow(0.01, 1.0/AUDIO_DIFF_AVG_NB);
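/* Each new diff is blended into the running average with this coefficient
 * (see AudioState::getSync), so an individual measurement decays to 1%
 * influence after AUDIO_DIFF_AVG_NB updates: AudioAvgFilterCoeff^20 == 0.01.
 */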
  92. /* Per-buffer size, in time */
  93. const milliseconds AudioBufferTime(20);
  94. /* Buffer total size, in time (should be divisible by the buffer time) */
  95. const milliseconds AudioBufferTotalTime(800);
  96. #define MAX_QUEUE_SIZE (15 * 1024 * 1024) /* Bytes of compressed data to keep queued */
  97. enum {
  98. FF_UPDATE_EVENT = SDL_USEREVENT,
  99. FF_REFRESH_EVENT,
  100. FF_MOVIE_DONE_EVENT
  101. };
  102. enum class SyncMaster {
  103. Audio,
  104. Video,
  105. External,
  106. Default = External
  107. };
  108. inline microseconds get_avtime()
  109. { return microseconds(av_gettime()); }
  110. /* Define unique_ptrs to auto-cleanup associated ffmpeg objects. */
  111. struct AVIOContextDeleter {
  112. void operator()(AVIOContext *ptr) { avio_closep(&ptr); }
  113. };
  114. using AVIOContextPtr = std::unique_ptr<AVIOContext,AVIOContextDeleter>;
  115. struct AVFormatCtxDeleter {
  116. void operator()(AVFormatContext *ptr) { avformat_close_input(&ptr); }
  117. };
  118. using AVFormatCtxPtr = std::unique_ptr<AVFormatContext,AVFormatCtxDeleter>;
  119. struct AVCodecCtxDeleter {
  120. void operator()(AVCodecContext *ptr) { avcodec_free_context(&ptr); }
  121. };
  122. using AVCodecCtxPtr = std::unique_ptr<AVCodecContext,AVCodecCtxDeleter>;
  123. struct AVFrameDeleter {
  124. void operator()(AVFrame *ptr) { av_frame_free(&ptr); }
  125. };
  126. using AVFramePtr = std::unique_ptr<AVFrame,AVFrameDeleter>;
  127. struct SwrContextDeleter {
  128. void operator()(SwrContext *ptr) { swr_free(&ptr); }
  129. };
  130. using SwrContextPtr = std::unique_ptr<SwrContext,SwrContextDeleter>;
  131. struct SwsContextDeleter {
  132. void operator()(SwsContext *ptr) { sws_freeContext(ptr); }
  133. };
  134. using SwsContextPtr = std::unique_ptr<SwsContext,SwsContextDeleter>;
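/* A simple FIFO of demuxed packets. put() takes a new reference on the
 * packet (so the caller can unref its own copy), and totalSize() tracks the
 * compressed bytes queued so the parser can honor MAX_QUEUE_SIZE.
 */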
  135. class PacketQueue {
  136. std::deque<AVPacket> mPackets;
  137. size_t mTotalSize{0};
  138. public:
  139. ~PacketQueue() { clear(); }
  140. bool empty() const noexcept { return mPackets.empty(); }
  141. size_t totalSize() const noexcept { return mTotalSize; }
  142. void put(const AVPacket *pkt)
  143. {
  144. mPackets.push_back(AVPacket{});
  145. if(av_packet_ref(&mPackets.back(), pkt) != 0)
  146. mPackets.pop_back();
  147. else
  148. mTotalSize += mPackets.back().size;
  149. }
  150. AVPacket *front() noexcept
  151. { return &mPackets.front(); }
  152. void pop()
  153. {
  154. AVPacket *pkt = &mPackets.front();
  155. mTotalSize -= pkt->size;
  156. av_packet_unref(pkt);
  157. mPackets.pop_front();
  158. }
  159. void clear()
  160. {
  161. for(AVPacket &pkt : mPackets)
  162. av_packet_unref(&pkt);
  163. mPackets.clear();
  164. mTotalSize = 0;
  165. }
  166. };
  167. struct MovieState;
  168. struct AudioState {
  169. MovieState &mMovie;
  170. AVStream *mStream{nullptr};
  171. AVCodecCtxPtr mCodecCtx;
  172. std::mutex mQueueMtx;
  173. std::condition_variable mQueueCond;
  174. /* Used for clock difference average computation */
  175. seconds_d64 mClockDiffAvg{0};
  176. /* Time of the next sample to be buffered */
  177. nanoseconds mCurrentPts{0};
  178. /* Device clock time that the stream started at. */
  179. nanoseconds mDeviceStartTime{nanoseconds::min()};
  180. /* Decompressed sample frame, and swresample context for conversion */
  181. AVFramePtr mDecodedFrame;
  182. SwrContextPtr mSwresCtx;
  183. /* Conversion format, for what gets fed to OpenAL */
  184. int mDstChanLayout{0};
  185. AVSampleFormat mDstSampleFmt{AV_SAMPLE_FMT_NONE};
  186. /* Storage of converted samples */
  187. uint8_t *mSamples{nullptr};
  188. int mSamplesLen{0}; /* In samples */
  189. int mSamplesPos{0};
  190. int mSamplesMax{0};
  191. /* OpenAL format */
  192. ALenum mFormat{AL_NONE};
  193. ALsizei mFrameSize{0};
  194. std::mutex mSrcMutex;
  195. std::condition_variable mSrcCond;
  196. std::atomic_flag mConnected;
  197. ALuint mSource{0};
  198. std::vector<ALuint> mBuffers;
  199. ALsizei mBufferIdx{0};
  200. AudioState(MovieState &movie) : mMovie(movie)
  201. { mConnected.test_and_set(std::memory_order_relaxed); }
  202. ~AudioState()
  203. {
  204. if(mSource)
  205. alDeleteSources(1, &mSource);
  206. if(!mBuffers.empty())
  207. alDeleteBuffers(mBuffers.size(), mBuffers.data());
  208. av_freep(&mSamples);
  209. }
  210. static void AL_APIENTRY EventCallback(ALenum eventType, ALuint object, ALuint param,
  211. ALsizei length, const ALchar *message,
  212. void *userParam);
  213. nanoseconds getClockNoLock();
  214. nanoseconds getClock()
  215. {
  216. std::lock_guard<std::mutex> lock(mSrcMutex);
  217. return getClockNoLock();
  218. }
  219. bool isBufferFilled();
  220. void startPlayback();
  221. int getSync();
  222. int decodeFrame();
  223. bool readAudio(uint8_t *samples, int length);
  224. int handler();
  225. };
  226. struct VideoState {
  227. MovieState &mMovie;
  228. AVStream *mStream{nullptr};
  229. AVCodecCtxPtr mCodecCtx;
  230. std::mutex mQueueMtx;
  231. std::condition_variable mQueueCond;
  232. nanoseconds mClock{0};
  233. nanoseconds mFrameTimer{0};
  234. nanoseconds mFrameLastPts{0};
  235. nanoseconds mFrameLastDelay{0};
  236. nanoseconds mCurrentPts{0};
  237. /* Time (av_gettime) at which mCurrentPts was last updated; used to derive a running video pts. */
  238. microseconds mCurrentPtsTime{0};
  239. /* Decompressed video frame, and swscale context for conversion */
  240. AVFramePtr mDecodedFrame;
  241. SwsContextPtr mSwscaleCtx;
  242. struct Picture {
  243. SDL_Texture *mImage{nullptr};
  244. int mWidth{0}, mHeight{0}; /* Logical image size (actual size may be larger) */
  245. std::atomic<bool> mUpdated{false};
  246. nanoseconds mPts{0};
  247. ~Picture()
  248. {
  249. if(mImage)
  250. SDL_DestroyTexture(mImage);
  251. mImage = nullptr;
  252. }
  253. };
  254. std::array<Picture,VIDEO_PICTURE_QUEUE_SIZE> mPictQ;
  255. size_t mPictQSize{0}, mPictQRead{0}, mPictQWrite{0};
  256. std::mutex mPictQMutex;
  257. std::condition_variable mPictQCond;
  258. bool mFirstUpdate{true};
  259. std::atomic<bool> mEOS{false};
  260. std::atomic<bool> mFinalUpdate{false};
  261. VideoState(MovieState &movie) : mMovie(movie) { }
  262. nanoseconds getClock();
  263. bool isBufferFilled();
  264. static Uint32 SDLCALL sdl_refresh_timer_cb(Uint32 interval, void *opaque);
  265. void schedRefresh(milliseconds delay);
  266. void display(SDL_Window *screen, SDL_Renderer *renderer);
  267. void refreshTimer(SDL_Window *screen, SDL_Renderer *renderer);
  268. void updatePicture(SDL_Window *screen, SDL_Renderer *renderer);
  269. int queuePicture(nanoseconds pts);
  270. int handler();
  271. };
  272. struct MovieState {
  273. AVIOContextPtr mIOContext;
  274. AVFormatCtxPtr mFormatCtx;
  275. SyncMaster mAVSyncType{SyncMaster::Default};
  276. microseconds mClockBase{0};
  277. std::atomic<bool> mPlaying{false};
  278. std::mutex mSendMtx;
  279. std::condition_variable mSendCond;
  280. /* NOTE: false/clear = need data, true/set = no data needed */
  281. std::atomic_flag mSendDataGood;
  282. std::atomic<bool> mQuit{false};
  283. AudioState mAudio;
  284. VideoState mVideo;
  285. std::thread mParseThread;
  286. std::thread mAudioThread;
  287. std::thread mVideoThread;
  288. std::string mFilename;
  289. MovieState(std::string fname)
  290. : mAudio(*this), mVideo(*this), mFilename(std::move(fname))
  291. { }
  292. ~MovieState()
  293. {
  294. mQuit = true;
  295. if(mParseThread.joinable())
  296. mParseThread.join();
  297. }
  298. static int decode_interrupt_cb(void *ctx);
  299. bool prepare();
  300. void setTitle(SDL_Window *window);
  301. nanoseconds getClock();
  302. nanoseconds getMasterClock();
  303. nanoseconds getDuration();
  304. int streamComponentOpen(int stream_index);
  305. int parse_handler();
  306. };
  307. nanoseconds AudioState::getClockNoLock()
  308. {
  309. // The audio clock is the timestamp of the sample currently being heard.
  310. if(alcGetInteger64vSOFT)
  311. {
  312. // If device start time = min, we aren't playing yet.
  313. if(mDeviceStartTime == nanoseconds::min())
  314. return nanoseconds::zero();
  315. // Get the current device clock time and latency.
  316. auto device = alcGetContextsDevice(alcGetCurrentContext());
  317. ALCint64SOFT devtimes[2] = {0,0};
  318. alcGetInteger64vSOFT(device, ALC_DEVICE_CLOCK_LATENCY_SOFT, 2, devtimes);
  319. auto latency = nanoseconds(devtimes[1]);
  320. auto device_time = nanoseconds(devtimes[0]);
  321. // The clock is simply the current device time relative to the recorded
  322. // start time. We can also subtract the latency to get a more accurate
  323. // position of where the audio device actually is in the output stream.
  324. return device_time - mDeviceStartTime - latency;
  325. }
  326. /* The source-based clock is based on 4 components:
  327. * 1 - The timestamp of the next sample to buffer (mCurrentPts)
  328. * 2 - The length of the source's buffer queue
  329. * (AudioBufferTime*AL_BUFFERS_QUEUED)
  330. * 3 - The offset OpenAL is currently at in the source (the first value
  331. * from AL_SAMPLE_OFFSET_LATENCY_SOFT)
  332. * 4 - The latency between OpenAL and the DAC (the second value from
  333. * AL_SAMPLE_OFFSET_LATENCY_SOFT)
  334. *
  335. * Subtracting the length of the source queue from the next sample's
  336. * timestamp gives the timestamp of the sample at the start of the source
  337. * queue. Adding the source offset to that results in the timestamp for the
  338. * sample at OpenAL's current position, and subtracting the source latency
  339. * from that gives the timestamp of the sample currently at the DAC.
  340. */
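/* Roughly, the computation below is:
 *   pts = mCurrentPts - AudioBufferTime*queued
 *         + offset[0] / (2^32 * sample_rate)   (source offset, 32.32 fixed-point samples)
 *         - offset[1]                          (latency, in nanoseconds)
 */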
  341. nanoseconds pts = mCurrentPts;
  342. if(mSource)
  343. {
  344. ALint64SOFT offset[2];
  345. ALint queued;
  346. ALint status;
  347. /* NOTE: The source state must be checked last, in case an underrun
  348. * occurs and the source stops between retrieving the offset+latency
  349. * and getting the state. */
  350. if(alGetSourcei64vSOFT)
  351. alGetSourcei64vSOFT(mSource, AL_SAMPLE_OFFSET_LATENCY_SOFT, offset);
  352. else
  353. {
  354. ALint ioffset;
  355. alGetSourcei(mSource, AL_SAMPLE_OFFSET, &ioffset);
  356. offset[0] = (ALint64SOFT)ioffset << 32;
  357. offset[1] = 0;
  358. }
  359. alGetSourcei(mSource, AL_BUFFERS_QUEUED, &queued);
  360. alGetSourcei(mSource, AL_SOURCE_STATE, &status);
  361. /* If the source is AL_STOPPED, then there was an underrun and all
  362. * buffers are processed, so ignore the source queue. The audio thread
  363. * will put the source into an AL_INITIAL state and clear the queue
  364. * when it starts recovery. */
  365. if(status != AL_STOPPED)
  366. {
  367. using fixed32 = std::chrono::duration<int64_t,std::ratio<1,(1ll<<32)>>;
  368. pts -= AudioBufferTime*queued;
  369. pts += std::chrono::duration_cast<nanoseconds>(
  370. fixed32(offset[0] / mCodecCtx->sample_rate)
  371. );
  372. }
  373. /* Don't offset by the latency if the source isn't playing. */
  374. if(status == AL_PLAYING)
  375. pts -= nanoseconds(offset[1]);
  376. }
  377. return std::max(pts, nanoseconds::zero());
  378. }
  379. bool AudioState::isBufferFilled()
  380. {
  381. /* All of OpenAL's buffer queueing happens under the mSrcMutex lock, as
  382. * does the source gen. So when we're able to grab the lock and the source
  383. * is valid, the queue must be full.
  384. */
  385. std::lock_guard<std::mutex> lock(mSrcMutex);
  386. return mSource != 0;
  387. }
  388. void AudioState::startPlayback()
  389. {
  390. alSourcePlay(mSource);
  391. if(alcGetInteger64vSOFT)
  392. {
  393. using fixed32 = std::chrono::duration<int64_t,std::ratio<1,(1ll<<32)>>;
  394. // Subtract the total buffer queue time from the current pts to get the
  395. // pts of the start of the queue.
  396. nanoseconds startpts = mCurrentPts - AudioBufferTotalTime;
  397. int64_t srctimes[2]={0,0};
  398. alGetSourcei64vSOFT(mSource, AL_SAMPLE_OFFSET_CLOCK_SOFT, srctimes);
  399. auto device_time = nanoseconds(srctimes[1]);
  400. auto src_offset = std::chrono::duration_cast<nanoseconds>(fixed32(srctimes[0])) /
  401. mCodecCtx->sample_rate;
  402. // The mixer may have ticked and incremented the device time and sample
  403. // offset, so subtract the source offset from the device time to get
  404. // the device time the source started at. Also subtract startpts to get
  405. // the device time the stream would have started at to reach where it
  406. // is now.
  407. mDeviceStartTime = device_time - src_offset - startpts;
  408. }
  409. }
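/* Returns how many sample frames to skip (positive) or duplicate (negative)
 * to move the audio clock toward the master clock. For example, if the
 * averaged diff settles at +30ms with a 48kHz stream, roughly 0.03*48000 =
 * 1440 frames get skipped (the per-update diff is clamped by
 * AudioSampleCorrectionMax).
 */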
  410. int AudioState::getSync()
  411. {
  412. if(mMovie.mAVSyncType == SyncMaster::Audio)
  413. return 0;
  414. auto ref_clock = mMovie.getMasterClock();
  415. auto diff = ref_clock - getClockNoLock();
  416. if(!(diff < AVNoSyncThreshold && diff > -AVNoSyncThreshold))
  417. {
  418. /* Difference is TOO big; reset accumulated average */
  419. mClockDiffAvg = seconds_d64::zero();
  420. return 0;
  421. }
  422. /* Accumulate the diffs */
  423. mClockDiffAvg = mClockDiffAvg*AudioAvgFilterCoeff + diff;
  424. auto avg_diff = mClockDiffAvg*(1.0 - AudioAvgFilterCoeff);
  425. if(avg_diff < AudioSyncThreshold/2.0 && avg_diff > -AudioSyncThreshold)
  426. return 0;
  427. /* Constrain the per-update difference to avoid exceedingly large skips */
  428. diff = std::min<nanoseconds>(std::max<nanoseconds>(diff, -AudioSampleCorrectionMax),
  429. AudioSampleCorrectionMax);
  430. return (int)std::chrono::duration_cast<seconds>(diff*mCodecCtx->sample_rate).count();
  431. }
  432. int AudioState::decodeFrame()
  433. {
  434. while(!mMovie.mQuit.load(std::memory_order_relaxed))
  435. {
  436. std::unique_lock<std::mutex> lock(mQueueMtx);
  437. int ret = avcodec_receive_frame(mCodecCtx.get(), mDecodedFrame.get());
  438. if(ret == AVERROR(EAGAIN))
  439. {
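/* EAGAIN means the decoder is out of input. Wake the parser thread so it
 * sends more packets, then wait on the queue condition and retry.
 */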
  440. mMovie.mSendDataGood.clear(std::memory_order_relaxed);
  441. std::unique_lock<std::mutex>(mMovie.mSendMtx).unlock();
  442. mMovie.mSendCond.notify_one();
  443. do {
  444. mQueueCond.wait(lock);
  445. ret = avcodec_receive_frame(mCodecCtx.get(), mDecodedFrame.get());
  446. } while(ret == AVERROR(EAGAIN));
  447. }
  448. lock.unlock();
  449. if(ret == AVERROR_EOF) break;
  450. mMovie.mSendDataGood.clear(std::memory_order_relaxed);
  451. mMovie.mSendCond.notify_one();
  452. if(ret < 0)
  453. {
  454. std::cerr<< "Failed to decode frame: "<<ret <<std::endl;
  455. return 0;
  456. }
  457. if(mDecodedFrame->nb_samples <= 0)
  458. {
  459. av_frame_unref(mDecodedFrame.get());
  460. continue;
  461. }
  462. /* If provided, update w/ pts */
  463. if(mDecodedFrame->best_effort_timestamp != AV_NOPTS_VALUE)
  464. mCurrentPts = std::chrono::duration_cast<nanoseconds>(
  465. seconds_d64(av_q2d(mStream->time_base)*mDecodedFrame->best_effort_timestamp)
  466. );
  467. if(mDecodedFrame->nb_samples > mSamplesMax)
  468. {
  469. av_freep(&mSamples);
  470. av_samples_alloc(
  471. &mSamples, nullptr, mCodecCtx->channels,
  472. mDecodedFrame->nb_samples, mDstSampleFmt, 0
  473. );
  474. mSamplesMax = mDecodedFrame->nb_samples;
  475. }
  476. /* Return the number of sample frames converted */
  477. int data_size = swr_convert(mSwresCtx.get(), &mSamples, mDecodedFrame->nb_samples,
  478. (const uint8_t**)mDecodedFrame->data, mDecodedFrame->nb_samples
  479. );
  480. av_frame_unref(mDecodedFrame.get());
  481. return data_size;
  482. }
  483. return 0;
  484. }
  485. /* Duplicates the sample at in to out, count times. The frame size is a
  486. * multiple of the template type size.
  487. */
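/* For example, a 6-channel float frame (frame_size = 24 bytes) is handled
 * with T=uint64_t by readAudio below, copying 3 words per duplicated frame.
 */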
  488. template<typename T>
  489. static void sample_dup(uint8_t *out, const uint8_t *in, int count, int frame_size)
  490. {
  491. const T *sample = reinterpret_cast<const T*>(in);
  492. T *dst = reinterpret_cast<T*>(out);
  493. if(frame_size == sizeof(T))
  494. std::fill_n(dst, count, *sample);
  495. else
  496. {
  497. /* NOTE: frame_size is a multiple of sizeof(T). */
  498. int type_mult = frame_size / sizeof(T);
  499. int i = 0;
  500. std::generate_n(dst, count*type_mult,
  501. [sample,type_mult,&i]() -> T
  502. {
  503. T ret = sample[i];
  504. i = (i+1)%type_mult;
  505. return ret;
  506. }
  507. );
  508. }
  509. }
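/* Fills 'samples' with up to 'length' bytes of converted audio. A positive
 * sync offset skips decoded sample frames; a negative offset leaves
 * mSamplesPos negative so the first frame gets duplicated as padding.
 * Returns false if nothing could be read.
 */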
  510. bool AudioState::readAudio(uint8_t *samples, int length)
  511. {
  512. int sample_skip = getSync();
  513. int audio_size = 0;
  514. /* Read the next chunk of data, refill the buffer, and queue it
  515. * on the source */
  516. length /= mFrameSize;
  517. while(audio_size < length)
  518. {
  519. if(mSamplesLen <= 0 || mSamplesPos >= mSamplesLen)
  520. {
  521. int frame_len = decodeFrame();
  522. if(frame_len <= 0) break;
  523. mSamplesLen = frame_len;
  524. mSamplesPos = std::min(mSamplesLen, sample_skip);
  525. sample_skip -= mSamplesPos;
  526. // Adjust the device start time and current pts by the amount we're
  527. // skipping/duplicating, so that the clock remains correct for the
  528. // current stream position.
  529. auto skip = nanoseconds(seconds(mSamplesPos)) / mCodecCtx->sample_rate;
  530. mDeviceStartTime -= skip;
  531. mCurrentPts += skip;
  532. continue;
  533. }
  534. int rem = length - audio_size;
  535. if(mSamplesPos >= 0)
  536. {
  537. int len = mSamplesLen - mSamplesPos;
  538. if(rem > len) rem = len;
  539. memcpy(samples, mSamples + mSamplesPos*mFrameSize, rem*mFrameSize);
  540. }
  541. else
  542. {
  543. rem = std::min(rem, -mSamplesPos);
  544. /* Add samples by copying the first sample */
  545. if((mFrameSize&7) == 0)
  546. sample_dup<uint64_t>(samples, mSamples, rem, mFrameSize);
  547. else if((mFrameSize&3) == 0)
  548. sample_dup<uint32_t>(samples, mSamples, rem, mFrameSize);
  549. else if((mFrameSize&1) == 0)
  550. sample_dup<uint16_t>(samples, mSamples, rem, mFrameSize);
  551. else
  552. sample_dup<uint8_t>(samples, mSamples, rem, mFrameSize);
  553. }
  554. mSamplesPos += rem;
  555. mCurrentPts += nanoseconds(seconds(rem)) / mCodecCtx->sample_rate;
  556. samples += rem*mFrameSize;
  557. audio_size += rem;
  558. }
  559. if(audio_size <= 0)
  560. return false;
  561. if(audio_size < length)
  562. {
  563. int rem = length - audio_size;
  564. std::fill_n(samples, rem*mFrameSize,
  565. (mDstSampleFmt == AV_SAMPLE_FMT_U8) ? 0x80 : 0x00);
  566. mCurrentPts += nanoseconds(seconds(rem)) / mCodecCtx->sample_rate;
  567. audio_size += rem;
  568. }
  569. return true;
  570. }
  571. void AL_APIENTRY AudioState::EventCallback(ALenum eventType, ALuint object, ALuint param,
  572. ALsizei length, const ALchar *message,
  573. void *userParam)
  574. {
  575. AudioState *self = reinterpret_cast<AudioState*>(userParam);
  576. if(eventType == AL_EVENT_TYPE_BUFFER_COMPLETED_SOFT)
  577. {
  578. /* Briefly lock the source mutex to ensure the handler thread isn't between
  579. * checking the processed count and going to sleep.
  580. */
  581. std::unique_lock<std::mutex>(self->mSrcMutex).unlock();
  582. self->mSrcCond.notify_one();
  583. return;
  584. }
  585. std::cout<< "---- AL Event on AudioState "<<self<<" ----\nEvent: ";
  586. switch(eventType)
  587. {
  588. case AL_EVENT_TYPE_BUFFER_COMPLETED_SOFT: std::cout<< "Buffer completed"; break;
  589. case AL_EVENT_TYPE_SOURCE_STATE_CHANGED_SOFT: std::cout<< "Source state changed"; break;
  590. case AL_EVENT_TYPE_ERROR_SOFT: std::cout<< "API error"; break;
  591. case AL_EVENT_TYPE_PERFORMANCE_SOFT: std::cout<< "Performance"; break;
  592. case AL_EVENT_TYPE_DEPRECATED_SOFT: std::cout<< "Deprecated"; break;
  593. case AL_EVENT_TYPE_DISCONNECTED_SOFT: std::cout<< "Disconnected"; break;
  594. default: std::cout<< "0x"<<std::hex<<std::setw(4)<<std::setfill('0')<<eventType<<
  595. std::dec<<std::setw(0)<<std::setfill(' '); break;
  596. }
  597. std::cout<< "\n"
  598. "Object ID: "<<object<<'\n'<<
  599. "Parameter: "<<param<<'\n'<<
  600. "Message: "<<std::string(message, length)<<"\n----"<<
  601. std::endl;
  602. if(eventType == AL_EVENT_TYPE_DISCONNECTED_SOFT)
  603. {
  604. { std::lock_guard<std::mutex> lock(self->mSrcMutex);
  605. self->mConnected.clear(std::memory_order_release);
  606. }
  607. std::unique_lock<std::mutex>(self->mSrcMutex).unlock();
  608. self->mSrcCond.notify_one();
  609. }
  610. }
  611. int AudioState::handler()
  612. {
  613. const std::array<ALenum,6> types{{
  614. AL_EVENT_TYPE_BUFFER_COMPLETED_SOFT, AL_EVENT_TYPE_SOURCE_STATE_CHANGED_SOFT,
  615. AL_EVENT_TYPE_ERROR_SOFT, AL_EVENT_TYPE_PERFORMANCE_SOFT, AL_EVENT_TYPE_DEPRECATED_SOFT,
  616. AL_EVENT_TYPE_DISCONNECTED_SOFT
  617. }};
  618. std::unique_lock<std::mutex> lock(mSrcMutex);
  619. milliseconds sleep_time = AudioBufferTime / 3;
  620. ALenum fmt;
  621. if(alEventControlSOFT)
  622. {
  623. alEventControlSOFT(types.size(), types.data(), AL_TRUE);
  624. alEventCallbackSOFT(EventCallback, this);
  625. sleep_time = AudioBufferTotalTime;
  626. }
  627. /* Find a suitable format for OpenAL. */
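/* The checks below prefer 8-bit or 32-bit float output when the decoded
 * format matches and the needed AL extensions are present, otherwise they
 * fall back to signed 16-bit. Channel layouts other than 7.1, 5.1, and mono
 * are converted to stereo by swresample.
 */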
  628. mDstChanLayout = 0;
  629. if(mCodecCtx->sample_fmt == AV_SAMPLE_FMT_U8 || mCodecCtx->sample_fmt == AV_SAMPLE_FMT_U8P)
  630. {
  631. mDstSampleFmt = AV_SAMPLE_FMT_U8;
  632. mFrameSize = 1;
  633. if(mCodecCtx->channel_layout == AV_CH_LAYOUT_7POINT1 &&
  634. alIsExtensionPresent("AL_EXT_MCFORMATS") &&
  635. (fmt=alGetEnumValue("AL_FORMAT_71CHN8")) != AL_NONE && fmt != -1)
  636. {
  637. mDstChanLayout = mCodecCtx->channel_layout;
  638. mFrameSize *= 8;
  639. mFormat = fmt;
  640. }
  641. if((mCodecCtx->channel_layout == AV_CH_LAYOUT_5POINT1 ||
  642. mCodecCtx->channel_layout == AV_CH_LAYOUT_5POINT1_BACK) &&
  643. alIsExtensionPresent("AL_EXT_MCFORMATS") &&
  644. (fmt=alGetEnumValue("AL_FORMAT_51CHN8")) != AL_NONE && fmt != -1)
  645. {
  646. mDstChanLayout = mCodecCtx->channel_layout;
  647. mFrameSize *= 6;
  648. mFormat = fmt;
  649. }
  650. if(mCodecCtx->channel_layout == AV_CH_LAYOUT_MONO)
  651. {
  652. mDstChanLayout = mCodecCtx->channel_layout;
  653. mFrameSize *= 1;
  654. mFormat = AL_FORMAT_MONO8;
  655. }
  656. if(!mDstChanLayout)
  657. {
  658. mDstChanLayout = AV_CH_LAYOUT_STEREO;
  659. mFrameSize *= 2;
  660. mFormat = AL_FORMAT_STEREO8;
  661. }
  662. }
  663. if((mCodecCtx->sample_fmt == AV_SAMPLE_FMT_FLT || mCodecCtx->sample_fmt == AV_SAMPLE_FMT_FLTP) &&
  664. alIsExtensionPresent("AL_EXT_FLOAT32"))
  665. {
  666. mDstSampleFmt = AV_SAMPLE_FMT_FLT;
  667. mFrameSize = 4;
  668. if(mCodecCtx->channel_layout == AV_CH_LAYOUT_7POINT1 &&
  669. alIsExtensionPresent("AL_EXT_MCFORMATS") &&
  670. (fmt=alGetEnumValue("AL_FORMAT_71CHN32")) != AL_NONE && fmt != -1)
  671. {
  672. mDstChanLayout = mCodecCtx->channel_layout;
  673. mFrameSize *= 8;
  674. mFormat = fmt;
  675. }
  676. if((mCodecCtx->channel_layout == AV_CH_LAYOUT_5POINT1 ||
  677. mCodecCtx->channel_layout == AV_CH_LAYOUT_5POINT1_BACK) &&
  678. alIsExtensionPresent("AL_EXT_MCFORMATS") &&
  679. (fmt=alGetEnumValue("AL_FORMAT_51CHN32")) != AL_NONE && fmt != -1)
  680. {
  681. mDstChanLayout = mCodecCtx->channel_layout;
  682. mFrameSize *= 6;
  683. mFormat = fmt;
  684. }
  685. if(mCodecCtx->channel_layout == AV_CH_LAYOUT_MONO)
  686. {
  687. mDstChanLayout = mCodecCtx->channel_layout;
  688. mFrameSize *= 1;
  689. mFormat = AL_FORMAT_MONO_FLOAT32;
  690. }
  691. if(!mDstChanLayout)
  692. {
  693. mDstChanLayout = AV_CH_LAYOUT_STEREO;
  694. mFrameSize *= 2;
  695. mFormat = AL_FORMAT_STEREO_FLOAT32;
  696. }
  697. }
  698. if(!mDstChanLayout)
  699. {
  700. mDstSampleFmt = AV_SAMPLE_FMT_S16;
  701. mFrameSize = 2;
  702. if(mCodecCtx->channel_layout == AV_CH_LAYOUT_7POINT1 &&
  703. alIsExtensionPresent("AL_EXT_MCFORMATS") &&
  704. (fmt=alGetEnumValue("AL_FORMAT_71CHN16")) != AL_NONE && fmt != -1)
  705. {
  706. mDstChanLayout = mCodecCtx->channel_layout;
  707. mFrameSize *= 8;
  708. mFormat = fmt;
  709. }
  710. if((mCodecCtx->channel_layout == AV_CH_LAYOUT_5POINT1 ||
  711. mCodecCtx->channel_layout == AV_CH_LAYOUT_5POINT1_BACK) &&
  712. alIsExtensionPresent("AL_EXT_MCFORMATS") &&
  713. (fmt=alGetEnumValue("AL_FORMAT_51CHN16")) != AL_NONE && fmt != -1)
  714. {
  715. mDstChanLayout = mCodecCtx->channel_layout;
  716. mFrameSize *= 6;
  717. mFormat = fmt;
  718. }
  719. if(mCodecCtx->channel_layout == AV_CH_LAYOUT_MONO)
  720. {
  721. mDstChanLayout = mCodecCtx->channel_layout;
  722. mFrameSize *= 1;
  723. mFormat = AL_FORMAT_MONO16;
  724. }
  725. if(!mDstChanLayout)
  726. {
  727. mDstChanLayout = AV_CH_LAYOUT_STEREO;
  728. mFrameSize *= 2;
  729. mFormat = AL_FORMAT_STEREO16;
  730. }
  731. }
  732. void *samples = nullptr;
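/* Each AL buffer holds AudioBufferTime (20ms) worth of audio:
 * sample_rate * 0.020s sample frames, times mFrameSize bytes per frame.
 */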
  733. ALsizei buffer_len = std::chrono::duration_cast<std::chrono::duration<int>>(
  734. mCodecCtx->sample_rate * AudioBufferTime).count() * mFrameSize;
  735. mSamples = nullptr;
  736. mSamplesMax = 0;
  737. mSamplesPos = 0;
  738. mSamplesLen = 0;
  739. mDecodedFrame.reset(av_frame_alloc());
  740. if(!mDecodedFrame)
  741. {
  742. std::cerr<< "Failed to allocate audio frame" <<std::endl;
  743. goto finish;
  744. }
  745. mSwresCtx.reset(swr_alloc_set_opts(nullptr,
  746. mDstChanLayout, mDstSampleFmt, mCodecCtx->sample_rate,
  747. mCodecCtx->channel_layout ? mCodecCtx->channel_layout :
  748. (uint64_t)av_get_default_channel_layout(mCodecCtx->channels),
  749. mCodecCtx->sample_fmt, mCodecCtx->sample_rate,
  750. 0, nullptr
  751. ));
  752. if(!mSwresCtx || swr_init(mSwresCtx.get()) != 0)
  753. {
  754. std::cerr<< "Failed to initialize audio converter" <<std::endl;
  755. goto finish;
  756. }
  757. mBuffers.assign(AudioBufferTotalTime / AudioBufferTime, 0);
  758. alGenBuffers(mBuffers.size(), mBuffers.data());
  759. alGenSources(1, &mSource);
  760. if(EnableDirectOut)
  761. alSourcei(mSource, AL_DIRECT_CHANNELS_SOFT, AL_TRUE);
  762. if(alGetError() != AL_NO_ERROR)
  763. goto finish;
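/* With AL_SOFT_map_buffer, storage is allocated up front and each buffer is
 * mapped and written directly; otherwise a temporary buffer is filled and
 * copied in with alBufferData in the loop below.
 */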
  764. if(!alBufferStorageSOFT)
  765. samples = av_malloc(buffer_len);
  766. else
  767. {
  768. for(ALuint bufid : mBuffers)
  769. alBufferStorageSOFT(bufid, mFormat, nullptr, buffer_len, mCodecCtx->sample_rate,
  770. AL_MAP_WRITE_BIT_SOFT);
  771. if(alGetError() != AL_NO_ERROR)
  772. {
  773. fprintf(stderr, "Failed to use mapped buffers\n");
  774. samples = av_malloc(buffer_len);
  775. }
  776. }
  777. while(alGetError() == AL_NO_ERROR && !mMovie.mQuit.load(std::memory_order_relaxed) &&
  778. mConnected.test_and_set(std::memory_order_relaxed))
  779. {
  780. /* First remove any processed buffers. */
  781. ALint processed;
  782. alGetSourcei(mSource, AL_BUFFERS_PROCESSED, &processed);
  783. while(processed > 0)
  784. {
  785. std::array<ALuint,4> bids;
  786. alSourceUnqueueBuffers(mSource, std::min<ALsizei>(bids.size(), processed),
  787. bids.data());
  788. processed -= std::min<ALsizei>(bids.size(), processed);
  789. }
  790. /* Refill the buffer queue. */
  791. ALint queued;
  792. alGetSourcei(mSource, AL_BUFFERS_QUEUED, &queued);
  793. while((ALuint)queued < mBuffers.size())
  794. {
  795. ALuint bufid = mBuffers[mBufferIdx];
  796. uint8_t *ptr = reinterpret_cast<uint8_t*>(
  797. samples ? samples : alMapBufferSOFT(bufid, 0, buffer_len, AL_MAP_WRITE_BIT_SOFT)
  798. );
  799. if(!ptr) break;
  800. /* Read the next chunk of data, filling the buffer, and queue it on
  801. * the source */
  802. bool got_audio = readAudio(ptr, buffer_len);
  803. if(!samples) alUnmapBufferSOFT(bufid);
  804. if(!got_audio) break;
  805. if(samples)
  806. alBufferData(bufid, mFormat, samples, buffer_len, mCodecCtx->sample_rate);
  807. alSourceQueueBuffers(mSource, 1, &bufid);
  808. mBufferIdx = (mBufferIdx+1) % mBuffers.size();
  809. ++queued;
  810. }
  811. if(queued == 0)
  812. break;
  813. /* Check that the source is playing. */
  814. ALint state;
  815. alGetSourcei(mSource, AL_SOURCE_STATE, &state);
  816. if(state == AL_STOPPED)
  817. {
  818. /* AL_STOPPED means there was an underrun. Clear the buffer queue
  819. * since this likely means we're late, and rewind the source to get
  820. * it back into an AL_INITIAL state.
  821. */
  822. alSourceRewind(mSource);
  823. alSourcei(mSource, AL_BUFFER, 0);
  824. continue;
  825. }
  826. /* (re)start the source if needed, and wait for a buffer to finish */
  827. if(state != AL_PLAYING && state != AL_PAUSED &&
  828. mMovie.mPlaying.load(std::memory_order_relaxed))
  829. startPlayback();
  830. mSrcCond.wait_for(lock, sleep_time);
  831. }
  832. alSourceRewind(mSource);
  833. alSourcei(mSource, AL_BUFFER, 0);
  834. finish:
  835. av_freep(&samples);
  836. if(alEventControlSOFT)
  837. {
  838. alEventControlSOFT(types.size(), types.data(), AL_FALSE);
  839. alEventCallbackSOFT(nullptr, nullptr);
  840. }
  841. return 0;
  842. }
  843. nanoseconds VideoState::getClock()
  844. {
  845. /* NOTE: This returns incorrect times while not playing. */
  846. auto delta = get_avtime() - mCurrentPtsTime;
  847. return mCurrentPts + delta;
  848. }
  849. bool VideoState::isBufferFilled()
  850. {
  851. std::unique_lock<std::mutex> lock(mPictQMutex);
  852. return mPictQSize >= mPictQ.size();
  853. }
  854. Uint32 SDLCALL VideoState::sdl_refresh_timer_cb(Uint32 /*interval*/, void *opaque)
  855. {
  856. SDL_Event evt{};
  857. evt.user.type = FF_REFRESH_EVENT;
  858. evt.user.data1 = opaque;
  859. SDL_PushEvent(&evt);
  860. return 0; /* 0 means stop timer */
  861. }
  862. /* Schedules an FF_REFRESH_EVENT event to occur in 'delay' ms. */
  863. void VideoState::schedRefresh(milliseconds delay)
  864. {
  865. SDL_AddTimer(delay.count(), sdl_refresh_timer_cb, this);
  866. }
  867. /* Called by VideoState::refreshTimer to display the next video frame. */
  868. void VideoState::display(SDL_Window *screen, SDL_Renderer *renderer)
  869. {
  870. Picture *vp = &mPictQ[mPictQRead];
  871. if(!vp->mImage)
  872. return;
  873. float aspect_ratio;
  874. int win_w, win_h;
  875. int w, h, x, y;
  876. if(mCodecCtx->sample_aspect_ratio.num == 0)
  877. aspect_ratio = 0.0f;
  878. else
  879. {
  880. aspect_ratio = av_q2d(mCodecCtx->sample_aspect_ratio) * mCodecCtx->width /
  881. mCodecCtx->height;
  882. }
  883. if(aspect_ratio <= 0.0f)
  884. aspect_ratio = (float)mCodecCtx->width / (float)mCodecCtx->height;
  885. SDL_GetWindowSize(screen, &win_w, &win_h);
  886. h = win_h;
  887. w = ((int)rint(h * aspect_ratio) + 3) & ~3;
  888. if(w > win_w)
  889. {
  890. w = win_w;
  891. h = ((int)rint(w / aspect_ratio) + 3) & ~3;
  892. }
  893. x = (win_w - w) / 2;
  894. y = (win_h - h) / 2;
  895. SDL_Rect src_rect{ 0, 0, vp->mWidth, vp->mHeight };
  896. SDL_Rect dst_rect{ x, y, w, h };
  897. SDL_RenderCopy(renderer, vp->mImage, &src_rect, &dst_rect);
  898. SDL_RenderPresent(renderer);
  899. }
  900. /* FF_REFRESH_EVENT handler called on the main thread where the SDL_Renderer
  901. * was created. It handles the display of the next decoded video frame (if not
  902. * falling behind), and sets up the timer for the following video frame.
  903. */
  904. void VideoState::refreshTimer(SDL_Window *screen, SDL_Renderer *renderer)
  905. {
  906. if(!mStream)
  907. {
  908. if(mEOS)
  909. {
  910. mFinalUpdate = true;
  911. std::unique_lock<std::mutex>(mPictQMutex).unlock();
  912. mPictQCond.notify_all();
  913. return;
  914. }
  915. schedRefresh(milliseconds(100));
  916. return;
  917. }
  918. if(!mMovie.mPlaying.load(std::memory_order_relaxed))
  919. {
  920. schedRefresh(milliseconds(1));
  921. return;
  922. }
  923. std::unique_lock<std::mutex> lock(mPictQMutex);
  924. retry:
  925. if(mPictQSize == 0)
  926. {
  927. if(mEOS)
  928. mFinalUpdate = true;
  929. else
  930. schedRefresh(milliseconds(1));
  931. lock.unlock();
  932. mPictQCond.notify_all();
  933. return;
  934. }
  935. Picture *vp = &mPictQ[mPictQRead];
  936. mCurrentPts = vp->mPts;
  937. mCurrentPtsTime = get_avtime();
  938. /* Get delay using the frame pts and the pts from last frame. */
  939. auto delay = vp->mPts - mFrameLastPts;
  940. if(delay <= seconds::zero() || delay >= seconds(1))
  941. {
  942. /* If incorrect delay, use previous one. */
  943. delay = mFrameLastDelay;
  944. }
  945. /* Save for next frame. */
  946. mFrameLastDelay = delay;
  947. mFrameLastPts = vp->mPts;
  948. /* Update delay to sync to clock if not master source. */
  949. if(mMovie.mAVSyncType != SyncMaster::Video)
  950. {
  951. auto ref_clock = mMovie.getMasterClock();
  952. auto diff = vp->mPts - ref_clock;
  953. /* Skip or repeat the frame. Take delay into account. */
  954. auto sync_threshold = std::min<nanoseconds>(delay, VideoSyncThreshold);
  955. if(!(diff < AVNoSyncThreshold && diff > -AVNoSyncThreshold))
  956. {
  957. if(diff <= -sync_threshold)
  958. delay = nanoseconds::zero();
  959. else if(diff >= sync_threshold)
  960. delay *= 2;
  961. }
  962. }
  963. mFrameTimer += delay;
  964. /* Compute the REAL delay. */
  965. auto actual_delay = mFrameTimer - get_avtime();
  966. if(!(actual_delay >= VideoSyncThreshold))
  967. {
  968. /* We don't have time to handle this picture, just skip to the next one. */
  969. mPictQRead = (mPictQRead+1)%mPictQ.size();
  970. mPictQSize--;
  971. goto retry;
  972. }
  973. schedRefresh(std::chrono::duration_cast<milliseconds>(actual_delay));
  974. /* Show the picture! */
  975. display(screen, renderer);
  976. /* Update queue for next picture. */
  977. mPictQRead = (mPictQRead+1)%mPictQ.size();
  978. mPictQSize--;
  979. lock.unlock();
  980. mPictQCond.notify_all();
  981. }
  982. /* FF_UPDATE_EVENT handler, updates the picture's texture. It's called on the
  983. * main thread where the renderer was created.
  984. */
  985. void VideoState::updatePicture(SDL_Window *screen, SDL_Renderer *renderer)
  986. {
  987. Picture *vp = &mPictQ[mPictQWrite];
  988. bool fmt_updated = false;
  989. /* allocate or resize the buffer! */
  990. if(!vp->mImage || vp->mWidth != mCodecCtx->width || vp->mHeight != mCodecCtx->height)
  991. {
  992. fmt_updated = true;
  993. if(vp->mImage)
  994. SDL_DestroyTexture(vp->mImage);
  995. vp->mImage = SDL_CreateTexture(
  996. renderer, SDL_PIXELFORMAT_IYUV, SDL_TEXTUREACCESS_STREAMING,
  997. mCodecCtx->coded_width, mCodecCtx->coded_height
  998. );
  999. if(!vp->mImage)
  1000. std::cerr<< "Failed to create IYUV texture!" <<std::endl;
  1001. vp->mWidth = mCodecCtx->width;
  1002. vp->mHeight = mCodecCtx->height;
  1003. if(mFirstUpdate && vp->mWidth > 0 && vp->mHeight > 0)
  1004. {
  1005. /* For the first update, set the window size to the video size. */
  1006. mFirstUpdate = false;
  1007. int w = vp->mWidth;
  1008. int h = vp->mHeight;
  1009. if(mCodecCtx->sample_aspect_ratio.den != 0)
  1010. {
  1011. double aspect_ratio = av_q2d(mCodecCtx->sample_aspect_ratio);
  1012. if(aspect_ratio >= 1.0)
  1013. w = (int)(w*aspect_ratio + 0.5);
  1014. else if(aspect_ratio > 0.0)
  1015. h = (int)(h/aspect_ratio + 0.5);
  1016. }
  1017. SDL_SetWindowSize(screen, w, h);
  1018. }
  1019. }
  1020. if(vp->mImage)
  1021. {
  1022. AVFrame *frame = mDecodedFrame.get();
  1023. void *pixels = nullptr;
  1024. int pitch = 0;
  1025. if(mCodecCtx->pix_fmt == AV_PIX_FMT_YUV420P)
  1026. SDL_UpdateYUVTexture(vp->mImage, nullptr,
  1027. frame->data[0], frame->linesize[0],
  1028. frame->data[1], frame->linesize[1],
  1029. frame->data[2], frame->linesize[2]
  1030. );
  1031. else if(SDL_LockTexture(vp->mImage, nullptr, &pixels, &pitch) != 0)
  1032. std::cerr<< "Failed to lock texture" <<std::endl;
  1033. else
  1034. {
  1035. // Convert the image into YUV format that SDL uses
  1036. int coded_w = mCodecCtx->coded_width;
  1037. int coded_h = mCodecCtx->coded_height;
  1038. int w = mCodecCtx->width;
  1039. int h = mCodecCtx->height;
  1040. if(!mSwscaleCtx || fmt_updated)
  1041. {
  1042. mSwscaleCtx.reset(sws_getContext(
  1043. w, h, mCodecCtx->pix_fmt,
  1044. w, h, AV_PIX_FMT_YUV420P, 0,
  1045. nullptr, nullptr, nullptr
  1046. ));
  1047. }
  1048. /* point pict at the queue */
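/* The texture is IYUV: a full-size Y plane followed by quarter-size U and
 * V planes, so the chroma pointers and line sizes are derived from the luma
 * plane's dimensions and pitch.
 */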
  1049. uint8_t *pict_data[3];
  1050. pict_data[0] = reinterpret_cast<uint8_t*>(pixels);
  1051. pict_data[1] = pict_data[0] + coded_w*coded_h;
  1052. pict_data[2] = pict_data[1] + coded_w*coded_h/4;
  1053. int pict_linesize[3];
  1054. pict_linesize[0] = pitch;
  1055. pict_linesize[1] = pitch / 2;
  1056. pict_linesize[2] = pitch / 2;
  1057. sws_scale(mSwscaleCtx.get(), (const uint8_t**)frame->data,
  1058. frame->linesize, 0, h, pict_data, pict_linesize);
  1059. SDL_UnlockTexture(vp->mImage);
  1060. }
  1061. }
  1062. vp->mUpdated.store(true, std::memory_order_release);
  1063. std::unique_lock<std::mutex>(mPictQMutex).unlock();
  1064. mPictQCond.notify_one();
  1065. }
  1066. int VideoState::queuePicture(nanoseconds pts)
  1067. {
  1068. /* Wait until we have space for a new pic */
  1069. std::unique_lock<std::mutex> lock(mPictQMutex);
  1070. while(mPictQSize >= mPictQ.size() && !mMovie.mQuit.load(std::memory_order_relaxed))
  1071. mPictQCond.wait(lock);
  1072. lock.unlock();
  1073. if(mMovie.mQuit.load(std::memory_order_relaxed))
  1074. return -1;
  1075. Picture *vp = &mPictQ[mPictQWrite];
  1076. /* We have to create/update the picture in the main thread */
  1077. vp->mUpdated.store(false, std::memory_order_relaxed);
  1078. SDL_Event evt{};
  1079. evt.user.type = FF_UPDATE_EVENT;
  1080. evt.user.data1 = this;
  1081. SDL_PushEvent(&evt);
  1082. /* Wait until the picture is updated. */
  1083. lock.lock();
  1084. while(!vp->mUpdated.load(std::memory_order_relaxed))
  1085. {
  1086. if(mMovie.mQuit.load(std::memory_order_relaxed))
  1087. return -1;
  1088. mPictQCond.wait(lock);
  1089. }
  1090. if(mMovie.mQuit.load(std::memory_order_relaxed))
  1091. return -1;
  1092. vp->mPts = pts;
  1093. mPictQWrite = (mPictQWrite+1)%mPictQ.size();
  1094. mPictQSize++;
  1095. lock.unlock();
  1096. return 0;
  1097. }
  1098. int VideoState::handler()
  1099. {
  1100. mDecodedFrame.reset(av_frame_alloc());
  1101. while(!mMovie.mQuit.load(std::memory_order_relaxed))
  1102. {
  1103. std::unique_lock<std::mutex> lock(mQueueMtx);
  1104. /* Decode video frame */
  1105. int ret = avcodec_receive_frame(mCodecCtx.get(), mDecodedFrame.get());
  1106. if(ret == AVERROR(EAGAIN))
  1107. {
  1108. mMovie.mSendDataGood.clear(std::memory_order_relaxed);
  1109. std::unique_lock<std::mutex>(mMovie.mSendMtx).unlock();
  1110. mMovie.mSendCond.notify_one();
  1111. do {
  1112. mQueueCond.wait(lock);
  1113. ret = avcodec_receive_frame(mCodecCtx.get(), mDecodedFrame.get());
  1114. } while(ret == AVERROR(EAGAIN));
  1115. }
  1116. lock.unlock();
  1117. if(ret == AVERROR_EOF) break;
  1118. mMovie.mSendDataGood.clear(std::memory_order_relaxed);
  1119. mMovie.mSendCond.notify_one();
  1120. if(ret < 0)
  1121. {
  1122. std::cerr<< "Failed to decode frame: "<<ret <<std::endl;
  1123. continue;
  1124. }
  1125. /* Get the PTS for this frame. */
  1126. nanoseconds pts;
  1127. if(mDecodedFrame->best_effort_timestamp != AV_NOPTS_VALUE)
  1128. mClock = std::chrono::duration_cast<nanoseconds>(
  1129. seconds_d64(av_q2d(mStream->time_base)*mDecodedFrame->best_effort_timestamp)
  1130. );
  1131. pts = mClock;
  1132. /* Update the video clock to the next expected PTS. */
  1133. auto frame_delay = av_q2d(mCodecCtx->time_base);
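/* repeat_pict asks for the frame to be shown longer; each unit of
 * repeat_pict extends the delay by half a frame period.
 */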
  1134. frame_delay += mDecodedFrame->repeat_pict * (frame_delay * 0.5);
  1135. mClock += std::chrono::duration_cast<nanoseconds>(seconds_d64(frame_delay));
  1136. if(queuePicture(pts) < 0)
  1137. break;
  1138. av_frame_unref(mDecodedFrame.get());
  1139. }
  1140. mEOS = true;
  1141. std::unique_lock<std::mutex> lock(mPictQMutex);
  1142. if(mMovie.mQuit.load(std::memory_order_relaxed))
  1143. {
  1144. mPictQRead = 0;
  1145. mPictQWrite = 0;
  1146. mPictQSize = 0;
  1147. }
  1148. while(!mFinalUpdate)
  1149. mPictQCond.wait(lock);
  1150. return 0;
  1151. }
  1152. int MovieState::decode_interrupt_cb(void *ctx)
  1153. {
  1154. return reinterpret_cast<MovieState*>(ctx)->mQuit.load(std::memory_order_relaxed);
  1155. }
  1156. bool MovieState::prepare()
  1157. {
  1158. AVIOContext *avioctx = nullptr;
  1159. AVIOInterruptCB intcb = { decode_interrupt_cb, this };
  1160. if(avio_open2(&avioctx, mFilename.c_str(), AVIO_FLAG_READ, &intcb, nullptr))
  1161. {
  1162. std::cerr<< "Failed to open "<<mFilename <<std::endl;
  1163. return false;
  1164. }
  1165. mIOContext.reset(avioctx);
  1166. /* Open movie file. If avformat_open_input fails it will automatically free
  1167. * this context, so don't set it onto a smart pointer yet.
  1168. */
  1169. AVFormatContext *fmtctx = avformat_alloc_context();
  1170. fmtctx->pb = mIOContext.get();
  1171. fmtctx->interrupt_callback = intcb;
  1172. if(avformat_open_input(&fmtctx, mFilename.c_str(), nullptr, nullptr) != 0)
  1173. {
  1174. std::cerr<< "Failed to open "<<mFilename <<std::endl;
  1175. return false;
  1176. }
  1177. mFormatCtx.reset(fmtctx);
  1178. /* Retrieve stream information */
  1179. if(avformat_find_stream_info(mFormatCtx.get(), nullptr) < 0)
  1180. {
  1181. std::cerr<< mFilename<<": failed to find stream info" <<std::endl;
  1182. return false;
  1183. }
  1184. mVideo.schedRefresh(milliseconds(40));
  1185. mParseThread = std::thread(std::mem_fn(&MovieState::parse_handler), this);
  1186. return true;
  1187. }
  1188. void MovieState::setTitle(SDL_Window *window)
  1189. {
  1190. auto pos1 = mFilename.rfind('/');
  1191. auto pos2 = mFilename.rfind('\\');
  1192. auto fpos = ((pos1 == std::string::npos) ? pos2 :
  1193. (pos2 == std::string::npos) ? pos1 :
  1194. std::max(pos1, pos2)) + 1;
  1195. SDL_SetWindowTitle(window, (mFilename.substr(fpos)+" - "+AppName).c_str());
  1196. }
  1197. nanoseconds MovieState::getClock()
  1198. {
  1199. if(!mPlaying.load(std::memory_order_relaxed))
  1200. return nanoseconds::zero();
  1201. return get_avtime() - mClockBase;
  1202. }
  1203. nanoseconds MovieState::getMasterClock()
  1204. {
  1205. if(mAVSyncType == SyncMaster::Video)
  1206. return mVideo.getClock();
  1207. if(mAVSyncType == SyncMaster::Audio)
  1208. return mAudio.getClock();
  1209. return getClock();
  1210. }
  1211. nanoseconds MovieState::getDuration()
  1212. { return std::chrono::duration<int64_t,std::ratio<1,AV_TIME_BASE>>(mFormatCtx->duration); }
  1213. int MovieState::streamComponentOpen(int stream_index)
  1214. {
  1215. if(stream_index < 0 || (unsigned int)stream_index >= mFormatCtx->nb_streams)
  1216. return -1;
  1217. /* Get a pointer to the codec context for the stream, and open the
  1218. * associated codec.
  1219. */
  1220. AVCodecCtxPtr avctx(avcodec_alloc_context3(nullptr));
  1221. if(!avctx) return -1;
  1222. if(avcodec_parameters_to_context(avctx.get(), mFormatCtx->streams[stream_index]->codecpar))
  1223. return -1;
  1224. AVCodec *codec = avcodec_find_decoder(avctx->codec_id);
  1225. if(!codec || avcodec_open2(avctx.get(), codec, nullptr) < 0)
  1226. {
  1227. std::cerr<< "Unsupported codec: "<<avcodec_get_name(avctx->codec_id)
  1228. << " (0x"<<std::hex<<avctx->codec_id<<std::dec<<")" <<std::endl;
  1229. return -1;
  1230. }
  1231. /* Initialize and start the media type handler */
  1232. switch(avctx->codec_type)
  1233. {
  1234. case AVMEDIA_TYPE_AUDIO:
  1235. mAudio.mStream = mFormatCtx->streams[stream_index];
  1236. mAudio.mCodecCtx = std::move(avctx);
  1237. mAudioThread = std::thread(std::mem_fn(&AudioState::handler), &mAudio);
  1238. break;
  1239. case AVMEDIA_TYPE_VIDEO:
  1240. mVideo.mStream = mFormatCtx->streams[stream_index];
  1241. mVideo.mCodecCtx = std::move(avctx);
  1242. mVideoThread = std::thread(std::mem_fn(&VideoState::handler), &mVideo);
  1243. break;
  1244. default:
  1245. return -1;
  1246. }
  1247. return stream_index;
  1248. }
  1249. int MovieState::parse_handler()
  1250. {
  1251. int video_index = -1;
  1252. int audio_index = -1;
  1253. /* Dump information about file onto standard error */
  1254. av_dump_format(mFormatCtx.get(), 0, mFilename.c_str(), 0);
  1255. /* Find the first video and audio streams */
  1256. for(unsigned int i = 0;i < mFormatCtx->nb_streams;i++)
  1257. {
  1258. auto codecpar = mFormatCtx->streams[i]->codecpar;
  1259. if(codecpar->codec_type == AVMEDIA_TYPE_VIDEO && video_index < 0)
  1260. video_index = streamComponentOpen(i);
  1261. else if(codecpar->codec_type == AVMEDIA_TYPE_AUDIO && audio_index < 0)
  1262. audio_index = streamComponentOpen(i);
  1263. }
  1264. if(video_index < 0 && audio_index < 0)
  1265. {
  1266. std::cerr<< mFilename<<": could not open codecs" <<std::endl;
  1267. mQuit = true;
  1268. }
  1269. PacketQueue audio_queue, video_queue;
  1270. bool input_finished = false;
  1271. /* Main packet reading/dispatching loop */
  1272. while(!mQuit.load(std::memory_order_relaxed) && !input_finished)
  1273. {
  1274. AVPacket packet;
  1275. if(av_read_frame(mFormatCtx.get(), &packet) < 0)
  1276. input_finished = true;
  1277. else
  1278. {
  1279. /* Copy the packet into the queue it's meant for. */
  1280. if(packet.stream_index == video_index)
  1281. video_queue.put(&packet);
  1282. else if(packet.stream_index == audio_index)
  1283. audio_queue.put(&packet);
  1284. av_packet_unref(&packet);
  1285. }
  1286. do {
  1287. /* Send whatever queued packets we have. */
  1288. if(!audio_queue.empty())
  1289. {
  1290. std::unique_lock<std::mutex> lock(mAudio.mQueueMtx);
  1291. int ret;
  1292. do {
  1293. ret = avcodec_send_packet(mAudio.mCodecCtx.get(), audio_queue.front());
  1294. if(ret != AVERROR(EAGAIN)) audio_queue.pop();
  1295. } while(ret != AVERROR(EAGAIN) && !audio_queue.empty());
  1296. lock.unlock();
  1297. mAudio.mQueueCond.notify_one();
  1298. }
  1299. if(!video_queue.empty())
  1300. {
  1301. std::unique_lock<std::mutex> lock(mVideo.mQueueMtx);
  1302. int ret;
  1303. do {
  1304. ret = avcodec_send_packet(mVideo.mCodecCtx.get(), video_queue.front());
  1305. if(ret != AVERROR(EAGAIN)) video_queue.pop();
  1306. } while(ret != AVERROR(EAGAIN) && !video_queue.empty());
  1307. lock.unlock();
  1308. mVideo.mQueueCond.notify_one();
  1309. }
  1310. /* If the queues are completely empty, or not yet full and there's
  1311. * more input to read, go get more.
  1312. */
  1313. size_t queue_size = audio_queue.totalSize() + video_queue.totalSize();
  1314. if(queue_size == 0 || (queue_size < MAX_QUEUE_SIZE && !input_finished))
  1315. break;
  1316. if(!mPlaying.load(std::memory_order_relaxed))
  1317. {
  1318. if((!mAudio.mCodecCtx || mAudio.isBufferFilled()) &&
  1319. (!mVideo.mCodecCtx || mVideo.isBufferFilled()))
  1320. {
  1321. /* Set the base time 50ms ahead of the current av time. */
  1322. mClockBase = get_avtime() + milliseconds(50);
  1323. mVideo.mCurrentPtsTime = mClockBase;
  1324. mVideo.mFrameTimer = mVideo.mCurrentPtsTime;
  1325. mAudio.startPlayback();
  1326. mPlaying.store(std::memory_order_release);
  1327. }
  1328. }
  1329. /* Nothing to send or get for now, wait a bit and try again. */
  1330. { std::unique_lock<std::mutex> lock(mSendMtx);
  1331. if(mSendDataGood.test_and_set(std::memory_order_relaxed))
  1332. mSendCond.wait_for(lock, milliseconds(10));
  1333. }
  1334. } while(!mQuit.load(std::memory_order_relaxed));
  1335. }
    /* Pass a null packet to finish the send buffers (the receive functions
     * will get AVERROR_EOF when emptied).
     */
    if(mVideo.mCodecCtx)
    {
        { std::lock_guard<std::mutex> lock(mVideo.mQueueMtx);
            avcodec_send_packet(mVideo.mCodecCtx.get(), nullptr);
        }
        mVideo.mQueueCond.notify_one();
    }
    if(mAudio.mCodecCtx)
    {
        { std::lock_guard<std::mutex> lock(mAudio.mQueueMtx);
            avcodec_send_packet(mAudio.mCodecCtx.get(), nullptr);
        }
        mAudio.mQueueCond.notify_one();
    }
    video_queue.clear();
    audio_queue.clear();

    /* All done - wait for the decoder threads to finish. */
    if(mVideoThread.joinable())
        mVideoThread.join();
    if(mAudioThread.joinable())
        mAudioThread.join();

    mVideo.mEOS = true;
    std::unique_lock<std::mutex> lock(mVideo.mPictQMutex);
    while(!mVideo.mFinalUpdate)
        mVideo.mPictQCond.wait(lock);
    lock.unlock();

    SDL_Event evt{};
    evt.user.type = FF_MOVIE_DONE_EVENT;
    SDL_PushEvent(&evt);

    return 0;
}

// Helper struct and output operator to print the time with human-readable
// formatting (e.g. 1h05m03s).
struct PrettyTime {
    seconds mTime;
};
inline std::ostream &operator<<(std::ostream &os, const PrettyTime &rhs)
{
    using hours = std::chrono::hours;
    using minutes = std::chrono::minutes;
    using std::chrono::duration_cast;

    seconds t = rhs.mTime;
    if(t.count() < 0)
    {
        os << '-';
        t *= -1;
    }

    // Only handle up to hour formatting
    if(t >= hours(1))
        os << duration_cast<hours>(t).count() << 'h' << std::setfill('0') << std::setw(2)
           << (duration_cast<minutes>(t).count() % 60) << 'm';
    else
        os << duration_cast<minutes>(t).count() << 'm' << std::setfill('0');
    os << std::setw(2) << (duration_cast<seconds>(t).count() % 60) << 's' << std::setw(0)
       << std::setfill(' ');
    return os;
}

} // namespace
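
/* Program entry point: initializes FFmpeg, SDL video, and the OpenAL device
 * and context, queries optional OpenAL extensions, then plays each file given
 * on the command line in turn while driving the SDL event loop.
 */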
int main(int argc, char *argv[])
{
    std::unique_ptr<MovieState> movState;

    if(argc < 2)
    {
        std::cerr<< "Usage: "<<argv[0]<<" [-device <device name>] [-direct] <files...>" <<std::endl;
        return 1;
    }
    /* Register all formats and codecs */
    av_register_all();
    /* Initialize networking protocols */
    avformat_network_init();

    if(SDL_Init(SDL_INIT_VIDEO | SDL_INIT_TIMER))
    {
        std::cerr<< "Could not initialize SDL - "<<SDL_GetError() <<std::endl;
        return 1;
    }

    /* Make a window to put our video */
    SDL_Window *screen = SDL_CreateWindow(AppName.c_str(), 0, 0, 640, 480, SDL_WINDOW_RESIZABLE);
    if(!screen)
    {
        std::cerr<< "SDL: could not set video mode - exiting" <<std::endl;
        return 1;
    }
    /* Make a renderer to handle the texture image surface and rendering. */
    Uint32 render_flags = SDL_RENDERER_ACCELERATED | SDL_RENDERER_PRESENTVSYNC;
    SDL_Renderer *renderer = SDL_CreateRenderer(screen, -1, render_flags);
    if(renderer)
    {
        SDL_RendererInfo rinf{};
        bool ok = false;

        /* Make sure the renderer supports IYUV textures. If not, fall back
         * to a software renderer. */
        if(SDL_GetRendererInfo(renderer, &rinf) == 0)
        {
            for(Uint32 i = 0;!ok && i < rinf.num_texture_formats;i++)
                ok = (rinf.texture_formats[i] == SDL_PIXELFORMAT_IYUV);
        }
        if(!ok)
        {
            std::cerr<< "IYUV pixelformat textures not supported on renderer "<<rinf.name <<std::endl;
            SDL_DestroyRenderer(renderer);
            renderer = nullptr;
        }
    }
    if(!renderer)
    {
        render_flags = SDL_RENDERER_SOFTWARE | SDL_RENDERER_PRESENTVSYNC;
        renderer = SDL_CreateRenderer(screen, -1, render_flags);
    }
    if(!renderer)
    {
        std::cerr<< "SDL: could not create renderer - exiting" <<std::endl;
        return 1;
    }
    SDL_SetRenderDrawColor(renderer, 0, 0, 0, 255);
    SDL_RenderFillRect(renderer, nullptr);
    SDL_RenderPresent(renderer);
    /* Open an audio device */
    int fileidx = 1;
    ALCdevice *device = [argc,argv,&fileidx]() -> ALCdevice*
    {
        ALCdevice *dev = nullptr;
        if(argc > 3 && strcmp(argv[1], "-device") == 0)
        {
            fileidx = 3;
            dev = alcOpenDevice(argv[2]);
            if(dev) return dev;
            std::cerr<< "Failed to open \""<<argv[2]<<"\" - trying default" <<std::endl;
        }
        return alcOpenDevice(nullptr);
    }();
    ALCcontext *context = alcCreateContext(device, nullptr);
    if(!context || alcMakeContextCurrent(context) == ALC_FALSE)
    {
        std::cerr<< "Failed to set up audio device" <<std::endl;
        if(context)
            alcDestroyContext(context);
        return 1;
    }

    const ALCchar *name = nullptr;
    if(alcIsExtensionPresent(device, "ALC_ENUMERATE_ALL_EXT"))
        name = alcGetString(device, ALC_ALL_DEVICES_SPECIFIER);
    if(!name || alcGetError(device) != ALC_NO_ERROR)
        name = alcGetString(device, ALC_DEVICE_SPECIFIER);
    std::cout<< "Opened \""<<name<<"\"" <<std::endl;
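
    /* Load function pointers for optional extensions. Each one is only used
     * if the corresponding extension string is reported, so a missing
     * extension just means doing without the more accurate clock/latency
     * queries, mappable buffers, or event callbacks it provides.
     */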
    if(alcIsExtensionPresent(device, "ALC_SOFT_device_clock"))
    {
        std::cout<< "Found ALC_SOFT_device_clock" <<std::endl;
        alcGetInteger64vSOFT = reinterpret_cast<LPALCGETINTEGER64VSOFT>(
            alcGetProcAddress(device, "alcGetInteger64vSOFT")
        );
    }
    if(alIsExtensionPresent("AL_SOFT_source_latency"))
    {
        std::cout<< "Found AL_SOFT_source_latency" <<std::endl;
        alGetSourcei64vSOFT = reinterpret_cast<LPALGETSOURCEI64VSOFT>(
            alGetProcAddress("alGetSourcei64vSOFT")
        );
    }
    if(alIsExtensionPresent("AL_SOFTX_map_buffer"))
    {
        std::cout<< "Found AL_SOFT_map_buffer" <<std::endl;
        alBufferStorageSOFT = reinterpret_cast<LPALBUFFERSTORAGESOFT>(
            alGetProcAddress("alBufferStorageSOFT"));
        alMapBufferSOFT = reinterpret_cast<LPALMAPBUFFERSOFT>(
            alGetProcAddress("alMapBufferSOFT"));
        alUnmapBufferSOFT = reinterpret_cast<LPALUNMAPBUFFERSOFT>(
            alGetProcAddress("alUnmapBufferSOFT"));
    }
    if(alIsExtensionPresent("AL_SOFTX_events"))
    {
        std::cout<< "Found AL_SOFT_events" <<std::endl;
        alEventControlSOFT = reinterpret_cast<LPALEVENTCONTROLSOFT>(
            alGetProcAddress("alEventControlSOFT"));
        alEventCallbackSOFT = reinterpret_cast<LPALEVENTCALLBACKSOFT>(
            alGetProcAddress("alEventCallbackSOFT"));
    }
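
    /* The -direct option requests AL_SOFT_direct_channels, which plays
     * multi-channel audio directly on the matching outputs instead of
     * panning/virtualizing the channels.
     */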
    if(fileidx < argc && strcmp(argv[fileidx], "-direct") == 0)
    {
        ++fileidx;
        if(!alIsExtensionPresent("AL_SOFT_direct_channels"))
            std::cerr<< "AL_SOFT_direct_channels not supported for direct output" <<std::endl;
        else
        {
            std::cout<< "Found AL_SOFT_direct_channels" <<std::endl;
            EnableDirectOut = true;
        }
    }
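
    /* Open and prepare the first playable file from the remaining arguments. */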
    while(fileidx < argc && !movState)
    {
        movState = std::unique_ptr<MovieState>(new MovieState(argv[fileidx++]));
        if(!movState->prepare()) movState = nullptr;
    }
    if(!movState)
    {
        std::cerr<< "Could not start a video" <<std::endl;
        return 1;
    }
    movState->setTitle(screen);

    /* Default to going to the next movie at the end of one. */
    enum class EomAction {
        Next, Quit
    } eom_action = EomAction::Next;
    seconds last_time(-1);
    SDL_Event event;
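
    /* Main event loop: wait for SDL events with a 10ms timeout so the
     * current/total time display stays updated between events.
     */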
    while(1)
    {
        int have_evt = SDL_WaitEventTimeout(&event, 10);

        auto cur_time = std::chrono::duration_cast<seconds>(movState->getMasterClock());
        if(cur_time != last_time)
        {
            auto end_time = std::chrono::duration_cast<seconds>(movState->getDuration());
            std::cout<< "\r "<<PrettyTime{cur_time}<<" / "<<PrettyTime{end_time} <<std::flush;
            last_time = cur_time;
        }
        if(!have_evt) continue;

        switch(event.type)
        {
            case SDL_KEYDOWN:
                switch(event.key.keysym.sym)
                {
                    case SDLK_ESCAPE:
                        movState->mQuit = true;
                        eom_action = EomAction::Quit;
                        break;

                    case SDLK_n:
                        movState->mQuit = true;
                        eom_action = EomAction::Next;
                        break;

                    default:
                        break;
                }
                break;

            case SDL_WINDOWEVENT:
                switch(event.window.event)
                {
                    case SDL_WINDOWEVENT_RESIZED:
                        SDL_SetRenderDrawColor(renderer, 0, 0, 0, 255);
                        SDL_RenderFillRect(renderer, nullptr);
                        break;

                    default:
                        break;
                }
                break;

            case SDL_QUIT:
                movState->mQuit = true;
                eom_action = EomAction::Quit;
                break;
            case FF_UPDATE_EVENT:
                reinterpret_cast<VideoState*>(event.user.data1)->updatePicture(
                    screen, renderer
                );
                break;

            case FF_REFRESH_EVENT:
                reinterpret_cast<VideoState*>(event.user.data1)->refreshTimer(
                    screen, renderer
                );
                break;

            case FF_MOVIE_DONE_EVENT:
                std::cout<<'\n';
                last_time = seconds(-1);
                if(eom_action != EomAction::Quit)
                {
                    /* Try to open and play the next file on the command line. */
                    movState = nullptr;
                    while(fileidx < argc && !movState)
                    {
                        movState = std::unique_ptr<MovieState>(new MovieState(argv[fileidx++]));
                        if(!movState->prepare()) movState = nullptr;
                    }
                    if(movState)
                    {
                        movState->setTitle(screen);
                        break;
                    }
                }

                /* Nothing more to play. Shut everything down and quit. */
                movState = nullptr;

                alcMakeContextCurrent(nullptr);
                alcDestroyContext(context);
                alcCloseDevice(device);

                SDL_DestroyRenderer(renderer);
                renderer = nullptr;
                SDL_DestroyWindow(screen);
                screen = nullptr;

                SDL_Quit();
                exit(0);

            default:
                break;
        }
    }

    std::cerr<< "SDL_WaitEvent error - "<<SDL_GetError() <<std::endl;
    return 1;
}