/******************************************************************************
   'SoundRecord' methods on DirectSound always require 'SoundAPILock' locks,
   not because of API thread-safety, but because:
   -objects are added to / removed from the 'SoundRecords' list
   -objects can be processed on the main thread by the user, and on the sound thread by the engine
/******************************************************************************/
#include "stdafx.h"
namespace EE{
#if APPLE
#if MAC
ASSERT(SIZE(AudioDeviceID)==MEMBER_SIZE(UID, i[0])); // because we're storing 'AudioDeviceID' in the 'UID.i[0]'
#endif
#define kOutputBus 0
#define kInputBus  1
#endif
/******************************************************************************/
#if WINDOWS_NEW || !APPLE && !ANDROID && (DIRECT_SOUND || OPEN_AL)
Memc<SoundRecord*> SoundRecords;
#endif
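// 'SoundRecords' holds every active recorder; per the note at the top of this file it can be accessed
// from both the user (main) thread and the engine sound thread, which is why every add/exclude below
// happens under 'SoundAPILock'.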
/******************************************************************************/
#if WINDOWS_NEW
using namespace Microsoft::WRL;
using namespace Windows::Media::Devices;
struct AudioClientGetter : RuntimeClass<RuntimeClassFlags<ClassicCom>, FtmBase, IActivateAudioInterfaceCompletionHandler>
{
   Bool           finished;
   IAudioClient3 *audio_client;

   virtual HRESULT STDMETHODCALLTYPE ActivateCompleted(IActivateAudioInterfaceAsyncOperation *operation)
   {
      finished=true; return S_OK;
   }
   AudioClientGetter()
   {
      audio_client=null;
      if(App.hwnd()) // can call methods only if app was initialized, otherwise it will crash
      {
         finished=false;
         IActivateAudioInterfaceAsyncOperation *audio_activate;
         auto device_name=MediaDevice::GetDefaultAudioCaptureId(AudioDeviceRole::Default); // this will be something like "\\?\SWD#MMDEVAPI#{0.0.1.00000000}.{5871ae83-5ebe-46cc-9bce-b486b524e679}#{2eef81be-33fa-4800-9670-1cd474972c3f}"
         if(OK(ActivateAudioInterfaceAsync(device_name->Data(), __uuidof(IAudioClient3), null, this, &audio_activate)))
         {
            if(App.mainThread())App.loopUntil(finished, true);else for(; !finished; )Time.wait(1); // wait here because the user may take a long time to grant permission, and busy-looping without sleeping would cause full CPU usage
            HRESULT hr; IUnknown *ac=null; audio_activate->GetActivateResult(&hr, &ac); audio_activate->Release(); if(ac)
            {
               ac->QueryInterface(__uuidof(IAudioClient3), (Ptr*)&audio_client);
               ac->Release();
            }
         }
      }
   }
};
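// 'AudioClientGetter' wraps WASAPI's asynchronous activation: 'ActivateAudioInterfaceAsync' invokes
// 'ActivateCompleted' once activation has finished (which can take a while if the user is being asked
// for microphone permission), after which 'GetActivateResult' yields the 'IAudioClient3', or null on
// failure/denial. '_SoundRecord.create' below constructs a temporary 'AudioClientGetter' and takes
// over its 'audio_client'.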
struct _SoundRecord
{
   Int                  block;
   IAudioClient3       *audio_client;
   IAudioCaptureClient *audio_capture_client;
   SyncEvent            samples_ready;

  ~_SoundRecord() {del();}
   void del()
   {
      RELEASE(audio_capture_client);
      RELEASE(audio_client);
   }
   _SoundRecord()
   {
      block=0;
      audio_client=null;
      audio_capture_client=null;
   }
   Bool create(Int bits, Int channels, Int frequency)
   {
      del();
      if(audio_client=AudioClientGetter().audio_client)
      {
         WAVEFORMATEX *wf=null; audio_client->GetMixFormat(&wf); if(wf)
         {
            C Int bytes=bits/8;
            Zero(*wf);
            wf->wFormatTag     =WAVE_FORMAT_PCM;
            wf->nChannels      =channels;
            wf->nSamplesPerSec =frequency;
            wf->wBitsPerSample =bits;
            wf->nBlockAlign    =channels*bytes;
            wf->nAvgBytesPerSec=wf->nBlockAlign*frequency;
            UInt default_frames, fundamental_frames, min_frames, max_frames, frames;
            if(OK(audio_client->GetSharedModeEnginePeriod(wf, &default_frames, &fundamental_frames, &min_frames, &max_frames)))
            if(OK(audio_client->InitializeSharedAudioStream(AUDCLNT_STREAMFLAGS_EVENTCALLBACK, min_frames, wf, null)))
            if(OK(audio_client->GetBufferSize(&frames)))
            if(OK(audio_client->GetService(__uuidof(IAudioCaptureClient), (Ptr*)&audio_capture_client)))
            if(OK(audio_client->SetEventHandle(samples_ready.handle()))) // event handle is required when using 'AUDCLNT_STREAMFLAGS_EVENTCALLBACK'
            if(OK(audio_client->Start()))
            {
               block=wf->nBlockAlign; // get this once initialization finishes, in case it differs from what was requested
               return true;
            }
         }
      }
      del(); return false;
   }
   void update(SoundRecord &sound_record)
   {
      for(;;)
      {
         BYTE *data; UINT32 frames; DWORD flags; UINT64 pos, pc_pos;
         if(OK(audio_capture_client->GetNextPacketSize(&frames))
         && frames>0
         && OK(audio_capture_client->GetBuffer(&data, &frames, &flags, &pos, &pc_pos)))
         {
            Int size=frames*block;
            if(flags&AUDCLNT_BUFFERFLAGS_SILENT)Zero(data, size);
          //if(flags&AUDCLNT_BUFFERFLAGS_DATA_DISCONTINUITY){extern Int lost; lost++;}
            sound_record.receivedData(data, size);
            audio_capture_client->ReleaseBuffer(frames);
         }else break;
      }
   }
};
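// WASAPI notes: 'create' requests the smallest shared-mode period ('min_frames') together with an
// event-driven stream, so 'samples_ready' is signaled whenever a capture period completes. 'update'
// then drains every pending packet ('GetNextPacketSize' / 'GetBuffer' / 'ReleaseBuffer'); packets
// flagged 'AUDCLNT_BUFFERFLAGS_SILENT' carry no meaningful data and are zeroed before being forwarded
// to 'SoundRecord.receivedData'.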
#endif
/******************************************************************************/
SoundRecord::SoundRecord()
{
   _handle=null;
#if WINDOWS_OLD
   _pos=_size=0;
#endif
#if APPLE
   _flags=0;
#elif ANDROID
#elif DIRECT_SOUND
   _dscb=null;
#elif OPEN_AL
   _block=0;
#endif
}
void SoundRecord::del()
{
#if WINDOWS_NEW
   if(_handle)
   {
      SafeSyncLocker locker(SoundAPILock);
      SoundRecords.exclude(this);
      Delete(_handle);
   }
#elif APPLE
   if(_handle)
   {
      OSStatus status=AudioOutputUnitStop(_handle);
      AudioUnitUninitialize(_handle);
      AudioComponentInstanceDispose(_handle);
      _handle=null;
   }
   _flags=0;
#elif ANDROID
   if(_handle)
   {
      JNI jni; if(jni && ActivityClass)
      {
         Str8 signature=S8+"(L"+AndroidPackageName+"/EsenthelActivity$EsenthelAudioRecord;)V"; signature.replace('.', '/');
         if(JMethodID delAudioRecord=jni->GetStaticMethodID(ActivityClass, "delAudioRecord", signature))
            jni->CallStaticVoidMethod(ActivityClass, delAudioRecord, jobject(_handle));
         jni->DeleteGlobalRef(jobject(_handle));
      }
      _handle=null;
   }
#elif DIRECT_SOUND
   if(_handle || _dscb)
   {
      SafeSyncLocker locker(SoundAPILock);
      SoundRecords.exclude(this);
      RELEASE(_dscb  );
      RELEASE(_handle);
   }
#elif OPEN_AL
   if(_handle)
   {
      SafeSyncLocker locker(SoundAPILock);
      SoundRecords.exclude(this);
      alcCaptureStop       (_handle);
      alcCaptureCloseDevice(_handle);
      _handle=null;
   }
   _block=0;
#endif
#if WINDOWS_OLD
   _pos=_size=0;
#endif
}
/******************************************************************************/
#if APPLE
static OSStatus AudioInputProc(void *inRefCon, AudioUnitRenderActionFlags *ioActionFlags, const AudioTimeStamp *inTimeStamp, UInt32 inBusNumber, UInt32 inNumberFrames, AudioBufferList *ioData)
{
   SoundRecord &sr=*(SoundRecord*)inRefCon;
   union
   {
      Byte            extra[SIZE(AudioBufferList)+MEMBER_SIZE(AudioBufferList, mBuffers[0])];
      AudioBufferList list;
   }buffer;
   Memt<Byte> temp;
   UInt flags          =sr._flags;
   Bool int_signed     =(flags>>0)&1,
        non_interleaved=(flags>>1)&1,
        want_stereo    =(flags>>2)&1,
         got_stereo    =(flags>>3)&1;
   UInt want_bytes     =(flags>>4)&7,
         got_bytes     =(flags>>7)&7,
         got_channels  = got_stereo+1,  got_block= got_bytes* got_channels,  got_data_size=inNumberFrames* got_block,
        want_channels  =want_stereo+1, want_block=want_bytes*want_channels, want_data_size=inNumberFrames*want_block;
   Bool same_format    =(want_stereo==got_stereo && want_bytes==got_bytes && !non_interleaved);
   temp.setNum(same_format ? want_data_size : got_data_size+want_data_size);
   if(non_interleaved)
   {
      UInt data_size_2=got_data_size/2;
      buffer.list.mNumberBuffers=2;
      buffer.list.mBuffers[0].mNumberChannels=buffer.list.mBuffers[1].mNumberChannels=1;
      buffer.list.mBuffers[0].mDataByteSize  =buffer.list.mBuffers[1].mDataByteSize  =data_size_2;
      buffer.list.mBuffers[0].mData=temp.data();
      buffer.list.mBuffers[1].mData=temp.data()+data_size_2;
   }else
   {
      buffer.list.mNumberBuffers=1;
      buffer.list.mBuffers[0].mNumberChannels=got_channels;
      buffer.list.mBuffers[0].mDataByteSize  =got_data_size;
      buffer.list.mBuffers[0].mData=temp.data();
   }
   if(AudioUnitRender(sr._handle, ioActionFlags, inTimeStamp, inBusNumber, inNumberFrames, &buffer.list)==noErr)
   {
      if(same_format)
      {
         sr.receivedData(temp.data(), temp.elms());
      }else
      {
         CPtr src=temp.data(), src_left, src_right;
         Ptr dest=temp.data()+got_data_size, dest_temp=dest;
         Int src_inc;
         src_left=src;
         if(non_interleaved){src_right=(Byte*)src+got_data_size/2; src_inc=got_bytes  ;}else
         if(got_stereo     ){src_right=(Byte*)src+got_bytes      ; src_inc=got_bytes*2;}else
                            {src_right=       src                ; src_inc=got_bytes  ;}
         Bool dest_bit16=(want_bytes==2);
         REP(inNumberFrames)
         {
            Int left, right;
            switch(got_bytes)
            {
               case 1: if(int_signed){left=(*(I8 *)src_left<<8)        ; right=(*(I8 *)src_right<<8)        ;}
                       else          {left=(*(U8 *)src_left<<8)-32768  ; right=(*(U8 *)src_right<<8)-32768  ;} break;
               case 2: if(int_signed){left= *(I16*)src_left            ; right= *(I16*)src_right            ;}
                       else          {left= *(U16*)src_left     -32768 ; right= *(U16*)src_right     -32768 ;} break;
               case 4:                left=Round(*(Flt*)src_left*32767); right=Round(*(Flt*)src_right*32767); break;
            }
            src_left =(Byte*)src_left +src_inc;
            src_right=(Byte*)src_right+src_inc;
            if(want_stereo)
            {
               if(dest_bit16)
               {
                  ((I16*)dest)[0]=left ;
                  ((I16*)dest)[1]=right;
                  dest=(I16*)dest+2;
               }else
               {
                  ((U8*)dest)[0]=(left >>8)+128;
                  ((U8*)dest)[1]=(right>>8)+128;
                  dest=(U8*)dest+2;
               }
            }else
            {
               Int sample=((left+right)>>1);
               if(dest_bit16)
               {
                  *(I16*)dest=sample; dest=(I16*)dest+1;
               }else
               {
                  *(U8*)dest=(sample>>8)+128; dest=(U8*)dest+1;
               }
            }
         }
         sr.receivedData(dest_temp, want_data_size);
      }
   }
   return noErr;
}
#endif
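// Conversion notes for 'AudioInputProc' above: source samples are first widened to signed 16-bit, e.g.
// (worked examples) an unsigned 8-bit sample of 200 becomes (200<<8)-32768 = 18432, and a 32-bit float
// sample of 0.25f becomes Round(0.25f*32767) = 8192. When mono output was requested from stereo input the
// channels are averaged: sample=(left+right)>>1; 8-bit output is converted back to unsigned via (sample>>8)+128.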
Bool SoundRecord::create(Device *device, Int bits, Int channels, Int frequency) // this needs to be called after the 'DirectSound' playback device was created
{
   C Int bytes=bits/8, block=channels*bytes;
#if WINDOWS_NEW
   del();
   _SoundRecord sr; if(sr.create(bits, channels, frequency))
   {
      New(_handle); Swap(*_handle, sr);
      SafeSyncLocker locker(SoundAPILock);
      SoundRecords.add(this);
      return true;
   }
#elif APPLE
   del();
   if(channels>=1 && channels<=2 && (bits==8 || bits==16))
   {
      UInt32   size;
      OSStatus status;
#if MAC
      AudioDeviceID device_id=(device ? device->_id.i[0] : kAudioDeviceUnknown);
      if(device_id==kAudioDeviceUnknown)
      {
         AudioObjectPropertyAddress propertyAddress;
         propertyAddress.mSelector=kAudioHardwarePropertyDefaultInputDevice;
         propertyAddress.mScope   =kAudioObjectPropertyScopeGlobal;
         propertyAddress.mElement =kAudioObjectPropertyElementMaster;
         size=SIZE(device_id); status=AudioObjectGetPropertyData(kAudioObjectSystemObject, &propertyAddress, 0, null, &size, &device_id);
      }
      if(device_id!=kAudioDeviceUnknown)
#elif IOS
      [[AVAudioSession sharedInstance] setCategory:AVAudioSessionCategoryPlayAndRecord error:nil];
      [[AVAudioSession sharedInstance] setActive:YES error:nil];
#endif
      {
         AudioComponentDescription desc;
         desc.componentType=kAudioUnitType_Output;
#if MAC
         desc.componentSubType=kAudioUnitSubType_HALOutput;
#elif IOS
         desc.componentSubType=kAudioUnitSubType_RemoteIO;
#else
#error unknown platform
#endif
         desc.componentManufacturer=kAudioUnitManufacturer_Apple;
         desc.componentFlags    =0;
         desc.componentFlagsMask=0;
         if(AudioComponent inputComponent=AudioComponentFindNext(null, &desc))
         {
            AudioComponentInstanceNew(inputComponent, &_handle);
            if(_handle)
            {
               UInt32 zero=0, one=1;
#if MAC
               status=AudioUnitSetProperty(_handle, kAudioOutputUnitProperty_EnableIO, kAudioUnitScope_Output, kOutputBus, &zero, SIZE(zero)); // disable output
#endif
               if(AudioUnitSetProperty(_handle, kAudioOutputUnitProperty_EnableIO, kAudioUnitScope_Input , kInputBus , &one , SIZE(one ))==noErr) // enable input
#if MAC
               if(AudioUnitSetProperty(_handle, kAudioOutputUnitProperty_CurrentDevice, kAudioUnitScope_Global, kOutputBus, &device_id, SIZE(device_id))==noErr) // set device
#endif
               {
                  AudioStreamBasicDescription format;
                  format.mSampleRate      =frequency;
                  format.mFormatID        =kAudioFormatLinearPCM;
                  format.mFormatFlags     =((bits==16) ? kAudioFormatFlagIsSignedInteger : 0)|kAudioFormatFlagIsPacked;
                  format.mBytesPerPacket  =bytes*channels;
                  format.mFramesPerPacket =1;
                  format.mBytesPerFrame   =bytes*channels;
                  format.mChannelsPerFrame=channels;
                  format.mBitsPerChannel  =bits;
                  format.mReserved        =0;
                  // set desired format
                  status=AudioUnitSetProperty(_handle, kAudioUnitProperty_StreamFormat, kAudioUnitScope_Output, kInputBus, &format, SIZE(format)); //LogN(S+"set:"+Int(status));
                  // get actual format
                  size=SIZE(format);
                  status=AudioUnitGetProperty(_handle, kAudioUnitProperty_StreamFormat, kAudioUnitScope_Output, kInputBus, &format, &size); //LogN(S+"get:"+Int(status)+", frequency:"+Round(format.mSampleRate)+" flags:"+UInt(format.mFormatFlags)+" bits:"+Int(format.mBitsPerChannel)+" ch:"+Int(format.mChannelsPerFrame)+" bytespp:"+Int(format.mBytesPerPacket)+" bytespf:"+Int(format.mBytesPerFrame)+" framespp:"+Int(format.mFramesPerPacket));
                  UInt got_channels=format.mChannelsPerFrame,
                       got_bits    =format.mBitsPerChannel;
                  if(status==noErr && got_channels>=1 && got_channels<=2 && (got_bits==8 || got_bits==16 || got_bits==32))
                  {
                   //UInt32 samples=0; size=SIZE(samples); status=AudioUnitGetProperty(_handle, kAudioDevicePropertyBufferFrameSize, kAudioUnitScope_Global, 0, &samples, &size);
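                     // '_flags' packs the negotiated format so 'AudioInputProc' can decode it without extra state:
                     //    bit  0   = source samples are signed integers
                     //    bit  1   = source is stereo and non-interleaved (separate left/right buffers)
                     //    bit  2   = requested (destination) format is stereo
                     //    bit  3   = actual (source) format is stereo
                     //    bits 4-6 = requested bytes per sample, bits 7-9 = actual bytes per sample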
                     _flags=(
                          ((                    FlagTest(format.mFormatFlags, kAudioFormatFlagIsSignedInteger ))<<0)
                        | ((got_channels==2 && FlagTest(format.mFormatFlags, kAudioFormatFlagIsNonInterleaved))<<1)
                        | ((    channels==2)<<2)
                        | ((got_channels==2)<<3)
                        | ((    bits    /8 )<<4)
                        | ((got_bits    /8 )<<7));
                     AURenderCallbackStruct callback;
                     callback.inputProc      =AudioInputProc;
                     callback.inputProcRefCon=this;
                     if(AudioUnitSetProperty(_handle, kAudioOutputUnitProperty_SetInputCallback, kAudioUnitScope_Global, 0, &callback, SIZE(callback))==noErr)
                     if(AudioUnitInitialize(_handle)==noErr)
                     if(AudioOutputUnitStart(_handle)==noErr)
                        return true;
                  }
               }
            }
         }
      }
   }
#elif ANDROID
   del();
   JNI jni;
   if( jni && ActivityClass)
   {
      RequirePermission(PERMISSION_SOUND_RECORD);
      Str8 signature=S8+"(JIII)L"+AndroidPackageName+"/EsenthelActivity$EsenthelAudioRecord;"; signature.replace('.', '/');
      if(JMethodID newAudioRecord=jni->GetStaticMethodID(ActivityClass, "newAudioRecord", signature))
      if(JObject audio_record=JObject(jni, jni->CallStaticObjectMethod(ActivityClass, newAudioRecord, jlong(this), jint(bits), jint(channels), jint(frequency))))
      {
         audio_record.makeGlobal(); _handle=audio_record(); audio_record.clear();
         return true;
      }
   }
#elif DIRECT_SOUND
   SafeSyncLocker locker(SoundAPILock);
   del();
   if(channels>=1 && channels<=2 && (bits==8 || bits==16))
   {
      DirectSoundCaptureCreate8(device ? &device->_id.guid() : null, &_handle, null);
      if(_handle)
      {
         WAVEFORMATEX wfx; Zero(wfx);
         wfx.wFormatTag     =WAVE_FORMAT_PCM;
         wfx.nChannels      =channels;
         wfx.nSamplesPerSec =frequency;
         wfx.wBitsPerSample =bits;
         wfx.nBlockAlign    =block;
         wfx.nAvgBytesPerSec=block*frequency;
         DSCBUFFERDESC desc; Zero(desc);
         desc.dwSize=SIZE(desc);
         desc.dwFlags=DSCBCAPS_WAVEMAPPED; // 'DSCBCAPS_WAVEMAPPED'=The Win32 wave mapper will be used for formats not supported by the device
         desc.lpwfxFormat=&wfx;
         desc.dwBufferBytes=(frequency*SOUND_TIME_RECORD/1000)*block; // mul by 'block' last to make sure that size is aligned to it
         _handle->CreateCaptureBuffer(&desc, &_dscb, null);
         if(_dscb)
         {
            _size=desc.dwBufferBytes;
            _dscb->Start(DSCBSTART_LOOPING);
          //Zero(wfx); _dscb->GetFormat(&wfx, SIZE(wfx), null);
            SoundRecords.add(this);
            return true;
         }
      }
   }
#elif OPEN_AL
   SafeSyncLocker locker(SoundAPILock);
   del();
   if(channels>=1 && channels<=2 && (bits==8 || bits==16))
   {
      if(_handle=alcCaptureOpenDevice(device ? Str8(device->name)() : null, frequency,
         (channels==1) ? ((bits==8) ? AL_FORMAT_MONO8   : AL_FORMAT_MONO16  )
                       : ((bits==8) ? AL_FORMAT_STEREO8 : AL_FORMAT_STEREO16), frequency*SOUND_TIME_RECORD/1000))
      {
         alcCaptureStart(_handle);
         _block=block;
         SoundRecords.add(this);
         return true;
      }
   }
#endif
   del(); return false;
}
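/* Illustrative usage sketch (assumptions: 'receivedData' is virtual and overridable by user code and takes
   (CPtr data, Int size) - its declaration is not part of this file):

      struct MicCapture : SoundRecord
      {
         virtual void receivedData(CPtr data, Int size)override {..} // consume raw PCM samples here
      };

      MicCapture mic;
      if(mic.create(null, 16, 1, 22050)) // default device, 16-bit, mono, 22.05 kHz
         {..} // samples arrive in 'receivedData', driven by 'update' / the platform callbacks above
*/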
/******************************************************************************/
#if DIRECT_SOUND
static BOOL CALLBACK SoundRecordDeviceCallback(LPGUID lpGuid, LPCWSTR lpcstrDescription, LPCWSTR lpcstrModule, LPVOID lpContext)
{
   MemPtr<SoundRecord::Device> &devices=*(MemPtr<SoundRecord::Device>*)lpContext;
   SoundRecord::Device         &device =devices.New();
   device.name=lpcstrDescription;
   if(lpGuid)device._id.guid()=*lpGuid;
   return true; // keep going
}
#endif
void SoundRecord::GetDevices(MemPtr<Device> devices)
{
   devices.clear();
#if WINDOWS_NEW
   if(auto id=MediaDevice::GetDefaultAudioCaptureId(AudioDeviceRole::Default))if(Is(id->Data()))
      devices.New().name="Microphone";
#elif MAC
   AudioObjectPropertyAddress propertyAddress=
   {
      kAudioHardwarePropertyDevices,
      kAudioObjectPropertyScopeGlobal,
      kAudioObjectPropertyElementMaster,
   };
   UInt32 dataSize=0; if(AudioObjectGetPropertyDataSize(kAudioObjectSystemObject, &propertyAddress, 0, null, &dataSize)==kAudioHardwareNoError)
   {
      MemtN<AudioDeviceID, 16> audioDevices; audioDevices.setNum(dataSize/SIZE(AudioDeviceID));
      if(audioDevices.elms())
      {
         dataSize=audioDevices.elms()*SIZE(AudioDeviceID);
         if(AudioObjectGetPropertyData(kAudioObjectSystemObject, &propertyAddress, 0, null, &dataSize, audioDevices.data())==kAudioHardwareNoError)
         {
            // Iterate through all the devices and determine which are input-capable
            propertyAddress.mScope=kAudioDevicePropertyScopeInput;
            FREPA(audioDevices)
            {
               // check if this is an input device
               propertyAddress.mSelector=kAudioDevicePropertyStreams;
               AudioObjectGetPropertyDataSize(audioDevices[i], &propertyAddress, 0, null, &dataSize);
               UInt streamCount=dataSize/SIZE(AudioStreamID);
               if( streamCount>0)
               {
                  Device &device=devices.New();
                  device._id.i[0]=audioDevices[i];
                  // Query device name
                  CFStringRef deviceName=null; dataSize=SIZE(deviceName);
                  propertyAddress.mSelector=kAudioDevicePropertyDeviceNameCFString;
                  if(AudioObjectGetPropertyData(audioDevices[i], &propertyAddress, 0, null, &dataSize, &deviceName)==kAudioHardwareNoError) {}
                  if(deviceName)
                  {
                     Char8 str[1024]; str[0]='\0'; CFStringGetCString(deviceName, str, Elms(str), kCFStringEncodingUTF8);
                     device.name=FromUTF8(str);
                     CFRelease(deviceName);
                  }
                  /*
                  // Query device UID
                  CFStringRef deviceUID=null; dataSize=SIZE(deviceUID);
                  propertyAddress.mSelector=kAudioDevicePropertyDeviceUID;
                  if(AudioObjectGetPropertyData(audioDevices[i], &propertyAddress, 0, null, &dataSize, &deviceUID)==kAudioHardwareNoError) {}
                  if(deviceUID)CFRelease(deviceUID);
                  // Query device manufacturer
                  CFStringRef deviceManufacturer=null; dataSize=SIZE(deviceManufacturer);
                  propertyAddress.mSelector=kAudioDevicePropertyDeviceManufacturerCFString;
                  if(AudioObjectGetPropertyData(audioDevices[i], &propertyAddress, 0, null, &dataSize, &deviceManufacturer)==kAudioHardwareNoError) {}
                  if(deviceManufacturer)CFRelease(deviceManufacturer);
                  // Determine if the device is an input device (it is an input device if it has input channels)
                  dataSize=0;
                  propertyAddress.mSelector=kAudioDevicePropertyStreamConfiguration;
                  if(AudioObjectGetPropertyDataSize(audioDevices[i], &propertyAddress, 0, null, &dataSize)==kAudioHardwareNoError)
                  {
                     MemtN<AudioBufferList, 16> bufferList; bufferList.setNum(dataSize/SIZE(AudioBufferList));
                   //LogN(S+"bufferList:"+bufferList.elms());
                     if(bufferList.elms())
                     {
                        dataSize=bufferList.elms()*SIZE(AudioBufferList);
                        if(AudioObjectGetPropertyData(audioDevices[i], &propertyAddress, 0, null, &dataSize, bufferList.data())==kAudioHardwareNoError)
                        {
                         //LogN(S+"bufferList[0].mNumberBuffers:"+bufferList[0].mNumberBuffers);
                           if(bufferList[0].mNumberBuffers)
                           {
                           }
                        }
                     }
                  }*/
               }
            }
         }
      }
   }
#elif IOS
   if([AVAudioSession sharedInstance].inputAvailable)devices.New().name="Microphone";
#elif ANDROID
   JNI jni;
   if( jni && ActivityClass)
   if(JMethodID hasAudioRecord=jni->GetStaticMethodID(ActivityClass, "hasAudioRecord", "()Z"))
   if(jni->CallStaticBooleanMethod(ActivityClass, hasAudioRecord))
      devices.New().name="Microphone";
#elif DIRECT_SOUND
   DirectSoundCaptureEnumerate(SoundRecordDeviceCallback, &devices);
#elif OPEN_AL
   if(CChar8 *s=alcGetString(null, ALC_CAPTURE_DEVICE_SPECIFIER))
      for(; *s; s+=Length(s)+1)
         devices.New().name=s;
#endif
}
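// Note: only the MAC, DIRECT_SOUND and OPEN_AL paths enumerate individual capture devices; the
// WINDOWS_NEW, IOS and ANDROID paths only check whether any capture input is available and then report
// a single generic "Microphone" entry.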
/******************************************************************************/
Int SoundRecord::curPosNoLock()C
{
#if DIRECT_SOUND
   DWORD capture=0, read=0; // The capture cursor is ahead of the read cursor. The data after the read position up to and including the capture position is not necessarily valid data.
   if(_dscb)_dscb->GetCurrentPosition(&capture, &read);
   return read;
#else
   return 0;
#endif
}
void SoundRecord::updateNoLock()
{
#if WINDOWS_NEW
   if(_handle)_handle->update(T);
#elif APPLE
#elif ANDROID
#elif DIRECT_SOUND
   if(_dscb)
   {
      Int cur_pos =curPosNoLock();
      if( cur_pos!=_pos)
      {
         Int size=cur_pos-_pos; if(size<0)size+=_size;
         Ptr data=null, data2=null; DWORD locked=0, locked2=0;
         if(OK(_dscb->Lock(_pos, size, &data, &locked, &data2, &locked2, 0)))
         {
            if(data )receivedData(data , locked );
            if(data2)receivedData(data2, locked2);
            _dscb->Unlock(data, locked, data2, locked2);
         }
         _pos=cur_pos;
      }
   }
#elif OPEN_AL
   if(_handle)
   {
   again:
      ALint samples=0; alcGetIntegerv(_handle, ALC_CAPTURE_SAMPLES, 1, &samples);
      if(   samples>0)
      {
         Byte data[64*1024]; Int size=samples*_block; Bool full=(size>=SIZE(data)); if(full)
         {
            samples=SIZEU(data)/_block;
            size   =SIZE (data);
         }
         alcCaptureSamples(_handle, data, samples);
         receivedData(data, size);
         if(full)goto again;
      }
   }
#endif
}
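// DirectSound path above: the capture buffer is circular, so the amount of newly captured data is computed
// with wrap-around, e.g. (example values) _size=88200, _pos=80000, cur_pos=2000 -> size=2000-80000+88200=10200
// bytes, which 'Lock' returns as two regions (8200 bytes at the end of the buffer + 2000 bytes from its start).
// OpenAL path: pending samples are drained through a 64 KB stack buffer; if more data is available than fits,
// the buffer is filled completely and the loop repeats via 'goto again' until everything has been delivered.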
void SoundRecord::update()
{
   SafeSyncLocker locker(SoundAPILock);
   updateNoLock();
}
/******************************************************************************/
} // namespace EE
/******************************************************************************/
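/* Android bridge: the Java-side 'EsenthelAudioRecord' object created in 'SoundRecord::create' (via
   'newAudioRecord', which received the 'SoundRecord' pointer as a jlong) presumably calls
   'com.esenthel.Native.audioRecord' with that pointer and a direct ByteBuffer of captured PCM data,
   which is forwarded here to 'receivedData'. */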
#if ANDROID
extern "C"
{
   JNIEXPORT void JNICALL Java_com_esenthel_Native_audioRecord(JNIEnv *env, jclass clazz, jlong sound_record, jobject buf, jint size)
   {
      if(Ptr data=env->GetDirectBufferAddress(buf))
      {
         SoundRecord *sr=(SoundRecord*)sound_record;
         sr->receivedData(data, size);
      }
   }
}
#endif
/******************************************************************************/