// Copyright 2009-2021 Intel Corporation
// SPDX-License-Identifier: Apache-2.0

#include "scene.h"

#include "../../common/tasking/taskscheduler.h"
#include "../bvh/bvh4_factory.h"
#include "../bvh/bvh8_factory.h"
#include "../../common/algorithms/parallel_reduce.h"

#if defined(EMBREE_SYCL_SUPPORT)
#  include "../sycl/rthwif_embree_builder.h"
#endif

namespace embree
{
  struct TaskGroup {
    /*! global lock step task scheduler */
#if defined(TASKING_INTERNAL)
    MutexSys schedulerMutex;
    Ref<TaskScheduler> scheduler;
#elif defined(TASKING_TBB) && TASKING_TBB_USE_TASK_ISOLATION
    tbb::isolated_task_group group;
#elif defined(TASKING_TBB)
    tbb::task_group group;
#elif defined(TASKING_PPL)
    concurrency::task_group group;
#endif
  };
  /* error-raising stubs installed while the scene is not committed or an
     intersect/occluded variant is not enabled */
  void missing_rtcCommit() { throw_RTCError(RTC_ERROR_INVALID_OPERATION,"scene not committed"); }
  void invalid_rtcIntersect1() { throw_RTCError(RTC_ERROR_INVALID_OPERATION,"rtcIntersect and rtcOccluded not enabled"); }
  void invalid_rtcIntersect4() { throw_RTCError(RTC_ERROR_INVALID_OPERATION,"rtcIntersect4 and rtcOccluded4 not enabled"); }
  void invalid_rtcIntersect8() { throw_RTCError(RTC_ERROR_INVALID_OPERATION,"rtcIntersect8 and rtcOccluded8 not enabled"); }
  void invalid_rtcIntersect16() { throw_RTCError(RTC_ERROR_INVALID_OPERATION,"rtcIntersect16 and rtcOccluded16 not enabled"); }
  void invalid_rtcIntersectN() { throw_RTCError(RTC_ERROR_INVALID_OPERATION,"rtcIntersectN and rtcOccludedN not enabled"); }
  Scene::Scene (Device* device)
    : device(device),
      flags_modified(true), enabled_geometry_types(0),
      scene_flags(RTC_SCENE_FLAG_NONE),
      quality_flags(RTC_BUILD_QUALITY_MEDIUM),
      modified(true),
      taskGroup(new TaskGroup()),
      progressInterface(this), progress_monitor_function(nullptr), progress_monitor_ptr(nullptr), progress_monitor_counter(0)
  {
    device->refInc();

    intersectors = Accel::Intersectors(missing_rtcCommit);

    /* use proper device and context for SYCL allocations */
#if defined(EMBREE_SYCL_SUPPORT)
    if (DeviceGPU* gpu_device = dynamic_cast<DeviceGPU*>(device))
      hwaccel = AccelBuffer(AccelAllocator<char>(device,gpu_device->getGPUDevice(),gpu_device->getGPUContext()),0);
#endif

    /* one can overwrite flags through device for debugging */
    if (device->quality_flags != -1)
      quality_flags = (RTCBuildQuality) device->quality_flags;
    if (device->scene_flags != -1)
      scene_flags = (RTCSceneFlags) device->scene_flags;
  }

  Scene::~Scene() noexcept
  {
    device->refDec();
  }
  void Scene::printStatistics()
  {
    /* calculate maximum number of time segments */
    unsigned max_time_steps = 0;
    for (size_t i=0; i<size(); i++) {
      if (!get(i)) continue;
      max_time_steps = max(max_time_steps,get(i)->numTimeSteps);
    }

    /* initialize vectors */
    std::vector<size_t> statistics[Geometry::GTY_END];
    for (size_t i=0; i<Geometry::GTY_END; i++)
      statistics[i].resize(max_time_steps);

    /* gather statistics */
    for (size_t i=0; i<size(); i++)
    {
      if (!get(i)) continue;
      int ty = get(i)->getType();
      assert(ty<Geometry::GTY_END);
      int timesegments = get(i)->numTimeSegments();
      assert((unsigned int)timesegments < max_time_steps);
      statistics[ty][timesegments] += get(i)->size();
    }

    /* print statistics */
    std::cout << std::setw(23) << "segments" << ": ";
    for (size_t t=0; t<max_time_steps; t++)
      std::cout << std::setw(10) << t;
    std::cout << std::endl;

    std::cout << "-------------------------";
    for (size_t t=0; t<max_time_steps; t++)
      std::cout << "----------";
    std::cout << std::endl;

    for (size_t p=0; p<Geometry::GTY_END; p++)
    {
      if (std::string(Geometry::gtype_names[p]) == "") continue;
      std::cout << std::setw(23) << Geometry::gtype_names[p] << ": ";
      for (size_t t=0; t<max_time_steps; t++)
        std::cout << std::setw(10) << statistics[p][t];
      std::cout << std::endl;
    }
  }
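
  /* The create*Accel() helpers below choose a BVH variant from the scene build
     settings. Several of them encode the compact and robust scene flags into a
     two-bit selector,
       mode = 2*isCompactAccel() + 1*isRobustAccel(),
     so 0b00 selects the fast variant, 0b01 the robust variant, 0b10 the
     compact fast variant, and 0b11 the compact robust variant. */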
  void Scene::createTriangleAccel()
  {
#if defined(EMBREE_GEOMETRY_TRIANGLE)
    if (device->tri_accel == "default")
    {
      if (quality_flags != RTC_BUILD_QUALITY_LOW)
      {
        int mode = 2*(int)isCompactAccel() + 1*(int)isRobustAccel();
        switch (mode) {
        case /*0b00*/ 0:
#if defined (EMBREE_TARGET_SIMD8)
          if (device->canUseAVX())
          {
            if (quality_flags == RTC_BUILD_QUALITY_HIGH)
              accels_add(device->bvh8_factory->BVH8Triangle4(this,BVHFactory::BuildVariant::HIGH_QUALITY,BVHFactory::IntersectVariant::FAST));
            else
              accels_add(device->bvh8_factory->BVH8Triangle4(this,BVHFactory::BuildVariant::STATIC,BVHFactory::IntersectVariant::FAST));
          }
          else
#endif
          {
            if (quality_flags == RTC_BUILD_QUALITY_HIGH)
              accels_add(device->bvh4_factory->BVH4Triangle4(this,BVHFactory::BuildVariant::HIGH_QUALITY,BVHFactory::IntersectVariant::FAST));
            else
              accels_add(device->bvh4_factory->BVH4Triangle4(this,BVHFactory::BuildVariant::STATIC,BVHFactory::IntersectVariant::FAST));
          }
          break;

        case /*0b01*/ 1:
#if defined (EMBREE_TARGET_SIMD8)
          if (device->canUseAVX())
            accels_add(device->bvh8_factory->BVH8Triangle4v(this,BVHFactory::BuildVariant::STATIC,BVHFactory::IntersectVariant::ROBUST));
          else
#endif
            accels_add(device->bvh4_factory->BVH4Triangle4v(this,BVHFactory::BuildVariant::STATIC,BVHFactory::IntersectVariant::ROBUST));
          break;

        case /*0b10*/ 2: accels_add(device->bvh4_factory->BVH4Triangle4i(this,BVHFactory::BuildVariant::STATIC,BVHFactory::IntersectVariant::FAST  )); break;
        case /*0b11*/ 3: accels_add(device->bvh4_factory->BVH4Triangle4i(this,BVHFactory::BuildVariant::STATIC,BVHFactory::IntersectVariant::ROBUST)); break;
        }
      }
      else /* dynamic */
      {
#if defined (EMBREE_TARGET_SIMD8)
        if (device->canUseAVX())
        {
          int mode = 2*(int)isCompactAccel() + 1*(int)isRobustAccel();
          switch (mode) {
          case /*0b00*/ 0: accels_add(device->bvh8_factory->BVH8Triangle4 (this,BVHFactory::BuildVariant::DYNAMIC,BVHFactory::IntersectVariant::FAST  )); break;
          case /*0b01*/ 1: accels_add(device->bvh8_factory->BVH8Triangle4v(this,BVHFactory::BuildVariant::DYNAMIC,BVHFactory::IntersectVariant::ROBUST)); break;
          case /*0b10*/ 2: accels_add(device->bvh4_factory->BVH4Triangle4i(this,BVHFactory::BuildVariant::DYNAMIC,BVHFactory::IntersectVariant::FAST  )); break;
          case /*0b11*/ 3: accels_add(device->bvh4_factory->BVH4Triangle4i(this,BVHFactory::BuildVariant::DYNAMIC,BVHFactory::IntersectVariant::ROBUST)); break;
          }
        }
        else
#endif
        {
          int mode = 2*(int)isCompactAccel() + 1*(int)isRobustAccel();
          switch (mode) {
          case /*0b00*/ 0: accels_add(device->bvh4_factory->BVH4Triangle4 (this,BVHFactory::BuildVariant::DYNAMIC,BVHFactory::IntersectVariant::FAST  )); break;
          case /*0b01*/ 1: accels_add(device->bvh4_factory->BVH4Triangle4v(this,BVHFactory::BuildVariant::DYNAMIC,BVHFactory::IntersectVariant::ROBUST)); break;
          case /*0b10*/ 2: accels_add(device->bvh4_factory->BVH4Triangle4i(this,BVHFactory::BuildVariant::DYNAMIC,BVHFactory::IntersectVariant::FAST  )); break;
          case /*0b11*/ 3: accels_add(device->bvh4_factory->BVH4Triangle4i(this,BVHFactory::BuildVariant::DYNAMIC,BVHFactory::IntersectVariant::ROBUST)); break;
          }
        }
      }
    }
    else if (device->tri_accel == "bvh4.triangle4")   accels_add(device->bvh4_factory->BVH4Triangle4 (this));
    else if (device->tri_accel == "bvh4.triangle4v")  accels_add(device->bvh4_factory->BVH4Triangle4v(this));
    else if (device->tri_accel == "bvh4.triangle4i")  accels_add(device->bvh4_factory->BVH4Triangle4i(this));
    else if (device->tri_accel == "qbvh4.triangle4i") accels_add(device->bvh4_factory->BVH4QuantizedTriangle4i(this));
#if defined (EMBREE_TARGET_SIMD8)
    else if (device->tri_accel == "bvh8.triangle4")   accels_add(device->bvh8_factory->BVH8Triangle4 (this));
    else if (device->tri_accel == "bvh8.triangle4v")  accels_add(device->bvh8_factory->BVH8Triangle4v(this));
    else if (device->tri_accel == "bvh8.triangle4i")  accels_add(device->bvh8_factory->BVH8Triangle4i(this));
    else if (device->tri_accel == "qbvh8.triangle4i") accels_add(device->bvh8_factory->BVH8QuantizedTriangle4i(this));
    else if (device->tri_accel == "qbvh8.triangle4")  accels_add(device->bvh8_factory->BVH8QuantizedTriangle4(this));
#endif
    else throw_RTCError(RTC_ERROR_INVALID_ARGUMENT,"unknown triangle acceleration structure "+device->tri_accel);
#endif
  }
  void Scene::createTriangleMBAccel()
  {
#if defined(EMBREE_GEOMETRY_TRIANGLE)
    if (device->tri_accel_mb == "default")
    {
      int mode = 2*(int)isCompactAccel() + 1*(int)isRobustAccel();

#if defined (EMBREE_TARGET_SIMD8)
      if (device->canUseAVX2()) // BVH8 reduces performance on AVX-only machines
      {
        switch (mode) {
        case /*0b00*/ 0: accels_add(device->bvh8_factory->BVH8Triangle4iMB(this,BVHFactory::BuildVariant::STATIC,BVHFactory::IntersectVariant::FAST  )); break;
        case /*0b01*/ 1: accels_add(device->bvh8_factory->BVH8Triangle4iMB(this,BVHFactory::BuildVariant::STATIC,BVHFactory::IntersectVariant::ROBUST)); break;
        case /*0b10*/ 2: accels_add(device->bvh4_factory->BVH4Triangle4iMB(this,BVHFactory::BuildVariant::STATIC,BVHFactory::IntersectVariant::FAST  )); break;
        case /*0b11*/ 3: accels_add(device->bvh4_factory->BVH4Triangle4iMB(this,BVHFactory::BuildVariant::STATIC,BVHFactory::IntersectVariant::ROBUST)); break;
        }
      }
      else
#endif
      {
        switch (mode) {
        case /*0b00*/ 0: accels_add(device->bvh4_factory->BVH4Triangle4iMB(this,BVHFactory::BuildVariant::STATIC,BVHFactory::IntersectVariant::FAST  )); break;
        case /*0b01*/ 1: accels_add(device->bvh4_factory->BVH4Triangle4iMB(this,BVHFactory::BuildVariant::STATIC,BVHFactory::IntersectVariant::ROBUST)); break;
        case /*0b10*/ 2: accels_add(device->bvh4_factory->BVH4Triangle4iMB(this,BVHFactory::BuildVariant::STATIC,BVHFactory::IntersectVariant::FAST  )); break;
        case /*0b11*/ 3: accels_add(device->bvh4_factory->BVH4Triangle4iMB(this,BVHFactory::BuildVariant::STATIC,BVHFactory::IntersectVariant::ROBUST)); break;
        }
      }
    }
    else if (device->tri_accel_mb == "bvh4.triangle4imb") accels_add(device->bvh4_factory->BVH4Triangle4iMB(this));
    else if (device->tri_accel_mb == "bvh4.triangle4vmb") accels_add(device->bvh4_factory->BVH4Triangle4vMB(this));
#if defined (EMBREE_TARGET_SIMD8)
    else if (device->tri_accel_mb == "bvh8.triangle4imb") accels_add(device->bvh8_factory->BVH8Triangle4iMB(this));
    else if (device->tri_accel_mb == "bvh8.triangle4vmb") accels_add(device->bvh8_factory->BVH8Triangle4vMB(this));
#endif
    else throw_RTCError(RTC_ERROR_INVALID_ARGUMENT,"unknown motion blur triangle acceleration structure "+device->tri_accel_mb);
#endif
  }
  void Scene::createQuadAccel()
  {
#if defined(EMBREE_GEOMETRY_QUAD)
    if (device->quad_accel == "default")
    {
      if (quality_flags != RTC_BUILD_QUALITY_LOW)
      {
        /* static */
        int mode = 2*(int)isCompactAccel() + 1*(int)isRobustAccel();
        switch (mode) {
        case /*0b00*/ 0:
#if defined (EMBREE_TARGET_SIMD8)
          if (device->canUseAVX())
          {
            if (quality_flags == RTC_BUILD_QUALITY_HIGH)
              accels_add(device->bvh8_factory->BVH8Quad4v(this,BVHFactory::BuildVariant::HIGH_QUALITY,BVHFactory::IntersectVariant::FAST));
            else
              accels_add(device->bvh8_factory->BVH8Quad4v(this,BVHFactory::BuildVariant::STATIC,BVHFactory::IntersectVariant::FAST));
          }
          else
#endif
          {
            if (quality_flags == RTC_BUILD_QUALITY_HIGH)
              accels_add(device->bvh4_factory->BVH4Quad4v(this,BVHFactory::BuildVariant::HIGH_QUALITY,BVHFactory::IntersectVariant::FAST));
            else
              accels_add(device->bvh4_factory->BVH4Quad4v(this,BVHFactory::BuildVariant::STATIC,BVHFactory::IntersectVariant::FAST));
          }
          break;

        case /*0b01*/ 1:
#if defined (EMBREE_TARGET_SIMD8)
          if (device->canUseAVX())
            accels_add(device->bvh8_factory->BVH8Quad4v(this,BVHFactory::BuildVariant::STATIC,BVHFactory::IntersectVariant::ROBUST));
          else
#endif
            accels_add(device->bvh4_factory->BVH4Quad4v(this,BVHFactory::BuildVariant::STATIC,BVHFactory::IntersectVariant::ROBUST));
          break;

        case /*0b10*/ 2: accels_add(device->bvh4_factory->BVH4Quad4i(this,BVHFactory::BuildVariant::STATIC,BVHFactory::IntersectVariant::FAST)); break;
        case /*0b11*/ 3: accels_add(device->bvh4_factory->BVH4Quad4i(this,BVHFactory::BuildVariant::STATIC,BVHFactory::IntersectVariant::ROBUST)); break;
        }
      }
      else /* dynamic */
      {
#if defined (EMBREE_TARGET_SIMD8)
        if (device->canUseAVX())
        {
          int mode = 2*(int)isCompactAccel() + 1*(int)isRobustAccel();
          switch (mode) {
          case /*0b00*/ 0: accels_add(device->bvh8_factory->BVH8Quad4v(this,BVHFactory::BuildVariant::DYNAMIC,BVHFactory::IntersectVariant::FAST)); break;
          case /*0b01*/ 1: accels_add(device->bvh8_factory->BVH8Quad4v(this,BVHFactory::BuildVariant::DYNAMIC,BVHFactory::IntersectVariant::ROBUST)); break;
          case /*0b10*/ 2: accels_add(device->bvh4_factory->BVH4Quad4v(this,BVHFactory::BuildVariant::DYNAMIC,BVHFactory::IntersectVariant::FAST)); break;
          case /*0b11*/ 3: accels_add(device->bvh4_factory->BVH4Quad4v(this,BVHFactory::BuildVariant::DYNAMIC,BVHFactory::IntersectVariant::ROBUST)); break;
          }
        }
        else
#endif
        {
          int mode = 2*(int)isCompactAccel() + 1*(int)isRobustAccel();
          switch (mode) {
          case /*0b00*/ 0: accels_add(device->bvh4_factory->BVH4Quad4v(this,BVHFactory::BuildVariant::DYNAMIC,BVHFactory::IntersectVariant::FAST)); break;
          case /*0b01*/ 1: accels_add(device->bvh4_factory->BVH4Quad4v(this,BVHFactory::BuildVariant::DYNAMIC,BVHFactory::IntersectVariant::ROBUST)); break;
          case /*0b10*/ 2: accels_add(device->bvh4_factory->BVH4Quad4v(this,BVHFactory::BuildVariant::DYNAMIC,BVHFactory::IntersectVariant::FAST)); break;
          case /*0b11*/ 3: accels_add(device->bvh4_factory->BVH4Quad4v(this,BVHFactory::BuildVariant::DYNAMIC,BVHFactory::IntersectVariant::ROBUST)); break;
          }
        }
      }
    }
    else if (device->quad_accel == "bvh4.quad4v")  accels_add(device->bvh4_factory->BVH4Quad4v(this));
    else if (device->quad_accel == "bvh4.quad4i")  accels_add(device->bvh4_factory->BVH4Quad4i(this));
    else if (device->quad_accel == "qbvh4.quad4i") accels_add(device->bvh4_factory->BVH4QuantizedQuad4i(this));
#if defined (EMBREE_TARGET_SIMD8)
    else if (device->quad_accel == "bvh8.quad4v")  accels_add(device->bvh8_factory->BVH8Quad4v(this));
    else if (device->quad_accel == "bvh8.quad4i")  accels_add(device->bvh8_factory->BVH8Quad4i(this));
    else if (device->quad_accel == "qbvh8.quad4i") accels_add(device->bvh8_factory->BVH8QuantizedQuad4i(this));
#endif
    else throw_RTCError(RTC_ERROR_INVALID_ARGUMENT,"unknown quad acceleration structure "+device->quad_accel);
#endif
  }
  void Scene::createQuadMBAccel()
  {
#if defined(EMBREE_GEOMETRY_QUAD)
    if (device->quad_accel_mb == "default")
    {
      int mode = 2*(int)isCompactAccel() + 1*(int)isRobustAccel();
      switch (mode) {
      case /*0b00*/ 0:
#if defined (EMBREE_TARGET_SIMD8)
        if (device->canUseAVX())
          accels_add(device->bvh8_factory->BVH8Quad4iMB(this,BVHFactory::BuildVariant::STATIC,BVHFactory::IntersectVariant::FAST));
        else
#endif
          accels_add(device->bvh4_factory->BVH4Quad4iMB(this,BVHFactory::BuildVariant::STATIC,BVHFactory::IntersectVariant::FAST));
        break;

      case /*0b01*/ 1:
#if defined (EMBREE_TARGET_SIMD8)
        if (device->canUseAVX())
          accels_add(device->bvh8_factory->BVH8Quad4iMB(this,BVHFactory::BuildVariant::STATIC,BVHFactory::IntersectVariant::ROBUST));
        else
#endif
          accels_add(device->bvh4_factory->BVH4Quad4iMB(this,BVHFactory::BuildVariant::STATIC,BVHFactory::IntersectVariant::ROBUST));
        break;

      case /*0b10*/ 2: accels_add(device->bvh4_factory->BVH4Quad4iMB(this,BVHFactory::BuildVariant::STATIC,BVHFactory::IntersectVariant::FAST  )); break;
      case /*0b11*/ 3: accels_add(device->bvh4_factory->BVH4Quad4iMB(this,BVHFactory::BuildVariant::STATIC,BVHFactory::IntersectVariant::ROBUST)); break;
      }
    }
    else if (device->quad_accel_mb == "bvh4.quad4imb") accels_add(device->bvh4_factory->BVH4Quad4iMB(this));
#if defined (EMBREE_TARGET_SIMD8)
    else if (device->quad_accel_mb == "bvh8.quad4imb") accels_add(device->bvh8_factory->BVH8Quad4iMB(this));
#endif
    else throw_RTCError(RTC_ERROR_INVALID_ARGUMENT,"unknown quad motion blur acceleration structure "+device->quad_accel_mb);
#endif
  }
  void Scene::createHairAccel()
  {
#if defined(EMBREE_GEOMETRY_CURVE) || defined(EMBREE_GEOMETRY_POINT)
    if (device->hair_accel == "default")
    {
      int mode = 2*(int)isCompactAccel() + 1*(int)isRobustAccel();
#if defined (EMBREE_TARGET_SIMD8)
      if (device->canUseAVX2()) // only enable on HSW machines, for SNB this codepath is slower
      {
        switch (mode) {
        case /*0b00*/ 0: accels_add(device->bvh8_factory->BVH8OBBVirtualCurve8v(this,BVHFactory::IntersectVariant::FAST)); break;
        case /*0b01*/ 1: accels_add(device->bvh8_factory->BVH8OBBVirtualCurve8v(this,BVHFactory::IntersectVariant::ROBUST)); break;
        case /*0b10*/ 2: accels_add(device->bvh4_factory->BVH4OBBVirtualCurve8i(this,BVHFactory::IntersectVariant::FAST)); break;
        case /*0b11*/ 3: accels_add(device->bvh4_factory->BVH4OBBVirtualCurve8i(this,BVHFactory::IntersectVariant::ROBUST)); break;
        }
      }
      else
#endif
      {
        switch (mode) {
        case /*0b00*/ 0: accels_add(device->bvh4_factory->BVH4OBBVirtualCurve4v(this,BVHFactory::IntersectVariant::FAST)); break;
        case /*0b01*/ 1: accels_add(device->bvh4_factory->BVH4OBBVirtualCurve4v(this,BVHFactory::IntersectVariant::ROBUST)); break;
        case /*0b10*/ 2: accels_add(device->bvh4_factory->BVH4OBBVirtualCurve4i(this,BVHFactory::IntersectVariant::FAST)); break;
        case /*0b11*/ 3: accels_add(device->bvh4_factory->BVH4OBBVirtualCurve4i(this,BVHFactory::IntersectVariant::ROBUST)); break;
        }
      }
    }
    else if (device->hair_accel == "bvh4obb.virtualcurve4v" ) accels_add(device->bvh4_factory->BVH4OBBVirtualCurve4v(this,BVHFactory::IntersectVariant::FAST));
    else if (device->hair_accel == "bvh4obb.virtualcurve4i" ) accels_add(device->bvh4_factory->BVH4OBBVirtualCurve4i(this,BVHFactory::IntersectVariant::FAST));
#if defined (EMBREE_TARGET_SIMD8)
    else if (device->hair_accel == "bvh8obb.virtualcurve8v" ) accels_add(device->bvh8_factory->BVH8OBBVirtualCurve8v(this,BVHFactory::IntersectVariant::FAST));
    else if (device->hair_accel == "bvh4obb.virtualcurve8i" ) accels_add(device->bvh4_factory->BVH4OBBVirtualCurve8i(this,BVHFactory::IntersectVariant::FAST));
#endif
    else throw_RTCError(RTC_ERROR_INVALID_ARGUMENT,"unknown hair acceleration structure "+device->hair_accel);
#endif
  }
  void Scene::createHairMBAccel()
  {
#if defined(EMBREE_GEOMETRY_CURVE) || defined(EMBREE_GEOMETRY_POINT)
    if (device->hair_accel_mb == "default")
    {
#if defined (EMBREE_TARGET_SIMD8)
      if (device->canUseAVX2()) // only enable on HSW machines, on SNB this codepath is slower
      {
        if (isRobustAccel()) accels_add(device->bvh8_factory->BVH8OBBVirtualCurve8iMB(this,BVHFactory::IntersectVariant::ROBUST));
        else                 accels_add(device->bvh8_factory->BVH8OBBVirtualCurve8iMB(this,BVHFactory::IntersectVariant::FAST));
      }
      else
#endif
      {
        if (isRobustAccel()) accels_add(device->bvh4_factory->BVH4OBBVirtualCurve4iMB(this,BVHFactory::IntersectVariant::ROBUST));
        else                 accels_add(device->bvh4_factory->BVH4OBBVirtualCurve4iMB(this,BVHFactory::IntersectVariant::FAST));
      }
    }
    else if (device->hair_accel_mb == "bvh4.virtualcurve4imb") accels_add(device->bvh4_factory->BVH4OBBVirtualCurve4iMB(this,BVHFactory::IntersectVariant::FAST));
#if defined (EMBREE_TARGET_SIMD8)
    else if (device->hair_accel_mb == "bvh4.virtualcurve8imb") accels_add(device->bvh4_factory->BVH4OBBVirtualCurve8iMB(this,BVHFactory::IntersectVariant::FAST));
    else if (device->hair_accel_mb == "bvh8.virtualcurve8imb") accels_add(device->bvh8_factory->BVH8OBBVirtualCurve8iMB(this,BVHFactory::IntersectVariant::FAST));
#endif
    else throw_RTCError(RTC_ERROR_INVALID_ARGUMENT,"unknown motion blur hair acceleration structure "+device->hair_accel_mb);
#endif
  }
  void Scene::createSubdivAccel()
  {
#if defined(EMBREE_GEOMETRY_SUBDIVISION)
    if (device->subdiv_accel == "default") {
      accels_add(device->bvh4_factory->BVH4SubdivPatch1(this));
    }
    else if (device->subdiv_accel == "bvh4.grid.eager"        ) accels_add(device->bvh4_factory->BVH4SubdivPatch1(this));
    else if (device->subdiv_accel == "bvh4.subdivpatch1eager" ) accels_add(device->bvh4_factory->BVH4SubdivPatch1(this));
    else throw_RTCError(RTC_ERROR_INVALID_ARGUMENT,"unknown subdiv accel "+device->subdiv_accel);
#endif
  }

  void Scene::createSubdivMBAccel()
  {
#if defined(EMBREE_GEOMETRY_SUBDIVISION)
    if (device->subdiv_accel_mb == "default") {
      accels_add(device->bvh4_factory->BVH4SubdivPatch1MB(this));
    }
    else throw_RTCError(RTC_ERROR_INVALID_ARGUMENT,"unknown subdiv mblur accel "+device->subdiv_accel_mb);
#endif
  }
  void Scene::createUserGeometryAccel()
  {
#if defined(EMBREE_GEOMETRY_USER)
    if (device->object_accel == "default")
    {
#if defined (EMBREE_TARGET_SIMD8)
      if (device->canUseAVX() && !isCompactAccel())
      {
        if (quality_flags != RTC_BUILD_QUALITY_LOW) {
          accels_add(device->bvh8_factory->BVH8UserGeometry(this,BVHFactory::BuildVariant::STATIC));
        } else {
          accels_add(device->bvh8_factory->BVH8UserGeometry(this,BVHFactory::BuildVariant::DYNAMIC));
        }
      }
      else
#endif
      {
        if (quality_flags != RTC_BUILD_QUALITY_LOW) {
          accels_add(device->bvh4_factory->BVH4UserGeometry(this,BVHFactory::BuildVariant::STATIC));
        } else {
          accels_add(device->bvh4_factory->BVH4UserGeometry(this,BVHFactory::BuildVariant::DYNAMIC));
        }
      }
    }
    else if (device->object_accel == "bvh4.object") accels_add(device->bvh4_factory->BVH4UserGeometry(this));
#if defined (EMBREE_TARGET_SIMD8)
    else if (device->object_accel == "bvh8.object") accels_add(device->bvh8_factory->BVH8UserGeometry(this));
#endif
    else throw_RTCError(RTC_ERROR_INVALID_ARGUMENT,"unknown user geometry accel "+device->object_accel);
#endif
  }

  void Scene::createUserGeometryMBAccel()
  {
#if defined(EMBREE_GEOMETRY_USER)
    if (device->object_accel_mb == "default" ) {
#if defined (EMBREE_TARGET_SIMD8)
      if (device->canUseAVX() && !isCompactAccel())
        accels_add(device->bvh8_factory->BVH8UserGeometryMB(this));
      else
#endif
        accels_add(device->bvh4_factory->BVH4UserGeometryMB(this));
    }
    else if (device->object_accel_mb == "bvh4.object") accels_add(device->bvh4_factory->BVH4UserGeometryMB(this));
#if defined (EMBREE_TARGET_SIMD8)
    else if (device->object_accel_mb == "bvh8.object") accels_add(device->bvh8_factory->BVH8UserGeometryMB(this));
#endif
    else throw_RTCError(RTC_ERROR_INVALID_ARGUMENT,"unknown user geometry mblur accel "+device->object_accel_mb);
#endif
  }
  void Scene::createInstanceAccel()
  {
#if defined(EMBREE_GEOMETRY_INSTANCE)
    // if (device->object_accel == "default")
    {
#if defined (EMBREE_TARGET_SIMD8)
      if (device->canUseAVX() && !isCompactAccel()) {
        if (quality_flags != RTC_BUILD_QUALITY_LOW) {
          accels_add(device->bvh8_factory->BVH8Instance(this, false, BVHFactory::BuildVariant::STATIC));
        } else {
          accels_add(device->bvh8_factory->BVH8Instance(this, false, BVHFactory::BuildVariant::DYNAMIC));
        }
      }
      else
#endif
      {
        if (quality_flags != RTC_BUILD_QUALITY_LOW) {
          accels_add(device->bvh4_factory->BVH4Instance(this, false, BVHFactory::BuildVariant::STATIC));
        } else {
          accels_add(device->bvh4_factory->BVH4Instance(this, false, BVHFactory::BuildVariant::DYNAMIC));
        }
      }
    }
    // else throw_RTCError(RTC_ERROR_INVALID_ARGUMENT,"unknown instance accel "+device->instance_accel);
#endif
  }

  void Scene::createInstanceMBAccel()
  {
#if defined(EMBREE_GEOMETRY_INSTANCE)
    //if (device->instance_accel_mb == "default")
    {
#if defined (EMBREE_TARGET_SIMD8)
      if (device->canUseAVX() && !isCompactAccel())
        accels_add(device->bvh8_factory->BVH8InstanceMB(this, false));
      else
#endif
        accels_add(device->bvh4_factory->BVH4InstanceMB(this, false));
    }
    //else throw_RTCError(RTC_ERROR_INVALID_ARGUMENT,"unknown instance mblur accel "+device->instance_accel_mb);
#endif
  }

  void Scene::createInstanceExpensiveAccel()
  {
#if defined(EMBREE_GEOMETRY_INSTANCE)
    // if (device->object_accel == "default")
    {
#if defined (EMBREE_TARGET_SIMD8)
      if (device->canUseAVX() && !isCompactAccel()) {
        if (quality_flags != RTC_BUILD_QUALITY_LOW) {
          accels_add(device->bvh8_factory->BVH8Instance(this, true, BVHFactory::BuildVariant::STATIC));
        } else {
          accels_add(device->bvh8_factory->BVH8Instance(this, true, BVHFactory::BuildVariant::DYNAMIC));
        }
      }
      else
#endif
      {
        if (quality_flags != RTC_BUILD_QUALITY_LOW) {
          accels_add(device->bvh4_factory->BVH4Instance(this, true, BVHFactory::BuildVariant::STATIC));
        } else {
          accels_add(device->bvh4_factory->BVH4Instance(this, true, BVHFactory::BuildVariant::DYNAMIC));
        }
      }
    }
    // else throw_RTCError(RTC_ERROR_INVALID_ARGUMENT,"unknown instance accel "+device->instance_accel);
#endif
  }

  void Scene::createInstanceExpensiveMBAccel()
  {
#if defined(EMBREE_GEOMETRY_INSTANCE)
    //if (device->instance_accel_mb == "default")
    {
#if defined (EMBREE_TARGET_SIMD8)
      if (device->canUseAVX() && !isCompactAccel())
        accels_add(device->bvh8_factory->BVH8InstanceMB(this, true));
      else
#endif
        accels_add(device->bvh4_factory->BVH4InstanceMB(this, true));
    }
    //else throw_RTCError(RTC_ERROR_INVALID_ARGUMENT,"unknown instance mblur accel "+device->instance_accel_mb);
#endif
  }
  void Scene::createInstanceArrayAccel()
  {
#if defined(EMBREE_GEOMETRY_INSTANCE_ARRAY)
    // if (device->object_accel == "default")
    {
#if defined (EMBREE_TARGET_SIMD8)
      if (device->canUseAVX() && !isCompactAccel()) {
        if (quality_flags != RTC_BUILD_QUALITY_LOW) {
          accels_add(device->bvh8_factory->BVH8InstanceArray(this, BVHFactory::BuildVariant::STATIC));
        } else {
          accels_add(device->bvh8_factory->BVH8InstanceArray(this, BVHFactory::BuildVariant::DYNAMIC));
        }
      }
      else
#endif
      {
        if (quality_flags != RTC_BUILD_QUALITY_LOW) {
          accels_add(device->bvh4_factory->BVH4InstanceArray(this, BVHFactory::BuildVariant::STATIC));
        } else {
          accels_add(device->bvh4_factory->BVH4InstanceArray(this, BVHFactory::BuildVariant::DYNAMIC));
        }
      }
    }
    // else throw_RTCError(RTC_ERROR_INVALID_ARGUMENT,"unknown instance accel "+device->instance_accel);
#endif
  }

  void Scene::createInstanceArrayMBAccel()
  {
#if defined(EMBREE_GEOMETRY_INSTANCE_ARRAY)
    //if (device->instance_accel_mb == "default")
    {
#if defined (EMBREE_TARGET_SIMD8)
      if (device->canUseAVX() && !isCompactAccel())
        accels_add(device->bvh8_factory->BVH8InstanceArrayMB(this));
      else
#endif
        accels_add(device->bvh4_factory->BVH4InstanceArrayMB(this));
    }
    //else throw_RTCError(RTC_ERROR_INVALID_ARGUMENT,"unknown instance mblur accel "+device->instance_accel_mb);
#endif
  }
  void Scene::createGridAccel()
  {
#if defined(EMBREE_GEOMETRY_GRID)
    BVHFactory::IntersectVariant ivariant = isRobustAccel() ? BVHFactory::IntersectVariant::ROBUST : BVHFactory::IntersectVariant::FAST;

    if (device->grid_accel == "default")
    {
#if defined (EMBREE_TARGET_SIMD8)
      if (device->canUseAVX() && !isCompactAccel())
      {
        accels_add(device->bvh8_factory->BVH8Grid(this,BVHFactory::BuildVariant::STATIC,ivariant));
      }
      else
#endif
      {
        accels_add(device->bvh4_factory->BVH4Grid(this,BVHFactory::BuildVariant::STATIC,ivariant));
      }
    }
    else if (device->grid_accel == "bvh4.grid") accels_add(device->bvh4_factory->BVH4Grid(this,BVHFactory::BuildVariant::STATIC,ivariant));
#if defined (EMBREE_TARGET_SIMD8)
    else if (device->grid_accel == "bvh8.grid") accels_add(device->bvh8_factory->BVH8Grid(this,BVHFactory::BuildVariant::STATIC,ivariant));
#endif
    else throw_RTCError(RTC_ERROR_INVALID_ARGUMENT,"unknown grid accel "+device->grid_accel);
#endif
  }

  void Scene::createGridMBAccel()
  {
#if defined(EMBREE_GEOMETRY_GRID)
    if (device->grid_accel_mb == "default")
    {
      accels_add(device->bvh4_factory->BVH4GridMB(this,BVHFactory::BuildVariant::STATIC));
    }
    else if (device->grid_accel_mb == "bvh4mb.grid") accels_add(device->bvh4_factory->BVH4GridMB(this));
    else throw_RTCError(RTC_ERROR_INVALID_ARGUMENT,"unknown grid mb accel "+device->grid_accel_mb);
#endif
  }
  void Scene::clear() {
  }
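
  /* bind() attaches a geometry to the scene under either a caller-supplied ID
     (registered with the ID pool) or a freshly allocated one; the geometry,
     vertex, and modification-counter arrays grow on demand. detachGeometry()
     releases the ID and clears the corresponding slots. */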
  unsigned Scene::bind(unsigned geomID, Ref<Geometry> geometry)
  {
    Lock<MutexSys> lock(geometriesMutex);
    if (geomID == RTC_INVALID_GEOMETRY_ID) {
      geomID = id_pool.allocate();
      if (geomID == RTC_INVALID_GEOMETRY_ID)
        throw_RTCError(RTC_ERROR_INVALID_OPERATION,"too many geometries inside scene");
    }
    else
    {
      if (!id_pool.add(geomID))
        throw_RTCError(RTC_ERROR_INVALID_OPERATION,"invalid geometry ID provided");
    }
    if (geomID >= geometries.size()) {
      geometries.resize(geomID+1);
      vertices.resize(geomID+1);
      geometryModCounters_.resize(geomID+1);
    }
    geometries[geomID] = geometry;
    geometryModCounters_[geomID] = 0;
    if (geometry->isEnabled()) {
      setModified ();
    }
    return geomID;
  }

  void Scene::detachGeometry(size_t geomID)
  {
    Lock<MutexSys> lock(geometriesMutex);

    if (geomID >= geometries.size())
      throw_RTCError(RTC_ERROR_INVALID_OPERATION,"invalid geometry ID");

    Ref<Geometry>& geometry = geometries[geomID];
    if (geometry == null)
      throw_RTCError(RTC_ERROR_INVALID_OPERATION,"invalid geometry");

    setModified ();
    accels_deleteGeometry(unsigned(geomID));
    id_pool.deallocate((unsigned)geomID);
    geometries[geomID] = null;
    vertices[geomID] = nullptr;
    geometryModCounters_[geomID] = 0;
  }
  void Scene::build_cpu_accels()
  {
    /* select acceleration structures to build */
    unsigned int new_enabled_geometry_types = world.enabledGeometryTypesMask();
    if (flags_modified || new_enabled_geometry_types != enabled_geometry_types)
    {
      accels_init();

      /* mark all geometries as modified, otherwise the two-level builder will
         not rebuild geometries that are currently unmodified */
      parallel_for(geometryModCounters_.size(), [&] ( const size_t i ) {
        geometryModCounters_[i] = 0;
      });

      if (getNumPrimitives(TriangleMesh::geom_type,false)) createTriangleAccel();
      if (getNumPrimitives(TriangleMesh::geom_type,true)) createTriangleMBAccel();
      if (getNumPrimitives(QuadMesh::geom_type,false)) createQuadAccel();
      if (getNumPrimitives(QuadMesh::geom_type,true)) createQuadMBAccel();
      if (getNumPrimitives(GridMesh::geom_type,false)) createGridAccel();
      if (getNumPrimitives(GridMesh::geom_type,true)) createGridMBAccel();
      if (getNumPrimitives(SubdivMesh::geom_type,false)) createSubdivAccel();
      if (getNumPrimitives(SubdivMesh::geom_type,true)) createSubdivMBAccel();
      if (getNumPrimitives(Geometry::MTY_CURVES,false)) createHairAccel();
      if (getNumPrimitives(Geometry::MTY_CURVES,true)) createHairMBAccel();
      if (getNumPrimitives(UserGeometry::geom_type,false)) createUserGeometryAccel();
      if (getNumPrimitives(UserGeometry::geom_type,true)) createUserGeometryMBAccel();
      if (getNumPrimitives(Geometry::MTY_INSTANCE_CHEAP,false)) createInstanceAccel();
      if (getNumPrimitives(Geometry::MTY_INSTANCE_CHEAP,true)) createInstanceMBAccel();
      if (getNumPrimitives(Geometry::MTY_INSTANCE_EXPENSIVE,false)) createInstanceExpensiveAccel();
      if (getNumPrimitives(Geometry::MTY_INSTANCE_EXPENSIVE,true)) createInstanceExpensiveMBAccel();
      if (getNumPrimitives(Geometry::MTY_INSTANCE_ARRAY,false)) createInstanceArrayAccel();
      if (getNumPrimitives(Geometry::MTY_INSTANCE_ARRAY,true)) createInstanceArrayMBAccel();

      flags_modified = false;
      enabled_geometry_types = new_enabled_geometry_types;
    }

    /* select fast code path if no filter function is present */
    accels_select(hasFilterFunction());

    /* build all hierarchies of this scene */
    accels_build();

    /* make static geometry immutable */
    if (!isDynamicAccel()) {
      accels_immutable();
      flags_modified = true; // in non-dynamic mode we have to re-create accels
    }

    if (device->verbosity(2)) {
      std::cout << "created scene intersector" << std::endl;
      accels_print(2);
      std::cout << "selected scene intersector" << std::endl;
      intersectors.print(2);
    }
  }

  void Scene::build_gpu_accels()
  {
#if defined(EMBREE_SYCL_SUPPORT)
    const BBox3f aabb = rthwifBuild(this,hwaccel);
    bounds = LBBox<embree::Vec3fa>(aabb);
    hwaccel_bounds = aabb;
#endif
  }
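
  /* commit_task() performs the actual scene build: it gathers primitive counts
     and filter-function usage in parallel (calling preCommit() on each enabled
     geometry), then builds either the GPU or the CPU acceleration structures,
     and finally calls postCommit() and caches the compact vertex arrays and
     per-geometry modification counters. */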
  void Scene::commit_task ()
  {
    checkIfModifiedAndSet();
    if (!isModified()) return;

    /* print scene statistics */
    if (device->verbosity(2))
      printStatistics();

    progress_monitor_counter = 0;

    /* gather scene stats and call preCommit function of each geometry */
    this->world = parallel_reduce (size_t(0), geometries.size(), GeometryCounts (),
      [this](const range<size_t>& r)->GeometryCounts
      {
        GeometryCounts c;
        for (auto i=r.begin(); i<r.end(); ++i)
        {
          if (geometries[i] && geometries[i]->isEnabled())
          {
            geometries[i]->preCommit();
            geometries[i]->addElementsToCount (c);
            c.numFilterFunctions += (int) geometries[i]->hasArgumentFilterFunctions();
            c.numFilterFunctions += (int) geometries[i]->hasGeometryFilterFunctions();
          }
        }
        return c;
      },
      std::plus<GeometryCounts>()
    );

#if defined(EMBREE_SYCL_SUPPORT)
    if (DeviceGPU* gpu_device = dynamic_cast<DeviceGPU*>(device))
      build_gpu_accels();
    else
#endif
      build_cpu_accels();

    /* call postCommit function of each geometry */
    parallel_for(geometries.size(), [&] ( const size_t i ) {
      if (geometries[i] && geometries[i]->isEnabled()) {
        geometries[i]->postCommit();
        vertices[i] = geometries[i]->getCompactVertexArray();
        geometryModCounters_[i] = geometries[i]->getModCounter();
      }
    });

    setModified(false);
  }
  void Scene::setBuildQuality(RTCBuildQuality quality_flags_i)
  {
    if (quality_flags == quality_flags_i) return;
    quality_flags = quality_flags_i;
    flags_modified = true;
  }

  RTCBuildQuality Scene::getBuildQuality() const {
    return quality_flags;
  }

  void Scene::setSceneFlags(RTCSceneFlags scene_flags_i)
  {
    if (scene_flags == scene_flags_i) return;
    scene_flags = scene_flags_i;
    flags_modified = true;
  }

  RTCSceneFlags Scene::getSceneFlags() const {
    return scene_flags;
  }
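
  /* Scene::commit() has one implementation per tasking backend (internal
     scheduler, TBB, PPL). Each variant serializes builds through buildMutex;
     the internal and TBB backends additionally let other threads join a build
     that is already in flight (rtcJoinCommitScene), while PPL rejects joins. */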
#if defined(TASKING_INTERNAL)

  void Scene::commit (bool join)
  {
    Lock<MutexSys> buildLock(buildMutex,false);

    /* allocates own taskscheduler for each build */
    Ref<TaskScheduler> scheduler = nullptr;
    {
      Lock<MutexSys> lock(taskGroup->schedulerMutex);
      scheduler = taskGroup->scheduler;
      if (scheduler == null) {
        buildLock.lock();
        taskGroup->scheduler = scheduler = new TaskScheduler;
      }
    }

    /* worker threads join build */
    if (!buildLock.isLocked())
    {
      if (!join)
        throw_RTCError(RTC_ERROR_INVALID_OPERATION,"use rtcJoinCommitScene to join a build operation");

      scheduler->join();
      return;
    }

    /* initiate build */
    //try {
      TaskScheduler::TaskGroupContext context;
      scheduler->spawn_root([&]() { commit_task(); Lock<MutexSys> lock(taskGroup->schedulerMutex); taskGroup->scheduler = nullptr; }, &context, 1, !join);
    //}
    //catch (...) {
    //  accels_clear();
    //  Lock<MutexSys> lock(taskGroup->schedulerMutex);
    //  taskGroup->scheduler = nullptr;
    //  throw;
    //}
  }

#endif
#if defined(TASKING_TBB)

  void Scene::commit (bool join)
  {
#if defined(TASKING_TBB) && (TBB_INTERFACE_VERSION_MAJOR < 8)
    if (join)
      throw_RTCError(RTC_ERROR_INVALID_OPERATION,"rtcJoinCommitScene not supported with this TBB version");
#endif

    /* try to obtain build lock */
    Lock<MutexSys> lock(buildMutex,buildMutex.try_lock());

    /* join hierarchy build */
    if (!lock.isLocked())
    {
#if !TASKING_TBB_USE_TASK_ISOLATION
      if (!join)
        throw_RTCError(RTC_ERROR_INVALID_OPERATION,"invoking rtcCommitScene from multiple threads is not supported with this TBB version");
#endif

      do {
        device->execute(join, [&](){ taskGroup->group.wait(); });
        pause_cpu();
        yield();
      } while (!buildMutex.try_lock());

      buildMutex.unlock();
      return;
    }

    /* for best performance set FTZ and DAZ flags in the MXCSR control and status register */
    const unsigned int mxcsr = _mm_getcsr();
    _mm_setcsr(mxcsr | /* FTZ */ (1<<15) | /* DAZ */ (1<<6));

    try {
#if TBB_INTERFACE_VERSION_MAJOR < 8
      tbb::task_group_context ctx( tbb::task_group_context::isolated, tbb::task_group_context::default_traits);
#else
      tbb::task_group_context ctx( tbb::task_group_context::isolated, tbb::task_group_context::default_traits | tbb::task_group_context::fp_settings );
#endif
      //ctx.set_priority(tbb::priority_high);

      device->execute(join, [&]()
      {
        taskGroup->group.run([&]{
          tbb::parallel_for (size_t(0), size_t(1), size_t(1), [&] (size_t) { commit_task(); }, ctx);
        });
        taskGroup->group.wait();
      });

      /* reset MXCSR register again */
      _mm_setcsr(mxcsr);
    }
    catch (...)
    {
      /* reset MXCSR register again */
      _mm_setcsr(mxcsr);

      accels_clear();
      throw;
    }
  }
#endif
#if defined(TASKING_PPL)

  void Scene::commit (bool join)
  {
#if defined(TASKING_PPL)
    if (join)
      throw_RTCError(RTC_ERROR_INVALID_OPERATION,"rtcJoinCommitScene not supported with PPL");
#endif

    /* try to obtain build lock */
    Lock<MutexSys> lock(buildMutex);

    checkIfModifiedAndSet ();
    if (!isModified()) {
      return;
    }

    /* for best performance set FTZ and DAZ flags in the MXCSR control and status register */
    const unsigned int mxcsr = _mm_getcsr();
    _mm_setcsr(mxcsr | /* FTZ */ (1<<15) | /* DAZ */ (1<<6));

    try {

      taskGroup->group.run([&]{
        concurrency::parallel_for(size_t(0), size_t(1), size_t(1), [&](size_t) { commit_task(); });
      });
      taskGroup->group.wait();

      /* reset MXCSR register again */
      _mm_setcsr(mxcsr);
    }
    catch (...)
    {
      /* reset MXCSR register again */
      _mm_setcsr(mxcsr);

      accels_clear();
      throw;
    }
  }
#endif
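
  /* The progress monitor below is driven through Embree's public API; a minimal
     usage sketch (assuming the rtcSetSceneProgressMonitorFunction entry point and
     callback signature declared in rtcore.h):

       bool monitor(void* ptr, double n) {   // n approximates the fraction of work done
         printf("build progress: %3.0f%%\n", 100.0*n);
         return true;                        // return false to cancel the build
       }
       // rtcSetSceneProgressMonitorFunction(scene, monitor, nullptr);

     Returning false from the callback makes progressMonitor() throw
     RTC_ERROR_CANCELLED. */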
  void Scene::setProgressMonitorFunction(RTCProgressMonitorFunction func, void* ptr)
  {
    progress_monitor_function = func;
    progress_monitor_ptr      = ptr;
  }

  void Scene::progressMonitor(double dn)
  {
    if (progress_monitor_function) {
      size_t n = size_t(dn) + progress_monitor_counter.fetch_add(size_t(dn));
      if (!progress_monitor_function(progress_monitor_ptr, n / (double(numPrimitives())))) {
        throw_RTCError(RTC_ERROR_CANCELLED,"progress monitor forced termination");
      }
    }
  }
}