/*************************************************************************/
/*  arkit_interface.mm                                                   */
/*************************************************************************/
/*                       This file is part of:                           */
/*                           GODOT ENGINE                                */
/*                      https://godotengine.org                          */
/*************************************************************************/
/* Copyright (c) 2007-2020 Juan Linietsky, Ariel Manzur.                 */
/* Copyright (c) 2014-2020 Godot Engine contributors (cf. AUTHORS.md).   */
/*                                                                       */
/* Permission is hereby granted, free of charge, to any person obtaining */
/* a copy of this software and associated documentation files (the       */
/* "Software"), to deal in the Software without restriction, including   */
/* without limitation the rights to use, copy, modify, merge, publish,   */
/* distribute, sublicense, and/or sell copies of the Software, and to    */
/* permit persons to whom the Software is furnished to do so, subject to */
/* the following conditions:                                             */
/*                                                                       */
/* The above copyright notice and this permission notice shall be        */
/* included in all copies or substantial portions of the Software.       */
/*                                                                       */
/* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,       */
/* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF    */
/* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.*/
/* IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY  */
/* CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,  */
/* TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE     */
/* SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.                */
/*************************************************************************/
#include "core/os/input.h"
#include "core/os/os.h"
#include "scene/resources/surface_tool.h"
#include "servers/visual/visual_server_globals.h"

#import <ARKit/ARKit.h>
#import <UIKit/UIKit.h>

#include <dlfcn.h>

#include "arkit_interface.h"
#include "arkit_session_delegate.h"

// Just a dirty workaround for now: declare these as globals. I'll probably
// encapsulate ARSession and associated logic into an .mm object and change
// ARKitInterface into a normal C++ object that consumes it.
ARSession *ar_session;
ARKitSessionDelegate *ar_delegate;
NSTimeInterval last_timestamp;
/* This is called when we initialize or when we come back from having our app pushed to the background; just (re)start our session. */
void ARKitInterface::start_session() {
	// We're active...
	session_was_started = true;

	// Ignore this if we're not initialized...
	if (initialized) {
		print_line("Starting ARKit session");
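		// The configuration class is resolved by name rather than referenced
		// directly; together with the dlopen() fallback in initialize(), this
		// appears intended to keep ARKit weakly linked so the binary still loads
		// on devices/iOS versions without the framework.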
		Class ARWorldTrackingConfigurationClass = NSClassFromString(@"ARWorldTrackingConfiguration");
		ARWorldTrackingConfiguration *configuration = [ARWorldTrackingConfigurationClass new];

		configuration.lightEstimationEnabled = light_estimation_is_enabled;
		if (plane_detection_is_enabled) {
			configuration.planeDetection = ARPlaneDetectionVertical | ARPlaneDetectionHorizontal;
		} else {
			configuration.planeDetection = 0;
		}

		// make sure our camera is on
		if (feed.is_valid()) {
			feed->set_active(true);
		}

		[ar_session runWithConfiguration:configuration];
	}
}
void ARKitInterface::stop_session() {
	session_was_started = false;

	// Ignore this if we're not initialized...
	if (initialized) {
		// make sure our camera is off
		if (feed.is_valid()) {
			feed->set_active(false);
		}

		[ar_session pause];
	}
}

void ARKitInterface::_notification(int p_what) {
	// TODO: this is not being called, need to find out why, possibly because this is not a node.
	// In that case we need to find a way to get these notifications!
	switch (p_what) {
		case MainLoop::NOTIFICATION_WM_FOCUS_IN: {
			print_line("Focus in");

			start_session();
		}; break;
		case MainLoop::NOTIFICATION_WM_FOCUS_OUT: {
			print_line("Focus out");

			stop_session();
		}; break;
		default:
			break;
	}
}
bool ARKitInterface::get_anchor_detection_is_enabled() const {
	return plane_detection_is_enabled;
}

void ARKitInterface::set_anchor_detection_is_enabled(bool p_enable) {
	if (plane_detection_is_enabled != p_enable) {
		plane_detection_is_enabled = p_enable;

		// Restart our session (this will be ignored if we're not initialized)
		if (session_was_started) {
			start_session();
		}
	}
}

int ARKitInterface::get_camera_feed_id() {
	if (feed.is_null()) {
		return 0;
	} else {
		return feed->get_id();
	}
}

bool ARKitInterface::get_light_estimation_is_enabled() const {
	return light_estimation_is_enabled;
}

void ARKitInterface::set_light_estimation_is_enabled(bool p_enable) {
	if (light_estimation_is_enabled != p_enable) {
		light_estimation_is_enabled = p_enable;

		// Restart our session (this will be ignored if we're not initialized)
		if (session_was_started) {
			start_session();
		}
	}
}
real_t ARKitInterface::get_ambient_intensity() const {
	return ambient_intensity;
}

real_t ARKitInterface::get_ambient_color_temperature() const {
	return ambient_color_temperature;
}

StringName ARKitInterface::get_name() const {
	return "ARKit";
}

int ARKitInterface::get_capabilities() const {
	return ARKitInterface::ARVR_MONO + ARKitInterface::ARVR_AR;
}
Array ARKitInterface::raycast(Vector2 p_screen_coord) {
	Array arr;
	Size2 screen_size = OS::get_singleton()->get_window_size();
	CGPoint point;
	point.x = p_screen_coord.x / screen_size.x;
	point.y = p_screen_coord.y / screen_size.y;

	///@TODO maybe give more options here; for now we only take ARAnchors into account that were found during plane detection, taking their extent into account
	NSArray<ARHitTestResult *> *results = [ar_session.currentFrame hitTest:point types:ARHitTestResultTypeExistingPlaneUsingExtent];

	for (ARHitTestResult *result in results) {
		Transform transform;

		matrix_float4x4 m44 = result.worldTransform;
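		// simd_float4x4 is column-major while Godot's Basis stores row vectors,
		// so columns[c][r] maps to elements[r].(x|y|z with c) below.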
		transform.basis.elements[0].x = m44.columns[0][0];
		transform.basis.elements[1].x = m44.columns[0][1];
		transform.basis.elements[2].x = m44.columns[0][2];
		transform.basis.elements[0].y = m44.columns[1][0];
		transform.basis.elements[1].y = m44.columns[1][1];
		transform.basis.elements[2].y = m44.columns[1][2];
		transform.basis.elements[0].z = m44.columns[2][0];
		transform.basis.elements[1].z = m44.columns[2][1];
		transform.basis.elements[2].z = m44.columns[2][2];
		transform.origin.x = m44.columns[3][0];
		transform.origin.y = m44.columns[3][1];
		transform.origin.z = m44.columns[3][2];

		/* important: NOT scaled to world_scale !! */
		arr.push_back(transform);
	}

	return arr;
}
void ARKitInterface::_bind_methods() {
	ClassDB::bind_method(D_METHOD("_notification", "what"), &ARKitInterface::_notification);

	ClassDB::bind_method(D_METHOD("set_light_estimation_is_enabled", "enable"), &ARKitInterface::set_light_estimation_is_enabled);
	ClassDB::bind_method(D_METHOD("get_light_estimation_is_enabled"), &ARKitInterface::get_light_estimation_is_enabled);
	ADD_PROPERTY(PropertyInfo(Variant::BOOL, "light_estimation"), "set_light_estimation_is_enabled", "get_light_estimation_is_enabled");

	ClassDB::bind_method(D_METHOD("get_ambient_intensity"), &ARKitInterface::get_ambient_intensity);
	ClassDB::bind_method(D_METHOD("get_ambient_color_temperature"), &ARKitInterface::get_ambient_color_temperature);

	ClassDB::bind_method(D_METHOD("raycast", "screen_coord"), &ARKitInterface::raycast);
}
bool ARKitInterface::is_stereo() {
	// this is a mono device...
	return false;
}

bool ARKitInterface::is_initialized() const {
	return initialized;
}
bool ARKitInterface::initialize() {
	ARVRServer *arvr_server = ARVRServer::get_singleton();
	ERR_FAIL_NULL_V(arvr_server, false);

	if (!initialized) {
		print_line("initializing ARKit");

		// create our ar session and delegate
		Class ARSessionClass = NSClassFromString(@"ARSession");
		if (ARSessionClass == Nil) {
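			// With ARKit weakly linked the class may not be registered yet on
			// first use; try loading the framework binary by hand before giving up.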
			void *arkit_handle = dlopen("/System/Library/Frameworks/ARKit.framework/ARKit", RTLD_NOW);
			if (arkit_handle) {
				ARSessionClass = NSClassFromString(@"ARSession");
			} else {
				print_line("ARKit init failed");
				return false;
			}
		}
		ar_session = [ARSessionClass new];
		ar_delegate = [ARKitSessionDelegate new];
		ar_delegate.arkit_interface = this;
		ar_session.delegate = ar_delegate;

		// reset our transform
		transform = Transform();

		// make this our primary interface
		arvr_server->set_primary_interface(this);

		// make sure we have our feed setup
		if (feed.is_null()) {
			feed.instance();
			feed->set_name("ARKit");

			CameraServer *cs = CameraServer::get_singleton();
			if (cs != NULL) {
				cs->add_feed(feed);
			}
		}
		feed->set_active(true);

		// yeah!
		initialized = true;

		// Start our session...
		start_session();
	}

	return true;
}
void ARKitInterface::uninitialize() {
	if (initialized) {
		ARVRServer *arvr_server = ARVRServer::get_singleton();
		if (arvr_server != NULL) {
			// no longer our primary interface
			arvr_server->clear_primary_interface_if(this);
		}

		if (feed.is_valid()) {
			CameraServer *cs = CameraServer::get_singleton();
			if (cs != NULL) {
				cs->remove_feed(feed);
			}
			feed.unref();
		}

		remove_all_anchors();

		[ar_session release];
		[ar_delegate release];
		ar_session = NULL;
		ar_delegate = NULL;

		initialized = false;
		session_was_started = false;
	}
}
Size2 ARKitInterface::get_render_targetsize() {
	_THREAD_SAFE_METHOD_

	Size2 target_size = OS::get_singleton()->get_window_size();

	return target_size;
}

Transform ARKitInterface::get_transform_for_eye(ARVRInterface::Eyes p_eye, const Transform &p_cam_transform) {
	_THREAD_SAFE_METHOD_

	Transform transform_for_eye;

	ARVRServer *arvr_server = ARVRServer::get_singleton();
	ERR_FAIL_NULL_V(arvr_server, transform_for_eye);

	if (initialized) {
		float world_scale = arvr_server->get_world_scale();

		// just scale the origin point of our transform; note that we really shouldn't be using world_scale in ARKit but....
		transform_for_eye = transform;
		transform_for_eye.origin *= world_scale;

		transform_for_eye = p_cam_transform * arvr_server->get_reference_frame() * transform_for_eye;
	} else {
		// huh? well, just return what we got....
		transform_for_eye = p_cam_transform;
	}

	return transform_for_eye;
}

CameraMatrix ARKitInterface::get_projection_for_eye(ARVRInterface::Eyes p_eye, real_t p_aspect, real_t p_z_near, real_t p_z_far) {
	// Remember our near and far; they will be used in process() when we obtain our projection from our ARKit session.
	z_near = p_z_near;
	z_far = p_z_far;

	return projection;
}
void ARKitInterface::commit_for_eye(ARVRInterface::Eyes p_eye, RID p_render_target, const Rect2 &p_screen_rect) {
	_THREAD_SAFE_METHOD_

	// We must have a valid render target
	ERR_FAIL_COND(!p_render_target.is_valid());

	// Because we are rendering to our device we must use our main viewport!
	ERR_FAIL_COND(p_screen_rect == Rect2());

	// get the size of our screen
	Rect2 screen_rect = p_screen_rect;

	//	screen_rect.position.x += screen_rect.size.x;
	//	screen_rect.size.x = -screen_rect.size.x;
	//	screen_rect.position.y += screen_rect.size.y;
	//	screen_rect.size.y = -screen_rect.size.y;

	VSG::rasterizer->set_current_render_target(RID());
	VSG::rasterizer->blit_render_target_to_screen(p_render_target, screen_rect, 0);
}
ARVRPositionalTracker *ARKitInterface::get_anchor_for_uuid(const unsigned char *p_uuid) {
	if (anchors == NULL) {
		num_anchors = 0;
		max_anchors = 10;
		anchors = (anchor_map *)malloc(sizeof(anchor_map) * max_anchors);
	}

	ERR_FAIL_NULL_V(anchors, NULL);
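	// Anchors live in a simple grow-by-10 array keyed on the raw 16-byte UUID;
	// a linear scan is fine for the handful of anchors a session produces.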
	for (unsigned int i = 0; i < num_anchors; i++) {
		if (memcmp(anchors[i].uuid, p_uuid, 16) == 0) {
			return anchors[i].tracker;
		}
	}

	if (num_anchors + 1 == max_anchors) {
		max_anchors += 10;
		anchors = (anchor_map *)realloc(anchors, sizeof(anchor_map) * max_anchors);
		ERR_FAIL_NULL_V(anchors, NULL);
	}

	ARVRPositionalTracker *new_tracker = memnew(ARVRPositionalTracker);
	new_tracker->set_type(ARVRServer::TRACKER_ANCHOR);

	char tracker_name[256];
	sprintf(tracker_name, "Anchor %02x%02x%02x%02x-%02x%02x-%02x%02x-%02x%02x-%02x%02x%02x%02x%02x%02x", p_uuid[0], p_uuid[1], p_uuid[2], p_uuid[3], p_uuid[4], p_uuid[5], p_uuid[6], p_uuid[7], p_uuid[8], p_uuid[9], p_uuid[10], p_uuid[11], p_uuid[12], p_uuid[13], p_uuid[14], p_uuid[15]);

	String name = tracker_name;
	print_line("Adding tracker " + name);
	new_tracker->set_name(name);

	// add our tracker
	ARVRServer::get_singleton()->add_tracker(new_tracker);

	anchors[num_anchors].tracker = new_tracker;
	memcpy(anchors[num_anchors].uuid, p_uuid, 16);
	num_anchors++;

	return new_tracker;
}
void ARKitInterface::remove_anchor_for_uuid(const unsigned char *p_uuid) {
	if (anchors != NULL) {
		for (unsigned int i = 0; i < num_anchors; i++) {
			if (memcmp(anchors[i].uuid, p_uuid, 16) == 0) {
				// remove our tracker
				ARVRServer::get_singleton()->remove_tracker(anchors[i].tracker);
				memdelete(anchors[i].tracker);

				// bring remaining forward
				for (unsigned int j = i + 1; j < num_anchors; j++) {
					anchors[j - 1] = anchors[j];
				}

				// decrease count
				num_anchors--;
				return;
			}
		}
	}
}

void ARKitInterface::remove_all_anchors() {
	if (anchors != NULL) {
		for (unsigned int i = 0; i < num_anchors; i++) {
			// remove our tracker
			ARVRServer::get_singleton()->remove_tracker(anchors[i].tracker);
			memdelete(anchors[i].tracker);
		}

		free(anchors);
		anchors = NULL;
		num_anchors = 0;
	}
}
void ARKitInterface::process() {
	_THREAD_SAFE_METHOD_

	if (@available(iOS 11.0, *)) {
		if (initialized) {
			// get our next ARFrame
			ARFrame *current_frame = ar_session.currentFrame;
			if (last_timestamp != current_frame.timestamp) {
				// only process if we have a new frame
				last_timestamp = current_frame.timestamp;

				// get some info about our screen and orientation
				Size2 screen_size = OS::get_singleton()->get_window_size();
				UIInterfaceOrientation orientation = [[UIApplication sharedApplication] statusBarOrientation];

				// Grab our camera image for our backbuffer
				CVPixelBufferRef pixelBuffer = current_frame.capturedImage;
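				// capturedImage is normally bi-planar YCbCr (420f): a full-resolution
				// Y plane plus a half-resolution interleaved CbCr plane.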
				if ((CVPixelBufferGetPlaneCount(pixelBuffer) == 2) && feed.is_valid()) {
					// Plane 0 is our Y and Plane 1 is our CbCr buffer

					// ignored, we check each plane separately
					// image_width = CVPixelBufferGetWidth(pixelBuffer);
					// image_height = CVPixelBufferGetHeight(pixelBuffer);

					// printf("Pixel buffer %i - %i\n", image_width, image_height);

					CVPixelBufferLockBaseAddress(pixelBuffer, kCVPixelBufferLock_ReadOnly);

					// get our buffers
					unsigned char *dataY = (unsigned char *)CVPixelBufferGetBaseAddressOfPlane(pixelBuffer, 0);
					unsigned char *dataCbCr = (unsigned char *)CVPixelBufferGetBaseAddressOfPlane(pixelBuffer, 1);

					if (dataY == NULL) {
						print_line("Couldn't access Y pixel buffer data");
					} else if (dataCbCr == NULL) {
						print_line("Couldn't access CbCr pixel buffer data");
					} else {
						Ref<Image> img[2];
						size_t extraLeft, extraRight, extraTop, extraBottom;

						CVPixelBufferGetExtendedPixels(pixelBuffer, &extraLeft, &extraRight, &extraTop, &extraBottom);

						{
							// do Y
							int new_width = CVPixelBufferGetWidthOfPlane(pixelBuffer, 0);
							int new_height = CVPixelBufferGetHeightOfPlane(pixelBuffer, 0);
							int bytes_per_row = CVPixelBufferGetBytesPerRowOfPlane(pixelBuffer, 0);

							if ((image_width[0] != new_width) || (image_height[0] != new_height)) {
								printf("- Camera padding l:%lu r:%lu t:%lu b:%lu\n", extraLeft, extraRight, extraTop, extraBottom);
								printf("- Camera Y plane size: %i, %i - %i\n", new_width, new_height, bytes_per_row);

								image_width[0] = new_width;
								image_height[0] = new_height;
								img_data[0].resize(new_width * new_height);
							}

							PoolVector<uint8_t>::Write w = img_data[0].write();
							if (new_width == bytes_per_row) {
								memcpy(w.ptr(), dataY, new_width * new_height);
							} else {
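								// rows are padded to bytes_per_row; copy row by row,
								// skipping the extended pixels on the top/left edges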
								int offset_a = 0;
								int offset_b = extraLeft + (extraTop * bytes_per_row);
								for (int r = 0; r < new_height; r++) {
									memcpy(w.ptr() + offset_a, dataY + offset_b, new_width);
									offset_a += new_width;
									offset_b += bytes_per_row;
								}
							}

							img[0].instance();
							img[0]->create(new_width, new_height, 0, Image::FORMAT_R8, img_data[0]);
						}

						{
							// do CbCr
							int new_width = CVPixelBufferGetWidthOfPlane(pixelBuffer, 1);
							int new_height = CVPixelBufferGetHeightOfPlane(pixelBuffer, 1);
							int bytes_per_row = CVPixelBufferGetBytesPerRowOfPlane(pixelBuffer, 1);

							if ((image_width[1] != new_width) || (image_height[1] != new_height)) {
								printf("- Camera CbCr plane size: %i, %i - %i\n", new_width, new_height, bytes_per_row);

								image_width[1] = new_width;
								image_height[1] = new_height;
								img_data[1].resize(2 * new_width * new_height);
							}

							PoolVector<uint8_t>::Write w = img_data[1].write();
							if ((2 * new_width) == bytes_per_row) {
								memcpy(w.ptr(), dataCbCr, 2 * new_width * new_height);
							} else {
								int offset_a = 0;
								int offset_b = extraLeft + (extraTop * bytes_per_row);
								for (int r = 0; r < new_height; r++) {
									memcpy(w.ptr() + offset_a, dataCbCr + offset_b, 2 * new_width);
									offset_a += 2 * new_width;
									offset_b += bytes_per_row;
								}
							}

							img[1].instance();
							img[1]->create(new_width, new_height, 0, Image::FORMAT_RG8, img_data[1]);
						}

						// set our texture...
						feed->set_YCbCr_imgs(img[0], img[1]);

						// now build our transform to display this as a background image that matches our camera
						CGAffineTransform affine_transform = [current_frame displayTransformForOrientation:orientation viewportSize:CGSizeMake(screen_size.width, screen_size.height)];

						// we need to invert this, probably row vs. column notation
						affine_transform = CGAffineTransformInvert(affine_transform);
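						// The display transform maps normalized image coordinates to
						// normalized view coordinates; the sign flips below seem to adapt
						// it to the feed's coordinate convention per orientation (treat
						// this as a best guess rather than a derivation).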
						if (orientation != UIInterfaceOrientationPortrait) {
							affine_transform.b = -affine_transform.b;
							affine_transform.d = -affine_transform.d;
							affine_transform.ty = 1.0 - affine_transform.ty;
						} else {
							affine_transform.c = -affine_transform.c;
							affine_transform.a = -affine_transform.a;
							affine_transform.tx = 1.0 - affine_transform.tx;
						}

						Transform2D display_transform = Transform2D(
								affine_transform.a, affine_transform.b,
								affine_transform.c, affine_transform.d,
								affine_transform.tx, affine_transform.ty);

						feed->set_transform(display_transform);
					}

					// and unlock
					CVPixelBufferUnlockBaseAddress(pixelBuffer, kCVPixelBufferLock_ReadOnly);
				}

				// Record light estimation to apply to our scene
				if (light_estimation_is_enabled) {
					ambient_intensity = current_frame.lightEstimate.ambientIntensity;

					///@TODO it's there, but not there.. what to do with this...
					// https://developer.apple.com/documentation/arkit/arlightestimate?language=objc
					// ambient_color_temperature = current_frame.lightEstimate.ambientColorTemperature;
				}

				// Process our camera
				ARCamera *camera = current_frame.camera;

				// strangely enough we have two states here, rolling them up into one
				if (camera.trackingState == ARTrackingStateNotAvailable) {
					// no tracking, would be good if we black out the screen or something...
					tracking_state = ARVRInterface::ARVR_NOT_TRACKING;
				} else {
					if (camera.trackingState == ARTrackingStateNormal) {
						tracking_state = ARVRInterface::ARVR_NORMAL_TRACKING;
					} else if (camera.trackingStateReason == ARTrackingStateReasonExcessiveMotion) {
						tracking_state = ARVRInterface::ARVR_EXCESSIVE_MOTION;
					} else if (camera.trackingStateReason == ARTrackingStateReasonInsufficientFeatures) {
						tracking_state = ARVRInterface::ARVR_INSUFFICIENT_FEATURES;
					} else {
						tracking_state = ARVRInterface::ARVR_UNKNOWN_TRACKING;
					}

					// copy our current frame transform
					matrix_float4x4 m44 = camera.transform;
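					// ARKit reports the camera pose relative to the landscape-right
					// sensor orientation; the branches below appear to rotate the
					// X/Y basis vectors to match the current interface orientation.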
					if (orientation == UIInterfaceOrientationLandscapeLeft) {
						transform.basis.elements[0].x = m44.columns[0][0];
						transform.basis.elements[1].x = m44.columns[0][1];
						transform.basis.elements[2].x = m44.columns[0][2];
						transform.basis.elements[0].y = m44.columns[1][0];
						transform.basis.elements[1].y = m44.columns[1][1];
						transform.basis.elements[2].y = m44.columns[1][2];
					} else if (orientation == UIInterfaceOrientationPortrait) {
						transform.basis.elements[0].x = m44.columns[1][0];
						transform.basis.elements[1].x = m44.columns[1][1];
						transform.basis.elements[2].x = m44.columns[1][2];
						transform.basis.elements[0].y = -m44.columns[0][0];
						transform.basis.elements[1].y = -m44.columns[0][1];
						transform.basis.elements[2].y = -m44.columns[0][2];
					} else if (orientation == UIInterfaceOrientationLandscapeRight) {
						transform.basis.elements[0].x = -m44.columns[0][0];
						transform.basis.elements[1].x = -m44.columns[0][1];
						transform.basis.elements[2].x = -m44.columns[0][2];
						transform.basis.elements[0].y = -m44.columns[1][0];
						transform.basis.elements[1].y = -m44.columns[1][1];
						transform.basis.elements[2].y = -m44.columns[1][2];
					} else if (orientation == UIInterfaceOrientationPortraitUpsideDown) {
						// this may not be correct
						transform.basis.elements[0].x = m44.columns[1][0];
						transform.basis.elements[1].x = m44.columns[1][1];
						transform.basis.elements[2].x = m44.columns[1][2];
						transform.basis.elements[0].y = m44.columns[0][0];
						transform.basis.elements[1].y = m44.columns[0][1];
						transform.basis.elements[2].y = m44.columns[0][2];
					}
					transform.basis.elements[0].z = m44.columns[2][0];
					transform.basis.elements[1].z = m44.columns[2][1];
					transform.basis.elements[2].z = m44.columns[2][2];
					transform.origin.x = m44.columns[3][0];
					transform.origin.y = m44.columns[3][1];
					transform.origin.z = m44.columns[3][2];

					// copy our current frame's projection, using projectionMatrixForOrientation:viewportSize:zNear:zFar: so we can set our own near and far
					m44 = [camera projectionMatrixForOrientation:orientation viewportSize:CGSizeMake(screen_size.width, screen_size.height) zNear:z_near zFar:z_far];
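					// both simd_float4x4 and CameraMatrix are column-major, so the
					// projection can be copied element for element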
					projection.matrix[0][0] = m44.columns[0][0];
					projection.matrix[1][0] = m44.columns[1][0];
					projection.matrix[2][0] = m44.columns[2][0];
					projection.matrix[3][0] = m44.columns[3][0];
					projection.matrix[0][1] = m44.columns[0][1];
					projection.matrix[1][1] = m44.columns[1][1];
					projection.matrix[2][1] = m44.columns[2][1];
					projection.matrix[3][1] = m44.columns[3][1];
					projection.matrix[0][2] = m44.columns[0][2];
					projection.matrix[1][2] = m44.columns[1][2];
					projection.matrix[2][2] = m44.columns[2][2];
					projection.matrix[3][2] = m44.columns[3][2];
					projection.matrix[0][3] = m44.columns[0][3];
					projection.matrix[1][3] = m44.columns[1][3];
					projection.matrix[2][3] = m44.columns[2][3];
					projection.matrix[3][3] = m44.columns[3][3];
				}
			}
		}
	}
}
void ARKitInterface::_add_or_update_anchor(void *p_anchor) {
	_THREAD_SAFE_METHOD_

	ARAnchor *anchor = (ARAnchor *)p_anchor;
	unsigned char uuid[16];
	[anchor.identifier getUUIDBytes:uuid];

	ARVRPositionalTracker *tracker = get_anchor_for_uuid(uuid);
	if (tracker != NULL) {
		// let's update our mesh! (using Arjen's code as-is for now)
		// we should also probably limit how often we do this...

		// can we safely cast this?
		ARPlaneAnchor *planeAnchor = (ARPlaneAnchor *)anchor;

		if (planeAnchor.geometry.triangleCount > 0) {
			Ref<SurfaceTool> surftool;
			surftool.instance();
			surftool->begin(Mesh::PRIMITIVE_TRIANGLES);
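			// walk the indices in reverse, presumably to flip the triangle winding
			// to the front-face order Godot expects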
			for (int j = planeAnchor.geometry.triangleCount * 3 - 1; j >= 0; j--) {
				int16_t index = planeAnchor.geometry.triangleIndices[j];
				simd_float3 vrtx = planeAnchor.geometry.vertices[index];
				simd_float2 textcoord = planeAnchor.geometry.textureCoordinates[index];
				surftool->add_uv(Vector2(textcoord[0], textcoord[1]));
				surftool->add_color(Color(0.8, 0.8, 0.8));
				surftool->add_vertex(Vector3(vrtx[0], vrtx[1], vrtx[2]));
			}

			surftool->generate_normals();
			tracker->set_mesh(surftool->commit());
		} else {
			Ref<Mesh> nomesh;
			tracker->set_mesh(nomesh);
		}

		// Note: this also contains a scale factor which gives us an idea of the size of the anchor
		// We may extract that in our ARVRAnchor class
		Basis b;
		matrix_float4x4 m44 = anchor.transform;
		b.elements[0].x = m44.columns[0][0];
		b.elements[1].x = m44.columns[0][1];
		b.elements[2].x = m44.columns[0][2];
		b.elements[0].y = m44.columns[1][0];
		b.elements[1].y = m44.columns[1][1];
		b.elements[2].y = m44.columns[1][2];
		b.elements[0].z = m44.columns[2][0];
		b.elements[1].z = m44.columns[2][1];
		b.elements[2].z = m44.columns[2][2];
		tracker->set_orientation(b);
		tracker->set_rw_position(Vector3(m44.columns[3][0], m44.columns[3][1], m44.columns[3][2]));
	}
}
void ARKitInterface::_remove_anchor(void *p_anchor) {
	_THREAD_SAFE_METHOD_

	ARAnchor *anchor = (ARAnchor *)p_anchor;
	unsigned char uuid[16];
	[anchor.identifier getUUIDBytes:uuid];

	remove_anchor_for_uuid(uuid);
}
ARKitInterface::ARKitInterface() {
	initialized = false;
	session_was_started = false;
	plane_detection_is_enabled = false;
	light_estimation_is_enabled = false;
	ar_session = NULL;
	z_near = 0.01;
	z_far = 1000.0;
	projection.set_perspective(60.0, 1.0, z_near, z_far, false);
	anchors = NULL;
	num_anchors = 0;
	ambient_intensity = 1.0;
	ambient_color_temperature = 1.0;
	image_width[0] = 0;
	image_width[1] = 0;
	image_height[0] = 0;
	image_height[1] = 0;
}
ARKitInterface::~ARKitInterface() {
	remove_all_anchors();

	// and make sure we cleanup if we haven't already
	if (is_initialized()) {
		uninitialize();
	}
}