// analyzer.c — whole-program analysis: render/compute/raytracing pipelines,
// descriptor set groups, shader builtin usage, and capability detection.
  1. #include "analyzer.h"
  2. #include "array.h"
  3. #include "errors.h"
  4. #include <string.h>
  5. static render_pipelines all_render_pipelines;
  6. // a pipeline group is a collection of pipelines that share shaders
  7. static render_pipeline_groups all_render_pipeline_groups;
  8. static compute_shaders all_compute_shaders;
  9. static raytracing_pipelines all_raytracing_pipelines;
  10. // a pipeline group is a collection of pipelines that share shaders
  11. static raytracing_pipeline_groups all_raytracing_pipeline_groups;
  12. static void find_referenced_global_for_var(variable v, global_array *globals, bool read, bool write) {
  13. for (global_id j = 0; get_global(j) != NULL && get_global(j)->type != NO_TYPE; ++j) {
  14. global *g = get_global(j);
  15. if (v.index == g->var_index) {
  16. bool found = false;
  17. for (size_t k = 0; k < globals->size; ++k) {
  18. if (globals->globals[k] == j) {
  19. found = true;
  20. if (read) {
  21. globals->readable[k] = true;
  22. }
  23. if (write) {
  24. globals->writable[k] = true;
  25. }
  26. break;
  27. }
  28. }
  29. if (!found) {
  30. globals->globals[globals->size] = j;
  31. globals->readable[globals->size] = read;
  32. globals->writable[globals->size] = write;
  33. globals->size += 1;
  34. }
  35. return;
  36. }
  37. }
  38. }
  39. void find_referenced_globals(function *f, global_array *globals) {
  40. if (f->block == NULL) {
  41. // built-in
  42. return;
  43. }
  44. function *functions[256];
  45. size_t functions_size = 0;
  46. functions[functions_size] = f;
  47. functions_size += 1;
  48. find_referenced_functions(f, functions, &functions_size);
  49. for (size_t l = 0; l < functions_size; ++l) {
  50. uint8_t *data = functions[l]->code.o;
  51. size_t size = functions[l]->code.size;
  52. size_t index = 0;
  53. while (index < size) {
  54. opcode *o = (opcode *)&data[index];
  55. switch (o->type) {
  56. case OPCODE_MULTIPLY:
  57. case OPCODE_DIVIDE:
  58. case OPCODE_ADD:
  59. case OPCODE_SUB:
  60. case OPCODE_EQUALS:
  61. case OPCODE_NOT_EQUALS:
  62. case OPCODE_GREATER:
  63. case OPCODE_GREATER_EQUAL:
  64. case OPCODE_LESS:
  65. case OPCODE_LESS_EQUAL: {
  66. find_referenced_global_for_var(o->op_binary.left, globals, false, false);
  67. find_referenced_global_for_var(o->op_binary.right, globals, false, false);
  68. break;
  69. }
  70. case OPCODE_LOAD_ACCESS_LIST: {
  71. find_referenced_global_for_var(o->op_load_access_list.from, globals, true, false);
  72. break;
  73. }
  74. case OPCODE_STORE_ACCESS_LIST:
  75. case OPCODE_SUB_AND_STORE_ACCESS_LIST:
  76. case OPCODE_ADD_AND_STORE_ACCESS_LIST:
  77. case OPCODE_DIVIDE_AND_STORE_ACCESS_LIST:
  78. case OPCODE_MULTIPLY_AND_STORE_ACCESS_LIST: {
  79. find_referenced_global_for_var(o->op_store_access_list.to, globals, false, true);
  80. break;
  81. }
  82. case OPCODE_CALL: {
  83. for (uint8_t i = 0; i < o->op_call.parameters_size; ++i) {
  84. find_referenced_global_for_var(o->op_call.parameters[i], globals, false, false);
  85. }
  86. break;
  87. }
  88. default:
  89. break;
  90. }
  91. index += o->size;
  92. }
  93. }
  94. }
  95. void find_referenced_functions(function *f, function **functions, size_t *functions_size) {
  96. if (f->block == NULL) {
  97. // built-in
  98. return;
  99. }
  100. uint8_t *data = f->code.o;
  101. size_t size = f->code.size;
  102. size_t index = 0;
  103. while (index < size) {
  104. opcode *o = (opcode *)&data[index];
  105. switch (o->type) {
  106. case OPCODE_CALL: {
  107. for (function_id i = 0; get_function(i) != NULL; ++i) {
  108. function *f = get_function(i);
  109. if (f->name == o->op_call.func) {
  110. if (f->block == NULL) {
  111. // built-in
  112. break;
  113. }
  114. bool found = false;
  115. for (size_t j = 0; j < *functions_size; ++j) {
  116. if (functions[j]->name == o->op_call.func) {
  117. found = true;
  118. break;
  119. }
  120. }
  121. if (!found) {
  122. functions[*functions_size] = f;
  123. *functions_size += 1;
  124. find_referenced_functions(f, functions, functions_size);
  125. }
  126. break;
  127. }
  128. }
  129. break;
  130. }
  131. default:
  132. break;
  133. }
  134. index += o->size;
  135. }
  136. }
  137. void find_used_builtins(function *f) {
  138. if (f->block == NULL) {
  139. // built-in
  140. return;
  141. }
  142. if (f->used_builtins.builtins_analyzed) {
  143. return;
  144. }
  145. f->used_builtins.builtins_analyzed = true;
  146. uint8_t *data = f->code.o;
  147. size_t size = f->code.size;
  148. size_t index = 0;
  149. while (index < size) {
  150. opcode *o = (opcode *)&data[index];
  151. switch (o->type) {
  152. case OPCODE_CALL: {
  153. name_id func = o->op_call.func;
  154. if (func == add_name("dispatch_thread_id")) {
  155. f->used_builtins.dispatch_thread_id = true;
  156. }
  157. if (func == add_name("group_thread_id")) {
  158. f->used_builtins.group_thread_id = true;
  159. }
  160. if (func == add_name("group_id")) {
  161. f->used_builtins.group_id = true;
  162. }
  163. if (func == add_name("vertex_id")) {
  164. f->used_builtins.vertex_id = true;
  165. }
  166. for (function_id i = 0; get_function(i) != NULL; ++i) {
  167. function *called = get_function(i);
  168. if (called->name == o->op_call.func) {
  169. find_used_builtins(f);
  170. f->used_builtins.dispatch_thread_id |= called->used_builtins.dispatch_thread_id;
  171. f->used_builtins.group_thread_id |= called->used_builtins.group_thread_id;
  172. f->used_builtins.group_id |= called->used_builtins.group_id;
  173. f->used_builtins.vertex_id |= called->used_builtins.vertex_id;
  174. break;
  175. }
  176. }
  177. break;
  178. }
  179. default:
  180. break;
  181. }
  182. index += o->size;
  183. }
  184. }
  185. static global *find_global_by_var(variable var) {
  186. for (global_id global_index = 0; get_global(global_index) != NULL && get_global(global_index)->type != NO_TYPE; ++global_index) {
  187. if (var.index == get_global(global_index)->var_index) {
  188. return get_global(global_index);
  189. }
  190. }
  191. return NULL;
  192. }
  193. void find_used_capabilities(function *f) {
  194. if (f->block == NULL) {
  195. // built-in
  196. return;
  197. }
  198. if (f->used_capabilities.capabilities_analyzed) {
  199. return;
  200. }
  201. f->used_capabilities.capabilities_analyzed = true;
  202. uint8_t *data = f->code.o;
  203. size_t size = f->code.size;
  204. size_t index = 0;
  205. variable last_base_texture_from = {0};
  206. variable last_base_texture_to = {0};
  207. while (index < size) {
  208. opcode *o = (opcode *)&data[index];
  209. switch (o->type) {
  210. case OPCODE_STORE_ACCESS_LIST: {
  211. variable to = o->op_store_access_list.to;
  212. type_id to_type = to.type.type;
  213. if (is_texture(to_type)) {
  214. assert(get_type(to_type)->array_size == 0);
  215. f->used_capabilities.image_write = true;
  216. global *g = find_global_by_var(to);
  217. assert(g != NULL);
  218. g->usage |= GLOBAL_USAGE_TEXTURE_WRITE;
  219. }
  220. break;
  221. }
  222. case OPCODE_LOAD_ACCESS_LIST: {
  223. variable from = o->op_load_access_list.from;
  224. type_id from_type = from.type.type;
  225. if (is_texture(from_type)) {
  226. f->used_capabilities.image_read = true;
  227. if (get_type(from_type)->array_size > 0) {
  228. last_base_texture_from = from;
  229. last_base_texture_to = o->op_load_access_list.to;
  230. }
  231. else {
  232. global *g = find_global_by_var(from);
  233. assert(g != NULL);
  234. g->usage |= GLOBAL_USAGE_TEXTURE_READ;
  235. }
  236. }
  237. break;
  238. }
  239. case OPCODE_CALL: {
  240. name_id func_name = o->op_call.func;
  241. if (func_name == add_name("sample") || func_name == add_name("sample_lod")) {
  242. variable tex_parameter = o->op_call.parameters[0];
  243. global *g = NULL;
  244. if (tex_parameter.kind == VARIABLE_INTERNAL) {
  245. assert(last_base_texture_to.index == tex_parameter.index);
  246. g = find_global_by_var(last_base_texture_from);
  247. }
  248. else {
  249. g = find_global_by_var(tex_parameter);
  250. }
  251. assert(g != NULL);
  252. g->usage |= GLOBAL_USAGE_TEXTURE_SAMPLE;
  253. }
  254. for (function_id i = 0; get_function(i) != NULL; ++i) {
  255. function *called = get_function(i);
  256. if (called->name == func_name) {
  257. find_used_capabilities(f);
  258. f->used_capabilities.image_read |= called->used_capabilities.image_read;
  259. f->used_capabilities.image_write |= called->used_capabilities.image_write;
  260. break;
  261. }
  262. }
  263. break;
  264. }
  265. default:
  266. break;
  267. }
  268. index += o->size;
  269. }
  270. }
  271. static void add_found_type(type_id t, type_id *types, size_t *types_size) {
  272. for (size_t i = 0; i < *types_size; ++i) {
  273. if (types[i] == t) {
  274. return;
  275. }
  276. }
  277. types[*types_size] = t;
  278. *types_size += 1;
  279. }
  280. void find_referenced_types(function *f, type_id *types, size_t *types_size) {
  281. if (f->block == NULL) {
  282. // built-in
  283. return;
  284. }
  285. function *functions[256];
  286. size_t functions_size = 0;
  287. functions[functions_size] = f;
  288. functions_size += 1;
  289. find_referenced_functions(f, functions, &functions_size);
  290. for (size_t function_index = 0; function_index < functions_size; ++function_index) {
  291. function *func = functions[function_index];
  292. debug_context context = {0};
  293. for (uint8_t parameter_index = 0; parameter_index < func->parameters_size; ++parameter_index) {
  294. check(func->parameter_types[parameter_index].type != NO_TYPE, context, "Function parameter type not found");
  295. add_found_type(func->parameter_types[parameter_index].type, types, types_size);
  296. }
  297. check(func->return_type.type != NO_TYPE, context, "Function return type missing");
  298. add_found_type(func->return_type.type, types, types_size);
  299. uint8_t *data = functions[function_index]->code.o;
  300. size_t size = functions[function_index]->code.size;
  301. size_t index = 0;
  302. while (index < size) {
  303. opcode *o = (opcode *)&data[index];
  304. switch (o->type) {
  305. case OPCODE_VAR:
  306. add_found_type(o->op_var.var.type.type, types, types_size);
  307. break;
  308. default:
  309. break;
  310. }
  311. index += o->size;
  312. }
  313. }
  314. }
  315. static bool has_set(descriptor_sets *sets, descriptor_set *set) {
  316. for (size_t set_index = 0; set_index < sets->size; ++set_index) {
  317. if (sets->values[set_index] == set) {
  318. return true;
  319. }
  320. }
  321. return false;
  322. }
  323. static void add_set(descriptor_sets *sets, descriptor_set *set) {
  324. if (has_set(sets, set)) {
  325. return;
  326. }
  327. static_array_push_p(sets, set);
  328. }
  329. static void find_referenced_sets(global_array *globals, descriptor_sets *sets) {
  330. for (size_t global_index = 0; global_index < globals->size; ++global_index) {
  331. global *g = get_global(globals->globals[global_index]);
  332. if (g->sets_count == 0) {
  333. continue;
  334. }
  335. if (g->sets_count == 1) {
  336. add_set(sets, g->sets[0]);
  337. continue;
  338. }
  339. }
  340. for (size_t global_index = 0; global_index < globals->size; ++global_index) {
  341. global *g = get_global(globals->globals[global_index]);
  342. if (g->sets_count < 2) {
  343. continue;
  344. }
  345. bool found = false;
  346. for (size_t set_index = 0; set_index < g->sets_count; ++set_index) {
  347. descriptor_set *set = g->sets[set_index];
  348. if (has_set(sets, set)) {
  349. found = true;
  350. break;
  351. }
  352. }
  353. if (!found) {
  354. debug_context context = {0};
  355. error(context, "Global %s could be used from multiple descriptor sets.", get_name(g->name));
  356. }
  357. }
  358. }
  359. static render_pipeline extract_render_pipeline_from_type(type *t) {
  360. name_id vertex_shader_name = NO_NAME;
  361. name_id amplification_shader_name = NO_NAME;
  362. name_id mesh_shader_name = NO_NAME;
  363. name_id fragment_shader_name = NO_NAME;
  364. for (size_t j = 0; j < t->members.size; ++j) {
  365. if (t->members.m[j].name == add_name("vertex")) {
  366. vertex_shader_name = t->members.m[j].value.identifier;
  367. }
  368. else if (t->members.m[j].name == add_name("amplification")) {
  369. amplification_shader_name = t->members.m[j].value.identifier;
  370. }
  371. else if (t->members.m[j].name == add_name("mesh")) {
  372. mesh_shader_name = t->members.m[j].value.identifier;
  373. }
  374. else if (t->members.m[j].name == add_name("fragment")) {
  375. fragment_shader_name = t->members.m[j].value.identifier;
  376. }
  377. }
  378. debug_context context = {0};
  379. check(vertex_shader_name != NO_NAME || mesh_shader_name != NO_NAME, context, "vertex or mesh shader missing");
  380. check(fragment_shader_name != NO_NAME, context, "fragment shader missing");
  381. render_pipeline pipeline = {0};
  382. for (function_id i = 0; get_function(i) != NULL; ++i) {
  383. function *f = get_function(i);
  384. if (vertex_shader_name != NO_NAME && f->name == vertex_shader_name) {
  385. pipeline.vertex_shader = f;
  386. }
  387. if (amplification_shader_name != NO_NAME && f->name == amplification_shader_name) {
  388. pipeline.amplification_shader = f;
  389. }
  390. if (mesh_shader_name != NO_NAME && f->name == mesh_shader_name) {
  391. pipeline.mesh_shader = f;
  392. }
  393. if (f->name == fragment_shader_name) {
  394. pipeline.fragment_shader = f;
  395. }
  396. }
  397. return pipeline;
  398. }
  399. static void find_all_render_pipelines(void) {
  400. static_array_init(all_render_pipelines);
  401. for (type_id i = 0; get_type(i) != NULL; ++i) {
  402. type *t = get_type(i);
  403. if (!t->built_in && has_attribute(&t->attributes, add_name("pipe"))) {
  404. static_array_push(all_render_pipelines, extract_render_pipeline_from_type(t));
  405. }
  406. }
  407. }
  408. static bool same_shader(function *a, function *b) {
  409. if (a == NULL && b == NULL) {
  410. return false;
  411. }
  412. return a == b;
  413. }
  414. static void find_render_pipeline_groups(void) {
  415. static_array_init(all_render_pipeline_groups);
  416. render_pipeline_indices remaining_pipelines;
  417. static_array_init(remaining_pipelines);
  418. for (uint32_t index = 0; index < all_render_pipelines.size; ++index) {
  419. static_array_push(remaining_pipelines, index);
  420. }
  421. while (remaining_pipelines.size > 0) {
  422. render_pipeline_indices next_remaining_pipelines;
  423. static_array_init(next_remaining_pipelines);
  424. render_pipeline_group group;
  425. static_array_init(group);
  426. static_array_push(group, remaining_pipelines.values[0]);
  427. for (size_t index = 1; index < remaining_pipelines.size; ++index) {
  428. uint32_t pipeline_index = remaining_pipelines.values[index];
  429. render_pipeline *pipeline = &all_render_pipelines.values[pipeline_index];
  430. bool found = false;
  431. for (size_t index_in_bucket = 0; index_in_bucket < group.size; ++index_in_bucket) {
  432. render_pipeline *pipeline_in_group = &all_render_pipelines.values[group.values[index_in_bucket]];
  433. if (same_shader(pipeline->vertex_shader, pipeline_in_group->vertex_shader) ||
  434. same_shader(pipeline->amplification_shader, pipeline_in_group->amplification_shader) ||
  435. same_shader(pipeline->mesh_shader, pipeline_in_group->mesh_shader) ||
  436. same_shader(pipeline->fragment_shader, pipeline_in_group->fragment_shader)) {
  437. found = true;
  438. break;
  439. }
  440. }
  441. if (found) {
  442. static_array_push(group, pipeline_index);
  443. }
  444. else {
  445. static_array_push(next_remaining_pipelines, pipeline_index);
  446. }
  447. }
  448. remaining_pipelines = next_remaining_pipelines;
  449. static_array_push(all_render_pipeline_groups, group);
  450. }
  451. }
  452. static void find_all_compute_shaders(void) {
  453. static_array_init(all_compute_shaders);
  454. for (function_id i = 0; get_function(i) != NULL; ++i) {
  455. function *f = get_function(i);
  456. if (has_attribute(&f->attributes, add_name("compute"))) {
  457. static_array_push(all_compute_shaders, f);
  458. }
  459. }
  460. }
  461. static raytracing_pipeline extract_raytracing_pipeline_from_type(type *t) {
  462. name_id gen_shader_name = NO_NAME;
  463. name_id miss_shader_name = NO_NAME;
  464. name_id closest_shader_name = NO_NAME;
  465. name_id intersection_shader_name = NO_NAME;
  466. name_id any_shader_name = NO_NAME;
  467. for (size_t j = 0; j < t->members.size; ++j) {
  468. if (t->members.m[j].name == add_name("gen")) {
  469. gen_shader_name = t->members.m[j].value.identifier;
  470. }
  471. else if (t->members.m[j].name == add_name("miss")) {
  472. miss_shader_name = t->members.m[j].value.identifier;
  473. }
  474. else if (t->members.m[j].name == add_name("closest")) {
  475. closest_shader_name = t->members.m[j].value.identifier;
  476. }
  477. else if (t->members.m[j].name == add_name("intersection")) {
  478. intersection_shader_name = t->members.m[j].value.identifier;
  479. }
  480. else if (t->members.m[j].name == add_name("any")) {
  481. any_shader_name = t->members.m[j].value.identifier;
  482. }
  483. }
  484. raytracing_pipeline pipeline = {0};
  485. for (function_id i = 0; get_function(i) != NULL; ++i) {
  486. function *f = get_function(i);
  487. if (gen_shader_name != NO_NAME && f->name == gen_shader_name) {
  488. pipeline.gen_shader = f;
  489. }
  490. if (miss_shader_name != NO_NAME && f->name == miss_shader_name) {
  491. pipeline.miss_shader = f;
  492. }
  493. if (closest_shader_name != NO_NAME && f->name == closest_shader_name) {
  494. pipeline.closest_shader = f;
  495. }
  496. if (intersection_shader_name != NO_NAME && f->name == intersection_shader_name) {
  497. pipeline.intersection_shader = f;
  498. }
  499. if (any_shader_name != NO_NAME && f->name == any_shader_name) {
  500. pipeline.any_shader = f;
  501. }
  502. }
  503. return pipeline;
  504. }
  505. static void find_all_raytracing_pipelines(void) {
  506. static_array_init(all_raytracing_pipelines);
  507. for (type_id i = 0; get_type(i) != NULL; ++i) {
  508. type *t = get_type(i);
  509. if (!t->built_in && has_attribute(&t->attributes, add_name("raypipe"))) {
  510. static_array_push(all_raytracing_pipelines, extract_raytracing_pipeline_from_type(t));
  511. }
  512. }
  513. }
  514. static void find_raytracing_pipeline_groups(void) {
  515. static_array_init(all_raytracing_pipeline_groups);
  516. raytracing_pipeline_indices remaining_pipelines;
  517. static_array_init(remaining_pipelines);
  518. for (uint32_t index = 0; index < all_raytracing_pipelines.size; ++index) {
  519. static_array_push(remaining_pipelines, index);
  520. }
  521. while (remaining_pipelines.size > 0) {
  522. raytracing_pipeline_indices next_remaining_pipelines;
  523. static_array_init(next_remaining_pipelines);
  524. raytracing_pipeline_group group;
  525. static_array_init(group);
  526. static_array_push(group, remaining_pipelines.values[0]);
  527. for (size_t index = 1; index < remaining_pipelines.size; ++index) {
  528. uint32_t pipeline_index = remaining_pipelines.values[index];
  529. raytracing_pipeline *pipeline = &all_raytracing_pipelines.values[pipeline_index];
  530. bool found = false;
  531. for (size_t index_in_bucket = 0; index_in_bucket < group.size; ++index_in_bucket) {
  532. raytracing_pipeline *pipeline_in_group = &all_raytracing_pipelines.values[group.values[index_in_bucket]];
  533. if (pipeline->gen_shader == pipeline_in_group->gen_shader || pipeline->miss_shader == pipeline_in_group->miss_shader ||
  534. pipeline->closest_shader == pipeline_in_group->closest_shader || pipeline->intersection_shader == pipeline_in_group->intersection_shader ||
  535. pipeline->any_shader == pipeline_in_group->any_shader) {
  536. found = true;
  537. break;
  538. }
  539. }
  540. if (found) {
  541. static_array_push(group, pipeline_index);
  542. }
  543. else {
  544. static_array_push(next_remaining_pipelines, pipeline_index);
  545. }
  546. }
  547. remaining_pipelines = next_remaining_pipelines;
  548. static_array_push(all_raytracing_pipeline_groups, group);
  549. }
  550. }
  551. static void check_globals_in_descriptor_set_group(descriptor_set_group *group) {
  552. static_array(global_id, globals, 256);
  553. globals set_globals;
  554. static_array_init(set_globals);
  555. for (size_t set_index = 0; set_index < group->size; ++set_index) {
  556. descriptor_set *set = group->values[set_index];
  557. for (size_t global_index = 0; global_index < set->globals.size; ++global_index) {
  558. global_id g = set->globals.globals[global_index];
  559. for (size_t global_index2 = 0; global_index2 < set_globals.size; ++global_index2) {
  560. if (set_globals.values[global_index2] == g) {
  561. debug_context context = {0};
  562. error(context, "Global used from more than one descriptor set in one descriptor set group");
  563. }
  564. }
  565. static_array_push(set_globals, g);
  566. }
  567. }
  568. }
  569. static void update_globals_in_descriptor_set_group(descriptor_set_group *group, global_array *globals) {
  570. for (size_t set_index = 0; set_index < group->size; ++set_index) {
  571. descriptor_set *set = group->values[set_index];
  572. for (size_t global_index = 0; global_index < set->globals.size; ++global_index) {
  573. global_id g = set->globals.globals[global_index];
  574. for (size_t global_index2 = 0; global_index2 < globals->size; ++global_index2) {
  575. if (globals->globals[global_index2] == g) {
  576. if (globals->readable[global_index2]) {
  577. set->globals.readable[global_index] = true;
  578. }
  579. if (globals->writable[global_index2]) {
  580. set->globals.writable[global_index] = true;
  581. }
  582. }
  583. }
  584. }
  585. }
  586. }
  587. static descriptor_set_groups all_descriptor_set_groups;
  588. descriptor_set_group *get_descriptor_set_group(uint32_t descriptor_set_group_index) {
  589. assert(descriptor_set_group_index < all_descriptor_set_groups.size);
  590. return &all_descriptor_set_groups.values[descriptor_set_group_index];
  591. }
  592. static void assign_descriptor_set_group_index(function *f, uint32_t descriptor_set_group_index) {
  593. assert(f->descriptor_set_group_index == UINT32_MAX || f->descriptor_set_group_index == descriptor_set_group_index);
  594. f->descriptor_set_group_index = descriptor_set_group_index;
  595. }
  596. static void find_descriptor_set_groups(void) {
  597. static_array_init(all_descriptor_set_groups);
  598. for (size_t pipeline_group_index = 0; pipeline_group_index < all_render_pipeline_groups.size; ++pipeline_group_index) {
  599. descriptor_set_group group;
  600. static_array_init(group);
  601. global_array function_globals = {0};
  602. render_pipeline_group *pipeline_group = &all_render_pipeline_groups.values[pipeline_group_index];
  603. for (size_t pipeline_index = 0; pipeline_index < pipeline_group->size; ++pipeline_index) {
  604. render_pipeline *pipeline = &all_render_pipelines.values[pipeline_group->values[pipeline_index]];
  605. if (pipeline->vertex_shader != NULL) {
  606. find_referenced_globals(pipeline->vertex_shader, &function_globals);
  607. }
  608. if (pipeline->amplification_shader != NULL) {
  609. find_referenced_globals(pipeline->amplification_shader, &function_globals);
  610. }
  611. if (pipeline->mesh_shader != NULL) {
  612. find_referenced_globals(pipeline->mesh_shader, &function_globals);
  613. }
  614. if (pipeline->fragment_shader != NULL) {
  615. find_referenced_globals(pipeline->fragment_shader, &function_globals);
  616. }
  617. }
  618. find_referenced_sets(&function_globals, &group);
  619. check_globals_in_descriptor_set_group(&group);
  620. update_globals_in_descriptor_set_group(&group, &function_globals);
  621. uint32_t descriptor_set_group_index = (uint32_t)all_descriptor_set_groups.size;
  622. static_array_push(all_descriptor_set_groups, group);
  623. for (size_t pipeline_index = 0; pipeline_index < pipeline_group->size; ++pipeline_index) {
  624. render_pipeline *pipeline = &all_render_pipelines.values[pipeline_group->values[pipeline_index]];
  625. if (pipeline->vertex_shader != NULL) {
  626. assign_descriptor_set_group_index(pipeline->vertex_shader, descriptor_set_group_index);
  627. }
  628. if (pipeline->amplification_shader != NULL) {
  629. assign_descriptor_set_group_index(pipeline->amplification_shader, descriptor_set_group_index);
  630. }
  631. if (pipeline->mesh_shader != NULL) {
  632. assign_descriptor_set_group_index(pipeline->mesh_shader, descriptor_set_group_index);
  633. }
  634. if (pipeline->fragment_shader != NULL) {
  635. assign_descriptor_set_group_index(pipeline->fragment_shader, descriptor_set_group_index);
  636. }
  637. }
  638. }
  639. for (size_t compute_shader_index = 0; compute_shader_index < all_compute_shaders.size; ++compute_shader_index) {
  640. descriptor_set_group group;
  641. static_array_init(group);
  642. global_array function_globals = {0};
  643. find_referenced_globals(all_compute_shaders.values[compute_shader_index], &function_globals);
  644. find_referenced_sets(&function_globals, &group);
  645. check_globals_in_descriptor_set_group(&group);
  646. update_globals_in_descriptor_set_group(&group, &function_globals);
  647. uint32_t descriptor_set_group_index = (uint32_t)all_descriptor_set_groups.size;
  648. static_array_push(all_descriptor_set_groups, group);
  649. for (size_t compute_shader_index = 0; compute_shader_index < all_compute_shaders.size; ++compute_shader_index) {
  650. assign_descriptor_set_group_index(all_compute_shaders.values[compute_shader_index], descriptor_set_group_index);
  651. }
  652. }
  653. for (size_t pipeline_group_index = 0; pipeline_group_index < all_raytracing_pipeline_groups.size; ++pipeline_group_index) {
  654. descriptor_set_group group;
  655. static_array_init(group);
  656. global_array function_globals = {0};
  657. raytracing_pipeline_group *pipeline_group = &all_raytracing_pipeline_groups.values[pipeline_group_index];
  658. for (size_t pipeline_index = 0; pipeline_index < pipeline_group->size; ++pipeline_index) {
  659. raytracing_pipeline *pipeline = &all_raytracing_pipelines.values[pipeline_group->values[pipeline_index]];
  660. if (pipeline->gen_shader != NULL) {
  661. find_referenced_globals(pipeline->gen_shader, &function_globals);
  662. }
  663. if (pipeline->miss_shader != NULL) {
  664. find_referenced_globals(pipeline->miss_shader, &function_globals);
  665. }
  666. if (pipeline->closest_shader != NULL) {
  667. find_referenced_globals(pipeline->closest_shader, &function_globals);
  668. }
  669. if (pipeline->intersection_shader != NULL) {
  670. find_referenced_globals(pipeline->intersection_shader, &function_globals);
  671. }
  672. if (pipeline->any_shader != NULL) {
  673. find_referenced_globals(pipeline->any_shader, &function_globals);
  674. }
  675. }
  676. find_referenced_sets(&function_globals, &group);
  677. check_globals_in_descriptor_set_group(&group);
  678. update_globals_in_descriptor_set_group(&group, &function_globals);
  679. uint32_t descriptor_set_group_index = (uint32_t)all_descriptor_set_groups.size;
  680. static_array_push(all_descriptor_set_groups, group);
  681. for (size_t pipeline_index = 0; pipeline_index < pipeline_group->size; ++pipeline_index) {
  682. raytracing_pipeline *pipeline = &all_raytracing_pipelines.values[pipeline_group->values[pipeline_index]];
  683. if (pipeline->gen_shader != NULL) {
  684. assign_descriptor_set_group_index(pipeline->gen_shader, descriptor_set_group_index);
  685. }
  686. if (pipeline->miss_shader != NULL) {
  687. assign_descriptor_set_group_index(pipeline->miss_shader, descriptor_set_group_index);
  688. }
  689. if (pipeline->closest_shader != NULL) {
  690. assign_descriptor_set_group_index(pipeline->closest_shader, descriptor_set_group_index);
  691. }
  692. if (pipeline->intersection_shader != NULL) {
  693. assign_descriptor_set_group_index(pipeline->intersection_shader, descriptor_set_group_index);
  694. }
  695. if (pipeline->any_shader != NULL) {
  696. assign_descriptor_set_group_index(pipeline->any_shader, descriptor_set_group_index);
  697. }
  698. }
  699. }
  700. }
  701. descriptor_set_group *find_descriptor_set_group_for_pipe_type(type *t) {
  702. if (!t->built_in && has_attribute(&t->attributes, add_name("pipe"))) {
  703. render_pipeline pipeline = extract_render_pipeline_from_type(t);
  704. if (pipeline.vertex_shader->descriptor_set_group_index != UINT32_MAX) {
  705. return &all_descriptor_set_groups.values[pipeline.vertex_shader->descriptor_set_group_index];
  706. }
  707. if (pipeline.amplification_shader->descriptor_set_group_index != UINT32_MAX) {
  708. return &all_descriptor_set_groups.values[pipeline.amplification_shader->descriptor_set_group_index];
  709. }
  710. if (pipeline.mesh_shader->descriptor_set_group_index != UINT32_MAX) {
  711. return &all_descriptor_set_groups.values[pipeline.mesh_shader->descriptor_set_group_index];
  712. }
  713. if (pipeline.fragment_shader->descriptor_set_group_index != UINT32_MAX) {
  714. return &all_descriptor_set_groups.values[pipeline.fragment_shader->descriptor_set_group_index];
  715. }
  716. return NULL;
  717. }
  718. if (!t->built_in && has_attribute(&t->attributes, add_name("raypipe"))) {
  719. raytracing_pipeline pipeline = extract_raytracing_pipeline_from_type(t);
  720. if (pipeline.gen_shader->descriptor_set_group_index != UINT32_MAX) {
  721. return &all_descriptor_set_groups.values[pipeline.gen_shader->descriptor_set_group_index];
  722. }
  723. if (pipeline.miss_shader->descriptor_set_group_index != UINT32_MAX) {
  724. return &all_descriptor_set_groups.values[pipeline.miss_shader->descriptor_set_group_index];
  725. }
  726. if (pipeline.closest_shader->descriptor_set_group_index != UINT32_MAX) {
  727. return &all_descriptor_set_groups.values[pipeline.closest_shader->descriptor_set_group_index];
  728. }
  729. if (pipeline.intersection_shader->descriptor_set_group_index != UINT32_MAX) {
  730. return &all_descriptor_set_groups.values[pipeline.intersection_shader->descriptor_set_group_index];
  731. }
  732. if (pipeline.any_shader->descriptor_set_group_index != UINT32_MAX) {
  733. return &all_descriptor_set_groups.values[pipeline.any_shader->descriptor_set_group_index];
  734. }
  735. return NULL;
  736. }
  737. return NULL;
  738. }
  739. descriptor_set_group *find_descriptor_set_group_for_function(function *f) {
  740. if (f->descriptor_set_group_index != UINT32_MAX) {
  741. return &all_descriptor_set_groups.values[f->descriptor_set_group_index];
  742. }
  743. else {
  744. return NULL;
  745. }
  746. }
  747. void find_sampler_use() {
  748. for (function_id i = 0; get_function(i) != NULL; ++i) {
  749. function *f = get_function(i);
  750. if (f->block == NULL) {
  751. continue;
  752. }
  753. uint8_t *data = f->code.o;
  754. size_t size = f->code.size;
  755. size_t index = 0;
  756. while (index < size) {
  757. opcode *o = (opcode *)&data[index];
  758. switch (o->type) {
  759. case OPCODE_CALL:
  760. if (o->op_call.func == add_name("sample") || o->op_call.func == add_name("sample_lod")) {
  761. if (is_depth(get_type(o->op_call.parameters[0].type.type)->tex_format)) {
  762. type *sampler_type = get_type(o->op_call.parameters[1].type.type);
  763. global *g = find_global_by_var(o->op_call.parameters[1]);
  764. assert(g != NULL);
  765. g->usage |= GLOBAL_USAGE_SAMPLE_DEPTH;
  766. }
  767. }
  768. break;
  769. default:
  770. break;
  771. }
  772. index += o->size;
  773. }
  774. }
  775. }
  776. void analyze(void) {
  777. find_all_render_pipelines();
  778. find_render_pipeline_groups();
  779. find_all_compute_shaders();
  780. find_all_raytracing_pipelines();
  781. find_raytracing_pipeline_groups();
  782. find_descriptor_set_groups();
  783. find_sampler_use();
  784. }