basisu_backend.cpp 64 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
37047057067077087097107117127137147157167177187197207217227237247257267277287297307317327337347357367377387397407417427437447457467477487497507517527537547557567577587597607617627637647657667677687697707717727737747757767777787797807817827837847857867877887897907917927937947957967977987998008018028038048058068078088098108118128138148158168178188198208218228238248258268278288298308318328338348358368378388398408418428438448458468478488498508518528538548558568578588598608618628638648658668678688698708718728738748758768778788798808818828838848858868878888898908918928938948958968978988999009019029039049059069079089099109119129139149159169179189199209219229239249259269279289299309319329339349359369379389399409419429439449459469479489499509519529539549559569579589599609619629639649659669679689699709719729739749759769779789799809819829839849859869879889899909919929939949959969979989991000100110021003100410051006100710081009101010111012101310141015101610171018101910201021102210231024102510261027102810291030103110321033103410351036103710381039104010411042104310441045104610471048104910501051105210531054105510561057105810591060106110621063106410651066106710681069107010711072107310741075107610771078107910801081108210831084108510861087108810891090109110921093109410951096109710981099110011011102110311041105110611071108110911101111111211131114111511161117111811191120112111221123112411251126112711281129113011311132113311341135113611371138113911401141114211431144114511461147114811491150115111521153115411551156115711581159116011611162116311641165116611671168116911701171117211731174117511761177117811791180118111821183118411851186118711881189119011911192119311941195119611971198119912001201120212031204120512061207120812091210121112121213121412151216121712181219122012211222122312241225122612271228122912301231123212331234123512361237123812391240124112421243124412451246124712481249125012511252125312541255125612571258125912601261126212631264126512661267126812691270127112721273127412751276127
71278127912801281128212831284128512861287128812891290129112921293129412951296129712981299130013011302130313041305130613071308130913101311131213131314131513161317131813191320132113221323132413251326132713281329133013311332133313341335133613371338133913401341134213431344134513461347134813491350135113521353135413551356135713581359136013611362136313641365136613671368136913701371137213731374137513761377137813791380138113821383138413851386138713881389139013911392139313941395139613971398139914001401140214031404140514061407140814091410141114121413141414151416141714181419142014211422142314241425142614271428142914301431143214331434143514361437143814391440144114421443144414451446144714481449145014511452145314541455145614571458145914601461146214631464146514661467146814691470147114721473147414751476147714781479148014811482148314841485148614871488148914901491149214931494149514961497149814991500150115021503150415051506150715081509151015111512151315141515151615171518151915201521152215231524152515261527152815291530153115321533153415351536153715381539154015411542154315441545154615471548154915501551155215531554155515561557155815591560156115621563156415651566156715681569157015711572157315741575157615771578157915801581158215831584158515861587158815891590159115921593159415951596159715981599160016011602160316041605160616071608160916101611161216131614161516161617161816191620162116221623162416251626162716281629163016311632163316341635163616371638163916401641164216431644164516461647164816491650165116521653165416551656165716581659166016611662166316641665166616671668166916701671167216731674167516761677167816791680168116821683168416851686168716881689169016911692169316941695169616971698169917001701170217031704170517061707170817091710171117121713171417151716171717181719172017211722172317241725172617271728172917301731173217331734173517361737173817391740174117421743174417451746174717481749175017511752175317541755175617571758175917601761176217631764176517661767176817691770177117721773177417751776177
71778177917801781178217831784178517861787178817891790179117921793179417951796179717981799180018011802180318041805
  1. // basisu_backend.cpp
  2. // Copyright (C) 2019-2021 Binomial LLC. All Rights Reserved.
  3. //
  4. // Licensed under the Apache License, Version 2.0 (the "License");
  5. // you may not use this file except in compliance with the License.
  6. // You may obtain a copy of the License at
  7. //
  8. // http://www.apache.org/licenses/LICENSE-2.0
  9. //
  10. // Unless required by applicable law or agreed to in writing, software
  11. // distributed under the License is distributed on an "AS IS" BASIS,
  12. // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  13. // See the License for the specific language governing permissions and
  14. // limitations under the License.
  15. //
  16. // TODO: This code originally supported full ETC1 and ETC1S, so there's some legacy stuff in here.
  17. //
  18. #include "basisu_backend.h"
  19. #if BASISU_SUPPORT_SSE
  20. #define CPPSPMD_NAME(a) a##_sse41
  21. #include "basisu_kernels_declares.h"
  22. #endif
  23. #define BASISU_FASTER_SELECTOR_REORDERING 0
  24. #define BASISU_BACKEND_VERIFY(c) verify(c, __LINE__);
  25. namespace basisu
  26. {
  27. // TODO
  28. static inline void verify(bool condition, int line)
  29. {
  30. if (!condition)
  31. {
  32. fprintf(stderr, "ERROR: basisu_backend: verify() failed at line %i!\n", line);
  33. abort();
  34. }
  35. }
// Constructs an empty backend; all state is established later by init().
basisu_backend::basisu_backend()
{
clear();
}
// Resets the backend to its default state: detaches the frontend and clears
// the parameters and any output from a previous encode.
void basisu_backend::clear()
{
m_pFront_end = NULL;
m_params.clear();
m_output.clear();
}
// Stores the frontend, encoding parameters, slice descriptions, and the
// (optional) global selector codebook pointer, then logs a summary of the
// configuration and the geometry of each slice. No encoding work is done here.
void basisu_backend::init(basisu_frontend* pFront_end, basisu_backend_params& params, const basisu_backend_slice_desc_vec& slice_descs, const basist::etc1_global_selector_codebook* pGlobal_sel_codebook)
{
m_pFront_end = pFront_end;
m_params = params;
m_slices = slice_descs;
m_pGlobal_sel_codebook = pGlobal_sel_codebook;
debug_printf("basisu_backend::Init: Slices: %u, ETC1S: %u, EndpointRDOQualityThresh: %f, SelectorRDOQualityThresh: %f, UseGlobalSelCodebook: %u, GlobalSelCodebookPalBits: %u, GlobalSelCodebookModBits: %u, Use hybrid selector codebooks: %u\n",
m_slices.size(),
params.m_etc1s,
params.m_endpoint_rdo_quality_thresh,
params.m_selector_rdo_quality_thresh,
params.m_use_global_sel_codebook,
params.m_global_sel_codebook_pal_bits,
params.m_global_sel_codebook_mod_bits,
params.m_use_hybrid_sel_codebooks);
debug_printf("Frontend endpoints: %u selectors: %u\n", m_pFront_end->get_total_endpoint_clusters(), m_pFront_end->get_total_selector_clusters());
// Dump per-slice geometry for debugging.
for (uint32_t i = 0; i < m_slices.size(); i++)
{
debug_printf("Slice: %u, OrigWidth: %u, OrigHeight: %u, Width: %u, Height: %u, NumBlocksX: %u, NumBlocksY: %u, FirstBlockIndex: %u\n",
i,
m_slices[i].m_orig_width, m_slices[i].m_orig_height,
m_slices[i].m_width, m_slices[i].m_height,
m_slices[i].m_num_blocks_x, m_slices[i].m_num_blocks_y,
m_slices[i].m_first_block_index);
}
}
  72. void basisu_backend::create_endpoint_palette()
  73. {
  74. const basisu_frontend& r = *m_pFront_end;
  75. m_output.m_num_endpoints = r.get_total_endpoint_clusters();
  76. m_endpoint_palette.resize(r.get_total_endpoint_clusters());
  77. for (uint32_t i = 0; i < r.get_total_endpoint_clusters(); i++)
  78. {
  79. etc1_endpoint_palette_entry& e = m_endpoint_palette[i];
  80. e.m_color5_valid = r.get_endpoint_cluster_color_is_used(i, false);
  81. e.m_color5 = r.get_endpoint_cluster_unscaled_color(i, false);
  82. e.m_inten5 = r.get_endpoint_cluster_inten_table(i, false);
  83. BASISU_BACKEND_VERIFY(e.m_color5_valid);
  84. }
  85. }
  86. void basisu_backend::create_selector_palette()
  87. {
  88. const basisu_frontend& r = *m_pFront_end;
  89. m_output.m_num_selectors = r.get_total_selector_clusters();
  90. m_selector_palette.resize(r.get_total_selector_clusters());
  91. if (m_params.m_use_global_sel_codebook)
  92. {
  93. m_global_selector_palette_desc.resize(r.get_total_selector_clusters());
  94. for (int i = 0; i < static_cast<int>(r.get_total_selector_clusters()); i++)
  95. {
  96. basist::etc1_selector_palette_entry& selector_pal_entry = m_selector_palette[i];
  97. etc1_global_selector_cb_entry_desc& pal_entry_desc = m_global_selector_palette_desc[i];
  98. pal_entry_desc.m_pal_index = r.get_selector_cluster_global_selector_entry_ids()[i].m_palette_index;
  99. pal_entry_desc.m_mod_index = r.get_selector_cluster_global_selector_entry_ids()[i].m_modifier.get_index();
  100. pal_entry_desc.m_was_used = true;
  101. if (m_params.m_use_hybrid_sel_codebooks)
  102. pal_entry_desc.m_was_used = r.get_selector_cluster_uses_global_cb_vec()[i];
  103. if (pal_entry_desc.m_was_used)
  104. {
  105. const etc_block& selector_bits = r.get_selector_cluster_selector_bits(i);
  106. (void)selector_bits;
  107. basist::etc1_selector_palette_entry global_pal_entry(m_pGlobal_sel_codebook->get_entry(r.get_selector_cluster_global_selector_entry_ids()[i]));
  108. for (uint32_t y = 0; y < 4; y++)
  109. {
  110. for (uint32_t x = 0; x < 4; x++)
  111. {
  112. selector_pal_entry(x, y) = global_pal_entry(x, y);
  113. assert(selector_bits.get_selector(x, y) == global_pal_entry(x, y));
  114. }
  115. }
  116. }
  117. else
  118. {
  119. const etc_block& selector_bits = r.get_selector_cluster_selector_bits(i);
  120. for (uint32_t y = 0; y < 4; y++)
  121. for (uint32_t x = 0; x < 4; x++)
  122. selector_pal_entry[y * 4 + x] = static_cast<uint8_t>(selector_bits.get_selector(x, y));
  123. }
  124. }
  125. }
  126. else
  127. {
  128. for (uint32_t i = 0; i < r.get_total_selector_clusters(); i++)
  129. {
  130. basist::etc1_selector_palette_entry& s = m_selector_palette[i];
  131. const etc_block& selector_bits = r.get_selector_cluster_selector_bits(i);
  132. for (uint32_t y = 0; y < 4; y++)
  133. {
  134. for (uint32_t x = 0; x < 4; x++)
  135. {
  136. s[y * 4 + x] = static_cast<uint8_t>(selector_bits.get_selector(x, y));
  137. }
  138. }
  139. }
  140. }
  141. }
// Neighbor block offsets used for endpoint index prediction, indexed by
// endpoint predictor: left { -1, 0 }, above { 0, -1 }, upper-left { -1, -1 }.
static const struct
{
int8_t m_dx, m_dy;
} g_endpoint_preds[] =
{
{ -1, 0 },
{ 0, -1 },
{ -1, -1 }
};
// Optionally reoptimizes the endpoint codebook after endpoint prediction has
// remapped some block endpoint indices, then computes the endpoint remap
// tables (old<->new) used when writing the compressed output.
// total_block_endpoints_remapped: number of blocks whose endpoint index was
//  changed during endpoint prediction in create_encoder_blocks().
// all_endpoint_indices: endpoint index of each block that doesn't use an
//  endpoint predictor; remapped in place if the codebook is reoptimized.
void basisu_backend::reoptimize_and_sort_endpoints_codebook(uint32_t total_block_endpoints_remapped, uint_vec& all_endpoint_indices)
{
basisu_frontend& r = *m_pFront_end;
//const bool is_video = r.get_params().m_tex_type == basist::cBASISTexTypeVideoFrames;
if (m_params.m_used_global_codebooks)
{
// Global codebooks have a fixed order, so use an identity remapping.
m_endpoint_remap_table_old_to_new.clear();
m_endpoint_remap_table_old_to_new.resize(r.get_total_endpoint_clusters());
for (uint32_t i = 0; i < r.get_total_endpoint_clusters(); i++)
m_endpoint_remap_table_old_to_new[i] = i;
}
else
{
//if ((total_block_endpoints_remapped) && (m_params.m_compression_level > 0))
if ((total_block_endpoints_remapped) && (m_params.m_compression_level > 1))
{
// We've changed the block endpoint indices, so we need to go and adjust the endpoint codebook (remove unused entries, optimize existing entries that have changed)
// Gather the current per-block endpoint indices into one flat array.
uint_vec new_block_endpoints(get_total_blocks());
for (uint32_t slice_index = 0; slice_index < m_slices.size(); slice_index++)
{
const uint32_t first_block_index = m_slices[slice_index].m_first_block_index;
const uint32_t num_blocks_x = m_slices[slice_index].m_num_blocks_x;
const uint32_t num_blocks_y = m_slices[slice_index].m_num_blocks_y;
for (uint32_t block_y = 0; block_y < num_blocks_y; block_y++)
for (uint32_t block_x = 0; block_x < num_blocks_x; block_x++)
new_block_endpoints[first_block_index + block_x + block_y * num_blocks_x] = m_slice_encoder_blocks[slice_index](block_x, block_y).m_endpoint_index;
}
int_vec old_to_new_endpoint_indices;
r.reoptimize_remapped_endpoints(new_block_endpoints, old_to_new_endpoint_indices, true);
// The frontend may have changed the codebook entries, so rebuild the palette.
create_endpoint_palette();
// Apply the frontend's old->new mapping to every encoder block.
for (uint32_t slice_index = 0; slice_index < m_slices.size(); slice_index++)
{
//const uint32_t first_block_index = m_slices[slice_index].m_first_block_index;
//const uint32_t width = m_slices[slice_index].m_width;
//const uint32_t height = m_slices[slice_index].m_height;
const uint32_t num_blocks_x = m_slices[slice_index].m_num_blocks_x;
const uint32_t num_blocks_y = m_slices[slice_index].m_num_blocks_y;
for (uint32_t block_y = 0; block_y < num_blocks_y; block_y++)
{
for (uint32_t block_x = 0; block_x < num_blocks_x; block_x++)
{
//const uint32_t block_index = first_block_index + block_x + block_y * num_blocks_x;
encoder_block& m = m_slice_encoder_blocks[slice_index](block_x, block_y);
m.m_endpoint_index = old_to_new_endpoint_indices[m.m_endpoint_index];
} // block_x
} // block_y
} // slice_index
// Keep the caller's flat index list consistent with the new codebook order.
for (uint32_t i = 0; i < all_endpoint_indices.size(); i++)
all_endpoint_indices[i] = old_to_new_endpoint_indices[all_endpoint_indices[i]];
} //if (total_block_endpoints_remapped)
// Sort endpoint codebook
palette_index_reorderer reorderer;
reorderer.init((uint32_t)all_endpoint_indices.size(), &all_endpoint_indices[0], r.get_total_endpoint_clusters(), nullptr, nullptr, 0);
m_endpoint_remap_table_old_to_new = reorderer.get_remap_table();
}
// For endpoints, old_to_new[] may not be bijective!
// Some "old" entries may be unused and don't get remapped into the "new" array.
// Scan all blocks to find which old entries are actually referenced.
m_old_endpoint_was_used.clear();
m_old_endpoint_was_used.resize(r.get_total_endpoint_clusters());
uint32_t first_old_entry_index = UINT32_MAX;
for (uint32_t slice_index = 0; slice_index < m_slices.size(); slice_index++)
{
const uint32_t num_blocks_x = m_slices[slice_index].m_num_blocks_x, num_blocks_y = m_slices[slice_index].m_num_blocks_y;
for (uint32_t block_y = 0; block_y < num_blocks_y; block_y++)
{
for (uint32_t block_x = 0; block_x < num_blocks_x; block_x++)
{
encoder_block& m = m_slice_encoder_blocks[slice_index](block_x, block_y);
const uint32_t old_endpoint_index = m.m_endpoint_index;
m_old_endpoint_was_used[old_endpoint_index] = true;
first_old_entry_index = basisu::minimum(first_old_entry_index, old_endpoint_index);
} // block_x
} // block_y
} // slice_index
debug_printf("basisu_backend::reoptimize_and_sort_endpoints_codebook: First old entry index: %u\n", first_old_entry_index);
// Build the inverse (new->old) table over the used entries only.
m_new_endpoint_was_used.clear();
m_new_endpoint_was_used.resize(r.get_total_endpoint_clusters());
m_endpoint_remap_table_new_to_old.clear();
m_endpoint_remap_table_new_to_old.resize(r.get_total_endpoint_clusters());
// Set unused entries in the new array to point to the first used entry in the old array.
m_endpoint_remap_table_new_to_old.set_all(first_old_entry_index);
for (uint32_t old_index = 0; old_index < m_endpoint_remap_table_old_to_new.size(); old_index++)
{
if (m_old_endpoint_was_used[old_index])
{
const uint32_t new_index = m_endpoint_remap_table_old_to_new[old_index];
m_new_endpoint_was_used[new_index] = true;
m_endpoint_remap_table_new_to_old[new_index] = old_index;
}
}
}
  242. void basisu_backend::sort_selector_codebook()
  243. {
  244. basisu_frontend& r = *m_pFront_end;
  245. m_selector_remap_table_new_to_old.resize(r.get_total_selector_clusters());
  246. if ((m_params.m_compression_level == 0) || (m_params.m_used_global_codebooks))
  247. {
  248. for (uint32_t i = 0; i < r.get_total_selector_clusters(); i++)
  249. m_selector_remap_table_new_to_old[i] = i;
  250. }
  251. else
  252. {
  253. m_selector_remap_table_new_to_old[0] = 0;
  254. uint32_t prev_selector_index = 0;
  255. int_vec remaining_selectors;
  256. remaining_selectors.reserve(r.get_total_selector_clusters() - 1);
  257. for (uint32_t i = 1; i < r.get_total_selector_clusters(); i++)
  258. remaining_selectors.push_back(i);
  259. uint_vec selector_palette_bytes(m_selector_palette.size());
  260. for (uint32_t i = 0; i < m_selector_palette.size(); i++)
  261. selector_palette_bytes[i] = m_selector_palette[i].get_byte(0) | (m_selector_palette[i].get_byte(1) << 8) | (m_selector_palette[i].get_byte(2) << 16) | (m_selector_palette[i].get_byte(3) << 24);
  262. // This is the traveling salesman problem.
  263. for (uint32_t i = 1; i < r.get_total_selector_clusters(); i++)
  264. {
  265. uint32_t best_hamming_dist = 100;
  266. uint32_t best_index = 0;
  267. #if BASISU_FASTER_SELECTOR_REORDERING
  268. const uint32_t step = (remaining_selectors.size() > 16) ? 16 : 1;
  269. for (uint32_t j = 0; j < remaining_selectors.size(); j += step)
  270. #else
  271. for (uint32_t j = 0; j < remaining_selectors.size(); j++)
  272. #endif
  273. {
  274. int selector_index = remaining_selectors[j];
  275. uint32_t k = selector_palette_bytes[prev_selector_index] ^ selector_palette_bytes[selector_index];
  276. uint32_t hamming_dist = g_hamming_dist[k & 0xFF] + g_hamming_dist[(k >> 8) & 0xFF] + g_hamming_dist[(k >> 16) & 0xFF] + g_hamming_dist[k >> 24];
  277. if (hamming_dist < best_hamming_dist)
  278. {
  279. best_hamming_dist = hamming_dist;
  280. best_index = j;
  281. if (best_hamming_dist <= 1)
  282. break;
  283. }
  284. }
  285. prev_selector_index = remaining_selectors[best_index];
  286. m_selector_remap_table_new_to_old[i] = prev_selector_index;
  287. remaining_selectors[best_index] = remaining_selectors.back();
  288. remaining_selectors.resize(remaining_selectors.size() - 1);
  289. }
  290. }
  291. m_selector_remap_table_old_to_new.resize(r.get_total_selector_clusters());
  292. for (uint32_t i = 0; i < m_selector_remap_table_new_to_old.size(); i++)
  293. m_selector_remap_table_old_to_new[m_selector_remap_table_new_to_old[i]] = i;
  294. }
  295. int basisu_backend::find_video_frame(int slice_index, int delta)
  296. {
  297. for (uint32_t s = 0; s < m_slices.size(); s++)
  298. {
  299. if ((int)m_slices[s].m_source_file_index != ((int)m_slices[slice_index].m_source_file_index + delta))
  300. continue;
  301. if (m_slices[s].m_mip_index != m_slices[slice_index].m_mip_index)
  302. continue;
  303. // Being super paranoid here.
  304. if (m_slices[s].m_num_blocks_x != (m_slices[slice_index].m_num_blocks_x))
  305. continue;
  306. if (m_slices[s].m_num_blocks_y != (m_slices[slice_index].m_num_blocks_y))
  307. continue;
  308. if (m_slices[s].m_alpha != (m_slices[slice_index].m_alpha))
  309. continue;
  310. return s;
  311. }
  312. return -1;
  313. }
// Video-only sanity pass over conditional-replenishment (CR) predictors:
// verifies that i-frames (or frames with no predecessor) use no CR blocks,
// and that every CR block in other frames matches its previous frame's
// endpoint and selector indices. Aborts (via verify) on any violation.
void basisu_backend::check_for_valid_cr_blocks()
{
basisu_frontend& r = *m_pFront_end;
const bool is_video = r.get_params().m_tex_type == basist::cBASISTexTypeVideoFrames;
if (!is_video)
return;
uint32_t total_crs = 0;
uint32_t total_invalid_crs = 0;
for (uint32_t slice_index = 0; slice_index < m_slices.size(); slice_index++)
{
const bool is_iframe = m_slices[slice_index].m_iframe;
//const uint32_t first_block_index = m_slices[slice_index].m_first_block_index;
//const uint32_t width = m_slices[slice_index].m_width;
//const uint32_t height = m_slices[slice_index].m_height;
const uint32_t num_blocks_x = m_slices[slice_index].m_num_blocks_x;
const uint32_t num_blocks_y = m_slices[slice_index].m_num_blocks_y;
const int prev_frame_slice_index = find_video_frame(slice_index, -1);
// If we don't have a previous frame, and we're not an i-frame, something is wrong.
if ((prev_frame_slice_index < 0) && (!is_iframe))
{
BASISU_BACKEND_VERIFY(0);
}
if ((is_iframe) || (prev_frame_slice_index < 0))
{
// Ensure no blocks use CR's
for (uint32_t block_y = 0; block_y < num_blocks_y; block_y++)
{
for (uint32_t block_x = 0; block_x < num_blocks_x; block_x++)
{
encoder_block& m = m_slice_encoder_blocks[slice_index](block_x, block_y);
BASISU_BACKEND_VERIFY(m.m_endpoint_predictor != basist::CR_ENDPOINT_PRED_INDEX);
}
}
}
else
{
// For blocks that use CR's, make sure the endpoints/selectors haven't really changed.
for (uint32_t block_y = 0; block_y < num_blocks_y; block_y++)
{
for (uint32_t block_x = 0; block_x < num_blocks_x; block_x++)
{
encoder_block& m = m_slice_encoder_blocks[slice_index](block_x, block_y);
if (m.m_endpoint_predictor == basist::CR_ENDPOINT_PRED_INDEX)
{
total_crs++;
encoder_block& prev_m = m_slice_encoder_blocks[prev_frame_slice_index](block_x, block_y);
// A valid CR block must be bit-identical to the same block in the previous frame.
if ((m.m_endpoint_index != prev_m.m_endpoint_index) || (m.m_selector_index != prev_m.m_selector_index))
{
total_invalid_crs++;
}
}
} // block_x
} // block_y
} // !slice_index
} // slice_index
debug_printf("Total CR's: %u, Total invalid CR's: %u\n", total_crs, total_invalid_crs);
BASISU_BACKEND_VERIFY(total_invalid_crs == 0);
}
// Builds the per-slice encoder block arrays: copies each block's endpoint and
// selector cluster indices from the frontend, then tries to assign each block
// an endpoint predictor (left/above/upper-left neighbor, or the CR predictor
// for video frames). If no exact predictor matches and endpoint RDO is
// enabled, the block may be remapped to a neighbor's endpoint when the error
// increase stays under the RDO quality threshold. Finishes by reoptimizing/
// sorting the endpoint codebook, sorting the selector codebook, and checking
// CR-block validity.
void basisu_backend::create_encoder_blocks()
{
basisu_frontend& r = *m_pFront_end;
const bool is_video = r.get_params().m_tex_type == basist::cBASISTexTypeVideoFrames;
m_slice_encoder_blocks.resize(m_slices.size());
uint32_t total_endpoint_pred_missed = 0, total_endpoint_pred_hits = 0, total_block_endpoints_remapped = 0;
// Endpoint indices of all blocks that end up without a predictor, in block order.
uint_vec all_endpoint_indices;
all_endpoint_indices.reserve(get_total_blocks());
for (uint32_t slice_index = 0; slice_index < m_slices.size(); slice_index++)
{
const int prev_frame_slice_index = is_video ? find_video_frame(slice_index, -1) : -1;
const bool is_iframe = m_slices[slice_index].m_iframe;
const uint32_t first_block_index = m_slices[slice_index].m_first_block_index;
//const uint32_t width = m_slices[slice_index].m_width;
//const uint32_t height = m_slices[slice_index].m_height;
const uint32_t num_blocks_x = m_slices[slice_index].m_num_blocks_x;
const uint32_t num_blocks_y = m_slices[slice_index].m_num_blocks_y;
m_slice_encoder_blocks[slice_index].resize(num_blocks_x, num_blocks_y);
for (uint32_t block_y = 0; block_y < num_blocks_y; block_y++)
{
for (uint32_t block_x = 0; block_x < num_blocks_x; block_x++)
{
const uint32_t block_index = first_block_index + block_x + block_y * num_blocks_x;
encoder_block& m = m_slice_encoder_blocks[slice_index](block_x, block_y);
m.m_endpoint_index = r.get_subblock_endpoint_cluster_index(block_index, 0);
// ETC1S requires both subblocks to share one endpoint cluster.
BASISU_BACKEND_VERIFY(r.get_subblock_endpoint_cluster_index(block_index, 0) == r.get_subblock_endpoint_cluster_index(block_index, 1));
m.m_selector_index = r.get_block_selector_cluster_index(block_index);
m.m_endpoint_predictor = basist::NO_ENDPOINT_PRED_INDEX;
const uint32_t block_endpoint = m.m_endpoint_index;
uint32_t best_endpoint_pred = UINT32_MAX;
// Try every predictor; lower predictor indices are preferred on ties.
for (uint32_t endpoint_pred = 0; endpoint_pred < basist::NUM_ENDPOINT_PREDS; endpoint_pred++)
{
if ((is_video) && (endpoint_pred == basist::CR_ENDPOINT_PRED_INDEX))
{
// CR predictor: usable only on non-i-frames that have a previous frame,
// and only when endpoints AND selectors match the previous frame's block.
if ((prev_frame_slice_index != -1) && (!is_iframe))
{
const uint32_t cur_endpoint = m_slice_encoder_blocks[slice_index](block_x, block_y).m_endpoint_index;
const uint32_t cur_selector = m_slice_encoder_blocks[slice_index](block_x, block_y).m_selector_index;
const uint32_t prev_endpoint = m_slice_encoder_blocks[prev_frame_slice_index](block_x, block_y).m_endpoint_index;
const uint32_t prev_selector = m_slice_encoder_blocks[prev_frame_slice_index](block_x, block_y).m_selector_index;
if ((cur_endpoint == prev_endpoint) && (cur_selector == prev_selector))
{
best_endpoint_pred = basist::CR_ENDPOINT_PRED_INDEX;
m_slice_encoder_blocks[prev_frame_slice_index](block_x, block_y).m_is_cr_target = true;
}
}
}
else
{
// Spatial predictor: the neighbor must be inside the slice and have the
// same endpoint index.
int pred_block_x = block_x + g_endpoint_preds[endpoint_pred].m_dx;
if ((pred_block_x < 0) || (pred_block_x >= (int)num_blocks_x))
continue;
int pred_block_y = block_y + g_endpoint_preds[endpoint_pred].m_dy;
if ((pred_block_y < 0) || (pred_block_y >= (int)num_blocks_y))
continue;
uint32_t pred_endpoint = m_slice_encoder_blocks[slice_index](pred_block_x, pred_block_y).m_endpoint_index;
if (pred_endpoint == block_endpoint)
{
if (endpoint_pred < best_endpoint_pred)
{
best_endpoint_pred = endpoint_pred;
}
}
}
} // endpoint_pred
if (best_endpoint_pred != UINT32_MAX)
{
m.m_endpoint_predictor = best_endpoint_pred;
total_endpoint_pred_hits++;
}
else if (m_params.m_endpoint_rdo_quality_thresh > 0.0f)
{
// RDO pass: no exact predictor matched, so see whether adopting a
// neighbor's endpoint keeps the error within the quality threshold.
const pixel_block& src_pixels = r.get_source_pixel_block(block_index);
etc_block etc_blk(r.get_output_block(block_index));
uint64_t cur_err = etc_blk.evaluate_etc1_error(src_pixels.get_ptr(), r.get_params().m_perceptual);
if (cur_err)
{
// Error budget: current error scaled by the RDO threshold (at least 1x).
const uint64_t thresh_err = (uint64_t)(cur_err * maximum(1.0f, m_params.m_endpoint_rdo_quality_thresh));
etc_block trial_etc_block(etc_blk);
uint64_t best_err = UINT64_MAX;
uint32_t best_endpoint_index = 0;
best_endpoint_pred = UINT32_MAX;
for (uint32_t endpoint_pred = 0; endpoint_pred < basist::NUM_ENDPOINT_PREDS; endpoint_pred++)
{
// CR is never considered by the RDO pass.
if ((is_video) && (endpoint_pred == basist::CR_ENDPOINT_PRED_INDEX))
continue;
int pred_block_x = block_x + g_endpoint_preds[endpoint_pred].m_dx;
if ((pred_block_x < 0) || (pred_block_x >= (int)num_blocks_x))
continue;
int pred_block_y = block_y + g_endpoint_preds[endpoint_pred].m_dy;
if ((pred_block_y < 0) || (pred_block_y >= (int)num_blocks_y))
continue;
// Re-encode the block with the neighbor's endpoint and measure the error.
uint32_t pred_endpoint_index = m_slice_encoder_blocks[slice_index](pred_block_x, pred_block_y).m_endpoint_index;
uint32_t pred_inten = r.get_endpoint_cluster_inten_table(pred_endpoint_index, false);
color_rgba pred_color = r.get_endpoint_cluster_unscaled_color(pred_endpoint_index, false);
trial_etc_block.set_block_color5(pred_color, pred_color);
trial_etc_block.set_inten_table(0, pred_inten);
trial_etc_block.set_inten_table(1, pred_inten);
color_rgba trial_colors[16];
unpack_etc1(trial_etc_block, trial_colors);
uint64_t trial_err = 0;
for (uint32_t p = 0; p < 16; p++)
{
trial_err += color_distance(r.get_params().m_perceptual, src_pixels.get_ptr()[p], trial_colors[p], false);
// Early out once over budget.
if (trial_err > thresh_err)
break;
}
if (trial_err <= thresh_err)
{
if ((trial_err < best_err) || ((trial_err == best_err) && (endpoint_pred < best_endpoint_pred)))
{
best_endpoint_pred = endpoint_pred;
best_err = trial_err;
best_endpoint_index = pred_endpoint_index;
}
}
} // endpoint_pred
if (best_endpoint_pred != UINT32_MAX)
{
// Adopt the neighbor's endpoint so the predictor applies.
m.m_endpoint_index = best_endpoint_index;
m.m_endpoint_predictor = best_endpoint_pred;
total_endpoint_pred_hits++;
total_block_endpoints_remapped++;
}
else
{
total_endpoint_pred_missed++;
}
}
}
else
{
total_endpoint_pred_missed++;
}
// Blocks without a predictor must be coded explicitly; remember their endpoints.
if (m.m_endpoint_predictor == basist::NO_ENDPOINT_PRED_INDEX)
{
all_endpoint_indices.push_back(m.m_endpoint_index);
}
} // block_x
} // block_y
} // slice
debug_printf("total_endpoint_pred_missed: %u (%3.2f%%) total_endpoint_pred_hit: %u (%3.2f%%), total_block_endpoints_remapped: %u (%3.2f%%)\n",
total_endpoint_pred_missed, total_endpoint_pred_missed * 100.0f / get_total_blocks(),
total_endpoint_pred_hits, total_endpoint_pred_hits * 100.0f / get_total_blocks(),
total_block_endpoints_remapped, total_block_endpoints_remapped * 100.0f / get_total_blocks());
reoptimize_and_sort_endpoints_codebook(total_block_endpoints_remapped, all_endpoint_indices);
sort_selector_codebook();
check_for_valid_cr_blocks();
}
  521. void basisu_backend::compute_slice_crcs()
  522. {
  523. for (uint32_t slice_index = 0; slice_index < m_slices.size(); slice_index++)
  524. {
  525. //const uint32_t first_block_index = m_slices[slice_index].m_first_block_index;
  526. const uint32_t width = m_slices[slice_index].m_width;
  527. const uint32_t height = m_slices[slice_index].m_height;
  528. const uint32_t num_blocks_x = m_slices[slice_index].m_num_blocks_x;
  529. const uint32_t num_blocks_y = m_slices[slice_index].m_num_blocks_y;
  530. gpu_image gi;
  531. gi.init(texture_format::cETC1, width, height);
  532. for (uint32_t block_y = 0; block_y < num_blocks_y; block_y++)
  533. {
  534. for (uint32_t block_x = 0; block_x < num_blocks_x; block_x++)
  535. {
  536. //const uint32_t block_index = first_block_index + block_x + block_y * num_blocks_x;
  537. encoder_block& m = m_slice_encoder_blocks[slice_index](block_x, block_y);
  538. {
  539. etc_block& output_block = *(etc_block*)gi.get_block_ptr(block_x, block_y);
  540. output_block.set_diff_bit(true);
  541. output_block.set_flip_bit(true);
  542. const uint32_t endpoint_index = m.m_endpoint_index;
  543. output_block.set_block_color5_etc1s(m_endpoint_palette[endpoint_index].m_color5);
  544. output_block.set_inten_tables_etc1s(m_endpoint_palette[endpoint_index].m_inten5);
  545. const uint32_t selector_idx = m.m_selector_index;
  546. const basist::etc1_selector_palette_entry& selectors = m_selector_palette[selector_idx];
  547. for (uint32_t sy = 0; sy < 4; sy++)
  548. for (uint32_t sx = 0; sx < 4; sx++)
  549. output_block.set_selector(sx, sy, selectors(sx, sy));
  550. }
  551. } // block_x
  552. } // block_y
  553. m_output.m_slice_image_crcs[slice_index] = basist::crc16(gi.get_ptr(), gi.get_size_in_bytes(), 0);
  554. if (m_params.m_debug_images)
  555. {
  556. image gi_unpacked;
  557. gi.unpack(gi_unpacked);
  558. char buf[256];
  559. #ifdef _WIN32
  560. sprintf_s(buf, sizeof(buf), "basisu_backend_slice_%u.png", slice_index);
  561. #else
  562. snprintf(buf, sizeof(buf), "basisu_backend_slice_%u.png", slice_index);
  563. #endif
  564. save_png(buf, gi_unpacked);
  565. }
  566. } // slice_index
  567. }
// TODO: Split this into multiple methods.
//
// Encodes every slice's per-block endpoint/selector indices into the compressed
// slice bitstreams (m_output.m_slice_image_data) plus the shared Huffman model
// tables (m_output.m_slice_image_tables).
//
// Structure:
//   Per slice, pass 1: mark blocks whose endpoint indices are referenced by a
//     neighbor's endpoint predictor (those blocks must not be remapped by RDO).
//   Per slice, pass 2: walk the blocks and generate symbol streams + histograms:
//     - endpoint-prediction symbols, one per 2x2 macroblock, run-length encoded;
//     - delta endpoint indices (with optional endpoint RDO remapping);
//     - selector symbols, using an approximate move-to-front history buffer with
//       RLE of repeated hits on slot 0 (with optional selector RDO remapping).
//   Then: optionally re-optimize remapped endpoints, recompute CRCs, build the
//     four Huffman models from the histograms, and emit the model tables.
//   Per slice, pass 3: re-walk the blocks in the same order and emit the actual
//     Huffman codes from the symbol streams recorded in pass 2.
//
// Returns false only if a Huffman table fails to initialize.
bool basisu_backend::encode_image()
{
	basisu_frontend& r = *m_pFront_end;
	const bool is_video = r.get_params().m_tex_type == basist::cBASISTexTypeVideoFrames;

	uint32_t total_used_selector_history_buf = 0;
	uint32_t total_selector_indices_remapped = 0;

	// Selector coding state/histograms. The selector symbol alphabet is:
	// [0, total_selector_clusters) = literal indices, then MAX_SELECTOR_HISTORY_BUF_SIZE
	// history-buffer hit symbols, then one RLE escape symbol.
	basist::approx_move_to_front selector_history_buf(basist::MAX_SELECTOR_HISTORY_BUF_SIZE);
	histogram selector_history_buf_histogram(basist::MAX_SELECTOR_HISTORY_BUF_SIZE);
	histogram selector_histogram(r.get_total_selector_clusters() + basist::MAX_SELECTOR_HISTORY_BUF_SIZE + 1);
	histogram selector_history_buf_rle_histogram(1 << basist::SELECTOR_HISTORY_BUF_RLE_COUNT_BITS);
	basisu::vector<uint_vec> selector_syms(m_slices.size());
	const uint32_t SELECTOR_HISTORY_BUF_FIRST_SYMBOL_INDEX = r.get_total_selector_clusters();
	const uint32_t SELECTOR_HISTORY_BUF_RLE_SYMBOL_INDEX = SELECTOR_HISTORY_BUF_FIRST_SYMBOL_INDEX + basist::MAX_SELECTOR_HISTORY_BUF_SIZE;

	m_output.m_slice_image_crcs.resize(m_slices.size());

	// Endpoint coding state/histograms.
	histogram delta_endpoint_histogram(r.get_total_endpoint_clusters());
	histogram endpoint_pred_histogram(basist::ENDPOINT_PRED_TOTAL_SYMBOLS);
	basisu::vector<uint_vec> endpoint_pred_syms(m_slices.size());

	uint32_t total_endpoint_indices_remapped = 0;

	// Final (old-index-space) per-block endpoint/selector choices, in block order,
	// later fed to reoptimize_remapped_endpoints().
	uint_vec block_endpoint_indices, block_selector_indices;

	for (uint32_t slice_index = 0; slice_index < m_slices.size(); slice_index++)
	{
		//const int prev_frame_slice_index = is_video ? find_video_frame(slice_index, -1) : -1;
		//const int next_frame_slice_index = is_video ? find_video_frame(slice_index, 1) : -1;
		const uint32_t first_block_index = m_slices[slice_index].m_first_block_index;
		//const uint32_t width = m_slices[slice_index].m_width;
		//const uint32_t height = m_slices[slice_index].m_height;
		const uint32_t num_blocks_x = m_slices[slice_index].m_num_blocks_x;
		const uint32_t num_blocks_y = m_slices[slice_index].m_num_blocks_y;

		// Per-slice state: the history buffer, RLE counters and endpoint predictor
		// all restart at the top of each slice.
		selector_history_buf.reset();

		int selector_history_buf_rle_count = 0;

		int prev_endpoint_pred_sym_bits = -1, endpoint_pred_repeat_count = 0;

		uint32_t prev_endpoint_index = 0;

		// Pass 1: flag each block whose endpoint index is the source of a neighbor's
		// prediction. Endpoint RDO below skips flagged blocks, since remapping them
		// would silently change the predicted neighbors too.
		vector2D<uint8_t> block_endpoints_are_referenced(num_blocks_x, num_blocks_y);

		for (uint32_t block_y = 0; block_y < num_blocks_y; block_y++)
		{
			for (uint32_t block_x = 0; block_x < num_blocks_x; block_x++)
			{
				//const uint32_t block_index = first_block_index + block_x + block_y * num_blocks_x;
				encoder_block& m = m_slice_encoder_blocks[slice_index](block_x, block_y);

				// Predictor 0 reuses the left neighbor's endpoints, 1 the upper neighbor's,
				// 2 the upper-left's (non-video). For video, predictor 2 does not reference
				// an in-slice neighbor here. (Assumes the frontend never assigns predictor
				// 0/1/2 to a block lacking the corresponding neighbor.)
				if (m.m_endpoint_predictor == 0)
					block_endpoints_are_referenced(block_x - 1, block_y) = true;
				else if (m.m_endpoint_predictor == 1)
					block_endpoints_are_referenced(block_x, block_y - 1) = true;
				else if (m.m_endpoint_predictor == 2)
				{
					if (!is_video)
						block_endpoints_are_referenced(block_x - 1, block_y - 1) = true;
				}

				if (is_video)
				{
					// Conditional-replenishment targets must keep their exact endpoints.
					if (m.m_is_cr_target)
						block_endpoints_are_referenced(block_x, block_y) = true;
				}
			} // block_x
		} // block_y

		// Pass 2: generate the symbol streams and histograms for this slice.
		for (uint32_t block_y = 0; block_y < num_blocks_y; block_y++)
		{
			for (uint32_t block_x = 0; block_x < num_blocks_x; block_x++)
			{
				const uint32_t block_index = first_block_index + block_x + block_y * num_blocks_x;

				encoder_block& m = m_slice_encoder_blocks[slice_index](block_x, block_y);

				// One endpoint-prediction symbol is emitted per 2x2 macroblock: 2 bits of
				// predictor per block, packed into 8 bits. Identical consecutive symbols
				// are run-length encoded via ENDPOINT_PRED_REPEAT_LAST_SYMBOL.
				if (((block_x & 1) == 0) && ((block_y & 1) == 0))
				{
					uint32_t endpoint_pred_cur_sym_bits = 0;
					for (uint32_t y = 0; y < 2; y++)
					{
						for (uint32_t x = 0; x < 2; x++)
						{
							const uint32_t bx = block_x + x;
							const uint32_t by = block_y + y;

							// Out-of-bounds macroblock cells code "no prediction".
							uint32_t pred = basist::NO_ENDPOINT_PRED_INDEX;
							if ((bx < num_blocks_x) && (by < num_blocks_y))
								pred = m_slice_encoder_blocks[slice_index](bx, by).m_endpoint_predictor;

							endpoint_pred_cur_sym_bits |= (pred << (x * 2 + y * 4));
						}
					}

					if ((int)endpoint_pred_cur_sym_bits == prev_endpoint_pred_sym_bits)
					{
						// Same symbol as last macroblock: extend the current run.
						endpoint_pred_repeat_count++;
					}
					else
					{
						// Run broken: flush any pending run, either as an RLE escape + count
						// (long runs) or as repeated literal symbols (short runs).
						if (endpoint_pred_repeat_count > 0)
						{
							if (endpoint_pred_repeat_count > (int)basist::ENDPOINT_PRED_MIN_REPEAT_COUNT)
							{
								endpoint_pred_histogram.inc(basist::ENDPOINT_PRED_REPEAT_LAST_SYMBOL);
								endpoint_pred_syms[slice_index].push_back(basist::ENDPOINT_PRED_REPEAT_LAST_SYMBOL);
								endpoint_pred_syms[slice_index].push_back(endpoint_pred_repeat_count);
							}
							else
							{
								for (int j = 0; j < endpoint_pred_repeat_count; j++)
								{
									endpoint_pred_histogram.inc(prev_endpoint_pred_sym_bits);
									endpoint_pred_syms[slice_index].push_back(prev_endpoint_pred_sym_bits);
								}
							}

							endpoint_pred_repeat_count = 0;
						}

						endpoint_pred_histogram.inc(endpoint_pred_cur_sym_bits);
						endpoint_pred_syms[slice_index].push_back(endpoint_pred_cur_sym_bits);

						prev_endpoint_pred_sym_bits = endpoint_pred_cur_sym_bits;
					}
				}

				int new_endpoint_index = m_endpoint_remap_table_old_to_new[m.m_endpoint_index];

				// Blocks without an endpoint predictor code their endpoint as a delta vs.
				// the previous block's (new-space) endpoint index.
				if (m.m_endpoint_predictor == basist::NO_ENDPOINT_PRED_INDEX)
				{
					int endpoint_delta = new_endpoint_index - prev_endpoint_index;

					// Endpoint RDO: if enabled and the delta is >1, try nearby palette
					// entries (cheaper to code) whose error stays within the quality
					// threshold. Skipped for blocks referenced by a neighbor's predictor.
					if ((m_params.m_endpoint_rdo_quality_thresh > 1.0f) && (iabs(endpoint_delta) > 1) && (!block_endpoints_are_referenced(block_x, block_y)))
					{
						const pixel_block& src_pixels = r.get_source_pixel_block(block_index);

						etc_block etc_blk(r.get_output_block(block_index));

						const uint64_t cur_err = etc_blk.evaluate_etc1_error(src_pixels.get_ptr(), r.get_params().m_perceptual);
						if (cur_err)
						{
							const float endpoint_remap_thresh = maximum(1.0f, m_params.m_endpoint_rdo_quality_thresh);
							const uint64_t thresh_err = (uint64_t)(cur_err * endpoint_remap_thresh);

							uint64_t best_trial_err = UINT64_MAX;
							int best_trial_idx = 0;

							etc_block trial_etc_blk(etc_blk);

							const int MAX_ENDPOINT_SEARCH_DIST = 32;
							const int search_dist = minimum<int>(iabs(endpoint_delta) - 1, MAX_ENDPOINT_SEARCH_DIST);

							// NOTE(review): this tries offsets in [-search_dist, search_dist),
							// so +search_dist itself is never evaluated — confirm intended.
							for (int d = -search_dist; d < search_dist; d++)
							{
								// Wrap the trial index around the endpoint palette.
								int trial_idx = prev_endpoint_index + d;
								if (trial_idx < 0)
									trial_idx += (int)r.get_total_endpoint_clusters();
								else if (trial_idx >= (int)r.get_total_endpoint_clusters())
									trial_idx -= (int)r.get_total_endpoint_clusters();

								if (trial_idx == new_endpoint_index)
									continue;

								// Skip it if this new endpoint palette entry is actually never used.
								if (!m_new_endpoint_was_used[trial_idx])
									continue;

								const etc1_endpoint_palette_entry& p = m_endpoint_palette[m_endpoint_remap_table_new_to_old[trial_idx]];

								trial_etc_blk.set_block_color5_etc1s(p.m_color5);
								trial_etc_blk.set_inten_tables_etc1s(p.m_inten5);

								uint64_t trial_err = trial_etc_blk.evaluate_etc1_error(src_pixels.get_ptr(), r.get_params().m_perceptual);

								if (trial_err <= thresh_err)
								{
									if (trial_err < best_trial_err)
									{
										best_trial_err = trial_err;
										best_trial_idx = trial_idx;
									}
								}
							}

							if (best_trial_err != UINT64_MAX)
							{
								// Accept the cheaper endpoint: rewrite the block and the delta.
								m.m_endpoint_index = m_endpoint_remap_table_new_to_old[best_trial_idx];

								new_endpoint_index = best_trial_idx;

								endpoint_delta = new_endpoint_index - prev_endpoint_index;

								total_endpoint_indices_remapped++;
							}
						}
					}

					// Deltas are coded modulo the palette size (always non-negative).
					if (endpoint_delta < 0)
						endpoint_delta += (int)r.get_total_endpoint_clusters();

					delta_endpoint_histogram.inc(endpoint_delta);
				}

				block_endpoint_indices.push_back(m_endpoint_remap_table_new_to_old[new_endpoint_index]);

				prev_endpoint_index = new_endpoint_index;

				// Selectors are coded for every block except video CR-predicted ones
				// (those reuse the previous frame's selectors).
				if ((!is_video) || (m.m_endpoint_predictor != basist::CR_ENDPOINT_PRED_INDEX))
				{
					int new_selector_index = m_selector_remap_table_old_to_new[m.m_selector_index];

					int selector_history_buf_index = -1;

					if (m.m_is_cr_target)
					{
						// CR targets: only an exact match in the history buffer may be used;
						// the selector itself must not be remapped.
						for (uint32_t j = 0; j < selector_history_buf.size(); j++)
						{
							const int trial_idx = selector_history_buf[j];
							if (trial_idx == new_selector_index)
							{
								total_used_selector_history_buf++;
								selector_history_buf_index = j;
								selector_history_buf_histogram.inc(j);
								break;
							}
						}
					}
					else
					{
						// Selector RDO: search the history buffer for an entry whose error is
						// within selector_remap_thresh of the block's current error.
						const pixel_block& src_pixels = r.get_source_pixel_block(block_index);

						const etc_block& etc_blk = r.get_output_block(block_index);

						color_rgba etc_blk_unpacked[16];
						unpack_etc1(etc_blk, etc_blk_unpacked);

						// Current block error vs. the source pixels.
						uint64_t cur_err = 0;
						if (r.get_params().m_perceptual)
						{
							for (uint32_t p = 0; p < 16; p++)
								cur_err += color_distance(true, src_pixels.get_ptr()[p], etc_blk_unpacked[p], false);
						}
						else
						{
							for (uint32_t p = 0; p < 16; p++)
								cur_err += color_distance(false, src_pixels.get_ptr()[p], etc_blk_unpacked[p], false);
						}

						uint64_t best_trial_err = UINT64_MAX;
						int best_trial_idx = 0;
						uint32_t best_trial_history_buf_idx = 0;

						const float selector_remap_thresh = maximum(1.0f, m_params.m_selector_rdo_quality_thresh); //2.5f;

						// At compression level 0 with no RDO slack, only exact matches count.
						const bool use_strict_search = (m_params.m_compression_level == 0) && (selector_remap_thresh == 1.0f);

						const uint64_t limit_err = (uint64_t)ceilf(cur_err * selector_remap_thresh);

						for (uint32_t j = 0; j < selector_history_buf.size(); j++)
						{
							const int trial_idx = selector_history_buf[j];

							if (use_strict_search)
							{
								if (trial_idx == new_selector_index)
								{
									best_trial_err = 0;
									best_trial_idx = trial_idx;
									best_trial_history_buf_idx = j;
									break;
								}
							}
							else
							{
								uint64_t trial_err = 0;

								// Early-out bound: no point exceeding the RDO limit or the best so far.
								const uint64_t thresh_err = minimum(limit_err, best_trial_err);

								color_rgba block_colors[4];
								etc_blk.get_block_colors(block_colors, 0);

								const uint8_t* pSelectors = &m_selector_palette[m_selector_remap_table_new_to_old[trial_idx]](0, 0);

								if (r.get_params().m_perceptual)
								{
									for (uint32_t p = 0; p < 16; p++)
									{
										uint32_t sel = pSelectors[p];
										trial_err += color_distance(true, src_pixels.get_ptr()[p], block_colors[sel], false);
										if (trial_err > thresh_err)
											break;
									}
								}
								else
								{
									for (uint32_t p = 0; p < 16; p++)
									{
										uint32_t sel = pSelectors[p];
										trial_err += color_distance(false, src_pixels.get_ptr()[p], block_colors[sel], false);
										if (trial_err > thresh_err)
											break;
									}
								}

								if ((trial_err < best_trial_err) && (trial_err <= thresh_err))
								{
									assert(trial_err <= limit_err);

									best_trial_err = trial_err;
									best_trial_idx = trial_idx;
									best_trial_history_buf_idx = j;
								}
							}
						}

						if (best_trial_err != UINT64_MAX)
						{
							// Use the history-buffer entry (possibly remapping the selector).
							if (new_selector_index != best_trial_idx)
								total_selector_indices_remapped++;

							new_selector_index = best_trial_idx;

							total_used_selector_history_buf++;

							selector_history_buf_index = best_trial_history_buf_idx;

							selector_history_buf_histogram.inc(best_trial_history_buf_idx);
						}
					} // if (m_params.m_selector_rdo_quality_thresh > 0.0f)

					m.m_selector_index = m_selector_remap_table_new_to_old[new_selector_index];

					// A hit on history slot 0 only extends the pending RLE run; anything
					// else first flushes that run (as an RLE escape + count for long runs,
					// or repeated slot-0 symbols for short ones).
					if ((selector_history_buf_rle_count) && (selector_history_buf_index != 0))
					{
						if (selector_history_buf_rle_count >= (int)basist::SELECTOR_HISTORY_BUF_RLE_COUNT_THRESH)
						{
							selector_syms[slice_index].push_back(SELECTOR_HISTORY_BUF_RLE_SYMBOL_INDEX);
							selector_syms[slice_index].push_back(selector_history_buf_rle_count);

							int run_sym = selector_history_buf_rle_count - basist::SELECTOR_HISTORY_BUF_RLE_COUNT_THRESH;
							if (run_sym >= ((int)basist::SELECTOR_HISTORY_BUF_RLE_COUNT_TOTAL - 1))
								selector_history_buf_rle_histogram.inc(basist::SELECTOR_HISTORY_BUF_RLE_COUNT_TOTAL - 1);
							else
								selector_history_buf_rle_histogram.inc(run_sym);

							selector_histogram.inc(SELECTOR_HISTORY_BUF_RLE_SYMBOL_INDEX);
						}
						else
						{
							for (int k = 0; k < selector_history_buf_rle_count; k++)
							{
								uint32_t sym_index = SELECTOR_HISTORY_BUF_FIRST_SYMBOL_INDEX + 0;

								selector_syms[slice_index].push_back(sym_index);

								selector_histogram.inc(sym_index);
							}
						}

						selector_history_buf_rle_count = 0;
					}

					if (selector_history_buf_index >= 0)
					{
						if (selector_history_buf_index == 0)
							selector_history_buf_rle_count++;
						else
						{
							uint32_t history_buf_sym = SELECTOR_HISTORY_BUF_FIRST_SYMBOL_INDEX + selector_history_buf_index;

							selector_syms[slice_index].push_back(history_buf_sym);

							selector_histogram.inc(history_buf_sym);
						}
					}
					else
					{
						// No usable history entry: emit the literal selector index.
						selector_syms[slice_index].push_back(new_selector_index);

						selector_histogram.inc(new_selector_index);
					}

					m.m_selector_history_buf_index = selector_history_buf_index;

					// Update the approximate-MTF buffer: insert misses, promote hits.
					if (selector_history_buf_index < 0)
						selector_history_buf.add(new_selector_index);
					else if (selector_history_buf.size())
						selector_history_buf.use(selector_history_buf_index);
				}

				block_selector_indices.push_back(m.m_selector_index);
			} // block_x
		} // block_y

		// End of slice: flush any still-pending endpoint-prediction run ...
		if (endpoint_pred_repeat_count > 0)
		{
			if (endpoint_pred_repeat_count > (int)basist::ENDPOINT_PRED_MIN_REPEAT_COUNT)
			{
				endpoint_pred_histogram.inc(basist::ENDPOINT_PRED_REPEAT_LAST_SYMBOL);
				endpoint_pred_syms[slice_index].push_back(basist::ENDPOINT_PRED_REPEAT_LAST_SYMBOL);

				endpoint_pred_syms[slice_index].push_back(endpoint_pred_repeat_count);
			}
			else
			{
				for (int j = 0; j < endpoint_pred_repeat_count; j++)
				{
					endpoint_pred_histogram.inc(prev_endpoint_pred_sym_bits);
					endpoint_pred_syms[slice_index].push_back(prev_endpoint_pred_sym_bits);
				}
			}

			endpoint_pred_repeat_count = 0;
		}

		// ... and any still-pending selector slot-0 run (same scheme as above).
		if (selector_history_buf_rle_count)
		{
			if (selector_history_buf_rle_count >= (int)basist::SELECTOR_HISTORY_BUF_RLE_COUNT_THRESH)
			{
				selector_syms[slice_index].push_back(SELECTOR_HISTORY_BUF_RLE_SYMBOL_INDEX);
				selector_syms[slice_index].push_back(selector_history_buf_rle_count);

				int run_sym = selector_history_buf_rle_count - basist::SELECTOR_HISTORY_BUF_RLE_COUNT_THRESH;
				if (run_sym >= ((int)basist::SELECTOR_HISTORY_BUF_RLE_COUNT_TOTAL - 1))
					selector_history_buf_rle_histogram.inc(basist::SELECTOR_HISTORY_BUF_RLE_COUNT_TOTAL - 1);
				else
					selector_history_buf_rle_histogram.inc(run_sym);

				selector_histogram.inc(SELECTOR_HISTORY_BUF_RLE_SYMBOL_INDEX);
			}
			else
			{
				for (int i = 0; i < selector_history_buf_rle_count; i++)
				{
					uint32_t sym_index = SELECTOR_HISTORY_BUF_FIRST_SYMBOL_INDEX + 0;

					selector_syms[slice_index].push_back(sym_index);

					selector_histogram.inc(sym_index);
				}
			}

			selector_history_buf_rle_count = 0;
		}
	} // slice_index

	debug_printf("Endpoint pred RDO total endpoint indices remapped: %u %3.2f%%\n",
		total_endpoint_indices_remapped, total_endpoint_indices_remapped * 100.0f / get_total_blocks());

	debug_printf("Selector history RDO total selector indices remapped: %u %3.2f%%, Used history buf: %u %3.2f%%\n",
		total_selector_indices_remapped, total_selector_indices_remapped * 100.0f / get_total_blocks(),
		total_used_selector_history_buf, total_used_selector_history_buf * 100.0f / get_total_blocks());

	// If endpoint RDO changed any block's endpoints, give the frontend a chance to
	// re-optimize the remapped endpoint clusters and rebuild the palette.
	//if ((total_endpoint_indices_remapped) && (m_params.m_compression_level > 0))
	if ((total_endpoint_indices_remapped) && (m_params.m_compression_level > 1) && (!m_params.m_used_global_codebooks))
	{
		int_vec unused;
		r.reoptimize_remapped_endpoints(block_endpoint_indices, unused, false, &block_selector_indices);

		create_endpoint_palette();
	}

	check_for_valid_cr_blocks();
	compute_slice_crcs();

	double endpoint_pred_entropy = endpoint_pred_histogram.get_entropy() / endpoint_pred_histogram.get_total();
	double delta_endpoint_entropy = delta_endpoint_histogram.get_entropy() / delta_endpoint_histogram.get_total();
	double selector_entropy = selector_histogram.get_entropy() / selector_histogram.get_total();

	debug_printf("Histogram entropy: EndpointPred: %3.3f DeltaEndpoint: %3.3f DeltaSelector: %3.3f\n", endpoint_pred_entropy, delta_endpoint_entropy, selector_entropy);

	// Build the four Huffman models. Each empty histogram gets one dummy count so
	// table construction can't fail on a degenerate (all-zero) distribution.
	if (!endpoint_pred_histogram.get_total())
		endpoint_pred_histogram.inc(0);

	huffman_encoding_table endpoint_pred_model;
	if (!endpoint_pred_model.init(endpoint_pred_histogram, 16))
	{
		error_printf("endpoint_pred_model.init() failed!");
		return false;
	}

	if (!delta_endpoint_histogram.get_total())
		delta_endpoint_histogram.inc(0);

	huffman_encoding_table delta_endpoint_model;
	if (!delta_endpoint_model.init(delta_endpoint_histogram, 16))
	{
		error_printf("delta_endpoint_model.init() failed!");
		return false;
	}

	if (!selector_histogram.get_total())
		selector_histogram.inc(0);

	huffman_encoding_table selector_model;
	if (!selector_model.init(selector_histogram, 16))
	{
		error_printf("selector_model.init() failed!");
		return false;
	}

	if (!selector_history_buf_rle_histogram.get_total())
		selector_history_buf_rle_histogram.inc(0);

	huffman_encoding_table selector_history_buf_rle_model;
	if (!selector_history_buf_rle_model.init(selector_history_buf_rle_histogram, 16))
	{
		error_printf("selector_history_buf_rle_model.init() failed!");
		return false;
	}

	// Emit the model tables (shared by all slices), plus the history buffer size.
	bitwise_coder coder;
	coder.init(1024 * 1024 * 4);

	uint32_t endpoint_pred_model_bits = coder.emit_huffman_table(endpoint_pred_model);
	uint32_t delta_endpoint_bits = coder.emit_huffman_table(delta_endpoint_model);
	uint32_t selector_model_bits = coder.emit_huffman_table(selector_model);
	uint32_t selector_history_buf_run_sym_bits = coder.emit_huffman_table(selector_history_buf_rle_model);

	coder.put_bits(basist::MAX_SELECTOR_HISTORY_BUF_SIZE, 13);

	debug_printf("Model sizes: EndpointPred: %u bits %u bytes (%3.3f bpp) DeltaEndpoint: %u bits %u bytes (%3.3f bpp) Selector: %u bits %u bytes (%3.3f bpp) SelectorHistBufRLE: %u bits %u bytes (%3.3f bpp)\n",
		endpoint_pred_model_bits, (endpoint_pred_model_bits + 7) / 8, endpoint_pred_model_bits / float(get_total_input_texels()),
		delta_endpoint_bits, (delta_endpoint_bits + 7) / 8, delta_endpoint_bits / float(get_total_input_texels()),
		selector_model_bits, (selector_model_bits + 7) / 8, selector_model_bits / float(get_total_input_texels()),
		selector_history_buf_run_sym_bits, (selector_history_buf_run_sym_bits + 7) / 8, selector_history_buf_run_sym_bits / float(get_total_input_texels()));

	coder.flush();

	m_output.m_slice_image_tables = coder.get_bytes();

	uint32_t total_endpoint_pred_bits = 0, total_delta_endpoint_bits = 0, total_selector_bits = 0;

	uint32_t total_image_bytes = 0;

	m_output.m_slice_image_data.resize(m_slices.size());

	// Pass 3: walk each slice's blocks again, in the identical order, and emit the
	// Huffman codes for the symbol streams recorded in pass 2.
	for (uint32_t slice_index = 0; slice_index < m_slices.size(); slice_index++)
	{
		//const uint32_t width = m_slices[slice_index].m_width;
		//const uint32_t height = m_slices[slice_index].m_height;
		const uint32_t num_blocks_x = m_slices[slice_index].m_num_blocks_x;
		const uint32_t num_blocks_y = m_slices[slice_index].m_num_blocks_y;

		coder.init(1024 * 1024 * 4);

		uint32_t cur_selector_sym_ofs = 0;
		uint32_t selector_rle_count = 0;

		int endpoint_pred_repeat_count = 0;
		uint32_t cur_endpoint_pred_sym_ofs = 0;
		//		uint32_t prev_endpoint_pred_sym = 0;
		uint32_t prev_endpoint_index = 0;

		for (uint32_t block_y = 0; block_y < num_blocks_y; block_y++)
		{
			for (uint32_t block_x = 0; block_x < num_blocks_x; block_x++)
			{
				const encoder_block& m = m_slice_encoder_blocks[slice_index](block_x, block_y);

				// Endpoint-prediction symbols: one per 2x2 macroblock, consuming pending
				// repeat counts before reading the next symbol from the stream.
				if (((block_x & 1) == 0) && ((block_y & 1) == 0))
				{
					if (endpoint_pred_repeat_count > 0)
					{
						endpoint_pred_repeat_count--;
					}
					else
					{
						uint32_t sym = endpoint_pred_syms[slice_index][cur_endpoint_pred_sym_ofs++];

						if (sym == basist::ENDPOINT_PRED_REPEAT_LAST_SYMBOL)
						{
							total_endpoint_pred_bits += coder.put_code(sym, endpoint_pred_model);

							endpoint_pred_repeat_count = endpoint_pred_syms[slice_index][cur_endpoint_pred_sym_ofs++];
							assert(endpoint_pred_repeat_count >= (int)basist::ENDPOINT_PRED_MIN_REPEAT_COUNT);

							// The repeat count is coded as a VLC, biased by the minimum run length.
							total_endpoint_pred_bits += coder.put_vlc(endpoint_pred_repeat_count - basist::ENDPOINT_PRED_MIN_REPEAT_COUNT, basist::ENDPOINT_PRED_COUNT_VLC_BITS);

							endpoint_pred_repeat_count--;
						}
						else
						{
							total_endpoint_pred_bits += coder.put_code(sym, endpoint_pred_model);

							//prev_endpoint_pred_sym = sym;
						}
					}
				}

				const int new_endpoint_index = m_endpoint_remap_table_old_to_new[m.m_endpoint_index];

				// Unpredicted blocks code their endpoint as a wrapped delta (mirrors pass 2).
				if (m.m_endpoint_predictor == basist::NO_ENDPOINT_PRED_INDEX)
				{
					int endpoint_delta = new_endpoint_index - prev_endpoint_index;
					if (endpoint_delta < 0)
						endpoint_delta += (int)r.get_total_endpoint_clusters();

					total_delta_endpoint_bits += coder.put_code(endpoint_delta, delta_endpoint_model);
				}

				prev_endpoint_index = new_endpoint_index;

				// Selector symbols are emitted except for video CR-predicted blocks.
				if ((!is_video) || (m.m_endpoint_predictor != basist::CR_ENDPOINT_PRED_INDEX))
				{
					if (!selector_rle_count)
					{
						uint32_t selector_sym_index = selector_syms[slice_index][cur_selector_sym_ofs++];

						if (selector_sym_index == SELECTOR_HISTORY_BUF_RLE_SYMBOL_INDEX)
							selector_rle_count = selector_syms[slice_index][cur_selector_sym_ofs++];

						total_selector_bits += coder.put_code(selector_sym_index, selector_model);

						if (selector_sym_index == SELECTOR_HISTORY_BUF_RLE_SYMBOL_INDEX)
						{
							// Short runs code the run length directly; runs at/above the model's
							// max bucket use the escape bucket followed by a 7-bit VLC.
							int run_sym = selector_rle_count - basist::SELECTOR_HISTORY_BUF_RLE_COUNT_THRESH;
							if (run_sym >= ((int)basist::SELECTOR_HISTORY_BUF_RLE_COUNT_TOTAL - 1))
							{
								total_selector_bits += coder.put_code(basist::SELECTOR_HISTORY_BUF_RLE_COUNT_TOTAL - 1, selector_history_buf_rle_model);

								uint32_t n = selector_rle_count - basist::SELECTOR_HISTORY_BUF_RLE_COUNT_THRESH;
								total_selector_bits += coder.put_vlc(n, 7);
							}
							else
								total_selector_bits += coder.put_code(run_sym, selector_history_buf_rle_model);
						}
					}

					if (selector_rle_count)
						selector_rle_count--;
				}
			} // block_x
		} // block_y

		// Both symbol streams must be fully consumed, or pass 2 and pass 3 diverged.
		BASISU_BACKEND_VERIFY(cur_endpoint_pred_sym_ofs == endpoint_pred_syms[slice_index].size());
		BASISU_BACKEND_VERIFY(cur_selector_sym_ofs == selector_syms[slice_index].size());

		coder.flush();

		m_output.m_slice_image_data[slice_index] = coder.get_bytes();

		total_image_bytes += (uint32_t)coder.get_bytes().size();

		debug_printf("Slice %u compressed size: %u bytes, %3.3f bits per slice texel\n", slice_index, m_output.m_slice_image_data[slice_index].size(), m_output.m_slice_image_data[slice_index].size() * 8.0f / (m_slices[slice_index].m_orig_width * m_slices[slice_index].m_orig_height));
	} // slice_index

	const double total_texels = static_cast<double>(get_total_input_texels());
	const double total_blocks = static_cast<double>(get_total_blocks());

	debug_printf("Total endpoint pred bits: %u bytes: %u bits/texel: %3.3f bits/block: %3.3f\n", total_endpoint_pred_bits, total_endpoint_pred_bits / 8, total_endpoint_pred_bits / total_texels, total_endpoint_pred_bits / total_blocks);
	debug_printf("Total delta endpoint bits: %u bytes: %u bits/texel: %3.3f bits/block: %3.3f\n", total_delta_endpoint_bits, total_delta_endpoint_bits / 8, total_delta_endpoint_bits / total_texels, total_delta_endpoint_bits / total_blocks);
	debug_printf("Total selector bits: %u bytes: %u bits/texel: %3.3f bits/block: %3.3f\n", total_selector_bits, total_selector_bits / 8, total_selector_bits / total_texels, total_selector_bits / total_blocks);

	debug_printf("Total table bytes: %u, %3.3f bits/texel\n", m_output.m_slice_image_tables.size(), m_output.m_slice_image_tables.size() * 8.0f / total_texels);
	debug_printf("Total image bytes: %u, %3.3f bits/texel\n", total_image_bytes, total_image_bytes * 8.0f / total_texels);

	return true;
}
// Delta-codes the (reordered) endpoint palette into m_output.m_endpoint_palette.
//
// Entries are visited in NEW (remapped) order; each entry's 5-bit color components
// and 3-bit intensity index are coded as wrapped deltas vs. the previous entry,
// using one of three Huffman models for color (selected by the previous component's
// value range) plus one model for intensity. A 1-bit grayscale flag lets grayscale
// palettes code a single component per entry. Both the histogram pass and the
// emission pass must walk the entries identically, since the models' selection
// depends on the evolving prev_color5/prev_inten state.
//
// Returns false only if a Huffman table fails to initialize.
bool basisu_backend::encode_endpoint_palette()
{
	const basisu_frontend& r = *m_pFront_end;

	// The endpoint indices may have been changed by the backend's RDO step, so go and figure out which ones are actually used again.
	bool_vec old_endpoint_was_used(r.get_total_endpoint_clusters());
	uint32_t first_old_entry_index = UINT32_MAX;

	for (uint32_t slice_index = 0; slice_index < m_slices.size(); slice_index++)
	{
		const uint32_t num_blocks_x = m_slices[slice_index].m_num_blocks_x, num_blocks_y = m_slices[slice_index].m_num_blocks_y;
		for (uint32_t block_y = 0; block_y < num_blocks_y; block_y++)
		{
			for (uint32_t block_x = 0; block_x < num_blocks_x; block_x++)
			{
				encoder_block& m = m_slice_encoder_blocks[slice_index](block_x, block_y);

				const uint32_t old_endpoint_index = m.m_endpoint_index;

				old_endpoint_was_used[old_endpoint_index] = true;
				first_old_entry_index = basisu::minimum(first_old_entry_index, old_endpoint_index);
			} // block_x
		} // block_y
	} // slice_index

	debug_printf("basisu_backend::encode_endpoint_palette: first_old_entry_index: %u\n", first_old_entry_index);

	// Maps NEW to OLD endpoints
	uint_vec endpoint_remap_table_new_to_old(r.get_total_endpoint_clusters());
	// Unused new entries all map to the first used old entry, so they still code to
	// something valid below.
	endpoint_remap_table_new_to_old.set_all(first_old_entry_index);

	bool_vec new_endpoint_was_used(r.get_total_endpoint_clusters());

	for (uint32_t old_endpoint_index = 0; old_endpoint_index < m_endpoint_remap_table_old_to_new.size(); old_endpoint_index++)
	{
		if (old_endpoint_was_used[old_endpoint_index])
		{
			const uint32_t new_endpoint_index = m_endpoint_remap_table_old_to_new[old_endpoint_index];

			new_endpoint_was_used[new_endpoint_index] = true;

			endpoint_remap_table_new_to_old[new_endpoint_index] = old_endpoint_index;
		}
	}

	// TODO: Some new endpoint palette entries may actually be unused and aren't worth coding. Fix that.

	uint32_t total_unused_new_entries = 0;
	for (uint32_t i = 0; i < new_endpoint_was_used.size(); i++)
		if (!new_endpoint_was_used[i])
			total_unused_new_entries++;
	debug_printf("basisu_backend::encode_endpoint_palette: total_unused_new_entries: %u out of %u\n", total_unused_new_entries, new_endpoint_was_used.size());

	// Detect a fully grayscale palette (r == g == b for every entry) so only one
	// color component per entry needs to be coded.
	bool is_grayscale = true;
	for (uint32_t old_endpoint_index = 0; old_endpoint_index < (uint32_t)m_endpoint_palette.size(); old_endpoint_index++)
	{
		int r5 = m_endpoint_palette[old_endpoint_index].m_color5[0];
		int g5 = m_endpoint_palette[old_endpoint_index].m_color5[1];
		int b5 = m_endpoint_palette[old_endpoint_index].m_color5[2];
		if ((r5 != g5) || (r5 != b5))
		{
			is_grayscale = false;
			break;
		}
	}

	// Pass 1: build delta histograms. Three color models, chosen by the previous
	// component's value range, capture the different achievable delta distributions.
	histogram color5_delta_hist0(32); // prev 0-9, delta is -9 to 31
	histogram color5_delta_hist1(32); // prev 10-21, delta is -21 to 21
	histogram color5_delta_hist2(32); // prev 22-31, delta is -31 to 9
	histogram inten_delta_hist(8);

	// Initial predictor state; must match the emission pass (and the decoder).
	color_rgba prev_color5(16, 16, 16, 0);
	uint32_t prev_inten = 0;

	for (uint32_t new_endpoint_index = 0; new_endpoint_index < r.get_total_endpoint_clusters(); new_endpoint_index++)
	{
		const uint32_t old_endpoint_index = endpoint_remap_table_new_to_old[new_endpoint_index];

		// Intensity deltas are coded modulo 8 (3-bit field).
		int delta_inten = m_endpoint_palette[old_endpoint_index].m_inten5 - prev_inten;
		inten_delta_hist.inc(delta_inten & 7);
		prev_inten = m_endpoint_palette[old_endpoint_index].m_inten5;

		for (uint32_t i = 0; i < (is_grayscale ? 1U : 3U); i++)
		{
			// Color deltas are coded modulo 32 (5-bit components).
			const int delta = (m_endpoint_palette[old_endpoint_index].m_color5[i] - prev_color5[i]) & 31;

			if (prev_color5[i] <= basist::COLOR5_PAL0_PREV_HI)
				color5_delta_hist0.inc(delta);
			else if (prev_color5[i] <= basist::COLOR5_PAL1_PREV_HI)
				color5_delta_hist1.inc(delta);
			else
				color5_delta_hist2.inc(delta);

			prev_color5[i] = m_endpoint_palette[old_endpoint_index].m_color5[i];
		}
	}

	// Ensure no histogram is degenerate (all zero) before building the models.
	if (!color5_delta_hist0.get_total()) color5_delta_hist0.inc(0);
	if (!color5_delta_hist1.get_total()) color5_delta_hist1.inc(0);
	if (!color5_delta_hist2.get_total()) color5_delta_hist2.inc(0);

	huffman_encoding_table color5_delta_model0, color5_delta_model1, color5_delta_model2, inten_delta_model;
	if (!color5_delta_model0.init(color5_delta_hist0, 16))
	{
		error_printf("color5_delta_model.init() failed!");
		return false;
	}

	if (!color5_delta_model1.init(color5_delta_hist1, 16))
	{
		error_printf("color5_delta_model.init() failed!");
		return false;
	}

	if (!color5_delta_model2.init(color5_delta_hist2, 16))
	{
		error_printf("color5_delta_model.init() failed!");
		return false;
	}

	if (!inten_delta_model.init(inten_delta_hist, 16))
	{
		error_printf("inten3_model.init() failed!");
		return false;
	}

	// Pass 2: emit the tables, the grayscale flag, then every entry's deltas using
	// exactly the same traversal and predictor state as pass 1.
	bitwise_coder coder;
	coder.init(8192);

	coder.emit_huffman_table(color5_delta_model0);
	coder.emit_huffman_table(color5_delta_model1);
	coder.emit_huffman_table(color5_delta_model2);
	coder.emit_huffman_table(inten_delta_model);

	coder.put_bits(is_grayscale, 1);

	// Reset the predictor state to match pass 1's starting values.
	prev_color5.set(16, 16, 16, 0);
	prev_inten = 0;

	for (uint32_t new_endpoint_index = 0; new_endpoint_index < r.get_total_endpoint_clusters(); new_endpoint_index++)
	{
		const uint32_t old_endpoint_index = endpoint_remap_table_new_to_old[new_endpoint_index];

		int delta_inten = (m_endpoint_palette[old_endpoint_index].m_inten5 - prev_inten) & 7;
		coder.put_code(delta_inten, inten_delta_model);
		prev_inten = m_endpoint_palette[old_endpoint_index].m_inten5;

		for (uint32_t i = 0; i < (is_grayscale ? 1U : 3U); i++)
		{
			const int delta = (m_endpoint_palette[old_endpoint_index].m_color5[i] - prev_color5[i]) & 31;

			if (prev_color5[i] <= basist::COLOR5_PAL0_PREV_HI)
				coder.put_code(delta, color5_delta_model0);
			else if (prev_color5[i] <= basist::COLOR5_PAL1_PREV_HI)
				coder.put_code(delta, color5_delta_model1);
			else
				coder.put_code(delta, color5_delta_model2);

			prev_color5[i] = m_endpoint_palette[old_endpoint_index].m_color5[i];
		}
	} // q

	coder.flush();

	m_output.m_endpoint_palette = coder.get_bytes();

	debug_printf("Endpoint codebook size: %u bits %u bytes, Bits per entry: %3.1f, Avg bits/texel: %3.3f\n",
		8 * (int)m_output.m_endpoint_palette.size(), (int)m_output.m_endpoint_palette.size(), m_output.m_endpoint_palette.size() * 8.0f / r.get_total_endpoint_clusters(), m_output.m_endpoint_palette.size() * 8.0f / get_total_input_texels());

	return true;
}
// Serializes the selector codebook into m_output.m_selector_palette.
// Three mutually exclusive on-disk formats are supported, selected by the
// encoder params and signalled to the decoder via leading header bits:
//   1) pure global codebook: only (pal_index, mod_index) references are written;
//   2) hybrid: a per-cluster flag chooses between a global-codebook reference
//      and a raw 4-byte selector entry;
//   3) local: delta(XOR)-coded selector bytes against the previous (remapped)
//      entry, with a raw-bytes fallback if the Huffman-coded form is no smaller.
// Entries are always written in new (remapped) order via
// m_selector_remap_table_new_to_old. Returns false only if a Huffman table
// fails to initialize.
bool basisu_backend::encode_selector_palette()
{
    const basisu_frontend& r = *m_pFront_end;

    if ((m_params.m_use_global_sel_codebook) && (!m_params.m_use_hybrid_sel_codebooks))
    {
        // ---- Format 1: every cluster is a reference into the global selector codebook.
        // Histogram the modifier indices so they can be Huffman coded.
        histogram global_mod_indices(1 << m_params.m_global_sel_codebook_mod_bits);
        for (uint32_t q = 0; q < r.get_total_selector_clusters(); q++)
            global_mod_indices.inc(m_global_selector_palette_desc[q].m_mod_index);

        // NOTE(review): global_pal_model is declared but never initialized or
        // used below — palette indices are written as fixed-width bits instead.
        huffman_encoding_table global_pal_model, global_mod_model;

        if (!global_mod_model.init(global_mod_indices, 16))
        {
            error_printf("global_mod_model.init() failed!");
            return false;
        }

        bitwise_coder coder;
        coder.init(1024 * 1024);

        // Header: format selector plus the fixed field widths the decoder needs.
        coder.put_bits(1, 1); // use global codebook
        coder.put_bits(m_params.m_global_sel_codebook_pal_bits, 4); // pal bits
        coder.put_bits(m_params.m_global_sel_codebook_mod_bits, 4); // mod bits

        // The modifier Huffman table is only present when modifiers are in use.
        uint32_t mod_model_bits = 0;
        if (m_params.m_global_sel_codebook_mod_bits)
            mod_model_bits = coder.emit_huffman_table(global_mod_model);

        // Emit one (pal_index, mod_index) pair per cluster, in remapped order.
        uint32_t total_pal_bits = 0;
        uint32_t total_mod_bits = 0;
        for (uint32_t q = 0; q < r.get_total_selector_clusters(); q++)
        {
            const uint32_t i = m_selector_remap_table_new_to_old[q];

            if (m_params.m_global_sel_codebook_pal_bits)
            {
                // Palette index: fixed-width (not Huffman coded).
                coder.put_bits(m_global_selector_palette_desc[i].m_pal_index, m_params.m_global_sel_codebook_pal_bits);
                total_pal_bits += m_params.m_global_sel_codebook_pal_bits;
            }

            if (m_params.m_global_sel_codebook_mod_bits)
                total_mod_bits += coder.put_code(m_global_selector_palette_desc[i].m_mod_index, global_mod_model);
        }

        coder.flush();

        m_output.m_selector_palette = coder.get_bytes();

        debug_printf("Modifier model bits: %u Avg per entry: %3.3f\n", mod_model_bits, mod_model_bits / float(r.get_total_selector_clusters()));
        debug_printf("Palette bits: %u Avg per entry: %3.3f, Modifier bits: %u Avg per entry: %3.3f\n", total_pal_bits, total_pal_bits / float(r.get_total_selector_clusters()), total_mod_bits, total_mod_bits / float(r.get_total_selector_clusters()));
    }
    else if (m_params.m_use_hybrid_sel_codebooks)
    {
        // ---- Format 2: hybrid. A Huffman-coded 1-bit flag per cluster selects
        // either a global-codebook reference or a raw 4-byte selector entry.
        huff2D used_global_cb_bitflag_huff2D(1, 8);

        // First pass: collect statistics for the flag coder and the modifier histogram.
        histogram global_mod_indices(1 << m_params.m_global_sel_codebook_mod_bits);

        for (uint32_t s = 0; s < r.get_total_selector_clusters(); s++)
        {
            const uint32_t q = m_selector_remap_table_new_to_old[s];

            const bool used_global_cb_flag = r.get_selector_cluster_uses_global_cb_vec()[q];

            used_global_cb_bitflag_huff2D.emit(used_global_cb_flag);

            global_mod_indices.inc(m_global_selector_palette_desc[q].m_mod_index);
        }

        huffman_encoding_table global_mod_indices_model;
        if (!global_mod_indices_model.init(global_mod_indices, 16))
        {
            error_printf("global_mod_indices_model.init() failed!");
            return false;
        }

        bitwise_coder coder;
        coder.init(1024 * 1024);

        // Header: not pure-global, but hybrid.
        coder.put_bits(0, 1); // use global codebook
        coder.put_bits(1, 1); // uses hybrid codebooks

        coder.put_bits(m_params.m_global_sel_codebook_pal_bits, 4); // pal bits
        coder.put_bits(m_params.m_global_sel_codebook_mod_bits, 4); // mod bits

        used_global_cb_bitflag_huff2D.start_encoding(16);
        coder.emit_huffman_table(used_global_cb_bitflag_huff2D.get_encoding_table());

        if (m_params.m_global_sel_codebook_mod_bits)
            coder.emit_huffman_table(global_mod_indices_model);

        // Second pass: emit flag + payload per cluster, tracking bit counts for stats.
        uint32_t total_global_cb_entries = 0;
        uint32_t total_pal_bits = 0;
        uint32_t total_mod_bits = 0;
        uint32_t total_selectors = 0;
        uint32_t total_selector_bits = 0;
        uint32_t total_flag_bits = 0;

        for (uint32_t s = 0; s < r.get_total_selector_clusters(); s++)
        {
            const uint32_t q = m_selector_remap_table_new_to_old[s];

            total_flag_bits += used_global_cb_bitflag_huff2D.emit_next_sym(coder);

            const bool used_global_cb_flag = r.get_selector_cluster_uses_global_cb_vec()[q];

            if (used_global_cb_flag)
            {
                // Reference into the global codebook: fixed-width palette index
                // plus Huffman-coded modifier index.
                total_global_cb_entries++;

                total_pal_bits += coder.put_bits(r.get_selector_cluster_global_selector_entry_ids()[q].m_palette_index, m_params.m_global_sel_codebook_pal_bits);
                total_mod_bits += coder.put_code(r.get_selector_cluster_global_selector_entry_ids()[q].m_modifier.get_index(), global_mod_indices_model);
            }
            else
            {
                // Raw selector entry: 4 bytes (one per block row), uncompressed.
                total_selectors++;
                total_selector_bits += 32;

                for (uint32_t j = 0; j < 4; j++)
                    coder.put_bits(m_selector_palette[q].get_byte(j), 8);
            }
        }

        coder.flush();

        m_output.m_selector_palette = coder.get_bytes();

        debug_printf("Total global CB entries: %u %3.2f%%\n", total_global_cb_entries, total_global_cb_entries * 100.0f / r.get_total_selector_clusters());
        debug_printf("Total selector entries: %u %3.2f%%\n", total_selectors, total_selectors * 100.0f / r.get_total_selector_clusters());
        debug_printf("Total pal bits: %u, mod bits: %u, selector bits: %u, flag bits: %u\n", total_pal_bits, total_mod_bits, total_selector_bits, total_flag_bits);
    }
    else
    {
        // ---- Format 3: local selector codebook. Each entry's 4 bytes are
        // XOR-predicted from the previous (remapped) entry and the XOR deltas
        // are Huffman coded. The very first entry is written raw.
        histogram delta_selector_pal_histogram(256);

        for (uint32_t q = 0; q < r.get_total_selector_clusters(); q++)
        {
            if (!q)
                continue; // entry 0 has no predecessor — written raw below

            const basist::etc1_selector_palette_entry &cur = m_selector_palette[m_selector_remap_table_new_to_old[q]];
            const basist::etc1_selector_palette_entry predictor(m_selector_palette[m_selector_remap_table_new_to_old[q - 1]]);

            for (uint32_t j = 0; j < 4; j++)
                delta_selector_pal_histogram.inc(cur.get_byte(j) ^ predictor.get_byte(j));
        }

        // Huffman init requires a non-empty histogram (e.g. single-cluster codebooks).
        if (!delta_selector_pal_histogram.get_total())
            delta_selector_pal_histogram.inc(0);

        huffman_encoding_table delta_selector_pal_model;
        if (!delta_selector_pal_model.init(delta_selector_pal_histogram, 16))
        {
            error_printf("delta_selector_pal_model.init() failed!");
            return false;
        }

        bitwise_coder coder;
        coder.init(1024 * 1024);

        // Header: local codebook, Huffman-coded deltas.
        coder.put_bits(0, 1); // use global codebook
        coder.put_bits(0, 1); // uses hybrid codebooks

        coder.put_bits(0, 1); // raw bytes

        coder.emit_huffman_table(delta_selector_pal_model);

        for (uint32_t q = 0; q < r.get_total_selector_clusters(); q++)
        {
            if (!q)
            {
                // First entry: 4 raw bytes, no prediction possible.
                for (uint32_t j = 0; j < 4; j++)
                    coder.put_bits(m_selector_palette[m_selector_remap_table_new_to_old[q]].get_byte(j), 8);
                continue;
            }

            const basist::etc1_selector_palette_entry &cur = m_selector_palette[m_selector_remap_table_new_to_old[q]];
            const basist::etc1_selector_palette_entry predictor(m_selector_palette[m_selector_remap_table_new_to_old[q - 1]]);

            for (uint32_t j = 0; j < 4; j++)
                coder.put_code(cur.get_byte(j) ^ predictor.get_byte(j), delta_selector_pal_model);
        }

        coder.flush();

        m_output.m_selector_palette = coder.get_bytes();

        // Fallback: if the delta-coded stream is no smaller than plain raw bytes
        // (4 per entry), re-encode everything uncompressed instead.
        if (m_output.m_selector_palette.size() >= r.get_total_selector_clusters() * 4)
        {
            coder.init(1024 * 1024);

            coder.put_bits(0, 1); // use global codebook
            coder.put_bits(0, 1); // uses hybrid codebooks

            coder.put_bits(1, 1); // raw bytes

            for (uint32_t q = 0; q < r.get_total_selector_clusters(); q++)
            {
                const uint32_t i = m_selector_remap_table_new_to_old[q];

                for (uint32_t j = 0; j < 4; j++)
                    coder.put_bits(m_selector_palette[i].get_byte(j), 8);
            }

            coder.flush();

            m_output.m_selector_palette = coder.get_bytes();
        }

    } // if (m_params.m_use_global_sel_codebook)

    debug_printf("Selector codebook bits: %u bytes: %u, Bits per entry: %3.1f, Avg bits/texel: %3.3f\n",
        (int)m_output.m_selector_palette.size() * 8, (int)m_output.m_selector_palette.size(),
        m_output.m_selector_palette.size() * 8.0f / r.get_total_selector_clusters(), m_output.m_selector_palette.size() * 8.0f / get_total_input_texels());

    return true;
}
  1378. uint32_t basisu_backend::encode()
  1379. {
  1380. //const bool is_video = m_pFront_end->get_params().m_tex_type == basist::cBASISTexTypeVideoFrames;
  1381. m_output.m_slice_desc = m_slices;
  1382. m_output.m_etc1s = m_params.m_etc1s;
  1383. m_output.m_uses_global_codebooks = m_params.m_used_global_codebooks;
  1384. m_output.m_srgb = m_pFront_end->get_params().m_perceptual;
  1385. create_endpoint_palette();
  1386. create_selector_palette();
  1387. create_encoder_blocks();
  1388. if (!encode_image())
  1389. return 0;
  1390. if (!encode_endpoint_palette())
  1391. return 0;
  1392. if (!encode_selector_palette())
  1393. return 0;
  1394. uint32_t total_compressed_bytes = (uint32_t)(m_output.m_slice_image_tables.size() + m_output.m_endpoint_palette.size() + m_output.m_selector_palette.size());
  1395. for (uint32_t i = 0; i < m_output.m_slice_image_data.size(); i++)
  1396. total_compressed_bytes += (uint32_t)m_output.m_slice_image_data[i].size();
  1397. debug_printf("Wrote %u bytes, %3.3f bits/texel\n", total_compressed_bytes, total_compressed_bytes * 8.0f / get_total_input_texels());
  1398. return total_compressed_bytes;
  1399. }
  1400. } // namespace basisu