// nanovg_bgfx.cpp
/*
 * Copyright 2011-2025 Branimir Karadzic. All rights reserved.
 * License: https://github.com/bkaradzic/bgfx/blob/master/LICENSE
 */
//
// Copyright (c) 2009-2013 Mikko Mononen [email protected]
//
// This software is provided 'as-is', without any express or implied
// warranty. In no event will the authors be held liable for any damages
// arising from the use of this software.
// Permission is granted to anyone to use this software for any purpose,
// including commercial applications, and to alter it and redistribute it
// freely, subject to the following restrictions:
// 1. The origin of this software must not be misrepresented; you must not
//    claim that you wrote the original software. If you use this software
//    in a product, an acknowledgment in the product documentation would be
//    appreciated but is not required.
// 2. Altered source versions must be plainly marked as such, and must not be
//    misrepresented as being the original software.
// 3. This notice may not be removed or altered from any source distribution.
//

#define NVG_ANTIALIAS 1

#include <stdlib.h>
#include <math.h>
#include "nanovg.h"
#include "nanovg_bgfx.h" // NVGLUframebuffer and public API declarations

#include <bgfx/bgfx.h>
#include <bgfx/embedded_shader.h>

#include <bx/bx.h>
#include <bx/allocator.h>
#include <bx/uint32_t.h>

BX_PRAGMA_DIAGNOSTIC_IGNORED_MSVC(4244); // warning C4244: '=' : conversion from '' to '', possible loss of data

#include "vs_nanovg_fill.bin.h"
#include "fs_nanovg_fill.bin.h"

static const bgfx::EmbeddedShader s_embeddedShaders[] =
{
    BGFX_EMBEDDED_SHADER(vs_nanovg_fill),
    BGFX_EMBEDDED_SHADER(fs_nanovg_fill),

    BGFX_EMBEDDED_SHADER_END()
};
namespace
{

static bgfx::VertexLayout s_nvgLayout;

enum GLNVGshaderType
{
    NSVG_SHADER_FILLGRAD,
    NSVG_SHADER_FILLIMG,
    NSVG_SHADER_SIMPLE,
    NSVG_SHADER_IMG
};

struct GLNVGtexture
{
    bgfx::TextureHandle id;
    int width, height;
    int type;
    int flags;
};

struct GLNVGblend
{
    uint64_t srcRGB;
    uint64_t dstRGB;
    uint64_t srcAlpha;
    uint64_t dstAlpha;
};

enum GLNVGcallType
{
    GLNVG_FILL,
    GLNVG_CONVEXFILL,
    GLNVG_STROKE,
    GLNVG_TRIANGLES,
};

struct GLNVGcall
{
    int type;
    int image;
    int pathOffset;
    int pathCount;
    int vertexOffset;
    int vertexCount;
    int uniformOffset;
    GLNVGblend blendFunc;
};

struct GLNVGpath
{
    int fillOffset;
    int fillCount;
    int strokeOffset;
    int strokeCount;
};

struct GLNVGfragUniforms
{
    float scissorMat[12]; // matrices are actually 3 vec4s
    float paintMat[12];
    NVGcolor innerCol;
    NVGcolor outerCol;

    // u_scissorExtScale
    float scissorExt[2];
    float scissorScale[2];

    // u_extentRadius
    float extent[2];
    float radius;

    // u_params
    float feather;
    float strokeMult;
    float texType;
    float type;
};

struct GLNVGcontext
{
    bx::AllocatorI* allocator;

    bgfx::ProgramHandle prog;
    bgfx::UniformHandle u_scissorMat;
    bgfx::UniformHandle u_paintMat;
    bgfx::UniformHandle u_innerCol;
    bgfx::UniformHandle u_outerCol;
    bgfx::UniformHandle u_viewSize;
    bgfx::UniformHandle u_scissorExtScale;
    bgfx::UniformHandle u_extentRadius;
    bgfx::UniformHandle u_params;
    bgfx::UniformHandle s_tex;

    uint64_t state;
    bgfx::TextureHandle th;
    bgfx::TextureHandle texMissing;

    bgfx::TransientVertexBuffer tvb;
    bgfx::ViewId viewId;

    struct GLNVGtexture* textures;
    float view[2];
    int ntextures;
    int ctextures;
    int textureId;
    int vertBuf;
    int fragSize;
    int edgeAntiAlias;

    // Per frame buffers
    struct GLNVGcall* calls;
    int ccalls;
    int ncalls;
    struct GLNVGpath* paths;
    int cpaths;
    int npaths;
    struct NVGvertex* verts;
    int cverts;
    int nverts;
    unsigned char* uniforms;
    int cuniforms;
    int nuniforms;
};
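
// Texture bookkeeping: the backend keeps a growable array of GLNVGtexture slots.
// A slot is free when its handle index is bgfx::kInvalidHandle (the array is
// memset to 0xff on growth); when no free slot exists a new one is appended,
// doubling the capacity.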
static struct GLNVGtexture* glnvg__allocTexture(struct GLNVGcontext* gl)
{
    struct GLNVGtexture* tex = NULL;
    int i;

    for (i = 0; i < gl->ntextures; i++)
    {
        if (gl->textures[i].id.idx == bgfx::kInvalidHandle)
        {
            tex = &gl->textures[i];
            break;
        }
    }

    if (tex == NULL)
    {
        if (gl->ntextures+1 > gl->ctextures)
        {
            int old = gl->ctextures;
            gl->ctextures = (gl->ctextures == 0) ? 2 : gl->ctextures*2;
            gl->textures = (struct GLNVGtexture*)bx::realloc(gl->allocator, gl->textures, sizeof(struct GLNVGtexture)*gl->ctextures);
            bx::memSet(&gl->textures[old], 0xff, (gl->ctextures-old)*sizeof(struct GLNVGtexture) );

            if (gl->textures == NULL)
            {
                return NULL;
            }
        }

        tex = &gl->textures[gl->ntextures++];
    }

    bx::memSet(tex, 0, sizeof(*tex) );

    return tex;
}

static struct GLNVGtexture* glnvg__findTexture(struct GLNVGcontext* gl, int id)
{
    int i;

    for (i = 0; i < gl->ntextures; i++)
    {
        if (gl->textures[i].id.idx == id)
        {
            return &gl->textures[i];
        }
    }

    return NULL;
}

static int glnvg__deleteTexture(struct GLNVGcontext* gl, int id)
{
    for (int ii = 0; ii < gl->ntextures; ii++)
    {
        if (gl->textures[ii].id.idx == id)
        {
            if (bgfx::isValid(gl->textures[ii].id)
            &&  (gl->textures[ii].flags & NVG_IMAGE_NODELETE) == 0)
            {
                bgfx::destroy(gl->textures[ii].id);
            }

            bx::memSet(&gl->textures[ii], 0, sizeof(gl->textures[ii]) );
            gl->textures[ii].id.idx = bgfx::kInvalidHandle;
            return 1;
        }
    }

    return 0;
}
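
// Renderer callbacks (installed via NVGparams below). nvgRenderCreate builds
// the shader program from the embedded shaders, a 4x4 placeholder texture used
// when an image id cannot be resolved, the fill-shader uniforms, and the
// position/texcoord vertex layout. fragSize is sizeof(GLNVGfragUniforms) padded
// out to a 16-byte boundary so per-call uniform blocks can be packed into one
// contiguous byte buffer.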
static int nvgRenderCreate(void* _userPtr)
{
    struct GLNVGcontext* gl = (struct GLNVGcontext*)_userPtr;

    bgfx::RendererType::Enum type = bgfx::getRendererType();
    gl->prog = bgfx::createProgram(
          bgfx::createEmbeddedShader(s_embeddedShaders, type, "vs_nanovg_fill")
        , bgfx::createEmbeddedShader(s_embeddedShaders, type, "fs_nanovg_fill")
        , true
        );

    const bgfx::Memory* mem = bgfx::alloc(4*4*4);
    uint32_t* bgra8 = (uint32_t*)mem->data;
    bx::memSet(bgra8, 0, 4*4*4);
    gl->texMissing = bgfx::createTexture2D(4, 4, false, 1, bgfx::TextureFormat::BGRA8, 0, mem);

    gl->u_scissorMat      = bgfx::createUniform("u_scissorMat",      bgfx::UniformType::Mat3);
    gl->u_paintMat        = bgfx::createUniform("u_paintMat",        bgfx::UniformType::Mat3);
    gl->u_innerCol        = bgfx::createUniform("u_innerCol",        bgfx::UniformType::Vec4);
    gl->u_outerCol        = bgfx::createUniform("u_outerCol",        bgfx::UniformType::Vec4);
    gl->u_viewSize        = bgfx::createUniform("u_viewSize",        bgfx::UniformType::Vec4);
    gl->u_scissorExtScale = bgfx::createUniform("u_scissorExtScale", bgfx::UniformType::Vec4);
    gl->u_extentRadius    = bgfx::createUniform("u_extentRadius",    bgfx::UniformType::Vec4);
    gl->u_params          = bgfx::createUniform("u_params",          bgfx::UniformType::Vec4);
    gl->s_tex             = bgfx::createUniform("s_tex",             bgfx::UniformType::Sampler);

    s_nvgLayout
        .begin()
        .add(bgfx::Attrib::Position,  2, bgfx::AttribType::Float)
        .add(bgfx::Attrib::TexCoord0, 2, bgfx::AttribType::Float)
        .end();

    int align = 16;
    gl->fragSize = sizeof(struct GLNVGfragUniforms) + align - sizeof(struct GLNVGfragUniforms) % align;

    return 1;
}
static int nvgRenderCreateTexture(
      void* _userPtr
    , int _type
    , int _width
    , int _height
    , int _flags
    , const unsigned char* _rgba
    )
{
    struct GLNVGcontext* gl = (struct GLNVGcontext*)_userPtr;
    struct GLNVGtexture* tex = glnvg__allocTexture(gl);

    if (tex == NULL)
    {
        return 0;
    }

    tex->width  = _width;
    tex->height = _height;
    tex->type   = _type;
    tex->flags  = _flags;

    uint32_t bytesPerPixel = NVG_TEXTURE_RGBA == tex->type ? 4 : 1;
    uint32_t pitch = tex->width * bytesPerPixel;

    const bgfx::Memory* mem = NULL;
    if (NULL != _rgba)
    {
        mem = bgfx::copy(_rgba, tex->height * pitch);
    }

    BX_ASSERT(tex->width  >= 0 && tex->width  <= bx::max<uint16_t>(), "Invalid tex width %d (max: %u)",  tex->width,  bx::max<uint16_t>());
    BX_ASSERT(tex->height >= 0 && tex->height <= bx::max<uint16_t>(), "Invalid tex height %d (max: %u)", tex->height, bx::max<uint16_t>());

    tex->id = bgfx::createTexture2D(
          uint16_t(tex->width)
        , uint16_t(tex->height)
        , false
        , 1
        , NVG_TEXTURE_RGBA == _type ? bgfx::TextureFormat::RGBA8 : bgfx::TextureFormat::R8
        , BGFX_SAMPLER_NONE
        );

    if (NULL != mem)
    {
        bgfx::updateTexture2D(
              tex->id
            , 0
            , 0
            , 0
            , 0
            , uint16_t(tex->width)
            , uint16_t(tex->height)
            , mem
            );
    }

    return bgfx::isValid(tex->id) ? tex->id.idx : 0;
}

static int nvgRenderDeleteTexture(void* _userPtr, int image)
{
    struct GLNVGcontext* gl = (struct GLNVGcontext*)_userPtr;
    return glnvg__deleteTexture(gl, image);
}

static int nvgRenderUpdateTexture(void* _userPtr, int image, int x, int y, int w, int h, const unsigned char* data)
{
    struct GLNVGcontext* gl = (struct GLNVGcontext*)_userPtr;
    struct GLNVGtexture* tex = glnvg__findTexture(gl, image);
    if (tex == NULL)
    {
        return 0;
    }

    uint32_t bytesPerPixel = NVG_TEXTURE_RGBA == tex->type ? 4 : 1;
    uint32_t pitch = tex->width * bytesPerPixel;

    const bgfx::Memory* mem = bgfx::alloc(w * h * bytesPerPixel);
    bx::gather(mem->data                       // dst
        , data + y * pitch + x * bytesPerPixel // src
        , pitch                                // srcStride
        , w * bytesPerPixel                    // stride
        , h                                    // num
        );

    BX_ASSERT(x >= 0 && x <= bx::max<uint16_t>(), "Invalid tex x pos %d (max: %u)",  x, bx::max<uint16_t>());
    BX_ASSERT(y >= 0 && y <= bx::max<uint16_t>(), "Invalid tex y pos %d (max: %u)",  y, bx::max<uint16_t>());
    BX_ASSERT(w >= 0 && w <= bx::max<uint16_t>(), "Invalid tex width %d (max: %u)",  w, bx::max<uint16_t>());
    BX_ASSERT(h >= 0 && h <= bx::max<uint16_t>(), "Invalid tex height %d (max: %u)", h, bx::max<uint16_t>());

    bgfx::updateTexture2D(
          tex->id
        , 0
        , 0
        , uint16_t(x)
        , uint16_t(y)
        , uint16_t(w)
        , uint16_t(h)
        , mem
        , UINT16_MAX
        );

    return 1;
}

static int nvgRenderGetTextureSize(void* _userPtr, int image, int* w, int* h)
{
    struct GLNVGcontext* gl = (struct GLNVGcontext*)_userPtr;
    struct GLNVGtexture* tex = glnvg__findTexture(gl, image);

    if (NULL == tex
    || !bgfx::isValid(tex->id) )
    {
        return 0;
    }

    *w = tex->width;
    *h = tex->height;

    return 1;
}
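
// NanoVG passes transforms around as 2x3 affine matrices (float[6]); the fill
// shader expects mat3 uniforms packed as three vec4 columns, hence the 3x4
// expansion below.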
static void glnvg__xformToMat3x4(float* m3, float* t)
{
    m3[ 0] = t[0];
    m3[ 1] = t[1];
    m3[ 2] = 0.0f;
    m3[ 3] = 0.0f;
    m3[ 4] = t[2];
    m3[ 5] = t[3];
    m3[ 6] = 0.0f;
    m3[ 7] = 0.0f;
    m3[ 8] = t[4];
    m3[ 9] = t[5];
    m3[10] = 1.0f;
    m3[11] = 0.0f;
}

static NVGcolor glnvg__premulColor(NVGcolor c)
{
    c.r *= c.a;
    c.g *= c.a;
    c.b *= c.a;
    return c;
}
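
// Translates an NVGpaint + NVGscissor pair into one GLNVGfragUniforms block:
// colors are premultiplied, the scissor/paint transforms are inverted and
// packed, and type/texType select the shader path (gradient vs. image).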
static int glnvg__convertPaint(
      struct GLNVGcontext* gl
    , struct GLNVGfragUniforms* frag
    , struct NVGpaint* paint
    , struct NVGscissor* scissor
    , float width
    , float fringe
    )
{
    struct GLNVGtexture* tex = NULL;
    float invxform[6] = {};

    bx::memSet(frag, 0, sizeof(*frag) );

    frag->innerCol = glnvg__premulColor(paint->innerColor);
    frag->outerCol = glnvg__premulColor(paint->outerColor);

    if (scissor->extent[0] < -0.5f || scissor->extent[1] < -0.5f)
    {
        bx::memSet(frag->scissorMat, 0, sizeof(frag->scissorMat) );
        frag->scissorExt[0]   = 1.0f;
        frag->scissorExt[1]   = 1.0f;
        frag->scissorScale[0] = 1.0f;
        frag->scissorScale[1] = 1.0f;
    }
    else
    {
        nvgTransformInverse(invxform, scissor->xform);
        glnvg__xformToMat3x4(frag->scissorMat, invxform);
        frag->scissorExt[0]   = scissor->extent[0];
        frag->scissorExt[1]   = scissor->extent[1];
        frag->scissorScale[0] = sqrtf(scissor->xform[0]*scissor->xform[0] + scissor->xform[2]*scissor->xform[2]) / fringe;
        frag->scissorScale[1] = sqrtf(scissor->xform[1]*scissor->xform[1] + scissor->xform[3]*scissor->xform[3]) / fringe;
    }

    bx::memCopy(frag->extent, paint->extent, sizeof(frag->extent) );
    frag->strokeMult = (width*0.5f + fringe*0.5f) / fringe;

    gl->th = gl->texMissing;
    if (paint->image != 0)
    {
        tex = glnvg__findTexture(gl, paint->image);
        if (tex == NULL)
        {
            return 0;
        }

        nvgTransformInverse(invxform, paint->xform);
        frag->type = NSVG_SHADER_FILLIMG;

        if (tex->type == NVG_TEXTURE_RGBA)
        {
            frag->texType = (tex->flags & NVG_IMAGE_PREMULTIPLIED) ? 0.0f : 1.0f;
        }
        else
        {
            frag->texType = 2.0f;
        }

        gl->th = tex->id;
    }
    else
    {
        frag->type    = NSVG_SHADER_FILLGRAD;
        frag->radius  = paint->radius;
        frag->feather = paint->feather;
        nvgTransformInverse(invxform, paint->xform);
    }

    glnvg__xformToMat3x4(frag->paintMat, invxform);

    return 1;
}
static void glnvg__mat3(float* dst, float* src)
{
    dst[0] = src[ 0];
    dst[1] = src[ 1];
    dst[2] = src[ 2];

    dst[3] = src[ 4];
    dst[4] = src[ 5];
    dst[5] = src[ 6];

    dst[6] = src[ 8];
    dst[7] = src[ 9];
    dst[8] = src[10];
}

static struct GLNVGfragUniforms* nvg__fragUniformPtr(struct GLNVGcontext* gl, int i)
{
    return (struct GLNVGfragUniforms*)&gl->uniforms[i];
}

static void nvgRenderSetUniforms(struct GLNVGcontext* gl, int uniformOffset, int image)
{
    struct GLNVGfragUniforms* frag = nvg__fragUniformPtr(gl, uniformOffset);
    float tmp[9]; // Maybe there's a way to get rid of this...

    glnvg__mat3(tmp, frag->scissorMat);
    bgfx::setUniform(gl->u_scissorMat, tmp);

    glnvg__mat3(tmp, frag->paintMat);
    bgfx::setUniform(gl->u_paintMat, tmp);

    bgfx::setUniform(gl->u_innerCol,        frag->innerCol.rgba);
    bgfx::setUniform(gl->u_outerCol,        frag->outerCol.rgba);
    bgfx::setUniform(gl->u_scissorExtScale, &frag->scissorExt[0]);
    bgfx::setUniform(gl->u_extentRadius,    &frag->extent[0]);
    bgfx::setUniform(gl->u_params,          &frag->feather);

    bgfx::TextureHandle handle = gl->texMissing;

    if (image != 0)
    {
        struct GLNVGtexture* tex = glnvg__findTexture(gl, image);
        if (tex != NULL)
        {
            handle = tex->id;
        }
    }

    gl->th = handle;
}

static void nvgRenderViewport(void* _userPtr, float width, float height, float devicePixelRatio)
{
    struct GLNVGcontext* gl = (struct GLNVGcontext*)_userPtr;
    gl->view[0] = width;
    gl->view[1] = height;
    bgfx::setViewRect(gl->viewId, 0, 0, width * devicePixelRatio, height * devicePixelRatio);
}
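
// bgfx has no triangle-fan primitive, so each fill fan is expanded into an
// indexed triangle list written to a transient 16-bit index buffer.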
static void fan(uint32_t _start, uint32_t _count)
{
    uint32_t numTris = _count-2;

    BX_ASSERT(_count >= 3, "less than one triangle");
    BX_ASSERT(_start + ((numTris - 1) * 3) + 2 <= UINT16_MAX, "index overflow");

    bgfx::TransientIndexBuffer tib;
    bgfx::allocTransientIndexBuffer(&tib, numTris*3);
    BX_ASSERT(tib.size == numTris*3*(tib.isIndex16 ? 2 : 4), "did not get enough room for indices");

    uint16_t* data = (uint16_t*)tib.data;
    for (uint32_t ii = 0; ii < numTris; ++ii)
    {
        data[ii*3+0] = uint16_t(_start);
        data[ii*3+1] = uint16_t(_start + ii + 1);
        data[ii*3+2] = uint16_t(_start + ii + 2);
    }

    bgfx::setIndexBuffer(&tib);
}
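
// Concave fill, classic NanoVG stencil approach: the first pass writes only the
// stencil buffer, incrementing for front faces and decrementing for back faces;
// an optional fringe pass draws anti-aliased edges where the stencil is still
// zero; the final bounding quad fills every pixel with a non-zero stencil value
// and resets the stencil as it goes.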
static void glnvg__fill(struct GLNVGcontext* gl, struct GLNVGcall* call)
{
    struct GLNVGpath* paths = &gl->paths[call->pathOffset];
    int i, npaths = call->pathCount;

    // set bindpoint for solid loc
    nvgRenderSetUniforms(gl, call->uniformOffset, 0);

    for (i = 0; i < npaths; i++)
    {
        if (2 < paths[i].fillCount)
        {
            bgfx::setState(0);
            bgfx::setStencil(0
                | BGFX_STENCIL_TEST_ALWAYS
                | BGFX_STENCIL_FUNC_RMASK(0xff)
                | BGFX_STENCIL_OP_FAIL_S_KEEP
                | BGFX_STENCIL_OP_FAIL_Z_KEEP
                | BGFX_STENCIL_OP_PASS_Z_INCR
                , 0
                | BGFX_STENCIL_TEST_ALWAYS
                | BGFX_STENCIL_FUNC_RMASK(0xff)
                | BGFX_STENCIL_OP_FAIL_S_KEEP
                | BGFX_STENCIL_OP_FAIL_Z_KEEP
                | BGFX_STENCIL_OP_PASS_Z_DECR
                );
            bgfx::setVertexBuffer(0, &gl->tvb);
            bgfx::setTexture(0, gl->s_tex, gl->th);
            fan(paths[i].fillOffset, paths[i].fillCount);
            bgfx::submit(gl->viewId, gl->prog);
        }
    }

    // Draw aliased off-pixels
    nvgRenderSetUniforms(gl, call->uniformOffset + gl->fragSize, call->image);

    if (gl->edgeAntiAlias)
    {
        // Draw fringes
        for (i = 0; i < npaths; i++)
        {
            bgfx::setState(gl->state
                | BGFX_STATE_PT_TRISTRIP
                );
            bgfx::setStencil(0
                | BGFX_STENCIL_TEST_EQUAL
                | BGFX_STENCIL_FUNC_RMASK(0xff)
                | BGFX_STENCIL_OP_FAIL_S_KEEP
                | BGFX_STENCIL_OP_FAIL_Z_KEEP
                | BGFX_STENCIL_OP_PASS_Z_KEEP
                );
            bgfx::setVertexBuffer(0, &gl->tvb, paths[i].strokeOffset, paths[i].strokeCount);
            bgfx::setTexture(0, gl->s_tex, gl->th);
            bgfx::submit(gl->viewId, gl->prog);
        }
    }

    // Draw fill
    bgfx::setState(gl->state);
    bgfx::setVertexBuffer(0, &gl->tvb, call->vertexOffset, call->vertexCount);
    bgfx::setTexture(0, gl->s_tex, gl->th);
    bgfx::setStencil(0
        | BGFX_STENCIL_TEST_NOTEQUAL
        | BGFX_STENCIL_FUNC_RMASK(0xff)
        | BGFX_STENCIL_OP_FAIL_S_ZERO
        | BGFX_STENCIL_OP_FAIL_Z_ZERO
        | BGFX_STENCIL_OP_PASS_Z_ZERO
        );
    bgfx::submit(gl->viewId, gl->prog);
}
static void glnvg__convexFill(struct GLNVGcontext* gl, struct GLNVGcall* call)
{
    struct GLNVGpath* paths = &gl->paths[call->pathOffset];
    int i, npaths = call->pathCount;

    nvgRenderSetUniforms(gl, call->uniformOffset, call->image);

    for (i = 0; i < npaths; i++)
    {
        if (paths[i].fillCount == 0) continue;
        bgfx::setState(gl->state);
        bgfx::setVertexBuffer(0, &gl->tvb);
        bgfx::setTexture(0, gl->s_tex, gl->th);
        fan(paths[i].fillOffset, paths[i].fillCount);
        bgfx::submit(gl->viewId, gl->prog);
    }

    if (gl->edgeAntiAlias)
    {
        // Draw fringes
        for (i = 0; i < npaths; i++)
        {
            bgfx::setState(gl->state
                | BGFX_STATE_PT_TRISTRIP
                );
            bgfx::setVertexBuffer(0, &gl->tvb, paths[i].strokeOffset, paths[i].strokeCount);
            bgfx::setTexture(0, gl->s_tex, gl->th);
            bgfx::submit(gl->viewId, gl->prog);
        }
    }
}

static void glnvg__stroke(struct GLNVGcontext* gl, struct GLNVGcall* call)
{
    struct GLNVGpath* paths = &gl->paths[call->pathOffset];
    int npaths = call->pathCount, i;

    nvgRenderSetUniforms(gl, call->uniformOffset, call->image);

    // Draw Strokes
    for (i = 0; i < npaths; i++)
    {
        bgfx::setState(gl->state
            | BGFX_STATE_PT_TRISTRIP
            );
        bgfx::setVertexBuffer(0, &gl->tvb, paths[i].strokeOffset, paths[i].strokeCount);
        bgfx::setTexture(0, gl->s_tex, gl->th);
        bgfx::submit(gl->viewId, gl->prog);
    }
}

static void glnvg__triangles(struct GLNVGcontext* gl, struct GLNVGcall* call)
{
    if (3 <= call->vertexCount)
    {
        nvgRenderSetUniforms(gl, call->uniformOffset, call->image);

        bgfx::setState(gl->state);
        bgfx::setVertexBuffer(0, &gl->tvb, call->vertexOffset, call->vertexCount);
        bgfx::setTexture(0, gl->s_tex, gl->th);
        bgfx::submit(gl->viewId, gl->prog);
    }
}
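
// NanoVG blend factors are one-hot bit flags, so the trailing-zero count of a
// factor indexes straight into this table of equivalent bgfx blend states.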
static const uint64_t s_blend[] =
{
    BGFX_STATE_BLEND_ZERO,
    BGFX_STATE_BLEND_ONE,
    BGFX_STATE_BLEND_SRC_COLOR,
    BGFX_STATE_BLEND_INV_SRC_COLOR,
    BGFX_STATE_BLEND_DST_COLOR,
    BGFX_STATE_BLEND_INV_DST_COLOR,
    BGFX_STATE_BLEND_SRC_ALPHA,
    BGFX_STATE_BLEND_INV_SRC_ALPHA,
    BGFX_STATE_BLEND_DST_ALPHA,
    BGFX_STATE_BLEND_INV_DST_ALPHA,
    BGFX_STATE_BLEND_SRC_ALPHA_SAT,
};

static uint64_t glnvg_convertBlendFuncFactor(int factor)
{
    const uint32_t numtz = bx::uint32_cnttz(factor);
    const uint32_t idx   = bx::uint32_min(numtz, BX_COUNTOF(s_blend)-1);
    return s_blend[idx];
}

static GLNVGblend glnvg__blendCompositeOperation(NVGcompositeOperationState op)
{
    GLNVGblend blend;
    blend.srcRGB   = glnvg_convertBlendFuncFactor(op.srcRGB);
    blend.dstRGB   = glnvg_convertBlendFuncFactor(op.dstRGB);
    blend.srcAlpha = glnvg_convertBlendFuncFactor(op.srcAlpha);
    blend.dstAlpha = glnvg_convertBlendFuncFactor(op.dstAlpha);

    if (blend.srcRGB   == BGFX_STATE_NONE
    ||  blend.dstRGB   == BGFX_STATE_NONE
    ||  blend.srcAlpha == BGFX_STATE_NONE
    ||  blend.dstAlpha == BGFX_STATE_NONE)
    {
        blend.srcRGB   = BGFX_STATE_BLEND_ONE;
        blend.dstRGB   = BGFX_STATE_BLEND_INV_SRC_ALPHA;
        blend.srcAlpha = BGFX_STATE_BLEND_ONE;
        blend.dstAlpha = BGFX_STATE_BLEND_INV_SRC_ALPHA;
    }

    return blend;
}
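
// Uploads all vertices recorded this frame into a single transient vertex
// buffer and replays the recorded calls; the per-call render state is rebuilt
// from the stored blend function. Counters are reset even when nothing drew.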
static void nvgRenderFlush(void* _userPtr)
{
    struct GLNVGcontext* gl = (struct GLNVGcontext*)_userPtr;

    if (gl->ncalls > 0)
    {
        int avail = bgfx::getAvailTransientVertexBuffer(gl->nverts, s_nvgLayout);
        if (avail < gl->nverts)
        {
            gl->nverts = avail;
            BX_WARN(true, "Vertex number truncated due to transient vertex buffer overflow");
            if (gl->nverts < 2)
            {
                goto _cleanup;
            }
        }

        bgfx::allocTransientVertexBuffer(&gl->tvb, gl->nverts, s_nvgLayout);

        int allocated = gl->tvb.size/gl->tvb.stride;

        if (allocated < gl->nverts)
        {
            // this branch should never be taken as we've already checked the transient vertex buffer size
            gl->nverts = allocated;
            BX_WARN(true, "Vertex number truncated due to transient vertex buffer overflow");
        }

        bx::memCopy(gl->tvb.data, gl->verts, gl->nverts * sizeof(struct NVGvertex) );
        bgfx::setUniform(gl->u_viewSize, gl->view);

        for (uint32_t ii = 0, num = gl->ncalls; ii < num; ++ii)
        {
            struct GLNVGcall* call = &gl->calls[ii];
            const GLNVGblend* blend = &call->blendFunc;
            gl->state = BGFX_STATE_BLEND_FUNC_SEPARATE(blend->srcRGB, blend->dstRGB, blend->srcAlpha, blend->dstAlpha)
                | BGFX_STATE_WRITE_RGB
                | BGFX_STATE_WRITE_A
                ;

            switch (call->type)
            {
            case GLNVG_FILL:
                glnvg__fill(gl, call);
                break;

            case GLNVG_CONVEXFILL:
                glnvg__convexFill(gl, call);
                break;

            case GLNVG_STROKE:
                glnvg__stroke(gl, call);
                break;

            case GLNVG_TRIANGLES:
                glnvg__triangles(gl, call);
                break;
            }
        }
    }

_cleanup:
    // Reset calls
    gl->nverts    = 0;
    gl->npaths    = 0;
    gl->ncalls    = 0;
    gl->nuniforms = 0;
}
static int glnvg__maxVertCount(const struct NVGpath* paths, int npaths)
{
    int i, count = 0;
    for (i = 0; i < npaths; i++)
    {
        count += paths[i].nfill;
        count += paths[i].nstroke;
    }
    return count;
}

static int glnvg__mini(int a, int b) { return a < b ? a : b; }
static int glnvg__maxi(int a, int b) { return a > b ? a : b; }

static struct GLNVGcall* glnvg__allocCall(struct GLNVGcontext* gl)
{
    struct GLNVGcall* ret = NULL;
    if (gl->ncalls+1 > gl->ccalls)
    {
        gl->ccalls = gl->ccalls == 0 ? 32 : gl->ccalls * 2;
        gl->calls = (struct GLNVGcall*)bx::realloc(gl->allocator, gl->calls, sizeof(struct GLNVGcall) * gl->ccalls);
    }
    ret = &gl->calls[gl->ncalls++];
    bx::memSet(ret, 0, sizeof(struct GLNVGcall) );
    return ret;
}

static int glnvg__allocPaths(struct GLNVGcontext* gl, int n)
{
    int ret = 0;
    if (gl->npaths + n > gl->cpaths)
    {
        GLNVGpath* paths;
        int cpaths = glnvg__maxi(gl->npaths + n, 128) + gl->cpaths / 2; // 1.5x Overallocate
        paths = (GLNVGpath*)bx::realloc(gl->allocator, gl->paths, sizeof(GLNVGpath) * cpaths);
        if (paths == NULL) return -1;
        gl->paths = paths;
        gl->cpaths = cpaths;
    }
    ret = gl->npaths;
    gl->npaths += n;
    return ret;
}

static int glnvg__allocVerts(GLNVGcontext* gl, int n)
{
    // Callers must run glnvg__flushIfNeeded() before allocating the GLNVGcall
    // that will reference these vertices.
    int ret = 0;
    BX_ASSERT(gl->nverts + n <= UINT16_MAX, "index overflow is imminent, please flush.");
    if (gl->nverts+n > gl->cverts)
    {
        NVGvertex* verts;
        int cverts = glnvg__maxi(gl->nverts + n, 4096) + gl->cverts/2; // 1.5x Overallocate
        cverts = glnvg__mini(cverts, UINT16_MAX);
        verts = (NVGvertex*)bx::realloc(gl->allocator, gl->verts, sizeof(NVGvertex) * cverts);
        if (verts == NULL) return -1;
        gl->verts = verts;
        gl->cverts = cverts;
    }
    ret = gl->nverts;
    gl->nverts += n;
    return ret;
}

static int glnvg__allocFragUniforms(struct GLNVGcontext* gl, int n)
{
    int ret = 0, structSize = gl->fragSize;
    if (gl->nuniforms+n > gl->cuniforms)
    {
        gl->cuniforms = gl->cuniforms == 0 ? glnvg__maxi(n, 32) : gl->cuniforms * 2;
        gl->uniforms = (unsigned char*)bx::realloc(gl->allocator, gl->uniforms, gl->cuniforms * structSize);
    }
    ret = gl->nuniforms * structSize;
    gl->nuniforms += n;
    return ret;
}

static void glnvg__vset(struct NVGvertex* vtx, float x, float y, float u, float v)
{
    vtx->x = x;
    vtx->y = y;
    vtx->u = u;
    vtx->v = v;
}
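
// The fan() indices are 16-bit, so a frame is flushed early whenever the next
// primitive would push the accumulated vertex count past UINT16_MAX.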
static void glnvg__flushIfNeeded(struct GLNVGcontext* gl, int nverts)
{
    if (gl->nverts + nverts > UINT16_MAX)
    {
        nvgRenderFlush(gl);
    }
}
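
// Records a fill: copies fill and fringe vertices for every path plus a
// six-vertex bounding quad, then allocates one uniform block for the paint
// (two when the stencil path is needed for concave fills).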
static void nvgRenderFill(
      void* _userPtr
    , NVGpaint* paint
    , NVGcompositeOperationState compositeOperation
    , NVGscissor* scissor
    , float fringe
    , const float* bounds
    , const NVGpath* paths
    , int npaths
    )
{
    struct GLNVGcontext* gl = (struct GLNVGcontext*)_userPtr;

    int maxverts = glnvg__maxVertCount(paths, npaths) + 6;
    glnvg__flushIfNeeded(gl, maxverts);

    struct GLNVGcall* call = glnvg__allocCall(gl);
    struct NVGvertex* quad;
    struct GLNVGfragUniforms* frag;
    int i, offset;

    call->type = GLNVG_FILL;
    call->pathOffset = glnvg__allocPaths(gl, npaths);
    call->pathCount = npaths;
    call->image = paint->image;
    call->blendFunc = glnvg__blendCompositeOperation(compositeOperation);

    if (npaths == 1 && paths[0].convex)
    {
        call->type = GLNVG_CONVEXFILL;
    }

    // Allocate vertices for all the paths.
    offset = glnvg__allocVerts(gl, maxverts);

    for (i = 0; i < npaths; i++)
    {
        struct GLNVGpath* copy = &gl->paths[call->pathOffset + i];
        const struct NVGpath* path = &paths[i];
        bx::memSet(copy, 0, sizeof(struct GLNVGpath) );

        if (path->nfill > 0)
        {
            copy->fillOffset = offset;
            copy->fillCount = path->nfill;
            bx::memCopy(&gl->verts[offset], path->fill, sizeof(struct NVGvertex) * path->nfill);
            offset += path->nfill;
        }

        if (path->nstroke > 0)
        {
            copy->strokeOffset = offset;
            copy->strokeCount = path->nstroke;
            bx::memCopy(&gl->verts[offset], path->stroke, sizeof(struct NVGvertex) * path->nstroke);
            offset += path->nstroke;
        }
    }

    // Quad
    call->vertexOffset = offset;
    call->vertexCount = 6;
    quad = &gl->verts[call->vertexOffset];
    glnvg__vset(&quad[0], bounds[0], bounds[3], 0.5f, 1.0f);
    glnvg__vset(&quad[1], bounds[2], bounds[3], 0.5f, 1.0f);
    glnvg__vset(&quad[2], bounds[2], bounds[1], 0.5f, 1.0f);

    glnvg__vset(&quad[3], bounds[0], bounds[3], 0.5f, 1.0f);
    glnvg__vset(&quad[4], bounds[2], bounds[1], 0.5f, 1.0f);
    glnvg__vset(&quad[5], bounds[0], bounds[1], 0.5f, 1.0f);

    // Setup uniforms for draw calls
    if (call->type == GLNVG_FILL)
    {
        call->uniformOffset = glnvg__allocFragUniforms(gl, 2);

        // Simple shader for stencil
        frag = nvg__fragUniformPtr(gl, call->uniformOffset);
        bx::memSet(frag, 0, sizeof(*frag) );
        frag->type = NSVG_SHADER_SIMPLE;

        // Fill shader
        glnvg__convertPaint(gl, nvg__fragUniformPtr(gl, call->uniformOffset + gl->fragSize), paint, scissor, fringe, fringe);
    }
    else
    {
        call->uniformOffset = glnvg__allocFragUniforms(gl, 1);

        // Fill shader
        glnvg__convertPaint(gl, nvg__fragUniformPtr(gl, call->uniformOffset), paint, scissor, fringe, fringe);
    }
}
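
// Records a stroke: only stroke vertices are copied; a single uniform block is
// built from the paint, stroke width, and fringe.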
static void nvgRenderStroke(
      void* _userPtr
    , struct NVGpaint* paint
    , NVGcompositeOperationState compositeOperation
    , struct NVGscissor* scissor
    , float fringe
    , float strokeWidth
    , const struct NVGpath* paths
    , int npaths
    )
{
    struct GLNVGcontext* gl = (struct GLNVGcontext*)_userPtr;

    int maxverts = glnvg__maxVertCount(paths, npaths);
    glnvg__flushIfNeeded(gl, maxverts);

    struct GLNVGcall* call = glnvg__allocCall(gl);
    int i, offset;

    call->type = GLNVG_STROKE;
    call->pathOffset = glnvg__allocPaths(gl, npaths);
    call->pathCount = npaths;
    call->image = paint->image;
    call->blendFunc = glnvg__blendCompositeOperation(compositeOperation);

    // Allocate vertices for all the paths.
    offset = glnvg__allocVerts(gl, maxverts);

    for (i = 0; i < npaths; i++)
    {
        struct GLNVGpath* copy = &gl->paths[call->pathOffset + i];
        const struct NVGpath* path = &paths[i];
        bx::memSet(copy, 0, sizeof(struct GLNVGpath) );

        BX_ASSERT(path->nfill == 0, "strokes should not have any fill");

        if (path->nstroke)
        {
            copy->strokeOffset = offset;
            copy->strokeCount = path->nstroke;
            bx::memCopy(&gl->verts[offset], path->stroke, sizeof(struct NVGvertex) * path->nstroke);
            offset += path->nstroke;
        }
    }

    // Fill shader
    call->uniformOffset = glnvg__allocFragUniforms(gl, 1);
    glnvg__convertPaint(gl, nvg__fragUniformPtr(gl, call->uniformOffset), paint, scissor, strokeWidth, fringe);
}

static void nvgRenderTriangles(
      void* _userPtr
    , struct NVGpaint* paint
    , NVGcompositeOperationState compositeOperation
    , struct NVGscissor* scissor
    , const struct NVGvertex* verts
    , int nverts
    )
{
    struct GLNVGcontext* gl = (struct GLNVGcontext*)_userPtr;

    glnvg__flushIfNeeded(gl, nverts);

    struct GLNVGcall* call = glnvg__allocCall(gl);
    struct GLNVGfragUniforms* frag;

    call->type = GLNVG_TRIANGLES;
    call->image = paint->image;
    call->blendFunc = glnvg__blendCompositeOperation(compositeOperation);

    // Allocate vertices for all the paths.
    call->vertexOffset = glnvg__allocVerts(gl, nverts);
    call->vertexCount = nverts;
    bx::memCopy(&gl->verts[call->vertexOffset], verts, sizeof(struct NVGvertex) * nverts);

    // Fill shader
    call->uniformOffset = glnvg__allocFragUniforms(gl, 1);
    frag = nvg__fragUniformPtr(gl, call->uniformOffset);
    glnvg__convertPaint(gl, frag, paint, scissor, 1.0f, 1.0f);
    frag->type = NSVG_SHADER_IMG;
}
static void nvgRenderDelete(void* _userPtr)
{
    struct GLNVGcontext* gl = (struct GLNVGcontext*)_userPtr;

    if (gl == NULL)
    {
        return;
    }

    bgfx::destroy(gl->prog);
    bgfx::destroy(gl->texMissing);

    bgfx::destroy(gl->u_scissorMat);
    bgfx::destroy(gl->u_paintMat);
    bgfx::destroy(gl->u_innerCol);
    bgfx::destroy(gl->u_outerCol);
    bgfx::destroy(gl->u_viewSize);
    bgfx::destroy(gl->u_scissorExtScale);
    bgfx::destroy(gl->u_extentRadius);
    bgfx::destroy(gl->u_params);
    bgfx::destroy(gl->s_tex);

    for (uint32_t ii = 0, num = gl->ntextures; ii < num; ++ii)
    {
        if (bgfx::isValid(gl->textures[ii].id)
        &&  (gl->textures[ii].flags & NVG_IMAGE_NODELETE) == 0)
        {
            bgfx::destroy(gl->textures[ii].id);
        }
    }

    bx::free(gl->allocator, gl->uniforms);
    bx::free(gl->allocator, gl->verts);
    bx::free(gl->allocator, gl->paths);
    bx::free(gl->allocator, gl->calls);
    bx::free(gl->allocator, gl->textures);
    bx::free(gl->allocator, gl);
}

} // namespace
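
// Public API. A minimal usage sketch (assuming bgfx is already initialized and
// view 0 is the target view):
//
//   NVGcontext* nvg = nvgCreate(1, 0);   // 1 = enable edge anti-aliasing
//   // per frame:
//   nvgBeginFrame(nvg, width, height, devicePixelRatio);
//   // ...regular NanoVG path/text calls...
//   nvgEndFrame(nvg);                    // invokes nvgRenderFlush above
//   // at shutdown:
//   nvgDelete(nvg);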
NVGcontext* nvgCreate(int32_t _edgeaa, bgfx::ViewId _viewId, bx::AllocatorI* _allocator)
{
    if (NULL == _allocator)
    {
        static bx::DefaultAllocator allocator;
        _allocator = &allocator;
    }

    struct NVGparams params;
    struct NVGcontext* ctx = NULL;
    struct GLNVGcontext* gl = (struct GLNVGcontext*)bx::alloc(_allocator, sizeof(struct GLNVGcontext) );
    if (gl == NULL)
    {
        goto error;
    }

    bx::memSet(gl, 0, sizeof(struct GLNVGcontext) );
    bx::memSet(&params, 0, sizeof(params) );

    params.renderCreate         = nvgRenderCreate;
    params.renderCreateTexture  = nvgRenderCreateTexture;
    params.renderDeleteTexture  = nvgRenderDeleteTexture;
    params.renderUpdateTexture  = nvgRenderUpdateTexture;
    params.renderGetTextureSize = nvgRenderGetTextureSize;
    params.renderViewport       = nvgRenderViewport;
    params.renderFlush          = nvgRenderFlush;
    params.renderFill           = nvgRenderFill;
    params.renderStroke         = nvgRenderStroke;
    params.renderTriangles      = nvgRenderTriangles;
    params.renderDelete         = nvgRenderDelete;
    params.userPtr              = gl;
    params.edgeAntiAlias        = _edgeaa;

    gl->allocator     = _allocator;
    gl->edgeAntiAlias = _edgeaa;
    gl->viewId        = _viewId;

    ctx = nvgCreateInternal(&params);
    if (ctx == NULL) goto error;

    return ctx;

error:
    // 'gl' is freed by nvgDeleteInternal.
    if (ctx != NULL)
    {
        nvgDeleteInternal(ctx);
    }

    return NULL;
}

NVGcontext* nvgCreate(int32_t _edgeaa, bgfx::ViewId _viewId)
{
    return nvgCreate(_edgeaa, _viewId, NULL);
}

void nvgDelete(NVGcontext* _ctx)
{
    nvgDeleteInternal(_ctx);
}

void nvgSetViewId(NVGcontext* _ctx, bgfx::ViewId _viewId)
{
    struct NVGparams* params = nvgInternalParams(_ctx);
    struct GLNVGcontext* gl = (struct GLNVGcontext*)params->userPtr;
    gl->viewId = _viewId;
}

uint16_t nvgGetViewId(struct NVGcontext* _ctx)
{
    struct NVGparams* params = nvgInternalParams(_ctx);
    struct GLNVGcontext* gl = (struct GLNVGcontext*)params->userPtr;
    return gl->viewId;
}

bgfx::TextureHandle nvglImageHandle(NVGcontext* _ctx, int32_t _image)
{
    GLNVGcontext* gl = (GLNVGcontext*)nvgInternalParams(_ctx)->userPtr;
    GLNVGtexture* tex = glnvg__findTexture(gl, _image);
    return tex->id;
}
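
// Render-target helpers: the framebuffer's color attachment is also registered
// as a NanoVG image (framebuffer->image) so it can be sampled with image paints.
// The backbuffer-ratio overload stores a 0x0 size because the texture tracks the
// backbuffer dimensions.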
NVGLUframebuffer* nvgluCreateFramebuffer(NVGcontext* ctx, int32_t width, int32_t height, int32_t imageFlags, bgfx::ViewId viewId)
{
    NVGLUframebuffer* framebuffer = nvgluCreateFramebuffer(ctx, width, height, imageFlags);

    if (framebuffer != NULL)
    {
        nvgluSetViewFramebuffer(viewId, framebuffer);
    }

    return framebuffer;
}

NVGLUframebuffer* nvgluCreateFramebuffer(NVGcontext* _ctx, int32_t _width, int32_t _height, int32_t _imageFlags)
{
    BX_UNUSED(_imageFlags);

    BX_ASSERT(_width  >= 0 && _width  <= bx::max<uint16_t>(), "Invalid tex width %d (max: %u)",  _width,  bx::max<uint16_t>());
    BX_ASSERT(_height >= 0 && _height <= bx::max<uint16_t>(), "Invalid tex height %d (max: %u)", _height, bx::max<uint16_t>());

    const uint16_t w = uint16_t(_width);
    const uint16_t h = uint16_t(_height);

    bgfx::TextureHandle textures[] =
    {
        bgfx::createTexture2D(w, h, false, 1, bgfx::TextureFormat::RGBA8, BGFX_TEXTURE_RT),
        bgfx::createTexture2D(w, h, false, 1, bgfx::TextureFormat::D24S8, BGFX_TEXTURE_RT | BGFX_TEXTURE_RT_WRITE_ONLY)
    };
    bgfx::FrameBufferHandle fbh = bgfx::createFrameBuffer(
          BX_COUNTOF(textures)
        , textures
        , true
        );

    if (!bgfx::isValid(fbh) )
    {
        return NULL;
    }

    struct NVGparams* params = nvgInternalParams(_ctx);
    struct GLNVGcontext* gl = (struct GLNVGcontext*)params->userPtr;
    struct GLNVGtexture* tex = glnvg__allocTexture(gl);

    if (NULL == tex)
    {
        bgfx::destroy(fbh);
        return NULL;
    }

    tex->width  = _width;
    tex->height = _height;
    tex->type   = NVG_TEXTURE_RGBA;
    tex->flags  = _imageFlags | NVG_IMAGE_PREMULTIPLIED;
    tex->id     = bgfx::getTexture(fbh);

    NVGLUframebuffer* framebuffer = BX_NEW(gl->allocator, NVGLUframebuffer);
    framebuffer->ctx    = _ctx;
    framebuffer->image  = tex->id.idx;
    framebuffer->handle = fbh;

    return framebuffer;
}

NVGLUframebuffer* nvgluCreateFramebuffer(NVGcontext* ctx, int32_t imageFlags, bgfx::ViewId viewId)
{
    NVGLUframebuffer* framebuffer = nvgluCreateFramebuffer(ctx, imageFlags);

    if (framebuffer != NULL)
    {
        nvgluSetViewFramebuffer(viewId, framebuffer);
    }

    return framebuffer;
}

NVGLUframebuffer* nvgluCreateFramebuffer(NVGcontext* _ctx, int32_t _imageFlags)
{
    BX_UNUSED(_imageFlags);

    bgfx::TextureHandle textures[] =
    {
        bgfx::createTexture2D(bgfx::BackbufferRatio::Equal, false, 1, bgfx::TextureFormat::RGBA8, BGFX_TEXTURE_RT),
        bgfx::createTexture2D(bgfx::BackbufferRatio::Equal, false, 1, bgfx::TextureFormat::D24S8, BGFX_TEXTURE_RT | BGFX_TEXTURE_RT_WRITE_ONLY)
    };
    bgfx::FrameBufferHandle fbh = bgfx::createFrameBuffer(
          BX_COUNTOF(textures)
        , textures
        , true
        );

    if (!bgfx::isValid(fbh) )
    {
        return NULL;
    }

    struct NVGparams* params = nvgInternalParams(_ctx);
    struct GLNVGcontext* gl = (struct GLNVGcontext*)params->userPtr;
    struct GLNVGtexture* tex = glnvg__allocTexture(gl);

    if (NULL == tex)
    {
        bgfx::destroy(fbh);
        return NULL;
    }

    tex->width  = 0;
    tex->height = 0;
    tex->type   = NVG_TEXTURE_RGBA;
    tex->flags  = _imageFlags | NVG_IMAGE_PREMULTIPLIED;
    tex->id     = bgfx::getTexture(fbh);

    NVGLUframebuffer* framebuffer = BX_NEW(gl->allocator, NVGLUframebuffer);
    framebuffer->ctx    = _ctx;
    framebuffer->image  = tex->id.idx;
    framebuffer->handle = fbh;

    return framebuffer;
}

void nvgluBindFramebuffer(NVGLUframebuffer* _framebuffer)
{
    static NVGcontext* s_prevCtx = NULL;
    static bgfx::ViewId s_prevViewId;

    if (_framebuffer != NULL)
    {
        s_prevCtx = _framebuffer->ctx;
        s_prevViewId = nvgGetViewId(_framebuffer->ctx);
        nvgSetViewId(_framebuffer->ctx, _framebuffer->viewId);
    }
    else if (s_prevCtx != NULL)
    {
        nvgSetViewId(s_prevCtx, s_prevViewId);
    }
}

void nvgluDeleteFramebuffer(NVGLUframebuffer* _framebuffer)
{
    if (_framebuffer == NULL)
    {
        return;
    }

    if (bgfx::isValid(_framebuffer->handle) )
    {
        bgfx::destroy(_framebuffer->handle);
    }

    struct NVGparams* params = nvgInternalParams(_framebuffer->ctx);
    struct GLNVGcontext* gl = (struct GLNVGcontext*)params->userPtr;
    glnvg__deleteTexture(gl, _framebuffer->image);
    bx::deleteObject(gl->allocator, _framebuffer);
}

void nvgluSetViewFramebuffer(bgfx::ViewId _viewId, NVGLUframebuffer* _framebuffer)
{
    _framebuffer->viewId = _viewId;
    bgfx::setViewFrameBuffer(_viewId, _framebuffer->handle);
    bgfx::setViewMode(_viewId, bgfx::ViewMode::Sequential);
}
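
// Registers an externally created bgfx texture as a NanoVG image. The context
// takes ownership on nvgDelete unless _flags contains NVG_IMAGE_NODELETE.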
int nvgCreateBgfxTexture(struct NVGcontext* _ctx
    , bgfx::TextureHandle _id
    , int _width
    , int _height
    , int _flags
    )
{
    struct NVGparams* params = nvgInternalParams(_ctx);
    struct GLNVGcontext* gl = (struct GLNVGcontext*)params->userPtr;
    struct GLNVGtexture* tex = glnvg__allocTexture(gl);
    tex->id     = _id;
    tex->width  = _width;
    tex->height = _height;
    tex->flags  = _flags;
    tex->type   = NVG_TEXTURE_RGBA;
    return tex->id.idx;
}