// nanovg_bgfx.cpp
/*
 * Copyright 2011-2015 Branimir Karadzic. All rights reserved.
 * License: http://www.opensource.org/licenses/BSD-2-Clause
 */
//
// Copyright (c) 2009-2013 Mikko Mononen [email protected]
//
// This software is provided 'as-is', without any express or implied
// warranty. In no event will the authors be held liable for any damages
// arising from the use of this software.
// Permission is granted to anyone to use this software for any purpose,
// including commercial applications, and to alter it and redistribute it
// freely, subject to the following restrictions:
// 1. The origin of this software must not be misrepresented; you must not
//    claim that you wrote the original software. If you use this software
//    in a product, an acknowledgment in the product documentation would be
//    appreciated but is not required.
// 2. Altered source versions must be plainly marked as such, and must not be
//    misrepresented as being the original software.
// 3. This notice may not be removed or altered from any source distribution.
//
#define NVG_ANTIALIAS 1

#include <stdlib.h>
#include <stdio.h>
#include <string.h>
#include <math.h>

#include "nanovg.h"

#include <bgfx/bgfx.h>
#include <bx/bx.h>

BX_PRAGMA_DIAGNOSTIC_IGNORED_MSVC(4244); // warning C4244: '=' : conversion from '' to '', possible loss of data

namespace
{

#include "vs_nanovg_fill.bin.h"
#include "fs_nanovg_fill.bin.h"

static bgfx::VertexDecl s_nvgDecl;

enum GLNVGshaderType
{
    NSVG_SHADER_FILLGRAD,
    NSVG_SHADER_FILLIMG,
    NSVG_SHADER_SIMPLE,
    NSVG_SHADER_IMG
};

// These are additional flags on top of NVGimageFlags.
enum NVGimageFlagsGL {
    NVG_IMAGE_NODELETE = 1<<16, // Do not delete GL texture handle.
};

struct GLNVGtexture
{
    bgfx::TextureHandle id;
    int width, height;
    int type;
    int flags;
};

enum GLNVGcallType
{
    GLNVG_FILL,
    GLNVG_CONVEXFILL,
    GLNVG_STROKE,
    GLNVG_TRIANGLES,
};

struct GLNVGcall
{
    int type;
    int image;
    int pathOffset;
    int pathCount;
    int vertexOffset;
    int vertexCount;
    int uniformOffset;
};

struct GLNVGpath
{
    int fillOffset;
    int fillCount;
    int strokeOffset;
    int strokeCount;
};

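// Per-call fragment shader parameters. Blocks of this struct are stored back to back
// in GLNVGcontext::uniforms, padded to a 16-byte multiple (gl->fragSize), and uploaded
// through the individual u_* uniforms in nvgRenderSetUniforms().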
struct GLNVGfragUniforms
{
    float scissorMat[12]; // matrices are actually 3 vec4s
    float paintMat[12];
    NVGcolor innerCol;
    NVGcolor outerCol;

    // u_scissorExtScale
    float scissorExt[2];
    float scissorScale[2];

    // u_extentRadius
    float extent[2];
    float radius;

    // u_params
    float feather;
    float strokeMult;
    float texType;
    float type;
};

struct GLNVGcontext
{
    bgfx::ProgramHandle prog;
    bgfx::UniformHandle u_scissorMat;
    bgfx::UniformHandle u_paintMat;
    bgfx::UniformHandle u_innerCol;
    bgfx::UniformHandle u_outerCol;
    bgfx::UniformHandle u_viewSize;
    bgfx::UniformHandle u_scissorExtScale;
    bgfx::UniformHandle u_extentRadius;
    bgfx::UniformHandle u_params;
    bgfx::UniformHandle u_halfTexel;
    bgfx::UniformHandle s_tex;

    uint64_t state;
    bgfx::TextureHandle th;
    bgfx::TransientVertexBuffer tvb;
    uint8_t viewid;

    struct GLNVGtexture* textures;
    float view[2];
    float surface[2];
    int ntextures;
    int ctextures;
    int textureId;
    int vertBuf;
    int fragSize;
    int edgeAntiAlias;

    // Per frame buffers
    struct GLNVGcall* calls;
    int ccalls;
    int ncalls;
    struct GLNVGpath* paths;
    int cpaths;
    int npaths;
    struct NVGvertex* verts;
    int cverts;
    int nverts;
    unsigned char* uniforms;
    int cuniforms;
    int nuniforms;
};

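// Texture pool: entries are addressed by the bgfx handle index, free slots are marked
// with an invalid handle, and the array grows by doubling.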
static struct GLNVGtexture* glnvg__allocTexture(struct GLNVGcontext* gl)
{
    struct GLNVGtexture* tex = NULL;
    int i;

    for (i = 0; i < gl->ntextures; i++)
    {
        if (gl->textures[i].id.idx == bgfx::invalidHandle)
        {
            tex = &gl->textures[i];
            break;
        }
    }

    if (tex == NULL)
    {
        if (gl->ntextures+1 > gl->ctextures)
        {
            int old = gl->ctextures;
            gl->ctextures = (gl->ctextures == 0) ? 2 : gl->ctextures*2;
            gl->textures = (struct GLNVGtexture*)realloc(gl->textures, sizeof(struct GLNVGtexture)*gl->ctextures);
            if (gl->textures == NULL)
            {
                return NULL;
            }
            // Mark the newly added slots as free (invalid handle).
            memset(&gl->textures[old], 0xff, (gl->ctextures-old)*sizeof(struct GLNVGtexture) );
        }

        tex = &gl->textures[gl->ntextures++];
    }

    memset(tex, 0, sizeof(*tex) );
    return tex;
}

static struct GLNVGtexture* glnvg__findTexture(struct GLNVGcontext* gl, int id)
{
    int i;
    for (i = 0; i < gl->ntextures; i++)
    {
        if (gl->textures[i].id.idx == id)
        {
            return &gl->textures[i];
        }
    }

    return NULL;
}

static int glnvg__deleteTexture(struct GLNVGcontext* gl, int id)
{
    for (int ii = 0; ii < gl->ntextures; ii++)
    {
        if (gl->textures[ii].id.idx == id)
        {
            if (bgfx::isValid(gl->textures[ii].id)
            &&  (gl->textures[ii].flags & NVG_IMAGE_NODELETE) == 0)
            {
                bgfx::destroyTexture(gl->textures[ii].id);
            }
            memset(&gl->textures[ii], 0, sizeof(gl->textures[ii]) );
            gl->textures[ii].id.idx = bgfx::invalidHandle;
            return 1;
        }
    }

    return 0;
}

static int nvgRenderCreate(void* _userPtr)
{
    struct GLNVGcontext* gl = (struct GLNVGcontext*)_userPtr;

    const bgfx::Memory* vs_nanovg_fill;
    const bgfx::Memory* fs_nanovg_fill;

    switch (bgfx::getRendererType() )
    {
    case bgfx::RendererType::Direct3D9:
        vs_nanovg_fill = bgfx::makeRef(vs_nanovg_fill_dx9, sizeof(vs_nanovg_fill_dx9) );
        fs_nanovg_fill = bgfx::makeRef(fs_nanovg_fill_dx9, sizeof(fs_nanovg_fill_dx9) );
        break;

    case bgfx::RendererType::Direct3D11:
    case bgfx::RendererType::Direct3D12:
        vs_nanovg_fill = bgfx::makeRef(vs_nanovg_fill_dx11, sizeof(vs_nanovg_fill_dx11) );
        fs_nanovg_fill = bgfx::makeRef(fs_nanovg_fill_dx11, sizeof(fs_nanovg_fill_dx11) );
        break;

    case bgfx::RendererType::Metal:
        vs_nanovg_fill = bgfx::makeRef(vs_nanovg_fill_mtl, sizeof(vs_nanovg_fill_mtl) );
        fs_nanovg_fill = bgfx::makeRef(fs_nanovg_fill_mtl, sizeof(fs_nanovg_fill_mtl) );
        break;

    default:
        vs_nanovg_fill = bgfx::makeRef(vs_nanovg_fill_glsl, sizeof(vs_nanovg_fill_glsl) );
        fs_nanovg_fill = bgfx::makeRef(fs_nanovg_fill_glsl, sizeof(fs_nanovg_fill_glsl) );
        break;
    }

    gl->prog = bgfx::createProgram(
          bgfx::createShader(vs_nanovg_fill)
        , bgfx::createShader(fs_nanovg_fill)
        , true
        );

    gl->u_scissorMat = bgfx::createUniform("u_scissorMat", bgfx::UniformType::Mat3);
    gl->u_paintMat = bgfx::createUniform("u_paintMat", bgfx::UniformType::Mat3);
    gl->u_innerCol = bgfx::createUniform("u_innerCol", bgfx::UniformType::Vec4);
    gl->u_outerCol = bgfx::createUniform("u_outerCol", bgfx::UniformType::Vec4);
    gl->u_viewSize = bgfx::createUniform("u_viewSize", bgfx::UniformType::Vec4);
    gl->u_scissorExtScale = bgfx::createUniform("u_scissorExtScale", bgfx::UniformType::Vec4);
    gl->u_extentRadius = bgfx::createUniform("u_extentRadius", bgfx::UniformType::Vec4);
    gl->u_params = bgfx::createUniform("u_params", bgfx::UniformType::Vec4);
    gl->s_tex = bgfx::createUniform("s_tex", bgfx::UniformType::Int1);

    if (bgfx::getRendererType() == bgfx::RendererType::Direct3D9)
    {
        gl->u_halfTexel = bgfx::createUniform("u_halfTexel", bgfx::UniformType::Vec4);
    }
    else
    {
        gl->u_halfTexel.idx = bgfx::invalidHandle;
    }

    s_nvgDecl
        .begin()
        .add(bgfx::Attrib::Position, 2, bgfx::AttribType::Float)
        .add(bgfx::Attrib::TexCoord0, 2, bgfx::AttribType::Float)
        .end();

    int align = 16;
    gl->fragSize = sizeof(struct GLNVGfragUniforms) + align - sizeof(struct GLNVGfragUniforms) % align;

    return 1;
}

static int nvgRenderCreateTexture(void* _userPtr, int _type, int _width, int _height, int _flags, const unsigned char* _rgba)
{
    struct GLNVGcontext* gl = (struct GLNVGcontext*)_userPtr;
    struct GLNVGtexture* tex = glnvg__allocTexture(gl);
    if (tex == NULL)
    {
        return 0;
    }

    tex->width = _width;
    tex->height = _height;
    tex->type = _type;
    tex->flags = _flags;

    uint32_t bytesPerPixel = NVG_TEXTURE_RGBA == tex->type ? 4 : 1;
    uint32_t pitch = tex->width * bytesPerPixel;

    const bgfx::Memory* mem = NULL;
    if (NULL != _rgba)
    {
        mem = bgfx::copy(_rgba, tex->height * pitch);
    }

    tex->id = bgfx::createTexture2D(tex->width
        , tex->height
        , 1
        , NVG_TEXTURE_RGBA == _type ? bgfx::TextureFormat::RGBA8 : bgfx::TextureFormat::R8
        , BGFX_TEXTURE_NONE
        , mem
        );

    return tex->id.idx;
}

static int nvgRenderDeleteTexture(void* _userPtr, int image)
{
    struct GLNVGcontext* gl = (struct GLNVGcontext*)_userPtr;
    return glnvg__deleteTexture(gl, image);
}

static int nvgRenderUpdateTexture(void* _userPtr, int image, int x, int y, int w, int h, const unsigned char* data)
{
    struct GLNVGcontext* gl = (struct GLNVGcontext*)_userPtr;
    struct GLNVGtexture* tex = glnvg__findTexture(gl, image);
    if (tex == NULL)
    {
        return 0;
    }

    uint32_t bytesPerPixel = NVG_TEXTURE_RGBA == tex->type ? 4 : 1;
    uint32_t pitch = tex->width * bytesPerPixel;
    bgfx::updateTexture2D(tex->id, 0, x, y, w, h
        , bgfx::makeRef(data + y*pitch + x*bytesPerPixel, h*pitch)
        , pitch
        );

    return 1;
}

static int nvgRenderGetTextureSize(void* _userPtr, int image, int* w, int* h)
{
    struct GLNVGcontext* gl = (struct GLNVGcontext*)_userPtr;
    struct GLNVGtexture* tex = glnvg__findTexture(gl, image);
    if (tex == NULL || !bgfx::isValid(tex->id) )
    {
        return 0;
    }

    *w = tex->width;
    *h = tex->height;
    return 1;
}

static void glnvg__xformIdentity(float* t)
{
    t[0] = 1.0f; t[1] = 0.0f;
    t[2] = 0.0f; t[3] = 1.0f;
    t[4] = 0.0f; t[5] = 0.0f;
}

static void glnvg__xformInverse(float* inv, float* t)
{
    double invdet, det = (double)t[0] * t[3] - (double)t[2] * t[1];
    if (det > -1e-6 && det < 1e-6) {
        // Near-singular transform: fall back to identity in the output.
        glnvg__xformIdentity(inv);
        return;
    }

    invdet = 1.0 / det;
    inv[0] = (float)(t[3] * invdet);
    inv[2] = (float)(-t[2] * invdet);
    inv[4] = (float)( ((double)t[2] * t[5] - (double)t[3] * t[4]) * invdet);
    inv[1] = (float)(-t[1] * invdet);
    inv[3] = (float)(t[0] * invdet);
    inv[5] = (float)( ((double)t[1] * t[4] - (double)t[0] * t[5]) * invdet);
}

static void glnvg__xformToMat3x4(float* m3, float* t)
{
    m3[0] = t[0];
    m3[1] = t[1];
    m3[2] = 0.0f;
    m3[3] = 0.0f;
    m3[4] = t[2];
    m3[5] = t[3];
    m3[6] = 0.0f;
    m3[7] = 0.0f;
    m3[8] = t[4];
    m3[9] = t[5];
    m3[10] = 1.0f;
    m3[11] = 0.0f;
}

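// Fill the fragment uniform block from an NVGpaint/NVGscissor pair. For image paints this
// also selects the texture (gl->th) that will be bound when the call is submitted.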
static int glnvg__convertPaint(struct GLNVGcontext* gl, struct GLNVGfragUniforms* frag, struct NVGpaint* paint,
        struct NVGscissor* scissor, float width, float fringe)
{
    struct GLNVGtexture* tex = NULL;
    float invxform[6] = {};

    memset(frag, 0, sizeof(*frag) );

    frag->innerCol = paint->innerColor;
    frag->outerCol = paint->outerColor;

    glnvg__xformInverse(invxform, paint->xform);
    glnvg__xformToMat3x4(frag->paintMat, invxform);

    if (scissor->extent[0] < 0.5f || scissor->extent[1] < 0.5f)
    {
        memset(frag->scissorMat, 0, sizeof(frag->scissorMat) );
        frag->scissorExt[0] = 1.0f;
        frag->scissorExt[1] = 1.0f;
        frag->scissorScale[0] = 1.0f;
        frag->scissorScale[1] = 1.0f;
    }
    else
    {
        glnvg__xformInverse(invxform, scissor->xform);
        glnvg__xformToMat3x4(frag->scissorMat, invxform);
        frag->scissorExt[0] = scissor->extent[0];
        frag->scissorExt[1] = scissor->extent[1];
        frag->scissorScale[0] = sqrtf(scissor->xform[0]*scissor->xform[0] + scissor->xform[2]*scissor->xform[2]) / fringe;
        frag->scissorScale[1] = sqrtf(scissor->xform[1]*scissor->xform[1] + scissor->xform[3]*scissor->xform[3]) / fringe;
    }

    memcpy(frag->extent, paint->extent, sizeof(frag->extent) );
    frag->strokeMult = (width*0.5f + fringe*0.5f) / fringe;

    bgfx::TextureHandle invalid = BGFX_INVALID_HANDLE;
    gl->th = invalid;
    if (paint->image != 0)
    {
        tex = glnvg__findTexture(gl, paint->image);
        if (tex == NULL)
        {
            return 0;
        }
        frag->type = NSVG_SHADER_FILLIMG;
        frag->texType = tex->type == NVG_TEXTURE_RGBA ? 0.0f : 1.0f;
        gl->th = tex->id;
    }
    else
    {
        frag->type = NSVG_SHADER_FILLGRAD;
        frag->radius = paint->radius;
        frag->feather = paint->feather;
    }

    return 1;
}

static void glnvg__mat3(float* dst, float* src)
{
    dst[0] = src[0];
    dst[1] = src[1];
    dst[2] = src[2];
    dst[3] = src[4];
    dst[4] = src[5];
    dst[5] = src[6];
    dst[6] = src[8];
    dst[7] = src[9];
    dst[8] = src[10];
}

static struct GLNVGfragUniforms* nvg__fragUniformPtr(struct GLNVGcontext* gl, int i)
{
    return (struct GLNVGfragUniforms*)&gl->uniforms[i];
}

static void nvgRenderSetUniforms(struct GLNVGcontext* gl, int uniformOffset, int image)
{
    struct GLNVGfragUniforms* frag = nvg__fragUniformPtr(gl, uniformOffset);
    float tmp[9]; // Maybe there's a way to get rid of this...
    glnvg__mat3(tmp, frag->scissorMat);
    bgfx::setUniform(gl->u_scissorMat, tmp);

    glnvg__mat3(tmp, frag->paintMat);
    bgfx::setUniform(gl->u_paintMat, tmp);

    bgfx::setUniform(gl->u_innerCol, frag->innerCol.rgba);
    bgfx::setUniform(gl->u_outerCol, frag->outerCol.rgba);
    bgfx::setUniform(gl->u_scissorExtScale, &frag->scissorExt[0]);
    bgfx::setUniform(gl->u_extentRadius, &frag->extent[0]);
    bgfx::setUniform(gl->u_params, &frag->feather);

    bgfx::TextureHandle handle = BGFX_INVALID_HANDLE;

    if (image != 0)
    {
        struct GLNVGtexture* tex = glnvg__findTexture(gl, image);
        if (tex != NULL)
        {
            handle = tex->id;

            if (bgfx::isValid(gl->u_halfTexel) )
            {
                float halfTexel[4] = { 0.5f / tex->width, 0.5f / tex->height };
                bgfx::setUniform(gl->u_halfTexel, halfTexel);
            }
        }
    }

    gl->th = handle;
}

static void nvgRenderViewport(void* _userPtr, int width, int height, int surfaceWidth, int surfaceHeight)
{
    struct GLNVGcontext* gl = (struct GLNVGcontext*)_userPtr;
    gl->view[0] = (float)width;
    gl->view[1] = (float)height;
    gl->surface[0] = (float)surfaceWidth;
    gl->surface[1] = (float)surfaceHeight;
    bgfx::setViewRect(gl->viewid, 0, 0, width, height);
}

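// bgfx has no triangle-fan primitive, so emit an equivalent triangle list into a
// transient index buffer for _count contiguous vertices starting at _start.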
static void fan(uint32_t _start, uint32_t _count)
{
    uint32_t numTris = _count-2;

    bgfx::TransientIndexBuffer tib;
    bgfx::allocTransientIndexBuffer(&tib, numTris*3);
    uint16_t* data = (uint16_t*)tib.data;
    for (uint32_t ii = 0; ii < numTris; ++ii)
    {
        data[ii*3+0] = _start;
        data[ii*3+1] = _start + ii + 1;
        data[ii*3+2] = _start + ii + 2;
    }

    bgfx::setIndexBuffer(&tib);
}

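// Concave fill, stencil-then-cover: first build coverage in the stencil buffer with
// front-face increment / back-face decrement and color writes disabled, optionally draw
// anti-aliased fringes where the stencil is still zero, then cover with the bounding quad
// wherever the stencil is non-zero, clearing it in the process.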
static void glnvg__fill(struct GLNVGcontext* gl, struct GLNVGcall* call)
{
    struct GLNVGpath* paths = &gl->paths[call->pathOffset];
    int i, npaths = call->pathCount;

    // Set uniforms for the simple (stencil-only) shader.
    nvgRenderSetUniforms(gl, call->uniformOffset, 0);

    for (i = 0; i < npaths; i++)
    {
        if (2 < paths[i].fillCount)
        {
            bgfx::setState(0);
            bgfx::setStencil(0
                | BGFX_STENCIL_TEST_ALWAYS
                | BGFX_STENCIL_FUNC_RMASK(0xff)
                | BGFX_STENCIL_OP_FAIL_S_KEEP
                | BGFX_STENCIL_OP_FAIL_Z_KEEP
                | BGFX_STENCIL_OP_PASS_Z_INCR
                , 0
                | BGFX_STENCIL_TEST_ALWAYS
                | BGFX_STENCIL_FUNC_RMASK(0xff)
                | BGFX_STENCIL_OP_FAIL_S_KEEP
                | BGFX_STENCIL_OP_FAIL_Z_KEEP
                | BGFX_STENCIL_OP_PASS_Z_DECR
                );
            bgfx::setVertexBuffer(&gl->tvb);
            bgfx::setTexture(0, gl->s_tex, gl->th);
            fan(paths[i].fillOffset, paths[i].fillCount);
            bgfx::submit(gl->viewid, gl->prog);
        }
    }

    // Draw anti-aliased pixels.
    nvgRenderSetUniforms(gl, call->uniformOffset + gl->fragSize, call->image);

    if (gl->edgeAntiAlias)
    {
        // Draw fringes
        for (i = 0; i < npaths; i++)
        {
            bgfx::setState(gl->state
                | BGFX_STATE_PT_TRISTRIP
                );
            bgfx::setStencil(0
                | BGFX_STENCIL_TEST_EQUAL
                | BGFX_STENCIL_FUNC_RMASK(0xff)
                | BGFX_STENCIL_OP_FAIL_S_KEEP
                | BGFX_STENCIL_OP_FAIL_Z_KEEP
                | BGFX_STENCIL_OP_PASS_Z_KEEP
                );
            bgfx::setVertexBuffer(&gl->tvb, paths[i].strokeOffset, paths[i].strokeCount);
            bgfx::setTexture(0, gl->s_tex, gl->th);
            bgfx::submit(gl->viewid, gl->prog);
        }
    }

    // Draw fill
    bgfx::setState(gl->state);
    bgfx::setVertexBuffer(&gl->tvb, call->vertexOffset, call->vertexCount);
    bgfx::setTexture(0, gl->s_tex, gl->th);
    bgfx::setStencil(0
        | BGFX_STENCIL_TEST_NOTEQUAL
        | BGFX_STENCIL_FUNC_RMASK(0xff)
        | BGFX_STENCIL_OP_FAIL_S_ZERO
        | BGFX_STENCIL_OP_FAIL_Z_ZERO
        | BGFX_STENCIL_OP_PASS_Z_ZERO
        );
    bgfx::submit(gl->viewid, gl->prog);
}

static void glnvg__convexFill(struct GLNVGcontext* gl, struct GLNVGcall* call)
{
    struct GLNVGpath* paths = &gl->paths[call->pathOffset];
    int i, npaths = call->pathCount;

    nvgRenderSetUniforms(gl, call->uniformOffset, call->image);

    for (i = 0; i < npaths; i++)
    {
        if (paths[i].fillCount == 0) continue;
        bgfx::setState(gl->state);
        bgfx::setVertexBuffer(&gl->tvb);
        bgfx::setTexture(0, gl->s_tex, gl->th);
        fan(paths[i].fillOffset, paths[i].fillCount);
        bgfx::submit(gl->viewid, gl->prog);
    }

    if (gl->edgeAntiAlias)
    {
        // Draw fringes
        for (i = 0; i < npaths; i++)
        {
            bgfx::setState(gl->state
                | BGFX_STATE_PT_TRISTRIP
                );
            bgfx::setVertexBuffer(&gl->tvb, paths[i].strokeOffset, paths[i].strokeCount);
            bgfx::setTexture(0, gl->s_tex, gl->th);
            bgfx::submit(gl->viewid, gl->prog);
        }
    }
}

static void glnvg__stroke(struct GLNVGcontext* gl, struct GLNVGcall* call)
{
    struct GLNVGpath* paths = &gl->paths[call->pathOffset];
    int npaths = call->pathCount, i;

    nvgRenderSetUniforms(gl, call->uniformOffset, call->image);

    // Draw Strokes
    for (i = 0; i < npaths; i++)
    {
        bgfx::setState(gl->state
            | BGFX_STATE_PT_TRISTRIP
            );
        bgfx::setVertexBuffer(&gl->tvb, paths[i].strokeOffset, paths[i].strokeCount);
        bgfx::setTexture(0, gl->s_tex, gl->th);
        bgfx::submit(gl->viewid, gl->prog);
    }
}

static void glnvg__triangles(struct GLNVGcontext* gl, struct GLNVGcall* call)
{
    if (3 <= call->vertexCount)
    {
        nvgRenderSetUniforms(gl, call->uniformOffset, call->image);

        bgfx::setState(gl->state);
        bgfx::setVertexBuffer(&gl->tvb, call->vertexOffset, call->vertexCount);
        bgfx::setTexture(0, gl->s_tex, gl->th);
        bgfx::submit(gl->viewid, gl->prog);
    }
}

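// Flush the frame: copy all queued vertices into a single transient vertex buffer, set up
// the shared blend/write state, then replay every recorded call into bgfx view gl->viewid.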
static void nvgRenderFlush(void* _userPtr)
{
    struct GLNVGcontext* gl = (struct GLNVGcontext*)_userPtr;

    if (gl->ncalls > 0)
    {
        bgfx::allocTransientVertexBuffer(&gl->tvb, gl->nverts, s_nvgDecl);

        int allocated = gl->tvb.size/gl->tvb.stride;
        if (allocated < gl->nverts)
        {
            BX_WARN(false, "Vertex count truncated due to transient vertex buffer overflow.");
            gl->nverts = allocated;
        }

        memcpy(gl->tvb.data, gl->verts, gl->nverts * sizeof(struct NVGvertex) );

        gl->state = 0
            | BGFX_STATE_RGB_WRITE
            | BGFX_STATE_ALPHA_WRITE
            ;

        // if (alphaBlend == NVG_PREMULTIPLIED_ALPHA)
        // {
        //     gl->state |= BGFX_STATE_BLEND_FUNC_SEPARATE(
        //           BGFX_STATE_BLEND_SRC_ALPHA, BGFX_STATE_BLEND_INV_SRC_ALPHA
        //         , BGFX_STATE_BLEND_ONE, BGFX_STATE_BLEND_INV_SRC_ALPHA
        //         );
        // }
        // else
        {
            gl->state |= BGFX_STATE_BLEND_FUNC(
                BGFX_STATE_BLEND_SRC_ALPHA, BGFX_STATE_BLEND_INV_SRC_ALPHA
                );
        }

        bgfx::setUniform(gl->u_viewSize, gl->surface);

        for (uint32_t ii = 0, num = gl->ncalls; ii < num; ++ii)
        {
            struct GLNVGcall* call = &gl->calls[ii];
            switch (call->type)
            {
            case GLNVG_FILL:
                glnvg__fill(gl, call);
                break;

            case GLNVG_CONVEXFILL:
                glnvg__convexFill(gl, call);
                break;

            case GLNVG_STROKE:
                glnvg__stroke(gl, call);
                break;

            case GLNVG_TRIANGLES:
                glnvg__triangles(gl, call);
                break;
            }
        }
    }

    // Reset calls
    gl->nverts = 0;
    gl->npaths = 0;
    gl->ncalls = 0;
    gl->nuniforms = 0;
}

static int glnvg__maxVertCount(const struct NVGpath* paths, int npaths)
{
    int i, count = 0;
    for (i = 0; i < npaths; i++)
    {
        count += paths[i].nfill;
        count += paths[i].nstroke;
    }

    return count;
}

static int glnvg__maxi(int a, int b) { return a > b ? a : b; }

static struct GLNVGcall* glnvg__allocCall(struct GLNVGcontext* gl)
{
    struct GLNVGcall* ret = NULL;
    if (gl->ncalls+1 > gl->ccalls)
    {
        gl->ccalls = gl->ccalls == 0 ? 32 : gl->ccalls * 2;
        gl->calls = (struct GLNVGcall*)realloc(gl->calls, sizeof(struct GLNVGcall) * gl->ccalls);
    }

    ret = &gl->calls[gl->ncalls++];
    memset(ret, 0, sizeof(struct GLNVGcall) );
    return ret;
}

static int glnvg__allocPaths(struct GLNVGcontext* gl, int n)
{
    int ret = 0;
    if (gl->npaths + n > gl->cpaths) {
        GLNVGpath* paths;
        int cpaths = glnvg__maxi(gl->npaths + n, 128) + gl->cpaths / 2; // 1.5x Overallocate
        paths = (GLNVGpath*)realloc(gl->paths, sizeof(GLNVGpath) * cpaths);
        if (paths == NULL) return -1;
        gl->paths = paths;
        gl->cpaths = cpaths;
    }

    ret = gl->npaths;
    gl->npaths += n;
    return ret;
}

static int glnvg__allocVerts(GLNVGcontext* gl, int n)
{
    int ret = 0;
    if (gl->nverts+n > gl->cverts)
    {
        NVGvertex* verts;
        int cverts = glnvg__maxi(gl->nverts + n, 4096) + gl->cverts/2; // 1.5x Overallocate
        verts = (NVGvertex*)realloc(gl->verts, sizeof(NVGvertex) * cverts);
        if (verts == NULL) return -1;
        gl->verts = verts;
        gl->cverts = cverts;
    }

    ret = gl->nverts;
    gl->nverts += n;
    return ret;
}

static int glnvg__allocFragUniforms(struct GLNVGcontext* gl, int n)
{
    int ret = 0, structSize = gl->fragSize;
    if (gl->nuniforms+n > gl->cuniforms)
    {
        gl->cuniforms = gl->cuniforms == 0 ? glnvg__maxi(n, 32) : gl->cuniforms * 2;
        gl->uniforms = (unsigned char*)realloc(gl->uniforms, gl->cuniforms * structSize);
    }

    ret = gl->nuniforms * structSize;
    gl->nuniforms += n;
    return ret;
}

static void glnvg__vset(struct NVGvertex* vtx, float x, float y, float u, float v)
{
    vtx->x = x;
    vtx->y = y;
    vtx->u = u;
    vtx->v = v;
}

static void nvgRenderFill(void* _userPtr, struct NVGpaint* paint, struct NVGscissor* scissor, float fringe,
        const float* bounds, const struct NVGpath* paths, int npaths)
{
    struct GLNVGcontext* gl = (struct GLNVGcontext*)_userPtr;

    struct GLNVGcall* call = glnvg__allocCall(gl);
    struct NVGvertex* quad;
    struct GLNVGfragUniforms* frag;
    int i, maxverts, offset;

    call->type = GLNVG_FILL;
    call->pathOffset = glnvg__allocPaths(gl, npaths);
    call->pathCount = npaths;
    call->image = paint->image;

    if (npaths == 1 && paths[0].convex)
    {
        call->type = GLNVG_CONVEXFILL;
    }

    // Allocate vertices for all the paths.
    maxverts = glnvg__maxVertCount(paths, npaths) + 6;
    offset = glnvg__allocVerts(gl, maxverts);

    for (i = 0; i < npaths; i++)
    {
        struct GLNVGpath* copy = &gl->paths[call->pathOffset + i];
        const struct NVGpath* path = &paths[i];
        memset(copy, 0, sizeof(struct GLNVGpath) );
        if (path->nfill > 0)
        {
            copy->fillOffset = offset;
            copy->fillCount = path->nfill;
            memcpy(&gl->verts[offset], path->fill, sizeof(struct NVGvertex) * path->nfill);
            offset += path->nfill;
        }

        if (path->nstroke > 0)
        {
            copy->strokeOffset = offset;
            copy->strokeCount = path->nstroke;
            memcpy(&gl->verts[offset], path->stroke, sizeof(struct NVGvertex) * path->nstroke);
            offset += path->nstroke;
        }
    }

    // Quad
    call->vertexOffset = offset;
    call->vertexCount = 6;
    quad = &gl->verts[call->vertexOffset];
    glnvg__vset(&quad[0], bounds[0], bounds[3], 0.5f, 1.0f);
    glnvg__vset(&quad[1], bounds[2], bounds[3], 0.5f, 1.0f);
    glnvg__vset(&quad[2], bounds[2], bounds[1], 0.5f, 1.0f);
    glnvg__vset(&quad[3], bounds[0], bounds[3], 0.5f, 1.0f);
    glnvg__vset(&quad[4], bounds[2], bounds[1], 0.5f, 1.0f);
    glnvg__vset(&quad[5], bounds[0], bounds[1], 0.5f, 1.0f);

    // Setup uniforms for draw calls
    if (call->type == GLNVG_FILL)
    {
        call->uniformOffset = glnvg__allocFragUniforms(gl, 2);

        // Simple shader for stencil
        frag = nvg__fragUniformPtr(gl, call->uniformOffset);
        memset(frag, 0, sizeof(*frag) );
        frag->type = NSVG_SHADER_SIMPLE;

        // Fill shader
        glnvg__convertPaint(gl, nvg__fragUniformPtr(gl, call->uniformOffset + gl->fragSize), paint, scissor, fringe, fringe);
    }
    else
    {
        call->uniformOffset = glnvg__allocFragUniforms(gl, 1);

        // Fill shader
        glnvg__convertPaint(gl, nvg__fragUniformPtr(gl, call->uniformOffset), paint, scissor, fringe, fringe);
    }
}

static void nvgRenderStroke(void* _userPtr, struct NVGpaint* paint, struct NVGscissor* scissor, float fringe,
        float strokeWidth, const struct NVGpath* paths, int npaths)
{
    struct GLNVGcontext* gl = (struct GLNVGcontext*)_userPtr;

    struct GLNVGcall* call = glnvg__allocCall(gl);
    int i, maxverts, offset;

    call->type = GLNVG_STROKE;
    call->pathOffset = glnvg__allocPaths(gl, npaths);
    call->pathCount = npaths;
    call->image = paint->image;

    // Allocate vertices for all the paths.
    maxverts = glnvg__maxVertCount(paths, npaths);
    offset = glnvg__allocVerts(gl, maxverts);

    for (i = 0; i < npaths; i++)
    {
        struct GLNVGpath* copy = &gl->paths[call->pathOffset + i];
        const struct NVGpath* path = &paths[i];
        memset(copy, 0, sizeof(struct GLNVGpath) );
        if (path->nstroke)
        {
            copy->strokeOffset = offset;
            copy->strokeCount = path->nstroke;
            memcpy(&gl->verts[offset], path->stroke, sizeof(struct NVGvertex) * path->nstroke);
            offset += path->nstroke;
        }
    }

    // Fill shader
    call->uniformOffset = glnvg__allocFragUniforms(gl, 1);
    glnvg__convertPaint(gl, nvg__fragUniformPtr(gl, call->uniformOffset), paint, scissor, strokeWidth, fringe);
}

static void nvgRenderTriangles(void* _userPtr, struct NVGpaint* paint, struct NVGscissor* scissor,
        const struct NVGvertex* verts, int nverts)
{
    struct GLNVGcontext* gl = (struct GLNVGcontext*)_userPtr;
    struct GLNVGcall* call = glnvg__allocCall(gl);
    struct GLNVGfragUniforms* frag;

    call->type = GLNVG_TRIANGLES;
    call->image = paint->image;

    // Allocate vertices for all the paths.
    call->vertexOffset = glnvg__allocVerts(gl, nverts);
    call->vertexCount = nverts;
    memcpy(&gl->verts[call->vertexOffset], verts, sizeof(struct NVGvertex) * nverts);

    // Fill shader
    call->uniformOffset = glnvg__allocFragUniforms(gl, 1);
    frag = nvg__fragUniformPtr(gl, call->uniformOffset);
    glnvg__convertPaint(gl, frag, paint, scissor, 1.0f, 1.0f);
    frag->type = NSVG_SHADER_IMG;
}

static void nvgRenderDelete(void* _userPtr)
{
    struct GLNVGcontext* gl = (struct GLNVGcontext*)_userPtr;
    if (gl == NULL)
    {
        return;
    }

    bgfx::destroyProgram(gl->prog);
    bgfx::destroyUniform(gl->u_scissorMat);
    bgfx::destroyUniform(gl->u_paintMat);
    bgfx::destroyUniform(gl->u_innerCol);
    bgfx::destroyUniform(gl->u_outerCol);
    bgfx::destroyUniform(gl->u_viewSize);
    bgfx::destroyUniform(gl->u_scissorExtScale);
    bgfx::destroyUniform(gl->u_extentRadius);
    bgfx::destroyUniform(gl->u_params);
    bgfx::destroyUniform(gl->s_tex);

    if (bgfx::isValid(gl->u_halfTexel) )
    {
        bgfx::destroyUniform(gl->u_halfTexel);
    }

    for (uint32_t ii = 0, num = gl->ntextures; ii < num; ++ii)
    {
        if (bgfx::isValid(gl->textures[ii].id)
        &&  (gl->textures[ii].flags & NVG_IMAGE_NODELETE) == 0)
        {
            bgfx::destroyTexture(gl->textures[ii].id);
        }
    }

    free(gl->textures);
    free(gl);
}

} // namespace

NVGcontext* nvgCreate(int edgeaa, unsigned char viewid)
{
    struct NVGparams params;
    struct NVGcontext* ctx = NULL;
    struct GLNVGcontext* gl = (struct GLNVGcontext*)malloc(sizeof(struct GLNVGcontext) );
    if (gl == NULL) goto error;
    memset(gl, 0, sizeof(struct GLNVGcontext) );

    memset(&params, 0, sizeof(params) );
    params.renderCreate = nvgRenderCreate;
    params.renderCreateTexture = nvgRenderCreateTexture;
    params.renderDeleteTexture = nvgRenderDeleteTexture;
    params.renderUpdateTexture = nvgRenderUpdateTexture;
    params.renderGetTextureSize = nvgRenderGetTextureSize;
    params.renderViewport = nvgRenderViewport;
    params.renderFlush = nvgRenderFlush;
    params.renderFill = nvgRenderFill;
    params.renderStroke = nvgRenderStroke;
    params.renderTriangles = nvgRenderTriangles;
    params.renderDelete = nvgRenderDelete;
    params.userPtr = gl;
    params.edgeAntiAlias = edgeaa;

    gl->edgeAntiAlias = edgeaa;
    gl->viewid = uint8_t(viewid);

    ctx = nvgCreateInternal(&params);
    if (ctx == NULL) goto error;

    return ctx;

error:
    // 'gl' is freed by nvgDeleteInternal.
    if (ctx != NULL)
    {
        nvgDeleteInternal(ctx);
    }

    return NULL;
}

void nvgViewId(struct NVGcontext* ctx, unsigned char viewid)
{
    struct NVGparams* params = nvgInternalParams(ctx);
    struct GLNVGcontext* gl = (struct GLNVGcontext*)params->userPtr;
    gl->viewid = uint8_t(viewid);
}

void nvgDelete(struct NVGcontext* ctx)
{
    nvgDeleteInternal(ctx);
}

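// ---------------------------------------------------------------------------
// Minimal usage sketch (not part of this backend, shown as comments so the file
// still compiles unchanged). It relies only on the entry points defined above
// plus the regular nanovg drawing API; the exact nvgBeginFrame() arguments
// depend on the nanovg.h variant bundled with this backend, so they are elided.
//
//     NVGcontext* nvg = nvgCreate(1 /*edge AA*/, 0 /*bgfx view id*/);
//
//     // Each frame, after bgfx::setViewRect() for view 0:
//     //   nvgBeginFrame(nvg, ...);          // begin a NanoVG frame
//     //   nvgBeginPath(nvg);
//     //   nvgRect(nvg, 10.0f, 10.0f, 100.0f, 50.0f);
//     //   nvgFillColor(nvg, nvgRGBA(255, 0, 0, 255) );
//     //   nvgFill(nvg);
//     //   nvgEndFrame(nvg);                 // flushes recorded calls to bgfx
//
//     // nvgViewId(nvg, 1);                  // optionally retarget another bgfx view
//     nvgDelete(nvg);
// ---------------------------------------------------------------------------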