anim_decode.c 16 KB

123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136137138139140141142143144145146147148149150151152153154155156157158159160161162163164165166167168169170171172173174175176177178179180181182183184185186187188189190191192193194195196197198199200201202203204205206207208209210211212213214215216217218219220221222223224225226227228229230231232233234235236237238239240241242243244245246247248249250251252253254255256257258259260261262263264265266267268269270271272273274275276277278279280281282283284285286287288289290291292293294295296297298299300301302303304305306307308309310311312313314315316317318319320321322323324325326327328329330331332333334335336337338339340341342343344345346347348349350351352353354355356357358359360361362363364365366367368369370371372373374375376377378379380381382383384385386387388389390391392393394395396397398399400401402403404405406407408409410411412413414415416417418419420421422423424425426427428429430431432433434435436437438439440441442
  1. // Copyright 2015 Google Inc. All Rights Reserved.
  2. //
  3. // Use of this source code is governed by a BSD-style license
  4. // that can be found in the COPYING file in the root of the source
  5. // tree. An additional intellectual property rights grant can be found
  6. // in the file PATENTS. All contributing project authors may
  7. // be found in the AUTHORS file in the root of the source tree.
  8. // -----------------------------------------------------------------------------
  9. //
  10. // AnimDecoder implementation.
  11. //
  12. #ifdef HAVE_CONFIG_H
  13. #include "../webp/config.h"
  14. #endif
  15. #include <assert.h>
  16. #include <string.h>
  17. #include "../utils/utils.h"
  18. #include "../webp/decode.h"
  19. #include "../webp/demux.h"
#define NUM_CHANNELS 4  // Bytes per pixel for the supported RGBA/BGRA modes.

// Signature of a function that blends 'num_pixels' of 'src' over 'dst',
// storing the result back into 'src'. A function pointer is used so the
// per-pixel blend routine can be selected once per decoder (see
// ApplyDecoderOptions) instead of branching per pixel.
typedef void (*BlendRowFunc)(uint32_t* const, const uint32_t* const, int);
// Forward declarations of the two row-blending implementations:
// non-premultiplied (MODE_RGBA/MODE_BGRA) and premultiplied
// (MODE_rgbA/MODE_bgrA) alpha.
static void BlendPixelRowNonPremult(uint32_t* const src,
                                    const uint32_t* const dst, int num_pixels);
static void BlendPixelRowPremult(uint32_t* const src, const uint32_t* const dst,
                                 int num_pixels);
// Internal state of an animation decoder. The two canvases ('curr_frame_' and
// 'prev_frame_disposed_') each hold canvas_width * 4 * canvas_height bytes.
struct WebPAnimDecoder {
  WebPDemuxer* demux_;            // Demuxer created from given WebP bitstream.
  WebPDecoderConfig config_;      // Decoder config.
  // Note: we use a pointer to a function blending multiple pixels at a time to
  // allow possible inlining of per-pixel blending function.
  BlendRowFunc blend_func_;       // Pointer to the chosen blend row function.
  WebPAnimInfo info_;             // Global info about the animation.
  uint8_t* curr_frame_;           // Current canvas (not disposed).
  uint8_t* prev_frame_disposed_;  // Previous canvas (properly disposed).
  int prev_frame_timestamp_;      // Previous frame timestamp (milliseconds).
  WebPIterator prev_iter_;        // Iterator object for previous frame.
  int prev_frame_was_keyframe_;   // True if previous frame was a keyframe.
  int next_frame_;                // Index of the next frame to be decoded
                                  // (starting from 1).
};
  41. static void DefaultDecoderOptions(WebPAnimDecoderOptions* const dec_options) {
  42. dec_options->color_mode = MODE_RGBA;
  43. dec_options->use_threads = 0;
  44. }
  45. int WebPAnimDecoderOptionsInitInternal(WebPAnimDecoderOptions* dec_options,
  46. int abi_version) {
  47. if (dec_options == NULL ||
  48. WEBP_ABI_IS_INCOMPATIBLE(abi_version, WEBP_DEMUX_ABI_VERSION)) {
  49. return 0;
  50. }
  51. DefaultDecoderOptions(dec_options);
  52. return 1;
  53. }
  54. static int ApplyDecoderOptions(const WebPAnimDecoderOptions* const dec_options,
  55. WebPAnimDecoder* const dec) {
  56. WEBP_CSP_MODE mode;
  57. WebPDecoderConfig* config = &dec->config_;
  58. assert(dec_options != NULL);
  59. mode = dec_options->color_mode;
  60. if (mode != MODE_RGBA && mode != MODE_BGRA &&
  61. mode != MODE_rgbA && mode != MODE_bgrA) {
  62. return 0;
  63. }
  64. dec->blend_func_ = (mode == MODE_RGBA || mode == MODE_BGRA)
  65. ? &BlendPixelRowNonPremult
  66. : &BlendPixelRowPremult;
  67. WebPInitDecoderConfig(config);
  68. config->output.colorspace = mode;
  69. config->output.is_external_memory = 1;
  70. config->options.use_threads = dec_options->use_threads;
  71. // Note: config->output.u.RGBA is set at the time of decoding each frame.
  72. return 1;
  73. }
  74. WebPAnimDecoder* WebPAnimDecoderNewInternal(
  75. const WebPData* webp_data, const WebPAnimDecoderOptions* dec_options,
  76. int abi_version) {
  77. WebPAnimDecoderOptions options;
  78. WebPAnimDecoder* dec = NULL;
  79. if (webp_data == NULL ||
  80. WEBP_ABI_IS_INCOMPATIBLE(abi_version, WEBP_DEMUX_ABI_VERSION)) {
  81. return NULL;
  82. }
  83. // Note: calloc() so that the pointer members are initialized to NULL.
  84. dec = (WebPAnimDecoder*)WebPSafeCalloc(1ULL, sizeof(*dec));
  85. if (dec == NULL) goto Error;
  86. if (dec_options != NULL) {
  87. options = *dec_options;
  88. } else {
  89. DefaultDecoderOptions(&options);
  90. }
  91. if (!ApplyDecoderOptions(&options, dec)) goto Error;
  92. dec->demux_ = WebPDemux(webp_data);
  93. if (dec->demux_ == NULL) goto Error;
  94. dec->info_.canvas_width = WebPDemuxGetI(dec->demux_, WEBP_FF_CANVAS_WIDTH);
  95. dec->info_.canvas_height = WebPDemuxGetI(dec->demux_, WEBP_FF_CANVAS_HEIGHT);
  96. dec->info_.loop_count = WebPDemuxGetI(dec->demux_, WEBP_FF_LOOP_COUNT);
  97. dec->info_.bgcolor = WebPDemuxGetI(dec->demux_, WEBP_FF_BACKGROUND_COLOR);
  98. dec->info_.frame_count = WebPDemuxGetI(dec->demux_, WEBP_FF_FRAME_COUNT);
  99. {
  100. const int canvas_bytes =
  101. dec->info_.canvas_width * NUM_CHANNELS * dec->info_.canvas_height;
  102. // Note: calloc() because we fill frame with zeroes as well.
  103. dec->curr_frame_ = WebPSafeCalloc(1ULL, canvas_bytes);
  104. if (dec->curr_frame_ == NULL) goto Error;
  105. dec->prev_frame_disposed_ = WebPSafeCalloc(1ULL, canvas_bytes);
  106. if (dec->prev_frame_disposed_ == NULL) goto Error;
  107. }
  108. WebPAnimDecoderReset(dec);
  109. return dec;
  110. Error:
  111. WebPAnimDecoderDelete(dec);
  112. return NULL;
  113. }
  114. int WebPAnimDecoderGetInfo(const WebPAnimDecoder* dec, WebPAnimInfo* info) {
  115. if (dec == NULL || info == NULL) return 0;
  116. *info = dec->info_;
  117. return 1;
  118. }
  119. // Returns true if the frame covers the full canvas.
  120. static int IsFullFrame(int width, int height, int canvas_width,
  121. int canvas_height) {
  122. return (width == canvas_width && height == canvas_height);
  123. }
  124. // Clear the canvas to transparent.
  125. static void ZeroFillCanvas(uint8_t* buf, uint32_t canvas_width,
  126. uint32_t canvas_height) {
  127. memset(buf, 0, canvas_width * NUM_CHANNELS * canvas_height);
  128. }
  129. // Clear given frame rectangle to transparent.
  130. static void ZeroFillFrameRect(uint8_t* buf, int buf_stride, int x_offset,
  131. int y_offset, int width, int height) {
  132. int j;
  133. assert(width * NUM_CHANNELS <= buf_stride);
  134. buf += y_offset * buf_stride + x_offset * NUM_CHANNELS;
  135. for (j = 0; j < height; ++j) {
  136. memset(buf, 0, width * NUM_CHANNELS);
  137. buf += buf_stride;
  138. }
  139. }
  140. // Copy width * height pixels from 'src' to 'dst'.
  141. static void CopyCanvas(const uint8_t* src, uint8_t* dst,
  142. uint32_t width, uint32_t height) {
  143. assert(src != NULL && dst != NULL);
  144. memcpy(dst, src, width * NUM_CHANNELS * height);
  145. }
  146. // Returns true if the current frame is a key-frame.
  147. static int IsKeyFrame(const WebPIterator* const curr,
  148. const WebPIterator* const prev,
  149. int prev_frame_was_key_frame,
  150. int canvas_width, int canvas_height) {
  151. if (curr->frame_num == 1) {
  152. return 1;
  153. } else if ((!curr->has_alpha || curr->blend_method == WEBP_MUX_NO_BLEND) &&
  154. IsFullFrame(curr->width, curr->height,
  155. canvas_width, canvas_height)) {
  156. return 1;
  157. } else {
  158. return (prev->dispose_method == WEBP_MUX_DISPOSE_BACKGROUND) &&
  159. (IsFullFrame(prev->width, prev->height, canvas_width,
  160. canvas_height) ||
  161. prev_frame_was_key_frame);
  162. }
  163. }
  164. // Blend a single channel of 'src' over 'dst', given their alpha channel values.
  165. // 'src' and 'dst' are assumed to be NOT pre-multiplied by alpha.
  166. static uint8_t BlendChannelNonPremult(uint32_t src, uint8_t src_a,
  167. uint32_t dst, uint8_t dst_a,
  168. uint32_t scale, int shift) {
  169. const uint8_t src_channel = (src >> shift) & 0xff;
  170. const uint8_t dst_channel = (dst >> shift) & 0xff;
  171. const uint32_t blend_unscaled = src_channel * src_a + dst_channel * dst_a;
  172. assert(blend_unscaled < (1ULL << 32) / scale);
  173. return (blend_unscaled * scale) >> 24;
  174. }
  175. // Blend 'src' over 'dst' assuming they are NOT pre-multiplied by alpha.
  176. static uint32_t BlendPixelNonPremult(uint32_t src, uint32_t dst) {
  177. const uint8_t src_a = (src >> 24) & 0xff;
  178. if (src_a == 0) {
  179. return dst;
  180. } else {
  181. const uint8_t dst_a = (dst >> 24) & 0xff;
  182. // This is the approximate integer arithmetic for the actual formula:
  183. // dst_factor_a = (dst_a * (255 - src_a)) / 255.
  184. const uint8_t dst_factor_a = (dst_a * (256 - src_a)) >> 8;
  185. const uint8_t blend_a = src_a + dst_factor_a;
  186. const uint32_t scale = (1UL << 24) / blend_a;
  187. const uint8_t blend_r =
  188. BlendChannelNonPremult(src, src_a, dst, dst_factor_a, scale, 0);
  189. const uint8_t blend_g =
  190. BlendChannelNonPremult(src, src_a, dst, dst_factor_a, scale, 8);
  191. const uint8_t blend_b =
  192. BlendChannelNonPremult(src, src_a, dst, dst_factor_a, scale, 16);
  193. assert(src_a + dst_factor_a < 256);
  194. return (blend_r << 0) |
  195. (blend_g << 8) |
  196. (blend_b << 16) |
  197. ((uint32_t)blend_a << 24);
  198. }
  199. }
  200. // Blend 'num_pixels' in 'src' over 'dst' assuming they are NOT pre-multiplied
  201. // by alpha.
  202. static void BlendPixelRowNonPremult(uint32_t* const src,
  203. const uint32_t* const dst, int num_pixels) {
  204. int i;
  205. for (i = 0; i < num_pixels; ++i) {
  206. const uint8_t src_alpha = (src[i] >> 24) & 0xff;
  207. if (src_alpha != 0xff) {
  208. src[i] = BlendPixelNonPremult(src[i], dst[i]);
  209. }
  210. }
  211. }
  212. // Individually multiply each channel in 'pix' by 'scale'.
  213. static WEBP_INLINE uint32_t ChannelwiseMultiply(uint32_t pix, uint32_t scale) {
  214. uint32_t mask = 0x00FF00FF;
  215. uint32_t rb = ((pix & mask) * scale) >> 8;
  216. uint32_t ag = ((pix >> 8) & mask) * scale;
  217. return (rb & mask) | (ag & ~mask);
  218. }
  219. // Blend 'src' over 'dst' assuming they are pre-multiplied by alpha.
  220. static uint32_t BlendPixelPremult(uint32_t src, uint32_t dst) {
  221. const uint8_t src_a = (src >> 24) & 0xff;
  222. return src + ChannelwiseMultiply(dst, 256 - src_a);
  223. }
  224. // Blend 'num_pixels' in 'src' over 'dst' assuming they are pre-multiplied by
  225. // alpha.
  226. static void BlendPixelRowPremult(uint32_t* const src, const uint32_t* const dst,
  227. int num_pixels) {
  228. int i;
  229. for (i = 0; i < num_pixels; ++i) {
  230. const uint8_t src_alpha = (src[i] >> 24) & 0xff;
  231. if (src_alpha != 0xff) {
  232. src[i] = BlendPixelPremult(src[i], dst[i]);
  233. }
  234. }
  235. }
  236. // Returns two ranges (<left, width> pairs) at row 'canvas_y', that belong to
  237. // 'src' but not 'dst'. A point range is empty if the corresponding width is 0.
  238. static void FindBlendRangeAtRow(const WebPIterator* const src,
  239. const WebPIterator* const dst, int canvas_y,
  240. int* const left1, int* const width1,
  241. int* const left2, int* const width2) {
  242. const int src_max_x = src->x_offset + src->width;
  243. const int dst_max_x = dst->x_offset + dst->width;
  244. const int dst_max_y = dst->y_offset + dst->height;
  245. assert(canvas_y >= src->y_offset && canvas_y < (src->y_offset + src->height));
  246. *left1 = -1;
  247. *width1 = 0;
  248. *left2 = -1;
  249. *width2 = 0;
  250. if (canvas_y < dst->y_offset || canvas_y >= dst_max_y ||
  251. src->x_offset >= dst_max_x || src_max_x <= dst->x_offset) {
  252. *left1 = src->x_offset;
  253. *width1 = src->width;
  254. return;
  255. }
  256. if (src->x_offset < dst->x_offset) {
  257. *left1 = src->x_offset;
  258. *width1 = dst->x_offset - src->x_offset;
  259. }
  260. if (src_max_x > dst_max_x) {
  261. *left2 = dst_max_x;
  262. *width2 = src_max_x - dst_max_x;
  263. }
  264. }
// Decodes the next frame of the animation onto the decoder-owned canvas and
// exposes it to the caller. On success, returns 1, sets '*buf_ptr' to the
// internal RGBA canvas (valid only until the next GetNext/Reset/Delete call)
// and '*timestamp_ptr' to the frame's end time in milliseconds. Returns 0 on
// NULL arguments, when no frames remain, or on demux/decode failure.
int WebPAnimDecoderGetNext(WebPAnimDecoder* dec,
                           uint8_t** buf_ptr, int* timestamp_ptr) {
  WebPIterator iter;
  uint32_t width;
  uint32_t height;
  int is_key_frame;
  int timestamp;
  BlendRowFunc blend_row;
  if (dec == NULL || buf_ptr == NULL || timestamp_ptr == NULL) return 0;
  if (!WebPAnimDecoderHasMoreFrames(dec)) return 0;
  width = dec->info_.canvas_width;
  height = dec->info_.canvas_height;
  blend_row = dec->blend_func_;

  // Get compressed frame.
  if (!WebPDemuxGetFrame(dec->demux_, dec->next_frame_, &iter)) {
    return 0;
  }
  // Timestamps are cumulative: this frame ends 'duration' ms after the
  // previous one.
  timestamp = dec->prev_frame_timestamp_ + iter.duration;

  // Initialize the canvas: a key-frame starts from a transparent canvas,
  // otherwise we start from the previous (already disposed) canvas.
  is_key_frame = IsKeyFrame(&iter, &dec->prev_iter_,
                            dec->prev_frame_was_keyframe_, width, height);
  if (is_key_frame) {
    ZeroFillCanvas(dec->curr_frame_, width, height);
  } else {
    CopyCanvas(dec->prev_frame_disposed_, dec->curr_frame_, width, height);
  }

  // Decode the frame's sub-rectangle directly into the canvas via the
  // external-memory RGBA output buffer.
  // NOTE(review): assumes the demuxer has validated that the frame rectangle
  // lies within the canvas; 'out_offset' is not range-checked here — confirm.
  {
    const uint8_t* in = iter.fragment.bytes;
    const size_t in_size = iter.fragment.size;
    const size_t out_offset =
        (iter.y_offset * width + iter.x_offset) * NUM_CHANNELS;
    WebPDecoderConfig* const config = &dec->config_;
    WebPRGBABuffer* const buf = &config->output.u.RGBA;
    buf->stride = NUM_CHANNELS * width;
    buf->size = buf->stride * iter.height;
    buf->rgba = dec->curr_frame_ + out_offset;

    if (WebPDecode(in, in_size, config) != VP8_STATUS_OK) {
      goto Error;
    }
  }

  // During the decoding of current frame, we may have set some pixels to be
  // transparent (i.e. alpha < 255). However, the value of each of these
  // pixels should have been determined by blending it against the value of
  // that pixel in the previous frame if the blend method is WEBP_MUX_BLEND.
  if (iter.frame_num > 1 && iter.blend_method == WEBP_MUX_BLEND &&
      !is_key_frame) {
    if (dec->prev_iter_.dispose_method == WEBP_MUX_DISPOSE_NONE) {
      int y;
      // Blend transparent pixels with pixels in previous canvas.
      for (y = 0; y < iter.height; ++y) {
        // 'offset' is in pixels (the canvases are viewed as uint32_t arrays).
        const size_t offset =
            (iter.y_offset + y) * width + iter.x_offset;
        blend_row((uint32_t*)dec->curr_frame_ + offset,
                  (uint32_t*)dec->prev_frame_disposed_ + offset, iter.width);
      }
    } else {
      int y;
      assert(dec->prev_iter_.dispose_method == WEBP_MUX_DISPOSE_BACKGROUND);
      // We need to blend a transparent pixel with its value just after
      // initialization. That is, blend it with:
      // * Fully transparent pixel if it belongs to prevRect <-- No-op.
      // * The pixel in the previous canvas otherwise <-- Need alpha-blending.
      for (y = 0; y < iter.height; ++y) {
        const int canvas_y = iter.y_offset + y;
        int left1, width1, left2, width2;
        // Split this row into the (up to two) spans lying outside the
        // previous frame's rectangle; only those need blending.
        FindBlendRangeAtRow(&iter, &dec->prev_iter_, canvas_y, &left1, &width1,
                            &left2, &width2);
        if (width1 > 0) {
          const size_t offset1 = canvas_y * width + left1;
          blend_row((uint32_t*)dec->curr_frame_ + offset1,
                    (uint32_t*)dec->prev_frame_disposed_ + offset1, width1);
        }
        if (width2 > 0) {
          const size_t offset2 = canvas_y * width + left2;
          blend_row((uint32_t*)dec->curr_frame_ + offset2,
                    (uint32_t*)dec->prev_frame_disposed_ + offset2, width2);
        }
      }
    }
  }

  // Update info of the previous frame and dispose it for the next iteration.
  dec->prev_frame_timestamp_ = timestamp;
  dec->prev_iter_ = iter;
  dec->prev_frame_was_keyframe_ = is_key_frame;
  CopyCanvas(dec->curr_frame_, dec->prev_frame_disposed_, width, height);
  if (dec->prev_iter_.dispose_method == WEBP_MUX_DISPOSE_BACKGROUND) {
    // Dispose-to-background: clear the frame's rectangle to transparent in
    // the saved copy, so the next frame starts from the disposed state.
    ZeroFillFrameRect(dec->prev_frame_disposed_, width * NUM_CHANNELS,
                      dec->prev_iter_.x_offset, dec->prev_iter_.y_offset,
                      dec->prev_iter_.width, dec->prev_iter_.height);
  }
  ++dec->next_frame_;

  // All OK, fill in the values.
  *buf_ptr = dec->curr_frame_;
  *timestamp_ptr = timestamp;
  return 1;

 Error:
  WebPDemuxReleaseIterator(&iter);
  return 0;
}
  365. int WebPAnimDecoderHasMoreFrames(const WebPAnimDecoder* dec) {
  366. if (dec == NULL) return 0;
  367. return (dec->next_frame_ <= (int)dec->info_.frame_count);
  368. }
  369. void WebPAnimDecoderReset(WebPAnimDecoder* dec) {
  370. if (dec != NULL) {
  371. dec->prev_frame_timestamp_ = 0;
  372. memset(&dec->prev_iter_, 0, sizeof(dec->prev_iter_));
  373. dec->prev_frame_was_keyframe_ = 0;
  374. dec->next_frame_ = 1;
  375. }
  376. }
  377. const WebPDemuxer* WebPAnimDecoderGetDemuxer(const WebPAnimDecoder* dec) {
  378. if (dec == NULL) return NULL;
  379. return dec->demux_;
  380. }
  381. void WebPAnimDecoderDelete(WebPAnimDecoder* dec) {
  382. if (dec != NULL) {
  383. WebPDemuxDelete(dec->demux_);
  384. WebPSafeFree(dec->curr_frame_);
  385. WebPSafeFree(dec->prev_frame_disposed_);
  386. WebPSafeFree(dec);
  387. }
  388. }