anim_encode.c

  1. // Copyright 2014 Google Inc. All Rights Reserved.
  2. //
  3. // Use of this source code is governed by a BSD-style license
  4. // that can be found in the COPYING file in the root of the source
  5. // tree. An additional intellectual property rights grant can be found
  6. // in the file PATENTS. All contributing project authors may
  7. // be found in the AUTHORS file in the root of the source tree.
  8. // -----------------------------------------------------------------------------
  9. //
  10. // AnimEncoder implementation.
  11. //
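// Illustrative usage sketch (not part of this file; see webp/mux.h for the
// authoritative API documentation). A caller typically drives this
// implementation roughly as follows, where 'curr_frame' is a WebPPicture and
// 'webp_data' a WebPData:
//
//    WebPAnimEncoderOptions enc_options;
//    WebPConfig config;
//    WebPAnimEncoderOptionsInit(&enc_options);
//    WebPConfigInit(&config);
//    WebPAnimEncoder* const enc = WebPAnimEncoderNew(width, height, &enc_options);
//    while (<there are more frames>) {
//      WebPAnimEncoderAdd(enc, &curr_frame, timestamp_ms, &config);
//    }
//    WebPAnimEncoderAdd(enc, NULL, last_timestamp_ms, NULL);  // Mark the end.
//    WebPAnimEncoderAssemble(enc, &webp_data);
//    WebPAnimEncoderDelete(enc);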
  12. #include <assert.h>
  13. #include <limits.h>
  14. #include <math.h> // for pow()
  15. #include <stdio.h>
  16. #include <stdlib.h> // for abs()
  17. #include "../utils/utils.h"
  18. #include "../webp/decode.h"
  19. #include "../webp/encode.h"
  20. #include "../webp/format_constants.h"
  21. #include "../webp/mux.h"
  22. #if defined(_MSC_VER) && _MSC_VER < 1900
  23. #define snprintf _snprintf
  24. #endif
  25. #define ERROR_STR_MAX_LENGTH 100
  26. //------------------------------------------------------------------------------
  27. // Internal structs.
  28. // Stores frame rectangle dimensions.
  29. typedef struct {
  30. int x_offset_, y_offset_, width_, height_;
  31. } FrameRect;
  32. // Used to store two candidates of encoded data for an animation frame. One of
  33. // the two will be chosen later.
  34. typedef struct {
  35. WebPMuxFrameInfo sub_frame_; // Encoded frame rectangle.
  36. WebPMuxFrameInfo key_frame_; // Encoded frame if it is a key-frame.
  37. int is_key_frame_; // True if 'key_frame' has been chosen.
  38. } EncodedFrame;
  39. struct WebPAnimEncoder {
  40. const int canvas_width_; // Canvas width.
  41. const int canvas_height_; // Canvas height.
  42. const WebPAnimEncoderOptions options_; // Global encoding options.
  43. FrameRect prev_rect_; // Previous WebP frame rectangle.
  44. WebPConfig last_config_; // Cached in case a re-encode is needed.
  45. WebPConfig last_config_reversed_; // If 'last_config_' uses lossless, then
  46. // this config uses lossy and vice versa;
  47. // only valid if 'options_.allow_mixed'
  48. // is true.
  49. WebPPicture* curr_canvas_; // Only pointer; we don't own memory.
  50. // Canvas buffers.
  51. WebPPicture curr_canvas_copy_; // Possibly modified current canvas.
  52. int curr_canvas_copy_modified_; // True if pixels in 'curr_canvas_copy_'
  53. // differ from those in 'curr_canvas_'.
  54. WebPPicture prev_canvas_; // Previous canvas.
  55. WebPPicture prev_canvas_disposed_; // Previous canvas disposed to background.
  56. // Encoded data.
  57. EncodedFrame* encoded_frames_; // Array of encoded frames.
  58. size_t size_; // Number of allocated frames.
  59. size_t start_; // Frame start index.
  60. size_t count_; // Number of valid frames.
  61. size_t flush_count_; // If >0, 'flush_count' frames starting from
  62. // 'start' are ready to be added to mux.
  63. // key-frame related.
  64. int64_t best_delta_; // min(canvas size - frame size) over the frames.
  65. // Can be negative in certain cases due to
  66. // transparent pixels in a frame.
  67. int keyframe_; // Index of selected key-frame relative to 'start_'.
  68. int count_since_key_frame_; // Frames seen since the last key-frame.
  69. int first_timestamp_; // Timestamp of the first frame.
  70. int prev_timestamp_; // Timestamp of the last added frame.
  71. int prev_candidate_undecided_; // True if it's not yet decided if previous
  72. // frame would be a sub-frame or a key-frame.
  73. // Misc.
  74. int is_first_frame_; // True if first frame is yet to be added/being added.
  75. int got_null_frame_; // True if WebPAnimEncoderAdd() has already been called
  76. // with a NULL frame.
  77. size_t in_frame_count_; // Number of input frames processed so far.
  78. size_t out_frame_count_; // Number of frames added to mux so far. This may be
  79. // different from 'in_frame_count_' due to merging.
  80. WebPMux* mux_; // Muxer to assemble the WebP bitstream.
  81. char error_str_[ERROR_STR_MAX_LENGTH]; // Error string. Empty if no error.
  82. };
  83. // -----------------------------------------------------------------------------
  84. // Life of WebPAnimEncoder object.
  85. #define DELTA_INFINITY (1ULL << 32)
  86. #define KEYFRAME_NONE (-1)
  87. // Reset the counters in the WebPAnimEncoder.
  88. static void ResetCounters(WebPAnimEncoder* const enc) {
  89. enc->start_ = 0;
  90. enc->count_ = 0;
  91. enc->flush_count_ = 0;
  92. enc->best_delta_ = DELTA_INFINITY;
  93. enc->keyframe_ = KEYFRAME_NONE;
  94. }
  95. static void DisableKeyframes(WebPAnimEncoderOptions* const enc_options) {
  96. enc_options->kmax = INT_MAX;
  97. enc_options->kmin = enc_options->kmax - 1;
  98. }
  99. #define MAX_CACHED_FRAMES 30
  100. static void SanitizeEncoderOptions(WebPAnimEncoderOptions* const enc_options) {
  101. int print_warning = enc_options->verbose;
  102. if (enc_options->minimize_size) {
  103. DisableKeyframes(enc_options);
  104. }
  105. if (enc_options->kmin <= 0) {
  106. DisableKeyframes(enc_options);
  107. print_warning = 0;
  108. }
  109. if (enc_options->kmax <= 0) { // All frames will be key-frames.
  110. enc_options->kmin = 0;
  111. enc_options->kmax = 0;
  112. return;
  113. }
  114. if (enc_options->kmin >= enc_options->kmax) {
  115. enc_options->kmin = enc_options->kmax - 1;
  116. if (print_warning) {
  117. fprintf(stderr, "WARNING: Setting kmin = %d, so that kmin < kmax.\n",
  118. enc_options->kmin);
  119. }
  120. } else {
  121. const int kmin_limit = enc_options->kmax / 2 + 1;
  122. if (enc_options->kmin < kmin_limit && kmin_limit < enc_options->kmax) {
  123. // This ensures that enc.keyframe + kmin >= kmax is always true. So, we
  124. // can flush all the frames in the 'count_since_key_frame == kmax' case.
  125. enc_options->kmin = kmin_limit;
  126. if (print_warning) {
  127. fprintf(stderr,
  128. "WARNING: Setting kmin = %d, so that kmin >= kmax / 2 + 1.\n",
  129. enc_options->kmin);
  130. }
  131. }
  132. }
  133. // Limit the max number of frames that are allocated.
  134. if (enc_options->kmax - enc_options->kmin > MAX_CACHED_FRAMES) {
  135. enc_options->kmin = enc_options->kmax - MAX_CACHED_FRAMES;
  136. if (print_warning) {
  137. fprintf(stderr,
  138. "WARNING: Setting kmin = %d, so that kmax - kmin <= %d.\n",
  139. enc_options->kmin, MAX_CACHED_FRAMES);
  140. }
  141. }
  142. assert(enc_options->kmin < enc_options->kmax);
  143. }
  144. #undef MAX_CACHED_FRAMES
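// Illustrative example of the sanitization above (values chosen for
// illustration only): a caller passing kmin = 3 and kmax = 30 ends up with
// kmin raised to kmax / 2 + 1 = 16, so that once 'count_since_key_frame_'
// reaches kmax, all cached frames up to the chosen key-frame can be flushed;
// kmax - kmin = 14 also stays within MAX_CACHED_FRAMES.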
  145. static void DefaultEncoderOptions(WebPAnimEncoderOptions* const enc_options) {
  146. enc_options->anim_params.loop_count = 0;
  147. enc_options->anim_params.bgcolor = 0xffffffff; // White.
  148. enc_options->minimize_size = 0;
  149. DisableKeyframes(enc_options);
  150. enc_options->allow_mixed = 0;
  151. enc_options->verbose = 0;
  152. }
  153. int WebPAnimEncoderOptionsInitInternal(WebPAnimEncoderOptions* enc_options,
  154. int abi_version) {
  155. if (enc_options == NULL ||
  156. WEBP_ABI_IS_INCOMPATIBLE(abi_version, WEBP_MUX_ABI_VERSION)) {
  157. return 0;
  158. }
  159. DefaultEncoderOptions(enc_options);
  160. return 1;
  161. }
  162. // This starting value is better suited to WebPCleanupTransparentAreaLossless().
  163. #define TRANSPARENT_COLOR 0x00000000
  164. static void ClearRectangle(WebPPicture* const picture,
  165. int left, int top, int width, int height) {
  166. int j;
  167. for (j = top; j < top + height; ++j) {
  168. uint32_t* const dst = picture->argb + j * picture->argb_stride;
  169. int i;
  170. for (i = left; i < left + width; ++i) {
  171. dst[i] = TRANSPARENT_COLOR;
  172. }
  173. }
  174. }
  175. static void WebPUtilClearPic(WebPPicture* const picture,
  176. const FrameRect* const rect) {
  177. if (rect != NULL) {
  178. ClearRectangle(picture, rect->x_offset_, rect->y_offset_,
  179. rect->width_, rect->height_);
  180. } else {
  181. ClearRectangle(picture, 0, 0, picture->width, picture->height);
  182. }
  183. }
  184. static void MarkNoError(WebPAnimEncoder* const enc) {
  185. enc->error_str_[0] = '\0'; // Empty string.
  186. }
  187. static void MarkError(WebPAnimEncoder* const enc, const char* str) {
  188. if (snprintf(enc->error_str_, ERROR_STR_MAX_LENGTH, "%s.", str) < 0) {
  189. assert(0); // FIX ME!
  190. }
  191. }
  192. static void MarkError2(WebPAnimEncoder* const enc,
  193. const char* str, int error_code) {
  194. if (snprintf(enc->error_str_, ERROR_STR_MAX_LENGTH, "%s: %d.", str,
  195. error_code) < 0) {
  196. assert(0); // FIX ME!
  197. }
  198. }
  199. WebPAnimEncoder* WebPAnimEncoderNewInternal(
  200. int width, int height, const WebPAnimEncoderOptions* enc_options,
  201. int abi_version) {
  202. WebPAnimEncoder* enc;
  203. if (WEBP_ABI_IS_INCOMPATIBLE(abi_version, WEBP_MUX_ABI_VERSION)) {
  204. return NULL;
  205. }
  206. if (width <= 0 || height <= 0 ||
  207. (width * (uint64_t)height) >= MAX_IMAGE_AREA) {
  208. return NULL;
  209. }
  210. enc = (WebPAnimEncoder*)WebPSafeCalloc(1, sizeof(*enc));
  211. if (enc == NULL) return NULL;
  212. // sanity inits, so we can call WebPAnimEncoderDelete():
  213. enc->encoded_frames_ = NULL;
  214. enc->mux_ = NULL;
  215. MarkNoError(enc);
  216. // Dimensions and options.
  217. *(int*)&enc->canvas_width_ = width;
  218. *(int*)&enc->canvas_height_ = height;
  219. if (enc_options != NULL) {
  220. *(WebPAnimEncoderOptions*)&enc->options_ = *enc_options;
  221. SanitizeEncoderOptions((WebPAnimEncoderOptions*)&enc->options_);
  222. } else {
  223. DefaultEncoderOptions((WebPAnimEncoderOptions*)&enc->options_);
  224. }
  225. // Canvas buffers.
  226. if (!WebPPictureInit(&enc->curr_canvas_copy_) ||
  227. !WebPPictureInit(&enc->prev_canvas_) ||
  228. !WebPPictureInit(&enc->prev_canvas_disposed_)) {
  229. goto Err;
  230. }
  231. enc->curr_canvas_copy_.width = width;
  232. enc->curr_canvas_copy_.height = height;
  233. enc->curr_canvas_copy_.use_argb = 1;
  234. if (!WebPPictureAlloc(&enc->curr_canvas_copy_) ||
  235. !WebPPictureCopy(&enc->curr_canvas_copy_, &enc->prev_canvas_) ||
  236. !WebPPictureCopy(&enc->curr_canvas_copy_, &enc->prev_canvas_disposed_)) {
  237. goto Err;
  238. }
  239. WebPUtilClearPic(&enc->prev_canvas_, NULL);
  240. enc->curr_canvas_copy_modified_ = 1;
  241. // Encoded frames.
  242. ResetCounters(enc);
  243. // Note: one extra slot is allocated for the previous frame.
  244. enc->size_ = enc->options_.kmax - enc->options_.kmin + 1;
  245. // We need space for at least 2 frames. But when kmin, kmax are both zero,
  246. // enc->size_ will be 1. So we handle that special case below.
  247. if (enc->size_ < 2) enc->size_ = 2;
  248. enc->encoded_frames_ =
  249. (EncodedFrame*)WebPSafeCalloc(enc->size_, sizeof(*enc->encoded_frames_));
  250. if (enc->encoded_frames_ == NULL) goto Err;
  251. enc->mux_ = WebPMuxNew();
  252. if (enc->mux_ == NULL) goto Err;
  253. enc->count_since_key_frame_ = 0;
  254. enc->first_timestamp_ = 0;
  255. enc->prev_timestamp_ = 0;
  256. enc->prev_candidate_undecided_ = 0;
  257. enc->is_first_frame_ = 1;
  258. enc->got_null_frame_ = 0;
  259. return enc; // All OK.
  260. Err:
  261. WebPAnimEncoderDelete(enc);
  262. return NULL;
  263. }
  264. // Release the data contained by 'encoded_frame'.
  265. static void FrameRelease(EncodedFrame* const encoded_frame) {
  266. if (encoded_frame != NULL) {
  267. WebPDataClear(&encoded_frame->sub_frame_.bitstream);
  268. WebPDataClear(&encoded_frame->key_frame_.bitstream);
  269. memset(encoded_frame, 0, sizeof(*encoded_frame));
  270. }
  271. }
  272. void WebPAnimEncoderDelete(WebPAnimEncoder* enc) {
  273. if (enc != NULL) {
  274. WebPPictureFree(&enc->curr_canvas_copy_);
  275. WebPPictureFree(&enc->prev_canvas_);
  276. WebPPictureFree(&enc->prev_canvas_disposed_);
  277. if (enc->encoded_frames_ != NULL) {
  278. size_t i;
  279. for (i = 0; i < enc->size_; ++i) {
  280. FrameRelease(&enc->encoded_frames_[i]);
  281. }
  282. WebPSafeFree(enc->encoded_frames_);
  283. }
  284. WebPMuxDelete(enc->mux_);
  285. WebPSafeFree(enc);
  286. }
  287. }
  288. // -----------------------------------------------------------------------------
  289. // Frame addition.
  290. // Returns cached frame at the given 'position'.
  291. static EncodedFrame* GetFrame(const WebPAnimEncoder* const enc,
  292. size_t position) {
  293. assert(enc->start_ + position < enc->size_);
  294. return &enc->encoded_frames_[enc->start_ + position];
  295. }
  296. typedef int (*ComparePixelsFunc)(const uint32_t*, int, const uint32_t*, int,
  297. int, int);
  298. // Returns true if 'length' number of pixels in 'src' and 'dst' are equal,
  299. // assuming the given step sizes between pixels.
  300. // 'max_allowed_diff' is unused and only there to allow function pointer use.
  301. static WEBP_INLINE int ComparePixelsLossless(const uint32_t* src, int src_step,
  302. const uint32_t* dst, int dst_step,
  303. int length, int max_allowed_diff) {
  304. (void)max_allowed_diff;
  305. assert(length > 0);
  306. while (length-- > 0) {
  307. if (*src != *dst) {
  308. return 0;
  309. }
  310. src += src_step;
  311. dst += dst_step;
  312. }
  313. return 1;
  314. }
  315. // Helper to check if each channel in 'src' and 'dst' is at most off by
  316. // 'max_allowed_diff'.
  317. static WEBP_INLINE int PixelsAreSimilar(uint32_t src, uint32_t dst,
  318. int max_allowed_diff) {
  319. const int src_a = (src >> 24) & 0xff;
  320. const int src_r = (src >> 16) & 0xff;
  321. const int src_g = (src >> 8) & 0xff;
  322. const int src_b = (src >> 0) & 0xff;
  323. const int dst_a = (dst >> 24) & 0xff;
  324. const int dst_r = (dst >> 16) & 0xff;
  325. const int dst_g = (dst >> 8) & 0xff;
  326. const int dst_b = (dst >> 0) & 0xff;
  327. return (abs(src_r * src_a - dst_r * dst_a) <= (max_allowed_diff * 255)) &&
  328. (abs(src_g * src_a - dst_g * dst_a) <= (max_allowed_diff * 255)) &&
  329. (abs(src_b * src_a - dst_b * dst_a) <= (max_allowed_diff * 255)) &&
  330. (abs(src_a - dst_a) <= max_allowed_diff);
  331. }
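// Worked example (illustrative values): with max_allowed_diff = 10, the opaque
// pixels 0xff808080 and 0xff858585 compare as similar, since each
// premultiplied channel difference is 5 * 255 <= 10 * 255 and the alpha
// difference is 0. A fully transparent pixel compared against any opaque one
// fails on the alpha check (|0x00 - 0xff| > 10).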
  332. // Returns true if 'length' number of pixels in 'src' and 'dst' are within an
  333. // error bound, assuming the given step sizes between pixels.
  334. static WEBP_INLINE int ComparePixelsLossy(const uint32_t* src, int src_step,
  335. const uint32_t* dst, int dst_step,
  336. int length, int max_allowed_diff) {
  337. assert(length > 0);
  338. while (length-- > 0) {
  339. if (!PixelsAreSimilar(*src, *dst, max_allowed_diff)) {
  340. return 0;
  341. }
  342. src += src_step;
  343. dst += dst_step;
  344. }
  345. return 1;
  346. }
  347. static int IsEmptyRect(const FrameRect* const rect) {
  348. return (rect->width_ == 0) || (rect->height_ == 0);
  349. }
  350. static int QualityToMaxDiff(float quality) {
  351. const double val = pow(quality / 100., 0.5);
  352. const double max_diff = 31 * (1 - val) + 1 * val;
  353. return (int)(max_diff + 0.5);
  354. }
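// Worked example (illustrative): QualityToMaxDiff() maps quality 100 -> 1,
// quality 50 -> 10 (31 * (1 - sqrt(0.5)) + sqrt(0.5) ~= 9.8, rounded) and
// quality 0 -> 31; i.e. lower quality tolerates larger per-channel differences
// when comparing pixels in lossy mode.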
  355. // Assumes that an initial valid guess of change rectangle 'rect' is passed.
  356. static void MinimizeChangeRectangle(const WebPPicture* const src,
  357. const WebPPicture* const dst,
  358. FrameRect* const rect,
  359. int is_lossless, float quality) {
  360. int i, j;
  361. const ComparePixelsFunc compare_pixels =
  362. is_lossless ? ComparePixelsLossless : ComparePixelsLossy;
  363. const int max_allowed_diff_lossy = QualityToMaxDiff(quality);
  364. const int max_allowed_diff = is_lossless ? 0 : max_allowed_diff_lossy;
  365. // Sanity checks.
  366. assert(src->width == dst->width && src->height == dst->height);
  367. assert(rect->x_offset_ + rect->width_ <= dst->width);
  368. assert(rect->y_offset_ + rect->height_ <= dst->height);
  369. // Left boundary.
  370. for (i = rect->x_offset_; i < rect->x_offset_ + rect->width_; ++i) {
  371. const uint32_t* const src_argb =
  372. &src->argb[rect->y_offset_ * src->argb_stride + i];
  373. const uint32_t* const dst_argb =
  374. &dst->argb[rect->y_offset_ * dst->argb_stride + i];
  375. if (compare_pixels(src_argb, src->argb_stride, dst_argb, dst->argb_stride,
  376. rect->height_, max_allowed_diff)) {
  377. --rect->width_; // Redundant column.
  378. ++rect->x_offset_;
  379. } else {
  380. break;
  381. }
  382. }
  383. if (rect->width_ == 0) goto NoChange;
  384. // Right boundary.
  385. for (i = rect->x_offset_ + rect->width_ - 1; i >= rect->x_offset_; --i) {
  386. const uint32_t* const src_argb =
  387. &src->argb[rect->y_offset_ * src->argb_stride + i];
  388. const uint32_t* const dst_argb =
  389. &dst->argb[rect->y_offset_ * dst->argb_stride + i];
  390. if (compare_pixels(src_argb, src->argb_stride, dst_argb, dst->argb_stride,
  391. rect->height_, max_allowed_diff)) {
  392. --rect->width_; // Redundant column.
  393. } else {
  394. break;
  395. }
  396. }
  397. if (rect->width_ == 0) goto NoChange;
  398. // Top boundary.
  399. for (j = rect->y_offset_; j < rect->y_offset_ + rect->height_; ++j) {
  400. const uint32_t* const src_argb =
  401. &src->argb[j * src->argb_stride + rect->x_offset_];
  402. const uint32_t* const dst_argb =
  403. &dst->argb[j * dst->argb_stride + rect->x_offset_];
  404. if (compare_pixels(src_argb, 1, dst_argb, 1, rect->width_,
  405. max_allowed_diff)) {
  406. --rect->height_; // Redundant row.
  407. ++rect->y_offset_;
  408. } else {
  409. break;
  410. }
  411. }
  412. if (rect->height_ == 0) goto NoChange;
  413. // Bottom boundary.
  414. for (j = rect->y_offset_ + rect->height_ - 1; j >= rect->y_offset_; --j) {
  415. const uint32_t* const src_argb =
  416. &src->argb[j * src->argb_stride + rect->x_offset_];
  417. const uint32_t* const dst_argb =
  418. &dst->argb[j * dst->argb_stride + rect->x_offset_];
  419. if (compare_pixels(src_argb, 1, dst_argb, 1, rect->width_,
  420. max_allowed_diff)) {
  421. --rect->height_; // Redundant row.
  422. } else {
  423. break;
  424. }
  425. }
  426. if (rect->height_ == 0) goto NoChange;
  427. if (IsEmptyRect(rect)) {
  428. NoChange:
  429. rect->x_offset_ = 0;
  430. rect->y_offset_ = 0;
  431. rect->width_ = 0;
  432. rect->height_ = 0;
  433. }
  434. }
  435. // Snap rectangle to even offsets (and adjust dimensions if needed).
  436. static WEBP_INLINE void SnapToEvenOffsets(FrameRect* const rect) {
  437. rect->width_ += (rect->x_offset_ & 1);
  438. rect->height_ += (rect->y_offset_ & 1);
  439. rect->x_offset_ &= ~1;
  440. rect->y_offset_ &= ~1;
  441. }
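// Worked example (illustrative): a rectangle at (x, y) = (3, 5) of size 10x7
// becomes (2, 4) of size 11x8: the offsets are rounded down to even values and
// the width/height grow by one where needed, so the rectangle still covers all
// changed pixels.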
  442. typedef struct {
  443. int should_try_; // Should try this set of parameters.
  444. int empty_rect_allowed_; // Frame with empty rectangle can be skipped.
  445. FrameRect rect_ll_; // Frame rectangle for lossless compression.
  446. WebPPicture sub_frame_ll_; // Sub-frame pic for lossless compression.
  447. FrameRect rect_lossy_; // Frame rectangle for lossy compression.
  448. // Could be smaller than rect_ll_ as pixels
  449. // with small diffs can be ignored.
  450. WebPPicture sub_frame_lossy_; // Sub-frame pic for lossy compression.
  451. } SubFrameParams;
  452. static int SubFrameParamsInit(SubFrameParams* const params,
  453. int should_try, int empty_rect_allowed) {
  454. params->should_try_ = should_try;
  455. params->empty_rect_allowed_ = empty_rect_allowed;
  456. if (!WebPPictureInit(&params->sub_frame_ll_) ||
  457. !WebPPictureInit(&params->sub_frame_lossy_)) {
  458. return 0;
  459. }
  460. return 1;
  461. }
  462. static void SubFrameParamsFree(SubFrameParams* const params) {
  463. WebPPictureFree(&params->sub_frame_ll_);
  464. WebPPictureFree(&params->sub_frame_lossy_);
  465. }
  466. // Given previous and current canvas, picks the optimal rectangle for the
  467. // current frame based on 'is_lossless' and other parameters. Assumes that the
  468. // initial guess 'rect' is valid.
  469. static int GetSubRect(const WebPPicture* const prev_canvas,
  470. const WebPPicture* const curr_canvas, int is_key_frame,
  471. int is_first_frame, int empty_rect_allowed,
  472. int is_lossless, float quality, FrameRect* const rect,
  473. WebPPicture* const sub_frame) {
  474. if (!is_key_frame || is_first_frame) { // Optimize frame rectangle.
  475. // Note: This behaves as expected for first frame, as 'prev_canvas' is
  476. // initialized to a fully transparent canvas in the beginning.
  477. MinimizeChangeRectangle(prev_canvas, curr_canvas, rect,
  478. is_lossless, quality);
  479. }
  480. if (IsEmptyRect(rect)) {
  481. if (empty_rect_allowed) { // No need to get 'sub_frame'.
  482. return 1;
  483. } else { // Force a 1x1 rectangle.
  484. rect->width_ = 1;
  485. rect->height_ = 1;
  486. assert(rect->x_offset_ == 0);
  487. assert(rect->y_offset_ == 0);
  488. }
  489. }
  490. SnapToEvenOffsets(rect);
  491. return WebPPictureView(curr_canvas, rect->x_offset_, rect->y_offset_,
  492. rect->width_, rect->height_, sub_frame);
  493. }
  494. // Picks optimal frame rectangle for both lossless and lossy compression. The
  495. // initial guess for frame rectangles will be the full canvas.
  496. static int GetSubRects(const WebPPicture* const prev_canvas,
  497. const WebPPicture* const curr_canvas, int is_key_frame,
  498. int is_first_frame, float quality,
  499. SubFrameParams* const params) {
  500. // Lossless frame rectangle.
  501. params->rect_ll_.x_offset_ = 0;
  502. params->rect_ll_.y_offset_ = 0;
  503. params->rect_ll_.width_ = curr_canvas->width;
  504. params->rect_ll_.height_ = curr_canvas->height;
  505. if (!GetSubRect(prev_canvas, curr_canvas, is_key_frame, is_first_frame,
  506. params->empty_rect_allowed_, 1, quality,
  507. &params->rect_ll_, &params->sub_frame_ll_)) {
  508. return 0;
  509. }
  510. // Lossy frame rectangle.
  511. params->rect_lossy_ = params->rect_ll_; // seed with lossless rect.
  512. return GetSubRect(prev_canvas, curr_canvas, is_key_frame, is_first_frame,
  513. params->empty_rect_allowed_, 0, quality,
  514. &params->rect_lossy_, &params->sub_frame_lossy_);
  515. }
  516. static void DisposeFrameRectangle(int dispose_method,
  517. const FrameRect* const rect,
  518. WebPPicture* const curr_canvas) {
  519. assert(rect != NULL);
  520. if (dispose_method == WEBP_MUX_DISPOSE_BACKGROUND) {
  521. WebPUtilClearPic(curr_canvas, rect);
  522. }
  523. }
  524. static uint32_t RectArea(const FrameRect* const rect) {
  525. return (uint32_t)rect->width_ * rect->height_;
  526. }
  527. static int IsLosslessBlendingPossible(const WebPPicture* const src,
  528. const WebPPicture* const dst,
  529. const FrameRect* const rect) {
  530. int i, j;
  531. assert(src->width == dst->width && src->height == dst->height);
  532. assert(rect->x_offset_ + rect->width_ <= dst->width);
  533. assert(rect->y_offset_ + rect->height_ <= dst->height);
  534. for (j = rect->y_offset_; j < rect->y_offset_ + rect->height_; ++j) {
  535. for (i = rect->x_offset_; i < rect->x_offset_ + rect->width_; ++i) {
  536. const uint32_t src_pixel = src->argb[j * src->argb_stride + i];
  537. const uint32_t dst_pixel = dst->argb[j * dst->argb_stride + i];
  538. const uint32_t dst_alpha = dst_pixel >> 24;
  539. if (dst_alpha != 0xff && src_pixel != dst_pixel) {
  540. // In this case, if we use blending, we can't attain the desired
  541. // 'dst_pixel' value for this pixel. So, blending is not possible.
  542. return 0;
  543. }
  544. }
  545. }
  546. return 1;
  547. }
  548. static int IsLossyBlendingPossible(const WebPPicture* const src,
  549. const WebPPicture* const dst,
  550. const FrameRect* const rect,
  551. float quality) {
  552. const int max_allowed_diff_lossy = QualityToMaxDiff(quality);
  553. int i, j;
  554. assert(src->width == dst->width && src->height == dst->height);
  555. assert(rect->x_offset_ + rect->width_ <= dst->width);
  556. assert(rect->y_offset_ + rect->height_ <= dst->height);
  557. for (j = rect->y_offset_; j < rect->y_offset_ + rect->height_; ++j) {
  558. for (i = rect->x_offset_; i < rect->x_offset_ + rect->width_; ++i) {
  559. const uint32_t src_pixel = src->argb[j * src->argb_stride + i];
  560. const uint32_t dst_pixel = dst->argb[j * dst->argb_stride + i];
  561. const uint32_t dst_alpha = dst_pixel >> 24;
  562. if (dst_alpha != 0xff &&
  563. !PixelsAreSimilar(src_pixel, dst_pixel, max_allowed_diff_lossy)) {
  564. // In this case, if we use blending, we can't attain the desired
  565. // 'dst_pixel' value for this pixel. So, blending is not possible.
  566. return 0;
  567. }
  568. }
  569. }
  570. return 1;
  571. }
  572. // For pixels in 'rect', replace those pixels in 'dst' that are same as 'src' by
  573. // transparent pixels.
  574. // Returns true if at least one pixel gets modified.
  575. static int IncreaseTransparency(const WebPPicture* const src,
  576. const FrameRect* const rect,
  577. WebPPicture* const dst) {
  578. int i, j;
  579. int modified = 0;
  580. assert(src != NULL && dst != NULL && rect != NULL);
  581. assert(src->width == dst->width && src->height == dst->height);
  582. for (j = rect->y_offset_; j < rect->y_offset_ + rect->height_; ++j) {
  583. const uint32_t* const psrc = src->argb + j * src->argb_stride;
  584. uint32_t* const pdst = dst->argb + j * dst->argb_stride;
  585. for (i = rect->x_offset_; i < rect->x_offset_ + rect->width_; ++i) {
  586. if (psrc[i] == pdst[i] && pdst[i] != TRANSPARENT_COLOR) {
  587. pdst[i] = TRANSPARENT_COLOR;
  588. modified = 1;
  589. }
  590. }
  591. }
  592. return modified;
  593. }
  594. #undef TRANSPARENT_COLOR
  595. // Replace similar blocks of pixels by a 'see-through' transparent block
  596. // with uniform average color.
  597. // Assumes lossy compression is being used.
  598. // Returns true if at least one pixel gets modified.
  599. static int FlattenSimilarBlocks(const WebPPicture* const src,
  600. const FrameRect* const rect,
  601. WebPPicture* const dst, float quality) {
  602. const int max_allowed_diff_lossy = QualityToMaxDiff(quality);
  603. int i, j;
  604. int modified = 0;
  605. const int block_size = 8;
  606. const int y_start = (rect->y_offset_ + block_size) & ~(block_size - 1);
  607. const int y_end = (rect->y_offset_ + rect->height_) & ~(block_size - 1);
  608. const int x_start = (rect->x_offset_ + block_size) & ~(block_size - 1);
  609. const int x_end = (rect->x_offset_ + rect->width_) & ~(block_size - 1);
  610. assert(src != NULL && dst != NULL && rect != NULL);
  611. assert(src->width == dst->width && src->height == dst->height);
  612. assert((block_size & (block_size - 1)) == 0); // must be a power of 2
  613. // Iterate over each block and count similar pixels.
  614. for (j = y_start; j < y_end; j += block_size) {
  615. for (i = x_start; i < x_end; i += block_size) {
  616. int cnt = 0;
  617. int avg_r = 0, avg_g = 0, avg_b = 0;
  618. int x, y;
  619. const uint32_t* const psrc = src->argb + j * src->argb_stride + i;
  620. uint32_t* const pdst = dst->argb + j * dst->argb_stride + i;
  621. for (y = 0; y < block_size; ++y) {
  622. for (x = 0; x < block_size; ++x) {
  623. const uint32_t src_pixel = psrc[x + y * src->argb_stride];
  624. const int alpha = src_pixel >> 24;
  625. if (alpha == 0xff &&
  626. PixelsAreSimilar(src_pixel, pdst[x + y * dst->argb_stride],
  627. max_allowed_diff_lossy)) {
  628. ++cnt;
  629. avg_r += (src_pixel >> 16) & 0xff;
  630. avg_g += (src_pixel >> 8) & 0xff;
  631. avg_b += (src_pixel >> 0) & 0xff;
  632. }
  633. }
  634. }
  635. // If we have a fully similar block, we replace it with an
  636. // average transparent block. This compresses better in lossy mode.
  637. if (cnt == block_size * block_size) {
  638. const uint32_t color = (0x00 << 24) |
  639. ((avg_r / cnt) << 16) |
  640. ((avg_g / cnt) << 8) |
  641. ((avg_b / cnt) << 0);
  642. for (y = 0; y < block_size; ++y) {
  643. for (x = 0; x < block_size; ++x) {
  644. pdst[x + y * dst->argb_stride] = color;
  645. }
  646. }
  647. modified = 1;
  648. }
  649. }
  650. }
  651. return modified;
  652. }
  653. static int EncodeFrame(const WebPConfig* const config, WebPPicture* const pic,
  654. WebPMemoryWriter* const memory) {
  655. pic->use_argb = 1;
  656. pic->writer = WebPMemoryWrite;
  657. pic->custom_ptr = memory;
  658. if (!WebPEncode(config, pic)) {
  659. return 0;
  660. }
  661. return 1;
  662. }
  663. // Struct representing a candidate encoded frame including its metadata.
  664. typedef struct {
  665. WebPMemoryWriter mem_;
  666. WebPMuxFrameInfo info_;
  667. FrameRect rect_;
  668. int evaluate_; // True if this candidate should be evaluated.
  669. } Candidate;
  670. // Generates a candidate encoded frame given a picture and metadata.
  671. static WebPEncodingError EncodeCandidate(WebPPicture* const sub_frame,
  672. const FrameRect* const rect,
  673. const WebPConfig* const encoder_config,
  674. int use_blending,
  675. Candidate* const candidate) {
  676. WebPConfig config = *encoder_config;
  677. WebPEncodingError error_code = VP8_ENC_OK;
  678. assert(candidate != NULL);
  679. memset(candidate, 0, sizeof(*candidate));
  680. // Set frame rect and info.
  681. candidate->rect_ = *rect;
  682. candidate->info_.id = WEBP_CHUNK_ANMF;
  683. candidate->info_.x_offset = rect->x_offset_;
  684. candidate->info_.y_offset = rect->y_offset_;
  685. candidate->info_.dispose_method = WEBP_MUX_DISPOSE_NONE; // Set later.
  686. candidate->info_.blend_method =
  687. use_blending ? WEBP_MUX_BLEND : WEBP_MUX_NO_BLEND;
  688. candidate->info_.duration = 0; // Set in next call to WebPAnimEncoderAdd().
  689. // Encode picture.
  690. WebPMemoryWriterInit(&candidate->mem_);
  691. if (!config.lossless && use_blending) {
  692. // Disable filtering to avoid blockiness in reconstructed frames at the
  693. // time of decoding.
  694. config.autofilter = 0;
  695. config.filter_strength = 0;
  696. }
  697. if (!EncodeFrame(&config, sub_frame, &candidate->mem_)) {
  698. error_code = sub_frame->error_code;
  699. goto Err;
  700. }
  701. candidate->evaluate_ = 1;
  702. return error_code;
  703. Err:
  704. WebPMemoryWriterClear(&candidate->mem_);
  705. return error_code;
  706. }
  707. static void CopyCurrentCanvas(WebPAnimEncoder* const enc) {
  708. if (enc->curr_canvas_copy_modified_) {
  709. WebPCopyPixels(enc->curr_canvas_, &enc->curr_canvas_copy_);
  710. enc->curr_canvas_copy_.progress_hook = enc->curr_canvas_->progress_hook;
  711. enc->curr_canvas_copy_.user_data = enc->curr_canvas_->user_data;
  712. enc->curr_canvas_copy_modified_ = 0;
  713. }
  714. }
  715. enum {
  716. LL_DISP_NONE = 0,
  717. LL_DISP_BG,
  718. LOSSY_DISP_NONE,
  719. LOSSY_DISP_BG,
  720. CANDIDATE_COUNT
  721. };
  722. #define MIN_COLORS_LOSSY 31 // Don't try lossy below this threshold.
  723. #define MAX_COLORS_LOSSLESS 194 // Don't try lossless above this threshold.
  724. // Generates candidates for a given dispose method given pre-filled sub-frame
  725. // 'params'.
  726. static WebPEncodingError GenerateCandidates(
  727. WebPAnimEncoder* const enc, Candidate candidates[CANDIDATE_COUNT],
  728. WebPMuxAnimDispose dispose_method, int is_lossless, int is_key_frame,
  729. SubFrameParams* const params,
  730. const WebPConfig* const config_ll, const WebPConfig* const config_lossy) {
  731. WebPEncodingError error_code = VP8_ENC_OK;
  732. const int is_dispose_none = (dispose_method == WEBP_MUX_DISPOSE_NONE);
  733. Candidate* const candidate_ll =
  734. is_dispose_none ? &candidates[LL_DISP_NONE] : &candidates[LL_DISP_BG];
  735. Candidate* const candidate_lossy = is_dispose_none
  736. ? &candidates[LOSSY_DISP_NONE]
  737. : &candidates[LOSSY_DISP_BG];
  738. WebPPicture* const curr_canvas = &enc->curr_canvas_copy_;
  739. const WebPPicture* const prev_canvas =
  740. is_dispose_none ? &enc->prev_canvas_ : &enc->prev_canvas_disposed_;
  741. int use_blending_ll;
  742. int use_blending_lossy;
  743. CopyCurrentCanvas(enc);
  744. use_blending_ll =
  745. !is_key_frame &&
  746. IsLosslessBlendingPossible(prev_canvas, curr_canvas, &params->rect_ll_);
  747. use_blending_lossy =
  748. !is_key_frame &&
  749. IsLossyBlendingPossible(prev_canvas, curr_canvas, &params->rect_lossy_,
  750. config_lossy->quality);
  751. // Pick candidates to be tried.
  752. if (!enc->options_.allow_mixed) {
  753. candidate_ll->evaluate_ = is_lossless;
  754. candidate_lossy->evaluate_ = !is_lossless;
  755. } else { // Use a heuristic for trying lossless and/or lossy compression.
  756. const int num_colors = WebPGetColorPalette(&params->sub_frame_ll_, NULL);
  757. candidate_ll->evaluate_ = (num_colors < MAX_COLORS_LOSSLESS);
  758. candidate_lossy->evaluate_ = (num_colors >= MIN_COLORS_LOSSY);
  759. }
  760. // Generate candidates.
  761. if (candidate_ll->evaluate_) {
  762. CopyCurrentCanvas(enc);
  763. if (use_blending_ll) {
  764. enc->curr_canvas_copy_modified_ =
  765. IncreaseTransparency(prev_canvas, &params->rect_ll_, curr_canvas);
  766. }
  767. error_code = EncodeCandidate(&params->sub_frame_ll_, &params->rect_ll_,
  768. config_ll, use_blending_ll, candidate_ll);
  769. if (error_code != VP8_ENC_OK) return error_code;
  770. }
  771. if (candidate_lossy->evaluate_) {
  772. CopyCurrentCanvas(enc);
  773. if (use_blending_lossy) {
  774. enc->curr_canvas_copy_modified_ =
  775. FlattenSimilarBlocks(prev_canvas, &params->rect_lossy_, curr_canvas,
  776. config_lossy->quality);
  777. }
  778. error_code =
  779. EncodeCandidate(&params->sub_frame_lossy_, &params->rect_lossy_,
  780. config_lossy, use_blending_lossy, candidate_lossy);
  781. if (error_code != VP8_ENC_OK) return error_code;
  782. enc->curr_canvas_copy_modified_ = 1;
  783. }
  784. return error_code;
  785. }
  786. #undef MIN_COLORS_LOSSY
  787. #undef MAX_COLORS_LOSSLESS
  788. static void GetEncodedData(const WebPMemoryWriter* const memory,
  789. WebPData* const encoded_data) {
  790. encoded_data->bytes = memory->mem;
  791. encoded_data->size = memory->size;
  792. }
  793. // Sets dispose method of the previous frame to be 'dispose_method'.
  794. static void SetPreviousDisposeMethod(WebPAnimEncoder* const enc,
  795. WebPMuxAnimDispose dispose_method) {
  796. const size_t position = enc->count_ - 2;
  797. EncodedFrame* const prev_enc_frame = GetFrame(enc, position);
  798. assert(enc->count_ >= 2); // As current and previous frames are in enc.
  799. if (enc->prev_candidate_undecided_) {
  800. assert(dispose_method == WEBP_MUX_DISPOSE_NONE);
  801. prev_enc_frame->sub_frame_.dispose_method = dispose_method;
  802. prev_enc_frame->key_frame_.dispose_method = dispose_method;
  803. } else {
  804. WebPMuxFrameInfo* const prev_info = prev_enc_frame->is_key_frame_
  805. ? &prev_enc_frame->key_frame_
  806. : &prev_enc_frame->sub_frame_;
  807. prev_info->dispose_method = dispose_method;
  808. }
  809. }
  810. static int IncreasePreviousDuration(WebPAnimEncoder* const enc, int duration) {
  811. const size_t position = enc->count_ - 1;
  812. EncodedFrame* const prev_enc_frame = GetFrame(enc, position);
  813. int new_duration;
  814. assert(enc->count_ >= 1);
  815. assert(prev_enc_frame->sub_frame_.duration ==
  816. prev_enc_frame->key_frame_.duration);
  817. assert(prev_enc_frame->sub_frame_.duration ==
  818. (prev_enc_frame->sub_frame_.duration & (MAX_DURATION - 1)));
  819. assert(duration == (duration & (MAX_DURATION - 1)));
  820. new_duration = prev_enc_frame->sub_frame_.duration + duration;
  821. if (new_duration >= MAX_DURATION) { // Special case.
  822. // Separate out previous frame from earlier merged frames to avoid overflow.
  823. // We add a 1x1 transparent frame for the previous frame, with blending on.
  824. const FrameRect rect = { 0, 0, 1, 1 };
  825. const uint8_t lossless_1x1_bytes[] = {
  826. 0x52, 0x49, 0x46, 0x46, 0x14, 0x00, 0x00, 0x00, 0x57, 0x45, 0x42, 0x50,
  827. 0x56, 0x50, 0x38, 0x4c, 0x08, 0x00, 0x00, 0x00, 0x2f, 0x00, 0x00, 0x00,
  828. 0x10, 0x88, 0x88, 0x08
  829. };
  830. const WebPData lossless_1x1 = {
  831. lossless_1x1_bytes, sizeof(lossless_1x1_bytes)
  832. };
  833. const uint8_t lossy_1x1_bytes[] = {
  834. 0x52, 0x49, 0x46, 0x46, 0x40, 0x00, 0x00, 0x00, 0x57, 0x45, 0x42, 0x50,
  835. 0x56, 0x50, 0x38, 0x58, 0x0a, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00,
  836. 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x41, 0x4c, 0x50, 0x48, 0x02, 0x00,
  837. 0x00, 0x00, 0x00, 0x00, 0x56, 0x50, 0x38, 0x20, 0x18, 0x00, 0x00, 0x00,
  838. 0x30, 0x01, 0x00, 0x9d, 0x01, 0x2a, 0x01, 0x00, 0x01, 0x00, 0x02, 0x00,
  839. 0x34, 0x25, 0xa4, 0x00, 0x03, 0x70, 0x00, 0xfe, 0xfb, 0xfd, 0x50, 0x00
  840. };
  841. const WebPData lossy_1x1 = { lossy_1x1_bytes, sizeof(lossy_1x1_bytes) };
  842. const int can_use_lossless =
  843. (enc->last_config_.lossless || enc->options_.allow_mixed);
  844. EncodedFrame* const curr_enc_frame = GetFrame(enc, enc->count_);
  845. curr_enc_frame->is_key_frame_ = 0;
  846. curr_enc_frame->sub_frame_.id = WEBP_CHUNK_ANMF;
  847. curr_enc_frame->sub_frame_.x_offset = 0;
  848. curr_enc_frame->sub_frame_.y_offset = 0;
  849. curr_enc_frame->sub_frame_.dispose_method = WEBP_MUX_DISPOSE_NONE;
  850. curr_enc_frame->sub_frame_.blend_method = WEBP_MUX_BLEND;
  851. curr_enc_frame->sub_frame_.duration = duration;
  852. if (!WebPDataCopy(can_use_lossless ? &lossless_1x1 : &lossy_1x1,
  853. &curr_enc_frame->sub_frame_.bitstream)) {
  854. return 0;
  855. }
  856. ++enc->count_;
  857. ++enc->count_since_key_frame_;
  858. enc->flush_count_ = enc->count_ - 1;
  859. enc->prev_candidate_undecided_ = 0;
  860. enc->prev_rect_ = rect;
  861. } else { // Regular case.
  862. // Increase duration of the previous frame by 'duration'.
  863. prev_enc_frame->sub_frame_.duration = new_duration;
  864. prev_enc_frame->key_frame_.duration = new_duration;
  865. }
  866. return 1;
  867. }
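// Note (illustrative, assuming the 24-bit ANMF duration field from
// format_constants.h): if identical frames keep getting merged into the
// previous frame and the accumulated duration would reach MAX_DURATION
// (2^24 ms, roughly 4.6 hours), the canned 1x1 transparent frame above is
// emitted to carry the extra duration instead of overflowing the field.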
  868. // Pick the candidate encoded frame with smallest size and release other
  869. // candidates.
  870. // TODO(later): Perhaps a rough SSIM/PSNR produced by the encoder should
  871. // also be a criterion, in addition to sizes.
  872. static void PickBestCandidate(WebPAnimEncoder* const enc,
  873. Candidate* const candidates, int is_key_frame,
  874. EncodedFrame* const encoded_frame) {
  875. int i;
  876. int best_idx = -1;
  877. size_t best_size = ~0;
  878. for (i = 0; i < CANDIDATE_COUNT; ++i) {
  879. if (candidates[i].evaluate_) {
  880. const size_t candidate_size = candidates[i].mem_.size;
  881. if (candidate_size < best_size) {
  882. best_idx = i;
  883. best_size = candidate_size;
  884. }
  885. }
  886. }
  887. assert(best_idx != -1);
  888. for (i = 0; i < CANDIDATE_COUNT; ++i) {
  889. if (candidates[i].evaluate_) {
  890. if (i == best_idx) {
  891. WebPMuxFrameInfo* const dst = is_key_frame
  892. ? &encoded_frame->key_frame_
  893. : &encoded_frame->sub_frame_;
  894. *dst = candidates[i].info_;
  895. GetEncodedData(&candidates[i].mem_, &dst->bitstream);
  896. if (!is_key_frame) {
  897. // Note: Previous dispose method only matters for non-keyframes.
  898. // Also, we don't want to modify previous dispose method that was
  899. // selected when a non key-frame was assumed.
  900. const WebPMuxAnimDispose prev_dispose_method =
  901. (best_idx == LL_DISP_NONE || best_idx == LOSSY_DISP_NONE)
  902. ? WEBP_MUX_DISPOSE_NONE
  903. : WEBP_MUX_DISPOSE_BACKGROUND;
  904. SetPreviousDisposeMethod(enc, prev_dispose_method);
  905. }
  906. enc->prev_rect_ = candidates[i].rect_; // save for next frame.
  907. } else {
  908. WebPMemoryWriterClear(&candidates[i].mem_);
  909. candidates[i].evaluate_ = 0;
  910. }
  911. }
  912. }
  913. }
  914. // Depending on the configuration, tries different compressions
  915. // (lossy/lossless), dispose methods, blending methods etc to encode the current
  916. // frame and outputs the best one in 'encoded_frame'.
  917. // 'frame_skipped' will be set to true if this frame should actually be skipped.
  918. static WebPEncodingError SetFrame(WebPAnimEncoder* const enc,
  919. const WebPConfig* const config,
  920. int is_key_frame,
  921. EncodedFrame* const encoded_frame,
  922. int* const frame_skipped) {
  923. int i;
  924. WebPEncodingError error_code = VP8_ENC_OK;
  925. const WebPPicture* const curr_canvas = &enc->curr_canvas_copy_;
  926. const WebPPicture* const prev_canvas = &enc->prev_canvas_;
  927. Candidate candidates[CANDIDATE_COUNT];
  928. const int is_lossless = config->lossless;
  929. const int is_first_frame = enc->is_first_frame_;
  930. // First frame cannot be skipped as there is no 'previous frame' to merge it
  931. // to. So, empty rectangle is not allowed for the first frame.
  932. const int empty_rect_allowed_none = !is_first_frame;
  933. // Even if there is exact pixel match between 'disposed previous canvas' and
  934. // 'current canvas', we can't skip current frame, as there may not be exact
  935. // pixel match between 'previous canvas' and 'current canvas'. So, we don't
  936. // allow empty rectangle in this case.
  937. const int empty_rect_allowed_bg = 0;
  938. // If current frame is a key-frame, dispose method of previous frame doesn't
  939. // matter, so we don't try dispose to background.
  940. // Also, if key-frame insertion is on, and previous frame could be picked as
  941. // either a sub-frame or a key-frame, then we can't be sure about what frame
  942. // rectangle would be disposed. In that case too, we don't try dispose to
  943. // background.
  944. const int dispose_bg_possible =
  945. !is_key_frame && !enc->prev_candidate_undecided_;
  946. SubFrameParams dispose_none_params;
  947. SubFrameParams dispose_bg_params;
  948. WebPConfig config_ll = *config;
  949. WebPConfig config_lossy = *config;
  950. config_ll.lossless = 1;
  951. config_lossy.lossless = 0;
  952. enc->last_config_ = *config;
  953. enc->last_config_reversed_ = config->lossless ? config_lossy : config_ll;
  954. *frame_skipped = 0;
  955. if (!SubFrameParamsInit(&dispose_none_params, 1, empty_rect_allowed_none) ||
  956. !SubFrameParamsInit(&dispose_bg_params, 0, empty_rect_allowed_bg)) {
  957. return VP8_ENC_ERROR_INVALID_CONFIGURATION;
  958. }
  959. for (i = 0; i < CANDIDATE_COUNT; ++i) {
  960. candidates[i].evaluate_ = 0;
  961. }
  962. // Change-rectangle assuming previous frame was DISPOSE_NONE.
  963. if (!GetSubRects(prev_canvas, curr_canvas, is_key_frame, is_first_frame,
  964. config_lossy.quality, &dispose_none_params)) {
  965. error_code = VP8_ENC_ERROR_INVALID_CONFIGURATION;
  966. goto Err;
  967. }
  968. if ((is_lossless && IsEmptyRect(&dispose_none_params.rect_ll_)) ||
  969. (!is_lossless && IsEmptyRect(&dispose_none_params.rect_lossy_))) {
  970. // Don't encode the frame at all. Instead, the duration of the previous
  971. // frame will be increased later.
  972. assert(empty_rect_allowed_none);
  973. *frame_skipped = 1;
  974. goto End;
  975. }
  976. if (dispose_bg_possible) {
  977. // Change-rectangle assuming previous frame was DISPOSE_BACKGROUND.
  978. WebPPicture* const prev_canvas_disposed = &enc->prev_canvas_disposed_;
  979. WebPCopyPixels(prev_canvas, prev_canvas_disposed);
  980. DisposeFrameRectangle(WEBP_MUX_DISPOSE_BACKGROUND, &enc->prev_rect_,
  981. prev_canvas_disposed);
  982. if (!GetSubRects(prev_canvas_disposed, curr_canvas, is_key_frame,
  983. is_first_frame, config_lossy.quality,
  984. &dispose_bg_params)) {
  985. error_code = VP8_ENC_ERROR_INVALID_CONFIGURATION;
  986. goto Err;
  987. }
  988. assert(!IsEmptyRect(&dispose_bg_params.rect_ll_));
  989. assert(!IsEmptyRect(&dispose_bg_params.rect_lossy_));
  990. if (enc->options_.minimize_size) { // Try both dispose methods.
  991. dispose_bg_params.should_try_ = 1;
  992. dispose_none_params.should_try_ = 1;
  993. } else if ((is_lossless &&
  994. RectArea(&dispose_bg_params.rect_ll_) <
  995. RectArea(&dispose_none_params.rect_ll_)) ||
  996. (!is_lossless &&
  997. RectArea(&dispose_bg_params.rect_lossy_) <
  998. RectArea(&dispose_none_params.rect_lossy_))) {
  999. dispose_bg_params.should_try_ = 1; // Pick DISPOSE_BACKGROUND.
  1000. dispose_none_params.should_try_ = 0;
  1001. }
  1002. }
  1003. if (dispose_none_params.should_try_) {
  1004. error_code = GenerateCandidates(
  1005. enc, candidates, WEBP_MUX_DISPOSE_NONE, is_lossless, is_key_frame,
  1006. &dispose_none_params, &config_ll, &config_lossy);
  1007. if (error_code != VP8_ENC_OK) goto Err;
  1008. }
  1009. if (dispose_bg_params.should_try_) {
  1010. assert(!enc->is_first_frame_);
  1011. assert(dispose_bg_possible);
  1012. error_code = GenerateCandidates(
  1013. enc, candidates, WEBP_MUX_DISPOSE_BACKGROUND, is_lossless, is_key_frame,
  1014. &dispose_bg_params, &config_ll, &config_lossy);
  1015. if (error_code != VP8_ENC_OK) goto Err;
  1016. }
  1017. PickBestCandidate(enc, candidates, is_key_frame, encoded_frame);
  1018. goto End;
  1019. Err:
  1020. for (i = 0; i < CANDIDATE_COUNT; ++i) {
  1021. if (candidates[i].evaluate_) {
  1022. WebPMemoryWriterClear(&candidates[i].mem_);
  1023. }
  1024. }
  1025. End:
  1026. SubFrameParamsFree(&dispose_none_params);
  1027. SubFrameParamsFree(&dispose_bg_params);
  1028. return error_code;
  1029. }
  1030. // Calculate the penalty incurred if we encode given frame as a key frame
  1031. // instead of a sub-frame.
  1032. static int64_t KeyFramePenalty(const EncodedFrame* const encoded_frame) {
  1033. return ((int64_t)encoded_frame->key_frame_.bitstream.size -
  1034. encoded_frame->sub_frame_.bitstream.size);
  1035. }
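// Illustrative example (values chosen for illustration): if a frame encodes to
// 9000 bytes as a key-frame but only 2500 bytes as a sub-frame, its penalty is
// 6500. In CacheFrame() below, the frame with the smallest penalty seen so far
// (tracked in 'best_delta_') is the one promoted to a key-frame.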
  1036. static int CacheFrame(WebPAnimEncoder* const enc,
  1037. const WebPConfig* const config) {
  1038. int ok = 0;
  1039. int frame_skipped = 0;
  1040. WebPEncodingError error_code = VP8_ENC_OK;
  1041. const size_t position = enc->count_;
  1042. EncodedFrame* const encoded_frame = GetFrame(enc, position);
  1043. ++enc->count_;
  1044. if (enc->is_first_frame_) { // Add this as a key-frame.
  1045. error_code = SetFrame(enc, config, 1, encoded_frame, &frame_skipped);
  1046. if (error_code != VP8_ENC_OK) goto End;
  1047. assert(frame_skipped == 0); // First frame can't be skipped, even if empty.
  1048. assert(position == 0 && enc->count_ == 1);
  1049. encoded_frame->is_key_frame_ = 1;
  1050. enc->flush_count_ = 0;
  1051. enc->count_since_key_frame_ = 0;
  1052. enc->prev_candidate_undecided_ = 0;
  1053. } else {
  1054. ++enc->count_since_key_frame_;
  1055. if (enc->count_since_key_frame_ <= enc->options_.kmin) {
  1056. // Add this as a frame rectangle.
  1057. error_code = SetFrame(enc, config, 0, encoded_frame, &frame_skipped);
  1058. if (error_code != VP8_ENC_OK) goto End;
  1059. if (frame_skipped) goto Skip;
  1060. encoded_frame->is_key_frame_ = 0;
  1061. enc->flush_count_ = enc->count_ - 1;
  1062. enc->prev_candidate_undecided_ = 0;
  1063. } else {
  1064. int64_t curr_delta;
  1065. // Add this as a frame rectangle to enc.
  1066. error_code = SetFrame(enc, config, 0, encoded_frame, &frame_skipped);
  1067. if (error_code != VP8_ENC_OK) goto End;
  1068. if (frame_skipped) goto Skip;
  1069. // Add this as a key-frame to enc, too.
  1070. error_code = SetFrame(enc, config, 1, encoded_frame, &frame_skipped);
  1071. if (error_code != VP8_ENC_OK) goto End;
  1072. assert(frame_skipped == 0); // Key-frame cannot be an empty rectangle.
  1073. // Analyze size difference of the two variants.
  1074. curr_delta = KeyFramePenalty(encoded_frame);
  1075. if (curr_delta <= enc->best_delta_) { // Pick this as the key-frame.
  1076. if (enc->keyframe_ != KEYFRAME_NONE) {
  1077. EncodedFrame* const old_keyframe = GetFrame(enc, enc->keyframe_);
  1078. assert(old_keyframe->is_key_frame_);
  1079. old_keyframe->is_key_frame_ = 0;
  1080. }
  1081. encoded_frame->is_key_frame_ = 1;
  1082. enc->keyframe_ = (int)position;
  1083. enc->best_delta_ = curr_delta;
  1084. enc->flush_count_ = enc->count_ - 1; // We can flush previous frames.
  1085. } else {
  1086. encoded_frame->is_key_frame_ = 0;
  1087. }
  1088. // Note: We need '>=' below because when kmin and kmax are both zero,
  1089. // 'count_since_key_frame_' will always be > kmax.
  1090. if (enc->count_since_key_frame_ >= enc->options_.kmax) {
  1091. enc->flush_count_ = enc->count_ - 1;
  1092. enc->count_since_key_frame_ = 0;
  1093. enc->keyframe_ = KEYFRAME_NONE;
  1094. enc->best_delta_ = DELTA_INFINITY;
  1095. }
  1096. enc->prev_candidate_undecided_ = 1;
  1097. }
  1098. }
  1099. // Update the previous canvas for the next call.
  1100. WebPCopyPixels(enc->curr_canvas_, &enc->prev_canvas_);
  1101. enc->is_first_frame_ = 0;
  1102. Skip:
  1103. ok = 1;
  1104. ++enc->in_frame_count_;
  1105. End:
  1106. if (!ok || frame_skipped) {
  1107. FrameRelease(encoded_frame);
  1108. // We reset some counters, as the frame addition failed/was skipped.
  1109. --enc->count_;
  1110. if (!enc->is_first_frame_) --enc->count_since_key_frame_;
  1111. if (!ok) {
  1112. MarkError2(enc, "ERROR adding frame. WebPEncodingError", error_code);
  1113. }
  1114. }
  1115. enc->curr_canvas_->error_code = error_code; // report error_code
  1116. assert(ok || error_code != VP8_ENC_OK);
  1117. return ok;
  1118. }
  1119. static int FlushFrames(WebPAnimEncoder* const enc) {
  1120. while (enc->flush_count_ > 0) {
  1121. WebPMuxError err;
  1122. EncodedFrame* const curr = GetFrame(enc, 0);
  1123. const WebPMuxFrameInfo* const info =
  1124. curr->is_key_frame_ ? &curr->key_frame_ : &curr->sub_frame_;
  1125. assert(enc->mux_ != NULL);
  1126. err = WebPMuxPushFrame(enc->mux_, info, 1);
  1127. if (err != WEBP_MUX_OK) {
  1128. MarkError2(enc, "ERROR adding frame. WebPMuxError", err);
  1129. return 0;
  1130. }
  1131. if (enc->options_.verbose) {
  1132. fprintf(stderr, "INFO: Added frame. offset:%d,%d dispose:%d blend:%d\n",
  1133. info->x_offset, info->y_offset, info->dispose_method,
  1134. info->blend_method);
  1135. }
  1136. ++enc->out_frame_count_;
  1137. FrameRelease(curr);
  1138. ++enc->start_;
  1139. --enc->flush_count_;
  1140. --enc->count_;
  1141. if (enc->keyframe_ != KEYFRAME_NONE) --enc->keyframe_;
  1142. }
  1143. if (enc->count_ == 1 && enc->start_ != 0) {
  1144. // Move enc->start to index 0.
  1145. const int enc_start_tmp = (int)enc->start_;
  1146. EncodedFrame temp = enc->encoded_frames_[0];
  1147. enc->encoded_frames_[0] = enc->encoded_frames_[enc_start_tmp];
  1148. enc->encoded_frames_[enc_start_tmp] = temp;
  1149. FrameRelease(&enc->encoded_frames_[enc_start_tmp]);
  1150. enc->start_ = 0;
  1151. }
  1152. return 1;
  1153. }
  1154. #undef DELTA_INFINITY
  1155. #undef KEYFRAME_NONE
  1156. int WebPAnimEncoderAdd(WebPAnimEncoder* enc, WebPPicture* frame, int timestamp,
  1157. const WebPConfig* encoder_config) {
  1158. WebPConfig config;
  1159. int ok;
  1160. if (enc == NULL) {
  1161. return 0;
  1162. }
  1163. MarkNoError(enc);
  1164. if (!enc->is_first_frame_) {
  1165. // Make sure timestamps are non-decreasing (integer wrap-around is OK).
  1166. const uint32_t prev_frame_duration =
  1167. (uint32_t)timestamp - enc->prev_timestamp_;
  1168. if (prev_frame_duration >= MAX_DURATION) {
  1169. if (frame != NULL) {
  1170. frame->error_code = VP8_ENC_ERROR_INVALID_CONFIGURATION;
  1171. }
  1172. MarkError(enc, "ERROR adding frame: timestamps must be non-decreasing");
  1173. return 0;
  1174. }
  1175. if (!IncreasePreviousDuration(enc, (int)prev_frame_duration)) {
  1176. return 0;
  1177. }
  1178. } else {
  1179. enc->first_timestamp_ = timestamp;
  1180. }
  1181. if (frame == NULL) { // Special: last call.
  1182. enc->got_null_frame_ = 1;
  1183. enc->prev_timestamp_ = timestamp;
  1184. return 1;
  1185. }
  1186. if (frame->width != enc->canvas_width_ ||
  1187. frame->height != enc->canvas_height_) {
  1188. frame->error_code = VP8_ENC_ERROR_INVALID_CONFIGURATION;
  1189. MarkError(enc, "ERROR adding frame: Invalid frame dimensions");
  1190. return 0;
  1191. }
  1192. if (!frame->use_argb) { // Convert frame from YUV(A) to ARGB.
  1193. if (enc->options_.verbose) {
  1194. fprintf(stderr, "WARNING: Converting frame from YUV(A) to ARGB format; "
  1195. "this incurs a small loss.\n");
  1196. }
  1197. if (!WebPPictureYUVAToARGB(frame)) {
  1198. MarkError(enc, "ERROR converting frame from YUV(A) to ARGB");
  1199. return 0;
  1200. }
  1201. }
  1202. if (encoder_config != NULL) {
  1203. if (!WebPValidateConfig(encoder_config)) {
  1204. MarkError(enc, "ERROR adding frame: Invalid WebPConfig");
  1205. return 0;
  1206. }
  1207. config = *encoder_config;
  1208. } else {
  1209. WebPConfigInit(&config);
  1210. config.lossless = 1;
  1211. }
  1212. assert(enc->curr_canvas_ == NULL);
  1213. enc->curr_canvas_ = frame; // Store reference.
  1214. assert(enc->curr_canvas_copy_modified_ == 1);
  1215. CopyCurrentCanvas(enc);
  1216. ok = CacheFrame(enc, &config) && FlushFrames(enc);
  1217. enc->curr_canvas_ = NULL;
  1218. enc->curr_canvas_copy_modified_ = 1;
  1219. if (ok) {
  1220. enc->prev_timestamp_ = timestamp;
  1221. }
  1222. return ok;
  1223. }
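// Illustrative usage note: frame durations are derived from consecutive
// timestamps. For frames added at t = 0, 100 and 200 ms followed by a final
// WebPAnimEncoderAdd(enc, NULL, 300, NULL) call, each of the three frames gets
// a duration of 100 ms; without the final NULL call, the last frame's duration
// is estimated in WebPAnimEncoderAssemble() as the average of the previous
// frames' durations.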
  1224. // -----------------------------------------------------------------------------
  1225. // Bitstream assembly.
  1226. static int DecodeFrameOntoCanvas(const WebPMuxFrameInfo* const frame,
  1227. WebPPicture* const canvas) {
  1228. const WebPData* const image = &frame->bitstream;
  1229. WebPPicture sub_image;
  1230. WebPDecoderConfig config;
  1231. WebPInitDecoderConfig(&config);
  1232. WebPUtilClearPic(canvas, NULL);
  1233. if (WebPGetFeatures(image->bytes, image->size, &config.input) !=
  1234. VP8_STATUS_OK) {
  1235. return 0;
  1236. }
  1237. if (!WebPPictureView(canvas, frame->x_offset, frame->y_offset,
  1238. config.input.width, config.input.height, &sub_image)) {
  1239. return 0;
  1240. }
  1241. config.output.is_external_memory = 1;
  1242. config.output.colorspace = MODE_BGRA;
  1243. config.output.u.RGBA.rgba = (uint8_t*)sub_image.argb;
  1244. config.output.u.RGBA.stride = sub_image.argb_stride * 4;
  1245. config.output.u.RGBA.size = config.output.u.RGBA.stride * sub_image.height;
  1246. if (WebPDecode(image->bytes, image->size, &config) != VP8_STATUS_OK) {
  1247. return 0;
  1248. }
  1249. return 1;
  1250. }
  1251. static int FrameToFullCanvas(WebPAnimEncoder* const enc,
  1252. const WebPMuxFrameInfo* const frame,
  1253. WebPData* const full_image) {
  1254. WebPPicture* const canvas_buf = &enc->curr_canvas_copy_;
  1255. WebPMemoryWriter mem1, mem2;
  1256. WebPMemoryWriterInit(&mem1);
  1257. WebPMemoryWriterInit(&mem2);
  1258. if (!DecodeFrameOntoCanvas(frame, canvas_buf)) goto Err;
  1259. if (!EncodeFrame(&enc->last_config_, canvas_buf, &mem1)) goto Err;
  1260. GetEncodedData(&mem1, full_image);
  1261. if (enc->options_.allow_mixed) {
  1262. if (!EncodeFrame(&enc->last_config_reversed_, canvas_buf, &mem2)) goto Err;
  1263. if (mem2.size < mem1.size) {
  1264. GetEncodedData(&mem2, full_image);
  1265. WebPMemoryWriterClear(&mem1);
  1266. } else {
  1267. WebPMemoryWriterClear(&mem2);
  1268. }
  1269. }
  1270. return 1;
  1271. Err:
  1272. WebPMemoryWriterClear(&mem1);
  1273. WebPMemoryWriterClear(&mem2);
  1274. return 0;
  1275. }
  1276. // Convert a single-frame animation to a non-animated image if appropriate.
  1277. // TODO(urvang): Can we pick one of the two heuristically (based on frame
  1278. // rectangle and/or presence of alpha)?
  1279. static WebPMuxError OptimizeSingleFrame(WebPAnimEncoder* const enc,
  1280. WebPData* const webp_data) {
  1281. WebPMuxError err = WEBP_MUX_OK;
  1282. int canvas_width, canvas_height;
  1283. WebPMuxFrameInfo frame;
  1284. WebPData full_image;
  1285. WebPData webp_data2;
  1286. WebPMux* const mux = WebPMuxCreate(webp_data, 0);
  1287. if (mux == NULL) return WEBP_MUX_BAD_DATA;
  1288. assert(enc->out_frame_count_ == 1);
  1289. WebPDataInit(&frame.bitstream);
  1290. WebPDataInit(&full_image);
  1291. WebPDataInit(&webp_data2);
  1292. err = WebPMuxGetFrame(mux, 1, &frame);
  1293. if (err != WEBP_MUX_OK) goto End;
  1294. if (frame.id != WEBP_CHUNK_ANMF) goto End; // Non-animation: nothing to do.
  1295. err = WebPMuxGetCanvasSize(mux, &canvas_width, &canvas_height);
  1296. if (err != WEBP_MUX_OK) goto End;
  1297. if (!FrameToFullCanvas(enc, &frame, &full_image)) {
  1298. err = WEBP_MUX_BAD_DATA;
  1299. goto End;
  1300. }
  1301. err = WebPMuxSetImage(mux, &full_image, 1);
  1302. if (err != WEBP_MUX_OK) goto End;
  1303. err = WebPMuxAssemble(mux, &webp_data2);
  1304. if (err != WEBP_MUX_OK) goto End;
  1305. if (webp_data2.size < webp_data->size) { // Pick 'webp_data2' if smaller.
  1306. WebPDataClear(webp_data);
  1307. *webp_data = webp_data2;
  1308. WebPDataInit(&webp_data2);
  1309. }
  1310. End:
  1311. WebPDataClear(&frame.bitstream);
  1312. WebPDataClear(&full_image);
  1313. WebPMuxDelete(mux);
  1314. WebPDataClear(&webp_data2);
  1315. return err;
  1316. }
  1317. int WebPAnimEncoderAssemble(WebPAnimEncoder* enc, WebPData* webp_data) {
  1318. WebPMux* mux;
  1319. WebPMuxError err;
  1320. if (enc == NULL) {
  1321. return 0;
  1322. }
  1323. MarkNoError(enc);
  1324. if (webp_data == NULL) {
  1325. MarkError(enc, "ERROR assembling: NULL input");
  1326. return 0;
  1327. }
  1328. if (enc->in_frame_count_ == 0) {
  1329. MarkError(enc, "ERROR: No frames to assemble");
  1330. return 0;
  1331. }
  1332. if (!enc->got_null_frame_ && enc->in_frame_count_ > 1 && enc->count_ > 0) {
  1333. // Set the duration of the last frame to the average of the previous frames' durations.
  1334. const double delta_time = enc->prev_timestamp_ - enc->first_timestamp_;
  1335. const int average_duration = (int)(delta_time / (enc->in_frame_count_ - 1));
  1336. if (!IncreasePreviousDuration(enc, average_duration)) {
  1337. return 0;
  1338. }
  1339. }
  1340. // Flush any remaining frames.
  1341. enc->flush_count_ = enc->count_;
  1342. if (!FlushFrames(enc)) {
  1343. return 0;
  1344. }
  1345. // Set definitive canvas size.
  1346. mux = enc->mux_;
  1347. err = WebPMuxSetCanvasSize(mux, enc->canvas_width_, enc->canvas_height_);
  1348. if (err != WEBP_MUX_OK) goto Err;
  1349. err = WebPMuxSetAnimationParams(mux, &enc->options_.anim_params);
  1350. if (err != WEBP_MUX_OK) goto Err;
  1351. // Assemble into a WebP bitstream.
  1352. err = WebPMuxAssemble(mux, webp_data);
  1353. if (err != WEBP_MUX_OK) goto Err;
  1354. if (enc->out_frame_count_ == 1) {
  1355. err = OptimizeSingleFrame(enc, webp_data);
  1356. if (err != WEBP_MUX_OK) goto Err;
  1357. }
  1358. return 1;
  1359. Err:
  1360. MarkError2(enc, "ERROR assembling WebP", err);
  1361. return 0;
  1362. }
  1363. const char* WebPAnimEncoderGetError(WebPAnimEncoder* enc) {
  1364. if (enc == NULL) return NULL;
  1365. return enc->error_str_;
  1366. }
  1367. // -----------------------------------------------------------------------------