  1. /*
  2. * Copyright (c) 2014 The WebM project authors. All Rights Reserved.
  3. *
  4. * Use of this source code is governed by a BSD-style license
  5. * that can be found in the LICENSE file in the root of the source
  6. * tree. An additional intellectual property rights grant can be found
  7. * in the file PATENTS. All contributing project authors may
  8. * be found in the AUTHORS file in the root of the source tree.
  9. */
#include <assert.h>  // assert() in vp9_cyclic_refresh_alloc().
#include <limits.h>
#include <math.h>
#include <string.h>  // memset().

#include "vpx_dsp/vpx_dsp_common.h"
#include "vpx_ports/system_state.h"

#include "vp9/common/vp9_seg_common.h"

#include "vp9/encoder/vp9_aq_cyclicrefresh.h"
#include "vp9/encoder/vp9_ratectrl.h"
#include "vp9/encoder/vp9_segmentation.h"
  18. CYCLIC_REFRESH *vp9_cyclic_refresh_alloc(int mi_rows, int mi_cols) {
  19. size_t last_coded_q_map_size;
  20. CYCLIC_REFRESH *const cr = vpx_calloc(1, sizeof(*cr));
  21. if (cr == NULL) return NULL;
  22. cr->map = vpx_calloc(mi_rows * mi_cols, sizeof(*cr->map));
  23. if (cr->map == NULL) {
  24. vp9_cyclic_refresh_free(cr);
  25. return NULL;
  26. }
  27. last_coded_q_map_size = mi_rows * mi_cols * sizeof(*cr->last_coded_q_map);
  28. cr->last_coded_q_map = vpx_malloc(last_coded_q_map_size);
  29. if (cr->last_coded_q_map == NULL) {
  30. vp9_cyclic_refresh_free(cr);
  31. return NULL;
  32. }
  33. assert(MAXQ <= 255);
  34. memset(cr->last_coded_q_map, MAXQ, last_coded_q_map_size);
  35. return cr;
  36. }
  37. void vp9_cyclic_refresh_free(CYCLIC_REFRESH *cr) {
  38. vpx_free(cr->map);
  39. vpx_free(cr->last_coded_q_map);
  40. vpx_free(cr);
  41. }
  42. // Check if this coding block, of size bsize, should be considered for refresh
  43. // (lower-qp coding). Decision can be based on various factors, such as
  44. // size of the coding block (i.e., below min_block size rejected), coding
  45. // mode, and rate/distortion.
  46. static int candidate_refresh_aq(const CYCLIC_REFRESH *cr, const MODE_INFO *mi,
  47. int64_t rate, int64_t dist, int bsize) {
  48. MV mv = mi->mv[0].as_mv;
  49. // Reject the block for lower-qp coding if projected distortion
  50. // is above the threshold, and any of the following is true:
  51. // 1) mode uses large mv
  52. // 2) mode is an intra-mode
  53. // Otherwise accept for refresh.
  54. if (dist > cr->thresh_dist_sb &&
  55. (mv.row > cr->motion_thresh || mv.row < -cr->motion_thresh ||
  56. mv.col > cr->motion_thresh || mv.col < -cr->motion_thresh ||
  57. !is_inter_block(mi)))
  58. return CR_SEGMENT_ID_BASE;
  59. else if (bsize >= BLOCK_16X16 && rate < cr->thresh_rate_sb &&
  60. is_inter_block(mi) && mi->mv[0].as_int == 0 &&
  61. cr->rate_boost_fac > 10)
  62. // More aggressive delta-q for bigger blocks with zero motion.
  63. return CR_SEGMENT_ID_BOOST2;
  64. else
  65. return CR_SEGMENT_ID_BOOST1;
  66. }
  67. // Compute delta-q for the segment.
  68. static int compute_deltaq(const VP9_COMP *cpi, int q, double rate_factor) {
  69. const CYCLIC_REFRESH *const cr = cpi->cyclic_refresh;
  70. const RATE_CONTROL *const rc = &cpi->rc;
  71. int deltaq = vp9_compute_qdelta_by_rate(rc, cpi->common.frame_type, q,
  72. rate_factor, cpi->common.bit_depth);
  73. if ((-deltaq) > cr->max_qdelta_perc * q / 100) {
  74. deltaq = -cr->max_qdelta_perc * q / 100;
  75. }
  76. return deltaq;
  77. }
  78. // For the just encoded frame, estimate the bits, incorporating the delta-q
  79. // from non-base segment. For now ignore effect of multiple segments
  80. // (with different delta-q). Note this function is called in the postencode
  81. // (called from rc_update_rate_correction_factors()).
  82. int vp9_cyclic_refresh_estimate_bits_at_q(const VP9_COMP *cpi,
  83. double correction_factor) {
  84. const VP9_COMMON *const cm = &cpi->common;
  85. const CYCLIC_REFRESH *const cr = cpi->cyclic_refresh;
  86. int estimated_bits;
  87. int mbs = cm->MBs;
  88. int num8x8bl = mbs << 2;
  89. // Weight for non-base segments: use actual number of blocks refreshed in
  90. // previous/just encoded frame. Note number of blocks here is in 8x8 units.
  91. double weight_segment1 = (double)cr->actual_num_seg1_blocks / num8x8bl;
  92. double weight_segment2 = (double)cr->actual_num_seg2_blocks / num8x8bl;
  93. // Take segment weighted average for estimated bits.
  94. estimated_bits =
  95. (int)((1.0 - weight_segment1 - weight_segment2) *
  96. vp9_estimate_bits_at_q(cm->frame_type, cm->base_qindex, mbs,
  97. correction_factor, cm->bit_depth) +
  98. weight_segment1 *
  99. vp9_estimate_bits_at_q(cm->frame_type,
  100. cm->base_qindex + cr->qindex_delta[1],
  101. mbs, correction_factor, cm->bit_depth) +
  102. weight_segment2 *
  103. vp9_estimate_bits_at_q(cm->frame_type,
  104. cm->base_qindex + cr->qindex_delta[2],
  105. mbs, correction_factor, cm->bit_depth));
  106. return estimated_bits;
  107. }
  108. // Prior to encoding the frame, estimate the bits per mb, for a given q = i and
  109. // a corresponding delta-q (for segment 1). This function is called in the
  110. // rc_regulate_q() to set the base qp index.
  111. // Note: the segment map is set to either 0/CR_SEGMENT_ID_BASE (no refresh) or
  112. // to 1/CR_SEGMENT_ID_BOOST1 (refresh) for each superblock, prior to encoding.
  113. int vp9_cyclic_refresh_rc_bits_per_mb(const VP9_COMP *cpi, int i,
  114. double correction_factor) {
  115. const VP9_COMMON *const cm = &cpi->common;
  116. CYCLIC_REFRESH *const cr = cpi->cyclic_refresh;
  117. int bits_per_mb;
  118. int deltaq = 0;
  119. if (cpi->oxcf.speed < 8)
  120. deltaq = compute_deltaq(cpi, i, cr->rate_ratio_qdelta);
  121. else
  122. deltaq = -(cr->max_qdelta_perc * i) / 200;
  123. // Take segment weighted average for bits per mb.
  124. bits_per_mb = (int)((1.0 - cr->weight_segment) *
  125. vp9_rc_bits_per_mb(cm->frame_type, i,
  126. correction_factor, cm->bit_depth) +
  127. cr->weight_segment *
  128. vp9_rc_bits_per_mb(cm->frame_type, i + deltaq,
  129. correction_factor, cm->bit_depth));
  130. return bits_per_mb;
  131. }
// Prior to coding a given prediction block, of size bsize at (mi_row, mi_col),
// check if we should reset the segment_id, and update the cyclic_refresh map
// and segmentation map.
// - rate/dist: projected rate and distortion for the block.
// - skip: nonzero if the block will be coded as skip.
// - p: per-plane source buffers (used for skin detection).
void vp9_cyclic_refresh_update_segment(VP9_COMP *const cpi, MODE_INFO *const mi,
                                       int mi_row, int mi_col, BLOCK_SIZE bsize,
                                       int64_t rate, int64_t dist, int skip,
                                       struct macroblock_plane *const p) {
  const VP9_COMMON *const cm = &cpi->common;
  CYCLIC_REFRESH *const cr = cpi->cyclic_refresh;
  // Block extent in 8x8 units, clipped to the frame boundary.
  const int bw = num_8x8_blocks_wide_lookup[bsize];
  const int bh = num_8x8_blocks_high_lookup[bsize];
  const int xmis = VPXMIN(cm->mi_cols - mi_col, bw);
  const int ymis = VPXMIN(cm->mi_rows - mi_row, bh);
  const int block_index = mi_row * cm->mi_cols + mi_col;
  int refresh_this_block = candidate_refresh_aq(cr, mi, rate, dist, bsize);
  // Default is to not update the refresh map.
  int new_map_value = cr->map[block_index];
  int x = 0;
  int y = 0;
  int is_skin = 0;
  // Promote small rejected blocks to refresh candidates if they are
  // classified as skin (perceptually important).
  if (refresh_this_block == 0 && bsize <= BLOCK_16X16 &&
      cpi->use_skin_detection) {
    is_skin =
        vp9_compute_skin_block(p[0].src.buf, p[1].src.buf, p[2].src.buf,
                               p[0].src.stride, p[1].src.stride, bsize, 0, 0);
    if (is_skin) refresh_this_block = 1;
  }
  // In VBR, golden-referenced blocks are already boosted — don't refresh.
  if (cpi->oxcf.rc_mode == VPX_VBR && mi->ref_frame[0] == GOLDEN_FRAME)
    refresh_this_block = 0;
  // If this block is labeled for refresh, check if we should reset the
  // segment_id.
  if (cyclic_refresh_segment_id_boosted(mi->segment_id)) {
    mi->segment_id = refresh_this_block;
    // Reset segment_id if it will be skipped.
    if (skip) mi->segment_id = CR_SEGMENT_ID_BASE;
  }
  // Update the cyclic refresh map, to be used for setting segmentation map
  // for the next frame. If the block will be refreshed this frame, mark it
  // as clean. The magnitude of the -ve influences how long before we consider
  // it for refresh again.
  if (cyclic_refresh_segment_id_boosted(mi->segment_id)) {
    new_map_value = -cr->time_for_refresh;
  } else if (refresh_this_block) {
    // Else if it is accepted as candidate for refresh, and has not already
    // been refreshed (marked as 1) then mark it as a candidate for cleanup
    // for future time (marked as 0), otherwise don't update it.
    if (cr->map[block_index] == 1) new_map_value = 0;
  } else {
    // Leave it marked as block that is not candidate for refresh.
    new_map_value = 1;
  }
  // Update entries in the cyclic refresh map with new_map_value, and
  // copy mbmi->segment_id into global segmentation map.
  for (y = 0; y < ymis; y++)
    for (x = 0; x < xmis; x++) {
      int map_offset = block_index + y * cm->mi_cols + x;
      cr->map[map_offset] = new_map_value;
      cpi->segmentation_map[map_offset] = mi->segment_id;
    }
}
// After encoding a superblock region at (mi_row, mi_col) of size bsize,
// record in last_coded_q_map the q index each covered 8x8 block was
// effectively coded at (base q plus the block's segment delta).
void vp9_cyclic_refresh_update_sb_postencode(VP9_COMP *const cpi,
                                             const MODE_INFO *const mi,
                                             int mi_row, int mi_col,
                                             BLOCK_SIZE bsize) {
  const VP9_COMMON *const cm = &cpi->common;
  CYCLIC_REFRESH *const cr = cpi->cyclic_refresh;
  // Block extent in 8x8 units, clipped to the frame boundary.
  const int bw = num_8x8_blocks_wide_lookup[bsize];
  const int bh = num_8x8_blocks_high_lookup[bsize];
  const int xmis = VPXMIN(cm->mi_cols - mi_col, bw);
  const int ymis = VPXMIN(cm->mi_rows - mi_row, bh);
  const int block_index = mi_row * cm->mi_cols + mi_col;
  int x, y;
  for (y = 0; y < ymis; y++)
    for (x = 0; x < xmis; x++) {
      int map_offset = block_index + y * cm->mi_cols + x;
      // Inter skip blocks were clearly not coded at the current qindex, so
      // don't update the map for them. For cases where motion is non-zero or
      // the reference frame isn't the previous frame, the previous value in
      // the map for this spatial location is not entirely correct.
      if ((!is_inter_block(mi) || !mi->skip) &&
          mi->segment_id <= CR_SEGMENT_ID_BOOST2) {
        // Non-skip block: record the q it was actually coded at.
        cr->last_coded_q_map[map_offset] =
            clamp(cm->base_qindex + cr->qindex_delta[mi->segment_id], 0, MAXQ);
      } else if (is_inter_block(mi) && mi->skip &&
                 mi->segment_id <= CR_SEGMENT_ID_BOOST2) {
        // Inter skip: keep the minimum of the stored value and the current
        // effective q, i.e. never raise the recorded q for a skipped block.
        cr->last_coded_q_map[map_offset] = VPXMIN(
            clamp(cm->base_qindex + cr->qindex_delta[mi->segment_id], 0, MAXQ),
            cr->last_coded_q_map[map_offset]);
      }
    }
}
// From the just encoded frame: update the actual number of blocks that were
// applied the segment delta q, and the amount of low motion in the frame.
// Also check conditions for forcing golden update, or preventing golden
// update if the period is up.
void vp9_cyclic_refresh_postencode(VP9_COMP *const cpi) {
  VP9_COMMON *const cm = &cpi->common;
  MODE_INFO **mi = cm->mi_grid_visible;
  CYCLIC_REFRESH *const cr = cpi->cyclic_refresh;
  RATE_CONTROL *const rc = &cpi->rc;
  unsigned char *const seg_map = cpi->segmentation_map;
  double fraction_low = 0.0;
  int force_gf_refresh = 0;
  int low_content_frame = 0;
  int mi_row, mi_col;
  cr->actual_num_seg1_blocks = 0;
  cr->actual_num_seg2_blocks = 0;
  // Walk the visible mi grid, counting how many 8x8 blocks landed in each
  // boosted segment, and how many inter blocks have small motion.
  for (mi_row = 0; mi_row < cm->mi_rows; mi_row++) {
    for (mi_col = 0; mi_col < cm->mi_cols; mi_col++) {
      MV mv = mi[0]->mv[0].as_mv;
      int map_index = mi_row * cm->mi_cols + mi_col;
      if (cyclic_refresh_segment_id(seg_map[map_index]) == CR_SEGMENT_ID_BOOST1)
        cr->actual_num_seg1_blocks++;
      else if (cyclic_refresh_segment_id(seg_map[map_index]) ==
               CR_SEGMENT_ID_BOOST2)
        cr->actual_num_seg2_blocks++;
      // Accumulate low_content_frame: inter blocks with both motion
      // components below 16 (1/8-pel units).
      if (is_inter_block(mi[0]) && abs(mv.row) < 16 && abs(mv.col) < 16)
        low_content_frame++;
      mi++;
    }
    // Skip the padding at the end of each mi-grid row.
    // NOTE(review): assumes the grid stride is mi_cols + 8 — confirm
    // against cm->mi_stride.
    mi += 8;
  }
  // Check for golden frame update: only for non-SVC and non-golden boost.
  if (!cpi->use_svc && cpi->ext_refresh_frame_flags_pending == 0 &&
      !cpi->oxcf.gf_cbr_boost_pct) {
    // Force this frame as a golden update frame if this frame changes the
    // resolution (resize_pending != 0).
    if (cpi->resize_pending != 0) {
      vp9_cyclic_refresh_set_golden_update(cpi);
      rc->frames_till_gf_update_due = rc->baseline_gf_interval;
      // Never schedule the golden update past the next key frame.
      if (rc->frames_till_gf_update_due > rc->frames_to_key)
        rc->frames_till_gf_update_due = rc->frames_to_key;
      cpi->refresh_golden_frame = 1;
      force_gf_refresh = 1;
    }
    // Update average of low content/motion in the frame (recursive filter
    // with weight 1/4 on the current frame).
    fraction_low = (double)low_content_frame / (cm->mi_rows * cm->mi_cols);
    cr->low_content_avg = (fraction_low + 3 * cr->low_content_avg) / 4;
    if (!force_gf_refresh && cpi->refresh_golden_frame == 1 &&
        rc->frames_since_key > rc->frames_since_golden + 1) {
      // Don't update golden reference if the amount of low_content for the
      // current encoded frame is small, or if the recursive average of the
      // low_content over the update interval window falls below threshold.
      if (fraction_low < 0.65 || cr->low_content_avg < 0.6) {
        cpi->refresh_golden_frame = 0;
      }
      // Reset for next internal.
      cr->low_content_avg = fraction_low;
    }
  }
}
  284. // Set golden frame update interval, for non-svc 1 pass CBR mode.
  285. void vp9_cyclic_refresh_set_golden_update(VP9_COMP *const cpi) {
  286. RATE_CONTROL *const rc = &cpi->rc;
  287. CYCLIC_REFRESH *const cr = cpi->cyclic_refresh;
  288. // Set minimum gf_interval for GF update to a multiple of the refresh period,
  289. // with some max limit. Depending on past encoding stats, GF flag may be
  290. // reset and update may not occur until next baseline_gf_interval.
  291. if (cr->percent_refresh > 0)
  292. rc->baseline_gf_interval = VPXMIN(4 * (100 / cr->percent_refresh), 40);
  293. else
  294. rc->baseline_gf_interval = 40;
  295. if (cpi->oxcf.rc_mode == VPX_VBR) rc->baseline_gf_interval = 20;
  296. if (rc->avg_frame_low_motion < 50 && rc->frames_since_key > 40)
  297. rc->baseline_gf_interval = 10;
  298. }
// Update the segmentation map, and related quantities: cyclic refresh map,
// refresh sb_index, and target number of blocks to be refreshed.
// The map is set to either 0/CR_SEGMENT_ID_BASE (no refresh) or to
// 1/CR_SEGMENT_ID_BOOST1 (refresh) for each superblock.
// Blocks labeled as BOOST1 may later get set to BOOST2 (during the
// encoding of the superblock).
static void cyclic_refresh_update_map(VP9_COMP *const cpi) {
  VP9_COMMON *const cm = &cpi->common;
  CYCLIC_REFRESH *const cr = cpi->cyclic_refresh;
  unsigned char *const seg_map = cpi->segmentation_map;
  int i, block_count, bl_index, sb_rows, sb_cols, sbs_in_frame;
  int xmis, ymis, x, y;
  int consec_zero_mv_thresh = 0;
  int qindex_thresh = 0;
  // count_sel/count_tot track, over the scanned superblocks, how many
  // cleanup-candidate blocks were actually selected for refresh.
  int count_sel = 0;
  int count_tot = 0;
  // Start from an all-base segmentation map for this frame.
  memset(seg_map, CR_SEGMENT_ID_BASE, cm->mi_rows * cm->mi_cols);
  sb_cols = (cm->mi_cols + MI_BLOCK_SIZE - 1) / MI_BLOCK_SIZE;
  sb_rows = (cm->mi_rows + MI_BLOCK_SIZE - 1) / MI_BLOCK_SIZE;
  sbs_in_frame = sb_cols * sb_rows;
  // Number of target blocks to get the q delta (segment 1).
  block_count = cr->percent_refresh * cm->mi_rows * cm->mi_cols / 100;
  // Set the segmentation map: cycle through the superblocks, starting at
  // cr->sb_index, and stopping when either block_count blocks have been found
  // to be refreshed, or we have passed through whole frame.
  assert(cr->sb_index < sbs_in_frame);
  i = cr->sb_index;
  cr->target_num_seg_blocks = 0;
  // Screen content keeps the zero-mv threshold at 0 (always refresh-eligible
  // by that criterion).
  if (cpi->oxcf.content != VP9E_CONTENT_SCREEN) {
    consec_zero_mv_thresh = 100;
  }
  qindex_thresh =
      cpi->oxcf.content == VP9E_CONTENT_SCREEN
          ? vp9_get_qindex(&cm->seg, CR_SEGMENT_ID_BOOST2, cm->base_qindex)
          : vp9_get_qindex(&cm->seg, CR_SEGMENT_ID_BOOST1, cm->base_qindex);
  // More aggressive settings for noisy content.
  if (cpi->noise_estimate.enabled && cpi->noise_estimate.level >= kMedium) {
    consec_zero_mv_thresh = 60;
    qindex_thresh =
        VPXMAX(vp9_get_qindex(&cm->seg, CR_SEGMENT_ID_BOOST1, cm->base_qindex),
               cm->base_qindex);
  }
  // Wrap-around scan of superblocks, resuming where the previous frame
  // stopped (cr->sb_index).
  do {
    int sum_map = 0;
    int consec_zero_mv_thresh_block = consec_zero_mv_thresh;
    // Get the mi_row/mi_col corresponding to superblock index i.
    int sb_row_index = (i / sb_cols);
    int sb_col_index = i - sb_row_index * sb_cols;
    int mi_row = sb_row_index * MI_BLOCK_SIZE;
    int mi_col = sb_col_index * MI_BLOCK_SIZE;
    assert(mi_row >= 0 && mi_row < cm->mi_rows);
    assert(mi_col >= 0 && mi_col < cm->mi_cols);
    bl_index = mi_row * cm->mi_cols + mi_col;
    // Loop through all 8x8 blocks in superblock and update map.
    xmis =
        VPXMIN(cm->mi_cols - mi_col, num_8x8_blocks_wide_lookup[BLOCK_64X64]);
    ymis =
        VPXMIN(cm->mi_rows - mi_row, num_8x8_blocks_high_lookup[BLOCK_64X64]);
    // For noisy content, relax the zero-mv requirement on thin boundary
    // superblocks.
    if (cpi->noise_estimate.enabled && cpi->noise_estimate.level >= kMedium &&
        (xmis <= 2 || ymis <= 2))
      consec_zero_mv_thresh_block = 4;
    for (y = 0; y < ymis; y++) {
      for (x = 0; x < xmis; x++) {
        const int bl_index2 = bl_index + y * cm->mi_cols + x;
        // If the block is as a candidate for clean up then mark it
        // for possible boost/refresh (segment 1). The segment id may get
        // reset to 0 later depending on the coding mode.
        if (cr->map[bl_index2] == 0) {
          count_tot++;
          if (cr->last_coded_q_map[bl_index2] > qindex_thresh ||
              cpi->consec_zero_mv[bl_index2] < consec_zero_mv_thresh_block) {
            sum_map++;
            count_sel++;
          }
        } else if (cr->map[bl_index2] < 0) {
          // Negative entries count down toward becoming candidates again.
          cr->map[bl_index2]++;
        }
      }
    }
    // Enforce constant segment over superblock.
    // If segment is at least half of superblock, set to 1.
    if (sum_map >= xmis * ymis / 2) {
      for (y = 0; y < ymis; y++)
        for (x = 0; x < xmis; x++) {
          seg_map[bl_index + y * cm->mi_cols + x] = CR_SEGMENT_ID_BOOST1;
        }
      cr->target_num_seg_blocks += xmis * ymis;
    }
    i++;
    // Wrap to the first superblock.
    if (i == sbs_in_frame) {
      i = 0;
    }
  } while (cr->target_num_seg_blocks < block_count && i != cr->sb_index);
  cr->sb_index = i;
  cr->reduce_refresh = 0;
  // If fewer than 3/4 of the candidate blocks were selected, reduce the
  // refresh rate next frame. (Shift binds tighter than '<', so this reads
  // count_sel < (3 * count_tot) / 4.)
  if (count_sel < (3 * count_tot) >> 2) cr->reduce_refresh = 1;
}
// Set cyclic refresh parameters for the upcoming frame: whether to apply
// refresh at all, the refresh percentage, motion/rate thresholds, q-delta
// ratios, and the pre-encode segment weight. Later condition blocks override
// earlier defaults, so the order of the adjustments below matters.
void vp9_cyclic_refresh_update_parameters(VP9_COMP *const cpi) {
  const RATE_CONTROL *const rc = &cpi->rc;
  const VP9_COMMON *const cm = &cpi->common;
  CYCLIC_REFRESH *const cr = cpi->cyclic_refresh;
  // Number of 8x8 blocks in the frame (4 per 16x16 macroblock).
  int num8x8bl = cm->MBs << 2;
  int target_refresh = 0;
  double weight_segment_target = 0;
  double weight_segment = 0;
  int thresh_low_motion = (cm->width < 720) ? 55 : 20;
  cr->apply_cyclic_refresh = 1;
  // Disable refresh on key frames, enhancement temporal layers, and
  // sustained high-motion non-SVC content.
  if (cm->frame_type == KEY_FRAME || cpi->svc.temporal_layer_id > 0 ||
      (!cpi->use_svc && rc->avg_frame_low_motion < thresh_low_motion &&
       rc->frames_since_key > 40)) {
    cr->apply_cyclic_refresh = 0;
    return;
  }
  // Baseline parameters; halve the refresh rate if the previous map update
  // signaled low selection (reduce_refresh).
  cr->percent_refresh = 10;
  if (cr->reduce_refresh) cr->percent_refresh = 5;
  cr->max_qdelta_perc = 60;
  cr->time_for_refresh = 0;
  cr->motion_thresh = 32;
  cr->rate_boost_fac = 15;
  // Use larger delta-qp (increase rate_ratio_qdelta) for first few (~4)
  // periods of the refresh cycle, after a key frame.
  // Account for larger interval on base layer for temporal layers.
  if (cr->percent_refresh > 0 &&
      rc->frames_since_key <
          (4 * cpi->svc.number_temporal_layers) * (100 / cr->percent_refresh)) {
    cr->rate_ratio_qdelta = 3.0;
  } else {
    cr->rate_ratio_qdelta = 2.0;
    if (cpi->noise_estimate.enabled && cpi->noise_estimate.level >= kMedium) {
      // Reduce the delta-qp if the estimated source noise is above threshold.
      cr->rate_ratio_qdelta = 1.7;
      cr->rate_boost_fac = 13;
    }
  }
  // Adjust some parameters for low resolutions.
  if (cm->width <= 352 && cm->height <= 288) {
    if (rc->avg_frame_bandwidth < 3000) {
      cr->motion_thresh = 16;
      cr->rate_boost_fac = 13;
    } else {
      cr->max_qdelta_perc = 70;
      cr->rate_ratio_qdelta = VPXMAX(cr->rate_ratio_qdelta, 2.5);
    }
  }
  // Spatial enhancement layers: tighter motion threshold, smaller boost.
  if (cpi->svc.spatial_layer_id > 0) {
    cr->motion_thresh = 4;
    cr->rate_boost_fac = 12;
  }
  if (cpi->oxcf.rc_mode == VPX_VBR) {
    // To be adjusted for VBR mode, e.g., based on gf period and boost.
    // For now use smaller qp-delta (than CBR), no second boosted seg, and
    // turn-off (no refresh) on golden refresh (since it's already boosted).
    cr->percent_refresh = 10;
    cr->rate_ratio_qdelta = 1.5;
    cr->rate_boost_fac = 10;
    if (cpi->refresh_golden_frame == 1) {
      cr->percent_refresh = 0;
      cr->rate_ratio_qdelta = 1.0;
    }
  }
  // Weight for segment prior to encoding: take the average of the target
  // number for the frame to be encoded and the actual from the previous frame.
  // Use the target if its less. To be used for setting the base qp for the
  // frame in vp9_rc_regulate_q.
  target_refresh = cr->percent_refresh * cm->mi_rows * cm->mi_cols / 100;
  weight_segment_target = (double)(target_refresh) / num8x8bl;
  weight_segment = (double)((target_refresh + cr->actual_num_seg1_blocks +
                             cr->actual_num_seg2_blocks) >>
                            1) /
                   num8x8bl;
  if (weight_segment_target < 7 * weight_segment / 8)
    weight_segment = weight_segment_target;
  cr->weight_segment = weight_segment;
}
// Setup cyclic background refresh: set delta q and segmentation map.
// When refresh is disabled (or segmentation is being force-updated), clears
// the segment map and disables segmentation; otherwise configures the
// BOOST1/BOOST2 q-delta features and refreshes the cyclic map.
void vp9_cyclic_refresh_setup(VP9_COMP *const cpi) {
  VP9_COMMON *const cm = &cpi->common;
  const RATE_CONTROL *const rc = &cpi->rc;
  CYCLIC_REFRESH *const cr = cpi->cyclic_refresh;
  struct segmentation *const seg = &cm->seg;
  if (cm->current_video_frame == 0) cr->low_content_avg = 0.0;
  if (!cr->apply_cyclic_refresh || (cpi->force_update_segmentation)) {
    // Set segmentation map to 0 and disable.
    unsigned char *const seg_map = cpi->segmentation_map;
    memset(seg_map, 0, cm->mi_rows * cm->mi_cols);
    vp9_disable_segmentation(&cm->seg);
    if (cm->frame_type == KEY_FRAME) {
      // Key frame: forget coded-q history and restart the refresh cycle.
      memset(cr->last_coded_q_map, MAXQ,
             cm->mi_rows * cm->mi_cols * sizeof(*cr->last_coded_q_map));
      cr->sb_index = 0;
      cr->reduce_refresh = 0;
    }
    return;
  } else {
    int qindex_delta = 0;
    int qindex2;
    const double q = vp9_convert_qindex_to_q(cm->base_qindex, cm->bit_depth);
    vpx_clear_system_state();
    // Set rate threshold to some multiple (set to 2 for now) of the target
    // rate (target is given by sb64_target_rate and scaled by 256).
    cr->thresh_rate_sb = ((int64_t)(rc->sb64_target_rate) << 8) << 2;
    // Distortion threshold, quadratic in Q, scale factor to be adjusted.
    // q will not exceed 457, so (q * q) is within 32bit; see:
    // vp9_convert_qindex_to_q(), vp9_ac_quant(), ac_qlookup*[].
    cr->thresh_dist_sb = ((int64_t)(q * q)) << 2;
    // Set up segmentation.
    // Clear down the segment map.
    vp9_enable_segmentation(&cm->seg);
    vp9_clearall_segfeatures(seg);
    // Select delta coding method.
    seg->abs_delta = SEGMENT_DELTADATA;
    // Note: setting temporal_update has no effect, as the seg-map coding method
    // (temporal or spatial) is determined in vp9_choose_segmap_coding_method(),
    // based on the coding cost of each method. For error_resilient mode on the
    // last_frame_seg_map is set to 0, so if temporal coding is used, it is
    // relative to 0 previous map.
    // seg->temporal_update = 0;
    // Segment BASE "Q" feature is disabled so it defaults to the baseline Q.
    vp9_disable_segfeature(seg, CR_SEGMENT_ID_BASE, SEG_LVL_ALT_Q);
    // Use segment BOOST1 for in-frame Q adjustment.
    vp9_enable_segfeature(seg, CR_SEGMENT_ID_BOOST1, SEG_LVL_ALT_Q);
    // Use segment BOOST2 for more aggressive in-frame Q adjustment.
    vp9_enable_segfeature(seg, CR_SEGMENT_ID_BOOST2, SEG_LVL_ALT_Q);
    // Set the q delta for segment BOOST1.
    qindex_delta = compute_deltaq(cpi, cm->base_qindex, cr->rate_ratio_qdelta);
    cr->qindex_delta[1] = qindex_delta;
    // Compute rd-mult for segment BOOST1.
    qindex2 = clamp(cm->base_qindex + cm->y_dc_delta_q + qindex_delta, 0, MAXQ);
    cr->rdmult = vp9_compute_rd_mult(cpi, qindex2);
    vp9_set_segdata(seg, CR_SEGMENT_ID_BOOST1, SEG_LVL_ALT_Q, qindex_delta);
    // Set a more aggressive (higher) q delta for segment BOOST2.
    qindex_delta = compute_deltaq(
        cpi, cm->base_qindex,
        VPXMIN(CR_MAX_RATE_TARGET_RATIO,
               0.1 * cr->rate_boost_fac * cr->rate_ratio_qdelta));
    cr->qindex_delta[2] = qindex_delta;
    vp9_set_segdata(seg, CR_SEGMENT_ID_BOOST2, SEG_LVL_ALT_Q, qindex_delta);
    // Reset if resoluton change has occurred.
    if (cpi->resize_pending != 0) vp9_cyclic_refresh_reset_resize(cpi);
    // Update the segmentation and refresh map.
    cyclic_refresh_update_map(cpi);
  }
}
  543. int vp9_cyclic_refresh_get_rdmult(const CYCLIC_REFRESH *cr) {
  544. return cr->rdmult;
  545. }
  546. void vp9_cyclic_refresh_reset_resize(VP9_COMP *const cpi) {
  547. const VP9_COMMON *const cm = &cpi->common;
  548. CYCLIC_REFRESH *const cr = cpi->cyclic_refresh;
  549. memset(cr->map, 0, cm->mi_rows * cm->mi_cols);
  550. memset(cr->last_coded_q_map, MAXQ, cm->mi_rows * cm->mi_cols);
  551. cr->sb_index = 0;
  552. cpi->refresh_golden_frame = 1;
  553. cpi->refresh_alt_ref_frame = 1;
  554. }