vp8_cx_iface.c

/*
 *  Copyright (c) 2010 The WebM project authors. All Rights Reserved.
 *
 *  Use of this source code is governed by a BSD-style license
 *  that can be found in the LICENSE file in the root of the source
 *  tree. An additional intellectual property rights grant can be found
 *  in the file PATENTS. All contributing project authors may
 *  be found in the AUTHORS file in the root of the source tree.
 */

#include "./vpx_config.h"
#include "./vp8_rtcd.h"
#include "./vpx_dsp_rtcd.h"
#include "./vpx_scale_rtcd.h"
#include "vpx/vpx_codec.h"
#include "vpx/internal/vpx_codec_internal.h"
#include "vpx_version.h"
#include "vpx_mem/vpx_mem.h"
#include "vpx_ports/vpx_once.h"
#include "vp8/encoder/onyx_int.h"
#include "vpx/vp8cx.h"
#include "vp8/encoder/firstpass.h"
#include "vp8/common/onyx.h"
#include "vp8/common/common.h"
#include <stdlib.h>
#include <string.h>

struct vp8_extracfg {
  struct vpx_codec_pkt_list *pkt_list;
  int cpu_used; /** available cpu percentage in 1/16 */
  /** if encoder decides to use alternate reference frame */
  unsigned int enable_auto_alt_ref;
  unsigned int noise_sensitivity;
  unsigned int Sharpness;
  unsigned int static_thresh;
  unsigned int token_partitions;
  unsigned int arnr_max_frames; /* alt_ref Noise Reduction Max Frame Count */
  unsigned int arnr_strength;   /* alt_ref Noise Reduction Strength */
  unsigned int arnr_type;       /* alt_ref filter type */
  vp8e_tuning tuning;
  unsigned int cq_level; /* constrained quality level */
  unsigned int rc_max_intra_bitrate_pct;
  unsigned int gf_cbr_boost_pct;
  unsigned int screen_content_mode;
};

static struct vp8_extracfg default_extracfg = {
  NULL,
#if !(CONFIG_REALTIME_ONLY)
  0, /* cpu_used */
#else
  4, /* cpu_used */
#endif
  0, /* enable_auto_alt_ref */
  0, /* noise_sensitivity */
  0, /* Sharpness */
  0, /* static_thresh */
#if (CONFIG_REALTIME_ONLY & CONFIG_ONTHEFLY_BITPACKING)
  VP8_EIGHT_TOKENPARTITION,
#else
  VP8_ONE_TOKENPARTITION, /* token_partitions */
#endif
  0,  /* arnr_max_frames */
  3,  /* arnr_strength */
  3,  /* arnr_type */
  0,  /* tuning */
  10, /* cq_level */
  0,  /* rc_max_intra_bitrate_pct */
  0,  /* gf_cbr_boost_pct */
  0,  /* screen_content_mode */
};

struct vpx_codec_alg_priv {
  vpx_codec_priv_t base;
  vpx_codec_enc_cfg_t cfg;
  struct vp8_extracfg vp8_cfg;
  VP8_CONFIG oxcf;
  struct VP8_COMP *cpi;
  unsigned char *cx_data;
  unsigned int cx_data_sz;
  vpx_image_t preview_img;
  unsigned int next_frame_flag;
  vp8_postproc_cfg_t preview_ppcfg;
  /* pkt_list size depends on the maximum number of lagged frames allowed. */
  vpx_codec_pkt_list_decl(64) pkt_list;
  unsigned int fixed_kf_cntr;
  vpx_enc_frame_flags_t control_frame_flags;
};

static vpx_codec_err_t update_error_state(
    vpx_codec_alg_priv_t *ctx, const struct vpx_internal_error_info *error) {
  vpx_codec_err_t res;

  if ((res = error->error_code)) {
    ctx->base.err_detail = error->has_detail ? error->detail : NULL;
  }

  return res;
}

#undef ERROR
#define ERROR(str)                  \
  do {                              \
    ctx->base.err_detail = str;     \
    return VPX_CODEC_INVALID_PARAM; \
  } while (0)

#define RANGE_CHECK(p, memb, lo, hi)                                 \
  do {                                                               \
    if (!(((p)->memb == lo || (p)->memb > (lo)) && (p)->memb <= hi)) \
      ERROR(#memb " out of range [" #lo ".." #hi "]");               \
  } while (0)

#define RANGE_CHECK_HI(p, memb, hi)                                     \
  do {                                                                  \
    if (!((p)->memb <= (hi))) ERROR(#memb " out of range [.." #hi "]"); \
  } while (0)

#define RANGE_CHECK_LO(p, memb, lo)                                     \
  do {                                                                  \
    if (!((p)->memb >= (lo))) ERROR(#memb " out of range [" #lo "..]"); \
  } while (0)

#define RANGE_CHECK_BOOL(p, memb)                                     \
  do {                                                                \
    if (!!((p)->memb) != (p)->memb) ERROR(#memb " expected boolean"); \
  } while (0)

static vpx_codec_err_t validate_config(vpx_codec_alg_priv_t *ctx,
                                       const vpx_codec_enc_cfg_t *cfg,
                                       const struct vp8_extracfg *vp8_cfg,
                                       int finalize) {
  RANGE_CHECK(cfg, g_w, 1, 16383); /* 14 bits available */
  RANGE_CHECK(cfg, g_h, 1, 16383); /* 14 bits available */
  RANGE_CHECK(cfg, g_timebase.den, 1, 1000000000);
  RANGE_CHECK(cfg, g_timebase.num, 1, 1000000000);
  RANGE_CHECK_HI(cfg, g_profile, 3);
  RANGE_CHECK_HI(cfg, rc_max_quantizer, 63);
  RANGE_CHECK_HI(cfg, rc_min_quantizer, cfg->rc_max_quantizer);
  RANGE_CHECK_HI(cfg, g_threads, 64);
#if CONFIG_REALTIME_ONLY
  RANGE_CHECK_HI(cfg, g_lag_in_frames, 0);
#elif CONFIG_MULTI_RES_ENCODING
  if (ctx->base.enc.total_encoders > 1) RANGE_CHECK_HI(cfg, g_lag_in_frames, 0);
#else
  RANGE_CHECK_HI(cfg, g_lag_in_frames, 25);
#endif
  RANGE_CHECK(cfg, rc_end_usage, VPX_VBR, VPX_Q);
  RANGE_CHECK_HI(cfg, rc_undershoot_pct, 1000);
  RANGE_CHECK_HI(cfg, rc_overshoot_pct, 1000);
  RANGE_CHECK_HI(cfg, rc_2pass_vbr_bias_pct, 100);
  RANGE_CHECK(cfg, kf_mode, VPX_KF_DISABLED, VPX_KF_AUTO);

  /* TODO: add spatial re-sampling support and frame dropping in
   * multi-res-encoder.
   */
#if CONFIG_MULTI_RES_ENCODING
  if (ctx->base.enc.total_encoders > 1)
    RANGE_CHECK_HI(cfg, rc_resize_allowed, 0);
#else
  RANGE_CHECK_BOOL(cfg, rc_resize_allowed);
#endif
  RANGE_CHECK_HI(cfg, rc_dropframe_thresh, 100);
  RANGE_CHECK_HI(cfg, rc_resize_up_thresh, 100);
  RANGE_CHECK_HI(cfg, rc_resize_down_thresh, 100);
#if CONFIG_REALTIME_ONLY
  RANGE_CHECK(cfg, g_pass, VPX_RC_ONE_PASS, VPX_RC_ONE_PASS);
#elif CONFIG_MULTI_RES_ENCODING
  if (ctx->base.enc.total_encoders > 1)
    RANGE_CHECK(cfg, g_pass, VPX_RC_ONE_PASS, VPX_RC_ONE_PASS);
#else
  RANGE_CHECK(cfg, g_pass, VPX_RC_ONE_PASS, VPX_RC_LAST_PASS);
#endif

  /* VP8 does not support a lower bound on the keyframe interval in
   * automatic keyframe placement mode.
   */
  if (cfg->kf_mode != VPX_KF_DISABLED && cfg->kf_min_dist != cfg->kf_max_dist &&
      cfg->kf_min_dist > 0)
    ERROR(
        "kf_min_dist not supported in auto mode, use 0 "
        "or kf_max_dist instead.");

  RANGE_CHECK_BOOL(vp8_cfg, enable_auto_alt_ref);
  RANGE_CHECK(vp8_cfg, cpu_used, -16, 16);

#if CONFIG_REALTIME_ONLY && !CONFIG_TEMPORAL_DENOISING
  RANGE_CHECK(vp8_cfg, noise_sensitivity, 0, 0);
#else
  RANGE_CHECK_HI(vp8_cfg, noise_sensitivity, 6);
#endif

  RANGE_CHECK(vp8_cfg, token_partitions, VP8_ONE_TOKENPARTITION,
              VP8_EIGHT_TOKENPARTITION);
  RANGE_CHECK_HI(vp8_cfg, Sharpness, 7);
  RANGE_CHECK(vp8_cfg, arnr_max_frames, 0, 15);
  RANGE_CHECK_HI(vp8_cfg, arnr_strength, 6);
  RANGE_CHECK(vp8_cfg, arnr_type, 1, 3);
  RANGE_CHECK(vp8_cfg, cq_level, 0, 63);
  RANGE_CHECK_HI(vp8_cfg, screen_content_mode, 2);
  if (finalize && (cfg->rc_end_usage == VPX_CQ || cfg->rc_end_usage == VPX_Q))
    RANGE_CHECK(vp8_cfg, cq_level, cfg->rc_min_quantizer,
                cfg->rc_max_quantizer);

#if !(CONFIG_REALTIME_ONLY)
  if (cfg->g_pass == VPX_RC_LAST_PASS) {
    size_t packet_sz = sizeof(FIRSTPASS_STATS);
    int n_packets = (int)(cfg->rc_twopass_stats_in.sz / packet_sz);
    FIRSTPASS_STATS *stats;

    if (!cfg->rc_twopass_stats_in.buf)
      ERROR("rc_twopass_stats_in.buf not set.");

    if (cfg->rc_twopass_stats_in.sz % packet_sz)
      ERROR("rc_twopass_stats_in.sz indicates truncated packet.");

    if (cfg->rc_twopass_stats_in.sz < 2 * packet_sz)
      ERROR("rc_twopass_stats_in requires at least two packets.");

    stats = (void *)((char *)cfg->rc_twopass_stats_in.buf +
                     (n_packets - 1) * packet_sz);

    if ((int)(stats->count + 0.5) != n_packets - 1)
      ERROR("rc_twopass_stats_in missing EOS stats packet");
  }
#endif

  RANGE_CHECK(cfg, ts_number_layers, 1, 5);

  if (cfg->ts_number_layers > 1) {
    unsigned int i;
    RANGE_CHECK_HI(cfg, ts_periodicity, 16);

    for (i = 1; i < cfg->ts_number_layers; ++i) {
      if (cfg->ts_target_bitrate[i] <= cfg->ts_target_bitrate[i - 1] &&
          cfg->rc_target_bitrate > 0)
        ERROR("ts_target_bitrate entries are not strictly increasing");
    }

    RANGE_CHECK(cfg, ts_rate_decimator[cfg->ts_number_layers - 1], 1, 1);
    for (i = cfg->ts_number_layers - 2; i > 0; i--) {
      if (cfg->ts_rate_decimator[i - 1] != 2 * cfg->ts_rate_decimator[i])
        ERROR("ts_rate_decimator factors are not powers of 2");
    }

    RANGE_CHECK_HI(cfg, ts_layer_id[i], cfg->ts_number_layers - 1);
  }

#if (CONFIG_REALTIME_ONLY & CONFIG_ONTHEFLY_BITPACKING)
  if (cfg->g_threads > (1 << vp8_cfg->token_partitions))
    ERROR("g_threads cannot be bigger than number of token partitions");
#endif

  return VPX_CODEC_OK;
}

static vpx_codec_err_t validate_img(vpx_codec_alg_priv_t *ctx,
                                    const vpx_image_t *img) {
  switch (img->fmt) {
    case VPX_IMG_FMT_YV12:
    case VPX_IMG_FMT_I420:
    case VPX_IMG_FMT_VPXI420:
    case VPX_IMG_FMT_VPXYV12: break;
    default:
      ERROR("Invalid image format. Only YV12 and I420 images are supported");
  }

  if ((img->d_w != ctx->cfg.g_w) || (img->d_h != ctx->cfg.g_h))
    ERROR("Image size must match encoder init configuration size");

  return VPX_CODEC_OK;
}

static vpx_codec_err_t set_vp8e_config(VP8_CONFIG *oxcf,
                                       vpx_codec_enc_cfg_t cfg,
                                       struct vp8_extracfg vp8_cfg,
                                       vpx_codec_priv_enc_mr_cfg_t *mr_cfg) {
  oxcf->multi_threaded = cfg.g_threads;
  oxcf->Version = cfg.g_profile;

  oxcf->Width = cfg.g_w;
  oxcf->Height = cfg.g_h;
  oxcf->timebase = cfg.g_timebase;

  oxcf->error_resilient_mode = cfg.g_error_resilient;

  switch (cfg.g_pass) {
    case VPX_RC_ONE_PASS: oxcf->Mode = MODE_BESTQUALITY; break;
    case VPX_RC_FIRST_PASS: oxcf->Mode = MODE_FIRSTPASS; break;
    case VPX_RC_LAST_PASS: oxcf->Mode = MODE_SECONDPASS_BEST; break;
  }

  if (cfg.g_pass == VPX_RC_FIRST_PASS || cfg.g_pass == VPX_RC_ONE_PASS) {
    oxcf->allow_lag = 0;
    oxcf->lag_in_frames = 0;
  } else {
    oxcf->allow_lag = (cfg.g_lag_in_frames) > 0;
    oxcf->lag_in_frames = cfg.g_lag_in_frames;
  }

  oxcf->allow_df = (cfg.rc_dropframe_thresh > 0);
  oxcf->drop_frames_water_mark = cfg.rc_dropframe_thresh;

  oxcf->allow_spatial_resampling = cfg.rc_resize_allowed;
  oxcf->resample_up_water_mark = cfg.rc_resize_up_thresh;
  oxcf->resample_down_water_mark = cfg.rc_resize_down_thresh;

  if (cfg.rc_end_usage == VPX_VBR) {
    oxcf->end_usage = USAGE_LOCAL_FILE_PLAYBACK;
  } else if (cfg.rc_end_usage == VPX_CBR) {
    oxcf->end_usage = USAGE_STREAM_FROM_SERVER;
  } else if (cfg.rc_end_usage == VPX_CQ) {
    oxcf->end_usage = USAGE_CONSTRAINED_QUALITY;
  } else if (cfg.rc_end_usage == VPX_Q) {
    oxcf->end_usage = USAGE_CONSTANT_QUALITY;
  }

  oxcf->target_bandwidth = cfg.rc_target_bitrate;
  oxcf->rc_max_intra_bitrate_pct = vp8_cfg.rc_max_intra_bitrate_pct;
  oxcf->gf_cbr_boost_pct = vp8_cfg.gf_cbr_boost_pct;

  oxcf->best_allowed_q = cfg.rc_min_quantizer;
  oxcf->worst_allowed_q = cfg.rc_max_quantizer;
  oxcf->cq_level = vp8_cfg.cq_level;
  oxcf->fixed_q = -1;

  oxcf->under_shoot_pct = cfg.rc_undershoot_pct;
  oxcf->over_shoot_pct = cfg.rc_overshoot_pct;

  oxcf->maximum_buffer_size_in_ms = cfg.rc_buf_sz;
  oxcf->starting_buffer_level_in_ms = cfg.rc_buf_initial_sz;
  oxcf->optimal_buffer_level_in_ms = cfg.rc_buf_optimal_sz;

  oxcf->maximum_buffer_size = cfg.rc_buf_sz;
  oxcf->starting_buffer_level = cfg.rc_buf_initial_sz;
  oxcf->optimal_buffer_level = cfg.rc_buf_optimal_sz;

  oxcf->two_pass_vbrbias = cfg.rc_2pass_vbr_bias_pct;
  oxcf->two_pass_vbrmin_section = cfg.rc_2pass_vbr_minsection_pct;
  oxcf->two_pass_vbrmax_section = cfg.rc_2pass_vbr_maxsection_pct;

  oxcf->auto_key =
      cfg.kf_mode == VPX_KF_AUTO && cfg.kf_min_dist != cfg.kf_max_dist;
  oxcf->key_freq = cfg.kf_max_dist;

  oxcf->number_of_layers = cfg.ts_number_layers;
  oxcf->periodicity = cfg.ts_periodicity;

  if (oxcf->number_of_layers > 1) {
    memcpy(oxcf->target_bitrate, cfg.ts_target_bitrate,
           sizeof(cfg.ts_target_bitrate));
    memcpy(oxcf->rate_decimator, cfg.ts_rate_decimator,
           sizeof(cfg.ts_rate_decimator));
    memcpy(oxcf->layer_id, cfg.ts_layer_id, sizeof(cfg.ts_layer_id));
  }

#if CONFIG_MULTI_RES_ENCODING
  /* When mr_cfg is NULL, oxcf->mr_total_resolutions and oxcf->mr_encoder_id
   * are both memset to 0, which ensures the correct logic under this
   * situation.
   */
  if (mr_cfg) {
    oxcf->mr_total_resolutions = mr_cfg->mr_total_resolutions;
    oxcf->mr_encoder_id = mr_cfg->mr_encoder_id;
    oxcf->mr_down_sampling_factor.num = mr_cfg->mr_down_sampling_factor.num;
    oxcf->mr_down_sampling_factor.den = mr_cfg->mr_down_sampling_factor.den;
    oxcf->mr_low_res_mode_info = mr_cfg->mr_low_res_mode_info;
  }
#else
  (void)mr_cfg;
#endif

  oxcf->cpu_used = vp8_cfg.cpu_used;
  oxcf->encode_breakout = vp8_cfg.static_thresh;
  oxcf->play_alternate = vp8_cfg.enable_auto_alt_ref;
  oxcf->noise_sensitivity = vp8_cfg.noise_sensitivity;
  oxcf->Sharpness = vp8_cfg.Sharpness;
  oxcf->token_partitions = vp8_cfg.token_partitions;

  oxcf->two_pass_stats_in = cfg.rc_twopass_stats_in;
  oxcf->output_pkt_list = vp8_cfg.pkt_list;

  oxcf->arnr_max_frames = vp8_cfg.arnr_max_frames;
  oxcf->arnr_strength = vp8_cfg.arnr_strength;
  oxcf->arnr_type = vp8_cfg.arnr_type;

  oxcf->tuning = vp8_cfg.tuning;

  oxcf->screen_content_mode = vp8_cfg.screen_content_mode;

  /*
  printf("Current VP8 Settings: \n");
  printf("target_bandwidth: %d\n", oxcf->target_bandwidth);
  printf("noise_sensitivity: %d\n", oxcf->noise_sensitivity);
  printf("Sharpness: %d\n", oxcf->Sharpness);
  printf("cpu_used: %d\n", oxcf->cpu_used);
  printf("Mode: %d\n", oxcf->Mode);
  printf("auto_key: %d\n", oxcf->auto_key);
  printf("key_freq: %d\n", oxcf->key_freq);
  printf("end_usage: %d\n", oxcf->end_usage);
  printf("under_shoot_pct: %d\n", oxcf->under_shoot_pct);
  printf("over_shoot_pct: %d\n", oxcf->over_shoot_pct);
  printf("starting_buffer_level: %d\n", oxcf->starting_buffer_level);
  printf("optimal_buffer_level: %d\n", oxcf->optimal_buffer_level);
  printf("maximum_buffer_size: %d\n", oxcf->maximum_buffer_size);
  printf("fixed_q: %d\n", oxcf->fixed_q);
  printf("worst_allowed_q: %d\n", oxcf->worst_allowed_q);
  printf("best_allowed_q: %d\n", oxcf->best_allowed_q);
  printf("allow_spatial_resampling: %d\n", oxcf->allow_spatial_resampling);
  printf("resample_down_water_mark: %d\n", oxcf->resample_down_water_mark);
  printf("resample_up_water_mark: %d\n", oxcf->resample_up_water_mark);
  printf("allow_df: %d\n", oxcf->allow_df);
  printf("drop_frames_water_mark: %d\n", oxcf->drop_frames_water_mark);
  printf("two_pass_vbrbias: %d\n", oxcf->two_pass_vbrbias);
  printf("two_pass_vbrmin_section: %d\n", oxcf->two_pass_vbrmin_section);
  printf("two_pass_vbrmax_section: %d\n", oxcf->two_pass_vbrmax_section);
  printf("allow_lag: %d\n", oxcf->allow_lag);
  printf("lag_in_frames: %d\n", oxcf->lag_in_frames);
  printf("play_alternate: %d\n", oxcf->play_alternate);
  printf("Version: %d\n", oxcf->Version);
  printf("multi_threaded: %d\n", oxcf->multi_threaded);
  printf("encode_breakout: %d\n", oxcf->encode_breakout);
  */
  return VPX_CODEC_OK;
}

static vpx_codec_err_t vp8e_set_config(vpx_codec_alg_priv_t *ctx,
                                       const vpx_codec_enc_cfg_t *cfg) {
  vpx_codec_err_t res;

  if (cfg->g_w != ctx->cfg.g_w || cfg->g_h != ctx->cfg.g_h) {
    if (cfg->g_lag_in_frames > 1 || cfg->g_pass != VPX_RC_ONE_PASS)
      ERROR("Cannot change width or height after initialization");
    if ((ctx->cpi->initial_width && (int)cfg->g_w > ctx->cpi->initial_width) ||
        (ctx->cpi->initial_height && (int)cfg->g_h > ctx->cpi->initial_height))
      ERROR("Cannot increase width or height larger than their initial values");
  }

  /* Prevent increasing lag_in_frames. This check is stricter than it needs
   * to be -- the limit is not increasing past the first lag_in_frames
   * value, but we don't track the initial config, only the last successful
   * config.
   */
  if ((cfg->g_lag_in_frames > ctx->cfg.g_lag_in_frames))
    ERROR("Cannot increase lag_in_frames");

  res = validate_config(ctx, cfg, &ctx->vp8_cfg, 0);

  if (!res) {
    ctx->cfg = *cfg;
    set_vp8e_config(&ctx->oxcf, ctx->cfg, ctx->vp8_cfg, NULL);
    vp8_change_config(ctx->cpi, &ctx->oxcf);
  }

  return res;
}

static vpx_codec_err_t get_quantizer(vpx_codec_alg_priv_t *ctx, va_list args) {
  int *const arg = va_arg(args, int *);
  if (arg == NULL) return VPX_CODEC_INVALID_PARAM;
  *arg = vp8_get_quantizer(ctx->cpi);
  return VPX_CODEC_OK;
}

static vpx_codec_err_t get_quantizer64(vpx_codec_alg_priv_t *ctx,
                                       va_list args) {
  int *const arg = va_arg(args, int *);
  if (arg == NULL) return VPX_CODEC_INVALID_PARAM;
  *arg = vp8_reverse_trans(vp8_get_quantizer(ctx->cpi));
  return VPX_CODEC_OK;
}

static vpx_codec_err_t update_extracfg(vpx_codec_alg_priv_t *ctx,
                                       const struct vp8_extracfg *extra_cfg) {
  const vpx_codec_err_t res = validate_config(ctx, &ctx->cfg, extra_cfg, 0);
  if (res == VPX_CODEC_OK) {
    ctx->vp8_cfg = *extra_cfg;
    set_vp8e_config(&ctx->oxcf, ctx->cfg, ctx->vp8_cfg, NULL);
    vp8_change_config(ctx->cpi, &ctx->oxcf);
  }
  return res;
}

static vpx_codec_err_t set_cpu_used(vpx_codec_alg_priv_t *ctx, va_list args) {
  struct vp8_extracfg extra_cfg = ctx->vp8_cfg;
  extra_cfg.cpu_used = CAST(VP8E_SET_CPUUSED, args);
  return update_extracfg(ctx, &extra_cfg);
}

static vpx_codec_err_t set_enable_auto_alt_ref(vpx_codec_alg_priv_t *ctx,
                                               va_list args) {
  struct vp8_extracfg extra_cfg = ctx->vp8_cfg;
  extra_cfg.enable_auto_alt_ref = CAST(VP8E_SET_ENABLEAUTOALTREF, args);
  return update_extracfg(ctx, &extra_cfg);
}

static vpx_codec_err_t set_noise_sensitivity(vpx_codec_alg_priv_t *ctx,
                                             va_list args) {
  struct vp8_extracfg extra_cfg = ctx->vp8_cfg;
  extra_cfg.noise_sensitivity = CAST(VP8E_SET_NOISE_SENSITIVITY, args);
  return update_extracfg(ctx, &extra_cfg);
}

static vpx_codec_err_t set_sharpness(vpx_codec_alg_priv_t *ctx, va_list args) {
  struct vp8_extracfg extra_cfg = ctx->vp8_cfg;
  extra_cfg.Sharpness = CAST(VP8E_SET_SHARPNESS, args);
  return update_extracfg(ctx, &extra_cfg);
}

static vpx_codec_err_t set_static_thresh(vpx_codec_alg_priv_t *ctx,
                                         va_list args) {
  struct vp8_extracfg extra_cfg = ctx->vp8_cfg;
  extra_cfg.static_thresh = CAST(VP8E_SET_STATIC_THRESHOLD, args);
  return update_extracfg(ctx, &extra_cfg);
}

static vpx_codec_err_t set_token_partitions(vpx_codec_alg_priv_t *ctx,
                                            va_list args) {
  struct vp8_extracfg extra_cfg = ctx->vp8_cfg;
  extra_cfg.token_partitions = CAST(VP8E_SET_TOKEN_PARTITIONS, args);
  return update_extracfg(ctx, &extra_cfg);
}

static vpx_codec_err_t set_arnr_max_frames(vpx_codec_alg_priv_t *ctx,
                                           va_list args) {
  struct vp8_extracfg extra_cfg = ctx->vp8_cfg;
  extra_cfg.arnr_max_frames = CAST(VP8E_SET_ARNR_MAXFRAMES, args);
  return update_extracfg(ctx, &extra_cfg);
}

static vpx_codec_err_t set_arnr_strength(vpx_codec_alg_priv_t *ctx,
                                         va_list args) {
  struct vp8_extracfg extra_cfg = ctx->vp8_cfg;
  extra_cfg.arnr_strength = CAST(VP8E_SET_ARNR_STRENGTH, args);
  return update_extracfg(ctx, &extra_cfg);
}

static vpx_codec_err_t set_arnr_type(vpx_codec_alg_priv_t *ctx, va_list args) {
  struct vp8_extracfg extra_cfg = ctx->vp8_cfg;
  extra_cfg.arnr_type = CAST(VP8E_SET_ARNR_TYPE, args);
  return update_extracfg(ctx, &extra_cfg);
}

static vpx_codec_err_t set_tuning(vpx_codec_alg_priv_t *ctx, va_list args) {
  struct vp8_extracfg extra_cfg = ctx->vp8_cfg;
  extra_cfg.tuning = CAST(VP8E_SET_TUNING, args);
  return update_extracfg(ctx, &extra_cfg);
}

static vpx_codec_err_t set_cq_level(vpx_codec_alg_priv_t *ctx, va_list args) {
  struct vp8_extracfg extra_cfg = ctx->vp8_cfg;
  extra_cfg.cq_level = CAST(VP8E_SET_CQ_LEVEL, args);
  return update_extracfg(ctx, &extra_cfg);
}

static vpx_codec_err_t set_rc_max_intra_bitrate_pct(vpx_codec_alg_priv_t *ctx,
                                                    va_list args) {
  struct vp8_extracfg extra_cfg = ctx->vp8_cfg;
  extra_cfg.rc_max_intra_bitrate_pct =
      CAST(VP8E_SET_MAX_INTRA_BITRATE_PCT, args);
  return update_extracfg(ctx, &extra_cfg);
}

static vpx_codec_err_t ctrl_set_rc_gf_cbr_boost_pct(vpx_codec_alg_priv_t *ctx,
                                                    va_list args) {
  struct vp8_extracfg extra_cfg = ctx->vp8_cfg;
  extra_cfg.gf_cbr_boost_pct = CAST(VP8E_SET_GF_CBR_BOOST_PCT, args);
  return update_extracfg(ctx, &extra_cfg);
}

static vpx_codec_err_t set_screen_content_mode(vpx_codec_alg_priv_t *ctx,
                                               va_list args) {
  struct vp8_extracfg extra_cfg = ctx->vp8_cfg;
  extra_cfg.screen_content_mode = CAST(VP8E_SET_SCREEN_CONTENT_MODE, args);
  return update_extracfg(ctx, &extra_cfg);
}

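/* Multi-resolution encoding: allocate the LOWER_RES_FRAME_INFO block that is
 * shared between the per-resolution encoders, holding one LOWER_RES_MB_INFO
 * entry per 16x16 macroblock.
 */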
static vpx_codec_err_t vp8e_mr_alloc_mem(const vpx_codec_enc_cfg_t *cfg,
                                         void **mem_loc) {
  vpx_codec_err_t res = VPX_CODEC_OK;

#if CONFIG_MULTI_RES_ENCODING
  LOWER_RES_FRAME_INFO *shared_mem_loc;
  int mb_rows = ((cfg->g_w + 15) >> 4);
  int mb_cols = ((cfg->g_h + 15) >> 4);

  shared_mem_loc = calloc(1, sizeof(LOWER_RES_FRAME_INFO));
  if (!shared_mem_loc) {
    return VPX_CODEC_MEM_ERROR;
  }

  shared_mem_loc->mb_info =
      calloc(mb_rows * mb_cols, sizeof(LOWER_RES_MB_INFO));
  if (!(shared_mem_loc->mb_info)) {
    free(shared_mem_loc);
    res = VPX_CODEC_MEM_ERROR;
  } else {
    *mem_loc = (void *)shared_mem_loc;
    res = VPX_CODEC_OK;
  }
#else
  (void)cfg;
  (void)mem_loc;
#endif
  return res;
}

static vpx_codec_err_t vp8e_init(vpx_codec_ctx_t *ctx,
                                 vpx_codec_priv_enc_mr_cfg_t *mr_cfg) {
  vpx_codec_err_t res = VPX_CODEC_OK;

  vp8_rtcd();
  vpx_dsp_rtcd();
  vpx_scale_rtcd();

  if (!ctx->priv) {
    struct vpx_codec_alg_priv *priv =
        (struct vpx_codec_alg_priv *)vpx_calloc(1, sizeof(*priv));

    if (!priv) {
      return VPX_CODEC_MEM_ERROR;
    }

    ctx->priv = (vpx_codec_priv_t *)priv;
    ctx->priv->init_flags = ctx->init_flags;

    if (ctx->config.enc) {
      /* Update the reference to the config structure to an
       * internal copy.
       */
      priv->cfg = *ctx->config.enc;
      ctx->config.enc = &priv->cfg;
    }

    priv->vp8_cfg = default_extracfg;
    priv->vp8_cfg.pkt_list = &priv->pkt_list.head;

    priv->cx_data_sz = priv->cfg.g_w * priv->cfg.g_h * 3 / 2 * 2;

    if (priv->cx_data_sz < 32768) priv->cx_data_sz = 32768;

    priv->cx_data = malloc(priv->cx_data_sz);

    if (!priv->cx_data) {
      return VPX_CODEC_MEM_ERROR;
    }

    if (mr_cfg) {
      ctx->priv->enc.total_encoders = mr_cfg->mr_total_resolutions;
    } else {
      ctx->priv->enc.total_encoders = 1;
    }

    once(vp8_initialize_enc);

    res = validate_config(priv, &priv->cfg, &priv->vp8_cfg, 0);

    if (!res) {
      set_vp8e_config(&priv->oxcf, priv->cfg, priv->vp8_cfg, mr_cfg);
      priv->cpi = vp8_create_compressor(&priv->oxcf);
      if (!priv->cpi) res = VPX_CODEC_MEM_ERROR;
    }
  }

  return res;
}

static vpx_codec_err_t vp8e_destroy(vpx_codec_alg_priv_t *ctx) {
#if CONFIG_MULTI_RES_ENCODING
  /* Free multi-encoder shared memory */
  if (ctx->oxcf.mr_total_resolutions > 0 &&
      (ctx->oxcf.mr_encoder_id == ctx->oxcf.mr_total_resolutions - 1)) {
    LOWER_RES_FRAME_INFO *shared_mem_loc =
        (LOWER_RES_FRAME_INFO *)ctx->oxcf.mr_low_res_mode_info;
    free(shared_mem_loc->mb_info);
    free(ctx->oxcf.mr_low_res_mode_info);
  }
#endif

  free(ctx->cx_data);
  vp8_remove_compressor(&ctx->cpi);
  vpx_free(ctx);
  return VPX_CODEC_OK;
}

static vpx_codec_err_t image2yuvconfig(const vpx_image_t *img,
                                       YV12_BUFFER_CONFIG *yv12) {
  const int y_w = img->d_w;
  const int y_h = img->d_h;
  const int uv_w = (img->d_w + 1) / 2;
  const int uv_h = (img->d_h + 1) / 2;
  vpx_codec_err_t res = VPX_CODEC_OK;
  yv12->y_buffer = img->planes[VPX_PLANE_Y];
  yv12->u_buffer = img->planes[VPX_PLANE_U];
  yv12->v_buffer = img->planes[VPX_PLANE_V];

  yv12->y_crop_width = y_w;
  yv12->y_crop_height = y_h;
  yv12->y_width = y_w;
  yv12->y_height = y_h;
  yv12->uv_crop_width = uv_w;
  yv12->uv_crop_height = uv_h;
  yv12->uv_width = uv_w;
  yv12->uv_height = uv_h;

  yv12->y_stride = img->stride[VPX_PLANE_Y];
  yv12->uv_stride = img->stride[VPX_PLANE_U];

  yv12->border = (img->stride[VPX_PLANE_Y] - img->w) / 2;
  return res;
}

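/* Map the caller's per-frame deadline onto an internal encoder mode: no
 * deadline selects best quality, a deadline longer than the frame's display
 * duration selects good quality, anything shorter (or VPX_DL_REALTIME)
 * selects realtime, and first/second pass settings override the result.
 */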
static void pick_quickcompress_mode(vpx_codec_alg_priv_t *ctx,
                                    unsigned long duration,
                                    unsigned long deadline) {
  int new_qc;

#if !(CONFIG_REALTIME_ONLY)
  /* Use best quality mode if no deadline is given. */
  new_qc = MODE_BESTQUALITY;

  if (deadline) {
    uint64_t duration_us;

    /* Convert duration parameter from stream timebase to microseconds */
    duration_us = (uint64_t)duration * 1000000 *
                  (uint64_t)ctx->cfg.g_timebase.num /
                  (uint64_t)ctx->cfg.g_timebase.den;

    /* If the deadline is more than the duration this frame is to be shown,
     * use good quality mode. Otherwise use realtime mode.
     */
    new_qc = (deadline > duration_us) ? MODE_GOODQUALITY : MODE_REALTIME;
  }

#else
  (void)duration;
  new_qc = MODE_REALTIME;
#endif

  if (deadline == VPX_DL_REALTIME) {
    new_qc = MODE_REALTIME;
  } else if (ctx->cfg.g_pass == VPX_RC_FIRST_PASS) {
    new_qc = MODE_FIRSTPASS;
  } else if (ctx->cfg.g_pass == VPX_RC_LAST_PASS) {
    new_qc =
        (new_qc == MODE_BESTQUALITY) ? MODE_SECONDPASS_BEST : MODE_SECONDPASS;
  }

  if (ctx->oxcf.Mode != new_qc) {
    ctx->oxcf.Mode = new_qc;
    vp8_change_config(ctx->cpi, &ctx->oxcf);
  }
}

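/* Translate the VP8_EFLAG_* per-frame flags into reference-usage,
 * reference-update, and entropy-update calls on the compressor.
 */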
static vpx_codec_err_t set_reference_and_update(vpx_codec_alg_priv_t *ctx,
                                                vpx_enc_frame_flags_t flags) {
  /* Handle Flags */
  if (((flags & VP8_EFLAG_NO_UPD_GF) && (flags & VP8_EFLAG_FORCE_GF)) ||
      ((flags & VP8_EFLAG_NO_UPD_ARF) && (flags & VP8_EFLAG_FORCE_ARF))) {
    ctx->base.err_detail = "Conflicting flags.";
    return VPX_CODEC_INVALID_PARAM;
  }

  if (flags &
      (VP8_EFLAG_NO_REF_LAST | VP8_EFLAG_NO_REF_GF | VP8_EFLAG_NO_REF_ARF)) {
    int ref = 7;

    if (flags & VP8_EFLAG_NO_REF_LAST) ref ^= VP8_LAST_FRAME;
    if (flags & VP8_EFLAG_NO_REF_GF) ref ^= VP8_GOLD_FRAME;
    if (flags & VP8_EFLAG_NO_REF_ARF) ref ^= VP8_ALTR_FRAME;

    vp8_use_as_reference(ctx->cpi, ref);
  }

  if (flags &
      (VP8_EFLAG_NO_UPD_LAST | VP8_EFLAG_NO_UPD_GF | VP8_EFLAG_NO_UPD_ARF |
       VP8_EFLAG_FORCE_GF | VP8_EFLAG_FORCE_ARF)) {
    int upd = 7;

    if (flags & VP8_EFLAG_NO_UPD_LAST) upd ^= VP8_LAST_FRAME;
    if (flags & VP8_EFLAG_NO_UPD_GF) upd ^= VP8_GOLD_FRAME;
    if (flags & VP8_EFLAG_NO_UPD_ARF) upd ^= VP8_ALTR_FRAME;

    vp8_update_reference(ctx->cpi, upd);
  }

  if (flags & VP8_EFLAG_NO_UPD_ENTROPY) {
    vp8_update_entropy(ctx->cpi, 0);
  }

  return VPX_CODEC_OK;
}

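/* Main encode entry point: timestamps are converted from the caller's
 * timebase to the encoder's internal 10,000,000 ticks/second clock, the raw
 * frame is passed in, and compressed data is drained into the packet list,
 * either as whole frames or as one packet per token partition.
 */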
static vpx_codec_err_t vp8e_encode(vpx_codec_alg_priv_t *ctx,
                                   const vpx_image_t *img, vpx_codec_pts_t pts,
                                   unsigned long duration,
                                   vpx_enc_frame_flags_t flags,
                                   unsigned long deadline) {
  vpx_codec_err_t res = VPX_CODEC_OK;

  if (!ctx->cfg.rc_target_bitrate) return res;

  if (img) res = validate_img(ctx, img);

  if (!res) res = validate_config(ctx, &ctx->cfg, &ctx->vp8_cfg, 1);

  pick_quickcompress_mode(ctx, duration, deadline);
  vpx_codec_pkt_list_init(&ctx->pkt_list);

  // If no flags are set in the encode call, then use the frame flags as
  // defined via the control function: vp8e_set_frame_flags.
  if (!flags) {
    flags = ctx->control_frame_flags;
  }
  ctx->control_frame_flags = 0;

  if (!res) res = set_reference_and_update(ctx, flags);

  /* Handle fixed keyframe intervals */
  if (ctx->cfg.kf_mode == VPX_KF_AUTO &&
      ctx->cfg.kf_min_dist == ctx->cfg.kf_max_dist) {
    if (++ctx->fixed_kf_cntr > ctx->cfg.kf_min_dist) {
      flags |= VPX_EFLAG_FORCE_KF;
      ctx->fixed_kf_cntr = 1;
    }
  }

  /* Initialize the encoder instance on the first frame */
  if (!res && ctx->cpi) {
    unsigned int lib_flags;
    YV12_BUFFER_CONFIG sd;
    int64_t dst_time_stamp, dst_end_time_stamp;
    size_t size, cx_data_sz;
    unsigned char *cx_data;
    unsigned char *cx_data_end;
    int comp_data_state = 0;

    /* Set up internal flags */
    if (ctx->base.init_flags & VPX_CODEC_USE_PSNR) {
      ((VP8_COMP *)ctx->cpi)->b_calculate_psnr = 1;
    }

    if (ctx->base.init_flags & VPX_CODEC_USE_OUTPUT_PARTITION) {
      ((VP8_COMP *)ctx->cpi)->output_partition = 1;
    }

    /* Convert API flags to internal codec lib flags */
    lib_flags = (flags & VPX_EFLAG_FORCE_KF) ? FRAMEFLAGS_KEY : 0;

    /* vp8 uses 10,000,000 ticks/second as time stamp */
    dst_time_stamp =
        pts * 10000000 * ctx->cfg.g_timebase.num / ctx->cfg.g_timebase.den;
    dst_end_time_stamp = (pts + duration) * 10000000 * ctx->cfg.g_timebase.num /
                         ctx->cfg.g_timebase.den;

    if (img != NULL) {
      res = image2yuvconfig(img, &sd);

      if (vp8_receive_raw_frame(ctx->cpi, ctx->next_frame_flag | lib_flags, &sd,
                                dst_time_stamp, dst_end_time_stamp)) {
        VP8_COMP *cpi = (VP8_COMP *)ctx->cpi;
        res = update_error_state(ctx, &cpi->common.error);
      }

      /* reset for next frame */
      ctx->next_frame_flag = 0;
    }

    cx_data = ctx->cx_data;
    cx_data_sz = ctx->cx_data_sz;
    cx_data_end = ctx->cx_data + cx_data_sz;
    lib_flags = 0;

    while (cx_data_sz >= ctx->cx_data_sz / 2) {
      comp_data_state = vp8_get_compressed_data(
          ctx->cpi, &lib_flags, &size, cx_data, cx_data_end, &dst_time_stamp,
          &dst_end_time_stamp, !img);

      if (comp_data_state == VPX_CODEC_CORRUPT_FRAME) {
        return VPX_CODEC_CORRUPT_FRAME;
      } else if (comp_data_state == -1) {
        break;
      }

      if (size) {
        vpx_codec_pts_t round, delta;
        vpx_codec_cx_pkt_t pkt;
        VP8_COMP *cpi = (VP8_COMP *)ctx->cpi;

        /* Add the frame packet to the list of returned packets. */
        round = (vpx_codec_pts_t)10000000 * ctx->cfg.g_timebase.num / 2 - 1;
        delta = (dst_end_time_stamp - dst_time_stamp);
        pkt.kind = VPX_CODEC_CX_FRAME_PKT;
        pkt.data.frame.pts =
            (dst_time_stamp * ctx->cfg.g_timebase.den + round) /
            ctx->cfg.g_timebase.num / 10000000;
        pkt.data.frame.duration =
            (unsigned long)((delta * ctx->cfg.g_timebase.den + round) /
                            ctx->cfg.g_timebase.num / 10000000);
        pkt.data.frame.flags = lib_flags << 16;

        if (lib_flags & FRAMEFLAGS_KEY) {
          pkt.data.frame.flags |= VPX_FRAME_IS_KEY;
        }

        if (!cpi->common.show_frame) {
          pkt.data.frame.flags |= VPX_FRAME_IS_INVISIBLE;

          /* This timestamp should be as close as possible to the
           * prior PTS so that if a decoder uses pts to schedule when
           * to do this, we start right after last frame was decoded.
           * Invisible frames have no duration.
           */
          pkt.data.frame.pts =
              ((cpi->last_time_stamp_seen * ctx->cfg.g_timebase.den + round) /
               ctx->cfg.g_timebase.num / 10000000) +
              1;
          pkt.data.frame.duration = 0;
        }

        if (cpi->droppable) pkt.data.frame.flags |= VPX_FRAME_IS_DROPPABLE;

        if (cpi->output_partition) {
          int i;
          const int num_partitions =
              (1 << cpi->common.multi_token_partition) + 1;

          pkt.data.frame.flags |= VPX_FRAME_IS_FRAGMENT;

          for (i = 0; i < num_partitions; ++i) {
#if CONFIG_REALTIME_ONLY & CONFIG_ONTHEFLY_BITPACKING
            pkt.data.frame.buf = cpi->partition_d[i];
#else
            pkt.data.frame.buf = cx_data;
            cx_data += cpi->partition_sz[i];
            cx_data_sz -= cpi->partition_sz[i];
#endif
            pkt.data.frame.sz = cpi->partition_sz[i];
            pkt.data.frame.partition_id = i;
            /* don't set the fragment bit for the last partition */
            if (i == (num_partitions - 1)) {
              pkt.data.frame.flags &= ~VPX_FRAME_IS_FRAGMENT;
            }
            vpx_codec_pkt_list_add(&ctx->pkt_list.head, &pkt);
          }
#if CONFIG_REALTIME_ONLY & CONFIG_ONTHEFLY_BITPACKING
          /* In lagged mode the encoder can buffer multiple frames.
           * We don't want this in partitioned output because
           * partitions are spread all over the output buffer.
           * So, force an exit!
           */
          cx_data_sz -= ctx->cx_data_sz / 2;
#endif
        } else {
          pkt.data.frame.buf = cx_data;
          pkt.data.frame.sz = size;
          pkt.data.frame.partition_id = -1;
          vpx_codec_pkt_list_add(&ctx->pkt_list.head, &pkt);
          cx_data += size;
          cx_data_sz -= size;
        }
      }
    }
  }

  return res;
}

static const vpx_codec_cx_pkt_t *vp8e_get_cxdata(vpx_codec_alg_priv_t *ctx,
                                                 vpx_codec_iter_t *iter) {
  return vpx_codec_pkt_list_get(&ctx->pkt_list.head, iter);
}

static vpx_codec_err_t vp8e_set_reference(vpx_codec_alg_priv_t *ctx,
                                          va_list args) {
  vpx_ref_frame_t *data = va_arg(args, vpx_ref_frame_t *);

  if (data) {
    vpx_ref_frame_t *frame = (vpx_ref_frame_t *)data;
    YV12_BUFFER_CONFIG sd;

    image2yuvconfig(&frame->img, &sd);
    vp8_set_reference(ctx->cpi, frame->frame_type, &sd);
    return VPX_CODEC_OK;
  } else {
    return VPX_CODEC_INVALID_PARAM;
  }
}

static vpx_codec_err_t vp8e_get_reference(vpx_codec_alg_priv_t *ctx,
                                          va_list args) {
  vpx_ref_frame_t *data = va_arg(args, vpx_ref_frame_t *);

  if (data) {
    vpx_ref_frame_t *frame = (vpx_ref_frame_t *)data;
    YV12_BUFFER_CONFIG sd;

    image2yuvconfig(&frame->img, &sd);
    vp8_get_reference(ctx->cpi, frame->frame_type, &sd);
    return VPX_CODEC_OK;
  } else {
    return VPX_CODEC_INVALID_PARAM;
  }
}

static vpx_codec_err_t vp8e_set_previewpp(vpx_codec_alg_priv_t *ctx,
                                          va_list args) {
#if CONFIG_POSTPROC
  vp8_postproc_cfg_t *data = va_arg(args, vp8_postproc_cfg_t *);
  if (data) {
    ctx->preview_ppcfg = *((vp8_postproc_cfg_t *)data);
    return VPX_CODEC_OK;
  } else {
    return VPX_CODEC_INVALID_PARAM;
  }
#else
  (void)ctx;
  (void)args;
  return VPX_CODEC_INCAPABLE;
#endif
}

static vpx_image_t *vp8e_get_preview(vpx_codec_alg_priv_t *ctx) {
  YV12_BUFFER_CONFIG sd;
  vp8_ppflags_t flags;
  vp8_zero(flags);

  if (ctx->preview_ppcfg.post_proc_flag) {
    flags.post_proc_flag = ctx->preview_ppcfg.post_proc_flag;
    flags.deblocking_level = ctx->preview_ppcfg.deblocking_level;
    flags.noise_level = ctx->preview_ppcfg.noise_level;
  }

  if (0 == vp8_get_preview_raw_frame(ctx->cpi, &sd, &flags)) {
    /*
    vpx_img_wrap(&ctx->preview_img, VPX_IMG_FMT_YV12,
                 sd.y_width + 2*VP8BORDERINPIXELS,
                 sd.y_height + 2*VP8BORDERINPIXELS,
                 1,
                 sd.buffer_alloc);
    vpx_img_set_rect(&ctx->preview_img,
                     VP8BORDERINPIXELS, VP8BORDERINPIXELS,
                     sd.y_width, sd.y_height);
    */

    ctx->preview_img.bps = 12;
    ctx->preview_img.planes[VPX_PLANE_Y] = sd.y_buffer;
    ctx->preview_img.planes[VPX_PLANE_U] = sd.u_buffer;
    ctx->preview_img.planes[VPX_PLANE_V] = sd.v_buffer;

    ctx->preview_img.fmt = VPX_IMG_FMT_I420;
    ctx->preview_img.x_chroma_shift = 1;
    ctx->preview_img.y_chroma_shift = 1;

    ctx->preview_img.d_w = sd.y_width;
    ctx->preview_img.d_h = sd.y_height;
    ctx->preview_img.stride[VPX_PLANE_Y] = sd.y_stride;
    ctx->preview_img.stride[VPX_PLANE_U] = sd.uv_stride;
    ctx->preview_img.stride[VPX_PLANE_V] = sd.uv_stride;
    ctx->preview_img.w = sd.y_width;
    ctx->preview_img.h = sd.y_height;

    return &ctx->preview_img;
  } else {
    return NULL;
  }
}

static vpx_codec_err_t vp8e_set_frame_flags(vpx_codec_alg_priv_t *ctx,
                                            va_list args) {
  int frame_flags = va_arg(args, int);
  ctx->control_frame_flags = frame_flags;
  return set_reference_and_update(ctx, frame_flags);
}

static vpx_codec_err_t vp8e_set_temporal_layer_id(vpx_codec_alg_priv_t *ctx,
                                                  va_list args) {
  int layer_id = va_arg(args, int);
  if (layer_id < 0 || layer_id >= (int)ctx->cfg.ts_number_layers) {
    return VPX_CODEC_INVALID_PARAM;
  }
  ctx->cpi->temporal_layer_id = layer_id;
  return VPX_CODEC_OK;
}

static vpx_codec_err_t vp8e_set_roi_map(vpx_codec_alg_priv_t *ctx,
                                        va_list args) {
  vpx_roi_map_t *data = va_arg(args, vpx_roi_map_t *);

  if (data) {
    vpx_roi_map_t *roi = (vpx_roi_map_t *)data;

    if (!vp8_set_roimap(ctx->cpi, roi->roi_map, roi->rows, roi->cols,
                        roi->delta_q, roi->delta_lf, roi->static_threshold)) {
      return VPX_CODEC_OK;
    } else {
      return VPX_CODEC_INVALID_PARAM;
    }
  } else {
    return VPX_CODEC_INVALID_PARAM;
  }
}

static vpx_codec_err_t vp8e_set_activemap(vpx_codec_alg_priv_t *ctx,
                                          va_list args) {
  vpx_active_map_t *data = va_arg(args, vpx_active_map_t *);

  if (data) {
    vpx_active_map_t *map = (vpx_active_map_t *)data;

    if (!vp8_set_active_map(ctx->cpi, map->active_map, map->rows, map->cols)) {
      return VPX_CODEC_OK;
    } else {
      return VPX_CODEC_INVALID_PARAM;
    }
  } else {
    return VPX_CODEC_INVALID_PARAM;
  }
}

static vpx_codec_err_t vp8e_set_scalemode(vpx_codec_alg_priv_t *ctx,
                                          va_list args) {
  vpx_scaling_mode_t *data = va_arg(args, vpx_scaling_mode_t *);

  if (data) {
    int res;
    vpx_scaling_mode_t scalemode = *(vpx_scaling_mode_t *)data;
    res = vp8_set_internal_size(ctx->cpi, (VPX_SCALING)scalemode.h_scaling_mode,
                                (VPX_SCALING)scalemode.v_scaling_mode);

    if (!res) {
      /* force next frame a key frame to effect scaling mode */
      ctx->next_frame_flag |= FRAMEFLAGS_KEY;
      return VPX_CODEC_OK;
    } else {
      return VPX_CODEC_INVALID_PARAM;
    }
  } else {
    return VPX_CODEC_INVALID_PARAM;
  }
}

static vpx_codec_ctrl_fn_map_t vp8e_ctf_maps[] = {
  { VP8_SET_REFERENCE, vp8e_set_reference },
  { VP8_COPY_REFERENCE, vp8e_get_reference },
  { VP8_SET_POSTPROC, vp8e_set_previewpp },
  { VP8E_SET_FRAME_FLAGS, vp8e_set_frame_flags },
  { VP8E_SET_TEMPORAL_LAYER_ID, vp8e_set_temporal_layer_id },
  { VP8E_SET_ROI_MAP, vp8e_set_roi_map },
  { VP8E_SET_ACTIVEMAP, vp8e_set_activemap },
  { VP8E_SET_SCALEMODE, vp8e_set_scalemode },
  { VP8E_SET_CPUUSED, set_cpu_used },
  { VP8E_SET_NOISE_SENSITIVITY, set_noise_sensitivity },
  { VP8E_SET_ENABLEAUTOALTREF, set_enable_auto_alt_ref },
  { VP8E_SET_SHARPNESS, set_sharpness },
  { VP8E_SET_STATIC_THRESHOLD, set_static_thresh },
  { VP8E_SET_TOKEN_PARTITIONS, set_token_partitions },
  { VP8E_GET_LAST_QUANTIZER, get_quantizer },
  { VP8E_GET_LAST_QUANTIZER_64, get_quantizer64 },
  { VP8E_SET_ARNR_MAXFRAMES, set_arnr_max_frames },
  { VP8E_SET_ARNR_STRENGTH, set_arnr_strength },
  { VP8E_SET_ARNR_TYPE, set_arnr_type },
  { VP8E_SET_TUNING, set_tuning },
  { VP8E_SET_CQ_LEVEL, set_cq_level },
  { VP8E_SET_MAX_INTRA_BITRATE_PCT, set_rc_max_intra_bitrate_pct },
  { VP8E_SET_SCREEN_CONTENT_MODE, set_screen_content_mode },
  { VP8E_SET_GF_CBR_BOOST_PCT, ctrl_set_rc_gf_cbr_boost_pct },
  { -1, NULL },
};

static vpx_codec_enc_cfg_map_t vp8e_usage_cfg_map[] = {
  { 0,
    {
        0, /* g_usage */
        0, /* g_threads */
        0, /* g_profile */

        320,        /* g_width */
        240,        /* g_height */
        VPX_BITS_8, /* g_bit_depth */
        8,          /* g_input_bit_depth */

        { 1, 30 }, /* g_timebase */

        0, /* g_error_resilient */

        VPX_RC_ONE_PASS, /* g_pass */

        0, /* g_lag_in_frames */

        0,  /* rc_dropframe_thresh */
        0,  /* rc_resize_allowed */
        1,  /* rc_scaled_width */
        1,  /* rc_scaled_height */
        60, /* rc_resize_down_thresh */
        30, /* rc_resize_up_thresh */

        VPX_VBR,     /* rc_end_usage */
        { NULL, 0 }, /* rc_twopass_stats_in */
        { NULL, 0 }, /* rc_firstpass_mb_stats_in */
        256,         /* rc_target_bandwidth */
        4,           /* rc_min_quantizer */
        63,          /* rc_max_quantizer */
        100,         /* rc_undershoot_pct */
        100,         /* rc_overshoot_pct */

        6000, /* rc_max_buffer_size */
        4000, /* rc_buffer_initial_size */
        5000, /* rc_buffer_optimal_size */

        50,  /* rc_two_pass_vbrbias */
        0,   /* rc_two_pass_vbrmin_section */
        400, /* rc_two_pass_vbrmax_section */

        /* keyframing settings (kf) */
        VPX_KF_AUTO, /* g_kfmode */
        0,           /* kf_min_dist */
        128,         /* kf_max_dist */

        VPX_SS_DEFAULT_LAYERS, /* ss_number_layers */
        { 0 },
        { 0 }, /* ss_target_bitrate */
        1,     /* ts_number_layers */
        { 0 }, /* ts_target_bitrate */
        { 0 }, /* ts_rate_decimator */
        0,     /* ts_periodicity */
        { 0 }, /* ts_layer_id */
        { 0 }, /* layer_target_bitrate */
        0      /* temporal_layering_mode */
    } },
};

#ifndef VERSION_STRING
#define VERSION_STRING
#endif
CODEC_INTERFACE(vpx_codec_vp8_cx) = {
  "WebM Project VP8 Encoder" VERSION_STRING,
  VPX_CODEC_INTERNAL_ABI_VERSION,
  VPX_CODEC_CAP_ENCODER | VPX_CODEC_CAP_PSNR | VPX_CODEC_CAP_OUTPUT_PARTITION,
  /* vpx_codec_caps_t caps; */
  vp8e_init,     /* vpx_codec_init_fn_t init; */
  vp8e_destroy,  /* vpx_codec_destroy_fn_t destroy; */
  vp8e_ctf_maps, /* vpx_codec_ctrl_fn_map_t *ctrl_maps; */
  {
      NULL, /* vpx_codec_peek_si_fn_t peek_si; */
      NULL, /* vpx_codec_get_si_fn_t get_si; */
      NULL, /* vpx_codec_decode_fn_t decode; */
      NULL, /* vpx_codec_frame_get_fn_t frame_get; */
      NULL, /* vpx_codec_set_fb_fn_t set_fb_fn; */
  },
  {
      1,                  /* 1 cfg map */
      vp8e_usage_cfg_map, /* vpx_codec_enc_cfg_map_t cfg_maps; */
      vp8e_encode,        /* vpx_codec_encode_fn_t encode; */
      vp8e_get_cxdata,    /* vpx_codec_get_cx_data_fn_t get_cx_data; */
      vp8e_set_config, NULL, vp8e_get_preview, vp8e_mr_alloc_mem,
  } /* encoder functions */
};