/*
 * Copyright 2018 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

//#define LOG_NDEBUG 0
#define LOG_TAG "C2SoftVpxEnc"
#include <log/log.h>
#include <utils/misc.h>

#include <media/hardware/VideoAPI.h>

#include <Codec2BufferUtils.h>
#include <Codec2CommonUtils.h>
#include <C2Debug.h>
#include "C2SoftVpxEnc.h"

#ifndef INT32_MAX
#define INT32_MAX 2147483647
#endif

/* Quantization param values defined by the spec */
#define VPX_QP_MIN 0
#define VPX_QP_MAX 63
#define VPX_QP_DEFAULT_MIN VPX_QP_MIN
#define VPX_QP_DEFAULT_MAX VPX_QP_MAX

namespace android {

C2SoftVpxEnc::IntfImpl::IntfImpl(const std::shared_ptr<C2ReflectorHelper> &helper)
    : SimpleInterface<void>::BaseParams(
            helper,
            COMPONENT_NAME,
            C2Component::KIND_ENCODER,
            C2Component::DOMAIN_VIDEO,
            MEDIA_MIMETYPE_VIDEO) {
    noPrivateBuffers(); // TODO: account for our buffers here
    noInputReferences();
    noOutputReferences();
    noInputLatency();
    noTimeStretch();
    setDerivedInstance(this);

    addParameter(
            DefineParam(mAttrib, C2_PARAMKEY_COMPONENT_ATTRIBUTES)
            .withConstValue(new C2ComponentAttributesSetting(
                    C2Component::ATTRIB_IS_TEMPORAL))
            .build());

    addParameter(
            DefineParam(mUsage, C2_PARAMKEY_INPUT_STREAM_USAGE)
            .withConstValue(new C2StreamUsageTuning::input(
                    0u, (uint64_t)C2MemoryUsage::CPU_READ))
            .build());

    // Odd dimension support in encoders requires Android V and above
    size_t stepSize = isAtLeastV() ? 1 : 2;
    addParameter(
            DefineParam(mSize, C2_PARAMKEY_PICTURE_SIZE)
            .withDefault(new C2StreamPictureSizeInfo::input(0u, 64, 64))
            .withFields({
                C2F(mSize, width).inRange(2, 2048, stepSize),
                C2F(mSize, height).inRange(2, 2048, stepSize),
            })
            .withSetter(SizeSetter)
            .build());

    addParameter(
            DefineParam(mBitrateMode, C2_PARAMKEY_BITRATE_MODE)
            .withDefault(new C2StreamBitrateModeTuning::output(
                    0u, C2Config::BITRATE_VARIABLE))
            .withFields({
                C2F(mBitrateMode, value).oneOf({
                    C2Config::BITRATE_CONST, C2Config::BITRATE_VARIABLE })
            })
            .withSetter(
                    Setter<decltype(*mBitrateMode)>::StrictValueWithNoDeps)
            .build());

    addParameter(
            DefineParam(mFrameRate, C2_PARAMKEY_FRAME_RATE)
            .withDefault(new C2StreamFrameRateInfo::output(0u, 1.))
            // TODO: More restriction?
            .withFields({C2F(mFrameRate, value).greaterThan(0.)})
            .withSetter(
                    Setter<decltype(*mFrameRate)>::StrictValueWithNoDeps)
            .build());

    addParameter(
            DefineParam(mLayering, C2_PARAMKEY_TEMPORAL_LAYERING)
            .withDefault(C2StreamTemporalLayeringTuning::output::AllocShared(0u, 0, 0, 0))
            .withFields({
                C2F(mLayering, m.layerCount).inRange(0, 4),
                C2F(mLayering, m.bLayerCount).inRange(0, 0),
                C2F(mLayering, m.bitrateRatios).inRange(0., 1.)
            })
            .withSetter(LayeringSetter)
            .build());

    addParameter(
            DefineParam(mSyncFramePeriod, C2_PARAMKEY_SYNC_FRAME_INTERVAL)
            .withDefault(new C2StreamSyncFrameIntervalTuning::output(0u, 1000000))
            .withFields({C2F(mSyncFramePeriod, value).any()})
            .withSetter(Setter<decltype(*mSyncFramePeriod)>::StrictValueWithNoDeps)
            .build());

    addParameter(
            DefineParam(mBitrate, C2_PARAMKEY_BITRATE)
            .withDefault(new C2StreamBitrateInfo::output(0u, 64000))
            .withFields({C2F(mBitrate, value).inRange(4096, 40000000)})
            .withSetter(BitrateSetter)
            .build());

    addParameter(
            DefineParam(mIntraRefresh, C2_PARAMKEY_INTRA_REFRESH)
            .withConstValue(new C2StreamIntraRefreshTuning::output(
                    0u, C2Config::INTRA_REFRESH_DISABLED, 0.))
            .build());
#ifdef VP9
    addParameter(
            DefineParam(mProfileLevel, C2_PARAMKEY_PROFILE_LEVEL)
            .withDefault(new C2StreamProfileLevelInfo::output(
                    0u, PROFILE_VP9_0, LEVEL_VP9_4_1))
            .withFields({
                C2F(mProfileLevel, profile).equalTo(
                    PROFILE_VP9_0
                ),
                C2F(mProfileLevel, level).oneOf({
                    C2Config::LEVEL_VP9_1,
                    C2Config::LEVEL_VP9_1_1,
                    C2Config::LEVEL_VP9_2,
                    C2Config::LEVEL_VP9_2_1,
                    C2Config::LEVEL_VP9_3,
                    C2Config::LEVEL_VP9_3_1,
                    C2Config::LEVEL_VP9_4,
                    C2Config::LEVEL_VP9_4_1,
                }),
            })
            .withSetter(ProfileLevelSetter, mSize, mFrameRate, mBitrate)
            .build());
#else
    addParameter(
            DefineParam(mProfileLevel, C2_PARAMKEY_PROFILE_LEVEL)
            .withDefault(new C2StreamProfileLevelInfo::output(
                    0u, PROFILE_VP8_0, LEVEL_UNUSED))
            .withFields({
                C2F(mProfileLevel, profile).equalTo(
                    PROFILE_VP8_0
                ),
                C2F(mProfileLevel, level).equalTo(
                    LEVEL_UNUSED),
            })
            .withSetter(ProfileLevelSetter, mSize, mFrameRate, mBitrate)
            .build());
#endif
    addParameter(
            DefineParam(mRequestSync, C2_PARAMKEY_REQUEST_SYNC_FRAME)
            .withDefault(new C2StreamRequestSyncFrameTuning::output(0u, C2_FALSE))
            .withFields({C2F(mRequestSync, value).oneOf({ C2_FALSE, C2_TRUE }) })
            .withSetter(Setter<decltype(*mRequestSync)>::NonStrictValueWithNoDeps)
            .build());

    addParameter(
            DefineParam(mColorAspects, C2_PARAMKEY_COLOR_ASPECTS)
            .withDefault(new C2StreamColorAspectsInfo::input(
                    0u, C2Color::RANGE_UNSPECIFIED, C2Color::PRIMARIES_UNSPECIFIED,
                    C2Color::TRANSFER_UNSPECIFIED, C2Color::MATRIX_UNSPECIFIED))
            .withFields({
                C2F(mColorAspects, range).inRange(
                        C2Color::RANGE_UNSPECIFIED, C2Color::RANGE_OTHER),
                C2F(mColorAspects, primaries).inRange(
                        C2Color::PRIMARIES_UNSPECIFIED, C2Color::PRIMARIES_OTHER),
                C2F(mColorAspects, transfer).inRange(
                        C2Color::TRANSFER_UNSPECIFIED, C2Color::TRANSFER_OTHER),
                C2F(mColorAspects, matrix).inRange(
                        C2Color::MATRIX_UNSPECIFIED, C2Color::MATRIX_OTHER)
            })
            .withSetter(ColorAspectsSetter)
            .build());

    addParameter(
            DefineParam(mCodedColorAspects, C2_PARAMKEY_VUI_COLOR_ASPECTS)
            .withDefault(new C2StreamColorAspectsInfo::output(
                    0u, C2Color::RANGE_LIMITED, C2Color::PRIMARIES_UNSPECIFIED,
                    C2Color::TRANSFER_UNSPECIFIED, C2Color::MATRIX_UNSPECIFIED))
            .withFields({
                C2F(mCodedColorAspects, range).inRange(
                        C2Color::RANGE_UNSPECIFIED, C2Color::RANGE_OTHER),
                C2F(mCodedColorAspects, primaries).inRange(
                        C2Color::PRIMARIES_UNSPECIFIED, C2Color::PRIMARIES_OTHER),
                C2F(mCodedColorAspects, transfer).inRange(
                        C2Color::TRANSFER_UNSPECIFIED, C2Color::TRANSFER_OTHER),
                C2F(mCodedColorAspects, matrix).inRange(
                        C2Color::MATRIX_UNSPECIFIED, C2Color::MATRIX_OTHER)
            })
            .withSetter(CodedColorAspectsSetter, mColorAspects)
            .build());

    addParameter(
            DefineParam(mPictureQuantization, C2_PARAMKEY_PICTURE_QUANTIZATION)
            .withDefault(C2StreamPictureQuantizationTuning::output::AllocShared(
                    0 /* flexCount */, 0u /* stream */))
            .withFields({C2F(mPictureQuantization, m.values[0].type_).oneOf(
                             {C2Config::I_FRAME, C2Config::P_FRAME}),
                         C2F(mPictureQuantization, m.values[0].min).inRange(
                             VPX_QP_DEFAULT_MIN, VPX_QP_DEFAULT_MAX),
                         C2F(mPictureQuantization, m.values[0].max).inRange(
                             VPX_QP_DEFAULT_MIN, VPX_QP_DEFAULT_MAX)})
            .withSetter(PictureQuantizationSetter)
            .build());

}

C2R C2SoftVpxEnc::IntfImpl::BitrateSetter(bool mayBlock, C2P<C2StreamBitrateInfo::output> &me) {
    (void)mayBlock;
    C2R res = C2R::Ok();
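    // Clamp any requested bitrate below the supported floor (4096 bps) up to that
    // minimum instead of failing the configuration.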
    if (me.v.value < 4096) {
        me.set().value = 4096;
    }
    return res;
}

C2R C2SoftVpxEnc::IntfImpl::SizeSetter(bool mayBlock,
                                       const C2P<C2StreamPictureSizeInfo::input>& oldMe,
                                       C2P<C2StreamPictureSizeInfo::input>& me) {
    (void)mayBlock;
    C2R res = C2R::Ok();
    if (!me.F(me.v.width).supportsAtAll(me.v.width)) {
        res = res.plus(C2SettingResultBuilder::BadValue(me.F(me.v.width)));
        me.set().width = oldMe.v.width;
    }
    if (!me.F(me.v.height).supportsAtAll(me.v.height)) {
        res = res.plus(C2SettingResultBuilder::BadValue(me.F(me.v.height)));
        me.set().height = oldMe.v.height;
    }
    return res;
}

C2R C2SoftVpxEnc::IntfImpl::ProfileLevelSetter(bool mayBlock,
                                               C2P<C2StreamProfileLevelInfo::output>& me,
                                               const C2P<C2StreamPictureSizeInfo::input>& size,
                                               const C2P<C2StreamFrameRateInfo::output>& frameRate,
                                               const C2P<C2StreamBitrateInfo::output>& bitrate) {
    (void)mayBlock;
#ifdef VP9
    if (!me.F(me.v.profile).supportsAtAll(me.v.profile)) {
        me.set().profile = PROFILE_VP9_0;
    }
    struct LevelLimits {
        C2Config::level_t level;
        float samplesPerSec;
        uint64_t samples;
        uint32_t bitrate;
        size_t dimension;
    };
    constexpr LevelLimits kLimits[] = {
        {LEVEL_VP9_1, 829440, 36864, 200000, 512},
        {LEVEL_VP9_1_1, 2764800, 73728, 800000, 768},
        {LEVEL_VP9_2, 4608000, 122880, 1800000, 960},
        {LEVEL_VP9_2_1, 9216000, 245760, 3600000, 1344},
        {LEVEL_VP9_3, 20736000, 552960, 7200000, 2048},
        {LEVEL_VP9_3_1, 36864000, 983040, 12000000, 2752},
        {LEVEL_VP9_4, 83558400, 2228224, 18000000, 4160},
        {LEVEL_VP9_4_1, 160432128, 2228224, 30000000, 4160},
    };

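    // Express the current configuration in the same terms as the level limits above:
    // samples per picture, samples per second, and the larger picture dimension.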
    uint64_t samples = size.v.width * size.v.height;
    float samplesPerSec = float(samples) * frameRate.v.value;
    size_t dimension = std::max(size.v.width, size.v.height);

    // Check if the supplied level meets the samples / bitrate requirements.
    // If not, update the level with the lowest level meeting the requirements.
    bool found = false;

    // By default needsUpdate = false in case the supplied level does meet
    // the requirements.
    bool needsUpdate = false;
    if (!me.F(me.v.level).supportsAtAll(me.v.level)) {
        needsUpdate = true;
    }
    for (const LevelLimits& limit : kLimits) {
        if (samples <= limit.samples && samplesPerSec <= limit.samplesPerSec &&
            bitrate.v.value <= limit.bitrate && dimension <= limit.dimension) {
            // This is the lowest level that meets the requirements, and if
            // we haven't seen the supplied level yet, that means we don't
            // need the update.
            if (needsUpdate) {
                ALOGD("Given level %x does not cover current configuration: "
                      "adjusting to %x",
                      me.v.level, limit.level);
                me.set().level = limit.level;
            }
            found = true;
            break;
        }
        if (me.v.level == limit.level) {
            // We break out of the loop when the lowest feasible level is
            // found. The fact that we're here means that our level doesn't
            // meet the requirement and needs to be updated.
            needsUpdate = true;
        }
    }
    if (!found) {
        // We set to the highest supported level.
        me.set().level = LEVEL_VP9_4_1;
    }
#else
    (void)size;
    (void)frameRate;
    (void)bitrate;
    if (!me.F(me.v.profile).supportsAtAll(me.v.profile)) {
        me.set().profile = PROFILE_VP8_0;
    }
    if (!me.F(me.v.level).supportsAtAll(me.v.level)) {
        me.set().level = LEVEL_UNUSED;
    }
#endif
    return C2R::Ok();
}

C2R C2SoftVpxEnc::IntfImpl::LayeringSetter(bool mayBlock,
                                           C2P<C2StreamTemporalLayeringTuning::output>& me) {
    (void)mayBlock;
    C2R res = C2R::Ok();
    if (me.v.m.layerCount > 4) {
        me.set().m.layerCount = 4;
    }
    me.set().m.bLayerCount = 0;
    // ensure ratios are monotonic and clamped between 0 and 1
    for (size_t ix = 0; ix < me.v.flexCount(); ++ix) {
        me.set().m.bitrateRatios[ix] = c2_clamp(
            ix > 0 ? me.v.m.bitrateRatios[ix - 1] : 0, me.v.m.bitrateRatios[ix], 1.);
    }
    ALOGI("setting temporal layering %u + %u", me.v.m.layerCount, me.v.m.bLayerCount);
    return res;
}

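// Convert the configured sync-frame interval (in microseconds) into a key-frame
// distance expressed in frames, based on the current frame rate.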
uint32_t C2SoftVpxEnc::IntfImpl::getSyncFramePeriod() const {
    if (mSyncFramePeriod->value < 0 || mSyncFramePeriod->value == INT64_MAX) {
        return 0;
    }
    double period = mSyncFramePeriod->value / 1e6 * mFrameRate->value;
    return (uint32_t)c2_max(c2_min(period + 0.5, double(UINT32_MAX)), 1.);
}

C2R C2SoftVpxEnc::IntfImpl::PictureQuantizationSetter(
        bool mayBlock, C2P<C2StreamPictureQuantizationTuning::output>& me) {
    (void)mayBlock;
    int32_t iMin = VPX_QP_DEFAULT_MIN, pMin = VPX_QP_DEFAULT_MIN;
    int32_t iMax = VPX_QP_DEFAULT_MAX, pMax = VPX_QP_DEFAULT_MAX;
    for (size_t i = 0; i < me.v.flexCount(); ++i) {
        const C2PictureQuantizationStruct &layer = me.v.m.values[i];
        // layerMin is clamped to [VPX_QP_MIN, layerMax] to avoid error
        // cases where layer.min > layer.max
        int32_t layerMax = std::clamp(layer.max, VPX_QP_MIN, VPX_QP_MAX);
        int32_t layerMin = std::clamp(layer.min, VPX_QP_MIN, layerMax);
        if (layer.type_ == C2Config::picture_type_t(I_FRAME)) {
            iMax = layerMax;
            iMin = layerMin;
            ALOGV("iMin %d iMax %d", iMin, iMax);
        } else if (layer.type_ == C2Config::picture_type_t(P_FRAME)) {
            pMax = layerMax;
            pMin = layerMin;
            ALOGV("pMin %d pMax %d", pMin, pMax);
        }
    }
    ALOGV("PictureQuantizationSetter(entry): i %d-%d p %d-%d",
          iMin, iMax, pMin, pMax);

    // vpx library takes same range for I/P picture type
    int32_t maxFrameQP = std::min(iMax, pMax);
    int32_t minFrameQP = std::max(iMin, pMin);
    if (minFrameQP > maxFrameQP) {
        minFrameQP = maxFrameQP;
    }
    // put them back into the structure
    for (size_t i = 0; i < me.v.flexCount(); ++i) {
        const C2PictureQuantizationStruct &layer = me.v.m.values[i];

        if (layer.type_ == C2Config::picture_type_t(I_FRAME)) {
            me.set().m.values[i].max = maxFrameQP;
            me.set().m.values[i].min = minFrameQP;
        }
        else if (layer.type_ == C2Config::picture_type_t(P_FRAME)) {
            me.set().m.values[i].max = maxFrameQP;
            me.set().m.values[i].min = minFrameQP;
        }
    }
    ALOGV("PictureQuantizationSetter(exit): minFrameQP = %d maxFrameQP = %d",
          minFrameQP, maxFrameQP);
    return C2R::Ok();
}

C2R C2SoftVpxEnc::IntfImpl::ColorAspectsSetter(bool mayBlock,
                                               C2P<C2StreamColorAspectsInfo::input>& me) {
    (void)mayBlock;
    if (me.v.range > C2Color::RANGE_OTHER) {
        me.set().range = C2Color::RANGE_OTHER;
    }
    if (me.v.primaries > C2Color::PRIMARIES_OTHER) {
        me.set().primaries = C2Color::PRIMARIES_OTHER;
    }
    if (me.v.transfer > C2Color::TRANSFER_OTHER) {
        me.set().transfer = C2Color::TRANSFER_OTHER;
    }
    if (me.v.matrix > C2Color::MATRIX_OTHER) {
        me.set().matrix = C2Color::MATRIX_OTHER;
    }
    return C2R::Ok();
}

C2R C2SoftVpxEnc::IntfImpl::CodedColorAspectsSetter(
        bool mayBlock, C2P<C2StreamColorAspectsInfo::output>& me,
        const C2P<C2StreamColorAspectsInfo::input>& coded) {
    (void)mayBlock;
    me.set().range = coded.v.range;
    me.set().primaries = coded.v.primaries;
    me.set().transfer = coded.v.transfer;
    me.set().matrix = coded.v.matrix;
    return C2R::Ok();
}

#if 0
static size_t getCpuCoreCount() {
    long cpuCoreCount = 1;
#if defined(_SC_NPROCESSORS_ONLN)
    cpuCoreCount = sysconf(_SC_NPROCESSORS_ONLN);
#else
    // _SC_NPROC_ONLN must be defined...
    cpuCoreCount = sysconf(_SC_NPROC_ONLN);
#endif
    CHECK(cpuCoreCount >= 1);
    ALOGV("Number of CPU cores: %ld", cpuCoreCount);
    return (size_t)cpuCoreCount;
}
#endif

C2SoftVpxEnc::C2SoftVpxEnc(const char* name, c2_node_id_t id,
                           const std::shared_ptr<IntfImpl>& intfImpl)
    : SimpleC2Component(
          std::make_shared<SimpleInterface<IntfImpl>>(name, id, intfImpl)),
      mIntf(intfImpl),
      mCodecContext(nullptr),
      mCodecConfiguration(nullptr),
      mCodecInterface(nullptr),
      mStrideAlign(2),
      mColorFormat(VPX_IMG_FMT_I420),
      mBitrateControlMode(VPX_VBR),
      mErrorResilience(false),
      mMinQuantizer(0),
      mMaxQuantizer(0),
      mTemporalLayers(0),
      mTemporalPatternType(VPXTemporalLayerPatternNone),
      mTemporalPatternLength(0),
      mTemporalPatternIdx(0),
      mLastTimestamp(0x7FFFFFFFFFFFFFFFull),
      mSignalledOutputEos(false),
      mHeaderGenerated(false),
      mSignalledError(false) {
    for (int i = 0; i < MAXTEMPORALLAYERS; i++) {
        mTemporalLayerBitrateRatio[i] = 1.0f;
    }
}

C2SoftVpxEnc::~C2SoftVpxEnc() {
    onRelease();
}

c2_status_t C2SoftVpxEnc::onInit() {
    status_t err = initEncoder();
    return err == OK ? C2_OK : C2_CORRUPTED;
}

void C2SoftVpxEnc::onRelease() {
    if (mCodecContext) {
        vpx_codec_destroy(mCodecContext);
        delete mCodecContext;
        mCodecContext = nullptr;
    }

    if (mCodecConfiguration) {
        delete mCodecConfiguration;
        mCodecConfiguration = nullptr;
    }

    // this one is not allocated by us
    mCodecInterface = nullptr;
    mHeaderGenerated = false;
}

c2_status_t C2SoftVpxEnc::onStop() {
    onRelease();
    mLastTimestamp = 0x7FFFFFFFFFFFFFFFLL;
    mSignalledOutputEos = false;
    mSignalledError = false;
    return C2_OK;
}

void C2SoftVpxEnc::onReset() {
    (void)onStop();
}

c2_status_t C2SoftVpxEnc::onFlush_sm() {
    return onStop();
}

status_t C2SoftVpxEnc::initEncoder() {
    vpx_codec_err_t codec_return;
    status_t result = UNKNOWN_ERROR;
    {
        IntfImpl::Lock lock = mIntf->lock();
        mSize = mIntf->getSize_l();
        mBitrate = mIntf->getBitrate_l();
        mBitrateMode = mIntf->getBitrateMode_l();
        mFrameRate = mIntf->getFrameRate_l();
        mIntraRefresh = mIntf->getIntraRefresh_l();
        mRequestSync = mIntf->getRequestSync_l();
        mLayering = mIntf->getTemporalLayers_l();
        mTemporalLayers = mLayering->m.layerCount;
        mQpBounds = mIntf->getPictureQuantization_l();
    }

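    // Map the Codec2 bitrate mode onto the libvpx rate-control mode (CBR vs. VBR).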
    switch (mBitrateMode->value) {
        case C2Config::BITRATE_CONST:
            mBitrateControlMode = VPX_CBR;
            break;
        case C2Config::BITRATE_VARIABLE:
            [[fallthrough]];
        default:
            mBitrateControlMode = VPX_VBR;
            break;
    }

    if (mQpBounds->flexCount() > 0) {
        // read min max qp for sequence
        for (size_t i = 0; i < mQpBounds->flexCount(); ++i) {
            const C2PictureQuantizationStruct &layer = mQpBounds->m.values[i];
            if (layer.type_ == C2Config::picture_type_t(I_FRAME)) {
                mMaxQuantizer = layer.max;
                mMinQuantizer = layer.min;
                break;
            }
        }
    }

    setCodecSpecificInterface();
    if (!mCodecInterface) goto CleanUp;

    ALOGD("VPx: initEncoder. BRMode: %u. TSLayers: %zu. KF: %u. QP: %u - %u",
          (uint32_t)mBitrateControlMode, mTemporalLayers, mIntf->getSyncFramePeriod(),
          mMinQuantizer, mMaxQuantizer);

    mHeaderGenerated = false;
    mCodecConfiguration = new vpx_codec_enc_cfg_t;
    if (!mCodecConfiguration) goto CleanUp;
    codec_return = vpx_codec_enc_config_default(mCodecInterface,
                                                mCodecConfiguration,
                                                0);
    if (codec_return != VPX_CODEC_OK) {
        ALOGE("Error populating default configuration for vpx encoder.");
        goto CleanUp;
    }

    mCodecConfiguration->g_w = mSize->width;
    mCodecConfiguration->g_h = mSize->height;
    //mCodecConfiguration->g_threads = getCpuCoreCount();
    mCodecConfiguration->g_threads = 0;
    mCodecConfiguration->g_error_resilient = mErrorResilience;

    // timebase unit is microsecond
    // g_timebase is in seconds (i.e. 1/1000000 seconds)
    mCodecConfiguration->g_timebase.num = 1;
    mCodecConfiguration->g_timebase.den = 1000000;
    // rc_target_bitrate is in kbps, mBitrate in bps
    mCodecConfiguration->rc_target_bitrate = (mBitrate->value + 500) / 1000;
    mCodecConfiguration->rc_end_usage = mBitrateControlMode;
    // Disable frame drop - not allowed in MediaCodec now.
    mCodecConfiguration->rc_dropframe_thresh = 0;
    // Disable lagged encoding.
    mCodecConfiguration->g_lag_in_frames = 0;
    if (mBitrateControlMode == VPX_CBR) {
        // Disable spatial resizing.
        mCodecConfiguration->rc_resize_allowed = 0;
        // Single-pass mode.
        mCodecConfiguration->g_pass = VPX_RC_ONE_PASS;
        // Maximum amount of bits that can be subtracted from the target
        // bitrate - expressed as percentage of the target bitrate.
        mCodecConfiguration->rc_undershoot_pct = 100;
        // Maximum amount of bits that can be added to the target
        // bitrate - expressed as percentage of the target bitrate.
        mCodecConfiguration->rc_overshoot_pct = 15;
        // Initial value of the buffer level in ms.
        mCodecConfiguration->rc_buf_initial_sz = 500;
        // Amount of data that the encoder should try to maintain in ms.
        mCodecConfiguration->rc_buf_optimal_sz = 600;
        // The amount of data that may be buffered by the decoding
        // application in ms.
        mCodecConfiguration->rc_buf_sz = 1000;
        // Enable error resilience - needed for packet loss.
        mCodecConfiguration->g_error_resilient = 1;
        // Maximum key frame interval - for CBR boost to 3000
        mCodecConfiguration->kf_max_dist = 3000;
        // Encoder determines optimal key frame placement automatically.
        mCodecConfiguration->kf_mode = VPX_KF_AUTO;
    }

    // Frames temporal pattern - for now WebRTC like pattern is only supported.
    switch (mTemporalLayers) {
        case 0:
            mTemporalPatternLength = 0;
            break;
        case 1:
            mCodecConfiguration->ts_number_layers = 1;
            mCodecConfiguration->ts_rate_decimator[0] = 1;
            mCodecConfiguration->ts_periodicity = 1;
            mCodecConfiguration->ts_layer_id[0] = 0;
            mTemporalPattern[0] = kTemporalUpdateLastRefAll;
            mTemporalPatternLength = 1;
            break;
        case 2:
            mCodecConfiguration->ts_number_layers = 2;
            mCodecConfiguration->ts_rate_decimator[0] = 2;
            mCodecConfiguration->ts_rate_decimator[1] = 1;
            mCodecConfiguration->ts_periodicity = 2;
            mCodecConfiguration->ts_layer_id[0] = 0;
            mCodecConfiguration->ts_layer_id[1] = 1;
            mTemporalPattern[0] = kTemporalUpdateLastAndGoldenRefAltRef;
            mTemporalPattern[1] = kTemporalUpdateGoldenWithoutDependencyRefAltRef;
            mTemporalPattern[2] = kTemporalUpdateLastRefAltRef;
            mTemporalPattern[3] = kTemporalUpdateGoldenRefAltRef;
            mTemporalPattern[4] = kTemporalUpdateLastRefAltRef;
            mTemporalPattern[5] = kTemporalUpdateGoldenRefAltRef;
            mTemporalPattern[6] = kTemporalUpdateLastRefAltRef;
            mTemporalPattern[7] = kTemporalUpdateNone;
            mTemporalLayerBitrateRatio[0] = mLayering->m.bitrateRatios[0];
            mTemporalPatternLength = 8;
            break;
        case 3:
            mCodecConfiguration->ts_number_layers = 3;
            mCodecConfiguration->ts_rate_decimator[0] = 4;
            mCodecConfiguration->ts_rate_decimator[1] = 2;
            mCodecConfiguration->ts_rate_decimator[2] = 1;
            mCodecConfiguration->ts_periodicity = 4;
            mCodecConfiguration->ts_layer_id[0] = 0;
            mCodecConfiguration->ts_layer_id[1] = 2;
            mCodecConfiguration->ts_layer_id[2] = 1;
            mCodecConfiguration->ts_layer_id[3] = 2;
            mTemporalPattern[0] = kTemporalUpdateLastAndGoldenRefAltRef;
            mTemporalPattern[1] = kTemporalUpdateNoneNoRefGoldenRefAltRef;
            mTemporalPattern[2] = kTemporalUpdateGoldenWithoutDependencyRefAltRef;
            mTemporalPattern[3] = kTemporalUpdateNone;
            mTemporalPattern[4] = kTemporalUpdateLastRefAltRef;
            mTemporalPattern[5] = kTemporalUpdateNone;
            mTemporalPattern[6] = kTemporalUpdateGoldenRefAltRef;
            mTemporalPattern[7] = kTemporalUpdateNone;
            mTemporalLayerBitrateRatio[0] = mLayering->m.bitrateRatios[0];
            mTemporalLayerBitrateRatio[1] = mLayering->m.bitrateRatios[1];
            mTemporalPatternLength = 8;
            break;
        default:
            ALOGE("Wrong number of temporal layers %zu", mTemporalLayers);
            goto CleanUp;
    }
    // Set bitrate values for each layer
    for (size_t i = 0; i < mCodecConfiguration->ts_number_layers; i++) {
        mCodecConfiguration->ts_target_bitrate[i] =
            mCodecConfiguration->rc_target_bitrate *
            mTemporalLayerBitrateRatio[i];
    }
    if (mIntf->getSyncFramePeriod() >= 0) {
        mCodecConfiguration->kf_max_dist = mIntf->getSyncFramePeriod();
        mCodecConfiguration->kf_min_dist = mIntf->getSyncFramePeriod();
        mCodecConfiguration->kf_mode = VPX_KF_AUTO;
    }
    if (mMinQuantizer > 0) {
        mCodecConfiguration->rc_min_quantizer = mMinQuantizer;
    }
    if (mMaxQuantizer > 0) {
        mCodecConfiguration->rc_max_quantizer = mMaxQuantizer;
    }
    setCodecSpecificConfiguration();
    mCodecContext = new vpx_codec_ctx_t;
    if (!mCodecContext) goto CleanUp;
    codec_return = vpx_codec_enc_init(mCodecContext,
                                      mCodecInterface,
                                      mCodecConfiguration,
                                      0); // flags
    if (codec_return != VPX_CODEC_OK) {
        ALOGE("Error initializing vpx encoder");
        goto CleanUp;
    }

    // Extra CBR settings
    if (mBitrateControlMode == VPX_CBR) {
        codec_return = vpx_codec_control(mCodecContext,
                                         VP8E_SET_STATIC_THRESHOLD,
                                         1);
        if (codec_return == VPX_CODEC_OK) {
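            // Cap the key-frame target size: VP8E_SET_MAX_INTRA_BITRATE_PCT is a
            // percentage of the per-frame bandwidth, derived here from the optimal
            // buffer size and the configured frame rate.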
            uint32_t rc_max_intra_target =
                (uint32_t)(mCodecConfiguration->rc_buf_optimal_sz * mFrameRate->value / 20 + 0.5);
            // Don't go below 3 times per frame bandwidth.
            if (rc_max_intra_target < 300) {
                rc_max_intra_target = 300;
            }
            codec_return = vpx_codec_control(mCodecContext,
                                             VP8E_SET_MAX_INTRA_BITRATE_PCT,
                                             rc_max_intra_target);
        }
        if (codec_return == VPX_CODEC_OK) {
            codec_return = vpx_codec_control(mCodecContext,
                                             VP8E_SET_CPUUSED,
                                             -8);
        }
        if (codec_return != VPX_CODEC_OK) {
            ALOGE("Error setting cbr parameters for vpx encoder.");
            goto CleanUp;
        }
    }

    codec_return = setCodecSpecificControls();
    if (codec_return != VPX_CODEC_OK) goto CleanUp;

    {
        uint32_t width = mSize->width;
        uint32_t height = mSize->height;
        if (((uint64_t)width * height) >
            ((uint64_t)INT32_MAX / 3)) {
            ALOGE("b/25812794, Buffer size is too big, width=%u, height=%u.", width, height);
        } else {
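            // Align the dimensions to the stride requirement and allocate an I420
            // conversion buffer (3/2 bytes per pixel) for inputs that need conversion.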
            uint32_t stride = (width + mStrideAlign - 1) & ~(mStrideAlign - 1);
            uint32_t vstride = (height + mStrideAlign - 1) & ~(mStrideAlign - 1);
            mConversionBuffer = MemoryBlock::Allocate(stride * vstride * 3 / 2);
            if (!mConversionBuffer.size()) {
                ALOGE("Allocating conversion buffer failed.");
            } else {
                mNumInputFrames = -1;
                return OK;
            }
        }
    }

CleanUp:
    onRelease();
    return result;
}

vpx_enc_frame_flags_t C2SoftVpxEnc::getEncodeFlags() {
    vpx_enc_frame_flags_t flags = 0;
    if (mTemporalPatternLength > 0) {
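        // Step through the WebRTC-style temporal pattern and translate the current
        // entry into VP8 reference/update flags for this frame.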
        int patternIdx = mTemporalPatternIdx % mTemporalPatternLength;
        mTemporalPatternIdx++;
        switch (mTemporalPattern[patternIdx]) {
            case kTemporalUpdateLast:
                flags |= VP8_EFLAG_NO_UPD_GF;
                flags |= VP8_EFLAG_NO_UPD_ARF;
                flags |= VP8_EFLAG_NO_REF_GF;
                flags |= VP8_EFLAG_NO_REF_ARF;
                break;
            case kTemporalUpdateGoldenWithoutDependency:
                flags |= VP8_EFLAG_NO_REF_GF;
                [[fallthrough]];
            case kTemporalUpdateGolden:
                flags |= VP8_EFLAG_NO_REF_ARF;
                flags |= VP8_EFLAG_NO_UPD_ARF;
                flags |= VP8_EFLAG_NO_UPD_LAST;
                break;
            case kTemporalUpdateAltrefWithoutDependency:
                flags |= VP8_EFLAG_NO_REF_ARF;
                flags |= VP8_EFLAG_NO_REF_GF;
                [[fallthrough]];
            case kTemporalUpdateAltref:
                flags |= VP8_EFLAG_NO_UPD_GF;
                flags |= VP8_EFLAG_NO_UPD_LAST;
                break;
            case kTemporalUpdateNoneNoRefAltref:
                flags |= VP8_EFLAG_NO_REF_ARF;
                [[fallthrough]];
            case kTemporalUpdateNone:
                flags |= VP8_EFLAG_NO_UPD_GF;
                flags |= VP8_EFLAG_NO_UPD_ARF;
                flags |= VP8_EFLAG_NO_UPD_LAST;
                flags |= VP8_EFLAG_NO_UPD_ENTROPY;
                break;
            case kTemporalUpdateNoneNoRefGoldenRefAltRef:
                flags |= VP8_EFLAG_NO_REF_GF;
                flags |= VP8_EFLAG_NO_UPD_GF;
                flags |= VP8_EFLAG_NO_UPD_ARF;
                flags |= VP8_EFLAG_NO_UPD_LAST;
                flags |= VP8_EFLAG_NO_UPD_ENTROPY;
                break;
            case kTemporalUpdateGoldenWithoutDependencyRefAltRef:
                flags |= VP8_EFLAG_NO_REF_GF;
                flags |= VP8_EFLAG_NO_UPD_ARF;
                flags |= VP8_EFLAG_NO_UPD_LAST;
                break;
            case kTemporalUpdateLastRefAltRef:
                flags |= VP8_EFLAG_NO_UPD_GF;
                flags |= VP8_EFLAG_NO_UPD_ARF;
                flags |= VP8_EFLAG_NO_REF_GF;
                break;
            case kTemporalUpdateGoldenRefAltRef:
                flags |= VP8_EFLAG_NO_UPD_ARF;
                flags |= VP8_EFLAG_NO_UPD_LAST;
                break;
            case kTemporalUpdateLastAndGoldenRefAltRef:
                flags |= VP8_EFLAG_NO_UPD_ARF;
                flags |= VP8_EFLAG_NO_REF_GF;
                break;
            case kTemporalUpdateLastRefAll:
                flags |= VP8_EFLAG_NO_UPD_ARF;
                flags |= VP8_EFLAG_NO_UPD_GF;
                break;
        }
    }
    return flags;
}

// TODO: add support for YUV input color formats
// TODO: add support for SVC, ARF. SVC and ARF returns multiple frames
// (hierarchical / noshow) in one call. These frames should be combined in to
// a single buffer and sent back to the client
void C2SoftVpxEnc::process(
        const std::unique_ptr<C2Work> &work,
        const std::shared_ptr<C2BlockPool> &pool) {
    // Initialize output work
    work->result = C2_OK;
    work->workletsProcessed = 1u;
    work->worklets.front()->output.flags = work->input.flags;

    if (mSignalledError || mSignalledOutputEos) {
        work->result = C2_BAD_VALUE;
        return;
    }
    // Initialize encoder if not already
    if (!mCodecContext && OK != initEncoder()) {
        ALOGE("Failed to initialize encoder");
        mSignalledError = true;
        work->result = C2_CORRUPTED;
        return;
    }

    std::shared_ptr<C2GraphicView> rView;
    std::shared_ptr<C2Buffer> inputBuffer;
    if (!work->input.buffers.empty()) {
        inputBuffer = work->input.buffers[0];
        rView = std::make_shared<C2GraphicView>(
                inputBuffer->data().graphicBlocks().front().map().get());
        if (rView->error() != C2_OK) {
            ALOGE("graphic view map err = %d", rView->error());
            work->result = C2_CORRUPTED;
            return;
        }
        //(b/232396154)
        //workaround for incorrect crop size in view when using surface mode
        rView->setCrop_be(C2Rect(mSize->width, mSize->height));
    } else {
        ALOGV("Empty input Buffer");
        uint32_t flags = 0;
        if (work->input.flags & C2FrameData::FLAG_END_OF_STREAM) {
            flags |= C2FrameData::FLAG_END_OF_STREAM;
        }
        work->worklets.front()->output.flags = (C2FrameData::flags_t)flags;
        work->worklets.front()->output.buffers.clear();
        work->worklets.front()->output.ordinal = work->input.ordinal;
        work->workletsProcessed = 1u;
        return;
    }

    // Header generation is limited to Android V and above, as MediaMuxer did not handle
    // CSD for VP9 correctly in Android U and before.
    if (isAtLeastV() && !mHeaderGenerated) {
        vpx_fixed_buf_t* codec_private_data = vpx_codec_get_global_headers(mCodecContext);
        if (codec_private_data) {
            std::unique_ptr<C2StreamInitDataInfo::output> csd =
                C2StreamInitDataInfo::output::AllocUnique(codec_private_data->sz, 0u);
            if (!csd) {
                ALOGE("CSD allocation failed");
                mSignalledError = true;
                work->result = C2_NO_MEMORY;
                work->workletsProcessed = 1u;
                return;
            }
            memcpy(csd->m.value, codec_private_data->buf, codec_private_data->sz);
            work->worklets.front()->output.configUpdate.push_back(std::move(csd));
            ALOGV("CSD Produced of size %zu bytes", codec_private_data->sz);
        }
        mHeaderGenerated = true;
    }

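    // Reject input buffers that are smaller than the configured picture size.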
    const C2ConstGraphicBlock inBuffer =
        inputBuffer->data().graphicBlocks().front();
    if (inBuffer.width() < mSize->width ||
        inBuffer.height() < mSize->height) {
        ALOGE("unexpected Input buffer attributes %d(%d) x %d(%d)",
              inBuffer.width(), mSize->width, inBuffer.height(),
              mSize->height);
        mSignalledError = true;
        work->result = C2_BAD_VALUE;
        return;
    }
    bool eos = ((work->input.flags & C2FrameData::FLAG_END_OF_STREAM) != 0);
    vpx_image_t raw_frame;
    const C2PlanarLayout &layout = rView->layout();
    uint32_t width = mSize->width;
    uint32_t height = mSize->height;
    if (width > 0x8000 || height > 0x8000) {
        ALOGE("Image too big: %u x %u", width, height);
        work->result = C2_BAD_VALUE;
        return;
    }
    uint32_t stride = (width + mStrideAlign - 1) & ~(mStrideAlign - 1);
    uint32_t vstride = (height + mStrideAlign - 1) & ~(mStrideAlign - 1);
    switch (layout.type) {
        case C2PlanarLayout::TYPE_RGB:
        case C2PlanarLayout::TYPE_RGBA: {
            std::shared_ptr<C2StreamColorAspectsInfo::output> colorAspects;
            {
                IntfImpl::Lock lock = mIntf->lock();
                colorAspects = mIntf->getCodedColorAspects_l();
            }
            ConvertRGBToPlanarYUV(mConversionBuffer.data(), stride, vstride,
                                  mConversionBuffer.size(), *rView.get(),
                                  colorAspects->matrix, colorAspects->range);
            vpx_img_wrap(&raw_frame, VPX_IMG_FMT_I420, width, height,
                         mStrideAlign, mConversionBuffer.data());
            break;
        }
        case C2PlanarLayout::TYPE_YUV: {
            if (!IsYUV420(*rView)) {
                ALOGE("input is not YUV420");
                work->result = C2_BAD_VALUE;
                return;
            }

            if (layout.planes[layout.PLANE_Y].colInc == 1
                    && layout.planes[layout.PLANE_U].colInc == 1
                    && layout.planes[layout.PLANE_V].colInc == 1) {
                // I420 compatible - though with custom offset and stride
                vpx_img_wrap(&raw_frame, VPX_IMG_FMT_I420, width, height,
                             mStrideAlign, (uint8_t*)rView->data()[0]);
                raw_frame.planes[1] = (uint8_t*)rView->data()[1];
                raw_frame.planes[2] = (uint8_t*)rView->data()[2];
                raw_frame.stride[0] = layout.planes[layout.PLANE_Y].rowInc;
                raw_frame.stride[1] = layout.planes[layout.PLANE_U].rowInc;
                raw_frame.stride[2] = layout.planes[layout.PLANE_V].rowInc;
            } else {
                // copy to I420
                MediaImage2 img = CreateYUV420PlanarMediaImage2(width, height, stride, vstride);
                if (mConversionBuffer.size() >= stride * vstride * 3 / 2) {
                    status_t err = ImageCopy(mConversionBuffer.data(), &img, *rView);
                    if (err != OK) {
                        ALOGE("Buffer conversion failed: %d", err);
                        work->result = C2_BAD_VALUE;
                        return;
                    }
                    vpx_img_wrap(&raw_frame, VPX_IMG_FMT_I420, stride, vstride,
                                 mStrideAlign, mConversionBuffer.data());
                    vpx_img_set_rect(&raw_frame, 0, 0, width, height);
                } else {
                    ALOGE("Conversion buffer is too small: %u x %u for %zu",
                          stride, vstride, mConversionBuffer.size());
                    work->result = C2_BAD_VALUE;
                    return;
                }
            }
            break;
        }
        default:
            ALOGE("Unrecognized plane type: %d", layout.type);
            work->result = C2_BAD_VALUE;
            return;
    }

    vpx_enc_frame_flags_t flags = getEncodeFlags();
    // handle dynamic config parameters
    {
        IntfImpl::Lock lock = mIntf->lock();
        std::shared_ptr<C2StreamIntraRefreshTuning::output> intraRefresh = mIntf->getIntraRefresh_l();
        std::shared_ptr<C2StreamBitrateInfo::output> bitrate = mIntf->getBitrate_l();
        std::shared_ptr<C2StreamRequestSyncFrameTuning::output> requestSync = mIntf->getRequestSync_l();
        lock.unlock();

        if (intraRefresh != mIntraRefresh) {
            mIntraRefresh = intraRefresh;
            ALOGV("Got mIntraRefresh request");
        }

        if (requestSync != mRequestSync) {
            // we can handle IDR immediately
            if (requestSync->value) {
                // unset request
                C2StreamRequestSyncFrameTuning::output clearSync(0u, C2_FALSE);
                std::vector<std::unique_ptr<C2SettingResult>> failures;
                mIntf->config({ &clearSync }, C2_MAY_BLOCK, &failures);
                ALOGV("Got sync request");
                flags |= VPX_EFLAG_FORCE_KF;
            }
            mRequestSync = requestSync;
        }

        if (bitrate != mBitrate) {
            mBitrate = bitrate;
            mCodecConfiguration->rc_target_bitrate =
                (mBitrate->value + 500) / 1000;
            vpx_codec_err_t res = vpx_codec_enc_config_set(mCodecContext,
                                                           mCodecConfiguration);
            if (res != VPX_CODEC_OK) {
                ALOGE("vpx encoder failed to update bitrate: %s",
                      vpx_codec_err_to_string(res));
                mSignalledError = true;
                work->result = C2_CORRUPTED;
                return;
            }
        }
    }

    uint64_t inputTimeStamp = work->input.ordinal.timestamp.peekull();
    uint32_t frameDuration;
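    // Derive the frame duration from consecutive input timestamps; for the first frame
    // (or non-increasing timestamps) fall back to the configured frame rate, defaulting
    // to 30 fps when no rate is set.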
    if (inputTimeStamp > mLastTimestamp) {
        frameDuration = (uint32_t)(inputTimeStamp - mLastTimestamp);
    } else {
        // Use default of 30 fps in case of 0 frame rate.
        float frameRate = mFrameRate->value;
        if (frameRate < 0.001) {
            frameRate = 30;
        }
        frameDuration = (uint32_t)(1000000 / frameRate + 0.5);
    }
    mLastTimestamp = inputTimeStamp;

    vpx_codec_err_t codec_return = vpx_codec_encode(mCodecContext, &raw_frame,
                                                    inputTimeStamp,
                                                    frameDuration, flags,
                                                    VPX_DL_REALTIME);
    if (codec_return != VPX_CODEC_OK) {
        ALOGE("vpx encoder failed to encode frame");
        mSignalledError = true;
        work->result = C2_CORRUPTED;
        return;
    }

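    // Drain every packet produced for this input and forward encoded frame packets
    // as output work; key frames are tagged with SYNC_FRAME.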
    bool populated = false;
    vpx_codec_iter_t encoded_packet_iterator = nullptr;
    const vpx_codec_cx_pkt_t* encoded_packet;
    while ((encoded_packet = vpx_codec_get_cx_data(
                    mCodecContext, &encoded_packet_iterator))) {
        if (encoded_packet->kind == VPX_CODEC_CX_FRAME_PKT) {
            std::shared_ptr<C2LinearBlock> block;
            C2MemoryUsage usage = { C2MemoryUsage::CPU_READ, C2MemoryUsage::CPU_WRITE };
            c2_status_t err = pool->fetchLinearBlock(encoded_packet->data.frame.sz, usage, &block);
            if (err != C2_OK) {
                ALOGE("fetchLinearBlock for Output failed with status %d", err);
                work->result = C2_NO_MEMORY;
                return;
            }
            C2WriteView wView = block->map().get();
            if (wView.error()) {
                ALOGE("write view map failed %d", wView.error());
                work->result = C2_CORRUPTED;
                return;
            }

            memcpy(wView.data(), encoded_packet->data.frame.buf, encoded_packet->data.frame.sz);
            ++mNumInputFrames;

            ALOGV("bytes generated %zu", encoded_packet->data.frame.sz);
            uint32_t flags = 0;
            if (eos) {
                flags |= C2FrameData::FLAG_END_OF_STREAM;
            }
            work->worklets.front()->output.flags = (C2FrameData::flags_t)flags;
            work->worklets.front()->output.buffers.clear();
            std::shared_ptr<C2Buffer> buffer =
                createLinearBuffer(block, 0, encoded_packet->data.frame.sz);
            if (encoded_packet->data.frame.flags & VPX_FRAME_IS_KEY) {
                buffer->setInfo(std::make_shared<C2StreamPictureTypeMaskInfo::output>(
                        0u /* stream id */, C2Config::SYNC_FRAME));
            }
            work->worklets.front()->output.buffers.push_back(buffer);
            work->worklets.front()->output.ordinal = work->input.ordinal;
            work->worklets.front()->output.ordinal.timestamp = encoded_packet->data.frame.pts;
            work->workletsProcessed = 1u;
            populated = true;
            if (eos) {
                mSignalledOutputEos = true;
                ALOGV("signalled EOS");
            }
        }
    }
    if (!populated) {
        work->workletsProcessed = 0u;
    }
}

c2_status_t C2SoftVpxEnc::drain(
        uint32_t drainMode,
        const std::shared_ptr<C2BlockPool> &pool) {
    (void)pool;
    if (drainMode == NO_DRAIN) {
        ALOGW("drain with NO_DRAIN: no-op");
        return C2_OK;
    }
    if (drainMode == DRAIN_CHAIN) {
        ALOGW("DRAIN_CHAIN not supported");
        return C2_OMITTED;
    }

    return C2_OK;
}

}  // namespace android