/*
 * Copyright 2015 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */


#include <utils/Log.h>

#define DEBUG  0
#if DEBUG
#  define  DDD(...)    ALOGD(__VA_ARGS__)
#else
#  define  DDD(...)    ((void)0)
#endif

#include "GoldfishAVCDec.h"

#include <media/stagefright/foundation/ADebug.h>
#include <media/stagefright/MediaDefs.h>
#include <OMX_VideoExt.h>
#include <inttypes.h>

#include <nativebase/nativebase.h>

#include <android/hardware/graphics/common/1.2/types.h>
#include <hidl/LegacySupport.h>

using ::android::hardware::graphics::common::V1_2::PixelFormat;
using ::android::hardware::graphics::common::V1_0::BufferUsage;

namespace android {

#define componentName                   "video_decoder.avc"
#define codingType                      OMX_VIDEO_CodingAVC
#define CODEC_MIME_TYPE                 MEDIA_MIMETYPE_VIDEO_AVC

/** Function and structure definitions to keep code similar for each codec */
#define ivdec_api_function              ih264d_api_function
#define ivdext_create_ip_t              ih264d_create_ip_t
#define ivdext_create_op_t              ih264d_create_op_t
#define ivdext_delete_ip_t              ih264d_delete_ip_t
#define ivdext_delete_op_t              ih264d_delete_op_t
#define ivdext_ctl_set_num_cores_ip_t   ih264d_ctl_set_num_cores_ip_t
#define ivdext_ctl_set_num_cores_op_t   ih264d_ctl_set_num_cores_op_t

#define IVDEXT_CMD_CTL_SET_NUM_CORES    \
        (IVD_CONTROL_API_COMMAND_TYPE_T)IH264D_CMD_CTL_SET_NUM_CORES

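// Profile/level pairs advertised to the OMX framework; every supported
// profile is reported up to AVC Level 5.2.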
static const CodecProfileLevel kProfileLevels[] = {
    { OMX_VIDEO_AVCProfileConstrainedBaseline, OMX_VIDEO_AVCLevel52 },
    { OMX_VIDEO_AVCProfileBaseline,            OMX_VIDEO_AVCLevel52 },
    { OMX_VIDEO_AVCProfileMain,                OMX_VIDEO_AVCLevel52 },
    { OMX_VIDEO_AVCProfileConstrainedHigh,     OMX_VIDEO_AVCLevel52 },
    { OMX_VIDEO_AVCProfileHigh,                OMX_VIDEO_AVCLevel52 },
};

GoldfishAVCDec::GoldfishAVCDec(
        const char *name,
        const OMX_CALLBACKTYPE *callbacks,
        OMX_PTR appData,
        OMX_COMPONENTTYPE **component, RenderMode renderMode)
    : GoldfishVideoDecoderOMXComponent(
            name, componentName, codingType,
            kProfileLevels, ARRAY_SIZE(kProfileLevels),
            320 /* width */, 240 /* height */, callbacks,
            appData, component),
      mOmxColorFormat(OMX_COLOR_FormatYUV420Planar),
      mChangingResolution(false),
      mSignalledError(false),
      mInputOffset(0), mRenderMode(renderMode) {
    initPorts(
            1 /* numMinInputBuffers */, kNumBuffers, INPUT_BUF_SIZE,
            1 /* numMinOutputBuffers */, kNumBuffers, CODEC_MIME_TYPE);

    mTimeStart = mTimeEnd = systemTime();

    // If input dump is enabled, create an empty file
    GENERATE_FILE_NAMES();
    CREATE_DUMP_FILE(mInFile);
    ALOGI("created %s %d object %p", __func__, __LINE__, this);
}

GoldfishAVCDec::~GoldfishAVCDec() {
    CHECK_EQ(deInitDecoder(), (status_t)OK);
    DDD("destroyed %s %d object %p", __func__, __LINE__, this);
}

void GoldfishAVCDec::logVersion() {
    // TODO: get emulation decoder implementation version from the host.
    ALOGI("GoldfishAVC decoder version 1.0");
}

status_t GoldfishAVCDec::resetPlugin() {
    mIsInFlush = false;
    mReceivedEOS = false;

    /* Initialize both start and end times */
    mTimeStart = mTimeEnd = systemTime();

    return OK;
}

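// Tear down the host-side decoder context; a new one is created lazily by
// initDecoder() (see onQueueFilled) with the current width and height.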
status_t GoldfishAVCDec::resetDecoder() {
    if (mContext) {
        // The resolution may have changed, so our safest bet is to just destroy the
        // current context and recreate another one, with the new width and height.
        mContext->destroyH264Context();
        mContext.reset(nullptr);
    }
    return OK;
}

status_t GoldfishAVCDec::setFlushMode() {
    /* Set the decoder in Flush mode, subsequent decode() calls will flush */
    mIsInFlush = true;
    mContext->flush();
    return OK;
}

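// Create the host-side H.264 decoder context. When Android native buffers are
// not enabled, fall back to guest-CPU rendering so decoded frames are copied
// back into guest memory.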
status_t GoldfishAVCDec::initDecoder() {
    /* Initialize the decoder */
    if (!mEnableAndroidNativeBuffers) {
        mRenderMode = RenderMode::RENDER_BY_GUEST_CPU;
    }
    mContext.reset(new MediaH264Decoder(mRenderMode));
    mContext->initH264Context(mWidth,
                              mHeight,
                              mWidth,
                              mHeight,
                              MediaH264Decoder::PixelFormat::YUV420P);

    /* Reset the plugin state */
    resetPlugin();

    /* Get codec version */
    logVersion();

    return OK;
}

status_t GoldfishAVCDec::deInitDecoder() {
    if (mContext) {
        mContext->destroyH264Context();
        mContext.reset();
    }

    mChangingResolution = false;

    return OK;
}

void GoldfishAVCDec::onReset() {
    GoldfishVideoDecoderOMXComponent::onReset();

    mSignalledError = false;
    mInputOffset = 0;
    resetDecoder();
    resetPlugin();
}

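// Read the VUI color description (primaries, transfer, matrix coefficients and
// range) attached to a decoded image and notify the framework when the
// bitstream color aspects change.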
bool GoldfishAVCDec::getVUIParams(h264_image_t& img) {
    int32_t primaries = img.color_primaries;
    bool fullRange = (img.color_range == 2);
    int32_t transfer = img.color_trc;
    int32_t coeffs = img.colorspace;

    ColorAspects colorAspects;
    ColorUtils::convertIsoColorAspectsToCodecAspects(
            primaries, transfer, coeffs, fullRange, colorAspects);

    DDD("img pts %lld, primaries %d, range %d transfer %d colorspace %d", (long long)img.pts,
            (int)img.color_primaries, (int)img.color_range, (int)img.color_trc, (int)img.colorspace);

    // Update color aspects if necessary.
    if (colorAspectsDiffer(colorAspects, mBitstreamColorAspects)) {
        mBitstreamColorAspects = colorAspects;
        status_t err = handleColorAspectsChange();
        CHECK(err == OK);
    }
    return true;
}

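// Compute the input pointer/size for the next decode call and verify that the
// output buffer is large enough for a full YUV420 frame (Y plane plus two
// quarter-size chroma planes).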
bool GoldfishAVCDec::setDecodeArgs(
        OMX_BUFFERHEADERTYPE *inHeader,
        OMX_BUFFERHEADERTYPE *outHeader) {
    size_t sizeY = outputBufferWidth() * outputBufferHeight();
    size_t sizeUV = sizeY / 4;

    /* When in flush and after EOS with zero byte input,
     * inHeader is set to zero. Hence check for non-null */
    if (inHeader) {
        mConsumedBytes = inHeader->nFilledLen - mInputOffset;
        mInPBuffer = inHeader->pBuffer + inHeader->nOffset + mInputOffset;
        DDD("got input timestamp %lld in-addr-base %p real-data-offset %d inputoffset %d", (long long)(inHeader->nTimeStamp),
                inHeader->pBuffer, (int)(inHeader->nOffset + mInputOffset), (int)mInputOffset);
    } else {
        mConsumedBytes = 0;
        mInPBuffer = nullptr;
    }

    if (outHeader) {
        if (outHeader->nAllocLen < sizeY + (sizeUV * 2)) {
            ALOGE("outHeader->nAllocLen %d < needed size %d", (int)outHeader->nAllocLen, (int)(sizeY + sizeUV * 2));
            android_errorWriteLog(0x534e4554, "27833616");
            return false;
        }
        mOutHeaderBuf = outHeader->pBuffer;
    } else {
        // We flush out on the host side
        mOutHeaderBuf = nullptr;
    }

    return true;
}

void GoldfishAVCDec::readAndDiscardAllHostBuffers() {
    while (mContext) {
        h264_image_t img = mContext->getImage();
        if (img.data != nullptr) {
            DDD("img pts %lld is discarded", (long long)img.pts);
        } else {
            return;
        }
    }
}

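// On an output-port flush: drain and drop everything the host still holds,
// reset the host context, and re-feed the cached SPS/PPS (csd-0/csd-1) so
// decoding can resume without new codec-config buffers.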
void GoldfishAVCDec::onPortFlushCompleted(OMX_U32 portIndex) {
    /* Once the output buffers are flushed, ignore any buffers that are held in decoder */
    if (kOutputPortIndex == portIndex) {
        setFlushMode();
        DDD("%s %d", __func__, __LINE__);
        readAndDiscardAllHostBuffers();
        mContext->resetH264Context(mWidth, mHeight, mWidth, mHeight,
                                   MediaH264Decoder::PixelFormat::YUV420P);
        if (!mCsd0.empty() && !mCsd1.empty()) {
            mContext->decodeFrame(&(mCsd0[0]), mCsd0.size(), 0);
            mContext->getImage();
            mContext->decodeFrame(&(mCsd1[0]), mCsd1.size(), 0);
            mContext->getImage();
        }
        resetPlugin();
    } else {
        mInputOffset = 0;
    }
}

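// Copy a decoded YUV420P frame plane by plane, used when the output buffer
// dimensions (and hence its stride) differ from the decoded width/height.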
void GoldfishAVCDec::copyImageData(OMX_BUFFERHEADERTYPE *outHeader, h264_image_t &img) {
    int myStride = outputBufferWidth();
    for (int i = 0; i < mHeight; ++i) {
        memcpy(outHeader->pBuffer + i * myStride, img.data + i * mWidth, mWidth);
    }
    int Y = myStride * outputBufferHeight();
    for (int i = 0; i < mHeight / 2; ++i) {
        memcpy(outHeader->pBuffer + Y + i * myStride / 2,
               img.data + mWidth * mHeight + i * mWidth / 2, mWidth / 2);
    }
    int UV = Y / 4;
    for (int i = 0; i < mHeight / 2; ++i) {
        memcpy(outHeader->pBuffer + Y + UV + i * myStride / 2,
               img.data + mWidth * mHeight * 5 / 4 + i * mWidth / 2, mWidth / 2);
    }
}

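// Map an OMX output buffer header to the host color buffer handle registered
// via kUseAndroidNativeBufferIndex; returns -1 if no native buffer is attached.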
int GoldfishAVCDec::getHostColorBufferId(void* header) {
    if (mNWBuffers.find(header) == mNWBuffers.end()) {
        DDD("cannot find color buffer for header %p", header);
        return -1;
    }
    sp<ANativeWindowBuffer> nBuf = mNWBuffers[header];
    cb_handle_t *handle = (cb_handle_t*)nBuf->handle;
    DDD("found color buffer for header %p --> %d", header, handle->hostHandle);
    return handle->hostHandle;
}

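// Main work loop: feed pending input to the host decoder, retrieve decoded
// frames (read back into guest memory or rendered directly into a host color
// buffer), and handle EOS, flush and resolution changes.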
void GoldfishAVCDec::onQueueFilled(OMX_U32 portIndex) {
    static int count1 = 0;
    DDD("calling %s count %d object %p", __func__, ++count1, this);
    UNUSED(portIndex);
    OMX_BUFFERHEADERTYPE *inHeader = NULL;
    BufferInfo *inInfo = NULL;

    if (mSignalledError) {
        return;
    }
    if (mOutputPortSettingsChange != NONE) {
        return;
    }

    if (mContext == nullptr) {
        if (OK != initDecoder()) {
            ALOGE("Failed to initialize decoder");
            notify(OMX_EventError, OMX_ErrorUnsupportedSetting, 0, NULL);
            mSignalledError = true;
            return;
        }
    }

    List<BufferInfo *> &inQueue = getPortQueue(kInputPortIndex);
    List<BufferInfo *> &outQueue = getPortQueue(kOutputPortIndex);

    int count2 = 0;
    while (!outQueue.empty()) {
        DDD("calling %s in while loop count %d", __func__, ++count2);
        BufferInfo *outInfo;
        OMX_BUFFERHEADERTYPE *outHeader;

        if (!mIsInFlush && (NULL == inHeader)) {
            if (!inQueue.empty()) {
                inInfo = *inQueue.begin();
                inHeader = inInfo->mHeader;
                if (inHeader == NULL) {
                    inQueue.erase(inQueue.begin());
                    inInfo->mOwnedByUs = false;
                    continue;
                }
            } else {
                break;
            }
        }

        outInfo = *outQueue.begin();
        outHeader = outInfo->mHeader;
        outHeader->nFlags = 0;
        outHeader->nTimeStamp = 0;
        outHeader->nOffset = 0;

        if (inHeader != NULL) {
            if (inHeader->nFilledLen == 0) {
                // An empty buffer can be end of stream (EOS) buffer, so
                // we'll set the decoder in flush mode if so. If it's not EOS,
                // then just release the buffer.
                inQueue.erase(inQueue.begin());
                inInfo->mOwnedByUs = false;
                notifyEmptyBufferDone(inHeader);

                if (!(inHeader->nFlags & OMX_BUFFERFLAG_EOS)) {
                    return;
                }

                mReceivedEOS = true;
                inHeader = NULL;
                setFlushMode();
            } else if (inHeader->nFlags & OMX_BUFFERFLAG_EOS) {
                mReceivedEOS = true;
            }
        }

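        // Submit the pending access unit (if any) to the host decoder, then try
        // to pull one decoded image back.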
        {
            if (!setDecodeArgs(inHeader, outHeader)) {
                ALOGE("Decoder arg setup failed");
                notify(OMX_EventError, OMX_ErrorUndefined, 0, NULL);
                mSignalledError = true;
                return;
            }

            // TODO: We also need to send the timestamp
            h264_result_t h264Res = {(int)MediaH264Decoder::Err::NoErr, 0};
            if (inHeader != nullptr) {
                if (inHeader->nFlags & OMX_BUFFERFLAG_CODECCONFIG) {
                    unsigned long mysize = (inHeader->nFilledLen - mInputOffset);
                    uint8_t* mydata = mInPBuffer;
                    if (mCsd0.empty()) {
                        mCsd0.assign(mydata, mydata + mysize);
                    } else if (mCsd1.empty()) {
                        mCsd1.assign(mydata, mydata + mysize);
                    }
                }
                DDD("Decoding frame(sz=%lu)", (unsigned long)(inHeader->nFilledLen - mInputOffset));
                h264Res = mContext->decodeFrame(mInPBuffer,
                                                inHeader->nFilledLen - mInputOffset,
                                                inHeader->nTimeStamp);
                mConsumedBytes = h264Res.bytesProcessed;
                if (h264Res.ret == (int)MediaH264Decoder::Err::DecoderRestarted) {
                    mChangingResolution = true;
                }
            } else {
                DDD("No more input data. Attempting to get a decoded frame, if any.");
            }
            h264_image_t img = {};

            bool readBackPixels = true;
            if (mRenderMode == RenderMode::RENDER_BY_GUEST_CPU) {
                img = mContext->getImage();
            } else {
                int hostColorBufferId = getHostColorBufferId(outHeader);
                if (hostColorBufferId >= 0) {
                    img = mContext->renderOnHostAndReturnImageMetadata(hostColorBufferId);
                    readBackPixels = false;
                } else {
                    img = mContext->getImage();
                }
            }

            if (img.data != nullptr) {
                getVUIParams(img);
            }

            if (inHeader) {
                DDD("input time stamp %lld flag %d", (long long)inHeader->nTimeStamp, (int)(inHeader->nFlags));
            }

            // If the decoder is in the changing resolution mode and there is no output present,
            // that means the switching is done and it's ready to reset the decoder and the plugin.
            if (mChangingResolution && img.data == nullptr) {
                mChangingResolution = false;
                DDD("re-create decoder because resolution changed");
                bool portWillReset = false;
                handlePortSettingsChange(&portWillReset, img.width, img.height);
                {
                    DDD("handling port reset");
                    DDD("port resetting (img.width=%u, img.height=%u, mWidth=%u, mHeight=%u)",
                        img.width, img.height, mWidth, mHeight);
                    //resetDecoder();
                    resetPlugin();

                    //mContext->destroyH264Context();
                    //mContext.reset(new MediaH264Decoder());
                    mContext->resetH264Context(mWidth,
                                               mHeight,
                                               mWidth,
                                               mHeight,
                                               MediaH264Decoder::PixelFormat::YUV420P);
                    //mInputOffset += mConsumedBytes;
                    return;
                }
            }

            if (img.data != nullptr) {
                int myWidth = img.width;
                int myHeight = img.height;
                if (myWidth != mWidth || myHeight != mHeight) {
                    bool portWillReset = false;
                    handlePortSettingsChange(&portWillReset, myWidth, myHeight);
                    resetPlugin();
                    mWidth = myWidth;
                    mHeight = myHeight;
                    if (portWillReset) {
                        DDD("port will reset return now");
                        return;
                    } else {
                        DDD("port will NOT reset keep going now");
                    }
                }
                outHeader->nFilledLen = (outputBufferWidth() * outputBufferHeight() * 3) / 2;
                if (readBackPixels) {
                    if (outputBufferWidth() == mWidth && outputBufferHeight() == mHeight) {
                        memcpy(outHeader->pBuffer, img.data, outHeader->nFilledLen);
                    } else {
                        copyImageData(outHeader, img);
                    }
                }

                outHeader->nTimeStamp = img.pts;
                DDD("got output timestamp %lld", (long long)(img.pts));

                outInfo->mOwnedByUs = false;
                outQueue.erase(outQueue.begin());
                outInfo = NULL;
                notifyFillBufferDone(outHeader);
                outHeader = NULL;
            } else if (mIsInFlush) {
                DDD("no img.data while in flush mode");
                /* If in flush mode and no output is returned by the codec,
                 * then come out of flush mode */
                mIsInFlush = false;

                /* If EOS was received on input port and there is no output
                 * from the codec, then signal EOS on output port */
                if (mReceivedEOS) {
                    ALOGI("received EOS, re-create host context");
                    outHeader->nFilledLen = 0;
                    outHeader->nFlags |= OMX_BUFFERFLAG_EOS;

                    outInfo->mOwnedByUs = false;
                    outQueue.erase(outQueue.begin());
                    outInfo = NULL;
                    notifyFillBufferDone(outHeader);
                    outHeader = NULL;
                    resetPlugin();

                    //mContext->destroyH264Context();
                    //mContext.reset(new MediaH264Decoder());
                    mContext->resetH264Context(mWidth,
                                               mHeight,
                                               mWidth,
                                               mHeight,
                                               MediaH264Decoder::PixelFormat::YUV420P);
                }
            }
            mInputOffset += mConsumedBytes;
        }

        // If more than 4 bytes are remaining in input, then do not release it
        if (inHeader != NULL && ((inHeader->nFilledLen - mInputOffset) <= 4)) {
            inInfo->mOwnedByUs = false;
            inQueue.erase(inQueue.begin());
            inInfo = NULL;
            notifyEmptyBufferDone(inHeader);
            inHeader = NULL;
            mInputOffset = 0;

            /* If input EOS is seen and decoder is not in flush mode,
             * set the decoder in flush mode.
             * There can be a case where EOS is sent along with last picture data.
             * In that case, only after decoding that input data, the decoder has to be
             * put in flush. This case is handled here. */

            if (mReceivedEOS && !mIsInFlush) {
                setFlushMode();
            }
        }
    }
}

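// Report the gralloc usage bits required for output buffers when the framework
// queries the Android native buffer usage extension.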
OMX_ERRORTYPE GoldfishAVCDec::internalGetParameter(
        OMX_INDEXTYPE index, OMX_PTR params) {
    const int32_t indexFull = index;
    switch (indexFull) {
        case kGetAndroidNativeBufferUsageIndex:
        {
            DDD("calling kGetAndroidNativeBufferUsageIndex");
            GetAndroidNativeBufferUsageParams* nativeBuffersUsage = (GetAndroidNativeBufferUsageParams *) params;
            nativeBuffersUsage->nUsage = (unsigned int)(BufferUsage::GPU_DATA_BUFFER);
            return OMX_ErrorNone;
        }

        default:
            return GoldfishVideoDecoderOMXComponent::internalGetParameter(index, params);
    }
}

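// Handle the Android native buffer extensions: enabling/disabling native
// buffers and registering each native buffer so its host color buffer handle
// can be looked up at decode time.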
OMX_ERRORTYPE GoldfishAVCDec::internalSetParameter(
        OMX_INDEXTYPE index, const OMX_PTR params) {
    // Include extension index OMX_INDEXEXTTYPE.
    const int32_t indexFull = index;

    switch (indexFull) {
        case kEnableAndroidNativeBuffersIndex:
        {
            DDD("calling kEnableAndroidNativeBuffersIndex");
            EnableAndroidNativeBuffersParams* enableNativeBuffers = (EnableAndroidNativeBuffersParams *) params;
            if (enableNativeBuffers) {
                mEnableAndroidNativeBuffers = enableNativeBuffers->enable;
                if (!mEnableAndroidNativeBuffers) {
                    mNWBuffers.clear();
                    DDD("disabled kEnableAndroidNativeBuffersIndex");
                } else {
                    DDD("enabled kEnableAndroidNativeBuffersIndex");
                }
            }
            return OMX_ErrorNone;
        }

        case kUseAndroidNativeBufferIndex:
        {
            if (!mEnableAndroidNativeBuffers) {
                ALOGE("Error: Android native buffers are not enabled");
                return OMX_ErrorBadParameter;
            }
            UseAndroidNativeBufferParams *use_buffer_params = (UseAndroidNativeBufferParams *)params;
            if (use_buffer_params) {
                sp<ANativeWindowBuffer> nBuf = use_buffer_params->nativeBuffer;
                cb_handle_t *handle = (cb_handle_t*)nBuf->handle;
                void* dst = NULL;
                DDD("kUseAndroidNativeBufferIndex with handle %p host color handle %d calling usebuffer", handle,
                      handle->hostHandle);
                useBufferCallerLockedAlready(use_buffer_params->bufferHeader, use_buffer_params->nPortIndex,
                        use_buffer_params->pAppPrivate, handle->allocatedSize(), (OMX_U8*)dst);
                mNWBuffers[*(use_buffer_params->bufferHeader)] = use_buffer_params->nativeBuffer;
            }
            return OMX_ErrorNone;
        }

        default:
            return GoldfishVideoDecoderOMXComponent::internalSetParameter(index, params);
    }
}

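// The Android native buffer extensions are only exposed when frames are
// rendered by the host GPU; everything else is delegated to the base class.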
OMX_ERRORTYPE GoldfishAVCDec::getExtensionIndex(
        const char *name, OMX_INDEXTYPE *index) {

    if (mRenderMode == RenderMode::RENDER_BY_HOST_GPU) {
        if (!strcmp(name, "OMX.google.android.index.enableAndroidNativeBuffers")) {
            DDD("calling getExtensionIndex for enable ANB");
            *(int32_t*)index = kEnableAndroidNativeBuffersIndex;
            return OMX_ErrorNone;
        } else if (!strcmp(name, "OMX.google.android.index.useAndroidNativeBuffer")) {
            *(int32_t*)index = kUseAndroidNativeBufferIndex;
            return OMX_ErrorNone;
        } else if (!strcmp(name, "OMX.google.android.index.getAndroidNativeBufferUsage")) {
            *(int32_t*)index = kGetAndroidNativeBufferUsageIndex;
            return OMX_ErrorNone;
        }
    }
    return GoldfishVideoDecoderOMXComponent::getExtensionIndex(name, index);
}

int GoldfishAVCDec::getColorAspectPreference() {
    return kPreferBitstream;
}

}  // namespace android

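// Factory entry point: component names starting with "OMX.android.goldfish"
// decode with host-GPU rendering, all others fall back to guest-CPU rendering.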
android::GoldfishOMXComponent *createGoldfishOMXComponent(
        const char *name, const OMX_CALLBACKTYPE *callbacks, OMX_PTR appData,
        OMX_COMPONENTTYPE **component) {
    if (!strncmp("OMX.android.goldfish", name, 20)) {
        return new android::GoldfishAVCDec(name, callbacks, appData, component, RenderMode::RENDER_BY_HOST_GPU);
    } else {
        return new android::GoldfishAVCDec(name, callbacks, appData, component, RenderMode::RENDER_BY_GUEST_CPU);
    }
}