/cts/tests/tests/nfc/src/android/nfc/cts/ |
D | CardEmulationTest.java |
    379 ArrayList<PollingFrame> frames = new ArrayList<PollingFrame>(6); in testTypeAPollingLoopToDefault() local
    380 frames.add(createFrame(PollingFrame.POLLING_LOOP_TYPE_ON)); in testTypeAPollingLoopToDefault()
    381 frames.add(createFrame(PollingFrame.POLLING_LOOP_TYPE_A)); in testTypeAPollingLoopToDefault()
    382 frames.add(createFrame(PollingFrame.POLLING_LOOP_TYPE_OFF)); in testTypeAPollingLoopToDefault()
    383 frames.add(createFrame(PollingFrame.POLLING_LOOP_TYPE_ON)); in testTypeAPollingLoopToDefault()
    384 frames.add(createFrame(PollingFrame.POLLING_LOOP_TYPE_A)); in testTypeAPollingLoopToDefault()
    385 frames.add(createFrame(PollingFrame.POLLING_LOOP_TYPE_OFF)); in testTypeAPollingLoopToDefault()
    387 notifyPollingLoopAndWait(new ArrayList<PollingFrame>(frames), in testTypeAPollingLoopToDefault()
    404 ArrayList<PollingFrame> frames = new ArrayList<PollingFrame>(6); in testTypeAPollingLoopToWalletHolder()
    405 frames.add(createFrame(PollingFrame.POLLING_LOOP_TYPE_ON)); in testTypeAPollingLoopToWalletHolder()
    [all …]
|
D | BackgroundHostApduService.java |
    26 public void processPollingFrames(List<PollingFrame> frames) { in processPollingFrames() argument
    29 frames); in processPollingFrames() local
|
D | CustomHostApduService.java |
    25 public void processPollingFrames(List<PollingFrame> frames) { in processPollingFrames() argument
    28 frames); in processPollingFrames() local
|
D | CtsMyHostApduService.java |
    30 public void processPollingFrames(List<PollingFrame> frames) { in processPollingFrames() argument
    33 frames); in processPollingFrames() local
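The three CTS services above (and the wallet, non-payment, and foreground services listed further down) share one pattern: a HostApduService subclass overrides processPollingFrames(List<PollingFrame>) and forwards the observed frames. Below is a minimal sketch of that override, assuming API level 35 where processPollingFrames and PollingFrame are available; the class name, the reaction to a Type-A frame, and the 0x9000 status word are illustrative, not taken from the CTS sources.

import android.nfc.cardemulation.HostApduService;
import android.nfc.cardemulation.PollingFrame;
import android.os.Bundle;
import java.util.List;

public class SketchApduService extends HostApduService {
    @Override
    public void processPollingFrames(List<PollingFrame> frames) {
        // Invoked with a batch of polling frames observed by the NFC stack.
        for (PollingFrame frame : frames) {
            if (frame.getType() == PollingFrame.POLLING_LOOP_TYPE_A) {
                // React to a Type-A polling frame, e.g. pre-warm applet state.
            }
        }
    }

    @Override
    public byte[] processCommandApdu(byte[] commandApdu, Bundle extras) {
        return new byte[] {(byte) 0x90, 0x00}; // placeholder status word
    }

    @Override
    public void onDeactivated(int reason) { }
}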
|
D | HostApduServiceTest.java |
    60 ArrayList<PollingFrame> frames = new ArrayList<PollingFrame>(); in testProcessPollingFrame() local
    63 frames.add(frame); in testProcessPollingFrame()
    64 service.processPollingFrames(frames); in testProcessPollingFrame()
|
D | PollingLoopBroadcastReceiver.java |
    35 List<PollingFrame> frames = intent.getParcelableArrayListExtra(FRAMES_KEY, in onReceive() local
    37 CardEmulationTest.sCurrentPollLoopReceiver.notifyPollingLoop(className, frames); in onReceive()
|
/cts/apps/CameraITS/tests/scene1_1/ |
D | test_burst_sameness_manual.py |
    105 frames = range(_NUM_FRAMES)
    108 pylab.plot(frames, r_means, '-ro')
    109 pylab.plot(frames, g_means, '-go')
    110 pylab.plot(frames, b_means, '-bo')
|
/cts/tests/tests/nfc/WalletRoleHolderApp/src/com/android/test/walletroleholder/ |
D | XWalletRoleHolderApduService.java |
    31 public void processPollingFrames(List<PollingFrame> frames) { in processPollingFrames() argument
    32 ArrayList<PollingFrame> framesArrayList = new ArrayList<>(frames); in processPollingFrames()
|
D | WalletRoleHolderApduService.java |
    35 public void processPollingFrames(List<PollingFrame> frames) { in processPollingFrames() argument
    36 ArrayList<PollingFrame> framesArrayList = new ArrayList<>(frames); in processPollingFrames()
|
/cts/tests/tests/nfc/NonPaymentNfcApp/src/com/android/test/nonpaymentnfc/ |
D | NonPaymentApduService.java |
    31 public void processPollingFrames(List<PollingFrame> frames) { in processPollingFrames() argument
    32 ArrayList<PollingFrame> framesArrayList = new ArrayList<>(frames); in processPollingFrames()
|
/cts/tests/tests/nfc/ForegroundNfcApp/src/com/android/test/foregroundnfc/ |
D | ForegroundApduService.java |
    31 public void processPollingFrames(List<PollingFrame> frames) { in processPollingFrames() argument
    32 ArrayList<PollingFrame> framesArrayList = new ArrayList<>(frames); in processPollingFrames()
|
/cts/tests/media/common/src/android/mediav2/common/cts/ |
D | VideoErrorManager.java |
    187 int frames = 0; in generateErrorStats() local
    235 frames, curYMSE, curUMSE, curVMSE, mFramesPSNR.get(frames)[0], in generateErrorStats()
    236 mFramesPSNR.get(frames)[1], mFramesPSNR.get(frames)[2]); in generateErrorStats()
    239 frames++; in generateErrorStats()
    242 mGlobalMSE[i] /= frames; in generateErrorStats()
    245 mAvgPSNR[i] /= frames; in generateErrorStats()
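VideoErrorManager accumulates per-frame MSE and PSNR and divides by the frame count at the end. Reduced to a self-contained helper, that averaging looks roughly like the sketch below; the helper name and the 8-bit peak value of 255 are assumptions, not the CTS code.

// Illustrative sketch, not the CTS helper: average per-frame MSE over all
// frames and convert to PSNR with the standard 8-bit formula
// PSNR = 10 * log10(255^2 / MSE).
static double averagePsnr(double[] perFrameMse) {
    double mse = 0;
    for (double m : perFrameMse) {
        mse += m;
    }
    mse /= perFrameMse.length;                 // global MSE across frames
    return mse == 0 ? Double.POSITIVE_INFINITY // identical content
                    : 10 * Math.log10(255.0 * 255.0 / mse);
}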
|
/cts/apps/CameraITS/tests/sensor_fusion/ |
D | test_sensor_fusion.py |
    178 frames = []
    181 frames.append(img)
    184 return events, frames
    282 frames = []
    286 frames.append(np.array(img).reshape((h, w, 3)) / 255)
    287 return events, frames, w, h
    360 events, frames, _, _ = load_data()
    369 events, frames = _collect_data(
    384 frames[_START_FRAME:], events['facing'], img_h,
|
D | test_video_stabilization.py |
    195 frames = []
    200 frames.append(img/255)
    201 frame_shape = frames[0].shape
    207 frames[_START_FRAME:], facing, frame_shape[0],
|
D | test_lens_intrinsic_calibration.py |
    83 frames = range(start_frame, len(principal_points_dist)+start_frame)
    85 pylab.plot(frames, principal_points_dist, '-ro', label='dist')
|
/cts/tests/tests/view/src/android/view/cts/ |
D | GLProducerThread.java |
    54 int frames, int delayMs, Semaphore semaphore) { in GLProducerThread() argument
    56 mFrames = frames; in GLProducerThread()
    63 GLProducerThread(SurfaceTexture surfaceTexture, GLRenderer renderer, int frames, int delayMs, in GLProducerThread() argument
    65 this(surfaceTexture, renderer, null, frames, delayMs, semaphore); in GLProducerThread()
|
/cts/tests/tests/nativemedia/aaudio/jni/ |
D | test_aaudio.cpp |
    149 const int32_t frames = actual().sampleRate; in testTimestamp() local
    162 processData(frames, timeoutNanos); in testTimestamp()
    190 virtual void processData(const int32_t frames, const int64_t timeoutNanos) = 0;
    204 void processData(const int32_t frames, const int64_t timeoutNanos) override;
    231 void AAudioInputStreamTest::processData(const int32_t frames, const int64_t timeoutNanos) { in processData() argument
    235 for (int32_t framesLeft = frames; framesLeft > 0; ) { in processData()
    237 stream(), getDataBuffer(), std::min(frames, mFramesPerRead), timeoutNanos); in processData()
    363 void processData(const int32_t frames, const int64_t timeoutNanos) override;
    382 void AAudioOutputStreamTest::processData(const int32_t frames, const int64_t timeoutNanos) { in processData() argument
    383 for (int32_t framesLeft = frames; framesLeft > 0;) { in processData()
|
/cts/apps/CameraITS/tests/inprog/rolling_shutter_skew/ |
D | test_rolling_shutter_skew.py |
    130 frames = [its.image.convert_capture_to_rgb_image(c) for c in raw_caps]
    133 frames, reported_skew = load_data(args.read_dir)
    136 (frame_h, _, _) = frames[0].shape
    143 for i, img in enumerate(frames):
    149 frames, args.led_time, debug_dir)
    227 frames = []
    229 frames.append(its.image.load_rgb_image(frame_file))
    232 return frames, reported_skew
    235 def find_average_shutter_skew(frames, led_time, debug_dir=None): argument
    255 for i, frame in enumerate(frames):
|
/cts/tests/tests/mediatranscoding/src/android/media/mediatranscoding/cts/ |
D | MediaTranscodingTestUtil.java |
    453 int frames = 0; in computePsnr() local
    495 minimumPSNRFrameIndex = frames; in computePsnr()
    499 frames, curYPSNR, curUPSNR, curVPSNR); in computePsnr()
    502 frames++; in computePsnr()
    505 averageYPSNR /= frames; in computePsnr()
    506 averageUPSNR /= frames; in computePsnr()
    507 averageVPSNR /= frames; in computePsnr()
    510 Log.d(TAG, "PSNR statistics for " + frames + " frames."); in computePsnr()
|
/cts/apps/CameraITS/utils/ |
D | sensor_fusion_utils.py |
    476 def get_cam_rotations(frames, facing, h, file_name_stem, argument
    500 for frame in frames:
    530 cv2.circle(frames[j], (x, y), 3, (100, 255, 255), -1)
    532 frames[j], f'{file_name_stem}_features{j+start_frame:03d}.png')
    556 frame = frames[j]
    704 frames = range(start_frame, len(cam_rots)+start_frame)
    706 pylab.plot(frames, cam_rots*_RADS_TO_DEGS, '-ro', label='x')
|
D | preview_processing_utils.py |
    211 frames = []
    218 frames.append(img / 255)
    219 frame_h, frame_w, _ = frames[0].shape
    230 frames[_START_FRAME:],
|
/cts/tests/tests/media/decoder/src/android/media/decoder/cts/ |
D | AdaptivePlaybackTest.java |
    630 int frames = mDecoder.queueInputBufferRange( in addTests()
    640 if (lastSequence && frames >= 0) { in addTests()
    641 warn("did not receive EOS, received " + frames + " frames"); in addTests()
    642 } else if (!lastSequence && frames < 0) { in addTests()
    643 warn("received EOS, received " + (-frames) + " frames"); in addTests()
    648 mDecodedFrames += Math.abs(frames); in addTests()
    788 int frames = mDecoder.queueInputBufferRange(
    796 if (lastSequence && frames >= 0) {
    797 warn("did not receive EOS, received " + frames + " frames");
    798 } else if (!lastSequence && frames < 0) {
    [all …]
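These fragments rely on a sign convention from the test's own queueInputBufferRange() helper (not a MediaCodec API): a non-negative return appears to be the number of frames queued without end-of-stream, while a negative return means EOS was queued and the magnitude is the frame count. A minimal sketch of that bookkeeping, with warn() replaced by System.err to stay self-contained:

// Sketch of the assumed sign convention, not the CTS method itself.
static int countDecodedFrames(boolean lastSequence, int frames) {
    if (lastSequence && frames >= 0) {
        System.err.println("did not receive EOS, received " + frames + " frames");
    } else if (!lastSequence && frames < 0) {
        System.err.println("received EOS, received " + (-frames) + " frames");
    }
    return Math.abs(frames); // frames decoded in this sequence either way
}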
|
/cts/tests/tests/media/audio/src/android/media/audio/cts/ |
D | AudioTrackTest.java |
    1834 final int frames = AudioHelper.frameCountFromMsec(300 /* ms */, format); in playOnceStreamData() local
    1835 final int sourceSamples = channelCount * frames; in playOnceStreamData()
    3066 AudioTrack track, int frames) throws Exception { in validateWriteStartsStreamWithSetStartThreshold() argument
    3068 validateSetStartThresholdInFrames(track, frames); in validateWriteStartsStreamWithSetStartThreshold()
    3070 validateWriteStartsStream(track, frames); in validateWriteStartsStreamWithSetStartThreshold()
    3081 private static void validateWriteStartsStream(AudioTrack track, int frames) throws Exception { in validateWriteStartsStream() argument
    3083 final short[] data = new short[frames]; in validateWriteStartsStream()
    3093 final int PARTIAL_WRITE_IN_FRAMES = frames - 1; in validateWriteStartsStream()
    3105 expectedFrames += frames; in validateWriteStartsStream()
    3106 Thread.sleep(frames * 1000L / track.getSampleRate()); // accommodate for #frames. in validateWriteStartsStream()
    [all …]
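validateWriteStartsStream() above exercises AudioTrack.setStartThresholdInFrames() (API 31+): after play(), output stays silent until at least the threshold number of frames has been written. A minimal sketch of that behavior, assuming a 48 kHz mono 16-bit streaming track; the frame count and buffer sizing are illustrative, not the CTS values.

import android.media.AudioFormat;
import android.media.AudioTrack;

// Sketch only: write one frame short of the threshold, then cross it.
static void demoStartThreshold() {
    final int frames = 4800; // ~100 ms at 48 kHz
    AudioTrack track = new AudioTrack.Builder()
            .setAudioFormat(new AudioFormat.Builder()
                    .setSampleRate(48000)
                    .setEncoding(AudioFormat.ENCODING_PCM_16BIT)
                    .setChannelMask(AudioFormat.CHANNEL_OUT_MONO)
                    .build())
            .setTransferMode(AudioTrack.MODE_STREAM)
            .setBufferSizeInBytes(frames * 2 /* bytes per mono 16-bit frame */)
            .build();
    track.setStartThresholdInFrames(frames);
    track.play();                        // armed, but nothing rendered yet
    short[] data = new short[frames];
    track.write(data, 0, frames - 1);    // one frame below threshold: still silent
    track.write(data, frames - 1, 1);    // crossing the threshold starts playback
    track.release();                     // a real test would check position first
}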
|
/cts/tests/tests/media/common/src/android/media/cts/ |
D | AudioHelper.java |
    420 final long frames = ts.framePosition; in add() local
    426 final long deltaFrames = frames - mLastFrames; in add()
    435 Log.d(mTag, "frames(" + frames in add()
    464 mLastFrames = frames; in add()
|
/cts/hostsidetests/securitybulletin/securityPatch/CVE-2017-0479/ |
D | poc.cpp |
    50 virtual binder::Status framesProcessed(int32_t frames __unused) override { in framesProcessed()
|