/*
 * Copyright (C) 2015 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package android.media.cts;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;

import android.annotation.IntRange;
import android.annotation.NonNull;
import android.annotation.Nullable;
import android.media.AudioAttributes;
import android.media.AudioFormat;
import android.media.AudioManager;
import android.media.AudioRecord;
import android.media.AudioTimestamp;
import android.media.AudioTrack;
import android.os.Looper;
import android.os.PersistableBundle;
import android.util.Log;

import androidx.test.InstrumentationRegistry;

import com.android.compatibility.common.util.CddTest;
import com.android.compatibility.common.util.DeviceReportLog;
import com.android.compatibility.common.util.ResultType;
import com.android.compatibility.common.util.ResultUnit;

import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.ShortBuffer;

// Used for statistics and loopers in listener tests.
// See AudioRecordTest.java and AudioTrack_ListenerTest.java.
public class AudioHelper {

    // asserts key equals expected in the metrics bundle.
    public static void assertMetricsKeyEquals(
            PersistableBundle metrics, String key, Object expected) {
        Object actual = metrics.get(key);
        assertEquals("metric " + key + " actual " + actual + " != " + " expected " + expected,
                expected, actual);
    }

    // asserts key exists in the metrics bundle.
    public static void assertMetricsKey(PersistableBundle metrics, String key) {
        Object actual = metrics.get(key);
        assertNotNull("metric " + key + " does not exist", actual);
    }

    // create sine waves or chirps for data arrays
    public static byte[] createSoundDataInByteArray(int bufferSamples, final int sampleRate,
            final double frequency, double sweep) {
        final double rad = 2 * Math.PI * frequency / sampleRate;
        byte[] vai = new byte[bufferSamples];
        sweep = Math.PI * sweep / ((double) sampleRate * vai.length);
        for (int j = 0; j < vai.length; j++) {
            int unsigned = (int) (Math.sin(j * (rad + j * sweep)) * Byte.MAX_VALUE)
                    + Byte.MAX_VALUE & 0xFF;
            vai[j] = (byte) unsigned;
        }
        return vai;
    }

    public static short[] createSoundDataInShortArray(int bufferSamples, final int sampleRate,
            final double frequency, double sweep) {
        final double rad = 2 * Math.PI * frequency / sampleRate;
        short[] vai = new short[bufferSamples];
        sweep = Math.PI * sweep / ((double) sampleRate * vai.length);
        for (int j = 0; j < vai.length; j++) {
            vai[j] = (short) (Math.sin(j * (rad + j * sweep)) * Short.MAX_VALUE);
        }
        return vai;
    }

    public static float[] createSoundDataInFloatArray(int bufferSamples, final int sampleRate,
            final double frequency, double sweep) {
        final double rad = 2 * Math.PI * frequency / sampleRate;
        float[] vaf = new float[bufferSamples];
        sweep = Math.PI * sweep / ((double) sampleRate * vaf.length);
        for (int j = 0; j < vaf.length; j++) {
            vaf[j] = (float) (Math.sin(j * (rad + j * sweep)));
        }
        return vaf;
    }

    /**
     * Creates a {@link ByteBuffer} containing short values defining a sine wave or chirp sound.
     *
     * @param bufferSamples number of short samples in the buffer
     * @param sampleRate of the output signal
     * @param frequency the base frequency of the sine wave
     * @param sweep if 0 will generate a sine wave with the given frequency otherwise a chirp sound
     * @return a newly allocated {@link ByteBuffer} containing the described audio signal
     */
    public static ByteBuffer createSoundDataInShortByteBuffer(int bufferSamples,
            final int sampleRate, final double frequency, double sweep) {
        final double rad = 2.0f * (float) Math.PI * frequency / (float) sampleRate;
        ByteBuffer audioBuffer = ByteBuffer.allocate(bufferSamples * Short.BYTES);
        ShortBuffer samples = audioBuffer.order(ByteOrder.nativeOrder()).asShortBuffer();
        sweep = Math.PI * sweep / ((double) sampleRate * bufferSamples);
        for (int j = 0; j < bufferSamples; ++j) {
            short vai = (short) (Math.sin(j * (rad + j * sweep)) * Short.MAX_VALUE);
            samples.put(vai);
        }

        audioBuffer.rewind();
        return audioBuffer;
    }

    /**
     * Returns a consecutive bit mask starting from the 0th bit indicating which channels
     * are active, used for maskArray below.
     *
     * @param channelMask the channel mask for audio data.
     * @param validMask the valid channels to permit (should be a subset of channelMask) but
     *                  not checked.
     * @return an integer whose consecutive bits are set for the channels that are permitted.
     */
    private static int packMask(int channelMask, int validMask) {
        final int channels = Integer.bitCount(channelMask);
        if (channels == 0) {
            throw new IllegalArgumentException("invalid channel mask " + channelMask);
        }
        int packMask = 0;
        for (int i = 0; i < channels; ++i) {
            final int lowbit = channelMask & -channelMask;
            packMask |= (validMask & lowbit) != 0 ? (1 << i) : 0;
            channelMask -= lowbit;
        }
        return packMask;
    }

    /**
     * Zeroes out channels in an array of audio data for testing.
     *
     * @param array of audio data.
     * @param channelMask representation for the audio data.
     * @param validMask which channels are valid (other channels will be zeroed out).  A subset
     *                  of channelMask.
     */
    public static void maskArray(byte[] array, int channelMask, int validMask) {
        final int packMask = packMask(channelMask, validMask);
        final int channels = Integer.bitCount(channelMask);
        int j = 0;
        for (int i = 0; i < array.length; ++i) {
            if ((packMask & (1 << j)) == 0) {
                array[i] = 0;
            }
            if (++j >= channels) {
                j = 0;
            }
        }
    }

    public static void maskArray(short[] array, int channelMask, int validMask) {
        final int packMask = packMask(channelMask, validMask);
        final int channels = Integer.bitCount(channelMask);
        int j = 0;
        for (int i = 0; i < array.length; ++i) {
            if ((packMask & (1 << j)) == 0) {
                array[i] = 0;
            }
            if (++j >= channels) {
                j = 0;
            }
        }
    }

    public static void maskArray(float[] array, int channelMask, int validMask) {
        final int packMask = packMask(channelMask, validMask);
        final int channels = Integer.bitCount(channelMask);
        int j = 0;
        for (int i = 0; i < array.length; ++i) {
            if ((packMask & (1 << j)) == 0) {
                array[i] = 0;
            }
            if (++j >= channels) {
                j = 0;
            }
        }
    }

    /**
     * Create and fill a short array with complete sine waves so we can
     * hear buffer underruns more easily.
     */
    public static short[] createSineWavesShort(int numFrames, int samplesPerFrame,
            int numCycles, double amplitude) {
        final short[] data = new short[numFrames * samplesPerFrame];
        final double rad = numCycles * 2.0 * Math.PI / numFrames;
        for (int j = 0; j < data.length;) {
            short sample = (short) (amplitude * Math.sin(j * rad) * Short.MAX_VALUE);
            for (int sampleIndex = 0; sampleIndex < samplesPerFrame; sampleIndex++) {
                data[j++] = sample;
            }
        }
        return data;
    }

    // returns the size in bytes of one audio frame (one sample for every channel).
    public static int frameSizeFromFormat(AudioFormat format) {
        return format.getChannelCount()
                * format.getBytesPerSample(format.getEncoding());
    }

    // converts a duration in milliseconds to a frame count at the format's sample rate.
    public static int frameCountFromMsec(int ms, AudioFormat format) {
        return ms * format.getSampleRate() / 1000;
    }

    // returns true if audio output is silenced (system property ro.audio.silent == "1").
    public static boolean hasAudioSilentProperty() {
        String silent = null;

        try {
            silent = (String) Class.forName("android.os.SystemProperties").getMethod("get",
                    String.class).invoke(null, "ro.audio.silent");
        } catch (Exception e) {
            // pass through
        }

        if (silent != null && silent.equals("1")) {
            return true;
        }

        return false;
    }

    // Running statistics: count, mean, mean absolute value, and maximum absolute value.
    public static class Statistics {
        public void add(double value) {
            final double absValue = Math.abs(value);
            mSum += value;
            mSumAbs += absValue;
            mMaxAbs = Math.max(mMaxAbs, absValue);
            ++mCount;
        }

        public double getAvg() {
            if (mCount == 0) {
                return 0;
            }
            return mSum / mCount;
        }

        public double getAvgAbs() {
            if (mCount == 0) {
                return 0;
            }
            return mSumAbs / mCount;
        }

        public double getMaxAbs() {
            return mMaxAbs;
        }

        private int mCount = 0;
        private double mSum = 0;
        private double mSumAbs = 0;
        private double mMaxAbs = 0;
    }

    // for listener tests
    // lightweight java.util.concurrent.Future*
    public static class FutureLatch<T>
    {
        private T mValue;
        private boolean mSet;

        public void set(T value)
        {
            synchronized (this) {
                assert !mSet;
                mValue = value;
                mSet = true;
                notify();
            }
        }

        public T get()
        {
            T value;
            synchronized (this) {
                while (!mSet) {
                    try {
                        wait();
                    } catch (InterruptedException e) {
                        ;
                    }
                }
                value = mValue;
            }
            return value;
        }
    }

    // for listener tests
    // represents a factory for T
    public interface MakesSomething<T>
    {
        T makeSomething();
    }

    // for listener tests
    // used to construct an object in the context of an asynchronous thread with looper
    public static class MakeSomethingAsynchronouslyAndLoop<T>
    {
        private Thread mThread;
        private volatile Looper mLooper;
        private final MakesSomething<T> mWhatToMake;

        public MakeSomethingAsynchronouslyAndLoop(MakesSomething<T> whatToMake)
        {
            assert whatToMake != null;
            mWhatToMake = whatToMake;
        }

        public T make()
        {
            final FutureLatch<T> futureLatch = new FutureLatch<T>();
            mThread = new Thread()
            {
                @Override
                public void run()
                {
                    Looper.prepare();
                    mLooper = Looper.myLooper();
                    T something =
                            mWhatToMake.makeSomething();
                    futureLatch.set(something);
                    Looper.loop();
                }
            };
            mThread.start();
            return futureLatch.get();
        }

        public void join()
        {
            mLooper.quit();
            try {
                mThread.join();
            } catch (InterruptedException e) {
                ;
            }
            // avoid dangling references
            mLooper = null;
            mThread = null;
        }
    }

    public static int outChannelMaskFromInChannelMask(int channelMask) {
        switch (channelMask) {
            case AudioFormat.CHANNEL_IN_MONO:
                return AudioFormat.CHANNEL_OUT_MONO;
            case AudioFormat.CHANNEL_IN_STEREO:
                return AudioFormat.CHANNEL_OUT_STEREO;
            default:
                return AudioFormat.CHANNEL_INVALID;
        }
    }

    @CddTest(requirement="5.10/C-1-6,C-1-7")
    public static class TimestampVerifier {

        // CDD 5.6 1ms timestamp accuracy
        private static final double TEST_MAX_JITTER_MS_ALLOWED = 6.; // a validity check
        private static final double TEST_STD_JITTER_MS_ALLOWED = 3.; // flaky tolerance 3x
        private static final double TEST_STD_JITTER_MS_WARN = 1.;    // CDD requirement warning

        // CDD 5.6 100ms track startup latency
        private static final double TEST_STARTUP_TIME_MS_ALLOWED = 500.; // error
        private final double TEST_STARTUP_TIME_MS_WARN;                  // warning
        private static final double TEST_STARTUP_TIME_MS_INFO = 100.;    // informational

        private static final int MILLIS_PER_SECOND = 1000;
        private static final long NANOS_PER_MILLISECOND = 1000000;
        private static final long NANOS_PER_SECOND = NANOS_PER_MILLISECOND * MILLIS_PER_SECOND;
        private static final String REPORT_LOG_NAME = "CtsMediaTestCases";

        private final String mTag;
        private final int mSampleRate;
        private final long mStartFrames; // initial timestamp condition for verification.

        // Running statistics
        private int mCount = 0;
        private long mLastFrames = 0;
        private long mLastTimeNs = 0;
        private int mJitterCount = 0;
        private double mMeanJitterMs = 0.;
        private double mSecondMomentJitterMs = 0.;
        private double mMaxAbsJitterMs = 0.;
        private int mWarmupCount = 0;

        public TimestampVerifier(@Nullable String tag, @IntRange(from=4000) int sampleRate,
                long startFrames, boolean isProAudioDevice) {
            mTag = tag; // Log accepts null
            mSampleRate = sampleRate;
            mStartFrames = startFrames;
            // Warning if higher than MUST value for pro audio.  Zero means ignore.
            TEST_STARTUP_TIME_MS_WARN = isProAudioDevice ? 200.
                    : 0.;
        }

        public int getJitterCount() { return mJitterCount; }
        public double getMeanJitterMs() { return mMeanJitterMs; }
        public double getStdJitterMs() { return Math.sqrt(mSecondMomentJitterMs / mJitterCount); }
        public double getMaxAbsJitterMs() { return mMaxAbsJitterMs; }
        public double getStartTimeNs() {
            return mLastTimeNs - ((mLastFrames - mStartFrames) * NANOS_PER_SECOND / mSampleRate);
        }

        public void add(@NonNull AudioTimestamp ts) {
            final long frames = ts.framePosition;
            final long timeNs = ts.nanoTime;

            assertTrue(mTag + " timestamps must have causal time", System.nanoTime() >= timeNs);

            if (mCount > 0) { // need delta info from previous iteration (skipping first)
                final long deltaFrames = frames - mLastFrames;
                final long deltaTimeNs = timeNs - mLastTimeNs;

                if (deltaFrames == 0 && deltaTimeNs == 0) return;

                final double deltaFramesNs = (double) deltaFrames * NANOS_PER_SECOND / mSampleRate;
                final double jitterMs = (deltaTimeNs - deltaFramesNs)  // actual - expected
                        * (1. / NANOS_PER_MILLISECOND);

                Log.d(mTag, "frames(" + frames
                        + ") timeNs(" + timeNs
                        + ") lastframes(" + mLastFrames
                        + ") lastTimeNs(" + mLastTimeNs
                        + ") deltaFrames(" + deltaFrames
                        + ") deltaTimeNs(" + deltaTimeNs
                        + ") jitterMs(" + jitterMs + ")");
                assertTrue(mTag + " timestamp time should be increasing", deltaTimeNs >= 0);
                assertTrue(mTag + " timestamp frames should be increasing", deltaFrames >= 0);

                if (mLastFrames != 0) {
                    if (mWarmupCount++ > 1) { // ensure device is warmed up
                        // Welford's algorithm
                        // https://en.wikipedia.org/wiki/Algorithms_for_calculating_variance
                        ++mJitterCount;
                        final double delta = jitterMs - mMeanJitterMs;
                        mMeanJitterMs += delta / mJitterCount;
                        final double delta2 = jitterMs - mMeanJitterMs;
                        mSecondMomentJitterMs += delta * delta2;

                        // jitterMs is signed, so max uses abs() here.
                        final double absJitterMs = Math.abs(jitterMs);
                        if (absJitterMs > mMaxAbsJitterMs) {
                            mMaxAbsJitterMs = absJitterMs;
                        }
                    }
                }
            }
            ++mCount;
            mLastFrames = frames;
            mLastTimeNs = timeNs;
        }

        public void verifyAndLog(long trackStartTimeNs, @Nullable String logName) {
            // enough timestamps?
            assertTrue(mTag + " need at least 2 jitter measurements", mJitterCount >= 2);

            // Compute startup time and std jitter.
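            // The track start time is extrapolated by getStartTimeNs() from the most recent
            // timestamp (last time minus the elapsed frames converted at the nominal sample
            // rate), so startup time here is that estimate minus the caller-supplied
            // trackStartTimeNs.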
            final int startupTimeMs =
                    (int) ((getStartTimeNs() - trackStartTimeNs) / NANOS_PER_MILLISECOND);
            final double stdJitterMs = getStdJitterMs();

            // Check startup time
            assertTrue(mTag + " expect startupTimeMs " + startupTimeMs
                    + " <= " + TEST_STARTUP_TIME_MS_ALLOWED,
                    startupTimeMs <= TEST_STARTUP_TIME_MS_ALLOWED);
            if (TEST_STARTUP_TIME_MS_WARN > 0 && startupTimeMs > TEST_STARTUP_TIME_MS_WARN) {
                Log.w(mTag, "CDD warning: startup time " + startupTimeMs
                        + " > " + TEST_STARTUP_TIME_MS_WARN);
            } else if (startupTimeMs > TEST_STARTUP_TIME_MS_INFO) {
                Log.i(mTag, "CDD informational: startup time " + startupTimeMs
                        + " > " + TEST_STARTUP_TIME_MS_INFO);
            }

            // Check maximum jitter
            assertTrue(mTag + " expect maxAbsJitterMs(" + mMaxAbsJitterMs + ") < "
                    + TEST_MAX_JITTER_MS_ALLOWED,
                    mMaxAbsJitterMs < TEST_MAX_JITTER_MS_ALLOWED);

            // Check std jitter
            if (stdJitterMs > TEST_STD_JITTER_MS_WARN) {
                Log.w(mTag, "CDD warning: std timestamp jitter " + stdJitterMs
                        + " > " + TEST_STD_JITTER_MS_WARN);
            }
            assertTrue(mTag + " expect stdJitterMs " + stdJitterMs +
                    " < " + TEST_STD_JITTER_MS_ALLOWED,
                    stdJitterMs < TEST_STD_JITTER_MS_ALLOWED);

            Log.d(mTag, "startupTimeMs(" + startupTimeMs
                    + ") meanJitterMs(" + mMeanJitterMs
                    + ") maxAbsJitterMs(" + mMaxAbsJitterMs
                    + ") stdJitterMs(" + stdJitterMs
                    + ")");

            // Log results if logName is provided
            if (logName != null) {
                DeviceReportLog log = new DeviceReportLog(REPORT_LOG_NAME, logName);
                // ReportLog needs at least one Value and Summary.
                log.addValue("startup_time_ms", startupTimeMs,
                        ResultType.LOWER_BETTER, ResultUnit.MS);
                log.addValue("maximum_abs_jitter_ms", mMaxAbsJitterMs,
                        ResultType.LOWER_BETTER, ResultUnit.MS);
                log.addValue("mean_jitter_ms", mMeanJitterMs,
                        ResultType.LOWER_BETTER, ResultUnit.MS);
                log.setSummary("std_jitter_ms", stdJitterMs,
                        ResultType.LOWER_BETTER, ResultUnit.MS);
                log.submit(InstrumentationRegistry.getInstrumentation());
            }
        }
    }

    /* AudioRecordAudit extends AudioRecord to allow concurrent playback
     * of read content to an AudioTrack.  This is for testing only.
     * For general applications, it is NOT recommended to extend AudioRecord.
     * This affects AudioRecord timing.
     */
    public static class AudioRecordAudit extends AudioRecord {
        public AudioRecordAudit(int audioSource, int sampleRate, int channelMask,
                int format, int bufferSize, boolean isChannelIndex) {
            this(audioSource, sampleRate, channelMask, format, bufferSize, isChannelIndex,
                    AudioManager.STREAM_MUSIC, 500 /*delayMs*/);
        }

        public AudioRecordAudit(int audioSource, int sampleRate, int channelMask,
                int format, int bufferSize,
                boolean isChannelIndex, int auditStreamType, int delayMs) {
            // without channel index masks, one could call:
            // super(audioSource, sampleRate, channelMask, format, bufferSize);
            super(new AudioAttributes.Builder()
                            .setInternalCapturePreset(audioSource)
                            .build(),
                    (isChannelIndex ?
                            new AudioFormat.Builder().setChannelIndexMask(channelMask)
                            : new AudioFormat.Builder().setChannelMask(channelMask))
                            .setEncoding(format)
                            .setSampleRate(sampleRate)
                            .build(),
                    bufferSize,
                    AudioManager.AUDIO_SESSION_ID_GENERATE);

            if (delayMs >= 0) { // create an AudioTrack
                final int channelOutMask = isChannelIndex ? channelMask :
                        outChannelMaskFromInChannelMask(channelMask);
                final int bufferOutFrames = sampleRate * delayMs / 1000;
                final int bufferOutSamples = bufferOutFrames
                        * AudioFormat.channelCountFromOutChannelMask(channelOutMask);
                final int bufferOutSize = bufferOutSamples
                        * AudioFormat.getBytesPerSample(format);

                // Caution: delayMs too large results in buffer sizes that cannot be created.
                mTrack = new AudioTrack.Builder()
                        .setAudioAttributes(new AudioAttributes.Builder()
                                .setLegacyStreamType(auditStreamType)
                                .build())
                        .setAudioFormat((isChannelIndex ?
                                new AudioFormat.Builder().setChannelIndexMask(channelOutMask) :
                                new AudioFormat.Builder().setChannelMask(channelOutMask))
                                .setEncoding(format)
                                .setSampleRate(sampleRate)
                                .build())
                        .setBufferSizeInBytes(bufferOutSize)
                        .build();
                assertEquals(AudioTrack.STATE_INITIALIZED, mTrack.getState());
                mTrackPosition = 0;
                mFinishAtMs = 0;
            }
        }

        @Override
        public int read(byte[] audioData, int offsetInBytes, int sizeInBytes) {
            // for byte array access we verify format is 8 bit PCM (typical use)
            assertEquals(TAG + ": format mismatch",
                    AudioFormat.ENCODING_PCM_8BIT, getAudioFormat());
            int samples = super.read(audioData, offsetInBytes, sizeInBytes);
            if (mTrack != null) {
                final int result = mTrack.write(audioData, offsetInBytes, samples,
                        AudioTrack.WRITE_NON_BLOCKING);
                mTrackPosition += result / mTrack.getChannelCount();
            }
            return samples;
        }

        @Override
        public int read(byte[] audioData, int offsetInBytes, int sizeInBytes, int readMode) {
            // for byte array access we verify format is 8 bit PCM (typical use)
            assertEquals(TAG + ": format mismatch",
                    AudioFormat.ENCODING_PCM_8BIT, getAudioFormat());
            int samples = super.read(audioData, offsetInBytes, sizeInBytes, readMode);
            if (mTrack != null) {
                final int result = mTrack.write(audioData, offsetInBytes, samples,
                        AudioTrack.WRITE_NON_BLOCKING);
                mTrackPosition += result / mTrack.getChannelCount();
            }
            return samples;
        }

        @Override
        public int read(short[] audioData, int offsetInShorts, int sizeInShorts) {
            // for short array access we verify format is 16 bit PCM (typical use)
            assertEquals(TAG + ": format mismatch",
                    AudioFormat.ENCODING_PCM_16BIT, getAudioFormat());
            int samples = super.read(audioData, offsetInShorts, sizeInShorts);
            if (mTrack != null) {
                final int result = mTrack.write(audioData, offsetInShorts, samples,
                        AudioTrack.WRITE_NON_BLOCKING);
                mTrackPosition += result / mTrack.getChannelCount();
            }
            return samples;
        }

        @Override
        public int read(short[] audioData, int offsetInShorts, int sizeInShorts, int readMode) {
            // for short array access we verify format is 16 bit PCM (typical use)
            assertEquals(TAG + ": format mismatch",
                    AudioFormat.ENCODING_PCM_16BIT, getAudioFormat());
            int samples = super.read(audioData, offsetInShorts, sizeInShorts, readMode);
            if (mTrack != null) {
                final int result = mTrack.write(audioData, offsetInShorts, samples,
                        AudioTrack.WRITE_NON_BLOCKING);
                mTrackPosition += result / mTrack.getChannelCount();
            }
            return samples;
        }

        @Override
        public int read(float[] audioData, int offsetInFloats, int sizeInFloats, int readMode) {
            // for float array access we verify format is float PCM (typical use)
            assertEquals(TAG + ": format mismatch",
                    AudioFormat.ENCODING_PCM_FLOAT, getAudioFormat());
            int samples = super.read(audioData, offsetInFloats, sizeInFloats, readMode);
            if (mTrack != null) {
                final int result = mTrack.write(audioData, offsetInFloats, samples,
                        AudioTrack.WRITE_NON_BLOCKING);
                mTrackPosition += result / mTrack.getChannelCount();
            }
            return samples;
        }

        @Override
        public int read(ByteBuffer audioBuffer, int sizeInBytes) {
            int bytes = super.read(audioBuffer, sizeInBytes);
            if (mTrack != null) {
                // read does not affect position and limit of the audioBuffer.
                // we make a duplicate to change that for writing to the output AudioTrack
                // which does check position and limit.
                ByteBuffer copy = audioBuffer.duplicate();
                copy.position(0).limit(bytes); // read places data at the start of the buffer.
                final int result = mTrack.write(copy, bytes, AudioTrack.WRITE_NON_BLOCKING);
                mTrackPosition += result / (mTrack.getChannelCount()
                        * AudioFormat.getBytesPerSample(mTrack.getAudioFormat()));
            }
            return bytes;
        }

        @Override
        public int read(ByteBuffer audioBuffer, int sizeInBytes, int readMode) {
            int bytes = super.read(audioBuffer, sizeInBytes, readMode);
            if (mTrack != null) {
                // read does not affect position and limit of the audioBuffer.
                // we make a duplicate to change that for writing to the output AudioTrack
                // which does check position and limit.
                ByteBuffer copy = audioBuffer.duplicate();
                copy.position(0).limit(bytes); // read places data at the start of the buffer.
                final int result = mTrack.write(copy, bytes, AudioTrack.WRITE_NON_BLOCKING);
                mTrackPosition += result / (mTrack.getChannelCount()
                        * AudioFormat.getBytesPerSample(mTrack.getAudioFormat()));
            }
            return bytes;
        }

        @Override
        public void startRecording() {
            super.startRecording();
            if (mTrack != null) {
                mTrack.play();
            }
        }

        @Override
        public void stop() {
            super.stop();
            if (mTrack != null) {
                if (mTrackPosition > 0) { // stop may be called multiple times.
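                    // Estimate when the frames already written to the audit track will finish
                    // playing, so release() can wait for the loopback playback to drain.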
                    final int remainingFrames = mTrackPosition - mTrack.getPlaybackHeadPosition();
                    mFinishAtMs = System.currentTimeMillis()
                            + remainingFrames * 1000 / mTrack.getSampleRate();
                    mTrackPosition = 0;
                }
                mTrack.stop(); // allows remaining data to play out
            }
        }

        @Override
        public void release() {
            super.release();
            if (mTrack != null) {
                final long remainingMs = mFinishAtMs - System.currentTimeMillis();
                if (remainingMs > 0) {
                    try {
                        Thread.sleep(remainingMs);
                    } catch (InterruptedException e) {
                        ;
                    }
                }
                mTrack.release();
                mTrack = null;
            }
        }

        public AudioTrack mTrack;
        private static final String TAG = "AudioRecordAudit";
        private int mTrackPosition;
        private long mFinishAtMs;
    }
}