/*
 * Copyright (C) 2020 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package android.hardware.audio@7.0;

import android.hardware.audio.common@7.0;

enum Result : int32_t {
    OK,
    NOT_INITIALIZED,
    INVALID_ARGUMENTS,
    INVALID_STATE,
    /**
     * Methods marked as "Optional method" must return this result value
     * if the operation is not supported by the HAL.
     */
    NOT_SUPPORTED
};

enum AudioDrain : int32_t {
    /** drain() returns when all data has been played. */
    ALL,
    /**
     * drain() returns a short time before all data from the current track has
     * been played to give time for a gapless track switch.
     */
    EARLY_NOTIFY
};

/**
 * A substitute for POSIX timespec.
 */
struct TimeSpec {
    /** Seconds. */
    uint64_t tvSec;
    /** Nanoseconds. */
    uint64_t tvNSec;
};
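
/*
 * Illustrative sketch (not part of this interface): a C++ client may
 * collapse the two fields into a single nanosecond count. The helper
 * name is hypothetical.
 *
 *     uint64_t timeSpecToNs(const TimeSpec& ts) {
 *         // 1 second == 1e9 nanoseconds.
 *         return ts.tvSec * 1000000000ull + ts.tvNSec;
 *     }
 */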

/** A generic key-value pair of strings. */
struct ParameterValue {
    string key;
    string value;
};

enum MmapBufferFlag : uint32_t {
    NONE    = 0x0,
    /**
     * Set when the buffer can be securely shared with untrusted applications
     * through the AAudio exclusive mode.
     * Only set this flag if applications are restricted from accessing the
     * memory surrounding the audio data buffer by a kernel mechanism.
     * See the Linux kernel's dma_buf.
     */
    APPLICATION_SHAREABLE    = 0x1,
};

/**
 * Mmap buffer descriptor returned by IStream.createMmapBuffer().
 * Used by streams opened in mmap mode.
 */
struct MmapBufferInfo {
    /** Mmap memory buffer */
    memory  sharedMemory;
    /** Total buffer size in frames */
    uint32_t bufferSizeFrames;
    /** Transfer size granularity in frames */
    uint32_t burstSizeFrames;
    /** Attributes describing the buffer. */
    bitfield<MmapBufferFlag> flags;
};
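
/*
 * Illustrative sketch (assumption, not part of this interface): a C++
 * client can map the returned shared memory via libhidlmemory before
 * reading or writing frames.
 *
 *     #include <hidlmemory/mapping.h>
 *     #include <android/hidl/memory/1.0/IMemory.h>
 *
 *     using android::hidl::memory::V1_0::IMemory;
 *
 *     void* mapMmapBuffer(const MmapBufferInfo& info) {
 *         android::sp<IMemory> mem = android::hardware::mapMemory(info.sharedMemory);
 *         if (mem == nullptr) return nullptr;
 *         // Base address of the audio data buffer.
 *         return static_cast<void*>(mem->getPointer());
 *     }
 */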

/**
 * Mmap buffer read/write position returned by IStream.getMmapPosition().
 * Used by streams opened in mmap mode.
 */
struct MmapPosition {
    /** Timestamp in ns, CLOCK_MONOTONIC. */
    int64_t  timeNanoseconds;
    /** Monotonically increasing 32-bit frame count, reset when IStream.stop() is called. */
    int32_t  positionFrames;
};
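
/*
 * Illustrative sketch (assumption, not part of this interface): given the
 * stream's sample rate, a client may extrapolate the current frame
 * position from the last reported MmapPosition.
 *
 *     #include <time.h>
 *
 *     int64_t currentFrames(const MmapPosition& pos, uint32_t sampleRateHz) {
 *         timespec now;
 *         clock_gettime(CLOCK_MONOTONIC, &now);
 *         int64_t nowNs = now.tv_sec * 1000000000LL + now.tv_nsec;
 *         int64_t elapsedNs = nowNs - pos.timeNanoseconds;
 *         // Frames elapsed since the timestamp, at the nominal rate.
 *         return pos.positionFrames + elapsedNs * sampleRateHz / 1000000000LL;
 *     }
 */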

/**
 * The message queue flags used to synchronize reads and writes on
 * the message queues used by StreamIn and StreamOut.
 */
enum MessageQueueFlagBits : uint32_t {
    NOT_EMPTY = 1 << 0,
    NOT_FULL = 1 << 1
};
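
/*
 * Illustrative sketch (assumption, not part of this interface): with the
 * libfmq EventFlag API, a reader can block until the writer signals
 * NOT_EMPTY, then signal NOT_FULL back after consuming data.
 *
 *     #include <fmq/EventFlag.h>
 *
 *     using android::hardware::EventFlag;
 *
 *     void waitForData(EventFlag* ef) {
 *         uint32_t state = 0;
 *         // Blocks until the NOT_EMPTY bit is set by the writer.
 *         ef->wait(static_cast<uint32_t>(MessageQueueFlagBits::NOT_EMPTY), &state);
 *         // ... read from the queue, then tell the writer there is room:
 *         ef->wake(static_cast<uint32_t>(MessageQueueFlagBits::NOT_FULL));
 *     }
 */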

/*
 * Microphone information
 */

/**
 * A 3D point used to represent the position or orientation of a microphone.
 *
 * Position: Coordinates of the microphone's capsule, in meters, from the
 * bottom-left-back corner of the bounding box of the Android device in its
 * natural orientation (PORTRAIT for phones, LANDSCAPE for tablets, TVs, etc.).
 * The orientation must match the one reported by the Display.getRotation() API.
 *
 * Orientation: Normalized vector signaling the main orientation of the
 * microphone's capsule. Magnitude = sqrt(x^2 + y^2 + z^2) = 1
 */
struct AudioMicrophoneCoordinate {
    float x;
    float y;
    float z;
};
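
/*
 * Illustrative sketch (assumption, not part of this interface): an
 * orientation coordinate is expected to be a unit vector, which a client
 * or a test can verify.
 *
 *     #include <cmath>
 *
 *     bool isNormalized(const AudioMicrophoneCoordinate& c, float tolerance = 1e-3f) {
 *         float magnitude = std::sqrt(c.x * c.x + c.y * c.y + c.z * c.z);
 *         return std::fabs(magnitude - 1.0f) <= tolerance;
 *     }
 */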

/**
 * Enum to identify the type of channel mapping for active microphones.
 * For used channels, the mapping further identifies whether the microphone
 * applies any significant processing (e.g. high-pass filtering, dynamic
 * compression).
 * Channels with simple processing, such as constant gain adjustment, must be
 * reported as DIRECT.
 */
@export(name="audio_microphone_channel_mapping_t", value_prefix="AUDIO_MICROPHONE_CHANNEL_MAPPING_")
enum AudioMicrophoneChannelMapping : uint32_t {
    /** Channel not used. */
    UNUSED      = 0,
    /** Channel used and signal not processed. */
    DIRECT      = 1,
    /** Channel used and signal has some processing. */
    PROCESSED   = 2,
};

/**
 * Enum to identify the location of a microphone relative to the body of the
 * Android device.
 */
@export(name="audio_microphone_location_t", value_prefix="AUDIO_MICROPHONE_LOCATION_")
enum AudioMicrophoneLocation : uint32_t {
    UNKNOWN             = 0,
    MAINBODY            = 1,
    MAINBODY_MOVABLE    = 2,
    PERIPHERAL          = 3,
};

/**
 * Identifier to help group related microphones together,
 * e.g. microphones of an array should belong to the same group.
 */
typedef int32_t AudioMicrophoneGroup;

/**
 * Enum with standard polar patterns of microphones.
 */
@export(name="audio_microphone_directionality_t", value_prefix="AUDIO_MICROPHONE_DIRECTIONALITY_")
enum AudioMicrophoneDirectionality : uint32_t {
    UNKNOWN         = 0,
    OMNI            = 1,
    BI_DIRECTIONAL  = 2,
    CARDIOID        = 3,
    HYPER_CARDIOID  = 4,
    SUPER_CARDIOID  = 5,
};

/**
 * A (frequency, level) pair. Used to represent frequency response.
 */
struct AudioFrequencyResponsePoint {
    /** In Hz */
    float frequency;
    /** In dB */
    float level;
};
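
/*
 * Illustrative sketch (assumption, not part of this interface): the level
 * at an arbitrary frequency can be estimated by linear interpolation
 * between the two surrounding response points, assuming the vector is
 * ordered from low to high frequency.
 *
 *     #include <vector>
 *
 *     float levelAt(const std::vector<AudioFrequencyResponsePoint>& fr, float freqHz) {
 *         if (fr.empty()) return 0.0f;
 *         if (freqHz <= fr.front().frequency) return fr.front().level;
 *         if (freqHz >= fr.back().frequency) return fr.back().level;
 *         for (size_t i = 1; i < fr.size(); ++i) {
 *             if (freqHz <= fr[i].frequency) {
 *                 // Interpolation weight within the surrounding segment.
 *                 float t = (freqHz - fr[i - 1].frequency) /
 *                           (fr[i].frequency - fr[i - 1].frequency);
 *                 return fr[i - 1].level + t * (fr[i].level - fr[i - 1].level);
 *             }
 *         }
 *         return fr.back().level;
 *     }
 */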

/**
 * Structure used by the HAL to describe a microphone's characteristics.
 * Used by StreamIn and Device.
 */
struct MicrophoneInfo {
    /**
     * Unique alphanumeric ID for the microphone. Guaranteed to be the same
     * even after rebooting.
     */
    string                                  deviceId;
    /**
     * Device specific information.
     */
    DeviceAddress                           deviceAddress;
    /**
     * Each element of the vector must describe the channel with the same
     * index.
     */
    vec<AudioMicrophoneChannelMapping>      channelMapping;
    /** Location of the microphone relative to the body of the device. */
    AudioMicrophoneLocation                 location;
    /**
     * Identifier to help group related microphones together,
     * e.g. microphones of an array should belong to the same group.
     */
    AudioMicrophoneGroup                    group;
    /**
     * Index of this microphone within the group.
     * (group, index) must be unique within the same device.
     */
    uint32_t                                indexInTheGroup;
    /** Level in dBFS produced by a 1000 Hz tone at 94 dB SPL. */
    float                                   sensitivity;
    /** Level in dB of the maximum SPL supported at 1000 Hz. */
    float                                   maxSpl;
    /** Level in dB of the minimum SPL supported at 1000 Hz. */
    float                                   minSpl;
    /** Standard polar pattern of the microphone. */
    AudioMicrophoneDirectionality           directionality;
    /**
     * Vector of frequency response points, ordered from low to high
     * frequency, describing the frequency response of the microphone.
     * Levels are in dB, relative to the level at 1000 Hz.
     */
    vec<AudioFrequencyResponsePoint>        frequencyResponse;
    /**
     * Position of the microphone's capsule in meters, from the
     * bottom-left-back corner of the bounding box of the device.
     */
    AudioMicrophoneCoordinate               position;
    /**
     * Normalized vector signaling the main orientation of the microphone's
     * capsule. sqrt(x^2 + y^2 + z^2) = 1
     */
    AudioMicrophoneCoordinate               orientation;
};

/**
 * Constants used by the HAL to determine how to select microphones and process those inputs in
 * order to optimize for capture in the specified direction.
 *
 * MicrophoneDirection constants are defined in MicrophoneDirection.java.
 */
@export(name="audio_microphone_direction_t", value_prefix="MIC_DIRECTION_")
enum MicrophoneDirection : int32_t {
    /**
     * Don't do any directionality processing of the activated microphone(s).
     */
    UNSPECIFIED = 0,
    /**
     * Optimize capture for audio coming from the screen-side of the device.
     */
    FRONT = 1,
    /**
     * Optimize capture for audio coming from the side of the device opposite the screen.
     */
    BACK = 2,
    /**
     * Optimize capture for audio coming from an off-device microphone.
     */
    EXTERNAL = 3,
};

/**
 * Dual Mono handling is used when a stereo audio stream
 * contains separate audio content on the left and right channels.
 * Such information about the content of the stream may be found, for example,
 * in ITU T-REC-J.94-201610 A.6.2.3 Component descriptor.
 */
@export(name="audio_dual_mono_mode_t", value_prefix="AUDIO_DUAL_MONO_MODE_")
enum DualMonoMode : int32_t {
    // Need to be in sync with DUAL_MONO_MODE* constants in
    // frameworks/base/media/java/android/media/AudioTrack.java
    /**
     * Disable any Dual Mono presentation effect.
     */
    OFF = 0,
    /**
     * This mode indicates that a stereo stream should be presented
     * with the left and right audio channels blended together
     * and delivered to both channels.
     *
     * Behavior for non-stereo streams is implementation defined.
     * A suggested guideline is that the left-right stereo symmetric
     * channels are pairwise blended; the other channels, such as center,
     * are left alone.
     */
    LR = 1,
    /**
     * This mode indicates that a stereo stream should be presented
     * with the left audio channel replicated into the right audio channel.
     *
     * Behavior for non-stereo streams is implementation defined.
     * A suggested guideline is that all channels with left-right
     * stereo symmetry will have the left channel position replicated
     * into the right channel position. The center channels (with no
     * left/right symmetry) or unbalanced channels are left alone.
     */
    LL = 2,
    /**
     * This mode indicates that a stereo stream should be presented
     * with the right audio channel replicated into the left audio channel.
     *
     * Behavior for non-stereo streams is implementation defined.
     * A suggested guideline is that all channels with left-right
     * stereo symmetry will have the right channel position replicated
     * into the left channel position. The center channels (with no
     * left/right symmetry) or unbalanced channels are left alone.
     */
    RR = 3,
};
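
/*
 * Illustrative sketch (assumption, not part of this interface): for an
 * interleaved stereo float buffer, the modes map to simple per-frame
 * operations. The 0.5f blending gain is one reasonable choice, not
 * mandated by this specification.
 *
 *     // Interleaved stereo: samples[2 * i] = left, samples[2 * i + 1] = right.
 *     void applyDualMono(DualMonoMode mode, float* samples, size_t frames) {
 *         for (size_t i = 0; i < frames; ++i) {
 *             float& l = samples[2 * i];
 *             float& r = samples[2 * i + 1];
 *             switch (mode) {
 *                 case DualMonoMode::LR: l = r = 0.5f * (l + r); break;  // blend both
 *                 case DualMonoMode::LL: r = l; break;  // replicate left
 *                 case DualMonoMode::RR: l = r; break;  // replicate right
 *                 default: break;  // OFF: no change
 *             }
 *         }
 *     }
 */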

/**
 * Algorithms used for timestretching (preserving pitch while playing audio
 * content at a different speed).
 */
@export(name="audio_timestretch_stretch_mode_t", value_prefix="AUDIO_TIMESTRETCH_STRETCH_")
enum TimestretchMode : int32_t {
    // Need to be in sync with AUDIO_STRETCH_MODE_* constants in
    // frameworks/base/media/java/android/media/PlaybackParams.java
    DEFAULT = 0,
    /** Selects the timestretch algorithm best suited for voice (speech) content. */
    VOICE = 1,
};

/**
 * Behavior when the values for speed and / or pitch are out of the
 * applicable range.
 */
@export(name="", value_prefix="HAL_AUDIO_TIMESTRETCH_FALLBACK_")
enum TimestretchFallbackMode : int32_t {
    // Need to be in sync with AUDIO_FALLBACK_MODE_* constants in
    // frameworks/base/media/java/android/media/PlaybackParams.java
    /** Play silence for parameter values that are out of range. */
    MUTE = 1,
    /** Return an error while trying to set the parameters. */
    FAIL = 2,
};

/**
 * Parameters determining playback behavior. They are used to speed up or
 * slow down playback and / or change the tonal frequency of the audio content
 * (pitch).
 */
struct PlaybackRate {
    /**
     * Speed factor (multiplier). Normal speed has the value of 1.0f.
     * Values less than 1.0f slow down playback, values greater than 1.0f
     * speed it up.
     */
    float speed;
    /**
     * Pitch factor (multiplier). Setting the pitch value to 1.0f while
     * changing the playback speed preserves the pitch; this is often
     * called "timestretching". Setting the pitch value equal to the speed
     * produces the same effect as playing the audio content at a different
     * sampling rate.
     */
    float pitch;
    /**
     * Selects the algorithm used for timestretching (preserving pitch while
     * playing audio at a different speed).
     */
    TimestretchMode timestretchMode;
    /**
     * Selects the behavior when the specified values for speed and / or pitch
     * are out of the applicable range.
     */
    TimestretchFallbackMode fallbackMode;
};
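
/*
 * Illustrative sketch (assumption, not part of this interface): playing
 * content 25% faster while keeping the original pitch (timestretching).
 *
 *     PlaybackRate rate;
 *     rate.speed = 1.25f;                                 // 25% faster
 *     rate.pitch = 1.0f;                                  // preserve pitch
 *     rate.timestretchMode = TimestretchMode::DEFAULT;
 *     rate.fallbackMode = TimestretchFallbackMode::MUTE;  // silence if out of range
 *     // Setting pitch equal to speed (e.g. both 1.25f) would instead sound
 *     // like resampling: faster and higher-pitched.
 */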

/**
 * The audio flags serve two purposes:
 *
 *  - when a stream is created, they indicate its attributes;
 *
 *  - when present in a profile descriptor listed for a particular audio
 *    hardware module, they indicate that a stream can be opened that
 *    supports the attributes indicated by the flags.
 *
 * See 'audioIoFlag' in audio_policy_configuration.xsd for the
 * list of allowed values.
 */
typedef string AudioInOutFlag;