Using MediaCodec (Audio Encoding, Part 1)
Preparation
All we need is a PCM file: 16000 Hz sample rate, mono.
If it plays correctly with the following ffplay command, the audio format is fine.
ffplay -f s16le -ar 16000 -ch_layout mono -i hello.pcm
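If you don't have a suitable PCM file at hand, one way to produce one (just a sketch; input.wav is a placeholder for any source audio you own) is to let ffmpeg resample it down to 16 kHz mono signed 16-bit little-endian:
ffmpeg -i input.wav -ar 16000 -ac 1 -f s16le -acodec pcm_s16le hello.pcm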
Push the file to the Download directory:
adb -s adb-4128230924000237-Nl4jPZ._adb-tls-connect._tcp push hello.pcm /sdcard/Download/
On Android 10+ you cannot operate directly on files in the Download directory, even if you hold the read/write permissions.
A brief history of the read/write permissions
android.Manifest.permission.READ_EXTERNAL_STORAGE,
android.Manifest.permission.WRITE_EXTERNAL_STORAGE,
On Android 13+ these two are completely deprecated. The official note reads:
READ_EXTERNAL_STORAGE is deprecated (and is not granted) when targeting Android 13+. If you need to query or interact with MediaStore or media files on the shared storage, you should instead use one or more new storage permissions: READ_MEDIA_IMAGES, READ_MEDIA_VIDEO or READ_MEDIA_AUDIO.
Android 10+
Use MediaStore for shared storage.
The two permissions above still apply to video, audio and photos, though.
Android 13+
The two permissions above are fully deprecated; video, audio and photos get their own dedicated permissions.
(Heavily customized domestic ROMs may of course still honor the two old permissions on Android 13+.)
READ_MEDIA_IMAGES, READ_MEDIA_VIDEO or READ_MEDIA_AUDIO
The phone I'm using runs Android 11 and I'm accessing the Download directory, so the two permissions above are of no use here.
Below Android 10, however, they are still required, as sketched right after this paragraph.
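A minimal sketch of picking the read permission by platform version (the constants are the standard framework ones; the manifest still has to declare both, and this does not cover the Download-directory case at all):
import android.Manifest
import android.os.Build

// Returns the permission to request for reading audio files on shared storage.
fun audioReadPermission(): String =
    if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.TIRAMISU) {
        Manifest.permission.READ_MEDIA_AUDIO      // Android 13+
    } else {
        Manifest.permission.READ_EXTERNAL_STORAGE // Android 12 and below
    }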
So hello.pcm can only be reached through SAF, or by holding MANAGE_EXTERNAL_STORAGE; a sketch of the latter follows.
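If you do go the MANAGE_EXTERNAL_STORAGE route instead of SAF, a minimal sketch looks roughly like this (Android 11+, the manifest must declare android.permission.MANAGE_EXTERNAL_STORAGE, and Google Play only allows this permission for a narrow set of app categories):
import android.content.Context
import android.content.Intent
import android.net.Uri
import android.os.Build
import android.os.Environment
import android.provider.Settings

// Send the user to the system "All files access" page if we don't have it yet (API 30+).
fun requestAllFilesAccess(context: Context) {
    if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.R && !Environment.isExternalStorageManager()) {
        val intent = Intent(
            Settings.ACTION_MANAGE_APP_ALL_FILES_ACCESS_PERMISSION,
            Uri.parse("package:${context.packageName}")
        )
        context.startActivity(intent)
    }
}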
SAF
@Composable
internal fun MediaCodecScreen(
navHostController: NavHostController,
snackBarHostState: SnackbarHostState,
) {
val context: Context = LocalContext.current
val coroutineScope: CoroutineScope = rememberCoroutineScope()
val launcher: ManagedActivityResultLauncher<Array<String>, Uri?> = rememberLauncherForActivityResult(
contract = ActivityResultContracts.OpenDocument()
) { uri: Uri? ->
uri?.apply {
Log.i(TAG, "MediaCodecScreen -> uri: $this")
context.contentResolver.openInputStream(this)?.use { inputStream ->
val bytes = ByteArray(1024)
inputStream.read(bytes)
Log.i(TAG, "MediaCodecScreen -> data: ${bytes.joinToString()}")
}
}
}
Column(
modifier = Modifier.fillMaxSize()
) {
Text(
text = "发送信息",
Modifier
.padding(top = 10.dp)
.background(color = Color.Black, shape = RoundedCornerShape10)
.padding(all = 5.dp)
.clickable {
launcher.launch(arrayOf("*/*"))
},
color = Color.White
)
}
}
Recommendations for accessing shared storage on Android 10+
Prefer MediaStore. Not using it is risky: if you write a file into Download directly and the user later uninstalls and reinstalls your app, you can no longer access that file and your code will crash. Every app is effectively an anonymous user; after a reinstall that anonymous identity changes, so the old file is no longer yours to touch. The underlying mechanism is worth digging into via SELinux.
On Android 10–12, accessing video, audio and photos still requires the read/write permissions above.
Also note that MediaStore is essentially a database that speeds up file lookups. When you use MediaStore, don't just insert a record; you also have to write the file's binary data through MediaStore, as sketched below.
Try not to convert Uri to File or File to Uri; that doesn't fit modern Android development.
Inside the app's private directories, whether on external or internal storage, you can of course still use java.io.File, because they have nothing to do with shared storage.
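A minimal sketch of "insert a record and then actually write the bytes" through MediaStore (Android 10+; IS_PENDING keeps the half-written file hidden from other apps until we clear it):
import android.content.ContentValues
import android.content.Context
import android.net.Uri
import android.os.Environment
import android.provider.MediaStore

// Insert a MediaStore row and write the payload into the file backing it (API 29+).
fun saveToMusic(context: Context, displayName: String, mimeType: String, data: ByteArray): Uri? {
    val resolver = context.contentResolver
    val values = ContentValues().apply {
        put(MediaStore.Audio.Media.DISPLAY_NAME, displayName)
        put(MediaStore.Audio.Media.MIME_TYPE, mimeType)
        put(MediaStore.Audio.Media.RELATIVE_PATH, Environment.DIRECTORY_MUSIC)
        put(MediaStore.Audio.Media.IS_PENDING, 1)
    }
    val uri = resolver.insert(MediaStore.Audio.Media.EXTERNAL_CONTENT_URI, values) ?: return null
    resolver.openOutputStream(uri)?.use { it.write(data) } // the binary data, not just the row
    values.clear()
    values.put(MediaStore.Audio.Media.IS_PENDING, 0)        // publish the file
    resolver.update(uri, values, null, null)
    return uri
}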
Encoding PCM to MP3
The encoding function:
internal suspend fun pcmToMp3(
context: Context,
pcmUri: Uri,
mp3Uri: Uri,
): Unit = suspendCancellableCoroutine { continuation ->
val mediaCodecList = MediaCodecList(MediaCodecList.ALL_CODECS)
val mp3Encoders: List<MediaCodecInfo> = mediaCodecList.codecInfos.filter { it.isEncoder }.filter {
it.supportedTypes.contains(element = MediaFormat.MIMETYPE_AUDIO_MPEG)
}
if (mp3Encoders.isEmpty()){
if (continuation.isActive){
continuation.resume(Unit) // if no MP3 encoder is available, fall back to a software encoder such as LAME
}
return@suspendCancellableCoroutine
}
mp3Encoders.forEach { mediaCodecInfo ->
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.Q) {
Log.i(TAG, "pcmToAac -> name: ${mediaCodecInfo.name}, canonicalName: ${mediaCodecInfo.canonicalName}, isAlias: ${mediaCodecInfo.isAlias}, isVendor: ${mediaCodecInfo.isVendor}, isHardwareAccelerated: ${mediaCodecInfo.isHardwareAccelerated}, isEncoder: ${mediaCodecInfo.isEncoder}, isSoftwareOnly: ${mediaCodecInfo.isSoftwareOnly}, supportedTypes: ${mediaCodecInfo.supportedTypes.joinToString()}")
} else {
Log.i(TAG, "pcmToAac -> name: ${mediaCodecInfo.name}, isEncoder: ${mediaCodecInfo.isEncoder}, supportedTypes: ${mediaCodecInfo.supportedTypes.joinToString()}")
}
}
val mediaFormat: MediaFormat = MediaFormat.createAudioFormat(MediaFormat.MIMETYPE_AUDIO_MPEG, 16000, 1).apply {
setInteger(MediaFormat.KEY_BIT_RATE, 128000)
}
val mediaCodec: MediaCodec = MediaCodec.createEncoderByType(MediaFormat.MIMETYPE_AUDIO_MPEG)
mediaCodec.configure(mediaFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE)
val pcmInputStream: InputStream = context.contentResolver.openInputStream(pcmUri)!!
val mp3OutputStream: OutputStream = context.contentResolver.openOutputStream(mp3Uri)!!
val bytes = ByteArray(1024 * 8)
mediaCodec.setCallback(object : MediaCodec.Callback(){
override fun onError(
codec: MediaCodec,
e: MediaCodec.CodecException
) {
Log.e(TAG, "onError name: ${codec.name}, thread: ${Thread.currentThread()}, error: ${e.message}", e)
}
override fun onInputBufferAvailable(
codec: MediaCodec,
index: Int
) {
val inputBuffer: ByteBuffer = codec.getInputBuffer(index) ?: return
val size: Int = pcmInputStream.read(bytes, 0, minOf(bytes.size, inputBuffer.limit())) // never ask for more than either buffer can hold
Log.i(TAG, "onInputBufferAvailable -> name: ${mediaCodec.name}, index: $index, thread: ${Thread.currentThread()}, size: $size, limit: ${inputBuffer.limit()}, position: ${inputBuffer.position()}")
if (size > 0) {
inputBuffer.put(bytes, 0, size)
codec.queueInputBuffer(index, 0, size, System.nanoTime() / 1000, 0)
} else {
codec.queueInputBuffer(index, 0, 0, System.nanoTime() / 1000, MediaCodec.BUFFER_FLAG_END_OF_STREAM)
}
}
override fun onOutputBufferAvailable(
codec: MediaCodec,
index: Int,
info: MediaCodec.BufferInfo
) {
Log.i(TAG, "onOutputBufferAvailable -> name: ${codec.name}, index: $index, info: ${info.size}, thread: ${Thread.currentThread()}")
val outputBuffer: ByteBuffer = codec.getOutputBuffer(index) ?: return
outputBuffer.get(bytes, 0, info.size)
mp3OutputStream.write(bytes, 0, info.size)
codec.releaseOutputBuffer(index, false)
if (info.flags and MediaCodec.BUFFER_FLAG_END_OF_STREAM != 0){ // flags is a bitmask
    Log.i(TAG, "onOutputBufferAvailable -> == encoding finished...") // todo
    pcmInputStream.close()
    mp3OutputStream.close()
    if (continuation.isActive){
        Log.i(TAG, "pcmToMp3 -> encoding finished, before resume...")
        continuation.resume(Unit)
        Log.i(TAG, "pcmToMp3 -> encoding finished, after resume...")
}
}
}
override fun onOutputFormatChanged(
codec: MediaCodec,
format: MediaFormat
) {
Log.i(TAG, "onOutputFormatChanged -> name: ${codec.name}, format: ${format.getString(MediaFormat.KEY_MIME)}")
}
})
Log.i(TAG, "pcmToAac -> before start...")
mediaCodec.start()
Log.i(TAG, "pcmToAac -> after start...")
}
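A note on timestamps: the callbacks above stamp each input buffer with System.nanoTime() / 1000, which works but drifts with scheduling. A steadier presentation time can be derived from how much PCM has already been consumed (a sketch, assuming 16-bit mono at 16 kHz as configured above):
// Presentation time derived from the number of PCM bytes fed so far.
// 16-bit mono at 16 kHz -> 2 bytes per sample, 16_000 samples per second.
private var totalPcmBytes: Long = 0L

private fun nextPresentationTimeUs(bytesJustRead: Int): Long {
    val ptsUs = totalPcmBytes * 1_000_000L / (2L * 16_000L)
    totalPcmBytes += bytesJustRead
    return ptsUs
}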
It is called like this:
@Composable
internal fun MediaCodecScreen(
navHostController: NavHostController,
snackBarHostState: SnackbarHostState,
) {
val context: Context = LocalContext.current
val coroutineScope: CoroutineScope = rememberCoroutineScope()
val launcher: ManagedActivityResultLauncher<Array<String>, Uri?> = rememberLauncherForActivityResult(
contract = ActivityResultContracts.OpenDocument()
) { uri: Uri? ->
uri?.apply {
Log.i(TAG, "MediaCodecScreen -> uri: $this")
val audioExtractManager = AudioExtractManager()
val contentValues: ContentValues = contentValuesOf(
MediaStore.Audio.Media.DISPLAY_NAME to "hello.mp3",
MediaStore.Audio.Media.MIME_TYPE to MediaFormat.MIMETYPE_AUDIO_MPEG,
MediaStore.Audio.Media.RELATIVE_PATH to Environment.DIRECTORY_MUSIC
)
val mp3Uri: Uri = if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.Q) {
context.contentResolver.insert(MediaStore.Audio.Media.EXTERNAL_CONTENT_URI, contentValues)
} else {
FileProvider.getUriForFile(context, "${context.packageName}.provider", File(
Environment.getExternalStoragePublicDirectory(Environment.DIRECTORY_MUSIC),
"hello.mp3"
))
} ?: return@apply
Log.i(TAG, "MediaCodecScreen -> mp3Uri: $mp3Uri")
coroutineScope.launch {
Log.i(TAG, "MediaCodecScreen -> before ${Thread.currentThread()}")
audioExtractManager.pcmToMp3(context, this@apply, mp3Uri)
Log.i(TAG, "MediaCodecScreen -> after ${Thread.currentThread()}")
}
}
}
Column(
modifier = Modifier.fillMaxSize()
) {
Text(
text = "开发者",
Modifier
.padding(top = 10.dp)
.background(color = Color.Black, shape = RoundedCornerShape10)
.padding(all = 5.dp)
.clickable {
val intent = Intent(Settings.ACTION_APPLICATION_DEVELOPMENT_SETTINGS)
context.startActivity(intent)
},
color = Color.White
)
Text(
text = "发送信息",
Modifier
.padding(top = 10.dp)
.background(color = Color.Black, shape = RoundedCornerShape10)
.padding(all = 5.dp)
.clickable {
launcher.launch(arrayOf("*/*"))
},
color = Color.White
)
}
}
Encoding PCM to ADTS AAC
AAC comes in two stream formats: ADIF and ADTS.
ADTS is a raw AAC stream made up of frames; on some platforms, iOS for example, it may not play back correctly.
Reference: adts aac
The 7-byte header
The 7-byte header is 7 * 8 = 56 bits in total, split into two parts of 28 bits each:
adts_fixed_header();
adts_variable_header();
Let's walk through it bit by bit.
adts_fixed_header()
syncword
: the first 12 bits are all 1s; they mark the start of an ADTS header
ID
: 1 bit, MPEG Version: 0 for MPEG-4, 1 for MPEG-2
Layer
: 2 bits, always '00'
protection_absent
: 1 bit, set to 1 if there is no CRC and 0 if there is a CRC
It is usually 1, meaning: no CRC
profile
: 2 bits, which AAC profile is used, e.g. 01 = Low Complexity (LC), i.e. AAC LC
The profile value equals the Audio Object Type minus 1:
profile = MPEG-4 Audio Object Type - 1
sampling_frequency_index
: 4 bits, the sampling-rate index; see the reference link above for the table
Private bit
: 1 bit, just set it to 0
channel_configuration
: 3 bits, the channel-configuration index; see the reference link above for the table
Originality
: 1 bit, 1 means the audio is original, 0 means it is a copy
Home
: 1 bit, 1 means the audio is for home use, 0 means it is not
28 bits in total.
adts_variable_header
Copyright ID bit: 1 bit, the next bit of the centrally registered copyright identifier. The identifier is slid through this field bit by bit in LSB-first order, wrapping back to the start once the end is reached (a circular buffer).
Copyright ID start: 1 bit, 1 means this frame's copyright ID bit is the first bit of the identifier, 0 means it is not.
aac_frame_length
: 13 bits, the length of one ADTS frame, including the ADTS header and the raw AAC data.
frame length, this value must include 7 or 9 bytes of header length:
aac_frame_length = (protection_absent == 1 ? 7 : 9) + size(AACFrame)
When protection_absent = 1 the header length is 7 bytes; when protection_absent = 0 it is 9 bytes.
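For example, a raw AAC frame of 200 bytes with protection_absent = 1 gives aac_frame_length = 7 + 200 = 207, which is exactly how the helper further down fills this field (info.size + 7).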
adts_buffer_fullness
: 11 bits
Calculated as:
max_bit_reservoir = minimum_decoder_input_size - mean_bits_per_RDB; // for CBR
// bit reservoir state/available bits (≥0 and <max_bit_reservoir); for the i-th frame.
bit_reservoir_state[i] = (int)(bit_reservoir_state[i - 1] + mean_framelength - framelength[i]);
// NCC is the number of channels.
adts_buffer_fullness = bit_reservoir_state[i] / (NCC * 32);
A value of 0x7FF indicates a variable-bitrate stream.
number_of_raw_data_blocks_in_frame
: 2 bits, the ADTS frame contains number_of_raw_data_blocks_in_frame + 1 raw AAC frames.
So number_of_raw_data_blocks_in_frame == 0 means the ADTS frame carries exactly one AAC data block.
(One raw AAC frame holds 1024 samples plus the associated data.)
In other words: the number of AAC frames (raw data blocks, RDBs) in the ADTS frame minus 1. For best compatibility, keep exactly one AAC frame per ADTS frame.
Also 28 bits.
When CRC protection is present there is additionally:
16bit: CRC check (as of ISO/IEC 11172-3, subclause 2.4.3.1), if Protection absent is 0.
Below is an example of prepending an ADTS header to raw AAC:
private fun addAdtsHeader(packet: ByteArray, packetLen: Int, sampleRate: Int, channels: Int) {
val profile = 2 // AAC LC
val freqIdx = when(sampleRate){
96000 -> 0
88200 -> 1
64000 -> 2
48000 -> 3
44100 -> 4
32000 -> 5
24000 -> 6
22050 -> 7
16000 -> 8
12000 -> 9
11025 -> 10
8000 -> 11
else -> 4 // default to 44100
}
/**
0000 0x00
0001 0x01
0010 0x02
0011 0x03
0100 0x04
0101 0x05
0110 0x06
0111 0x07
1000 0x08
1001 0x09
1010 0x0A
1011 0x0B
1100 0x0C
1101 0x0D
1110 0x0E
1111 0x0F
*/
val chanCfg = channels // channel_configuration index (mono = 1)
packet[0] = 0xFF.toByte() // byte 1: 1111 1111, high bits of the syncword
packet[1] = 0xF9.toByte() // byte 2: 1111 1001, rest of syncword + ID = 1 (MPEG-2), layer = 00, protection_absent = 1
// byte 3: profile (2 bits) | sampling_frequency_index (4 bits) | private bit (0) | top bit of channel_configuration
packet[2] = ((profile - 1 shl 6) + (freqIdx shl 2) + (chanCfg shr 2)).toByte()
// byte 4: low 2 bits of channel_configuration | originality, home, copyright bits (all 0) | top 2 bits of the 13-bit frame length
packet[3] = (((chanCfg and 3) shl 6) + (packetLen shr 11)).toByte()
// byte 5: middle 8 bits of the frame length
packet[4] = (packetLen and 0x7FF shr 3).toByte()
// byte 6: low 3 bits of the frame length | top 5 bits of adts_buffer_fullness (0x7FF)
packet[5] = ((packetLen and 7 shl 5) + 0x1F).toByte()
// byte 7: remaining 6 bits of adts_buffer_fullness | number_of_raw_data_blocks_in_frame = 00
packet[6] = 0xFC.toByte()
}
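To sanity-check the bytes produced above, here is a small sketch that reads the fields back out of the 7-byte header (same layout as the writer; parseAdtsHeader and its log line are mine, not part of the original code):
// Decode the fields of a 7-byte ADTS header to verify the writer above (protection_absent assumed to be 1).
private fun parseAdtsHeader(h: ByteArray) {
    val sampleRates = intArrayOf(
        96000, 88200, 64000, 48000, 44100, 32000,
        24000, 22050, 16000, 12000, 11025, 8000
    )
    val b = h.map { it.toInt() and 0xFF }
    val syncOk = b[0] == 0xFF && (b[1] and 0xF0) == 0xF0
    val profile = ((b[2] shr 6) and 0x03) + 1 // Audio Object Type
    val freqIdx = (b[2] shr 2) and 0x0F
    val channels = ((b[2] and 0x01) shl 2) or ((b[3] shr 6) and 0x03)
    val frameLength = ((b[3] and 0x03) shl 11) or (b[4] shl 3) or ((b[5] shr 5) and 0x07)
    Log.i(TAG, "adts -> syncOk=$syncOk profile=$profile sampleRate=${sampleRates[freqIdx]} channels=$channels frameLength=$frameLength")
}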
The encoding function:
internal suspend fun pcmToAac(context: Context, pcmUri: Uri, aacUri: Uri): Unit = suspendCancellableCoroutine { continuation ->
// Encoders on my device: aac, 3gpp, audio/amr-wb, audio/flac, h264
// Decoders: far too many to list here
val mediaCodecList = MediaCodecList(MediaCodecList.ALL_CODECS)
val aacEncoders: List<MediaCodecInfo> = mediaCodecList.codecInfos.filter { it.isEncoder }.filter {
it.supportedTypes.contains(element = MediaFormat.MIMETYPE_AUDIO_AAC)
}
Log.i(TAG, "pcmToAac -> aacEncoders: ${aacEncoders.joinToString { it.name }}")
if (aacEncoders.isEmpty()) {
    // no AAC encoder at all on this device
    Log.i(TAG, "pcmToAac -> no AAC encoder available...") // fall back to a software encoder library
    if (continuation.isActive) {
        continuation.resume(Unit)
    }
    return@suspendCancellableCoroutine
}
// Unfortunately only the software AAC encoder shows up on my device, not a hardware-accelerated one
Log.i(TAG, "pcmToAac -> AAC encoder found")
val mediaFormat: MediaFormat = MediaFormat.createAudioFormat(MediaFormat.MIMETYPE_AUDIO_AAC, 16000, 1).apply {
setInteger(MediaFormat.KEY_AAC_PROFILE, MediaCodecInfo.CodecProfileLevel.AACObjectLC)
setInteger(MediaFormat.KEY_BIT_RATE, 128000)
}
val mediaCodec: MediaCodec = MediaCodec.createEncoderByType(MediaFormat.MIMETYPE_AUDIO_AAC)
mediaCodec.configure(mediaFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE)
val pcmInputStream: InputStream = context.contentResolver.openInputStream(pcmUri)!!
val aacOutputStream: OutputStream = context.contentResolver.openOutputStream(aacUri)!!
val bytes = ByteArray(1024 * 8)
mediaCodec.setCallback(object : MediaCodec.Callback() {
override fun onError(
codec: MediaCodec,
e: MediaCodec.CodecException
) {
Log.e(
TAG,
"onError name: ${codec.name}, thread: ${Thread.currentThread()}, error: ${e.message}",
e
)
}
override fun onInputBufferAvailable(
codec: MediaCodec,
index: Int
) {
Log.i(
TAG,
"onInputBufferAvailable -> name: ${mediaCodec.name}, index: $index, thread: ${Thread.currentThread()}"
)
val inputBuffer: ByteBuffer = codec.getInputBuffer(index) ?: return
val size: Int = pcmInputStream.read(bytes, 0, minOf(bytes.size, inputBuffer.limit()))
if (size > 0) {
inputBuffer.put(bytes, 0, size)
codec.queueInputBuffer(index, 0, size, System.nanoTime() / 1000, 0)
} else {
codec.queueInputBuffer(
index,
0,
0,
System.nanoTime() / 1000,
MediaCodec.BUFFER_FLAG_END_OF_STREAM
)
}
}
override fun onOutputBufferAvailable(
codec: MediaCodec,
index: Int,
info: MediaCodec.BufferInfo
) {
Log.i(
TAG,
"onOutputBufferAvailable -> name: ${codec.name}, index: $index, info: ${info.size}, thread: ${Thread.currentThread()}"
)
val outputBuffer: ByteBuffer = codec.getOutputBuffer(index) ?: return
if (info.flags and MediaCodec.BUFFER_FLAG_CODEC_CONFIG != 0) {
    // codec-specific data (AudioSpecificConfig) must not be wrapped in an ADTS header
    codec.releaseOutputBuffer(index, false)
    return
}
if (info.size > 0) {
    val aacData = ByteArray(info.size + 7)
    addAdtsHeader(aacData, aacData.size, 16000, 1)
    outputBuffer.get(aacData, 7, info.size)
    aacOutputStream.write(aacData)
}
codec.releaseOutputBuffer(index, false)
if (info.flags and MediaCodec.BUFFER_FLAG_END_OF_STREAM != 0) { // flags is a bitmask
    Log.i(TAG, "onOutputBufferAvailable -> == encoding finished...") // todo
    pcmInputStream.close()
    aacOutputStream.close()
    if (continuation.isActive) {
        Log.i(TAG, "pcmToAac -> encoding finished, before resume...")
        continuation.resume(Unit)
        Log.i(TAG, "pcmToAac -> encoding finished, after resume...")
    }
}
}
override fun onOutputFormatChanged(
codec: MediaCodec,
format: MediaFormat
) {
Log.i(
TAG,
"onOutputFormatChanged -> name: ${codec.name}, format: ${
format.getString(MediaFormat.KEY_MIME)
}"
)
}
})
Log.i(TAG, "pcmToAac -> before start...")
mediaCodec.start()
Log.i(TAG, "pcmToAac -> after start...")
}
@Suppress("SameParameterValue")
private fun addAdtsHeader(packet: ByteArray, packetLen: Int, sampleRate: Int, channels: Int) {
val profile = 2 // AAC LC
val freqIdx = when(sampleRate){
96000 -> 0
88200 -> 1
64000 -> 2
48000 -> 3
44100 -> 4
32000 -> 5
24000 -> 6
22050 -> 7
16000 -> 8
12000 -> 9
11025 -> 10
8000 -> 11
else -> 4 // 默认44100
}
/**
0000 0x00
0001 0x01
0010 0x02
0011 0x03
0100 0x04
0101 0x05
0110 0x06
0111 0x07
1000 0x08
1001 0x09
1010 0x0A
1011 0x0B
1100 0x0C
1101 0x0D
1110 0x0E
1111 0x0F
*/
val chanCfg = channels // CPE = 1, mono = 1
packet[0] = 0xFF.toByte() // 1111 1111 1字节
packet[1] = 0xF9.toByte() // 1111 1001 2字节 id 1 for MPEG-2, layer = 00 protection_absent = 1
// [01]00 0000 profile 2bits
// [0110 00]00 freqIdx 4bits
// [0100 000]0 Private bit
// channel 3 bits 001 右移两位 0
// [0100 0000] channel的1位0加上来 // 3字节
packet[2] = ((profile - 1 shl 6) + (freqIdx shl 2) + (chanCfg shr 2)).toByte()
// 0000 0000
// channel 0000 0001 左移6位 01
// [01]00 0000 channel 处理完成 // 26
// Originality、Home两bit Copyright ID bit, Copyright ID start 2bit 合并到 packetLen,所有其一共17bits
// 0x7FFF
// 0000 0000 0000 000[0 00011|111 1111 1111] packetLen 右边移 11 ->
// 0000 0000 0000 0000 0000 00[00 0011] packetLen 右边移 11 ->
// [0100 0011]
packet[3] = (((chanCfg and 3) shl 6) + (packetLen shr 11)).toByte() // 4字节
packet[4] = (packetLen and 0x7FF shr 3).toByte() // [111 1111 1]111
// [111]0 0000 后面即 0xFFC
packet[5] = ((packetLen and 7 shl 5) + 0x1F).toByte() // 0001 1111
// 1111 1100
packet[6] = 0xFC.toByte() // 7字节
}
Usage example
@Composable
internal fun MediaCodecScreen(
navHostController: NavHostController,
snackBarHostState: SnackbarHostState,
) {
val context: Context = LocalContext.current
val coroutineScope: CoroutineScope = rememberCoroutineScope()
val permissionLauncher = rememberLauncherForActivityResult(
contract = ActivityResultContracts.RequestMultiplePermissions()
) { map ->
coroutineScope.launch {
snackBarHostState.showSnackbar("权限获取是否成功: ${map.values.all { it }}")
}
}
val launcher: ManagedActivityResultLauncher<Array<String>, Uri?> = rememberLauncherForActivityResult(
contract = ActivityResultContracts.OpenDocument()
) { uri: Uri? ->
uri?.apply {
Log.i(TAG, "MediaCodecScreen -> uri: $this")
val audioExtractManager = AudioExtractManager()
val contentValues: ContentValues = contentValuesOf(
MediaStore.Audio.Media.DISPLAY_NAME to "hello.aac",
MediaStore.Audio.Media.MIME_TYPE to MediaFormat.MIMETYPE_AUDIO_AAC,
MediaStore.Audio.Media.RELATIVE_PATH to Environment.DIRECTORY_MUSIC
)
val mp3Uri: Uri = if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.Q) { // RELATIVE_PATH needs API 29+
context.contentResolver.insert(MediaStore.Audio.Media.EXTERNAL_CONTENT_URI, contentValues)
} else {
FileProvider.getUriForFile(context, "${context.packageName}.provider", File(
Environment.getExternalStoragePublicDirectory(Environment.DIRECTORY_MUSIC),
"hello.aac"
))
} ?: return@apply
Log.i(TAG, "MediaCodecScreen -> mp3Uri: $mp3Uri")
coroutineScope.launch {
Log.i(TAG, "MediaCodecScreen -> before ${Thread.currentThread()}")
audioExtractManager.pcmToAac(context, this@apply, mp3Uri)
Log.i(TAG, "MediaCodecScreen -> after ${Thread.currentThread()}")
}
}
}
Column(
modifier = Modifier.fillMaxSize()
) {
Text(
text = "开发者",
Modifier
.padding(top = 10.dp)
.background(color = Color.Black, shape = RoundedCornerShape10)
.padding(all = 5.dp)
.clickable {
val intent = Intent(Settings.ACTION_APPLICATION_DEVELOPMENT_SETTINGS)
context.startActivity(intent)
},
color = Color.White
)
Text(
text = "发送信息",
Modifier
.padding(top = 10.dp)
.background(color = Color.Black, shape = RoundedCornerShape10)
.padding(all = 5.dp)
.clickable {
launcher.launch(arrayOf("*/*"))
},
color = Color.White
)
}
}
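To check the result, pull it back off the device (with the MediaStore values above it typically lands under /sdcard/Music/) and play it the same way the PCM input was verified; ffplay detects the ADTS framing on its own:
adb pull /sdcard/Music/hello.aac
ffplay -i hello.aac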
Exploring why Android MediaPlayer can report the duration of a raw ADTS AAC stream
Usage:
val aacUri: Uri = Uri.Builder().scheme(ContentResolver.SCHEME_ANDROID_RESOURCE)
.path(R.raw.audio1.toString()).build()
val mediaPlayer: MediaPlayer = MediaPlayer.create(context, aacUri)
Log.i(TAG, "MusicScreen -> duration: ${mediaPlayer.duration}")
mediaPlayer.release()
getDuration() is a native function, shown below:
frameworks/base/media/jni/android_media_MediaPlayer.cpp
static jint
android_media_MediaPlayer_getDuration(JNIEnv *env, jobject thiz)
{
sp<MediaPlayer> mp = getMediaPlayer(env, thiz);
if (mp == NULL ) {
jniThrowException(env, "java/lang/IllegalStateException", NULL);
return 0;
}
int msec;
process_media_player_call( env, thiz, mp->getDuration(&msec), NULL, NULL );
ALOGV("getDuration: %d (msec)", msec);
return (jint) msec;
}
We can see that it calls MediaPlayer's getDuration method.
frameworks/av/media/libmedia/mediaplayer.cpp
status_t MediaPlayer::getDuration_l(int *msec)
{
ALOGV("getDuration_l");
bool isValidState = (mCurrentState & (MEDIA_PLAYER_PREPARED | MEDIA_PLAYER_STARTED |
MEDIA_PLAYER_PAUSED | MEDIA_PLAYER_STOPPED | MEDIA_PLAYER_PLAYBACK_COMPLETE));
if (mPlayer != 0 && isValidState) {
int durationMs;
status_t ret = mPlayer->getDuration(&durationMs);
if (ret != OK) {
// Do not enter error state just because no duration was available.
durationMs = -1;
ret = OK;
}
if (msec) {
*msec = durationMs;
}
return ret;
}
ALOGE("Attempt to call getDuration in wrong state: mPlayer=%p, mCurrentState=%u",
mPlayer.get(), mCurrentState);
return INVALID_OPERATION;
}
status_t MediaPlayer::getDuration(int *msec)
{
Mutex::Autolock _l(mLock);
return getDuration_l(msec);
}
status_t MediaPlayer::attachNewPlayer(const sp<IMediaPlayer>& player)
{
status_t err = UNKNOWN_ERROR;
sp<IMediaPlayer> p;
{ // scope for the lock
Mutex::Autolock _l(mLock);
if ( !( (mCurrentState & MEDIA_PLAYER_IDLE) ||
(mCurrentState == MEDIA_PLAYER_STATE_ERROR ) ) ) {
ALOGE("attachNewPlayer called in state %d", mCurrentState);
return INVALID_OPERATION;
}
clear_l();
p = mPlayer;
mPlayer = player;
if (player != 0) {
mCurrentState = MEDIA_PLAYER_INITIALIZED;
err = NO_ERROR;
} else {
ALOGE("Unable to create media player");
}
}
if (p != 0) {
p->disconnect();
}
return err;
}
status_t MediaPlayer::setDataSource(
const sp<IMediaHTTPService> &httpService,
const char *url, const KeyedVector<String8, String8> *headers)
{
ALOGV("setDataSource(%s)", url);
status_t err = BAD_VALUE;
if (url != NULL) {
const sp<IMediaPlayerService> service(getMediaPlayerService()); // 1
if (service != 0) {
sp<IMediaPlayer> player(service->create(this, mAudioSessionId, mAttributionSource)); // 2
if ((NO_ERROR != doSetRetransmitEndpoint(player)) ||
(NO_ERROR != player->setDataSource(httpService, url, headers))) {
player.clear();
}
err = attachNewPlayer(player);
}
}
return err;
}
That in turn calls getDuration on an IMediaPlayer, which is initialized in attachNewPlayer. The code at comment 1:
frameworks/av/media/libmedia/IMediaDeathNotifier.cpp
// establish binder interface to MediaPlayerService
/*static*/const sp<IMediaPlayerService>
IMediaDeathNotifier::getMediaPlayerService()
{
ALOGV("getMediaPlayerService");
Mutex::Autolock _l(sServiceLock);
if (sMediaPlayerService == 0) {
sp<IServiceManager> sm = defaultServiceManager();
sp<IBinder> binder = sm->waitForService(String16("media.player"));
if (binder == nullptr) {
return nullptr;
}
if (sDeathNotifier == NULL) {
sDeathNotifier = new DeathNotifier();
}
binder->linkToDeath(sDeathNotifier);
sMediaPlayerService = interface_cast<IMediaPlayerService>(binder);
}
ALOGE_IF(sMediaPlayerService == 0, "no media player service!?");
return sMediaPlayerService;
}
We can see it fetches the media.player service, and at comment 2 calls create to build the player. I'll skip how the service itself gets registered; feel free to explore that on your own.
frameworks/av/media/libmediaplayerservice/MediaPlayerService.cpp
sp<IMediaPlayer> MediaPlayerService::create(const sp<IMediaPlayerClient>& client,
audio_session_t audioSessionId, const AttributionSourceState& attributionSource)
{
int32_t connId = android_atomic_inc(&mNextConnId);
// TODO b/182392769: use attribution source util
AttributionSourceState verifiedAttributionSource = attributionSource;
verifiedAttributionSource.pid = VALUE_OR_FATAL(
legacy2aidl_pid_t_int32_t(IPCThreadState::self()->getCallingPid()));
verifiedAttributionSource.uid = VALUE_OR_FATAL(
legacy2aidl_uid_t_int32_t(IPCThreadState::self()->getCallingUid()));
sp<Client> c = new Client(
this, verifiedAttributionSource, connId, client, audioSessionId);
ALOGV("Create new client(%d) from %s, ", connId,
verifiedAttributionSource.toString().c_str());
wp<Client> w = c;
{
Mutex::Autolock lock(mLock);
mClients.add(w);
}
return c;
}
Client is an inner class of MediaPlayerService and extends BnMediaPlayer:
frameworks/av/media/libmediaplayerservice/MediaPlayerService.h
class Client : public BnMediaPlayer
So calling getDuration on the IMediaPlayer actually ends up in getDuration of Client, MediaPlayerService's inner class.
frameworks/av/media/libmediaplayerservice/MediaPlayerService.cpp
status_t MediaPlayerService::Client::getDuration(int *msec)
{
ALOGV("getDuration");
sp<MediaPlayerBase> p = getPlayer(); // 1: fetches mPlayer
if (p == 0) return UNKNOWN_ERROR;
status_t ret = p->getDuration(msec);
if (ret == NO_ERROR) {
ALOGV("[%d] getDuration = %d", mConnId, *msec);
} else {
ALOGE("getDuration returned %d", ret);
}
return ret;
}
status_t MediaPlayerService::Client::setDataSource_post(
const sp<MediaPlayerBase>& p,
status_t status)
{
ALOGV(" setDataSource");
if (status != OK) {
ALOGE(" error: %d", status);
return status;
}
// Set the re-transmission endpoint if one was chosen.
if (mRetransmitEndpointValid) {
status = p->setRetransmitEndpoint(&mRetransmitEndpoint);
if (status != NO_ERROR) {
ALOGE("setRetransmitEndpoint error: %d", status);
}
}
if (status == OK) {
Mutex::Autolock lock(mLock);
mPlayer = p;
}
return status;
}
status_t MediaPlayerService::Client::setDataSource(int fd, int64_t offset, int64_t length)
{
ALOGV("setDataSource fd=%d (%s), offset=%lld, length=%lld",
fd, nameForFd(fd).c_str(), (long long) offset, (long long) length);
struct stat sb;
int ret = fstat(fd, &sb);
if (ret != 0) {
ALOGE("fstat(%d) failed: %d, %s", fd, ret, strerror(errno));
return UNKNOWN_ERROR;
}
ALOGV("st_dev = %llu", static_cast<unsigned long long>(sb.st_dev));
ALOGV("st_mode = %u", sb.st_mode);
ALOGV("st_uid = %lu", static_cast<unsigned long>(sb.st_uid));
ALOGV("st_gid = %lu", static_cast<unsigned long>(sb.st_gid));
ALOGV("st_size = %llu", static_cast<unsigned long long>(sb.st_size));
if (offset >= sb.st_size) {
ALOGE("offset error");
return UNKNOWN_ERROR;
}
if (offset + length > sb.st_size) {
length = sb.st_size - offset;
ALOGV("calculated length = %lld", (long long)length);
}
player_type playerType = MediaPlayerFactory::getPlayerType(this,
fd,
offset,
length);
sp<MediaPlayerBase> p = setDataSource_pre(playerType); // see below: why the player type ends up being NU_PLAYER
if (p == NULL) {
return NO_INIT;
}
// now set data source
return mStatus = setDataSource_post(p, p->setDataSource(fd, offset, length));
}
sp<MediaPlayerBase> MediaPlayerService::Client::setDataSource_pre(
player_type playerType)
{
ALOGV("player type = %d", playerType);
// create the right type of player
sp<MediaPlayerBase> p = createPlayer(playerType);
if (p == NULL) {
return p;
}
std::vector<DeathNotifier> deathNotifiers;
// Listen to death of media.extractor service
sp<IServiceManager> sm = defaultServiceManager();
sp<IBinder> binder = sm->getService(String16("media.extractor"));
if (binder == NULL) {
ALOGE("extractor service not available");
return NULL;
}
deathNotifiers.emplace_back(
binder, [l = wp<MediaPlayerBase>(p)]() {
sp<MediaPlayerBase> listener = l.promote();
if (listener) {
ALOGI("media.extractor died. Sending death notification.");
listener->sendEvent(MEDIA_ERROR, MEDIA_ERROR_SERVER_DIED,
MEDIAEXTRACTOR_PROCESS_DEATH);
} else {
ALOGW("media.extractor died without a death handler.");
}
});
{
using ::android::hidl::base::V1_0::IBase;
// Listen to death of OMX service
{
sp<IBase> base = ::android::hardware::media::omx::V1_0::
IOmx::getService();
if (base == nullptr) {
ALOGD("OMX service is not available");
} else {
deathNotifiers.emplace_back(
base, [l = wp<MediaPlayerBase>(p)]() {
sp<MediaPlayerBase> listener = l.promote();
if (listener) {
ALOGI("OMX service died. "
"Sending death notification.");
listener->sendEvent(
MEDIA_ERROR, MEDIA_ERROR_SERVER_DIED,
MEDIACODEC_PROCESS_DEATH);
} else {
ALOGW("OMX service died without a death handler.");
}
});
}
}
// Listen to death of Codec2 services
{
for (std::shared_ptr<Codec2Client> const& client :
Codec2Client::CreateFromAllServices()) {
sp<IBase> hidlBase = client->getHidlBase();
::ndk::SpAIBinder aidlBase = client->getAidlBase();
auto onBinderDied = [l = wp<MediaPlayerBase>(p),
name = std::string(client->getServiceName())]() {
sp<MediaPlayerBase> listener = l.promote();
if (listener) {
ALOGI("Codec2 service \"%s\" died. "
"Sending death notification.",
name.c_str());
listener->sendEvent(
MEDIA_ERROR, MEDIA_ERROR_SERVER_DIED,
MEDIACODEC_PROCESS_DEATH);
} else {
ALOGW("Codec2 service \"%s\" died "
"without a death handler.",
name.c_str());
}
};
if (hidlBase) {
deathNotifiers.emplace_back(hidlBase, onBinderDied);
} else if (aidlBase.get() != nullptr) {
deathNotifiers.emplace_back(aidlBase, onBinderDied);
}
}
}
}
Mutex::Autolock lock(mLock);
mDeathNotifiers.clear();
mDeathNotifiers.swap(deathNotifiers);
mAudioDeviceUpdatedListener = new AudioDeviceUpdatedNotifier(p);
if (!p->hardwareOutput()) {
mAudioOutput = new AudioOutput(mAudioSessionId, mAttributionSource,
mAudioAttributes, mAudioDeviceUpdatedListener);
static_cast<MediaPlayerInterface*>(p.get())->setAudioSink(mAudioOutput);
}
return p;
}
sp<MediaPlayerBase> MediaPlayerService::Client::createPlayer(player_type playerType)
{
// determine if we have the right player type
sp<MediaPlayerBase> p = getPlayer();
if ((p != NULL) && (p->playerType() != playerType)) {
ALOGV("delete player");
p.clear();
}
if (p == NULL) {
p = MediaPlayerFactory::createPlayer(playerType, mListener,
VALUE_OR_FATAL(aidl2legacy_int32_t_pid_t(mAttributionSource.pid)));
}
if (p != NULL) {
p->setUID(VALUE_OR_FATAL(aidl2legacy_int32_t_uid_t(mAttributionSource.uid)));
}
return p;
}
The code at comment 1 returns mPlayer, a member of MediaPlayerService's inner class Client:
sp<MediaPlayerBase> mPlayer;
Next, let's see how mPlayer gets initialized. Client's member functions are invoked by MediaPlayer over IPC, which ends up calling setDataSource, setDataSource_pre and setDataSource_post.
frameworks/av/media/libmediaplayerservice/MediaPlayerFactory.cpp
sp<MediaPlayerBase> MediaPlayerFactory::createPlayer(
player_type playerType,
const sp<MediaPlayerBase::Listener> &listener,
pid_t pid) {
sp<MediaPlayerBase> p;
IFactory* factory;
status_t init_result;
Mutex::Autolock lock_(&sLock);
if (sFactoryMap.indexOfKey(playerType) < 0) {
ALOGE("Failed to create player object of type %d, no registered"
" factory", playerType);
return p;
}
factory = sFactoryMap.valueFor(playerType);
CHECK(NULL != factory);
p = factory->createPlayer(pid);
if (p == NULL) {
ALOGE("Failed to create player object of type %d, create failed",
playerType);
return p;
}
init_result = p->initCheck();
if (init_result == NO_ERROR) {
p->setNotifyCallback(listener);
} else {
ALOGE("Failed to create player object of type %d, initCheck failed"
" (res = %d)", playerType, init_result);
p.clear();
}
return p;
}
The playerType is used to look up the matching factory, which then creates the player.
Why the player type is NU_PLAYER
frameworks/av/media/libmediaplayerservice/MediaPlayerFactory.cpp
player_type MediaPlayerFactory::getPlayerType(const sp<IMediaPlayer>& client,
int fd,
int64_t offset,
int64_t length) {
GET_PLAYER_TYPE_IMPL(client, fd, offset, length);
}
#define GET_PLAYER_TYPE_IMPL(a...) \
Mutex::Autolock lock_(&sLock); \
\
player_type ret = STAGEFRIGHT_PLAYER; \
float bestScore = 0.0; \
\
for (size_t i = 0; i < sFactoryMap.size(); ++i) { \
\
IFactory* v = sFactoryMap.valueAt(i); \
float thisScore; \
CHECK(v != NULL); \
thisScore = v->scoreFactory(a, bestScore); \
if (thisScore > bestScore) { \
ret = sFactoryMap.keyAt(i); \
bestScore = thisScore; \
} \
} \
\
if (0.0 == bestScore) { \
ret = getDefaultPlayerType(); \
} \
\
return ret;
After the macro expands, the call becomes:
thisScore = v->scoreFactory(client, fd, offset, length, bestScore); // note: `a` has been substituted
frameworks/av/media/libmediaplayerservice/MediaPlayerFactory.h
/*
**
** Copyright 2012, The Android Open Source Project
**
** Licensed under the Apache License, Version 2.0 (the "License");
** you may not use this file except in compliance with the License.
** You may obtain a copy of the License at
**
** http://www.apache.org/licenses/LICENSE-2.0
**
** Unless required by applicable law or agreed to in writing, software
** distributed under the License is distributed on an "AS IS" BASIS,
** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
** See the License for the specific language governing permissions and
** limitations under the License.
*/
#ifndef ANDROID_MEDIAPLAYERFACTORY_H
#define ANDROID_MEDIAPLAYERFACTORY_H
#include <media/MediaPlayerInterface.h>
#include <media/stagefright/foundation/ABase.h>
namespace android {
class MediaPlayerFactory {
public:
class IFactory {
public:
virtual ~IFactory() { }
virtual float scoreFactory(const sp<IMediaPlayer>& /*client*/,
const char* /*url*/,
float /*curScore*/) { return 0.0; }
virtual float scoreFactory(const sp<IMediaPlayer>& /*client*/,
int /*fd*/,
int64_t /*offset*/,
int64_t /*length*/,
float /*curScore*/) { return 0.0; }
virtual float scoreFactory(const sp<IMediaPlayer>& /*client*/,
const sp<IStreamSource> &/*source*/,
float /*curScore*/) { return 0.0; }
virtual float scoreFactory(const sp<IMediaPlayer>& /*client*/,
const sp<DataSource> &/*source*/,
float /*curScore*/) { return 0.0; }
virtual sp<MediaPlayerBase> createPlayer(pid_t pid) = 0;
};
static status_t registerFactory(IFactory* factory,
player_type type);
static void unregisterFactory(player_type type);
static player_type getPlayerType(const sp<IMediaPlayer>& client,
const char* url);
static player_type getPlayerType(const sp<IMediaPlayer>& client,
int fd,
int64_t offset,
int64_t length);
static player_type getPlayerType(const sp<IMediaPlayer>& client,
const sp<IStreamSource> &source);
static player_type getPlayerType(const sp<IMediaPlayer>& client,
const sp<DataSource> &source);
static sp<MediaPlayerBase> createPlayer(player_type playerType,
const sp<MediaPlayerBase::Listener> &listener,
pid_t pid);
static void registerBuiltinFactories();
private:
typedef KeyedVector<player_type, IFactory*> tFactoryMap;
MediaPlayerFactory() { }
static status_t registerFactory_l(IFactory* factory,
player_type type);
static Mutex sLock;
static tFactoryMap sFactoryMap;
static bool sInitComplete;
DISALLOW_EVIL_CONSTRUCTORS(MediaPlayerFactory);
};
} // namespace android
#endif // ANDROID_MEDIAPLAYERFACTORY_H
As you can see, the default implementations return 0.0.
frameworks/av/media/libmediaplayerservice/MediaPlayerFactory.cpp
static player_type getDefaultPlayerType() {
return NU_PLAYER;
}
class NuPlayerFactory : public MediaPlayerFactory::IFactory {
public:
virtual float scoreFactory(const sp<IMediaPlayer>& /*client*/,
const char* url,
float curScore) {
static const float kOurScore = 0.8;
if (kOurScore <= curScore)
return 0.0;
if (!strncasecmp("http://", url, 7)
|| !strncasecmp("https://", url, 8)
|| !strncasecmp("file://", url, 7)) {
size_t len = strlen(url);
if (len >= 5 && !strcasecmp(".m3u8", &url[len - 5])) {
return kOurScore;
}
if (strstr(url,"m3u8")) {
return kOurScore;
}
if ((len >= 4 && !strcasecmp(".sdp", &url[len - 4])) || strstr(url, ".sdp?")) {
return kOurScore;
}
}
if (!strncasecmp("rtsp://", url, 7)) {
return kOurScore;
}
return 0.0;
}
virtual float scoreFactory(const sp<IMediaPlayer>& /*client*/,
const sp<IStreamSource>& /*source*/,
float /*curScore*/) {
return 1.0;
}
virtual float scoreFactory(const sp<IMediaPlayer>& /*client*/,
const sp<DataSource>& /*source*/,
float /*curScore*/) {
// Only NuPlayer supports setting a DataSource source directly.
return 1.0;
}
virtual sp<MediaPlayerBase> createPlayer(pid_t pid) {
ALOGV(" create NuPlayer");
return new NuPlayerDriver(pid);
}
};
So NU_PLAYER is used by default. Which brings us back to:
frameworks/av/media/libmediaplayerservice/MediaPlayerService.cpp
status_t MediaPlayerService::Client::getDuration(int *msec)
{
ALOGV("getDuration");
sp<MediaPlayerBase> p = getPlayer();
if (p == 0) return UNKNOWN_ERROR;
status_t ret = p->getDuration(msec);
if (ret == NO_ERROR) {
ALOGV("[%d] getDuration = %d", mConnId, *msec);
} else {
ALOGE("getDuration returned %d", ret);
}
return ret;
}
getPlayer() returns the NuPlayerDriver, so p->getDuration(msec) calls NuPlayerDriver's getDuration method:
frameworks/av/media/libmediaplayerservice/nuplayer/NuPlayerDriver.cpp
NuPlayerDriver::NuPlayerDriver(pid_t pid)
: mState(STATE_IDLE),
mIsAsyncPrepare(false),
mAsyncResult(UNKNOWN_ERROR),
mSetSurfaceInProgress(false),
mDurationUs(-1), // defaults to -1
mPositionUs(-1),
mSeekInProgress(false),
mPlayingTimeUs(0),
mRebufferingTimeUs(0),
mRebufferingEvents(0),
mRebufferingAtExit(false),
mLooper(new ALooper),
mMediaClock(new MediaClock),
mPlayer(new NuPlayer(pid, mMediaClock)),
mPlayerFlags(0),
mCachedPlayerIId(PLAYER_PIID_INVALID),
mMetricsItem(NULL),
mClientUid(-1),
mAtEOS(false),
mLooping(false),
mAutoLoop(false) {
ALOGD("NuPlayerDriver(%p) created, clientPid(%d)", this, pid);
mLooper->setName("NuPlayerDriver Looper");
mMediaClock->init();
// set up an analytics record
mMetricsItem = mediametrics::Item::create(kKeyPlayer);
mLooper->start(
false, /* runOnCallingThread */
true, /* canCallJava */
PRIORITY_AUDIO);
mLooper->registerHandler(mPlayer); // the ALooper/AHandler mechanism
mPlayer->init(this);
}
status_t NuPlayerDriver::getDuration(int *msec) {
Mutex::Autolock autoLock(mLock);
if (mDurationUs < 0) {
return UNKNOWN_ERROR;
}
*msec = (mDurationUs + 500LL) / 1000;
return OK;
}
void NuPlayerDriver::notifyDuration(int64_t durationUs) {
Mutex::Autolock autoLock(mLock);
mDurationUs = durationUs;
}
frameworks/av/media/libmediaplayerservice/nuplayer/NuPlayer.cpp
void NuPlayer::schedulePollDuration() {
sp<AMessage> msg = new AMessage(kWhatPollDuration, this);
msg->setInt32("generation", mPollDurationGeneration);
msg->post();
}
void NuPlayer::onMessageReceived(const sp<AMessage> &msg) {
switch (msg->what()) {
// ...
case kWhatPollDuration:
{
int32_t generation;
CHECK(msg->findInt32("generation", &generation));
if (generation != mPollDurationGeneration) {
// stale
break;
}
int64_t durationUs;
if (mDriver != NULL && mSource->getDuration(&durationUs) == OK) { // 1
sp<NuPlayerDriver> driver = mDriver.promote();
if (driver != NULL) {
driver->notifyDuration(durationUs); // 1
}
}
msg->post(1000000LL); // poll again in a second.
break;
}
}
}
void NuPlayer::setDataSourceAsync(int fd, int64_t offset, int64_t length) {
sp<AMessage> msg = new AMessage(kWhatSetDataSource, this);
sp<AMessage> notify = new AMessage(kWhatSourceNotify, this);
sp<GenericSource> source =
new GenericSource(notify, mUIDValid, mUID, mMediaClock);
ALOGV("setDataSourceAsync fd %d/%lld/%lld source: %p",
fd, (long long)offset, (long long)length, source.get());
status_t err = source->setDataSource(fd, offset, length);
if (err != OK) {
ALOGE("Failed to set data source!");
source = NULL;
}
msg->setObject("source", source);
msg->post();
mDataSourceType = DATA_SOURCE_TYPE_GENERIC_FD;
}
You can see it calls back into NuPlayerDriver's notifyDuration shown above. mSource here is a GenericSource:
frameworks/av/media/libmediaplayerservice/nuplayer/GenericSource.cpp
status_t NuPlayer::GenericSource::getDuration(int64_t *durationUs) {
Mutex::Autolock _l(mLock);
*durationUs = mDurationUs;
return OK;
}
status_t NuPlayer::GenericSource::initFromDataSource() {
sp<IMediaExtractor> extractor;
sp<DataSource> dataSource;
{
Mutex::Autolock _l_d(mDisconnectLock);
dataSource = mDataSource;
}
CHECK(dataSource != NULL);
mLock.unlock();
// This might take long time if data source is not reliable.
extractor = MediaExtractorFactory::Create(dataSource, NULL);
if (extractor == NULL) {
ALOGE("initFromDataSource, cannot create extractor!");
mLock.lock();
return UNKNOWN_ERROR;
}
sp<MetaData> fileMeta = extractor->getMetaData();
size_t numtracks = extractor->countTracks();
if (numtracks == 0) {
ALOGE("initFromDataSource, source has no track!");
mLock.lock();
return UNKNOWN_ERROR;
}
mLock.lock();
mFileMeta = fileMeta;
if (mFileMeta != NULL) {
int64_t duration;
if (mFileMeta->findInt64(kKeyDuration, &duration)) {
mDurationUs = duration;
}
}
int32_t totalBitrate = 0;
mMimes.clear();
for (size_t i = 0; i < numtracks; ++i) {
sp<IMediaSource> track = extractor->getTrack(i);
if (track == NULL) {
continue;
}
sp<MetaData> meta = extractor->getTrackMetaData(i);
if (meta == NULL) {
ALOGE("no metadata for track %zu", i);
return UNKNOWN_ERROR;
}
const char *mime;
CHECK(meta->findCString(kKeyMIMEType, &mime));
ALOGV("initFromDataSource track[%zu]: %s", i, mime);
// Do the string compare immediately with "mime",
// we can't assume "mime" would stay valid after another
// extractor operation, some extractors might modify meta
// during getTrack() and make it invalid.
if (!strncasecmp(mime, "audio/", 6)) {
if (mAudioTrack.mSource == NULL) {
mAudioTrack.mIndex = i;
mAudioTrack.mSource = track;
mAudioTrack.mPackets =
new AnotherPacketSource(mAudioTrack.mSource->getFormat());
if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_VORBIS)) {
mAudioIsVorbis = true;
} else {
mAudioIsVorbis = false;
}
mMimes.add(String8(mime));
}
} else if (!strncasecmp(mime, "video/", 6)) {
if (mVideoTrack.mSource == NULL) {
mVideoTrack.mIndex = i;
mVideoTrack.mSource = track;
mVideoTrack.mPackets =
new AnotherPacketSource(mVideoTrack.mSource->getFormat());
// video always at the beginning
mMimes.insertAt(String8(mime), 0);
}
}
mSources.push(track);
int64_t durationUs;
if (meta->findInt64(kKeyDuration, &durationUs)) {
if (durationUs > mDurationUs) {
mDurationUs = durationUs;
}
}
int32_t bitrate;
if (totalBitrate >= 0 && meta->findInt32(kKeyBitRate, &bitrate)) {
totalBitrate += bitrate;
} else {
totalBitrate = -1;
}
}
ALOGV("initFromDataSource mSources.size(): %zu mIsSecure: %d mime[0]: %s", mSources.size(),
mIsSecure, (mMimes.isEmpty() ? "NONE" : mMimes[0].c_str()));
if (mSources.size() == 0) {
ALOGE("b/23705695");
return UNKNOWN_ERROR;
}
// Modular DRM: The return value doesn't affect source initialization.
(void)checkDrmInfo();
mBitrate = totalBitrate;
return OK;
}
The mDurationUs member is filled in from sp<MetaData> fileMeta = extractor->getMetaData(); via MetaData's findInt64 with kKeyDuration (and may be bumped up by the per-track durations).
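Incidentally, the same extractor-based duration can be read from app code without constructing a MediaPlayer at all, for example through MediaMetadataRetriever (a sketch; audio1 is the same raw resource used above):
import android.content.Context
import android.media.MediaMetadataRetriever

// Reads the duration (in ms) of the ADTS stream through the same media.extractor path.
fun aacDurationMs(context: Context): Long? {
    val retriever = MediaMetadataRetriever()
    return try {
        context.resources.openRawResourceFd(R.raw.audio1).use { afd ->
            retriever.setDataSource(afd.fileDescriptor, afd.startOffset, afd.length)
            retriever.extractMetadata(MediaMetadataRetriever.METADATA_KEY_DURATION)?.toLongOrNull()
        }
    } finally {
        retriever.release()
    }
}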
I'm about ready to 🤮 from writing this — how have we still not reached the source files below?
zsh@zsh:~/android_asop/frameworks$ vim ./wilhelm/src/android/util/AacAdtsExtractor.cpp
zsh@zsh:~/android_asop/frameworks$ vim ./wilhelm/src/android/AacBqToPcmCbRenderer.cpp
zsh@zsh:~/android_asop/frameworks$ vim ./wilhelm/src/android/AudioPlayer_to_android.cpp