Audio
Audio is a way of conveying information through sound: the human voice, musical instruments, sounds of nature, and so on. It can be recorded and played back with a variety of devices, such as microphones, speakers, and headphones. In modern life audio is used widely in entertainment, education, and communication; music, radio, film, and audiobooks are all audio applications. As technology advances, audio technology keeps improving as well: digital audio, surround sound, noise cancellation, and similar techniques have greatly raised audio quality. In short, audio is an indispensable part of daily life and brings a rich, varied experience.
Outline / Content
open
outputDesc->mLatency
mediaplayer.cpp
Media FM APP
MediaCodec.cpp
Audio Framework(frameworks/av/services/audioflinger/)
openOutputStream
ioConfigChanged
get_channels
Media Player APP
openOutput
IAudioFlingerClient.cpp
android_media_AudioTrack.cpp
AudioStreamOut.cpp
android_media_MediaCodec.cpp
AudioPolicyManager.cpp
IAudioFlinger.cpp
ServerProxy
desc->mPatch = mPatch; desc->mChannelMask = mChannelMask; desc->mSamplingRate = mSampleRate; desc->mFormat = mFormat; desc->mFrameCount = mNormalFrameCount; desc->mLatency = latency_l();
open_output_stream
android_hardware_Radio.cpp
AudioPolicyClientImpl.cpp
Threads.cpp
getChannelMask
AudioMixer.cpp
AudioHwDevice.cpp
android.media.AudioSystem
MixerThread
getTimestamp
Audio Framework(frameworks/av/services/audiopolicy/)
Media JNI(frameworks/base/media/jni/)
releaseBuffer
get_sample_rate
android_media_AmrInputStream.cpp
getSampleRate
outputDesc->mSamplingRate
setDeviceConnectionStateInt
AudioTrack
AudioTrackShared.cpp
Engine.cpp
android_media_MediaPlayer.cpp
IMediaPlayer.cpp
audio_track_cblk_t* cblk = mCblk; front = android_atomic_acquire_load(&cblk->u.mStreaming.mFront); rear = cblk->u.mStreaming.mRear; buffer->mRaw = part1 > 0 ? &((char *) mBuffers)[(mIsOut ? rear : front) * mFrameSize] : NULL;
if (mLatchDValid) { mLatchQ = mLatchD; mLatchDValid = false; mLatchQValid = true;}
ClientProxy
AudioTrack.cpp
IAudioRecord.cpp
obtainBuffer
android.media.MediaPlayer
getSamplingRate
mEpoch + mCblk->mServer;
getLatency
MultiMedia Service(frameworks/av/media/libmediaplayerservice/)
get_buffer_size
ACodec.cpp
MediaExtractor.cpp
get_presentation_position
openOutput_l
AudioPolicyManager
AudioSystem.cpp
getMinBufferSize
AudioTrackClientProxy
getFrameSize
android.media.MediaRecorder
native_setup
getOutputSamplingRate
Media Part
get_latency
threadLoop_write
AudioTrackShared.h
createTrack
Tracks.cpp
getFormat
android.media.AmrInputStream
get_format
mLatency
Track
TrackHandle
t.in = t.buffer.raw;
AudioTrack.java
mCblkMemory = client->heap()->allocate(size); mCblk = static_cast<audio_track_cblk_t*>(mCblkMemory->pointer()); new(mCblk) audio_track_cblk_t(); mBuffer = (char*)mCblk + sizeof(audio_track_cblk_t);
android.media.AudioRecord
android.media.AudioManager
audio_hw.cpp
mLatchDValid = false; status_t status = mNormalSink->getTimestamp(mLatchD.mTimestamp); if (status == NO_ERROR) { size_t totalFramesWritten = mNormalSink->framesWritten(); if (totalFramesWritten >= mLatchD.mTimestamp.mPosition) { mLatchD.mUnpresentedFrames = totalFramesWritten - mLatchD.mTimestamp.mPosition; /* mLatchD.mFramesReleased is set immediately before D is clocked into Q */ mLatchDValid = true; } }
Phone Call APP
AudioStreamInSource.cpp
IMediaPlayerService.cpp
getMinFrameCount
threadLoop
outputDesc->mFrameCount
readOutputParameters_l
AudioTrackServerProxy
latency
IMediaRecorder.cpp
convertMixerFormat
SoftOMXPlugin.cpp
mLatency = mAfLatency + (1000 * frameCount) / mSampleRate; mFrameCount = frameCount; if (frameCount > mReqFrameCount) { mReqFrameCount = frameCount; }
MultiMedia
AudioStreamOutSink.cpp
IAudioTrack.cpp
native_get_latency
Audio Framework(frameworks/av/media/libnbaio/)
SoftOMXComponent.cpp
PlaybackThread
getNextBuffer
android_media_MediaRecorder.cpp
uint32_t sampleRate = mAudioTrackServerProxy->getSampleRate(); AudioPlaybackRate playbackRate = mAudioTrackServerProxy->getPlaybackRate(); uint32_t unpresentedFrames = ((double) playbackThread->mLatchQ.mUnpresentedFrames * sampleRate * playbackRate.mSpeed) / playbackThread->mSampleRate; ssize_t i = playbackThread->mLatchQ.mFramesReleased.indexOfKey(this); uint32_t framesWritten = i >= 0 ? playbackThread->mLatchQ.mFramesReleased[i] : mAudioTrackServerProxy->framesReleased(); if (framesWritten >= unpresentedFrames) { timestamp.mPosition = framesWritten - unpresentedFrames; timestamp.mTime = playbackThread->mLatchQ.mTimestamp.mTime; result = NO_ERROR; }
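A minimal sketch of the position arithmetic traced in the PlaybackThread latch and Track::getTimestamp fragments above; the sample rates, frame counts, and playback speed below are assumed example values, not numbers taken from a real device or from the AOSP sources.

    #include <cstdint>
    #include <cstdio>

    int main() {
        // Assumed example values, for illustration only.
        uint32_t trackSampleRate    = 44100;   // track rate (cf. AudioTrackServerProxy::getSampleRate)
        double   playbackSpeed      = 1.0;     // cf. AudioPlaybackRate mSpeed
        uint32_t threadSampleRate   = 48000;   // mixer thread rate (cf. PlaybackThread mSampleRate)
        uint32_t latchedUnpresented = 9600;    // cf. mLatchQ.mUnpresentedFrames, in thread-rate frames
        uint32_t framesWritten      = 441000;  // frames the track has released so far

        // Rescale the unpresented frames from the thread rate to the track rate,
        // mirroring the expression in the Track::getTimestamp fragment above.
        uint32_t unpresentedFrames = (uint32_t)
            (((double)latchedUnpresented * trackSampleRate * playbackSpeed) / threadSampleRate); // 8820

        // The reported position is what the track has released minus what the
        // sink has not yet presented.
        if (framesWritten >= unpresentedFrames) {
            uint32_t position = framesWritten - unpresentedFrames;                               // 432180
            printf("timestamp position = %u frames\n", position);
        }
        return 0;
    }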
getFrameCount
Application Part
getPosition
createTrack_l
android_media_AudioRecord.cpp
AudioFlinger.cpp
set
getOutputForAttr
PlaybackThread::Track
Binder IPC
const sp<Track> t = mActiveTracks[i].promote(); Track* const track = t.get();
updateAndGetPosition_l
TrackBase
setParameter
getOutputFrameCount
omx
nuplayer
out_write
out = reinterpret_cast<int32_t*>((uint8_t*)out + BLOCKSIZE * t1.mMixerChannelCount * audio_bytes_per_sample(t1.mMixerFormat));
android_media_MediaExtractor.cpp
mediarecorder.cpp
write
MediaPlayerService.cpp
AudioRecord.cpp
prepareTracks_l
mReqFrameCount = frameCount;
frameCount * channelCount * bytesPerSample
OMXClient.cpp
android_media_AudioSystem.cpp
Audio JNI(frameworks/base/core/jni/)
AudioTrack.h
Media Record APP
native_get_min_buff_size
android.media.AudioTrack
Application Framework(frameworks/base/media/java/android/media)
native_get_timestamp
buffer->mRaw = NULL;
calculateMinFrameCount
track.mainBuffer = valueBuf;
sp<IMemory> iMem = track->getCblk(); void *iMemPointer = iMem->pointer(); audio_track_cblk_t* cblk = static_cast<audio_track_cblk_t*>(iMemPointer); buffers = cblk + 1;
process__genericNoResampling
AudioPolicyService.cpp
Audio/Media Framework(frameworks/av/media/libmedia/)
audio_a2dp_hw.c
OMX Integration Layer
setBufferProvider
StageFright Engine(frameworks/av/media/libstagefright/)
IAudioPolicyService.cpp
MultiMedia Framework
Audio
Audio Part
ExtendedAudioBufferProvider
checkOutputsForDevice
getOutputLatency
AudioBufferProvider
track__NoResample
int32_t *out = t1.mainBuffer;
MediaRecorderClient.cpp
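As a worked example of the two sizing expressions captured in the outline above (getMinBufferSize's frameCount * channelCount * bytesPerSample and createTrack_l's mLatency = mAfLatency + (1000*frameCount) / mSampleRate), the sketch below plugs in assumed values; the numbers are illustrative only and not taken from any particular device.

    #include <cstdint>
    #include <cstdio>

    int main() {
        // Assumed example values, for illustration only.
        uint32_t afLatencyMs    = 96;     // output (hardware/mixer) latency in ms
        uint32_t frameCount     = 4800;   // frames in the track's client buffer
        uint32_t sampleRate     = 48000;  // track sample rate in Hz
        uint32_t channelCount   = 2;      // stereo
        uint32_t bytesPerSample = 2;      // 16-bit PCM

        // Buffer size in bytes, as in the getMinBufferSize expression above.
        uint32_t minBufferBytes = frameCount * channelCount * bytesPerSample;  // 19200

        // Client-visible latency, as in the createTrack_l fragment above:
        // output latency plus the buffer depth converted to milliseconds.
        uint32_t latencyMs = afLatencyMs + (1000 * frameCount) / sampleRate;   // 96 + 100 = 196

        printf("minBufferBytes=%u latencyMs=%u\n", minBufferBytes, latencyMs);
        return 0;
    }

With these assumptions the client buffer holds 100 ms of audio, so the latency reported to the application is the 96 ms output latency plus that 100 ms, i.e. 196 ms.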