Stagefright recorder part1
Transcript

  • 1. StagefrightRecorder (Camera/AudioSource setup and start), by 박철희
  • 2. 1. Overall structure - MediaRecorder
    [Diagram: (a) IMediaRecorderClient, (b) IMediaPlayerService, (c) IMediaRecorder.
    android.media.MediaRecorder calls through JNI into the native MediaRecorder, which
    1) connects to MediaPlayerService via BpMediaPlayerService/BnMediaPlayerService,
    2) has the service create a MediaRecorderClient (BnMediaRecorder),
    3) which creates the StagefrightRecorder;
    4)-5) the two sides are then connected through BpMediaRecorder and
    BnMediaRecorderClient/BpMediaRecorderClient.]
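To make the Bp/Bn naming in slide 2 concrete, here is a minimal, self-contained C++ sketch of the proxy/stub split: an interface in the role of IMediaRecorder, a Bn-style class the service implements, and a Bp-style proxy the client holds. All names are illustrative; this is not the actual binder machinery, which marshals the call across processes.

    #include <iostream>
    #include <memory>
    #include <utility>

    // Interface in the role of IMediaRecorder.
    struct IRecorder {
        virtual ~IRecorder() = default;
        virtual int start() = 0;
    };

    // Server-side stub base, in the role of BnMediaRecorder.
    struct BnRecorder : IRecorder {};

    // Service-side implementation, in the role of MediaRecorderClient/StagefrightRecorder.
    struct RecorderClient : BnRecorder {
        int start() override {
            std::cout << "service: recording started\n";
            return 0;
        }
    };

    // Client-side proxy, in the role of BpMediaRecorder; a real proxy would marshal
    // the call over binder, here it just forwards to the remote object.
    struct BpRecorder : IRecorder {
        explicit BpRecorder(std::shared_ptr<IRecorder> remote) : mRemote(std::move(remote)) {}
        int start() override { return mRemote->start(); }
        std::shared_ptr<IRecorder> mRemote;
    };

    int main() {
        auto service = std::make_shared<RecorderClient>();  // created on the service side (steps 2/3)
        BpRecorder proxy(service);                           // handed back to the app side (steps 4/5)
        return proxy.start();                                // what MediaRecorder::start() boils down to
    }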
  • 3. 1. Overall structure - StagefrightRecorder
    [Diagram: the video path runs CameraHAL -> CameraService (Camera::Client) ->
    Camera/RecordingProxy -> CameraSource (mFrameReceived) -> OMXCodec
    (drainInputBuffer/fillOutputBuffer driven by EMPTY_BUFFER_DONE/FILL_BUFFER_DONE) ->
    MPEG4Writer (Track::threadEntry buffering mChunks, threadFunc writing them).
    The audio path runs AudioHAL -> AudioRecord -> AudioSource (mBufferReceived) ->
    OMX audio encoder -> MPEG4Writer, using the same empty/fill buffer cycle.]
  • 4. 1. Overall structure - CameraHAL
    [Diagram]
  • 5. 1. Overall structure - CameraHAL: Qualcomm MSM Camera system
    [Diagram]
  • 6. 1. Overall structure - CameraHAL
    [Diagram]
  • 7. 2. CameraSource setup

    status_t StagefrightRecorder::setCamera(const sp<ICamera> &camera,
            const sp<ICameraRecordingProxy> &proxy) {
        mCamera = camera;       // points to CameraService::Client
        mCameraProxy = proxy;   // the RecordingProxy object inside Camera
        return OK;
    }

    status_t StagefrightRecorder::startMPEG4Recording() {
        int32_t totalBitRate;
        status_t err = setupMPEG4Recording(mOutputFd, mVideoWidth, mVideoHeight,
                mVideoBitRate, &totalBitRate, &mWriter);
        if (err != OK) {
            return err;
        }
    }

    status_t StagefrightRecorder::setupMPEG4Recording(..) {
        if (mVideoSource < VIDEO_SOURCE_LIST_END) {
            sp<MediaSource> mediaSource;
            err = setupMediaSource(&mediaSource);
            if (err != OK) {
                return err;
            }
    }

    status_t StagefrightRecorder::setupMediaSource(sp<MediaSource> *mediaSource) {
        status_t err = setupCameraSource(&cameraSource);
        *mediaSource = cameraSource;
    }
  • 8. 2. CameraSource setup

    status_t StagefrightRecorder::setupCameraSource(..) {
        *cameraSource = CameraSource::CreateFromCamera(
                mCamera, mCameraProxy, mCameraId, videoSize, mFrameRate,
                mPreviewSurface, useMeta);
        // mCamera points to CameraService::Client.
        // mCameraProxy points to the RecordingProxy object inside Camera.
    }

    CameraSource *CameraSource::CreateFromCamera(…) {
        CameraSource *source = new CameraSource(camera, proxy, cameraId,
                videoSize, frameRate, surface, storeMetaDataInVideoBuffers);
        return source;
    }

    CameraSource::CameraSource(…) : … {
        mInitCheck = init(camera, proxy, cameraId, videoSize, frameRate,
                storeMetaDataInVideoBuffers);
        if (mInitCheck != OK) releaseCamera();
    }

    status_t CameraSource::init(…) {
        err = initWithCameraAccess(camera, proxy, cameraId, videoSize, frameRate,
                storeMetaDataInVideoBuffers);
    }

    status_t CameraSource::initWithCameraAccess(…) {
        if ((err = isCameraAvailable(camera, proxy, cameraId)) != OK)
    }
  • 9. 2. CameraSource setup

    status_t CameraSource::isCameraAvailable(const sp<ICamera>& camera,
            const sp<ICameraRecordingProxy>& proxy, int32_t cameraId) {
        mCamera = Camera::create(camera);   // ----------(1)
        if (mCamera == 0) return -EBUSY;
        mCameraRecordingProxy = proxy;      // ----------(2)
        mCameraFlags |= FLAGS_HOT_CAMERA;
        return OK;
    }

    sp<Camera> Camera::create(const sp<ICamera>& camera) {
        …
        sp<Camera> c = new Camera();
        if (camera->connect(c) == NO_ERROR) {
            c->mStatus = NO_ERROR;
            c->mCamera = camera;            // ----------(4)
            camera->asBinder()->linkToDeath(c);
            return c;
        }
        return 0;
    }

    status_t CameraService::Client::connect(const sp<ICameraClient>& client) {
        mCameraClient = client;             // ----------(3)
    }
  • 10. 2. CameraSource setup
    [Diagram: the references established in (1)-(4): CameraSource::mCamera -> the new
    Camera (1), CameraSource::mCameraRecordingProxy -> the RecordingProxy inside the
    app-side Camera (2), CameraService::Client::mCameraClient -> the new Camera (3),
    and the new Camera's mCamera -> CameraService::Client (4).]

    class Camera : public BnCameraClient, public IBinder::DeathRecipient {
    public:
        …
        class RecordingProxy : public BnCameraRecordingProxy {
        public:
            RecordingProxy(const sp<Camera>& camera);
            virtual status_t startRecording(const sp<ICameraRecordingProxyListener>& listener);
            virtual void stopRecording();
            virtual void releaseRecordingFrame(const sp<IMemory>& mem);
        private:
            sp<Camera> mCamera;
        };
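The RecordingProxy on slide 10 is essentially a thin forwarding object that keeps a strong reference to the app-side Camera. A hedged, self-contained sketch of that idea (using std::shared_ptr in place of sp<> and made-up Fake* names) might look like this; the real proxy additionally registers the listener and reconnects the client before delegating, as slide 11 shows.

    #include <iostream>
    #include <memory>
    #include <utility>

    // Stand-in for the app-side Camera object.
    struct FakeCamera {
        int startRecording() {
            std::cout << "Camera::startRecording()\n";
            return 0;
        }
    };

    // Stand-in for Camera::RecordingProxy: holds a strong pointer to the Camera
    // and forwards recording calls to it.
    class FakeRecordingProxy {
    public:
        explicit FakeRecordingProxy(std::shared_ptr<FakeCamera> camera)
            : mCamera(std::move(camera)) {}
        int startRecording() {
            // The real proxy also sets the proxy listener and reconnects the client
            // before delegating; only the delegation is shown here.
            return mCamera->startRecording();
        }
    private:
        std::shared_ptr<FakeCamera> mCamera;  // keeps the Camera alive while recording
    };

    int main() {
        auto camera = std::make_shared<FakeCamera>();
        FakeRecordingProxy proxy(camera);     // what CameraSource holds as mCameraRecordingProxy
        return proxy.startRecording();
    }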
  • 11. 3. CameraSource start

    status_t StagefrightRecorder::startMPEG4Recording() {
        err = mWriter->start(meta.get());
    }

    status_t MPEG4Writer::start(MetaData *param) {
        err = startTracks(param);
        if (err != OK) {
            return err;
        }
    }

    status_t MPEG4Writer::startTracks(MetaData *params) {
        for (List<Track *>::iterator it = mTracks.begin();
             it != mTracks.end(); ++it) {
            status_t err = (*it)->start(params);
        }
    }

    status_t MPEG4Writer::Track::start(MetaData *params) {
        status_t err = mSource->start(meta.get());
    }

    OMXCodec.cpp
    status_t OMXCodec::start(MetaData *meta) {
        status_t err = mSource->start(params.get());
    }

    status_t CameraSource::start(MetaData *meta) {
        …
        startCameraRecording();
    }

    void CameraSource::startCameraRecording() {
        if (mCameraFlags & FLAGS_HOT_CAMERA) {
            mCamera->unlock();
            mCamera.clear();   // drops the connection to the "new Camera"
        }
        CHECK_EQ(OK, mCameraRecordingProxy->startRecording(new ProxyListener(this)));
    }

    CameraSource::ProxyListener::ProxyListener(const sp<CameraSource>& source) {
        mSource = source;
        // Registers the CameraSource as the proxy listener.
    }

    status_t Camera::RecordingProxy::startRecording(const sp<ICameraRecordingProxyListener>& listener) {
        mCamera->setRecordingProxyListener(listener);
        mCamera->reconnect();
        return mCamera->startRecording();
    }
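The left column of slide 11 is a chain of start() calls that fans out from the writer down to the source. As a rough illustration (minimal stand-in types, not the framework classes), the cascade can be sketched like this:

    #include <iostream>
    #include <memory>
    #include <utility>
    #include <vector>

    // Stand-in for a MediaSource such as CameraSource or AudioSource.
    struct Source {
        virtual ~Source() = default;
        virtual int start() {
            std::cout << "source started\n";
            return 0;
        }
    };

    // Stand-in for OMXCodec, which is itself a MediaSource wrapping another one.
    struct Encoder : Source {
        explicit Encoder(std::shared_ptr<Source> src) : mSource(std::move(src)) {}
        int start() override { return mSource->start(); }  // starts the upstream source
        std::shared_ptr<Source> mSource;
    };

    // Stand-in for MPEG4Writer::Track.
    struct Track {
        explicit Track(std::shared_ptr<Source> enc) : mSource(std::move(enc)) {}
        int start() { return mSource->start(); }
        std::shared_ptr<Source> mSource;
    };

    // Stand-in for MPEG4Writer.
    struct Writer {
        std::vector<Track> mTracks;
        int start() {
            for (auto &t : mTracks)                  // startTracks()
                if (int err = t.start()) return err;
            return 0;
        }
    };

    int main() {
        Writer w;
        w.mTracks.emplace_back(std::make_shared<Encoder>(std::make_shared<Source>()));
        return w.start();
    }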
  • 12. 3. CameraSource start

    status_t Camera::reconnect() {
        sp<ICamera> c = mCamera;
        if (c == 0) return NO_INIT;
        return c->connect(this);
    }

    status_t CameraService::Client::connect(const sp<ICameraClient>& client) {
        mCameraClient = client;
    }

    [Diagram: after startCameraRecording(), CameraSource's mCamera reference and
    CameraService::Client's link to the temporary "new Camera" are dropped (x);
    mCameraClient now points to the application-side Camera (the one wrapped by the
    RecordingProxy), and CameraSource keeps only mCameraRecordingProxy.]
  • 13. 3. CameraSource start

    status_t Camera::startRecording() {
        sp<ICamera> c = mCamera;
        if (c == 0) return NO_INIT;
        return c->startRecording();
    }

    status_t CameraService::Client::startRecording() {
        return startCameraMode(CAMERA_RECORDING_MODE);
    }

    status_t CameraService::Client::startCameraMode(camera_mode mode) {
        case CAMERA_RECORDING_MODE:
            return startRecordingMode();
    }

    status_t CameraService::Client::startRecordingMode() {
        result = mHardware->startRecording();
    }
    // Goes through CameraHardwareInterface.h -> QualcommCamera2.cpp -> QCameraHWI.cpp
    // to QCameraHWI_record.cpp.

    status_t QCameraStream_record::processRecordFrame(void *data) {
        rcb(timeStamp, CAMERA_MSG_VIDEO_FRAME,
            mHalCamCtrl->mRecordingMemory.camera_memory[frame->video.video.idx],
            0, mHalCamCtrl->mCallbackCookie);
        // rcb is dataCallbackTimestamp, registered by mHardware->setCallbacks()
        // in the CameraService::Client constructor (cameraservice.cpp).
    }

    void CameraService::Client::dataCallbackTimestamp(..) {
        client->handleGenericDataTimestamp(timestamp, msgType, dataPtr);
    }

    void CameraService::Client::handleGenericDataTimestamp(..) {
        sp<ICameraClient> c = mCameraClient;
        c->dataCallbackTimestamp(timestamp, msgType, dataPtr);
    }

    Camera.cpp
    void Camera::dataCallbackTimestamp(.. dataPtr) {
        proxylistener->dataCallbackTimestamp(timestamp, msgType, dataPtr);
        // proxylistener points to the ProxyListener in CameraSource.cpp.
    }
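Slide 13 relies on a C-style callback plus an opaque cookie being registered with the HAL (what the slide calls rcb, installed via mHardware->setCallbacks()). A small stand-alone sketch of that registration pattern follows; FakeHal, FakeClient, and kVideoFrameMsg are invented names used only for illustration.

    #include <cstdio>

    // C-style callback type in the spirit of the HAL's data callback.
    typedef void (*data_callback_timestamp)(long long timestampUs, int msgType,
                                            const void *data, void *cookie);

    const int kVideoFrameMsg = 0x400;  // placeholder standing in for CAMERA_MSG_VIDEO_FRAME

    // Stand-in for the camera HAL's record stream.
    struct FakeHal {
        data_callback_timestamp mRcb = nullptr;  // what the slide calls "rcb"
        void *mCookie = nullptr;
        void setCallbacks(data_callback_timestamp cb, void *cookie) {
            mRcb = cb;
            mCookie = cookie;
        }
        void processRecordFrame(const void *frame, long long tsUs) {
            if (mRcb) mRcb(tsUs, kVideoFrameMsg, frame, mCookie);  // push the frame upward
        }
    };

    // Stand-in for CameraService::Client, which receives the frames.
    struct FakeClient {
        int frames = 0;
        static void dataCallbackTimestamp(long long tsUs, int msgType,
                                          const void *data, void *cookie) {
            (void)msgType;
            (void)data;
            auto *self = static_cast<FakeClient *>(cookie);
            self->frames++;
            std::printf("frame at %lld us (total %d)\n", tsUs, self->frames);
        }
    };

    int main() {
        FakeClient client;
        FakeHal hal;
        // Equivalent of registering callbacks in the Client constructor.
        hal.setCallbacks(&FakeClient::dataCallbackTimestamp, &client);
        int dummyFrame = 0;
        hal.processRecordFrame(&dummyFrame, 33000);  // HAL delivers one recorded frame
        return 0;
    }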
  • 14. 3. CameraSource start

    void CameraSource::ProxyListener::dataCallbackTimestamp(
            nsecs_t timestamp, int32_t msgType, const sp<IMemory>& dataPtr) {
        mSource->dataCallbackTimestamp(timestamp / 1000, msgType, dataPtr);
        // mSource points to the CameraSource.
    }

    void CameraSource::dataCallbackTimestamp(int64_t timestampUs, int32_t msgType,
            const sp<IMemory> &data) {
        mFramesReceived.push_back(data);
        mFrameTimes.push_back(timeUs);
        mFrameAvailableCondition.signal();
        // Once data is queued in the mFramesReceived vector, mFrameAvailableCondition
        // is signaled to wake CameraSource::read(), which is waiting on it.
    }

    status_t CameraSource::read(..) {
        mFrameAvailableCondition.waitRelative(mLock,
                mTimeBetweenFrameCaptureUs * 1000LL + CAMERA_SOURCE_TIMEOUT_NS);
        frame = *mFramesReceived.begin();
        *buffer = new MediaBuffer(frame->pointer(), frame->size());
        // The read data is wrapped in a MediaBuffer and handed to OMXCodec.
    }

    OMXCodec.cpp
    bool OMXCodec::drainInputBuffer(BufferInfo *info) {
        err = mSource->read(&srcBuffer);
        // Reads data into srcBuffer, then feeds it to the codec's input (empty) buffer.
        err = mOMX->emptyBuffer(mNode, info->mBuffer, 0, offset, flags, timestampUs);
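The handoff on slide 14 is a classic producer/consumer queue: the camera callback pushes a frame and signals a condition variable, and read() blocks until a frame is available. A minimal sketch using standard C++ primitives (std::mutex/std::condition_variable rather than the framework's Mutex/Condition, and without the timeout handling of the real read()) is:

    #include <condition_variable>
    #include <cstdint>
    #include <deque>
    #include <iostream>
    #include <mutex>
    #include <thread>
    #include <utility>
    #include <vector>

    // Role of mFramesReceived plus mFrameAvailableCondition in CameraSource.
    class FrameQueue {
    public:
        // Producer side: what the camera data callback does.
        void dataCallback(std::vector<uint8_t> frame) {
            std::lock_guard<std::mutex> lock(mLock);
            mFrames.push_back(std::move(frame));
            mFrameAvailable.notify_one();
        }
        // Consumer side: what CameraSource::read() does (minus the timeout handling).
        std::vector<uint8_t> read() {
            std::unique_lock<std::mutex> lock(mLock);
            mFrameAvailable.wait(lock, [this] { return !mFrames.empty(); });
            std::vector<uint8_t> frame = std::move(mFrames.front());
            mFrames.pop_front();
            return frame;
        }
    private:
        std::mutex mLock;
        std::condition_variable mFrameAvailable;
        std::deque<std::vector<uint8_t>> mFrames;
    };

    int main() {
        FrameQueue q;
        // The "camera" thread delivers one 1 KiB frame.
        std::thread camera([&] { q.dataCallback(std::vector<uint8_t>(1024, 0)); });
        // The "encoder" side blocks in read() until the frame arrives,
        // roughly what drainInputBuffer() triggers through mSource->read().
        std::cout << "got frame of " << q.read().size() << " bytes\n";
        camera.join();
        return 0;
    }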
  • 15. 3. CameraSource start

    // Once the input buffer handed to emptyBuffer() has been consumed by the encoder,
    // omx_message::EMPTY_BUFFER_DONE is delivered.
    void OMXCodec::on_message(const omx_message &msg) {
        case omx_message::EMPTY_BUFFER_DONE:
            drainInputBuffer(&buffers->editItemAt(i));
            // read() again and refill the input buffer.
    }

    // When the data from the input buffer has been encoded into a fill (output) buffer,
    // omx_message::FILL_BUFFER_DONE is delivered.
    void OMXCodec::on_message(const omx_message &msg) {
        case omx_message::FILL_BUFFER_DONE:
            mFilledBuffers.push_back(i);
            mBufferFilled.signal();
            // Releases waitForBufferFilled_l() in OMXCodec::read(), which is waiting
            // on mBufferFilled.
    }

    status_t OMXCodec::read(MediaBuffer **buffer, const ReadOptions *options) {
        waitForBufferFilled_l();
        size_t index = *mFilledBuffers.begin();
        BufferInfo *info = &mPortBuffers[kPortIndexOutput].editItemAt(index);
        *buffer = info->mMediaBuffer;
        // Hands the buffer on to MPEG4Writer's read.
    }

    status_t MPEG4Writer::Track::threadEntry() {
        while (!mDone && (err = mSource->read(&buffer)) == OK) {
  • 16. 3. CameraSource start

            MediaBuffer *copy = new MediaBuffer(buffer->range_length());
            memcpy(copy->data(), (uint8_t *)buffer->data() + buffer->range_offset(),
                   buffer->range_length());
            mChunkSamples.push_back(copy);
            bufferChunk(timestampUs);
    }

    void MPEG4Writer::Track::bufferChunk(int64_t timestampUs) {
        Chunk chunk(this, timestampUs, mChunkSamples);
        mOwner->bufferChunk(chunk);
        mChunkSamples.clear();
    }

    void MPEG4Writer::bufferChunk(const Chunk& chunk) {
        for (List<ChunkInfo>::iterator it = mChunkInfos.begin();
             it != mChunkInfos.end(); ++it) {
            if (chunk.mTrack == it->mTrack) {
                // Found owner
                it->mChunks.push_back(chunk);
                mChunkReadyCondition.signal();
                return;
            }
        }
    }

    void MPEG4Writer::threadFunc() {
        while (!mDone && !(chunkFound = findChunkToWrite(&chunk))) {
            mChunkReadyCondition.wait(mLock);
        }
        writeChunkToFile(&chunk);
    }
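Slide 16 shows the writer-thread pattern in MPEG4Writer: track threads buffer chunks and signal mChunkReadyCondition, and a single writer thread wakes up and writes each chunk to the file. The following is a simplified, self-contained sketch of that pattern with standard C++ threading; it is not the real MPEG4Writer, which keeps per-track chunk lists and interleaves them.

    #include <condition_variable>
    #include <cstdio>
    #include <deque>
    #include <mutex>
    #include <string>
    #include <thread>
    #include <utility>

    struct ChunkWriter {
        std::mutex mLock;
        std::condition_variable mChunkReady;     // role of mChunkReadyCondition
        std::deque<std::string> mChunks;         // role of ChunkInfo::mChunks
        bool mDone = false;

        // Called from a track thread (Track::threadEntry in the real code).
        void bufferChunk(std::string chunk) {
            std::lock_guard<std::mutex> lock(mLock);
            mChunks.push_back(std::move(chunk));
            mChunkReady.notify_one();
        }
        // The MPEG4Writer-style writer loop (threadFunc in the real code).
        void threadFunc() {
            std::unique_lock<std::mutex> lock(mLock);
            while (true) {
                mChunkReady.wait(lock, [this] { return mDone || !mChunks.empty(); });
                if (mChunks.empty()) return;      // done and drained
                std::string chunk = std::move(mChunks.front());
                mChunks.pop_front();
                std::printf("writing chunk of %zu bytes\n", chunk.size());  // writeChunkToFile()
            }
        }
        void stop() {
            std::lock_guard<std::mutex> lock(mLock);
            mDone = true;
            mChunkReady.notify_one();
        }
    };

    int main() {
        ChunkWriter w;
        std::thread writer(&ChunkWriter::threadFunc, &w);
        w.bufferChunk(std::string(4096, 'v'));   // a video chunk from one track
        w.stop();
        writer.join();
        return 0;
    }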
  • 17. 4. AudioSource setup

    status_t StagefrightRecorder::setupMPEG4Recording(..) {
        err = setupAudioEncoder(writer);
    }

    status_t StagefrightRecorder::setupAudioEncoder(..) {
        sp<MediaSource> audioEncoder = createAudioSource();
        if (audioEncoder == NULL) {
            return UNKNOWN_ERROR;
        }
        writer->addSource(audioEncoder);
    }

    sp<MediaSource> StagefrightRecorder::createAudioSource() {
        sp<AudioSource> audioSource = new AudioSource(
                mAudioSource, mSampleRate, mAudioChannels);
        sp<MediaSource> audioEncoder = OMXCodec::Create(client.interface(), encMeta,
                true /* createEncoder */, audioSource);
    }

    AudioSource::AudioSource(…) {
        mRecord = new AudioRecord(..., AudioRecordCallbackFunction, ...);
        // Registers the callback for audio data (AudioRecordCallbackFunction).
    }

    static void AudioRecordCallbackFunction(int event, void *user, void *info) {
        switch (event) {
            case AudioRecord::EVENT_MORE_DATA: {
                source->dataCallbackTimestamp(*((AudioRecord::Buffer *) info),
                        systemTime() / 1000);
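Slide 17 hinges on the AudioRecord-style callback contract: the recorder is constructed with a callback function and a user pointer, and invokes the callback with an EVENT_MORE_DATA-style event whenever a buffer of samples is ready. A hedged sketch of that contract with invented Fake* types follows; in the real code the callback turns the buffer into a MediaBuffer, as slide 19 shows.

    #include <cstdint>
    #include <cstdio>
    #include <vector>

    enum Event { EVENT_MORE_DATA };                    // mirrors AudioRecord::EVENT_MORE_DATA

    struct Buffer {                                    // simplified AudioRecord::Buffer stand-in
        const int16_t *i16;
        size_t size;                                   // size in bytes
    };

    typedef void (*audio_callback)(int event, void *user, void *info);

    // Stand-in for AudioRecord: takes the callback and user pointer at construction.
    struct FakeAudioRecord {
        audio_callback mCbf;
        void *mUserData;
        FakeAudioRecord(audio_callback cbf, void *user) : mCbf(cbf), mUserData(user) {}
        void deliver(const std::vector<int16_t> &pcm) {
            Buffer b{pcm.data(), pcm.size() * sizeof(int16_t)};
            mCbf(EVENT_MORE_DATA, mUserData, &b);      // what processAudioBuffer() does
        }
    };

    // Stand-in for AudioSource on the receiving end.
    struct FakeAudioSource {
        size_t bytesReceived = 0;
        static void callback(int event, void *user, void *info) {
            if (event != EVENT_MORE_DATA) return;
            auto *self = static_cast<FakeAudioSource *>(user);
            auto *buf = static_cast<Buffer *>(info);
            self->bytesReceived += buf->size;          // would become a MediaBuffer in the real code
        }
    };

    int main() {
        FakeAudioSource source;
        FakeAudioRecord record(&FakeAudioSource::callback, &source);  // callback given at construction
        record.deliver(std::vector<int16_t>(480, 0));  // 10 ms of mono 48 kHz silence
        std::printf("received %zu bytes\n", source.bytesReceived);
        return 0;
    }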
  • 18. 4. AudioSource start

    status_t AudioSource::start(MetaData *params) {
        status_t err = mRecord->start();
    }

    AudioRecord.cpp
    status_t AudioRecord::start() {
        t->run("ClientRecordThread", ANDROID_PRIORITY_AUDIO);
        // t points to the ClientRecordThread.
    }

    bool AudioRecord::ClientRecordThread::threadLoop() {
        return mReceiver.processAudioBuffer(this);
    }

    bool AudioRecord::processAudioBuffer(const sp<ClientRecordThread>& thread) {
        do {
            status_t err = obtainBuffer(&audioBuffer, 1);
            mCbf(EVENT_MORE_DATA, mUserData, &audioBuffer);
            // mCbf points to AudioRecordCallbackFunction in AudioSource.cpp.
        }
    }

    static void AudioRecordCallbackFunction(int event, void *user, void *info) {
        switch (event) {
            case AudioRecord::EVENT_MORE_DATA: {
                source->dataCallbackTimestamp(*((AudioRecord::Buffer *) info),
                        systemTime() / 1000);
    }
  • 19. 4. AudioSource start

    status_t AudioSource::dataCallbackTimestamp(const AudioRecord::Buffer& audioBuffer,
            int64_t timeUs) {
        MediaBuffer *buffer = new MediaBuffer(bufferSize);
        memcpy((uint8_t *) buffer->data() + numLostBytes, audioBuffer.i16, audioBuffer.size);
        mBuffersReceived.push_back(buffer);
        mFrameAvailableCondition.signal();
    }

    status_t AudioSource::read(MediaBuffer **out, const ReadOptions *options) {
        while (mStarted && mBuffersReceived.empty()) {
            mFrameAvailableCondition.wait(mLock);
        }
        MediaBuffer *buffer = *mBuffersReceived.begin();
        *out = buffer;
    }

    From here on, the flow is the same as for CameraSource
    (OMXCodec::drainInputBuffer(BufferInfo *info) on slide 14).