Android source code Camera2 preview process analysis four

Android source code Camera2 preview process analysis four

Stream startup was covered in "Android Source Camera2 Preview Process Analysis II": it is performed by calling the QCamera3Channel start() method. For the HAL_PIXEL_FORMAT_YCbCr_420_888 format, the QCamera3Channel implementation class that was created is QCamera3RegularChannel, so the call dispatches there.

device/moto/shamu/camera/QCamera2/HAL3/QCamera3Channel.cpp

/**
 * Start the regular (preview/video) channel.
 *
 * Only delegates to the base-class QCamera3Channel::start() when at least
 * one stream buffer has been registered (mMemory.getCnt() > 0); otherwise
 * it is a no-op that reports success.
 *
 * @return NO_ERROR on success (or when there is nothing to start),
 *         otherwise the error from QCamera3Channel::start().
 */
int32_t QCamera3RegularChannel::start()
{
    ATRACE_CALL();
    int32_t rc = NO_ERROR;

    // Only start when buffers have actually been registered for this channel.
    if (0 < mMemory.getCnt()) {
        rc = QCamera3Channel::start();
    }
    return rc;
}
  1. Start the stream, the stream type is QCamera3Stream, which is added in addStream;
  2. Start the channel.

device/moto/shamu/camera/QCamera2/HAL3/QCamera3Channel.cpp

/**
 * Start all streams belonging to this channel, then start the channel
 * itself through the mm-camera interface.
 *
 * Behavior:
 *  - more than one stream: bundling is not supported, only logs an error;
 *  - zero streams: returns NO_INIT;
 *  - already active: returns NO_ERROR without doing anything;
 *  - on start_channel failure, every previously started stream is stopped.
 *
 * @return NO_ERROR on success, NO_INIT when no stream was added,
 *         or the error returned by m_camOps->start_channel().
 */
int32_t QCamera3Channel::start()
{
    ATRACE_CALL();
    int32_t rc = NO_ERROR;

    if (m_numStreams > 1) {
        ALOGE("%s: bundle not supported", __func__);
    } else if (m_numStreams == 0) {
        return NO_INIT;
    }

    if (m_bIsActive) {
        ALOGD("%s: Attempt to start active channel", __func__);
        return rc;
    }

    // Start each QCamera3Stream that was added via addStream().
    for (int i = 0; i < m_numStreams; i++) {
        if (mStreams[i] != NULL) {
            mStreams[i]->start();
        }
    }
    rc = m_camOps->start_channel(m_camHandle, m_handle);

    if (rc != NO_ERROR) {
        // Roll back: stop the streams we just started.
        for (int i = 0; i < m_numStreams; i++) {
            if (mStreams[i] != NULL) {
                mStreams[i]->stop();
            }
        }
    } else {
        m_bIsActive = true;
    }
    return rc;
}

Start streaming. The main stream thread will be started to handle stream-related operations.

  1. Initialize QCameraQueue;
  2. Start the stream thread and call the dataProcRoutine routine.

device/moto/shamu/camera/QCamera2/HAL3/QCamera3Stream.cpp

/**
 * Start the stream: initialize the data queue (QCameraQueue) and launch
 * the stream processing thread running dataProcRoutine().
 *
 * @return 0 on success, otherwise the error from QCameraCmdThread::launch().
 */
int32_t QCamera3Stream::start()
{
    int32_t rc = 0;

    mDataQ.init();
    rc = mProcTh.launch(dataProcRoutine, this);
    return rc;
}

Call pthread_create to create and start running threads.

device/moto/shamu/camera/QCamera2/util/QCameraCmdThread.cpp

/**
 * Create and start the command thread.
 *
 * @param start_routine  thread entry function
 * @param user_data      opaque pointer passed to start_routine
 * @return always NO_ERROR
 *
 * NOTE(review): the return value of pthread_create() is ignored here, so a
 * thread-creation failure is silently reported as success — behavior kept
 * as in the original vendor code.
 */
int32_t QCameraCmdThread::launch(void *(*start_routine)(void *),
                                 void *user_data)
{
    /* launch the thread */
    pthread_create(&cmd_pid, NULL, start_routine, user_data);
    return NO_ERROR;
}

This is the function that processes data on the main stream thread. It waits for notifications of new commands in the cmd queue; when the camera_cmd_type_t is CAMERA_CMD_TYPE_DO_NEXT_JOB, it dequeues a frame from the QCameraQueue and invokes the function pointed to by mDataCB.

device/moto/shamu/camera/QCamera2/HAL3/QCamera3Stream.cpp

void * QCamera3Stream::dataProcRoutine ( void *data) { int running = 1 ; int ret; QCamera3Stream *pme = (QCamera3Stream *)data; QCameraCmdThread *cmdThread = &pme->mProcTh; cmdThread-> setName ( "cam_stream_proc" ); CDBG ( "%s: E" , __func__); do { do { ret = cam_sem_wait (&cmdThread->cmd_sem); if (ret != 0 && errno != EINVAL) { ALOGE ( "%s: cam_sem_wait error (%s)" , __func__, strerror (errno)); return NULL ; } } while (ret != 0 ); //Receive notification about the availability of new cmd in the cmd queue camera_cmd_type_t cmd = cmdThread-> getCmd (); switch (cmd) { case CAMERA_CMD_TYPE_DO_NEXT_JOB: { CDBG ( "%s: Do next job" , __func__); mm_camera_super_buf_t *frame = ( mm_camera_super_buf_t *)pme->mDataQ. dequeue (); if ( NULL != frame) { if (pme->mDataCB != NULL ) { pme-> mDataCB (frame, pme, pme->mUserData); } else { //There is no data cb routine, return buf here pme-> bufDone (frame->bufs[ 0 ]->buf_idx); } } } break ; case CAMERA_CMD_TYPE_EXIT: CDBG_HIGH ( "%s: Exit" , __func__); /* refresh data buf queue*/ pme->mDataQ. flush (); running = 0 ; break ; default : break ; } } while (running); CDBG ( "%s: X" , __func__); return NULL ; } Copy code

The callback routine of the stream.

device/moto/shamu/camera/QCamera2/HAL3/QCamera3Channel.cpp

/**
 * Static stream-callback trampoline: recovers the QCamera3Channel instance
 * from the opaque userdata and forwards to the virtual member overload.
 *
 * @param super_frame  super buffer delivered by the stream
 * @param stream       originating QCamera3Stream
 * @param userdata     the QCamera3Channel instance registered at stream setup
 */
void QCamera3Channel::streamCbRoutine(mm_camera_super_buf_t *super_frame,
        QCamera3Stream *stream, void *userdata)
{
    QCamera3Channel *channel = (QCamera3Channel *)userdata;
    if (channel == NULL) {
        ALOGE("%s: invalid channel pointer", __func__);
        return;
    }
    channel->streamCbRoutine(super_frame, stream);
}
  1. Parameter verification;
  2. Fill the camera3_stream_buffer_t structure and prepare to call back to the framework;
  3. Call the function pointed to by mChannelCB, which actually points to QCamera3HardwareInterface::captureResultCb;

device/moto/shamu/camera/QCamera2/HAL3/QCamera3Channel.cpp

/**
 * Per-frame callback for the regular channel.
 *
 * Steps:
 *  1. validate stream/super_frame arguments (exactly one buffer expected);
 *  2. fill a camera3_stream_buffer_t to hand back to the framework;
 *  3. release and unregister the stream buffer, then invoke mChannelCB
 *     (which points to QCamera3HardwareInterface::captureResultCb).
 *
 * Ownership: super_frame is freed here in all non-error exits.
 */
void QCamera3RegularChannel::streamCbRoutine(
        mm_camera_super_buf_t *super_frame, QCamera3Stream *stream)
{
    ATRACE_CALL();
    //FIXME Q Buf back in case of error?
    uint8_t frameIndex;
    buffer_handle_t *resultBuffer;
    int32_t resultFrameNumber;
    camera3_stream_buffer_t result;

    if (NULL == stream) {
        ALOGE("%s: Invalid stream", __func__);
        return;
    }

    if (!super_frame) {
        ALOGE("%s: Invalid Super buffer", __func__);
        return;
    }

    if (super_frame->num_bufs != 1) {
        ALOGE("%s: Multiple streams are not supported", __func__);
        return;
    }
    if (super_frame->bufs[0] == NULL) {
        ALOGE("%s: Error, Super buffer frame does not contain valid buffer",
              __func__);
        return;
    }

    frameIndex = (uint8_t)super_frame->bufs[0]->buf_idx;
    if (frameIndex >= mNumBufs) {
        ALOGE("%s: Error, Invalid index for buffer", __func__);
        stream->bufDone(frameIndex);
        return;
    }

    // Publish the framework callback using the following data.
    resultBuffer = (buffer_handle_t *)mMemory.getBufferHandle(frameIndex);
    resultFrameNumber = mMemory.getFrameNumber(frameIndex);
    result.stream = mCamera3Stream;
    result.buffer = resultBuffer;
    result.status = CAMERA3_BUFFER_STATUS_OK;
    result.acquire_fence = -1;
    result.release_fence = -1;

    int32_t rc = stream->bufRelease(frameIndex);
    if (NO_ERROR != rc) {
        ALOGE("%s: Error %d releasing stream buffer %d",
              __func__, rc, frameIndex);
    }

    rc = mMemory.unregisterBuffer(frameIndex);
    if (NO_ERROR != rc) {
        ALOGE("%s: Error %d unregistering stream buffer %d",
              __func__, rc, frameIndex);
    }

    if (0 <= resultFrameNumber) {
        mChannelCB(NULL, &result, (uint32_t)resultFrameNumber, mUserData);
    } else {
        ALOGE("%s: Bad frame number", __func__);
    }

    free(super_frame);
    return;
}

Callback handlers for all channels (stream and metadata)

device/moto/shamu/camera/QCamera2/HAL3/QCamera3HWI.cpp

/**
 * Static capture-result trampoline for all channels (stream and metadata):
 * recovers the QCamera3HardwareInterface instance from userdata and forwards
 * to the member overload.
 *
 * @param metadata      metadata super buffer, may be NULL
 * @param buffer        image buffer, may be NULL when metadata is present
 * @param frame_number  framework frame number for this result
 * @param userdata      the QCamera3HardwareInterface instance
 */
void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata,
        camera3_stream_buffer_t *buffer, uint32_t frame_number, void *userdata)
{
    QCamera3HardwareInterface *hw = (QCamera3HardwareInterface *)userdata;
    if (hw == NULL) {
        ALOGE("%s: Invalid hw %p", __func__, hw);
        return;
    }
    hw->captureResultCb(metadata, buffer, frame_number);
    return;
}

The focus here is the handleBufferWithLock function, which handles the callback of the image buffer holding the mMutex lock.

device/moto/shamu/camera/QCamera2/HAL3/QCamera3HWI.cpp

/**
 * Capture-result handler (member). Runs under mMutex.
 *
 * If a cached loop-back (flush) result is pending, its shutter notification
 * and capture result are delivered to the framework first. Then the result
 * is routed to handleMetadataWithLock() or handleBufferWithLock() depending
 * on whether metadata or an image buffer was received.
 *
 * Fix vs. the pasted article text: `ify_msg` was an HTML-entity corruption
 * of `&notify_msg`.
 */
void QCamera3HardwareInterface::captureResultCb(
        mm_camera_super_buf_t *metadata_buf,
        camera3_stream_buffer_t *buffer, uint32_t frame_number)
{
    pthread_mutex_lock(&mMutex);

    /* Assume flush() is called before any reprocessing. Send
     * notification and result immediately upon receipt of any callback. */
    if (mLoopBackResult) {
        /* Send notification */
        camera3_notify_msg_t notify_msg;
        notify_msg.type = CAMERA3_MSG_SHUTTER;
        notify_msg.message.shutter.frame_number = mLoopBackResult->frame_number;
        notify_msg.message.shutter.timestamp = mLoopBackTimestamp;
        mCallbackOps->notify(mCallbackOps, &notify_msg);

        /* Send capture result */
        mCallbackOps->process_capture_result(mCallbackOps, mLoopBackResult);
        free_camera_metadata((camera_metadata_t *)mLoopBackResult->result);
        free(mLoopBackResult);
        mLoopBackResult = NULL;
    }

    if (metadata_buf)
        handleMetadataWithLock(metadata_buf);
    else
        handleBufferWithLock(buffer, frame_number);

    pthread_mutex_unlock(&mMutex);
}

If the frame number is not in the pending list, call process_capture_result directly.

device/moto/shamu/camera/QCamera2/HAL3/QCamera3HWI.cpp

/**
 * Handle an image-buffer callback while holding mMutex.
 *
 * If the frame number is not in the pending-requests list, the buffer is
 * sent directly to the framework via process_capture_result and the pending
 * buffer map is updated. Otherwise the buffer is either used to complete a
 * reprocess (input-buffer) request — notifying now or caching the result if
 * earlier frames are still pending — or cached on the matching
 * RequestedBufferInfo entry until its metadata arrives.
 *
 * Fixes vs. the pasted article text: `memset(ify_msg, ...)` and
 * `notify(mCallbackOps, ify_msg)` were HTML-entity corruptions of
 * `&notify_msg`.
 */
void QCamera3HardwareInterface::handleBufferWithLock(
        camera3_stream_buffer_t *buffer, uint32_t frame_number)
{
    ATRACE_CALL();
    // If the frame number doesn't belong to the pending requests list,
    // directly send the buffer to the framework and update the pending
    // buffers map. Otherwise, book-keep the buffer.
    List<PendingRequestInfo>::iterator i = mPendingRequestsList.begin();
    while (i != mPendingRequestsList.end() && i->frame_number != frame_number) {
        i++;
    }
    if (i == mPendingRequestsList.end()) {
        // Verify all pending requests' frame_numbers are greater.
        for (List<PendingRequestInfo>::iterator j = mPendingRequestsList.begin();
                j != mPendingRequestsList.end(); j++) {
            if (j->frame_number < frame_number) {
                ALOGE("%s: Error: pending frame number %d is smaller than %d",
                      __func__, j->frame_number, frame_number);
            }
        }
        camera3_capture_result_t result;
        memset(&result, 0, sizeof(camera3_capture_result_t));
        result.result = NULL;
        result.frame_number = frame_number;
        result.num_output_buffers = 1;
        result.partial_result = 0;
        // Mark the buffer as errored if this frame was flagged for drop.
        for (List<PendingFrameDropInfo>::iterator m = mPendingFrameDropList.begin();
                m != mPendingFrameDropList.end(); m++) {
            QCamera3Channel *channel = (QCamera3Channel *)buffer->stream->priv;
            uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
            if ((m->stream_ID == streamID) && (m->frame_number == frame_number)) {
                buffer->status = CAMERA3_BUFFER_STATUS_ERROR;
                CDBG("%s: Stream STATUS_ERROR frame_number=%d, streamID=%d",
                     __func__, frame_number, streamID);
                m = mPendingFrameDropList.erase(m);
                break;
            }
        }
        result.output_buffers = buffer;
        CDBG("%s: result frame_number = %d, buffer = %p",
             __func__, frame_number, buffer->buffer);

        // Remove this buffer from the pending-buffer bookkeeping.
        for (List<PendingBufferInfo>::iterator k =
                mPendingBuffersMap.mPendingBufferList.begin();
                k != mPendingBuffersMap.mPendingBufferList.end(); k++) {
            if (k->buffer == buffer->buffer) {
                CDBG("%s: Found Frame buffer, take it out from list",
                     __func__);
                mPendingBuffersMap.num_buffers--;
                k = mPendingBuffersMap.mPendingBufferList.erase(k);
                break;
            }
        }
        CDBG("%s: mPendingBuffersMap.num_buffers = %d",
             __func__, mPendingBuffersMap.num_buffers);

        mCallbackOps->process_capture_result(mCallbackOps, &result);
    } else {
        if (i->input_buffer) {
            // Reprocess request: build the shutter notification first.
            CameraMetadata settings;
            camera3_notify_msg_t notify_msg;
            memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
            nsecs_t capture_time = systemTime(CLOCK_MONOTONIC);
            if (i->settings) {
                settings = i->settings;
                if (settings.exists(ANDROID_SENSOR_TIMESTAMP)) {
                    capture_time =
                        settings.find(ANDROID_SENSOR_TIMESTAMP).data.i64[0];
                } else {
                    ALOGE("%s: No timestamp in input settings! Using current one.",
                          __func__);
                }
            } else {
                ALOGE("%s: Input settings missing!", __func__);
            }

            notify_msg.type = CAMERA3_MSG_SHUTTER;
            notify_msg.message.shutter.frame_number = frame_number;
            notify_msg.message.shutter.timestamp = capture_time;

            // Honor the input buffer's release fence before reuse.
            if (i->input_buffer->release_fence != -1) {
                int32_t rc = sync_wait(i->input_buffer->release_fence,
                                       TIMEOUT_NEVER);
                close(i->input_buffer->release_fence);
                if (rc != OK) {
                    ALOGE("%s: input buffer sync wait failed %d", __func__, rc);
                }
            }

            for (List<PendingBufferInfo>::iterator k =
                    mPendingBuffersMap.mPendingBufferList.begin();
                    k != mPendingBuffersMap.mPendingBufferList.end(); k++) {
                if (k->buffer == buffer->buffer) {
                    CDBG("%s: Found Frame buffer, take it out from list",
                         __func__);
                    mPendingBuffersMap.num_buffers--;
                    k = mPendingBuffersMap.mPendingBufferList.erase(k);
                    break;
                }
            }
            CDBG("%s: mPendingBuffersMap.num_buffers = %d",
                 __func__, mPendingBuffersMap.num_buffers);

            // Only notify now if no earlier frame is still pending.
            bool notifyNow = true;
            for (List<PendingRequestInfo>::iterator j = mPendingRequestsList.begin();
                    j != mPendingRequestsList.end(); j++) {
                if (j->frame_number < frame_number) {
                    notifyNow = false;
                    break;
                }
            }

            if (notifyNow) {
                camera3_capture_result result;
                memset(&result, 0, sizeof(camera3_capture_result));
                result.frame_number = frame_number;
                result.result = i->settings;
                result.input_buffer = i->input_buffer;
                result.num_output_buffers = 1;
                result.output_buffers = buffer;
                result.partial_result = PARTIAL_RESULT_COUNT;

                mCallbackOps->notify(mCallbackOps, &notify_msg);
                mCallbackOps->process_capture_result(mCallbackOps, &result);
                CDBG("%s: Notify reprocess now %d!", __func__, frame_number);
                i = mPendingRequestsList.erase(i);
                mPendingRequest--;
            } else {
                // Cache the reprocess result for later delivery.
                PendingReprocessResult pendingResult;
                memset(&pendingResult, 0, sizeof(PendingReprocessResult));
                pendingResult.notify_msg = notify_msg;
                pendingResult.buffer = *buffer;
                pendingResult.frame_number = frame_number;
                mPendingReprocessResultList.push_back(pendingResult);
                CDBG("%s: Cache reprocess result %d!", __func__, frame_number);
            }
        } else {
            // Normal request still pending: cache the buffer on its
            // RequestedBufferInfo entry until metadata arrives.
            for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
                    j != i->buffers.end(); j++) {
                if (j->stream == buffer->stream) {
                    if (j->buffer != NULL) {
                        ALOGE("%s: Error: buffer is already set", __func__);
                    } else {
                        j->buffer = (camera3_stream_buffer_t *)malloc(
                                sizeof(camera3_stream_buffer_t));
                        *(j->buffer) = *buffer;
                        CDBG("%s: cache buffer %p at result frame_number %d",
                             __func__, buffer, frame_number);
                    }
                }
            }
        }
    }
}

When the camera device is turned on, camera3_callback_ops::process_capture_result is assigned, and the above function call actually calls the sProcessCaptureResult function.

frameworks/av/services/camera/libcameraservice/device3/Camera3Device.cpp

:: Camera3Device Camera3Device ( int ID): the mId (ID), mIsConstrainedHighSpeedConfiguration ( to false ), mHal3Device ( NULL ), mStatus (STATUS_UNINITIALIZED), mStatusWaiters ( 0 ), mUsePartialResult ( to false ), mNumPartialResults ( . 1 ), mNextResultFrameNumber ( 0 ), mNextReprocessResultFrameNumber ( 0 ), mNextShutterFrameNumber ( 0 ), mNextReprocessShutterFrameNumber ( 0 ), mListener ( NULL ) { ATRACE_CALL (); camera3_callback_ops::notify = &sNotify; camera3_callback_ops::process_capture_result = &sProcessCaptureResult; ALOGV ( "%s: Created device for camera %d" , __FUNCTION__, id); } Copy code

Static callback forwarding method from HAL to instance.

frameworks/av/services/camera/libcameraservice/device3/Camera3Device.cpp

/**
 * Static callback forwarding method from HAL to Camera3Device instance.
 *
 * The camera3_callback_ops pointer handed back by the HAL is the base of
 * the Camera3Device object, so a downcast recovers the instance.
 */
void Camera3Device::sProcessCaptureResult(const camera3_callback_ops *cb,
        const camera3_capture_result *result)
{
    Camera3Device *d =
            const_cast<Camera3Device*>(static_cast<const Camera3Device*>(cb));
    d->processCaptureResult(result);
}

The callback method of the camera HAL device. Focus on the analysis of the returnOutputBuffers(...) function.

frameworks/av/services/camera/libcameraservice/device3/Camera3Device.cpp

void Camera3Device::processCaptureResult ( const camera3_capture_result *result) { ATRACE_CALL (); status_t res; uint32_t frameNumber = result->frame_number; if (result->result == NULL && result->num_output_buffers == 0 && result->input_buffer == NULL ) { SET_ERR ( "No result data provided by HAL for frame %d" , frameNumber); return ; } //For HAL3.2 or higher, if HAL does not support partial, //when this result contains metadata, partial_result must always be set to 1. if (!mUsePartialResult && mDeviceVersion >= CAMERA_DEVICE_API_VERSION_3_2 && result->result != NULL && result->partial_result != 1 ) { SET_ERR ( "Result is malformed for frame %d: partial_result %u must be 1" "if partial result is not supported" , frameNumber, result->partial_result); return ; } bool isPartialResult = false ; CameraMetadata collectedPartialResult; CaptureResultExtras resultExtras; bool hasInputBufferInRequest = false ; //Get the shutter timestamp and resultExtras from the list of ongoing requests, //and add them to the shutter notification in this frame. If the shutter time stamp has not been received, //attach the output buffer to the request in progress, and return them when the shutter time stamp arrives. //If all result data and shutter timestamp have been received, update the status in progress and delete the entry in progress. nsecs_t shutterTimestamp = 0 ; { :: Autolock mutex L (mInFlightLock) ; an ssize_t . IDX = mInFlightMap indexOfKey (for frameNumber); IF (IDX == NAME_NOT_FOUND) { the SET_ERR ( "Unknown Number for Frame Capture Result:% D" , frameNumber); return ; } InFlightRequest &request = mInFlightMap. 
editValueAt (idx); ALOGVV ( "%s: got InFlightRequest requestId = %" PRId32 ", frameNumber = %" PRId64 ", burstId = %" PRId32 ", partialResultCount = %d" , __FUNCTION__, request.resultExtras.requestId, request.resultExtras.frameNumber, request.resultExtras.burstId, result->partial_result); //If the partial count is not 0 (only for buffer), always update it to the latest number. //When the framework aggregates adjacent partial results into one, the latest partial count will be used. if (result->partial_result != 0 ) request.resultExtras.partialResultCount = result->partial_result; //Check if this result contains only partial metadata if (mUsePartialResult && result->result != NULL ) { if (mDeviceVersion >= CAMERA_DEVICE_API_VERSION_3_2) { if (result->partial_result> mNumPartialResults || result->partial_result < 1 ) { SET_ERR ( "Result is malformed for frame %d: partial_result %u must be in" "the range of [1, %d] when metadata is included in the result" , frameNumber, result->partial_result, mNumPartialResults); return ; } isPartialResult = (result->partial_result <mNumPartialResults); if (isPartialResult) { request.partialResult.collectedResult. append (result->result); } } else { camera_metadata_ro_entry_t partialResultEntry; res = find_camera_metadata_ro_entry (result->result, ANDROID_QUIRKS_PARTIAL_RESULT, &partialResultEntry); if (res != NAME_NOT_FOUND && partialResultEntry.count> 0 && partialResultEntry.data.u8[ 0 ] == ANDROID_QUIRKS_PARTIAL_RESULT_PARTIAL) { //A partial result. Flag this as such, and collect this //set of metadata into the in-flight entry. isPartialResult = true ; request.partialResult.collectedResult. append ( result->result); request.partialResult.collectedResult. 
erase ( ANDROID_QUIRKS_PARTIAL_RESULT); } } if (isPartialResult) { //Fire off a 3A-only result if possible if (!request.partialResult.haveSent3A) { request.partialResult.haveSent3A = processPartial3AResult (frameNumber, request.partialResult.collectedResult, request.resultExtras); } } } shutterTimestamp = request.shutterTimestamp; hasInputBufferInRequest = request.hasInputBuffer; //Did we get the (final) result metadata of this capture? if (result->result != NULL && !isPartialResult) { if (request.haveResultMetadata) { SET_ERR ( "Called multiple times with metadata for frame %d" , frameNumber); return ; } if (mUsePartialResult && !request.partialResult.collectedResult. isEmpty ()) { collectedPartialResult. acquire ( request.partialResult.collectedResult); } request.haveResultMetadata = true ; } uint32_t numBuffersReturned = result->num_output_buffers; if (result->input_buffer != NULL ) { if (hasInputBufferInRequest) { numBuffersReturned += 1 ; } else { ALOGW ( "%s: Input buffer should be NULL if there is no input" "buffer sent in the request" , __FUNCTION__); } } request.numBuffersLeft -= numBuffersReturned; if (request.numBuffersLeft < 0 ) { SET_ERR ( "Too many buffers returned for frame %d" , frameNumber); return ; } camera_metadata_ro_entry_t entry; res = find_camera_metadata_ro_entry (result->result, ANDROID_SENSOR_TIMESTAMP, &entry); if (res == OK && entry.count == 1 ) { request.sensorTimestamp = entry.data.i64[ 0 ]; } //If the shutter event has not been received, the output buffer is attached to the request being processed. //Otherwise, return the output buffer to the stream. if (shutterTimestamp == 0 ) { request.pendingOutputBuffers. 
appendArray (result->output_buffers, result->num_output_buffers); } else { returnOutputBuffers (result->output_buffers, result->num_output_buffers, shutterTimestamp); } if (result->result != NULL && !isPartialResult) { if (shutterTimestamp == 0 ) { request.pendingMetadata = result->result; request.partialResult.collectedResult = collectedPartialResult; } else { CameraMetadata metadata; metadata = result->result; sendCaptureResult (metadata, request.resultExtras, collectedPartialResult, frameNumber, hasInputBufferInRequest, request.aeTriggerCancelOverride); } } removeInFlightRequestIfReadyLocked (idx); } //scope for mInFlightLock if (result->input_buffer != NULL ) { if (hasInputBufferInRequest) { Camera3Stream *stream = Camera3Stream:: cast (result->input_buffer->stream); res = stream-> returnInputBuffer (*(result->input_buffer)); //Note: stream may be deallocated at this point, if this buffer was the //last reference to it. if (res != OK) { ALOGE ( "%s: RequestThread: Can't return input buffer for frame %d to" "its stream:%s (%d)" , __FUNCTION__, frameNumber, strerror (-res), res); } } else { ALOGW ( "%s: Input buffer should be NULL if there is no input" "buffer sent in the request, skipping input buffer return." , __FUNCTION__); } } } Copy code

First obtain the Camera3Stream object, and then call its returnBuffer method.

frameworks/av/services/camera/libcameraservice/device3/Camera3Device.cpp

void Camera3Device::returnOutputBuffers ( const camera3_stream_buffer_t *outputBuffers, size_t numBuffers, nsecs_t timestamp) { for ( size_t i = 0 ; i <numBuffers; i++) { Camera3Stream *stream = Camera3Stream:: cast (outputBuffers[i].stream); status_t res = stream-> returnBuffer (outputBuffers[i], timestamp); //If the buffer is the last reference to the stream, the stream may be here Was released at the time. if (res != OK) { ALOGE ( "Can't return buffer to its stream: %s (%d)" , strerror (-res), res); } } } Copy code

Here call returnBufferLocked to continue to return.

frameworks/av/services/camera/libcameraservice/device3/Camera3Stream.cpp

/**
 * Return a filled output buffer to the stream.
 *
 * Takes mLock, delegates to the subclass's returnBufferLocked(), fires the
 * buffer listeners on success, and always signals waiters on
 * mOutputBufferReturnedSignal — even on failure — so threads blocked on
 * buffer availability can re-evaluate.
 */
status_t Camera3Stream::returnBuffer(const camera3_stream_buffer &buffer,
        nsecs_t timestamp)
{
    ATRACE_CALL();
    Mutex::Autolock l(mLock);

    /**
     * TODO: Check that the state is valid first.
     *
     * <HAL3.2 IN_CONFIG and IN_RECONFIG in addition to CONFIGURED.
     * >= HAL3.2 CONFIGURED only
     *
     * Do this for getBuffer as well.
     */
    status_t res = returnBufferLocked(buffer, timestamp);
    if (res == OK) {
        fireBufferListenersLocked(buffer, /*acquired*/false, /*output*/true);
    }

    // Even if returning the buffer failed, we still want to signal whoever
    // is waiting for the buffer to be returned.
    mOutputBufferReturnedSignal.signal();

    return res;
}

The Camera3OutputStream object was created in " Android Source Camera2 Preview Process Analysis One ". The returnAnyBufferLocked function is called here.

frameworks/av/services/camera/libcameraservice/device3/Camera3OutputStream.cpp

/**
 * Output-stream implementation of the locked buffer return: delegates to
 * returnAnyBufferLocked() (with output=true) and records the last
 * timestamp on success.
 */
status_t Camera3OutputStream::returnBufferLocked(
        const camera3_stream_buffer &buffer, nsecs_t timestamp)
{
    ATRACE_CALL();

    status_t res = returnAnyBufferLocked(buffer, timestamp, /*output*/true);

    if (res != OK) {
        return res;
    }

    mLastTimestamp = timestamp;

    return OK;
}

The focus here is on the returnBufferCheckedLocked method.

frameworks/av/services/camera/libcameraservice/device3/Camera3IOStreamBase.cpp

/**
 * Common bookkeeping for returning an input or output buffer.
 *
 * Drops the strong refcount taken at hand-out (keeping a local sp<> alive
 * so the decStrong can't destroy us mid-call), performs the
 * subclass-specific returnBufferCheckedLocked(), merges the release fence,
 * updates hand-out counters, and marks the stream idle with the status
 * tracker once all buffers are back.
 */
status_t Camera3IOStreamBase::returnAnyBufferLocked(
        const camera3_stream_buffer &buffer,
        nsecs_t timestamp,
        bool output)
{
    status_t res;

    // returnBuffer may be called from a raw pointer, not a sp<>, and we'll
    // be decrementing the internal refcount next. In case this is the last
    // ref, we might get destructed on the decStrong(), so keep an sp around
    // until the end of the call - otherwise have to sprinkle the decStrong
    // on all exit points.
    sp<Camera3IOStreamBase> keepAlive(this);
    decStrong(this);

    if ((res = returnBufferPreconditionCheckLocked()) != OK) {
        return res;
    }

    sp<Fence> releaseFence;
    res = returnBufferCheckedLocked(buffer, timestamp, output, &releaseFence);
    // Res may be an error, but we still want to decrement our owned count
    // to enable clean shutdown. So we'll just return the error but otherwise
    // carry on.

    if (releaseFence != 0) {
        mCombinedFence = Fence::merge(mName, mCombinedFence, releaseFence);
    }

    if (output) {
        mHandoutOutputBufferCount--;
    }

    mHandoutTotalBufferCount--;
    if (mHandoutTotalBufferCount == 0 && mState != STATE_IN_CONFIG &&
            mState != STATE_IN_RECONFIG && mState != STATE_PREPARING) {
        /**
         * Avoid a spurious IDLE->ACTIVE->IDLE transition when using buffers
         * before/after register_stream_buffers during initial configuration
         * or re-configuration, or during prepare pre-allocation
         */
        ALOGV("%s: Stream %d: All buffers returned; now idle", __FUNCTION__,
                mId);
        sp<StatusTracker> statusTracker = mStatusTracker.promote();
        if (statusTracker != 0) {
            statusTracker->markComponentIdle(mStatusId, mCombinedFence);
        }
    }

    mBufferReturnedSignal.signal();

    if (output) {
        mLastTimestamp = timestamp;
    }

    return res;
}

Looking back at " Android Source Camera2 Preview Process Analysis II ", the consumer queueBuffer here is really starting to consume Camera frames.

frameworks/av/services/camera/libcameraservice/device3/Camera3OutputStream.cpp

/**
 * Hand a buffer back to the consumer's ANativeWindow.
 *
 * Dups the HAL's release fence, drops mLock around the window call to
 * avoid deadlock with StreamingProcessor, then either cancels the buffer
 * (on CAMERA3_BUFFER_STATUS_ERROR) or sets its timestamp and queues it to
 * the consumer — this queueBuffer is where the camera frame is actually
 * consumed. On any failure the dup'd fence fd is closed.
 *
 * @param releaseFenceOut  receives the (possibly merged) release fence
 */
status_t Camera3OutputStream::returnBufferCheckedLocked(
        const camera3_stream_buffer &buffer,
        nsecs_t timestamp,
        bool output,
        /*out*/
        sp<Fence> *releaseFenceOut)
{
    (void)output;
    ALOG_ASSERT(output, "Expected output to be true");

    status_t res;

    // Fence management - always honor the release fence from the HAL.
    sp<Fence> releaseFence = new Fence(buffer.release_fence);
    int anwReleaseFence = releaseFence->dup();

    /**
     * Release the lock briefly to avoid deadlock with
     * StreamingProcessor::startStream -> Camera3Stream::isConfiguring
     * (this thread will go into StreamingProcessor::onFrameAvailable)
     * during the queueBuffer.
     */
    sp<ANativeWindow> currentConsumer = mConsumer;
    mLock.unlock();

    /**
     * Return buffer back to ANativeWindow.
     */
    if (buffer.status == CAMERA3_BUFFER_STATUS_ERROR) {
        // Cancel buffer
        res = currentConsumer->cancelBuffer(currentConsumer.get(),
                container_of(buffer.buffer, ANativeWindowBuffer, handle),
                anwReleaseFence);
        if (res != OK) {
            ALOGE("%s: Stream %d: Error cancelling buffer to native window:"
                  " %s (%d)", __FUNCTION__, mId, strerror(-res), res);
        }
    } else {
        if (mTraceFirstBuffer && (stream_type == CAMERA3_STREAM_OUTPUT)) {
            {
                char traceLog[48];
                snprintf(traceLog, sizeof(traceLog),
                        "Stream %d: first full buffer\n", mId);
                ATRACE_NAME(traceLog);
            }
            mTraceFirstBuffer = false;
        }

        // Set the timestamp on the buffers about to be queued.
        res = native_window_set_buffers_timestamp(mConsumer.get(), timestamp);
        if (res != OK) {
            ALOGE("%s: Stream %d: Error setting timestamp: %s (%d)",
                  __FUNCTION__, mId, strerror(-res), res);
            return res;
        }

        // Consumer queueBuffer — the frame is delivered here.
        res = currentConsumer->queueBuffer(currentConsumer.get(),
                container_of(buffer.buffer, ANativeWindowBuffer, handle),
                anwReleaseFence);
        if (res != OK) {
            ALOGE("%s: Stream %d: Error queueing buffer to native window: "
                  "%s (%d)", __FUNCTION__, mId, strerror(-res), res);
        }
    }
    mLock.lock();

    // Once a valid buffer has been returned to the queue, buffers can no
    // longer all be dequeued for pre-allocation.
    if (buffer.status != CAMERA3_BUFFER_STATUS_ERROR) {
        mStreamUnpreparable = true;
    }

    if (res != OK) {
        close(anwReleaseFence);
    }

    *releaseFenceOut = releaseFence;

    return res;
}

Now let's continue with the other half of QCamera3Channel::start(): starting the channel through the mm-camera interface.

  1. Find the mm_camera_obj_t object according to the camera handle;
  2. Call mm_camera_start_channel for further processing.

device/moto/shamu/camera/QCamera2/stack/mm-camera-interface/src/mm_camera_interface.c

/**
 * Interface entry point for starting a channel.
 *
 * Looks up the mm_camera_obj_t for the given camera handle under the
 * interface lock, hands the lock ordering over to the per-object cam_lock,
 * and forwards to mm_camera_start_channel().
 *
 * @param camera_handle  camera handle obtained at open
 * @param ch_id          channel handle to start
 * @return 0 on success, -1 if the camera object was not found, or the
 *         error from mm_camera_start_channel()
 */
static int32_t mm_camera_intf_start_channel(uint32_t camera_handle,
                                            uint32_t ch_id)
{
    int32_t rc = -1;
    mm_camera_obj_t *my_obj = NULL;

    pthread_mutex_lock(&g_intf_lock);
    my_obj = mm_camera_util_get_camera_by_handler(camera_handle);

    if (my_obj) {
        /* Hand-over locking: take cam_lock before dropping g_intf_lock. */
        pthread_mutex_lock(&my_obj->cam_lock);
        pthread_mutex_unlock(&g_intf_lock);
        rc = mm_camera_start_channel(my_obj, ch_id);
    } else {
        pthread_mutex_unlock(&g_intf_lock);
    }
    CDBG("%s :X rc = %d", __func__, rc);
    return rc;
}
  1. Find the mm_channel_t object;
  2. Call mm_channel_fsm_fn for further processing.

device/moto/shamu/camera/QCamera2/stack/mm-camera-interface/src/mm_camera.c

/**
 * Start a channel on an opened camera object.
 *
 * Looks up the mm_channel_t by handle, hands over from the camera lock to
 * the channel lock, and drives the channel state machine with
 * MM_CHANNEL_EVT_START.
 *
 * @return 0 on success, -1 if the channel was not found, or the error from
 *         mm_channel_fsm_fn()
 */
int32_t mm_camera_start_channel(mm_camera_obj_t *my_obj, uint32_t ch_id)
{
    int32_t rc = -1;
    mm_channel_t *ch_obj =
            mm_camera_util_get_channel_by_handler(my_obj, ch_id);

    if (NULL != ch_obj) {
        /* Hand-over locking: take ch_lock before dropping cam_lock. */
        pthread_mutex_lock(&ch_obj->ch_lock);
        pthread_mutex_unlock(&my_obj->cam_lock);

        rc = mm_channel_fsm_fn(ch_obj, MM_CHANNEL_EVT_START, NULL, NULL);
    } else {
        pthread_mutex_unlock(&my_obj->cam_lock);
    }

    return rc;
}

At this time, the mm_channel_t state is MM_CHANNEL_STATE_STOPPED.

device/moto/shamu/camera/QCamera2/stack/mm-camera-interface/src/mm_camera_channel.c

/**
 * Channel state-machine dispatcher.
 *
 * Routes the event to the handler for the channel's current state; for the
 * start flow the channel is in MM_CHANNEL_STATE_STOPPED, so the event goes
 * to mm_channel_fsm_fn_stopped(). Releases ch_lock (taken by the caller)
 * before returning.
 *
 * Note: the other state cases elided in the article are marked with
 * comments; the pasted `......` ellipses were not valid C.
 */
int32_t mm_channel_fsm_fn(mm_channel_t *my_obj,
                          mm_channel_evt_type_t evt,
                          void *in_val,
                          void *out_val)
{
    int32_t rc = -1;

    CDBG("%s: E state = %d", __func__, my_obj->state);
    switch (my_obj->state) {
    /* ... other states elided in the article ... */
    case MM_CHANNEL_STATE_STOPPED:
        rc = mm_channel_fsm_fn_stopped(my_obj, evt, in_val, out_val);
        break;
    /* ... other states elided in the article ... */
    default:
        CDBG("%s: Not a valid state (%d)", __func__, my_obj->state);
        break;
    }

    /* unlock ch_lock */
    pthread_mutex_unlock(&my_obj->ch_lock);
    CDBG("%s: X rc = %d", __func__, rc);
    return rc;
}
  1. Call mm_channel_start to start further;
  2. At this time, the mm_channel_t state changes to MM_CHANNEL_STATE_ACTIVE.

device/moto/shamu/camera/QCamera2/stack/mm-camera-interface/src/mm_camera_channel.c

/**
 * Event handler for a channel in MM_CHANNEL_STATE_STOPPED.
 *
 * For MM_CHANNEL_EVT_START it calls mm_channel_start(); on success the
 * channel transitions to MM_CHANNEL_STATE_ACTIVE.
 *
 * Note: the other event cases elided in the article are marked with
 * comments; the pasted `......` ellipses were not valid C.
 */
int32_t mm_channel_fsm_fn_stopped(mm_channel_t *my_obj,
                                  mm_channel_evt_type_t evt,
                                  void *in_val,
                                  void *out_val)
{
    int32_t rc = 0;

    CDBG("%s: E evt = %d", __func__, evt);
    switch (evt) {
    /* ... other events elided in the article ... */
    case MM_CHANNEL_EVT_START:
        {
            rc = mm_channel_start(my_obj);
            /* first stream started in stopped state, then become active */
            if (0 == rc) {
                my_obj->state = MM_CHANNEL_STATE_ACTIVE;
            }
        }
        break;
    /* ... other events elided in the article ... */
    default:
        CDBG_ERROR("%s: invalid state (%d) for evt (%d)",
                   __func__, my_obj->state, evt);
        break;
    }
    CDBG("%s: E rc = %d", __func__, rc);
    return rc;
}

Core steps:

  1. Start cb thread, start cmd thread;
  2. Start all streams in the channel;

device/moto/shamu/camera/QCamera2/stack/mm-camera-interface/src/mm_camera_channel.c

/**
 * Start all streams within a channel.
 *
 * Core steps:
 *  1. Collect the streams, moving the metadata stream to the front so it
 *     starts first.
 *  2. If a super-buf notify callback is registered, initialize the
 *     superbuf queue and launch the cb thread (dispatches super bufs
 *     upward) and the cmd thread (collects super buf dataCB).
 *  3. For every stream: allocate buffers, register buffers, then start it.
 *  4. On any failure, roll back the started streams, release the bundle
 *     threads, and deinitialize the superbuf queue.
 *
 * @return 0 on success, non-zero on failure (after rollback)
 */
int32_t mm_channel_start(mm_channel_t *my_obj)
{
    int32_t rc = 0;
    int i, j;
    mm_stream_t *s_objs[MAX_STREAM_NUM_IN_BUNDLE] = {NULL};
    uint8_t num_streams_to_start = 0;
    mm_stream_t *s_obj = NULL;
    int meta_stream_idx = 0;

    for (i = 0; i < MAX_STREAM_NUM_IN_BUNDLE; i++) {
        if (my_obj->streams[i].my_hdl > 0) {
            s_obj = mm_channel_util_get_stream_by_handler(my_obj,
                    my_obj->streams[i].my_hdl);
            if (NULL != s_obj) {
                /* remember the metadata stream index */
                if (s_obj->stream_info->stream_type == CAM_STREAM_TYPE_METADATA) {
                    meta_stream_idx = num_streams_to_start;
                }
                s_objs[num_streams_to_start++] = s_obj;
            }
        }
    }

    if (meta_stream_idx > 0) {
        /* always start the metadata stream first, so swap it with the
         * first stream object */
        s_obj = s_objs[0];
        s_objs[0] = s_objs[meta_stream_idx];
        s_objs[meta_stream_idx] = s_obj;
    }

    if (NULL != my_obj->bundle.super_buf_notify_cb) {
        /* the cb needs to be sent upward, so launch the threads */
        /* init superbuf queue */
        mm_channel_superbuf_queue_init(&my_obj->bundle.superbuf_queue);
        my_obj->bundle.superbuf_queue.num_streams = num_streams_to_start;
        my_obj->bundle.superbuf_queue.expected_frame_id = 0;
        my_obj->bundle.superbuf_queue.expected_frame_id_without_led = 0;
        my_obj->bundle.superbuf_queue.led_off_start_frame_id = 0;
        my_obj->bundle.superbuf_queue.led_on_start_frame_id = 0;
        my_obj->bundle.superbuf_queue.led_on_num_frames = 0;

        for (i = 0; i < num_streams_to_start; i++) {
            /* set the bundled flag on the stream */
            s_objs[i]->is_bundled = 1;
            /* record the bundled stream handles */
            my_obj->bundle.superbuf_queue.bundled_streams[i] =
                    s_objs[i]->my_hdl;
        }

        /* launch cb thread for dispatching super buf through cb */
        mm_camera_cmd_thread_launch(&my_obj->cb_thread,
                                    mm_channel_dispatch_super_buf,
                                    (void *)my_obj);

        /* launch cmd thread for super buf dataCB */
        mm_camera_cmd_thread_launch(&my_obj->cmd_thread,
                                    mm_channel_process_stream_buf,
                                    (void *)my_obj);

        /* set the flag to TRUE */
        my_obj->bundle.is_active = TRUE;
    }

    for (i = 0; i < num_streams_to_start; i++) {
        /* all streams within a channel should be started at the same time */
        if (s_objs[i]->state == MM_STREAM_STATE_ACTIVE) {
            CDBG_ERROR("%s: stream already started idx(%d)", __func__, i);
            rc = -1;
            break;
        }

        /* allocate buf */
        rc = mm_stream_fsm_fn(s_objs[i], MM_STREAM_EVT_GET_BUF, NULL, NULL);
        if (0 != rc) {
            CDBG_ERROR("%s: get buf failed at idx(%d)", __func__, i);
            break;
        }

        /* register buf */
        rc = mm_stream_fsm_fn(s_objs[i], MM_STREAM_EVT_REG_BUF, NULL, NULL);
        if (0 != rc) {
            CDBG_ERROR("%s: reg buf failed at idx(%d)", __func__, i);
            break;
        }

        /* start the stream */
        rc = mm_stream_fsm_fn(s_objs[i], MM_STREAM_EVT_START, NULL, NULL);
        if (0 != rc) {
            CDBG_ERROR("%s: start stream failed at idx(%d)", __func__, i);
            break;
        }
    }

    /* error handling */
    if (0 != rc) {
        for (j = 0; j <= i; j++) {
            /* stop the stream */
            mm_stream_fsm_fn(s_objs[j], MM_STREAM_EVT_STOP, NULL, NULL);
            /* unregister buf */
            mm_stream_fsm_fn(s_objs[j], MM_STREAM_EVT_UNREG_BUF, NULL, NULL);
            /* put buf back */
            mm_stream_fsm_fn(s_objs[j], MM_STREAM_EVT_PUT_BUF, NULL, NULL);
        }

        /* destroy the super buf cmd threads */
        if (TRUE == my_obj->bundle.is_active) {
            /* first stop the bundle threads */
            mm_camera_cmd_thread_release(&my_obj->cmd_thread);
            mm_camera_cmd_thread_release(&my_obj->cb_thread);

            /* deinit the superbuf queue */
            mm_channel_superbuf_queue_deinit(&my_obj->bundle.superbuf_queue);

            /* memset the bundle info */
            memset(&my_obj->bundle, 0, sizeof(mm_channel_bundle_t));
        }
    }
    my_obj->bWaitForPrepSnapshotDone = 0;
    return rc;
}