// Canonical MediaRecorder audio-capture sequence (MIC -> 3GP container, AMR-NB codec).
// The calls must follow the MediaRecorder state machine in exactly this order:
// setAudioSource -> setOutputFormat -> setAudioEncoder -> setOutputFile -> prepare -> start.
MediaRecorder recorder = new MediaRecorder();
recorder.setAudioSource(MediaRecorder.AudioSource.MIC);
recorder.setOutputFormat(MediaRecorder.OutputFormat.THREE_GPP);
recorder.setAudioEncoder(MediaRecorder.AudioEncoder.AMR_NB);
recorder.setOutputFile(PATH_NAME);
recorder.prepare();
recorder.start();   // Recording is now started
// ...
recorder.stop();
// recorder.reset();   // You can reuse the object by going back to setAudioSource() step
recorder.release();    // Now the object cannot be reused
Line 3036: 07-0217:00:07.17584308479 V ACodec : Now uninitialized Line 3674: 07-0217:00:35.7177451949 V ACodec : Now uninitialized Line 3675: 07-0217:00:35.7187458649 V ACodec : onAllocateComponent Line 3678: 07-0217:00:35.721731921 I OMXMaster: makeComponentInstance(OMX.google.amrnb.encoder) in mediacodec process Line 3679: 07-0217:00:35.7427458649 V ACodec : [OMX.google.amrnb.encoder] Now Loaded Line 3680: 07-0217:00:35.7427458649 I MediaCodec: MediaCodec will operate in async mode Line 3680: 07-0217:00:35.7427458649 I MediaCodec: MediaCodec will operate in async mode Line 3681: 07-0217:00:35.7427458649 V MediaCodec: Found 0 pieces of codec specific data. Line 3682: 07-0217:00:35.7427458649 V ACodec : onConfigureComponent Line 3684: 07-0217:00:35.7447458649 I ACodec : codec does not support config priority(err -2147483648) Line 3686: 07-02 17:00:35.751 745 8649 I ACodec : codec does not support config priority (err -2147483648) Line 3687: 07-0217:00:35.7557458649 V MediaCodec: [OMX.google.amrnb.encoder] configured as input format: AMessage(what = 0x00000000) = { Line 3688: 07-0217:00:35.7557458649 V MediaCodec: string mime = "audio/raw" Line 3689: 07-0217:00:35.7557458649 V MediaCodec: int32_t channel-count = 1 Line 3690: 07-0217:00:35.7557458649 V MediaCodec: int32_t sample-rate = 8000 Line 3691: 07-0217:00:35.7557458649 V MediaCodec: int32_t pcm-encoding = 2 Line 3692: 07-0217:00:35.7557458649 V MediaCodec: }, output format: AMessage(what = 0x00000000) = { Line 3693: 07-0217:00:35.7557458649 V MediaCodec: int32_t bitrate = 12200 Line 3694: 07-0217:00:35.7557458649 V MediaCodec: int32_t max-bitrate = 12200 Line 3695: 07-0217:00:35.7557458649 V MediaCodec: int32_t channel-count = 1 Line 3696: 07-0217:00:35.7557458649 V MediaCodec: string mime = "audio/3gpp" Line 3697: 07-0217:00:35.7557458649 V MediaCodec: int32_t sample-rate = 8000 Line 3698: 07-0217:00:35.7557458649 V MediaCodec: } Line 3699: 07-0217:00:35.7577458649 V ACodec : onStart Line 3700: 
07-0217:00:35.7587458649 V ACodec : [OMX.google.amrnb.encoder] Now Loaded->Idle Line 3701: 07-0217:00:35.7587458649 V ACodec : [OMX.google.amrnb.encoder] Allocating 4 buffers of size 2048/2048 (from 2048using Invalid) on input port Line 3702: 07-0217:00:35.7667458649 V ACodec : [OMX.google.amrnb.encoder] Allocating 4 buffers of size 8192/8192 (from 8192using Invalid) on output port Line 3703: 07-0217:00:35.7717458649 V MediaCodec: input buffers allocated Line 3704: 07-0217:00:35.7717458649 V MediaCodec: output buffers allocated Line 3705: 07-0217:00:35.7727458646 I MediaCodecSource: MediaCodecSource (audio) starting Line 3706: 07-0217:00:35.7727458649 V ACodec : [OMX.google.amrnb.encoder] Now Idle->Executing Line 3701: 07-0217:00:35.7587458649 V ACodec : [OMX.google.amrnb.encoder] Allocating 4 buffers of size 2048/2048 (from 2048using Invalid) on input port Line 3702: 07-0217:00:35.7667458649 V ACodec : [OMX.google.amrnb.encoder] Allocating 4 buffers of size 8192/8192 (from 8192using Invalid) on output port Line 3706: 07-0217:00:35.7727458649 V ACodec : [OMX.google.amrnb.encoder] Now Idle->Executing Line 3707: 07-0217:00:35.7737458649 V ACodec : [OMX.google.amrnb.encoder] calling fillBuffer 5 Line 3708: 07-0217:00:35.7737458649 V ACodec : [OMX.google.amrnb.encoder] calling fillBuffer 6 Line 3711: 07-0217:00:35.7757458649 V ACodec : [OMX.google.amrnb.encoder] calling fillBuffer 7 Line 3715: 07-0217:00:35.7757458649 V ACodec : [OMX.google.amrnb.encoder] calling fillBuffer 8 Line 3717: 07-0217:00:35.7767458649 V ACodec : [OMX.google.amrnb.encoder] Now Executing Line 3833: 07-0217:00:35.9027458649 V ACodec : [OMX.google.amrnb.encoder] calling emptyBuffer 1 w/ time 20000 us Line 3834: 07-0217:00:35.9047458649 V ACodec : [OMX.google.amrnb.encoder] onOMXEmptyBufferDone 1 Line 3835: 07-0217:00:35.9077458649 V ACodec : [OMX.google.amrnb.encoder] onOMXFillBufferDone 5 time 20000 us, flags = 0x00000010 Line 3836: 07-0217:00:35.9117458649 V MediaCodec: 
[OMX.google.amrnb.encoder] output format changed to: AMessage(what = 0x00000000) = { Line 3843: 07-0217:00:35.9137458649 V ACodec : [OMX.google.amrnb.encoder] calling fillBuffer 5 Line 3844: 07-0217:00:35.9257458649 V ACodec : [OMX.google.amrnb.encoder] calling emptyBuffer 2 w/ time 40000 us Line 3845: 07-0217:00:35.9267458649 V ACodec : [OMX.google.amrnb.encoder] onOMXEmptyBufferDone 2 Line 3846: 07-0217:00:35.9277458649 V ACodec : [OMX.google.amrnb.encoder] onOMXFillBufferDone 6 time 40000 us, flags = 0x00000010 Line 3847: 07-0217:00:35.9277458649 V ACodec : [OMX.google.amrnb.encoder] calling fillBuffer 6 Line 3848: 07-0217:00:35.9427458649 V ACodec : [OMX.google.amrnb.encoder] calling emptyBuffer 3 w/ time 60000 us Line 3849: 07-0217:00:35.9457458649 V ACodec : [OMX.google.amrnb.encoder] onOMXEmptyBufferDone 3 Line 3850: 07-0217:00:35.9457458649 V ACodec : [OMX.google.amrnb.encoder] onOMXFillBufferDone 7 time 60000 us, flags = 0x00000010 Line 3851: 07-0217:00:35.9467458649 V ACodec : [OMX.google.amrnb.encoder] calling fillBuffer 7 Line 3852: 07-0217:00:35.9637458649 V ACodec : [OMX.google.amrnb.encoder] calling emptyBuffer 4 w/ time 80000 us Line 3853: 07-0217:00:35.9657458649 V ACodec : [OMX.google.amrnb.encoder] onOMXEmptyBufferDone 4 Line 3854: 07-0217:00:35.9667458649 V ACodec : [OMX.google.amrnb.encoder] onOMXFillBufferDone 8 time 80000 us, flags = 0x00000010 Line 3855: 07-0217:00:35.9677458649 V ACodec : [OMX.google.amrnb.encoder] calling fillBuffer 8 Line 3856: 07-0217:00:35.9827458649 V ACodec : [OMX.google.amrnb.encoder] calling emptyBuffer 1 w/ time 100000 us Line 3857: 07-0217:00:35.9837458649 V ACodec : [OMX.google.amrnb.encoder] onOMXEmptyBufferDone 1 Line 3858: 07-0217:00:35.9857458649 V ACodec : [OMX.google.amrnb.encoder] onOMXFillBufferDone 5 time 100000 us, flags = 0x00000010 ...... 
Line 8217: 07-0217:00:56.4467458649 V ACodec : [OMX.google.amrnb.encoder] calling fillBuffer 8 Line 8218: 07-0217:00:56.4627458649 V ACodec : [OMX.google.amrnb.encoder] calling emptyBuffer 1 w/ time 20580000 us Line 8219: 07-0217:00:56.4647458649 V ACodec : [OMX.google.amrnb.encoder] onOMXEmptyBufferDone 1 Line 8220: 07-0217:00:56.4667458649 V ACodec : [OMX.google.amrnb.encoder] onOMXFillBufferDone 5 time 20580000 us, flags = 0x00000010 Line 8221: 07-0217:00:56.4677458649 V ACodec : [OMX.google.amrnb.encoder] calling fillBuffer 5 Line 8222: 07-0217:00:56.4687458646 I MediaCodecSource: encoder (audio) stopping Line 8223: 07-0217:00:56.4837458649 V ACodec : [OMX.google.amrnb.encoder] Now Executing->Idle Line 8224: 07-0217:00:56.4847458649 V ACodec : [OMX.google.amrnb.encoder] onOMXFillBufferDone 5 time 20580000 us, flags = 0x00000000 Line 8225: 07-0217:00:56.4847458649 V ACodec : [OMX.google.amrnb.encoder] onOMXFillBufferDone 6 time 20520000 us, flags = 0x00000000 Line 8226: 07-0217:00:56.4847458649 V ACodec : [OMX.google.amrnb.encoder] onOMXFillBufferDone 7 time 20540000 us, flags = 0x00000000 Line 8227: 07-0217:00:56.4847458649 V ACodec : [OMX.google.amrnb.encoder] onOMXFillBufferDone 8 time 20560000 us, flags = 0x00000000 Line 8228: 07-0217:00:56.4967458649 V ACodec : [OMX.google.amrnb.encoder] Now Idle->Loaded Line 8229: 07-0217:00:56.4967458649 V ACodec : [OMX.google.amrnb.encoder] Now Loaded Line 8231: 07-0217:00:56.4997458646 I MediaCodecSource: encoder (audio) stopped
07-0311:43:44.94762386238 V CAM_VideoModule: startVideoRecording 07-0311:43:44.97462386238 D CameraStorage: External storage state=mounted 07-0311:43:44.99562386238 D CameraStorage: External storage state=mounted 07-0311:43:44.99862386238 V CAM_VideoModule: initializeRecorder 07-0311:43:45.0037411966 V MediaPlayerService: Create new media recorder client from pid 6238 07-0311:43:45.00962386238 I CAM_VideoModule: NOTE: hfr = off : hsr = off 07-0311:43:45.0248026282 E mm-camera: <ISP ><ERROR> 378: tintless40_algo_process_be: failed: update_func rc -4 07-0311:43:45.0248026282 E mm-camera: <ISP ><ERROR> 851: tintless40_algo_execute: failed: tintless40_trigger_algo 07-0311:43:45.0248026282 E mm-camera: <ISP ><ERROR> 98: isp_algo_execute_internal_algo: failed to run algo tintless 07-0311:43:45.0248026282 E mm-camera: <ISP ><ERROR> 710: isp_parser_thread_func: failed: isp_parser_process 07-0311:43:45.03162386238 D LocationManager: No location received yet. 07-0311:43:45.03362386238 D LocationManager: No location received yet. 
07-0311:43:45.03362386238 V CAM_VideoModule: New video filename: /storage/emulated/0/DCIM/Camera/VID_20180703_114345.mp4 07-0311:43:45.05913912526 I MediaFocusControl: AudioFocus requestAudioFocus() from uid/pid 10025/6238 clientId=android.media.AudioManager@11f939d req=2 flags=0x0 07-0311:43:45.06249664966 D AudioManager: AudioManager dispatching onAudioFocusChange(-2) for android.media.AudioManager@e6fb066com.android.music.MediaPlaybackService$4@2315fa7 07-0311:43:45.06349664966 V MediaPlaybackService: AudioFocus: received AUDIOFOCUS_LOSS_TRANSIENT 07-0311:43:45.0797266266 E QCamera : <HAL><ERROR> status_t qcamera::QCameraParameters::setSkinBeautify(const qcamera::QCameraParameters &): 15184: gpw status_t qcamera::QCameraParameters::setSkinBeautify(const qcamera::QCameraParameters &): str=off , prev_str=off 07-0311:43:45.0797266266 E QCamera : <HAL><ERROR> int32_t qcamera::QCameraParameters::setAjustLevel(const qcamera::QCameraParameters &): 15302: gpw -1-1-1-1-1 07-0311:43:45.0797266266 E QCamera : <HAL><ERROR> int32_t qcamera::QCameraParameters::setAjustLevel(int, int, int, int, int): 15232: ggw3 -1-1 07-0311:43:45.0917262209 E CameraClient: setVideoBufferMode: 535: videoBufferMode 2 is not supported. 
07-0311:43:45.1008026282 E mm-camera: <ISP ><ERROR> 378: tintless40_algo_process_be: failed: update_func rc -4 07-0311:43:45.1018026282 E mm-camera: <ISP ><ERROR> 851: tintless40_algo_execute: failed: tintless40_trigger_algo 07-0311:43:45.1018026282 E mm-camera: <ISP ><ERROR> 98: isp_algo_execute_internal_algo: failed to run algo tintless 07-0311:43:45.1018026282 E mm-camera: <ISP ><ERROR> 710: isp_parser_thread_func: failed: isp_parser_process 07-0311:43:45.1058026296 E mm-camera: <IMGLIB><ERROR> 318: faceproc_comp_set_param: Error param=523 07-0311:43:45.1117266266 E QCamera : <HAL><ERROR> status_t qcamera::QCameraParameters::setSkinBeautify(const qcamera::QCameraParameters &): 15184: gpw status_t qcamera::QCameraParameters::setSkinBeautify(const qcamera::QCameraParameters &): str=off , prev_str=off 07-0311:43:45.1117266266 E QCamera : <HAL><ERROR> int32_t qcamera::QCameraParameters::setAjustLevel(const qcamera::QCameraParameters &): 15302: gpw -1-1-1-1-1 07-0311:43:45.1117266266 E QCamera : <HAL><ERROR> int32_t qcamera::QCameraParameters::setAjustLevel(int, int, int, int, int): 15232: ggw3 -1-1 07-0311:43:45.1217416349 I MediaPlayerService: MediaPlayerService::getOMX 07-0311:43:45.1227416349 I OMXClient: MuxOMX ctor
Line 5362: 07-0311:43:41.71262386238 V CAM_VideoModule: Video Encoder selected = 2 Line 5363: 07-0311:43:41.71262386238 V CAM_VideoModule: Audio Encoder selected = 3 Line 5449: 07-0311:43:41.94462386238 V CAM_VideoModule: Video Encoder selected = 2 Line 5450: 07-0311:43:41.94462386238 V CAM_VideoModule: Audio Encoder selected = 3 Line 6011: 07-0311:43:45.1237322029 I OMXMaster: makeComponentInstance(OMX.qcom.video.encoder.avc) in mediacodec process Line 6014: 07-0311:43:45.1707322029 I OMX-VENC: Component_init : OMX.qcom.video.encoder.avc : return = 0x0 Line 6017: 07-0311:43:45.1807322168 E OMXNodeInstance: getParameter(2dc0040:qcom.encoder.avc, ParamConsumerUsageBits(0x6f800004)) ERROR: UnsupportedIndex(0x8000101a) Line 6019: 07-0311:43:45.1817321863 W OMXNodeInstance: [2dc0040:qcom.encoder.avc] component does not support metadata mode; using fallback Line 6020: 07-0311:43:45.1817416349 E ACodec : [OMX.qcom.video.encoder.avc] storeMetaDataInBuffers (output) failed w/ err -1010 Line 6021: 07-0311:43:45.1827416349 I ExtendedACodec: setupVideoEncoder() Line 6026: 07-0311:43:45.2317416349 I ACodec : setupAVCEncoderParameters with [profile: Baseline] [level: Level1] Line 6028: 07-0311:43:45.2417321863 E OMXNodeInstance: getConfig(2dc0040:qcom.encoder.avc, ??(0x7f000062)) ERROR: UnsupportedSetting(0x80001019) Line 6029: 07-0311:43:45.2457416349 I ACodec : [OMX.qcom.video.encoder.avc] cannot encode HDR static metadata. Ignoring. 
Line 6030: 07-0311:43:45.2457416349 I ACodec : setupVideoEncoder succeeded Line 6031: 07-03 11:43:45.245 741 6349 I ExtendedACodec: [OMX.qcom.video.encoder.avc] configure, AMessage : AMessage(what = 'conf', target = 75) = { Line 6044: 07-0311:43:45.2457416349 I ExtendedACodec: int32_t encoder = 1 Line 6174: 07-0311:43:45.4557322169 I OMXMaster: makeComponentInstance(OMX.qcom.audio.encoder.aac) in mediacodec process Line 6180: 07-0311:43:45.4607322169 E QC_AACENC: component init: role = OMX.qcom.audio.encoder.aac Line 6182: 07-0311:43:45.491732732 E OMXNodeInstance: setConfig(2dc0041:qcom.encoder.aac, ConfigPriority(0x6f800002)) ERROR: UnsupportedIndex(0x8000101a) Line 6184: 07-0311:43:45.5027322169 E OMXNodeInstance: setConfig(2dc0041:qcom.encoder.aac, ConfigPriority(0x6f800002)) ERROR: UnsupportedIndex(0x8000101a) Line 6193: 07-0311:43:45.5377416363 I CameraSource: Using encoder format: 0x22 Line 6194: 07-0311:43:45.5377416363 I CameraSource: Using encoder data space: 0x104 Line 7431: 07-0311:43:56.4727416347 I MediaCodecSource: encoder (video) stopping Line 7456: 07-0311:43:56.6117416347 I MediaCodecSource: encoder (video) stopped Line 7494: 07-0311:43:56.6777416347 I MediaCodecSource: encoder (audio) stopping Line 7534: 07-0311:43:56.8007416347 I MediaCodecSource: encoder (audio) stopped
(五)、音视频混合MediaMuxer源码分析
1 2 3 4 5 6 7 8 9 10 11 12 13
sp<MediaWriter> mWriter; [->\android\frameworks\av\media\libstagefright\MediaMuxer.cpp] status_tMediaMuxer::start(){ Mutex::Autolock autoLock(mMuxerLock); if (mState == INITIALIZED) { mState = STARTED; mFileMeta->setInt32(kKeyRealTimeRecording, false); return mWriter->start(mFileMeta.get()); } else { ALOGE("start() is called in invalid state %d", mState); return INVALID_OPERATION; } }
// If the codec specific data has not been received yet, delay pause. // After the codec specific data is received, discard what we received // when the track is to be paused. if (mPaused && !mResumed) { buffer->release(); buffer = NULL; continue; }
++count;
int32_t isCodecConfig; if (buffer->meta_data()->findInt32(kKeyIsCodecConfig, &isCodecConfig) && isCodecConfig) { // if config format (at track addition) already had CSD, keep that // UNLESS we have not received any frames yet. // TODO: for now the entire CSD has to come in one frame for encoders, even though // they need to be spread out for decoders. if (mGotAllCodecSpecificData && nActualFrames > 0) { ALOGI("ignoring additional CSD for video track after first frame"); } else { mMeta = mSource->getFormat(); // get output format after format change
MediaBuffer *copy = NULL; // Check if the upstream source hints it is OK to hold on to the // buffer without releasing immediately and avoid cloning the buffer if (AVUtils::get()->canDeferRelease(buffer->meta_data())) { copy = buffer; meta_data = new MetaData(*buffer->meta_data().get()); } else { // Make a deep copy of the MediaBuffer and Metadata and release // the original as soon as we can copy = new MediaBuffer(buffer->range_length()); memcpy(copy->data(), (uint8_t *)buffer->data() + buffer->range_offset(), buffer->range_length()); copy->set_range(0, buffer->range_length()); meta_data = new MetaData(*buffer->meta_data().get()); buffer->release(); buffer = NULL; }
ALOGV("%s media time stamp: %" PRId64 " and previous paused duration %" PRId64, trackName, timestampUs, previousPausedDurationUs); if (timestampUs > mTrackDurationUs) { mTrackDurationUs = timestampUs; }
// We need to use the time scale based ticks, rather than the // timestamp itself to determine whether we have to use a new // stts entry, since we may have rounding errors. // The calculation is intended to reduce the accumulated // rounding errors. currDurationTicks = ((timestampUs * mTimeScale + 500000LL) / 1000000LL - (lastTimestampUs * mTimeScale + 500000LL) / 1000000LL); if (currDurationTicks < 0ll) { ALOGE("do not support out of order frames (timestamp: %lld < last: %lld for %s track", (long long)timestampUs, (long long)lastTimestampUs, trackName); copy->release(); mSource->stop(); mIsMalformed = true; break; }
// if the duration is different for this sample, see if it is close enough to the previous // duration that we can fudge it and use the same value, to avoid filling the stts table // with lots of near-identical entries. // "close enough" here means that the current duration needs to be adjusted by less // than 0.1 milliseconds if (lastDurationTicks && (currDurationTicks != lastDurationTicks)) { int64_t deltaUs = ((lastDurationTicks - currDurationTicks) * 1000000LL + (mTimeScale / 2)) / mTimeScale; if (deltaUs > -100 && deltaUs < 100) { // use previous ticks, and adjust timestamp as if it was actually that number // of ticks currDurationTicks = lastDurationTicks; timestampUs += deltaUs; } }
mStszTableEntries->add(htonl(sampleSize)); if (mStszTableEntries->count() > 2) {
// Force the first sample to have its own stts entry so that // we can adjust its value later to maintain the A/V sync. if (mStszTableEntries->count() == 3 || currDurationTicks != lastDurationTicks) { addOneSttsTableEntry(sampleCount, lastDurationTicks); sampleCount = 1; } else { ++sampleCount; }
if (isTrackMalFormed()) { err = ERROR_MALFORMED; }
mOwner->trackProgressStatus(mTrackId, -1, err);
// Last chunk if (!hasMultipleTracks) { addOneStscTableEntry(1, mStszTableEntries->count()); } else if (!mChunkSamples.empty()) { addOneStscTableEntry(++nChunks, mChunkSamples.size()); bufferChunk(timestampUs); }
// We don't really know how long the last frame lasts, since // there is no frame time after it, just repeat the previous // frame's duration. if (mStszTableEntries->count() == 1) { lastDurationUs = 0; // A single sample's duration lastDurationTicks = 0; } else { ++sampleCount; // Count for the last sample }
// The last ctts box may not have been written yet, and this // is to make sure that we write out the last ctts box. if (currCttsOffsetTimeTicks == lastCttsOffsetTimeTicks) { if (cttsSampleCount > 0) { addOneCttsTableEntry(cttsSampleCount, lastCttsOffsetTimeTicks); } }
ALOGI("Received total/0-length (%d/%d) buffers and encoded %d frames. - %s", count, nZeroLengthFrames, mStszTableEntries->count(), trackName); if (mIsAudio) { ALOGI("Audio track drift time: %" PRId64 " us", mOwner->getDriftTimeUs()); } // if err is ERROR_IO (ex: during SSR), return OK to save the // recorded file successfully. Session tear down will happen as part of // client callback if ((err == ERROR_IO) || (err == ERROR_END_OF_STREAM)) { return OK; } return err; }
status_t MPEG4Writer::Track::stop() { ALOGD("%s track stopping", mIsAudio? "Audio": "Video"); if (!mStarted) { ALOGE("Stop() called but track is not started"); return ERROR_END_OF_STREAM; }