/*
 * Copyright (C) 2007 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package android.media;

import android.app.ActivityThread;
import android.hardware.Camera;
import android.os.Handler;
import android.os.Looper;
import android.os.Message;
import android.util.Log;
import android.view.Surface;

import java.io.FileDescriptor;
import java.io.IOException;
import java.io.RandomAccessFile;
import java.lang.ref.WeakReference;

/**
 * Used to record audio and video. The recording control is based on a
 * simple state machine (see below).
 *
 * <p>A common case of using MediaRecorder to record audio works as follows:
 *
 * <pre>MediaRecorder recorder = new MediaRecorder();
 * recorder.setAudioSource(MediaRecorder.AudioSource.MIC);
 * recorder.setOutputFormat(MediaRecorder.OutputFormat.THREE_GPP);
 * recorder.setAudioEncoder(MediaRecorder.AudioEncoder.AMR_NB);
 * recorder.setOutputFile(PATH_NAME);
 * recorder.prepare();
 * recorder.start();   // Recording is now started
 * ...
 * recorder.stop();
 * recorder.reset();   // You can reuse the object by going back to setAudioSource() step
 * recorder.release(); // Now the object cannot be reused
 * </pre>
 *
 * <p>Applications may want to register for informational and error
 * events in order to be informed of internal updates and possible
 * runtime errors during recording. Registration for such events is
 * done by setting the appropriate listeners (via calls to
 * {@link #setOnInfoListener(OnInfoListener)} and/or
 * {@link #setOnErrorListener(OnErrorListener)}).
 * In order to receive the respective callback associated with these listeners,
 * applications are required to create MediaRecorder objects on threads with a
 * Looper running (the main UI thread by default already has a Looper running).
 *
 * <p>Note: Currently, MediaRecorder does not work on the emulator.
 *
 * <p>For more information about how to use MediaRecorder for recording video, read the
 * Camera developer guide.
 * For more information about how to use MediaRecorder for recording sound, read the
 * Audio Capture developer guide.
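 *
 * <p>For example, an application that wants to be notified when the duration limit is
 * reached or when the media server dies might register both listeners before starting to
 * record. This is only an illustrative sketch; the recorder must have been created on a
 * thread with a Looper for the callbacks to be delivered:
 *
 * <pre>MediaRecorder recorder = new MediaRecorder(); // created on a thread with a Looper
 * recorder.setOnInfoListener(new MediaRecorder.OnInfoListener() {
 *     public void onInfo(MediaRecorder mr, int what, int extra) {
 *         if (what == MediaRecorder.MEDIA_RECORDER_INFO_MAX_DURATION_REACHED) {
 *             // recording has been stopped; clean up or start a new session here
 *         }
 *     }
 * });
 * recorder.setOnErrorListener(new MediaRecorder.OnErrorListener() {
 *     public void onError(MediaRecorder mr, int what, int extra) {
 *         if (what == MediaRecorder.MEDIA_ERROR_SERVER_DIED) {
 *             mr.release(); // the recorder must be released and a new one created
 *         }
 *     }
 * });
 * </pre>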
     * <p>Use this function to switch quickly between preview and capture mode without a teardown of
     * the camera object. {@link android.hardware.Camera#unlock()} should be called before
     * this. Must be called before {@link #prepare}.
     *
     * @param c the Camera to use for recording
     * @deprecated Use {@link #getSurface} and the {@link android.hardware.camera2} API instead.
     */
    @Deprecated
    public native void setCamera(Camera c);

    /**
     * Gets the surface to record from when using SURFACE video source.
     *
     * <p>May only be called after {@link #prepare}. Frames rendered to the Surface before
     * {@link #start} will be discarded.
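     *
     * <p>For example, when recording from an {@link android.hardware.camera2} source, a
     * configuration sketch might look like the following, with the returned Surface then
     * added as a target of the camera capture session (camera setup itself is omitted and
     * PATH_NAME is a placeholder for the output path):
     *
     * <pre>MediaRecorder recorder = new MediaRecorder();
     * recorder.setVideoSource(MediaRecorder.VideoSource.SURFACE);
     * recorder.setOutputFormat(MediaRecorder.OutputFormat.MPEG_4);
     * recorder.setVideoSize(1280, 720);
     * recorder.setVideoFrameRate(30);
     * recorder.setVideoEncoder(MediaRecorder.VideoEncoder.H264);
     * recorder.setOutputFile(PATH_NAME);
     * recorder.prepare();                              // must precede getSurface()
     * Surface recorderSurface = recorder.getSurface(); // hand this to the capture session
     * // ... start the camera repeating request into recorderSurface, then:
     * recorder.start();
     * </pre>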
     *
     * @throws IllegalStateException if it is called before {@link #prepare}, after
     * {@link #stop}, or is called when VideoSource is not set to SURFACE.
     * @see android.media.MediaRecorder.VideoSource
     */
    public native Surface getSurface();

    /**
     * Sets a Surface to show a preview of recorded media (video). Call this
     * before prepare() to make sure that the desired preview display is
     * set. If {@link #setCamera(Camera)} is used and the surface has already
     * been set to the camera, applications do not need to call this. If
     * this is called with a non-null surface, the preview surface of the camera
     * will be replaced by the new surface. If this method is called with a null
     * surface or not called at all, the media recorder will not change the preview
     * surface of the camera.
     *
     * @param sv the Surface to use for the preview
     * @see android.hardware.Camera#setPreviewDisplay(android.view.SurfaceHolder)
     */
    public void setPreviewDisplay(Surface sv) {
        mSurface = sv;
    }

    /**
     * Defines the audio source. These constants are used with
     * {@link MediaRecorder#setAudioSource(int)}.
     */
    public final class AudioSource {

        private AudioSource() {}

        /** @hide */
        public final static int AUDIO_SOURCE_INVALID = -1;

        /* Do not change these values without updating their counterparts
         * in system/core/include/system/audio.h!
         */

        /** Default audio source **/
        public static final int DEFAULT = 0;

        /** Microphone audio source */
        public static final int MIC = 1;

        /** Voice call uplink (Tx) audio source */
        public static final int VOICE_UPLINK = 2;

        /** Voice call downlink (Rx) audio source */
        public static final int VOICE_DOWNLINK = 3;

        /** Voice call uplink + downlink audio source */
        public static final int VOICE_CALL = 4;

        /** Microphone audio source with same orientation as camera if available, the main
         * device microphone otherwise */
        public static final int CAMCORDER = 5;

        /** Microphone audio source tuned for voice recognition if available, behaves like
         * {@link #DEFAULT} otherwise. */
        public static final int VOICE_RECOGNITION = 6;

        /** Microphone audio source tuned for voice communications such as VoIP. It
         * will for instance take advantage of echo cancellation or automatic gain control
         * if available. It otherwise behaves like {@link #DEFAULT} if no voice processing
         * is applied.
         */
        public static final int VOICE_COMMUNICATION = 7;

        /**
         * Audio source for a submix of audio streams to be presented remotely.
         * <p>
         * An application can use this audio source to capture a mix of audio streams
         * that should be transmitted to a remote receiver such as a Wifi display.
         * While recording is active, these audio streams are redirected to the remote
         * submix instead of being played on the device speaker or headset.
         * <p>
         * Certain streams are excluded from the remote submix, including
         * {@link AudioManager#STREAM_RING}, {@link AudioManager#STREAM_ALARM},
         * and {@link AudioManager#STREAM_NOTIFICATION}. These streams will continue
         * to be presented locally as usual.
         * <p>
         * Capturing the remote submix audio requires the
         * {@link android.Manifest.permission#CAPTURE_AUDIO_OUTPUT} permission.
         * This permission is reserved for use by system components and is not available to
         * third-party applications.
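         * <p>
         * For a system component that holds this permission, a capture sketch might look
         * like the following; the container and encoder choices are illustrative and
         * PATH_NAME stands for the output path:
         * <pre>MediaRecorder recorder = new MediaRecorder();
         * recorder.setAudioSource(MediaRecorder.AudioSource.REMOTE_SUBMIX);
         * recorder.setOutputFormat(MediaRecorder.OutputFormat.MPEG_4);
         * recorder.setAudioEncoder(MediaRecorder.AudioEncoder.AAC);
         * recorder.setOutputFile(PATH_NAME);
         * recorder.prepare();
         * recorder.start();
         * </pre>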
         */
        public static final int REMOTE_SUBMIX = 8;

        /**
         * Audio source for preemptible, low-priority software hotword detection.
         * It presents the same gain and pre-processing tuning as {@link #VOICE_RECOGNITION}.
         * <p>
         * An application should use this audio source when it wishes to do
         * always-on software hotword detection, while gracefully giving in to any other application
         * that might want to read from the microphone.
         * <p>
         * This is a hidden audio source.
         * @hide
         */
        protected static final int HOTWORD = 1999;
    }

    /**
     * Defines the video source. These constants are used with
     * {@link MediaRecorder#setVideoSource(int)}.
     */
    public final class VideoSource {
        /* Do not change these values without updating their counterparts
         * in include/media/mediarecorder.h!
         */
        private VideoSource() {}

        public static final int DEFAULT = 0;

        /** Camera video source
         * <p>
         * Using the {@link android.hardware.Camera} API as video source.
         *
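         * <p>
         * A minimal recording sketch with this source might look like the following, where
         * preview setup and error handling are omitted and PATH_NAME stands for the output
         * path:
         * <pre>Camera camera = Camera.open();
         * camera.unlock();
         *
         * MediaRecorder recorder = new MediaRecorder();
         * recorder.setCamera(camera);
         * recorder.setAudioSource(MediaRecorder.AudioSource.CAMCORDER);
         * recorder.setVideoSource(MediaRecorder.VideoSource.CAMERA);
         * recorder.setProfile(CamcorderProfile.get(CamcorderProfile.QUALITY_HIGH));
         * recorder.setOutputFile(PATH_NAME);
         * recorder.prepare();
         * recorder.start();
         * </pre>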
         */
        public static final int CAMERA = 1;

        /** Surface video source
         * <p>
         * Using a Surface as video source.
         * <p>
         * This flag must be used when recording from an
         * {@link android.hardware.camera2} API source.
         * <p>
         * When using this video source type, use {@link MediaRecorder#getSurface()}
         * to retrieve the surface created by MediaRecorder.
         */
        public static final int SURFACE = 2;
    }

    /**
     * Defines the output format. These constants are used with
     * {@link MediaRecorder#setOutputFormat(int)}.
     */
    public final class OutputFormat {
        /* Do not change these values without updating their counterparts
         * in include/media/mediarecorder.h!
         */
        private OutputFormat() {}

        public static final int DEFAULT = 0;

        /** 3GPP media file format */
        public static final int THREE_GPP = 1;

        /** MPEG4 media file format */
        public static final int MPEG_4 = 2;

        /** The following formats are audio-only (.aac or .amr) formats */

        /**
         * AMR NB file format
         * @deprecated Deprecated in favor of MediaRecorder.OutputFormat.AMR_NB
         */
        public static final int RAW_AMR = 3;

        /** AMR NB file format */
        public static final int AMR_NB = 3;

        /** AMR WB file format */
        public static final int AMR_WB = 4;

        /** @hide AAC ADIF file format */
        public static final int AAC_ADIF = 5;

        /** AAC ADTS file format */
        public static final int AAC_ADTS = 6;

        /** @hide Stream over a socket, limited to a single stream */
        public static final int OUTPUT_FORMAT_RTP_AVP = 7;

        /** @hide H.264/AAC data encapsulated in MPEG2/TS */
        public static final int OUTPUT_FORMAT_MPEG2TS = 8;

        /** VP8/VORBIS data in a WEBM container */
        public static final int WEBM = 9;
    };

    /**
     * Defines the audio encoding. These constants are used with
     * {@link MediaRecorder#setAudioEncoder(int)}.
     */
    public final class AudioEncoder {
        /* Do not change these values without updating their counterparts
         * in include/media/mediarecorder.h!
         */
        private AudioEncoder() {}

        public static final int DEFAULT = 0;

        /** AMR (Narrowband) audio codec */
        public static final int AMR_NB = 1;

        /** AMR (Wideband) audio codec */
        public static final int AMR_WB = 2;

        /** AAC Low Complexity (AAC-LC) audio codec */
        public static final int AAC = 3;

        /** High Efficiency AAC (HE-AAC) audio codec */
        public static final int HE_AAC = 4;

        /** Enhanced Low Delay AAC (AAC-ELD) audio codec */
        public static final int AAC_ELD = 5;

        /** Ogg Vorbis audio codec */
        public static final int VORBIS = 6;
    }

    /**
     * Defines the video encoding. These constants are used with
     * {@link MediaRecorder#setVideoEncoder(int)}.
     */
    public final class VideoEncoder {
        /* Do not change these values without updating their counterparts
         * in include/media/mediarecorder.h!
         */
        private VideoEncoder() {}

        public static final int DEFAULT = 0;
        public static final int H263 = 1;
        public static final int H264 = 2;
        public static final int MPEG_4_SP = 3;
        public static final int VP8 = 4;
    }

    /**
     * Sets the audio source to be used for recording. If this method is not
     * called, the output file will not contain an audio track. The source needs
     * to be specified before setting recording-parameters or encoders. Call
     * this only before setOutputFormat().
     *
     * @param audio_source the audio source to use
     * @throws IllegalStateException if it is called after setOutputFormat()
     * @see android.media.MediaRecorder.AudioSource
     */
    public native void setAudioSource(int audio_source) throws IllegalStateException;

    /**
     * Gets the maximum value for audio sources.
     * @see android.media.MediaRecorder.AudioSource
     */
    public static final int getAudioSourceMax() { return AudioSource.REMOTE_SUBMIX; }

    /**
     * Sets the video source to be used for recording. If this method is not
     * called, the output file will not contain a video track. The source needs
     * to be specified before setting recording-parameters or encoders.
     * Call this only before setOutputFormat().
     *
     * @param video_source the video source to use
     * @throws IllegalStateException if it is called after setOutputFormat()
     * @see android.media.MediaRecorder.VideoSource
     */
    public native void setVideoSource(int video_source) throws IllegalStateException;

    /**
     * Uses the settings from a CamcorderProfile object for recording. This method should
     * be called after the video AND audio sources are set, and before setOutputFile().
     * If a time lapse CamcorderProfile is used, audio-related source or recording
     * parameters are ignored.
     *
     * @param profile the CamcorderProfile to use
     * @see android.media.CamcorderProfile
     */
    public void setProfile(CamcorderProfile profile) {
        setOutputFormat(profile.fileFormat);
        setVideoFrameRate(profile.videoFrameRate);
        setVideoSize(profile.videoFrameWidth, profile.videoFrameHeight);
        setVideoEncodingBitRate(profile.videoBitRate);
        setVideoEncoder(profile.videoCodec);
        if (profile.quality >= CamcorderProfile.QUALITY_TIME_LAPSE_LOW &&
                profile.quality <= CamcorderProfile.QUALITY_TIME_LAPSE_QVGA) {
            // Nothing needs to be done. Call to setCaptureRate() enables
            // time lapse video recording.
        } else {
            setAudioEncodingBitRate(profile.audioBitRate);
            setAudioChannels(profile.audioChannels);
            setAudioSamplingRate(profile.audioSampleRate);
            setAudioEncoder(profile.audioCodec);
        }
    }

    /**
     * Sets the video frame capture rate. This can be used to set a different video frame capture
     * rate than the recorded video's playback rate. This method also sets the recording mode
     * to time lapse. In time lapse video recording, only video is recorded. Audio-related
     * parameters are ignored when a time lapse recording session starts, if an application
     * sets them.
     *
     * @param fps Rate at which frames should be captured in frames per second.
     * The fps can go as low as desired. However, the fastest fps will be limited by the hardware.
     * For resolutions that can be captured by the video camera, the fastest fps can be computed using
     * {@link android.hardware.Camera.Parameters#getPreviewFpsRange(int[])}. For higher
     * resolutions the fastest fps may be more restrictive.
     * Note that the recorder cannot guarantee that frames will be captured at the
     * given rate due to camera/encoder limitations. However, it tries to be as close as
     * possible.
     */
    public void setCaptureRate(double fps) {
        // Make sure that time lapse is enabled when this method is called.
        setParameter("time-lapse-enable=1");

        double timeBetweenFrameCapture = 1 / fps;
        long timeBetweenFrameCaptureUs = (long) (1000000 * timeBetweenFrameCapture);
        setParameter("time-between-time-lapse-frame-capture=" + timeBetweenFrameCaptureUs);
    }

    /**
     * Sets the orientation hint for output video playback.
     * This method should be called before prepare(). This method will not
     * trigger the source video frame to rotate during video recording, but will
     * instead add a composition matrix containing the rotation angle to the output
     * video if the output format is OutputFormat.THREE_GPP or
     * OutputFormat.MPEG_4, so that a video player can choose the proper
     * orientation for playback. Note that some video players may choose
     * to ignore the composition matrix in a video during playback.
     *
     * @param degrees the angle to be rotated clockwise in degrees.
     * The supported angles are 0, 90, 180, and 270 degrees.
     * @throws IllegalArgumentException if the angle is not supported.
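     *
     * <p>For example, an application recording with a portrait-oriented camera might apply
     * a 90 degree hint as part of its configuration, before prepare(); the exact angle is
     * illustrative and depends on the device's camera orientation:
     * <pre>recorder.setOrientationHint(90);
     * recorder.prepare();
     * recorder.start();
     * </pre>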
     *
     */
    public void setOrientationHint(int degrees) {
        if (degrees != 0 &&
            degrees != 90 &&
            degrees != 180 &&
            degrees != 270) {
            throw new IllegalArgumentException("Unsupported angle: " + degrees);
        }
        setParameter("video-param-rotation-angle-degrees=" + degrees);
    }

    /**
     * Sets and stores the geodata (latitude and longitude) in the output file.
     * This method should be called before prepare(). The geodata is
     * stored in the udta box if the output format is OutputFormat.THREE_GPP
     * or OutputFormat.MPEG_4, and is ignored for other output formats.
     * The geodata is stored according to the ISO-6709 standard.
     *
     * @param latitude latitude in degrees. Its value must be in the
     * range [-90, 90].
     * @param longitude longitude in degrees. Its value must be in the
     * range [-180, 180].
     *
     * @throws IllegalArgumentException if the given latitude or
     * longitude is out of range.
     *
     */
    public void setLocation(float latitude, float longitude) {
        int latitudex10000  = (int) (latitude * 10000 + 0.5);
        int longitudex10000 = (int) (longitude * 10000 + 0.5);

        if (latitudex10000 > 900000 || latitudex10000 < -900000) {
            String msg = "Latitude: " + latitude + " out of range.";
            throw new IllegalArgumentException(msg);
        }
        if (longitudex10000 > 1800000 || longitudex10000 < -1800000) {
            String msg = "Longitude: " + longitude + " out of range";
            throw new IllegalArgumentException(msg);
        }

        setParameter("param-geotag-latitude=" + latitudex10000);
        setParameter("param-geotag-longitude=" + longitudex10000);
    }

    /**
     * Sets the format of the output file produced during recording. Call this
     * after setAudioSource()/setVideoSource() but before prepare().
     *
     * <p>It is recommended to always use 3GP format when using the H.263
     * video encoder and AMR audio encoder. Using an MPEG-4 container format
     * may confuse some desktop players.
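     *
     * <p>For example, a configuration following that recommendation might pair the format
     * and encoders as in the following fragment (the surrounding source, output-file, and
     * prepare/start calls are omitted):
     * <pre>recorder.setOutputFormat(MediaRecorder.OutputFormat.THREE_GPP);
     * recorder.setVideoEncoder(MediaRecorder.VideoEncoder.H263);
     * recorder.setAudioEncoder(MediaRecorder.AudioEncoder.AMR_NB);
     * </pre>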
     *
     * @param output_format the output format to use. The output format
     * needs to be specified before setting recording-parameters or encoders.
     * @throws IllegalStateException if it is called after prepare() or before
     * setAudioSource()/setVideoSource().
     * @see android.media.MediaRecorder.OutputFormat
     */
    public native void setOutputFormat(int output_format) throws IllegalStateException;

    /**
     * Sets the width and height of the video to be captured. Must be called
     * after setVideoSource(). Call this after setOutputFormat() but before
     * prepare().
     *
     * @param width the width of the video to be captured
     * @param height the height of the video to be captured
     * @throws IllegalStateException if it is called after
     * prepare() or before setOutputFormat()
     */
    public native void setVideoSize(int width, int height) throws IllegalStateException;

    /**
     * Sets the frame rate of the video to be captured. Must be called
     * after setVideoSource(). Call this after setOutputFormat() but before
     * prepare().
     *
     * @param rate the number of frames per second of video to capture
     * @throws IllegalStateException if it is called after
     * prepare() or before setOutputFormat().
     *
     * NOTE: On some devices that have auto-frame rate, this sets the
     * maximum frame rate, not a constant frame rate. Actual frame rate
     * will vary according to lighting conditions.
     */
    public native void setVideoFrameRate(int rate) throws IllegalStateException;

    /**
     * Sets the maximum duration (in ms) of the recording session.
     * Call this after setOutputFormat() but before prepare().
     * After recording reaches the specified duration, a notification
     * will be sent to the {@link android.media.MediaRecorder.OnInfoListener}
     * with a "what" code of {@link #MEDIA_RECORDER_INFO_MAX_DURATION_REACHED}
     * and recording will be stopped. Stopping happens asynchronously; there
     * is no guarantee that the recorder will have stopped by the time the
     * listener is notified.
     *
     * @param max_duration_ms the maximum duration in ms (if zero or negative, disables the duration limit)
     *
     */
    public native void setMaxDuration(int max_duration_ms) throws IllegalArgumentException;

    /**
     * Sets the maximum filesize (in bytes) of the recording session.
     * Call this after setOutputFormat() but before prepare().
     * After recording reaches the specified filesize, a notification
     * will be sent to the {@link android.media.MediaRecorder.OnInfoListener}
     * with a "what" code of {@link #MEDIA_RECORDER_INFO_MAX_FILESIZE_REACHED}
     * and recording will be stopped. Stopping happens asynchronously; there
     * is no guarantee that the recorder will have stopped by the time the
     * listener is notified.
     *
     * @param max_filesize_bytes the maximum filesize in bytes (if zero or negative, disables the limit)
     *
     */
    public native void setMaxFileSize(long max_filesize_bytes) throws IllegalArgumentException;

    /**
     * Sets the audio encoder to be used for recording. If this method is not
     * called, the output file will not contain an audio track. Call this after
     * setOutputFormat() but before prepare().
     *
     * @param audio_encoder the audio encoder to use.
     * @throws IllegalStateException if it is called before
     * setOutputFormat() or after prepare().
     * @see android.media.MediaRecorder.AudioEncoder
     */
    public native void setAudioEncoder(int audio_encoder) throws IllegalStateException;

    /**
     * Sets the video encoder to be used for recording. If this method is not
     * called, the output file will not contain a video track. Call this after
     * setOutputFormat() and before prepare().
     *
     * @param video_encoder the video encoder to use.
     * @throws IllegalStateException if it is called before
     * setOutputFormat() or after prepare()
     * @see android.media.MediaRecorder.VideoEncoder
     */
    public native void setVideoEncoder(int video_encoder) throws IllegalStateException;

    /**
     * Sets the audio sampling rate for recording. Call this method before prepare().
     * Prepare() may perform additional checks on the parameter to verify whether
     * the specified audio sampling rate is applicable. The sampling rate depends
     * on the format for the audio recording, as well as the capabilities of the platform.
     * For instance, the sampling rate supported by the AAC audio coding standard ranges
     * from 8 to 96 kHz, the sampling rate supported by AMR-NB is 8 kHz, and the sampling
     * rate supported by AMR-WB is 16 kHz. Please consult the related audio coding
     * standard for the supported audio sampling rates.
     *
     * @param samplingRate the sampling rate for audio in samples per second.
     */
    public void setAudioSamplingRate(int samplingRate) {
        if (samplingRate <= 0) {
            throw new IllegalArgumentException("Audio sampling rate is not positive");
        }
        setParameter("audio-param-sampling-rate=" + samplingRate);
    }

    /**
     * Sets the number of audio channels for recording. Call this method before prepare().
     * Prepare() may perform additional checks on the parameter to verify whether the
     * specified number of audio channels is applicable.
     *
     * @param numChannels the number of audio channels. Usually it is either 1 (mono) or 2
     * (stereo).
     */
    public void setAudioChannels(int numChannels) {
        if (numChannels <= 0) {
            throw new IllegalArgumentException("Number of channels is not positive");
        }
        setParameter("audio-param-number-of-channels=" + numChannels);
    }

    /**
     * Sets the audio encoding bit rate for recording. Call this method before prepare().
     * Prepare() may perform additional checks on the parameter to verify whether the
     * specified bit rate is applicable, and sometimes the passed bitRate will be clipped
     * internally to ensure the audio recording can proceed smoothly based on the
     * capabilities of the platform.
     *
     * @param bitRate the audio encoding bit rate in bits per second.
     */
    public void setAudioEncodingBitRate(int bitRate) {
        if (bitRate <= 0) {
            throw new IllegalArgumentException("Audio encoding bit rate is not positive");
        }
        setParameter("audio-param-encoding-bitrate=" + bitRate);
    }

    /**
     * Sets the video encoding bit rate for recording. Call this method before prepare().
     * Prepare() may perform additional checks on the parameter to verify whether the
     * specified bit rate is applicable, and sometimes the passed bitRate will be
     * clipped internally to ensure the video recording can proceed smoothly based on
     * the capabilities of the platform.
     *
     * @param bitRate the video encoding bit rate in bits per second.
     */
    public void setVideoEncodingBitRate(int bitRate) {
        if (bitRate <= 0) {
            throw new IllegalArgumentException("Video encoding bit rate is not positive");
        }
        setParameter("video-param-encoding-bitrate=" + bitRate);
    }

    /**
     * Currently not implemented. It does nothing.
     * @deprecated Time lapse mode video recording using camera still image capture
     * is not desirable, and will not be supported.
     * @hide
     */
    public void setAuxiliaryOutputFile(FileDescriptor fd) {
        Log.w(TAG, "setAuxiliaryOutputFile(FileDescriptor) is no longer supported.");
    }

    /**
     * Currently not implemented. It does nothing.
     * @deprecated Time lapse mode video recording using camera still image capture
     * is not desirable, and will not be supported.
     * @hide
     */
    public void setAuxiliaryOutputFile(String path) {
        Log.w(TAG, "setAuxiliaryOutputFile(String) is no longer supported.");
    }

    /**
     * Pass in the file descriptor of the file to be written. Call this after
     * setOutputFormat() but before prepare().
     *
     * @param fd an open file descriptor to be written into.
     * @throws IllegalStateException if it is called before
     * setOutputFormat() or after prepare()
     */
    public void setOutputFile(FileDescriptor fd) throws IllegalStateException {
        mPath = null;
        mFd = fd;
    }

    /**
     * Sets the path of the output file to be produced. Call this after
     * setOutputFormat() but before prepare().
     *
     * @param path The pathname to use.
     * @throws IllegalStateException if it is called before
     * setOutputFormat() or after prepare()
     */
    public void setOutputFile(String path) throws IllegalStateException {
        mFd = null;
        mPath = path;
    }

    // native implementation
    private native void _setOutputFile(FileDescriptor fd, long offset, long length)
        throws IllegalStateException, IOException;
    private native void _prepare() throws IllegalStateException, IOException;

    /**
     * Prepares the recorder to begin capturing and encoding data. This method
     * must be called after setting up the desired audio and video sources,
     * encoders, file format, etc., but before start().
     *
     * @throws IllegalStateException if it is called after
     * start() or before setOutputFormat().
     * @throws IOException if prepare fails otherwise.
     */
    public void prepare() throws IllegalStateException, IOException {
        if (mPath != null) {
            RandomAccessFile file = new RandomAccessFile(mPath, "rws");
            try {
                _setOutputFile(file.getFD(), 0, 0);
            } finally {
                file.close();
            }
        } else if (mFd != null) {
            _setOutputFile(mFd, 0, 0);
        } else {
            throw new IOException("No valid output file");
        }

        _prepare();
    }

    /**
     * Begins capturing and encoding data to the file specified with
     * setOutputFile(). Call this after prepare().
     *
     * <p>Since API level 13, if applications set a camera via
     * {@link #setCamera(Camera)}, the apps can use the camera after this method
     * call. The apps do not need to lock the camera again. However, if this
     * method fails, the apps should still lock the camera back. The apps should
     * not start another recording session during recording.
     *
     * @throws IllegalStateException if it is called before
     * prepare().
     */
    public native void start() throws IllegalStateException;

    /**
     * Stops recording. Call this after start(). Once recording is stopped,
     * you will have to configure it again as if it had just been constructed.
     * Note that a RuntimeException is intentionally thrown to the
     * application if no valid audio/video data has been received when stop()
     * is called. This happens if stop() is called immediately after
     * start(). The failure lets the application take action to
     * clean up the output file (delete the output file, for instance), since
     * the output file is not properly constructed when this happens.
     *
     * @throws IllegalStateException if it is called before start()
     */
    public native void stop() throws IllegalStateException;

    /**
     * Restarts the MediaRecorder to its idle state. After calling
     * this method, you will have to configure it again as if it had just been
     * constructed.
     */
    public void reset() {
        native_reset();

        // make sure none of the listeners get called anymore
        mEventHandler.removeCallbacksAndMessages(null);
    }

    private native void native_reset();

    /**
     * Returns the maximum absolute amplitude that was sampled since the last
     * call to this method. Call this only after setAudioSource().
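     *
     * <p>For example, an application might poll this method on its Looper thread to drive a
     * simple input level meter while recording; the 100 ms interval is illustrative and
     * {@code recorder} is an already-started MediaRecorder:
     * <pre>final Handler handler = new Handler();
     * handler.postDelayed(new Runnable() {
     *     public void run() {
     *         int amplitude = recorder.getMaxAmplitude(); // returns 0 on the first call
     *         // update the level meter with the amplitude here, then poll again
     *         handler.postDelayed(this, 100);
     *     }
     * }, 100);
     * </pre>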
     *
     * @return the maximum absolute amplitude measured since the last call, or
     * 0 when called for the first time
     * @throws IllegalStateException if it is called before
     * the audio source has been set.
     */
    public native int getMaxAmplitude() throws IllegalStateException;

    /* Do not change this value without updating its counterpart
     * in include/media/mediarecorder.h or mediaplayer.h!
     */
    /** Unspecified media recorder error.
     * @see android.media.MediaRecorder.OnErrorListener
     */
    public static final int MEDIA_RECORDER_ERROR_UNKNOWN = 1;
    /** Media server died. In this case, the application must release the
     * MediaRecorder object and instantiate a new one.
     * @see android.media.MediaRecorder.OnErrorListener
     */
    public static final int MEDIA_ERROR_SERVER_DIED = 100;

    /**
     * Interface definition for a callback to be invoked when an error
     * occurs while recording.
     */
    public interface OnErrorListener {
        /**
         * Called when an error occurs while recording.
         *
         * @param mr the MediaRecorder that encountered the error
         * @param what the type of error that has occurred: