Camera Development Series Part 4: Using MediaMuxer to mux encoded audio and video into an MP4 container
Camera Development Series Part 5: Building a simple player with MediaExtractor
Camera Development Series Part 6: Pushing a video stream with the MINA framework
Camera Development Series Part 7: Rendering the Camera preview with GLSurfaceView
A media file is made up of audio and video streams. With MediaExtractor we can pull the audio or video stream out on its own, and each extracted stream can then be played back independently.
The main MediaExtractor methods used here are: setDataSource(), getTrackCount() / getTrackFormat(), selectTrack(), readSampleData(), getSampleTime(), advance(), unselectTrack() and release().
MediaExtractor gives us the separated audio and video data, so how do we use MediaCodec to decode and play that data, i.e. build a video player? I think the following steps are enough: 1) use MediaExtractor to find and select the video (or audio) track; 2) create a MediaCodec decoder from the track's MediaFormat and configure it (binding a Surface for video); 3) keep feeding encoded samples from the extractor into the decoder's input buffers; 4) render the decoded output buffers to the Surface (or write the PCM to an AudioTrack for audio); 5) pace the output using the presentation timestamps.
Since the previous posts have already covered MediaCodec, its usage should be familiar by now, and MediaExtractor is simple to use as well, so here is the code that uses MediaExtractor to parse the video data:
private Surface mSurface;
private MediaCodec mCodec;
private static volatile boolean isDecoding = false;
private static volatile boolean isPause = false;

public void startDecodeFromMPEG_4(final String MPEG_4_Path, Surface surface) {
    if (!new File(MPEG_4_Path).exists()) {
        try {
            throw new FileNotFoundException("MPEG_4 file not found");
        } catch (FileNotFoundException e) {
            e.printStackTrace();
        }
        return; // do not start decoding if the file does not exist
    }
    this.mSurface = surface;
    isDecoding = true;
    Thread mediaDecodeThread = new Thread(new Runnable() {
        @Override
        public void run() {
            try {
                // MediaExtractor separates the audio and video data in the container
                MediaExtractor videoExtractor = new MediaExtractor();
                videoExtractor.setDataSource(MPEG_4_Path);
                int videoTrackIndex = -1; // index of the video track
                // iterate over all video and audio tracks in the stream
                for (int i = 0; i < videoExtractor.getTrackCount(); i++) {
                    MediaFormat format = videoExtractor.getTrackFormat(i);
                    String mime = format.getString(MediaFormat.KEY_MIME); // MIME type describing this track
                    if (mime.startsWith("video/")) { // found the video track
                        videoExtractor.selectTrack(i);
                        videoTrackIndex = i;
                        int width = format.getInteger(MediaFormat.KEY_WIDTH);
                        int height = format.getInteger(MediaFormat.KEY_HEIGHT);
                        float time = format.getLong(MediaFormat.KEY_DURATION) / 1000000f; // duration in seconds
                        try {
                            mCodec = MediaCodec.createDecoderByType(mime);
                            mCodec.configure(format, mSurface, null, 0);
                            mCodec.start();
                            if (mVideoCallBack != null) {
                                mVideoCallBack.onGetVideoInfo(width, height, time);
                            }
                        } catch (IOException e) {
                            e.printStackTrace();
                        }
                        break;
                    }
                }
                MediaCodec.BufferInfo videoBufferInfo = new MediaCodec.BufferInfo();
                long startTimeStamp = System.currentTimeMillis(); // wall-clock time when decoding started
                while (isDecoding) {
                    // paused: skip feeding/draining until resumed
                    if (isPause) {
                        continue;
                    }
                    int inputBufferIndex = mCodec.dequeueInputBuffer(-1);
                    if (inputBufferIndex >= 0) {
                        ByteBuffer inputBuffer;
                        if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) {
                            inputBuffer = mCodec.getInputBuffer(inputBufferIndex);
                        } else {
                            ByteBuffer[] inputBuffers = mCodec.getInputBuffers();
                            inputBuffer = inputBuffers[inputBufferIndex];
                        }
                        // read the current encoded sample into the input buffer
                        int sampleSize = videoExtractor.readSampleData(inputBuffer, 0);
                        if (sampleSize < 0) {
                            // no more samples: signal end of stream and exit the decode loop
                            mCodec.queueInputBuffer(inputBufferIndex, 0, 0, 0, MediaCodec.BUFFER_FLAG_END_OF_STREAM);
                            videoExtractor.unselectTrack(videoTrackIndex);
                            break;
                        } else {
                            mCodec.queueInputBuffer(inputBufferIndex, 0, sampleSize, videoExtractor.getSampleTime(), 0);
                            videoExtractor.advance();
                        }
                    }
                    int outputBufferIndex = mCodec.dequeueOutputBuffer(videoBufferInfo, TIMEOUT_USEC);
                    while (outputBufferIndex >= 0) {
                        decodeDelay(videoBufferInfo, startTimeStamp);
                        // render = true: the decoded frame is drawn onto the Surface
                        mCodec.releaseOutputBuffer(outputBufferIndex, true);
                        outputBufferIndex = mCodec.dequeueOutputBuffer(videoBufferInfo, 0);
                    }
                }
                stopDecodeSync();
                videoExtractor.release();
            } catch (IOException e) {
                e.printStackTrace();
            }
        }
    });
    mediaDecodeThread.start();
}

private void stopDecodeSync() {
    if (null != mCodec) {
        mCodec.stop();
        mCodec.release();
        mCodec = null;
    }
}
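For completeness, this is roughly how the video decoder above would be driven from an Activity. The wrapper class name VideoDecoder, the layout id R.id.surface_view and the file path are assumptions for illustration only; the point is that the Surface has to be valid before startDecodeFromMPEG_4() is called, so decoding is started from surfaceCreated():

// Hypothetical usage sketch: VideoDecoder is assumed to hold the startDecodeFromMPEG_4() above.
SurfaceView surfaceView = (SurfaceView) findViewById(R.id.surface_view);
surfaceView.getHolder().addCallback(new SurfaceHolder.Callback() {
    @Override
    public void surfaceCreated(SurfaceHolder holder) {
        // the Surface is only valid from this point on
        new VideoDecoder().startDecodeFromMPEG_4("/sdcard/test.mp4", holder.getSurface());
    }

    @Override
    public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) { }

    @Override
    public void surfaceDestroyed(SurfaceHolder holder) { }
});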
The code that uses MediaExtractor to parse the audio data:
private static volatile boolean isDecoding = false;
private static volatile boolean isPause = false;
private MediaCodec mCodec;
private static AudioDecoder mAudioDecoder;
private final String mime = "audio/mp4a-latm";
private MediaCodec.BufferInfo mBufferInfo;
private BufferedOutputStream outputStream;

public void startDecodeFromMPEG_4(final String MPEG_4_Path) {
    if (!new File(MPEG_4_Path).exists()) {
        try {
            throw new FileNotFoundException("MPEG_4 file not found");
        } catch (FileNotFoundException e) {
            e.printStackTrace();
        }
        return; // do not start decoding if the file does not exist
    }
    isDecoding = true;
    Thread audioDecodeThread = new Thread(new Runnable() {
        @Override
        public void run() {
            try {
                // MediaExtractor separates the audio and video data in the container
                MediaExtractor audioExtractor = new MediaExtractor();
                audioExtractor.setDataSource(MPEG_4_Path);
                AudioTrack audioTrack = null;
                int audioExtractorTrackIndex = -1; // index of the audio track
                // iterate over all video and audio tracks in the stream
                for (int i = 0; i < audioExtractor.getTrackCount(); i++) {
                    MediaFormat format = audioExtractor.getTrackFormat(i);
                    String mime = format.getString(MediaFormat.KEY_MIME); // MIME type describing this track
                    if (mime.startsWith("audio/")) { // found the audio track
                        audioExtractor.selectTrack(i);
                        audioExtractorTrackIndex = i;
                        int audioChannels = format.getInteger(MediaFormat.KEY_CHANNEL_COUNT);
                        int audioSampleRate = format.getInteger(MediaFormat.KEY_SAMPLE_RATE);
                        int minBufferSize = AudioTrack.getMinBufferSize(audioSampleRate,
                                (audioChannels == 1 ? AudioFormat.CHANNEL_OUT_MONO : AudioFormat.CHANNEL_OUT_STEREO),
                                AudioFormat.ENCODING_PCM_16BIT);
                        audioTrack = new AudioTrack(AudioManager.STREAM_MUSIC,
                                audioSampleRate,
                                (audioChannels == 1 ? AudioFormat.CHANNEL_OUT_MONO : AudioFormat.CHANNEL_OUT_STEREO),
                                AudioFormat.ENCODING_PCM_16BIT,
                                minBufferSize,
                                AudioTrack.MODE_STREAM);
                        audioTrack.play();
                        try {
                            mCodec = MediaCodec.createDecoderByType(mime);
                            mCodec.configure(format, null, null, 0);
                            mCodec.start();
                        } catch (IOException e) {
                            e.printStackTrace();
                        }
                        break;
                    }
                }
                MediaCodec.BufferInfo audioBufferInfo = new MediaCodec.BufferInfo();
                ByteBuffer[] inputBuffers = mCodec.getInputBuffers();
                ByteBuffer[] outputBuffers = mCodec.getOutputBuffers();
                long startTimeStamp = System.currentTimeMillis(); // wall-clock time when decoding started
                while (isDecoding) {
                    // paused: skip feeding/draining until resumed
                    if (isPause) {
                        continue;
                    }
                    int inputBufferIndex = mCodec.dequeueInputBuffer(-1);
                    if (inputBufferIndex >= 0) {
                        ByteBuffer inputBuffer;
                        if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) {
                            inputBuffer = mCodec.getInputBuffer(inputBufferIndex);
                        } else {
                            inputBuffer = inputBuffers[inputBufferIndex];
                        }
                        if (inputBuffer == null) break; // bail out so the AudioTrack and extractor below still get released
                        // read the current encoded sample into the input buffer
                        int sampleSize = audioExtractor.readSampleData(inputBuffer, 0);
                        if (sampleSize < 0) {
                            // no more samples: signal end of stream and exit the decode loop
                            mCodec.queueInputBuffer(inputBufferIndex, 0, 0, 0, MediaCodec.BUFFER_FLAG_END_OF_STREAM);
                            audioExtractor.unselectTrack(audioExtractorTrackIndex);
                            break;
                        } else {
                            mCodec.queueInputBuffer(inputBufferIndex, 0, sampleSize, audioExtractor.getSampleTime(), 0);
                            audioExtractor.advance();
                        }
                    }
                    int outputBufferIndex = mCodec.dequeueOutputBuffer(audioBufferInfo, TIMEOUT_USEC);
                    ByteBuffer outputBuffer;
                    while (outputBufferIndex >= 0) {
                        decodeDelay(audioBufferInfo, startTimeStamp);
                        // get the decoded output buffer
                        if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) {
                            outputBuffer = mCodec.getOutputBuffer(outputBufferIndex);
                        } else {
                            outputBuffer = outputBuffers[outputBufferIndex]; // on 6.0+ this path crashes on some devices
                        }
                        // copy out the decoded PCM data
                        byte[] outData = new byte[audioBufferInfo.size];
                        outputBuffer.get(outData);
                        // clear the buffer for reuse
                        outputBuffer.clear();
                        // play the decoded PCM data
                        if (audioTrack != null) {
                            audioTrack.write(outData, 0, outData.length);
                        }
                        // release the output buffer back to the codec (no Surface rendering for audio)
                        mCodec.releaseOutputBuffer(outputBufferIndex, false);
                        outputBufferIndex = mCodec.dequeueOutputBuffer(audioBufferInfo, TIMEOUT_USEC);
                    }
                }
                if (audioTrack != null) {
                    audioTrack.stop();
                    audioTrack.release();
                    audioTrack = null;
                }
                audioExtractor.release();
                stopDecodeSync();
            } catch (IOException e) {
                e.printStackTrace();
            }
        }
    });
    audioDecodeThread.start();
}
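Putting the two pieces together, a minimal "player" is just the two decoder threads started against the same file. VideoDecoder and AudioDecoder are placeholder names for whatever classes hold the two startDecodeFromMPEG_4() methods above:

// Minimal sketch: start the video and audio decode threads for the same MP4 file.
public void play(String mp4Path, Surface surface) {
    new VideoDecoder().startDecodeFromMPEG_4(mp4Path, surface); // renders frames to the Surface
    new AudioDecoder().startDecodeFromMPEG_4(mp4Path);          // plays PCM through an AudioTrack
}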
Audio/video synchronization:
/**
 * Delay releasing a decoded frame so playback follows the presentation timestamps.
 * @param bufferInfo  buffer info of the frame about to be rendered/played
 * @param startMillis wall-clock time (ms) at which decoding started
 */
private void decodeDelay(MediaCodec.BufferInfo bufferInfo, long startMillis) {
    // how far ahead of wall-clock time this frame's presentation timestamp is (in ms)
    long current = bufferInfo.presentationTimeUs / 1000 - (System.currentTimeMillis() - startMillis);
    if (current > 0) {
        try {
            Thread.sleep(current);
        } catch (InterruptedException e) {
            e.printStackTrace();
        }
    }
}
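For example, if the frame about to be released has presentationTimeUs = 2,000,000 µs (2 s into the stream) but only 1,800 ms of wall-clock time have passed since decoding started, decodeDelay() sleeps for 200 ms so the frame is not shown early; if decoding is already behind schedule, current is negative and the frame is released immediately.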
The code above simply takes a local video file and decodes the video frames for display and the audio frames for playback in two separate threads, so synchronization problems remain. Synchronization boils down to this: when the video runs ahead, wait for the audio to catch up, or speed up / drop audio frames; when the audio runs ahead, decide whether the video frames need to be played faster. Depending on which side is adjusted, there are several sync strategies: sync video to the audio clock, sync audio to the video clock, or sync both to an external clock. There are many details to get right here, so if you are building a full audio/video product rather than a small feature inside a project, it is better to use a mature solution such as ijkplayer or FFmpeg.
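As a rough illustration of the "sync video to the audio clock" strategy mentioned above (not part of the code in this post, which paces both streams against wall-clock time), the wall-clock startTimeStamp could be replaced by a shared clock driven by the last audio timestamp written to the AudioTrack, and the video loop would wait against that clock instead:

// Hypothetical sketch of an audio-master clock shared by both decode loops.
class MediaClock {
    private volatile long audioPtsUs = 0; // last audio presentation timestamp written to the AudioTrack

    // called from the audio decode loop after each AudioTrack.write()
    void onAudioFramePlayed(long ptsUs) {
        audioPtsUs = ptsUs;
    }

    // called from the video decode loop before releasing an output buffer:
    // wait until the audio clock has caught up with this video frame's timestamp
    void waitForVideoFrame(long videoPtsUs) {
        long aheadUs = videoPtsUs - audioPtsUs;
        if (aheadUs > 0) {
            try {
                Thread.sleep(aheadUs / 1000); // video is ahead of audio: wait
            } catch (InterruptedException e) {
                Thread.currentThread().interrupt();
            }
        }
        // if aheadUs <= 0 the video frame is late; render it immediately (or drop it)
    }
}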