Android使用FFMpeg實現推送視頻直播流到服務器


背景

在過去的2015年中,視頻直播業的新寵無疑是戶外直播。隨着4G網絡的普及和覆蓋率的提升,主播可以在戶外通過手機進行直播。而觀眾也願意為這種可以足不出戶而觀天下事的服務買單。基於這樣的背景,本文主要實現在Android設備上采集視頻並推流到服務器。

概覽

如下圖所示,在安卓上采集並推流主要應用到兩個類。首先是安卓Api自帶的Camera,實現從攝像頭采集圖像。然後是Javacv 中的FFMpegFrameRecorder類實現對Camera采集到的幀編碼並推流。

(圖:安卓視頻采集流程圖)

關鍵步驟與代碼

下面結合上面的流程圖給出視頻采集的關鍵步驟。 首先是Camera類的初始化。

// 初始化Camera設備 cameraDevice = Camera.open(); Log.i(LOG_TAG, "cameara open"); cameraView = new CameraView(this, cameraDevice); 

上面的CameraView類是我們實現的負責預覽視頻采集和將采集到的幀寫入FFMpegFrameRecorder的類。具體代碼如下:

/**
 * Preview view that owns the Camera preview surface and forwards every
 * captured preview frame (YUV byte[]) into the FFmpegFrameRecorder.
 *
 * NOTE(review): the exact ordering of stopPreview / setPreviewDisplay /
 * setPreviewCallback calls below matters on the legacy android.hardware.Camera
 * API — do not reorder without testing on a device.
 */
class CameraView extends SurfaceView implements SurfaceHolder.Callback, PreviewCallback {
    private SurfaceHolder mHolder;
    private Camera mCamera;

    public CameraView(Context context, Camera camera) {
        super(context);
        Log.w("camera", "camera view");
        mCamera = camera;
        mHolder = getHolder();
        // Register this view as the SurfaceHolder lifecycle callback.
        mHolder.addCallback(CameraView.this);
        mHolder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
        // Register this view as the camera preview-frame callback.
        mCamera.setPreviewCallback(CameraView.this);
    }

    @Override
    public void surfaceCreated(SurfaceHolder holder) {
        try {
            stopPreview();
            mCamera.setPreviewDisplay(holder);
        } catch (IOException exception) {
            // Surface could not be attached; give the camera back to the system.
            mCamera.release();
            mCamera = null;
        }
    }

    public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {
        stopPreview();
        Camera.Parameters camParams = mCamera.getParameters();
        List<Camera.Size> sizes = camParams.getSupportedPreviewSizes();
        // Sort the list in ascending order
        Collections.sort(sizes, new Comparator<Camera.Size>() {
            public int compare(final Camera.Size a, final Camera.Size b) {
                return a.width * a.height - b.width * b.height;
            }
        });
        // Pick the first preview size that is equal or bigger, or pick the last (biggest) option if we cannot
        // reach the initial settings of imageWidth/imageHeight.
        for (int i = 0; i < sizes.size(); i++) {
            if ((sizes.get(i).width >= imageWidth && sizes.get(i).height >= imageHeight) || i == sizes.size() - 1) {
                imageWidth = sizes.get(i).width;
                imageHeight = sizes.get(i).height;
                Log.v(LOG_TAG, "Changed to supported resolution: " + imageWidth + "x" + imageHeight);
                break;
            }
        }
        camParams.setPreviewSize(imageWidth, imageHeight);
        Log.v(LOG_TAG, "Setting imageWidth: " + imageWidth + " imageHeight: " + imageHeight + " frameRate: " + frameRate);
        camParams.setPreviewFrameRate(frameRate);
        Log.v(LOG_TAG, "Preview Framerate: " + camParams.getPreviewFrameRate());
        mCamera.setParameters(camParams);
        // Set the holder (which might have changed) again
        try {
            mCamera.setPreviewDisplay(holder);
            mCamera.setPreviewCallback(CameraView.this);
            startPreview();
        } catch (Exception e) {
            Log.e(LOG_TAG, "Could not set preview display in surfaceChanged");
        }
    }

    @Override
    public void surfaceDestroyed(SurfaceHolder holder) {
        try {
            mHolder.addCallback(null);
            mCamera.setPreviewCallback(null);
        } catch (RuntimeException e) {
            // The camera has probably just been released, ignore.
        }
    }

    public void startPreview() {
        if (!isPreviewOn && mCamera != null) {
            isPreviewOn = true;
            mCamera.startPreview();
        }
    }

    public void stopPreview() {
        if (isPreviewOn && mCamera != null) {
            isPreviewOn = false;
            mCamera.stopPreview();
        }
    }

    @Override
    public void onPreviewFrame(byte[] data, Camera camera) {
        // Drop frames until audio capture is actually running, and keep
        // resetting startTime so A/V timestamps share the same origin.
        if (audioRecord == null || audioRecord.getRecordingState() != AudioRecord.RECORDSTATE_RECORDING) {
            startTime = System.currentTimeMillis();
            return;
        }
        // Delayed-recording mode: stash the frame in the in-memory ring buffer first.
        if (RECORD_LENGTH > 0) {
            int i = imagesIndex++ % images.length;
            yuvImage = images[i];
            // Timestamp in microseconds relative to startTime.
            timestamps[i] = 1000 * (System.currentTimeMillis() - startTime);
        }
        if (yuvImage != null && recording) {
            ((ByteBuffer) yuvImage.image[0].position(0)).put(data);
            // Live-streaming mode: write the frame straight into FFmpegFrameRecorder.
            if (RECORD_LENGTH <= 0) try {
                Log.v(LOG_TAG, "Writing Frame");
                long t = 1000 * (System.currentTimeMillis() - startTime);
                // Recorder timestamps must be monotonically non-decreasing.
                if (t > recorder.getTimestamp()) {
                    recorder.setTimestamp(t);
                }
                recorder.record(yuvImage);
            } catch (FFmpegFrameRecorder.Exception e) {
                Log.v(LOG_TAG, e.getMessage());
                e.printStackTrace();
            }
        }
    }
}

初始化FFmpegFrameRecorder類

recorder = new FFmpegFrameRecorder(ffmpeg_link, imageWidth, imageHeight, 1);
// Video codec id 28 denotes H.264 (per the original author's note;
// TODO confirm against avcodec's AV_CODEC_ID_H264 constant).
recorder.setVideoCodec(28);
recorder.setFormat("flv");
// Audio sample rate.
recorder.setSampleRate(sampleAudioRateInHz);
// Frame rate, i.e. images per second.
recorder.setFrameRate(frameRate);
// Audio capture thread (started later in startRecording()).
audioRecordRunnable = new AudioRecordRunnable();
audioThread = new Thread(audioRecordRunnable);
runAudioThread = true;

其中的AudioRecordRunnable是我們自己實現的音頻采集線程,代碼如下

 class AudioRecordRunnable implements Runnable { @Override public void run() { android.os.Process.setThreadPriority(android.os.Process.THREAD_PRIORITY_URGENT_AUDIO); // Audio int bufferSize; ShortBuffer audioData; int bufferReadResult; bufferSize = AudioRecord.getMinBufferSize(sampleAudioRateInHz, AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT); audioRecord = new AudioRecord(MediaRecorder.AudioSource.MIC, sampleAudioRateInHz, AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT, bufferSize); //如果是錄播,則需要錄播長度的緩存 if (RECORD_LENGTH > 0) { samplesIndex = 0; samples = new ShortBuffer[RECORD_LENGTH * sampleAudioRateInHz * 2 / bufferSize + 1]; for (int i = 0; i < samples.length; i++) { samples[i] = ShortBuffer.allocate(bufferSize); } } else { //直播只需要相當於一幀的音頻的數據緩存 audioData = ShortBuffer.allocate(bufferSize); } Log.d(LOG_TAG, "audioRecord.startRecording()"); audioRecord.startRecording(); /* ffmpeg_audio encoding loop */ while (runAudioThread) { if (RECORD_LENGTH > 0) { audioData = samples[samplesIndex++ % samples.length]; audioData.position(0).limit(0); } //Log.v(LOG_TAG,"recording? " + recording); bufferReadResult = audioRecord.read(audioData.array(), 0, audioData.capacity()); audioData.limit(bufferReadResult); if (bufferReadResult > 0) { Log.v(LOG_TAG, "bufferReadResult: " + bufferReadResult); // If "recording" isn't true when start this thread, it never get's set according to this if statement...!!! // Why? Good question... if (recording) { //如果是直播,則直接調用recordSamples 將音頻寫入Recorder if (RECORD_LENGTH <= 0) try { recorder.recordSamples(audioData); //Log.v(LOG_TAG,"recording " + 1024*i + " to " + 1024*i+1024); } catch (FFmpegFrameRecorder.Exception e) { Log.v(LOG_TAG, e.getMessage()); e.printStackTrace(); } } } } Log.v(LOG_TAG, "AudioThread Finished, release audioRecord"); /* encoding finish, release recorder */ if (audioRecord != null) { audioRecord.stop(); audioRecord.release(); audioRecord = null; Log.v(LOG_TAG, "audioRecord released"); } } } 

接下來是開始直播和停止直播的方法

//開始直播 public void startRecording() { initRecorder(); try { recorder.start(); startTime = System.currentTimeMillis(); recording = true; audioThread.start(); } catch (FFmpegFrameRecorder.Exception e) { e.printStackTrace(); } } public void stopRecording() { //停止音頻線程 runAudioThread = false; try { audioThread.join(); } catch (InterruptedException e) { e.printStackTrace(); } audioRecordRunnable = null; audioThread = null; if (recorder != null && recording) { //如果是錄播,則將緩存中的幀加上時間戳后寫入 if (RECORD_LENGTH > 0) { Log.v(LOG_TAG, "Writing frames"); try { int firstIndex = imagesIndex % samples.length; int lastIndex = (imagesIndex - 1) % images.length; if (imagesIndex <= images.length) { firstIndex = 0; lastIndex = imagesIndex - 1; } if ((startTime = timestamps[lastIndex] - RECORD_LENGTH * 1000000L) < 0) { startTime = 0; } if (lastIndex < firstIndex) { lastIndex += images.length; } for (int i = firstIndex; i <= lastIndex; i++) { long t = timestamps[i % timestamps.length] - startTime; if (t >= 0) { if (t > recorder.getTimestamp()) { recorder.setTimestamp(t); } recorder.record(images[i % images.length]); } } firstIndex = samplesIndex % samples.length; lastIndex = (samplesIndex - 1) % samples.length; if (samplesIndex <= samples.length) { firstIndex = 0; lastIndex = samplesIndex - 1; } if (lastIndex < firstIndex) { lastIndex += samples.length; } for (int i = firstIndex; i <= lastIndex; i++) { recorder.recordSamples(samples[i % samples.length]); } } catch (FFmpegFrameRecorder.Exception e) { Log.v(LOG_TAG, e.getMessage()); e.printStackTrace(); } } recording = false; Log.v(LOG_TAG, "Finishing recording, calling stop and release on recorder"); try { recorder.stop(); recorder.release(); } catch (FFmpegFrameRecorder.Exception e) { e.printStackTrace(); } recorder = null; } } 

以上即為關鍵的步驟和代碼,下面給出完整項目地址 RtmpRecorder

推薦:

 Android開發中多進程共享數據


免責聲明!

本站轉載的文章為個人學習借鑒使用,本站對版權不負任何法律責任。如果侵犯了您的隱私權益,請聯系本站郵箱yoyou2525@163.com刪除。



 
粵ICP備18138465號   © 2018-2025 CODEPRJ.COM