Recording the Screen in Java with FFmpeg


FFmpeg is an open-source program suite for recording and converting digital audio and video, and for turning them into streams. It is licensed under the LGPL or GPL and provides a complete solution for recording, converting, and streaming audio and video. It includes the highly advanced audio/video codec library libavcodec; to guarantee portability and codec quality, much of the code in libavcodec was developed from scratch.
P.S. Quite a few people are unsure how "FFmpeg" should be pronounced. It is read as "ef ef em peg".

Maven dependencies

<!-- https://mvnrepository.com/artifact/org.bytedeco/javacv -->
<dependency>
    <groupId>org.bytedeco</groupId>
    <artifactId>javacv</artifactId>
    <version>1.4.4</version>
</dependency>

<!-- https://mvnrepository.com/artifact/org.bytedeco.javacpp-presets/ffmpeg-platform -->
<dependency>
    <groupId>org.bytedeco.javacpp-presets</groupId>
    <artifactId>ffmpeg-platform</artifactId>
    <version>4.1-1.4.4</version>
</dependency>
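
If managing the native FFmpeg binaries as a separate artifact is inconvenient, the bundled javacv-platform artifact can be used instead. This is an optional alternative, not required by the code below; it pulls in JavaCV together with the matching native presets:

<!-- Optional alternative: JavaCV plus all matching native presets in one artifact -->
<dependency>
    <groupId>org.bytedeco</groupId>
    <artifactId>javacv-platform</artifactId>
    <version>1.4.4</version>
</dependency>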

Java code

import java.awt.AWTException;
import java.awt.Graphics2D;
import java.awt.Rectangle;
import java.awt.RenderingHints;
import java.awt.Robot;
import java.awt.image.BufferedImage;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.ShortBuffer;
import java.util.Scanner;
import java.util.concurrent.ScheduledThreadPoolExecutor;
import java.util.concurrent.TimeUnit;

import javax.sound.sampled.AudioFormat;
import javax.sound.sampled.AudioSystem;
import javax.sound.sampled.DataLine;
import javax.sound.sampled.LineUnavailableException;
import javax.sound.sampled.TargetDataLine;
import org.bytedeco.javacpp.avcodec;
import org.bytedeco.javacpp.avutil;
import org.bytedeco.javacv.FFmpegFrameRecorder;
import org.bytedeco.javacv.Frame;
import org.bytedeco.javacv.FrameRecorder.Exception;
import org.bytedeco.javacv.Java2DFrameConverter;

/**
 * Screen recording with JavaCV.
 */
public class VideoRecord {
    // Scheduled executor that grabs screen frames
    private ScheduledThreadPoolExecutor screenTimer;
    // Capture area; its size comes from Constant
    private final Rectangle rectangle = new Rectangle(Constant.WIDTH, Constant.HEIGHT);
    // FFmpeg-based recorder
    private FFmpegFrameRecorder recorder;
    private Robot robot;
    // Scheduled executor that grabs audio samples
    private ScheduledThreadPoolExecutor exec;
    private TargetDataLine line;
    private AudioFormat audioFormat;
    private DataLine.Info dataLineInfo;
    private boolean isHaveDevice = true;
    private long startTime = 0;
    private long videoTS = 0;
    private long pauseTime = 0;
    private double frameRate = 5;

    public VideoRecord(String fileName, boolean isHaveDevice) {
        recorder = new FFmpegFrameRecorder(fileName + ".mp4", Constant.WIDTH, Constant.HEIGHT);
        // recorder.setVideoCodec(avcodec.AV_CODEC_ID_H265); // 28
        // recorder.setVideoCodec(avcodec.AV_CODEC_ID_FLV1); // 28
        recorder.setVideoCodec(avcodec.AV_CODEC_ID_MPEG4); // 13
        recorder.setFormat("mp4");
        // recorder.setFormat("mov,mp4,m4a,3gp,3g2,mj2,h264,ogg,MPEG4");
        recorder.setSampleRate(44100);
        recorder.setFrameRate(frameRate);

        recorder.setVideoQuality(0);
        recorder.setVideoOption("crf", "23");
        // 2000 kb/s is a reasonable bitrate range for 720p video
        recorder.setVideoBitrate(1000000);
        /**
         * Trade-off between quality and encode speed. Values: ultrafast, superfast,
         * veryfast, faster, fast, medium, slow, slower, veryslow.
         * ultrafast gives the least compression (lowest encoder CPU load) and the
         * largest stream size; veryslow gives the best compression (highest encoder
         * CPU load) while reducing the stream size.
         * Reference: https://trac.ffmpeg.org/wiki/Encode/H.264. From the official docs:
         * "-preset ultrafast as the name implies provides for the fastest possible
         * encoding. If some tradeoff between quality and encode speed, go for the
         * speed. This might be needed if you are going to be transcoding multiple
         * streams on one machine."
         */
        recorder.setVideoOption("preset", "slow");
        recorder.setPixelFormat(avutil.AV_PIX_FMT_YUV420P); // yuv420p
        recorder.setAudioChannels(2);
        recorder.setAudioOption("crf", "0");
        // Highest quality
        recorder.setAudioQuality(0);
        recorder.setAudioCodec(avcodec.AV_CODEC_ID_AAC);
        try {
            robot = new Robot();
        } catch (AWTException e) {
            e.printStackTrace();
        }
        try {
            recorder.start();
        } catch (Exception e) {
            e.printStackTrace();
        }
        this.isHaveDevice = isHaveDevice;
    }

    /**
     * Start recording.
     */
    public void start() {

        if (startTime == 0) {
            startTime = System.currentTimeMillis();
        }
        if (pauseTime == 0) {
            pauseTime = System.currentTimeMillis();
        }

        // If the recorded video has no sound, keep this line to force audio capture on
        isHaveDevice = true;

        // If an audio device is available, start the audio capture thread
        if (isHaveDevice) {
            new Thread(new Runnable() {
                @Override
                public void run() {
                    capture();
                }
            }).start();
        }

        // Screen recording: grab and encode one frame per scheduling period
        screenTimer = new ScheduledThreadPoolExecutor(1);
        screenTimer.scheduleAtFixedRate(new Runnable() {
            @Override
            public void run() {
                // Take a screenshot of the capture area
                BufferedImage screenCapture = robot.createScreenCapture(rectangle);
                // Redraw the screenshot into a 3-byte BGR image for the recorder
                BufferedImage videoImg = new BufferedImage(Constant.WIDTH, Constant.HEIGHT, BufferedImage.TYPE_3BYTE_BGR);
                Graphics2D videoGraphics = videoImg.createGraphics();
                videoGraphics.setRenderingHint(RenderingHints.KEY_DITHERING, RenderingHints.VALUE_DITHER_DISABLE);
                videoGraphics.setRenderingHint(RenderingHints.KEY_COLOR_RENDERING, RenderingHints.VALUE_COLOR_RENDER_SPEED);
                videoGraphics.setRenderingHint(RenderingHints.KEY_RENDERING, RenderingHints.VALUE_RENDER_SPEED);
                videoGraphics.drawImage(screenCapture, 0, 0, null);

                Java2DFrameConverter java2dConverter = new Java2DFrameConverter();
                Frame frame = java2dConverter.convert(videoImg);
                try {
                    // Build a timestamp for this frame and check the offset
                    videoTS = 1000L * (System.currentTimeMillis() - startTime - (System.currentTimeMillis() - pauseTime));
                    if (videoTS > recorder.getTimestamp()) {
                        recorder.setTimestamp(videoTS);
                    }
                    recorder.record(frame); // Encode the video frame
                } catch (Exception e) {
                    e.printStackTrace();
                }

                // Release resources
                videoGraphics.dispose();
                videoImg.flush();
                screenCapture.flush();
            }
        }, (int) (1000 / frameRate), (int) (1000 / frameRate), TimeUnit.MILLISECONDS);
    }

    /**
     * Capture audio from the default recording device.
     */
    public void capture() {
        audioFormat = new AudioFormat(44100.0F, 16, 2, true, false);
        dataLineInfo = new DataLine.Info(TargetDataLine.class, audioFormat);
        try {
            line = (TargetDataLine) AudioSystem.getLine(dataLineInfo);
        } catch (LineUnavailableException e1) {
            e1.printStackTrace();
        }
        try {
            line.open(audioFormat);
        } catch (LineUnavailableException e1) {
            e1.printStackTrace();
        }
        line.start();

        final int sampleRate = (int) audioFormat.getSampleRate();
        final int numChannels = audioFormat.getChannels();
        int audioBufferSize = sampleRate * numChannels;
        final byte[] audioBytes = new byte[audioBufferSize];

        exec = new ScheduledThreadPoolExecutor(1);
        exec.scheduleAtFixedRate(new Runnable() {
            @Override
            public void run() {
                try {
                    // Read whatever audio data is currently available
                    int nBytesRead = line.read(audioBytes, 0, line.available());
                    int nSamplesRead = nBytesRead / 2;
                    short[] samples = new short[nSamplesRead];
                    // Wrap the bytes as little-endian shorts and pass them to recordSamples
                    ByteBuffer.wrap(audioBytes).order(ByteOrder.LITTLE_ENDIAN).asShortBuffer().get(samples);
                    ShortBuffer sBuff = ShortBuffer.wrap(samples, 0, nSamplesRead);
                    // recorder is an instance of org.bytedeco.javacv.FFmpegFrameRecorder
                    recorder.recordSamples(sampleRate, numChannels, sBuff);
                } catch (org.bytedeco.javacv.FrameRecorder.Exception e) {
                    e.printStackTrace();
                }
            }
        }, (int) (1000 / frameRate), (int) (1000 / frameRate), TimeUnit.MILLISECONDS);
    }

    /**
     * Stop recording and release resources.
     */
    public void stop() {
        if (null != screenTimer) {
            screenTimer.shutdownNow();
        }
        try {
            recorder.stop();
            recorder.release();
            recorder.close();
            screenTimer = null;
            if (isHaveDevice) {
                if (null != exec) {
                    exec.shutdownNow();
                }
                if (null != line) {
                    line.stop();
                    line.close();
                }
                dataLineInfo = null;
                audioFormat = null;
            }
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    /**
     * Pause recording.
     *
     * @throws Exception
     */
    public void pause() throws Exception {
        screenTimer.shutdownNow();
        screenTimer = null;
        if (isHaveDevice) {
            exec.shutdownNow();
            exec = null;
            line.stop();
            line.close();
            dataLineInfo = null;
            audioFormat = null;
            line = null;
        }
        pauseTime = System.currentTimeMillis();
    }

    public static void main(String[] args) throws Exception, AWTException {
        VideoRecord videoRecord = new VideoRecord("D:\\test", false);
        videoRecord.start();
        Scanner sc = new Scanner(System.in);
        while (true) {
            System.out.println("Enter stop / pause / start to control the recording.");
            String command = sc.next();
            if (command.equalsIgnoreCase("stop")) {
                videoRecord.stop();
                System.out.println("Stopped");
            } else if (command.equalsIgnoreCase("pause")) {
                videoRecord.pause();
                System.out.println("Paused");
            } else if (command.equalsIgnoreCase("start")) {
                videoRecord.start();
                System.out.println("Started");
            }
        }
    }
}

The Constant class

public class Constant {
    // Size of the capture area in pixels
    public static int WIDTH = 800;
    public static int HEIGHT = 860;
}

Note: this recorder captures a fixed region of the screen (the size defined in Constant), so the recorded content may not be very "clean".
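
If you would rather capture the whole desktop than a fixed 800x860 region, one option is to read the real screen size at startup and feed it into both the capture Rectangle and the FFmpegFrameRecorder constructor. The sketch below is a minimal illustration of that idea (the ScreenSize class name is made up for the example); it is not part of the original code:

import java.awt.Dimension;
import java.awt.Rectangle;
import java.awt.Toolkit;

public class ScreenSize {
    public static void main(String[] args) {
        // Query the primary display's resolution instead of hard-coding Constant.WIDTH/HEIGHT
        Dimension screen = Toolkit.getDefaultToolkit().getScreenSize();
        // Round down to even numbers: yuv420p output generally expects even dimensions
        int width = screen.width - (screen.width % 2);
        int height = screen.height - (screen.height % 2);
        Rectangle captureArea = new Rectangle(0, 0, width, height);
        System.out.println("Capture area: " + captureArea);
    }
}

Passing width and height to the FFmpegFrameRecorder constructor and using captureArea for robot.createScreenCapture(...) keeps the encoded frame size and the captured area in sync.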

Reposted from: https://blog.csdn.net/weixin_42802298/article/details/98101491

 

