截圖的幾種方法
Android獲取屏幕截圖主要有以下三種方法
1、
通過view.getDrawingCache()獲取指定View的繪制緩存來實現
截屏
。
這種方式Android 5.0之前也可以
,且不需要權限。可以截取本應用內任意位置的屏幕截圖,不過有些 WebView 的內容可能會截取不到。
注意這種方式只能獲取當前應用內的截圖(連頂部狀態欄中的時間等信息都獲取不到,獲取到的狀態欄是一片空白)。
2、
通過Linux底層驅動來實現截屏。
linux的圖像信息都是通過
FrameBuffer
來寫到顯示設備上的,所以可以通過讀取這個buffer的信息來獲取屏幕截圖。
DDMS工具就是通過這種方式來獲取截圖的。
FrameBuffer對應的設備文件目錄是/dev/graphics/fb0。
但是這種方法需要root權限,由於是直接從底層顯示數據讀取的,所以理論上是屬於手機屏幕的任何信息都可以截取到。
3、
通過
Android 5.0后的
MediaProjection API
實現截屏。
該接口官方說是用來屏幕錄制和音頻錄制,可以參照系統自帶的sample案例ScreenCapture。
關於sample的分析可參照這篇
http://www.cnblogs.com/tgyf/p/4675082.html 。
這種方法不用root,api是開放的,不過只針對L版以上。
MediaProjection介紹
做過Android屏幕截圖的朋友應該知道在Android5.0之前如果希望截取屏幕,是需要獲取系統root權限的。但在5.0之后Android開放了新的接口android.media.projection,使用該接口,
第三方應用程序無需獲取系統root權限也可以直接進行屏幕截圖操作了
。查詢其官方api可知,該接口主要用來“屏幕截圖”操作和“音頻錄制”操作,這里只討論用於屏幕截圖的功能。由於使用了媒體的映射技術手段,故截取的屏幕並不是真正的設備屏幕,而是通過映射出來的“虛擬屏幕”。不過,因為截圖我們希望得到的肯定是一張圖,而“映射”出來的圖片與系統屏幕完全一致,所以,對於普通截屏操作,該方法完全可行。
MediaProjection由
MediaProjectionManager來管理和獲取,
可以
截取當前屏幕和錄制屏幕視頻
。
MediaProjection使用步驟:
- 首先獲取MediaProjectionManager,和其他的Manager一樣通過 Context.getSystemService() 傳入參數MEDIA_PROJECTION_SERVICE獲得
- 接着調用MediaProjectionManager.createScreenCaptureIntent(),調用后會彈出一個dialog詢問用戶是否授權應用捕捉屏幕
- 然后在onActivityResult()中獲取授權結果
- 如果授權成功,通過MediaProjectionManager.getMediaProjection(int resultCode, Intent resultData)獲取MediaProjection實例,通過MediaProjection.createVirtualDisplay(String name, int width, int height, int dpi, int flags, Surface surface, VirtualDisplay.Callback callback, Handler handler)創建VirtualDisplay實例。實際上在上述方法中傳入的surface參數是真正用來截屏或者錄屏的。
Activity
/**
 * Entry Activity: shows a two-item list ("截屏" / "錄屏"), requests the
 * MediaProjection permission once, and starts the matching service.
 * The grant result is cached in MyApplication statics so both services can use it.
 */
public class Activity2 extends ListActivity {
    private MediaProjectionManager mMpMngr;
    private static final int REQUEST_MEDIA_PROJECTION = 10086;
    // true = screenshot mode (CaptureService), false = recording mode (RecordService).
    boolean isCapture;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        String[] array = { "截屏", "錄屏", };
        setListAdapter(new ArrayAdapter<String>(this, android.R.layout.simple_list_item_1, new ArrayList<String>(Arrays.asList(array))));
        // 1. Obtain the MediaProjectionManager system service.
        mMpMngr = (MediaProjectionManager) getApplicationContext().getSystemService(Context.MEDIA_PROJECTION_SERVICE);
    }

    @Override
    protected void onListItemClick(ListView l, View v, int position, long id) {
        switch (position) {
            case 0: // screenshot: stop the recording service first so only one floating view exists
                isCapture = true;
                stopService(new Intent(getApplicationContext(), RecordService.class));
                startIntent();
                break;
            case 1: // recording: stop the capture service first
                isCapture = false;
                stopService(new Intent(getApplicationContext(), CaptureService.class));
                startIntent();
                break;
            // NOTE: the original code had an empty, unreachable `case 2` (the list
            // only has two items); it has been removed.
        }
    }

    @Override
    protected void onActivityResult(int requestCode, int resultCode, Intent data) {
        super.onActivityResult(requestCode, resultCode, data);
        // 3. Receive the user's screen-capture authorization result.
        if (requestCode == REQUEST_MEDIA_PROJECTION && resultCode == RESULT_OK) {
            MyApplication.mResultCode = resultCode;
            MyApplication.mResultIntent = data;
            MyApplication.mMpmngr = mMpMngr;
            startIntent();
        }
    }

    /**
     * Starts the chosen service if the projection permission was already granted,
     * otherwise launches the system authorization dialog.
     */
    private void startIntent() {
        if (MyApplication.mResultIntent != null && MyApplication.mResultCode != 0) { // already authorized
            if (isCapture) startService(new Intent(getApplicationContext(), CaptureService.class)); // start screenshot service
            else startService(new Intent(getApplicationContext(), RecordService.class)); // start recording service
        }
        // 2. createScreenCaptureIntent() pops a system dialog asking the user to allow screen capture.
        else startActivityForResult(mMpMngr.createScreenCaptureIntent(), REQUEST_MEDIA_PROJECTION); // not yet authorized
    }
}
RecordService
/**
 * Screen-recording service.
 *
 * Creates a floating button; tapping it toggles recording. Frames are captured
 * through a MediaProjection virtual display that renders into the input Surface
 * of an H.264 MediaCodec encoder; encoded buffers are drained on a worker
 * thread and written to an MP4 file via MediaMuxer.
 */
public class RecordService extends Service {
    private static final String TAG = "bqt";
    // Output directory for recorded .mp4 files (external storage root).
    private static final String mVideoPath = Environment.getExternalStorageDirectory().getPath() + "/";
    private MediaProjection mMpj;
    private VirtualDisplay mVirtualDisplay;
    private int windowWidth;
    private int windowHeight;
    private int screenDensity;
    private Surface mSurface;        // encoder input surface; the virtual display renders into it
    private MediaCodec mMediaCodec;
    private MediaMuxer mMuxer;
    private LinearLayout mCaptureLl; // floating control view
    private WindowManager wm;
    private boolean isRecordOn;
    // Set to true to ask the drain loop to finish; reset to false in release().
    private AtomicBoolean mIsQuit = new AtomicBoolean(false);
    private MediaCodec.BufferInfo mBufferInfo = new MediaCodec.BufferInfo();
    private boolean mMuxerStarted = false;
    private int mVideoTrackIndex = -1;

    @Override
    public IBinder onBind(Intent intent) {
        return null; // not a bound service
    }

    @Override
    public void onCreate() {
        super.onCreate();
        createEnvironment();
        // NOTE: the codec is configured lazily in recordStart(). The original code
        // also called configureMedia() here, which leaked one started MediaCodec
        // instance when the first recording re-created the encoder.
        createFloatView();
    }

    /**
     * Creates, configures and starts a fresh H.264 encoder sized to the screen.
     * Must run before startVirtual() so mSurface is available.
     */
    private void configureMedia() {
        // "video/avc" = H.264 (Advanced Video Coding). The frame size must not exceed
        // the captured size, otherwise the codec crashes.
        MediaFormat mediaFormat = MediaFormat.createVideoFormat("video/avc", windowWidth, windowHeight);
        // Bit rate: higher = sharper video but larger files (2 Mbit/s here).
        mediaFormat.setInteger(MediaFormat.KEY_BIT_RATE, 2000000);
        // Frame rate: 30 fps; below roughly 24 fps playback looks visibly choppy.
        mediaFormat.setInteger(MediaFormat.KEY_FRAME_RATE, 30);
        // COLOR_FormatSurface: input arrives as graphic-buffer metadata from a Surface.
        mediaFormat.setInteger(MediaFormat.KEY_COLOR_FORMAT, MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface);
        // Key-frame (I-frame) interval in seconds; smaller values give finer seek/preview granularity.
        mediaFormat.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 2);
        try {
            mMediaCodec = MediaCodec.createEncoderByType("video/avc");
        } catch (IOException e) {
            // The original code swallowed this and then NPE'd on configure(); fail fast instead.
            throw new IllegalStateException("unable to create video/avc encoder", e);
        }
        mMediaCodec.configure(mediaFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
        // The encoder does not take a stream: it is fed through this input surface.
        mSurface = mMediaCodec.createInputSurface();
        mMediaCodec.start();
    }

    /** Reads screen size and density used for both the virtual display and the encoder. */
    private void createEnvironment() {
        wm = (WindowManager) getSystemService(Context.WINDOW_SERVICE);
        DisplayMetrics metric = new DisplayMetrics();
        wm.getDefaultDisplay().getMetrics(metric);
        windowWidth = metric.widthPixels;
        windowHeight = metric.heightPixels;
        screenDensity = metric.densityDpi;
    }

    /** Adds the draggable floating record/stop button as a system overlay window. */
    @SuppressLint("InflateParams")
    private void createFloatView() {
        // NOTE(review): TYPE_PHONE is deprecated on API 26+ (use TYPE_APPLICATION_OVERLAY);
        // kept as-is to preserve behavior on the original target.
        final WindowManager.LayoutParams params = new WindowManager.LayoutParams(WindowManager.LayoutParams.TYPE_PHONE,
                WindowManager.LayoutParams.FLAG_NOT_FOCUSABLE, PixelFormat.RGBA_8888);
        params.x = windowWidth;
        params.y = windowHeight / 2;
        params.gravity = Gravity.LEFT | Gravity.TOP;
        params.width = WindowManager.LayoutParams.WRAP_CONTENT;
        params.height = WindowManager.LayoutParams.WRAP_CONTENT;
        LayoutInflater inflater = LayoutInflater.from(getApplicationContext());
        mCaptureLl = (LinearLayout) inflater.inflate(R.layout.float_record, null);
        final ImageView mCaptureIv = (ImageView) mCaptureLl.findViewById(R.id.iv_record);
        wm.addView(mCaptureLl, params);
        mCaptureIv.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View view) {
                isRecordOn = !isRecordOn;
                if (isRecordOn) {
                    mCaptureIv.setImageResource(R.drawable.ic_recording);
                    Toast.makeText(RecordService.this.getApplicationContext(), "開始錄屏", Toast.LENGTH_SHORT).show();
                    recordStart();
                } else {
                    mCaptureIv.setImageResource(R.drawable.ic_record);
                    Toast.makeText(RecordService.this.getApplicationContext(), "結束錄屏", Toast.LENGTH_SHORT).show();
                    recordStop();
                }
            }
        });
        mCaptureIv.setOnTouchListener(new View.OnTouchListener() {
            @Override
            public boolean onTouch(View view, MotionEvent motionEvent) {
                // Drag support: recentre the overlay on the finger position.
                params.x = (int) (motionEvent.getRawX() - mCaptureIv.getMeasuredWidth() / 2);
                params.y = (int) (motionEvent.getRawY() - mCaptureIv.getMeasuredHeight() / 2 - 20);
                wm.updateViewLayout(mCaptureLl, params);
                return false; // let the click listener still fire
            }
        });
    }

    /** Signals the drain loop to finish; actual teardown happens in release(). */
    private void recordStop() {
        mIsQuit.set(true);
    }

    /** Sets up encoder + virtual display and drains encoded output on a worker thread. */
    private void recordStart() {
        configureMedia();
        startVirtual();
        new Thread() {
            @Override
            public void run() {
                Log.e(TAG, "start startRecord");
                startRecord();
            }
        }.start();
    }

    /** Opens the MP4 muxer and runs the drain loop; always releases resources afterwards. */
    private void startRecord() {
        try {
            mMuxer = new MediaMuxer(mVideoPath + System.currentTimeMillis() + ".mp4", MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4);
            recordVirtualDisplay();
        } catch (IOException e) {
            e.printStackTrace();
        } finally {
            release();
        }
    }

    /** Creates the virtual display that mirrors the screen into the encoder surface. */
    private void startVirtual() {
        if (mMpj == null) mMpj = MyApplication.mMpmngr.getMediaProjection(MyApplication.mResultCode, MyApplication.mResultIntent);
        mVirtualDisplay = mMpj.createVirtualDisplay("record_screen", windowWidth, windowHeight, screenDensity, DisplayManager.VIRTUAL_DISPLAY_FLAG_AUTO_MIRROR,
                mSurface, null, null);
    }

    /** Drain loop: pulls encoded buffers from the codec and feeds the muxer until asked to quit. */
    private void recordVirtualDisplay() {
        while (!mIsQuit.get()) {
            int index = mMediaCodec.dequeueOutputBuffer(mBufferInfo, 10000);
            Log.i(TAG, "dequeue output buffer index=" + index);
            if (index == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) { // actual output format is known now
                resetOutputFormat();
            } else if (index == MediaCodec.INFO_TRY_AGAIN_LATER) { // dequeue timed out
                Log.d(TAG, "retrieving buffers time out!");
                try {
                    Thread.sleep(10);
                } catch (InterruptedException e) {
                    // Preserve interrupt status and stop the loop instead of swallowing.
                    Thread.currentThread().interrupt();
                    return;
                }
            } else if (index >= 0) { // a real encoded buffer
                if (!mMuxerStarted) {
                    throw new IllegalStateException("MediaMuxer dose not call addTrack(format) ");
                }
                encodeToVideoTrack(index);
                mMediaCodec.releaseOutputBuffer(index, false);
            }
        }
    }

    /** Writes one encoded buffer to the muxer, skipping codec-config data. */
    private void encodeToVideoTrack(int index) {
        ByteBuffer encodedData = mMediaCodec.getOutputBuffer(index);
        if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) {
            // Codec config data was already given to the muxer via addTrack() at
            // INFO_OUTPUT_FORMAT_CHANGED; writing it again would corrupt the stream.
            Log.d(TAG, "ignoring BUFFER_FLAG_CODEC_CONFIG");
            mBufferInfo.size = 0;
        }
        if (mBufferInfo.size == 0) {
            Log.d(TAG, "info.size == 0, drop it.");
            encodedData = null;
        } else {
            Log.d(TAG, "got buffer, info: size=" + mBufferInfo.size + ", presentationTimeUs=" + mBufferInfo.presentationTimeUs + ", offset="
                    + mBufferInfo.offset);
        }
        if (encodedData != null) {
            encodedData.position(mBufferInfo.offset);
            encodedData.limit(mBufferInfo.offset + mBufferInfo.size);
            mMuxer.writeSampleData(mVideoTrackIndex, encodedData, mBufferInfo); // write the sample
            Log.i(TAG, "sent " + mBufferInfo.size + " bytes to muxer...");
        }
    }

    /** Registers the encoder's real output format with the muxer and starts it (exactly once). */
    private void resetOutputFormat() {
        // Must happen before any sample is written, and only once per recording.
        if (mMuxerStarted) {
            throw new IllegalStateException("output format already changed!");
        }
        MediaFormat newFormat = mMediaCodec.getOutputFormat();
        Log.i(TAG, "output format changed.\n new format: " + newFormat.toString());
        mVideoTrackIndex = mMuxer.addTrack(newFormat);
        mMuxer.start();
        mMuxerStarted = true;
        Log.i(TAG, "started media muxer, videoIndex=" + mVideoTrackIndex);
    }

    /** Tears down codec, virtual display and muxer; safe to call repeatedly. */
    private void release() {
        mIsQuit.set(false);
        boolean wasMuxerStarted = mMuxerStarted; // snapshot before resetting state
        mMuxerStarted = false;
        mVideoTrackIndex = -1; // reset so the next recording gets a fresh track index
        Log.i(TAG, " release() ");
        if (mMediaCodec != null) {
            mMediaCodec.stop();
            mMediaCodec.release();
            mMediaCodec = null;
        }
        if (mVirtualDisplay != null) {
            mVirtualDisplay.release();
            mVirtualDisplay = null;
        }
        if (mMuxer != null) {
            // MediaMuxer.stop() throws IllegalStateException if start() never ran
            // (e.g. recording ended before the first format change).
            if (wasMuxerStarted) mMuxer.stop();
            mMuxer.release();
            mMuxer = null;
        }
    }

    @Override
    public void onDestroy() {
        super.onDestroy();
        release();
        if (mMpj != null) {
            mMpj.stop();
        }
        if (mCaptureLl != null) {
            wm.removeView(mCaptureLl);
        }
    }
}
CaptureService
/**
 * Screenshot service.
 *
 * Shows a floating button; tapping it briefly hides the button, mirrors the
 * screen into an ImageReader via a MediaProjection virtual display, converts
 * the latest frame to a Bitmap and saves it as a PNG under external storage.
 *
 * @author 白乾濤
 */
public class CaptureService extends Service {
    private static final String TAG = "bqt";
    // Output directory; "screenshort" spelling kept — it is a runtime path existing installs may rely on.
    private static final String mImagePath = Environment.getExternalStorageDirectory().getPath() + "/screenshort/";
    private MediaProjection mMpj;
    private ImageView mCaptureIv;
    private LinearLayout mCaptureLl; // floating control view
    private ImageReader mImageReader;
    private String mImageName;
    private int screenDensity;
    private int windowWidth;
    private int windowHeight;
    private VirtualDisplay mVirtualDisplay;
    private WindowManager wm;

    @Override
    public IBinder onBind(Intent intent) {
        return null; // not a bound service
    }

    @Override
    public void onCreate() {
        super.onCreate();
        createEnvironment();
        createFloatView();
    }

    /** Reads screen metrics and creates the ImageReader that receives mirrored frames. */
    private void createEnvironment() {
        wm = (WindowManager) getSystemService(Context.WINDOW_SERVICE);
        DisplayMetrics metric = new DisplayMetrics();
        wm.getDefaultDisplay().getMetrics(metric);
        windowWidth = metric.widthPixels;
        windowHeight = metric.heightPixels;
        screenDensity = metric.densityDpi;
        // 0x1 == PixelFormat.RGBA_8888; keep up to 2 images buffered.
        mImageReader = ImageReader.newInstance(windowWidth, windowHeight, 0x1, 2);
    }

    /** Adds the draggable floating capture button as a system overlay window. */
    @SuppressLint("InflateParams")
    private void createFloatView() {
        // NOTE(review): TYPE_PHONE is deprecated on API 26+ (use TYPE_APPLICATION_OVERLAY);
        // kept as-is to preserve behavior on the original target.
        final WindowManager.LayoutParams params = new WindowManager.LayoutParams(WindowManager.LayoutParams.TYPE_PHONE,
                WindowManager.LayoutParams.FLAG_NOT_FOCUSABLE, PixelFormat.RGBA_8888);
        params.x = 0;
        params.y = windowHeight / 2;
        params.gravity = Gravity.LEFT | Gravity.TOP;
        params.width = WindowManager.LayoutParams.WRAP_CONTENT;
        params.height = WindowManager.LayoutParams.WRAP_CONTENT;
        LayoutInflater inflater = LayoutInflater.from(getApplicationContext());
        mCaptureLl = (LinearLayout) inflater.inflate(R.layout.float_capture, null);
        mCaptureIv = (ImageView) mCaptureLl.findViewById(R.id.iv_capture);
        wm.addView(mCaptureLl, params);
        mCaptureIv.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View view) {
                // Hide the button first so it does not appear in the screenshot,
                // then stage the capture with short delays: mirror (500ms) ->
                // grab frame (1000ms) -> tear down and show button again (1500ms).
                mCaptureIv.setVisibility(View.INVISIBLE);
                Handler handler = new Handler();
                handler.postDelayed(new Runnable() {
                    @Override
                    public void run() {
                        Log.e(TAG, "start startVirtual");
                        startVirtual();
                    }
                }, 500);
                handler.postDelayed(new Runnable() {
                    @Override
                    public void run() {
                        Log.e(TAG, "start startCapture");
                        startCapture();
                    }
                }, 1000);
                handler.postDelayed(new Runnable() {
                    @Override
                    public void run() {
                        Log.e(TAG, "start stopVirtual");
                        mCaptureIv.setVisibility(View.VISIBLE);
                        stopVirtual();
                    }
                }, 1500);
            }
        });
        mCaptureIv.setOnTouchListener(new View.OnTouchListener() {
            @Override
            public boolean onTouch(View view, MotionEvent motionEvent) {
                // Drag support: recentre the overlay on the finger position.
                params.x = (int) (motionEvent.getRawX() - mCaptureIv.getMeasuredWidth() / 2);
                params.y = (int) (motionEvent.getRawY() - mCaptureIv.getMeasuredHeight() / 2 - 20);
                wm.updateViewLayout(mCaptureLl, params);
                return false; // let the click listener still fire
            }
        });
    }

    /** Releases the virtual display created for this capture. */
    private void stopVirtual() {
        if (mVirtualDisplay != null) {
            mVirtualDisplay.release();
            mVirtualDisplay = null;
        }
    }

    /** Grabs the latest mirrored frame from the ImageReader and saves it as a PNG. */
    private void startCapture() {
        mImageName = System.currentTimeMillis() + ".png";
        Log.e(TAG, "image name is : " + mImageName);
        Image image = mImageReader.acquireLatestImage();
        if (image == null) {
            // No frame has been produced yet — the original code NPE'd here.
            Log.e(TAG, "acquireLatestImage returned null, skip capture");
            return;
        }
        int width = image.getWidth();
        int height = image.getHeight();
        final Image.Plane[] planes = image.getPlanes();
        final ByteBuffer buffer = planes[0].getBuffer();
        int pixelStride = planes[0].getPixelStride();
        int rowStride = planes[0].getRowStride();
        // Rows may be padded; widen the bitmap to match the buffer's row stride.
        int rowPadding = rowStride - pixelStride * width;
        Bitmap padded = Bitmap.createBitmap(width + rowPadding / pixelStride, height, Bitmap.Config.ARGB_8888);
        padded.copyPixelsFromBuffer(buffer);
        // Crop away the stride padding to get the real screen-sized bitmap.
        Bitmap bitmap = Bitmap.createBitmap(padded, 0, 0, width, height);
        if (bitmap != padded) padded.recycle(); // free the intermediate copy
        image.close();
        Log.e(TAG, "bitmap create success ");
        FileOutputStream out = null;
        try {
            File fileFolder = new File(mImagePath);
            if (!fileFolder.exists()) fileFolder.mkdirs();
            File file = new File(mImagePath, mImageName);
            if (!file.exists()) {
                Log.e(TAG, "file create success ");
                file.createNewFile();
            }
            out = new FileOutputStream(file);
            bitmap.compress(Bitmap.CompressFormat.PNG, 100, out);
            out.flush();
            Log.e(TAG, "file save success ");
            Toast.makeText(this.getApplicationContext(), "截圖成功", Toast.LENGTH_SHORT).show();
        } catch (IOException e) {
            Log.e(TAG, e.toString());
            e.printStackTrace();
        } finally {
            // Close on every path — the original leaked the stream on exceptions.
            if (out != null) {
                try {
                    out.close();
                } catch (IOException ignored) {
                    // best-effort close; nothing useful to do here
                }
            }
        }
    }

    /** Creates the virtual display that mirrors the screen into the ImageReader's surface. */
    private void startVirtual() {
        if (mMpj == null) mMpj = MyApplication.mMpmngr.getMediaProjection(MyApplication.mResultCode, MyApplication.mResultIntent);
        mVirtualDisplay = mMpj.createVirtualDisplay("capture_screen", windowWidth, windowHeight, screenDensity,
                DisplayManager.VIRTUAL_DISPLAY_FLAG_AUTO_MIRROR, mImageReader.getSurface(), null, null);
    }

    @Override
    public void onDestroy() {
        super.onDestroy();
        if (mCaptureLl != null) {
            wm.removeView(mCaptureLl);
        }
        if (mMpj != null) {
            mMpj.stop();
            mMpj = null;
        }
    }
}
附件列表
