I. Setting Up the Session
1. Initialize the session hub
// Create the capture session. AVCaptureSession is the central hub of a capture scene.
self.captureSession = [[AVCaptureSession alloc] init];
/*
 Available session presets include:
 AVCaptureSessionPresetHigh
 AVCaptureSessionPresetMedium
 AVCaptureSessionPresetLow
 AVCaptureSessionPreset640x480
 AVCaptureSessionPreset1280x720
 AVCaptureSessionPresetPhoto
 */
// Set the session preset, which determines the capture resolution/quality.
self.captureSession.sessionPreset = AVCaptureSessionPresetHigh;
2. Create the video input
// Set up the input.
// Get the default video capture device; on iOS this returns the back camera.
AVCaptureDevice *videoDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
// Wrap the capture device in an AVCaptureDeviceInput.
// Note: to add a capture device to a session, it must first be wrapped in an AVCaptureDeviceInput object.
// (The `error` argument here is assumed to be the NSError ** parameter of the enclosing setup method; see the sketch after step 7.)
AVCaptureDeviceInput *videoInput = [AVCaptureDeviceInput deviceInputWithDevice:videoDevice error:error];
3. Add the video input to the session
// Check whether videoInput is valid.
if (videoInput)
{
    // canAddInput: tests whether the input can be added to the session.
    if ([self.captureSession canAddInput:videoInput])
    {
        // Add videoInput to the captureSession.
        [self.captureSession addInput:videoInput];
        self.activeVideoInput = videoInput;
    }
} else
{
    return NO;
}
4. Create the audio input
// Get the default audio capture device, i.e. the built-in microphone.
AVCaptureDevice *audioDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeAudio];
// Create a capture device input for this device.
AVCaptureDeviceInput *audioInput = [AVCaptureDeviceInput deviceInputWithDevice:audioDevice error:error];
5. Add the audio input
// Check whether audioInput is valid.
if (audioInput) {
    // canAddInput: tests whether the input can be added to the session.
    if ([self.captureSession canAddInput:audioInput])
    {
        // Add audioInput to the captureSession.
        [self.captureSession addInput:audioInput];
    }
} else
{
    return NO;
}
6. Configure the still image output
// An AVCaptureStillImageOutput instance captures still images from the camera. Outputs deliver the captured photos / video files.
self.imageOutput = [[AVCaptureStillImageOutput alloc] init];
// Configuration dictionary: we want to capture JPEG-format images.
self.imageOutput.outputSettings = @{AVVideoCodecKey: AVVideoCodecJPEG};
// Check whether the output can be added; if so, add it to the session.
if ([self.captureSession canAddOutput:self.imageOutput])
{
    [self.captureSession addOutput:self.imageOutput];
}
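Note that AVCaptureStillImageOutput is deprecated as of iOS 10 in favor of AVCapturePhotoOutput. A minimal sketch of the replacement setup, not part of the original sample and assuming a hypothetical `photoOutput` property:
// Sketch: modern still-photo output (iOS 10+), assuming a `photoOutput` property of type AVCapturePhotoOutput.
self.photoOutput = [[AVCapturePhotoOutput alloc] init];
if ([self.captureSession canAddOutput:self.photoOutput]) {
    [self.captureSession addOutput:self.photoOutput];
}
// With AVCapturePhotoOutput, JPEG settings are requested per capture rather than on the output:
// AVCapturePhotoSettings *settings = [AVCapturePhotoSettings photoSettingsWithFormat:@{AVVideoCodecKey: AVVideoCodecJPEG}];
// [self.photoOutput capturePhotoWithSettings:settings delegate:self];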
7. Configure the movie output
// Create an AVCaptureMovieFileOutput instance, which records QuickTime movies to the file system.
self.movieOutput = [[AVCaptureMovieFileOutput alloc] init];
// Check whether the output can be added; if so, add it to the session.
if ([self.captureSession canAddOutput:self.movieOutput])
{
    [self.captureSession addOutput:self.movieOutput];
}
// Initialize the dispatch queue used for session work.
self.videoQueue = dispatch_queue_create("videoQueue", NULL);
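The snippets in steps 1–7 are assumed to live inside a single setup method; that is why `error:error` is passed straight through and why the failure paths simply `return NO`. A minimal sketch of such a wrapper (the method name and signature are an assumption, not dictated by AVFoundation):
// Hypothetical wrapper for steps 1-7; the name `setupSession:` is an assumption.
- (BOOL)setupSession:(NSError **)error {
    self.captureSession = [[AVCaptureSession alloc] init];
    self.captureSession.sessionPreset = AVCaptureSessionPresetHigh;
    // ... steps 2-7 from above go here, each returning NO on failure ...
    return YES;
}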
II. Starting the Session
1. Start the session
// Check whether the session is already running.
if (![self.captureSession isRunning])
{
    // startRunning is a blocking call that takes a noticeable amount of time, so dispatch it asynchronously.
    dispatch_async(self.videoQueue, ^{
        [self.captureSession startRunning];
    });
}
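A matching stop routine is symmetrical. This sketch assumes the same `videoQueue` property and simply mirrors the start logic; the method name is an assumption:
// Minimal sketch of a companion stop method.
- (void)stopSession {
    if ([self.captureSession isRunning]) {
        dispatch_async(self.videoQueue, ^{
            [self.captureSession stopRunning];
        });
    }
}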
2. Camera handling
2.1 Get the camera at a specified position
AVCaptureDevicePosition
- AVCaptureDevicePositionUnspecified = 0, // unspecified
- AVCaptureDevicePositionBack = 1,        // back camera
- AVCaptureDevicePositionFront = 2,       // front camera
// Find the camera device at the given position.
- (AVCaptureDevice *)cameraWithPosition:(AVCaptureDevicePosition)position {
    // Get the available video devices.
    NSArray *devices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];
    // Iterate over the available video devices and return the one matching the position parameter.
    for (AVCaptureDevice *device in devices)
    {
        if (device.position == position) {
            return device;
        }
    }
    return nil;
}
2.2 Get the currently active camera
// The currently active device.
- (AVCaptureDevice *)activeCamera {
    // Return the device of the capture session's current video input.
    // activeVideoInput is the property that tracks the input in use; it is updated when switching cameras.
    return self.activeVideoInput.device;
}
2.3 Get the number of available cameras and the inactive camera
// Return the camera that is not currently active.
- (AVCaptureDevice *)inactiveCamera {
    // Found by looking up the camera opposite the active one; if the device has only one camera, return nil.
    AVCaptureDevice *device = nil;
    if (self.cameraCount > 1)
    {
        if ([self activeCamera].position == AVCaptureDevicePositionBack) {
            device = [self cameraWithPosition:AVCaptureDevicePositionFront];
        } else
        {
            device = [self cameraWithPosition:AVCaptureDevicePositionBack];
        }
    }
    return device;
}
// The number of available video capture devices.
- (NSUInteger)cameraCount {
    return [[AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo] count];
}
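On iOS 10 and later, devicesWithMediaType: is deprecated in favor of AVCaptureDeviceDiscoverySession. A hedged sketch of an equivalent lookup; the device types listed are an example, not taken from the original sample:
// Sketch: discovery-session based camera lookup for iOS 10+.
AVCaptureDeviceDiscoverySession *discovery =
    [AVCaptureDeviceDiscoverySession discoverySessionWithDeviceTypes:@[AVCaptureDeviceTypeBuiltInWideAngleCamera]
                                                           mediaType:AVMediaTypeVideo
                                                            position:AVCaptureDevicePositionUnspecified];
NSArray<AVCaptureDevice *> *cameras = discovery.devices; // replaces devicesWithMediaType: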
2.4 Switch cameras
// Switch between the front and back cameras.
- (BOOL)switchCameras {
    // Check whether there is more than one camera to switch to.
    if (![self canSwitchCameras]) {
        return NO;
    }
    // Get the camera opposite the currently active one.
    NSError *error;
    AVCaptureDevice *videoDevice = [self inactiveCamera];
    // Wrap the device in an AVCaptureDeviceInput.
    AVCaptureDeviceInput *videoInput = [AVCaptureDeviceInput deviceInputWithDevice:videoDevice error:&error];
    // Check whether videoInput is non-nil.
    if (videoInput)
    {
        // Mark the beginning of the configuration change.
        [self.captureSession beginConfiguration];
        // Remove the session's current video capture input.
        [self.captureSession removeInput:self.activeVideoInput];
        // Check whether the new input can be added.
        if ([self.captureSession canAddInput:videoInput])
        {
            // If so, make videoInput the new video capture input.
            [self.captureSession addInput:videoInput];
            // Track it as the active input.
            self.activeVideoInput = videoInput;
        } else
        {
            // If the new input cannot be added, put the original capture input back into the session.
            [self.captureSession addInput:self.activeVideoInput];
        }
        // commitConfiguration applies all of the batched changes together.
        [self.captureSession commitConfiguration];
    } else {
        // Creating the AVCaptureDeviceInput failed; notify the delegate to handle the error.
        [self.delegate deviceConfigurationFailedWithError:error];
        return NO;
    }
    return YES;
}
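The canSwitchCameras call above is a small helper that is not shown in this section; presumably it just checks the camera count. A minimal sketch under that assumption:
// Assumed helper: switching only makes sense when more than one camera exists.
- (BOOL)canSwitchCameras {
    return self.cameraCount > 1;
}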
3. Focus handling
3.1 Check whether tap-to-focus is supported
- (BOOL)cameraSupportsTapToFocus {
    // Ask the active camera whether it supports focusing on a point of interest.
    return [[self activeCamera] isFocusPointOfInterestSupported];
}
3.2 Set the focus point
- (void)focusAtPoint:(CGPoint)point {
    AVCaptureDevice *device = [self activeCamera];
    // Check support for point-of-interest focus and for auto-focus mode.
    if (device.isFocusPointOfInterestSupported && [device isFocusModeSupported:AVCaptureFocusModeAutoFocus]) {
        NSError *error;
        // Lock the device for configuration; proceed only if the lock was acquired.
        if ([device lockForConfiguration:&error]) {
            // Set focusPointOfInterest to the given CGPoint.
            device.focusPointOfInterest = point;
            // Set focusMode to AVCaptureFocusModeAutoFocus.
            device.focusMode = AVCaptureFocusModeAutoFocus;
            // Release the lock.
            [device unlockForConfiguration];
        } else {
            // On error, hand it to the error-handling delegate.
            [self.delegate deviceConfigurationFailedWithError:error];
        }
    }
}
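The point passed to focusAtPoint: must be in device coordinates ((0,0) top-left through (1,1) bottom-right), not view coordinates. A usage sketch, assuming the preview is shown through a hypothetical AVCaptureVideoPreviewLayer property named `previewLayer`:
// Sketch: translate a tap in the preview view into device coordinates before focusing.
- (void)handleTapGesture:(UITapGestureRecognizer *)recognizer {
    CGPoint viewPoint = [recognizer locationInView:recognizer.view];
    // AVCaptureVideoPreviewLayer performs the coordinate conversion for us.
    CGPoint devicePoint = [self.previewLayer captureDevicePointOfInterestForPoint:viewPoint];
    [self focusAtPoint:devicePoint];
}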
4. Exposure handling
4.1 Check whether tap-to-expose is supported
- (BOOL)cameraSupportsTapToExpose {
    // Ask the device whether it supports exposing on a point of interest.
    return [[self activeCamera] isExposurePointOfInterestSupported];
}
4.2 Set the exposure
static const NSString *YCameraAdjustingExposureContext;
- (void)exposeAtPoint:(CGPoint)point {
    // Get the current device.
    AVCaptureDevice *device = [self activeCamera];
    // Use continuous auto-exposure.
    AVCaptureExposureMode exposureMode = AVCaptureExposureModeContinuousAutoExposure;
    // Check whether point-of-interest exposure and AVCaptureExposureModeContinuousAutoExposure are supported.
    if (device.isExposurePointOfInterestSupported && [device isExposureModeSupported:exposureMode]) {
        NSError *error;
        // Lock the device for configuration.
        if ([device lockForConfiguration:&error])
        {
            // Set the desired values.
            device.exposurePointOfInterest = point;
            device.exposureMode = exposureMode;
            // Check whether the device supports locked exposure mode.
            if ([device isExposureModeSupported:AVCaptureExposureModeLocked]) {
                // If so, use KVO to observe the state of the device's adjustingExposure property.
                [device addObserver:self forKeyPath:@"adjustingExposure" options:NSKeyValueObservingOptionNew context:&YCameraAdjustingExposureContext];
            }
            // Release the lock.
            [device unlockForConfiguration];
        } else
        {
            [self.delegate deviceConfigurationFailedWithError:error];
        }
    }
}
// KVO observer method
- (void)observeValueForKeyPath:(NSString *)keyPath
                      ofObject:(id)object
                        change:(NSDictionary *)change
                       context:(void *)context {
    // Check whether the context is YCameraAdjustingExposureContext.
    if (context == &YCameraAdjustingExposureContext) {
        // Get the device.
        AVCaptureDevice *device = (AVCaptureDevice *)object;
        // If the device is no longer adjusting exposure and its exposureMode can be set to AVCaptureExposureModeLocked...
        if (!device.isAdjustingExposure && [device isExposureModeSupported:AVCaptureExposureModeLocked])
        {
            // Remove self as an observer of adjustingExposure so no further change notifications arrive.
            [object removeObserver:self forKeyPath:@"adjustingExposure" context:&YCameraAdjustingExposureContext];
            // Hop back to the main queue asynchronously.
            dispatch_async(dispatch_get_main_queue(), ^{
                NSError *error;
                if ([device lockForConfiguration:&error]) {
                    // Lock the exposure.
                    device.exposureMode = AVCaptureExposureModeLocked;
                    // Release the lock.
                    [device unlockForConfiguration];
                } else
                {
                    [self.delegate deviceConfigurationFailedWithError:error];
                }
            });
        }
    } else
    {
        [super observeValueForKeyPath:keyPath ofObject:object change:change context:context];
    }
}
4.3 Reset focus and exposure
// Reset focus & exposure to continuous modes centered in the frame.
- (void)resetFocusAndExposureModes {
    AVCaptureDevice *device = [self activeCamera];
    AVCaptureFocusMode focusMode = AVCaptureFocusModeContinuousAutoFocus;
    // Check whether point-of-interest focus and continuous auto-focus are supported.
    BOOL canResetFocus = [device isFocusPointOfInterestSupported] && [device isFocusModeSupported:focusMode];
    AVCaptureExposureMode exposureMode = AVCaptureExposureModeContinuousAutoExposure;
    // Check whether the exposure can be reset.
    BOOL canResetExposure = [device isExposurePointOfInterestSupported] && [device isExposureModeSupported:exposureMode];
    // In device coordinates the top-left corner is (0,0), the bottom-right corner is (1,1), so the center is (0.5,0.5).
    CGPoint centPoint = CGPointMake(0.5f, 0.5f);
    NSError *error;
    // Lock the device for configuration.
    if ([device lockForConfiguration:&error]) {
        // If focus can be reset, do so.
        if (canResetFocus) {
            device.focusMode = focusMode;
            device.focusPointOfInterest = centPoint;
        }
        // If exposure can be reset, set the desired exposure mode.
        if (canResetExposure) {
            device.exposureMode = exposureMode;
            device.exposurePointOfInterest = centPoint;
        }
        // Release the lock.
        [device unlockForConfiguration];
    } else
    {
        [self.delegate deviceConfigurationFailedWithError:error];
    }
}
5. Flash & Torch
5.1 Check whether the device has a flash
// Does the active camera have a flash?
- (BOOL)cameraHasFlash {
    return [[self activeCamera] hasFlash];
}
5.2 Flash mode
There are three flash modes:
- AVCaptureFlashModeOff = 0,  // off
- AVCaptureFlashModeOn = 1,   // on
- AVCaptureFlashModeAuto = 2, // automatic
// Current flash mode.
- (AVCaptureFlashMode)flashMode {
    return [[self activeCamera] flashMode];
}
5.3 Set the flash mode
// Set the flash mode.
- (void)setFlashMode:(AVCaptureFlashMode)flashMode {
    // Get the active device.
    AVCaptureDevice *device = [self activeCamera];
    // Check whether the requested flash mode is supported.
    if ([device isFlashModeSupported:flashMode]) {
        // If supported, lock the device.
        NSError *error;
        if ([device lockForConfiguration:&error]) {
            // Change the flash mode.
            device.flashMode = flashMode;
            // Done configuring; unlock the device.
            [device unlockForConfiguration];
        } else
        {
            [self.delegate deviceConfigurationFailedWithError:error];
        }
    }
}
5.4 Check whether the device has a torch
// Does the active camera have a torch?
- (BOOL)cameraHasTorch {
    return [[self activeCamera] hasTorch];
}
5.5 Torch mode
The torch also has three modes:
- AVCaptureTorchModeOff = 0,
- AVCaptureTorchModeOn = 1,
- AVCaptureTorchModeAuto = 2,
// Current torch mode.
- (AVCaptureTorchMode)torchMode {
    return [[self activeCamera] torchMode];
}
5.6 Set the torch mode
// Turn the torch on or off.
- (void)setTorchMode:(AVCaptureTorchMode)torchMode {
    AVCaptureDevice *device = [self activeCamera];
    if ([device isTorchModeSupported:torchMode]) {
        NSError *error;
        if ([device lockForConfiguration:&error]) {
            device.torchMode = torchMode;
            [device unlockForConfiguration];
        } else
        {
            [self.delegate deviceConfigurationFailedWithError:error];
        }
    }
}
III. Capturing
1. Capture a still image
/*
 AVCaptureStillImageOutput is a subclass of AVCaptureOutput used for capturing still images.
 */
- (void)captureStillImage {
    // Get the connection from the output.
    AVCaptureConnection *connection = [self.imageOutput connectionWithMediaType:AVMediaTypeVideo];
    // The app's UI only supports portrait, but if the user shoots in landscape the orientation of the resulting photo must be adjusted.
    // Check whether setting the video orientation is supported.
    if (connection.isVideoOrientationSupported) {
        // Apply the current orientation.
        connection.videoOrientation = [self currentVideoOrientation];
    }
    // Define a handler block that is given the captured image as a sample buffer.
    id handler = ^(CMSampleBufferRef sampleBuffer, NSError *error) {
        if (sampleBuffer != NULL)
        {
            NSData *imageData = [AVCaptureStillImageOutput jpegStillImageNSDataRepresentation:sampleBuffer];
            UIImage *image = [[UIImage alloc] initWithData:imageData];
            // Key point: this is where the captured image can be handed off, e.g. written to the photo library as shown in the next section.
        }
        else
        {
            NSLog(@"NULL sampleBuffer: %@", [error localizedDescription]);
        }
    };
    // Capture the still image.
    [self.imageOutput captureStillImageAsynchronouslyFromConnection:connection completionHandler:handler];
}
// Determine the current video orientation.
- (AVCaptureVideoOrientation)currentVideoOrientation {
    AVCaptureVideoOrientation orientation;
    // Map UIDevice's orientation to an AVCaptureVideoOrientation.
    switch ([UIDevice currentDevice].orientation) {
        case UIDeviceOrientationPortrait:
            orientation = AVCaptureVideoOrientationPortrait;
            break;
        case UIDeviceOrientationLandscapeRight:
            // Note: device landscape-right corresponds to capture landscape-left, and vice versa.
            orientation = AVCaptureVideoOrientationLandscapeLeft;
            break;
        case UIDeviceOrientationPortraitUpsideDown:
            orientation = AVCaptureVideoOrientationPortraitUpsideDown;
            break;
        default:
            orientation = AVCaptureVideoOrientationLandscapeRight;
            break;
    }
    return orientation;
}
2. Write to the media library
/*
 The Assets Library framework gives developers programmatic access to the iOS photo library.
 Note: because it accesses the photo library, the corresponding usage-description key must be added to Info.plist; otherwise the app will crash.
 */
- (void)writeImageToAssetsLibrary:(UIImage *)image {
    // Create an ALAssetsLibrary instance.
    ALAssetsLibrary *library = [[ALAssetsLibrary alloc] init];
    // Parameter 1: the image (the parameter is a CGImageRef, hence image.CGImage).
    // Parameter 2: the orientation, cast to NSUInteger.
    // Parameter 3: a completion block handling success or failure.
    [library writeImageToSavedPhotosAlbum:image.CGImage
                              orientation:(NSUInteger)image.imageOrientation
                          completionBlock:^(NSURL *assetURL, NSError *error) {
                              // On success, post a "photo captured" notification, used to draw the thumbnail in the lower-left corner of the UI.
                              if (!error)
                              {
                                  [self postThumbnailNotifification:image];
                              } else
                              {
                                  // On failure, log the error message.
                                  id message = [error localizedDescription];
                                  NSLog(@"%@", message);
                              }
                          }];
}
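ALAssetsLibrary has been deprecated since iOS 9 in favor of the Photos framework. A hedged sketch of the equivalent write using PHPhotoLibrary, not part of the original sample; the video case is analogous via creationRequestForAssetFromVideoAtFileURL::
// Sketch: saving the captured image with the Photos framework (requires #import <Photos/Photos.h>).
[[PHPhotoLibrary sharedPhotoLibrary] performChanges:^{
    [PHAssetChangeRequest creationRequestForAssetFromImage:image];
} completionHandler:^(BOOL success, NSError *error) {
    if (success) {
        [self postThumbnailNotifification:image];
    } else {
        NSLog(@"%@", [error localizedDescription]);
    }
}];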
// Post the thumbnail notification.
- (void)postThumbnailNotifification:(UIImage *)image {
    // Hop back to the main queue.
    dispatch_async(dispatch_get_main_queue(), ^{
        // Post the notification.
        NSNotificationCenter *nc = [NSNotificationCenter defaultCenter];
        [nc postNotificationName:THThumbnailCreatedNotification object:image];
    });
}
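For completeness, a receiver (typically the view controller that draws the thumbnail) would subscribe to this notification. A minimal sketch, where `updateThumbnail:` and `thumbnailButton` are hypothetical names on the UI side:
// Sketch: observing the thumbnail notification in the view controller.
[[NSNotificationCenter defaultCenter] addObserver:self
                                         selector:@selector(updateThumbnail:)
                                             name:THThumbnailCreatedNotification
                                           object:nil];

- (void)updateThumbnail:(NSNotification *)notification {
    UIImage *image = notification.object; // the image posted above
    [self.thumbnailButton setImage:image forState:UIControlStateNormal]; // hypothetical UIButton
}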
3. Capture video
3.1 Check whether recording is in progress
// Is a recording in progress?
- (BOOL)isRecording {
    // movieOutput is the output configured when setting up the session.
    return self.movieOutput.isRecording;
}
3.2 Start recording
// Start recording.
- (void)startRecording {
    if (![self isRecording]) {
        // Get the current video capture connection, used to configure some core properties of the captured video data.
        AVCaptureConnection *videoConnection = [self.movieOutput connectionWithMediaType:AVMediaTypeVideo];
        // Check whether setting the videoOrientation property is supported.
        if ([videoConnection isVideoOrientationSupported])
        {
            // If so, set the current video orientation.
            videoConnection.videoOrientation = [self currentVideoOrientation];
        }
        // Check whether video stabilization is supported. It can noticeably improve video quality and only applies when recording to a movie file.
        if ([videoConnection isVideoStabilizationSupported])
        {
            videoConnection.enablesVideoStabilizationWhenAvailable = YES;
        }
        AVCaptureDevice *device = [self activeCamera];
        // If the camera supports smooth autofocus, enable it. Smooth autofocus slows the lens's focusing speed so that
        // refocusing is less jarring when the user moves the camera while recording.
        if (device.isSmoothAutoFocusSupported) {
            NSError *error;
            if ([device lockForConfiguration:&error]) {
                device.smoothAutoFocusEnabled = YES;
                [device unlockForConfiguration];
            } else
            {
                [self.delegate deviceConfigurationFailedWithError:error];
            }
        }
        // Find a unique file-system URL to write the captured video to.
        self.outputURL = [self uniqueURL];
        // Call the method on the capture output. Parameter 1: the output file URL. Parameter 2: the recording delegate.
        [self.movieOutput startRecordingToOutputFileURL:self.outputURL recordingDelegate:self];
    }
}
- (CMTime)recordedDuration {
    return self.movieOutput.recordedDuration;
}
- (NSURL *)uniqueURL {
    NSFileManager *fileManager = [NSFileManager defaultManager];
    // temporaryDirectoryWithTemplateString: creates a uniquely named directory to write the file into.
    // (This appears to be a helper defined in an NSFileManager category in the sample project, not a system API.)
    NSString *dirPath = [fileManager temporaryDirectoryWithTemplateString:@"kamera.XXXXXX"];
    if (dirPath) {
        NSString *filePath = [dirPath stringByAppendingPathComponent:@"kamera_movie.mov"];
        return [NSURL fileURLWithPath:filePath];
    }
    return nil;
}
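If that category helper is not available, a similar unique URL can be built with standard Foundation APIs. A minimal sketch:
// Sketch: build a unique temporary movie URL using only system APIs.
- (NSURL *)uniqueURL {
    NSString *dirPath = [NSTemporaryDirectory() stringByAppendingPathComponent:[[NSUUID UUID] UUIDString]];
    NSError *error;
    BOOL created = [[NSFileManager defaultManager] createDirectoryAtPath:dirPath
                                             withIntermediateDirectories:YES
                                                              attributes:nil
                                                                   error:&error];
    if (!created) {
        return nil;
    }
    return [NSURL fileURLWithPath:[dirPath stringByAppendingPathComponent:@"kamera_movie.mov"]];
}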
3.3 Stop recording
// Stop recording.
- (void)stopRecording {
    // Only if currently recording.
    if ([self isRecording]) {
        [self.movieOutput stopRecording];
    }
}
3.4 Recording-finished delegate callback
- (void)captureOutput:(AVCaptureFileOutput *)captureOutput
didFinishRecordingToOutputFileAtURL:(NSURL *)outputFileURL
      fromConnections:(NSArray *)connections
                error:(NSError *)error {
    // Handle errors.
    if (error) {
        [self.delegate mediaCaptureFailedWithError:error];
    } else
    {
        // Write the movie to the media library.
        [self writeVideoToAssetsLibrary:[self.outputURL copy]];
    }
    self.outputURL = nil;
}
3.5 Write the video to the media library
// Write the captured video.
- (void)writeVideoToAssetsLibrary:(NSURL *)videoURL {
    // An ALAssetsLibrary instance provides the interface for writing videos.
    ALAssetsLibrary *library = [[ALAssetsLibrary alloc] init];
    // Before writing to the library, check that the video can be written (it is good practice to check first).
    if ([library videoAtPathIsCompatibleWithSavedPhotosAlbum:videoURL]) {
        // Create the completion block.
        ALAssetsLibraryWriteVideoCompletionBlock completionBlock;
        completionBlock = ^(NSURL *assetURL, NSError *error)
        {
            if (error) {
                [self.delegate assetLibraryWriteFailedWithError:error];
            } else
            {
                // Generate a thumbnail of the video for display in the UI.
                [self generateThumbnailForVideoAtURL:videoURL];
            }
        };
        // Perform the actual write to the library.
        [library writeVideoAtPathToSavedPhotosAlbum:videoURL completionBlock:completionBlock];
    }
}
3.6 Generate a video thumbnail
// Generate the thumbnail shown in the lower-left corner of the UI.
- (void)generateThumbnailForVideoAtURL:(NSURL *)videoURL {
    // Do the work on videoQueue.
    dispatch_async(self.videoQueue, ^{
        // Create a new AVAsset and AVAssetImageGenerator.
        AVAsset *asset = [AVAsset assetWithURL:videoURL];
        AVAssetImageGenerator *imageGenerator = [AVAssetImageGenerator assetImageGeneratorWithAsset:asset];
        // Set maximumSize to a width of 100 and a height of 0, so the height is computed from the video's aspect ratio.
        imageGenerator.maximumSize = CGSizeMake(100.0f, 0.0f);
        // Apply the video's preferred track transform (e.g. its orientation); otherwise the thumbnail may come out rotated.
        imageGenerator.appliesPreferredTrackTransform = YES;
        // Get a CGImageRef. Note that we own this reference and must release it ourselves.
        CGImageRef imageRef = [imageGenerator copyCGImageAtTime:kCMTimeZero actualTime:NULL error:nil];
        // Convert it to a UIImage.
        UIImage *image = [UIImage imageWithCGImage:imageRef];
        // Release the CGImageRef to avoid a memory leak.
        CGImageRelease(imageRef);
        // Hop back to the main queue.
        dispatch_async(dispatch_get_main_queue(), ^{
            // Post the notification with the new image.
            [self postThumbnailNotifification:image];
        });
    });
}
IV. Notes on QuickTime Movies
Normally a movie's header information sits at the beginning of the file, which lets a player quickly read the header to determine the file's contents, structure, and the location of its samples.
When recording with AVCaptureMovieFileOutput, however, the video data is written to the file in real time, and the header can only be written accurately once recording has finished.
In other words:
A finished, distributed movie: movie header + movie data.
A movie recorded in real time: movie data + movie header (written at the end).
The problem is that if the recording is interrupted, the header is never completely written to the file and the movie cannot be read correctly.
The key feature AVCaptureMovieFileOutput provides to address this is fragmented QuickTime capture, i.e. the file is laid out as:
header + data + fragment + data + fragment + data + fragment ...
By default a fragment is written every 10 seconds, and after each fragment is written the header is updated to describe the complete file so far.
The fragment interval can be changed through the movieFragmentInterval property, as shown below.
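A minimal sketch of tuning the fragment interval on the movie output configured earlier (the 5-second value is just an example):
// Sketch: write a movie fragment (and update the header) every 5 seconds instead of the default 10.
self.movieOutput.movieFragmentInterval = CMTimeMake(5, 1);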