iOS开发之录制小视频

调用:在业务控制器中先检查相机、麦克风与相册权限,通过后再进入录制界面
// Checks simulator support plus camera / microphone / photo-library
// authorization before entering the recording screen. The first failing
// check wins; an alert is shown for failures, otherwise the capture
// controller is presented.
- (void)setupConfigWithVideo {
    NSString *unUserInfo = nil;
    if (TARGET_IPHONE_SIMULATOR) {
        unUserInfo = @"您的设备不支持此功能";
    }
    // Fix: compare AVCaptureDevice statuses against AVAuthorizationStatus
    // constants — the AL* constants only happen to share raw values.
    AVAuthorizationStatus videoAuthStatus = [AVCaptureDevice authorizationStatusForMediaType:AVMediaTypeVideo];
    if (videoAuthStatus == AVAuthorizationStatusRestricted || videoAuthStatus == AVAuthorizationStatusDenied) {
        unUserInfo = @"相机访问受限";
    }
    AVAuthorizationStatus audioAuthStatus = [AVCaptureDevice authorizationStatusForMediaType:AVMediaTypeAudio];
    if (audioAuthStatus == AVAuthorizationStatusRestricted || audioAuthStatus == AVAuthorizationStatusDenied) {
        unUserInfo = @"录音访问受限";
    }

    ALAuthorizationStatus author = [ALAssetsLibrary authorizationStatus];
    if (author == ALAuthorizationStatusRestricted || author == ALAuthorizationStatusDenied) {
        // Fix: previously returned here without informing the user; fall
        // through so the alert below is actually presented.
        unUserInfo = @"相册访问权限受限";
    }

    if (unUserInfo != nil) {
        [self alertWithClick:unUserInfo];
        return;
    } else {
        [self pushWithTakeVideo];
    }
}

   // Presents a simple OK alert with the given message; the OK action also
   // dismisses any visible SVProgressHUD.
   - (void)alertWithClick:(NSString *)msg {

    // Fix: use the named enum constant instead of the magic number 1.
    UIAlertController *alert = [UIAlertController alertControllerWithTitle:@"提示"
                                                                   message:msg
                                                            preferredStyle:UIAlertControllerStyleAlert];

    UIAlertAction *action = [UIAlertAction actionWithTitle:@"确定"
                                                     style:UIAlertActionStyleDefault
                                                   handler:^(UIAlertAction * _Nonnull action) {
        [SVProgressHUD dismiss];
    }];

    [alert addAction:action];
    [self presentViewController:alert animated:YES completion:nil];
}
跳转至拍摄界面
// Presents the modal capture screen with both delegate channels wired to self.
- (void)pushWithTakeVideo {
    WBTakeVideoViewController *captureController = [[WBTakeVideoViewController alloc] init];
    captureController.delegate = self;
    captureController.takeDelegate = self;
    [self presentViewController:captureController animated:YES completion:nil];
}
WBTakeVideoViewController.h
#import <UIKit/UIKit.h>
#import "WBVideoConfig.h"
 /// Callback channel used to hand the saved video's file path back to the presenter.
 @protocol TakeVideoDelegate <NSObject>
 /// Called after the recording has been written (and, when enabled, saved to the album).
 /// @param videoPath Absolute path of the recorded .MOV file.
 - (void)takeVideoDelegateAction:(NSString *)videoPath;
 @end


@protocol WBVideoViewControllerDelegate ;

/// Full-screen press-to-record video capture controller.
@interface WBTakeVideoViewController : UIViewController

/// Receives record-finished / cancel callbacks.
/// Fix: `weak` instead of `assign` — an assign object pointer dangles once
/// the delegate deallocates.
@property (nonatomic, weak) id<WBVideoViewControllerDelegate> delegate;

@property (nonatomic, assign) BOOL savePhotoAlbum;  // whether the finished video is saved to the photo album

/// Secondary delegate notified with the saved file path.
@property (nonatomic, weak) id <TakeVideoDelegate>takeDelegate;

@end


//  Recording delegate: model-level callbacks from the capture controller.
@protocol WBVideoViewControllerDelegate <NSObject>

@required
- (void)videoViewController:(WBTakeVideoViewController *)videoController didRecordVideo:(WBVideoModel *)videoModel;  // recording finished

@optional
- (void)videoViewControllerDidCancel:(WBTakeVideoViewController *)videoController;  // recording cancelled

@end
WBTakeVideoViewController.m
#import "WBTakeVideoViewController.h"
#import <AVFoundation/AVFoundation.h>
#import <AssetsLibrary/AssetsLibrary.h>
#import <Photos/Photos.h>
#import "WBVideoConfig.h"
#import "WBVideoSupport.h"

// Private: the controller acts as sample-buffer delegate for both the video
// and audio data outputs, as movie-file recording delegate, and as the
// control bar's delegate.
@interface WBTakeVideoViewController () <WBControllerBarDelegate,AVCaptureVideoDataOutputSampleBufferDelegate, AVCaptureAudioDataOutputSampleBufferDelegate,AVCaptureFileOutputRecordingDelegate>

@property (nonatomic, strong) UIView *videoView;   // camera-preview host view
@property (nonatomic, strong) UIView *holdView;    // NOTE(review): declared but never used in this file
@property (nonatomic, strong) UIButton  *holdBut;  // NOTE(review): declared but never used in this file
@property (nonatomic, strong) UIView *actionView;  // container animated off-screen on close

@end

// Strong reference to the on-screen instance; cleared in -closeView.
static WBTakeVideoViewController *__currentVC = nil;
@implementation WBTakeVideoViewController
{
dispatch_queue_t _recoding_queue;  // serial queue for sample-buffer callbacks and file finishing

AVCaptureSession *_videoSession;  // coordinates data flow between capture inputs and outputs
AVCaptureVideoPreviewLayer *_videoPreLayer; // live camera preview layer
AVCaptureDevice *_videoDevice; // camera device, used for focus / zoom configuration

AVCaptureVideoDataOutput *_videoDataOut;
AVCaptureAudioDataOutput *_audioDataOut;

WBControllerBar *_ctrlBar;  // bottom press-to-record control bar

AVAssetWriter *_assetWriter;  // writes the .MOV file
AVAssetWriterInputPixelBufferAdaptor *_assetWriterPixelBufferInput;
AVAssetWriterInput *_assetWriterVideoInput;
AVAssetWriterInput *_assetWriterAudioInput;
CMTime _currentSampleTime;  // presentation time of the most recent sample
BOOL _recoding;             // YES while samples should be appended

WBFocusView *_focusView;    // tap-to-focus indicator
UILabel *_statusInfo;       // "slide up to cancel" hint
UILabel *_cancelInfo;       // "release to cancel" hint

WBVideoModel *_currentRecord;   // output paths of the take in progress
BOOL _currentRecordIsCancel;    // YES when the current take was cancelled
}


// Builds the UI and the capture pipeline, then adds an opaque top bar with
// a "取消" button.
- (void)viewDidLoad {
    [super viewDidLoad];

    __currentVC = self;

    [self setupSubView];

    self.view.backgroundColor = THENEBLACK;
    [self viewDidComplete];
    [self setupWithVideo];

    // Save to the photo album by default.
    _savePhotoAlbum = YES;

    UIView *topView = [[UIView alloc] initWithFrame:CGRectMake(0, 0, ScreenWidth, 64)];
    topView.backgroundColor = THENEBLACK;
    [self.view addSubview:topView];

    UIButton *left = [UIButton buttonWithType:UIButtonTypeCustom];
    left.frame = CGRectMake(10, 20, 40, 30);
    // Fix: UIButton has no supported `font` property — style the title label.
    left.titleLabel.font = [UIFont systemFontOfSize:14.0f];

    [left setTitle:@"取消" forState:UIControlStateNormal];
    [left addTarget:self action:@selector(leftAction) forControlEvents:UIControlEventTouchUpInside];
    [topView addSubview:left];
}

 // "取消" button on the top bar: close the recorder without saving.
 - (void)leftAction {
    [self dismissViewControllerAnimated:YES completion:NULL];
}

// No extra teardown here; capture cleanup happens in -closeView.
- (void)viewWillDisappear:(BOOL)animated {
[super viewWillDisappear:animated];
}


// Slides the action view off-screen while clearing the backdrop, then tears
// the capture stack down. (The method name keeps the original's typo because
// other methods in this file call it by that name.)
- (void)endAniamtion {
    void (^slideOut)(void) = ^{
        self.view.backgroundColor = [UIColor clearColor];
        self.actionView.transform = CGAffineTransformTranslate(CGAffineTransformIdentity, 0, ScreenHeight);
    };
    [UIView animateWithDuration:0.3
                     animations:slideOut
                     completion:^(BOOL done) {
                         [self closeView];
                     }];
}

// Stops the session and releases every capture / writer object so the
// controller can deallocate.
- (void)closeView {
    [_videoSession stopRunning];

    [_videoPreLayer removeFromSuperlayer];
    [_videoView removeFromSuperview];
    _videoPreLayer = nil;
    _videoView = nil;

    _videoDevice = nil;
    _videoDataOut = nil;

    _assetWriter = nil;
    _assetWriterVideoInput = nil;
    _assetWriterAudioInput = nil;
    _assetWriterPixelBufferInput = nil;

    __currentVC = nil;
}



 // Builds the view hierarchy: action container, camera-preview view,
 // press-to-record control bar, tap / double-tap gestures, focus indicator
 // and the two hint labels.

- (void)setupSubView {

/*self.view Config*/
self.view.backgroundColor = [UIColor whiteColor];

// Disable interactive pop while the recording UI is up.
if ([self.navigationController respondsToSelector:@selector(interactivePopGestureRecognizer)]) {
    self.navigationController.interactivePopGestureRecognizer.enabled = NO;
}



 _actionView = [[UIView alloc] initWithFrame:[WBVideoConfig viewFrame]];
[self.view addSubview:_actionView];
_actionView.clipsToBounds = YES;


CGSize videoViewSize = [WBVideoConfig videoViewDefaultSize];
    
//   Camera preview area.
_videoView = [[UIView alloc] initWithFrame:CGRectMake(0, 0, ScreenWidth, videoViewSize.height)];
[self.actionView addSubview:_videoView];

// Control bar pinned below the preview via Masonry constraints.
_ctrlBar = [[WBControllerBar alloc] initWithFrame:CGRectMake(0,0, ScreenWidth, ScreenHeight)];
[_ctrlBar setupSubViews];
_ctrlBar.delegate = self;
[self.view addSubview:_ctrlBar];
[_ctrlBar mas_makeConstraints:^(MASConstraintMaker *make) {
    make.left.right.mas_equalTo(0);
    make.height.mas_equalTo(ScreenHeight/2);
    make.top.mas_equalTo(self.videoView.mas_bottom);
}];

// Single tap focuses; double tap toggles zoom. The single tap waits for the
// double tap to fail so both never fire for one touch.
UITapGestureRecognizer *tapGesture = [[UITapGestureRecognizer alloc] initWithTarget:self action:@selector(focusAction:)];
tapGesture.delaysTouchesBegan = YES;
[_videoView addGestureRecognizer:tapGesture];

UITapGestureRecognizer *doubleTapGesture = [[UITapGestureRecognizer alloc]initWithTarget:self action:@selector(zoomVideo:)];
doubleTapGesture.numberOfTapsRequired = 2;
doubleTapGesture.numberOfTouchesRequired = 1;
doubleTapGesture.delaysTouchesBegan = YES;
[_videoView addGestureRecognizer:doubleTapGesture];
[tapGesture requireGestureRecognizerToFail:doubleTapGesture];

// Tap-to-focus indicator; added to the preview on demand.
_focusView = [[WBFocusView alloc] initWithFrame:CGRectMake(0, 0, 60, 60)];
_focusView.backgroundColor = [UIColor clearColor];

// "Slide up to cancel" hint near the bottom of the preview.
_statusInfo = [[UILabel alloc] initWithFrame:CGRectMake(0, CGRectGetMaxY(_videoView.frame) - 30, _videoView.frame.size.width, 20)];
_statusInfo.textAlignment = NSTextAlignmentCenter;
_statusInfo.font = [UIFont systemFontOfSize:14.0];
_statusInfo.textColor = [UIColor whiteColor];
_statusInfo.hidden = YES;
[self.actionView addSubview:_statusInfo];

// "Release to cancel" hint centered on the preview.
_cancelInfo = [[UILabel alloc] initWithFrame:CGRectMake(0, 0, 120, 24)];
_cancelInfo.center = _videoView.center;
_cancelInfo.textAlignment = NSTextAlignmentCenter;
_cancelInfo.hidden = YES;
[self.actionView addSubview:_cancelInfo];

[_actionView sendSubviewToBack:_videoView];
}


// Configures the capture session: camera + microphone inputs, raw data
// outputs delivering to the recording queue, and the preview layer. Starts
// the session running.
- (void)setupWithVideo {
    _recoding_queue = dispatch_queue_create("com.wbsmallvideo.queue", DISPATCH_QUEUE_SERIAL);

    NSArray *deviceVideo = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];
    NSArray *deviceAudio = [AVCaptureDevice devicesWithMediaType:AVMediaTypeAudio];
    // Fix: indexing [0] on an empty array crashes (e.g. simulator or a
    // device with capture disabled) — bail out instead.
    if (deviceVideo.count == 0 || deviceAudio.count == 0) {
        NSLog(@"没有可用的采集设备");
        return;
    }

    // Fix: surface input-creation failures instead of passing error:nil.
    NSError *videoError = nil;
    NSError *audioError = nil;
    AVCaptureDeviceInput *videoInput = [AVCaptureDeviceInput deviceInputWithDevice:deviceVideo.firstObject error:&videoError];
    AVCaptureDeviceInput *audioInput = [AVCaptureDeviceInput deviceInputWithDevice:deviceAudio.firstObject error:&audioError];
    if (videoError || audioError) {
        NSLog(@"创建采集输入失败: %@ %@", videoError, audioError);
    }

    _videoDevice = deviceVideo.firstObject;

    _videoDataOut = [[AVCaptureVideoDataOutput alloc] init];
    _videoDataOut.videoSettings = @{(__bridge NSString *)kCVPixelBufferPixelFormatTypeKey:@(kCVPixelFormatType_32BGRA)};
    _videoDataOut.alwaysDiscardsLateVideoFrames = YES;  // drop late frames rather than stall
    [_videoDataOut setSampleBufferDelegate:self queue:_recoding_queue];

    _audioDataOut = [[AVCaptureAudioDataOutput alloc] init];
    [_audioDataOut setSampleBufferDelegate:self queue:_recoding_queue];

    _videoSession = [[AVCaptureSession alloc] init];
    if ([_videoSession canSetSessionPreset:AVCaptureSessionPreset640x480]) {
        _videoSession.sessionPreset = AVCaptureSessionPreset640x480;
    }

    if (videoInput && [_videoSession canAddInput:videoInput]) {
        [_videoSession addInput:videoInput];
    }
    if (audioInput && [_videoSession canAddInput:audioInput]) {
        [_videoSession addInput:audioInput];
    }
    if ([_videoSession canAddOutput:_videoDataOut]) {
        [_videoSession addOutput:_videoDataOut];
    }
    if ([_videoSession canAddOutput:_audioDataOut]) {
        [_videoSession addOutput:_audioDataOut];
    }

    // Preview layer sized to the 4:3 capture ratio and centered in the preview view.
    CGFloat viewWidth = CGRectGetWidth(self.videoView.frame);
    _videoPreLayer = [[AVCaptureVideoPreviewLayer alloc] initWithSession:_videoSession];
    _videoPreLayer.frame = CGRectMake(0, -CGRectGetMinY(_videoView.frame), viewWidth, viewWidth*wbVideo_w_h);
    _videoPreLayer.position = CGPointMake(viewWidth/2, CGRectGetHeight(_videoView.frame)/2);
    _videoPreLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
    [_videoView.layer addSublayer:_videoPreLayer];

    [_videoSession startRunning];
}



//  Shows a transient "double tap to zoom" hint over the preview.
- (void)viewDidComplete {
    // Fix: `__block` removed — the cleanup block only reads the pointer,
    // it never reassigns it.
    UILabel *zoomLab = [[UILabel alloc] initWithFrame:CGRectMake(0, 0, 200, 20)];
    zoomLab.center = CGPointMake(self.videoView.center.x, CGRectGetMaxY(self.videoView.frame) - 50);
    zoomLab.font = [UIFont boldSystemFontOfSize:14];
    zoomLab.text = @"双击放大";
    zoomLab.textColor = [UIColor whiteColor];
    zoomLab.textAlignment = NSTextAlignmentCenter;
    [_videoView addSubview:zoomLab];
    [_videoView bringSubviewToFront:zoomLab];

    // Remove the hint after 1.6 s.
    wbdispatch_after(1.6, ^{
        [zoomLab removeFromSuperview];
    });
}

// Shows the focus indicator at `point` (preview-view coordinates) and asks
// the camera to auto-focus / auto-expose / auto-white-balance at the
// corresponding device point.
- (void)focusInPointAtVideoView:(CGPoint)point {
// Convert the view tap into the device's [0,1]x[0,1] interest space.
CGPoint cameraPoint= [_videoPreLayer captureDevicePointOfInterestForPoint:point];
_focusView.center = point;
[_videoView addSubview:_focusView];
[_videoView bringSubviewToFront:_focusView];
[_focusView focusing];

// Device configuration requires the lock; each capability is checked
// before being set.
NSError *error = nil;
if ([_videoDevice lockForConfiguration:&error]) {
    if ([_videoDevice isFocusPointOfInterestSupported]) {
        _videoDevice.focusPointOfInterest = cameraPoint;
    }
    if ([_videoDevice isFocusModeSupported:AVCaptureFocusModeAutoFocus]) {
        _videoDevice.focusMode = AVCaptureFocusModeAutoFocus;
    }
    if ([_videoDevice isExposureModeSupported:AVCaptureExposureModeAutoExpose]) {
        _videoDevice.exposureMode = AVCaptureExposureModeAutoExpose;
    }
    if ([_videoDevice isWhiteBalanceModeSupported:AVCaptureWhiteBalanceModeAutoWhiteBalance]) {
        _videoDevice.whiteBalanceMode = AVCaptureWhiteBalanceModeAutoWhiteBalance;
    }
    [_videoDevice unlockForConfiguration];
}
// `error` is only populated when the lock failed.
if (error) {
    NSLog(@"聚焦失败:%@",error);
}
// Hide the indicator after the animation has played.
wbdispatch_after(1.0, ^{
    [_focusView removeFromSuperview];
});
}


  #pragma mark - Actions --
// Single tap on the preview: focus at the tapped location.
- (void)focusAction:(UITapGestureRecognizer *)gesture {
    [self focusInPointAtVideoView:[gesture locationInView:_videoView]];
}

 // Double tap: toggle between 1x and 2x zoom.
 - (void)zoomVideo:(UITapGestureRecognizer *)gesture {
    NSError *error = nil;
    if ([_videoDevice lockForConfiguration:&error]) {
        // Fix: the original compared a float with `== 2.0`; the hardware may
        // not report the factor exactly, so use a threshold to decide the
        // toggle direction, and clamp to the device's supported maximum.
        CGFloat target = (_videoDevice.videoZoomFactor > 1.5) ? 1.0 : 2.0;
        _videoDevice.videoZoomFactor = MIN(target, _videoDevice.activeFormat.videoMaxZoomFactor);
        [_videoDevice unlockForConfiguration];
    }
}

 #pragma mark - controllerBarDelegate

// Control bar: the long press began. Creates a fresh output file + asset
// writer, flashes the "slide up to cancel" hint, and enables sample
// appending in the capture callbacks.
- (void)ctrollVideoDidStart:(WBControllerBar *)controllerBar {

_currentRecord = [WBVideoUtil createNewVideo];
_currentRecordIsCancel = NO;

NSURL *outURL = [NSURL fileURLWithPath:_currentRecord.videoAbsolutePath];
[self createWriter:outURL];

// Briefly show the cancel hint.
_statusInfo.textColor = THEMEGREEN;
_statusInfo.text = @"↑上移取消";
_statusInfo.hidden = NO;
wbdispatch_after(0.5, ^{
    _statusInfo.hidden = YES;
});

// From here on the sample-buffer callbacks write to the asset writer.
_recoding = YES;
NSLog(@"视频开始录制");

// NOTE(review): this sets navigationItem on the navigation *controller*
// rather than on self, and this controller is presented modally without a
// navigation bar — the line likely has no visible effect; confirm intent.
UIBarButtonItem *leftItem = [[UIBarButtonItem alloc]initWithTitle:@"Cancel" style:UIBarButtonItemStylePlain target:self action:@selector(leftItemAction)];
self.navigationController.navigationItem.leftBarButtonItem = leftItem;

}

// Bar-button Cancel: close the capture screen.
- (void)leftItemAction {
    [self dismissViewControllerAnimated:YES completion:NULL];
}

 // Control bar: the long press ended normally. Stops appending samples,
 // finishes the file, then notifies the delegate and dismisses.
 - (void)ctrollVideoDidEnd:(WBControllerBar *)controllerBar {
    _recoding = NO;
    [self saveVideo:^(NSURL *outFileURL) {
        if (!_delegate) {
            return;
        }
        [_delegate videoViewController:self didRecordVideo:_currentRecord];
        [self endAniamtion];
    }];

    NSLog(@"视频录制结束");
}

// Control bar: recording was cancelled (slide-up or too-short press).
// Shows a transient hint when the press was too short.
- (void)ctrollVideoDidCancel:(WBControllerBar *)controllerBar reason:(WBRecordCancelReason)reason{
    _currentRecordIsCancel = YES;
    _recoding = NO;

    if (reason == WBRecordCancelReasonTimeShort) {
        CGRect hintFrame = CGRectMake(0,
                                      CGRectGetHeight(_videoView.frame) / 3 * 2,
                                      CGRectGetWidth(_videoView.frame),
                                      20);
        [WBVideoConfig showHinInfo:@"录制时间过短"
                            inView:_videoView
                             frame:hintFrame
                          timeLong:1.0];
    }

    NSLog(@"当前视频录制取消");
}

// Control bar: finger moved above the record area — briefly show the
// "release to cancel" hint (no-op while it is already visible).
- (void)ctrollVideoWillCancel:(WBControllerBar *)controllerBar {
    if (_cancelInfo.hidden) {
        _cancelInfo.text = @"松手取消";
        _cancelInfo.hidden = NO;
        wbdispatch_after(0.5, ^{
            _cancelInfo.hidden = YES;
        });
    }
}

// Per-second tick from the control bar — currently unused.
- (void)ctrollVideoDidRecordSEC:(WBControllerBar *)controllerBar {
//    _topSlideView.isRecoding = YES;
//    NSLog(@"视频录又过了 1 秒");
}

// Control bar asked to close the recorder: inform the delegate (optional
// method) and run the exit animation. respondsToSelector: on nil returns
// NO, so no separate nil check is needed.
- (void)ctrollVideoDidClose:(WBControllerBar *)controllerBar {
    if ([_delegate respondsToSelector:@selector(videoViewControllerDidCancel:)]) {
        [_delegate videoViewControllerDidCancel:self];
    }
    [self endAniamtion];
}


//  Creates the asset writer plus H.264 video / AAC audio inputs and the
//  pixel-buffer adaptor for a new recording at `assetUrl`.
- (void)createWriter:(NSURL *)assetUrl {
    NSError *error = nil;
    _assetWriter = [AVAssetWriter assetWriterWithURL:assetUrl fileType:AVFileTypeQuickTimeMovie error:&error];
    // Fix: check the failure immediately instead of configuring inputs on a
    // nil writer and logging the error at the very end.
    if (error) {
        NSLog(@"error = %@", [error localizedDescription]);
        return;
    }

    int videoWidth = [WBVideoConfig defualtVideoSize].width;
    int videoHeight = [WBVideoConfig defualtVideoSize].height;

    // Width/height are swapped because the capture buffers arrive rotated;
    // the 90° transform below restores portrait orientation on playback.
    NSDictionary *outputSettings = @{
                                     AVVideoCodecKey : AVVideoCodecH264,
                                     AVVideoWidthKey : @(videoHeight),
                                     AVVideoHeightKey : @(videoWidth),
                                     AVVideoScalingModeKey:AVVideoScalingModeResizeAspectFill,
                                     };
    _assetWriterVideoInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo outputSettings:outputSettings];
    _assetWriterVideoInput.expectsMediaDataInRealTime = YES; // fed live from the capture callbacks
    _assetWriterVideoInput.transform = CGAffineTransformMakeRotation(M_PI / 2.0);

    // Mono AAC, 64 kbps @ 44.1 kHz.
    NSDictionary *audioOutputSettings = @{
                                          AVFormatIDKey:@(kAudioFormatMPEG4AAC),
                                          AVEncoderBitRateKey:@(64000),
                                          AVSampleRateKey:@(44100),
                                          AVNumberOfChannelsKey:@(1),
                                          };
    _assetWriterAudioInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeAudio outputSettings:audioOutputSettings];
    _assetWriterAudioInput.expectsMediaDataInRealTime = YES;

    NSDictionary *SPBADictionary = @{
                                     (__bridge NSString *)kCVPixelBufferPixelFormatTypeKey : @(kCVPixelFormatType_32BGRA),
                                     (__bridge NSString *)kCVPixelBufferWidthKey : @(videoWidth),
                                     (__bridge NSString *)kCVPixelBufferHeightKey  : @(videoHeight),
                                     (__bridge NSString *)kCVPixelFormatOpenGLESCompatibility : ((__bridge NSNumber *)kCFBooleanTrue)
                                     };
    _assetWriterPixelBufferInput = [AVAssetWriterInputPixelBufferAdaptor assetWriterInputPixelBufferAdaptorWithAssetWriterInput:_assetWriterVideoInput sourcePixelBufferAttributes:SPBADictionary];

    if ([_assetWriter canAddInput:_assetWriterVideoInput]) {
        [_assetWriter addInput:_assetWriterVideoInput];
    } else {
        // Fix: the original used Swift-style \(…) interpolation, which is a
        // literal string in Objective-C.
        NSLog(@"不能添加视频writer的input %@", _assetWriterVideoInput);
    }
    if ([_assetWriter canAddInput:_assetWriterAudioInput]) {
        [_assetWriter addInput:_assetWriterAudioInput];
    } else {
        // Fix: this branch previously logged the *video* message.
        NSLog(@"不能添加音频writer的input %@", _assetWriterAudioInput);
    }

    NSLog(@"_assetWriter = %ld",(long)_assetWriter.status);
}




// Finishes the asset writer on the recording queue, then (unless the take
// was cancelled) saves a thumbnail, calls `complier` on the main queue, and
// optionally writes the video to the photo album (Photos on iOS 8+,
// ALAssetsLibrary before that).
// NOTE(review): when `_recoding` is still YES this returns without ever
// invoking `complier`, so a caller waiting on the block is never notified —
// confirm that is intended.
- (void)saveVideo:(void(^)(NSURL *outFileURL))complier {

if (_recoding) return;

if (!_recoding_queue){
    complier(nil);
    return;
};


dispatch_async(_recoding_queue, ^{
    NSURL *outputFileURL = [NSURL fileURLWithPath:_currentRecord.videoAbsolutePath];

    
    [_assetWriter finishWritingWithCompletionHandler:^{
        
        // A cancelled take skips the thumbnail, the callback and the album save.
        if (_currentRecordIsCancel) return ;
        
        // Grab a thumbnail frame from the finished file.
        [WBVideoUtil saveThumImageWithVideoURL:outputFileURL second:1];
        
        // Deliver the result on the main queue.
        if (complier) {
            dispatch_async(dispatch_get_main_queue(), ^{
                complier(outputFileURL);
            });
        }
        if (_savePhotoAlbum) {
            BOOL ios8Later = [[[UIDevice currentDevice] systemVersion] floatValue] >= 8;
            if (ios8Later) {
                // iOS 8+: Photos framework. The take delegate is only
                // notified when the album save succeeded.
                [[PHPhotoLibrary sharedPhotoLibrary] performChanges:^{
                    [PHAssetChangeRequest creationRequestForAssetFromVideoAtFileURL:outputFileURL];
                } completionHandler:^(BOOL success, NSError * _Nullable error) {
                    if (!error && success) {
                        
                        
                        [self.takeDelegate takeVideoDelegateAction:_currentRecord.videoAbsolutePath];
                     
                        
                    }
                    else {
                        NSLog(@"保存相册失败! :%@",error);
                    }
                }];
                
            }
            else {
                // Pre-iOS 8 fallback: deprecated ALAssetsLibrary.
                [[[ALAssetsLibrary alloc] init] writeVideoAtPathToSavedPhotosAlbum:outputFileURL completionBlock:^(NSURL *assetURL, NSError *error) {
                    if (!error) {
                        
                        [self.takeDelegate takeVideoDelegateAction:_currentRecord.videoAbsolutePath];
                        
                        NSLog(@"保存相册成功!");
                    }
                    else {
                        NSLog(@"保存相册失败!");
                    }
                }];
                
            }
            
        }
    }];
});

}

#pragma mark - AVCaptureVideoDataOutputSampleBufferDelegate, AVCaptureAudioDataOutputSampleBufferDelegate
// Shared callback for the video and audio data outputs (runs on
// _recoding_queue). Lazily starts the writer session at the first sample's
// timestamp, then appends pixel buffers / audio samples while recording.
- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection {

if (!_recoding) return;

@autoreleasepool {
    _currentSampleTime = CMSampleBufferGetOutputPresentationTimeStamp(sampleBuffer);
    // First sample after -createWriter: start writing and anchor the
    // session to this sample's timestamp.
    if (_assetWriter.status != AVAssetWriterStatusWriting) {
        [_assetWriter startWriting];
        [_assetWriter startSessionAtSourceTime:_currentSampleTime];
    }
    if (captureOutput == _videoDataOut) {
        // Only append when the input can accept data; otherwise the frame is dropped.
        if (_assetWriterPixelBufferInput.assetWriterInput.isReadyForMoreMediaData) {
            CVPixelBufferRef pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
            BOOL success = [_assetWriterPixelBufferInput appendPixelBuffer:pixelBuffer withPresentationTime:_currentSampleTime];
            if (!success) {
                NSLog(@"Pixel Buffer没有append成功");
            }
        }
    }
    if (captureOutput == _audioDataOut) {
        [_assetWriterAudioInput appendSampleBuffer:sampleBuffer];
    }
}
}

// Dropped-frame callback — intentionally ignored.
- (void)captureOutput:(AVCaptureOutput *)captureOutput didDropSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection {

}

// AVCaptureFileOutputRecordingDelegate: saves a finished movie file to the
// photo album via the (deprecated) ALAssetsLibrary.
// NOTE(review): no AVCaptureMovieFileOutput is added to the session in this
// file — recording goes through AVAssetWriter instead — so this callback
// may never fire; confirm whether it is still needed.
- (void)captureOutput:(AVCaptureFileOutput *)captureOutput didFinishRecordingToOutputFileAtURL:(NSURL *)outputFileURL fromConnections:(NSArray *)connections error:(NSError *)error {
ALAssetsLibrary *library = [[ALAssetsLibrary alloc] init];
if ([library videoAtPathIsCompatibleWithSavedPhotosAlbum:outputFileURL]) {
    [library writeVideoAtPathToSavedPhotosAlbum:outputFileURL completionBlock:^(NSURL *assetURL, NSError *error){
        dispatch_async(dispatch_get_main_queue(), ^{
            if (error) {
                // erre
            }else
            {
                // success
            }
        });
    }];
}
NSLog(@"recordEnd");
}

 @end
WBVideoConfig.h
#import <Foundation/Foundation.h>


// Maximum recording duration, in seconds.
#define wbRecordTime        10.0

// Video aspect ratio (width : height).
#define wbVideo_w_h (4.0/3)

// Default video width in pixels; height = wbVideoWidthPX / wbVideo_w_h.
// NOTE(review): this reads the screen *height* despite the name — confirm intended.
#define wbVideoWidthPX  [UIScreen mainScreen].bounds.size.height

// Control-bar height on small screens.
#define wbControViewHeight  120.0

// Directory name used for saved videos.
#define wbVideoDicName      @"wbSmailVideo"

// Runs `block` on the main queue after `time` seconds.
extern void wbdispatch_after(float time, dispatch_block_t block);

/// Layout / resolution helpers for the recorder UI.
@interface WBVideoConfig : NSObject
/// Frame of the action container view.
+ (CGRect)viewFrame;

//  Size of the camera-preview view.
+ (CGSize)videoViewDefaultSize;
//  Default encoded video resolution.
+ (CGSize)defualtVideoSize;

/// Shows `text` over `superView` at `frame` for `time` seconds.
+ (void)showHinInfo:(NSString *)text inView:(UIView *)superView frame:(CGRect)frame timeLong:(NSTimeInterval)time;


@end


 // Model describing one recorded video on disk.
@interface WBVideoModel : NSObject

/// Absolute local path of the full video file.
@property (nonatomic, copy) NSString *videoAbsolutePath;
/// Absolute local path of the thumbnail image.
@property (nonatomic, copy) NSString *thumAbsolutePath;
// Recording date (currently disabled).
//@property (nonatomic, strong) NSDate *recordTime;
@end

// Video-file helpers: thumbnail extraction and path management.
@interface WBVideoUtil : NSObject


// Saves a thumbnail image next to the video.
// @param videoUrl video file URL
// @param second   snapshot time, in units of 1/10 s (see implementation)

+ (void)saveThumImageWithVideoURL:(NSURL *)videoUrl second:(int64_t)second;

 // Returns a model with fresh output paths (pre-existing files removed).
+ (WBVideoModel *)createNewVideo;


 // YES when a recorded video already exists on disk.
+ (BOOL)existVideo;

//  Deletes the video at `videoPath` and its sibling thumbnail.
+ (void)deleteVideo:(NSString *)videoPath;

//+ (NSString *)getVideoPath;

@end
WBVideoConfig.m
// Runs `block` on the main queue after `time` seconds.
void wbdispatch_after(float time, dispatch_block_t block)
{
    dispatch_time_t when = dispatch_time(DISPATCH_TIME_NOW, (int64_t)(time * NSEC_PER_SEC));
    dispatch_after(when, dispatch_get_main_queue(), block);
}

@implementation WBVideoConfig
//  Frame of the action container (top half of the screen).
+ (CGRect)viewFrame {
    return CGRectMake(0, 0, ScreenWidth, ScreenHeight/2);
}

//  Size of the camera-preview view.
+ (CGSize)videoViewDefaultSize {
    return CGSizeMake(ScreenWidth, ScreenHeight/2);
}

//  Default encoded resolution derived from wbVideoWidthPX and the 4:3 ratio.
+ (CGSize)defualtVideoSize {
    return CGSizeMake(wbVideoWidthPX, wbVideoWidthPX/wbVideo_w_h);
}

//  Shows `text` over `superView` at `frame`, removing it after `time` seconds.
+ (void)showHinInfo:(NSString *)text inView:(UIView *)superView frame:(CGRect)frame timeLong:(NSTimeInterval)time {
    UILabel *hintLabel = [[UILabel alloc] initWithFrame:frame];
    hintLabel.font = [UIFont boldSystemFontOfSize:15.0];
    hintLabel.text = text;
    hintLabel.textColor = [UIColor whiteColor];
    hintLabel.textAlignment = NSTextAlignmentCenter;
    [superView addSubview:hintLabel];
    [superView bringSubviewToFront:hintLabel];
    // Fix: honour the timeLong parameter — it was previously ignored in
    // favour of a hard-coded 1.6 s.
    wbdispatch_after(time, ^{
        [hintLabel removeFromSuperview];
    });
}
@end


@implementation WBVideoModel

// Convenience factory. `recordTime` is accepted but unused because the
// corresponding property is commented out in the header.
+ (instancetype)modelWithPath:(NSString *)videoPath thumPath:(NSString *)thumPath recordTime:(NSDate *)recordTime {
    WBVideoModel *instance = [[WBVideoModel alloc] init];
    instance.videoAbsolutePath = videoPath;
    instance.thumAbsolutePath = thumPath;
    return instance;
}
@end


@implementation WBVideoUtil

//  Extracts a frame near the requested time from the video at `videoUrl`
//  and writes it next to the video as a .JPG.
+ (void)saveThumImageWithVideoURL:(NSURL *)videoUrl second:(int64_t)second {
    // file:// URL -> filesystem path; the thumbnail shares the video's name
    // with a JPG suffix. (Two unused asset/generator locals removed.)
    NSString *videoPath = [videoUrl.absoluteString stringByReplacingOccurrencesOfString:@"file://" withString: @""];
    NSString *thumPath = [videoPath stringByReplacingOccurrencesOfString:@"MOV" withString: @"JPG"];

    NSURL *fileURL = [NSURL fileURLWithPath:videoPath];
    AVURLAsset *asset = [[AVURLAsset alloc] initWithURL:fileURL options:nil];

    AVAssetImageGenerator *gen = [[AVAssetImageGenerator alloc] initWithAsset:asset];
    gen.appliesPreferredTrackTransform = YES;

    // Timescale 10 => requested time is second/10 s; the generator snaps to
    // the nearest frame and reports it in actualTime.
    CMTime time = CMTimeMake(second, 10);
    NSError *error = nil;
    CMTime actualTime;
    CGImageRef image = [gen copyCGImageAtTime:time actualTime:&actualTime error:&error];
    if (error) {
        NSLog(@"缩略图获取失败!:%@",error);
        return;
    }

    UIImage *shotImage = [[UIImage alloc] initWithCGImage:image];
    NSData *imgData = UIImageJPEGRepresentation(shotImage, 1.0);
    BOOL isok = [imgData writeToFile:thumPath atomically: YES];
    NSLog(@"缩略图获取结果:%d",isok);
    CGImageRelease(image);  // copyCGImageAtTime returns a +1 reference
}

//  Returns a model pointing at fresh video / thumbnail paths.
+ (WBVideoModel *)createNewVideo {
    // Fix: `model` was used without ever being declared or created.
    WBVideoModel *model = [[WBVideoModel alloc] init];
    model.videoAbsolutePath = [NSHomeDirectory() stringByAppendingString:@"/Documents/test.MOV"];
    model.thumAbsolutePath = [NSHomeDirectory() stringByAppendingString:@"/Documents/test.JPG"];
    // Remove leftovers from a previous take so the writer starts clean.
    unlink([model.videoAbsolutePath UTF8String]);
    unlink([model.thumAbsolutePath UTF8String]);
    return model;
}

//  YES when a previously recorded video file exists on disk.
//  (Declared in the header but previously missing from the implementation.)
+ (BOOL)existVideo {
    NSString *videoPath = [NSHomeDirectory() stringByAppendingString:@"/Documents/test.MOV"];
    return [[NSFileManager defaultManager] fileExistsAtPath:videoPath];
}

//  Documents/<wbVideoDicName> directory used for saved videos.
+ (NSString *)getDocumentSubPath {
    // Fix: NSDocumentationDirectory is a different location; the app's
    // Documents directory is NSDocumentDirectory.
    NSString *documentPath = [NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES) firstObject];
    return [documentPath stringByAppendingPathComponent:wbVideoDicName];
}

//  Deletes a video file and its sibling thumbnail.
+ (void)deleteVideo:(NSString *)videoPath {
    NSFileManager *fileManager = [NSFileManager defaultManager];
    NSError *error = nil;
    [fileManager removeItemAtPath:videoPath error:&error];
    if (error) {
        NSLog(@"删除视频失败:%@",error);
    }
    NSString *thumPath = [videoPath stringByReplacingOccurrencesOfString:@"MOV" withString:@"JPG"];
    NSError *error2 = nil;
    [fileManager removeItemAtPath:thumPath error:&error2];
    if (error2) {
        // Fix: previously logged the first error object here instead of error2.
        NSLog(@"删除缩略图失败:%@",error2);
    }
}
@end
WBVideoSupport.h
#import <UIKit/UIKit.h>
#import "WBVideoConfig.h"
@class WBVideoModel;

//************* Press-to-record button ****************
@interface WBRecordBtn : UILabel
- (instancetype)initWithFrame:(CGRect)frame;
@end

//************* Tap-to-focus square ****************
@interface WBFocusView : UIView
/// Plays a brief shrink-and-restore pulse animation.
- (void)focusing;
@end

//************* Why a recording attempt was cancelled ****************
typedef NS_ENUM(NSUInteger, WBRecordCancelReason) {
WBRecordCancelReasonDefault,    // finger released outside the record area
WBRecordCancelReasonTimeShort,  // press shorter than the minimum duration
WBRecordCancelReasonUnknown,
};

@class WBControllerBar;
/// Recording lifecycle events emitted by the bottom control bar.
@protocol WBControllerBarDelegate <NSObject>
@optional

/// Long press began — start capturing.
- (void)ctrollVideoDidStart:(WBControllerBar *)controllerBar;

/// Long press ended normally — finish and save.
- (void)ctrollVideoDidEnd:(WBControllerBar *)controllerBar;

/// Recording was cancelled for the given reason.
- (void)ctrollVideoDidCancel:(WBControllerBar *)controllerBar reason:(WBRecordCancelReason)reason;

/// Finger moved into the cancel zone (release would cancel).
- (void)ctrollVideoWillCancel:(WBControllerBar *)controllerBar;

/// One second of recording elapsed.
- (void)ctrollVideoDidRecordSEC:(WBControllerBar *)controllerBar;

/// The bar requested closing the recorder UI.
- (void)ctrollVideoDidClose:(WBControllerBar *)controllerBar;

/// The bar requested opening the recorded-video list.
- (void)ctrollVideoOpenVideoList:(WBControllerBar *)controllerBar;

@end

//************* Bottom control bar for recording ****************
@interface WBControllerBar : UIView <UIGestureRecognizerDelegate>

/// Recording event callbacks.
/// Fix: `weak` instead of `assign` — an assign object pointer dangles once
/// the delegate deallocates.
@property (nonatomic, weak) id<WBControllerBarDelegate> delegate;

/// Builds the record button, long-press gesture and progress line; call once after init.
- (void)setupSubViews;
@end
WBVideoSupport.m
#import "WBVideoSupport.h"
#import "WBVideoConfig.h"
#pragma mark - Custom View --

// Circular "press to record" button drawn with CoreAnimation layers.
@implementation WBRecordBtn {
UITapGestureRecognizer *_tapGesture;  // NOTE(review): declared but never assigned in this file
}

- (instancetype)initWithFrame:(CGRect)frame{
if (self = [super initWithFrame:frame]) {
    [self setupRoundButton];
    // Rounded clipping for the whole label.
    self.layer.cornerRadius = 40.0f;
    self.layer.masksToBounds = YES;
    self.userInteractionEnabled = YES;
}
return self;
}

// Draws the circular outline, the "按住拍" caption and a gradient masked by
// the ring layer.
- (void)setupRoundButton {
self.backgroundColor = [UIColor clearColor];

CGFloat width = self.frame.size.width;
UIBezierPath *path = [UIBezierPath bezierPathWithRoundedRect:self.bounds cornerRadius:width/2];

// Circular outline.
CAShapeLayer *trackLayer = [CAShapeLayer layer];
trackLayer.frame = self.bounds;
trackLayer.strokeColor = THEMEGREEN.CGColor;
trackLayer.fillColor = [UIColor clearColor].CGColor;
trackLayer.opacity = 1.0;
trackLayer.lineCap = kCALineCapRound;
trackLayer.lineWidth = 2.0;
trackLayer.path = path.CGPath;
[self.layer addSublayer:trackLayer];

    // Caption, centered. The layer retains the CGFont, so releasing our
    // reference right after assignment is safe.
    CATextLayer *textLayer = [CATextLayer layer];
    textLayer.string = @"按住拍";
    textLayer.frame = CGRectMake(0, 0, 120, 30);
    textLayer.position = CGPointMake(self.bounds.size.width/2, self.bounds.size.height/2);
    UIFont *font = [UIFont boldSystemFontOfSize:22];
    CFStringRef fontName = (__bridge CFStringRef)font.fontName;
    CGFontRef fontRef = CGFontCreateWithFontName(fontName);
    textLayer.font = fontRef;
    textLayer.fontSize = font.pointSize;
    CGFontRelease(fontRef);
    textLayer.contentsScale = [UIScreen mainScreen].scale;
    textLayer.foregroundColor = THEMEGREEN.CGColor;
    textLayer.alignmentMode = kCAAlignmentCenter;
    textLayer.wrapped = YES;
    [trackLayer addSublayer:textLayer];
    // Gradient clipped to the ring + caption.
    CAGradientLayer *gradLayer = [CAGradientLayer layer];
gradLayer.frame = self.bounds;
[self.layer addSublayer:gradLayer];
gradLayer.mask = trackLayer;
}
@end

// Square focus indicator with tick marks on each edge midpoint.
@implementation WBFocusView {
CGFloat _width;   // cached side length (view is square)
CGFloat _height;
}
- (instancetype)initWithFrame:(CGRect)frame {
if (self = [super initWithFrame:frame]) {
    _width = CGRectGetWidth(frame);
    _height = _width;
}
return self;
}

// Brief shrink pulse; the transform is restored when the animation ends.
- (void)focusing {
[UIView animateWithDuration:0.5 animations:^{
   
    self.transform = CGAffineTransformScale(CGAffineTransformIdentity, 0.8, 0.8);
} completion:^(BOOL finished) {
    self.transform = CGAffineTransformIdentity;
}];
}

// Draws the outer square plus a short tick at the middle of each edge.
- (void)drawRect:(CGRect)rect {
[super drawRect:rect];
CGContextRef context = UIGraphicsGetCurrentContext();
CGContextSetStrokeColorWithColor(context, THEMEGREEN.CGColor);
CGContextSetLineWidth(context, 1.0);

CGFloat len = 4;  // tick length

CGContextMoveToPoint(context, 0.0, 0.0);
CGContextAddRect(context, self.bounds);

// Inward ticks: left, bottom, right, top.
CGContextMoveToPoint(context, 0, _height/2);
CGContextAddLineToPoint(context, len, _height/2);
CGContextMoveToPoint(context, _width/2, _height);
CGContextAddLineToPoint(context, _width/2, _height - len);
CGContextMoveToPoint(context, _width, _height/2);
CGContextAddLineToPoint(context, _width - len, _height/2);
CGContextMoveToPoint(context, _width/2, 0);
CGContextAddLineToPoint(context, _width/2, len);
    CGContextDrawPath(context, kCGPathStroke);
}
@end

//------  分割线  ------

// Bottom control bar: owns the record button, the long-press gesture and
// the countdown progress line.
@implementation WBControllerBar {
WBRecordBtn *_startBtn;       // press-to-record button
UILongPressGestureRecognizer *_longPress;
UIView *_progressLine;        // countdown bar across the top
BOOL _touchIsInside;          // finger still at/below the bar's top edge
BOOL _recording;
 NSTimer *_timer;             // 1 s countdown tick
NSTimeInterval _surplusTime;  // seconds left before auto-stop
BOOL _videoDidEnd;
}

// Builds the record button (with its own circular outline), the long-press
// gesture that drives recording, and the hidden progress line.
- (void)setupSubViews {
[self layoutIfNeeded];

_startBtn = [[WBRecordBtn alloc] initWithFrame:CGRectMake(0, 100, 200, 100)];
_startBtn.text = @"按住拍";
_startBtn.textAlignment = NSTextAlignmentCenter;
_startBtn.textColor = [UIColor whiteColor];

// Extra circular outline layered onto the button.
CAShapeLayer *solidLine =  [CAShapeLayer layer];
CGMutablePathRef solidPath =  CGPathCreateMutable();
solidLine.lineWidth = 2.0f ;
solidLine.strokeColor = THEMEGREEN.CGColor;
solidLine.fillColor = [UIColor clearColor].CGColor;
CGPathAddEllipseInRect(solidPath, nil, CGRectMake(1,  1, 132, 132));
solidLine.path = solidPath;
CGPathRelease(solidPath);
[_startBtn.layer addSublayer:solidLine];

// Centered via Masonry; the initWithFrame rect above is superseded.
[self addSubview:_startBtn];
[_startBtn mas_makeConstraints:^(MASConstraintMaker *make) {
    make.centerX.mas_equalTo(self.mas_centerX);
    make.centerY.mas_equalTo(self.mas_centerY);
    make.height.width.mas_equalTo(135);
}];

// Near-zero press duration so the button reacts immediately.
_longPress = [[UILongPressGestureRecognizer alloc] initWithTarget:self action:@selector(longpressAction:)];
_longPress.minimumPressDuration = 0.01;
_longPress.delegate = self;
[self addGestureRecognizer:_longPress];

// Countdown line, shown only while recording.
_progressLine = [[UIView alloc] initWithFrame:CGRectMake(0, 0, ScreenWidth, 4)];
_progressLine.backgroundColor = THEMEGREEN;
_progressLine.hidden = YES;
[self addSubview:_progressLine];
_surplusTime = wbRecordTime;
}

// Switches the UI into recording state: shows the progress line, resets the
// countdown, starts the 1 s tick timer and plays the button's grow-and-fade
// animation.
// NOTE(review): a repeating NSTimer retains its target; it is only released
// because -endRecordSet invalidates it — confirm every recording path
// reaches -endRecordSet.
- (void)startRecordSet {
_startBtn.alpha = 1.0;

_progressLine.frame = CGRectMake(0, 0, self.bounds.size.width, 2);
_progressLine.backgroundColor = THEMEGREEN;
_progressLine.hidden = NO;

_surplusTime = wbRecordTime;
_recording = YES;

_videoDidEnd = NO;

if (_timer == nil) {
    _timer = [NSTimer timerWithTimeInterval:1.0 target:self selector:@selector(recordTimerAction) userInfo:nil repeats:YES];
    [[NSRunLoop currentRunLoop] addTimer:_timer forMode:NSDefaultRunLoopMode];
}
// Fire once immediately so the countdown starts without a 1 s delay.
[_timer fire];

// Grow-and-fade feedback on the record button.
[UIView animateWithDuration:0.4 animations:^{
    _startBtn.alpha = 0.0;
    _startBtn.transform = CGAffineTransformScale(CGAffineTransformIdentity, 2.0, 2.0);
} completion:^(BOOL finished) {
    if (finished) {
        _startBtn.transform = CGAffineTransformIdentity;
    }
}];
}

// Restores the idle UI and stops the countdown timer.
- (void)endRecordSet {
    [_timer invalidate];
    _timer = nil;

    _recording = NO;
    _progressLine.hidden = YES;
    _startBtn.alpha = 1;
}

#pragma mark - UIGestureRecognizerDelegate
/// Lets the long press begin only while recording time remains and the touch
/// landed within a circular area around the record button.
- (BOOL)gestureRecognizerShouldBegin:(UIGestureRecognizer *)gestureRecognizer {
    if (gestureRecognizer != _longPress) {
        return YES;
    }
    if (_surplusTime <= 0) {
        return NO;  // countdown exhausted — no more recording allowed
    }

    // Accept the touch when it falls within one button-width of the button's
    // center (a deliberately generous circular hit area).
    CGPoint touchPoint = [gestureRecognizer locationInView:self];
    CGFloat deltaX = touchPoint.x - _startBtn.center.x;
    CGFloat deltaY = touchPoint.y - _startBtn.center.y;
    CGFloat radius = _startBtn.bounds.size.width;

    return (deltaX * deltaX + deltaY * deltaY) < (radius * radius);
}

#pragma mark - Actions --
/// Drives the press-and-hold recording flow: start on Began, warn of cancel
/// while the finger is off the control, and on release either finish the take
/// or cancel it (slid off / too short). System cancellation is treated as a
/// cancel so the timer and state are cleaned up.
- (void)longpressAction:(UILongPressGestureRecognizer *)gesture {
    CGPoint point = [gesture locationInView:self];
    // "Inside" means the finger is at or below the control's top edge.
    _touchIsInside = point.y >= 0;

    switch (gesture.state) {
        case UIGestureRecognizerStateBegan: {
            [self videoStartAction];
        }
            break;

        case UIGestureRecognizerStateChanged: {
            if (!_touchIsInside) {
                // NOTE(review): both branches set THEMEGREEN; this branch
                // probably intended a warning color for "release to cancel" —
                // confirm against the design.
                _progressLine.backgroundColor = THEMEGREEN;
                // Finger slid off the control: tell the delegate that lifting
                // now will cancel the recording.
                if (_delegate && [_delegate respondsToSelector:@selector(ctrollVideoWillCancel:)]) {
                    [_delegate ctrollVideoWillCancel:self];
                }
            }
            else {
                _progressLine.backgroundColor = THEMEGREEN;
            }
        }
            break;

        case UIGestureRecognizerStateEnded: {
            [self endRecordSet];
            if (!_touchIsInside || wbRecordTime - _surplusTime <= 1) {
                // Cancel: either the finger slid off, or the take lasted at
                // most ~1 second and is too short to keep.
                WBRecordCancelReason reason = WBRecordCancelReasonTimeShort;
                if (!_touchIsInside) {
                    reason = WBRecordCancelReasonDefault;
                }
                [self videoCancelAction:reason];
            }
            else {
                [self videoEndAction];
            }
        }
            break;

        case UIGestureRecognizerStateCancelled: {
            // FIX: the original ignored system cancellation (incoming call,
            // app backgrounded, gesture override), leaving the countdown timer
            // running and _recording == YES. Clean up and report a cancel.
            [self endRecordSet];
            [self videoCancelAction:WBRecordCancelReasonDefault];
        }
            break;

        default:
            break;
    }
}

/// Kicks off a recording session, then notifies the delegate.
- (void)videoStartAction {
    [self startRecordSet];
    // respondsToSelector: on nil returns NO, so no separate nil check needed.
    if ([_delegate respondsToSelector:@selector(ctrollVideoDidStart:)]) {
        [_delegate ctrollVideoDidStart:self];
    }
}

/// Forwards a recording cancellation (with its reason) to the delegate.
- (void)videoCancelAction:(WBRecordCancelReason)reason {
    // respondsToSelector: on nil returns NO, so no separate nil check needed.
    if ([_delegate respondsToSelector:@selector(ctrollVideoDidCancel:reason:)]) {
        [_delegate ctrollVideoDidCancel:self reason:reason];
    }
}

/// Notifies the delegate that recording finished; the _videoDidEnd flag makes
/// this idempotent (it can be reached from both the gesture end and the timer
/// countdown hitting zero).
- (void)videoEndAction {
    if (_videoDidEnd) {
        return;  // already reported — do not notify twice
    }
    _videoDidEnd = YES;

    if ([_delegate respondsToSelector:@selector(ctrollVideoDidEnd:)]) {
        [_delegate ctrollVideoDidEnd:self];
    }
}

/// Asks the delegate to open the recorded-video list.
- (void)videoListAction {
    // respondsToSelector: on nil returns NO, so no separate nil check needed.
    if ([_delegate respondsToSelector:@selector(ctrollVideoOpenVideoList:)]) {
        [_delegate ctrollVideoOpenVideoList:self];
    }
}

/// Tells the delegate the user closed the recording screen.
- (void)videoCloseAction {
    // respondsToSelector: on nil returns NO, so no separate nil check needed.
    if ([_delegate respondsToSelector:@selector(ctrollVideoDidClose:)]) {
        [_delegate ctrollVideoDidClose:self];
    }
}

// Timer callback (fires once per second while recording): animates the
// progress line shrinking by one second's worth of width, then decrements the
// remaining time and ends the recording when the countdown reaches zero.
- (void)recordTimerAction {
// Width the line loses per tick, so it reaches zero after wbRecordTime seconds.
CGFloat reduceLen = self.bounds.size.width/wbRecordTime;
CGFloat oldLineLen = _progressLine.frame.size.width;
CGRect oldFrame = _progressLine.frame;

// Animate one second of shrink, re-centering so the line collapses toward the
// horizontal middle rather than shrinking from the right edge only.
[UIView animateWithDuration:1.0 delay: 0.0 options: UIViewAnimationOptionCurveLinear animations:^{
    _progressLine.frame = CGRectMake(oldFrame.origin.x, oldFrame.origin.y, oldLineLen - reduceLen, oldFrame.size.height);
    _progressLine.center = CGPointMake(self.bounds.size.width/2, _progressLine.bounds.size.height/2);
} completion:^(BOOL finished) {
    // NOTE(review): this completion also runs when the animation is cut short
    // (finished == NO), so _surplusTime may be decremented once more after
    // -endRecordSet; -videoEndAction's _videoDidEnd guard prevents a double end.
    _surplusTime --;
    if (_recording) {
        // Report one more recorded second to the delegate.
        if (_delegate && [_delegate respondsToSelector:@selector(ctrollVideoDidRecordSEC:)]) {
            [_delegate ctrollVideoDidRecordSEC:self];
        }
    }
    // Countdown exhausted: stop the timer/UI and finish the recording.
    if (_surplusTime <= 0.0) {
        [self endRecordSet];
        [self videoEndAction];
    }
}];
}

@end

如果上传视频使用保利威视的话要注意以下几点:
1.如果上传是正常的,但是到保利威视后台却有绿屏的情况下,查看自己拍摄的视频分辨率,一般出现都是分辨率太低
2.播放的视频不清楚,需要自己在保利威视后台设置播放的格式,一般分流畅-->标清-->高清等
3.如果上传之后,但是获取不到返回的mp4地址,就去自己的管理后台看一下,自己的视频到底有没有上传上去
4.有时候根据视频url会出现点击播放但是没有视频url却存在的问题,因为保利威视会有个视频转码的过程,此处需要自己处理一下,可根据网络url,截取视频第一帧,通过第一帧的image,判断image是否等于nil,如果等于nil,就做其他处理,例如提示视频正在转码类,不等于nil就直接播放

最后编辑于
©著作权归作者所有,转载或内容合作请联系作者
  • 序言:七十年代末,一起剥皮案震惊了整个滨河市,随后出现的几起案子,更是在滨河造成了极大的恐慌,老刑警刘岩,带你破解...
    沈念sama阅读 203,362评论 5 477
  • 序言:滨河连续发生了三起死亡事件,死亡现场离奇诡异,居然都是意外死亡,警方通过查阅死者的电脑和手机,发现死者居然都...
    沈念sama阅读 85,330评论 2 381
  • 文/潘晓璐 我一进店门,熙熙楼的掌柜王于贵愁眉苦脸地迎上来,“玉大人,你说我怎么就摊上这事。” “怎么了?”我有些...
    开封第一讲书人阅读 150,247评论 0 337
  • 文/不坏的土叔 我叫张陵,是天一观的道长。 经常有香客问我,道长,这世上最难降的妖魔是什么? 我笑而不...
    开封第一讲书人阅读 54,560评论 1 273
  • 正文 为了忘掉前任,我火速办了婚礼,结果婚礼上,老公的妹妹穿的比我还像新娘。我一直安慰自己,他们只是感情好,可当我...
    茶点故事阅读 63,580评论 5 365
  • 文/花漫 我一把揭开白布。 她就那样静静地躺着,像睡着了一般。 火红的嫁衣衬着肌肤如雪。 梳的纹丝不乱的头发上,一...
    开封第一讲书人阅读 48,569评论 1 281
  • 那天,我揣着相机与录音,去河边找鬼。 笑死,一个胖子当着我的面吹牛,可吹牛的内容都是我干的。 我是一名探鬼主播,决...
    沈念sama阅读 37,929评论 3 395
  • 文/苍兰香墨 我猛地睁开眼,长吁一口气:“原来是场噩梦啊……” “哼!你这毒妇竟也来了?” 一声冷哼从身侧响起,我...
    开封第一讲书人阅读 36,587评论 0 258
  • 序言:老挝万荣一对情侣失踪,失踪者是张志新(化名)和其女友刘颖,没想到半个月后,有当地人在树林里发现了一具尸体,经...
    沈念sama阅读 40,840评论 1 297
  • 正文 独居荒郊野岭守林人离奇死亡,尸身上长有42处带血的脓包…… 初始之章·张勋 以下内容为张勋视角 年9月15日...
    茶点故事阅读 35,596评论 2 321
  • 正文 我和宋清朗相恋三年,在试婚纱的时候发现自己被绿了。 大学时的朋友给我发了我未婚夫和他白月光在一起吃饭的照片。...
    茶点故事阅读 37,678评论 1 329
  • 序言:一个原本活蹦乱跳的男人离奇死亡,死状恐怖,灵堂内的尸体忽然破棺而出,到底是诈尸还是另有隐情,我是刑警宁泽,带...
    沈念sama阅读 33,366评论 4 318
  • 正文 年R本政府宣布,位于F岛的核电站,受9级特大地震影响,放射性物质发生泄漏。R本人自食恶果不足惜,却给世界环境...
    茶点故事阅读 38,945评论 3 307
  • 文/蒙蒙 一、第九天 我趴在偏房一处隐蔽的房顶上张望。 院中可真热闹,春花似锦、人声如沸。这庄子的主人今日做“春日...
    开封第一讲书人阅读 29,929评论 0 19
  • 文/苍兰香墨 我抬头看了看天上的太阳。三九已至,却和暖如春,着一层夹袄步出监牢的瞬间,已是汗流浃背。 一阵脚步声响...
    开封第一讲书人阅读 31,165评论 1 259
  • 我被黑心中介骗来泰国打工, 没想到刚下飞机就差点儿被人妖公主榨干…… 1. 我叫王不留,地道东北人。 一个月前我还...
    沈念sama阅读 43,271评论 2 349
  • 正文 我出身青楼,却偏偏与公主长得像,于是被迫代替她去往敌国和亲。 传闻我的和亲对象是个残疾皇子,可洞房花烛夜当晚...
    茶点故事阅读 42,403评论 2 342

推荐阅读更多精彩内容