Invocation
- (void)setupConfigWithVideo {
    NSString *unUserInfo = nil;
    if (TARGET_IPHONE_SIMULATOR) {
        unUserInfo = @"Your device does not support this feature";
    }
    AVAuthorizationStatus videoAuthStatus = [AVCaptureDevice authorizationStatusForMediaType:AVMediaTypeVideo];
    if (videoAuthStatus == AVAuthorizationStatusRestricted || videoAuthStatus == AVAuthorizationStatusDenied) {
        unUserInfo = @"Camera access is restricted";
    }
    AVAuthorizationStatus audioAuthStatus = [AVCaptureDevice authorizationStatusForMediaType:AVMediaTypeAudio];
    if (audioAuthStatus == AVAuthorizationStatusRestricted || audioAuthStatus == AVAuthorizationStatusDenied) {
        unUserInfo = @"Microphone access is restricted";
    }
    ALAuthorizationStatus albumAuthStatus = [ALAssetsLibrary authorizationStatus];
    if (albumAuthStatus == ALAuthorizationStatusRestricted || albumAuthStatus == ALAuthorizationStatusDenied) {
        unUserInfo = @"Photo library access is restricted";
    }
    if (unUserInfo != nil) {
        [self alertWithClick:unUserInfo];
        return;
    } else {
        [self pushWithTakeVideo];
    }
}
- (void)alertWithClick:(NSString *)msg {
    UIAlertController *alert = [UIAlertController alertControllerWithTitle:@"Notice" message:msg preferredStyle:UIAlertControllerStyleAlert];
    UIAlertAction *action = [UIAlertAction actionWithTitle:@"OK" style:UIAlertActionStyleDefault handler:^(UIAlertAction * _Nonnull action) {
        [SVProgressHUD dismiss];
    }];
    [alert addAction:action];
    [self presentViewController:alert animated:YES completion:nil];
}
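One caveat on the check above: authorizationStatusForMediaType: only reads the current status, so a user who has never been asked falls through to the capture screen and gets the system prompt there. A minimal sketch of requesting camera access up front instead (the main-queue hop and the reuse of pushWithTakeVideo / alertWithClick: are my own wiring, not from the original):

[AVCaptureDevice requestAccessForMediaType:AVMediaTypeVideo completionHandler:^(BOOL granted) {
    // the handler may run on an arbitrary queue; hop back to main before touching UI
    dispatch_async(dispatch_get_main_queue(), ^{
        if (granted) {
            [self pushWithTakeVideo];
        } else {
            [self alertWithClick:@"Camera access is restricted"];
        }
    });
}];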
Presenting the capture screen
- (void)pushWithTakeVideo {
    WBTakeVideoViewController *videoVC = [[WBTakeVideoViewController alloc] init];
    videoVC.delegate = self;
    videoVC.takeDelegate = self;
    [self presentViewController:videoVC animated:YES completion:nil];
}
WBTakeVideoViewController.h
#import <UIKit/UIKit.h>
#import "WBVideoConfig.h"
@protocol TakeVideoDelegate <NSObject>
- (void)takeVideoDelegateAction:(NSString *)videoPath;
@end
@protocol WBVideoViewControllerDelegate;
@interface WBTakeVideoViewController : UIViewController
@property (nonatomic, weak) id<WBVideoViewControllerDelegate> delegate;
@property (nonatomic, assign) BOOL savePhotoAlbum; // whether to save the result to the photo album
@property (nonatomic, weak) id<TakeVideoDelegate> takeDelegate;
@end
// recording delegate
@protocol WBVideoViewControllerDelegate <NSObject>
@required
- (void)videoViewController:(WBTakeVideoViewController *)videoController didRecordVideo:(WBVideoModel *)videoModel; // recording finished
@optional
- (void)videoViewControllerDidCancel:(WBTakeVideoViewController *)videoController; // cancelled
@end
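For context, a minimal sketch of what the presenting controller adopts (MyViewController is a hypothetical name; the method bodies are placeholders):

@interface MyViewController () <WBVideoViewControllerDelegate, TakeVideoDelegate>
@end

@implementation MyViewController
- (void)videoViewController:(WBTakeVideoViewController *)videoController didRecordVideo:(WBVideoModel *)videoModel {
    NSLog(@"recording finished, local path: %@", videoModel.videoAbsolutePath);
}
- (void)videoViewControllerDidCancel:(WBTakeVideoViewController *)videoController {
    NSLog(@"recording cancelled");
}
- (void)takeVideoDelegateAction:(NSString *)videoPath {
    NSLog(@"video saved to the album, local path: %@", videoPath);
}
@end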
WBTakeVideoViewController.m
#import "WBTakeVideoViewController.h"
#import <AVFoundation/AVFoundation.h>
#import <AssetsLibrary/AssetsLibrary.h>
#import <Photos/Photos.h>
#import "WBVideoConfig.h"
#import "WBVideoSupport.h"
@interface WBTakeVideoViewController () <WBControllerBarDelegate,AVCaptureVideoDataOutputSampleBufferDelegate, AVCaptureAudioDataOutputSampleBufferDelegate,AVCaptureFileOutputRecordingDelegate>
@property (nonatomic, strong) UIView *videoView;
@property (nonatomic, strong) UIView *holdView;
@property (nonatomic, strong) UIButton *holdBut;
@property (nonatomic, strong) UIView *actionView;
@end
static WBTakeVideoViewController *__currentVC = nil;
@implementation WBTakeVideoViewController
{
    dispatch_queue_t _recoding_queue;
    AVCaptureSession *_videoSession; // shuttles data between the input and output devices
    AVCaptureVideoPreviewLayer *_videoPreLayer; // camera preview layer
    AVCaptureDevice *_videoDevice; // input device (microphone, camera, etc.)
    AVCaptureVideoDataOutput *_videoDataOut;
    AVCaptureAudioDataOutput *_audioDataOut;
    WBControllerBar *_ctrlBar; // control bar
    AVAssetWriter *_assetWriter;
    AVAssetWriterInputPixelBufferAdaptor *_assetWriterPixelBufferInput;
    AVAssetWriterInput *_assetWriterVideoInput;
    AVAssetWriterInput *_assetWriterAudioInput;
    CMTime _currentSampleTime;
    BOOL _recoding;
    WBFocusView *_focusView;
    UILabel *_statusInfo;
    UILabel *_cancelInfo;
    WBVideoModel *_currentRecord;
    BOOL _currentRecordIsCancel;
}
- (void)viewDidLoad {
    [super viewDidLoad];
    // Do any additional setup after loading the view.
    __currentVC = self;
    [self setupSubView];
    self.view.backgroundColor = THENEBLACK;
    [self viewDidComplete];
    [self setupWithVideo];
    _savePhotoAlbum = YES;
    UIView *topView = [[UIView alloc] initWithFrame:CGRectMake(0, 0, ScreenWidth, 64)];
    topView.backgroundColor = THENEBLACK;
    [self.view addSubview:topView];
    UIButton *left = [UIButton buttonWithType:UIButtonTypeCustom];
    left.frame = CGRectMake(10, 20, 40, 30);
    left.titleLabel.font = [UIFont systemFontOfSize:14.0f];
    [left setTitle:@"Cancel" forState:UIControlStateNormal];
    [left addTarget:self action:@selector(leftAction) forControlEvents:UIControlEventTouchUpInside];
    [topView addSubview:left];
}
- (void)leftAction {
    [self dismissViewControllerAnimated:YES completion:nil];
}
- (void)viewWillDisappear:(BOOL)animated {
    [super viewWillDisappear:animated];
}
- (void)endAnimation {
    [UIView animateWithDuration:0.3 animations:^{
        self.view.backgroundColor = [UIColor clearColor];
        self.actionView.transform = CGAffineTransformTranslate(CGAffineTransformIdentity, 0, ScreenHeight);
    } completion:^(BOOL finished) {
        [self closeView];
    }];
}
- (void)closeView {
    [_videoSession stopRunning];
    [_videoPreLayer removeFromSuperlayer];
    _videoPreLayer = nil;
    [_videoView removeFromSuperview];
    _videoView = nil;
    _videoDevice = nil;
    _videoDataOut = nil;
    _assetWriter = nil;
    _assetWriterAudioInput = nil;
    _assetWriterVideoInput = nil;
    _assetWriterPixelBufferInput = nil;
    __currentVC = nil;
}
// add subviews: control bar, focus view and hint labels
- (void)setupSubView {
    /*self.view Config*/
    self.view.backgroundColor = [UIColor whiteColor];
    if ([self.navigationController respondsToSelector:@selector(interactivePopGestureRecognizer)]) {
        self.navigationController.interactivePopGestureRecognizer.enabled = NO;
    }
    _actionView = [[UIView alloc] initWithFrame:[WBVideoConfig viewFrame]];
    [self.view addSubview:_actionView];
    _actionView.clipsToBounds = YES;
    CGSize videoViewSize = [WBVideoConfig videoViewDefaultSize];
    // control bar below the video
    _videoView = [[UIView alloc] initWithFrame:CGRectMake(0, 0, ScreenWidth, videoViewSize.height)];
    [self.actionView addSubview:_videoView];
    _ctrlBar = [[WBControllerBar alloc] initWithFrame:CGRectMake(0, 0, ScreenWidth, ScreenHeight)];
    [_ctrlBar setupSubViews];
    _ctrlBar.delegate = self;
    [self.view addSubview:_ctrlBar];
    [_ctrlBar mas_makeConstraints:^(MASConstraintMaker *make) {
        make.left.right.mas_equalTo(0);
        make.height.mas_equalTo(ScreenHeight/2);
        make.top.mas_equalTo(self.videoView.mas_bottom);
    }];
    UITapGestureRecognizer *tapGesture = [[UITapGestureRecognizer alloc] initWithTarget:self action:@selector(focusAction:)];
    tapGesture.delaysTouchesBegan = YES;
    [_videoView addGestureRecognizer:tapGesture];
    UITapGestureRecognizer *doubleTapGesture = [[UITapGestureRecognizer alloc] initWithTarget:self action:@selector(zoomVideo:)];
    doubleTapGesture.numberOfTapsRequired = 2;
    doubleTapGesture.numberOfTouchesRequired = 1;
    doubleTapGesture.delaysTouchesBegan = YES;
    [_videoView addGestureRecognizer:doubleTapGesture];
    [tapGesture requireGestureRecognizerToFail:doubleTapGesture];
    _focusView = [[WBFocusView alloc] initWithFrame:CGRectMake(0, 0, 60, 60)];
    _focusView.backgroundColor = [UIColor clearColor];
    _statusInfo = [[UILabel alloc] initWithFrame:CGRectMake(0, CGRectGetMaxY(_videoView.frame) - 30, _videoView.frame.size.width, 20)];
    _statusInfo.textAlignment = NSTextAlignmentCenter;
    _statusInfo.font = [UIFont systemFontOfSize:14.0];
    _statusInfo.textColor = [UIColor whiteColor];
    _statusInfo.hidden = YES;
    [self.actionView addSubview:_statusInfo];
    _cancelInfo = [[UILabel alloc] initWithFrame:CGRectMake(0, 0, 120, 24)];
    _cancelInfo.center = _videoView.center;
    _cancelInfo.textAlignment = NSTextAlignmentCenter;
    _cancelInfo.hidden = YES;
    [self.actionView addSubview:_cancelInfo];
    [_actionView sendSubviewToBack:_videoView];
}
- (void)setupWithVideo {
    _recoding_queue = dispatch_queue_create("com.wbsmallvideo.queue", DISPATCH_QUEUE_SERIAL);
    NSArray *deviceVideo = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];
    NSArray *deviceAudio = [AVCaptureDevice devicesWithMediaType:AVMediaTypeAudio];
    AVCaptureDeviceInput *videoInput = [AVCaptureDeviceInput deviceInputWithDevice:deviceVideo[0] error:nil];
    AVCaptureDeviceInput *audioInput = [AVCaptureDeviceInput deviceInputWithDevice:deviceAudio[0] error:nil];
    _videoDevice = deviceVideo[0];
    _videoDataOut = [[AVCaptureVideoDataOutput alloc] init];
    _videoDataOut.videoSettings = @{(__bridge NSString *)kCVPixelBufferPixelFormatTypeKey : @(kCVPixelFormatType_32BGRA)};
    _videoDataOut.alwaysDiscardsLateVideoFrames = YES;
    [_videoDataOut setSampleBufferDelegate:self queue:_recoding_queue];
    _audioDataOut = [[AVCaptureAudioDataOutput alloc] init];
    [_audioDataOut setSampleBufferDelegate:self queue:_recoding_queue];
    _videoSession = [[AVCaptureSession alloc] init];
    if ([_videoSession canSetSessionPreset:AVCaptureSessionPreset640x480]) {
        _videoSession.sessionPreset = AVCaptureSessionPreset640x480;
    }
    if ([_videoSession canAddInput:videoInput]) {
        [_videoSession addInput:videoInput];
    }
    if ([_videoSession canAddInput:audioInput]) {
        [_videoSession addInput:audioInput];
    }
    if ([_videoSession canAddOutput:_videoDataOut]) {
        [_videoSession addOutput:_videoDataOut];
    }
    if ([_videoSession canAddOutput:_audioDataOut]) {
        [_videoSession addOutput:_audioDataOut];
    }
    CGFloat viewWidth = CGRectGetWidth(self.videoView.frame);
    _videoPreLayer = [[AVCaptureVideoPreviewLayer alloc] initWithSession:_videoSession];
    _videoPreLayer.frame = CGRectMake(0, -CGRectGetMinY(_videoView.frame), viewWidth, viewWidth*wbVideo_w_h);
    _videoPreLayer.position = CGPointMake(viewWidth/2, CGRectGetHeight(_videoView.frame)/2);
    _videoPreLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
    [_videoView.layer addSublayer:_videoPreLayer];
    [_videoSession startRunning];
}
// zoom hint label
- (void)viewDidComplete {
    __block UILabel *zoomLab = [[UILabel alloc] initWithFrame:CGRectMake(0, 0, 200, 20)];
    zoomLab.center = CGPointMake(self.videoView.center.x, CGRectGetMaxY(self.videoView.frame) - 50);
    zoomLab.font = [UIFont boldSystemFontOfSize:14];
    zoomLab.text = @"Double-tap to zoom";
    zoomLab.textColor = [UIColor whiteColor];
    zoomLab.textAlignment = NSTextAlignmentCenter;
    [_videoView addSubview:zoomLab];
    [_videoView bringSubviewToFront:zoomLab];
    wbdispatch_after(1.6, ^{
        [zoomLab removeFromSuperview];
    });
}
- (void)focusInPointAtVideoView:(CGPoint)point {
    CGPoint cameraPoint = [_videoPreLayer captureDevicePointOfInterestForPoint:point];
    _focusView.center = point;
    [_videoView addSubview:_focusView];
    [_videoView bringSubviewToFront:_focusView];
    [_focusView focusing];
    NSError *error = nil;
    if ([_videoDevice lockForConfiguration:&error]) {
        if ([_videoDevice isFocusPointOfInterestSupported]) {
            _videoDevice.focusPointOfInterest = cameraPoint;
        }
        if ([_videoDevice isFocusModeSupported:AVCaptureFocusModeAutoFocus]) {
            _videoDevice.focusMode = AVCaptureFocusModeAutoFocus;
        }
        if ([_videoDevice isExposureModeSupported:AVCaptureExposureModeAutoExpose]) {
            _videoDevice.exposureMode = AVCaptureExposureModeAutoExpose;
        }
        if ([_videoDevice isWhiteBalanceModeSupported:AVCaptureWhiteBalanceModeAutoWhiteBalance]) {
            _videoDevice.whiteBalanceMode = AVCaptureWhiteBalanceModeAutoWhiteBalance;
        }
        [_videoDevice unlockForConfiguration];
    }
    if (error) {
        NSLog(@"Focus failed: %@", error);
    }
    wbdispatch_after(1.0, ^{
        [_focusView removeFromSuperview];
    });
}
#pragma mark - Actions --
- (void)focusAction:(UITapGestureRecognizer *)gesture {
    CGPoint point = [gesture locationInView:_videoView];
    [self focusInPointAtVideoView:point];
}
- (void)zoomVideo:(UITapGestureRecognizer *)gesture {
    NSError *error = nil;
    if ([_videoDevice lockForConfiguration:&error]) {
        CGFloat zoom = _videoDevice.videoZoomFactor == 2.0 ? 1.0 : 2.0;
        _videoDevice.videoZoomFactor = zoom;
        [_videoDevice unlockForConfiguration];
    }
}
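If the jump between 1x and 2x feels abrupt, AVCaptureDevice can animate the transition. A sketch of an alternative handler (zoomVideoSmoothly is a hypothetical name and the rate of 4.0 is an arbitrary choice, not values from the original):

- (void)zoomVideoSmoothly {
    NSError *error = nil;
    if ([_videoDevice lockForConfiguration:&error]) {
        CGFloat target = (_videoDevice.videoZoomFactor > 1.0) ? 1.0 : 2.0;
        // clamp to what the active format actually supports
        CGFloat maxZoom = _videoDevice.activeFormat.videoMaxZoomFactor;
        [_videoDevice rampToVideoZoomFactor:MIN(target, maxZoom) withRate:4.0];
        [_videoDevice unlockForConfiguration];
    }
}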
#pragma mark - controllerBarDelegate
- (void)ctrollVideoDidStart:(WBControllerBar *)controllerBar {
    _currentRecord = [WBVideoUtil createNewVideo];
    _currentRecordIsCancel = NO;
    NSURL *outURL = [NSURL fileURLWithPath:_currentRecord.videoAbsolutePath];
    [self createWriter:outURL];
    _statusInfo.textColor = THEMEGREEN;
    _statusInfo.text = @"↑ Slide up to cancel";
    _statusInfo.hidden = NO;
    wbdispatch_after(0.5, ^{
        _statusInfo.hidden = YES;
    });
    _recoding = YES;
    NSLog(@"video recording started");
    UIBarButtonItem *leftItem = [[UIBarButtonItem alloc] initWithTitle:@"Cancel" style:UIBarButtonItemStylePlain target:self action:@selector(leftItemAction)];
    self.navigationItem.leftBarButtonItem = leftItem;
}
- (void)leftItemAction {
    [self dismissViewControllerAnimated:YES completion:nil];
}
- (void)ctrollVideoDidEnd:(WBControllerBar *)controllerBar {
    _recoding = NO;
    [self saveVideo:^(NSURL *outFileURL) {
        if (_delegate) {
            [_delegate videoViewController:self didRecordVideo:_currentRecord];
            [self endAnimation];
        }
    }];
    NSLog(@"video recording finished");
}
- (void)ctrollVideoDidCancel:(WBControllerBar *)controllerBar reason:(WBRecordCancelReason)reason {
    _currentRecordIsCancel = YES;
    _recoding = NO;
    if (reason == WBRecordCancelReasonTimeShort) {
        [WBVideoConfig showHinInfo:@"Recording too short" inView:_videoView frame:CGRectMake(0, CGRectGetHeight(_videoView.frame)/3*2, CGRectGetWidth(_videoView.frame), 20) timeLong:1.0];
    }
    NSLog(@"video recording cancelled");
}
- (void)ctrollVideoWillCancel:(WBControllerBar *)controllerBar {
    if (!_cancelInfo.hidden) {
        return;
    }
    _cancelInfo.text = @"Release to cancel";
    _cancelInfo.hidden = NO;
    wbdispatch_after(0.5, ^{
        _cancelInfo.hidden = YES;
    });
}
- (void)ctrollVideoDidRecordSEC:(WBControllerBar *)controllerBar {
    // _topSlideView.isRecoding = YES;
    // NSLog(@"one more second recorded");
}
- (void)ctrollVideoDidClose:(WBControllerBar *)controllerBar {
    // NSLog(@"recording screen closed");
    if (_delegate && [_delegate respondsToSelector:@selector(videoViewControllerDidCancel:)]) {
        [_delegate videoViewControllerDidCancel:self];
    }
    [self endAnimation];
}
// create the asset writer and its inputs (video, audio)
- (void)createWriter:(NSURL *)assetUrl {
    NSError *error = nil;
    _assetWriter = [AVAssetWriter assetWriterWithURL:assetUrl fileType:AVFileTypeQuickTimeMovie error:&error];
    int videoWidth = [WBVideoConfig defualtVideoSize].width;
    int videoHeight = [WBVideoConfig defualtVideoSize].height;
    // width and height are swapped on purpose: the buffers arrive in landscape
    // and are rotated 90° via the input's transform below
    NSDictionary *outputSettings = @{
        AVVideoCodecKey : AVVideoCodecH264,
        AVVideoWidthKey : @(videoHeight),
        AVVideoHeightKey : @(videoWidth),
        AVVideoScalingModeKey : AVVideoScalingModeResizeAspectFill,
        // AVVideoCompressionPropertiesKey : codecSettings
    };
    _assetWriterVideoInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo outputSettings:outputSettings];
    _assetWriterVideoInput.expectsMediaDataInRealTime = YES; // data arrives in real time
    _assetWriterVideoInput.transform = CGAffineTransformMakeRotation(M_PI / 2.0);
    NSDictionary *audioOutputSettings = @{
        AVFormatIDKey : @(kAudioFormatMPEG4AAC),
        AVEncoderBitRateKey : @(64000),
        AVSampleRateKey : @(44100),
        AVNumberOfChannelsKey : @(1),
    };
    _assetWriterAudioInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeAudio outputSettings:audioOutputSettings];
    _assetWriterAudioInput.expectsMediaDataInRealTime = YES;
    NSDictionary *SPBADictionary = @{
        (__bridge NSString *)kCVPixelBufferPixelFormatTypeKey : @(kCVPixelFormatType_32BGRA),
        (__bridge NSString *)kCVPixelBufferWidthKey : @(videoWidth),
        (__bridge NSString *)kCVPixelBufferHeightKey : @(videoHeight),
        (__bridge NSString *)kCVPixelBufferOpenGLESCompatibilityKey : ((__bridge NSNumber *)kCFBooleanTrue)
    };
    _assetWriterPixelBufferInput = [AVAssetWriterInputPixelBufferAdaptor assetWriterInputPixelBufferAdaptorWithAssetWriterInput:_assetWriterVideoInput sourcePixelBufferAttributes:SPBADictionary];
    if ([_assetWriter canAddInput:_assetWriterVideoInput]) {
        [_assetWriter addInput:_assetWriterVideoInput];
    } else {
        NSLog(@"cannot add video input %@ to the writer", _assetWriterVideoInput);
    }
    if ([_assetWriter canAddInput:_assetWriterAudioInput]) {
        [_assetWriter addInput:_assetWriterAudioInput];
    } else {
        NSLog(@"cannot add audio input %@ to the writer", _assetWriterAudioInput);
    }
    if (error) {
        NSLog(@"error = %@", [error localizedDescription]);
    }
    NSLog(@"_assetWriter status = %ld", (long)_assetWriter.status);
}
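For reference, the commented-out codecSettings above could look like the sketch below. The keys are standard AVFoundation constants; the concrete bitrate and keyframe interval are assumptions to illustrate the shape, not values from the original:

NSDictionary *codecSettings = @{
    AVVideoAverageBitRateKey : @(1000 * 1024), // target average bitrate in bits per second (assumed value)
    AVVideoMaxKeyFrameIntervalKey : @(30)      // at most 30 frames between keyframes (assumed value)
};
// then add AVVideoCompressionPropertiesKey : codecSettings to outputSettings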
- (void)saveVideo:(void (^)(NSURL *outFileURL))completion {
    if (_recoding) return;
    if (!_recoding_queue) {
        completion(nil);
        return;
    }
    dispatch_async(_recoding_queue, ^{
        NSURL *outputFileURL = [NSURL fileURLWithPath:_currentRecord.videoAbsolutePath];
        [_assetWriter finishWritingWithCompletionHandler:^{
            if (_currentRecordIsCancel) return;
            // save the thumbnail, then hand the file URL back on the main queue
            [WBVideoUtil saveThumImageWithVideoURL:outputFileURL second:1];
            if (completion) {
                dispatch_async(dispatch_get_main_queue(), ^{
                    completion(outputFileURL);
                });
            }
            if (_savePhotoAlbum) {
                BOOL ios8Later = [[[UIDevice currentDevice] systemVersion] floatValue] >= 8;
                if (ios8Later) {
                    [[PHPhotoLibrary sharedPhotoLibrary] performChanges:^{
                        [PHAssetChangeRequest creationRequestForAssetFromVideoAtFileURL:outputFileURL];
                    } completionHandler:^(BOOL success, NSError * _Nullable error) {
                        if (!error && success) {
                            [self.takeDelegate takeVideoDelegateAction:_currentRecord.videoAbsolutePath];
                        } else {
                            NSLog(@"Saving to the photo album failed: %@", error);
                        }
                    }];
                } else {
                    [[[ALAssetsLibrary alloc] init] writeVideoAtPathToSavedPhotosAlbum:outputFileURL completionBlock:^(NSURL *assetURL, NSError *error) {
                        if (!error) {
                            [self.takeDelegate takeVideoDelegateAction:_currentRecord.videoAbsolutePath];
                            NSLog(@"Saved to the photo album!");
                        } else {
                            NSLog(@"Saving to the photo album failed!");
                        }
                    }];
                }
            }
        }];
    });
}
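The save path assumes album permission was already granted by the check in setupConfigWithVideo. If you want to re-verify at save time on iOS 8+, a minimal sketch using the Photos-framework equivalent of the ALAssetsLibrary status check:

PHAuthorizationStatus status = [PHPhotoLibrary authorizationStatus];
if (status == PHAuthorizationStatusRestricted || status == PHAuthorizationStatusDenied) {
    // photo library access unavailable: skip the album save and surface a hint instead
}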
#pragma mark - AVCaptureVideoDataOutputSampleBufferDelegate, AVCaptureAudioDataOutputSampleBufferDelegate
- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection {
    if (!_recoding) return;
    @autoreleasepool {
        _currentSampleTime = CMSampleBufferGetOutputPresentationTimeStamp(sampleBuffer);
        if (_assetWriter.status != AVAssetWriterStatusWriting) {
            [_assetWriter startWriting];
            [_assetWriter startSessionAtSourceTime:_currentSampleTime];
        }
        if (captureOutput == _videoDataOut) {
            if (_assetWriterPixelBufferInput.assetWriterInput.isReadyForMoreMediaData) {
                CVPixelBufferRef pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
                BOOL success = [_assetWriterPixelBufferInput appendPixelBuffer:pixelBuffer withPresentationTime:_currentSampleTime];
                if (!success) {
                    NSLog(@"failed to append pixel buffer");
                }
            }
        }
        if (captureOutput == _audioDataOut) {
            [_assetWriterAudioInput appendSampleBuffer:sampleBuffer];
        }
    }
}
- (void)captureOutput:(AVCaptureOutput *)captureOutput didDropSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection {
}
- (void)captureOutput:(AVCaptureFileOutput *)captureOutput didFinishRecordingToOutputFileAtURL:(NSURL *)outputFileURL fromConnections:(NSArray *)connections error:(NSError *)error {
    ALAssetsLibrary *library = [[ALAssetsLibrary alloc] init];
    if ([library videoAtPathIsCompatibleWithSavedPhotosAlbum:outputFileURL]) {
        [library writeVideoAtPathToSavedPhotosAlbum:outputFileURL completionBlock:^(NSURL *assetURL, NSError *error) {
            dispatch_async(dispatch_get_main_queue(), ^{
                if (error) {
                    // error
                } else {
                    // success
                }
            });
        }];
    }
    NSLog(@"recordEnd");
}
@end
WBVideoConfig.h
#import <Foundation/Foundation.h>
// maximum recording duration in seconds
#define wbRecordTime 10.0
// video aspect ratio (width : height)
#define wbVideo_w_h (4.0/3)
// default horizontal resolution of the video; height = wbVideoWidthPX / wbVideo_w_h
#define wbVideoWidthPX [UIScreen mainScreen].bounds.size.height
// control bar height on small screens
#define wbControViewHeight 120.0
// directory name for saved videos
#define wbVideoDicName @"wbSmailVideo"
extern void wbdispatch_after(float time, dispatch_block_t block);
@interface WBVideoConfig : NSObject
+ (CGRect)viewFrame;
// size of the video preview view
+ (CGSize)videoViewDefaultSize;
// default video resolution
+ (CGSize)defualtVideoSize;
+ (void)showHinInfo:(NSString *)text inView:(UIView *)superView frame:(CGRect)frame timeLong:(NSTimeInterval)time;
@end
// video model
@interface WBVideoModel : NSObject
/// local path of the full video
@property (nonatomic, copy) NSString *videoAbsolutePath;
/// path of the thumbnail
@property (nonatomic, copy) NSString *thumAbsolutePath;
// recording time
//@property (nonatomic, strong) NSDate *recordTime;
@end
// video recording helpers
@interface WBVideoUtil : NSObject
// save a thumbnail
// @param videoUrl path of the video
// @param second which second to take the thumbnail at
+ (void)saveThumImageWithVideoURL:(NSURL *)videoUrl second:(int64_t)second;
// create a new model
+ (WBVideoModel *)createNewVideo;
// whether a video exists
+ (BOOL)existVideo;
// delete a video
+ (void)deleteVideo:(NSString *)videoPath;
//+ (NSString *)getVideoPath;
@end
WBVideoConfig.m
#import "WBVideoConfig.h"
void wbdispatch_after(float time, dispatch_block_t block)
{
    dispatch_after(dispatch_time(DISPATCH_TIME_NOW, (int64_t)(time * NSEC_PER_SEC)), dispatch_get_main_queue(), block);
}
@implementation WBVideoConfig
+ (CGRect)viewFrame {
    return CGRectMake(0, 0, ScreenWidth, ScreenHeight/2);
}
+ (CGSize)videoViewDefaultSize {
    return CGSizeMake(ScreenWidth, ScreenHeight/2);
}
+ (CGSize)defualtVideoSize {
    return CGSizeMake(wbVideoWidthPX, wbVideoWidthPX/wbVideo_w_h);
}
+ (void)showHinInfo:(NSString *)text inView:(UIView *)superView frame:(CGRect)frame timeLong:(NSTimeInterval)time {
    __block UILabel *zoomLab = [[UILabel alloc] initWithFrame:frame];
    zoomLab.font = [UIFont boldSystemFontOfSize:15.0];
    zoomLab.text = text;
    zoomLab.textColor = [UIColor whiteColor];
    zoomLab.textAlignment = NSTextAlignmentCenter;
    [superView addSubview:zoomLab];
    [superView bringSubviewToFront:zoomLab];
    wbdispatch_after(1.6, ^{
        [zoomLab removeFromSuperview];
    });
}
@end
@implementation WBVideoModel
+ (instancetype)modelWithPath:(NSString *)videoPath thumPath:(NSString *)thumPath recordTime:(NSDate *)recordTime {
    WBVideoModel *model = [[WBVideoModel alloc] init];
    model.videoAbsolutePath = videoPath;
    model.thumAbsolutePath = thumPath;
    return model;
}
@end
@implementation WBVideoUtil
+ (void)saveThumImageWithVideoURL:(NSURL *)videoUrl second:(int64_t)second {
AVURLAsset *urlSet = [AVURLAsset assetWithURL:videoUrl];
AVAssetImageGenerator *imageGenerator = [AVAssetImageGenerator assetImageGeneratorWithAsset:urlSet];
NSString *videoPath = [videoUrl.absoluteString stringByReplacingOccurrencesOfString:@"file://" withString: @""];
NSString *thumPath = [videoPath stringByReplacingOccurrencesOfString:@"MOV" withString: @"JPG"];
UIImage *shotImage;
//视频路径URL
NSURL *fileURL = [NSURL fileURLWithPath:videoPath];
AVURLAsset *asset = [[AVURLAsset alloc] initWithURL:fileURL options:nil];
AVAssetImageGenerator *gen = [[AVAssetImageGenerator alloc] initWithAsset:asset];
gen.appliesPreferredTrackTransform = YES;
CMTime time = CMTimeMake(second, 10);
NSError *error = nil;
CMTime actualTime;
CGImageRef image = [gen copyCGImageAtTime:time actualTime:&actualTime error:&error];
if (error) {
NSLog(@"缩略图获取失败!:%@",error);
return;
}
shotImage = [[UIImage alloc] initWithCGImage:image];
NSData *imgData = UIImageJPEGRepresentation(shotImage, 1.0);
BOOL isok = [imgData writeToFile:thumPath atomically: YES];
NSLog(@"缩略图获取结果:%d",isok);
CGImageRelease(image);
}
+ (WBVideoModel *)createNewVideo {
    WBVideoModel *model = [[WBVideoModel alloc] init];
    model.videoAbsolutePath = [NSHomeDirectory() stringByAppendingString:@"/Documents/test.MOV"];
    model.thumAbsolutePath = [NSHomeDirectory() stringByAppendingString:@"/Documents/test.JPG"];
    // remove any leftovers from a previous recording
    unlink([model.videoAbsolutePath UTF8String]);
    unlink([model.thumAbsolutePath UTF8String]);
    return model;
}
+ (NSString *)getDocumentSubPath {
    NSString *documentPath = [NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES) firstObject];
    return [documentPath stringByAppendingPathComponent:wbVideoDicName];
}
+ (void)deleteVideo:(NSString *)videoPath {
    NSFileManager *fileManager = [NSFileManager defaultManager];
    NSError *error = nil;
    [fileManager removeItemAtPath:videoPath error:&error];
    if (error) {
        NSLog(@"Failed to delete video: %@", error);
    }
    NSString *thumPath = [videoPath stringByReplacingOccurrencesOfString:@"MOV" withString:@"JPG"];
    NSError *error2 = nil;
    [fileManager removeItemAtPath:thumPath error:&error2];
    if (error2) {
        NSLog(@"Failed to delete thumbnail: %@", error2);
    }
}
@end
WBVideoSupport.h
#import <UIKit/UIKit.h>
#import "WBVideoConfig.h"
@class WBVideoModel;
//************* the record button ****************
@interface WBRecordBtn : UILabel
- (instancetype)initWithFrame:(CGRect)frame;
@end
//************* the focus square ****************
@interface WBFocusView : UIView
- (void)focusing;
@end
//************* the control bar below the video ****************
typedef NS_ENUM(NSUInteger, WBRecordCancelReason) {
    WBRecordCancelReasonDefault,
    WBRecordCancelReasonTimeShort,
    WBRecordCancelReasonUnknown,
};
@class WBControllerBar;
@protocol WBControllerBarDelegate <NSObject>
@optional
- (void)ctrollVideoDidStart:(WBControllerBar *)controllerBar;
- (void)ctrollVideoDidEnd:(WBControllerBar *)controllerBar;
- (void)ctrollVideoDidCancel:(WBControllerBar *)controllerBar reason:(WBRecordCancelReason)reason;
- (void)ctrollVideoWillCancel:(WBControllerBar *)controllerBar;
- (void)ctrollVideoDidRecordSEC:(WBControllerBar *)controllerBar;
- (void)ctrollVideoDidClose:(WBControllerBar *)controllerBar;
- (void)ctrollVideoOpenVideoList:(WBControllerBar *)controllerBar;
@end
//************* the control bar below the video ****************
@interface WBControllerBar : UIView <UIGestureRecognizerDelegate>
@property (nonatomic, weak) id<WBControllerBarDelegate> delegate;
- (void)setupSubViews;
@end
WBVideoSupport.m
#import "WBVideoSupport.h"
#import "WBVideoConfig.h"
#pragma mark - Custom View --
@implementation WBRecordBtn {
    UITapGestureRecognizer *_tapGesture;
}
- (instancetype)initWithFrame:(CGRect)frame {
    if (self = [super initWithFrame:frame]) {
        [self setupRoundButton];
        self.layer.cornerRadius = 40.0f;
        self.layer.masksToBounds = YES;
        self.userInteractionEnabled = YES;
    }
    return self;
}
- (void)setupRoundButton {
    self.backgroundColor = [UIColor clearColor];
    CGFloat width = self.frame.size.width;
    UIBezierPath *path = [UIBezierPath bezierPathWithRoundedRect:self.bounds cornerRadius:width/2];
    CAShapeLayer *trackLayer = [CAShapeLayer layer];
    trackLayer.frame = self.bounds;
    trackLayer.strokeColor = THEMEGREEN.CGColor;
    trackLayer.fillColor = [UIColor clearColor].CGColor;
    trackLayer.opacity = 1.0;
    trackLayer.lineCap = kCALineCapRound;
    trackLayer.lineWidth = 2.0;
    trackLayer.path = path.CGPath;
    [self.layer addSublayer:trackLayer];
    CATextLayer *textLayer = [CATextLayer layer];
    textLayer.string = @"Hold to record";
    textLayer.frame = CGRectMake(0, 0, 120, 30);
    textLayer.position = CGPointMake(self.bounds.size.width/2, self.bounds.size.height/2);
    UIFont *font = [UIFont boldSystemFontOfSize:22];
    CFStringRef fontName = (__bridge CFStringRef)font.fontName;
    CGFontRef fontRef = CGFontCreateWithFontName(fontName);
    textLayer.font = fontRef;
    textLayer.fontSize = font.pointSize;
    CGFontRelease(fontRef);
    textLayer.contentsScale = [UIScreen mainScreen].scale;
    textLayer.foregroundColor = THEMEGREEN.CGColor;
    textLayer.alignmentMode = kCAAlignmentCenter;
    textLayer.wrapped = YES;
    [trackLayer addSublayer:textLayer];
    CAGradientLayer *gradLayer = [CAGradientLayer layer];
    gradLayer.frame = self.bounds;
    [self.layer addSublayer:gradLayer];
    gradLayer.mask = trackLayer;
}
@end
@implementation WBFocusView {
    CGFloat _width;
    CGFloat _height;
}
- (instancetype)initWithFrame:(CGRect)frame {
    if (self = [super initWithFrame:frame]) {
        _width = CGRectGetWidth(frame);
        _height = _width;
    }
    return self;
}
- (void)focusing {
    [UIView animateWithDuration:0.5 animations:^{
        self.transform = CGAffineTransformScale(CGAffineTransformIdentity, 0.8, 0.8);
    } completion:^(BOOL finished) {
        self.transform = CGAffineTransformIdentity;
    }];
}
- (void)drawRect:(CGRect)rect {
    [super drawRect:rect];
    CGContextRef context = UIGraphicsGetCurrentContext();
    CGContextSetStrokeColorWithColor(context, THEMEGREEN.CGColor);
    CGContextSetLineWidth(context, 1.0);
    CGFloat len = 4;
    CGContextMoveToPoint(context, 0.0, 0.0);
    CGContextAddRect(context, self.bounds);
    CGContextMoveToPoint(context, 0, _height/2);
    CGContextAddLineToPoint(context, len, _height/2);
    CGContextMoveToPoint(context, _width/2, _height);
    CGContextAddLineToPoint(context, _width/2, _height - len);
    CGContextMoveToPoint(context, _width, _height/2);
    CGContextAddLineToPoint(context, _width - len, _height/2);
    CGContextMoveToPoint(context, _width/2, 0);
    CGContextAddLineToPoint(context, _width/2, len);
    CGContextDrawPath(context, kCGPathStroke);
}
@end
//------ divider ------
@implementation WBControllerBar {
    WBRecordBtn *_startBtn;
    UILongPressGestureRecognizer *_longPress;
    UIView *_progressLine;
    BOOL _touchIsInside;
    BOOL _recording;
    NSTimer *_timer;
    NSTimeInterval _surplusTime;
    BOOL _videoDidEnd;
}
- (void)setupSubViews {
    [self layoutIfNeeded];
    _startBtn = [[WBRecordBtn alloc] initWithFrame:CGRectMake(0, 100, 200, 100)];
    _startBtn.text = @"Hold to record";
    _startBtn.textAlignment = NSTextAlignmentCenter;
    _startBtn.textColor = [UIColor whiteColor];
    CAShapeLayer *solidLine = [CAShapeLayer layer];
    CGMutablePathRef solidPath = CGPathCreateMutable();
    solidLine.lineWidth = 2.0f;
    solidLine.strokeColor = THEMEGREEN.CGColor;
    solidLine.fillColor = [UIColor clearColor].CGColor;
    CGPathAddEllipseInRect(solidPath, nil, CGRectMake(1, 1, 132, 132));
    solidLine.path = solidPath;
    CGPathRelease(solidPath);
    [_startBtn.layer addSublayer:solidLine];
    [self addSubview:_startBtn];
    [_startBtn mas_makeConstraints:^(MASConstraintMaker *make) {
        make.centerX.mas_equalTo(self.mas_centerX);
        make.centerY.mas_equalTo(self.mas_centerY);
        make.height.width.mas_equalTo(135);
    }];
    _longPress = [[UILongPressGestureRecognizer alloc] initWithTarget:self action:@selector(longpressAction:)];
    _longPress.minimumPressDuration = 0.01;
    _longPress.delegate = self;
    [self addGestureRecognizer:_longPress];
    _progressLine = [[UIView alloc] initWithFrame:CGRectMake(0, 0, ScreenWidth, 4)];
    _progressLine.backgroundColor = THEMEGREEN;
    _progressLine.hidden = YES;
    [self addSubview:_progressLine];
    _surplusTime = wbRecordTime;
}
- (void)startRecordSet {
    _startBtn.alpha = 1.0;
    _progressLine.frame = CGRectMake(0, 0, self.bounds.size.width, 2);
    _progressLine.backgroundColor = THEMEGREEN;
    _progressLine.hidden = NO;
    _surplusTime = wbRecordTime;
    _recording = YES;
    _videoDidEnd = NO;
    if (_timer == nil) {
        _timer = [NSTimer timerWithTimeInterval:1.0 target:self selector:@selector(recordTimerAction) userInfo:nil repeats:YES];
        [[NSRunLoop currentRunLoop] addTimer:_timer forMode:NSDefaultRunLoopMode];
    }
    [_timer fire];
    [UIView animateWithDuration:0.4 animations:^{
        _startBtn.alpha = 0.0;
        _startBtn.transform = CGAffineTransformScale(CGAffineTransformIdentity, 2.0, 2.0);
    } completion:^(BOOL finished) {
        if (finished) {
            _startBtn.transform = CGAffineTransformIdentity;
        }
    }];
}
- (void)endRecordSet {
    _progressLine.hidden = YES;
    [_timer invalidate];
    _timer = nil;
    _recording = NO;
    _startBtn.alpha = 1;
}
#pragma mark - UIGestureRecognizerDelegate
- (BOOL)gestureRecognizerShouldBegin:(UIGestureRecognizer *)gestureRecognizer {
    if (gestureRecognizer == _longPress) {
        if (_surplusTime <= 0) return NO;
        CGPoint point = [gestureRecognizer locationInView:self];
        CGPoint startBtnCent = _startBtn.center;
        CGFloat dx = point.x - startBtnCent.x;
        CGFloat dy = point.y - startBtnCent.y;
        CGFloat startWidth = _startBtn.bounds.size.width;
        if ((dx * dx) + (dy * dy) < (startWidth * startWidth)) {
            return YES;
        }
        return NO;
    }
    return YES;
}
#pragma mark - Actions --
- (void)longpressAction:(UILongPressGestureRecognizer *)gesture {
    CGPoint point = [gesture locationInView:self];
    _touchIsInside = point.y >= 0;
    switch (gesture.state) {
        case UIGestureRecognizerStateBegan: {
            [self videoStartAction];
        }
            break;
        case UIGestureRecognizerStateChanged: {
            if (!_touchIsInside) {
                _progressLine.backgroundColor = THEMEGREEN;
                if (_delegate && [_delegate respondsToSelector:@selector(ctrollVideoWillCancel:)]) {
                    [_delegate ctrollVideoWillCancel:self];
                }
            } else {
                _progressLine.backgroundColor = THEMEGREEN;
            }
        }
            break;
        case UIGestureRecognizerStateEnded: {
            [self endRecordSet];
            if (!_touchIsInside || wbRecordTime - _surplusTime <= 1) {
                WBRecordCancelReason reason = WBRecordCancelReasonTimeShort;
                if (!_touchIsInside) {
                    reason = WBRecordCancelReasonDefault;
                }
                [self videoCancelAction:reason];
            } else {
                [self videoEndAction];
            }
        }
            break;
        case UIGestureRecognizerStateCancelled:
            break;
        default:
            break;
    }
}
- (void)videoStartAction {
    [self startRecordSet];
    if (_delegate && [_delegate respondsToSelector:@selector(ctrollVideoDidStart:)]) {
        [_delegate ctrollVideoDidStart:self];
    }
}
- (void)videoCancelAction:(WBRecordCancelReason)reason {
    if (_delegate && [_delegate respondsToSelector:@selector(ctrollVideoDidCancel:reason:)]) {
        [_delegate ctrollVideoDidCancel:self reason:reason];
    }
}
- (void)videoEndAction {
    if (_videoDidEnd) return;
    _videoDidEnd = YES;
    if (_delegate && [_delegate respondsToSelector:@selector(ctrollVideoDidEnd:)]) {
        [_delegate ctrollVideoDidEnd:self];
    }
}
- (void)videoListAction {
    if (_delegate && [_delegate respondsToSelector:@selector(ctrollVideoOpenVideoList:)]) {
        [_delegate ctrollVideoOpenVideoList:self];
    }
}
- (void)videoCloseAction {
    if (_delegate && [_delegate respondsToSelector:@selector(ctrollVideoDidClose:)]) {
        [_delegate ctrollVideoDidClose:self];
    }
}
- (void)recordTimerAction {
    CGFloat reduceLen = self.bounds.size.width/wbRecordTime;
    CGFloat oldLineLen = _progressLine.frame.size.width;
    CGRect oldFrame = _progressLine.frame;
    [UIView animateWithDuration:1.0 delay:0.0 options:UIViewAnimationOptionCurveLinear animations:^{
        _progressLine.frame = CGRectMake(oldFrame.origin.x, oldFrame.origin.y, oldLineLen - reduceLen, oldFrame.size.height);
        _progressLine.center = CGPointMake(self.bounds.size.width/2, _progressLine.bounds.size.height/2);
    } completion:^(BOOL finished) {
        _surplusTime--;
        if (_recording) {
            if (_delegate && [_delegate respondsToSelector:@selector(ctrollVideoDidRecordSEC:)]) {
                [_delegate ctrollVideoDidRecordSEC:self];
            }
        }
        if (_surplusTime <= 0.0) {
            [self endRecordSet];
            [self videoEndAction];
        }
    }];
}
@end
A few things to watch out for if you upload the recorded video to Polyv (保利威视):
1. If the upload succeeds but the video plays back as a green screen in the Polyv console, check the resolution of your recording; this usually means the resolution is too low.
2. If playback looks blurry, you need to configure the playback quality levels in the Polyv console yourself, typically smooth → SD → HD.
3. If you cannot get the returned mp4 address after uploading, check in your management console whether the video was actually uploaded at all.
4. Sometimes a video URL exists, but tapping play shows nothing, because Polyv transcodes videos after upload. You have to handle this yourself: grab the first frame from the network URL and check whether the resulting image is nil. If it is nil, do something else (for example, show a hint that the video is still transcoding); if it is not nil, play the video directly. A sketch follows.
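A minimal sketch of point 4 (videoURL stands for the mp4 address returned by Polyv; the nil check is the transcoding probe described above). Note that copyCGImageAtTime: is synchronous and may block on the network, so run it off the main thread:

AVURLAsset *asset = [AVURLAsset assetWithURL:videoURL];
AVAssetImageGenerator *generator = [AVAssetImageGenerator assetImageGeneratorWithAsset:asset];
generator.appliesPreferredTrackTransform = YES;
NSError *error = nil;
CMTime actualTime;
CGImageRef cgImage = [generator copyCGImageAtTime:CMTimeMake(1, 1) actualTime:&actualTime error:&error];
if (cgImage == NULL) {
    // no frame could be decoded: the video is most likely still transcoding,
    // so show a hint ("video is being processed") instead of trying to play it
} else {
    // a frame exists: safe to start playback
    CGImageRelease(cgImage);
}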