//
// LiveTelecastController.m
// FFmpegDemo
//
// Created by huoliquankai on 2017/7/20.
// Copyright © 2017 深圳机械行业协会. All rights reserved.
//
#import "LiveTelecastController.h"
#import <AVFoundation/AVFoundation.h>
#import "avcodec.h"
#import "imgutils.h"
#import "avdevice.h"
#import "swscale.h"
#import "x264.h"
#include "time.h"
#import "GPUImage.h"
@interface LiveTelecastController () <AVCaptureVideoDataOutputSampleBufferDelegate>
@property (nonatomic, strong) AVCaptureSession *session;
@property (nonatomic, strong) AVCaptureDeviceInput *videoInput;
@property (nonatomic, strong) AVCaptureVideoDataOutput *videoDataOutput;
@property (nonatomic, strong) UIView *cameraShowView;
@property (nonatomic, strong) AVCaptureVideoPreviewLayer *previewLayer;
@end
@implementation LiveTelecastController
{
AVCodecContext *pCodecCtx;
AVCodec *pCodec;
AVFrame *pFrame;
AVPacket pkt;
int encoder_h264_frame_width;
int encoder_h264_frame_height;
unsigned char *picture_buf;
// RTMP output / muxing state
AVStream *out_stream;
int frame_count;
int y_length;
int uv_length;
AVFormatContext *ofmt_ctx;
int src_height;
int src_width;
int64_t start_time;
}
- (instancetype)init
{
self = [super init];
if (self) {
[self initialSession];
[self initialCameraShowView];
}
return self;
}
- (void)initialSession {
self.session = [[AVCaptureSession alloc] init];
self.videoInput = [[AVCaptureDeviceInput alloc] initWithDevice:[self backCamera] error:nil];
self.videoDataOutput = [[AVCaptureVideoDataOutput alloc] init];
// Ask the camera for bi-planar YUV 4:2:0 (NV12) frames
NSDictionary *outputSettings = [[NSDictionary alloc] initWithObjectsAndKeys:
[NSNumber numberWithUnsignedInt:kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange], kCVPixelBufferPixelFormatTypeKey,
[NSNumber numberWithInt: 640], (id)kCVPixelBufferWidthKey,
[NSNumber numberWithInt: 480], (id)kCVPixelBufferHeightKey,
nil];
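// Caveat: AVCaptureVideoDataOutput is only guaranteed to honor the pixel-format
// key; the delivered frame dimensions follow the session preset, so make sure
// they match the encoder dimensions configured in streamerInit.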
[self.videoDataOutput setVideoSettings:outputSettings];
dispatch_queue_t queue = dispatch_queue_create("linlinqi", NULL);
[self.videoDataOutput setSampleBufferDelegate:self queue:queue];
if ([self.session canAddInput:self.videoInput]) {
[self.session addInput:self.videoInput];
}
if ([self.session canAddOutput:self.videoDataOutput]) {
[self.session addOutput:self.videoDataOutput];
} else {
NSLog(@"failed get output");
}
}
- (void)initialCameraShowView {
self.cameraShowView = [[UIView alloc] initWithFrame:self.view.bounds];
[self.view addSubview:self.cameraShowView];
}
- (AVCaptureDevice *)backCamera {
return [self cameraWithPosition:AVCaptureDevicePositionBack];
}
- (AVCaptureDevice *)cameraWithPosition:(AVCaptureDevicePosition)position {
AVCaptureDeviceDiscoverySession *discoverySession = [AVCaptureDeviceDiscoverySession discoverySessionWithDeviceTypes:@[AVCaptureDeviceTypeBuiltInWideAngleCamera] mediaType:AVMediaTypeVideo position:position];
NSArray *devices = discoverySession.devices;
for (AVCaptureDevice *device in devices) {
if (device.position == position) {
return device;
}
}
return nil;
}
- (void)viewWillAppear:(BOOL)animated {
[super viewWillAppear:animated];
[self setUpCameraLayer];
}
// (the capture session itself is started in viewDidLoad)
- (void)viewDidAppear:(BOOL)animated {
[super viewDidAppear:animated];
}
- (void)setUpCameraLayer {
if (self.previewLayer == nil) {
self.previewLayer = [[AVCaptureVideoPreviewLayer alloc] initWithSession:self.session];
UIView *view = self.cameraShowView;
CALayer *viewLayer = [view layer];
[viewLayer setMasksToBounds:YES];
CGRect bounds = [view bounds];
[self.previewLayer setFrame:bounds];
[self.previewLayer setVideoGravity:AVLayerVideoGravityResizeAspect];
[viewLayer addSublayer:self.previewLayer];
}
}
- (void)viewDidLoad {
[super viewDidLoad];
if (self.session) {
[self streamerInit];
[self.session startRunning];
}
}
#pragma mark - AVCaptureVideoDataOutputSampleBufferDelegate
- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection {
CVImageBufferRef pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
if ([self.videoDataOutput connectionWithMediaType:AVMediaTypeVideo]) {
// Video frame delivered by the capture session
CVPixelBufferLockBaseAddress(pixelBuffer, 0);
// Base address of the Y plane
UInt8 *bufferPtr = (UInt8 *)CVPixelBufferGetBaseAddressOfPlane(pixelBuffer, 0);
// Base address of the interleaved CbCr (UV) plane
UInt8 *bufferPtr1 = (UInt8 *)CVPixelBufferGetBaseAddressOfPlane(pixelBuffer, 1);
size_t width = CVPixelBufferGetWidth(pixelBuffer);
size_t height = CVPixelBufferGetHeight(pixelBuffer);
size_t bytesrow0 = CVPixelBufferGetBytesPerRowOfPlane(pixelBuffer, 0);
size_t bytesrow1 = CVPixelBufferGetBytesPerRowOfPlane(pixelBuffer, 1);
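// NV12 keeps a full-resolution Y plane (plane 0) and one half-resolution plane
// of interleaved Cb/Cr pairs (plane 1). The encoder expects planar I420
// (all Y, then all U, then all V), hence the repacking below. Rows may carry
// padding, so bytes-per-row is used instead of the visible width when stepping.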
UInt8 *yuv420_data = (UInt8 *)malloc(width * height * 3/2);
/* De-interleave NV12 (bi-planar) into planar I420/YUV420P */
UInt8 *pY = bufferPtr;
UInt8 *pUV = bufferPtr1;
UInt8 *pU = yuv420_data + width * height;
UInt8 *pV = pU + width * height / 4;
for (int i = 0; i < height; i ++) {
memcpy(yuv420_data + i * width, pY + i * bytesrow0, width);
}
for (int j = 0; j < height/2; j ++) {
for (int i = 0; i < width/2; i ++) {
*(pU++) = pUV[i<<1]; // even bytes are U (Cb)
*(pV++) = pUV[(i<<1) + 1]; // odd bytes are V (Cr)
}
pUV += bytesrow1;
}
// yuv420_data now holds a planar I420 frame, ready to encode
[self yuv420ToH264:yuv420_data];
// done with this frame
free(yuv420_data);
CVPixelBufferUnlockBaseAddress(pixelBuffer, 0);
}
}
- (void)streamerInit {
int ret = 0;
const char *address = "rtmp://192.168.0.111/live/livestream";
encoder_h264_frame_width = 1920; // must equal the dimensions the capture session actually delivers
encoder_h264_frame_height = 1080;
src_width = 1920;
src_height = 1080;
y_length = encoder_h264_frame_width * encoder_h264_frame_height;
uv_length = y_length/4;
av_register_all();
avformat_network_init();
avformat_alloc_output_context2(&ofmt_ctx, NULL, "flv", address);
if (!ofmt_ctx) {
printf("不能打开输出");
return;
}
// Find the H.264 encoder
pCodec = avcodec_find_encoder(AV_CODEC_ID_H264);
if (!pCodec) {
printf("H.264 encoder not found\n");
return;
}
// Allocate and configure the encoder context
pCodecCtx = avcodec_alloc_context3(pCodec);
pCodecCtx->pix_fmt = AV_PIX_FMT_YUV420P; // encoder input pixel format
pCodecCtx->width = encoder_h264_frame_width;
pCodecCtx->height = encoder_h264_frame_height;
pCodecCtx->time_base.num = 1;
pCodecCtx->time_base.den = 30;
pCodecCtx->bit_rate = 400000;
// pCodecCtx->rc_max_rate = 400000;
// pCodecCtx->rc_buffer_size = 200000;
pCodecCtx->gop_size = 250;
if(ofmt_ctx->oformat->flags & AVFMT_GLOBALHEADER) {
pCodecCtx->flags |= AV_CODEC_FLAG_GLOBAL_HEADER;
}
pCodecCtx->qmin = 10;
pCodecCtx->qmax = 51;
pCodecCtx->max_b_frames = 0;
AVDictionary *dicParams = NULL;
av_dict_set(&dicParams, "preset", "slow", 0);
av_dict_set(&dicParams, "tune", "zerolatency", 0);
// Open the encoder; dicParams passes the x264 "preset" and "tune" options through
if(avcodec_open2(pCodecCtx, pCodec, &dicParams) < 0) {
printf("Failed to open encoder!\n");
return;
}
// Create the output stream
out_stream = avformat_new_stream(ofmt_ctx, pCodec);
if (!out_stream) {
printf("Failed to allocate output stream\n");
return;
}
out_stream->time_base.num = 1;
out_stream->time_base.den = 30;
// Copy the encoder configuration into the output stream's codec parameters
avcodec_parameters_from_context(out_stream->codecpar, pCodecCtx);
ret = avio_open(&ofmt_ctx->pb, address, AVIO_FLAG_WRITE);
if(ret < 0) {
printf("Could not open output URL %s", address);
return;
}
ret = avformat_write_header(ofmt_ctx, NULL);
if(ret < 0) {
printf("Error occurred when open output URL\n");
return;
}
// Allocate the reusable AVFrame that carries AV_PIX_FMT_YUV420P data into the encoder
pFrame = av_frame_alloc();
uint8_t *out_buffer = (uint8_t *) av_malloc(av_image_get_buffer_size(pCodecCtx->pix_fmt, src_width, src_height, 1));
av_image_fill_arrays(pFrame->data, pFrame->linesize, out_buffer, pCodecCtx->pix_fmt, src_width, src_height, 1);
start_time = av_gettime();
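// start_time can later be compared with av_gettime() to pace the push rate; it
// is unused in the rest of this listing. Note that yuv420ToH264: repoints
// pFrame->data at the captured buffer, so out_buffer above is only a default
// backing store.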
}
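// encode() below is a thin wrapper over the send/receive API (FFmpeg 3.1+).
// It drains at most one packet per call; a production loop would keep calling
// avcodec_receive_packet() until it returns AVERROR(EAGAIN).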
int encode(AVCodecContext *pCodecCtx, AVPacket* pPkt, AVFrame *pFrame, int *got_packet) {
int ret;
*got_packet = 0;
ret = avcodec_send_frame(pCodecCtx, pFrame);
if (ret < 0 && ret != AVERROR_EOF) {
return ret;
}
ret = avcodec_receive_packet(pCodecCtx, pPkt);
if (ret < 0 && ret != AVERROR(EAGAIN)) {
return ret;
}
if (ret >= 0) {
*got_packet = 1;
}
return 0;
}
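/*
 * Flush sketch (not in the original post): after the last captured frame, the
 * encoder may still hold buffered packets. Sending NULL puts it in draining
 * mode; receive until AVERROR_EOF. "flush_encoder" is a hypothetical helper;
 * timestamp rescaling is omitted for brevity and would mirror yuv420ToH264: below.
 */
int flush_encoder(AVCodecContext *pCodecCtx, AVFormatContext *ofmt_ctx, int stream_index) {
int ret = avcodec_send_frame(pCodecCtx, NULL); // enter draining mode
if (ret < 0 && ret != AVERROR_EOF) {
return ret;
}
AVPacket flushPkt;
av_init_packet(&flushPkt);
flushPkt.data = NULL;
flushPkt.size = 0;
while ((ret = avcodec_receive_packet(pCodecCtx, &flushPkt)) >= 0) {
flushPkt.stream_index = stream_index;
av_interleaved_write_frame(ofmt_ctx, &flushPkt);
av_packet_unref(&flushPkt);
}
return ret == AVERROR_EOF ? 0 : ret;
}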
- (void)yuv420ToH264:(UInt8 *)yuv420_data {
picture_buf = yuv420_data;
pFrame->data[0] = picture_buf; // Y plane
pFrame->data[1] = picture_buf + y_length; // U plane
pFrame->data[2] = picture_buf + y_length*5/4; // V plane
// pts in codec time_base units (1/30 s per tick); the muxed pts/dts are recomputed below
pFrame->pts = frame_count;
int got_picture = 0;
// Encode
pFrame->width = encoder_h264_frame_width;
pFrame->height = encoder_h264_frame_height;
pFrame->format = AV_PIX_FMT_YUV420P;
int ret = encode(pCodecCtx, &pkt, pFrame, &got_picture);
if(ret < 0) {
printf("Failed to encode! \n");
return;
}
if (got_picture == 1) {
printf("Encoded frame %5d\tsize:%5d\n", frame_count, pkt.size);
pkt.stream_index = out_stream->index;
// Rescale PTS/DTS from AV_TIME_BASE (microsecond) units to the muxer stream's time_base
AVRational time_base = ofmt_ctx->streams[0]->time_base;
AVRational r_frame_rate = {30, 1}; // must match the 1/30 encoder time_base
AVRational time_base_q = {1, AV_TIME_BASE};
// duration of one frame in AV_TIME_BASE units
int64_t calc_duration = (double)AV_TIME_BASE / av_q2d(r_frame_rate);
pkt.pts = av_rescale_q(frame_count * calc_duration, time_base_q, time_base);
pkt.dts = pkt.pts;
pkt.duration = av_rescale_q(calc_duration, time_base_q, time_base);
pkt.pos = -1;
frame_count++;
ret = av_interleaved_write_frame(ofmt_ctx, &pkt);
if (ret < 0) {
printf("Error writing packet to the output\n");
return;
}
av_packet_unref(&pkt);
}
// av_write_trailer(ofmt_ctx) belongs in teardown, once per stream; see the streamerStop sketch below.
}
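// Teardown sketch (not part of the original post): "streamerStop" is a
// hypothetical method to call when capture ends. It finalizes the FLV stream
// and releases the FFmpeg state allocated in streamerInit.
- (void)streamerStop {
[self.session stopRunning];
if (ofmt_ctx) {
av_write_trailer(ofmt_ctx); // writes the FLV trailer
avio_closep(&ofmt_ctx->pb); // closes the RTMP connection
avformat_free_context(ofmt_ctx);
ofmt_ctx = NULL;
}
if (pCodecCtx) {
avcodec_free_context(&pCodecCtx);
}
if (pFrame) {
av_frame_free(&pFrame);
}
}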
@end
[木木方文 Tech Share on A/V, Part 5] Live camera streaming (RTMP push) with FFmpeg + x264