RTMP(Real Time Messaging Protocol)实时消息传送协议是Adobe Systems公司为Flash播放器和服务器之间音频、视频和数据传输开发的开放协议。
流程
1、设备需将摄像头获取到的数据传给jni层,并交给底层一个接口
/**
 * Camera preview callback: forwards each raw preview frame to the listener,
 * which hands it down to the native (JNI) layer.
 *
 * @param data   one preview frame in NV21 (a YUV420-family layout)
 * @param camera the camera that produced the frame
 *
 * NOTE: the C++ encoder cannot consume NV21 directly — it must be converted
 * to I420 before encoding (handled on the native side).
 */
@Override
public void onPreviewFrame(byte[] data, Camera camera) {
// TODO data is NOT rotated here: setDisplayOrientation only rotates the
// on-screen preview; the byte buffer itself still needs a separate rotation.
if (mPreviewCallback != null) {
mPreviewCallback.onPreviewFrame(data, camera); // NV21 bytes ==> C++ layer ==> streaming server
}
camera.addCallbackBuffer(buffer); // re-queue the shared buffer for the next frame
}
if (mOnChangedSizeListener != null) { // 你的宽和高发生改变,就会回调此接口
mOnChangedSizeListener.onChanged(mWidth, mHeight);
}
2、将音频信息传给jni层
private class AudioTask implements Runnable {
    @Override
    public void run() {
        // Capture PCM from the microphone and push every successfully read
        // chunk down to the native layer until the live flag is cleared.
        audioRecord.startRecording();
        final byte[] pcmChunk = new byte[inputSamples];
        while (isLive) {
            final int readBytes = audioRecord.read(pcmChunk, 0, pcmChunk.length);
            if (readBytes > 0) {
                pusher.native_pushAudio(pcmChunk); // hand raw PCM to JNI
            }
        }
        audioRecord.stop();
    }
}
3、初始化c层的音频和视频通道
extern "C"
JNIEXPORT void JNICALL
Java_com_wangwang_wwpush_WWPush_native_1init(JNIEnv *env, jobject thiz) {
    // Create the native audio/video channels and route their encoded packets
    // into the shared queue via the common callback.
    // Guard against leaking the previous channels if Java calls init twice.
    if (videoChannel) {
        delete videoChannel;
        videoChannel = nullptr;
    }
    if (audioChannel) {
        delete audioChannel;
        audioChannel = nullptr;
    }
    videoChannel = new VideoChannel();
    audioChannel = new AudioChannel();
    audioChannel->setAudioCallback(callback);
    videoChannel->setVideoCallback(callback);
    // Packets still queued at teardown are freed through releasePackets.
    packets.setReleaseCallback(releasePackets);
}
4、开始直播
//1、 连接流媒体服务器
/**
 * Pusher-thread entry point: connect to the RTMP server named by `args`
 * (a heap-allocated url string whose ownership transfers to this thread),
 * then drain the packet queue, sending until pushing is stopped or fails.
 * @param args char* url allocated with new char[] by the caller
 * @return always nullptr (pthread signature)
 */
void *task_start(void *args){
    char *url = static_cast<char *>(args); // owned here, released at the end
    RTMP *rtmp = nullptr;
    int ret;
    do{
        rtmp = RTMP_Alloc();
        if (!rtmp){
            LOGE("rtmp 初始化失败")
            break;
        }
        RTMP_Init(rtmp);
        rtmp->Link.timeout = 5; // connect/read timeout, seconds
        // Set the target address (0 == failure for all librtmp calls below)
        ret = RTMP_SetupURL(rtmp, url);
        if (!ret){
            LOGE("rtmp 设置地址失败")
            break;
        }
        // Enable publish (output) mode — must precede RTMP_Connect
        RTMP_EnableWrite(rtmp);
        // Establish the connection
        ret = RTMP_Connect(rtmp, nullptr);
        if (!ret){
            LOGE("rtmp 建立连接失败:%d, url: %s", ret, url);
            break;
        }
        // Create the stream
        ret = RTMP_ConnectStream(rtmp, 0);
        if (!ret){
            LOGE("rtmp 链接流失败 error_code->%d", ret)
            break;
        }
        // Base for relative packet timestamps
        start_time = RTMP_GetTime();
        // Ready to push to the server
        readyPushing = true;
        // Send the AAC sequence header first, per the FLV/RTMP convention
        // (some servers tolerate its absence, but spec-compliant clients need it).
        callback(audioChannel->getAudioSeqHeader());
        packets.setWork(1); // start the queue
        RTMPPacket *packet = nullptr;
        while (readyPushing){
            packets.pop(packet); // blocking pop
            if (!readyPushing){
                break;
            }
            if (!packet){
                continue;
            }
            // Tag the packet with this connection's stream id before sending
            packet->m_nInfoField2 = rtmp->m_stream_id;
            // queue=1: use librtmp's internal buffering
            ret = RTMP_SendPacket(rtmp, packet, 1);
            releasePackets(&packet);
            packet = nullptr; // defensive: avoid double release after the loop
            if (!ret){
                LOGE("rtmp 发包失败")
                break;
            }
        }
        // Release a packet popped but not sent (loop exited between pop and send)
        releasePackets(&packet);
    }while(false);
    isStart = false;
    readyPushing = false;
    packets.setWork(0);
    packets.clear();
    if (rtmp){
        RTMP_Close(rtmp);
        RTMP_Free(rtmp);
    }
    // BUG FIX: url is an array allocated with new char[]; plain `delete` on it
    // is undefined behavior — use delete[].
    delete[] url;
    return nullptr;
}
5、初始化x264编码器
/**
* 初始化x264编码器
* @param width
* @param height
* @param fps
* @param bitrate
*/
void VideoChannel::initVideoEncoder(int width, int height, int fps, int bitrate) {
//要防止编码器多次创建 互斥锁
pthread_mutex_lock(&mutex);
mWidth = width;
mHeight = height;
mFps = fps;
mBitrate = bitrate;
y_len = width * height;
uv_len = y_len / 4;
if (videoEncoder) {
x264_encoder_close(videoEncoder);
videoEncoder = nullptr;
}
if (pic_in) {
x264_picture_clean(pic_in);
DELETE(pic_in);
}
//初始化x264参数集
x264_param_t param;
/**
* 设置编码器属性
* ultrafast 最快
* zerolatency 零延迟
*/
x264_param_default_preset(¶m, "ultrafast", "zerolatency");
//3.2 中等偏上的规格 自动匹配分辨率 模糊度 如果试82就是超清
param.i_level_idc = 32;
//输入数据格式 YUV420P
param.i_csp = X264_CSP_I420;
param.i_width = width;
param.i_height = height;
//直播不能有b帧
param.i_bframe = 0;
//码率控制 CQP(恒定质量) CRF(恒定码率) ABR(平均码率)
param.rc.i_rc_method = X264_RC_CRF;
//设置码率
param.rc.i_bitrate = bitrate / 1000;
//瞬时最大码率 网络波动
param.rc.i_vbv_max_bitrate = bitrate / 1000 * 1.2;
// 设置buffer大小
param.rc.i_vbv_buffer_size = bitrate / 1000;
//码率控制是timebase 和 timestamp 而是通过fps来控制码率
//根据fps来控制
param.b_vfr_input = 0;
param.i_fps_num = fps;
param.i_fps_den = 1;
param.i_timebase_den = param.i_fps_num;
param.i_timebase_num = param.i_fps_den;
//什么时候来一个I帧 2秒钟一个I帧
param.i_keyint_max = fps * 2;
//sps序列参数集 pps图像参数集 所以要设置header(sps pps)
param.b_repeat_headers = 1;
//设置一个线程
param.i_threads = 1;
//提交参数
x264_param_apply_profile(¶m, "baseline");
//输入图像初始化 本身空间初始化
pic_in = new x264_picture_t;
//内部成员初始化
x264_picture_alloc(pic_in, param.i_csp, param.i_width, param.i_height);
//打开编码器
videoEncoder = x264_encoder_open(¶m);
if (videoEncoder) {
LOGI("打开编码器成功")
}
pthread_mutex_unlock(&mutex);
}
6、编码并发送sps和pps以及I帧
/**
 * Encode one NV21 camera frame and enqueue the resulting RTMP packets.
 * @param data NV21 frame: y_len bytes of Y followed by interleaved V/U pairs
 */
void VideoChannel::encodeData(signed char *data) {
    pthread_mutex_lock(&mutex);
    // Y plane is identical in NV21 and I420: straight copy.
    memcpy(pic_in->img.plane[0], data, y_len);
    // De-interleave NV21's VU pairs into separate I420 U and V planes.
    // (libyuv could do this — and rotation — more efficiently.)
    for (int i = 0; i < uv_len; ++i) {
        *(pic_in->img.plane[1] + i) = *(data + y_len + i * 2 + 1); // U
        *(pic_in->img.plane[2] + i) = *(data + y_len + i * 2);     // V
    }
    // FIX: advance the presentation timestamp BEFORE encoding so it applies
    // to the frame being encoded (it was previously bumped after the call).
    pic_in->i_pts += 1;
    x264_nal_t *nal = nullptr; // array of output NAL units
    int pi_nal;                // number of NAL units produced
    x264_picture_t pic_out;    // metadata of the encoded picture
    // encoder, out NALs, out NAL count, input picture, output picture
    int ret = x264_encoder_encode(videoEncoder, &nal, &pi_nal, pic_in, &pic_out);
    if (ret < 0) {
        LOGE("X264编码失败")
        pthread_mutex_unlock(&mutex);
        return;
    }
    // SPS/PPS precede every keyframe (b_repeat_headers = 1): stash both,
    // send them as one sequence-header packet, then send the frame NALs.
    // BUG FIX: lengths were uninitialized — a PPS arriving without an SPS
    // would have read garbage; copies are now also bounded to the buffers.
    int sps_len = 0, pps_len = 0;
    uint8_t sps[100];
    uint8_t pps[100];
    for (int i = 0; i < pi_nal; ++i) {
        if (nal[i].i_type == NAL_SPS) {
            sps_len = nal[i].i_payload - 4; // strip 00 00 00 01 start code
            if (sps_len > (int) sizeof(sps)) sps_len = (int) sizeof(sps);
            memcpy(sps, nal[i].p_payload + 4, sps_len);
        } else if (nal[i].i_type == NAL_PPS) {
            pps_len = nal[i].i_payload - 4;
            if (pps_len > (int) sizeof(pps)) pps_len = (int) sizeof(pps);
            memcpy(pps, nal[i].p_payload + 4, pps_len);
            // Send only once both halves of the header pair were captured.
            if (sps_len > 0) {
                sendSpsPps(sps, pps, sps_len, pps_len);
            }
        } else {
            // I-frames and P-frames
            sendFrame(nal[i].i_type, nal[i].i_payload, nal[i].p_payload);
        }
    }
    pthread_mutex_unlock(&mutex);
}
- 发送sps 和pps
/**
 * Build and enqueue the AVC sequence header (AVCDecoderConfigurationRecord)
 * carrying SPS and PPS. It must precede all video frame packets.
 * @param sps     SPS payload (Annex-B start code already stripped)
 * @param pps     PPS payload (Annex-B start code already stripped)
 * @param sps_len SPS length in bytes
 * @param pps_len PPS length in bytes
 */
void VideoChannel::sendSpsPps(uint8_t *sps, uint8_t *pps, int sps_len, int pps_len) {
// 5 (FLV video tag header) + 8 (config record up to SPS length) + sps
// + 3 (PPS count + 2-byte length) + pps
int body_size = 5 + 8 + sps_len + 3 + pps_len;
RTMPPacket *packet = new RTMPPacket;
RTMPPacket_Alloc(packet, body_size);
int i = 0;
packet->m_body[i++] = 0x17;// FrameType=1 (key) | CodecID=7 (AVC)
packet->m_body[i++] = 0x00;// AVCPacketType=0: sequence header (1 would mean NALU data)
packet->m_body[i++] = 0x00;// CompositionTime = 0 (3 bytes)
packet->m_body[i++] = 0x00;
packet->m_body[i++] = 0x00;
packet->m_body[i++] = 0x01;// configurationVersion
packet->m_body[i++] = sps[1];// AVCProfileIndication (copied from the SPS)
packet->m_body[i++] = sps[2];// profile_compatibility
packet->m_body[i++] = sps[3];// AVCLevelIndication
packet->m_body[i++] = 0xFF;// reserved | lengthSizeMinusOne=3: 4-byte NALU lengths
packet->m_body[i++] = 0xE1;// reserved | numOfSequenceParameterSets = 1
// SPS length as two big-endian bytes
packet->m_body[i++] = (sps_len >> 8) & 0xFF;
packet->m_body[i++] = sps_len & 0xFF;
// copy the SPS payload
memcpy(&packet->m_body[i], sps, sps_len);
i += sps_len; // advance past the SPS
packet->m_body[i++] = 0x01; // numOfPictureParameterSets = 1
// PPS length as two big-endian bytes
packet->m_body[i++] = (pps_len >> 8) & 0xFF;
packet->m_body[i++] = pps_len & 0xFF;
memcpy(&packet->m_body[i], pps, pps_len); // copy the PPS payload
i += pps_len;
// packet bookkeeping
packet->m_packetType = RTMP_PACKET_TYPE_VIDEO;
packet->m_nBodySize = body_size;
packet->m_nChannel = 10; // video channel
packet->m_nTimeStamp = 0; // sequence header carries no timestamp
packet->m_hasAbsTimestamp = 0;
packet->m_headerType = RTMP_PACKET_SIZE_MEDIUM;// medium (8-byte) header
// hand the packet to the send queue
videoCallback(packet);
}
- 发送帧信息
/**
* 发送帧信息
* @param type 帧类型
* @param i_payload 帧数据长度
* @param p_payload 帧数据
*/
/**
 * Wrap one H.264 NAL unit in an RTMP/FLV video packet and enqueue it.
 * @param type      NAL unit type (NAL_SLICE_IDR marks a keyframe)
 * @param i_payload payload length, including the Annex-B start code
 * @param p_payload payload bytes beginning with 00 00 00 01 or 00 00 01
 */
void VideoChannel::sendFrame(int type, int i_payload, uint8_t *p_payload) {
    // Strip the Annex-B start code: 00 00 00 01 (4 bytes) or 00 00 01 (3 bytes).
    if (p_payload[2] == 0x00) {
        // third byte still zero -> 4-byte start code
        p_payload += 4;
        i_payload -= 4;
    } else if (p_payload[2] == 0x01) {
        // BUG FIX: the original tested p_payload[0] == 0x01, which is always
        // false for a start code (byte 0 is 0x00), so 3-byte start codes were
        // never stripped and corrupt NALUs were sent.
        p_payload += 3;
        i_payload -= 3;
    }
    RTMPPacket *packet = new RTMPPacket;
    // 5 (FLV video tag header) + 4 (NALU length) + payload
    int body_size = 5 + 4 + i_payload;
    RTMPPacket_Alloc(packet, body_size);
    // FrameType|CodecID: 0x27 = inter frame + AVC, 0x17 = key frame + AVC
    packet->m_body[0] = 0x27;
    if (type == NAL_SLICE_IDR) {
        packet->m_body[0] = 0x17;
    }
    // AVCPacketType=1: NALU data (0 would mean the SPS/PPS sequence header)
    packet->m_body[1] = 0x01;
    // CompositionTime = 0 (3 bytes)
    packet->m_body[2] = 0x00;
    packet->m_body[3] = 0x00;
    packet->m_body[4] = 0x00;
    // NALU length as four big-endian bytes
    packet->m_body[5] = (i_payload >> 24) & 0xFF;
    packet->m_body[6] = (i_payload >> 16) & 0xFF;
    packet->m_body[7] = (i_payload >> 8) & 0xFF;
    packet->m_body[8] = i_payload & 0xFF;
    // raw H.264 payload
    memcpy(&packet->m_body[9], p_payload, i_payload);
    // packet bookkeeping
    packet->m_packetType = RTMP_PACKET_TYPE_VIDEO;
    packet->m_nBodySize = body_size;
    packet->m_nChannel = 10; // video channel
    // NOTE(review): -1 presumably relies on the send path to stamp a real
    // relative timestamp — confirm against how task_start fills packets.
    packet->m_nTimeStamp = -1;
    packet->m_hasAbsTimestamp = 0;
    packet->m_headerType = RTMP_PACKET_SIZE_LARGE; // full 11-byte header (comment said "medium" — it is LARGE)
    // hand the packet to the send queue
    videoCallback(packet);
}
7、faac初始化
void AudioChannel::initAudioEncoder(int sample_rate, int num_channels) {
this->mChannels = num_channels;
/**
* 441000
* 两个声道
*
* 单通道 1024
* 两个通道 inputSamples = 1024 *2
* 还需要在上层 *2 位声 1024*2*2 = 4096
*/
/**
* TODO 1 打开faac编码器
*/
audioEncoder = faacEncOpen(sample_rate,mChannels,&inputSamples,&maxOutputBytes);
if (!audioEncoder){
LOGE("打开faac编码器失败")
return;
}
/**
* TODO 2 编码器的参数设置
*/
faacEncConfigurationPtr config = faacEncGetCurrentConfiguration(audioEncoder);
config->mpegVersion = MPEG4;
config->aacObjectType = LOW;
config->inputFormat = FAAC_INPUT_16BIT;
config->outputFormat = 0;
//工作中 降噪
//1发送的时候回音消除 2接收的时候回音消除 最复杂
config->useTns = 1;
config->useLfe = 0;
/**
* TODO 3 配置参数提交给编码器
*/
int ret = faacEncSetConfiguration(audioEncoder,config);
if (!ret){
LOGE("音频编码器配置参数失败")
return;
}
LOGI("音频编码器打开成功")
buffer = new u_char(maxOutputBytes);
}
8、发送faac编码的音频头
/**
 * Build the RTMP audio sequence-header packet (AudioSpecificConfig).
 * It must be sent before any AAC audio data packets.
 * @return heap-allocated packet; ownership passes to the caller/queue.
 */
RTMPPacket *AudioChannel::getAudioSeqHeader() {
    u_char *ppBuffer;
    u_long len;
    RTMPPacket *packet = new RTMPPacket;
    // FAAC allocates ppBuffer holding the decoder config (typically 2 bytes,
    // but `len` is authoritative).
    faacEncGetDecoderSpecificInfo(audioEncoder, &ppBuffer, &len);
    int body_size = 2 + len; // 2-byte FLV audio tag header + config
    RTMPPacket_Alloc(packet, body_size);
    // 0xAF: AAC | 44.1 kHz flag | 16-bit | stereo; 0xAE for mono
    packet->m_body[0] = 0xAF;
    if (mChannels == 1) {
        packet->m_body[0] = 0xAE;
    }
    packet->m_body[1] = 0x00; // AACPacketType=0: sequence header
    // BUG FIX: copy all `len` config bytes — the old code always copied
    // exactly 2 even though body_size was computed from len.
    memcpy(&packet->m_body[2], ppBuffer, len);
    // BUG FIX: FAAC hands ownership of ppBuffer to the caller; free it.
    free(ppBuffer);
    // packet bookkeeping
    packet->m_packetType = RTMP_PACKET_TYPE_AUDIO;
    packet->m_nBodySize = body_size;
    packet->m_nChannel = 11;  // audio channel (video uses 10)
    packet->m_nTimeStamp = 0; // headers carry no timestamp
    packet->m_hasAbsTimestamp = 0;
    packet->m_headerType = RTMP_PACKET_SIZE_SMALL;
    return packet;
}
9、发送音频数据
// NOTE(review): the parameter is signed int8_t; an unsigned uint8_t* would
// better describe raw PCM bytes.
/**
 * Encode one PCM chunk with FAAC and enqueue it as an RTMP audio packet.
 * @param data 16-bit PCM, `inputSamples` samples as configured at init
 */
void AudioChannel::encodeData(int8_t *data) {
/**
 * faacEncEncode arguments:
 * 1. encoder handle
 * 2. PCM input (FAAC takes int32_t*, hence the cast)
 * 3. number of input samples (value obtained from faacEncOpen)
 * 4. output buffer receiving the encoded AAC
 * 5. output buffer capacity
 */
int byteLen = faacEncEncode(audioEncoder, reinterpret_cast<int32_t *>(data), inputSamples, buffer, maxOutputBytes);
if (byteLen > 0){
RTMPPacket *packet = new RTMPPacket;
//0xAF: AAC | 44100 Hz | 16-bit | stereo
//0xAE: AAC | 44100 Hz | 16-bit | mono
int body_size = 2 + byteLen;
RTMPPacket_Alloc(packet,body_size);//heap-allocated body
packet->m_body[0] = 0xAF;//stereo
if (mChannels == 1){
packet->m_body[0] = 0xAE; //mono
}
packet->m_body[1] = 0x01;//AACPacketType=1: raw AAC frame data
memcpy(&packet->m_body[2],buffer,byteLen);
//packet bookkeeping
packet->m_packetType = RTMP_PACKET_TYPE_AUDIO;
packet->m_nBodySize = body_size;
packet->m_nChannel = 11; //audio channel (video uses 10)
packet->m_nTimeStamp = -1;//NOTE(review): presumably stamped later on the send path — confirm
packet->m_hasAbsTimestamp = 0; //relative timestamps
packet->m_headerType = RTMP_PACKET_SIZE_LARGE;
audioCallback(packet);
}
}