最近在做社交功能,需要收发语音,所以就仿照微信做了一个语音录制功能
使用的是react-native-audio
GitHub地址:https://github.com/jsierles/react-native-audio
按照 GitHub 上的说明配置即可,配置起来很简单
iOS支持的编码格式:lpcm, ima4, aac, MAC3, MAC6, ulaw, alaw, mp1, mp2, alac, amr
Android支持的编码:aac, aac_eld, amr_nb, amr_wb, he_aac, vorbis
简单说下我遇到的问题:Android 上录制的语音在 iOS 上不能播放,最后发现是录制语音时少设置了参数。
把录制格式设定为如下,Android 和 iOS 的播放问题就完美解决了:
// Recorder settings that produce a file playable on BOTH platforms:
// the 'aac' encoding + 'aac_adts' output combination is what makes
// Android recordings playable on iOS.
AudioRecorder.prepareRecordingAtPath(audioPath, {
SampleRate: 22050, // 22.05 kHz sample rate
Channels: 1, // mono
AudioQuality: 'Low',
AudioEncoding: 'aac',
OutputFormat: 'aac_adts', // NOTE(review): key setting for cross-platform playback (per article text above)
});
效果图如下:
总体思路就是把下面的小方块注册为手势模块去监听用户手势的变化,然后在state里面设置一些参数根据手势的变化给用户反馈
import {AudioRecorder, AudioUtils} from 'react-native-audio';
/*
 * this.audioPath
 * Destination path + file name of the recorded file.
 * You can use the AudioUtils paths from react-native-audio or any other path.
 * If a file with the same name already exists it will be overwritten.
 */
// BUGFIX: was `this.audioPat` (typo) — the rest of the code reads `this.audioPath`.
this.audioPath = AudioUtils.DocumentDirectoryPath + '/test.aac'; // file name inside the directory
this.state = {
  actionVisible: false,
  paused: false,
  recordingText: "",
  opacity: 'white',
  recordingColor: "transparent",
  text: I18n.t('message.Chat.Voice.holdToTalk'),
  currentTime: null,        // seconds elapsed since recording started
  recording: false,         // currently recording?
  stoppedRecording: false,  // has recording been stopped?
  finished: false,          // did the recording finish successfully?
  hasPermission: undefined, // microphone permission granted?
};
componentDidMount() {
this.prepareRecordingPath(this.audioPath);
// Register the progress listener: fires with the elapsed seconds and
// force-stops the recording once it reaches maxTime.
AudioRecorder.onProgress = (data) => {
this.setState({
currentTime: Math.floor(data.currentTime)
}, () => {
if (this.state.currentTime >= maxTime) {
Alert.alert(I18n.t('message.Chat.Voice.speakTooLong'))
this._cancel(false)
}
});
};
AudioRecorder.onFinished = (data) => {
// Android callback comes in the form of a promise instead.
if (Platform.OS === 'ios') {
this._finishRecording(data.status === "OK", data.audioFileURL);
}
};
// Gesture responder handlers; spread onto the record button in render().
this.Gesture = {
onStartShouldSetResponder: (evt) => true,
onMoveShouldSetResponder: (evt) => true,
// Finger down: darken the button, show the recording toast, start recording.
onResponderGrant: (evt) => {
if (!this.state.hasPermission) {
Alert.alert(I18n.t('message.Chat.Voice.jurisdiction'))
}
this.setState({
opacity: "#c9c9c9",
recordingText: I18n.t('message.Chat.Voice.fingerStroke'),
text: I18n.t('message.Chat.Voice.releaseEnd'),
icon: "ios-mic",
recordingColor: 'transparent'
}, _ => RecordView.show(this.state.recordingText, this.state.recordingColor, this.state.icon));
this._record();
},
onResponderReject: (evt) => {
},
// Finger moved: more than UpperDistance above the button means
// "release to cancel" (red toast); otherwise show the normal toast.
onResponderMove: (evt) => {
if (evt.nativeEvent.pageY < this.recordPageY - UpperDistance) {
if (this.state.recordingColor != 'red') {
this.setRecordView(I18n.t('message.Chat.Voice.loosenFingers'), 'red', "ios-mic-off")
}
} else if (this.state.recordingColor != 'transparent') {
this.setRecordView(I18n.t('message.Chat.Voice.fingerStroke'), 'transparent', "ios-mic")
}
},
// Finger up: restore the idle UI, then send or cancel depending on
// where the finger was released (above the button => cancel).
onResponderRelease: (evt) => {
this.setState({
opacity: "white",
text: I18n.t('message.Chat.Voice.holdToTalk')
});
RecordView.hide();
let canceled;
if (evt.nativeEvent.locationY < 0 ||
evt.nativeEvent.pageY < this.recordPageY) {
canceled = true;
} else {
canceled = false;
}
this._cancel(canceled)
},
onResponderTerminationRequest: (evt) => true,
onResponderTerminate: (evt) => {
},
}
}
// Update the floating-toast state and re-show the toast with the new content.
// Pass the arguments straight through to RecordView.show instead of reading
// them back out of this.state in the callback (same values, less coupling —
// and consistent with the final version of this method in the full file).
setRecordView = (recordingText, recordingColor, icon) => {
  this.setState({
    recordingText,
    recordingColor,
    icon
  }, _ => RecordView.show(recordingText, recordingColor, icon));
}
render() {
const { opacity, text } = this.state
return (
<View style={styles.Box}>
<View
{...this.Gesture} //注册为手势组件
onLayout={this.handleLayout}
ref={(record) => this.record = record}
style={[styles.textBoxStyles, { backgroundColor: opacity }]}>
<Text>{text}</Text>
</View>
</View>
)
}
上面弹出层浮框的实现如下:
使用的是一个三方库teaset
GitHub地址:https://github.com/rilyu/teaset
class RecordView {
static key = null;
static show(text, color, icon) {
let showIcon;
if (RecordView.key) RecordView.hide()
if (color == 'red') {
showIcon = (<Image source={ic_ch3x} style={styles.imageStyles} />)
} else if (icon == 'short') {
showIcon = (<Image source={SHORT4} style={styles.imageStyles} />)
} else {
showIcon = (
<View>
<Icon name={'ios-mic'} style={styles.IconStyles} />
<Spinner size={24} type="ThreeBounce" color='white' />
</View>
);
}
RecordView.key = Toast.show({
text: (
<Text style={[styles.textStyles, { backgroundColor: color }]}>
{text}
</Text>
),
icon: showIcon,
position: 'center',
duration: 1000000,
});
}
static hide() {
if (!RecordView.key) return;
Toast.hide(RecordView.key);
RecordView.key = null;
}
}
我把代码直接贴上吧,没单独从项目中摘出来,就贴整个文件了
import React, { Component } from 'react';
import {
Image, PermissionsAndroid, Alert,
Platform, UIManager, findNodeHandle, DeviceEventEmitter
} from 'react-native';
import styles from './Styles';
import { Toast } from 'teaset';
import I18n from '../../../../I18n';
import { UpperDistance } from '../config';
import Spinner from "react-native-spinkit";
import { Text, Icon, View } from 'native-base'
import { AudioRecorder } from 'react-native-audio';
import Permissions from 'react-native-permissions';
import SHORT4 from '../../../../Images/C2CImg/SHORT4.png';
import ic_ch3x from '../../../../Images/C2CImg/ic_ch3x.png';
import MessageUtil from '../../MessageUtilModel/MessageUtil';
const maxTime = 45; // maximum recording length in seconds
const minTime = 1; // minimum recording length in seconds
export default class Voice extends Component {
constructor(props) {
super(props)
this.state = {
paused: false,
recordingText: "",
opacity: 'white',
recordingColor: "transparent",
text: I18n.t('message.Chat.Voice.holdToTalk'),
currentTime: null, //开始录音到现在的持续时间
recording: false, //是否正在录音
stoppedRecording: false, //是否停止了录音
finished: false, //是否完成录音
hasPermission: undefined, //是否获取权限
}
// 语音存储路径
const { userId } = this.props.chatFriend || {}
this.audioPath = MessageUtil.getFilePath(userId, `${(new Date()).getTime()}.aac`)
}
componentDidMount() {
this._checkPermission()
this.prepareRecordingPath(this.audioPath);
AudioRecorder.onProgress = (data) => {
this.setState({
currentTime: Math.floor(data.currentTime)
}, () => {
if (this.state.currentTime >= maxTime) {
Alert.alert(I18n.t('message.Chat.Voice.speakTooLong'))
this._cancel(false)
}
});
};
AudioRecorder.onFinished = (data) => {
// Android callback comes in the form of a promise instead.
if (Platform.OS === 'ios') {
this._finishRecording(data.status === "OK", data.audioFileURL);
}
};
this.Gesture = {
onStartShouldSetResponder: (evt) => true,
onMoveShouldSetResponder: (evt) => true,
onResponderGrant: (evt) => {
if (!this.state.hasPermission) {
Alert.alert(I18n.t('message.Chat.Voice.jurisdiction'))
}
this.setState({
opacity: "#c9c9c9",
recordingText: I18n.t('message.Chat.Voice.fingerStroke'),
text: I18n.t('message.Chat.Voice.releaseEnd'),
icon: "ios-mic",
recordingColor: 'transparent'
}, _ => RecordView.show(this.state.recordingText, this.state.recordingColor, this.state.icon));
this._record();
},
onResponderMove: (evt) => {
if (evt.nativeEvent.pageY < this.recordPageY - UpperDistance) {
if (this.state.recordingColor != 'red') {
this.setRecordView(I18n.t('message.Chat.Voice.loosenFingers'), 'red', "ios-mic-off")
}
} else if (this.state.recordingColor != 'transparent') {
this.setRecordView(I18n.t('message.Chat.Voice.fingerStroke'), 'transparent', "ios-mic")
}
},
onResponderRelease: (evt) => {
this.setState({
opacity: "white",
text: I18n.t('message.Chat.Voice.holdToTalk')
});
RecordView.hide();
let canceled;
if (evt.nativeEvent.locationY < 0 ||
evt.nativeEvent.pageY < this.recordPageY) {
canceled = true;
} else {
canceled = false;
}
this._cancel(canceled)
},
onResponderTerminationRequest: (evt) => true
}
//chatChange type 1前台 0后台 2中间
this.ShowLocation = DeviceEventEmitter.addListener('chatChange', (type) => {
this._cancel(false)
})
}
componentWillUnmount() {
this.ShowLocation.remove()
AudioRecorder.removeListeners()
this.timer && clearTimeout(this.timer);
}
prepareRecordingPath = (audioPath) => {
AudioRecorder.prepareRecordingAtPath(audioPath, {
SampleRate: 22050,
Channels: 1,
AudioQuality: 'Low',
AudioEncoding: 'aac',
OutputFormat: 'aac_adts',
});
}
_checkPermission = async () => {
const rationale = {
'title': I18n.t('message.Chat.Voice.tips'),
'message': I18n.t('message.Chat.Voice.tipsMessage')
};
let askForGrant = false
if (Platform.OS === 'ios') {
Permissions.check('microphone', { type: 'always' }).then(res => {
if (res == 'authorized') {
this.setState({ hasPermission: true })
} else {
Permissions.request('microphone', { type: 'always' }).then(response => {
if (response == 'denied') {
askForGrant = true
} else if (response == 'authorized') {
this.setState({ hasPermission: true });
}
});
}
});
} else {
const status = await PermissionsAndroid.request(PermissionsAndroid.PERMISSIONS.RECORD_AUDIO, rationale)
if (status !== "granted") {
askForGrant = true
} else {
this.setState({ hasPermission: true });
}
}
if (askForGrant) {
Alert.alert(
'Can we access your microphone and Speech Recognition?',
'We need access so you can record your voice',
[
{
text: 'Later',
onPress: () => console.log('Permission denied'),
style: 'cancel',
},
{
text: 'Open Settings',
onPress: Permissions.openSettings
},
],
);
}
}
setRecordView = (recordingText, recordingColor, icon) => {
this.setState({
recordingText: recordingText,
recordingColor: recordingColor,
icon: icon
}, _ => RecordView.show(recordingText, recordingColor, icon));
}
_cancel = (canceled) => {
let filePath = this._stop();
if (canceled) return;
if (this.state.currentTime < minTime) {
this.setRecordView(I18n.t('message.Chat.Voice.speakTooShort'), 'transparent', "short")
this.timer = setTimeout(() => { RecordView.hide() }, 300)
return;
}
this.setState({ currentTime: null })
let voice = {
audioPath: this.audioPath,
currentTime: this.state.currentTime
}
setTimeout(() => { this.props.SendVoice(voice) }, 500)
}
_pause = async () => {
if (!this.state.recording) return;
try {
const filePath = await AudioRecorder.pauseRecording();
this.setState({ paused: true });
} catch (error) {
}
}
_resume = async () => {
if (!this.state.paused) return;
try {
await AudioRecorder.resumeRecording();
this.setState({ paused: false });
} catch (error) {
}
}
_stop = async () => {
if (!this.state.recording) return;
this.setState({ stoppedRecording: true, recording: false, paused: false });
try {
const filePath = await AudioRecorder.stopRecording();
if (Platform.OS === 'android') {
this._finishRecording(true, filePath);
}
return filePath;
} catch (error) {
}
}
_finishRecording = (didSucceed, filePath) => {
this.setState({ finished: didSucceed });
}
_record = async () => {
const { recording, hasPermission, stoppedRecording } = this.state
const { userId } = this.props.chatFriend || {}
if (recording) return;
if (!hasPermission) return;
if (stoppedRecording) {
this.audioPath = MessageUtil.getFilePath(userId, `${(new Date()).getTime()}.aac`)
this.prepareRecordingPath(this.audioPath);
}
this.setState({
recording: true,
paused: false
});
try {
const filePath = await AudioRecorder.startRecording();
} catch (error) {
}
}
handleLayout = () => {
const handle = findNodeHandle(this.record);
UIManager.measure(handle, (x, y, w, h, px, py) => {
// this._ownMeasurements = { x, y, w, h, px, py };
this.recordPageX = px;
this.recordPageY = py;
});
}
render() {
const { opacity, text } = this.state
return (
<View style={styles.Box}>
<View
{...this.Gesture}
onLayout={this.handleLayout}
ref={(record) => this.record = record}
style={[styles.textBoxStyles, { backgroundColor: opacity }]}>
<Text>{text}</Text>
</View>
</View>
)
}
}
class RecordView {
static key = null;
static show(text, color, icon) {
let showIcon;
if (RecordView.key) RecordView.hide()
if (color == 'red') {
showIcon = (<Image source={ic_ch3x} style={styles.imageStyles} />)
} else if (icon == 'short') {
showIcon = (<Image source={SHORT4} style={styles.imageStyles} />)
} else {
showIcon = (
<>
<Icon name={'ios-mic'} style={styles.IconStyles} />
<Spinner size={24} type="ThreeBounce" color='white' />
</>
);
}
RecordView.key = Toast.show({
text: (
<Text style={[styles.textStyles, { backgroundColor: color }]}>
{text}
</Text>
),
icon: showIcon,
position: 'center',
duration: 1000000,
});
}
static hide() {
if (!RecordView.key) return;
Toast.hide(RecordView.key);
RecordView.key = null;
}
}