I. Workflow
Understand the OpenGL coordinate system.
Associate the camera preview with OpenGL.
Use GLSurfaceView as the preview window in the layout.
Prepare the vertex attribute data and shader files.
Implement the GLSurfaceView.Renderer interface and write the actual rendering code.
II. Implementation Approach
1. Understand the OpenGL coordinate system
[Image: OpenGL coordinate system diagram]
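In brief: OpenGL's normalized device coordinates run from -1 to 1 on both axes with the origin at the center of the viewport, while texture coordinates run from 0 to 1. Android's 2D coordinate origin is the top-left corner with y pointing down, which is why the texture coordinates in MyRender below are flipped vertically relative to the vertex coordinates.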
2. Associate the camera preview with OpenGL
The Android camera can output its preview data to a SurfaceTexture, so the main idea of using OpenGL for the camera preview is:
①. Bind a texture ID
- Create a texture ID for GL rendering
- Create a SurfaceTexture object from the texture ID
- Pass the SurfaceTexture object to the Camera
These three steps associate the texture ID with the Camera.
MyRender.java
int[] textures = new int[1];
GLES20.glGenTextures(1,textures,0);
int cameraTextureId = textures[0];
GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES,cameraTextureId);
// Set the texture wrap mode
GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES,GLES20.GL_TEXTURE_WRAP_S,GLES20.GL_REPEAT);
GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES,GLES20.GL_TEXTURE_WRAP_T,GLES20.GL_REPEAT);
// Set the texture filter mode
GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES,GLES20.GL_TEXTURE_MAG_FILTER,GLES20.GL_LINEAR);
GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES,GLES20.GL_TEXTURE_MIN_FILTER,GLES20.GL_LINEAR);
// Create a SurfaceTexture to receive the camera frames
surfaceTexture = new SurfaceTexture(cameraTextureId);
// Notify the listener registered by the camera side
if(renderListener != null)
renderListener.onSurfaceCreate(surfaceTexture);
// Unbind the texture
GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES,0);
Use this class in the Activity's XML layout to display the preview:
MyCameraView.java
public class MyCameraView extends GLSurfaceView implements MyRender.RenderListener{
private static final String TAG = "MyCameraView";
MyRender myRender;
MyCameraHelper myCameraHelper;
private int cameraId = Camera.CameraInfo.CAMERA_FACING_BACK;
private boolean mIsFocusing;
public MyCameraView(Context context) {
this(context,null);
}
public MyCameraView(Context context, AttributeSet attrs) {
super(context, attrs);
// Set the EGL context client version to 2, which initializes the OpenGL ES 2.0 environment
setEGLContextClientVersion(2);
myCameraHelper = new MyCameraHelper(context);
myRender = new MyRender(context);
setRenderer(myRender);
myRender.setOnRenderListener(this);
}
...
}
MyCameraHelper.java
public void openCamera(int cameraId) {
close();
try {
this.cameraId = cameraId;
camera = Camera.open(cameraId);
camera.setPreviewTexture(surfaceTexture);
...
}
②. Receive the frame-available callback
When the Camera captures a new frame, the frame data is pushed down through the intermediate layers onto the SurfaceTexture.
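A minimal sketch of how this callback could be wired up (not shown in the original code, which relies on GLSurfaceView's default continuous render mode; setOnFrameAvailableListener and requestRender are the standard APIs for on-demand rendering):
// Hypothetical wiring, assuming this runs inside the GLSurfaceView subclass:
// render only when the camera actually produces a frame.
setRenderMode(GLSurfaceView.RENDERMODE_WHEN_DIRTY);
surfaceTexture.setOnFrameAvailableListener(new SurfaceTexture.OnFrameAvailableListener() {
    @Override
    public void onFrameAvailable(SurfaceTexture st) {
        // A new camera frame is ready; schedule a redraw so that
        // onDrawFrame() runs and calls updateTexImage().
        requestRender();
    }
});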
③. Drive the SurfaceTexture to update the texture ID
@Override
public void onDrawFrame(GL10 gl) {
//...
// When a frame has finished decoding, have the SurfaceTexture update the texture ID
// to the most recently decoded frame, and let the lower layers start decoding the next one
surfaceTexture.updateTexImage();
// ...then draw the frame using the texture ID
}
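As a side note, SurfaceTexture can also report the correct texture transform for the current frame; this article instead builds its own rotation matrix in rotateCameraAngle() below, but a sketch of that alternative would be:
// Alternative (not used in this article): after updateTexImage(), ask the
// SurfaceTexture for the per-frame texture transform and pass it to the
// vertex shader as a uniform instead of a hand-built rotation matrix.
float[] texMatrix = new float[16];
surfaceTexture.getTransformMatrix(texMatrix);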
4. Prepare the vertex attribute data and shader files
Tip: to get syntax highlighting when editing shader files, install the "GLSL Support" plugin; alternatively, you can simply write the shaders as Strings.
Vertex shader
// Attribute: vertex position
attribute vec4 v_Position;
// Attribute: texture coordinate
attribute vec2 f_Position;
// varying passes values from the vertex shader to the fragment shader
varying vec2 ft_Position;
uniform mat4 u_Matrix;
void main(){
ft_Position = f_Position;
gl_Position = v_Position * u_Matrix;
}
Fragment shader
// Required extension for external (camera/video) textures
#extension GL_OES_EGL_image_external : require
// Set the default float precision to medium
precision mediump float;
// varying receives the texture coordinate from the vertex shader
varying vec2 ft_Position;
// External texture; uniform is how the application passes values to GL
uniform samplerExternalOES sTexture;
void main(){
gl_FragColor = texture2D(sTexture,ft_Position);
}
5. Implement the GLSurfaceView.Renderer interface and write the actual rendering code.
public class MyRender extends BaseRender {
Context mContext;
protected int mViewWidth;
protected int mViewHeight;
private float[] mVertexCoordinate = new float[]{
-1f,-1f,
1f,-1f,
-1f,1f,
1f,1f,
};
private FloatBuffer mVertexBuffer;
private float[] mFragmentCoordinate = new float[]{
0f, 1f,
1f, 1f,
0f, 0f,
1f, 0f
};
private FloatBuffer mFragmentBuffer;
private int program;
private int vPosition;
private int fPosition;
private int u_Matrix;
private int mVboId;
private float[] matrix = new float[16];
private String TAG = "size";
// private int cameraTextureId;
private SurfaceTexture surfaceTexture;
public MyRender(Context context) {
this.mContext = context;
// one float occupies 4 bytes
mVertexBuffer = ByteBuffer.allocateDirect(mVertexCoordinate.length*4)
.order(ByteOrder.nativeOrder())
.asFloatBuffer()
.put(mVertexCoordinate);
mVertexBuffer.position(0);
mFragmentBuffer = ByteBuffer.allocateDirect(mFragmentCoordinate.length*4)
.order(ByteOrder.nativeOrder())
.asFloatBuffer()
.put(mFragmentCoordinate);
mFragmentBuffer.position(0);
// Orthographic projection over the standard [-1, 1] coordinate range
Matrix.orthoM(matrix, 0, -1, 1, -1f, 1f, -1f, 1f);
}
@Override
public void onSurfaceCreated(GL10 gl, EGLConfig config) {
String vertexSource = Utils.getGLResource(mContext, R.raw.vertex_shader_matrix);
String fragmentSource = Utils.getGLResource(mContext,R.raw.fragment_shader_camera);
program = Utils.createProgram(vertexSource,fragmentSource);
vPosition = GLES20.glGetAttribLocation(program,"v_Position");
fPosition = GLES20.glGetAttribLocation(program,"f_Position");
// int sTexture = GLES20.glGetUniformLocation(program,"sTexture");
u_Matrix = GLES20.glGetUniformLocation(program,"u_Matrix");
// Create the VBO
int[] vBos = new int[1];
// Generate n buffer object names (declaration)
GLES20.glGenBuffers(1,vBos,0);
mVboId = vBos[0];
// Bind the VBO (initialization)
GLES20.glBindBuffer(GLES20.GL_ARRAY_BUFFER,mVboId);
// Allocate the VBO's storage
GLES20.glBufferData(GLES20.GL_ARRAY_BUFFER,(mVertexCoordinate.length+mFragmentCoordinate.length)*4,
null,GLES20.GL_STATIC_DRAW);
// Fill it in two segments: the first holds the vertex data, the second the texture-coordinate data
GLES20.glBufferSubData(GLES20.GL_ARRAY_BUFFER,0,mVertexCoordinate.length*4,mVertexBuffer);
GLES20.glBufferSubData(GLES20.GL_ARRAY_BUFFER,mVertexCoordinate.length*4,
mFragmentCoordinate.length*4,mFragmentBuffer);
// Once we are done filling the buffer, unbind it
GLES20.glBindBuffer(GLES20.GL_ARRAY_BUFFER,0);
// Activate the program
GLES20.glUseProgram(program);
int[] textures = new int[1];
GLES20.glGenTextures(1,textures,0);
int cameraTextureId = textures[0];
GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES,cameraTextureId);
// Set the texture wrap mode
GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES,GLES20.GL_TEXTURE_WRAP_S,GLES20.GL_REPEAT);
GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES,GLES20.GL_TEXTURE_WRAP_T,GLES20.GL_REPEAT);
// Set the texture filter mode
GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES,GLES20.GL_TEXTURE_MAG_FILTER,GLES20.GL_LINEAR);
GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES,GLES20.GL_TEXTURE_MIN_FILTER,GLES20.GL_LINEAR);
// Create a SurfaceTexture to receive the camera frames
surfaceTexture = new SurfaceTexture(cameraTextureId);
if(renderListener != null)
renderListener.onSurfaceCreate(surfaceTexture);
// Unbind the texture
GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES,0);
}
@Override
public void onSurfaceChanged(GL10 gl, int width, int height) {
GLES20.glViewport(0,0,width,height);
}
@Override
public void onDrawFrame(GL10 gl) {
// Clear the screen (e.g. to red): the first call sets the clear color, the
// second actually performs the clear; glClear()'s argument selects which buffers to clear.
// GLES20.glClearColor(1f,0f,0f,1f);
// GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
// Activate the program
GLES20.glUseProgram(program);
// No explicit glBindTexture is needed here: updateTexImage() implicitly binds
// the texture to the GL_TEXTURE_EXTERNAL_OES target.
// GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES,cameraTextureId);
surfaceTexture.updateTexImage();
// Upload matrix as the value of the u_Matrix uniform
GLES20.glUniformMatrix4fv(u_Matrix,1,false,matrix,0);
// The vertex data was already uploaded via glBufferData; bind the VBO to use it here
GLES20.glBindBuffer(GLES20.GL_ARRAY_BUFFER,mVboId);
/**
 * Set up the attribute pointers:
 * 2: two components per vertex
 * GLES20.GL_FLOAT: component type is float
 * false: no normalization
 * 8: the stride is 8 bytes (2 floats x 4 bytes each)
 */
// Attribute arrays are disabled by default, so enable them first
GLES20.glEnableVertexAttribArray(vPosition);
// Read 2 floats per vertex (a float is 4 bytes, see https://www.cnblogs.com/fordreamxin/p/4676208.html),
// then step 8 bytes to the next pair; the byte offset is what makes the two-segment VBO layout work
GLES20.glVertexAttribPointer(vPosition,2,GLES20.GL_FLOAT,false,8,0);
GLES20.glEnableVertexAttribArray(fPosition);
GLES20.glVertexAttribPointer(fPosition,2,GLES20.GL_FLOAT,false,8,mVertexCoordinate.length*4);
// GL_TRIANGLE_STRIP with 4 vertices draws a rectangle, see https://www.cnblogs.com/lxb0478/p/6381677.html
GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP,0,4);
// Unbind
// GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, 0);
// GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES,0);
GLES20.glBindBuffer(GLES20.GL_ARRAY_BUFFER,0);
}
/**
 * Reset the matrix to the identity
 */
public void resetMatrix() {
Matrix.setIdentityM(matrix, 0);
}
/**
 * Rotate the matrix
 *
 * @param offset offset into the matrix array
 * @param a rotation angle in degrees
 * @param x x component of the rotation axis
 * @param y y component of the rotation axis
 * @param z z component of the rotation axis
 */
public void rotateMatrix(int offset, float a, float x, float y, float z) {
Matrix.rotateM(matrix, offset, a, x, y, z);
}
public void setViewHeight(int viewHeight) { this.mViewHeight = viewHeight; }
public void setViewWidth(int viewWidth) {
this.mViewWidth = viewWidth;
}
RenderListener renderListener;
public void setOnRenderListener(RenderListener onRenderListener) {
this.renderListener = onRenderListener;
}
public interface RenderListener{
void onSurfaceCreate(SurfaceTexture surfaceTexture);
}
}
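The Utils class referenced in onSurfaceCreated() is not shown in the article. Below is a minimal sketch of what it plausibly looks like: the names getGLResource and createProgram come from the calls above, while the body is an assumption built from the standard GLES20 shader APIs.
Utils.java (sketch)
import android.content.Context;
import android.opengl.GLES20;
import android.util.Log;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;

public class Utils {
    // Read a GLSL source file from res/raw into a String
    public static String getGLResource(Context context, int rawId) {
        StringBuilder sb = new StringBuilder();
        try (BufferedReader reader = new BufferedReader(
                new InputStreamReader(context.getResources().openRawResource(rawId)))) {
            String line;
            while ((line = reader.readLine()) != null) {
                sb.append(line).append('\n');
            }
        } catch (IOException e) {
            e.printStackTrace();
        }
        return sb.toString();
    }

    // Compile both shaders and link them into a program
    public static int createProgram(String vertexSource, String fragmentSource) {
        int vertexShader = loadShader(GLES20.GL_VERTEX_SHADER, vertexSource);
        int fragmentShader = loadShader(GLES20.GL_FRAGMENT_SHADER, fragmentSource);
        int program = GLES20.glCreateProgram();
        GLES20.glAttachShader(program, vertexShader);
        GLES20.glAttachShader(program, fragmentShader);
        GLES20.glLinkProgram(program);
        int[] status = new int[1];
        GLES20.glGetProgramiv(program, GLES20.GL_LINK_STATUS, status, 0);
        if (status[0] != GLES20.GL_TRUE) {
            Log.e("Utils", "program link failed: " + GLES20.glGetProgramInfoLog(program));
        }
        return program;
    }

    private static int loadShader(int type, String source) {
        int shader = GLES20.glCreateShader(type);
        GLES20.glShaderSource(shader, source);
        GLES20.glCompileShader(shader);
        int[] status = new int[1];
        GLES20.glGetShaderiv(shader, GLES20.GL_COMPILE_STATUS, status, 0);
        if (status[0] != GLES20.GL_TRUE) {
            Log.e("Utils", "shader compile failed: " + GLES20.glGetShaderInfoLog(shader));
        }
        return shader;
    }
}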
III. Other Code Snippets
1. The Camera helper class
public class MyCameraHelper {
Context mContext;
SurfaceTexture surfaceTexture;
int mWidth;
int mHeight;
Camera camera;
// SurfaceHolder surfaceHolder;
public MyCameraHelper(Context context) {
this.mContext = context;
}
public void setSurfaceTexture(SurfaceTexture surfaceTexture) {
this.surfaceTexture = surfaceTexture;
}
int cameraId;
public void openCamera(int cameraId) {
close();
try {
this.cameraId = cameraId;
camera = Camera.open(cameraId);
camera.setPreviewTexture(surfaceTexture);
// camera.setPreviewDisplay(surfaceHolder);
Camera.Parameters parameters = camera.getParameters();
parameters.setFlashMode(Camera.Parameters.FLASH_MODE_OFF);
// NV21 (YUV420SP) is the default preview format
parameters.setPreviewFormat(ImageFormat.NV21);
Camera.Size pictureSize = getOptimalSize(parameters.getSupportedPictureSizes(),mWidth,mHeight);
parameters.setPictureSize(pictureSize.width,pictureSize.height);
Camera.Size previewSize = getOptimalSize(parameters.getSupportedPreviewSizes(),mWidth,mHeight);
parameters.setPreviewSize(previewSize.width,previewSize.height);
// int degree = calculateCameraPreviewOrientation((Activity) mContext);
// For an introduction to the preview rotation angle, see
// https://www.jianshu.com/p/f8d0d1467584
// camera.setDisplayOrientation(degree);
camera.setParameters(parameters);
camera.startPreview();
camera.autoFocus(null);
Log.e("TAG", "开始预览相机:" + cameraId);
} catch (Exception e) {
e.printStackTrace();
}
}
/**
 * Pick the optimal size from the camera's supported sizes
 *
 * @param supportList sizes supported by the camera
 * @param width expected width
 * @param height expected height
 * @return the closest supported size
 */
private static Camera.Size getOptimalSize(List<Camera.Size> supportList, int width, int height) {
// The camera's width is greater than its height, so ensure expectWidth > expectHeight
int expectWidth = Math.max(width, height);
int expectHeight = Math.min(width, height);
// Sort by width in ascending order
Collections.sort(supportList, new Comparator<Camera.Size>() {
@Override
public int compare(Camera.Size pre, Camera.Size after) {
if (pre.width > after.width) {
return 1;
} else if (pre.width < after.width) {
return -1;
}
return 0;
}
});
Camera.Size result = supportList.get(0);
boolean widthOrHeight = false; // whether a Size with an equal width or height was found
// Iterate to find the size whose width and height are closest
for (Camera.Size size : supportList) {
// Width and height both match: return it directly
if (size.width == expectWidth && size.height == expectHeight) {
result = size;
break;
}
// Only the width matches: find the size with the closest height
if (size.width == expectWidth) {
widthOrHeight = true;
if (Math.abs(result.height - expectHeight)
> Math.abs(size.height - expectHeight)) {
result = size;
}
}
// Only the height matches: find the size with the closest width
else if (size.height == expectHeight) {
widthOrHeight = true;
if (Math.abs(result.width - expectWidth)
> Math.abs(size.width - expectWidth)) {
result = size;
}
}
// If no size with an equal width or height was found, find the Size whose
// width and height are both closest to the expected values
else if (!widthOrHeight) {
if (Math.abs(result.width - expectWidth)
> Math.abs(size.width - expectWidth)
&& Math.abs(result.height - expectHeight)
> Math.abs(size.height - expectHeight)) {
result = size;
}
}
}
return result;
}
public void close() {
if(camera != null){
camera.stopPreview();
camera.release();
Log.e("TAG", "停止预览相机");
camera = null;
}
}
public void setViewWidth(int width) {
this.mWidth = width;
}
public void setViewHeight(int height) {
this.mHeight = height;
}
// public void setHolder(SurfaceHolder mHolder) {
// this.surfaceHolder = mHolder;
// }
// This calculation follows the sample in the Camera.setDisplayOrientation docs
public int calculateCameraPreviewOrientation(Activity activity) {
Camera.CameraInfo info = new Camera.CameraInfo();
Camera.getCameraInfo(cameraId, info);
int rotation = activity.getWindowManager().getDefaultDisplay()
.getRotation();
int degrees = 0;
switch (rotation) {
case Surface.ROTATION_0:
degrees = 0;
break;
case Surface.ROTATION_90:
degrees = 90;
break;
case Surface.ROTATION_180:
degrees = 180;
break;
case Surface.ROTATION_270:
degrees = 270;
break;
}
int result;
if (info.facing == Camera.CameraInfo.CAMERA_FACING_FRONT) {
result = (info.orientation + degrees) % 360;
result = (360 - result) % 360;
} else {
result = (info.orientation - degrees + 360) % 360;
}
return result;
}
public Boolean newCameraFocus(Point point, Camera.AutoFocusCallback autoFocusCallback) {
try {
if(camera == null){
throw new RuntimeException("camera is null");
}
Point cameraFocusPoint = convertToCameraPoint(point);
Rect cameraRect = convertToCameraRect(cameraFocusPoint,100);
Camera.Parameters parameters = camera.getParameters();
// If custom focus areas are not supported, fall back to plain auto focus
if(parameters.getMaxNumFocusAreas() <= 0){
return focus(autoFocusCallback);
}
clearCameraFocus();
List<Camera.Area> focusAreas = new ArrayList<Camera.Area>();
focusAreas.add(new Camera.Area(cameraRect,100));
parameters.setFocusAreas(focusAreas);
// Set the metering areas as well
parameters.setMeteringAreas(focusAreas);
camera.setParameters(parameters);
return focus(autoFocusCallback);
} catch (RuntimeException e) {
e.printStackTrace();
return false;
}
}
private void clearCameraFocus() {
try {
if (camera == null) {
throw new RuntimeException("mCamera is null");
}
camera.cancelAutoFocus();
Camera.Parameters parameters = camera.getParameters();
parameters.setFocusAreas(null);
// Clear the metering areas as well
parameters.setMeteringAreas(null);
camera.setParameters(parameters);
} catch (Exception e) {
e.printStackTrace();
}
}
private Boolean focus(Camera.AutoFocusCallback autoFocusCallback) {
camera.cancelAutoFocus();
camera.autoFocus(autoFocusCallback);
return true;
}
private Rect convertToCameraRect(Point cameraFocusPoint, int weight) {
int left = limit(cameraFocusPoint.x-weight,1000,-1000);
int right = limit(cameraFocusPoint.x+weight,1000,-1000);
int bottom = limit(cameraFocusPoint.y+weight,1000,-1000);
int top = limit(cameraFocusPoint.y-weight,1000,-1000);
return new Rect(left,top,right,bottom);
}
private int limit(int x, int max, int min) {
if(x>max)
return max;
if (x<min)
return min;
return x;
}
private Point convertToCameraPoint(Point point) {
int newY = point.y*2000 / mHeight - 1000;
// After shifting by half of the 2000-unit range, the x value must be negated:
// the camera's coordinate space is the portrait screen rotated 90 degrees to the left.
// For an explanation of the coordinate system, see https://blog.csdn.net/afei__/article/details/52033466
int newX = -(point.x*2000 / mWidth - 1000);
return new Point(newX, newY);
}
}
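As a sanity check on convertToCameraPoint(): on a hypothetical 1080x1920 portrait view, a tap at the screen center (540, 960) maps to newY = 960 * 2000 / 1920 - 1000 = 0 and newX = -(540 * 2000 / 1080 - 1000) = 0, i.e. the exact center of the driver's [-1000, 1000] focus coordinate space.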
2. The MyCameraView class
public class MyCameraView extends GLSurfaceView implements MyRender.RenderListener{
private static final String TAG = "MyCameraView";
MyRender myRender;
MyCameraHelper myCameraHelper;
private int cameraId = Camera.CameraInfo.CAMERA_FACING_BACK;
private boolean mIsFocusing;
// SurfaceHolder mHolder;
public MyCameraView(Context context) {
this(context,null);
}
public MyCameraView(Context context, AttributeSet attrs) {
super(context, attrs);
setEGLContextClientVersion(2);
// mHolder = getHolder();
myCameraHelper = new MyCameraHelper(context);
// mHolder.addCallback(this);
// mHolder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
myRender = new MyRender(context);
setRenderer(myRender);
myRender.setOnRenderListener(this);
}
@Override
public void onSurfaceCreate(SurfaceTexture surfaceTexture) {
myCameraHelper.setSurfaceTexture(surfaceTexture);
rotateCameraAngle();
myCameraHelper.openCamera(cameraId);
}
@Override
protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) {
super.onMeasure(widthMeasureSpec, heightMeasureSpec);
int width = getMeasuredWidth();
int height = getMeasuredHeight();
myRender.setViewHeight(height);
myRender.setViewWidth(width);
// mHolder.setFixedSize(width,height);
myCameraHelper.setViewWidth(width);
myCameraHelper.setViewHeight(height);
}
/**
 * Rotate the preview matrix to match the camera orientation
 */
private void rotateCameraAngle() {
myRender.resetMatrix();
// Front camera
if (cameraId == Camera.CameraInfo.CAMERA_FACING_FRONT) {
myRender.rotateMatrix(0, 90, 0, 0, 1);
myRender.rotateMatrix(0, 180, 1, 0, 0);
}
// Back camera
else if (cameraId == Camera.CameraInfo.CAMERA_FACING_BACK) {
myRender.rotateMatrix(0, 90, 0, 0, 1);
}
}
@Override
public boolean onTouchEvent(MotionEvent event) {
Log.e(TAG, "onTouchEvent focus: x="+(int)event.getX()+",y="+(int)event.getY());
if(event.getAction() == MotionEvent.ACTION_UP){
focus((int)event.getX(),(int)event.getY());
}
return true;
// else
// return super.onTouchEvent(event);
}
private void focus(int x, int y) {
if(mIsFocusing)
return;
mIsFocusing = true;
Point point = new Point(x,y);
if(mFocusListener != null)
mFocusListener.beginFocus(x,y);
Log.e(TAG, "focus: x="+x+",y="+y);
myCameraHelper.newCameraFocus(point, new Camera.AutoFocusCallback() {
@Override
public void onAutoFocus(boolean success, Camera camera) {
mIsFocusing = false;
if(mFocusListener != null)
mFocusListener.endFocus();
}
});
}
private FocusListener mFocusListener;
public void setOnFocusListener(FocusListener focusListener) {
this.mFocusListener = focusListener;
}
public interface FocusListener {
void beginFocus(int x,int y);
void endFocus();
}
public void onDestroy() {
if(myCameraHelper != null){
myCameraHelper.close();
myCameraHelper = null;
}
if(myRender != null)
myRender = null;
}
// @Override
// public void surfaceCreated(SurfaceHolder surfaceHolder) {
// myCameraHelper.setHolder(mHolder);
// myCameraHelper.openCamera(cameraId);
// }
}
3. MyCameraActivity.java
public class MyCameraActivity extends AppCompatActivity {
private MyCameraView mCameraView;
CameraFocusView cameraFocusView;
@Override
protected void onResume() {
super.onResume();
// mCameraView.onResume();
}
@Override
protected void onPause() {
super.onPause();
// mCameraView.onPause();
}
@Override
protected void onCreate(@Nullable Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_mycamera_render);
mCameraView = (MyCameraView) findViewById(R.id._mysurface_view);
cameraFocusView = findViewById(R.id.camera_focus_view);
// Tap the screen to focus manually
mCameraView.setOnFocusListener(new MyCameraView.FocusListener() {
@Override
public void beginFocus(int x, int y) {
cameraFocusView.beginFocus(x, y);
}
@Override
public void endFocus() {
cameraFocusView.endFocus(true);
}
});
}
@Override
protected void onDestroy() {
mCameraView.onDestroy();
super.onDestroy();
}
}
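One practical note not covered in the article: on Android 6.0 and above, the CAMERA permission must also be granted at runtime before Camera.open() will succeed. A minimal sketch, e.g. at the start of onCreate() (the request code 1 is an arbitrary assumed value):
// Hypothetical addition: request the CAMERA runtime permission before opening the camera.
if (ContextCompat.checkSelfPermission(this, Manifest.permission.CAMERA)
        != PackageManager.PERMISSION_GRANTED) {
    ActivityCompat.requestPermissions(this,
            new String[]{Manifest.permission.CAMERA}, /* requestCode */ 1);
}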
4. The focus indicator view, CameraFocusView.java
When focusing begins it shows a bracket frame; when focusing finishes, the frame is cleared. (The camera_focus_view styleable referenced in init(), with its stroke_width, prepare_color, and finish_color attributes, must be declared in attrs.xml, which is not shown here.)
public class CameraFocusView extends View {
private int mStrokeWidth;
private int prepareColor;
private int finishColor;
private int mPaintColor;
private boolean isFocusing;
private Paint mPaint;
private int mDuration;
private Handler mDurationHandler;
public CameraFocusView(Context context) {
super(context);
init(context, null);
}
public CameraFocusView(Context context, @Nullable AttributeSet attrs) {
super(context, attrs);
init(context, attrs);
}
public CameraFocusView(Context context, @Nullable AttributeSet attrs, int defStyleAttr) {
super(context, attrs, defStyleAttr);
init(context, attrs);
}
private void init(Context context, @Nullable AttributeSet attrs) {
TypedArray typedArray = context.obtainStyledAttributes(attrs, R.styleable.camera_focus_view);
this.mStrokeWidth = (int) typedArray.getDimension(R.styleable.camera_focus_view_stroke_width, 5);
this.prepareColor = typedArray.getColor(R.styleable.camera_focus_view_prepare_color, Color.RED);
this.finishColor = typedArray.getColor(R.styleable.camera_focus_view_finish_color, Color.YELLOW);
this.mPaint = new Paint();
this.mPaintColor = prepareColor;
this.mDuration = 1000;
this.mDurationHandler = new Handler(Looper.getMainLooper());
this.setVisibility(GONE);
}
public void beginFocus(int centerX, int centerY) {
mPaintColor = prepareColor;
isFocusing = true;
int x = centerX - getMeasuredWidth()/2;
int y = centerY - getMeasuredHeight()/2;
setX(x);
setY(y);
setVisibility(VISIBLE);
invalidate();
}
public void endFocus(boolean isSuccess) {
isFocusing = false;
if (isSuccess) {
mPaintColor = finishColor;
mDurationHandler.postDelayed(new Runnable() {
@Override
public void run() {
if (!isFocusing) {
setVisibility(GONE);
}
}
}, mDuration);
invalidate();
} else {
setVisibility(GONE);
}
}
public void cancelFocus() {
isFocusing = false;
setVisibility(GONE);
}
public void setDuration(int duration) {
mDuration = duration;
}
@Override
protected void onDraw(Canvas canvas) {
super.onDraw(canvas);
int width = getMeasuredWidth();
int height = getMeasuredHeight();
mPaint.setStrokeWidth(mStrokeWidth);
mPaint.setAntiAlias(true);
mPaint.setColor(mPaintColor);
mPaint.setStyle(Paint.Style.FILL);
canvas.drawLine(0, 0, width/3, 0, mPaint);
canvas.drawLine(width*2/3, 0, width, 0, mPaint);
canvas.drawLine(0, height, width/3, height, mPaint);
canvas.drawLine(width*2/3, height, width, height, mPaint);
canvas.drawLine(0, 0, 0, height/3, mPaint);
canvas.drawLine(0, height*2/3, 0, height, mPaint);
canvas.drawLine(width, 0, width, height/3, mPaint);
canvas.drawLine(width, height*2/3, width, height, mPaint);
}
}
IV. Summary
This article walked through the basic flow of rendering the camera's preview data onto a GLSurfaceView with OpenGL. Building on this, a follow-up will combine it with OpenGL's off-screen rendering to implement real-time filters. The renderer code has grown quite long, so it will be extracted and refactored later.
Author: 张小潇
Link: https://www.jianshu.com/p/c7145256dd13
Source: 简书 (Jianshu)
The copyright belongs to the author. For commercial reproduction, please contact the author for authorization; for non-commercial reproduction, please credit the source.