Camera Preview
There are three ways to render a camera preview:
setPreviewDisplay(holder)
setPreviewTexture(surfaceTexture)
a fully custom path
With the first, a display window is bound to the camera (in the end it is a Surface, which EGL maps to a corresponding native window). The camera captures a frame and the system draws it onto that window; all of this is handled automatically by the OS.
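For reference, a minimal sketch of this first approach (error handling and camera release omitted; the camera is simply bound to a SurfaceView's holder):

SurfaceView preview = new SurfaceView(this);
preview.getHolder().addCallback(new SurfaceHolder.Callback() {
    @Override
    public void surfaceCreated(SurfaceHolder holder) {
        try {
            Camera camera = Camera.open(0);
            camera.setPreviewDisplay(holder); // bind the camera to this window
            camera.startPreview();            // the OS handles the drawing from here
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    @Override
    public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) { }

    @Override
    public void surfaceDestroyed(SurfaceHolder holder) { }
});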
The second outputs the camera frames to a texture; that texture can then be processed, combined with other OpenGL operations, and finally drawn to the window.
The third is fully custom: obtain the raw frame data through PreviewCallback, convert it to a 2D texture, and then draw that texture to the window.
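The third approach might look roughly like this; it is only a sketch, and the actual texture upload (YUV-to-RGB conversion, or conversion in a shader) is left out. Note that on many devices onPreviewFrame only fires once a preview target has been set and startPreview() has been called:

camera.setPreviewCallback(new Camera.PreviewCallback() {
    @Override
    public void onPreviewFrame(byte[] data, Camera camera) {
        // data is NV21 by default; convert it (or upload it as-is and convert in a
        // shader) into a 2D texture on the GL thread, then draw that texture.
    }
});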
The latter two allow custom OpenGL rendering, e.g. effects and filters (a small filter example is sketched at the end of this article). Part two will compare their pros and cons; this article walks through the texture-output flow.
This article uses GLSurfaceView, which maintains the Android OpenGL context for us; we only need to put our rendering logic in the Renderer's onDrawFrame and leave everything else to GLSurfaceView's render thread. The one thing to watch out for is that the camera data is usually rotated 90 degrees relative to the screen, so the texture coordinates set at draw time have to compensate for it.
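As an aside, instead of hard-coding rotated texture coordinates, SurfaceTexture can report the transform it expects via getTransformMatrix(); a sketch of that alternative (uTransformLocation and the uTransform uniform are hypothetical and not part of the code in this article):

// After updateTexImage(), query the matrix and feed it to the vertex shader,
// which would then compute: vTexCoord = (uTransform * vec4(vCoord, 0.0, 1.0)).xy;
float[] texMatrix = new float[16];
surfaceTexture.updateTexImage();
surfaceTexture.getTransformMatrix(texMatrix);
GLES20.glUniformMatrix4fv(uTransformLocation, 1, false, texMatrix, 0);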
Camera Capture
import android.content.Context;
import android.graphics.SurfaceTexture;
import android.hardware.Camera;
import android.opengl.GLSurfaceView;
import android.os.Bundle;
// androidx assumed here; older projects would use the android.support.* equivalents.
import androidx.annotation.Nullable;
import androidx.appcompat.app.AppCompatActivity;
import java.io.IOException;

public class GLCameraActivity extends AppCompatActivity {

    private Camera mCamera;
    MyGLSurfaceView surfaceView;

    @Override
    protected void onCreate(@Nullable Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        surfaceView = new MyGLSurfaceView(this);
        CameraRenderer renderer = new CameraRenderer();
        surfaceView.setRenderer(renderer);
        // Dirty mode: only draw when new data arrives and requestRender() is called.
        surfaceView.setRenderMode(GLSurfaceView.RENDERMODE_WHEN_DIRTY);
        renderer.setOnSurfaceListener(new CameraRenderer.OnSurfaceListener() {
            @Override
            public void onSurfaceCreate(SurfaceTexture surfaceTexture) {
                openCamera(surfaceTexture);
            }

            @Override
            public void onFrameAvailable() {
                surfaceView.requestRender();
            }
        });
        setContentView(surfaceView);
    }

    private void openCamera(SurfaceTexture surfaceTexture) {
        close();
        mCamera = Camera.open(0);
        try {
            mCamera.setPreviewTexture(surfaceTexture);
            Camera.Parameters parameters = mCamera.getParameters();
            if (parameters != null) {
                // Focus mode: "auto" focuses only once; some devices may not support
                // this continuous mode.
                // http://blog.csdn.net/huweigoodboy/article/details/51378751
                parameters.setFocusMode(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO);
                parameters.setPreviewSize(1280, 720);
                mCamera.setParameters(parameters);
            }
            // setDisplayOrientation() has no effect when previewing into a SurfaceTexture;
            // the rotation is handled via the texture coordinates instead.
            // mCamera.setDisplayOrientation(90);
            mCamera.startPreview();
            // autoFocus() is only valid while the preview is running; with continuous
            // focus enabled above, this one-shot call is optional.
            mCamera.autoFocus(new Camera.AutoFocusCallback() {
                @Override
                public void onAutoFocus(boolean success, Camera camera) {
                }
            });
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    private void close() {
        if (mCamera != null) {
            mCamera.stopPreview();
            mCamera.release();
            mCamera = null;
        }
    }

    @Override
    protected void onResume() {
        super.onResume();
        surfaceView.onResume();
    }

    @Override
    protected void onPause() {
        super.onPause();
        surfaceView.onPause();
        close();
    }

    class MyGLSurfaceView extends GLSurfaceView {
        public MyGLSurfaceView(Context context) {
            super(context);
            // Request an OpenGL ES 2.0 rendering context.
            setEGLContextClientVersion(2);
        }
    }
}
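One thing the Activity above takes for granted is the camera permission; without it Camera.open() will fail. The manifest needs the CAMERA permission, and on Android 6.0+ it must also be requested at runtime. A sketch, assuming the androidx/support compat libraries are available:

// AndroidManifest.xml: <uses-permission android:name="android.permission.CAMERA" />
if (ContextCompat.checkSelfPermission(this, Manifest.permission.CAMERA)
        != PackageManager.PERMISSION_GRANTED) {
    ActivityCompat.requestPermissions(
            this, new String[]{Manifest.permission.CAMERA}, /* requestCode */ 1);
}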
Custom Renderer
import android.graphics.SurfaceTexture;
import android.opengl.GLES20;
import android.opengl.GLSurfaceView;

import javax.microedition.khronos.egl.EGLConfig;
import javax.microedition.khronos.opengles.GL10;

public class CameraRenderer implements GLSurfaceView.Renderer, SurfaceTexture.OnFrameAvailableListener {

    int[] tex = new int[1];
    SurfaceTexture surfaceTexture;
    CameraTexture cameraTexture;

    @Override
    public void onFrameAvailable(SurfaceTexture surfaceTexture) {
        // Called on an arbitrary thread whenever the camera produces a new frame;
        // the listener forwards it to GLSurfaceView.requestRender().
        if (mOnSurfaceListener != null) {
            mOnSurfaceListener.onFrameAvailable();
        }
    }

    public interface OnSurfaceListener {
        void onSurfaceCreate(SurfaceTexture surfaceTexture);

        void onFrameAvailable();
    }

    private OnSurfaceListener mOnSurfaceListener;

    public void setOnSurfaceListener(OnSurfaceListener onSurfaceListener) {
        this.mOnSurfaceListener = onSurfaceListener;
    }

    @Override
    public void onSurfaceCreated(GL10 gl, EGLConfig config) {
        // Create the texture that backs the SurfaceTexture the camera will render into.
        GLES20.glGenTextures(1, tex, 0);
        surfaceTexture = new SurfaceTexture(tex[0]);
        // Register the frame listener before handing the SurfaceTexture to the camera,
        // so the very first frames are not missed.
        surfaceTexture.setOnFrameAvailableListener(this);
        if (mOnSurfaceListener != null) {
            mOnSurfaceListener.onSurfaceCreate(surfaceTexture);
        }
        cameraTexture = new CameraTexture();
    }

    @Override
    public void onSurfaceChanged(GL10 gl, int width, int height) {
        GLES20.glViewport(0, 0, width, height);
    }

    @Override
    public void onDrawFrame(GL10 gl) {
        // Pull the latest camera frame into the external OES texture, then draw it.
        surfaceTexture.updateTexImage();
        GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
        cameraTexture.draw(tex[0]);
    }
}
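The SurfaceTexture and the GL texture created in onSurfaceCreated are never released above. A possible cleanup hook, as a sketch only (release() is a new, illustrative method; it must run on the GL thread, e.g. via surfaceView.queueEvent(...) before surfaceView.onPause()):

public void release() {
    if (surfaceTexture != null) {
        surfaceTexture.setOnFrameAvailableListener(null);
        surfaceTexture.release();
        surfaceTexture = null;
    }
    // Delete the external OES texture created in onSurfaceCreated().
    GLES20.glDeleteTextures(1, tex, 0);
}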
Defining the OpenGL Shader
import android.opengl.GLES11Ext;
import android.opengl.GLES20;
import android.util.Log;

import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.FloatBuffer;

public class CameraTexture {

    private static final String TAG = CameraTexture.class.getSimpleName();

    private final String vsCode =
            "attribute vec4 vPosition;" +
            "attribute vec2 vCoord;" +
            "varying vec2 vTexCoord;" +
            "void main() {" +
            "  gl_Position = vPosition;" +
            "  vTexCoord = vCoord;" +
            "}";

    // Sampling an external OES texture requires the extension and a default float precision.
    private final String fsCode =
            "#extension GL_OES_EGL_image_external : require\n" +
            "precision mediump float;" +
            "uniform samplerExternalOES sTexture;" +
            "varying vec2 vTexCoord;" +
            "void main() {" +
            "  gl_FragColor = texture2D(sTexture, vTexCoord);" +
            "}";

    // Interleaved vertex data: x, y, s, t. Two triangles covering the full viewport.
    // The texture coordinates compensate for the 90-degree rotation between the
    // camera sensor and the screen.
    static float[] vs = {
             1.0f, -1.0f, 1.0f, 0.0f,
             1.0f,  1.0f, 0.0f, 0.0f,
            -1.0f,  1.0f, 0.0f, 1.0f,
            -1.0f,  1.0f, 0.0f, 1.0f,
            -1.0f, -1.0f, 1.0f, 1.0f,
             1.0f, -1.0f, 1.0f, 0.0f,
    };

    int program;
    int vsShader;
    int fgShader;
    FloatBuffer vertexBuffer;

    public CameraTexture() {
        program = GLES20.glCreateProgram();
        vsShader = loadShader(GLES20.GL_VERTEX_SHADER, vsCode);
        fgShader = loadShader(GLES20.GL_FRAGMENT_SHADER, fsCode);
        GLES20.glAttachShader(program, vsShader);
        GLES20.glAttachShader(program, fgShader);
        // Attribute locations must be bound before linking; draw() relies on 0 and 1.
        GLES20.glBindAttribLocation(program, 0, "vPosition");
        GLES20.glBindAttribLocation(program, 1, "vCoord");
        GLES20.glLinkProgram(program);
        int[] linkStatus = new int[1];
        GLES20.glGetProgramiv(program, GLES20.GL_LINK_STATUS, linkStatus, 0);
        if (linkStatus[0] != GLES20.GL_TRUE) {
            Log.e(TAG, GLES20.glGetProgramInfoLog(program));
        }
        ByteBuffer bb = ByteBuffer.allocateDirect(vs.length * 4);
        bb.order(ByteOrder.nativeOrder());
        vertexBuffer = bb.asFloatBuffer();
        vertexBuffer.put(vs);
        vertexBuffer.position(0);
    }

    public int loadShader(int type, String shaderCode) {
        int shader = GLES20.glCreateShader(type);
        GLES20.glShaderSource(shader, shaderCode);
        GLES20.glCompileShader(shader);
        int[] compileStatus = new int[1];
        // Query the shader (not the program) for its compile status.
        GLES20.glGetShaderiv(shader, GLES20.GL_COMPILE_STATUS, compileStatus, 0);
        if (compileStatus[0] != GLES20.GL_TRUE) {
            Log.e(TAG, GLES20.glGetShaderInfoLog(shader));
        }
        return shader;
    }

    public void draw(int texId) {
        GLES20.glUseProgram(program);
        // Position attribute: 2 floats starting at offset 0, stride of 4 floats.
        vertexBuffer.position(0);
        GLES20.glEnableVertexAttribArray(0);
        GLES20.glVertexAttribPointer(0, 2, GLES20.GL_FLOAT, false, 4 * 4, vertexBuffer);
        // Texture-coordinate attribute: 2 floats starting at offset 2, same stride.
        vertexBuffer.position(2);
        GLES20.glEnableVertexAttribArray(1);
        GLES20.glVertexAttribPointer(1, 2, GLES20.GL_FLOAT, false, 4 * 4, vertexBuffer);
        // Bind the camera frame, which lives in an external OES texture.
        GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
        GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, texId);
        GLES20.glTexParameterf(
                GLES11Ext.GL_TEXTURE_EXTERNAL_OES,
                GLES20.GL_TEXTURE_MIN_FILTER,
                GLES20.GL_LINEAR);
        GLES20.glTexParameterf(
                GLES11Ext.GL_TEXTURE_EXTERNAL_OES,
                GLES20.GL_TEXTURE_MAG_FILTER,
                GLES20.GL_LINEAR);
        GLES20.glTexParameteri(
                GLES11Ext.GL_TEXTURE_EXTERNAL_OES,
                GLES20.GL_TEXTURE_WRAP_S,
                GLES20.GL_CLAMP_TO_EDGE);
        GLES20.glTexParameteri(
                GLES11Ext.GL_TEXTURE_EXTERNAL_OES,
                GLES20.GL_TEXTURE_WRAP_T,
                GLES20.GL_CLAMP_TO_EDGE);
        int sTextureLocation = GLES20.glGetUniformLocation(program, "sTexture");
        GLES20.glUniform1i(sTextureLocation, 0);
        GLES20.glDrawArrays(GLES20.GL_TRIANGLES, 0, 6);
        GLES20.glDisableVertexAttribArray(0);
        GLES20.glDisableVertexAttribArray(1);
    }
}
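Since the point of routing the preview through a texture is to run custom shaders (the effects and filters mentioned at the top), a filter is just a different fragment shader. As an illustration, a grayscale variant of fsCode could look like this (only the fragment shader changes; the rest of CameraTexture stays the same):

private final String fsGrayCode =
        "#extension GL_OES_EGL_image_external : require\n" +
        "precision mediump float;" +
        "uniform samplerExternalOES sTexture;" +
        "varying vec2 vTexCoord;" +
        "void main() {" +
        "  vec4 color = texture2D(sTexture, vTexCoord);" +
        // BT.601 luma weights
        "  float gray = dot(color.rgb, vec3(0.299, 0.587, 0.114));" +
        "  gl_FragColor = vec4(vec3(gray), color.a);" +
        "}";

Any other per-pixel effect can be swapped in the same way, which is exactly the flexibility the SurfaceTexture route buys over setPreviewDisplay.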