I'm getting these errors from LogCat:
10-30 00:31:51.494: D/CameraHal(1205): CameraHal setOverlay/1/00000000/00000000
10-30 00:31:51.494: E/CameraHal(1205): Trying to set overlay, but overlay is null!, line:3472
10-30 00:31:51.494: W/CameraService(1205): Overlay create failed - retrying
...
10-30 00:31:52.526: E/CameraService(1205): Overlay Creation Failed!
...
10-30 00:31:52.588: E/AndroidRuntime(5040): FATAL EXCEPTION: main
10-30 00:31:52.588: E/AndroidRuntime(5040): java.lang.RuntimeException: startPreview failed
10-30 00:31:52.588: E/AndroidRuntime(5040): at android.hardware.Camera.startPreview(Native Method)
10-30 00:31:52.588: E/AndroidRuntime(5040): at com.matthewmitchell.nightcam.CameraSurfaceView.surfaceCreated(CameraSurfaceView.java:47)
10-30 00:31:52.588: E/AndroidRuntime(5040): at android.view.SurfaceView.updateWindow(SurfaceView.java:544)
10-30 00:31:52.588: E/AndroidRuntime(5040): at android.view.SurfaceView.dispatchDraw(SurfaceView.java:341)
10-30 00:31:52.588: E/AndroidRuntime(5040): at android.view.ViewGroup.drawChild(ViewGroup.java:1638)
10-30 00:31:52.588: E/AndroidRuntime(5040): at android.view.ViewGroup.dispatchDraw(ViewGroup.java:1367)
10-30 00:31:52.588: E/AndroidRuntime(5040): at android.view.View.draw(View.java:6743)
10-30 00:31:52.588: E/AndroidRuntime(5040): at android.widget.FrameLayout.draw(FrameLayout.java:352)
10-30 00:31:52.588: E/AndroidRuntime(5040): at android.view.ViewGroup.drawChild(ViewGroup.java:1640)
10-30 00:31:52.588: E/AndroidRuntime(5040): at android.view.ViewGroup.dispatchDraw(ViewGroup.java:1367)
10-30 00:31:52.588: E/AndroidRuntime(5040): at android.view.ViewGroup.drawChild(ViewGroup.java:1638)
10-30 00:31:52.588: E/AndroidRuntime(5040): at android.view.ViewGroup.dispatchDraw(ViewGroup.java:1367)
10-30 00:31:52.588: E/AndroidRuntime(5040): at android.view.View.draw(View.java:6743)
10-30 00:31:52.588: E/AndroidRuntime(5040): at android.widget.FrameLayout.draw(FrameLayout.java:352)
10-30 00:31:52.588: E/AndroidRuntime(5040): at com.android.internal.policy.impl.PhoneWindow$DecorView.draw(PhoneWindow.java:1876)
10-30 00:31:52.588: E/AndroidRuntime(5040): at android.view.ViewRoot.draw(ViewRoot.java:1407)
10-30 00:31:52.588: E/AndroidRuntime(5040): at android.view.ViewRoot.performTraversals(ViewRoot.java:1163)
10-30 00:31:52.588: E/AndroidRuntime(5040): at android.view.ViewRoot.handleMessage(ViewRoot.java:1727)
10-30 00:31:52.588: E/AndroidRuntime(5040): at android.os.Handler.dispatchMessage(Handler.java:99)
10-30 00:31:52.588: E/AndroidRuntime(5040): at android.os.Looper.loop(Looper.java:123)
10-30 00:31:52.588: E/AndroidRuntime(5040): at android.app.ActivityThread.main(ActivityThread.java:4627)
10-30 00:31:52.588: E/AndroidRuntime(5040): at java.lang.reflect.Method.invokeNative(Native Method)
10-30 00:31:52.588: E/AndroidRuntime(5040): at java.lang.reflect.Method.invoke(Method.java:521)
10-30 00:31:52.588: E/AndroidRuntime(5040): at com.android.internal.os.ZygoteInit$MethodAndArgsCaller.run(ZygoteInit.java:868)
10-30 00:31:52.588: E/AndroidRuntime(5040): at com.android.internal.os.ZygoteInit.main(ZygoteInit.java:626)
10-30 00:31:52.588: E/AndroidRuntime(5040): at dalvik.system.NativeStart.main(Native Method)
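Reading the trace, the camera service fails to create the preview overlay ("Overlay Creation Failed!"), and startPreview() then throws. One thing I'm unsure about: from what I've read, on pre-3.0 devices the SurfaceHolder backing a camera preview must be a push-buffers surface, and my constructor never sets that. A minimal sketch of the holder setup with that call added (a guess on my part, not verified on my device):

    SurfaceHolder holder = getHolder();
    holder.addCallback(this);
    // Reportedly required for Camera preview before API 11 (deprecated afterwards).
    holder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);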
Here is the Activity class:
public class NightCamActivity extends Activity {

    private GLSurfaceView mGLView;
    CameraSurfaceView surface_view;

    @Override
    public void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        // Create a GLSurfaceView instance and set it
        // as the ContentView for this Activity.
        Debug.out("Welcome");
        surface_view = new CameraSurfaceView(this);
        mGLView = new MySurfaceView(this);
        setContentView(mGLView);
        addContentView(surface_view, new LayoutParams(LayoutParams.FILL_PARENT, LayoutParams.FILL_PARENT));
    }

    @Override
    protected void onPause() {
        super.onPause();
        // The following call pauses the rendering thread.
        // If your OpenGL application is memory intensive,
        // you should consider de-allocating objects that
        // consume significant memory here.
        mGLView.onPause();
    }

    @Override
    protected void onResume() {
        super.onResume();
        // The following call resumes a paused rendering thread.
        // If you de-allocated graphic objects for onPause()
        // this is a good place to re-allocate them.
        mGLView.onResume();
    }
}
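A side note on the layout: I'm stacking the camera SurfaceView over the GLSurfaceView via addContentView(). If I understand surface compositing correctly, a z-order hint like the following might be needed so the two surfaces layer predictably (hypothetical tweak, untested):

    // Hypothetical: ask the compositor to place surface_view above mGLView's surface.
    surface_view.setZOrderMediaOverlay(true);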
MySurfaceView class:
class MySurfaceView extends GLSurfaceView {

    public MySurfaceView(NightCamActivity context) {
        super(context);
        // Create an OpenGL ES 2.0 context.
        Debug.out("Mysurfaceview welcome");
        setEGLContextClientVersion(2);
        // Set the Renderer for drawing on the GLSurfaceView.
        MyRenderer renderer = new MyRenderer();
        renderer.takeContext(context);
        context.surface_view.renderer = renderer;
        setRenderer(renderer);
    }
}
CameraSurfaceView class:
public class CameraSurfaceView extends SurfaceView implements SurfaceHolder.Callback, PreviewCallback {

    private Camera camera;
    Camera.Size use_size;
    MyRenderer renderer;

    public CameraSurfaceView(Context context) {
        super(context);
        SurfaceHolder holder = getHolder();
        holder.addCallback(this);
        Debug.out("Init CSV");
        camera = Camera.open();
    }

    public void surfaceCreated(SurfaceHolder holder) {
        Debug.out("SC");
        try {
            camera.setPreviewDisplay(holder);
        } catch (IOException e) {
            Debug.out("Could not set preview display for camera.");
        }
        camera.setPreviewCallback(this);
    }
    public void surfaceDestroyed(SurfaceHolder holder) {
        // Surface will be destroyed when we return, so stop the preview.
        // Because the CameraDevice object is not a shared resource, it's very
        // important to release it when the activity is paused.
        try {
            if (camera != null) {
                camera.setPreviewCallback(null); // stop callbacks before releasing
                camera.stopPreview();
                camera.release();
            }
        } catch (Exception e) {
            Debug.out("Camera release failure.");
        }
    }
    public void surfaceChanged(SurfaceHolder holder, int format, int w, int h) {
        Camera.Parameters parameters = camera.getParameters();
        List<Camera.Size> supportedPreviewSizes = parameters.getSupportedPreviewSizes();
        Camera.Size optimalPreviewSize = getOptimalPreviewSize(supportedPreviewSizes, w, h);
        if (optimalPreviewSize != null) {
            parameters.setPreviewSize(optimalPreviewSize.width, optimalPreviewSize.height);
            use_size = optimalPreviewSize; // remember the size; onPreviewFrame needs it
            camera.setParameters(parameters);
            camera.startPreview();
        }
    }
    static Camera.Size getOptimalPreviewSize(List<Camera.Size> sizes, int w, int h) {
        final double ASPECT_TOLERANCE = 0.1;
        final double MAX_DOWNSIZE = 1.5;
        double targetRatio = (double) w / h;
        if (sizes == null) return null;
        Camera.Size optimalSize = null;
        double minDiff = Double.MAX_VALUE;
        int targetHeight = h;
        // First pass: try to find a size that matches both the aspect ratio and the height.
        for (Camera.Size size : sizes) {
            double ratio = (double) size.width / size.height;
            double downsize = (double) size.width / w;
            if (downsize > MAX_DOWNSIZE) {
                // If the preview is a lot larger than our display surface, ignore it.
                // Reason: on some phones there is not enough heap available to show
                // the larger preview sizes.
                continue;
            }
            if (Math.abs(ratio - targetRatio) > ASPECT_TOLERANCE) continue;
            if (Math.abs(size.height - targetHeight) < minDiff) {
                optimalSize = size;
                minDiff = Math.abs(size.height - targetHeight);
            }
        }
        // Second pass: nothing matched the aspect ratio, so drop that requirement
        // but keep the MAX_DOWNSIZE requirement.
        if (optimalSize == null) {
            minDiff = Double.MAX_VALUE;
            for (Camera.Size size : sizes) {
                double downsize = (double) size.width / w;
                if (downsize > MAX_DOWNSIZE) {
                    continue;
                }
                if (Math.abs(size.height - targetHeight) < minDiff) {
                    optimalSize = size;
                    minDiff = Math.abs(size.height - targetHeight);
                }
            }
        }
        // Everything else failed; just take the closest height match.
        if (optimalSize == null) {
            minDiff = Double.MAX_VALUE;
            for (Camera.Size size : sizes) {
                if (Math.abs(size.height - targetHeight) < minDiff) {
                    optimalSize = size;
                    minDiff = Math.abs(size.height - targetHeight);
                }
            }
        }
        return optimalSize;
    }
    public void onPreviewFrame(byte[] data, Camera arg1) {
        Debug.out("PREVIEW FRAME:");
        byte[] pixels = new byte[use_size.width * use_size.height * 3];
        decodeYUV420SP(pixels, data, use_size.width, use_size.height);
        renderer.bindCameraTexture(pixels, use_size.width, use_size.height);
    }
    void decodeYUV420SP(byte[] rgb, byte[] yuv420sp, int width, int height) {
        final int frameSize = width * height;
        for (int j = 0, yp = 0; j < height; j++) {
            int uvp = frameSize + (j >> 1) * width, u = 0, v = 0;
            for (int i = 0; i < width; i++, yp++) {
                int y = (0xff & ((int) yuv420sp[yp])) - 16;
                if (y < 0) {
                    y = 0;
                }
                if ((i & 1) == 0) {
                    v = (0xff & yuv420sp[uvp++]) - 128;
                    u = (0xff & yuv420sp[uvp++]) - 128;
                }
                int y1192 = 1192 * y;
                int r = (y1192 + 1634 * v);
                int g = (y1192 - 833 * v - 400 * u);
                int b = (y1192 + 2066 * u);
                if (r < 0) {
                    r = 0;
                } else if (r > 262143) {
                    r = 262143;
                }
                if (g < 0) {
                    g = 0;
                } else if (g > 262143) {
                    g = 262143;
                }
                if (b < 0) {
                    b = 0;
                } else if (b > 262143) {
                    b = 262143;
                }
                // Write one byte per channel; the fixed-point values are scaled
                // by 2^10, so shift right by 10 to get back to 0..255.
                rgb[yp * 3] = (byte) (r >> 10);
                rgb[yp * 3 + 1] = (byte) (g >> 10);
                rgb[yp * 3 + 2] = (byte) (b >> 10);
            }
        }
    }
}
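For reference, the magic constants in decodeYUV420SP are just the BT.601 YUV-to-RGB coefficients scaled by 1024 (1192 ≈ 1.164 × 1024, 1634 ≈ 1.596 × 1024, and so on), which is why the results are shifted right by 10 at the end. The red channel written out on its own, as a sketch of the same arithmetic (redOf is my own name, not in the code above):

    // R = 1.164 * (Y - 16) + 1.596 * (V - 128), in 10-bit fixed point.
    static int redOf(int y, int v) { // y and v already offset, as in the loop above
        int r = (1192 * y + 1634 * v) >> 10;
        return r < 0 ? 0 : (r > 255 ? 255 : r); // clamp to one byte
    }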
Finally, the MyRenderer class:
public class MyRenderer implements GLSurfaceView.Renderer {

    private FloatBuffer vertices;
    private FloatBuffer texcoords;
    private int mProgram;
    private int maPositionHandle;
    private int gvTexCoordHandle;
    private int gvSamplerHandle;
    private static Context context;
    int[] camera_texture;

    public void onSurfaceCreated(GL10 unused, EGLConfig config) {
        initShapes();
        GLES20.glClearColor(0.0f, 1.0f, 0.2f, 1.0f);
        Debug.out("Hello init.");
        // Shaders
        int vertexShader = 0;
        int fragmentShader = 0;
        try {
            vertexShader = loadShader(GLES20.GL_VERTEX_SHADER, readFile("vertex.vsh"));
            fragmentShader = loadShader(GLES20.GL_FRAGMENT_SHADER, readFile("fragment.fsh"));
        } catch (IOException e) {
            Debug.out("The shaders could not be found.");
            e.printStackTrace();
        }
        mProgram = GLES20.glCreateProgram();             // create empty OpenGL program
        GLES20.glAttachShader(mProgram, vertexShader);   // add the vertex shader to the program
        GLES20.glAttachShader(mProgram, fragmentShader); // add the fragment shader to the program
        GLES20.glLinkProgram(mProgram);                  // create OpenGL program executables
        // Get handles. Note: a sampler is a uniform, not an attribute, so it
        // must be looked up with glGetUniformLocation.
        maPositionHandle = GLES20.glGetAttribLocation(mProgram, "vPosition");
        gvTexCoordHandle = GLES20.glGetAttribLocation(mProgram, "a_texCoord");
        gvSamplerHandle = GLES20.glGetUniformLocation(mProgram, "s_texture");
        GLES20.glPixelStorei(GLES20.GL_UNPACK_ALIGNMENT, 1);
        camera_texture = null;
    }
    private void initShapes() {
        float triangleCoords[] = {
            // X, Y, Z
            -1.0f, -1.0f, 0.0f,
             1.0f, -1.0f, 0.0f,
            -1.0f,  1.0f, 0.0f,
             1.0f,  1.0f, 0.0f,
        };
        // Texture coordinates run from 0 to 1, not -1 to 1.
        float texcoordf[] = {
            // S, T
            0.0f, 0.0f,
            1.0f, 0.0f,
            0.0f, 1.0f,
            1.0f, 1.0f,
        };
        // Initialize the vertex buffer for vertices.
        ByteBuffer vbb = ByteBuffer.allocateDirect(triangleCoords.length * 4);
        vbb.order(ByteOrder.nativeOrder()); // use the device hardware's native byte order
        vertices = vbb.asFloatBuffer();     // create a floating point buffer from the ByteBuffer
        vertices.put(triangleCoords);       // add the coordinates to the FloatBuffer
        vertices.position(0);               // set the buffer to read the first coordinate
        // Initialize the vertex buffer for texcoords.
        vbb = ByteBuffer.allocateDirect(texcoordf.length * 4);
        vbb.order(ByteOrder.nativeOrder());
        texcoords = vbb.asFloatBuffer();
        texcoords.put(texcoordf);
        texcoords.position(0);
    }
    private static String readFile(String path) throws IOException {
        AssetManager assetManager = context.getAssets();
        InputStream stream = assetManager.open(path);
        try {
            return new Scanner(stream).useDelimiter("\\A").next();
        } finally {
            stream.close();
        }
    }

    private int loadShader(int type, String shaderCode) {
        // Create a vertex shader (GLES20.GL_VERTEX_SHADER)
        // or a fragment shader (GLES20.GL_FRAGMENT_SHADER).
        int shader = GLES20.glCreateShader(type);
        // Add the source code to the shader and compile it.
        GLES20.glShaderSource(shader, shaderCode);
        GLES20.glCompileShader(shader);
        return shader;
    }
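    // (Hypothetical addition: loadShader above never checks the compile status,
    // so a broken shader would fail silently and the program would render
    // nothing. Something like this, called after glCompileShader, would surface
    // compile errors in LogCat.)
    private static void checkShaderCompiled(int shader) {
        int[] compiled = new int[1];
        GLES20.glGetShaderiv(shader, GLES20.GL_COMPILE_STATUS, compiled, 0);
        if (compiled[0] == 0) {
            Debug.out("Shader compile failed: " + GLES20.glGetShaderInfoLog(shader));
        }
    }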
    public void onDrawFrame(GL10 unused) {
        GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT | GLES20.GL_DEPTH_BUFFER_BIT);
        if (camera_texture == null) {
            return;
        }
        // Add the program to the OpenGL environment.
        GLES20.glUseProgram(mProgram);
        // Prepare the vertex and texture-coordinate data.
        GLES20.glVertexAttribPointer(maPositionHandle, 3, GLES20.GL_FLOAT, false, 0, vertices);
        GLES20.glVertexAttribPointer(gvTexCoordHandle, 2, GLES20.GL_FLOAT, false, 0, texcoords);
        GLES20.glEnableVertexAttribArray(maPositionHandle);
        GLES20.glEnableVertexAttribArray(gvTexCoordHandle);
        GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
        GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, camera_texture[0]);
        GLES20.glUniform1i(gvSamplerHandle, 0);
        // Draw the quad as a triangle strip.
        GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
        GLES20.glDisableVertexAttribArray(maPositionHandle);
        GLES20.glDisableVertexAttribArray(gvTexCoordHandle);
    }

    public void onSurfaceChanged(GL10 unused, int width, int height) {
        GLES20.glViewport(0, 0, width, height);
    }

    public void takeContext(Context ocontext) {
        Debug.out("Take context");
        context = ocontext;
    }
    void bindCameraTexture(byte[] data, int w, int h) {
        // Crop the top-left 256x256 block of the RGB frame into the texture
        // buffer (3 bytes per pixel).
        byte[] pixels = new byte[256 * 256 * 3];
        for (int y = 0; y < 256; y++) {
            for (int x = 0; x < 256; x++) {
                for (int c = 0; c < 3; c++) {
                    pixels[(y * 256 + x) * 3 + c] = data[(y * w + x) * 3 + c];
                }
            }
        }
        if (camera_texture == null) {
            camera_texture = new int[1];
        } else {
            GLES20.glDeleteTextures(1, camera_texture, 0);
        }
        GLES20.glGenTextures(1, camera_texture, 0);
        int tex = camera_texture[0];
        GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, tex);
        GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, GLES20.GL_RGB, 256, 256, 0, GLES20.GL_RGB, GLES20.GL_UNSIGNED_BYTE, ByteBuffer.wrap(pixels));
        GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR);
    }
}
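One more thing I plan to look at is tracing GL errors, since bindCameraTexture() is called from onPreviewFrame() on the main thread rather than the GLSurfaceView render thread, and I'm not sure the GL calls there are valid. A small helper I could drop into MyRenderer for that (a sketch; checkGlError is my own name):

    // Log any pending GL errors after an operation, e.g. checkGlError("glTexImage2D").
    static void checkGlError(String op) {
        int error;
        while ((error = GLES20.glGetError()) != GLES20.GL_NO_ERROR) {
            Debug.out(op + ": glError " + error);
        }
    }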