Hi,
I have a dream: a face detection library for B4A.
It would use OpenCV for Android through JavaCV (javacv - Java interface to OpenCV and more - Google Project Hosting; see the project page and its Downloads section).
Example of Android code:
B4X:
/*
* Copyright (C) 2010,2011 Samuel Audet
*
* FacePreview - A fusion of OpenCV's facedetect and Android's CameraPreview samples,
* with JavaCV + JavaCPP as the glue in between.
*
* This file was based on CameraPreview.java that came with the Samples for
* Android SDK API 8, revision 1 and contained the following copyright notice:
*
* Copyright (C) 2007 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*
* IMPORTANT - Make sure your AndroidManifest.xml file includes the following:
* <uses-sdk android:minSdkVersion="4" android:targetSdkVersion="4" />
* <uses-permission android:name="android.permission.CAMERA" />
* <uses-feature android:name="android.hardware.camera" />
* <application android:label="@string/app_name">
* <activity android:name="FacePreview"
* android:label="@string/app_name"
* android:screenOrientation="landscape">
*/
package com.googlecode.javacv.facepreview;
import android.app.Activity;
import android.app.AlertDialog;
import android.content.Context;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.ImageFormat;
import android.graphics.Paint;
import android.hardware.Camera;
import android.hardware.Camera.Size;
import android.os.Bundle;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
import android.view.View;
import android.view.Window;
import android.view.WindowManager;
import android.widget.FrameLayout;
import java.io.File;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.List;
import com.googlecode.javacpp.Loader;
import com.googlecode.javacv.cpp.opencv_objdetect;
import static com.googlecode.javacv.cpp.opencv_core.*;
import static com.googlecode.javacv.cpp.opencv_imgproc.*;
import static com.googlecode.javacv.cpp.opencv_objdetect.*;
import static com.googlecode.javacv.cpp.opencv_highgui.*;
// ----------------------------------------------------------------------
public class FacePreview extends Activity {
private FrameLayout layout;
private FaceView faceView;
private Preview mPreview;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
getWindow().addFlags(WindowManager.LayoutParams.FLAG_FULLSCREEN);
// Hide the window title.
requestWindowFeature(Window.FEATURE_NO_TITLE);
// Create our Preview view and set it as the content of our activity.
try {
layout = new FrameLayout(this);
faceView = new FaceView(this);
mPreview = new Preview(this, faceView);
layout.addView(mPreview);
layout.addView(faceView);
setContentView(layout);
} catch (IOException e) {
e.printStackTrace();
new AlertDialog.Builder(this).setMessage(e.getMessage()).create().show();
}
}
}
// ----------------------------------------------------------------------
class FaceView extends View implements Camera.PreviewCallback {
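// Preview frames are subsampled by this factor in each dimension before
// detection, to keep the per-frame processing cheap enough for the device.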
public static final int SUBSAMPLING_FACTOR = 4;
private IplImage grayImage;
private CvHaarClassifierCascade classifier;
private CvMemStorage storage;
private CvSeq faces;
public FaceView(FacePreview context) throws IOException {
super(context);
// Load the classifier file from Java resources.
File classifierFile = Loader.extractResource(getClass(),
"/com/googlecode/javacv/facepreview/haarcascade_frontalface_alt.xml",
context.getCacheDir(), "classifier", ".xml");
if (classifierFile == null || classifierFile.length() <= 0) {
throw new IOException("Could not extract the classifier file from Java resource.");
}
// Preload the opencv_objdetect module to work around a known bug.
Loader.load(opencv_objdetect.class);
classifier = new CvHaarClassifierCascade(cvLoad(classifierFile.getAbsolutePath()));
classifierFile.delete();
if (classifier.isNull()) {
throw new IOException("Could not load the classifier file.");
}
storage = CvMemStorage.create();
}
public void onPreviewFrame(final byte[] data, final Camera camera) {
try {
Camera.Size size = camera.getParameters().getPreviewSize();
processImage(data, size.width, size.height);
camera.addCallbackBuffer(data);
} catch (RuntimeException e) {
// The camera has probably just been released, ignore.
}
}
protected void processImage(byte[] data, int width, int height) {
// First, downsample our image and convert it into a grayscale IplImage
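// (the default NV21 preview data begins with the Y/luminance plane, so
// grayscale pixels can be read from it directly)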
int f = SUBSAMPLING_FACTOR;
if (grayImage == null || grayImage.width() != width/f || grayImage.height() != height/f) {
grayImage = IplImage.create(width/f, height/f, IPL_DEPTH_8U, 1);
}
int imageWidth = grayImage.width();
int imageHeight = grayImage.height();
int dataStride = f*width;
int imageStride = grayImage.widthStep();
ByteBuffer imageBuffer = grayImage.getByteBuffer();
for (int y = 0; y < imageHeight; y++) {
int dataLine = y*dataStride;
int imageLine = y*imageStride;
for (int x = 0; x < imageWidth; x++) {
imageBuffer.put(imageLine + x, data[dataLine + f*x]);
}
}
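// Run the Haar cascade on the grayscale image: scale step 1.1, at least 3
// neighboring detections per face, with Canny pruning of low-contrast regions.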
faces = cvHaarDetectObjects(grayImage, classifier, storage, 1.1, 3, CV_HAAR_DO_CANNY_PRUNING);
postInvalidate();
cvClearMemStorage(storage);
}
@Override
protected void onDraw(Canvas canvas) {
Paint paint = new Paint();
paint.setColor(Color.RED);
paint.setTextSize(20);
String s = "FacePreview - This side up.";
float textWidth = paint.measureText(s);
canvas.drawText(s, (getWidth()-textWidth)/2, 20, paint);
if (faces != null) {
paint.setStrokeWidth(2);
paint.setStyle(Paint.Style.STROKE);
float scaleX = (float)getWidth()/grayImage.width();
float scaleY = (float)getHeight()/grayImage.height();
int total = faces.total();
for (int i = 0; i < total; i++) {
CvRect r = new CvRect(cvGetSeqElem(faces, i));
int x = r.x(), y = r.y(), w = r.width(), h = r.height();
canvas.drawRect(x*scaleX, y*scaleY, (x+w)*scaleX, (y+h)*scaleY, paint);
}
}
}
}
// ----------------------------------------------------------------------
class Preview extends SurfaceView implements SurfaceHolder.Callback {
SurfaceHolder mHolder;
Camera mCamera;
Camera.PreviewCallback previewCallback;
Preview(Context context, Camera.PreviewCallback previewCallback) {
super(context);
this.previewCallback = previewCallback;
// Install a SurfaceHolder.Callback so we get notified when the
// underlying surface is created and destroyed.
mHolder = getHolder();
mHolder.addCallback(this);
mHolder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
}
public void surfaceCreated(SurfaceHolder holder) {
// The Surface has been created, acquire the camera and tell it where
// to draw.
mCamera = Camera.open();
try {
mCamera.setPreviewDisplay(holder);
} catch (IOException exception) {
mCamera.release();
mCamera = null;
// TODO: add more exception handling logic here
}
}
public void surfaceDestroyed(SurfaceHolder holder) {
// Surface will be destroyed when we return, so stop the preview.
// Because the CameraDevice object is not a shared resource, it's very
// important to release it when the activity is paused.
mCamera.stopPreview();
mCamera.release();
mCamera = null;
}
private Size getOptimalPreviewSize(List<Size> sizes, int w, int h) {
final double ASPECT_TOLERANCE = 0.05;
double targetRatio = (double) w / h;
if (sizes == null) return null;
Size optimalSize = null;
double minDiff = Double.MAX_VALUE;
int targetHeight = h;
// Try to find a size that matches the target aspect ratio and height
for (Size size : sizes) {
double ratio = (double) size.width / size.height;
if (Math.abs(ratio - targetRatio) > ASPECT_TOLERANCE) continue;
if (Math.abs(size.height - targetHeight) < minDiff) {
optimalSize = size;
minDiff = Math.abs(size.height - targetHeight);
}
}
// Could not find a size matching the aspect ratio; ignore that requirement
if (optimalSize == null) {
minDiff = Double.MAX_VALUE;
for (Size size : sizes) {
if (Math.abs(size.height - targetHeight) < minDiff) {
optimalSize = size;
minDiff = Math.abs(size.height - targetHeight);
}
}
}
return optimalSize;
}
public void surfaceChanged(SurfaceHolder holder, int format, int w, int h) {
// Now that the size is known, set up the camera parameters and begin
// the preview.
Camera.Parameters parameters = mCamera.getParameters();
List<Size> sizes = parameters.getSupportedPreviewSizes();
Size optimalSize = getOptimalPreviewSize(sizes, w, h);
parameters.setPreviewSize(optimalSize.width, optimalSize.height);
mCamera.setParameters(parameters);
if (previewCallback != null) {
mCamera.setPreviewCallbackWithBuffer(previewCallback);
Camera.Size size = parameters.getPreviewSize();
byte[] data = new byte[size.width*size.height*
ImageFormat.getBitsPerPixel(parameters.getPreviewFormat())/8];
mCamera.addCallbackBuffer(data);
}
mCamera.startPreview();
}
}
I think it is possible to "mix" this code with Erel's camera library (a rough sketch of one possible bridge follows Erel's source below). What do you think?
JP
Erel's camera lib : http://www.b4x.com/forum/additional...2-new-camera-library-v1-00-a-2.html#post41604
B4X:
package anywheresoftware.b4a.objects;
import java.io.IOException;
import java.util.concurrent.atomic.AtomicInteger;
import android.hardware.Camera; //
import android.hardware.Camera.Parameters;
import android.view.SurfaceHolder;
import android.view.SurfaceView; //
import android.view.ViewGroup;
import android.view.ViewGroup.LayoutParams;
import anywheresoftware.b4a.BA;
import anywheresoftware.b4a.BALayout;
import anywheresoftware.b4a.BA.ActivityObject;
import anywheresoftware.b4a.BA.Events;
import anywheresoftware.b4a.BA.Permissions;
import anywheresoftware.b4a.BA.ShortName;
import anywheresoftware.b4a.BA.Version;
import anywheresoftware.b4a.keywords.Common;
/**
* The camera object allows you to use the device's back-facing camera to take pictures and show preview images.
*Currently the camera orientation is always landscape. Usually you will want to force the application to also be in landscape mode (Project - Supported Orientations).
*Only one process can access the camera at any time. Therefore it is highly recommended to initialize the camera object in Activity_Resume and release it in Activity_Pause.
*A working example with explanations is available <link>here|http://www.b4x.com/forum/basic4android-getting-started-tutorials/6891-take-pictures-internal-camera.html</link>.
*/
@ActivityObject
@Permissions(values={"android.permission.CAMERA"})
@ShortName("Camera")
@Version(1.0f)
@Events(values={"Ready (Success As Boolean)", "PictureTaken (Data() As Byte)"})
public class CameraW {
private static Camera c;
private SurfaceView sv;
private String eventName;
private BA ba;
private AtomicInteger readyCount = new AtomicInteger();
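// The Ready event is raised only once readyCount reaches 2, i.e. when both
// the preview surface has been created and the camera has been opened.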
/**
* Initializes the camera.
*Panel - The preview images will be displayed on the panel.
*EventName - Events subs prefix.
*The Ready event will be raised when the camera has finished opening.
*/
public void Initialize(final BA ba, ViewGroup Panel, String EventName) {
this.ba = ba;
readyCount.set(0);
this.eventName = EventName.toLowerCase(BA.cul);
sv = new SurfaceView(ba.context);
anywheresoftware.b4a.BALayout.LayoutParams lp = new anywheresoftware.b4a.BALayout.LayoutParams(0, 0,
Panel.getLayoutParams().width, Panel.getLayoutParams().height);
Panel.addView(sv, lp);
if (c != null) {
readyCount.set(1);
}
sv.getHolder().setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
sv.getHolder().setFixedSize(Panel.getLayoutParams().width, Panel.getLayoutParams().height);
sv.getHolder().addCallback(new SurfaceHolder.Callback() {
@Override
public void surfaceChanged(SurfaceHolder holder, int format,
int width, int height) {
}
@Override
public void surfaceCreated(SurfaceHolder holder) {
if (readyCount.addAndGet(1) == 2) {
ba.raiseEvent(null, eventName + "_ready", true);
}
}
@Override
public void surfaceDestroyed(SurfaceHolder holder) {
}
});
if (c == null) {
ba.submitRunnable(new Runnable() {
@Override
public void run() {
try {
c = Camera.open();
if (readyCount.addAndGet(1) == 2) {
ba.raiseEventFromDifferentThread(null, CameraW.this, -1,eventName + "_ready", false, new Object[] {true});
}
}
catch (Exception e) {
Release();
ba.raiseEventFromDifferentThread(null, CameraW.this, -1,eventName + "_ready", false, new Object[] {false});
}
}
}, this, -1);
}
}
/**
* Starts displaying the preview images.
*/
public void StartPreview() throws IOException {
c.setPreviewDisplay(sv.getHolder());
c.startPreview();
}
/**
* Stops displaying the preview images.
*/
public void StopPreview() {
if (c != null)
c.stopPreview();
}
/**
* Releases the camera object and allows other processes to access the camera.
*/
public void Release() {
if (sv != null) {
ViewGroup vg = (ViewGroup) sv.getParent();
vg.removeView(sv);
sv = null;
}
if (c != null) {
c.release();
c = null;
}
}
/**
* Takes a picture. When the picture is ready, the PictureTaken event will be raised.
*You should not call TakePicture while another picture is being taken.
*The preview images are stopped after calling this method. You can call StartPreview to restart the preview images.
*/
public void TakePicture() {
c.takePicture(null, null, new Camera.PictureCallback() {
@Override
public void onPictureTaken(byte[] data, Camera camera) {
ba.raiseEvent(null, eventName + "_picturetaken", data);
}
});
}
}
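To make the idea a bit more concrete, here is a very rough, untested sketch of how the JavaCV detection loop from FacePreview could be hooked into a B4A-style library class similar to CameraW. Everything here is an assumption for illustration only: the class name FaceDetectionBridge, the Initialize signature and the _FaceDetected event do not exist anywhere, and the callback would still need some way to be registered on CameraW's internal Camera object (for example a new CameraW method calling setPreviewCallbackWithBuffer). The detection itself is exactly the same cvHaarDetectObjects call used in FacePreview above; the only new part is raising a B4A event with the number of faces instead of drawing rectangles.
B4X:
// ROUGH SKETCH ONLY - hypothetical bridge class, not a working library.
package anywheresoftware.b4a.objects;

import java.io.IOException;
import java.nio.ByteBuffer;

import android.hardware.Camera;

import anywheresoftware.b4a.BA;

import com.googlecode.javacpp.Loader;
import com.googlecode.javacv.cpp.opencv_objdetect;
import static com.googlecode.javacv.cpp.opencv_core.*;
import static com.googlecode.javacv.cpp.opencv_objdetect.*;

public class FaceDetectionBridge implements Camera.PreviewCallback {
    private static final int SUBSAMPLING_FACTOR = 4;
    private IplImage grayImage;
    private CvHaarClassifierCascade classifier;
    private CvMemStorage storage;
    private BA ba;
    private String eventName;

    // CascadePath would point to haarcascade_frontalface_alt.xml copied to the device.
    public void Initialize(BA ba, String EventName, String CascadePath) throws IOException {
        this.ba = ba;
        this.eventName = EventName.toLowerCase(BA.cul);
        Loader.load(opencv_objdetect.class); // preload, as in FacePreview above
        classifier = new CvHaarClassifierCascade(cvLoad(CascadePath));
        if (classifier.isNull())
            throw new IOException("Could not load the classifier file.");
        storage = CvMemStorage.create();
    }

    // Would need to be registered on CameraW's Camera instance, e.g. through a
    // new CameraW method that calls c.setPreviewCallbackWithBuffer(this).
    @Override
    public void onPreviewFrame(byte[] data, Camera camera) {
        Camera.Size size = camera.getParameters().getPreviewSize();
        int f = SUBSAMPLING_FACTOR;
        if (grayImage == null || grayImage.width() != size.width / f
                || grayImage.height() != size.height / f) {
            grayImage = IplImage.create(size.width / f, size.height / f, IPL_DEPTH_8U, 1);
        }
        // Copy the subsampled Y (luminance) plane of the NV21 frame, exactly as
        // FacePreview.processImage does above.
        ByteBuffer buf = grayImage.getByteBuffer();
        for (int y = 0; y < grayImage.height(); y++) {
            for (int x = 0; x < grayImage.width(); x++) {
                buf.put(y * grayImage.widthStep() + x, data[y * f * size.width + f * x]);
            }
        }
        CvSeq faces = cvHaarDetectObjects(grayImage, classifier, storage,
                1.1, 3, CV_HAAR_DO_CANNY_PRUNING);
        // Raise a (hypothetical) B4A event with the number of detected faces.
        ba.raiseEvent(null, eventName + "_facedetected", faces.total());
        cvClearMemStorage(storage);
        camera.addCallbackBuffer(data); // return the buffer for the next frame
    }
}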