I have had some luck using JavaCV inline, here is how to do it:
Download this:
http://search.maven.org/remotecontent?filepath=org/bytedeco/javacv/0.11/javacv-0.11-bin.zip
Extract the .zip
And copy the following .jar files to your B4A Libraries directory or to your app's additional libraries directory:
"javacpp" "javacv" "flandmark" "artoolkitplus" "videoinput" "libfreenect" "flycapture" "ffmpeg" "opencv" "flandmark-android-arm" "artoolkitplus-android-arm" "ffmpeg-android-arm" "opencv-android-arm"
Enable JavaObject Lib
Add this to your code:
I got the java code from the "Sample Usage" here:
https://github.com/bytedeco/javacv
Notice I cut out the "Smoother" class from their example and just got to the meat.
Dump this in Process Globals:
Activity create needs this:
Then put a .bmp file named reuse.bmp into your AssetsDir, to do this you just click the Files tab (bottom right corner) in B4A press "Add Files" button and select something named reuse.bmp.
Then you can call this code (put in activity create if you want):
It will load reuse.bmp from File.DirAssets, copy it to File.DirRootExternal, then smooth (overwrite) File.DirRootExternal/reuse.bmp four times (repeating makes the difference easier to see).
*Update
BlobDetect() detects the biggest "blob" change between two video preview frame dumps.
SubtractBytes and PreviewBytes are NV21 byte arrays captured from frame dumps of the camera's video feed.
Sub Camera1_Preview (PreviewPic() As Byte)
subtractBytes = PreviewPic
..
previewBytes = PreviewPic
Download this:
http://search.maven.org/remotecontent?filepath=org/bytedeco/javacv/0.11/javacv-0.11-bin.zip
Extract the .zip
And copy the following .jar files to your B4A Libraries directory or to your app's additional libraries directory:
"javacpp" "javacv" "flandmark" "artoolkitplus" "videoinput" "libfreenect" "flycapture" "ffmpeg" "opencv" "flandmark-android-arm" "artoolkitplus-android-arm" "ffmpeg-android-arm" "opencv-android-arm"
Enable JavaObject Lib
Add this to your code:
B4X:
#AdditionalJar: opencv-android-arm
#AdditionalJar: javacv
#AdditionalJar: javacpp
#AdditionalJar: flandmark-android-arm
#AdditionalJar: ffmpeg-android-arm
#AdditionalJar: artoolkitplus-android-arm
#AdditionalJar: videoinput
#AdditionalJar: libfreenect
#AdditionalJar: flycapture
#AdditionalJar: flandmark
#AdditionalJar: ffmpeg
#AdditionalJar: artoolkitplus
#AdditionalJar: opencv
#If JAVA
import static org.bytedeco.javacpp.opencv_core.*;
import static org.bytedeco.javacpp.opencv_imgproc.*;
import static org.bytedeco.javacpp.opencv_highgui.*;
import java.util.Random; //for testblob
import android.graphics.Bitmap; //for return of bitmap type
import anywheresoftware.b4a.keywords.Common; //for raiseevent to send values back to b4a
public static void smooth(String filename) {
IplImage image = cvLoadImage(filename);
if (image != null) {
cvSmooth(image, image);
cvSaveImage(filename, image);
cvReleaseImage(image);
}
}
public static void open(String filename) {
IplImage image = cvLoadImage(filename);
if (image != null) {
IplConvKernel mat=cvCreateStructuringElementEx(5, 5, 2, 2, CV_SHAPE_RECT);
cvMorphologyEx(image, image, null, mat, MORPH_OPEN, 1);
cvSaveImage(filename, image);
cvReleaseImage(image);
}
}
public static void JCVerode(String filename) {
IplImage image = cvLoadImage(filename);
if (image != null) {
IplConvKernel mat=cvCreateStructuringElementEx(5, 5, 2, 2, CV_SHAPE_RECT);
cvErode(image, image, mat, 1);
cvSaveImage(filename, image);
cvReleaseImage(image);
}
}
public static void JCVerodeBig(String filename) {
IplImage image = cvLoadImage(filename);
if (image != null) {
IplConvKernel mat=cvCreateStructuringElementEx(9, 9, 4, 4, CV_SHAPE_ELLIPSE);
cvErode(image, image, mat, 1);
cvSaveImage(filename, image);
cvReleaseImage(image);
}
}
public Bitmap BlobDetect(byte[] data, byte[] sub_data, int width, int height, Boolean diffAbs, Boolean thresholdOTSU, int thresholdL, int thresholdH, int closeiter, int closesize, int closeanchor) {
long startTime = System.currentTimeMillis();
IplImage yuvimage = IplImage.create(width, height * 3 / 2, IPL_DEPTH_8U, 1); //1 isntead of 2 for last param
yuvimage.getByteBuffer().put(data);
IplImage grayimage = IplImage.create(width, height, IPL_DEPTH_8U, 1); // 3-> 1 last parameter for gray
cvCvtColor(yuvimage, grayimage, CV_YUV2GRAY_NV21);// YUV2BGR ->YUV2GRAY
IplImage sub_yuvimage = IplImage.create(width, height * 3 / 2, IPL_DEPTH_8U, 1);
sub_yuvimage.getByteBuffer().put(sub_data);
IplImage sub_grayimage = IplImage.create(width, height, IPL_DEPTH_8U, 1);
cvCvtColor(sub_yuvimage, sub_grayimage, CV_YUV2GRAY_NV21);
IplImage subtractedimage = IplImage.create(width, height, IPL_DEPTH_8U, 1);
if(!diffAbs)cvSub(sub_grayimage,grayimage , subtractedimage);
if(diffAbs)cvAbsDiff(sub_grayimage, grayimage, subtractedimage);
cvFlip(subtractedimage, null, 1);
cvNormalize(subtractedimage, subtractedimage, 0, 255, CV_MINMAX, null);
IplImage subtractedimageO = IplImage.create(width, height, IPL_DEPTH_8U, 1);
subtractedimageO = cvCloneImage(subtractedimage);
double maxcompactarea = 0;
double maxcompact = 0;
int maxcompactthresh = 0;
IplImage image = IplImage.create(width, height, IPL_DEPTH_8U, 3);
CvSeq bigContour = new CvSeq();
for(int i=10; i<125; i = i + 5){
float maxarea =0;
double perimeter =0;
subtractedimage = cvCloneImage(subtractedimageO);
CvScalar color = CV_RGB( 255 - (i*2) , 255 - (i*2), 255 - (i*2));
//if(!diffAbs)cvSub(sub_grayimage,grayimage , subtractedimage);
//if(diffAbs)cvAbsDiff(sub_grayimage, grayimage, subtractedimage);
//cvFlip(subtractedimage, null, 1);
//cvNormalize(subtractedimage, subtractedimage, 0, 255, CV_MINMAX, null);
//if(thresholdOTSU)cvThreshold(subtractedimage, subtractedimage, 100, 255, CV_THRESH_OTSU);
//if(!thresholdOTSU)cvThreshold(subtractedimage, subtractedimage, thresholdL, thresholdH, THRESH_TOZERO);
cvThreshold(subtractedimage, subtractedimage, i, thresholdH, THRESH_TOZERO);
IplConvKernel mat=cvCreateStructuringElementEx(closesize, closesize, closeanchor, closeanchor, CV_SHAPE_ELLIPSE);
cvMorphologyEx(subtractedimage, subtractedimage, null, mat, MORPH_CLOSE, closeiter);
CvMemStorage mem;
CvSeq contours = new CvSeq();
CvSeq ptr = new CvSeq();
mem = cvCreateMemStorage(0);
cvFindContours(subtractedimage, mem, contours, Loader.sizeof(CvContour.class) , CV_RETR_EXTERNAL, CV_CHAIN_APPROX_SIMPLE, cvPoint(0,0));
Random rand = new Random();
//for (ptr = contours; ptr != null; ptr = ptr.h_next()) {
while ( contours != null && !contours.isNull() ) {
if ( contours.elem_size() > 0 ) {
//Color randomColor = new Color(rand.nextFloat(), rand.nextFloat(), rand.nextFloat());
//CvScalar color = CV_RGB( rand.nextFloat() * 255, rand.nextFloat() * 255, rand.nextFloat() * 255);
////cvDrawContours(image, ptr, color, CV_RGB(0,0,0), -1, CV_FILLED, 8, cvPoint(0,0));
//cvDrawContours(image, contours , color, CV_RGB(0,0,0), -1, CV_FILLED, 8, cvPoint(0,0));
//BA.Log("Area: " + String.valueOf(cvContourArea(contours)));
if (cvContourArea(contours) > maxarea) {
maxarea = (float) cvContourArea(contours);
perimeter = cvContourPerimeter(contours);
bigContour = contours;
}
//Common.CallSubNew2(mostCurrent.activityBA,main.getObject(),"myevent_fire",cvContourArea(contours));
}
contours = contours.h_next();
}
double compact = 4 * 3.14159 * maxarea / Math.pow(perimeter,2);
BA.Log("i: " + i + " Area: " + maxarea + " perimeter: " + perimeter + " compactness: " + compact);
if (compact > maxcompact && maxarea > 5000 && maxarea < 40000) {
maxcompact = compact;
maxcompactthresh = i;
maxcompactarea = maxarea;
image = IplImage.create(width, height, IPL_DEPTH_8U, 3);
cvDrawContours(image, bigContour , CV_RGB(255,255,255), CV_RGB(0,0,0), -1, CV_FILLED, 8, cvPoint(0,0));
}
}
BA.Log("maxcompact: " + maxcompact + " maxcompactthresh: " + maxcompactthresh + " maxcompactarea: " + maxcompactarea);
//cvSaveImage(filename, image);
Common.CallSubNew2(mostCurrent.activityBA,main.getObject(),"myevent_fire",maxcompactarea);
IplImage imageBitmap = IplImage.create(width, height, IPL_DEPTH_8U, 4);
cvCvtColor(image, imageBitmap, CV_BGR2BGRA);
Bitmap bitmap = Bitmap.createBitmap(width, height, Bitmap.Config.ARGB_8888);
bitmap.copyPixelsFromBuffer(imageBitmap.getByteBuffer());
BA.Log("TestBlobExternal Time: " + String.valueOf(System.currentTimeMillis() - startTime));
//return (System.currentTimeMillis() - startTime);
return bitmap;
}
#End If
I got the java code from the "Sample Usage" here:
https://github.com/bytedeco/javacv
Notice I cut out the "Smoother" class from their example and just got to the meat.
Dump this in Process Globals:
B4X:
Sub Process_Globals
Private NativeMe As JavaObject
..
Activity create needs this:
B4X:
Sub Activity_Create(FirstTime As Boolean)
If FirstTime Then
NativeMe.InitializeContext
End If
Then put a .bmp file named reuse.bmp into your AssetsDir, to do this you just click the Files tab (bottom right corner) in B4A press "Add Files" button and select something named reuse.bmp.
Then you can call this code (put in activity create if you want):
B4X:
File.Copy(File.DirAssets,"reuse.bmp",File.DirRootExternal,"reuse.bmp")
Dim s As String = File.DirRootExternal & "/reuse.bmp"
NativeMe.RunMethod("smooth", Array (s))
NativeMe.RunMethod("smooth", Array (s))
NativeMe.RunMethod("smooth", Array (s))
NativeMe.RunMethod("smooth", Array (s))
Dim subtractBytes() As Byte
Dim previewBytes() As Byte 'dropimagebytes
If blobDet Then blobsBitmap = NativeMe.RunMethod("BlobDetect", Array (previewBytes, subtractBytes, 640, 480, diffAbs, thresholdOTSU, thresholdL, thresholdH, closeiter, closesize, closeanchor))
*Update
BlobDetect() detects the biggest "blob" change between two video preview frame dumps.
SubtractBytes and PreviewBytes are NV21 byte arrays captured from frame dumps of the camera's video feed.
Sub Camera1_Preview (PreviewPic() As Byte)
subtractBytes = PreviewPic
..
previewBytes = PreviewPic
Last edited: