Difference between revisions of "Example Android Gradient"

From BoofCV
Jump to navigationJump to search
m
m
Line 7: Line 7:
Demonstration of how to capture and process a video stream in real-time using BoofCV on an Android device.  On Android devices, video streams are accessed inside a camera preview, which require several hoops to be jumped through.  What this example does is capture the image in NV21 format, convert it into an ImageUInt8, compute the image gradient, visualize the gradient in a Bitmap image, and display the results.  Note that the example below is not entirely self contained, see the complete project for additional files.
Demonstration of how to capture and process a video stream in real-time using BoofCV on an Android device.  On Android devices, video streams are accessed inside a camera preview, which require several hoops to be jumped through.  What this example does is capture the image in NV21 format, convert it into an ImageUInt8, compute the image gradient, visualize the gradient in a Bitmap image, and display the results.  Note that the example below is not entirely self contained, see the complete project for additional files.


Example File: [https://github.com/lessthanoptimal/BoofCV/blob/v0.16/integration/android/examples/video/src/org/boofcv/example/android/VideoActivity.java VideoActivity.java]
Example File: [https://github.com/lessthanoptimal/BoofCV/blob/v0.23/integration/android/examples/video/src/org/boofcv/example/android/VideoActivity.java VideoActivity.java]


Complete Project: [https://github.com/lessthanoptimal/BoofCV/blob/v0.16/integration/android/examples/video Android Project]
Complete Project: [https://github.com/lessthanoptimal/BoofCV/blob/v0.23/integration/android/examples/video Android Project]


Concepts:
Concepts:
Line 25: Line 25:
<syntaxhighlight lang="java">
<syntaxhighlight lang="java">
/**
/**
  * Demonstration of how to process a video stream on an Android device using BoofCV.  Most of the code below
  * Demonstration of how to process a video stream on an Android device using BoofCV.  Most of the drudgery of
  * deals with handling Android and all of its quirks.  Video streams can be accessed in Android by processing
  * video processing is handled by {@link VideoDisplayActivity}.  This class still needs to tell it which
* a camera preview.  Data from a camera preview comes in an NV21 image format, which needs to be converted.
  * camera to use and needs to select the optimal resolution.  The actual processing is done by {@link ShowGradient}
  * After it has been converted it needs to be processed and then displayed.  Note that several locks are required
  * which is passed into the super class when {@link #onResume()} is called.
  * to avoid the three threads (GUI, camera preview, and processing) from interfering with each other.
  *
  *
  * @author Peter Abeles
  * @author Peter Abeles
  */
  */
public class VideoActivity extends Activity implements Camera.PreviewCallback {
public class VideoActivity extends VideoDisplayActivity
 
{
// camera and display objects
private Camera mCamera;
private Visualization mDraw;
private CameraPreview mPreview;
 
// computes the image gradient
private ImageGradient<ImageUInt8,ImageSInt16> gradient = FactoryDerivative.three(ImageUInt8.class, ImageSInt16.class);
 
// Two images are needed to store the converted preview image to prevent a thread conflict from occurring
private ImageUInt8 gray1,gray2;
private ImageSInt16 derivX,derivY;
 
// Android image data used for displaying the results
private Bitmap output;
// temporary storage that's needed when converting from BoofCV to Android image data types
private byte[] storage;
 
// Thread where image data is processed
private ThreadProcess thread;
 
// Object used for synchronizing gray images
private final Object lockGray = new Object();
// Object used for synchronizing output image
private final Object lockOutput = new Object();
 
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
 
requestWindowFeature(Window.FEATURE_NO_TITLE);
setContentView(R.layout.video);
 
// Used to visualize the results
mDraw = new Visualization(this);
 
// Create our Preview view and set it as the content of our activity.
mPreview = new CameraPreview(this,this,true);
 
FrameLayout preview = (FrameLayout) findViewById(R.id.camera_preview);
 
preview.addView(mPreview);
preview.addView(mDraw);
}
 
@Override
@Override
protected void onResume() {
protected void onResume() {
super.onResume();
super.onResume();
setUpAndConfigureCamera();
setProcessing( new ShowGradient());
 
// for fun you can display the FPS by uncommenting the line below.
// The FPS will vary depending on processing time and shutter speed,
// which is dependent on lighting conditions
// setShowFPS(true);
}
}


@Override
@Override
protected void onPause() {
protected Camera openConfigureCamera( Camera.CameraInfo cameraInfo )
super.onPause();
{
Camera mCamera = selectAndOpenCamera(cameraInfo);
Camera.Parameters param = mCamera.getParameters();


// stop the camera preview and all processing
// Select the preview size closest to 320x240
if (mCamera != null){
// Smaller images are recommended because some computer vision operations are very expensive
mPreview.setCamera(null);
List<Camera.Size> sizes = param.getSupportedPreviewSizes();
mCamera.setPreviewCallback(null);
Camera.Size s = sizes.get(closest(sizes,320,240));
mCamera.stopPreview();
param.setPreviewSize(s.width,s.height);
mCamera.release();
mCamera.setParameters(param);
mCamera = null;


thread.stopThread();
return mCamera;
thread = null;
}
}
}


/**
/**
* Sets up the camera if it is not already setup.
* Step through the camera list and select a camera.  It is also possible that there is no camera.
* The camera hardware requirement in AndroidManifest.xml was turned off so that devices with just
* a front facing camera can be found.  Newer SDK's handle this in a more sane way, but with older devices
* you need this work around.
*/
*/
private void setUpAndConfigureCamera() {
private Camera selectAndOpenCamera(Camera.CameraInfo info) {
// Open and configure the camera
int numberOfCameras = Camera.getNumberOfCameras();
mCamera = Camera.open();


Camera.Parameters param = mCamera.getParameters();
int selected = -1;


// Select the preview size closest to 320x240
for (int i = 0; i < numberOfCameras; i++) {
// Smaller images are recommended because some computer vision operations are very expensive
Camera.getCameraInfo(i, info);
List<Camera.Size> sizes = param.getSupportedPreviewSizes();
Camera.Size s = sizes.get(closest(sizes,320,240));
param.setPreviewSize(s.width,s.height);
mCamera.setParameters(param);


// declare image data
if( info.facing == Camera.CameraInfo.CAMERA_FACING_BACK ) {
gray1 = new ImageUInt8(s.width,s.height);
selected = i;
gray2 = new ImageUInt8(s.width,s.height);
break;
derivX = new ImageSInt16(s.width,s.height);
} else {
derivY = new ImageSInt16(s.width,s.height);
// default to a front facing camera if a back facing one can't be found
output = Bitmap.createBitmap(s.width,s.height,Bitmap.Config.ARGB_8888 );
selected = i;
storage = ConvertBitmap.declareStorage(output, storage);
}
}


// start image processing thread
if( selected == -1 ) {
thread = new ThreadProcess();
dialogNoCamera();
thread.start();
return null; // won't ever be called
} else {
return Camera.open(selected);
}
}


// Start the video feed by passing it to mPreview
/**
mPreview.setCamera(mCamera);
* Gracefully handle the situation where a camera could not be found
*/
private void dialogNoCamera() {
AlertDialog.Builder builder = new AlertDialog.Builder(this);
builder.setMessage("Your device has no cameras!")
.setCancelable(false)
.setPositiveButton("OK", new DialogInterface.OnClickListener() {
public void onClick(DialogInterface dialog, int id) {
System.exit(0);
}
});
AlertDialog alert = builder.create();
alert.show();
}
}


Line 155: Line 129:


return best;
return best;
}
/**
* Called each time a new image arrives in the data stream.
*/
@Override
public void onPreviewFrame(byte[] bytes, Camera camera) {
// convert from NV21 format into gray scale
synchronized (lockGray) {
ConvertNV21.nv21ToGray(bytes,gray1.width,gray1.height,gray1);
}
// Can only do trivial amounts of image processing inside this function or else bad stuff happens.
// To work around this issue most of the processing has been pushed onto a thread and the call below
// tells the thread to wake up and process another image
thread.interrupt();
}
/**
* Draws on top of the video stream for visualizing computer vision results
*/
private class Visualization extends SurfaceView {
Activity activity;
public Visualization(Activity context ) {
super(context);
this.activity = context;
// This call is necessary, or else the
// draw method will not be called.
setWillNotDraw(false);
}
@Override
protected void onDraw(Canvas canvas){
synchronized ( lockOutput ) {
int w = canvas.getWidth();
int h = canvas.getHeight();
// fill the window and center it
double scaleX = w/(double)output.getWidth();
double scaleY = h/(double)output.getHeight();
double scale = Math.min(scaleX,scaleY);
double tranX = (w-scale*output.getWidth())/2;
double tranY = (h-scale*output.getHeight())/2;
canvas.translate((float)tranX,(float)tranY);
canvas.scale((float)scale,(float)scale);
// draw the image
canvas.drawBitmap(output,0,0,null);
}
}
}
/**
* External thread used to do more time consuming image processing
*/
private class ThreadProcess extends Thread {
// true if a request has been made to stop the thread
volatile boolean stopRequested = false;
// true if the thread is running and can process more data
volatile boolean running = true;
/**
* Blocks until the thread has stopped
*/
public void stopThread() {
stopRequested = true;
while( running ) {
thread.interrupt();
Thread.yield();
}
}
@Override
public void run() {
while( !stopRequested ) {
// Sleep until it has been told to wake up
synchronized ( Thread.currentThread() ) {
try {
wait();
} catch (InterruptedException ignored) {}
}
// process the most recently converted image by swapping image buffered
synchronized (lockGray) {
ImageUInt8 tmp = gray1;
gray1 = gray2;
gray2 = tmp;
}
// process the image and compute its gradient
gradient.process(gray2,derivX,derivY);
// render the output in a synthetic color image
synchronized ( lockOutput ) {
VisualizeImageData.colorizeGradient(derivX,derivY,-1,output,storage);
}
mDraw.postInvalidate();
}
running = false;
}
}
}
}
}
</syntaxhighlight>
</syntaxhighlight>

Revision as of 05:51, 28 March 2016

Demonstration of how to capture and process a video stream in real-time using BoofCV on an Android device. On Android devices, video streams are accessed inside a camera preview, which require several hoops to be jumped through. What this example does is capture the image in NV21 format, convert it into an ImageUInt8, compute the image gradient, visualize the gradient in a Bitmap image, and display the results. Note that the example below is not entirely self contained, see the complete project for additional files.

Example File: VideoActivity.java

Complete Project: Android Project

Concepts:

  • Android
  • Camera Preview
  • Image Gradient

Related Tutorial:

Related Examples:

Example Code

/**
 * Demonstration of how to process a video stream on an Android device using BoofCV.  Most of the drudgery of
 * video processing is handled by {@link VideoDisplayActivity}.  This class still needs to tell it which
 * camera to use and needs to select the optimal resolution.  The actual processing is done by {@link ShowGradient}
 * which is passed into the super class when {@link #onResume()} is called.
 *
 * @author Peter Abeles
 */
public class VideoActivity extends VideoDisplayActivity
{
	@Override
	protected void onResume() {
		super.onResume();
		// tell the super class which algorithm should be run on each video frame
		setProcessing( new ShowGradient());

		// for fun you can display the FPS by uncommenting the line below.
		// The FPS will vary depending on processing time and shutter speed,
		// which is dependent on lighting conditions
//		setShowFPS(true);
	}

	/**
	 * Opens a camera and configures its preview resolution.  Invoked by the super class while
	 * the video display is being set up.
	 *
	 * @param cameraInfo Storage for information on the selected camera
	 * @return The opened and configured camera, or null if the device has no camera
	 */
	@Override
	protected Camera openConfigureCamera( Camera.CameraInfo cameraInfo )
	{
		Camera camera = selectAndOpenCamera(cameraInfo);
		// Guard against the no-camera case.  selectAndOpenCamera() shows a dialog asynchronously and
		// returns null; without this check the getParameters() call below would throw a
		// NullPointerException before the user ever sees the dialog.
		if( camera == null )
			return null;

		Camera.Parameters param = camera.getParameters();

		// Select the preview size closest to 320x240
		// Smaller images are recommended because some computer vision operations are very expensive
		List<Camera.Size> sizes = param.getSupportedPreviewSizes();
		Camera.Size s = sizes.get(closest(sizes,320,240));
		param.setPreviewSize(s.width,s.height);
		camera.setParameters(param);

		return camera;
	}

	/**
	 * Step through the camera list and select a camera.  It is also possible that there is no camera.
	 * The camera hardware requirement in AndroidManifest.xml was turned off so that devices with just
	 * a front facing camera can be found.  Newer SDK's handle this in a more sane way, but with older devices
	 * you need this work around.
	 *
	 * @param info Storage which is filled in with information on the selected camera
	 * @return The opened camera, or null if the device has no cameras at all
	 */
	private Camera selectAndOpenCamera(Camera.CameraInfo info) {
		int numberOfCameras = Camera.getNumberOfCameras();

		int selected = -1;

		for (int i = 0; i < numberOfCameras; i++) {
			Camera.getCameraInfo(i, info);

			if( info.facing == Camera.CameraInfo.CAMERA_FACING_BACK ) {
				// prefer a back facing camera; stop searching as soon as one is found
				selected = i;
				break;
			} else {
				// default to a front facing camera if a back facing one can't be found
				selected = i;
			}
		}

		if( selected == -1 ) {
			// The dialog is shown asynchronously, so execution continues and null IS returned here.
			// The caller must handle the null; the dialog exits the app once the user acknowledges it.
			dialogNoCamera();
			return null;
		} else {
			return Camera.open(selected);
		}
	}

	/**
	 * Gracefully handle the situation where a camera could not be found
	 */
	private void dialogNoCamera() {
		AlertDialog.Builder builder = new AlertDialog.Builder(this);
		builder.setMessage("Your device has no cameras!")
				.setCancelable(false)
				.setPositiveButton("OK", new DialogInterface.OnClickListener() {
					public void onClick(DialogInterface dialog, int id) {
						System.exit(0);
					}
				});
		AlertDialog alert = builder.create();
		alert.show();
	}

	/**
	 * Goes through the size list and selects the one which is the closest specified size
	 *
	 * @param sizes List of candidate preview sizes
	 * @param width Desired image width
	 * @param height Desired image height
	 * @return Index of the best matching size, or -1 if the list is empty
	 */
	public static int closest( List<Camera.Size> sizes , int width , int height ) {
		int best = -1;
		int bestScore = Integer.MAX_VALUE;

		for( int i = 0; i < sizes.size(); i++ ) {
			Camera.Size s = sizes.get(i);

			// squared Euclidean distance between candidate and requested dimensions
			int dx = s.width-width;
			int dy = s.height-height;

			int score = dx*dx + dy*dy;
			if( score < bestScore ) {
				best = i;
				bestScore = score;
			}
		}

		return best;
	}
}