android – Mobile Vision API – how to capture a picture while face tracking
I am using the face tracking sample from the Mobile Vision API and I am trying to take a photo when the screen is tapped. As a first step I wanted to capture whatever is on screen with a button, and I tried the code below, but it fails. I also looked at the barcode reader sample, which has a tap-to-capture method, but I could not get it to work. What approach should I take in this situation?
private void takeImage() {
    camera.takePicture(null, null, new PictureCallback() {

        private File imageFile;

        @Override
        public void onPictureTaken(byte[] data, Camera camera) {
            try {
                // convert byte array into bitmap
                Bitmap loadedImage = BitmapFactory.decodeByteArray(data, 0, data.length);

                // rotate image
                Matrix rotateMatrix = new Matrix();
                rotateMatrix.postRotate(rotation);
                Bitmap rotatedBitmap = Bitmap.createBitmap(loadedImage, 0, 0,
                        loadedImage.getWidth(), loadedImage.getHeight(),
                        rotateMatrix, false);

                String state = Environment.getExternalStorageState();
                File folder;
                if (state.contains(Environment.MEDIA_MOUNTED)) {
                    folder = new File(Environment.getExternalStorageDirectory() + "/Demo");
                } else {
                    folder = new File(Environment.getExternalStorageDirectory() + "/Demo");
                }
                boolean success = true;
                if (!folder.exists()) {
                    success = folder.mkdirs();
                }
                if (success) {
                    java.util.Date date = new java.util.Date();
                    imageFile = new File(folder.getAbsolutePath()
                            + File.separator
                            + new Timestamp(date.getTime()).toString()
                            + "Image.jpg");
                    imageFile.createNewFile();
                } else {
                    Toast.makeText(getBaseContext(), "Image Not saved",
                            Toast.LENGTH_SHORT).show();
                    return;
                }

                ByteArrayOutputStream ostream = new ByteArrayOutputStream();
                // save image into gallery
                rotatedBitmap.compress(CompressFormat.JPEG, 100, ostream);
                FileOutputStream fout = new FileOutputStream(imageFile);
                fout.write(ostream.toByteArray());
                fout.close();

                ContentValues values = new ContentValues();
                values.put(Images.Media.DATE_TAKEN, System.currentTimeMillis());
                values.put(Images.Media.MIME_TYPE, "image/jpeg");
                values.put(MediaStore.MediaColumns.DATA, imageFile.getAbsolutePath());
                CameraDemoActivity.this.getContentResolver().insert(
                        Images.Media.EXTERNAL_CONTENT_URI, values);
            } catch (Exception e) {
                e.printStackTrace();
            }
        }
    });
}

@Override
public void onClick(View v) {
    switch (v.getId()) {
        case R.id.captureImage:
            takeImage();
            break;
        default:
            break;
    }
}
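One thing worth ruling out before changing the camera code: on Android 6.0+ both CAMERA and WRITE_EXTERNAL_STORAGE must be granted at runtime, otherwise a save path like the one above fails even though the manifest declares the permissions. A minimal check could look like this sketch; the class name, method name and request code are placeholders for illustration, not part of the original sample.

import android.Manifest;
import android.content.pm.PackageManager;
import android.support.v4.app.ActivityCompat;    // or androidx.core.app.ActivityCompat
import android.support.v4.content.ContextCompat; // or androidx.core.content.ContextCompat
import android.support.v7.app.AppCompatActivity;

public class CaptureActivity extends AppCompatActivity {

    // Request code used only for this sketch.
    private static final int REQ_CAPTURE_PERMISSIONS = 1001;

    /**
     * Returns true if CAMERA and WRITE_EXTERNAL_STORAGE are already granted,
     * otherwise triggers the runtime permission dialog and returns false.
     * Call this before takeImage() / takePicture() and before writing the file.
     */
    private boolean ensureCapturePermissions() {
        boolean cameraOk = ContextCompat.checkSelfPermission(this,
                Manifest.permission.CAMERA) == PackageManager.PERMISSION_GRANTED;
        boolean storageOk = ContextCompat.checkSelfPermission(this,
                Manifest.permission.WRITE_EXTERNAL_STORAGE) == PackageManager.PERMISSION_GRANTED;

        if (cameraOk && storageOk) {
            return true;
        }
        ActivityCompat.requestPermissions(this,
                new String[]{Manifest.permission.CAMERA,
                        Manifest.permission.WRITE_EXTERNAL_STORAGE},
                REQ_CAPTURE_PERMISSIONS);
        return false;
    }
}

If ensureCapturePermissions() returns false, the capture can be retried from onRequestPermissionsResult() once the user grants access.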
Solution:
I solved it. The face tracker sample drives the camera through a CameraSource rather than a raw android.hardware.Camera, and CameraSource provides its own takePicture(shutter, jpeg) method, so the capture can be triggered directly on mCameraSource:
findViewById(R.id.capture).setOnClickListener(new View.OnClickListener() {
    @Override
    public void onClick(View v) {
        mCameraSource.takePicture(null, new CameraSource.PictureCallback() {

            private File imageFile;

            @Override
            public void onPictureTaken(byte[] bytes) {
                try {
                    // convert byte array into bitmap
                    Bitmap loadedImage = BitmapFactory.decodeByteArray(bytes, 0, bytes.length);

                    // rotate image
                    Matrix rotateMatrix = new Matrix();
                    rotateMatrix.postRotate(rotation);
                    Bitmap rotatedBitmap = Bitmap.createBitmap(loadedImage, 0, 0,
                            loadedImage.getWidth(), loadedImage.getHeight(),
                            rotateMatrix, false);

                    dir = new File(
                            Environment.getExternalStoragePublicDirectory(
                                    Environment.DIRECTORY_PICTURES), "MyPhotos");
                    boolean success = true;
                    if (!dir.exists()) {
                        success = dir.mkdirs();
                    }
                    if (success) {
                        java.util.Date date = new java.util.Date();
                        imageFile = new File(dir.getAbsolutePath()
                                + File.separator
                                + new Timestamp(date.getTime()).toString()
                                + "Image.jpg");
                        imageFile.createNewFile();
                    } else {
                        Toast.makeText(getBaseContext(), "Image Not saved",
                                Toast.LENGTH_SHORT).show();
                        return;
                    }

                    ByteArrayOutputStream ostream = new ByteArrayOutputStream();
                    // save image into gallery
                    rotatedBitmap.compress(CompressFormat.JPEG, 100, ostream);
                    FileOutputStream fout = new FileOutputStream(imageFile);
                    fout.write(ostream.toByteArray());
                    fout.close();

                    ContentValues values = new ContentValues();
                    values.put(Images.Media.DATE_TAKEN, System.currentTimeMillis());
                    values.put(Images.Media.MIME_TYPE, "image/jpeg");
                    values.put(MediaStore.MediaColumns.DATA, imageFile.getAbsolutePath());
                    FaceTrackerActivity.this.getContentResolver().insert(
                            Images.Media.EXTERNAL_CONTENT_URI, values);
                    //saveToInternalStorage(loadedImage);
                } catch (Exception e) {
                    e.printStackTrace();
                }
            }
        });
    }
});
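Both snippets rotate the bitmap by a rotation field that the post never shows being filled in. One common way to derive it is the formula from the android.hardware.Camera.setDisplayOrientation() documentation; the helper below is only a sketch under that assumption (the class and method names are mine), and the value that makes the saved JPEG upright may still need per-device adjustment, especially for the mirrored front camera that the face tracker sample opens.

import android.app.Activity;
import android.hardware.Camera;
import android.view.Surface;

public final class RotationHelper {

    /**
     * Returns a clockwise rotation (0/90/180/270) that makes the camera output
     * upright on the current display, using the formula from the
     * Camera.setDisplayOrientation() documentation.
     */
    public static int getRotation(Activity activity, int cameraId) {
        Camera.CameraInfo info = new Camera.CameraInfo();
        Camera.getCameraInfo(cameraId, info);

        // Translate the current display rotation constant into degrees.
        int degrees = 0;
        switch (activity.getWindowManager().getDefaultDisplay().getRotation()) {
            case Surface.ROTATION_0:   degrees = 0;   break;
            case Surface.ROTATION_90:  degrees = 90;  break;
            case Surface.ROTATION_180: degrees = 180; break;
            case Surface.ROTATION_270: degrees = 270; break;
        }

        if (info.facing == Camera.CameraInfo.CAMERA_FACING_FRONT) {
            int result = (info.orientation + degrees) % 360;
            return (360 - result) % 360;  // compensate for the mirrored front preview
        } else {
            return (info.orientation - degrees + 360) % 360;
        }
    }
}

The cameraId passed in has to match the camera the CameraSource was built with; the face tracker sample configures it with the front-facing camera.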
Tags: android, google-vision  Source: https://codeday.me/bug/20190829/1759270.html