android - 这是使用 AsyncTask 的问题吗?需要整理我的代码
问题描述
我正在开发一个打开相机的 Activity:用户拍照后照片被保存,并在下一个 Activity(进行后续处理的地方)中打开。在第一个 Activity 中,我给相机加了一些叠加层,它们工作正常;但我还想实现「到用户脸部距离」的计算。目前在我的代码中,距离计算是在相机拍照之后才被调用的,所以它只在那一刻有效;而我需要它在相机预览期间持续运行。这是代码:
/**
 * Sets up the camera layout, the distance TextView and the capture button.
 * The camera itself is opened later, in surfaceCreated().
 */
@Override
public void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    setContentView(R.layout.camera);
    // NOTE(review): setContentView() already inflated R.layout.camera; the old
    // extra inflater.inflate(R.layout.camera, null) produced an unused View and
    // has been removed. The inflater field itself is kept in case other code
    // (outside this view) uses it.
    inflater = LayoutInflater.from(getBaseContext());
    ImageView imgFavorite = findViewById(R.id.btncapturatestedp);
    textView = findViewById(R.id.text_distancia);
    imgFavorite.setClickable(true);
    imgFavorite.bringToFront();
    // Capture button: mPictureCallback saves the JPEG on a worker thread.
    imgFavorite.setOnClickListener(new View.OnClickListener() {
        @Override
        public void onClick(View v) {
            camera.takePicture(null, null, mPictureCallback);
        }
    });
}
/**
 * Hardware volume keys: volume-down takes a picture, volume-up shows a hint
 * (in Portuguese) telling the user to press volume-down instead.
 *
 * @return true when the key was consumed, otherwise defers to super.
 */
@Override
public boolean onKeyDown(int keyCode, KeyEvent event) {
    int action = event.getAction();
    Log.i("Volume", "Volume " + keyCode + " " + action);
    // NOTE(review): removed an AudioManager lookup that was never used.
    switch (keyCode) {
        case KeyEvent.KEYCODE_VOLUME_UP:
            if (action == KeyEvent.ACTION_DOWN) {
                Toast.makeText(getApplicationContext(),
                        "Use o botão de volume para baixo para " +
                                "tirar uma foto", Toast.LENGTH_LONG).show();
            }
            return true;
        case KeyEvent.KEYCODE_VOLUME_DOWN:
            if (action == KeyEvent.ACTION_DOWN) {
                camera.takePicture(null, null, mPictureCallback);
            }
            return true;
        default:
            return super.onKeyDown(keyCode, event);
    }
}
// Hooks this activity up as the SurfaceHolder callback each time it resumes;
// the actual camera open/start happens in surfaceCreated()/surfaceChanged().
@Override
protected void onResume() {
// Let the surface pick its pixel format.
getWindow().setFormat(PixelFormat.UNKNOWN);
surfaceView = findViewById(R.id.cameraPreviewlenteG15);
surfaceHolder = surfaceView.getHolder();
// Registers this activity for surfaceCreated/Changed/Destroyed callbacks.
surfaceHolder.addCallback(this);
super.onResume();
}
/**
 * Restarts the camera preview whenever the surface geometry changes.
 */
@Override
public void surfaceChanged(SurfaceHolder holder, int format, int width,
                           int height) {
    // Guard against camera == null (surfaceCreated nulls it when
    // setPreviewDisplay fails); the old code could NPE on stopPreview() here.
    if (cameraview && camera != null) {
        camera.stopPreview();
        cameraview = false;
    }
    if (camera != null) {
        try {
            camera.setPreviewDisplay(surfaceHolder);
            camera.startPreview();
            cameraview = true;
        } catch (IOException e) {
            e.printStackTrace();
        }
    }
}
/**
 * Opens the front camera, caches the optical parameters used by FaceTracker
 * for the distance estimate, and attaches the preview surface.
 */
@Override
public void surfaceCreated(SurfaceHolder holder) {
    // Named constant instead of the magic number 1 (CAMERA_FACING_FRONT == 1).
    camera = Camera.open(Camera.CameraInfo.CAMERA_FACING_FRONT);
    Camera.Parameters params = camera.getParameters();
    // Focal length and view angles feed the pinhole-camera distance formula
    // in FaceTracker.onUpdate().
    F = params.getFocalLength();
    angleX = params.getHorizontalViewAngle();
    angleY = params.getVerticalViewAngle();
    sensorX = (float) (Math.tan(Math.toRadians(angleX / 2)) * 2 * F);
    sensorY = (float) (Math.tan(Math.toRadians(angleY / 2)) * 2 * F);
    if (this.getResources().getConfiguration().orientation != Configuration.ORIENTATION_LANDSCAPE)
    {
        // NOTE(review): this legacy "orientation" parameter is never pushed to
        // the hardware (camera.setParameters(params) is not called), so only
        // setDisplayOrientation(90) actually takes effect — confirm intended.
        params.set("orientation", "portrait");
        camera.setDisplayOrientation(90);
    }
    try
    {
        camera.setPreviewDisplay(holder);
    }
    catch (IOException exception)
    {
        // Could not attach the preview surface: log (old code swallowed the
        // error silently) and release so surfaceDestroyed won't double-release.
        Log.e("TAG", "setPreviewDisplay failed", exception);
        camera.release();
        camera = null;
    }
}
/**
 * Stops and releases the camera when the preview surface goes away.
 */
@Override
public void surfaceDestroyed(SurfaceHolder holder) {
    // Guard against null: surfaceCreated sets camera = null when attaching the
    // preview fails, and the old code would NPE on stopPreview() here.
    if (camera != null) {
        camera.stopPreview();
        camera.release();
        camera = null;
    }
    cameraview = false;
}
/**
 * Saves the captured JPEG on a worker thread (writeFile also launches the
 * DPCalc activity), then restarts the preview pipeline.
 */
private final Camera.PictureCallback mPictureCallback = new Camera.PictureCallback() {
    @Override
    public void onPictureTaken(final byte[] data, Camera camera) {
        // Keep file I/O off the UI thread.
        final String targetDir = STORAGE_PATH + "/DCIM" + "/AppDnp";
        Runnable saveJob = new Runnable() {
            @Override
            public void run() {
                writeFile(targetDir, data);
            }
        };
        new Thread(saveJob, "captureThread").start();
        startPreview();
    }
};
/**
 * Decodes the captured JPEG bytes, rotates/mirrors them according to the
 * camera facing, and saves the result under {@code path}.
 *
 * @param path target directory for the image
 * @param data raw JPEG bytes from Camera.PictureCallback (may be null)
 */
public void writeFile(String path, byte[] data) {
    Bitmap bitmap = null;
    if (data != null){
        bitmap = BitmapFactory.decodeByteArray(data, 0, data.length);
    }
    if (bitmap != null){
        Matrix matrix = new Matrix();
        if (mCameraId == Camera.CameraInfo.CAMERA_FACING_BACK) {
            matrix.postRotate(90);
        } else if (mCameraId == Camera.CameraInfo.CAMERA_FACING_FRONT){
            // Front camera: rotate and mirror so the saved image matches what
            // the user saw in the preview.
            matrix.postRotate(90);
            matrix.postScale(1, -1);
        }
        Bitmap rotateBmp = Bitmap.createBitmap(bitmap, 0, 0, bitmap.getWidth(),
                bitmap.getHeight(), matrix, false);
        // Free the un-rotated source bitmap (the old code leaked it).
        // createBitmap may return the same instance for an identity matrix,
        // hence the reference check before recycling.
        if (rotateBmp != bitmap) {
            bitmap.recycle();
        }
        saveBmp2SD(path, rotateBmp);
        rotateBmp.recycle();
    }
}
/**
 * Compresses the bitmap to {@code path}/IMG_DNP.jpg and launches DPCalc with
 * the resulting file name.
 *
 * NOTE(review): the fixed file name means every capture overwrites the
 * previous one — confirm that is intended.
 */
private void saveBmp2SD(String path, Bitmap bitmap){
    File file = new File(path);
    if (!file.exists()){
        // mkdirs() (old code used mkdir()) also creates missing parent
        // directories such as /DCIM on a fresh device.
        file.mkdirs();
    }
    String fileName = path + "/" + "IMG_DNP" + ".jpg";
    // try-with-resources closes the stream even when compress/flush throws;
    // the old code leaked the FileOutputStream on any failure.
    try (BufferedOutputStream bos =
                 new BufferedOutputStream(new FileOutputStream(fileName))) {
        bitmap.compress(Bitmap.CompressFormat.JPEG, 100, bos);
        bos.flush();
        Log.i("TAG", "Take picture success!");
        // Hand the saved picture over to the next screen.
        Intent i = new Intent(this, DPCalc.class);
        i.putExtra("filename", fileName);
        i.putExtra("path", path);
        startActivity(i);
    } catch (FileNotFoundException e) {
        e.printStackTrace();
        Log.e("TAG", "The save file for take picture does not exists!");
    } catch (IOException e) {
        e.printStackTrace();
        Log.e("TAG", "Take picture fail!");
    }
}
/**
 * Starts the Mobile Vision face-detection pipeline (CameraSource + detector)
 * that feeds FaceTracker with continuous detections.
 */
private void startPreview(){
    if (camera != null) {
        // Check the permission BEFORE allocating anything: the old code built
        // the detector and CameraSource first and leaked both on this early
        // return.
        if (ActivityCompat.checkSelfPermission(this, Manifest.permission.CAMERA) != PackageManager.PERMISSION_GRANTED) {
            // TODO: request the permission via ActivityCompat#requestPermissions
            // and handle the result in onRequestPermissionsResult.
            return;
        }
        // FAST_MODE + tracking: FaceTracker.onUpdate runs per frame.
        FaceDetector detector = new FaceDetector.Builder(this)
                .setTrackingEnabled(true)
                .setClassificationType(FaceDetector.ALL_CLASSIFICATIONS)
                .setLandmarkType(FaceDetector.ALL_LANDMARKS)
                .setMode(FaceDetector.FAST_MODE)
                .build();
        detector.setProcessor(new LargestFaceFocusingProcessor(detector, new FaceTracker()));
        // NOTE(review): a fresh CameraSource is created on every call and is
        // never stopped/released — consider keeping a single instance and
        // releasing it in onPause/onDestroy.
        CameraSource cameraSource = new CameraSource.Builder(this, detector)
                .setFacing(CameraSource.CAMERA_FACING_FRONT)
                .setRequestedFps(30.0f)
                .build();
        // Use Log instead of System.out so the message shows up in logcat.
        Log.i("TAG", "Preview size: " + cameraSource.getPreviewSize());
        try {
            cameraSource.start();
        } catch (IOException e) {
            e.printStackTrace();
        }
        Log.i("TAG", "Camera Preview has started!");
    }
}
/**
 * Posts {@code message} into the distance TextView on the UI thread; safe to
 * call from detector/worker threads.
 */
public void showStatus(final String message) {
    final Runnable updateText = new Runnable() {
        @Override
        public void run() {
            textView.setText(message);
        }
    };
    runOnUiThread(updateText);
}
/**
 * Receives face detections from the vision FaceDetector and estimates the
 * screen-to-face distance from the pixel distance between the eyes
 * (pinhole-camera model, based on IvanLudvig/Screen-to-face-distance).
 */
private class FaceTracker extends Tracker<Face> {
    private FaceTracker() {
    }

    @Override
    public void onUpdate(Detector.Detections<Face> detections, Face face) {
        // BUG FIX: LEFT_EYE / RIGHT_EYE are landmark *type* constants, not
        // indices into the landmarks list, so get(LEFT_EYE) could return the
        // wrong landmark or throw IndexOutOfBoundsException. Look the
        // landmarks up by type instead.
        PointF leftEyePos = null;
        PointF rightEyePos = null;
        for (int i = 0; i < face.getLandmarks().size(); i++) {
            int type = face.getLandmarks().get(i).getType();
            if (type == LEFT_EYE) {
                leftEyePos = face.getLandmarks().get(i).getPosition();
            } else if (type == RIGHT_EYE) {
                rightEyePos = face.getLandmarks().get(i).getPosition();
            }
        }
        if (leftEyePos == null || rightEyePos == null) {
            // Eye landmarks not reported for this frame; nothing to measure.
            showStatus("face not detected");
            return;
        }
        float deltaX = Math.abs(leftEyePos.x - rightEyePos.x);
        float deltaY = Math.abs(leftEyePos.y - rightEyePos.y);
        // distance = focal * (real eye distance / sensor size)
        //                  * (image size / eye separation in pixels);
        // use the axis with the larger separation for better precision.
        float distance;
        if (deltaX >= deltaY) {
            distance = F * (AVERAGE_EYE_DISTANCE / sensorX) * (IMAGE_WIDTH / deltaX);
        } else {
            distance = F * (AVERAGE_EYE_DISTANCE / sensorY) * (IMAGE_HEIGHT / deltaY);
        }
        showStatus("distance: " + String.format("%.0f", distance) + "mm");
    }

    @Override
    public void onMissing(Detector.Detections<Face> detections) {
        super.onMissing(detections);
        showStatus("face not detected");
    }

    @Override
    public void onDone() {
        super.onDone();
    }
}
我尝试把 startPreview() 方法改到 onStart 中运行,但是不起作用。我怎样才能更好地解决这个问题?使用异步任务(AsyncTask)吗?我知道我的代码写得很糟糕,还需要学习很多东西,但请给我指明一个方向。
如果有人感兴趣,距离代码基于此https://github.com/IvanLudvig/Screen-to-face-distance 。
解决方案
推荐阅读
- c# - 读取具有未知字节大小的 TCP/IP 套接字流 C#
- javascript - Javascript函数innerText
- r - 控制 rgl 中的雾密度
- sql - 错误:ORA-00907:oracle 缺少右括号
- python - 如何防止使用 python 更改(重命名)文件名?
- python - 使用正则表达式获取字符串作为浮点变量
- cypress - 如何在 cypress 等待页面加载时捕获通知?
- java - Java、jTable 搜索
- python - 对另一列中的一列中的字符串进行分类 - python
- android - 我可以在编译期间使用 Byte Buddy 转换已编译的类吗?