Why do pictures taken with the Camera2 API have different brightness?

Problem description

I have implemented a camera using the Camera2 API. Photos taken with the same setup, i.e. the same camera position and the same subject position, have different brightness levels. The photos below were taken one right after the other, yet their brightness still differs considerably.

(Two sample photos, taken back to back, showing noticeably different brightness.)

How can I set the brightness of pictures taken with the Camera2 API?

I tried setting the brightness manually, as shown in the code below, but I still cannot get consistent brightness.

My end goal is to be able to compare two images.

import kotlinx.android.synthetic.main.activity_camera.*
import java.io.FileOutputStream
import android.util.SparseIntArray
import android.view.Surface
import android.media.ImageReader
import android.util.Size
import java.io.File
import android.graphics.SurfaceTexture
import android.view.TextureView
import android.content.Context
import android.support.v4.app.ActivityCompat
import android.content.pm.PackageManager
import android.Manifest
import android.app.Activity
import android.content.Intent
import android.graphics.ImageFormat
import android.hardware.camera2.*
import android.widget.Toast
import android.hardware.camera2.TotalCaptureResult
import android.hardware.camera2.CaptureRequest
import android.hardware.camera2.CameraCaptureSession
import android.media.Image
import android.media.MediaScannerConnection
import android.os.*
import java.io.OutputStream
import java.lang.Exception


class CameraActivity : Activity() {
var createDirectories = CreateDirectories()
private val ORIENTATIONS = SparseIntArray()

private var cameraId: String? = null
private var cameraDevice: CameraDevice? = null
private var cameraCaptureSessions: CameraCaptureSession? = null
private var captureRequestBuilder: CaptureRequest.Builder? = null
private var imageDimension: Size? = null
private var imageReader: ImageReader? = null
private var file: File? = null
private val REQUEST_CAMERA_PERMISSION = 200
private var mBackgroundHandler: Handler? = null
private var mBackgroundThread: HandlerThread? = null
// var manager: CameraManager? =null
var flag = true
var width = 900
var height = 900

private val customHandler = Handler()

override fun onCreate(savedInstanceState: Bundle?) {
    super.onCreate(savedInstanceState)
    setContentView(R.layout.activity_camera)

    ORIENTATIONS.append(Surface.ROTATION_0, 270)
    ORIENTATIONS.append(Surface.ROTATION_90, 90)
    ORIENTATIONS.append(Surface.ROTATION_180, 0)
    ORIENTATIONS.append(Surface.ROTATION_270, 180)

    texture.surfaceTextureListener = textureListener
}

companion object {
    var file1: Boolean = true
}

fun takePicture() {

    try {
        if (null == cameraDevice) {

            return
        }

        val manager = getSystemService(Context.CAMERA_SERVICE) as CameraManager

        val characteristics = manager.getCameraCharacteristics(cameraDevice!!.id)

        val reader = ImageReader.newInstance(width, height, ImageFormat.JPEG, 1)
        var outputSurfaces = mutableListOf<Surface>()
        outputSurfaces.add(reader.surface)
        outputSurfaces.add(Surface(texture.surfaceTexture))
        val captureBuilder =
            cameraDevice?.createCaptureRequest(CameraDevice.TEMPLATE_STILL_CAPTURE)
        captureBuilder?.addTarget(reader.surface)
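        // Note: the key here is CONTROL_AWB_MODE but the value is an AE-mode constant,
        // so this line configures white balance, not auto-exposure (same in updatePreview()).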
        captureBuilder?.set(CaptureRequest.CONTROL_AWB_MODE, CameraMetadata.CONTROL_AE_MODE_ON)
        val brightness = setBrightness(characteristics)
        captureBuilder?.set(CaptureRequest.CONTROL_AE_EXPOSURE_COMPENSATION, brightness)

        val rotation = windowManager.defaultDisplay.rotation
        val sensorOrientation = characteristics.get(CameraCharacteristics.SENSOR_ORIENTATION)
        captureBuilder?.set(CaptureRequest.JPEG_ORIENTATION, sensorOrientation)
        createDirectories.createFolder("ClipPicNew")


        file = File("${Environment.getExternalStorageDirectory()}/ClipPicNew/pic${System.currentTimeMillis()}.jpg")
        reader.setOnImageAvailableListener(readerListener, mBackgroundHandler)

        cameraDevice?.createCaptureSession(
            listOf(reader.surface),
            object : CameraCaptureSession.StateCallback() {

                override fun onConfigured(session: CameraCaptureSession) {
                    try {
                        session.capture(
                            captureBuilder?.build(),
                            captureListener,
                            mBackgroundHandler
                        )

                    } catch (e: CameraAccessException) {

                        e.printStackTrace()
                    }
                }

                override fun onConfigureFailed(session: CameraCaptureSession) {
                }
            },
            mBackgroundHandler
        )

    } catch (e: CameraAccessException) {

        e.printStackTrace()
    }

}

val captureListener = object : CameraCaptureSession.CaptureCallback() {
    override fun onCaptureCompleted(
        session: CameraCaptureSession,
        request: CaptureRequest,
        result: TotalCaptureResult
    ) {
        super.onCaptureCompleted(session, request, result)
        Toast.makeText(this@CameraActivity, "Saved:$file", Toast.LENGTH_SHORT).show()
        createCameraPreview()
    }
}

val readerListener: ImageReader.OnImageAvailableListener =
    object : ImageReader.OnImageAvailableListener {
        override fun onImageAvailable(reader: ImageReader?) {
            var image: Image? = null
            try {

                image = reader?.acquireLatestImage()
                val buffer = image!!.planes[0].buffer
                val bytes = ByteArray(buffer.capacity())
                buffer.get(bytes)

                save(bytes)
            } catch (e: Exception) {

                e.printStackTrace()
            } finally {
                image?.close()
            }
        }

        fun save(bytes: ByteArray) {

            var output: OutputStream? = null
            try {
                output = FileOutputStream(file)
                output.write(bytes)

            } catch (e: Exception) {

                e.printStackTrace()
            } finally {
                output?.close()
                var filePath = ""
                if (file != null) {
                    filePath = file?.absolutePath.toString()
                }

                MediaScannerConnection.scanFile(
                    this@CameraActivity,
                    arrayOf(filePath),
                    null,
                    null
                )
            }
        }
    }

//texture listener
private var textureListener: TextureView.SurfaceTextureListener =
    object : TextureView.SurfaceTextureListener {
        override fun onSurfaceTextureAvailable(
            surface: SurfaceTexture,
            width1: Int,
            height1: Int
        ) {
            width = width1
            height = height1

            openCamera()
        }

        override fun onSurfaceTextureSizeChanged(
            surface: SurfaceTexture,
            width1: Int,
            height1: Int
        ) {
            width = width1
            height = height1
            // Transform you image captured size according to the surface width and height
        }

        override fun onSurfaceTextureDestroyed(surface: SurfaceTexture): Boolean {
            return false
        }

        override fun onSurfaceTextureUpdated(surface: SurfaceTexture) {}
    }

//open Camera

private fun openCamera() {
    val manager = getSystemService(Context.CAMERA_SERVICE) as CameraManager

    try {
        cameraId = manager.cameraIdList[0]
        val characteristics = manager.getCameraCharacteristics(cameraId)

        val map = characteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP)!!
        imageDimension = map.getOutputSizes(SurfaceTexture::class.java)[0]
        if (ActivityCompat.checkSelfPermission(
                this,
                Manifest.permission.CAMERA
            ) != PackageManager.PERMISSION_GRANTED && ActivityCompat.checkSelfPermission(
                this,
                Manifest.permission.WRITE_EXTERNAL_STORAGE
            ) != PackageManager.PERMISSION_GRANTED
        ) {
            ActivityCompat.requestPermissions(
                this,
                arrayOf(Manifest.permission.CAMERA, Manifest.permission.WRITE_EXTERNAL_STORAGE),
                REQUEST_CAMERA_PERMISSION
            );
            return
        }
        manager.openCamera(cameraId, stateCallback, null)
    } catch (e: CameraAccessException) {
        e.printStackTrace()
    }
}


private val stateCallback = object : CameraDevice.StateCallback() {

    override fun onOpened(camera: CameraDevice) {
        //This is called when the camera is open

        cameraDevice = camera
        createCameraPreview()
    }

    override fun onDisconnected(camera: CameraDevice) {

    }

    override fun onError(camera: CameraDevice, error: Int) {

    }
}


fun createCameraPreview() {
    try {
        val surfaceTexture = texture.surfaceTexture
        surfaceTexture.setDefaultBufferSize(width, height)
        val surface = Surface(surfaceTexture)
        captureRequestBuilder =
                cameraDevice?.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW)


        captureRequestBuilder?.addTarget(surface)


        cameraDevice?.createCaptureSession(listOf(surface), mCameraCaptureSessionCallback, null)
    } catch (e: CameraAccessException) {
        e.printStackTrace()
    }

}

private val mCameraCaptureSessionCallback = object : CameraCaptureSession.StateCallback() {
    override fun onConfigured(cameraCaptureSession: CameraCaptureSession) {
        if (null == cameraDevice) {
            return

        }

        cameraCaptureSessions = cameraCaptureSession
        updatePreview()
    }

    override fun onConfigureFailed(session: CameraCaptureSession) {

    }
}


fun updatePreview() {
    if (null == cameraDevice) {
        return
    }


    val cameraManager = getSystemService(Context.CAMERA_SERVICE) as CameraManager
    cameraId = cameraManager.cameraIdList[0]
    val characteristics = cameraManager.getCameraCharacteristics(cameraId!!)

    captureRequestBuilder?.set(
        CaptureRequest.CONTROL_AWB_MODE,
        CameraMetadata.CONTROL_AE_MODE_ON
    )
    val brightness = setBrightness(characteristics)
    captureRequestBuilder?.set(CaptureRequest.CONTROL_AE_EXPOSURE_COMPENSATION, brightness)


    try {
        cameraCaptureSessions?.setRepeatingRequest(
            captureRequestBuilder?.build(),
            null,
            mBackgroundHandler
        )
    } catch (e: Exception) {

        e.printStackTrace()
    }

}

private fun setBrightness(characteristics: CameraCharacteristics): Int {

    val controlAECompensationStep =
        characteristics.get(CameraCharacteristics.CONTROL_AE_COMPENSATION_STEP)
    if (controlAECompensationStep != null) {
        var compensationStep = controlAECompensationStep
    }

    var minCompensationRange = 0
    var maxCompensationRange = 0
    val controlAECompensationRange =
        characteristics.get(CameraCharacteristics.CONTROL_AE_COMPENSATION_RANGE)
    if (controlAECompensationRange != null) {
        minCompensationRange = controlAECompensationRange.lower
        maxCompensationRange = controlAECompensationRange.upper
    }

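    // Note: (90 / 100) is integer division and evaluates to 0, so this always returns
    // minCompensationRange rather than 90% of the compensation range.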
    return (minCompensationRange + (maxCompensationRange - minCompensationRange) * (90 / 100))

}

private fun startBackgroundThread() {
    mBackgroundThread = HandlerThread("Camera Background")
    mBackgroundThread?.start()
    mBackgroundHandler = Handler(mBackgroundThread?.looper)
}

private fun stopBackgroundThread() {
    mBackgroundThread?.quitSafely()
    try {
        mBackgroundThread?.join()
        mBackgroundThread = null
        mBackgroundHandler = null
    } catch (e: InterruptedException) {
        e.printStackTrace()
    }

}

private fun closeCamera() {
    if (null != cameraDevice) {
        cameraDevice?.close()
        cameraDevice = null
    }
    if (null != imageReader) {
        imageReader?.close()
        imageReader = null
    }
}

override fun onRequestPermissionsResult(
    requestCode: Int,
    permissions: Array<String>,
    grantResults: IntArray
) {
    if (requestCode == REQUEST_CAMERA_PERMISSION) {
        if (grantResults[0] == PackageManager.PERMISSION_DENIED) {
            // close the app
            Toast.makeText(
                this,
                "Sorry!!!, you can't use this app without granting permission",
                Toast.LENGTH_LONG
            ).show()
            finish()
        }
    }
}

override fun onResume() {
    super.onResume()

    startBackgroundThread()
    if (texture.isAvailable) {
        openCamera()
    } else {
        texture.surfaceTextureListener = textureListener
    }
    if (flag) {
        flag = false
        customHandler.postDelayed(updateTimerThread, 500)

    }

}

override fun onDestroy() {
    super.onDestroy()
    customHandler.removeCallbacks(updateTimerThread)
}

override fun onPause() {
    closeCamera()
    stopBackgroundThread()
    super.onPause()
}

private var updateTimerThread: Runnable = object : Runnable {

    override fun run() {
        takePictureAndSendResult()
    }
}

fun takePictureAndSendResult() {

    takePicture()
    val resultIntent = Intent()
    if (file != null) {

        resultIntent.putExtra("File Path", file!!.absolutePath)
    }

    setResult(Activity.RESULT_OK, resultIntent)
    finish()
}

}

Tags: android

Solution


How long do you let the preview run before taking the picture? The auto-exposure routine can take several frames to stabilize, so if you take a picture immediately after starting the camera, you may well see a problem like this.
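
For illustration, one way to wait for AE to settle is to watch CONTROL_AE_STATE in the preview capture results and only trigger the still capture once it reports CONVERGED. A minimal sketch, assuming it is passed to setRepeatingRequest() instead of the null callback; AeStateTracker and aeConverged are illustrative names, not part of the question's code:

import android.hardware.camera2.CameraCaptureSession
import android.hardware.camera2.CaptureRequest
import android.hardware.camera2.CaptureResult
import android.hardware.camera2.TotalCaptureResult

// Illustrative helper: tracks whether auto-exposure has converged so the still
// capture can be deferred until the exposure is stable.
class AeStateTracker : CameraCaptureSession.CaptureCallback() {
    @Volatile
    var aeConverged = false
        private set

    override fun onCaptureCompleted(
        session: CameraCaptureSession,
        request: CaptureRequest,
        result: TotalCaptureResult
    ) {
        val aeState = result.get(CaptureResult.CONTROL_AE_STATE)
        // CONVERGED (or FLASH_REQUIRED) means AE has settled on an exposure for the scene.
        aeConverged = aeState == CaptureResult.CONTROL_AE_STATE_CONVERGED ||
                aeState == CaptureResult.CONTROL_AE_STATE_FLASH_REQUIRED
    }
}

In updatePreview() you would pass an AeStateTracker instance instead of null to setRepeatingRequest(), and in the delayed Runnable only call takePicture() once aeConverged is true (or simply give the preview a longer warm-up than 500 ms).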

Otherwise, note that exposure compensation is not about setting an absolute exposure value; it sets an offset from whatever the auto-exposure routine would pick by default (so it is an "I want it brighter/darker than what AE chooses" control). If you want truly manual exposure, as in "I want a 0.1-second exposure", you need to use different controls (SENSOR_EXPOSURE_TIME, AE_MODE_OFF, and so on).
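
For reference, a minimal sketch of what truly manual exposure could look like on the question's captureBuilder; the 30 ms exposure and ISO 400 are assumed placeholder values, clamped to the ranges the device reports (LEGACY-level devices may ignore manual sensor controls):

import android.hardware.camera2.CameraCharacteristics
import android.hardware.camera2.CameraMetadata
import android.hardware.camera2.CaptureRequest

// Illustrative sketch: disable auto-exposure and fix exposure time and ISO, instead of
// nudging AE with CONTROL_AE_EXPOSURE_COMPENSATION. Values are placeholders, clamped to
// the ranges the device advertises.
fun applyManualExposure(builder: CaptureRequest.Builder, characteristics: CameraCharacteristics) {
    val exposureRange = characteristics.get(CameraCharacteristics.SENSOR_INFO_EXPOSURE_TIME_RANGE)
    val isoRange = characteristics.get(CameraCharacteristics.SENSOR_INFO_SENSITIVITY_RANGE)

    val exposureNs = 30_000_000L   // 30 ms, assumed placeholder
    val iso = 400                  // assumed placeholder

    builder.set(CaptureRequest.CONTROL_AE_MODE, CameraMetadata.CONTROL_AE_MODE_OFF)
    builder.set(CaptureRequest.SENSOR_EXPOSURE_TIME, exposureRange?.clamp(exposureNs) ?: exposureNs)
    builder.set(CaptureRequest.SENSOR_SENSITIVITY, isoRange?.clamp(iso) ?: iso)
}

With AE off, every still capture uses the same exposure time and sensitivity, which is what makes two images directly comparable.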

If the images are consistently dark, I'd suggest checking the target FPS range (CONTROL_AE_TARGET_FPS_RANGE): if it is set to something like [30,30], the AE routine cannot extend the exposure time beyond 1/30 s, which can be too short in dimmer scenes. You may want to switch to something like [15,30], which allows exposures of up to 1/15 s.
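
A sketch of picking a wider range from what the device actually supports; pickWideFpsRange is an illustrative helper, not an existing API:

import android.hardware.camera2.CameraCharacteristics
import android.util.Range

// Illustrative helper: among the AE target FPS ranges the device supports, pick the one
// with the lowest lower bound, e.g. [15, 30], so AE is allowed the longest exposure time.
fun pickWideFpsRange(characteristics: CameraCharacteristics): Range<Int>? {
    val ranges = characteristics.get(CameraCharacteristics.CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES)
    return ranges?.minByOrNull { it.lower }
}

// Assumed usage on the preview/capture builder:
// pickWideFpsRange(characteristics)?.let { range ->
//     captureRequestBuilder?.set(CaptureRequest.CONTROL_AE_TARGET_FPS_RANGE, range)
// }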

Does the captured image match the preview you see?

