// MiniTias/android/app/src/main/kotlin/com/example/mini_tias/RawCapturePlugin.kt
package com.example.mini_tias

import android.content.Context
import android.graphics.Bitmap
import android.graphics.BitmapFactory
import android.graphics.Color
import android.graphics.ImageFormat
import android.graphics.Matrix
import android.graphics.Rect
import android.graphics.YuvImage
import android.hardware.camera2.*
import android.media.ImageReader
import android.media.MediaScannerConnection
import android.os.Handler
import android.os.HandlerThread
import io.flutter.plugin.common.MethodCall
import io.flutter.plugin.common.MethodChannel
import java.io.ByteArrayOutputStream
import java.nio.ByteBuffer
import java.util.zip.CRC32
import java.util.zip.Deflater
import java.util.zip.DeflaterOutputStream
import kotlin.math.roundToInt

/**
 * Camera2 API を使用してフロントカメラから YUV_420_888 フォーマットで
 * フル解像度の画像を 1 フレームキャプチャする.
 */
class RawCapturePlugin(private val context: Context) : MethodChannel.MethodCallHandler {

    // Background thread that services camera callbacks and image processing.
    private var backgroundThread: HandlerThread? = null
    // Handler bound to [backgroundThread]'s looper; null while no capture is in flight.
    private var backgroundHandler: Handler? = null
    // Currently opened camera, kept so [cleanup] can close it on error paths.
    private var cameraDevice: CameraDevice? = null

    /**
     * Callback invoked once a full-resolution YUV frame has been captured.
     *
     * NOTE: this was originally declared as a nested `typealias`, which does
     * not compile — Kotlin only permits type aliases at the top level of a
     * file. A private functional interface with an `operator fun invoke`
     * preserves the existing call-site syntax (`onFrame(...)`) and
     * trailing-lambda SAM conversion while being legal inside a class.
     *
     * Implementations are responsible for closing [reader] and [camera] and
     * for completing [result].
     */
    private fun interface FrameProcessor {
        operator fun invoke(
            yBytes: ByteArray, uBytes: ByteArray, vBytes: ByteArray,
            width: Int, height: Int,
            yRowStride: Int, uvRowStride: Int, uvPixelStride: Int,
            reader: ImageReader, camera: CameraDevice,
            result: MethodChannel.Result,
        )
    }

    /**
     * Dispatches Flutter method-channel calls to the matching native handler.
     * Unknown method names are reported back as not implemented.
     */
    override fun onMethodCall(call: MethodCall, result: MethodChannel.Result) {
        when (call.method) {
            "captureFullResolutionPng" -> captureFullResolutionPng(result)
            "convertYuvToJpeg" -> convertYuvToJpeg(call, result)
            "scanFile" -> handleScanFile(call, result)
            else -> result.notImplemented()
        }
    }

    /**
     * Registers the file at the "path" argument with the media scanner as a
     * PNG, completing [result] once the scan callback fires. Reports an error
     * when no path argument was supplied.
     */
    private fun handleScanFile(call: MethodCall, result: MethodChannel.Result) {
        val path = call.argument<String>("path")
        if (path == null) {
            result.error("INVALID_PATH", "パスが指定されていません", null)
            return
        }
        MediaScannerConnection.scanFile(context, arrayOf(path), arrayOf("image/png")) { _, _ ->
            result.success(null)
        }
    }

    /**
     * Converts a YUV_420_888 frame (sent from Dart as three separate plane
     * byte arrays plus stride metadata) into JPEG bytes via [YuvImage],
     * optionally rotating and/or horizontally mirroring the result.
     *
     * Completes [result] with the JPEG byte array, or with "CONVERT_ERROR"
     * if any argument is missing or the conversion throws.
     */
    private fun convertYuvToJpeg(call: MethodCall, result: MethodChannel.Result) {
        try {
            val width = call.argument<Int>("width")!!
            val height = call.argument<Int>("height")!!
            val yData = call.argument<ByteArray>("yPlane")!!
            val uData = call.argument<ByteArray>("uPlane")!!
            val vData = call.argument<ByteArray>("vPlane")!!
            val yStride = call.argument<Int>("yRowStride")!!
            val uvStride = call.argument<Int>("uvRowStride")!!
            val uvPixel = call.argument<Int>("uvPixelStride")!!
            val rotation = call.argument<Int>("rotation") ?: 0
            val quality = call.argument<Int>("quality") ?: 85
            val mirror = call.argument<Boolean>("mirror") ?: false

            // Repack the three planes into a single NV21 buffer: a full-size
            // Y plane followed by interleaved V/U samples at quarter resolution.
            val nv21 = ByteArray(width * height * 3 / 2)
            for (row in 0 until height) {
                // Copy only `width` bytes per row; strides may include padding.
                System.arraycopy(yData, row * yStride, nv21, row * width, width)
            }
            val chromaBase = width * height
            for (row in 0 until height / 2) {
                val srcRow = row * uvStride
                val dstRow = chromaBase + row * width
                for (col in 0 until width / 2) {
                    val src = srcRow + col * uvPixel
                    nv21[dstRow + 2 * col] = vData[src]      // NV21 stores V first,
                    nv21[dstRow + 2 * col + 1] = uData[src]  // then U.
                }
            }

            // The framework can encode NV21 directly to JPEG.
            val jpegStream = ByteArrayOutputStream()
            YuvImage(nv21, ImageFormat.NV21, width, height, null)
                .compressToJpeg(Rect(0, 0, width, height), quality, jpegStream)

            val jpegBytes = if (rotation != 0 || mirror) {
                // Rotation/mirroring requires decoding to a Bitmap, applying a
                // Matrix transform, and re-encoding (a second lossy pass).
                val source = BitmapFactory.decodeByteArray(jpegStream.toByteArray(), 0, jpegStream.size())
                val matrix = Matrix()
                if (rotation != 0) matrix.postRotate(rotation.toFloat())
                if (mirror) matrix.postScale(-1f, 1f)
                val transformed = Bitmap.createBitmap(source, 0, 0, source.width, source.height, matrix, true)
                source.recycle()

                val encoded = ByteArrayOutputStream()
                transformed.compress(Bitmap.CompressFormat.JPEG, quality, encoded)
                transformed.recycle()
                encoded.toByteArray()
            } else {
                jpegStream.toByteArray()
            }

            result.success(jpegBytes)
        } catch (e: Exception) {
            result.error("CONVERT_ERROR", "YUV to JPEG 変換に失敗: ${e.message}", null)
        }
    }

    /**
     * Captures one full-resolution frame from the front camera, converts it
     * from YUV to RGB with a fused 90° clockwise rotation, encodes it as a
     * fast uncompressed PNG, and completes [result] with the PNG bytes on
     * the main thread.
     */
    private fun captureFullResolutionPng(result: MethodChannel.Result) {
        openCameraAndCapture(result) { yBytes, uBytes, vBytes, width, height, yRowStride, uvRowStride, uvPixelStride, reader, camera, res ->
            // Fused YUV → RGB conversion and 90° clockwise rotation.
            // Source pixel (x, y) maps to destination (height - 1 - y, x),
            // so the output is width_out = height, height_out = width.
            val rotatedPixels = IntArray(width * height)
            for (y in 0 until height) {
                for (x in 0 until width) {
                    val yIndex = y * yRowStride + x
                    // Chroma is subsampled 2x2; pixelStride covers interleaved U/V layouts.
                    val uvIndex = (y / 2) * uvRowStride + (x / 2) * uvPixelStride

                    val yValue = (yBytes[yIndex].toInt() and 0xFF).toDouble()
                    val uValue = (uBytes[uvIndex].toInt() and 0xFF).toDouble()
                    val vValue = (vBytes[uvIndex].toInt() and 0xFF).toDouble()

                    // BT.601-style YUV→RGB coefficients, clamped to [0, 255].
                    val r = (yValue + 1.402 * (vValue - 128)).roundToInt().coerceIn(0, 255)
                    val g = (yValue - 0.344 * (uValue - 128) - 0.714 * (vValue - 128)).roundToInt().coerceIn(0, 255)
                    val b = (yValue + 1.772 * (uValue - 128)).roundToInt().coerceIn(0, 255)

                    // Row-major index into the rotated (height × width) image.
                    rotatedPixels[x * height + (height - 1 - y)] = Color.rgb(r, g, b)
                }
            }

            val rotatedWidth = height
            val rotatedHeight = width
            val bitmap = Bitmap.createBitmap(rotatedWidth, rotatedHeight, Bitmap.Config.ARGB_8888)
            bitmap.setPixels(rotatedPixels, 0, rotatedWidth, 0, 0, rotatedWidth, rotatedHeight)

            // Encode as PNG with a stored (Deflater.NO_COMPRESSION) zlib stream for speed.
            val pngBytes = encodePngUncompressed(bitmap)
            bitmap.recycle()

            // Per the openCameraAndCapture contract, this callback owns closing
            // the reader and camera before replying.
            reader.close()
            camera.close()
            cameraDevice = null

            // Flutter results must be delivered on the main thread.
            Handler(context.mainLooper).post {
                res.success(pngBytes)
                stopBackgroundThread()
            }
        }
    }

    /**
     * Opens the front camera, captures one full-resolution YUV_420_888 frame,
     * and invokes [onFrame] with the raw plane data.
     *
     * Owns the shared flow: camera open → capture-session creation → a
     * repeating preview for ~1 second so AE/AF can settle → a single still
     * capture. Per-caller frame handling is differentiated by [onFrame].
     *
     * Contract: [onFrame] must close the reader and the camera and complete
     * [result] itself. Every error path here closes resources and reports an
     * error through [result] on the main thread instead.
     */
    private fun openCameraAndCapture(
        result: MethodChannel.Result,
        onFrame: FrameProcessor,
    ) {
        startBackgroundThread()

        val cameraManager = context.getSystemService(Context.CAMERA_SERVICE) as CameraManager

        // First camera whose lens faces front.
        val cameraId = cameraManager.cameraIdList.firstOrNull { id ->
            val characteristics = cameraManager.getCameraCharacteristics(id)
            characteristics.get(CameraCharacteristics.LENS_FACING) == CameraCharacteristics.LENS_FACING_FRONT
        }

        if (cameraId == null) {
            returnError(result, "NO_CAMERA", "フロントカメラが見つかりません")
            return
        }

        val characteristics = cameraManager.getCameraCharacteristics(cameraId)
        val streamConfigMap = characteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP)

        if (streamConfigMap == null) {
            returnError(result, "NO_CONFIG", "カメラの設定を取得できません")
            return
        }

        // Largest available YUV output size = full sensor-resolution frame.
        val yuvSizes = streamConfigMap.getOutputSizes(ImageFormat.YUV_420_888)
        val maxSize = yuvSizes.maxByOrNull { it.width * it.height }

        if (maxSize == null) {
            returnError(result, "NO_SIZE", "YUV の解像度を取得できません")
            return
        }

        // maxImages = 2: one buffer in flight during preview, one for the still.
        val imageReader = ImageReader.newInstance(
            maxSize.width, maxSize.height, ImageFormat.YUV_420_888, 2
        )

        // Guards against delivering more than one frame to onFrame. Only read
        // and written on the background handler thread, so no extra
        // synchronization is needed.
        var resultSent = false

        try {
            cameraManager.openCamera(cameraId, object : CameraDevice.StateCallback() {
                override fun onOpened(camera: CameraDevice) {
                    cameraDevice = camera
                    try {
                        camera.createCaptureSession(
                            listOf(imageReader.surface),
                            object : CameraCaptureSession.StateCallback() {
                                override fun onConfigured(session: CameraCaptureSession) {
                                    try {
                                        // Phase 1: run a repeating preview so AE/AF can stabilize.
                                        val previewRequest = camera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW).apply {
                                            addTarget(imageReader.surface)
                                            set(CaptureRequest.CONTROL_AF_MODE, CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_PICTURE)
                                            set(CaptureRequest.FLASH_MODE, CaptureRequest.FLASH_MODE_OFF)
                                        }
                                        session.setRepeatingRequest(previewRequest.build(), null, backgroundHandler)

                                        // Phase 2: after 1 second, install the listener and take the real shot.
                                        backgroundHandler?.postDelayed({
                                            try {
                                                session.stopRepeating()

                                                // Drain any buffers queued during the preview phase.
                                                while (true) {
                                                    val stale = imageReader.acquireLatestImage()
                                                    if (stale != null) {
                                                        stale.close()
                                                    } else {
                                                        break
                                                    }
                                                }

                                                // Install the listener only now, so it receives the still frame only.
                                                imageReader.setOnImageAvailableListener({ reader ->
                                                    if (resultSent) return@setOnImageAvailableListener
                                                    val image = reader.acquireLatestImage() ?: return@setOnImageAvailableListener
                                                    resultSent = true

                                                    try {
                                                        val yPlane = image.planes[0]
                                                        val uPlane = image.planes[1]
                                                        val vPlane = image.planes[2]

                                                        val width = image.width
                                                        val height = image.height

                                                        // Copy plane data out so the Image can be closed before
                                                        // the potentially slow onFrame processing runs.
                                                        val yBytes = ByteArray(yPlane.buffer.remaining())
                                                        val uBytes = ByteArray(uPlane.buffer.remaining())
                                                        val vBytes = ByteArray(vPlane.buffer.remaining())

                                                        yPlane.buffer.get(yBytes)
                                                        uPlane.buffer.get(uBytes)
                                                        vPlane.buffer.get(vBytes)

                                                        val yRowStride = yPlane.rowStride
                                                        val uvRowStride = uPlane.rowStride
                                                        val uvPixelStride = uPlane.pixelStride

                                                        image.close()

                                                        onFrame(
                                                            yBytes, uBytes, vBytes,
                                                            width, height,
                                                            yRowStride, uvRowStride, uvPixelStride,
                                                            reader, camera, result
                                                        )
                                                    } catch (e: Exception) {
                                                        // NOTE(review): image may already have been closed above
                                                        // when onFrame threw — confirm a second Image.close() is
                                                        // harmless on all supported API levels.
                                                        image.close()
                                                        reader.close()
                                                        cleanup()
                                                        Handler(context.mainLooper).post {
                                                            result.error("PROCESS_ERROR", "画像データの処理に失敗: ${e.message}", null)
                                                        }
                                                    }
                                                }, backgroundHandler)

                                                // Fire the actual still capture.
                                                val captureRequest = camera.createCaptureRequest(CameraDevice.TEMPLATE_STILL_CAPTURE).apply {
                                                    addTarget(imageReader.surface)
                                                    set(CaptureRequest.CONTROL_AF_MODE, CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_PICTURE)
                                                    set(CaptureRequest.FLASH_MODE, CaptureRequest.FLASH_MODE_OFF)
                                                }
                                                session.capture(captureRequest.build(), null, backgroundHandler)
                                            } catch (e: Exception) {
                                                cleanup()
                                                imageReader.close()
                                                Handler(context.mainLooper).post {
                                                    result.error("CAPTURE_ERROR", "キャプチャに失敗: ${e.message}", null)
                                                }
                                            }
                                        }, 1000)
                                    } catch (e: Exception) {
                                        cleanup()
                                        imageReader.close()
                                        Handler(context.mainLooper).post {
                                            result.error("CAPTURE_ERROR", "キャプチャに失敗: ${e.message}", null)
                                        }
                                    }
                                }

                                override fun onConfigureFailed(session: CameraCaptureSession) {
                                    cleanup()
                                    imageReader.close()
                                    Handler(context.mainLooper).post {
                                        result.error("SESSION_ERROR", "カメラセッションの設定に失敗", null)
                                    }
                                }
                            },
                            backgroundHandler
                        )
                    } catch (e: Exception) {
                        cleanup()
                        imageReader.close()
                        Handler(context.mainLooper).post {
                            result.error("CAPTURE_ERROR", "キャプチャに失敗: ${e.message}", null)
                        }
                    }
                }

                override fun onDisconnected(camera: CameraDevice) {
                    // NOTE(review): result is never completed on this path, so the
                    // Dart side will await forever — consider reporting an error here.
                    camera.close()
                    cameraDevice = null
                    imageReader.close()
                    stopBackgroundThread()
                }

                override fun onError(camera: CameraDevice, error: Int) {
                    camera.close()
                    cameraDevice = null
                    imageReader.close()
                    Handler(context.mainLooper).post {
                        result.error("CAMERA_ERROR", "カメラエラー: $error", null)
                        stopBackgroundThread()
                    }
                }
            }, backgroundHandler)
        } catch (e: SecurityException) {
            // Camera permission missing (or revoked mid-flight).
            imageReader.close()
            returnError(result, "PERMISSION_ERROR", "カメラの権限がありません")
        }
    }

    /**
     * Encodes [bitmap] as a PNG whose IDAT payload is a stored
     * (Deflater.NO_COMPRESSION) zlib stream — large output, but fast to
     * produce. Emits the standard signature, IHDR, one IDAT, and IEND.
     *
     * @return the complete PNG file as a byte array (8-bit RGB, filter None).
     */
    private fun encodePngUncompressed(bitmap: Bitmap): ByteArray {
        val width = bitmap.width
        val height = bitmap.height
        val png = ByteArrayOutputStream(width * height * 3 + 1024)

        // 8-byte PNG signature.
        png.write(byteArrayOf(0x89.toByte(), 0x50, 0x4E, 0x47, 0x0D, 0x0A, 0x1A, 0x0A))

        // IHDR: dimensions plus 8-bit-per-channel truecolor RGB, no interlace.
        val header = ByteBuffer.allocate(13)
        header.putInt(width)
        header.putInt(height)
        header.put(8)  // bit depth
        header.put(2)  // color type 2 = truecolor RGB
        header.put(0)  // compression method (deflate)
        header.put(0)  // filter method
        header.put(0)  // interlace method (none)
        writeChunk(png, "IHDR", header.array())

        // IDAT: each scanline is a filter byte (0 = None) followed by RGB
        // triples, all wrapped in a NO_COMPRESSION zlib stream.
        val idat = ByteArrayOutputStream(width * height * 3 + height + 64)
        val deflater = Deflater(Deflater.NO_COMPRESSION)
        DeflaterOutputStream(idat, deflater, 65536).use { zlib ->
            val row = IntArray(width)
            val scanline = ByteArray(1 + width * 3)
            scanline[0] = 0  // filter: None
            for (y in 0 until height) {
                bitmap.getPixels(row, 0, width, 0, y, width, 1)
                var offset = 1
                for (pixel in row) {
                    scanline[offset++] = ((pixel ushr 16) and 0xFF).toByte()  // R
                    scanline[offset++] = ((pixel ushr 8) and 0xFF).toByte()   // G
                    scanline[offset++] = (pixel and 0xFF).toByte()            // B
                }
                zlib.write(scanline)
            }
            zlib.finish()
        }
        deflater.end()

        writeChunk(png, "IDAT", idat.toByteArray())
        writeChunk(png, "IEND", ByteArray(0))

        return png.toByteArray()
    }

    /// PNG チャンクを書き込む(length + type + data + CRC).
    private fun writeChunk(out: ByteArrayOutputStream, type: String, data: ByteArray) {
        val typeBytes = type.toByteArray(Charsets.US_ASCII)
        // length (4 bytes, big-endian)
        out.write(ByteBuffer.allocate(4).putInt(data.size).array())
        // type (4 bytes)
        out.write(typeBytes)
        // data
        out.write(data)
        // CRC (type + data)
        val crc = CRC32()
        crc.update(typeBytes)
        crc.update(data)
        out.write(ByteBuffer.allocate(4).putInt(crc.value.toInt()).array())
    }

    /**
     * Releases the camera (if one is open) and tears down the background
     * thread. Used by error paths; safe to call when nothing is open.
     */
    private fun cleanup() {
        cameraDevice?.let { device -> device.close() }
        cameraDevice = null
        stopBackgroundThread()
    }

    /**
     * Reports [code]/[message] back to Flutter and stops the background
     * thread. Used for failures detected before the camera is opened.
     */
    private fun returnError(result: MethodChannel.Result, code: String, message: String) {
        result.error(code, message, null)
        stopBackgroundThread()
    }

    /**
     * Starts the dedicated camera thread and creates a handler bound to its
     * looper. Must be called before any camera work is scheduled.
     */
    private fun startBackgroundThread() {
        val thread = HandlerThread("RawCaptureThread")
        thread.start()
        backgroundThread = thread
        backgroundHandler = Handler(thread.looper)
    }

    /**
     * Quits the camera thread (letting queued messages drain), waits up to
     * 3 seconds for it to finish, then clears both references. A no-op when
     * no thread is running.
     */
    private fun stopBackgroundThread() {
        backgroundThread?.let { thread ->
            thread.quitSafely()
            try {
                thread.join(3000)
            } catch (_: InterruptedException) {
                // Interrupted while waiting; proceed with teardown regardless.
            }
        }
        backgroundThread = null
        backgroundHandler = null
    }
}