2

I've got some code below .. it converts YUV_420_888 (android camera2 api) image format to ARGB8888 and saves it to a jpeg .. however the result is almost completely green .. any idea how to get this to work as expected??

private ByteBuffer image2ByteBuffer(Image image) {
    Image.Plane Y = image.getPlanes()[0];
    Image.Plane U = image.getPlanes()[1];
    Image.Plane V = image.getPlanes()[2];

    int yRowStride = Y.getRowStride();
    int yPixelStride = Y.getPixelStride();
    int uRowStride = U.getRowStride();
    int uPixelStride = U.getPixelStride();
    int vRowStride = V.getRowStride();
    int vPixelStride = V.getPixelStride();


    ByteBuffer YBuffer = Y.getBuffer();
    ByteBuffer UBuffer = U.getBuffer();
    ByteBuffer VBuffer = V.getBuffer();

    YBuffer.rewind();
    UBuffer.rewind();
    VBuffer.rewind();

    int yb, ub, vb;
    yb = YBuffer.capacity();
    ub = UBuffer.capacity();
    vb = VBuffer.capacity();

    byte[] ybb, ubb,vbb;
    ybb = new byte[yb];
    ubb = new byte[ub];
    vbb = new byte[vb];

    YBuffer.get(ybb);
    UBuffer.get(ubb);
    VBuffer.get(vbb);

    byte[] yData = ybb;
    byte[] uData = new byte[yb/4];
    byte[] vData = new byte[yb/4];

    int index = 0;
    for(int i = 0; (i < (640 * 480 * 0.5)) && (index < (640 * 480 * 0.25)); i += uPixelStride){
        uData[index] = ubb[i];
        vData[index] = vbb[i];
        index++;
    }

    byte[] yuv420sp = new byte[(int)(640 * 480 * 1.5)];

    // Y DATA FIRST

    for (int i = 0 ; i < 640 * 480; i++)
        yuv420sp[i] = yData[i];

    //UV INTERLEAVED DATA NEXT

    index = 640 * 480;
    for (int i = 0; i < (640 * 480 * 0.25); i++) {
        yuv420sp[index] = uData[i];
        index++;
        yuv420sp[index] = vData[i];
        index++;
    }



    int[] decoded = new int[640 * 480];
    YUV420Converter.decodeYUV420888(decoded, yuv420sp, 640, 480);
    Bitmap bitmap = Bitmap.createBitmap(decoded, 640, 480, Bitmap.Config.ARGB_8888);
    ByteBuffer byteBuffer = ByteBuffer.allocate(640 * 480 * 4);
    bitmap.copyPixelsToBuffer(byteBuffer);
    bitmap.recycle();
    return byteBuffer;
}

The next bit of code is from the imagereader

    public void onImageAvailable(ImageReader reader) {

        Image image = reader.acquireLatestImage();

        ByteBuffer byteBuffer = image2ByteBuffer(image);

        saveImage(byteBuffer);
        byteBuffer.clear();
        image.close();
    }

    public void saveImage(ByteBuffer imageByteBuffer) {
        try {
            imageByteBuffer.rewind();
            Bitmap imageBitmap = Bitmap.createBitmap(WIDTH, HEIGHT, Bitmap.Config.ARGB_8888);
            imageBitmap.copyPixelsFromBuffer(imageByteBuffer);
            String filePath = Environment.getExternalStorageDirectory().getPath()
                    + "/pic" + mImagesCount + ".png";
            FileOutputStream fileOutputStream = new FileOutputStream(filePath);

            imageBitmap.compress(Bitmap.CompressFormat.PNG, 100, fileOutputStream);

            //fileOutputStream.write(imageByteArray);
            fileOutputStream.flush();
            fileOutputStream.close();
            imageBitmap.recycle();
        } catch (FileNotFoundException e) {
            e.printStackTrace();
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

Used to decode to ARGB once we have a proper formatted YUV

public static void decodeYUV420888(int[] rgb, byte[] yuv420sp, int width, int height) {

    final int frameSize = width * height;

    for (int j = 0, yp = 0; j < height; j++) {       int uvp = frameSize + (j >> 1) * width, u = 0, v = 0;
        for (int i = 0; i < width; i++, yp++) {
            int y = (0xff & ((int) yuv420sp[yp])) - 16;
            if (y < 0)
                y = 0;
            if ((i & 1) == 0) {
                v = (0xff & yuv420sp[uvp++]) - 128;
                u = (0xff & yuv420sp[uvp++]) - 128;
            }

            int y1192 = 1192 * y;
            int b = (y1192 + 1634 * v);
            int g = (y1192 - 833 * v - 400 * u);
            int r = (y1192 + 2066 * u);

            if (r < 0)                  r = 0;               else if (r > 262143)
                r = 262143;
            if (g < 0)                  g = 0;               else if (g > 262143)
                g = 262143;
            if (b < 0)                  b = 0;               else if (b > 262143)
                b = 262143;

            rgb[yp] = 0xff000000 | ((r << 6) & 0xff0000) | ((g >> 2) & 0xff00) | ((b >> 10) & 0xff);
        }
    }
}
Arjun
  • 306
  • 3
  • 18
  • To follow up on this .. I implemented the answer discussed [here](http://stackoverflow.com/questions/36212904/yuv-420-888-interpretation-on-samsung-galaxy-s7-camera2) – Arjun Oct 17 '16 at 17:04

2 Answers2

1

Apparently there is an issue with API 21, but this is resolved in API 22.
I found this in the following post: yuv_420_888-to-rgb
Another excellent post about this topic can be found here: Solving some android problems

Hope this helps !

Hibbem
  • 1,317
  • 13
  • 20
0

Just to close this up.. I am working with the Samsung Note 5 and have implemented the solution in the link below

YUV_420_888 interpretation on Samsung Galaxy S7 (Camera2)

UPDATE:

Just incase someone stumbles upon this ... here is a renderscript example of converting yuv420sp (NV21) images to RGBA.

https://www.rounds.com/blog/unleash-power-gpu-renderscript/

Combined, these both solve Camera API levels 1/2 YUV_XX -> RGBA problems. Happy Coding !

Community
  • 1
  • 1
Arjun
  • 306
  • 3
  • 18