
I'm developing a camera app based on Camera API 2 and I have found several problems using libyuv. I want to convert YUV_420_888 images retrieved from an ImageReader, but I'm having some problems with scaling in a reprocessable surface.

In essence: images come out with tones of green instead of the corresponding colors (I'm exporting the .yuv files and checking them using http://rawpixels.net/).

You can see an input example here: [input image]

And what I get after I perform scaling: [scaled output image]

I think I am doing something wrong with the strides, or providing an invalid YUV format (maybe I have to transform the image to another format?). However, I can't figure out where the error is, since I don't know how to correlate the green color with the scaling algorithm.

This is the conversion code I am using. You can ignore the return NULL, as there is further processing that is not related to the problem.

#include <jni.h>
#include <stdint.h>
#include <android/log.h>
#include <inc/libyuv/scale.h>
#include <inc/libyuv.h>
#include <stdio.h>


#define  LOG_TAG    "libyuv-jni"

#define unused(x) UNUSED_ ## x __attribute__((__unused__))
#define  LOGD(...)  __android_log_print(ANDROID_LOG_DEBUG, LOG_TAG, __VA_ARGS__)
#define  LOGE(...)  __android_log_print(ANDROID_LOG_ERROR, LOG_TAG, __VA_ARGS__)

struct YuvFrame {
    int width;
    int height;
    uint8_t *data;
    uint8_t *y;
    uint8_t *u;
    uint8_t *v;
};

static struct YuvFrame i420_input_frame;
static struct YuvFrame i420_output_frame;

extern "C" {

JNIEXPORT jbyteArray JNICALL
Java_com_android_camera3_camera_hardware_session_output_photo_yuv_YuvJniInterface_scale420YuvByteArray(
        JNIEnv *env, jclass /*clazz*/, jbyteArray yuvByteArray_, jint src_width, jint src_height,
        jint out_width, jint out_height) {

    jbyte *yuvByteArray = env->GetByteArrayElements(yuvByteArray_, NULL);

    //Get input and output length
    int input_size = env->GetArrayLength(yuvByteArray_);
    int out_size = out_height * out_width;

    //Generate input frame
    i420_input_frame.width = src_width;
    i420_input_frame.height = src_height;
    i420_input_frame.data = (uint8_t *) yuvByteArray;
    i420_input_frame.y = i420_input_frame.data;
    i420_input_frame.u = i420_input_frame.y + input_size;
    i420_input_frame.v = i420_input_frame.u + input_size / 4;

    //Generate output frame
    delete[] i420_output_frame.data;  // allocated with new[] below, so use delete[] (safe on null)
    i420_output_frame.width = out_width;
    i420_output_frame.height = out_height;
    i420_output_frame.data = new unsigned char[out_size * 3 / 2];
    i420_output_frame.y = i420_output_frame.data;
    i420_output_frame.u = i420_output_frame.y + out_size;
    i420_output_frame.v = i420_output_frame.u + out_size / 4;
    libyuv::FilterMode mode = libyuv::FilterModeEnum::kFilterBilinear;

    int result = I420Scale(i420_input_frame.y, i420_input_frame.width,
                           i420_input_frame.u, i420_input_frame.width / 2,
                           i420_input_frame.v, i420_input_frame.width / 2,
                           i420_input_frame.width, i420_input_frame.height,
                           i420_output_frame.y, i420_output_frame.width,
                           i420_output_frame.u, i420_output_frame.width / 2,
                           i420_output_frame.v, i420_output_frame.width / 2,
                           i420_output_frame.width, i420_output_frame.height,
                           mode);
    LOGD("Image result %d", result);
    env->ReleaseByteArrayElements(yuvByteArray_, yuvByteArray, 0);
    return NULL;
}
— Francisco Durdin Garcia

5 Answers


You can try this code, which uses y_size instead of the full size of your array:

    ...
    //Get input and output length
    int input_size = env->GetArrayLength(yuvByteArray_);
    int y_size = src_width * src_height;
    int out_size = out_height * out_width;

    //Generate input frame
    i420_input_frame.width = src_width;
    i420_input_frame.height = src_height;
    i420_input_frame.data = (uint8_t *) yuvByteArray;
    i420_input_frame.y = i420_input_frame.data;
    i420_input_frame.u = i420_input_frame.y + y_size;
    i420_input_frame.v = i420_input_frame.u + y_size / 4;

    //Generate output frame
    delete[] i420_output_frame.data;
    i420_output_frame.width = out_width;
    i420_output_frame.height = out_height;
    i420_output_frame.data = new unsigned char[out_size * 3 / 2];
    i420_output_frame.y = i420_output_frame.data;
    i420_output_frame.u = i420_output_frame.y + out_size;
    i420_output_frame.v = i420_output_frame.u + out_size / 4;
    ...

Your code is probably based on https://github.com/begeekmyfriend/yasea/blob/master/library/src/main/libenc/jni/libenc.cc, and according to that code you have to use y_size.

— gmetax

You have an issue with the input size of the frame:

It should be:

int input_array_size = env->GetArrayLength(yuvByteArray_);
int input_size = input_array_size * 2 / 3; //This is the frame size

For example, if you have a frame that is 6x4:

Channel Y size: 6*4 = 24

 1 2 3 4 5 6
 _ _ _ _ _ _
|_|_|_|_|_|_| 1
|_|_|_|_|_|_| 2
|_|_|_|_|_|_| 3
|_|_|_|_|_|_| 4

Channel U size: 3*2 = 6

  1   2   3 
 _ _ _ _ _ _
|   |   |   | 
|_ _|_ _|_ _| 1
|   |   |   | 
|_ _|_ _|_ _| 2

Channel V size: 3*2 = 6

  1   2   3 
 _ _ _ _ _ _
|   |   |   | 
|_ _|_ _|_ _| 1
|   |   |   | 
|_ _|_ _|_ _| 2

Array size = 6*4 + 3*2 + 3*2 = 36
But actual frame size = channel Y size = 36 * 2 / 3 = 24
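
To make the arithmetic concrete, here is a minimal sketch (assuming a tightly packed planar I420 buffer; the struct and function names are illustrative, not from the OP's code) of how the plane offsets follow from the frame dimensions:

#include <stdint.h>

// For a tightly packed I420 buffer of width x height: Y is width*height
// bytes, U and V are (width/2)*(height/2) bytes each, so the whole buffer
// is width*height*3/2 bytes.
struct I420Planes {
    uint8_t *y;
    uint8_t *u;
    uint8_t *v;
};

I420Planes i420_planes(uint8_t *data, int width, int height) {
    const int y_size = width * height;
    I420Planes planes;
    planes.y = data;                  // Y starts at the beginning
    planes.u = planes.y + y_size;     // U follows the full-size Y plane
    planes.v = planes.u + y_size / 4; // V follows the quarter-size U plane
    return planes;
}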

— Rama
  • So you are suggesting to him to use the same size (the Y size) that we have already suggested (me and Dave)? – gmetax Apr 04 '17 at 13:36
  • @gmetax I found it important to note that the actual frame size should be used, and to provide a way to calculate it from the size of the array. The method you propose to calculate it is different, but which solution is correct depends on the needs of the OP. Be patient. – Rama Apr 04 '17 at 14:05

gmetax is almost correct.

You are using the size of the entire array where you should be using the size of the Y component, which is src_width * src_height.

gmetax's answer is wrong in that he has put y_size in place of out_size when defining the output frame. The correct code snippet, I believe, would look like:

//Get input and output length
int input_size = env->GetArrayLength(yuvByteArray_);
int y_size = src_width * src_height;
int out_size = out_height * out_width;

//Generate input frame
i420_input_frame.width = src_width;
i420_input_frame.height = src_height;
i420_input_frame.data = (uint8_t *) yuvByteArray;
i420_input_frame.y = i420_input_frame.data;
i420_input_frame.u = i420_input_frame.y + y_size;
i420_input_frame.v = i420_input_frame.u + y_size / 4;

//Generate output frame
delete[] i420_output_frame.data;
i420_output_frame.width = out_width;
i420_output_frame.height = out_height;
i420_output_frame.data = new unsigned char[out_size * 3 / 2];
i420_output_frame.y = i420_output_frame.data;
i420_output_frame.u = i420_output_frame.y + out_size;
i420_output_frame.v = i420_output_frame.u + out_size / 4;
— Dave

You are trying to scale your YUV422 image as if it were YUV420, so no wonder the colors are all messed up. First of all, you need to figure out what exactly the format of your YUV input buffer is. From the documentation of YUV_422_888 it looks like it may represent planar as well as interleaved formats (if the pixel stride is not 1). From your results it looks like your source is planar and the processing of the Y plane is fine, but your error is in the handling of the U and V planes. To get the scaling right:

  • You have to figure out whether your U and V planes are interleaved or planar. Most likely they are planar as well.
  • Use ScalePlane from libyuv to scale U and V separately. If you step into I420Scale you'll find that it calls ScalePlane for the individual planes. Do the same, but use the correct dimensions for your U and V planes (in 4:2:2 each chroma plane has the same stride but twice as many rows as I420Scale expects); see the sketch after this list.
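
For example, here is a minimal sketch of scaling a single chroma plane with libyuv::ScalePlane (the function and buffer names are mine, and the planar 4:2:2 layout is an assumption; adjust the plane dimensions to your actual format):

#include <stdint.h>
#include "libyuv/scale.h"

void scale_u_plane_422(const uint8_t *src_u, int src_width, int src_height,
                       uint8_t *dst_u, int dst_width, int dst_height) {
    // For planar 4:2:2, the U plane is (width / 2) x height: same stride
    // as 4:2:0, but twice as many rows.
    libyuv::ScalePlane(src_u, src_width / 2,        // source plane and stride
                       src_width / 2, src_height,   // source plane dimensions
                       dst_u, dst_width / 2,        // destination plane and stride
                       dst_width / 2, dst_height,   // destination plane dimensions
                       libyuv::kFilterBilinear);
}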

Some tips on how to figure out whether you have planar or interleaved U and V: try skipping the scaling and saving the image, to make sure you get a correct result (identical to the source). Then try zeroing out the U plane or the V plane and see what you get. If U and V are planar and you memset the U plane to zero, you should see the entire picture change color. If they are interleaved, you'll get half of the picture changing and the other half staying the same. The same way, you can check your assumptions about the sizes, linesizes, and offsets of your planes. Once you are sure about your YUV format and layout, you can scale the individual planes if your input is planar; if you have interleaved input, you first need to deinterleave the planes and then scale them.
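
A minimal sketch of that diagnostic (the function and buffer names are illustrative; the size assumes planar I420, where the U plane is (width/2) x (height/2) bytes):

#include <stdint.h>
#include <string.h>

// Zero out what you believe is the U plane, then save and inspect the image.
// If the chroma is planar, the whole picture shifts color; if it is
// interleaved, only part of the picture changes.
void zero_assumed_u_plane(uint8_t *u_plane, int width, int height) {
    memset(u_plane, 0, (width / 2) * (height / 2));
}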

Alternatively, you can use libswscale from ffmpeg/libav and try different formats to find the correct one, and then use libyuv.

— Pavel P

The green images were caused by one of the planes being full of zeros, which means that plane was empty. This happened because I was treating the data as YUV I420 when it was actually YUV NV21: the images from the Android camera framework come as NV21 YUVs.

We need to convert them to YUV I420 to work properly with libyuv. After that we can start using the multiple operations that the library offers, like rotating, scaling, etc.
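
As a minimal sketch of that conversion (the function and buffer names are mine, not from the app), libyuv::NV21ToI420 takes the Y plane and the interleaved VU plane and writes out the three separate I420 planes:

#include <stdint.h>
#include "libyuv/convert.h"

// Convert an NV21 frame (full-size Y plane followed by an interleaved VU
// plane) into planar I420 so libyuv's scale/rotate functions can operate
// on it. Returns true on success.
bool nv21_to_i420(const uint8_t *src_y, const uint8_t *src_vu,
                  uint8_t *dst_y, uint8_t *dst_u, uint8_t *dst_v,
                  int width, int height) {
    return libyuv::NV21ToI420(src_y, width,    // Y plane, stride = width
                              src_vu, width,   // interleaved VU plane, stride = width
                              dst_y, width,
                              dst_u, width / 2,
                              dst_v, width / 2,
                              width, height) == 0;
}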

Here is a snippet showing how the scaling method looks:

JNIEXPORT jint JNICALL
Java_com_aa_project_images_yuv_myJNIcl_scaleI420(JNIEnv *env, jclass type,
                                                 jobject srcBufferY,
                                                 jobject srcBufferU,
                                                 jobject srcBufferV,
                                                 jint srcWidth, jint srcHeight,
                                                 jobject dstBufferY,
                                                 jobject dstBufferU,
                                                 jobject dstBufferV,
                                                 jint dstWidth, jint dstHeight,
                                                 jint filterMode) {

    const uint8_t *srcY = static_cast<uint8_t *>(env->GetDirectBufferAddress(srcBufferY));
    const uint8_t *srcU = static_cast<uint8_t *>(env->GetDirectBufferAddress(srcBufferU));
    const uint8_t *srcV = static_cast<uint8_t *>(env->GetDirectBufferAddress(srcBufferV));
    uint8_t *dstY = static_cast<uint8_t *>(env->GetDirectBufferAddress(dstBufferY));
    uint8_t *dstU = static_cast<uint8_t *>(env->GetDirectBufferAddress(dstBufferU));
    uint8_t *dstV = static_cast<uint8_t *>(env->GetDirectBufferAddress(dstBufferV));

    return libyuv::I420Scale(srcY, srcWidth,
                             srcU, srcWidth / 2,
                             srcV, srcWidth / 2,
                             srcWidth, srcHeight,
                             dstY, dstWidth,
                             dstU, dstWidth / 2,
                             dstV, dstWidth / 2,
                             dstWidth, dstHeight,
                             static_cast<libyuv::FilterMode>(filterMode));
}
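
Note that env->GetDirectBufferAddress only returns a valid pointer for direct ByteBuffers (allocated with ByteBuffer.allocateDirect on the Java side); for non-direct buffers it returns NULL, so the caller must pass a direct buffer for each plane.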
— Francisco Durdin Garcia