使用libyuv库缩放YUV图像时出现的问题 [英] Problems when scaling a YUV image using libyuv library
问题描述
我正在开发基于 Camera API 2
的相机应用程序,我发现使用
我执行缩放后得到的结果:
我认为我在步幅方面做错了,或者提供了无效的YUV格式(也许我必须将图像转换为另一种格式?)。但是,我无法弄清楚错误在哪里,因为我不知道如何将绿色与缩放算法相关联。
这是转换代码I在使用时,你可以忽略返回NULL,因为还有与问题无关的进一步处理。
#include< jni.h>
#include< stdint.h>
#include< android / log.h>
#include< inc / libyuv / scale.h>
#include< inc / libyuv.h>
#include< stdio.h>
#define LOG_TAGlibyuv-jni
#define unused(x)UNUSED_ ## x __attribute __((__ unused__))
#define LOGD(...)__ android_log_print(ANDROID_LOG_DEBUG,LOG_TAG,__ VA_ARGS__)
#define LOGE(...)__ android_log_print(ANDROID_LOG_ERROR,LOG_TAG,__ VA_ARGS_)
struct YuvFrame {
int宽度;
int height;
uint8_t * data;
uint8_t * y;
uint8_t * u;
uint8_t * v;
};
static struct YuvFrame i420_input_frame;
static struct YuvFrame i420_output_frame;
externC{
JNIEXPORT jbyteArray JNICALL
Java_com_android_camera3_camera_hardware_session_output_photo_yuv_YuvJniInterface_scale420YuvByteArray(
JNIEnv * env,jclass / * clazz * /,jbyteArray yuvByteArray_,jint src_width ,jint src_height,
jint out_width,jint out_height){
jbyte * yuvByteArray = env-> GetByteArrayElements(yuvByteArray_,NULL);
//获取输入和输出长度
int input_size = env-> GetArrayLength(yuvByteArray_);
int out_size = out_height * out_width;
//生成输入框
i420_input_frame.width = src_width;
i420_input_frame.height = src_height;
i420_input_frame.data =(uint8_t *)yuvByteArray;
i420_input_frame.y = i420_input_frame.data;
i420_input_frame.u = i420_input_frame.y + input_size;
i420_input_frame.v = i420_input_frame.u + input_size / 4;
//生成输出框$ b $ b free(i420_output_frame.data);
i420_output_frame.width = out_width;
i420_output_frame.height = out_height;
i420_output_frame.data = new unsigned char [out_size * 3/2];
i420_output_frame.y = i420_output_frame.data;
i420_output_frame.u = i420_output_frame.y + out_size;
i420_output_frame.v = i420_output_frame.u + out_size / 4;
libyuv :: FilterMode mode = libyuv :: FilterModeEnum :: kFilterBilinear;
int result = I420Scale(i420_input_frame.y,i420_input_frame.width,
i420_input_frame.u,i420_input_frame.width / 2,
i420_input_frame.v,i420_input_frame.width / 2,
i420_input_frame.width,i420_input_frame.height,
i420_output_frame.y,i420_output_frame.width,
i420_output_frame.u,i420_output_frame.width / 2,
i420_output_frame.v,i420_output_frame.width / 2 ,
i420_output_frame.width,i420_output_frame.height,
mode);
LOGD(图像结果%d,结果);
env-> ReleaseByteArrayElements(yuvByteArray_,yuvByteArray,0);
返回NULL;
}
你有一个问题框架的输入大小:
它应该是:
int input_array_size = env-> GetArrayLength(yuvByteArray_);
int input_size = input_array_size * 2/3; //这是帧大小
例如,如果你有一个6x4的帧
香奈儿 y 尺寸:6 * 4 = 24
1 2 3 4 5 6
_ _ _ _ _ _
| _ | _ | _ | _ | _ | _ | 1
| _ | _ | _ | _ | _ | _ | 2
| _ | _ | _ | _ | _ | _ | 3
| _ | _ | _ | _ | _ | _ | 4
香奈儿你尺寸:3 * 2 = 6
1 2 3
_ _ _ _ _ _
| | | |
| _ _ | _ _ | _ _ | 1
| | | |
| _ _ | _ _ | _ _ | 2
香奈儿 v 尺寸:3 * 2 = 6
1 2 3
_ _ _ _ _ _
| | | |
| _ _ | _ _ | _ _ | 1
| | | |
| _ _ | _ _ | _ _ | 2
数组大小= 6 * 4 + 3 * 2 + 3 * 2 = 36
但实际帧大小=频道 y 大小= 36 * 2/3 = 24
I'm developing a camera app based on Camera API 2
and I have found several problems using the libyuv.
I want to convert YUV_420_888
images retrieved from a ImageReader, but I'm having some problems with scaling in a reprocessable surface.
In essence: Images come out with tones of green instead of having the corresponding tones (I'm exporting the .yuv files and checking them using http://rawpixels.net/).
You can see an input example here:
And what I get after I perform scaling:
I think I am doing something wrong with strides, or providing an invalid YUV format (maybe I have to transform the image to another format?). However, I can't figure out where is the error since I don't know how to correlate the green color to the scaling algorithm.
This is the conversion code I am using, you can ignore the return NULL as there is further processing that is not related to the problem.
#include <jni.h>
#include <stdint.h>
#include <android/log.h>
#include <inc/libyuv/scale.h>
#include <inc/libyuv.h>
#include <stdio.h>
#define LOG_TAG "libyuv-jni"
#define unused(x) UNUSED_ ## x __attribute__((__unused__))
#define LOGD(...) __android_log_print(ANDROID_LOG_DEBUG, LOG_TAG, __VA_ARGS__)
#define LOGE(...) __android_log_print(ANDROID_LOG_ERROR, LOG_TAG, __VA_ARGS_)
struct YuvFrame {
int width;
int height;
uint8_t *data;
uint8_t *y;
uint8_t *u;
uint8_t *v;
};
static struct YuvFrame i420_input_frame;
static struct YuvFrame i420_output_frame;
extern "C" {
JNIEXPORT jbyteArray JNICALL
Java_com_android_camera3_camera_hardware_session_output_photo_yuv_YuvJniInterface_scale420YuvByteArray(
JNIEnv *env, jclass /*clazz*/, jbyteArray yuvByteArray_, jint src_width, jint src_height,
jint out_width, jint out_height) {
jbyte *yuvByteArray = env->GetByteArrayElements(yuvByteArray_, NULL);
//Get input and output length
int input_size = env->GetArrayLength(yuvByteArray_);
int out_size = out_height * out_width;
//Generate input frame
i420_input_frame.width = src_width;
i420_input_frame.height = src_height;
i420_input_frame.data = (uint8_t *) yuvByteArray;
i420_input_frame.y = i420_input_frame.data;
i420_input_frame.u = i420_input_frame.y + input_size;
i420_input_frame.v = i420_input_frame.u + input_size / 4;
//Generate output frame
free(i420_output_frame.data);
i420_output_frame.width = out_width;
i420_output_frame.height = out_height;
i420_output_frame.data = new unsigned char[out_size * 3 / 2];
i420_output_frame.y = i420_output_frame.data;
i420_output_frame.u = i420_output_frame.y + out_size;
i420_output_frame.v = i420_output_frame.u + out_size / 4;
libyuv::FilterMode mode = libyuv::FilterModeEnum::kFilterBilinear;
int result = I420Scale(i420_input_frame.y, i420_input_frame.width,
i420_input_frame.u, i420_input_frame.width / 2,
i420_input_frame.v, i420_input_frame.width / 2,
i420_input_frame.width, i420_input_frame.height,
i420_output_frame.y, i420_output_frame.width,
i420_output_frame.u, i420_output_frame.width / 2,
i420_output_frame.v, i420_output_frame.width / 2,
i420_output_frame.width, i420_output_frame.height,
mode);
LOGD("Image result %d", result);
env->ReleaseByteArrayElements(yuvByteArray_, yuvByteArray, 0);
return NULL;
}
You have an issue with the input size of the frame:
It should be:
int input_array_size = env->GetArrayLength(yuvByteArray_);
int input_size = input_array_size * 2 / 3; //This is the frame size
For example, If you have a Frame that is 6x4
Chanel y size: 6*4 = 24
1 2 3 4 5 6
_ _ _ _ _ _
|_|_|_|_|_|_| 1
|_|_|_|_|_|_| 2
|_|_|_|_|_|_| 3
|_|_|_|_|_|_| 4
Chanel u size: 3*2 = 6
1 2 3
_ _ _ _ _ _
| | | |
|_ _|_ _|_ _| 1
| | | |
|_ _|_ _|_ _| 2
Chanel v size: 3*2 = 6
1 2 3
_ _ _ _ _ _
| | | |
|_ _|_ _|_ _| 1
| | | |
|_ _|_ _|_ _| 2
Array Size = 6*4+3*2+3*2 = 36
But actual Frame Size = channel y Size = 36 * 2 / 3 = 24
这篇关于使用libyuv库缩放YUV图像时出现的问题的文章就介绍到这了,希望我们推荐的答案对大家有所帮助,也希望大家多多支持IT屋!