【JNI】 Android调用JNI的进阶实例(摄像头预览数据转码RGB播放)
2016-08-26 14:58
1271 查看
前一篇的博文介绍了:Android调用JNI的简单实例(附详细步骤),现在带来一个进阶版的,虽然时间隔得有点久远。
这里要说下,尽量不要用Java写编解码的东西,就算你是大神,你写的出来,但那也是不实用的,就像切西瓜一样,拿一把削水果刀去切西瓜,肯定比不上用西瓜刀方便吧,还是老老实实写个JNI调用得了,也不复杂C/C++方便的很,当然,这里不是说Java不行,语言只是工具,做什么事情用什么语言,没必要硬着头皮往上顶对吧。纯属个人观点,大神可以无视。
好了进入正题,该实例主要内容:开启摄像头预览,将获取到的视频帧YUV数据,通过JNI调用C的转码函数转为RGB类型数据然后返回,在自定义控件上绘制播放。
1、工程结构:
该工程是在之前的SimpleJni实例上进行修改的,简单描述一下功能:
CameraEngineActivity主界面,用于加载控件及渲染等;
CameraView摄像头预览控件,用于预览及捕获视频帧YUV数据;
ImageUtilEngine声明调用C函数的接口类,声明native的C函数;
SporeRender渲染画面类,用于绘制图像;
Texture2D图像优化类,用于优化图像纹理;
Demo地址:
2、新建java调用C函数的接口类
[java] view
plain copy
print?
package com.eric.complexjni;
/*
*@author Eric
*@2015-12-7下午4:35:18
*/
/** Interface class declaring the native YUV420SP -> RGB decode function. */
public class ImageUtilEngine {
    static {
        // BUG FIX: the library name must match LOCAL_MODULE in Android.mk
        // ("SimpleJni" -> libSimpleJni.so); an empty name can never load.
        System.loadLibrary("SimpleJni");
    }

    /**
     * Decode one NV21 (YUV420SP) camera preview frame into packed
     * 0xAARRGGBB pixels, one int per pixel.
     */
    public native int[] decodeYUV420SP(byte[] buf, int width, int heigth);
}
3、编译该接口类的头文件.h
命令窗口:Win+R运行cmd,cd进入到eclipse工作空间中ComplexJni工程目录,
输入javah -classpath bin/classes -d jni com.eric.complexjni.ImageUtilEngine编译接口类
编译完成后,刷新工程,就可以看到工程中自动创建了jni文件夹,其中包含编译好的.h头文件
4、在jni目录下新建com_eric_complexjni_ImageUtilEngine.c,实现上一步生成的.h头文件中声明的函数
[cpp] view
plain copy
print?
#include <jni.h>
#include <math.h>
#include <stdlib.h>
#include <string.h>
#include <sys/ioctl.h>
#include <android/bitmap.h>
#include <android/log.h>
#include <com_eric_complexjni_ImageUtilEngine.h>
#define LOG_TAG "Spore.meitu"
#define LOGI(...) __android_log_print(ANDROID_LOG_INFO,LOG_TAG,__VA_ARGS__)
#define LOGE(...) __android_log_print(ANDROID_LOG_ERROR,LOG_TAG,__VA_ARGS__)
/* Return the smaller of two integers. */
int min(int x, int y) {
    if (y < x) {
        return y;
    }
    return x;
}
/* Return the larger of two integers. */
int max(int x, int y) {
    if (y > x) {
        return y;
    }
    return x;
}
/* Alpha channel (bits 24-31) of a packed ARGB color. */
int alpha(int color) {
    int shifted = color >> 24;
    return shifted & 0xFF;
}
/* Red channel (bits 16-23) of a packed ARGB color. */
int red(int color) {
    int shifted = color >> 16;
    return shifted & 0xFF;
}
/* Green channel (bits 8-15) of a packed ARGB color. */
int green(int color) {
    int shifted = color >> 8;
    return shifted & 0xFF;
}
/* Blue channel (bits 0-7) of a packed ARGB color. */
int blue(int color) {
    return 0xFF & color;
}
/* Pack four 8-bit channels into one 0xAARRGGBB int. */
int ARGB(int alpha, int red, int green, int blue) {
    int packed = blue;
    packed |= green << 8;
    packed |= red << 16;
    packed |= alpha << 24;
    return packed;
}
#include <unistd.h>
#include <stdio.h>
#include <fcntl.h>
#include <linux/fb.h>
#include <sys/mman.h>
/* Pack 8-bit R, G, B components into a 16-bit RGB565 pixel. */
inline static unsigned short int make16color(unsigned char r, unsigned char g, unsigned char b)
{
    unsigned int red5 = (r >> 3) & 31;   /* top 5 bits of red   */
    unsigned int green6 = (g >> 2) & 63; /* top 6 bits of green */
    unsigned int blue5 = (b >> 3) & 31;  /* top 5 bits of blue  */
    return (unsigned short int) ((red5 << 11) | (green6 << 5) | blue5);
}
/*
 * Draw a diagnostic test pattern (a bordered horizontal gradient bar)
 * directly into the Linux framebuffer device /dev/graphics/fb0.
 * Demo code only; the call site in initTable() is commented out.
 *
 * Returns 0 on success; calls exit() with a nonzero code on any failure.
 */
int framebuffer_main()
{
    LOGI("framebuffer code");
    int fbfd = -1;
    struct fb_var_screeninfo vinfo;
    struct fb_fix_screeninfo finfo;
    long int screensize = 0;
    char *fbp = 0;
    int x = 0, y = 0;
    int gauge_height = 20, step = 10; /* bar thickness and side margin */
    long int location = 0;

    /* Open the device for reading and writing.
     * BUG FIX: open() returns -1 on failure (0 is a valid descriptor),
     * so the check must be fbfd < 0, not !fbfd. */
    LOGI("framebuffer code 1");
    fbfd = open("/dev/graphics/fb0", O_RDWR);
    LOGI("framebuffer code 2");
    if (fbfd < 0) {
        LOGI("Error: cannot open framebuffer device.\n");
        exit(1);
    }
    LOGI("The framebuffer device was opened successfully.\n");

    /* Fixed screen information (line_length in bytes, etc.). */
    if (ioctl(fbfd, FBIOGET_FSCREENINFO, &finfo)) {
        LOGI("Error reading fixed information.\n");
        exit(2);
    }
    /* Variable screen information (resolution, bpp, panning offsets). */
    if (ioctl(fbfd, FBIOGET_VSCREENINFO, &vinfo)) {
        LOGI("Error reading variable information.\n");
        exit(3);
    }
    /* BUG FIX: sizeof yields size_t; cast to int to match the %d format. */
    LOGI("sizeof(unsigned short) = %d\n", (int) sizeof(unsigned short));
    LOGI("%dx%d, %dbpp\n", vinfo.xres, vinfo.yres, vinfo.bits_per_pixel);
    LOGI("xoffset:%d, yoffset:%d, line_length: %d\n", vinfo.xoffset,
            vinfo.yoffset, finfo.line_length);

    /* Size of the visible screen in bytes. */
    screensize = vinfo.xres * vinfo.yres * vinfo.bits_per_pixel / 8;

    /* Map the device into our address space.
     * BUG FIX: mmap() signals failure with MAP_FAILED; comparing the
     * pointer cast through int against -1 is not portable. */
    fbp = (char *) mmap(0, screensize, PROT_READ | PROT_WRITE, MAP_SHARED,
            fbfd, 0);
    if (fbp == MAP_FAILED) {
        LOGI("Error: failed to map framebuffer device to memory.\n");
        exit(4);
    }
    LOGI("The framebuffer device was mapped to memory successfully.\n");

    /* Clear the whole screen to black first. */
    memset(fbp, 0, screensize);

    /* Border: top horizontal line, 2px above the bar.
     * NOTE(review): the border writes a 16-bit value regardless of bpp;
     * on a 32bpp display this only touches half of each pixel — confirm
     * intended, as the fill loop below does handle 32bpp separately. */
    y = (vinfo.yres - gauge_height) / 2 - 2;
    for (x = step - 2; x < vinfo.xres - step + 2; x++) {
        location = (x + vinfo.xoffset) * (vinfo.bits_per_pixel / 8)
                + (y + vinfo.yoffset) * finfo.line_length;
        *((unsigned short int *) (fbp + location)) = 255;
    }
    /* Border: bottom horizontal line, 2px below the bar. */
    y = (vinfo.yres + gauge_height) / 2 + 2;
    for (x = step - 2; x < vinfo.xres - step + 2; x++) {
        location = (x + vinfo.xoffset) * (vinfo.bits_per_pixel / 8)
                + (y + vinfo.yoffset) * finfo.line_length;
        *((unsigned short int *) (fbp + location)) = 255;
    }
    /* Border: left vertical line. */
    x = step - 2;
    for (y = (vinfo.yres - gauge_height) / 2 - 2;
            y < (vinfo.yres + gauge_height) / 2 + 2; y++) {
        location = (x + vinfo.xoffset) * (vinfo.bits_per_pixel / 8)
                + (y + vinfo.yoffset) * finfo.line_length;
        *((unsigned short int *) (fbp + location)) = 255;
    }
    /* Border: right vertical line. */
    x = vinfo.xres - step + 2;
    for (y = (vinfo.yres - gauge_height) / 2 - 2;
            y < (vinfo.yres + gauge_height) / 2 + 2; y++) {
        location = (x + vinfo.xoffset) * (vinfo.bits_per_pixel / 8)
                + (y + vinfo.yoffset) * finfo.line_length;
        *((unsigned short int *) (fbp + location)) = 255;
    }

    /* Fill the bar column by column so the sweep is visible. */
    for (x = step; x < vinfo.xres - step; x++) {
        for (y = (vinfo.yres - gauge_height) / 2;
                y < (vinfo.yres + gauge_height) / 2; y++) {
            location = (x + vinfo.xoffset) * (vinfo.bits_per_pixel / 8)
                    + (y + vinfo.yoffset) * finfo.line_length;
            if (vinfo.bits_per_pixel == 32) {
                *(fbp + location) = 100;                     /* blue  */
                *(fbp + location + 1) = 15 + (x - 100) / 2;  /* green */
                *(fbp + location + 2) = 200 - (y - 100) / 5; /* red   */
                *(fbp + location + 3) = 0;                   /* alpha */
            } else { /* assume 16bpp RGB565 */
                unsigned char b = 255 * x / (vinfo.xres - step);
                unsigned char g = 255;
                unsigned char r = 255;
                unsigned short int t = make16color(r, g, b);
                *((unsigned short int *) (fbp + location)) = t;
            }
        }
        usleep(200); /* brief pause per column so the sweep is visible */
    }

    /* Unmap and close the device. */
    munmap(fbp, screensize);
    close(fbfd);
    return 0;
}
/* Fixed-point YUV->RGB 1-D lookup tables: per-channel contributions. */
int r_v_table[256], g_v_table[256], g_u_table[256], b_u_table[256], y_table[256];
/* 2-D tables: pre-clamped (0..262143) Y+V red term and Y+U blue term. */
int r_yv_table[256][256], b_yu_table[256][256];
int inited = 0; /* one-shot guard; tables are built on first call only */

/*
 * Build the fixed-point lookup tables used by decodeYUV420SP.
 * Idempotent: subsequent calls return immediately.
 * NOTE(review): not thread-safe; assumed to be called from a single
 * decoding thread.
 */
void initTable()
{
    if (inited != 0)
        return;
    inited = 1;
    //framebuffer_main();
    int m = 0, n = 0;
    for (m = 0; m < 256; m++)
    {
        r_v_table[m] = 1634 * (m - 128);
        g_v_table[m] = 833 * (m - 128);
        g_u_table[m] = 400 * (m - 128);
        b_u_table[m] = 2066 * (m - 128);
        y_table[m] = 1192 * (m - 16);
    }
    int temp = 0;
    for (m = 0; m < 256; m++)
    {
        for (n = 0; n < 256; n++)
        {
            /* BUG FIX: the original assigned temp to r_yv_table[m] /
             * b_yu_table[m] — a whole int[256] row, which is not a
             * modifiable lvalue and does not compile. Index [m][n]. */
            temp = 1192 * (m - 16) + 1634 * (n - 128);
            if (temp < 0) temp = 0; else if (temp > 262143) temp = 262143;
            r_yv_table[m][n] = temp;

            temp = 1192 * (m - 16) + 2066 * (n - 128);
            if (temp < 0) temp = 0; else if (temp > 262143) temp = 262143;
            b_yu_table[m][n] = temp;
        }
    }
}
/*
 * JNI entry point: convert one NV21 (YUV420SP) camera preview frame of
 * width x height pixels into a new jint array of packed 0xAARRGGBB pixels.
 * Returns NULL if the input cannot be pinned or allocation fails.
 *
 * NOTE(review): the symbol maps to com.spore.ImageUtilEngine, but the
 * article's Java class is com.eric.complexjni.ImageUtilEngine — one of the
 * two must be renamed or the JVM will throw UnsatisfiedLinkError.
 */
jintArray Java_com_spore_ImageUtilEngine_decodeYUV420SP(JNIEnv * env,
        jobject thiz, jbyteArray buf, jint width, jint height) {
    jbyte * yuv420sp = (*env)->GetByteArrayElements(env, buf, 0);
    if (yuv420sp == 0)
        return 0; /* pinning failed; OutOfMemoryError already pending */
    int frameSize = width * height;
    /* BUG FIX: heap-allocate the output pixels. The original used a stack
     * VLA of width*height jints (~1.2 MB at 640x480), which can overflow
     * the thread stack. */
    jint * rgb = (jint *) malloc((size_t) frameSize * sizeof *rgb);
    if (rgb == 0) {
        (*env)->ReleaseByteArrayElements(env, buf, yuv420sp, JNI_ABORT);
        return 0;
    }
    initTable();
    int i = 0, j = 0, yp = 0;
    int uvp = 0, u = 0, v = 0;
    for (j = 0, yp = 0; j < height; j++)
    {
        /* Interleaved VU plane starts after the Y plane; one row of VU
         * pairs serves two rows of Y (4:2:0 subsampling). */
        uvp = frameSize + (j >> 1) * width;
        u = 0;
        v = 0;
        for (i = 0; i < width; i++, yp++)
        {
            /* 0xff mask already guarantees y >= 0; the original's
             * "if (y < 0)" check was dead code. */
            int y = (0xff & ((int) yuv420sp[yp]));
            if ((i & 1) == 0)
            {
                /* NV21 stores V before U. */
                v = (0xff & yuv420sp[uvp++]);
                u = (0xff & yuv420sp[uvp++]);
            }
            /* Fixed-point conversion via precomputed, pre-clamped tables:
             *   r = 1192*(y-16) + 1634*(v-128)   (clamped in r_yv_table)
             *   g = 1192*(y-16) -  833*(v-128) - 400*(u-128)
             *   b = 1192*(y-16) + 2066*(u-128)   (clamped in b_yu_table) */
            int y1192 = y_table[y];
            int r = r_yv_table[y][v];
            int g = (y1192 - g_v_table[v] - g_u_table[u]);
            int b = b_yu_table[y][u];
            if (g < 0) g = 0; else if (g > 262143) g = 262143;
            /* Channels are 18-bit (0..262143); shift each into its byte. */
            rgb[yp] = 0xff000000 | ((r << 6) & 0xff0000) | ((g >> 2) & 0xff00) | ((b >> 10) & 0xff);
        }
    }
    jintArray result = (*env)->NewIntArray(env, frameSize);
    if (result != 0)
        (*env)->SetIntArrayRegion(env, result, 0, frameSize, rgb);
    free(rgb);
    /* JNI_ABORT: the input buffer was never modified, skip the copy-back. */
    (*env)->ReleaseByteArrayElements(env, buf, yuv420sp, JNI_ABORT);
    return result;
}
5、在JNI目录下新建Android.mk文件用于生成so文件
[java] view
plain copy
print?
# ndk-build makefile: compiles the JNI source into libSimpleJni.so.
LOCAL_PATH := $(call my-dir)
include $(CLEAR_VARS)
# Single C source generated from the javah header.
LOCAL_SRC_FILES:= com_eric_complexjni_ImageUtilEngine.c
LOCAL_C_INCLUDES := $(JNI_H_INCLUDE)
LOCAL_SHARED_LIBRARIES := libutils
LOCAL_PRELINK_MODULE := false
# Module name — must match the System.loadLibrary() argument in Java.
LOCAL_MODULE := SimpleJni
# Link the Android log and bitmap (jnigraphics) libraries.
LOCAL_LDLIBS := -llog -ljnigraphics
include $(BUILD_SHARED_LIBRARY)
这里要说下,尽量不要用Java写编解码的东西,就算你是大神,你写的出来,但那也是不实用的,就像切西瓜一样,拿一把削水果刀去切西瓜,肯定比不上用西瓜刀方便吧,还是老老实实写个JNI调用得了,也不复杂C/C++方便的很,当然,这里不是说Java不行,语言只是工具,做什么事情用什么语言,没必要硬着头皮往上顶对吧。纯属个人观点,大神可以无视。
好了进入正题,该实例主要内容:开启摄像头预览,将获取到的视频帧YUV数据,通过JNI调用C的转码函数转为RGB类型数据然后返回,在自定义控件上绘制播放。
1、工程结构:
该工程是在之前的SimpleJni实例上进行修改的,简单描述一下功能:
CameraEngineActivity主界面,用于加载控件及渲染等;
CameraView摄像头预览控件,用于预览及捕获视频帧YUV数据;
ImageUtilEngine声明调用C函数的接口类,声明native的C函数;
SporeRender渲染画面类,用于绘制图像;
Texture2D图像优化类,用于优化图像纹理;
Demo地址:
2、新建java调用C函数的接口类
[java] view
plain copy
print?
package com.eric.complexjni;
/*
*@author Eric
*@2015-12-7下午4:35:18
*/
/** Interface class declaring the native YUV420SP -> RGB decode function. */
public class ImageUtilEngine {
    static {
        // BUG FIX: the library name must match LOCAL_MODULE in Android.mk
        // ("SimpleJni" -> libSimpleJni.so); an empty name can never load.
        System.loadLibrary("SimpleJni");
    }

    /**
     * Decode one NV21 (YUV420SP) camera preview frame into packed
     * 0xAARRGGBB pixels, one int per pixel.
     */
    public native int[] decodeYUV420SP(byte[] buf, int width, int heigth);
}
3、编译该接口类的头文件.h
命令窗口:Win+R运行cmd,cd进入到eclipse工作空间中ComplexJni工程目录,
输入javah -classpath bin/classes -d jni com.eric.complexjni.ImageUtilEngine编译接口类
编译完成后,刷新工程,就可以看到工程中自动创建了jni文件夹,其中包含编译好的.h头文件
4、在jni目录下新建com_eric_complexjni_ImageUtilEngine.c,实现上一步生成的.h头文件中声明的函数
[cpp] view
plain copy
print?
#include <jni.h>
#include <stdlib.h>
#include <com_eric_complexjni_ImageUtilEngine.h>
#include <android/log.h>
#include <android/bitmap.h>
#include <math.h>
#define LOG_TAG "Spore.meitu"
#define LOGI(...) __android_log_print(ANDROID_LOG_INFO,LOG_TAG,__VA_ARGS__)
#define LOGE(...) __android_log_print(ANDROID_LOG_ERROR,LOG_TAG,__VA_ARGS__)
/* Smaller of two integers. */
int min(int x, int y) {
    return (y < x) ? y : x;
}
/* Larger of two integers. */
int max(int x, int y) {
    return (y > x) ? y : x;
}
/* Alpha channel (bits 24-31) of a packed ARGB color. */
int alpha(int color) {
    return 0xFF & (color >> 24);
}
/* Red channel (bits 16-23) of a packed ARGB color. */
int red(int color) {
    return 0xFF & (color >> 16);
}
/* Green channel (bits 8-15) of a packed ARGB color. */
int green(int color) {
    return 0xFF & (color >> 8);
}
/* Blue channel (bits 0-7) of a packed ARGB color. */
int blue(int color) {
    int low = color & 0xFF;
    return low;
}
/* Pack four 8-bit channels into one 0xAARRGGBB int. */
int ARGB(int alpha, int red, int green, int blue) {
    int high = (alpha << 24) | (red << 16);
    int low = (green << 8) | blue;
    return high | low;
}
#include <unistd.h>
#include <stdio.h>
#include <fcntl.h>
#include <linux/fb.h>
#include <sys/mman.h>
/* Pack 8-bit R, G, B components into a 16-bit RGB565 pixel. */
inline static unsigned short int make16color(unsigned char r, unsigned char g, unsigned char b)
{
    unsigned int pixel = (unsigned int) ((r >> 3) & 31) << 11; /* 5 red bits  */
    pixel |= (unsigned int) ((g >> 2) & 63) << 5;              /* 6 green bits */
    pixel |= (unsigned int) ((b >> 3) & 31);                   /* 5 blue bits  */
    return (unsigned short int) pixel;
}
/*
 * Draw a diagnostic test pattern (a bordered horizontal gradient bar)
 * directly into the Linux framebuffer device /dev/graphics/fb0.
 * Demo code only; the call site in initTable() is commented out.
 *
 * Returns 0 on success; calls exit() with a nonzero code on any failure.
 */
int framebuffer_main()
{
    LOGI("framebuffer code");
    int fbfd = -1;
    struct fb_var_screeninfo vinfo;
    struct fb_fix_screeninfo finfo;
    long int screensize = 0;
    char *fbp = 0;
    int x = 0, y = 0;
    int gauge_height = 20, step = 10; /* bar thickness and side margin */
    long int location = 0;

    /* Open the device for reading and writing.
     * BUG FIX: open() returns -1 on failure (0 is a valid descriptor),
     * so the check must be fbfd < 0, not !fbfd. */
    LOGI("framebuffer code 1");
    fbfd = open("/dev/graphics/fb0", O_RDWR);
    LOGI("framebuffer code 2");
    if (fbfd < 0) {
        LOGI("Error: cannot open framebuffer device.\n");
        exit(1);
    }
    LOGI("The framebuffer device was opened successfully.\n");

    /* Fixed screen information (line_length in bytes, etc.). */
    if (ioctl(fbfd, FBIOGET_FSCREENINFO, &finfo)) {
        LOGI("Error reading fixed information.\n");
        exit(2);
    }
    /* Variable screen information (resolution, bpp, panning offsets). */
    if (ioctl(fbfd, FBIOGET_VSCREENINFO, &vinfo)) {
        LOGI("Error reading variable information.\n");
        exit(3);
    }
    /* BUG FIX: sizeof yields size_t; cast to int to match the %d format. */
    LOGI("sizeof(unsigned short) = %d\n", (int) sizeof(unsigned short));
    LOGI("%dx%d, %dbpp\n", vinfo.xres, vinfo.yres, vinfo.bits_per_pixel);
    LOGI("xoffset:%d, yoffset:%d, line_length: %d\n", vinfo.xoffset,
            vinfo.yoffset, finfo.line_length);

    /* Size of the visible screen in bytes. */
    screensize = vinfo.xres * vinfo.yres * vinfo.bits_per_pixel / 8;

    /* Map the device into our address space.
     * BUG FIX: mmap() signals failure with MAP_FAILED; comparing the
     * pointer cast through int against -1 is not portable. */
    fbp = (char *) mmap(0, screensize, PROT_READ | PROT_WRITE, MAP_SHARED,
            fbfd, 0);
    if (fbp == MAP_FAILED) {
        LOGI("Error: failed to map framebuffer device to memory.\n");
        exit(4);
    }
    LOGI("The framebuffer device was mapped to memory successfully.\n");

    /* Clear the whole screen to black first. */
    memset(fbp, 0, screensize);

    /* Border: top horizontal line, 2px above the bar.
     * NOTE(review): the border writes a 16-bit value regardless of bpp;
     * on a 32bpp display this only touches half of each pixel — confirm
     * intended, as the fill loop below does handle 32bpp separately. */
    y = (vinfo.yres - gauge_height) / 2 - 2;
    for (x = step - 2; x < vinfo.xres - step + 2; x++) {
        location = (x + vinfo.xoffset) * (vinfo.bits_per_pixel / 8)
                + (y + vinfo.yoffset) * finfo.line_length;
        *((unsigned short int *) (fbp + location)) = 255;
    }
    /* Border: bottom horizontal line, 2px below the bar. */
    y = (vinfo.yres + gauge_height) / 2 + 2;
    for (x = step - 2; x < vinfo.xres - step + 2; x++) {
        location = (x + vinfo.xoffset) * (vinfo.bits_per_pixel / 8)
                + (y + vinfo.yoffset) * finfo.line_length;
        *((unsigned short int *) (fbp + location)) = 255;
    }
    /* Border: left vertical line. */
    x = step - 2;
    for (y = (vinfo.yres - gauge_height) / 2 - 2;
            y < (vinfo.yres + gauge_height) / 2 + 2; y++) {
        location = (x + vinfo.xoffset) * (vinfo.bits_per_pixel / 8)
                + (y + vinfo.yoffset) * finfo.line_length;
        *((unsigned short int *) (fbp + location)) = 255;
    }
    /* Border: right vertical line. */
    x = vinfo.xres - step + 2;
    for (y = (vinfo.yres - gauge_height) / 2 - 2;
            y < (vinfo.yres + gauge_height) / 2 + 2; y++) {
        location = (x + vinfo.xoffset) * (vinfo.bits_per_pixel / 8)
                + (y + vinfo.yoffset) * finfo.line_length;
        *((unsigned short int *) (fbp + location)) = 255;
    }

    /* Fill the bar column by column so the sweep is visible. */
    for (x = step; x < vinfo.xres - step; x++) {
        for (y = (vinfo.yres - gauge_height) / 2;
                y < (vinfo.yres + gauge_height) / 2; y++) {
            location = (x + vinfo.xoffset) * (vinfo.bits_per_pixel / 8)
                    + (y + vinfo.yoffset) * finfo.line_length;
            if (vinfo.bits_per_pixel == 32) {
                *(fbp + location) = 100;                     /* blue  */
                *(fbp + location + 1) = 15 + (x - 100) / 2;  /* green */
                *(fbp + location + 2) = 200 - (y - 100) / 5; /* red   */
                *(fbp + location + 3) = 0;                   /* alpha */
            } else { /* assume 16bpp RGB565 */
                unsigned char b = 255 * x / (vinfo.xres - step);
                unsigned char g = 255;
                unsigned char r = 255;
                unsigned short int t = make16color(r, g, b);
                *((unsigned short int *) (fbp + location)) = t;
            }
        }
        usleep(200); /* brief pause per column so the sweep is visible */
    }

    /* Unmap and close the device. */
    munmap(fbp, screensize);
    close(fbfd);
    return 0;
}
/* Fixed-point YUV->RGB 1-D lookup tables: per-channel contributions. */
int r_v_table[256], g_v_table[256], g_u_table[256], b_u_table[256], y_table[256];
/* 2-D tables: pre-clamped (0..262143) Y+V red term and Y+U blue term. */
int r_yv_table[256][256], b_yu_table[256][256];
int inited = 0; /* one-shot guard; tables are built on first call only */

/*
 * Build the fixed-point lookup tables used by decodeYUV420SP.
 * Idempotent: subsequent calls return immediately.
 * NOTE(review): not thread-safe; assumed to be called from a single
 * decoding thread.
 */
void initTable()
{
    if (inited != 0)
        return;
    inited = 1;
    //framebuffer_main();
    int m = 0, n = 0;
    for (m = 0; m < 256; m++)
    {
        r_v_table[m] = 1634 * (m - 128);
        g_v_table[m] = 833 * (m - 128);
        g_u_table[m] = 400 * (m - 128);
        b_u_table[m] = 2066 * (m - 128);
        y_table[m] = 1192 * (m - 16);
    }
    int temp = 0;
    for (m = 0; m < 256; m++)
    {
        for (n = 0; n < 256; n++)
        {
            /* BUG FIX: the original assigned temp to r_yv_table[m] /
             * b_yu_table[m] — a whole int[256] row, which is not a
             * modifiable lvalue and does not compile. Index [m][n]. */
            temp = 1192 * (m - 16) + 1634 * (n - 128);
            if (temp < 0) temp = 0; else if (temp > 262143) temp = 262143;
            r_yv_table[m][n] = temp;

            temp = 1192 * (m - 16) + 2066 * (n - 128);
            if (temp < 0) temp = 0; else if (temp > 262143) temp = 262143;
            b_yu_table[m][n] = temp;
        }
    }
}
/*
 * JNI entry point: convert one NV21 (YUV420SP) camera preview frame of
 * width x height pixels into a new jint array of packed 0xAARRGGBB pixels.
 * Returns NULL if the input cannot be pinned or allocation fails.
 *
 * NOTE(review): the symbol maps to com.spore.ImageUtilEngine, but the
 * article's Java class is com.eric.complexjni.ImageUtilEngine — one of the
 * two must be renamed or the JVM will throw UnsatisfiedLinkError.
 */
jintArray Java_com_spore_ImageUtilEngine_decodeYUV420SP(JNIEnv * env,
        jobject thiz, jbyteArray buf, jint width, jint height) {
    jbyte * yuv420sp = (*env)->GetByteArrayElements(env, buf, 0);
    if (yuv420sp == 0)
        return 0; /* pinning failed; OutOfMemoryError already pending */
    int frameSize = width * height;
    /* BUG FIX: heap-allocate the output pixels. The original used a stack
     * VLA of width*height jints (~1.2 MB at 640x480), which can overflow
     * the thread stack. */
    jint * rgb = (jint *) malloc((size_t) frameSize * sizeof *rgb);
    if (rgb == 0) {
        (*env)->ReleaseByteArrayElements(env, buf, yuv420sp, JNI_ABORT);
        return 0;
    }
    initTable();
    int i = 0, j = 0, yp = 0;
    int uvp = 0, u = 0, v = 0;
    for (j = 0, yp = 0; j < height; j++)
    {
        /* Interleaved VU plane starts after the Y plane; one row of VU
         * pairs serves two rows of Y (4:2:0 subsampling). */
        uvp = frameSize + (j >> 1) * width;
        u = 0;
        v = 0;
        for (i = 0; i < width; i++, yp++)
        {
            /* 0xff mask already guarantees y >= 0; the original's
             * "if (y < 0)" check was dead code. */
            int y = (0xff & ((int) yuv420sp[yp]));
            if ((i & 1) == 0)
            {
                /* NV21 stores V before U. */
                v = (0xff & yuv420sp[uvp++]);
                u = (0xff & yuv420sp[uvp++]);
            }
            /* Fixed-point conversion via precomputed, pre-clamped tables:
             *   r = 1192*(y-16) + 1634*(v-128)   (clamped in r_yv_table)
             *   g = 1192*(y-16) -  833*(v-128) - 400*(u-128)
             *   b = 1192*(y-16) + 2066*(u-128)   (clamped in b_yu_table) */
            int y1192 = y_table[y];
            int r = r_yv_table[y][v];
            int g = (y1192 - g_v_table[v] - g_u_table[u]);
            int b = b_yu_table[y][u];
            if (g < 0) g = 0; else if (g > 262143) g = 262143;
            /* Channels are 18-bit (0..262143); shift each into its byte. */
            rgb[yp] = 0xff000000 | ((r << 6) & 0xff0000) | ((g >> 2) & 0xff00) | ((b >> 10) & 0xff);
        }
    }
    jintArray result = (*env)->NewIntArray(env, frameSize);
    if (result != 0)
        (*env)->SetIntArrayRegion(env, result, 0, frameSize, rgb);
    free(rgb);
    /* JNI_ABORT: the input buffer was never modified, skip the copy-back. */
    (*env)->ReleaseByteArrayElements(env, buf, yuv420sp, JNI_ABORT);
    return result;
}
5、在JNI目录下新建Android.mk文件用于生成so文件
[java] view
plain copy
print?
# ndk-build makefile: compiles the JNI source into libSimpleJni.so.
LOCAL_PATH := $(call my-dir)
include $(CLEAR_VARS)
# Single C source generated from the javah header.
LOCAL_SRC_FILES:= com_eric_complexjni_ImageUtilEngine.c
LOCAL_C_INCLUDES := $(JNI_H_INCLUDE)
LOCAL_SHARED_LIBRARIES := libutils
LOCAL_PRELINK_MODULE := false
# Module name — must match the System.loadLibrary() argument in Java.
LOCAL_MODULE := SimpleJni
# Link the Android log and bitmap (jnigraphics) libraries.
LOCAL_LDLIBS := -llog -ljnigraphics
include $(BUILD_SHARED_LIBRARY)
相关文章推荐
- Android调用JNI的进阶实例(摄像头预览数据转码RGB播放)
- Android下调用jni时进行的转码操作实例
- vlc-android 中调用用libvlcjni.so实现流媒体播放
- 我的Android进阶之旅------>Android MediaPlayer播放mp3的实例--简易mp3播放器
- Android JNI简单实例(android 调用C/C++代码)
- Android-NDK开发之基础--Android JNI实例代码(一)-- 在JNI中执行Java方法--C/C++调用Java
- Android-NDK开发之基础--Android JNI实例代码(一)-- 在JNI中执行Java方法--C/C++调用Java
- Android调用系统摄像头--使用MediaRecorder--1--预览(preview)功能
- Android JNI简单实例(android 调用C/C++代码)
- Android Service AIDL 远程调用服务之简单音乐播放实例
- Android 基于NDK的JNI开发 C调用java和java调用C的进阶教程
- [转]Android通过JNI调用驱动程序(完全解析实例)
- Android通过JNI调用驱动程序(完全解析实例)
- 我的Android进阶之旅------>Android MediaPlayer播放网络音频的实例--网络mp3播放器
- android开发(19) 调用手机的摄像头录像,并播放。
- Android通过JNI调用驱动程序(完全解析实例)
- Android上 用Html5做界面,javascript调用摄像头实例
- android开发(19) 调用手机的摄像头录像,并播放。
- Android-NDK开发之基础--Android JNI实例代码(一)-- 在JNI中执行Java方法--C/C++调用Java
- Android Service AIDL 远程调用服务之简单音乐播放实例【转载】