In the two previous posts I described the zc301 implementation; see http://blog.csdn.net/hclydao/article/details/21235919.
Here I will cover a generic USB (UVC) camera as well. The frames obtained from the ZC301 are already JPEG, so they can be displayed without any further processing, but a standard UVC camera delivers YUV422 (YUYV) frames, which must be converted to RGB before they can be displayed. Only small modifications to the code from the previous two posts are needed.
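A quick note on the format before looking at the code: in a YUYV (YUV 4:2:2) frame, every 4 bytes [Y0 U Y1 V] describe two neighbouring pixels that share one U/V pair, so the converter walks the buffer in 4-byte steps and emits two RGB pixels per step. The sketch below only illustrates the fixed-point math the HAL code uses (the class and method names here are mine, not part of the project); it produces ordinary ARGB color ints, whereas the native code packs the channels in ABGR order because it writes straight into the bitmap's RGBA byte buffer.

final class YuyvSketch {
    /** Illustration only: convert one packed YUYV frame to ARGB_8888 color ints. */
    static void yuyvToArgb(byte[] src, int[] dst, int width, int height) {
        int frameSize = width * height * 2;      // YUYV uses 2 bytes per pixel
        int out = 0;
        for (int i = 0; i < frameSize; i += 4) { // 4 bytes -> 2 pixels sharing U/V
            int y1 = src[i] & 0xff;
            int u  = src[i + 1] & 0xff;
            int y2 = src[i + 2] & 0xff;
            int v  = src[i + 3] & 0xff;
            dst[out++] = yuvToArgb(y1, u, v);
            dst[out++] = yuvToArgb(y2, u, v);
        }
    }

    /** Same fixed-point factors as the native lookup tables (1192, 1634, 833, 400, 2066). */
    static int yuvToArgb(int y, int u, int v) {
        int y1192 = Math.max(0, 1192 * (y - 16));
        int r = clamp((y1192 + 1634 * (v - 128)) >> 10);
        int g = clamp((y1192 - 833 * (v - 128) - 400 * (u - 128)) >> 10);
        int b = clamp((y1192 + 2066 * (u - 128)) >> 10);
        return 0xff000000 | (r << 16) | (g << 8) | b;
    }

    static int clamp(int c) { return c < 0 ? 0 : (c > 255 ? 255 : c); }
}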
The HAL-layer code is as follows:
/*
 * Android USB Camera zc3xx Library
 *
 * Copyright (c) 2014 Store Information Technology Guangzhou Ltd.
 * Copyright (c) 2014 hclydao
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License.
 */
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <fcntl.h>
#include <unistd.h>
#include <errno.h>
#include <sys/types.h>
#include <sys/stat.h>
#include <sys/ioctl.h>
#include <sys/mman.h>
#include <linux/videodev2.h>
#include <jni.h>
#include <android/log.h>
#include <android/bitmap.h>

#define LOG_TAG "FimcGzsd"
#define LOGV(...) __android_log_print(ANDROID_LOG_VERBOSE, LOG_TAG, __VA_ARGS__)
#define LOGD(...) __android_log_print(ANDROID_LOG_DEBUG, LOG_TAG, __VA_ARGS__)
#define LOGI(...) __android_log_print(ANDROID_LOG_INFO, LOG_TAG, __VA_ARGS__)
#define LOGW(...) __android_log_print(ANDROID_LOG_WARN, LOG_TAG, __VA_ARGS__)
#define LOGE(...) __android_log_print(ANDROID_LOG_ERROR, LOG_TAG, __VA_ARGS__)

struct fimc_buffer {
    unsigned char *start;
    size_t length;
};
static int fd = -1;
struct fimc_buffer *buffers=NULL;
struct v4l2_buffer v4l2_buf;
static int bufnum = 1;
static int mwidth,mheight;
static int c_type = 0;
int *rgb = NULL;
int *ybuf = NULL;
int yuv_tbl_ready=0;
int y1192_tbl[256];
int v1634_tbl[256];
int v833_tbl[256];
int u400_tbl[256];
int u2066_tbl[256];
/*
*open usb camera device
*/
JNIEXPORT jint JNICALL Java_com_dao_usbcam_Fimcgzsd_open(JNIEnv * env, jclass obj, const jbyteArray devname)
{
    jbyte *dev = (jbyte *)(*env)->GetByteArrayElements(env, devname, 0);
    fd = open((const char *)dev, O_RDWR, 0);
    if (fd < 0) {
        LOGE("%s ++++ open error\n", (char *)dev);
        (*env)->ReleaseByteArrayElements(env, devname, dev, 0);
        return -1;
    }
    (*env)->ReleaseByteArrayElements(env, devname, dev, 0);
    return fd;
}
/*
* init device
*/
JNIEXPORT jint JNICALL Java_com_dao_usbcam_Fimcgzsd_init(JNIEnv * env, jclass obj, jint width, jint height,jint numbuf,jint ctype)
{
int ret;
int i;
bufnum = numbuf;
mwidth = width;
mheight = height;
c_type = ctype;
struct v4l2_format fmt;
struct v4l2_capability cap;
ret = ioctl(fd, VIDIOC_QUERYCAP, &cap);
if (ret < 0) {
LOGE("%d :VIDIOC_QUERYCAP failed\n",__LINE__);
return -1;
}
if (!(cap.capabilities & V4L2_CAP_VIDEO_CAPTURE)) {
LOGE("%d : no capture devices\n",__LINE__);
return -1;
    }
    memset(&fmt, 0, sizeof(fmt));
fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
if(c_type == 1)
fmt.fmt.pix.pixelformat = V4L2_PIX_FMT_YUYV;
else
fmt.fmt.pix.pixelformat = V4L2_PIX_FMT_RGB565;
fmt.fmt.pix.width = width;
fmt.fmt.pix.height = height;
if (ioctl(fd, VIDIOC_S_FMT, &fmt) < 0)
{
LOGE("++++%d : set format failed\n",__LINE__);
return -1;
    }

    struct v4l2_requestbuffers req;
    memset(&req, 0, sizeof(req));
    req.count = numbuf;
    req.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    req.memory = V4L2_MEMORY_MMAP;
ret = ioctl(fd, VIDIOC_REQBUFS, &req);
if (ret < 0) {
LOGE("++++%d : VIDIOC_REQBUFS failed\n",__LINE__);
return -1;
    }
    buffers = calloc(req.count, sizeof(*buffers));
    if (!buffers) {
        LOGE("++++%d Out of memory\n", __LINE__);
        return -1;
    }
    for (i = 0; i < bufnum; ++i) {
memset(&v4l2_buf, 0, sizeof(v4l2_buf));
v4l2_buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
v4l2_buf.memory = V4L2_MEMORY_MMAP;
v4l2_buf.index = i;
ret = ioctl(fd , VIDIOC_QUERYBUF, &v4l2_buf);
if(ret < 0) {
LOGE("+++%d : VIDIOC_QUERYBUF failed\n",__LINE__);
return -1;
}
        buffers[i].length = v4l2_buf.length;
        buffers[i].start = mmap(0, v4l2_buf.length,
                                PROT_READ | PROT_WRITE, MAP_SHARED,
                                fd, v4l2_buf.m.offset);
        if (buffers[i].start == MAP_FAILED) {
            LOGE("%d : mmap() failed", __LINE__);
            return -1;
        }
}
rgb = (int *)malloc(sizeof(int) * (mwidth*mheight));
ybuf = (int *)malloc(sizeof(int) * (mwidth*mheight));
return 0;
}
/*
*queue the buffers and start streaming
*/
JNIEXPORT jint JNICALL Java_com_dao_usbcam_Fimcgzsd_streamon(JNIEnv * env, jclass obj)
{
int i;
int ret;
enum v4l2_buf_type type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    for (i = 0; i < bufnum; ++i) {
memset(&v4l2_buf, 0, sizeof(v4l2_buf));
v4l2_buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
v4l2_buf.memory = V4L2_MEMORY_MMAP;
v4l2_buf.index = i;
ret = ioctl(fd, VIDIOC_QBUF, &v4l2_buf);
if (ret < 0) {
LOGE("%d : VIDIOC_QBUF failed\n",__LINE__);
return ret;
}
}
ret = ioctl(fd, VIDIOC_STREAMON, &type);
if (ret < 0) {
LOGE("%d : VIDIOC_STREAMON failed\n",__LINE__);
return ret;
}
return 0;
}

/*
 *convert one YUYV (YUV422) frame into the ABGR buffer 'rgb'
 */
void yuyv422toABGRY(unsigned char *src)
{
    int width = mwidth;
    int height = mheight;
    int frameSize = width * height * 2;
    int i;
    if (!rgb || !ybuf)
        return;
    int *lrgb = &rgb[0];
    int *lybuf = &ybuf[0];
    if (yuv_tbl_ready == 0) {
        for (i = 0; i < 256; i++) {
            y1192_tbl[i] = 1192 * (i - 16);
            if (y1192_tbl[i] < 0)
                y1192_tbl[i] = 0;
            v1634_tbl[i] = 1634 * (i - 128);
            v833_tbl[i] = 833 * (i - 128);
            u400_tbl[i] = 400 * (i - 128);
            u2066_tbl[i] = 2066 * (i - 128);
        }
        yuv_tbl_ready = 1;
    }
    /* each 4-byte group Y0 U Y1 V yields two ABGR pixels */
    for (i = 0; i < frameSize; i += 4) {
        int y1 = src[i] & 0xff;
        int u = src[i + 1] & 0xff;
        int y2 = src[i + 2] & 0xff;
        int v = src[i + 3] & 0xff;
        int y1192_1 = y1192_tbl[y1];
        int r1 = (y1192_1 + v1634_tbl[v]) >> 10;
        int g1 = (y1192_1 - v833_tbl[v] - u400_tbl[u]) >> 10;
        int b1 = (y1192_1 + u2066_tbl[u]) >> 10;
        int y1192_2 = y1192_tbl[y2];
        int r2 = (y1192_2 + v1634_tbl[v]) >> 10;
        int g2 = (y1192_2 - v833_tbl[v] - u400_tbl[u]) >> 10;
        int b2 = (y1192_2 + u2066_tbl[u]) >> 10;
        r1 = r1 > 255 ? 255 : r1 < 0 ? 0 : r1;
        g1 = g1 > 255 ? 255 : g1 < 0 ? 0 : g1;
        b1 = b1 > 255 ? 255 : b1 < 0 ? 0 : b1;
        r2 = r2 > 255 ? 255 : r2 < 0 ? 0 : r2;
        g2 = g2 > 255 ? 255 : g2 < 0 ? 0 : g2;
        b2 = b2 > 255 ? 255 : b2 < 0 ? 0 : b2;
        *lrgb++ = 0xff000000 | b1 << 16 | g1 << 8 | r1;
        *lrgb++ = 0xff000000 | b2 << 16 | g2 << 8 | r2;
        if (lybuf != NULL) {
            *lybuf++ = y1;
            *lybuf++ = y2;
        }
    }
}
/*
 *copy the converted RGB frame into the Java bitmap
 */
void
Java_com_dao_usbcam_Fimcgzsd_pixeltobmp(JNIEnv* env, jobject thiz, jobject bitmap)
{
    AndroidBitmapInfo info;
    void *pixels;
    int ret;
    int i;
    int *colors;
    int width = 0;
    int height = 0;
    if ((ret = AndroidBitmap_getInfo(env, bitmap, &info)) < 0) {
        LOGE("AndroidBitmap_getInfo() failed ! error=%d", ret);
        return;
    }
    width = info.width;
    height = info.height;
    if (!rgb || !ybuf)
        return;
    if (info.format != ANDROID_BITMAP_FORMAT_RGBA_8888) {
        LOGE("Bitmap format is not RGBA_8888 !");
        return;
    }
    if ((ret = AndroidBitmap_lockPixels(env, bitmap, &pixels)) < 0) {
        LOGE("AndroidBitmap_lockPixels() failed ! error=%d", ret);
        return;
    }
    colors = (int *)pixels;
    int *lrgb = &rgb[0];
    /* the frame was already converted by yuyv422toABGRY() in dqbuf() */
    for (i = 0; i < width * height; i++)
        *colors++ = *lrgb++;
    AndroidBitmap_unlockPixels(env, bitmap);
}
/*
 *convert a YUYV frame into a caller-supplied RGB buffer
 */
void yuyv422torgb(unsigned char *src, int *dst)
{
    int frameSize = mwidth * mheight * 2;
    int i;
    int *lrgb = &dst[0];
    int *lybuf = ybuf ? &ybuf[0] : NULL;
    /* the lookup tables are filled by yuyv422toABGRY(), which dqbuf() runs first */
    for (i = 0; i < frameSize; i += 4) {
        int y1 = src[i] & 0xff;
        int u = src[i + 1] & 0xff;
        int y2 = src[i + 2] & 0xff;
        int v = src[i + 3] & 0xff;
        int y1192_1 = y1192_tbl[y1];
        int r1 = (y1192_1 + v1634_tbl[v]) >> 10;
        int g1 = (y1192_1 - v833_tbl[v] - u400_tbl[u]) >> 10;
        int b1 = (y1192_1 + u2066_tbl[u]) >> 10;
        int y1192_2 = y1192_tbl[y2];
        int r2 = (y1192_2 + v1634_tbl[v]) >> 10;
        int g2 = (y1192_2 - v833_tbl[v] - u400_tbl[u]) >> 10;
        int b2 = (y1192_2 + u2066_tbl[u]) >> 10;
        r1 = r1 > 255 ? 255 : r1 < 0 ? 0 : r1;
        g1 = g1 > 255 ? 255 : g1 < 0 ? 0 : g1;
        b1 = b1 > 255 ? 255 : b1 < 0 ? 0 : b1;
        r2 = r2 > 255 ? 255 : r2 < 0 ? 0 : r2;
        g2 = g2 > 255 ? 255 : g2 < 0 ? 0 : g2;
        b2 = b2 > 255 ? 255 : b2 < 0 ? 0 : b2;
        *lrgb++ = 0xff000000 | b1 << 16 | g1 << 8 | r1;
        *lrgb++ = 0xff000000 | b2 << 16 | g2 << 8 | r2;
        if (lybuf != NULL) {
            *lybuf++ = y1;
            *lybuf++ = y2;
        }
    }
}
JNIEXPORT jint JNICALL Java_com_dao_usbcam_Fimcgzsd_yuvtorgb(JNIEnv * env, jclass obj, const jbyteArray yuvdata, jintArray rgbdata)
{
    jbyte *ydata = (jbyte *)(*env)->GetByteArrayElements(env, yuvdata, 0);
    jint *rdata = (jint *)(*env)->GetIntArrayElements(env, rgbdata, 0);
yuyv422torgb(ydata,rdata);
(*env)->ReleaseByteArrayElements(env, yuvdata, ydata, 0);
(*env)->ReleaseIntArrayElements(env, rgbdata, rdata, 0);
return 0;
}
/*
*get one frame data
*/
JNIEXPORT jint JNICALL Java_com_dao_usbcam_Fimcgzsd_dqbuf(JNIEnv * env, jclass obj,const jbyteArray videodata)
{
int ret;
    jbyte *data = (jbyte *)(*env)->GetByteArrayElements(env, videodata, 0);
v4l2_buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
v4l2_buf.memory = V4L2_MEMORY_MMAP;
    ret = ioctl(fd, VIDIOC_DQBUF, &v4l2_buf);
    if (ret < 0) {
        LOGE("%s : VIDIOC_DQBUF failed, dropped frame\n", __func__);
        (*env)->ReleaseByteArrayElements(env, videodata, data, 0);
        return ret;
    }
if(c_type == 1)
yuyv422toABGRY((unsigned char *)buffers[v4l2_buf.index].start);
memcpy(data,buffers[v4l2_buf.index].start,buffers[v4l2_buf.index].length);
(*env)->ReleaseByteArrayElements(env, videodata, data, 0);
return v4l2_buf.index;
}
/*
*put in frame buffer to queue
*/
JNIEXPORT jint JNICALL Java_com_dao_usbcam_Fimcgzsd_qbuf(JNIEnv * env, jclass obj,jint index)
{
int ret;
v4l2_buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
v4l2_buf.memory = V4L2_MEMORY_MMAP;
v4l2_buf.index = index;
ret = ioctl(fd, VIDIOC_QBUF, &v4l2_buf);
if (ret < 0) {
LOGE("%s : VIDIOC_QBUF failed\n",__func__);
return ret;
    }
    return 0;
}
/*
*streamoff
*/
JNIEXPORT jint JNICALL Java_com_dao_usbcam_Fimcgzsd_streamoff(JNIEnv * env, jclass obj,jint index)
{
enum v4l2_buf_type type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
int ret;
ret = ioctl(fd, VIDIOC_STREAMOFF, &type);
if (ret < 0) {
LOGE("%s : VIDIOC_STREAMOFF failed\n",__func__);
return ret;
    }
    return 0;
}
/*
*release
*/
JNIEXPORT jint JNICALL Java_com_dao_usbcam_Fimcgzsd_release(JNIEnv * env, jclass obj)
{
enum v4l2_buf_type type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
int ret;
int i;
ret = ioctl(fd, VIDIOC_STREAMOFF, &type);
if (ret < 0) {
LOGE("%s : VIDIOC_STREAMOFF failed\n",__func__);
return ret;
    }
    for (i = 0; i < bufnum; i++) {
ret = munmap(buffers[i].start, buffers[i].length);
if (ret < 0) {
LOGE("%s : munmap failed\n",__func__);
return ret;
}
}
free (buffers);
if(rgb) free(rgb);
if(ybuf) free(ybuf);
close(fd);
return 0;
}
The main additions are two functions:
yuyv422toABGRY converts the YUV (YUYV422) data to RGB.
Java_com_dao_usbcam_Fimcgzsd_pixeltobmp copies the RGB data into the Android bitmap.
Both functions were taken from the simplewebcam example I found online.
Android.mk is modified as follows:
LOCAL_PATH := $(call my-dir)

include $(CLEAR_VARS)
LOCAL_MODULE := fimcgzsd
LOCAL_SRC_FILES := fimcgzsd.c
LOCAL_LDLIBS := -llog -ljnigraphics
include $(BUILD_SHARED_LIBRARY)
An Application.mk file was also added:
# The ARMv7 ABI is significantly faster due to the use of the hardware FPU
APP_ABI := armeabi armeabi-v7a
APP_PLATFORM := android-8
With both files in place, open a terminal in the project directory and run ndk-build (assuming the NDK environment variables are already set); this produces libfimcgzsd.so, which the Java side loads with System.loadLibrary("fimcgzsd").
Application layer
A ctype parameter is added to specify whether the device is a ZC301 or a UVC camera. Here is the full code:
package com.dao.usbcam;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import android.app.Activity;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.graphics.ImageFormat;
import android.graphics.Rect;
import android.graphics.YuvImage;
import android.os.Bundle;
import android.os.Environment;
import android.os.Handler;
import android.text.format.Time;
import android.view.Menu;
import android.view.View;
import android.view.View.OnClickListener;
import android.view.Window;
import android.view.WindowManager;
import android.widget.Button;
import android.widget.ImageView;
import android.widget.Toast;
public class MainActivity extends Activity {
private ImageView mImag;
private int width = 320;
private int height = 240;
private String devname = "/dev/video3";
private byte[] mdata;
private Handler mHandler;
private int numbuf = 0;
private int index = 0;
private int ret = 0;
private int ctype = 1;
//0 is zc301 1 is uvc camera
public Button mcap;
private Bitmap bitmap;
private Bitmap bmp;
private int[] rgb;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
requestWindowFeature(Window.FEATURE_NO_TITLE);
getWindow().setFlags(WindowManager.LayoutParams.FLAG_FULLSCREEN,
WindowManager.LayoutParams.FLAG_FULLSCREEN);
setContentView(R.layout.activity_main);
mImag = (ImageView)findViewById(R.id.mimg);
mcap = (Button)findViewById(R.id.mcap);
bmp = Bitmap.createBitmap(width, height, Bitmap.Config.ARGB_8888);
numbuf= 4;
mdata = new byte[width * height * numbuf];
rgb = new int[width * height * numbuf];
ret = Fimcgzsd.open(devname.getBytes());
if(ret < 0)
finish();
ret = Fimcgzsd.init(width, height, numbuf,ctype);
if(ret < 0)
finish();
ret = Fimcgzsd.streamon();
if(ret < 0)
finish();
mHandler = new Handler();
new StartThread().start();
mcap.setOnClickListener(new CaptureListener());
    }

    final Runnable mUpdateUI = new Runnable() {
        @Override
public void run() {
// TODO Auto-generated method stub
mImag.setImageBitmap(bitmap);
}
};
    class StartThread extends Thread {
        @Override
public void run() {
// TODO Auto-generated method stub
//super.run();
while(true) {
if(ctype == 1){
index = Fimcgzsd.dqbuf(mdata);
if((index < 0) || (mdata == null)) {
onDestroy();
break;
}
Fimcgzsd.pixeltobmp(bmp);
mHandler.post(mUpdateUI);
bitmap = bmp;
Fimcgzsd.qbuf(index);
//Fimcgzsd.yuvtorgb(mdata, rgb);
//mHandler.post(mUpdateUI);
//bitmap = Bitmap.createBitmap(rgb,width,height,Bitmap.Config.ARGB_8888);
//Fimcgzsd.qbuf(index);
} else {
index = Fimcgzsd.dqbuf(mdata);
if(index < 0) {
onDestroy();
break;
}
mHandler.post(mUpdateUI);
bitmap = BitmapFactory.decodeByteArray(mdata, 0, width * height);
Fimcgzsd.qbuf(index);
}
}
}
}
public static void saveMyBitmap(Bitmap mBitmap) {
Time mtime = new Time();
mtime.setToNow();
File fdir = new File(Environment.getExternalStorageDirectory().getPath() +"/DCIM/" + "/gzsd/");
if(!fdir.exists()) {
fdir.mkdir();
}
File f = new File(Environment.getExternalStorageDirectory().getPath() + "/DCIM/" + "/gzsd/" + mtime.year + mtime.month + mtime.monthDay + mtime.hour + mtime.minute +mtime.second+".png");
try {
f.createNewFile();
} catch (IOException e) {
e.printStackTrace();
}
FileOutputStream fOut = null;
try {
fOut = new FileOutputStream(f);
mBitmap.compress(Bitmap.CompressFormat.PNG, 100, fOut);
fOut.flush();
fOut.close();
} catch (FileNotFoundException e) {
e.printStackTrace();
} catch (IOException e) {
e.printStackTrace();
        }
    }

    class CaptureListener implements OnClickListener {
        @Override
public void onClick(View v) {
// TODO Auto-generated method stub
//new CapThread().start();
//Fimcgzsd.streamoff();
saveMyBitmap(bitmap);
//Fimcgzsd.streamon();
Toast.makeText(MainActivity.this, "Capture Successfully", Toast.LENGTH_SHORT).show();
}
    }

    class CapThread extends Thread {
        @Override
public void run() {
// TODO Auto-generated method stub
//super.run();
saveMyBitmap(bitmap);
Toast.makeText(MainActivity.this, "Capture Successfully", Toast.LENGTH_LONG).show();
        }
    }
@Override
protected void onStop() {
// TODO Auto-generated method stub
super.onStop();
Fimcgzsd.release();
finish();
    }

    @Override
protected void onPause() {
// TODO Auto-generated method stub
super.onPause();
Fimcgzsd.release();
finish();
    }

    @Override
protected void onDestroy() {
// TODO Auto-generated method stub
super.onDestroy();
Fimcgzsd.release();
    }

    @Override
public boolean onCreateOptionsMenu(Menu menu) {
        // Inflate the menu; this adds items to the action bar if it is present.
getMenuInflater().inflate(R.menu.main, menu);
finish();
return true;
}}
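The MainActivity above relies on the Fimcgzsd JNI wrapper class, which was introduced in the earlier posts and is not repeated here. As a reminder, here is a minimal sketch of the declarations it needs in order to match the native functions in fimcgzsd.c (the exact class from the earlier posts may differ in detail):

package com.dao.usbcam;

import android.graphics.Bitmap;

// Sketch of the JNI wrapper used above; signatures follow the
// Java_com_dao_usbcam_Fimcgzsd_* functions in fimcgzsd.c.
public class Fimcgzsd {
    static {
        // Must match LOCAL_MODULE in Android.mk (libfimcgzsd.so).
        System.loadLibrary("fimcgzsd");
    }

    public static native int open(byte[] devname);
    public static native int init(int width, int height, int numbuf, int ctype);
    public static native int streamon();
    public static native int dqbuf(byte[] videodata);   // returns the dequeued buffer index
    public static native int qbuf(int index);
    public static native int yuvtorgb(byte[] yuvdata, int[] rgbdata);
    public static native void pixeltobmp(Bitmap bitmap);
    public static native int streamoff(int index);
    public static native int release();
}

Note that pixeltobmp expects the Bitmap to be ARGB_8888, as checked in the native code.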
I already gave a brief explanation of how this works in the earlier posts, so I won't repeat it here. The result looks like this:
[screenshot of the camera preview]
It looks a little blurry. I found it actually works quite well for self-portraits though; the blur is flattering, haha.
============================================
Author: hclydao
http://blog.csdn.net/hclydao
No copyright claimed, but please keep this notice when reposting.
============================================