Android实现基于肤色的皮肤检测
在android上实现 基于肤色的皮肤检测的几个技术要点:
(1)android上使用相机预览,包括相机api的使用和surfaceview的应用。
(2)android上相机使用的色彩空间NV12.
(3)NV21属于YCrCb色彩空间的一种存储格式,需要了解YCrCb色彩空间,以及YCrCb和YUV之间的转换。
yuv色彩模型来源于rgb模型,该模型的特点是将亮度和色度分离开,从而适合于图像处理领域。YCbCr模型来源于yuv模型.
(4)YCrCb色彩空间表示的人类肤色的特征。这个特征是133≤Cr≤173,77≤Cb≤127。实验表明Cr在[140,160]区间时更符合黄种人的肤色。
YUV和RGB的转换:
注意:下面这组带+128偏移的公式实际上是RGB与YCbCr(JPEG全幅度形式)之间的转换,并非模拟视频的YUV;两者系数不同,可与后文的两组公式对比。
Y = 0.299 R + 0.587 G + 0.114 B
U = -0.1687 R - 0.3313 G + 0.5 B + 128
V = 0.5 R - 0.4187 G - 0.0813 B + 128
R = Y + 1.402 (V-128)
G = Y - 0.34414 (U-128) - 0.71414 (V-128)
B = Y + 1.772 (U-128)
以前,一直没明白yuv和YcbCr之间的差异,想必有些朋友也会有同样的疑惑。
所以,我看完之后就记载下来了。
一、和rgb之间换算公式的差异
yuv<-->rgb
Y'= 0.299*R' + 0.587*G' + 0.114*B'
U'= -0.147*R' - 0.289*G' + 0.436*B' = 0.492*(B'- Y')
V'= 0.615*R' - 0.515*G' - 0.100*B' = 0.877*(R'- Y')
R' = Y' + 1.140*V'
G' = Y' - 0.394*U' - 0.581*V'
B' = Y' + 2.032*U'
yCbCr<-->rgb
Y’ = 0.257*R' + 0.504*G' + 0.098*B' + 16
Cb' = -0.148*R' - 0.291*G' + 0.439*B' + 128
Cr' = 0.439*R' - 0.368*G' - 0.071*B' + 128
R' = 1.164*(Y’-16) + 1.596*(Cr'-128)
G' = 1.164*(Y’-16) - 0.813*(Cr'-128) - 0.392*(Cb'-128)
B' = 1.164*(Y’-16) + 2.017*(Cb'-128)
Note: 上面各个符号都带了一撇,表示该符号在原值基础上进行了gamma correction
源代码如下:
package com.example.hearrate;import java.io.FileNotFoundException;import java.io.FileOutputStream;import java.io.IOException;import java.util.List;import android.graphics.Bitmap;import android.graphics.BitmapFactory;import android.graphics.Canvas;import android.graphics.Color;import android.graphics.Paint;import android.graphics.PixelFormat;import android.graphics.PorterDuffXfermode;import android.graphics.Rect;import android.hardware.Camera;import android.hardware.Camera.CameraInfo;import android.hardware.Camera.Size;import android.os.AsyncTask;import android.os.Build;import android.os.Bundle;import android.annotation.SuppressLint;import android.app.Activity;import android.content.res.Configuration;import android.util.Log;import android.view.Menu;import android.view.SurfaceHolder;import android.view.SurfaceView;import android.graphics.PorterDuff;import android.graphics.PorterDuff.Mode;public class MainActivity extends Activity implements SurfaceHolder.Callback ,Camera.PreviewCallback{SurfaceHolder mHolder;SurfaceView mView;SurfaceView mLayer;SurfaceHolder mLayerHolder;private Camera mCamera =null;private boolean bIfPreview =false;private int mPreviewHeight;private int mPreviewWidth;private Canvas canvas; private Paint paint; private int facex=0,facey=0;private boolean bprocessing=false;private int[] RGBData; private byte[] mYUVData; private boolean bfront=false;@Overrideprotected void onCreate(Bundle savedInstanceState) {super.onCreate(savedInstanceState);setContentView(R.layout.activity_main);mView=(SurfaceView)findViewById(R.id.layer0);paint = new Paint(); paint.setColor(Color.RED); paint.setAntiAlias(true); mPreviewWidth=320;mPreviewHeight=400;mHolder=mView.getHolder();mHolder.addCallback(this);mHolder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);mLayer=(SurfaceView)findViewById(R.id.layer1);mLayer.setZOrderOnTop(true);//mLayer.setEGLConfigChooser(8, 8, 8, 8, 16, 
0);mLayerHolder=mLayer.getHolder();mLayerHolder.setFormat(PixelFormat.TRANSPARENT);mLayerHolder.addCallback(this);mLayerHolder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);}void drawlayer1(){canvas=mLayerHolder.lockCanvas();// canvas.drawRGB(0, 0, 0); // canvas.save(); Bitmap bmp=BitmapFactory.decodeResource(getResources(), R.drawable.ic_launcher); //绘制 // canvas.drawBitmap(bmp, null, paint); drawImage(canvas,bmp,facex,facex,72,72,0,0); canvas.restore(); bmp=null; mLayerHolder.unlockCanvasAndPost(canvas);}@Overridepublic boolean onCreateOptionsMenu(Menu menu) {// Inflate the menu; this adds items to the action bar if it is present.getMenuInflater().inflate(R.menu.activity_main, menu);return true;}@Overridepublic void surfaceChanged(SurfaceHolder arg0, int arg1, int width, int height) {// TODO Auto-generated method stubmPreviewWidth=width;mPreviewHeight=height;if(arg0.equals(mLayerHolder)){//drawlayer1();return;}RGBData= new int[mPreviewHeight* mPreviewWidth];mYUVData= new byte[mPreviewHeight* mPreviewWidth+(mPreviewHeight/2)* (mPreviewWidth/2)+(mPreviewHeight/2)* (mPreviewWidth/2)];initCamera(); }@SuppressLint("NewApi")@Overridepublic void surfaceCreated(SurfaceHolder arg0) {// TODO Auto-generated method stub// TODO Auto-generated method stub if(arg0.equals(mLayerHolder))return;if(Build.VERSION.SDK_INT>=Build.VERSION_CODES.GINGERBREAD){for(int i=0;i<Camera.getNumberOfCameras();i++){CameraInfo info=new CameraInfo();Camera.getCameraInfo(i, info);if(info.facing==CameraInfo.CAMERA_FACING_FRONT){//mCamera = Camera.open(i);//bfront=true;}}}if(mCamera==null){mCamera = Camera.open();// 开启摄像头(2.3版本后支持多摄像头,需传入参数) bfront=false;} try{ mCamera.setPreviewDisplay(mHolder);//set the surface to be used for live previewLog("成功打开");} catch (Exception ex){ if(null != mCamera){mCamera.release();mCamera = null; } canvas=mHolder.lockCanvas(); canvas.drawRGB(0, 0, 0); canvas.save(); Bitmap bmp=BitmapFactory.decodeResource(getResources(), R.drawable.bg); //绘制 // canvas.drawBitmap(bmp, 
null, paint); drawImage(canvas,bmp,0,0,mPreviewWidth,mPreviewHeight,0,0); canvas.restore(); bmp=null;mHolder.unlockCanvasAndPost(canvas); Log("打开失败"+ex.getMessage());}}// GameView.drawImage(canvas, mBitDestTop, miDTX, mBitQQ.getHeight(), mBitDestTop.getWidth(), mBitDestTop.getHeight()/2, 0, 0); public static void drawImage(Canvas canvas, Bitmap blt, int x, int y, int w, int h, int bx, int by) { //x,y表示绘画的起点, Rect src = new Rect();// 图片 Rect dst = new Rect();// 屏幕位置及尺寸 //src 这个是表示绘画图片的大小 src.left = bx; //0,0 src.top = by; src.right = bx + w;// mBitDestTop.getWidth();,这个是桌面图的宽度, src.bottom = by + h;//mBitDestTop.getHeight()/2;// 这个是桌面图的高度的一半 // 下面的 dst 是表示 绘画这个图片的位置 dst.left = x; //miDTX,//这个是可以改变的,也就是绘图的起点X位置 dst.top = y; //mBitQQ.getHeight();//这个是QQ图片的高度。 也就相当于 桌面图片绘画起点的Y坐标 dst.right = x + w; //miDTX + mBitDestTop.getWidth();// 表示需绘画的图片的右上角 dst.bottom = y + h; // mBitQQ.getHeight() + mBitDestTop.getHeight();//表示需绘画的图片的右下角 canvas.drawBitmap(blt, src, dst, null);//这个方法 第一个参数是图片原来的大小,第二个参数是 绘画该图片需显示多少。也就是说你想绘画该图片的某一些地方,而不是全部图片,第三个参数表示该图片绘画的位置 src = null; dst = null; }@Overridepublic void surfaceDestroyed(SurfaceHolder arg0) {// TODO Auto-generated method stubif(arg0.equals(mLayerHolder))return; if(null != mCamera) { mCamera.setPreviewCallback(null); //!!这个必须在前,不然退出出错 mCamera.stopPreview(); bIfPreview = false; mCamera.release(); mCamera = null; }}@Overridepublic void onPreviewFrame(byte[] data, Camera camera) {// TODO Auto-generated method stubLog("going into onPreviewFrame"+data.length); int imageWidth = camera.getParameters().getPreviewSize().width ; int imageHeight =camera.getParameters().getPreviewSize().height ; // int RGBData[] = new int[imageWidth* imageHeight]; if(!bprocessing) { System.arraycopy(data, 0, mYUVData, 0, data.length); new ProcessTask().execute(mYUVData); } // decodeYUV420SP(RGBData, mYUVData, imageWidth, imageHeight); // Bitmap bitmap = Bitmap.createBitmap(imageWidth, imageHeight, Bitmap.Config.ARGB_8888); // bitmap.setPixels(RGBData, 0, 
imageWidth, 0, 0, imageWidth, imageHeight); // FileOutputStream outStream = null; // ByteArrayOutputStream baos = new ByteArrayOutputStream(); // outStream = new FileOutputStream(String.format("/sdcard/%d.bmp", System.currentTimeMillis())); // outStream.write(bitmap.); // outStream.close(); /* FileOutputStream out;try {String path=String.format("/mnt/sdcard/%d.png", System.currentTimeMillis());out = new FileOutputStream(path); bitmap.compress(Bitmap.CompressFormat.PNG, 90, out); out.close();} catch (FileNotFoundException e) {// TODO Auto-generated catch blocke.printStackTrace();} catch (IOException e) {// TODO Auto-generated catch blocke.printStackTrace();} */ //mYUV420sp = data; // 获取原生的YUV420SP数据//int mInitPos=mPreviewWidth*mPreviewHeight;//if(mYUV420sp.length<=mInitPos+1)//return;//byte cr=0;//int framesize=mInitPos;//int uvp=0;//int i,j,u=0,v=0,yp = 0;//int uvp=framesize+(i>>1)*w+j;//canvas=mLayerHolder.lockCanvas();// canvas.drawRGB(0, 0, 0); //canvas.save(); // Bitmap bmp=BitmapFactory.decodeResource(getResources(), R.drawable.ic_launcher); //绘制 // canvas.drawBitmap(bmp, null, paint); // drawImage(canvas,bmp,facex,facex,72,72,0,0);// bmp=null;// int RGBData[] = new int[mPreviewHeight* mPreviewWidth]; // byte[] mYUVData = new byte[mYUV420sp.length]; // System.arraycopy(mYUV420sp, 0, mYUVData, 0, mYUV420sp.length);/*for( i=0,yp = 0;i<mPreviewHeight;i++){uvp=framesize+(i>>1)*mPreviewWidth;for( j=0;j<mPreviewWidth;j++){int y = (0xff & ((int) mYUVData[yp])) - 16;if (y < 0) y = 0; if((j&1)==0) {v = (0xff & mYUVData[uvp++]) - 128;u = (0xff & mYUVData[uvp++]) - 128; } // if(133≤Cr≤173,77≤Cb≤127 if(v>133&&v<173) canvas.drawPoint(j, i, paint); int y1192 = 1192 * y;int r = (y1192 + 1634 * v);int g = (y1192 - 833 * v - 400 * u);int b = (y1192 + 2066 * u);if (r < 0) r = 0; else if (r > 262143) r = 262143;if (g < 0) g = 0; else if (g > 262143) g = 262143;if (b < 0) b = 0; else if (b > 262143) b = 262143;//int rgb=0xff000000 | ((r << 6) & 0xff0000) | ((g >> 2) & 0xff00) | 
((b >> 10) & 0xff);//r=(rgb&0x00ff0000)>>4;//g=(rgb&0x0000ff00)>>2;//b=(rgb&0x000000ff); // if(r>200&&g>200&&b>200) // canvas.drawPoint(j, i, paint);// rgb[yp] = 0xff000000 | ((r << 6) & 0xff0000) | ((g >> 2) & 0xff00) | ((b >> 10) & 0xff);}} canvas.restore(); mLayerHolder.unlockCanvasAndPost(canvas); *//* * framesize=w*h; * yp=0; * for (int i=0;i<h;i++) * { * uvp=framesize+(i>>1)*w; * for(int j=0;j<w;j++,yp++) * { * int y = (0xff & ((int) yuv420sp[yp])) - 16; * if(j&1==0) * { * v = (0xff & yuv420sp[uvp++]) - 128; u = (0xff & yuv420sp[uvp++]) - 128; * } * * * } * } * * * */}public void drawdetect(){ canvas=mLayerHolder.lockCanvas(); if(canvas==null) return; canvas.drawColor(Color.TRANSPARENT); Paint p = new Paint(); //清屏 p.setXfermode(new PorterDuffXfermode(Mode.CLEAR)); canvas.drawPaint(p); p.setXfermode(new PorterDuffXfermode(Mode.SRC)); canvas.save(); canvas.drawBitmap(RGBData, 0, mPreviewWidth, 0, 0, mPreviewWidth, mPreviewHeight, true, p); canvas.restore(); mLayerHolder.unlockCanvasAndPost(canvas);}public void detectwhite(byte[] yuv420sp, int width, int height){//检测所有白色 final int frameSize = width * height; for (int j = 0, yp = 0; j < height; j++) { int uvp = frameSize + (j >> 1) * width, u = 0, v = 0; for (int i = 0; i < width; i++, yp++) { int y = (0xff & ((int) yuv420sp[yp])); if (y < 0) y = 0; if ((i & 1) == 0) { v = (0xff & yuv420sp[uvp++]);; u = (0xff & yuv420sp[uvp++]); } ///133≤Cr≤173,77≤Cb≤127 if(y>250){RGBData[yp]=Color.RED;// canvas.drawPoint(i, j, paint);}else{RGBData[yp]=Color.TRANSPARENT;} } }}public void detectface(byte[] yuv420sp, int width, int height){ final int frameSize = width * height; for (int j = 0, yp = 0; j < height; j++) { int uvp = frameSize + (j >> 1) * width, u = 0, v = 0; for (int i = 0; i < width; i++, yp++) { if ((i & 1) == 0) { v = (0xff & yuv420sp[uvp++]);; u = (0xff & yuv420sp[uvp++]); } ///133≤Cr≤173,77≤Cb≤127 if((v)>133&&(v)<160&&(u>77)&&(u<127)){RGBData[yp]=Color.RED;// canvas.drawPoint(i, j, 
paint);}else{RGBData[yp]=Color.TRANSPARENT;} } }} public void decodeYUV420SP(int[] rgb, byte[] yuv420sp, int width, int height) { final int frameSize = width * height; canvas=mLayerHolder.lockCanvas(); Paint paint1 = new Paint(); paint1.setXfermode(new PorterDuffXfermode(PorterDuff.Mode.CLEAR)); canvas.drawPaint(paint1); canvas.save(); for (int j = 0, yp = 0; j < height; j++) { int uvp = frameSize + (j >> 1) * width, u = 0, v = 0; for (int i = 0; i < width; i++, yp++) { int y = (0xff & ((int) yuv420sp[yp])) - 16; if (y < 0) y = 0; if ((i & 1) == 0) { v = (0xff & yuv420sp[uvp++]) - 128; u = (0xff & yuv420sp[uvp++]) - 128; } ///133≤Cr≤173,77≤Cb≤127 if((v)>133&&(v)<160&&(u>77)&&(u<127)){ canvas.drawPoint(i, j, paint);}/* * 这个是yuv转RGB的处理 * */ int y1192 = 1192 * y; int r = (y1192 + 1634 * v); int g = (y1192 - 833 * v - 400 * u); int b = (y1192 + 2066 * u); if (r < 0) r = 0; else if (r > 262143) r = 262143; if (g < 0) g = 0; else if (g > 262143) g = 262143; if (b < 0) b = 0; else if (b > 262143) b = 262143; rgb[yp] = 0xff000000 | ((r << 6) & 0xff0000) | ((g >> 2) & 0xff00) | ((b >> 10) & 0xff); r = (rgb[yp] >> 16)&0xff; g = (rgb[yp] >> 8) & 0xff; b = rgb[yp] & 0xff; // if(r==255&&g==255&&b==255) // canvas.drawPoint(i, j, paint); } } canvas.restore(); mLayerHolder.unlockCanvasAndPost(canvas);}private void initCamera(){if (bIfPreview){mCamera.stopPreview();}if(null != mCamera){try{Camera.Parameters parameters = mCamera.getParameters();// parameters.setFlashMode("off"); // 无闪光灯 parameters.setPictureFormat(PixelFormat.JPEG); //Sets the image format for picture 设定相片格式为JPEG,默认为NV21 parameters.setPreviewFormat(PixelFormat.YCbCr_420_SP); //Sets the image format for preview picture,默认为NV21 mCamera.setPreviewCallback(this); // 【调试】获取caera支持的PictrueSize,看看能否设置??List pictureSizes = mCamera.getParameters().getSupportedPictureSizes(); List previewSizes = mCamera.getParameters().getSupportedPreviewSizes(); List previewFormats = mCamera.getParameters().getSupportedPreviewFormats();List 
previewFrameRates = mCamera.getParameters().getSupportedPreviewFrameRates(); Size psize = null; for (int i = 0; i < pictureSizes.size(); i++) { psize = (Size) pictureSizes.get(i); } for (int i = 0; i < previewSizes.size(); i++) { psize = (Size) previewSizes.get(i); } Integer pf = null; for (int i = 0; i < previewFormats.size(); i++) {pf = (Integer) previewFormats.get(i); }// 设置拍照和预览图片大小 parameters.setPictureSize(640, 480); //指定拍照图片的大小parameters.setPreviewSize(mPreviewWidth, mPreviewHeight); // 指定preview的大小 //这两个属性 如果这两个属性设置的和真实手机的不一样时,就会报错if(bfront){ parameters.set("orientation", "landscape"); // parameters.set("rotation", 0); // 镜头角度转90度(默认摄像头是横拍) mCamera.setDisplayOrientation(0); // 在2.2以上可以使用}// 横竖屏镜头自动调整/*if (this.getResources().getConfiguration().orientation != Configuration.ORIENTATION_LANDSCAPE) { parameters.set("orientation", "portrait"); // parameters.set("rotation", 90); // 镜头角度转90度(默认摄像头是横拍) mCamera.setDisplayOrientation(90); // 在2.2以上可以使用 } else// 如果是横屏 { parameters.set("orientation", "landscape"); //mCamera.setDisplayOrientation(0); // 在2.2以上可以使用 } */ //添加对视频流处理函数// 设定配置参数并开启预览 mCamera.setParameters(parameters); // 将Camera.Parameters设定予Camera mCamera.startPreview(); // 打开预览画面 bIfPreview = true; // 【调试】设置后的图片大小和预览大小以及帧率 Camera.Size csize = mCamera.getParameters().getPreviewSize(); mPreviewHeight = csize.height; //mPreviewWidth = csize.width; csize = mCamera.getParameters().getPictureSize();}catch(Exception e){Log(e.getMessage());}}}void Log(String msg){System.out.println("LOG:"+msg);}int[] g_v_table,g_u_table,y_table;int[][] r_yv_table,b_yu_table;int inited = 0; void initTable(){g_v_table=new int[256];g_u_table=new int[256];y_table=new int[256];r_yv_table=new int[256][256];b_yu_table=new int[256][256];if (inited == 0){inited = 1;int m = 0,n=0;for (; m < 256; m++){g_v_table[m] = 833 * (m - 128);g_u_table[m] = 400 * (m - 128);y_table[m] = 1192 * (m - 16);}int temp = 0;for (m = 0; m < 256; m++)for (n = 0; n < 256; n++){temp = 1192 * (m - 16) + 1634 * (n - 
128);if (temp < 0) temp = 0; else if (temp > 262143) temp = 262143;r_yv_table[m][n] = temp; temp = 1192 * (m - 16) + 2066 * (n - 128);if (temp < 0) temp = 0; else if (temp > 262143) temp = 262143;b_yu_table[m][n] = temp;}}}public class ProcessTask extends AsyncTask<byte[], Void, Void>{@Overrideprotected void onPostExecute(Void result) {// TODO Auto-generated method stubsuper.onPostExecute(result);drawdetect();bprocessing=false;}@Overrideprotected void onPreExecute() {// TODO Auto-generated method stubsuper.onPreExecute();if(bprocessing)this.cancel(true);}@Overrideprotected Void doInBackground(byte[]... params) {// TODO Auto-generated method stubbprocessing=true;byte[] data=params[0];//皮肤检测detectface(data,mPreviewWidth, mPreviewHeight); //白色检测//detectwhite(data,mPreviewWidth, mPreviewHeight); // publishProgress(null);return null;}}}
以下是layout
<!-- Two stacked full-screen SurfaceViews: layer0 shows the live camera
     preview; layer1 is a transparent overlay (set translucent and z-ordered
     on top in code) where the skin-detection mask is drawn. -->
<RelativeLayout xmlns:android="http://schemas.android.com/apk/res/android" xmlns:tools="http://schemas.android.com/tools" android:layout_width="match_parent" android:layout_height="match_parent" tools:context=".MainActivity" > <!-- camera preview layer --> <FrameLayout android:layout_width="match_parent" android:layout_height="match_parent" > <SurfaceView android:id="@+id/layer0" android:layout_width="match_parent" android:layout_height="match_parent" /> </FrameLayout> <!-- transparent detection-overlay layer --> <FrameLayout android:layout_width="match_parent" android:layout_height="match_parent" > <SurfaceView android:id="@+id/layer1" android:layout_width="match_parent" android:layout_height="match_parent" /> </FrameLayout> </RelativeLayout>
以下是manifest
<?xml version="1.0" encoding="utf-8"?><!-- Manifest for the skin-detection demo: needs camera access; activity is
     locked to fullscreen landscape to match the camera preview orientation. --><manifest xmlns:android="http://schemas.android.com/apk/res/android" package="com.example.hearrate" android:versionCode="1" android:versionName="1.0" > <uses-sdk android:minSdkVersion="8" android:targetSdkVersion="17" /><uses-permission android:name="android.permission.CAMERA"/><!-- Permission to create and delete files on the SD card --><uses-permission android:name="android.permission.MOUNT_UNMOUNT_FILESYSTEMS"/><!-- Permission to write data to the SD card --><uses-permission android:name="android.permission.WRITE_EXTERNAL_STORAGE"/><uses-feature android:name="android.hardware.camera"/><uses-feature android:name="android.hardware.camera.autofocus"/> <application android:allowBackup="true" android:icon="@drawable/ic_launcher" android:label="@string/app_name" android:theme="@style/AppTheme" > <activity android:name="com.example.hearrate.MainActivity" android:label="@string/app_name" android:theme="@android:style/Theme.NoTitleBar.Fullscreen" android:screenOrientation="landscape" > <intent-filter> <action android:name="android.intent.action.MAIN" /> <category android:name="android.intent.category.LAUNCHER" /> </intent-filter> </activity> </application></manifest>
更多相关文章
- Android(安卓)调用相机拍照,适配到Android(安卓)10?
- android Webview 上传图片兼容各android版本
- Android
- Android(安卓)WebView上传图片到h5
- ImageView的属性android:scaleType,即ImageView.setScaleType(Ima
- LinearLayout按下(pressed)或获取焦点(focused)时背景设置不同颜
- Android根据图片Uri获取图片绝对路径
- Android下图片可拖动到任意位置的效果的实现源码
- ImageView的属性android:scaleType,即ImageView.setScaleType(Ima