Face Detection API Example

This example comes from the developer.com tutorial "Face Detection with Android APIs" by Chunyen Liu and targets Android SDK 2.1 and up. It consists of three source files built on the android.media.FaceDetector API: MyImageView.java, a custom ImageView that overlays the detected feature points on the photo; TutorialOnFaceDetect.java, which runs detection in a background thread and notifies the UI through a Handler; and TutorialOnFaceDetect1.java, a simpler version that detects faces synchronously in onCreate(). The photo referenced as R.drawable.face3 must exist in the project's res/drawable directory.
package cliu.TutorialOnFaceDetect;
/*
* MyImageView.java
* [AUTHOR]: Chunyen Liu
* [SDK ]: Android SDK 2.1 and up
* [NOTE ]: developer.com tutorial, "Face Detection with Android APIs"
*/
import android.content.Context;
import android.graphics.Bitmap;
import android.graphics.Canvas;
import android.graphics.Paint;
import android.util.AttributeSet;
import android.widget.ImageView;
class MyImageView extends ImageView {
    private Bitmap mBitmap;
    private Canvas mCanvas;
    private int mBitmapWidth = 200;
    private int mBitmapHeight = 200;
    private Paint mPaint = new Paint(Paint.ANTI_ALIAS_FLAG);
    private int mDisplayStyle = 0;
    private int[] mPX = null;
    private int[] mPY = null;

    public MyImageView(Context c) {
        super(c);
        init();
    }

    public MyImageView(Context c, AttributeSet attrs) {
        super(c, attrs);
        init();
    }

    private void init() {
        mBitmap = Bitmap.createBitmap(mBitmapWidth, mBitmapHeight, Bitmap.Config.RGB_565);
        mCanvas = new Canvas(mBitmap);
        mPaint.setStyle(Paint.Style.STROKE);
        mPaint.setStrokeCap(Paint.Cap.ROUND);
        mPaint.setColor(0x80ff0000);
        mPaint.setStrokeWidth(3);
    }

    public Bitmap getBitmap() {
        return mBitmap;
    }

    @Override
    public void setImageBitmap(Bitmap bm) {
        if (bm != null) {
            mBitmapWidth = bm.getWidth();
            mBitmapHeight = bm.getHeight();
            mBitmap = Bitmap.createBitmap(mBitmapWidth, mBitmapHeight, Bitmap.Config.RGB_565);
            mCanvas = new Canvas();
            mCanvas.setBitmap(mBitmap);
            mCanvas.drawBitmap(bm, 0, 0, null);
        }
        super.setImageBitmap(bm);
    }

    @Override
    protected void onSizeChanged(int w, int h, int oldw, int oldh) {
        super.onSizeChanged(w, h, oldw, oldh);
        mBitmapWidth = (mBitmap != null) ? mBitmap.getWidth() : 0;
        mBitmapHeight = (mBitmap != null) ? mBitmap.getHeight() : 0;
        if (mBitmapWidth == w && mBitmapHeight == h) {
            return;
        }
        if (mBitmapWidth < w) mBitmapWidth = w;
        if (mBitmapHeight < h) mBitmapHeight = h;
    }

    // set up detected face features for display
    public void setDisplayPoints(int[] xx, int[] yy, int total, int style) {
        mDisplayStyle = style;
        mPX = null;
        mPY = null;
        if (xx != null && yy != null && total > 0) {
            mPX = new int[total];
            mPY = new int[total];
            for (int i = 0; i < total; i++) {
                mPX[i] = xx[i];
                mPY[i] = yy[i];
            }
        }
    }

    @Override
    protected void onDraw(Canvas canvas) {
        super.onDraw(canvas);
        if (mBitmap != null) {
            canvas.drawBitmap(mBitmap, 0, 0, null);
            if (mPX != null && mPY != null) {
                for (int i = 0; i < mPX.length; i++) {
                    if (mDisplayStyle == 1) {
                        canvas.drawCircle(mPX[i], mPY[i], 10.0f, mPaint);
                    } else {
                        canvas.drawRect(mPX[i] - 20, mPY[i] - 20, mPX[i] + 20, mPY[i] + 20, mPaint);
                    }
                }
            }
        }
    }
}
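Note that onDraw() above paints the bitmap and the markers in bitmap pixel coordinates, so the overlay only lines up when the view is displayed at the bitmap's native size. If the view ends up scaled, one option is to scale the canvas to the view bounds before drawing. The following is only a sketch of an alternative onDraw(), not part of the original tutorial:

    // Sketch: map bitmap coordinates onto the current view size before drawing,
    // so the markers stay aligned when the view is scaled. Assumes the view has
    // already been laid out (getWidth()/getHeight() are non-zero).
    @Override
    protected void onDraw(Canvas canvas) {
        super.onDraw(canvas);
        if (mBitmap == null) return;
        canvas.save();
        canvas.scale(getWidth() / (float) mBitmap.getWidth(),
                     getHeight() / (float) mBitmap.getHeight());
        canvas.drawBitmap(mBitmap, 0, 0, null);
        if (mPX != null && mPY != null) {
            for (int i = 0; i < mPX.length; i++) {
                if (mDisplayStyle == 1) {
                    canvas.drawCircle(mPX[i], mPY[i], 10.0f, mPaint);
                } else {
                    canvas.drawRect(mPX[i] - 20, mPY[i] - 20, mPX[i] + 20, mPY[i] + 20, mPaint);
                }
            }
        }
        canvas.restore();
    }

The marker radius and stroke width scale along with the canvas in this variant, which may or may not be desirable.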
----------------------------------------------
package cliu.TutorialOnFaceDetect;
/*
* TutorialOnFaceDetect
* [AUTHOR]: Chunyen Liu
* [SDK ]: Android SDK 2.1 and up
* [NOTE ]: developer.com tutorial, "Face Detection with Android APIs"
*/
import android.app.Activity;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.graphics.PointF;
import android.media.FaceDetector;
import android.os.Bundle;
import android.os.Handler;
import android.os.Message;
import android.util.Log;
import android.widget.LinearLayout.LayoutParams;
public class TutorialOnFaceDetect extends Activity {
    private MyImageView mIV;
    private Bitmap mFaceBitmap;
    private int mFaceWidth = 200;
    private int mFaceHeight = 200;
    private static final int MAX_FACES = 10;
    private static final String TAG = "TutorialOnFaceDetect";
    private static final boolean DEBUG = false;
    protected static final int GUIUPDATE_SETFACE = 999;

    protected Handler mHandler = new Handler() {
        @Override
        public void handleMessage(Message msg) {
            mIV.invalidate();
            super.handleMessage(msg);
        }
    };

    @Override
    public void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        mIV = new MyImageView(this);
        setContentView(mIV, new LayoutParams(LayoutParams.WRAP_CONTENT, LayoutParams.WRAP_CONTENT));
        // load the photo
        Bitmap b = BitmapFactory.decodeResource(getResources(), R.drawable.face3);
        mFaceBitmap = b.copy(Bitmap.Config.RGB_565, true);
        b.recycle();
        mFaceWidth = mFaceBitmap.getWidth();
        mFaceHeight = mFaceBitmap.getHeight();
        mIV.setImageBitmap(mFaceBitmap);
        mIV.invalidate();
        // perform face detection in setFace() in a background thread
        doLengthyCalc();
    }

    public void setFace() {
        FaceDetector fd;
        FaceDetector.Face[] faces = new FaceDetector.Face[MAX_FACES];
        PointF eyescenter = new PointF();
        float eyesdist = 0.0f;
        int[] fpx = null;
        int[] fpy = null;
        int count = 0;
        try {
            fd = new FaceDetector(mFaceWidth, mFaceHeight, MAX_FACES);
            count = fd.findFaces(mFaceBitmap, faces);
        } catch (Exception e) {
            Log.e(TAG, "setFace(): " + e.toString());
            return;
        }
        // check if we detect any faces
        if (count > 0) {
            fpx = new int[count * 2];
            fpy = new int[count * 2];
            for (int i = 0; i < count; i++) {
                try {
                    faces[i].getMidPoint(eyescenter);
                    eyesdist = faces[i].eyesDistance();
                    // set up left eye location
                    fpx[2 * i] = (int) (eyescenter.x - eyesdist / 2);
                    fpy[2 * i] = (int) eyescenter.y;
                    // set up right eye location
                    fpx[2 * i + 1] = (int) (eyescenter.x + eyesdist / 2);
                    fpy[2 * i + 1] = (int) eyescenter.y;
                    if (DEBUG)
                        Log.e(TAG, "setFace(): face " + i + ": confidence = " + faces[i].confidence()
                                + ", eyes distance = " + faces[i].eyesDistance()
                                + ", pose = (" + faces[i].pose(FaceDetector.Face.EULER_X) + ","
                                + faces[i].pose(FaceDetector.Face.EULER_Y) + ","
                                + faces[i].pose(FaceDetector.Face.EULER_Z) + ")"
                                + ", eyes midpoint = (" + eyescenter.x + "," + eyescenter.y + ")");
                } catch (Exception e) {
                    Log.e(TAG, "setFace(): face " + i + ": " + e.toString());
                }
            }
        }
        mIV.setDisplayPoints(fpx, fpy, count * 2, 1);
    }

    private void doLengthyCalc() {
        Thread t = new Thread() {
            Message m = new Message();

            public void run() {
                try {
                    setFace();
                    m.what = TutorialOnFaceDetect.GUIUPDATE_SETFACE;
                    TutorialOnFaceDetect.this.mHandler.sendMessage(m);
                } catch (Exception e) {
                    Log.e(TAG, "doLengthyCalc(): " + e.toString());
                }
            }
        };
        t.start();
    }
}
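One practical caveat not spelled out in the listing: android.media.FaceDetector only accepts bitmaps in RGB_565 format, which is why onCreate() copies the decoded photo with Bitmap.Config.RGB_565, and in practice the bitmap width is expected to be even or findFaces() may detect nothing. A defensive helper along the following lines could be added to the activity; the method name and the one-pixel crop are my own choices, not part of the original code:

    // Hypothetical helper: make sure the bitmap meets FaceDetector's input
    // expectations (RGB_565 config, even width) before calling findFaces().
    private static Bitmap prepareForDetection(Bitmap src) {
        Bitmap b = (src.getConfig() == Bitmap.Config.RGB_565)
                ? src
                : src.copy(Bitmap.Config.RGB_565, false);
        if (b.getWidth() % 2 != 0) {
            // crop a single pixel column so the width becomes even
            b = Bitmap.createBitmap(b, 0, 0, b.getWidth() - 1, b.getHeight());
        }
        return b;
    }

The FaceDetector would then be constructed with the prepared bitmap's width and height exactly as in setFace() above; in this tutorial the copy to RGB_565 already happens in onCreate(), so the helper is only a safeguard.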
-------------------------------------------------------
package cliu.TutorialOnFaceDetect;
/*
* TutorialOnFaceDetect1
* [AUTHOR]: Chunyen Liu
* [SDK ]: Android SDK 2.1 and up
* [NOTE ]: developer.com tutorial, "Face Detection with Android APIs"
*/
import android.app.Activity;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.graphics.PointF;
import android.media.FaceDetector;
import android.os.Bundle;
import android.util.Log;
import android.widget.LinearLayout.LayoutParams;
public class TutorialOnFaceDetect1 extends Activity {
    private MyImageView mIV;
    private Bitmap mFaceBitmap;
    private int mFaceWidth = 200;
    private int mFaceHeight = 200;
    private static final int MAX_FACES = 10;
    private static final String TAG = "TutorialOnFaceDetect";

    @Override
    public void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        mIV = new MyImageView(this);
        setContentView(mIV, new LayoutParams(LayoutParams.WRAP_CONTENT, LayoutParams.WRAP_CONTENT));
        // load the photo
        Bitmap b = BitmapFactory.decodeResource(getResources(), R.drawable.face3);
        mFaceBitmap = b.copy(Bitmap.Config.RGB_565, true);
        b.recycle();
        mFaceWidth = mFaceBitmap.getWidth();
        mFaceHeight = mFaceBitmap.getHeight();
        mIV.setImageBitmap(mFaceBitmap);
        // perform face detection and set the feature points
        setFace();
        mIV.invalidate();
    }

    public void setFace() {
        FaceDetector fd;
        FaceDetector.Face[] faces = new FaceDetector.Face[MAX_FACES];
        PointF midpoint = new PointF();
        int[] fpx = null;
        int[] fpy = null;
        int count = 0;
        try {
            fd = new FaceDetector(mFaceWidth, mFaceHeight, MAX_FACES);
            count = fd.findFaces(mFaceBitmap, faces);
        } catch (Exception e) {
            Log.e(TAG, "setFace(): " + e.toString());
            return;
        }
        // check if we detect any faces
        if (count > 0) {
            fpx = new int[count];
            fpy = new int[count];
            for (int i = 0; i < count; i++) {
                try {
                    faces[i].getMidPoint(midpoint);
                    fpx[i] = (int) midpoint.x;
                    fpy[i] = (int) midpoint.y;
                } catch (Exception e) {
                    Log.e(TAG, "setFace(): face " + i + ": " + e.toString());
                }
            }
        }
        mIV.setDisplayPoints(fpx, fpy, count, 0);
    }
}
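FaceDetector.Face exposes only the midpoint between the eyes, the eye distance, a confidence value, and pose angles; it does not return a face bounding box. If a rectangle around the whole face is needed, it is commonly approximated from the midpoint and eye distance. The sketch below does that with rough proportions of my own choosing (they are not values defined by the API), skips weak detections using FaceDetector.Face.CONFIDENCE_THRESHOLD, and assumes an extra import of android.graphics.Rect:

    // Sketch: approximate a face rectangle from the eye midpoint and eye
    // distance, ignoring low-confidence detections. The scale factors are
    // guesses, not API-defined values.
    private static Rect approximateFaceRect(FaceDetector.Face face) {
        if (face == null || face.confidence() < FaceDetector.Face.CONFIDENCE_THRESHOLD) {
            return null;
        }
        PointF mid = new PointF();
        face.getMidPoint(mid);
        float d = face.eyesDistance();
        return new Rect((int) (mid.x - d),          // left
                        (int) (mid.y - d),          // top
                        (int) (mid.x + d),          // right
                        (int) (mid.y + d * 1.5f));  // bottom: faces are taller than wide
    }

The resulting rectangles could be passed to MyImageView in place of the per-eye points, for example by extending setDisplayPoints() to accept widths and heights.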