update: add face attribute model that predicts gender and age

This commit is contained in:
divenwu 2022-06-28 19:26:05 +08:00
parent 8923f6155a
commit 78ad2f05f0
11 changed files with 352 additions and 32 deletions

View File

@ -0,0 +1,18 @@
package com.visual.face.search.core.base;
import com.visual.face.search.core.domain.ImageMat;
import com.visual.face.search.core.domain.FaceInfo.Attribute;
import java.util.Map;
public interface FaceAttribute {
/**
* Infer face attribute information (gender and age)
* @param imageMat image data
* @param params extra parameters
* @return face attribute info
*/
Attribute inference(ImageMat imageMat, Map<String, Object> params);
}

View File

@ -4,6 +4,7 @@ import java.io.Serializable;
import java.util.ArrayList;
import java.util.Arrays;
public class FaceInfo implements Comparable<FaceInfo>, Serializable {
/** face score **/
public float score;
@ -15,6 +16,8 @@ public class FaceInfo implements Comparable<FaceInfo>, Serializable {
public Points points;
/** face embedding vector **/
public Embedding embedding;
/** face attribute info **/
public Attribute attribute;
/**
* Constructor
@ -333,7 +336,7 @@ public class FaceInfo implements Comparable<FaceInfo>, Serializable {
* Check whether the current face box is a standard (non-rotated) face box
* @return whether the box is a standard face box
*/
public boolean isNormal(){
public boolean normal(){
if((int)leftTop.x == (int)leftBottom.x && (int)leftTop.y == (int)rightTop.y){
if((int)rightBottom.x == (int)rightTop.x && (int)rightBottom.y == (int)leftBottom.y){
return true;
@ -446,4 +449,66 @@ public class FaceInfo implements Comparable<FaceInfo>, Serializable {
return new Embedding(image, embeds);
}
}
/**
* Face attribute information
*/
public static class Attribute implements Serializable {
public Integer age;
public Integer gender;
/**
* Constructor
* @param gender gender of the face
* @param age age of the face
*/
private Attribute(Gender gender, Integer age) {
this.age = age;
this.gender = null == gender ? -1 : gender.getCode();
}
/**
* Get the gender as an enum value
* @return the Gender enum mapped from the stored code
*/
public Gender valueOfGender(){
return Gender.valueOf(this.gender);
}
/**
* Build face attribute information
* @param gender gender of the face
* @param age age of the face
*/
public static Attribute build(Gender gender, Integer age){
return new Attribute(gender, age);
}
}
public static enum Gender {
MALE(0),     //male
FEMALE(1),   //female
UNKNOWN(-1); //unknown
private int code;
Gender(int code) {
this.code = code;
}
public int getCode() {
return this.code;
}
public static Gender valueOf(Integer code) {
code = null == code ? -1 : code;
if(code == 0){
return MALE;
}
if(code == 1){
return FEMALE;
}
return UNKNOWN;
}
}
}
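
A minimal usage sketch of the new Attribute / Gender API introduced above (illustrative only, not part of this commit):

// build() is the only entry point, since the Attribute constructor is private
FaceInfo.Attribute attribute = FaceInfo.Attribute.build(FaceInfo.Gender.FEMALE, 28);
Integer genderCode = attribute.gender;               // 1 (the code stored on the POJO)
FaceInfo.Gender gender = attribute.valueOfGender();  // Gender.FEMALE
FaceInfo.Gender unknown = FaceInfo.Gender.valueOf((Integer) null); // Gender.UNKNOWN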

View File

@ -2,19 +2,15 @@ package com.visual.face.search.core.extract;
import java.util.List;
import java.util.Map;
import com.visual.face.search.core.base.FaceAlignment;
import com.visual.face.search.core.base.FaceDetection;
import com.visual.face.search.core.base.FaceKeyPoint;
import com.visual.face.search.core.base.FaceRecognition;
import org.opencv.core.Mat;
import com.visual.face.search.core.base.*;
import com.visual.face.search.core.domain.ExtParam;
import com.visual.face.search.core.domain.FaceImage;
import com.visual.face.search.core.domain.FaceInfo;
import com.visual.face.search.core.domain.ImageMat;
import com.visual.face.search.core.models.InsightCoordFaceKeyPoint;
import com.visual.face.search.core.utils.CropUtil;
import com.visual.face.search.core.utils.MaskUtil;
import org.opencv.core.Mat;
import com.visual.face.search.core.models.InsightCoordFaceKeyPoint;
/**
* Face feature extractor implementation
@ -28,6 +24,7 @@ public class FaceFeatureExtractorImpl implements FaceFeatureExtractor {
private FaceAlignment faceAlignment;
private FaceRecognition faceRecognition;
private FaceDetection backupFaceDetection;
private FaceAttribute faceAttribute;
/**
* Constructor
@ -37,10 +34,14 @@ public class FaceFeatureExtractorImpl implements FaceFeatureExtractor {
* @param faceAlignment face alignment model
* @param faceRecognition face recognition (embedding) model
* @param faceAttribute face attribute (gender/age) model
*/
public FaceFeatureExtractorImpl(FaceDetection faceDetection, FaceDetection backupFaceDetection, FaceKeyPoint faceKeyPoint, FaceAlignment faceAlignment, FaceRecognition faceRecognition) {
public FaceFeatureExtractorImpl(
FaceDetection faceDetection, FaceDetection backupFaceDetection,
FaceKeyPoint faceKeyPoint, FaceAlignment faceAlignment,
FaceRecognition faceRecognition, FaceAttribute faceAttribute) {
this.faceKeyPoint = faceKeyPoint;
this.faceDetection = faceDetection;
this.faceAlignment = faceAlignment;
this.faceAttribute = faceAttribute;
this.faceRecognition = faceRecognition;
this.backupFaceDetection = backupFaceDetection;
}
@ -71,13 +72,19 @@ public class FaceFeatureExtractorImpl implements FaceFeatureExtractor {
ImageMat cropImageMat = null;
ImageMat alignmentImage = null;
try {
//scaling ratio for the face box
float scaling = extParam.getScaling() <= 0 ? defScaling : extParam.getScaling();
//use the rotation angle to get upright face coordinates and crop the image
FaceInfo.FaceBox box = faceInfo.rotateFaceBox().scaling(scaling);
cropFace = CropUtil.crop(image.toCvMat(), box);
//face landmark key points
FaceInfo.FaceBox rotateFaceBox = faceInfo.rotateFaceBox();
cropFace = CropUtil.crop(image.toCvMat(), rotateFaceBox);
cropImageMat = ImageMat.fromCVMat(cropFace);
//face attribute detection
FaceInfo.Attribute attribute = this.faceAttribute.inference(cropImageMat, params);
faceInfo.attribute = attribute;
//scale the face region and crop the image
float scaling = extParam.getScaling() <= 0 ? defScaling : extParam.getScaling();
FaceInfo.FaceBox box = rotateFaceBox.scaling(scaling);
cropFace = CropUtil.crop(image.toCvMat(), box);
cropImageMat = ImageMat.fromCVMat(cropFace);
//face landmark key points
FaceInfo.Points corpPoints = this.faceKeyPoint.inference(cropImageMat, params);
//restore the key points to the original image coordinates
FaceInfo.Point corpImageCenter = FaceInfo.Point.build((float)cropImageMat.center().x, (float)cropImageMat.center().y);

View File

@ -0,0 +1,115 @@
package com.visual.face.search.core.models;
import ai.onnxruntime.OnnxTensor;
import ai.onnxruntime.OrtSession;
import com.visual.face.search.core.base.BaseOnnxInfer;
import com.visual.face.search.core.base.FaceAttribute;
import com.visual.face.search.core.domain.FaceInfo;
import com.visual.face.search.core.domain.ImageMat;
import com.visual.face.search.core.utils.MathUtil;
import org.apache.commons.math3.linear.RealMatrix;
import org.opencv.core.*;
import org.opencv.imgproc.Imgproc;
import java.util.Collections;
import java.util.Map;
/**
* Face attribute detection: gender + age
* git:https://github.com/deepinsight/insightface/tree/master/attribute
*/
public class InsightAttributeDetection extends BaseOnnxInfer implements FaceAttribute {
private static final int[] inputSize = new int[]{96, 96};
/**
* Constructor
* @param modelPath model path
* @param threads number of threads
*/
public InsightAttributeDetection(String modelPath, int threads) {
super(modelPath, threads);
}
/**
* Infer face attribute information (gender and age)
* @param imageMat image data
* @param params extra parameters
* @return face attribute info
*/
@Override
public FaceInfo.Attribute inference(ImageMat imageMat, Map<String, Object> params) {
Mat M =null;
Mat img = null;
OnnxTensor tensor = null;
OrtSession.Result output = null;
try {
Mat image = imageMat.toCvMat();
int w = image.size(1);
int h = image.size(0);
float cx = 1.0f * w / 2;
float cy = 1.0f * h / 2;
float[]center = new float[]{cx, cy};
float rotate = 0;
float _scale = (float) (1.0f * inputSize[0] / (Math.max(w, h)*1.5));
Mat[] transform = transform(image, center, inputSize, _scale, rotate);
img = transform[0];
M = transform[1];
tensor = ImageMat.fromCVMat(img)
.blobFromImageAndDoReleaseMat(1.0, new Scalar(0, 0, 0), true)
.to4dFloatOnnxTensorAndDoReleaseMat(true);
output = this.getSession().run(Collections.singletonMap(this.getInputName(), tensor));
float[] value = ((float[][]) output.get(0).getValue())[0];
Integer age = Double.valueOf(Math.floor(value[2] * 100)).intValue();
FaceInfo.Gender gender = (value[0] > value[1]) ? FaceInfo.Gender.FEMALE : FaceInfo.Gender.MALE;
return FaceInfo.Attribute.build(gender, age);
} catch (Exception e) {
throw new RuntimeException(e);
}finally {
if(null != tensor){
tensor.close();
}
if(null != output){
output.close();
}
if(null != M){
M.release();
}
if(null != img){
img.release();
}
}
}
/**
* Get the aligned face crop and the affine matrix
* @param image source image
* @param center face center in the source image
* @param outputSize output crop size (width, height)
* @param scale scaling factor
* @param rotation rotation angle in degrees
* @return {aligned crop, affine matrix}
*/
private static Mat[] transform(Mat image, float[] center, int[] outputSize, float scale, float rotation){
double scale_ratio = scale;
double rot = rotation * Math.PI / 180.0;
double cx = center[0] * scale_ratio;
double cy = center[1] * scale_ratio;
//build the transform matrices
RealMatrix t1 = MathUtil.similarityTransform((Double[][]) null, scale_ratio, null, null);
RealMatrix t2 = MathUtil.similarityTransform((Double[][]) null, null, null, new Double[]{- cx, - cy});
RealMatrix t3 = MathUtil.similarityTransform((Double[][]) null, null, rot, null);
RealMatrix t4 = MathUtil.similarityTransform((Double[][]) null, null, null, new Double[]{1.0*outputSize[0]/2, 1.0*outputSize[1]/2});
RealMatrix tx = MathUtil.dotProduct(t4, MathUtil.dotProduct(t3, MathUtil.dotProduct(t2, t1)));
RealMatrix tm = tx.getSubMatrix(0, 1, 0, 2);
//affine matrix
Mat matMTemp = new MatOfDouble(MathUtil.flatMatrix(tm, 1).toArray());
Mat matM = new Mat(2, 3, CvType.CV_32FC3);
matMTemp.reshape(1,2).copyTo(matM);
matMTemp.release();
//apply the affine warp with OpenCV
Mat dst = new Mat();
Imgproc.warpAffine(image, dst, matM, new Size(outputSize[0], outputSize[1]));
return new Mat[]{dst, matM};
}
}
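
For reference, my reading of the affine composed in transform() above (assuming MathUtil.similarityTransform builds a pure scale, rotation, or translation matrix from whichever argument is non-null; matrices applied right to left):

M = T(outW/2, outH/2) · R(rot) · T(-scale·cx, -scale·cy) · S(scale)

i.e. scale the whole image, move the scaled face center to the origin, rotate (0° here), then shift it to the middle of the 96x96 crop the ONNX model expects.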

View File

@ -1,9 +1,6 @@
package com.visual.face.search.core.test.extract;
import com.visual.face.search.core.base.FaceAlignment;
import com.visual.face.search.core.base.FaceDetection;
import com.visual.face.search.core.base.FaceKeyPoint;
import com.visual.face.search.core.base.FaceRecognition;
import com.visual.face.search.core.base.*;
import com.visual.face.search.core.domain.ExtParam;
import com.visual.face.search.core.domain.FaceImage;
import com.visual.face.search.core.domain.FaceInfo;
@ -24,6 +21,7 @@ public class FaceFeatureExtractOOMTest extends BaseTest {
private static String modelScrfdPath = "face-search-core/src/main/resources/model/onnx/detection_face_scrfd/scrfd_500m_bnkps.onnx";
private static String modelCoordPath = "face-search-core/src/main/resources/model/onnx/keypoint_coordinate/coordinate_106_mobilenet_05.onnx";
private static String modelArcPath = "face-search-core/src/main/resources/model/onnx/recognition_face_arc/glint360k_cosface_r18_fp16_0.1.onnx";
private static String modelArrPath = "face-search-core/src/main/resources/model/onnx/attribute_gender_age/insight_gender_age.onnx";
// private static String imagePath = "face-search-core/src/test/resources/images/faces";
private static String imagePath = "face-search-core/src/test/resources/images/faces/debug/debug_0001.jpg";
@ -47,7 +45,11 @@ public class FaceFeatureExtractOOMTest extends BaseTest {
FaceAlignment simple005pFaceAlignment = new Simple005pFaceAlignment();
FaceAlignment simple106pFaceAlignment = new Simple106pFaceAlignment();
FaceDetection pcnNetworkFaceDetection = new PcnNetworkFaceDetection(new String[]{modelPcn1Path, modelPcn2Path, modelPcn3Path}, 1);
FaceFeatureExtractor extractor = new FaceFeatureExtractorImpl(insightScrfdFaceDetection, pcnNetworkFaceDetection, insightCoordFaceKeyPoint, simple106pFaceAlignment, insightArcFaceRecognition);
FaceAttribute insightFaceAttribute = new InsightAttributeDetection(modelArrPath, 1);
FaceFeatureExtractor extractor = new FaceFeatureExtractorImpl(
insightScrfdFaceDetection, pcnNetworkFaceDetection, insightCoordFaceKeyPoint,
simple106pFaceAlignment, insightArcFaceRecognition, insightFaceAttribute);
// FaceFeatureExtractor extractor = new FaceFeatureExtractorImpl(insightScrfdFaceDetection, insightCoordFaceKeyPoint, simple106pFaceAlignment, insightArcFaceRecognition);
for (int i = 0; i < 100000; i++) {
for (String fileName : map.keySet()) {

View File

@ -1,9 +1,6 @@
package com.visual.face.search.core.test.extract;
import com.visual.face.search.core.base.FaceAlignment;
import com.visual.face.search.core.base.FaceDetection;
import com.visual.face.search.core.base.FaceKeyPoint;
import com.visual.face.search.core.base.FaceRecognition;
import com.visual.face.search.core.base.*;
import com.visual.face.search.core.domain.ExtParam;
import com.visual.face.search.core.domain.FaceImage;
import com.visual.face.search.core.domain.FaceInfo;
@ -31,9 +28,10 @@ public class FaceFeatureExtractTest extends BaseTest {
private static String modelScrfdPath = "face-search-core/src/main/resources/model/onnx/detection_face_scrfd/scrfd_500m_bnkps.onnx";
private static String modelCoordPath = "face-search-core/src/main/resources/model/onnx/keypoint_coordinate/coordinate_106_mobilenet_05.onnx";
private static String modelArcPath = "face-search-core/src/main/resources/model/onnx/recognition_face_arc/glint360k_cosface_r18_fp16_0.1.onnx";
private static String modelArrPath = "face-search-core/src/main/resources/model/onnx/attribute_gender_age/insight_gender_age.onnx";
private static String imagePath = "face-search-core/src/test/resources/images/faces";
// private static String imagePath = "face-search-core/src/test/resources/images/faces";
private static String imagePath = "/Users/diven/workspace/python/kuainiu/beidou-spoofing/test/datas/eval_liveness_v1/real";
public static void main(String[] args) {
@ -44,7 +42,11 @@ public class FaceFeatureExtractTest extends BaseTest {
FaceAlignment simple005pFaceAlignment = new Simple005pFaceAlignment();
FaceAlignment simple106pFaceAlignment = new Simple106pFaceAlignment();
FaceDetection pcnNetworkFaceDetection = new PcnNetworkFaceDetection(new String[]{modelPcn1Path, modelPcn2Path, modelPcn3Path}, 1);
FaceFeatureExtractor extractor = new FaceFeatureExtractorImpl(pcnNetworkFaceDetection, insightScrfdFaceDetection, insightCoordFaceKeyPoint, simple106pFaceAlignment, insightArcFaceRecognition);
FaceAttribute insightFaceAttribute = new InsightAttributeDetection(modelArrPath, 1);
FaceFeatureExtractor extractor = new FaceFeatureExtractorImpl(
pcnNetworkFaceDetection, insightScrfdFaceDetection, insightCoordFaceKeyPoint,
simple005pFaceAlignment, insightArcFaceRecognition, insightFaceAttribute);
for(String fileName : map.keySet()){
String imageFilePath = map.get(fileName);
System.out.println(imageFilePath);
@ -75,6 +77,11 @@ public class FaceFeatureExtractTest extends BaseTest {
Imgproc.circle(image, new Point(box1.rightBottom.x, box1.rightBottom.y), 3, new Scalar(0,0,255), -1);
Imgproc.circle(image, new Point(box1.leftBottom.x, box1.leftBottom.y), 3, new Scalar(0,0,255), -1);
FaceInfo.Attribute attribute = faceInfo.attribute;
Imgproc.putText(image, attribute.valueOfGender().name(), new Point(box.center().x-10, box.center().y), Imgproc.FONT_HERSHEY_PLAIN, 1, new Scalar(255,0,0));
Imgproc.putText(image, ""+attribute.age, new Point(box.center().x-10, box.center().y+20), Imgproc.FONT_HERSHEY_PLAIN, 1, new Scalar(255,0,0));
int pointNum = 1;
for(FaceInfo.Point keyPoint : faceInfo.points){
Imgproc.circle(image, new Point(keyPoint.x, keyPoint.y), 1, new Scalar(0,0,255), -1);

View File

@ -0,0 +1,69 @@
package com.visual.face.search.core.test.models;
import com.visual.face.search.core.domain.FaceInfo;
import com.visual.face.search.core.domain.ImageMat;
import com.visual.face.search.core.models.InsightAttributeDetection;
import com.visual.face.search.core.models.InsightScrfdFaceDetection;
import com.visual.face.search.core.test.base.BaseTest;
import com.visual.face.search.core.utils.CropUtil;
import org.opencv.core.Mat;
import org.opencv.core.Point;
import org.opencv.core.Scalar;
import org.opencv.highgui.HighGui;
import org.opencv.imgcodecs.Imgcodecs;
import org.opencv.imgproc.Imgproc;
import java.util.List;
import java.util.Map;
public class InsightAttributeDetectionTest extends BaseTest {
private static String modelPathDetection = "face-search-core/src/main/resources/model/onnx/detection_face_scrfd/scrfd_500m_bnkps.onnx";
private static String modelPathAttribute = "face-search-core/src/main/resources/model/onnx/attribute_gender_age/insight_gender_age.onnx";
private static String imagePath = "face-search-core/src/test/resources/images/faces";
// private static String imagePath = "face-search-core/src/test/resources/images/faces/rotate";
// private static String imagePath = "face-search-core/src/test/resources/images/faces/debug";
public static void main(String[] args) {
Map<String, String> map = getImagePathMap(imagePath);
InsightScrfdFaceDetection inferDetection = new InsightScrfdFaceDetection(modelPathDetection, 2);
InsightAttributeDetection inferAttribute = new InsightAttributeDetection(modelPathAttribute, 2);
for(String fileName : map.keySet()){
String imageFilePath = map.get(fileName);
System.out.println(imageFilePath);
Mat image = Imgcodecs.imread(imageFilePath);
long s = System.currentTimeMillis();
List<FaceInfo> faceInfos = inferDetection.inference(ImageMat.fromCVMat(image), 0.5f, 0.7f, null);
long e = System.currentTimeMillis();
if(faceInfos.size() > 0){
System.out.println("fileName="+fileName+",\tcost="+(e-s)+",\t"+faceInfos.get(0).score);
}else{
System.out.println("fileName="+fileName+",\tcost="+(e-s)+",\t"+faceInfos);
}
for(FaceInfo faceInfo : faceInfos){
Mat cropFace = CropUtil.crop(image, faceInfo.box);
long a = System.currentTimeMillis();
FaceInfo.Attribute attribute = inferAttribute.inference(ImageMat.fromCVMat(cropFace), null);
System.out.println("ssss="+(System.currentTimeMillis() - a));
Imgproc.putText(image, attribute.valueOfGender().name(), new Point(faceInfo.box.x1()+10, faceInfo.box.y1()+10), Imgproc.FONT_HERSHEY_PLAIN, 1, new Scalar(0,0,255));
Imgproc.putText(image, ""+attribute.age, new Point(faceInfo.box.x1()+10, faceInfo.box.y1()+40), Imgproc.FONT_HERSHEY_PLAIN, 1, new Scalar(0,0,255));
Imgproc.rectangle(image, new Point(faceInfo.box.x1(), faceInfo.box.y1()), new Point(faceInfo.box.x2(), faceInfo.box.y2()), new Scalar(0,0,255));
int pointNum = 1;
for(FaceInfo.Point keyPoint : faceInfo.points){
Imgproc.circle(image, new Point(keyPoint.x, keyPoint.y), 3, new Scalar(0,0,255), -1);
Imgproc.putText(image, String.valueOf(pointNum), new Point(keyPoint.x+1, keyPoint.y), Imgproc.FONT_HERSHEY_PLAIN, 1, new Scalar(255,0,0));
pointNum ++ ;
}
}
HighGui.imshow(fileName, image);
HighGui.waitKey();
}
System.exit(1);
}
}

View File

@ -1,9 +1,6 @@
package com.visual.face.search.server.bootstrap.conf;
import com.visual.face.search.core.base.FaceAlignment;
import com.visual.face.search.core.base.FaceDetection;
import com.visual.face.search.core.base.FaceKeyPoint;
import com.visual.face.search.core.base.FaceRecognition;
import com.visual.face.search.core.base.*;
import com.visual.face.search.core.extract.FaceFeatureExtractor;
import com.visual.face.search.core.extract.FaceFeatureExtractorImpl;
import com.visual.face.search.core.models.*;
@ -50,6 +47,16 @@ public class ModelConfig {
private Integer faceRecognitionNameThread;
@Value("${visual.model.faceAttribute.name:InsightAttributeDetection}")
private String faceAttributeDetectionName;
@Value("${visual.model.faceAttribute.modelPath}")
private String[] faceAttributeDetectionNameModel;
@Value("${visual.model.faceAttribute.thread:4}")
private Integer faceAttributeDetectionNameThread;
/**
* Get the face recognition model
* @return
@ -123,6 +130,19 @@ public class ModelConfig {
}
}
/**
* Face attribute detection (gender/age) model
* @return InsightAttributeDetection bean
*/
@Bean(name = "visualAttributeDetection")
public InsightAttributeDetection getAttributeDetection(){
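//note: only one FaceAttribute implementation is wired up for now, so both branches construct InsightAttributeDetection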
if(faceAttributeDetectionName.equalsIgnoreCase("InsightAttributeDetection")){
return new InsightAttributeDetection(getModelPath(faceAttributeDetectionName, faceAttributeDetectionNameModel)[0], faceAttributeDetectionNameThread);
}else{
return new InsightAttributeDetection(getModelPath(faceAttributeDetectionName, faceAttributeDetectionNameModel)[0], faceAttributeDetectionNameThread);
}
}
/**
* Build the feature extractor
* @param faceDetection face detection model
@ -136,8 +156,13 @@ public class ModelConfig {
@Qualifier("visualBackupFaceDetection")FaceDetection backupFaceDetection,
@Qualifier("visualFaceKeyPoint")FaceKeyPoint faceKeyPoint,
@Qualifier("visualFaceAlignment")FaceAlignment faceAlignment,
@Qualifier("visualFaceRecognition")FaceRecognition faceRecognition){
return new FaceFeatureExtractorImpl(faceDetection, backupFaceDetection, faceKeyPoint, faceAlignment, faceRecognition);
@Qualifier("visualFaceRecognition")FaceRecognition faceRecognition,
@Qualifier("visualAttributeDetection") FaceAttribute faceAttribute
){
return new FaceFeatureExtractorImpl(
faceDetection, backupFaceDetection, faceKeyPoint,
faceAlignment, faceRecognition, faceAttribute
);
}
/**
@ -172,6 +197,10 @@ public class ModelConfig {
return new String[]{basePath + "model/onnx/recognition_face_arc/glint360k_cosface_r18_fp16_0.1.onnx"};
}
if((null == modelPath || modelPath.length != 1) && "InsightAttributeDetection".equalsIgnoreCase(modelName)){
return new String[]{basePath + "model/onnx/attribute_gender_age/insight_gender_age.onnx"};
}
return modelPath;
}
}

View File

@ -42,6 +42,10 @@ visual:
name: InsightArcFaceRecognition
modelPath:
thread: 1
faceAttribute:
name: InsightAttributeDetection
modelPath:
thread: 1
engine:
selected: milvus
proxima:

View File

@ -42,6 +42,10 @@ visual:
name: InsightArcFaceRecognition
modelPath:
thread: 4
faceAttribute:
name: InsightAttributeDetection
modelPath:
thread: 4
engine:
selected: proxima
proxima:
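
For an external model file, the same keys can point at an absolute path instead of relying on the classpath fallback in ModelConfig.getModelPath (the path below is illustrative, not from this commit):

visual:
  model:
    faceAttribute:
      name: InsightAttributeDetection
      modelPath: /opt/face-search/models/insight_gender_age.onnx
      thread: 4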