diff --git a/build.gradle b/build.gradle
index 59660130..5890fd0c 100644
--- a/build.gradle
+++ b/build.gradle
@@ -5,7 +5,7 @@ buildscript {
jcenter()
}
dependencies {
- classpath 'com.android.tools.build:gradle:2.3.3'
+ classpath 'com.android.tools.build:gradle:3.0.0-alpha6'
// NOTE: Do not place your application dependencies here; they belong
// in the individual module build.gradle files
diff --git a/cgeDemo/src/main/java/org/wysaid/cgeDemo/CameraDemoActivity.java b/cgeDemo/src/main/java/org/wysaid/cgeDemo/CameraDemoActivity.java
index 3acd7287..eb90485c 100755
--- a/cgeDemo/src/main/java/org/wysaid/cgeDemo/CameraDemoActivity.java
+++ b/cgeDemo/src/main/java/org/wysaid/cgeDemo/CameraDemoActivity.java
@@ -4,6 +4,7 @@
import android.content.Intent;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
+import android.graphics.PointF;
import android.hardware.Camera;
import android.net.Uri;
import android.os.Bundle;
@@ -18,12 +19,17 @@
import android.widget.LinearLayout;
import android.widget.SeekBar;
+import com.sensetime.stmobileapi.STMobileFaceAction;
+import com.sensetime.stmobileapi.STMobileMultiTrack106;
+import com.sensetime.stmobileapi.STUtils;
+
import org.wysaid.camera.CameraInstance;
import org.wysaid.myUtils.FileUtil;
import org.wysaid.myUtils.ImageUtil;
import org.wysaid.myUtils.MsgUtil;
import org.wysaid.nativePort.CGEFrameRecorder;
import org.wysaid.nativePort.CGENativeLibrary;
+import org.wysaid.stmobile.Accelerometer;
import org.wysaid.view.CameraRecordGLSurfaceView;
public class CameraDemoActivity extends AppCompatActivity {
@@ -338,12 +344,110 @@ public void onAutoFocus(boolean success, Camera camera) {
});
mCameraView.setPictureSize(600, 800, true);
+
+ Button faceDetectBtn = (Button) findViewById(R.id.face_detect_btn);
+ faceDetectBtn.setOnClickListener(new View.OnClickListener() {
+ @Override
+ public void onClick(View v) {
+ CameraInstance.getInstance().getCameraDevice().setPreviewCallback(new Camera.PreviewCallback() {
+ @Override
+ public void onPreviewFrame(byte[] data, Camera camera) {
+ if (mNV21Data == null) {
+ mNV21Data = new byte[CameraInstance.getInstance().previewHeight() * CameraInstance.getInstance().previewWidth() * 2]; // NV21 actually needs w*h*3/2 bytes; *2 over-allocates for safety
+ }
+ synchronized (mNV21Data) {
+ System.arraycopy(data, 0, mNV21Data, 0, data.length);
+ mIsNV21ready = true;
+ }
+ }
+ });
+ mCameraView.setFilterWithConfig("@facedetect");
+ }
+ });
+ mAcc = new Accelerometer(this);
+ mAcc.start();
+ }
+
+ private byte[] mNV21Data;
+ private volatile boolean mIsFaceDetectThreadKilled = false; // written from the UI thread, read from the detection thread
+ private Thread mFaceDetectThread;
+ private volatile boolean mIsNV21ready = false; // written from the camera callback thread, read from the detection thread
+ private Accelerometer mAcc;
+ private STMobileMultiTrack106 mTracker = null;
+ private static final int ST_MOBILE_TRACKING_ENABLE_FACE_ACTION = 0x00000020;
+ private byte[] tmp = null;
+
+ private void stopFaceDetectThread() {
+ mIsFaceDetectThreadKilled = true;
+ if (mFaceDetectThread != null) {
+ try {
+ mFaceDetectThread.join(1000);
+ } catch (InterruptedException e1) {
+ e1.printStackTrace();
+ }
+ }
+ }
+
+ private void startFaceDetectThread() {
+ mIsFaceDetectThreadKilled = false;
+
+ mFaceDetectThread = new Thread() {
+ @Override
+ public void run() {
+ while (!mIsFaceDetectThreadKilled) {
+ if (!mIsNV21ready) {
+ Thread.yield(); // yield instead of hard busy-spinning while waiting for the next frame
+ continue;
+ }
+ if (tmp == null) {
+ tmp = new byte[CameraInstance.getInstance().previewHeight() * CameraInstance.getInstance().previewWidth() * 2];
+ }
+ synchronized (mNV21Data) {
+ System.arraycopy(mNV21Data, 0, tmp, 0, mNV21Data.length);
+ mIsNV21ready = false;
+ }
+ boolean frontCamera = (CameraInstance.getInstance().getFacing() == Camera.CameraInfo.CAMERA_FACING_FRONT);
+ int dir = Accelerometer.getDirection();
+ /*
+ * Note: the rotation convention differs between the front and back cameras,
+ * and it also differs between phone models.
+ */
+ if (frontCamera &&
+ ((CameraInstance.getInstance().getCameraInfo().orientation == 270 && (dir & 1) == 1) ||
+ (CameraInstance.getInstance().getCameraInfo().orientation == 90 && (dir & 1) == 0))) {
+ dir = (dir ^ 2);
+ }
+ STMobileFaceAction[] faceActions = mTracker.trackFaceAction(tmp, dir, CameraInstance.getInstance().previewWidth(), CameraInstance.getInstance().previewHeight());
+ boolean rotate270 = CameraInstance.getInstance().getCameraInfo().orientation == 270;
+ Log.e("xuezi", "rotate270" + rotate270);
+ if (faceActions != null && faceActions.length > 0) {
+ STMobileFaceAction r = faceActions[0];
+ PointF[] points = r.getFace().getPointsArray();
+ float[] pointArray = new float[212];
+ for (int i = 0; i < points.length; i++) {
+ if (rotate270) {
+ points[i] = STUtils.RotateDeg270(points[i], CameraInstance.getInstance().previewWidth(), CameraInstance.getInstance().previewHeight(), frontCamera);
+ } else {
+ points[i] = STUtils.RotateDeg90(points[i], CameraInstance.getInstance().previewWidth(), CameraInstance.getInstance().previewHeight(), frontCamera);
+ }
+ pointArray[2*i] = points[i].x / CameraInstance.getInstance().previewHeight();
+ pointArray[2*i+1] = points[i].y / CameraInstance.getInstance().previewWidth();
+ }
+ mCameraView.setFaceDetectPoints(pointArray);
+ }
+ else {
+ mCameraView.clearFaceDetectPoints();
+ }
+ }
+ }
+ };
+ mFaceDetectThread.start();
}
private View.OnClickListener mFilterSwitchListener = new View.OnClickListener() {
@Override
public void onClick(View v) {
- MyButtons btn = (MyButtons) v;
+ CameraInstance.getInstance().getCameraDevice().setPreviewCallback(null);
+ MyButtons btn = (MyButtons)v;
mCameraView.setFilterWithConfig(btn.filterConfig);
mCurrentConfig = btn.filterConfig;
}
@@ -388,6 +492,13 @@ public void onPause() {
Log.i(LOG_TAG, "activity onPause...");
mCameraView.release(null);
mCameraView.onPause();
+ mAcc.stop();
+ stopFaceDetectThread();
+ if (mTracker != null) {
+ System.out.println("destroy tracker");
+ mTracker.destory();
+ mTracker = null;
+ }
}
@Override
@@ -395,6 +506,18 @@ public void onResume() {
super.onResume();
mCameraView.onResume();
+ mAcc.start();
+ if (mTracker == null) {
+// long start_init = System.currentTimeMillis();
+// int config = 0; //default config
+ int config = ST_MOBILE_TRACKING_ENABLE_FACE_ACTION;
+ mTracker = new STMobileMultiTrack106(this, config);
+ int max = 1;
+ mTracker.setMaxDetectableFaces(max);
+// long end_init = System.currentTimeMillis();
+// Log.i("track106", "init cost "+(end_init - start_init) +" ms");
+ }
+ startFaceDetectThread();
}
@Override
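
The preview callback and the detection thread above hand the latest NV21 frame across threads with a synchronized buffer plus a ready flag. Below is a minimal, self-contained sketch of that handoff pattern using wait/notify instead of the spin loop; all names here are hypothetical, not part of this patch.

```java
/** Sketch: frame handoff between the camera callback and a detection thread. */
final class FrameMailbox {
    private final Object lock = new Object();
    private byte[] latest;
    private boolean ready = false;

    void publish(byte[] data) {                   // producer: camera callback thread
        synchronized (lock) {
            latest = data.clone();                // copy: Camera reuses 'data' after the callback returns
            ready = true;
            lock.notify();
        }
    }

    byte[] take() throws InterruptedException {   // consumer: detection thread
        synchronized (lock) {
            while (!ready) lock.wait();           // block instead of spinning
            ready = false;
            return latest;
        }
    }
}
```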
diff --git a/cgeDemo/src/main/res/layout/activity_camera_demo.xml b/cgeDemo/src/main/res/layout/activity_camera_demo.xml
index 6122be06..44cd87f7 100755
--- a/cgeDemo/src/main/res/layout/activity_camera_demo.xml
+++ b/cgeDemo/src/main/res/layout/activity_camera_demo.xml
@@ -29,6 +29,7 @@
@@ -123,4 +124,10 @@
+[layout XML lost to extraction; this hunk adds the face_detect_btn Button referenced by CameraDemoActivity]
diff --git a/gradle/wrapper/gradle-wrapper.properties b/gradle/wrapper/gradle-wrapper.properties
index 819de7a9..f6243487 100644
--- a/gradle/wrapper/gradle-wrapper.properties
+++ b/gradle/wrapper/gradle-wrapper.properties
@@ -1,6 +1,6 @@
-#Fri Jun 16 17:02:18 CST 2017
+#Fri Jul 14 16:02:35 CST 2017
distributionBase=GRADLE_USER_HOME
distributionPath=wrapper/dists
zipStoreBase=GRADLE_USER_HOME
zipStorePath=wrapper/dists
-distributionUrl=https\://services.gradle.org/distributions/gradle-3.3-all.zip
+distributionUrl=https\://services.gradle.org/distributions/gradle-4.1-milestone-1-all.zip
diff --git a/library/build.gradle b/library/build.gradle
index 6e946d7f..31e9149d 100644
--- a/library/build.gradle
+++ b/library/build.gradle
@@ -39,4 +39,5 @@ android {
dependencies {
compile fileTree(dir: 'libs', include: ['*.jar'])
compile 'com.android.support:appcompat-v7:25.2.0'
+ compile project(":stmobile")
}
diff --git a/library/src/main/java/org/wysaid/camera/CameraInstance.java b/library/src/main/java/org/wysaid/camera/CameraInstance.java
index 8a84a340..eccc6ae7 100644
--- a/library/src/main/java/org/wysaid/camera/CameraInstance.java
+++ b/library/src/main/java/org/wysaid/camera/CameraInstance.java
@@ -28,6 +28,10 @@ public class CameraInstance {
private Camera mCameraDevice;
private Camera.Parameters mParams;
+ protected Camera.CameraInfo mCameraInfo = null;
+ public Camera.CameraInfo getCameraInfo() {
+ return mCameraInfo;
+ }
public static final int DEFAULT_PREVIEW_RATE = 30;
@@ -96,6 +100,8 @@ public synchronized boolean tryOpenCamera(CameraOpenCallback callback, int facin
if (cameraInfo.facing == facing) {
mDefaultCameraID = i;
mFacing = facing;
+ mCameraInfo = cameraInfo;
+ break;
}
}
}
@@ -305,6 +311,7 @@ public void initCamera(int previewRate) {
Log.i(LOG_TAG, String.format("Camera Picture Size: %d x %d", szPic.width, szPic.height));
Log.i(LOG_TAG, String.format("Camera Preview Size: %d x %d", szPrev.width, szPrev.height));
+
}
public synchronized void setFocusMode(String focusMode) {
diff --git a/library/src/main/java/org/wysaid/nativePort/CGEFrameRenderer.java b/library/src/main/java/org/wysaid/nativePort/CGEFrameRenderer.java
index 4cf66a67..4fa6b109 100644
--- a/library/src/main/java/org/wysaid/nativePort/CGEFrameRenderer.java
+++ b/library/src/main/java/org/wysaid/nativePort/CGEFrameRenderer.java
@@ -82,7 +82,7 @@ public void setRenderFlipScale(float x, float y) {
//initialize the filters width config string
public void setFilterWidthConfig(final String config) {
if(mNativeAddress != 0)
- nativeSetFilterWidthConfig(mNativeAddress, config);
+ nativeSetFilterWithConfig(mNativeAddress, config);
}
//set the mask rotation (radian)
@@ -95,7 +95,6 @@ public void setMaskRotation(float rot) {
public void setMaskFlipScale(float x, float y) {
if(mNativeAddress != 0)
nativeSetMaskFlipScale(mNativeAddress, x, y);
-
}
@@ -152,6 +151,14 @@ public void setNativeFilter(long nativeFilter) {
nativeSetFilterWithAddr(mNativeAddress, nativeFilter);
}
+ public void setFaceDetectPoints(float[] keyPoints) {
+ nativeSetFaceDetectPoints(mNativeAddress, keyPoints);
+ }
+
+ public void clearFaceDetectPoints() {
+ nativeClearFaceDetectPoints(mNativeAddress);
+ }
+
///////////////// protected ///////////////////////
protected native long nativeCreateRenderer();
@@ -166,7 +173,7 @@ public void setNativeFilter(long nativeFilter) {
protected native void nativeSetSrcFlipScale(long holder, float x, float y);
protected native void nativeSetRenderRotation(long holder, float rad);
protected native void nativeSetRenderFlipScale(long holder, float x, float y);
- protected native void nativeSetFilterWidthConfig(long holder, String config);
+ protected native void nativeSetFilterWithConfig(long holder, String config);
protected native void nativeSetFilterIntensity(long holder, float value);
protected native void nativeSetMaskRotation(long holder, float value);
protected native void nativeSetMaskFlipScale(long holder, float x, float y);
@@ -188,4 +195,6 @@ public void setNativeFilter(long nativeFilter) {
//Special usage; use with caution: misuse may cause abnormal program behavior.
protected native void nativeSetFilterWithAddr(long holder, long filter);
+ protected native void nativeSetFaceDetectPoints(long holder, float[] keyPoints);
+ protected native void nativeClearFaceDetectPoints(long holder);
}
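
The nativeSetFilterWidthConfig to nativeSetFilterWithConfig rename only works because the C side is renamed in the same commit (see cgeFrameRendererWrapper.cpp below): a Java native method binds to a C symbol derived from its fully qualified name, so the two sides must change in lockstep. This is the standard JNI convention, not new API:

```java
// Java side (org.wysaid.nativePort.CGEFrameRenderer):
//   protected native void nativeSetFilterWithConfig(long holder, String config);
// binds at call time to the C symbol:
//   Java_org_wysaid_nativePort_CGEFrameRenderer_nativeSetFilterWithConfig
// Renaming only one side raises UnsatisfiedLinkError on the first call.
```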
diff --git a/library/src/main/java/org/wysaid/stmobile/Accelerometer.java b/library/src/main/java/org/wysaid/stmobile/Accelerometer.java
new file mode 100644
index 00000000..b30741f8
--- /dev/null
+++ b/library/src/main/java/org/wysaid/stmobile/Accelerometer.java
@@ -0,0 +1,138 @@
+package org.wysaid.stmobile;
+
+import android.content.Context;
+import android.hardware.Sensor;
+import android.hardware.SensorEvent;
+import android.hardware.SensorEventListener;
+import android.hardware.SensorManager;
+
+/**
+ *
+ * @author MatrixCV
+ *
+ * Accelerometer listens to the accelerometer (gravity) sensor to obtain the current device orientation
+ *
+ */
+public class Accelerometer {
+ /**
+ *
+ * @author MatrixCV
+ *
+ * CLOCKWISE_ANGLE is the clockwise rotation angle of the device.
+ * Deg0 is defined as the landscape pose shown below:
+ * ___________________
+ * | +--------------+ |
+ * | | | |
+ * | | | |
+ * | | | O|
+ * | | | |
+ * | |______________| |
+ * ---------------------
+ * Rotating clockwise yields Deg90, i.e. the device upright in portrait, as shown below:
+ * ___________
+ * | |
+ * |+---------+|
+ * || ||
+ * || ||
+ * || ||
+ * || ||
+ * || ||
+ * |+---------+|
+ * |_____O_____|
+ */
+ public enum CLOCKWISE_ANGLE {
+ Deg0(0), Deg90(1), Deg180(2), Deg270(3);
+ private int value;
+ private CLOCKWISE_ANGLE(int value){
+ this.value = value;
+ }
+ public int getValue() {
+ return value;
+ }
+ }
+
+ private SensorManager sensorManager = null;
+
+ private boolean hasStarted = false;
+
+ private static CLOCKWISE_ANGLE rotation;
+
+ /**
+ *
+ * @param ctx
+ * initialize with an Activity (Context) to obtain the sensor service
+ */
+ public Accelerometer(Context ctx) {
+ sensorManager = (SensorManager) ctx
+ .getSystemService(Context.SENSOR_SERVICE);
+ rotation = CLOCKWISE_ANGLE.Deg0;
+ }
+
+ /**
+ * Start listening to the sensor.
+ */
+ public void start() {
+ if (hasStarted) return;
+ hasStarted = true;
+ rotation = CLOCKWISE_ANGLE.Deg0;
+ sensorManager.registerListener(accListener,
+ sensorManager.getDefaultSensor(Sensor.TYPE_ACCELEROMETER),
+ SensorManager.SENSOR_DELAY_NORMAL);
+ }
+
+ /**
+ * Stop listening to the sensor.
+ */
+ public void stop() {
+ if (!hasStarted) return;
+ hasStarted = false;
+ sensorManager.unregisterListener(accListener);
+ }
+
+ /**
+ *
+ * @return
+ * the current device orientation (the CLOCKWISE_ANGLE ordinal, 0..3)
+ */
+ static public int getDirection() {
+ return rotation.getValue();
+ }
+
+ /**
+ * Maps sensor readings to the device orientation.
+ */
+ private SensorEventListener accListener = new SensorEventListener() {
+
+ @Override
+ public void onAccuracyChanged(Sensor arg0, int arg1) {
+ }
+
+ @Override
+ public void onSensorChanged(SensorEvent arg0) {
+ if (arg0.sensor.getType() == Sensor.TYPE_ACCELEROMETER) {
+ float x = arg0.values[0];
+ float y = arg0.values[1];
+ float z = arg0.values[2];
+ if (Math.abs(x)>3 || Math.abs(y)>3) {
+ if (Math.abs(x)>Math.abs(y)) {
+ if (x > 0) {
+ rotation = CLOCKWISE_ANGLE.Deg0;
+ //Log.d("ROTATION","CLOCKWISE_ANGLE: Deg0");
+ } else {
+ rotation = CLOCKWISE_ANGLE.Deg180;
+ //Log.d("ROTATION","CLOCKWISE_ANGLE: Deg180");
+ }
+ } else {
+ if (y > 0) {
+ rotation = CLOCKWISE_ANGLE.Deg90;
+ //Log.d("ROTATION","CLOCKWISE_ANGLE: Deg90");
+ } else {
+ rotation = CLOCKWISE_ANGLE.Deg270;
+ //Log.d("ROTATION","CLOCKWISE_ANGLE: Deg270");
+ }
+ }
+ }
+ }
+ }
+ };
+}
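
getDirection() is what the demo activity polls once per frame; its return value is the CLOCKWISE_ANGLE ordinal. A tiny hypothetical consumer, for reference:

```java
import android.util.Log;
import org.wysaid.stmobile.Accelerometer;

final class OrientationLogger {
    /** Hypothetical helper: log the current device orientation. */
    static void logOrientation() {
        int dir = Accelerometer.getDirection(); // 0=Deg0, 1=Deg90, 2=Deg180, 3=Deg270
        Log.d("Orientation", "device rotated " + (dir * 90) + " degrees clockwise");
    }
}
```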
diff --git a/library/src/main/java/org/wysaid/view/CameraGLSurfaceView.java b/library/src/main/java/org/wysaid/view/CameraGLSurfaceView.java
index 28a50d31..0ddd3c9f 100644
--- a/library/src/main/java/org/wysaid/view/CameraGLSurfaceView.java
+++ b/library/src/main/java/org/wysaid/view/CameraGLSurfaceView.java
@@ -228,6 +228,32 @@ public synchronized boolean setFlashLightMode(String mode) {
return true;
}
+ public synchronized void setFaceDetectPoints(final float[] keyPoints) {
+ queueEvent(new Runnable() {
+ @Override
+ public void run() {
+ if(mFrameRecorder != null) {
+ mFrameRecorder.setFaceDetectPoints(keyPoints);
+ } else {
+ Log.e(LOG_TAG, "setFaceDetectPoints");
+ }
+ }
+ });
+ }
+
+ public synchronized void clearFaceDetectPoints() {
+ queueEvent(new Runnable() {
+ @Override
+ public void run() {
+ if(mFrameRecorder != null) {
+ mFrameRecorder.clearFaceDetectPoints();
+ } else {
+ Log.e(LOG_TAG, "clearFaceDetectPoints");
+ }
+ }
+ });
+ }
+
public synchronized void setFilterWithConfig(final String config) {
queueEvent(new Runnable() {
@Override
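
Both new methods follow this file's existing pattern: GL state may only be touched on the GLSurfaceView render thread, so calls arriving from the detection thread are marshalled through queueEvent(). Schematically (a fragment of the idiom, not new code in the patch):

```java
// Threading idiom used by setFaceDetectPoints/clearFaceDetectPoints:
// the detection thread never issues GL calls itself; it posts a Runnable
// that the render thread executes before drawing the next frame.
queueEvent(new Runnable() {
    @Override
    public void run() {
        // runs on the GL thread; safe to call into mFrameRecorder here
    }
});
```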
diff --git a/library/src/main/jni/Android.mk b/library/src/main/jni/Android.mk
index 4ba346d2..b9dfa4ef 100644
--- a/library/src/main/jni/Android.mk
+++ b/library/src/main/jni/Android.mk
@@ -84,6 +84,7 @@ LOCAL_SRC_FILES := \
$(CGE_SOURCE)/filters/cgeDynamicWaveFilter.cpp \
$(CGE_SOURCE)/filters/cgeMotionFlowFilter.cpp \
$(CGE_SOURCE)/filters/cgeColorMappingFilter.cpp \
+ $(CGE_SOURCE)/filters/cgeFaceDetectFilter.cpp \
$(CGE_SOURCE)/extends/cgeThread.cpp \
\
$(CGE_ROOT)/interface/cgeNativeLibrary.cpp \
diff --git a/library/src/main/jni/cge/common/cgeImageHandler.cpp b/library/src/main/jni/cge/common/cgeImageHandler.cpp
index 6b31c95c..fdbb80d4 100644
--- a/library/src/main/jni/cge/common/cgeImageHandler.cpp
+++ b/library/src/main/jni/cge/common/cgeImageHandler.cpp
@@ -787,4 +787,11 @@ namespace CGE
return true;
}
+ void CGEImageHandler::setFaceDetectKeyPoint(float *keyPoints) {
+ m_keyPoints = keyPoints;
+ }
+
+ float* CGEImageHandler::getFaceDetectKeyPoint() {
+ return m_keyPoints;
+ }
}
diff --git a/library/src/main/jni/cge/filters/cgeDataParsingEngine.cpp b/library/src/main/jni/cge/filters/cgeDataParsingEngine.cpp
index a82f4686..af7e3ed8 100755
--- a/library/src/main/jni/cge/filters/cgeDataParsingEngine.cpp
+++ b/library/src/main/jni/cge/filters/cgeDataParsingEngine.cpp
@@ -13,11 +13,13 @@
#include "cgeFilterBasic.h"
#include "cgeDynamicFilters.h"
#include "cgeColorMappingFilter.h"
+#include "cgeFaceDetectFilter.h"
#include
#include
#include
+
//为了加快处理速度,使用固定大小的buffer来存储Parser所需参数。
//每个method后面的参数长度都不应该超过BUFFER_LEN。
//如果你的Parser所需参数超过此长度,请将BUFFER_LEN增加到合适的长度。
@@ -74,13 +76,13 @@ tableParserHelper(vec, pstr + n, i - n); \
CGE_LOG_ERROR("CGEDataParsingEngine::curveParser Create Curve filter Failed!\n");
return nullptr;
}
- }
+ }
for(int i = 0; pstr[i] != '\0' && pstr[i] != '@';)
{
switch (pstr[i])
{
- case 'R': case 'r':
+ case 'R': case 'r':
if(toupper(pstr[i + 1]) == 'G' && toupper(pstr[i + 2]) == 'B')
{
vecRGB.clear();
@@ -101,7 +103,7 @@ tableParserHelper(vec, pstr + n, i - n); \
vecR.clear();
++i;
int n = i;
- for(char c = toupper(pstr[i]);
+ for(char c = toupper(pstr[i]);
c != '\0' && c != 'R' && c != 'G' && c != 'B' && c != '@'; c = toupper(pstr[i])) ++i;
tableParserHelper(vecR, pstr + n, i - n);
if(vecR.size() < 2)
@@ -125,7 +127,7 @@ tableParserHelper(vec, pstr + n, i - n); \
else
{
proc->pushPointsG(vecG.data(), vecG.size());
- }
+ }
break;
case 'B': case 'b':
vecB.clear();
@@ -138,9 +140,9 @@ tableParserHelper(vec, pstr + n, i - n); \
else
{
proc->pushPointsB(vecB.data(), vecB.size());
- }
+ }
break;
- default:
+ default:
++i;
break;
}
@@ -161,7 +163,7 @@ tableParserHelper(vec, pstr + n, i - n); \
{
using namespace std;
using namespace CGE;
-
+
float vignetteStart, vignetteEnd, colorScaleLow, colorScaleRange, saturation;
int isLinear = 0;
while(*pstr != '\0' && !isdigit(*pstr)) ++pstr;
@@ -205,7 +207,7 @@ tableParserHelper(vec, pstr + n, i - n); \
{
++i;
int n = i;
- for(char c = toupper(pstr[i]);
+ for(char c = toupper(pstr[i]);
c != '\0' && c != 'R' && c != 'G' && c != 'B' && c != '@'; c = toupper(pstr[i])) ++i;
tableParserHelper(vecR, pstr + n, i - n);
}
@@ -248,7 +250,7 @@ tableParserHelper(vec, pstr + n, i - n); \
float vignetteStart, vignetteEnd, colorScaleLow, colorScaleRange, saturation;
int isLinear = 0;
while(*pstr != '\0' && !isdigit(*pstr)) ++pstr;
- if(sscanf(pstr, "%f%*c%f%*c%f%*c%f%*c%f%*c%d",
+ if(sscanf(pstr, "%f%*c%f%*c%f%*c%f%*c%f%*c%d",
&vignetteStart, &vignetteEnd, &colorScaleLow, &colorScaleRange, &saturation, &isLinear) < 5)
{
return nullptr;
@@ -266,6 +268,16 @@ tableParserHelper(vec, pstr + n, i - n); \
return proc;
}
+ CGEImageFilterInterface* CGEDataParsingEngine::faceDetectParser(const char* pstr, CGEMutipleEffectFilter* fatherFilter)
+ {
+ CGEFaceDetectFilter* proc = new CGEFaceDetectFilter;
+ if(!proc->init()) // guard against shader-compile failure, as the other parsers do
+ {
+ delete proc;
+ return nullptr;
+ }
+
+ if(fatherFilter != nullptr)
+ fatherFilter->addFilter(proc);
+ return proc;
+ }
+
#define ADJUSTHELP_COMMON_FUNC(str, procName, setFunc) \
do{\
float intensity;\
diff --git a/library/src/main/jni/cge/filters/cgeFaceDetectFilter.cpp b/library/src/main/jni/cge/filters/cgeFaceDetectFilter.cpp
new file mode 100644
index 00000000..14618490
--- /dev/null
+++ b/library/src/main/jni/cge/filters/cgeFaceDetectFilter.cpp
@@ -0,0 +1,63 @@
+//
+// Created by Xue Weining on 2017/6/27.
+//
+#include "cgeFaceDetectFilter.h"
+static CGEConstString s_fshFaceDetect = CGE_SHADER_STRING_PRECISION_H
+(
+ varying vec2 textureCoordinate;
+ uniform sampler2D inputImageTexture;
+ uniform float points[212];
+ uniform bool isFaceDetect;
+
+ void main() {
+ gl_FragColor = texture2D(inputImageTexture, textureCoordinate);
+ if (isFaceDetect) {
+ for (int i = 0; i < 106; i++) {
+ if (pow(abs(textureCoordinate.x - points[2*i]), 2.0) +
+ pow(abs(textureCoordinate.y - points[2*i+1]), 2.0) < 0.00001) {
+ gl_FragColor = vec4(1.0, 1.0, 0.0, 1.0);
+ }
+ }
+ }
+ }
+);
+
+namespace CGE
+{
+ bool CGEFaceDetectFilter::init()
+ {
+ if(initShadersFromString(g_vshDefaultWithoutTexCoord, s_fshFaceDetect))
+ {
+ return true;
+ }
+ return false;
+ }
+
+ void CGEFaceDetectFilter::render2Texture(CGEImageHandlerInterface* handler, GLuint srcTexture, GLuint vertexBufferID)
+ {
+ handler->setAsTarget();
+ m_program.bind();
+
+ glBindBuffer(GL_ARRAY_BUFFER, vertexBufferID); // bind the shared quad VBO; the attrib pointer below is an offset into it
+ glEnableVertexAttribArray(0);
+ glVertexAttribPointer(0, 2, GL_FLOAT, GL_FALSE, 0, 0);
+ glActiveTexture(GL_TEXTURE0);
+ glBindTexture(GL_TEXTURE_2D, srcTexture);
+ if(m_uniformParam != nullptr)
+ m_uniformParam->assignUniforms(handler, m_program.programID());
+
+ float* keyPoints = handler->getFaceDetectKeyPoint();
+ if (keyPoints != nullptr) {
+ GLint isFaceDetectUniform = glGetUniformLocation(m_program.programID(), "isFaceDetect");
+ glUniform1i(isFaceDetectUniform, GL_TRUE);
+ GLint pointsUniform = glGetUniformLocation(m_program.programID(), "points");
+ glUniform1fv(pointsUniform, 212, keyPoints);
+ }
+ else {
+ GLint isFaceDetectUniform = glGetUniformLocation(m_program.programID(), "isFaceDetect");
+ glUniform1i(isFaceDetectUniform, GL_FALSE);
+ }
+
+ glDrawArrays(GL_TRIANGLE_FAN, 0, 4);
+ cgeCheckGLError("glDrawArrays");
+ }
+}
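
render2Texture re-queries and re-uploads its two uniforms every frame (the lookups could be cached after init). For readers following along from the Java side, this is the equivalent GLES20 call sequence; a sketch, assuming the program handle is valid and currently bound via glUseProgram:

```java
import android.opengl.GLES20;

/** Sketch: Java GLES20 equivalent of the uniform upload in render2Texture. */
final class FacePointUploader {
    static void upload(int program, float[] keyPoints) { // keyPoints: 212 floats, or null to disable
        int isFaceDetect = GLES20.glGetUniformLocation(program, "isFaceDetect");
        if (keyPoints != null) {
            GLES20.glUniform1i(isFaceDetect, 1);   // true: shader paints the 106 key points
            int points = GLES20.glGetUniformLocation(program, "points");
            GLES20.glUniform1fv(points, 212, keyPoints, 0);
        } else {
            GLES20.glUniform1i(isFaceDetect, 0);   // false: plain pass-through
        }
    }
}
```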
diff --git a/library/src/main/jni/cge/filters/cgeMultipleEffects.cpp b/library/src/main/jni/cge/filters/cgeMultipleEffects.cpp
index 229fbc03..5b2dbbf8 100644
--- a/library/src/main/jni/cge/filters/cgeMultipleEffects.cpp
+++ b/library/src/main/jni/cge/filters/cgeMultipleEffects.cpp
@@ -238,7 +238,10 @@ namespace CGE
*pBuffer = '\0';
- if(strcmp(buffer, "blend") == 0)
+ if (strcmp(buffer, "facedetect") == 0) {
+ CGEDataParsingEngine::faceDetectParser(ptr, this);
+ }
+ else if(strcmp(buffer, "blend") == 0)
{
CGEDataParsingEngine::blendParser(ptr, this);
}
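
With "facedetect" wired into the config parser here, the filter becomes reachable through the ordinary config-string path, so no new Java API is needed to turn it on:

```java
// Exactly what CameraDemoActivity does when the face-detect button is pressed:
mCameraView.setFilterWithConfig("@facedetect");
// Selecting any other filter config replaces it again, as the demo's filter buttons do.
```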
diff --git a/library/src/main/jni/include/cgeImageHandler.h b/library/src/main/jni/include/cgeImageHandler.h
index 53715e57..5c521326 100644
--- a/library/src/main/jni/include/cgeImageHandler.h
+++ b/library/src/main/jni/include/cgeImageHandler.h
@@ -52,7 +52,8 @@ namespace CGE
void getOutputFBOSize(int &w, int &h) { w = m_dstImageSize.width; h = m_dstImageSize.height; }
void copyTextureData(void* data, int w, int h, GLuint texID, GLenum dataFmt, GLenum channelFmt);
-
+ virtual void setFaceDetectKeyPoint(float* keyPoints) = 0;
+ virtual float* getFaceDetectKeyPoint() = 0;
protected:
virtual bool initImageFBO(const void* data, int w, int h, GLenum channelFmt, GLenum dataFmt, int channel);
virtual void clearImageFBO();
@@ -133,9 +134,10 @@ namespace CGE
void setResultDrawer(TextureDrawer* drawer);
virtual void useImageFBO();
-
+ void setFaceDetectKeyPoint(float* keyPoints);
+ float* getFaceDetectKeyPoint();
protected:
-
+ float* m_keyPoints = nullptr;
bool m_bRevertEnabled;
std::vector<CGEImageFilterInterface*> m_vecFilters;
diff --git a/library/src/main/jni/include/filters/cgeDataParsingEngine.h b/library/src/main/jni/include/filters/cgeDataParsingEngine.h
index 8f811329..874c9be2 100644
--- a/library/src/main/jni/include/filters/cgeDataParsingEngine.h
+++ b/library/src/main/jni/include/filters/cgeDataParsingEngine.h
@@ -33,7 +33,7 @@ namespace CGE
static CGEImageFilterInterface* beautifyParser(const char* pstr, CGEMutipleEffectFilter* fatherFilter = nullptr);
static CGEImageFilterInterface* blurParser(const char* pstr, CGEMutipleEffectFilter* fatherFilter = nullptr);
static CGEImageFilterInterface* dynamicParser(const char* pstr, CGEMutipleEffectFilter* fatherFilter = nullptr);
-
+ static CGEImageFilterInterface* faceDetectParser(const char* pstr, CGEMutipleEffectFilter* fatherFilter = nullptr);
};
}
diff --git a/library/src/main/jni/include/filters/cgeFaceDetectFilter.h b/library/src/main/jni/include/filters/cgeFaceDetectFilter.h
new file mode 100644
index 00000000..ddd57382
--- /dev/null
+++ b/library/src/main/jni/include/filters/cgeFaceDetectFilter.h
@@ -0,0 +1,25 @@
+//
+// Created by Xue Weining on 2017/6/27.
+//
+
+#ifndef ANDROID_GPUIMAGE_PLUS_CGEFACEDETECTFILTER_H_H
+#define ANDROID_GPUIMAGE_PLUS_CGEFACEDETECTFILTER_H_H
+
+#include "cgeGLFunctions.h"
+namespace CGE
+{
+ class CGEFaceDetectFilter : public CGEImageFilterInterface {
+ public:
+ bool init();
+ void render2Texture(CGEImageHandlerInterface* handler, GLuint srcTexture, GLuint vertexBufferID);
+ protected:
+ ProgramObject m_programDrawPoint;
+ private:
+ GLfloat* m_keyfacePoint;
+ GLfloat* m_keyfaceColor;
+ GLint aPosition;
+ GLint aColor;
+ };
+}
+
+#endif //ANDROID_GPUIMAGE_PLUS_CGEFACEDETECTFILTER_H_H
diff --git a/library/src/main/jni/interface/cgeFrameRendererWrapper.cpp b/library/src/main/jni/interface/cgeFrameRendererWrapper.cpp
index af68557f..599be5fd 100755
--- a/library/src/main/jni/interface/cgeFrameRendererWrapper.cpp
+++ b/library/src/main/jni/interface/cgeFrameRendererWrapper.cpp
@@ -120,7 +120,7 @@ JNIEXPORT void JNICALL Java_org_wysaid_nativePort_CGEFrameRenderer_nativeSetRend
* Method: nativeSetFilterWidthConfig
* Signature: (Ljava/nio/ByteBuffer;Ljava/lang/String;)V
*/
-JNIEXPORT void JNICALL Java_org_wysaid_nativePort_CGEFrameRenderer_nativeSetFilterWidthConfig
+JNIEXPORT void JNICALL Java_org_wysaid_nativePort_CGEFrameRenderer_nativeSetFilterWithConfig
(JNIEnv *env, jobject obj, jlong addr, jstring config)
{
static CGETexLoadArg texLoadArg;
@@ -254,7 +254,20 @@ JNIEXPORT void JNICALL Java_org_wysaid_nativePort_CGEFrameRenderer_nativeSetFilt
renderer->setFilter((CGEImageFilterInterfaceAbstract*)filter);
}
+JNIEXPORT void JNICALL Java_org_wysaid_nativePort_CGEFrameRenderer_nativeSetFaceDetectPoints
+ (JNIEnv *env, jobject, jlong addr, jfloatArray keyPoints)
+{
+ CGEFrameRenderer* renderer = (CGEFrameRenderer*)addr;
+ static float sKeyPoints[212]; // persistent copy: the handler keeps the raw pointer, and GetFloatArrayElements would leak without a matching Release
+ env->GetFloatArrayRegion(keyPoints, 0, 212, sKeyPoints);
+ renderer->getImageHandler()->setFaceDetectKeyPoint(sKeyPoints);
+}
+JNIEXPORT void JNICALL Java_org_wysaid_nativePort_CGEFrameRenderer_nativeClearFaceDetectPoints
+ (JNIEnv *env, jobject, jlong addr)
+{
+ CGEFrameRenderer* renderer = (CGEFrameRenderer*)addr;
+ renderer->getImageHandler()->setFaceDetectKeyPoint(nullptr);
+}
}
diff --git a/library/src/main/jni/interface/cgeFrameRendererWrapper.h b/library/src/main/jni/interface/cgeFrameRendererWrapper.h
index 147d934e..7279febc 100755
--- a/library/src/main/jni/interface/cgeFrameRendererWrapper.h
+++ b/library/src/main/jni/interface/cgeFrameRendererWrapper.h
@@ -83,7 +83,7 @@ JNIEXPORT void JNICALL Java_org_wysaid_nativePort_CGEFrameRenderer_nativeSetRend
* Method: nativeSetFilterWidthConfig
* Signature: (Ljava/nio/ByteBuffer;Ljava/lang/String;)V
*/
-JNIEXPORT void JNICALL Java_org_wysaid_nativePort_CGEFrameRenderer_nativeSetFilterWidthConfig
+JNIEXPORT void JNICALL Java_org_wysaid_nativePort_CGEFrameRenderer_nativeSetFilterWithConfig
(JNIEnv *, jobject, jlong, jstring);
/*
diff --git a/library/src/main/libs/armeabi-v7a/libCGE.so b/library/src/main/libs/armeabi-v7a/libCGE.so
index cdbecaf3..fa04af8f 100755
Binary files a/library/src/main/libs/armeabi-v7a/libCGE.so and b/library/src/main/libs/armeabi-v7a/libCGE.so differ
diff --git a/library/src/main/libs/armeabi-v7a/libCGEExt.so b/library/src/main/libs/armeabi-v7a/libCGEExt.so
index ed4d632e..054a5800 100755
Binary files a/library/src/main/libs/armeabi-v7a/libCGEExt.so and b/library/src/main/libs/armeabi-v7a/libCGEExt.so differ
diff --git a/library/src/main/libs/armeabi-v7a/libFaceTracker.so b/library/src/main/libs/armeabi-v7a/libFaceTracker.so
index 9c8de788..9efd0a2a 100755
Binary files a/library/src/main/libs/armeabi-v7a/libFaceTracker.so and b/library/src/main/libs/armeabi-v7a/libFaceTracker.so differ
diff --git a/settings.gradle b/settings.gradle
index 5bc0df71..92f35b2c 100644
--- a/settings.gradle
+++ b/settings.gradle
@@ -1 +1 @@
-include ':library', ':cgeDemo'
+include ':library', ':cgeDemo', ':stmobile'
diff --git a/stmobile/.gitignore b/stmobile/.gitignore
new file mode 100644
index 00000000..796b96d1
--- /dev/null
+++ b/stmobile/.gitignore
@@ -0,0 +1 @@
+/build
diff --git a/stmobile/build.gradle b/stmobile/build.gradle
new file mode 100644
index 00000000..87142028
--- /dev/null
+++ b/stmobile/build.gradle
@@ -0,0 +1,36 @@
+apply plugin: 'com.android.library'
+
+android {
+ compileSdkVersion 25
+ buildToolsVersion "25.0.2"
+
+ defaultConfig {
+ minSdkVersion 14
+ targetSdkVersion 25
+
+ testInstrumentationRunner "android.support.test.runner.AndroidJUnitRunner"
+
+ ndk {
+ abiFilters 'armeabi-v7a'
+ }
+ }
+
+ lintOptions {
+ abortOnError false
+ }
+
+ buildTypes {
+ release {
+ minifyEnabled false
+ proguardFiles getDefaultProguardFile('proguard-android.txt'), 'proguard-rules.txt'
+ }
+ }
+}
+
+dependencies {
+ androidTestCompile('com.android.support.test.espresso:espresso-core:2.2.2', {
+ exclude group: 'com.android.support', module: 'support-annotations'
+ })
+ testCompile 'junit:junit:4.12'
+ compile files('libs/jna-min.jar')
+}
diff --git a/stmobile/libs/jna-min.jar b/stmobile/libs/jna-min.jar
new file mode 100644
index 00000000..de659a75
Binary files /dev/null and b/stmobile/libs/jna-min.jar differ
diff --git a/stmobile/proguard-rules.pro b/stmobile/proguard-rules.pro
new file mode 100644
index 00000000..a3f0d013
--- /dev/null
+++ b/stmobile/proguard-rules.pro
@@ -0,0 +1,25 @@
+# Add project specific ProGuard rules here.
+# By default, the flags in this file are appended to flags specified
+# in E:\AndroidStudioSDK\sdk/tools/proguard/proguard-android.txt
+# You can edit the include path and order by changing the proguardFiles
+# directive in build.gradle.
+#
+# For more details, see
+# http://developer.android.com/guide/developing/tools/proguard.html
+
+# Add any project specific keep options here:
+
+# If your project uses WebView with JS, uncomment the following
+# and specify the fully qualified class name to the JavaScript interface
+# class:
+#-keepclassmembers class fqcn.of.javascript.interface.for.webview {
+# public *;
+#}
+
+# Uncomment this to preserve the line number information for
+# debugging stack traces.
+#-keepattributes SourceFile,LineNumberTable
+
+# If you keep the line number information, uncomment this to
+# hide the original source file name.
+#-renamesourcefileattribute SourceFile
diff --git a/stmobile/src/main/AndroidManifest.xml b/stmobile/src/main/AndroidManifest.xml
new file mode 100644
index 00000000..4e90c951
--- /dev/null
+++ b/stmobile/src/main/AndroidManifest.xml
@@ -0,0 +1,14 @@
+[AndroidManifest.xml content lost to extraction; a minimal library manifest for the stmobile module belongs here]
diff --git a/stmobile/src/main/assets/track_face_action1.0.0.model b/stmobile/src/main/assets/track_face_action1.0.0.model
new file mode 100644
index 00000000..348ec61e
Binary files /dev/null and b/stmobile/src/main/assets/track_face_action1.0.0.model differ
diff --git a/stmobile/src/main/java/com/sensetime/stmobileapi/STImageFormat.java b/stmobile/src/main/java/com/sensetime/stmobileapi/STImageFormat.java
new file mode 100644
index 00000000..d140bed5
--- /dev/null
+++ b/stmobile/src/main/java/com/sensetime/stmobileapi/STImageFormat.java
@@ -0,0 +1,9 @@
+package com.sensetime.stmobileapi;
+
+public class STImageFormat {
+ public static final int ST_PIX_FMT_GRAY8 = 0;
+ public static final int ST_PIX_FMT_YUV420P = 1;
+ public static final int ST_PIX_FMT_NV12 = 2;
+ public static final int ST_PIX_FMT_NV21 = 3;
+ public static final int ST_PIX_FMT_BGRA8888 = 4;
+}
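
ST_PIX_FMT_NV21 matches the default Android camera preview format: a full-resolution Y plane followed by an interleaved VU plane at quarter resolution, i.e. 12 bits per pixel. The exact frame size is therefore w*h*3/2 bytes, which is why the demo activity's w*h*2 allocation is an over-allocation rather than a requirement:

```java
/** Sketch: exact byte size of one NV21 frame. */
final class Nv21 {
    static int bufferSize(int width, int height) {
        // Y plane: width*height bytes; interleaved VU plane: width*height/2 bytes.
        return width * height * 3 / 2;
    }
}
```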
diff --git a/stmobile/src/main/java/com/sensetime/stmobileapi/STMobile106.java b/stmobile/src/main/java/com/sensetime/stmobileapi/STMobile106.java
new file mode 100644
index 00000000..46e820a6
--- /dev/null
+++ b/stmobile/src/main/java/com/sensetime/stmobileapi/STMobile106.java
@@ -0,0 +1,66 @@
+package com.sensetime.stmobileapi;
+
+import com.sensetime.stmobileapi.STMobileApiBridge.st_mobile_106_t;
+
+import android.graphics.PointF;
+import android.graphics.Rect;
+
+/**
+ * STMobile API: StMobile
+ * Created by Guangli W on 9/7/15.
+ */
+public class STMobile106 extends st_mobile_106_t{
+
+ public STMobile106() {
+ }
+
+ /**
+ * Get face Rect
+ * @return Rect
+ */
+ public Rect getRect() {
+ Rect r = new Rect();
+ r.bottom = rect.bottom;
+ r.top = rect.top;
+ r.left = rect.left;
+ r.right = rect.right;
+ return r;
+ }
+
+ /**
+ * Get face Points Array
+ * @return Points Array, please check the length of array
+ */
+ public PointF[] getPointsArray() {
+ PointF[] ans = new PointF[106];
+ for (int i = 0; i < 106; i++) {
+ ans[i] = new PointF();
+ ans[i].x = points_array[2 * i];
+ ans[i].y = points_array[2 * i + 1];
+ }
+ return ans;
+ }
+
+ public STMobile106(st_mobile_106_t origin) {
+ rect.bottom = origin.rect.bottom;
+ rect.top = origin.rect.top;
+ rect.left = origin.rect.left;
+ rect.right = origin.rect.right;
+
+ score = origin.score;
+ yaw = origin.yaw;
+ pitch = origin.pitch;
+ roll = origin.roll;
+ eye_dist = origin.eye_dist;
+ ID = origin.ID;
+
+ for (int i = 0; i < points_array.length; i++) {
+ points_array[i] = origin.points_array[i];
+ }
+ }
+
+ @Override
+ public String toString() {
+ return "STMobile(" + getRect() + ", " + score + ")";
+ }
+}
diff --git a/stmobile/src/main/java/com/sensetime/stmobileapi/STMobileApiBridge.java b/stmobile/src/main/java/com/sensetime/stmobileapi/STMobileApiBridge.java
new file mode 100644
index 00000000..6611c385
--- /dev/null
+++ b/stmobile/src/main/java/com/sensetime/stmobileapi/STMobileApiBridge.java
@@ -0,0 +1,346 @@
+package com.sensetime.stmobileapi;
+
+import java.util.Arrays;
+import java.util.List;
+
+import com.sun.jna.Library;
+import com.sun.jna.Native;
+import com.sun.jna.Pointer;
+import com.sun.jna.Structure;
+import com.sun.jna.ptr.FloatByReference;
+import com.sun.jna.ptr.IntByReference;
+import com.sun.jna.ptr.PointerByReference;
+
+public interface STMobileApiBridge extends Library {
+ class st_rect_t extends Structure {
+
+ public static class ByValue extends st_rect_t implements Structure.ByValue {
+ }
+
+ public int left;
+ public int top;
+ public int right;
+ public int bottom;
+
+ @Override
+ protected List getFieldOrder() {
+ return Arrays.asList(new String[]{"left", "top", "right", "bottom"});
+ }
+
+ @Override
+ public st_rect_t clone() {
+ st_rect_t copy = new st_rect_t();
+ copy.left = this.left;
+ copy.top = this.top;
+ copy.right = this.right;
+ copy.bottom = this.bottom;
+ return copy;
+ }
+
+ /**
+ * A jna.Structure argument is passed by reference by default;
+ * in some cases, however, we need to pass it by value.
+ */
+ public st_rect_t.ByValue copyToValue() {
+ st_rect_t.ByValue retObj = new st_rect_t.ByValue();
+ retObj.left = this.left;
+ retObj.top = this.top;
+ retObj.right = this.right;
+ retObj.bottom = this.bottom;
+ return retObj;
+ }
+ }
+
+ class st_mobile_106_t extends Structure {
+ public st_rect_t rect;
+ public float score;
+ public float[] points_array = new float[212];
+ public float yaw;
+ public float pitch;
+ public float roll;
+ public int eye_dist;
+ public int ID;
+
+ public st_mobile_106_t() {
+ super();
+ }
+
+ public st_mobile_106_t(Pointer p) {
+ super(p);
+ }
+
+ @Override
+ protected List getFieldOrder() {
+ return Arrays.asList(new String[]{"rect", "score", "points_array",
+ "yaw", "pitch", "roll", "eye_dist", "ID"});
+ }
+
+ @Override
+ public st_mobile_106_t clone() {
+ st_mobile_106_t copy = new st_mobile_106_t();
+ copy.rect = this.rect.clone();
+ copy.score = this.score;
+ copy.points_array = this.points_array.clone(); // deep-copy so the clone does not share the source array
+ copy.yaw = this.yaw;
+ copy.pitch = this.pitch;
+ copy.roll = this.roll;
+ copy.eye_dist = this.eye_dist;
+ copy.ID = this.ID;
+ return copy;
+ }
+
+ public static st_mobile_106_t[] arrayCopy(st_mobile_106_t[] origin) {
+ st_mobile_106_t[] copy = new st_mobile_106_t[origin.length];
+ for (int i = 0; i < origin.length; ++i) {
+ copy[i] = origin[i].clone();
+ }
+ return copy;
+ }
+ }
+
+ /**
+ * face info and the actions detected on the face
+
+ typedef struct st_mobile_face_action_t {
+ struct st_mobile_106_t face; /// face info: rectangle, 106 points, pose, etc.
+ unsigned int face_action; /// facial action flags
+ } st_mobile_face_action_t;
+
+ * */
+
+ class st_mobile_face_action_t extends Structure {
+ public st_mobile_106_t face;
+ public int face_action;
+
+ public st_mobile_face_action_t() {
+ super();
+ }
+
+ public st_mobile_face_action_t(Pointer p) {
+ super(p);
+ }
+
+ @Override
+ protected List getFieldOrder() {
+ return Arrays.asList(new String[] {"face", "face_action"});
+ }
+
+ @Override
+ public st_mobile_face_action_t clone() { // covariant, matching st_mobile_106_t.clone() above
+ st_mobile_face_action_t copy = new st_mobile_face_action_t();
+ copy.face = this.face;
+ copy.face_action = this.face_action;
+
+ return copy;
+ }
+
+ public static st_mobile_face_action_t[] arrayCopy(st_mobile_face_action_t[] origin) {
+ st_mobile_face_action_t[] copy = new st_mobile_face_action_t[origin.length];
+ for(int i = 0; i < origin.length; i++) {
+ copy[i] = origin[i].clone();
+ }
+ return copy;
+ }
+ }

[diff garbled in extraction: the remainder of STMobileApiBridge.java and the diff header of the face-detection wrapper class are missing here]

+ while ((n = in.read(buffer)) > 0) {
+ out.write(buffer, 0, n);
+ }
+ in.close();
+ out.close();
+ } catch (IOException e) {
+ modelFile.delete();
+ }
+ }
+ }
+ }
+
+ protected String getModelPath(String modelName) {
+ String path = null;
+ File dataDir = mContext.getApplicationContext().getExternalFilesDir(null);
+ if (dataDir != null) {
+ path = dataDir.getAbsolutePath() + File.separator + modelName;
+ }
+ return path;
+ }
+
+ public void destory()
+ {
+ long start_destroy = System.currentTimeMillis();
+ if(detectHandle != null) {
+ STMobileApiBridge.FACESDK_INSTANCE.st_mobile_face_detection_destroy(detectHandle);
+ detectHandle = null;
+ }
+ long end_destroy = System.currentTimeMillis();
+ Log.i(TAG, "destroy cost "+(end_destroy - start_destroy)+" ms");
+ }
+
+ /**
+ * Given the Image by Bitmap to detect face
+ * @param image Input image by Bitmap
+ * @param orientation Image orientation
+ * @return CvFace array, each one in array is Detected by SDK native API
+ */
+ public STMobile106[] detect(Bitmap image, int orientation) {
+ if(DEBUG)
+ Log.d(TAG, "detect bitmap");
+
+ int[] colorImage = STUtils.getBGRAImageByte(image);
+ return detect(colorImage, STImageFormat.ST_PIX_FMT_BGRA8888,image.getWidth(), image.getHeight(), image.getWidth(), orientation);
+ }
+
+ /**
+ * Given the Image by Byte Array to detect face
+ * @param colorImage Input image by int
+ * @param cvImageFormat Image format
+ * @param imageWidth Image width
+ * @param imageHeight Image height
+ * @param imageStride Image stride
+ * @param orientation Image orientation
+ * @return CvFace array, each one in array is Detected by SDK native API
+ */
+ public STMobile106[] detect(int[] colorImage,int cvImageFormat, int imageWidth, int imageHeight, int imageStride, int orientation) {
+ if(DEBUG)
+ Log.d(TAG, "detect int array");
+
+ if(detectHandle == null){
+ return null;
+ }
+ long startTime = System.currentTimeMillis();
+
+ int rst = STMobileApiBridge.FACESDK_INSTANCE.st_mobile_face_detection_detect(detectHandle, colorImage, cvImageFormat,imageWidth,
+ imageHeight, imageStride, orientation, ptrToArray, ptrToSize);
+ long endTime = System.currentTimeMillis();
+
+ if(DEBUG)Log.d(TAG, "detect time: "+(endTime-startTime)+"ms");
+
+ if (rst != ResultCode.ST_OK.getResultCode()) {
+ throw new RuntimeException("Calling st_mobile_face_detection_detect() method failed! ResultCode=" + rst);
+ }
+
+ if (ptrToSize.getValue() == 0) {
+ if(DEBUG)Log.d(TAG, "ptrToSize.getValue() == 0");
+ return new STMobile106[0];
+ }
+
+ st_mobile_106_t arrayRef = new st_mobile_106_t(ptrToArray.getValue());
+ arrayRef.read();
+ st_mobile_106_t[] array = st_mobile_106_t.arrayCopy((st_mobile_106_t[]) arrayRef.toArray(ptrToSize.getValue()));
+ STMobileApiBridge.FACESDK_INSTANCE.st_mobile_face_detection_release_result(ptrToArray.getValue(), ptrToSize.getValue());
+
+ STMobile106[] ret = new STMobile106[array.length];
+ for (int i = 0; i < array.length; i++) {
+ ret[i] = new STMobile106(array[i]);
+ }
+
+ if(DEBUG)Log.d(TAG, "track : "+ ret);
+
+ return ret;
+ }
+
+ /**
+ * Given the Image by Byte to detect face
+ * @param image Input image by byte
+ * @param orientation Image orientation
+ * @param width Image width
+ * @param height Image height
+ * @return CvFace array, each one in array is Detected by SDK native API
+ */
+ public STMobile106[] detect(byte[] image, int orientation,int width,int height) {
+ if(DEBUG){
+ Log.d(TAG, "detect byte array");
+ }
+
+ return detect(image, STImageFormat.ST_PIX_FMT_NV21,width, height, width, orientation);
+ }
+
+ /**
+ * Given the Image by Byte Array to detect face
+ * @param colorImage Input image by byte
+ * @param cvImageFormat Image format
+ * @param imageWidth Image width
+ * @param imageHeight Image height
+ * @param imageStride Image stride
+ * @param orientation Image orientation
+ * @return CvFace array, each one in array is Detected by SDK native API
+ */
+ public STMobile106[] detect(byte[] colorImage,int cvImageFormat, int imageWidth, int imageHeight, int imageStride, int orientation) {
+ if(DEBUG){
+ Log.d(TAG, "detect 111");
+ }
+
+ if(detectHandle == null){
+ return null;
+ }
+ long startTime = System.currentTimeMillis();
+
+ int rst = STMobileApiBridge.FACESDK_INSTANCE.st_mobile_face_detection_detect(detectHandle, colorImage, cvImageFormat,imageWidth,
+ imageHeight, imageStride, orientation, ptrToArray, ptrToSize);
+ long endTime = System.currentTimeMillis();
+
+ if(DEBUG)Log.d(TAG, "detect time: "+(endTime-startTime)+"ms");
+
+ if (rst != ResultCode.ST_OK.getResultCode()) {
+ throw new RuntimeException("Calling st_mobile_face_detection_detect() method failed! ResultCode=" + rst);
+ }
+
+ if (ptrToSize.getValue() == 0) {
+ return new STMobile106[0];
+ }
+
+ st_mobile_106_t arrayRef = new st_mobile_106_t(ptrToArray.getValue());
+ arrayRef.read();
+ st_mobile_106_t[] array = st_mobile_106_t.arrayCopy((st_mobile_106_t[]) arrayRef.toArray(ptrToSize.getValue()));
+ STMobileApiBridge.FACESDK_INSTANCE.st_mobile_face_detection_release_result(ptrToArray.getValue(), ptrToSize.getValue());
+
+ STMobile106[] ret = new STMobile106[array.length];
+ for (int i = 0; i < array.length; i++) {
+ ret[i] = new STMobile106(array[i]);
+ }
+
+ if(DEBUG)Log.d(TAG, "track : "+ ret);
+
+ return ret;
+ }
+}
diff --git a/stmobile/src/main/java/com/sensetime/stmobileapi/STMobileMultiTrack106.java b/stmobile/src/main/java/com/sensetime/stmobileapi/STMobileMultiTrack106.java
new file mode 100644
index 00000000..3c81b20d
--- /dev/null
+++ b/stmobile/src/main/java/com/sensetime/stmobileapi/STMobileMultiTrack106.java
@@ -0,0 +1,321 @@
+package com.sensetime.stmobileapi;
+
+import android.content.Context;
+import android.graphics.Bitmap;
+import android.util.Log;
+
+import java.io.File;
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.OutputStream;
+
+import com.sensetime.stmobileapi.STMobileApiBridge.ResultCode;
+import com.sensetime.stmobileapi.STMobileApiBridge.st_mobile_106_t;
+import com.sensetime.stmobileapi.STMobileApiBridge.st_mobile_face_action_t;
+import com.sun.jna.Pointer;
+import com.sun.jna.ptr.IntByReference;
+import com.sun.jna.ptr.PointerByReference;
+
+public class STMobileMultiTrack106 {
+ private Pointer trackHandle;
+ public static final int FACE_KEY_POINTS_COUNT = 106;
+ static boolean DEBUG = true;// false;
+
+ public static int ST_MOBILE_TRACKING_DEFAULT_CONFIG = 0x00000000;
+ public static int ST_MOBILE_TRACKING_SINGLE_THREAD = 0x00000001;
+
+ private Context mContext;
+ private static final String BEAUTIFY_MODEL_NAME = "track_face_action1.0.0.model";
+
+ PointerByReference ptrToArray = new PointerByReference();
+ PointerByReference faceAction_ptrToArray = new PointerByReference();
+
+ IntByReference ptrToSize = new IntByReference(); //face count int pointer
+
+ /**
+ *
+ * Note
+ * To track a single face:
+ *   first:  trackHandle = STMobileApiBridge.FACESDK_INSTANCE.st_mobile_tracker_106_create(modulePath, handlerPointer);
+ *   second: call setMaxDetectableFaces(1), i.e. pass 1
+ * To track multiple faces:
+ *   first:  trackHandle = STMobileApiBridge.FACESDK_INSTANCE.st_mobile_tracker_106_create(modulePath, handlerPointer);
+ *   second: call setMaxDetectableFaces(num), i.e. pass -1
+ */
+
+ public STMobileMultiTrack106(Context context, int config) {
+ PointerByReference handlerPointer = new PointerByReference();
+ mContext = context;
+ synchronized(this.getClass())
+ {
+ copyModelIfNeed(BEAUTIFY_MODEL_NAME);
+ }
+ String modulePath = getModelPath(BEAUTIFY_MODEL_NAME);
+ int rst = STMobileApiBridge.FACESDK_INSTANCE.st_mobile_tracker_106_create(modulePath, config, handlerPointer);
+ if(rst != ResultCode.ST_OK.getResultCode())
+ {
+ return;
+ }
+ trackHandle = handlerPointer.getValue();
+ }
+
+ private void copyModelIfNeed(String modelName) {
+ String path = getModelPath(modelName);
+ if (path != null) {
+ File modelFile = new File(path);
+ if (!modelFile.exists()) {
+ //copy the model if it does not exist, or if its version differs from the copy on the sdcard
+ try {
+ if (modelFile.exists())
+ modelFile.delete();
+ modelFile.createNewFile();
+ InputStream in = mContext.getApplicationContext().getAssets().open(modelName);
+ if(in == null)
+ {
+ Log.e("MultiTrack106", "the src module is not existed");
+ }
+ OutputStream out = new FileOutputStream(modelFile);
+ byte[] buffer = new byte[4096];
+ int n;
+ while ((n = in.read(buffer)) > 0) {
+ out.write(buffer, 0, n);
+ }
+ in.close();
+ out.close();
+ } catch (IOException e) {
+ modelFile.delete();
+ }
+ }
+ }
+ }
+
+ protected String getModelPath(String modelName) {
+ String path = null;
+ File dataDir = mContext.getApplicationContext().getExternalFilesDir(null);
+ if (dataDir != null) {
+ path = dataDir.getAbsolutePath() + File.separator + modelName;
+ }
+ return path;
+ }
+
+ public int setMaxDetectableFaces(int max)
+ {
+ int rst = -1;
+ if(trackHandle != null){
+ rst = STMobileApiBridge.FACESDK_INSTANCE.st_mobile_tracker_106_set_facelimit(trackHandle,max);
+ }
+ return rst;
+ }
+
+ public void destory()
+ {
+ long start_destroy = System.currentTimeMillis();
+ if(trackHandle != null) {
+ STMobileApiBridge.FACESDK_INSTANCE.st_mobile_tracker_106_destroy(trackHandle);
+ trackHandle = null;
+ }
+ long end_destroy = System.currentTimeMillis();
+ Log.i("track106", "destroy cost "+(end_destroy - start_destroy)+" ms");
+ }
+ /**
+ * Given the Image by Bitmap to track face
+ * @param image Input image by Bitmap
+ * @param orientation Image orientation
+ * @return CvFace array, each one in array is Detected by SDK native API
+ */
+ public STMobile106[] track(Bitmap image, int orientation) {
+ if(DEBUG)System.out.println("SampleLiveness-------->CvFaceMultiTrack--------->track1");
+
+ int[] colorImage = STUtils.getBGRAImageByte(image);
+ return track(colorImage, STImageFormat.ST_PIX_FMT_BGRA8888,image.getWidth(), image.getHeight(), image.getWidth(), orientation);
+ }
+
+ /**
+ * Given the Image by Byte Array to track face
+ * @param colorImage Input image by int
+ * @param cvImageFormat Image format
+ * @param imageWidth Image width
+ * @param imageHeight Image height
+ * @param imageStride Image stride
+ * @param orientation Image orientation
+ * @return CvFace array, each one in array is Detected by SDK native API
+ */
+ public STMobile106[] track(int[] colorImage,int cvImageFormat, int imageWidth, int imageHeight, int imageStride, int orientation) {
+ if(DEBUG)System.out.println("SampleLiveness-------->CvFaceMultiTrack--------->track2");
+
+ if(trackHandle == null){
+ return null;
+ }
+ long startTime = System.currentTimeMillis();
+ /*
+ int rst = STMobileApiBridge.FACESDK_INSTANCE.cv_face_track_106(trackHandle, colorImage, cvImageFormat,imageWidth,
+ imageHeight, imageStride, orientation, ptrToArray, ptrToSize);
+ */
+ int rst = STMobileApiBridge.FACESDK_INSTANCE.st_mobile_tracker_106_track(trackHandle, colorImage, cvImageFormat,imageWidth,
+ imageHeight, imageStride, orientation, ptrToArray, ptrToSize);
+ long endTime = System.currentTimeMillis();
+
+ if(DEBUG)Log.d("Test", "multi track time: "+(endTime-startTime)+"ms");
+
+ if (rst != ResultCode.ST_OK.getResultCode()) {
+ throw new RuntimeException("Calling cv_face_multi_track() method failed! ResultCode=" + rst);
+ }
+
+ if (ptrToSize.getValue() == 0) {
+ if(DEBUG)Log.d("Test", "ptrToSize.getValue() == 0");
+ return new STMobile106[0];
+ }
+
+ st_mobile_106_t arrayRef = new st_mobile_106_t(ptrToArray.getValue());
+ arrayRef.read();
+ st_mobile_106_t[] array = st_mobile_106_t.arrayCopy((st_mobile_106_t[]) arrayRef.toArray(ptrToSize.getValue()));
+ STMobileApiBridge.FACESDK_INSTANCE.st_mobile_tracker_106_release_result(ptrToArray.getValue(), ptrToSize.getValue());
+
+ STMobile106[] ret = new STMobile106[array.length];
+ for (int i = 0; i < array.length; i++) {
+ ret[i] = new STMobile106(array[i]);
+ }
+
+ if(DEBUG)Log.d("Test", "track : "+ ret);
+
+ return ret;
+ }
+
+ /**
+ * Given the Image by Byte to track face
+ * @param image Input image by byte
+ * @param orientation Image orientation
+ * @param width Image width
+ * @param height Image height
+ * @return CvFace array, each one in array is Detected by SDK native API
+ */
+ public STMobile106[] track(byte[] image, int orientation,int width,int height) {
+ if(DEBUG){
+ System.out.println("SampleLiveness-------->CvFaceMultiTrack--------->track3");
+ }
+
+ return track(image, STImageFormat.ST_PIX_FMT_NV21,width, height, width, orientation);
+ }
+
+ /**
+ * Given the Image by Byte Array to track face
+ * @param colorImage Input image by byte
+ * @param cvImageFormat Image format
+ * @param imageWidth Image width
+ * @param imageHeight Image height
+ * @param imageStride Image stride
+ * @param orientation Image orientation
+ * @return CvFace array, each one in array is Detected by SDK native API
+ */
+ public STMobile106[] track(byte[] colorImage,int cvImageFormat, int imageWidth, int imageHeight, int imageStride, int orientation) {
+ Log.d("ST_mobile", "track: "+imageWidth+" "+imageHeight+" "+imageStride+" "+orientation);
+ if(DEBUG){
+ System.out.println("SampleLiveness-------->CvFaceMultiTrack--------->track4");
+ }
+
+ if(trackHandle == null){
+ return null;
+ }
+ long startTime = System.currentTimeMillis();
+ /*
+ int rst = STMobileApiBridge.FACESDK_INSTANCE.cv_face_track_106(trackHandle, colorImage, cvImageFormat,imageWidth,
+ imageHeight, imageStride, orientation, ptrToArray, ptrToSize);
+ */
+
+ int rst = STMobileApiBridge.FACESDK_INSTANCE.st_mobile_tracker_106_track(trackHandle, colorImage, cvImageFormat,imageWidth,
+ imageHeight, imageStride, orientation, ptrToArray, ptrToSize);
+// int rst = STMobileApiBridge.FACESDK_INSTANCE.st_mobile_tracker_106_track_face_action(trackHandle, colorImage, cvImageFormat, imageWidth,
+// imageHeight, imageStride, orientation, ptrToArray, ptrToSize);
+
+ long endTime = System.currentTimeMillis();
+
+ if(DEBUG)Log.d("Test", "multi track time: "+(endTime-startTime)+"ms");
+
+ if (rst != ResultCode.ST_OK.getResultCode()) {
+ throw new RuntimeException("Calling cv_face_multi_track() method failed! ResultCode=" + rst);
+ }
+
+ if (ptrToSize.getValue() == 0) {
+ return new STMobile106[0];
+ }
+
+ st_mobile_106_t arrayRef = new st_mobile_106_t(ptrToArray.getValue());
+ arrayRef.read();
+ st_mobile_106_t[] array = st_mobile_106_t.arrayCopy((st_mobile_106_t[]) arrayRef.toArray(ptrToSize.getValue()));
+ STMobileApiBridge.FACESDK_INSTANCE.st_mobile_tracker_106_release_result(ptrToArray.getValue(), ptrToSize.getValue()); // release the native result after copying, to avoid leaking each frame
+
+ STMobile106[] ret = new STMobile106[array.length];
+ for (int i = 0; i < array.length; i++) {
+ ret[i] = new STMobile106(array[i]);
+ }
+
+ if(DEBUG)Log.d("Test", "track : "+ ret);
+
+ return ret;
+ }
+
+ /**
+ * Given the Image by Byte to trace face action
+ * @param image Input image by byte
+ * @param orientation Image orientation
+ * @param width Image width
+ * @param height Image height
+ * @return CvFace action array, each one in array is Detected by SDK native API
+ * */
+ public STMobileFaceAction[] trackFaceAction(byte[] image, int orientation, int width, int height) {
+ Log.d("ST_mobile", "track: "+width+" "+height+" "+orientation);
+ if(DEBUG) {
+ System.out.println("SampleTrackFaceAction-------->CvFaceMultiTrack--------->trackFaceAction1");
+ }
+ return trackFaceAction(image, STImageFormat.ST_PIX_FMT_NV21, width, height, width, orientation);
+ }
+
+ /**
+ * Given the Image by Byte Array to track face action
+ * @param colorImage Input image by byte
+ * @param cvImageFormat Image format
+ * @param imageWidth Image width
+ * @param imageHeight Image height
+ * @param imageStride Image stride
+ * @param orientation Image orientation
+ * @return CvFace action array, each one in array is Detected by SDK native API
+ * */
+ public STMobileFaceAction[] trackFaceAction(byte[] colorImage, int cvImageFormat, int imageWidth, int imageHeight, int imageStride, int orientation) {
+ if(DEBUG) {
+ System.out.println("SampleTrackFaceAction-------->CvFaceMultiTrack--------->trackFaceAction2");
+ }
+
+ if(trackHandle == null) {
+ return null;
+ }
+
+ long startTime = System.currentTimeMillis();
+ int rst = STMobileApiBridge.FACESDK_INSTANCE.st_mobile_tracker_106_track_face_action(trackHandle, colorImage, cvImageFormat, imageWidth,
+ imageHeight, imageStride, orientation, faceAction_ptrToArray, ptrToSize);
+ long endTime = System.currentTimeMillis();
+ if(DEBUG)Log.d("trackFaceAction", "multi track face action time: "+(endTime-startTime)+"ms");
+
+ if(rst != ResultCode.ST_OK.getResultCode()) {
+ throw new RuntimeException("Calling cv_face_action_multi_track() method failed! ResultCode=" + rst);
+ }
+
+ if(ptrToSize.getValue() == 0) {
+ return new STMobileFaceAction[0];
+ }
+
+ st_mobile_face_action_t arrayRef = new st_mobile_face_action_t(faceAction_ptrToArray.getValue());
+ arrayRef.read();
+ st_mobile_face_action_t[] array = st_mobile_face_action_t.arrayCopy((st_mobile_face_action_t[]) arrayRef.toArray(ptrToSize.getValue()));
+
+ STMobileFaceAction[] ret = new STMobileFaceAction[array.length];
+ for(int i = 0; i < array.length; i++) {
+ ret[i] = new STMobileFaceAction(array[i]);
+ }
+
+ return ret;
+ }
+}
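
Taken together, the per-frame flow the demo activity implements is: grab an NV21 frame, compute the rotation-corrected direction, run trackFaceAction, then normalize the 106 points for the shader. A condensed, hypothetical sketch of that loop (class and method names here are illustrative):

```java
import android.content.Context;
import android.graphics.PointF;
import com.sensetime.stmobileapi.STMobileFaceAction;
import com.sensetime.stmobileapi.STMobileMultiTrack106;

/** Condensed sketch of the per-frame tracking flow used by CameraDemoActivity. */
final class TrackerLoop {
    private final STMobileMultiTrack106 tracker;

    TrackerLoop(Context ctx) {
        tracker = new STMobileMultiTrack106(ctx, 0x00000020); // ST_MOBILE_TRACKING_ENABLE_FACE_ACTION
        tracker.setMaxDetectableFaces(1);
    }

    /** Returns points normalized to [0,1], or null when no face is found. */
    float[] onFrame(byte[] nv21, int dir, int width, int height) {
        STMobileFaceAction[] faces = tracker.trackFaceAction(nv21, dir, width, height);
        if (faces == null || faces.length == 0) return null;
        PointF[] pts = faces[0].getFace().getPointsArray();
        float[] normalized = new float[pts.length * 2];
        for (int i = 0; i < pts.length; i++) {
            // The demo additionally rotates each point to the preview orientation
            // (STUtils.RotateDeg90/RotateDeg270) before normalizing.
            normalized[2 * i]     = pts[i].x / width;
            normalized[2 * i + 1] = pts[i].y / height;
        }
        return normalized;
    }
}
```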