diff --git a/cgeDemo/src/main/java/org/wysaid/cgeDemo/ChartGLRenderer.java b/cgeDemo/src/main/java/org/wysaid/cgeDemo/ChartGLRenderer.java
new file mode 100644
index 00000000..4d3f82c5
--- /dev/null
+++ b/cgeDemo/src/main/java/org/wysaid/cgeDemo/ChartGLRenderer.java
@@ -0,0 +1,67 @@
+package org.wysaid.cgeDemo;
+
+import android.content.Context;
+import android.opengl.GLSurfaceView;
+import android.opengl.GLU;
+import android.util.Log;
+
+import javax.microedition.khronos.egl.EGLConfig;
+import javax.microedition.khronos.opengles.GL10;
+
+public class ChartGLRenderer implements GLSurfaceView.Renderer {
+
+ public volatile float[] chartData = new float[ChartWaveForm.BUFFER_SIZE];
+ private int width;
+ private int height;
+ private Context context;
+ private ChartWaveForm lineChart;
+
+ public ChartGLRenderer(Context context) {
+ this.context = context;
+ lineChart = new ChartWaveForm();
+ }
+
+ @Override
+ public void onSurfaceCreated(GL10 gl10, EGLConfig eglConfig) {
+ //lineChart = new LineChart();
+ }
+
+ @Override
+ public void onSurfaceChanged(GL10 gl, int width, int height) {
+ this.width = width;
+ this.height = height;
+ // Prevent a divide-by-zero later on by forcing height to 1
+ if (height == 0) {
+ height = 1;
+ }
+ Log.d("执行到这", "ChartSurfaceView---Width =="+width+"----Height =="+height);
+ //Reset current viewport
+ gl.glViewport(0, 0, width, height);
+ //Select Projection Matrix
+ gl.glMatrixMode(GL10.GL_PROJECTION);
+ //Reset Projection Matrix
+ gl.glLoadIdentity();
+ //Calculate The Aspect Ratio Of The Window
+ //Log.d("Chart Ratio2 "," width " +width + " H " + height);
+ //void gluPerspective (GLdouble fovy, GLdouble aspect, GLdouble zNear, GLdouble zFar)
+ //fovy is the vertical field of view in degrees: smaller values narrow the view (squinting), larger values widen it;
+ //zNear is the distance from the eye to the near clipping plane and zFar the distance to the far clipping plane; neither may be negative (you cannot see behind the eye).
+ //aspect is the width-to-height ratio of the clipping plane, which determines how wide the visible cross-section is.
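+ // Worked example (assuming the chart surface matches the 984x1596 video view
+ // noted in ChartWaveForm): the aspect passed below would be 1596 * 2 / 984,
+ // about 3.24, i.e. a deliberately tall frustum rather than the usual
+ // width/height ratio.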
+ GLU.gluPerspective(gl, 50.0f, (float) height * 2.0f / (float) width, 0.1f, 100.0f);
+ gl.glMatrixMode(GL10.GL_MODELVIEW); //Select The Modelview Matrix
+ gl.glLoadIdentity(); //Reset The Modelview Matrix
+ }
+
+ @Override
+ public void onDrawFrame(GL10 gl) {
+ // clear Screen and Depth Buffer
+ gl.glClear(GL10.GL_COLOR_BUFFER_BIT | GL10.GL_DEPTH_BUFFER_BIT);
+ // Reset the Modelview Matrix
+ gl.glLoadIdentity();
+ //Translate the scene 3 units into the screen, which is the same as moving the camera 3 units away
+ gl.glTranslatef(0.0f, 0.0f, -3.0f);
+ this.lineChart.setResolution(width, height);
+ this.lineChart.setChartData(chartData);
+ lineChart.draw(gl);
+ }
+}
diff --git a/cgeDemo/src/main/java/org/wysaid/cgeDemo/ChartGLSurfaceView.java b/cgeDemo/src/main/java/org/wysaid/cgeDemo/ChartGLSurfaceView.java
new file mode 100644
index 00000000..8b0ead24
--- /dev/null
+++ b/cgeDemo/src/main/java/org/wysaid/cgeDemo/ChartGLSurfaceView.java
@@ -0,0 +1,111 @@
+package org.wysaid.cgeDemo;
+
+import android.content.Context;
+import android.graphics.PixelFormat;
+import android.opengl.GLSurfaceView;
+
+public class ChartGLSurfaceView extends GLSurfaceView {
+
+ // Flag set while the data array is being replaced
+ private boolean isUpdating = false;
+ // The renderer
+ private ChartGLRenderer mRenderer;
+ // Array of y-coordinate samples
+ private float[] datapoints = new float[ChartWaveForm.BUFFER_SIZE];
+
+ public ChartGLSurfaceView(Context context) {
+ super(context);
+ // Choose the EGL config (RGBA8888 with a 16-bit depth buffer)
+ setEGLConfigChooser(8, 8, 8, 8, 16, 0);
+ // Keep this surface on top of the window
+ this.setZOrderOnTop(true); //necessary
+ getHolder().setFormat(PixelFormat.TRANSLUCENT);
+ // Set the Renderer for drawing on the GLSurfaceView
+ mRenderer = new ChartGLRenderer(context);
+ setRenderer(mRenderer);
+ // Initialize all samples to zero
+ for (int i = 0; i < datapoints.length; i++) {
+ datapoints[i] = 0;
+ }
+ setChartData(datapoints);
+ // Render only when the drawing data changes (left commented out, so the view renders continuously)
+ //setRenderMode(GLSurfaceView.RENDERMODE_WHEN_DIRTY);
+ new Thread(new Task()).start();
+ }
+
+ /**
+ * Set the chart's data source.
+ * @param datapoints new y-coordinate samples
+ */
+ public void setChartData(float[] datapoints) {
+// L.d("执行到这","decodeThread---02");
+ if (datapoints != null && datapoints.length > 0) {
+ isUpdating = true;
+ this.datapoints = datapoints.clone();
+// float gMaxValue = getMax(datapoints);
+// float gMinValue = getMin(datapoints);
+// for (int i = 0; i < this.datapoints.length; i++) {
+// this.datapoints[i] = (((datapoints[i] - gMinValue) * (1.0f - (-1.0f)) / (gMaxValue - gMinValue)) + (-1));
+//// L.d("Trace","this.datapoints[i]=="+String.valueOf(this.datapoints[i]));
+// }
+ isUpdating = false;
+// L.d("执行到这","gMaxValue==="+gMaxValue+"---gMinValue==="+gMinValue);
+ }
+ }
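+
+ // Note: the isUpdating flag used above is a best-effort guard, not a real lock;
+ // the publisher thread (Task below) simply skips a frame while a clone is in
+ // progress. If tearing were ever observed, a synchronized block around the copy
+ // would be the safer choice (an observation, not something this patch changes).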
+
+ /**
+ * Return the maximum value of the array.
+ * @param array the samples to scan
+ * @return the maximum, or 0 if the array is null or empty
+ */
+ private float getMax(float[] array) {
+ if(array != null && array.length > 0){
+ float max = array[0];
+ for (int i = 1; i < array.length; i++) {
+ if (array[i] > max) {
+ max = array[i];
+ }
+ }
+ return max;
+ } else {
+ return 0f;
+ }
+ }
+
+ /**
+ * Return the minimum value of the array.
+ * @param array the samples to scan
+ * @return the minimum, or 0 if the array is null or empty
+ */
+ private float getMin(float[] array) {
+ if(array != null && array.length > 0){
+ float min = array[0];
+ for (int i = 1; i < array.length; i++) {
+ if (array[i] < min) {
+ min = array[i];
+ }
+ }
+ return min;
+ } else {
+ return 0f;
+ }
+ }
+
+ class Task implements Runnable {
+ @Override
+ public void run() {
+ while (true) {
+ if (!isUpdating) {
+ mRenderer.chartData = datapoints;
+ requestRender();
+ }
+// try {
+// Thread.sleep(30);
+// } catch (InterruptedException e) {
+// e.printStackTrace();
+// }
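+ // With the sleep above left commented out this loop busy-spins on one CPU core
+ // and floods requestRender(); re-enabling the ~30 ms sleep would cap chart
+ // updates at roughly 33 per second.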
+ }
+ }
+ }
+
+}
diff --git a/cgeDemo/src/main/java/org/wysaid/cgeDemo/ChartWaveForm.java b/cgeDemo/src/main/java/org/wysaid/cgeDemo/ChartWaveForm.java
new file mode 100644
index 00000000..4f67d53f
--- /dev/null
+++ b/cgeDemo/src/main/java/org/wysaid/cgeDemo/ChartWaveForm.java
@@ -0,0 +1,89 @@
+package org.wysaid.cgeDemo;
+
+import java.nio.ByteBuffer;
+import java.nio.ByteOrder;
+import java.nio.FloatBuffer;
+
+import javax.microedition.khronos.opengles.GL10;
+
+public class ChartWaveForm {
+
+ // Buffer length, i.e. number of points
+ public final static int BUFFER_SIZE = 1570464;//VideoPlayerGLSurfaceView.mViewWidth*VideoPlayerGLSurfaceView.mViewHeight=984*1596=1570464
+ // Chart data array
+ private float[] mChartDatas = new float[BUFFER_SIZE];
+ // Direct buffer backing the vertex data
+ private FloatBuffer vertexBuffer;
+ // Vertex coordinate array (x, y, z per point)
+ private float[] vertices = new float[BUFFER_SIZE * 3];
+ // Drawing area
+ private int width, height;
+
+ public ChartWaveForm() {
+
+ }
+
+ /**
+ * Pack the chart data array into the vertex coordinate array.
+ */
+ private void drawRealtimeChart() {
+ // Coordinate system: the screen centre is the origin (0,0,0), the top-left corner is (-1,1,0), the bottom-right (1,-1,0). Are these values right? The result looks very narrow.
+ float span = 20.0f / 984;//VideoPlayerGLSurfaceView.mViewWidth=984
+ //Indices 0,3,6,... of vertices hold the x coordinates
+ //Indices 1,4,7,... hold the y coordinates
+ //Indices 2,5,8,... hold the z coordinates; the chart is flat, so z defaults to 0
+ for (int i = 0; i < BUFFER_SIZE; i++) {
+
+ vertices[i * 3] = -10 + (i % 984) * span;//VideoPlayerGLSurfaceView.mViewWidth=984
+// vertices[i * 3] = -10 + i * span;
+// L.d("执行到这", "X轴坐标==" + vertices[i * 3]);
+
+ vertices[i * 3 + 1] = mChartDatas[i];
+
+ vertices[i * 3 + 2] = 0.0f;
+ }
+ }
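+
+ // Layout sketch of the packed array (values taken from the loop above): sample i
+ // occupies vertices[3i .. 3i+2] = { -10 + (i % 984) * span, mChartDatas[i], 0 },
+ // so every row of 984 samples maps onto the x range [-10, 10).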
+
+ /**
+ * Allocate a native buffer to hold the vertex coordinate array.
+ */
+ private void vertexGenerate() {
+ // A float occupies 4 bytes; allocate a direct byte buffer large enough for the vertex array
+ ByteBuffer vertexByteBuffer = ByteBuffer.allocateDirect(vertices.length * 4);
+ // Use the platform's native byte order (big- or little-endian)
+ vertexByteBuffer.order(ByteOrder.nativeOrder());
+ // View the allocated byte buffer as a float buffer
+ vertexBuffer = vertexByteBuffer.asFloatBuffer();
+ // Copy in the vertex coordinate array
+ vertexBuffer.put(vertices);
+ // Rewind the buffer so position points at the first element
+ vertexBuffer.position(0);
+ }
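+
+ // allocateDirect + nativeOrder + asFloatBuffer + position(0) is the standard
+ // pattern for handing a Java float[] to glVertexPointer: GL reads the direct
+ // buffer from its current position, so it must be rewound before each draw.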
+
+ public void setResolution(int width, int height) {
+ this.width = width;
+ this.height = height;
+ }
+
+ public void setChartData(float[] chartData) {
+ this.mChartDatas = chartData;
+ drawRealtimeChart();
+ vertexGenerate();
+ }
+
+ public void draw(GL10 gl) {
+ gl.glViewport(0, 0, width, height);
+ // Enable the vertex array client state
+ gl.glEnableClientState(GL10.GL_VERTEX_ARRAY);
+ // Set the drawing color (opaque white)
+ gl.glColor4f(1.0f, 1.0f, 1.0f, 1.0f);
+ // Point to our vertex buffer; 3 components (x, y, z) per vertex
+ gl.glVertexPointer(3, GL10.GL_FLOAT, 0, vertexBuffer);
+ // Point size
+ gl.glPointSize(2.5f);
+ // Draw each vertex as an individual point
+ gl.glDrawArrays(GL10.GL_POINTS, 0, vertices.length / 3);
+ //Disable the client state before leaving
+ gl.glDisableClientState(GL10.GL_VERTEX_ARRAY);
+ }
+}
diff --git a/cgeDemo/src/main/java/org/wysaid/cgeDemo/HistogramView.java b/cgeDemo/src/main/java/org/wysaid/cgeDemo/HistogramView.java
new file mode 100644
index 00000000..3eac170f
--- /dev/null
+++ b/cgeDemo/src/main/java/org/wysaid/cgeDemo/HistogramView.java
@@ -0,0 +1,96 @@
+
+package org.wysaid.cgeDemo;
+
+import android.content.Context;
+import android.content.res.Configuration;
+import android.graphics.Canvas;
+import android.graphics.Color;
+import android.graphics.Paint;
+import android.os.Handler;
+import android.os.Message;
+import android.view.View;
+
+public class HistogramView extends View implements Handler.Callback {
+ static final private String TAG = "HistogramView";
+
+ private static final int HISTOGRAM = 0;
+
+ private Paint paint;
+ private Handler handler;
+ // private Converter converter;
+ private Configuration config;
+
+ private int width;
+ private int height;
+ private float max;
+
+ private float[] histogram;
+ private long count;
+
+ // HistogramView
+ public HistogramView(Context context) {
+ super(context);
+ paint = new Paint();
+ handler = new Handler(this);
+// converter = new Converter(getContext());
+ }
+
+ // onSizeChanged
+ @Override
+ public void onSizeChanged(int w, int h, int oldw, int oldh) {
+ width = w;
+ height = h;
+
+ config = getResources().getConfiguration();
+ }
+
+ // onDraw
+ @Override
+ public void onDraw(Canvas canvas) {
+ if (histogram == null) return;
+
+ canvas.drawColor(Color.BLACK);
+
+ float xscale = (float) width / 256;
+ float yscale = (float) height / 10000;
+
+ paint.setStrokeWidth(xscale);
+ paint.setColor(Color.WHITE);
+
+ for (int x = 0; x < 256; x++) {
+ float xpos = x * xscale;
+ float ypos;
+ if (histogram[x] < 10000) {
+ ypos = histogram[x] * yscale;
+ } else {
+ ypos = 10000 * yscale;
+ }
+ canvas.drawLine(xpos, height, xpos + xscale, height - ypos, paint);
+ }
+ }
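+
+ // Worked example of the scaling above: on a 1080-px-wide view xscale is about
+ // 4.2, so bin 128 lands near x = 540; counts are clamped at 10000 so a single
+ // dominant bin cannot stretch past the view height.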
+
+ public void onPreviewFrame(float[] data, VideoPlayerGLSurfaceView view) {
+ if (data != null) {
+// if (count++ % 2 == 0) {
+ Message message = handler.obtainMessage(HISTOGRAM, view.getViewWidth(), view.getViewheight(), data);
+ message.sendToTarget();
+// }
+ }
+ }
+
+ @Override
+ public boolean handleMessage(Message message) {
+ // process incoming messages here
+ histogram = (float[]) message.obj;
+// int width = message.arg1;
+// int height = message.arg2;
+// byte[] data = (byte[]) message.obj;
+//
+// byte[] pixels = converter.convertToRGB(data, width, height);
+// histogram =
+// converter.luminanceHistogram(data, width, height);
+// converter.histogram(pixels, width, height);
+ invalidate();
+ return true;
+ }
+}
diff --git a/cgeDemo/src/main/java/org/wysaid/cgeDemo/MainActivity.java b/cgeDemo/src/main/java/org/wysaid/cgeDemo/MainActivity.java
index e6e42404..8f61b0d8 100644
--- a/cgeDemo/src/main/java/org/wysaid/cgeDemo/MainActivity.java
+++ b/cgeDemo/src/main/java/org/wysaid/cgeDemo/MainActivity.java
@@ -14,23 +14,89 @@
import android.widget.Button;
import android.widget.LinearLayout;
-import java.io.IOException;
-import java.io.InputStream;
-
import org.wysaid.common.Common;
-import org.wysaid.myUtils.MsgUtil;
import org.wysaid.nativePort.CGENativeLibrary;
+import java.io.IOException;
+import java.io.InputStream;
+
public class MainActivity extends AppCompatActivity {
+// uniform float radius;
+// uniform vec2 center;
+// uniform vec4 borderColor;
+// uniform float borderThickness;
+//
+// void main()
+// {
+// vec2 textureCoordinateToUse = textureCoordinate;
+// float dist = distance(center, textureCoordinate);
+//
+// if (dist < radius && factor > 0.0) {
+// textureCoordinateToUse -= center;
+// textureCoordinateToUse = textureCoordinateToUse / factor;
+// textureCoordinateToUse += center;
+// }
+//
+// gl_FragColor = texture2D(inputImageTexture, textureCoordinateToUse);
+// }
+//
+// void main()
+// {
+// vec4 color = texture2D(inputImageTexture, textureCoordinate);
+//
+// //vec4 color = vec4(255.0,255.0,255.0,1.0);
+//
+// vec2 uv = textureCoordinate.xy - center;
+//
+// float d = sqrt(dot(uv,uv));
+//
+// //float t = 1.0 - smoothstep(radius-borderThickness,radius, d);
+// float t = 1.0 - smoothstep(0.0, borderThickness, abs(radius-d));
+// gl_FragColor = vec4(color.rgb,color.a*t);
+// }
public static final String LOG_TAG = "wysaid";
public static final String EFFECT_CONFIGS[] = {
- "",
+
+ "@style waveform 0.5 0.5 200. 150. 0.0 0.0 0.0 ",
+ "@style hist 0.5 0.5 200. 150. 0.0 0.0 0.0 ",
+
+ "@style 3x3Texture 0.0002 0.0002 ",
+// "@pixblend cl 1 0 0 0 90",
+// "@pixblend cl 255 0 0 255 100",
+// "@blend colorbw 255 0 0 255 100",
+// "@style stroke 255.0 0.0 0.0 0.2 2. 3. ",
+ "@style histogram 255.0 255.0 255.0",
+
+ //"@style waveform 255.0 0.0 0.0 0.02",
+ "@style drawbg 0.5 0.5 0.2 0.2 0.02",
+ "@style waveform 0.5 0.5 0.5 0.5 255.0 0.0 0.0 0.1",
+ "@style drawbg 0.05 0.8 0.35 0.4 0.02 @style waveform 0.05 0.8 0.35 0.4 255.0 0.0 0.0 0.02",
+
+ "@adjust level 0.66 0.23 0.44 ",
+ //"@blend overlay histogram_bg.png 100",
+ "@style zebraCrossing histogram_bg.png",
+
+ "@style lumrange 0.6",
"@curve RGB(0,255)(255,0) @style cm mapping0.jpg 80 80 8 3", // ASCII art (字符画效果)
"@beautify face 1 480 640", //Beautify
- "@adjust lut edgy_amber.png",
+
+ "@style falsecolor 1.0",
+
+// "@style falsecolor colorscale_hsv.jpg",
+// "@style falsecolor colorscale_jet.jpg",
+// "@style falsecolor colorscale_rainbow.jpg",
+// "@style falsecolor colorscale_spring.jpg",
+// "@style lut colorscale_autumn.jpg",
+// "@style lut colorscale_bone.jpg",
+// "@style lut colorscale_cool.jpg",
+// "@style lut colorscale_hot.jpg",
+// "@style lut colorscale_summer.jpg",
+// "@style lut colorscale_winter.jpg",
+ //11
+ "@style fcolor 1.0 ",
"@adjust lut filmstock.png",
"@adjust lut foggy_night.png",
"@adjust lut late_sunset.png",
@@ -40,26 +106,46 @@ public class MainActivity extends AppCompatActivity {
"@blur lerp 1", //can adjust blur mix
"#unpack @dynamic wave 1", //can adjust speed
"@dynamic wave 0.5", //can adjust wave mix
+ //21
"#unpack @style sketch 0.9",
"#unpack @krblend sr hehe.jpg 100 ",
"#unpack @krblend ol hehe.jpg 100",
"#unpack @krblend add hehe.jpg 100",
+ "#unpack @krblend add histogram_bg.png 100",
"#unpack @krblend darken hehe.jpg 100",
"@beautify bilateral 100 3.5 2 ",
- "@style crosshatch 0.01 0.003 ",
- "@style edge 1 2 ",
- "@style edge 1 2 @curve RGB(0, 255)(255, 0) ",
+ //27
+ "",
+ "@style edge 0.2 2 1 ",
+ "@style edge 0.5 2 1",
+ "@style edge 1 1 1",
+ "@style sobel 0.1 2 ",
+ "@style sobel 0.5 2",
+ "@style sobel 1 1",
+ "@style drawround 0.5 0.5 0.05 0.02 255.0 0.0 0.0",
+ "@style magnifier 0.5 0.5 1.5 0.2 @style drawsquare 0.5 0.5 255.0 0.0 0.0 0.003 0.2",// @style drawround 0.5 0.5 0.003 0.2 255.0 0.0 0.0
+ //36
+ "@style singlecolor 255.0 0.0 0.0 0.2",
+ "@pixblend cb 255 0 0 255 100",
+// "@pixblend ol 255 0 0 255 100",
+ "@style drawround 0.5 0.5 0.5 0.2 255.0 0.0 0.0",
+ "@style drawsquare 0.5 0.5 255.0 0.0 0.0 0.003 0.2",
+ "@style drawcross 0.5 0.5 255.0 0.0 0.0",
+// "@style toon 0.2 10.0 ",
"@style edge 1 2 @curve RGB(0, 255)(255, 0) @adjust saturation 0 @adjust level 0.33 0.71 0.93 ",
"@adjust level 0.31 0.54 0.13 ",
- "#unpack @style emboss 1 2 2 ",
- "@style halftone 1.2 ",
+ //43
+ "#unpack @style emboss 1 1 1 ",
+ "@style halftone 1.5 ",
"@vigblend overlay 255 0 0 255 100 0.12 0.54 0.5 0.5 3 ",
"@curve R(0, 0)(63, 101)(200, 84)(255, 255)G(0, 0)(86, 49)(180, 183)(255, 255)B(0, 0)(19, 17)(66, 41)(97, 92)(137, 156)(194, 211)(255, 255)RGB(0, 0)(82, 36)(160, 183)(255, 255) ",
"@adjust exposure 0.98 ",
"@adjust shadowhighlight -200 200 ",
"@adjust sharpen 10 1.5 ",
+ //50 color balance
"@adjust colorbalance 0.99 0.52 -0.31 ",
- "@adjust level 0.66 0.23 0.44 ",
+ "@style crosshatch 0.03 0.002",
+
"@style min",
"@style max",
"@style haze 0.5 -0.14 1 0.8 1 ",
@@ -191,7 +277,7 @@ public static class DemoClassDescription {
new DemoClassDescription("TestCaseActivity", "Test Cases")
};
- public class DemoButton extends Button implements View.OnClickListener {
+ public class DemoButton extends android.support.v7.widget.AppCompatButton implements View.OnClickListener {
private DemoClassDescription mDemo;
public void setDemo(DemoClassDescription demo) {
@@ -207,12 +293,6 @@ public void setDemo(DemoClassDescription demo) {
@Override
public void onClick(final View v) {
-
- if (mDemo.activityName == "FaceTrackingDemoActivity") {
- MsgUtil.toastMsg(v.getContext(), "Error: Please checkout the branch 'face_features' for this demo!");
- return;
- }
-
Log.i(LOG_TAG, String.format("%s is clicked!", mDemo.title));
Class cls;
try {
diff --git a/cgeDemo/src/main/java/org/wysaid/cgeDemo/VideoPlayerDemoActivity.java b/cgeDemo/src/main/java/org/wysaid/cgeDemo/VideoPlayerDemoActivity.java
index c48ec892..28ba9706 100755
--- a/cgeDemo/src/main/java/org/wysaid/cgeDemo/VideoPlayerDemoActivity.java
+++ b/cgeDemo/src/main/java/org/wysaid/cgeDemo/VideoPlayerDemoActivity.java
@@ -1,11 +1,14 @@
package org.wysaid.cgeDemo;
+import android.annotation.SuppressLint;
import android.content.Context;
import android.content.Intent;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.media.MediaPlayer;
import android.net.Uri;
+import android.os.Handler;
+import android.os.Message;
import android.support.v7.app.AppCompatActivity;
import android.os.Bundle;
import android.util.Log;
@@ -14,14 +17,17 @@
import android.view.View;
import android.widget.Button;
import android.widget.LinearLayout;
+import android.widget.RelativeLayout;
import android.widget.SeekBar;
+import org.wysaid.cgeDemo.ChartGLSurfaceView;
+import org.wysaid.cgeDemo.HistogramView;
+import org.wysaid.cgeDemo.VideoPlayerGLSurfaceView;
import org.wysaid.common.Common;
import org.wysaid.myUtils.FileUtil;
import org.wysaid.myUtils.ImageUtil;
import org.wysaid.myUtils.MsgUtil;
import org.wysaid.nativePort.CGEFrameRenderer;
-import org.wysaid.view.VideoPlayerGLSurfaceView;
public class VideoPlayerDemoActivity extends AppCompatActivity {
@@ -34,6 +40,36 @@ public class VideoPlayerDemoActivity extends AppCompatActivity {
public static final int REQUEST_CODE_PICK_VIDEO = 1;
+ public static VideoPlayerDemoActivity instance = null;
+ private ChartGLSurfaceView chartView;
+ private HistogramView histogram;
+ RelativeLayout mGLViewGroup;
+ Button mHistBtn;
+ Button mWaveBtn;
+
+ public Handler mHandler = new Handler(new Handler.Callback() {
+ @Override
+ public boolean handleMessage(Message msg) {
+ switch (msg.what) {
+ case 0:
+ if (chartView != null) {
+ chartView.setChartData((float[]) msg.obj);
+ }
+ break;
+
+ case 1:
+
+ if (histogram != null) {
+ histogram.onPreviewFrame((float[]) msg.obj, mPlayerView);
+ }
+ break;
+ default:
+ break;
+ }
+ return false;
+ }
+ });
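+ // msg.what == 0 carries normalized waveform samples (float[]); msg.what == 1
+ // carries the 256-bin luma histogram. Both are produced by the readback threads
+ // in VideoPlayerGLSurfaceView (switchWaveform / switchLumForm).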
+
private VideoPlayerGLSurfaceView.PlayCompletionCallback playCompletionCallback = new VideoPlayerGLSurfaceView.PlayCompletionCallback() {
@Override
public void playComplete(MediaPlayer player) {
@@ -48,6 +84,7 @@ public boolean playFailed(MediaPlayer player, final int what, final int extra) {
}
};
+ @SuppressLint("AppCompatCustomView")
class MyVideoButton extends Button implements View.OnClickListener {
Uri videoUri;
@@ -78,6 +115,35 @@ public void run() {
}
}
+ public static VideoPlayerDemoActivity getInstance() {
+ return instance;
+ }
+
+ public void setWStatue(boolean statue) {
+ mPlayerView.switchWaveform(statue);
+ }
+
+ // Hide the waveform overlay
+ public void hideWaveform() {
+ if (chartView != null) {
+ setWStatue(false);
+ mGLViewGroup.removeView(chartView);
+ chartView = null;
+ }
+ }
+
+ public void setHStatue(boolean statue) {
+ mPlayerView.switchLumForm(statue);
+ }
+
+ public void hideBrightness() {
+ if (histogram != null) {
+ setHStatue(false);
+ mGLViewGroup.removeView(histogram);
+ histogram = null;
+ }
+ }
+
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
@@ -86,6 +152,45 @@ protected void onCreate(Bundle savedInstanceState) {
mPlayerView.setZOrderOnTop(false);
mPlayerView.setZOrderMediaOverlay(true);
+ instance = this;
+
+ mGLViewGroup = (RelativeLayout) findViewById(R.id.sv_device_group);
+ mHistBtn = (Button) findViewById(R.id.histgramBtn);
+ mHistBtn.setOnClickListener(new View.OnClickListener() {
+ @Override
+ public void onClick(View v) {
+
+ if (chartView != null) {
+ hideWaveform();
+ }
+ if (histogram == null) {
+ histogram = new HistogramView(VideoPlayerDemoActivity.this);
+ mGLViewGroup.addView(histogram);
+ setHStatue(true);
+ } else {
+ hideBrightness();
+ }
+ }
+ });
+
+ mWaveBtn = (Button) findViewById(R.id.waveformBtn);
+ mWaveBtn.setOnClickListener(new View.OnClickListener() {
+ @Override
+ public void onClick(View v) {
+
+ if (histogram != null) {
+ hideBrightness();
+ }
+ if (chartView == null) {
+ chartView = new ChartGLSurfaceView(VideoPlayerDemoActivity.this);
+ mGLViewGroup.addView(chartView);
+ setWStatue(true);
+ } else {
+ hideWaveform();
+ }
+ }
+ });
+
mShapeBtn = (Button) findViewById(R.id.switchShapeBtn);
mShapeBtn.setOnClickListener(new View.OnClickListener() {
@@ -318,4 +423,5 @@ public boolean onOptionsItemSelected(MenuItem item) {
return super.onOptionsItemSelected(item);
}
+
}
diff --git a/cgeDemo/src/main/java/org/wysaid/cgeDemo/VideoPlayerGLSurfaceView.java b/cgeDemo/src/main/java/org/wysaid/cgeDemo/VideoPlayerGLSurfaceView.java
new file mode 100644
index 00000000..f5e322ad
--- /dev/null
+++ b/cgeDemo/src/main/java/org/wysaid/cgeDemo/VideoPlayerGLSurfaceView.java
@@ -0,0 +1,694 @@
+package org.wysaid.cgeDemo;
+
+import android.content.Context;
+import android.graphics.Bitmap;
+import android.graphics.Canvas;
+import android.graphics.Matrix;
+import android.graphics.PixelFormat;
+import android.graphics.SurfaceTexture;
+import android.media.MediaPlayer;
+import android.net.Uri;
+import android.opengl.GLES20;
+import android.opengl.GLSurfaceView;
+import android.os.Message;
+import android.util.AttributeSet;
+import android.util.Log;
+import android.view.Surface;
+
+import org.wysaid.cgeDemo.VideoPlayerDemoActivity;
+import org.wysaid.common.Common;
+import org.wysaid.nativePort.CGEFrameRenderer;
+import org.wysaid.texUtils.TextureRenderer;
+
+import java.nio.ByteBuffer;
+import java.nio.IntBuffer;
+
+import javax.microedition.khronos.egl.EGLConfig;
+import javax.microedition.khronos.opengles.GL10;
+
+/**
+ * Created by wangyang on 15/11/26.
+ */
+
+public class VideoPlayerGLSurfaceView extends GLSurfaceView implements GLSurfaceView.Renderer, SurfaceTexture.OnFrameAvailableListener {
+
+ public static final String LOG_TAG = Common.LOG_TAG;
+
+ private SurfaceTexture mSurfaceTexture;
+ private int mVideoTextureID;
+ private CGEFrameRenderer mFrameRenderer;
+
+
+ private TextureRenderer.Viewport mRenderViewport = new TextureRenderer.Viewport();
+ private float[] mTransformMatrix = new float[16];
+ private boolean mIsUsingMask = false;
+
+ public boolean isUsingMask() {
+ return mIsUsingMask;
+ }
+
+ private float mMaskAspectRatio = 1.0f;
+
+ private int mViewWidth = 1000;
+ private int mViewHeight = 1000;
+
+ public int getViewWidth() {
+ return mViewWidth;
+ }
+
+ public int getViewheight() {
+ return mViewHeight;
+ }
+
+ private int mVideoWidth = 1000;
+ private int mVideoHeight = 1000;
+
+ private boolean mFitFullView = false;
+
+ public void setFitFullView(boolean fit) {
+ mFitFullView = fit;
+ if (mFrameRenderer != null)
+ calcViewport();
+ }
+
+ private MediaPlayer mPlayer;
+
+ private Uri mVideoUri;
+
+ public interface PlayerInitializeCallback {
+
+ // Perform extra setup on the player, e.g. register listeners that are not installed by default, such as a buffering-update listener.
+ void initPlayer(MediaPlayer player);
+ }
+
+ public void setPlayerInitializeCallback(PlayerInitializeCallback callback) {
+ mPlayerInitCallback = callback;
+ }
+
+ PlayerInitializeCallback mPlayerInitCallback;
+
+ public interface PlayPreparedCallback {
+ void playPrepared(MediaPlayer player);
+ }
+
+ PlayPreparedCallback mPreparedCallback;
+
+ public interface PlayCompletionCallback {
+ void playComplete(MediaPlayer player);
+
+
+ /*
+
+ Possible values of 'what': MEDIA_ERROR_UNKNOWN,
+ MEDIA_ERROR_SERVER_DIED
+
+ Possible values of 'extra': MEDIA_ERROR_IO,
+ MEDIA_ERROR_MALFORMED,
+ MEDIA_ERROR_UNSUPPORTED,
+ MEDIA_ERROR_TIMED_OUT
+
+ Returning false causes 'playComplete' to be called.
+ */
+ boolean playFailed(MediaPlayer mp, int what, int extra);
+ }
+
+ PlayCompletionCallback mPlayCompletionCallback;
+
+ public synchronized void setVideoUri(final Uri uri, final PlayPreparedCallback preparedCallback, final PlayCompletionCallback completionCallback) {
+
+ mVideoUri = uri;
+ mPreparedCallback = preparedCallback;
+ mPlayCompletionCallback = completionCallback;
+
+ if (mFrameRenderer != null) {
+
+ queueEvent(new Runnable() {
+ @Override
+ public void run() {
+ Log.i(LOG_TAG, "setVideoUri...");
+
+ if (mSurfaceTexture == null || mVideoTextureID == 0) {
+ mVideoTextureID = Common.genSurfaceTextureID();
+ mSurfaceTexture = new SurfaceTexture(mVideoTextureID);
+ mSurfaceTexture.setOnFrameAvailableListener(VideoPlayerGLSurfaceView.this);
+ }
+ _useUri();
+ }
+ });
+ }
+ }
+
+ public synchronized void setFilterWithConfig(final String config) {
+ queueEvent(new Runnable() {
+ @Override
+ public void run() {
+
+ if (mFrameRenderer != null) {
+ mFrameRenderer.setFilterWidthConfig(config);
+ } else {
+ Log.e(LOG_TAG, "setFilterWithConfig after release!!");
+ }
+ }
+ });
+ }
+
+ public void setFilterIntensity(final float intensity) {
+ queueEvent(new Runnable() {
+ @Override
+ public void run() {
+ if (mFrameRenderer != null) {
+ mFrameRenderer.setFilterIntensity(intensity);
+ } else {
+ Log.e(LOG_TAG, "setFilterIntensity after release!!");
+ }
+ }
+ });
+ }
+
+ public interface SetMaskBitmapCallback {
+ void setMaskOK(CGEFrameRenderer recorder);
+ }
+
+ public void setMaskBitmap(final Bitmap bmp, final boolean shouldRecycle) {
+ setMaskBitmap(bmp, shouldRecycle, null);
+ }
+
+ // Note: when the supplied bmp is null, the SetMaskBitmapCallback is not invoked.
+ public void setMaskBitmap(final Bitmap bmp, final boolean shouldRecycle, final SetMaskBitmapCallback callback) {
+
+ queueEvent(new Runnable() {
+ @Override
+ public void run() {
+
+ if (mFrameRenderer == null) {
+ Log.e(LOG_TAG, "setMaskBitmap after release!!");
+ return;
+ }
+
+ if (bmp == null) {
+ mFrameRenderer.setMaskTexture(0, 1.0f);
+ mIsUsingMask = false;
+ calcViewport();
+ return;
+ }
+
+ int texID = Common.genNormalTextureID(bmp, GLES20.GL_NEAREST, GLES20.GL_CLAMP_TO_EDGE);
+
+ mFrameRenderer.setMaskTexture(texID, bmp.getWidth() / (float) bmp.getHeight());
+ mIsUsingMask = true;
+ mMaskAspectRatio = bmp.getWidth() / (float) bmp.getHeight();
+
+ if (callback != null) {
+ callback.setMaskOK(mFrameRenderer);
+ }
+
+ if (shouldRecycle)
+ bmp.recycle();
+
+ calcViewport();
+ }
+ });
+ }
+
+ public synchronized MediaPlayer getPlayer() {
+ if (mPlayer == null) {
+ Log.e(LOG_TAG, "Player is not initialized!");
+ }
+ return mPlayer;
+ }
+
+ public interface OnCreateCallback {
+ void createOK();
+ }
+
+ private OnCreateCallback mOnCreateCallback;
+
+ // Hook for custom initialization work
+ public void setOnCreateCallback(final OnCreateCallback callback) {
+
+ assert callback != null : "Meaningless operation!";
+
+ if (mFrameRenderer == null) {
+ mOnCreateCallback = callback;
+ } else {
+ // Already created; run the callback immediately
+ queueEvent(new Runnable() {
+ @Override
+ public void run() {
+ callback.createOK();
+ }
+ });
+ }
+ }
+
+ public VideoPlayerGLSurfaceView(Context context, AttributeSet attrs) {
+ super(context, attrs);
+
+ Log.i(LOG_TAG, "MyGLSurfaceView Construct...");
+
+ setEGLContextClientVersion(2);
+ setEGLConfigChooser(8, 8, 8, 8, 8, 0);
+ getHolder().setFormat(PixelFormat.RGBA_8888);
+ setRenderer(this);
+ setRenderMode(RENDERMODE_WHEN_DIRTY);
+ setZOrderOnTop(true);
+
+ Log.i(LOG_TAG, "MyGLSurfaceView Construct OK...");
+ }
+
+ @Override
+ public void onSurfaceCreated(GL10 gl, EGLConfig config) {
+
+ Log.i(LOG_TAG, "video player onSurfaceCreated...");
+
+ GLES20.glDisable(GLES20.GL_DEPTH_TEST);
+ GLES20.glDisable(GLES20.GL_STENCIL_TEST);
+
+ if (mOnCreateCallback != null) {
+ mOnCreateCallback.createOK();
+ }
+
+ if (mVideoUri != null && (mSurfaceTexture == null || mVideoTextureID == 0)) {
+ mVideoTextureID = Common.genSurfaceTextureID();
+ mSurfaceTexture = new SurfaceTexture(mVideoTextureID);
+ mSurfaceTexture.setOnFrameAvailableListener(VideoPlayerGLSurfaceView.this);
+ _useUri();
+ }
+
+ }
+
+ @Override
+ public void onSurfaceChanged(GL10 gl, int width, int height) {
+ GLES20.glClearColor(0.0f, 0.0f, 0.0f, 0.0f);
+
+ mViewWidth = width;
+ mViewHeight = height;
+
+ Log.i("执行到这", "mViewWidth=="+mViewWidth+"---mViewHeight=="+mViewHeight);
+ bufferByte = new byte[mViewWidth * mViewHeight * 4];
+ byteBuffer = ByteBuffer.wrap(bufferByte);
+
+ calcViewport();
+ }
+
+ //must be in the OpenGL thread!
+ public void release() {
+
+ Log.i(LOG_TAG, "Video player view release...");
+
+ if (mPlayer != null) {
+ queueEvent(new Runnable() {
+ @Override
+ public void run() {
+
+ Log.i(LOG_TAG, "Video player view release run...");
+
+ if (mPlayer != null) {
+
+ mPlayer.setSurface(null);
+ if (mPlayer.isPlaying())
+ mPlayer.stop();
+ mPlayer.release();
+ mPlayer = null;
+ }
+
+ if (mFrameRenderer != null) {
+ mFrameRenderer.release();
+ mFrameRenderer = null;
+ }
+
+ if (mSurfaceTexture != null) {
+ mSurfaceTexture.release();
+ mSurfaceTexture = null;
+ }
+
+ if (mVideoTextureID != 0) {
+ GLES20.glDeleteTextures(1, new int[]{mVideoTextureID}, 0);
+ mVideoTextureID = 0;
+ }
+
+ mIsUsingMask = false;
+ mPreparedCallback = null;
+ mPlayCompletionCallback = null;
+
+ Log.i(LOG_TAG, "Video player view release OK");
+ }
+ });
+ }
+ }
+
+ @Override
+ public void onPause() {
+ Log.i(LOG_TAG, "surfaceview onPause ...");
+
+ super.onPause();
+ }
+
+ @Override
+ public void onDrawFrame(GL10 gl) {
+
+ if (mSurfaceTexture == null || mFrameRenderer == null) {
+ return;
+ }
+
+ mSurfaceTexture.updateTexImage();
+
+ if (!mPlayer.isPlaying()) {
+ return;
+ }
+
+ mSurfaceTexture.getTransformMatrix(mTransformMatrix);
+ mFrameRenderer.update(mVideoTextureID, mTransformMatrix);
+
+ mFrameRenderer.runProc();
+
+ GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, 0);
+ GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
+
+ GLES20.glEnable(GLES20.GL_BLEND);
+ mFrameRenderer.render(mRenderViewport.x, mRenderViewport.y, mRenderViewport.width, mRenderViewport.height);
+ GLES20.glDisable(GLES20.GL_BLEND);
+
+
+// if (waveFormSwitch || lumFormSwitch) {
+// byteBuffer.position(0);
+// GLES20.glReadPixels(0, 0, mViewWidth, mViewHeight, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, byteBuffer);
+// }
+
+ }
+
+ private long mTimeCount2 = 0;
+ private long mFramesCount2 = 0;
+ private long mLastTimestamp2 = 0;
+
+ @Override
+ public void onFrameAvailable(SurfaceTexture surfaceTexture) {
+ requestRender();
+
+ if(waveFormSwitch ||lumFormSwitch){
+ queueEvent(new Runnable() {
+ @Override
+ public void run() {
+ byteBuffer.position(0);
+ GLES20.glReadPixels(0, 0, mViewWidth, mViewHeight, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, byteBuffer);
+ }
+ });
+ }
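+ // glReadPixels must run on the GL thread, hence the queueEvent above; the RGBA
+ // readback fills bufferByte (mViewWidth * mViewHeight * 4 bytes), which the
+ // waveform/histogram threads started in switchWaveform/switchLumForm convert
+ // and post to the activity's handler.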
+ if (mLastTimestamp2 == 0)
+ mLastTimestamp2 = System.currentTimeMillis();
+
+ long currentTimestamp = System.currentTimeMillis();
+
+ ++mFramesCount2;
+ mTimeCount2 += currentTimestamp - mLastTimestamp2;
+ mLastTimestamp2 = currentTimestamp;
+ if (mTimeCount2 >= 1e3) {
+ Log.i(LOG_TAG, String.format("播放帧率: %d", mFramesCount2));
+ mTimeCount2 -= 1e3;
+ mFramesCount2 = 0;
+ }
+ }
+
+ private void calcViewport() {
+ float scaling;
+
+ if (mIsUsingMask) {
+ scaling = mMaskAspectRatio;
+ } else {
+ scaling = mVideoWidth / (float) mVideoHeight;
+ }
+
+ float viewRatio = mViewWidth / (float) mViewHeight;
+ float s = scaling / viewRatio;
+
+ int w, h;
+
+ if (mFitFullView) {
+ // Fill the whole view (content may extend beyond the view)
+ if (s > 1.0) {
+ w = (int) (mViewHeight * scaling);
+ h = mViewHeight;
+ } else {
+ w = mViewWidth;
+ h = (int) (mViewWidth / scaling);
+ }
+ } else {
+ // Show all the content (content fits inside the view)
+ if (s > 1.0) {
+ w = mViewWidth;
+ h = (int) (mViewWidth / scaling);
+ } else {
+ h = mViewHeight;
+ w = (int) (mViewHeight * scaling);
+ }
+ }
+
+ mRenderViewport.width = w;
+ mRenderViewport.height = h;
+ mRenderViewport.x = (mViewWidth - mRenderViewport.width) / 2;
+ mRenderViewport.y = (mViewHeight - mRenderViewport.height) / 2;
+ Log.i(LOG_TAG, String.format("View port: %d, %d, %d, %d", mRenderViewport.x, mRenderViewport.y, mRenderViewport.width, mRenderViewport.height));
+ }
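+
+ // Worked example: a 1000x1000 view showing 1920x1080 video gives scaling of about
+ // 1.78 and s > 1, so with mFitFullView the viewport becomes 1777x1000 (cropped
+ // left and right) and without it 1000x562 (letterboxed top and bottom).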
+
+ private void _useUri() {
+
+ if (mPlayer != null) {
+
+ mPlayer.stop();
+ mPlayer.reset();
+
+ } else {
+ mPlayer = new MediaPlayer();
+ }
+
+ try {
+ mPlayer.setDataSource(getContext(), mVideoUri);
+ mPlayer.setSurface(new Surface(mSurfaceTexture));
+
+ } catch (Exception e) {
+ e.printStackTrace();
+ Log.e(LOG_TAG, "useUri failed");
+
+ if (mPlayCompletionCallback != null) {
+ this.post(new Runnable() {
+ @Override
+ public void run() {
+ if (mPlayCompletionCallback != null) {
+ if (!mPlayCompletionCallback.playFailed(mPlayer, MediaPlayer.MEDIA_ERROR_UNKNOWN, MediaPlayer.MEDIA_ERROR_UNSUPPORTED))
+ mPlayCompletionCallback.playComplete(mPlayer);
+ }
+ }
+ });
+ }
+ return;
+ }
+
+ if (mPlayerInitCallback != null) {
+ mPlayerInitCallback.initPlayer(mPlayer);
+ }
+
+ mPlayer.setOnCompletionListener(new MediaPlayer.OnCompletionListener() {
+ @Override
+ public void onCompletion(MediaPlayer mp) {
+ if (mPlayCompletionCallback != null) {
+ mPlayCompletionCallback.playComplete(mPlayer);
+ }
+ Log.i(LOG_TAG, "Video Play Over");
+ }
+ });
+
+ mPlayer.setOnPreparedListener(new MediaPlayer.OnPreparedListener() {
+ @Override
+ public void onPrepared(MediaPlayer mp) {
+ mVideoWidth = mp.getVideoWidth();
+ mVideoHeight = mp.getVideoHeight();
+
+ queueEvent(new Runnable() {
+ @Override
+ public void run() {
+
+ if (mFrameRenderer == null) {
+ mFrameRenderer = new CGEFrameRenderer();
+ }
+
+ if (mFrameRenderer.init(mVideoWidth, mVideoHeight, mVideoWidth, mVideoHeight)) {
+ //Keep right orientation for source texture blending
+ mFrameRenderer.setSrcFlipScale(1.0f, -1.0f);
+ mFrameRenderer.setRenderFlipScale(1.0f, -1.0f);
+ } else {
+ Log.e(LOG_TAG, "Frame Recorder init failed!");
+ }
+
+ calcViewport();
+ }
+ });
+
+ if (mPreparedCallback != null) {
+ mPreparedCallback.playPrepared(mPlayer);
+ } else {
+ mp.start();
+ }
+
+ Log.i(LOG_TAG, String.format("Video resolution 1: %d x %d", mVideoWidth, mVideoHeight));
+ }
+ });
+
+ mPlayer.setOnErrorListener(new MediaPlayer.OnErrorListener() {
+ @Override
+ public boolean onError(MediaPlayer mp, int what, int extra) {
+
+ if (mPlayCompletionCallback != null)
+ return mPlayCompletionCallback.playFailed(mp, what, extra);
+ return false;
+ }
+ });
+
+ try {
+ mPlayer.prepareAsync();
+ } catch (Exception e) {
+ Log.i(LOG_TAG, String.format("Error handled: %s, play failure handler would be called!", e.toString()));
+ if (mPlayCompletionCallback != null) {
+ this.post(new Runnable() {
+ @Override
+ public void run() {
+ if (mPlayCompletionCallback != null) {
+ if (!mPlayCompletionCallback.playFailed(mPlayer, MediaPlayer.MEDIA_ERROR_UNKNOWN, MediaPlayer.MEDIA_ERROR_UNSUPPORTED))
+ mPlayCompletionCallback.playComplete(mPlayer);
+ }
+ }
+ });
+ }
+ }
+
+ }
+
+ public interface TakeShotCallback {
+ // The receiver may recycle the bitmap passed in
+ void takeShotOK(Bitmap bmp);
+ }
+
+ public synchronized void takeShot(final TakeShotCallback callback) {
+ assert callback != null : "callback must not be null!";
+
+ if (mFrameRenderer == null) {
+ Log.e(LOG_TAG, "Drawer not initialized!");
+ callback.takeShotOK(null);
+ return;
+ }
+
+ queueEvent(new Runnable() {
+ @Override
+ public void run() {
+
+ IntBuffer buffer = IntBuffer.allocate(mRenderViewport.width * mRenderViewport.height);
+
+ GLES20.glReadPixels(mRenderViewport.x, mRenderViewport.y, mRenderViewport.width, mRenderViewport.height, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, buffer);
+ Bitmap bmp = Bitmap.createBitmap(mRenderViewport.width, mRenderViewport.height, Bitmap.Config.ARGB_8888);
+ bmp.copyPixelsFromBuffer(buffer);
+
+ Bitmap bmp2 = Bitmap.createBitmap(mRenderViewport.width, mRenderViewport.height, Bitmap.Config.ARGB_8888);
+
+ Canvas canvas = new Canvas(bmp2);
+ Matrix mat = new Matrix();
+ mat.setTranslate(0.0f, -mRenderViewport.height / 2.0f);
+ mat.postScale(1.0f, -1.0f);
+ mat.postTranslate(0.0f, mRenderViewport.height / 2.0f);
+
+ canvas.drawBitmap(bmp, mat, null);
+ bmp.recycle();
+
+ callback.takeShotOK(bmp2);
+ }
+ });
+
+ }
+
+
+ private boolean waveFormSwitch = false;
+ private boolean lumFormSwitch = false;
+ private Thread dataThread;
+ private Thread lumThread;
+ private byte[] bufferByte;
+ private ByteBuffer byteBuffer;
+
+ public void switchWaveform(boolean sw) {
+ waveFormSwitch = sw;
+ if (waveFormSwitch) {
+ dataThread = new Thread(new Runnable() {
+ @Override
+ public void run() {
+ while (waveFormSwitch) {
+ Message message = new Message();
+ message.what = 0;
+ message.obj = byteToFloat(bufferByte);
+ VideoPlayerDemoActivity.getInstance().mHandler.sendMessage(message);
+ }
+ }
+ });
+ dataThread.start();
+ } else {
+ if (dataThread != null && dataThread.isAlive()) dataThread.interrupt();
+ }
+ }
+
+ public void switchLumForm(boolean sl) {
+ lumFormSwitch = sl;
+ if (lumFormSwitch) {
+ lumThread = new Thread(new Runnable() {
+ @Override
+ public void run() {
+ while (lumFormSwitch) {
+ Message message = new Message();
+ message.what = 1;
+ message.obj = byteToFormFloat(bufferByte);
+ VideoPlayerDemoActivity.getInstance().mHandler.sendMessage(message);
+ }
+ }
+ });
+ lumThread.start();
+ } else {
+ if (lumThread != null && lumThread.isAlive()) lumThread.interrupt();
+ }
+ }
+
+ public float[] byteToFormFloat(byte[] bytes) {
+ float[] fGroup = new float[256];
+ for (int i = 0; i < bytes.length; i += 4) {
+
+ String hex = Integer.toHexString(bytes[i] & 0xFF);
+
+ if (hex.length() == 1) {
+ fGroup[toInt(hex)] += 1;
+ } else if (hex.length() == 2) {
+ fGroup[toInt(hex.substring(0, 1)) * 16 + toInt(hex.substring(1, 2))] += 1;
+ }
+
+ }
+ return fGroup;
+ }
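+
+ // The hex round-trip above just recovers bytes[i] & 0xFF; an equivalent, simpler
+ // binning (a sketch, not part of this patch) would be:
+ //     for (int i = 0; i < bytes.length; i += 4) fGroup[bytes[i] & 0xFF] += 1;
+ // i.e. a 256-bin histogram over the red channel of each RGBA pixel.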
+
+ public float[] byteToFloat(byte[] bytes) {
+ float[] fGroup = new float[bytes.length / 4];
+ int j = 0;
+ for (int i = 0; i < bytes.length; i += 4) {
+// for (int i = 0; i < bytes.length; i++) {
+ String hex = Integer.toHexString(bytes[i] & 0xFF);
+
+ if (hex.length() == 1) {
+ fGroup[j] = (((toInt(hex) - 0.0f) * (1.0f - (-1.0f)) / (255.0f - 0.0f)) + (-1f));
+ } else if (hex.length() == 2) {
+ fGroup[j] = (((toInt(hex.substring(0, 1)) * 16 + toInt(hex.substring(1, 2)) - 0.0f) * (1.0f - (-1.0f)) / (255.0f - 0.0f)) + (-1f));
+
+ }
+
+ j++;
+ }
+ return fGroup;
+ }
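+
+ // The expression above maps the 0..255 red channel linearly onto [-1, 1]
+ // (value * 2 / 255 - 1): 0 maps to -1.0, 128 to about 0.004, 255 to 1.0,
+ // matching the y range the waveform chart draws in clip space.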
+
+ private static int toInt(String c) {
+ int b = "0123456789abcdef".indexOf(c);
+ return b;
+ }
+
+}
diff --git a/cgeDemo/src/main/res/layout/activity_video_player_demo.xml b/cgeDemo/src/main/res/layout/activity_video_player_demo.xml
index a74ddf00..124b5587 100644
--- a/cgeDemo/src/main/res/layout/activity_video_player_demo.xml
+++ b/cgeDemo/src/main/res/layout/activity_video_player_demo.xml
@@ -15,12 +15,19 @@
android:layout_height="match_parent"
>
-
+
+
+
+
+
+
+