https://blog.csdn.net/keen_zuxwang/article/details/78363464

demo:
http://download.csdn.net/download/keen_zuxwang/10041423

1. Create the vertex position and texture coordinate arrays.
2. Create, compile, and link the shader program, then obtain handles to the shader variables (for example, the handle of a sampler2D texture-sampler variable).
3. Through the program, pass the parameters to the shaders — vertex positions, texture coordinates, texture activation/binding, the model/view/projection matrices, and so on — then draw the primitives with glDrawArrays()/glDrawElements(). (The fragment shader computes each pixel's value from these parameters, and the result is rendered to the corresponding ANativeWindow by the underlying EGL.)
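Before getting to the camera-specific pieces, here is a minimal sketch of that generic flow (illustrative only — it assumes a current GL context, a linked program whose shaders use the names below, a FloatBuffer holding a 4-vertex quad, and a plain 2D texture id; none of these names are from the demo itself):

import android.opengl.GLES20;
import java.nio.FloatBuffer;

static void drawQuad(int program, FloatBuffer vertexBuffer, int textureId) {
    int aPosition = GLES20.glGetAttribLocation(program, "vPosition");  // vertex attribute handle
    int uSampler  = GLES20.glGetUniformLocation(program, "texture");   // sampler2D uniform handle
    GLES20.glUseProgram(program);
    GLES20.glEnableVertexAttribArray(aPosition);
    GLES20.glVertexAttribPointer(aPosition, 2, GLES20.GL_FLOAT, false, 0, vertexBuffer);
    GLES20.glActiveTexture(GLES20.GL_TEXTURE0);                        // activate texture unit 0
    GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textureId);             // bind the texture to it
    GLES20.glUniform1i(uSampler, 0);                                   // the sampler samples unit 0
    GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);               // rasterize the quad
}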

Rendering the camera2 preview image with OpenGL ES:
1. Generate a texture
GLES20.glGenTextures(1, textures, 0); // generate a texture id
GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, textures[0]); // bind the texture id
2. Create a SurfaceTexture
SurfaceTexture videoTexture = new SurfaceTexture(textures[0]); // create a SurfaceTexture from the texture id
3. Create a Surface
Surface surface0 = new Surface(videoTexture); // create a Surface from the SurfaceTexture
4. Add the Surface as a camera2 preview output, so camera frames flow into it
mPreviewBuilder = camera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW); // create a camera2 capture request in preview mode
// add the preview output Surfaces: camera frame -> Surface
mPreviewBuilder.addTarget(surface);
mPreviewBuilder.addTarget(surface0);
camera.createCaptureSession(Arrays.asList(surface, surface0), mSessionStateCallback, null); // create the capture session

So the whole camera-frame rendering pipeline is:
camera frame -> Surface -> videoTexture / videoTexture.updateTexImage() -> GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, textures[0]) ->
GLES20.glDrawElements()
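Note that SurfaceTexture.updateTexImage() must be called on the thread that owns the GL context, while onFrameAvailable() may arrive on an arbitrary thread, so the callback only raises a flag that the render thread consumes — the pattern GLViewMediaActivity uses in section 4:

    @Override
    public void onFrameAvailable(SurfaceTexture surfaceTexture) {
        synchronized (this) { frameAvailable = true; } // producer side: just signal
    }

    @Override
    public void onDrawFrame(GL10 gl) {
        synchronized (this) {
            if (frameAvailable) {
                videoTexture.updateTexImage();                          // latch the newest camera frame
                videoTexture.getTransformMatrix(videoTextureTransform); // and its texture transform
                frameAvailable = false;
            }
        }
        // ... draw using the GL_TEXTURE_EXTERNAL_OES texture ...
    }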

1. Vertex shader

attribute vec4 vPosition;
attribute vec4 vTexCoordinate;
uniform mat4 textureTransform;
uniform mat4 uProjMatrix;
uniform mat4 uProjMatrix0;
uniform int xyFlag; // mirror mode selector (0: none, 1: X mirror, 2: Y mirror)

// passed through to the fragment shader
varying vec2 v_TexCoordinate;
varying vec4 gPosition;
varying vec2 varyPostion;

void main () {
    v_TexCoordinate = (textureTransform * vTexCoordinate).xy;
    //gl_Position = vPosition;
    if (xyFlag == 0) {
        gl_Position = vPosition;
    } else if (xyFlag == 1) {
        gl_Position = uProjMatrix * vPosition; // left-multiply the position by the transform matrix
    } else if (xyFlag == 2) {
        gl_Position = uProjMatrix0 * vPosition;
    }
    gPosition = gl_Position;
    varyPostion = vPosition.xy;
}
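The three branches are driven from the Java side: xyFlag selects the branch, and uProjMatrix/uProjMatrix0 carry the X- and Y-flip matrices built with Matrix.scaleM in section 4. For reference, a sketch of the host-side calls, using the same names as drawTexture() below:

    int mXyFlag = GLES20.glGetUniformLocation(shaderProgram, "xyFlag");
    GLES20.glUniform1i(mXyFlag, 1); // 0 = no mirroring, 1 = X mirror, 2 = Y mirror
    int mHProjMatrix = GLES20.glGetUniformLocation(shaderProgram, "uProjMatrix");
    GLES20.glUniformMatrix4fv(mHProjMatrix, 1, false, matrix, 0); // matrix = identity flipped by Matrix.scaleM(m, 0, -1, 1, 1)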
2. Fragment shader

#extension GL_OES_EGL_image_external : require // external image texture extension
precision mediump float;
uniform samplerExternalOES texture; // external (camera) texture sampler
uniform sampler2D texture0; // overlay-image texture sampler
uniform int colorFlag; // filter type
uniform float mratio; // texture blend factor
const highp float mWidth = 640.0;
const highp float mHeight = 480.0;
const highp vec3 W = vec3(0.299, 0.587, 0.114);
const mediump vec3 luminanceWeighting = vec3(0.2125, 0.7154, 0.0721); // the three luminance weights must sum to 1; each is one channel's contribution, and giving green (the middle value) about 70% of the weight gives a better result
const lowp float saturation = 0.5;
const highp float radius = 1.41;
const highp vec2 center = vec2(0.5, 0.5);
const highp float refractiveIndex = 0.5;
// rectangular blend region
const vec2 leftBottom = vec2(-1.0, 0.40);
const vec2 rightTop = vec2(-0.40, 1.0);
// blur sample coordinate array
vec2 blurCoordinates[24];

// parameters passed in from the vertex shader
varying vec4 gPosition;
varying vec2 v_TexCoordinate;
varying vec2 varyPostion;

// hard-light blend for a single channel
float hardLight(float color)
{
    if (color <= 0.5)
        color = color * color * 2.0;
    else
        color = 1.0 - ((1.0 - color) * (1.0 - color) * 2.0);
    return color;
}

// clamp each channel to [0.0, 1.0]; the parameter must be inout — GLSL
// parameters default to in (pass-by-value), so without inout the clamped
// result would never reach the caller
void modifyColor(inout vec4 color){
    color.r = max(min(color.r, 1.0), 0.0);
    color.g = max(min(color.g, 1.0), 0.0);
    color.b = max(min(color.b, 1.0), 0.0);
    color.a = max(min(color.a, 1.0), 0.0);
}

void main(){
    if (colorFlag == 0) {
        // blend inside the rectangular region
        if (varyPostion.x >= leftBottom.x && varyPostion.x <= rightTop.x
                && varyPostion.y >= leftBottom.y && varyPostion.y <= rightTop.y) {
            if (mratio < 0.0000001) { // blend factor ~0: warm-tint effect
                vec4 color = texture2D(texture, v_TexCoordinate);
                vec4 deltaColor = color + vec4(0.1, 0.1, 0.0, 0.0); // warm shift
                modifyColor(deltaColor);
                gl_FragColor = deltaColor;
            } else if (mratio > 0.99) { // blend factor ~1: magnifier effect
                gl_FragColor = texture2D(texture, vec2(v_TexCoordinate.x/2.0 + 0.25, v_TexCoordinate.y/2.0 + 0.25)); //nColor;
            } else { // otherwise blend the overlay texture with the camera texture
                vec2 tex0 = vec2((varyPostion.x - leftBottom.x)/(rightTop.x - leftBottom.x),
                        1.0 - (varyPostion.y - leftBottom.y)/(rightTop.y - leftBottom.y));
                vec4 color = texture2D(texture0, tex0);
                gl_FragColor = color*mratio + texture2D(texture, v_TexCoordinate)*(1.0 - mratio); //1.0-v_TexCoordinate
                //gl_FragColor = texture2D(texture, 1.0-v_TexCoordinate);
            }
        } else {
            //vec4 color1 = texture2D(texture, v_TexCoordinate);
            //vec4 color2 = texture2D(texture0, v_TexCoordinate); //vec2(v_TexCoordinate.s/10, v_TexCoordinate.t/10));
            //gl_FragColor = mix(color1, color2, mratio);
            gl_FragColor = texture2D(texture, v_TexCoordinate);
        }
    }
    else if (colorFlag == 7) { // saturation adjustment
        vec4 textureColor = texture2D(texture, v_TexCoordinate);
        float luminance = dot(textureColor.rgb, luminanceWeighting); // dot product of the texture color with the luminance weights: the three weighted channels sum to this pixel's overall luminance
        vec3 greyScaleColor = vec3(luminance);
        gl_FragColor = vec4(mix(greyScaleColor, textureColor.rgb, saturation), textureColor.w); // mix() combines the computed grayscale value and the original texture color according to the saturation factor
    }
    else if (colorFlag == 8) { // glass-sphere refraction
        float aspectRatio = mWidth/mHeight;
        vec2 textureCoordinateToUse = vec2(v_TexCoordinate.x, (v_TexCoordinate.y * aspectRatio + 0.5 - 0.5 * aspectRatio));
        // aspect-ratio correction, since normalized coordinates treat the screen as one unit wide and one unit tall
        float distanceFromCenter = distance(center, textureCoordinateToUse);
        // how far this pixel is from the sphere center, using GLSL's built-in distance() (Pythagorean distance between the center and the aspect-corrected coordinate)
        float checkForPresenceWithinSphere = step(distanceFromCenter, radius); // 1.0 if the fragment lies within the sphere, else 0.0
        distanceFromCenter = distanceFromCenter / radius; // normalize the distance to the sphere center
        float normalizedDepth = radius * sqrt(1.0 - distanceFromCenter * distanceFromCenter); // to simulate a glass sphere, compute the sphere's "depth" at this point
        vec3 sphereNormal = normalize(vec3(textureCoordinateToUse - center, normalizedDepth)); // normalized sphere normal
        vec3 refractedVector = refract(vec3(0.0, 0.0, -1.0), sphereNormal, refractiveIndex);
        // GLSL's refract() takes the sphere normal and the refractive index and computes how the image appears where light passes through the sphere

        gl_FragColor = texture2D(texture, (refractedVector.xy + 1.0) * 0.5) * checkForPresenceWithinSphere; // sample with the refracted coordinate, masked to the sphere
    }
    else if (colorFlag == 1) { // use the luminance as the RGB output: a black-and-white filter
        vec4 color = texture2D(texture, v_TexCoordinate);
        float fGrayColor = (0.3*color.r + 0.59*color.g + 0.11*color.b); // grayscale value
        gl_FragColor = vec4(fGrayColor, fGrayColor, fGrayColor, 1.0);
    }
    else if (colorFlag == 2) { // warm/cool tone
        vec4 color = texture2D(texture, v_TexCoordinate);
        vec4 deltaColor = color + vec4(0.1, 0.1, 0.0, 0.0); // warm shift
        modifyColor(deltaColor);
        gl_FragColor = deltaColor;
    }
    else if (colorFlag == 3) { // raise/lower brightness, tint shifts, etc.
        vec4 color = texture2D(texture, v_TexCoordinate);
        vec4 deltaColor = color + vec4(0.0, 0.0, 0.1, 0.0); //vec4(0.006, 0.004, 0.002, 0.0); // blue (cool) shift
        modifyColor(deltaColor);
        gl_FragColor = deltaColor;
    }
    else if (colorFlag == 4) { // magnifier effect
        vec4 nColor = texture2D(texture, v_TexCoordinate);
        float uXY = mWidth/mHeight;
        vec2 vChange = vec2(0.0, 0.0);
        float dis = distance(vec2(gPosition.x, gPosition.y/uXY), vChange);
        if (dis < 0.5) { // circular magnified region
            nColor = texture2D(texture, vec2(v_TexCoordinate.x/2.0 + 0.25, v_TexCoordinate.y/2.0 + 0.25));
        }
        gl_FragColor = nColor;
    }
    else if (colorFlag == 5) { // simple blur, similar in spirit to Gaussian/radial blur
        vec4 nColor = texture2D(texture, v_TexCoordinate);
        vec3 vChangeColor = vec3(0.025, 0.025, 0.025); // sample offsets

        // average the surrounding texels (with the r, g and b offsets all equal to 0.025,
        // the 12 samples below collapse onto 4 distinct points sampled three times each)
        nColor += texture2D(texture, vec2(v_TexCoordinate.x - vChangeColor.r, v_TexCoordinate.y - vChangeColor.r));
        nColor += texture2D(texture, vec2(v_TexCoordinate.x - vChangeColor.r, v_TexCoordinate.y + vChangeColor.r));
        nColor += texture2D(texture, vec2(v_TexCoordinate.x + vChangeColor.r, v_TexCoordinate.y - vChangeColor.r));
        nColor += texture2D(texture, vec2(v_TexCoordinate.x + vChangeColor.r, v_TexCoordinate.y + vChangeColor.r));
        nColor += texture2D(texture, vec2(v_TexCoordinate.x - vChangeColor.g, v_TexCoordinate.y - vChangeColor.g));
        nColor += texture2D(texture, vec2(v_TexCoordinate.x - vChangeColor.g, v_TexCoordinate.y + vChangeColor.g));
        nColor += texture2D(texture, vec2(v_TexCoordinate.x + vChangeColor.g, v_TexCoordinate.y - vChangeColor.g));
        nColor += texture2D(texture, vec2(v_TexCoordinate.x + vChangeColor.g, v_TexCoordinate.y + vChangeColor.g));
        nColor += texture2D(texture, vec2(v_TexCoordinate.x - vChangeColor.b, v_TexCoordinate.y - vChangeColor.b));
        nColor += texture2D(texture, vec2(v_TexCoordinate.x - vChangeColor.b, v_TexCoordinate.y + vChangeColor.b));
        nColor += texture2D(texture, vec2(v_TexCoordinate.x + vChangeColor.b, v_TexCoordinate.y - vChangeColor.b));
        nColor += texture2D(texture, vec2(v_TexCoordinate.x + vChangeColor.b, v_TexCoordinate.y + vChangeColor.b));
        nColor /= 13.0; // 12 neighbour samples plus the center
        gl_FragColor = nColor;
    }
    else if (colorFlag == 6) // beauty (skin-smoothing) filter
    {
        float mul_x = 2.0 / mWidth;
        float mul_y = 2.0 / mHeight;
        float pParams = 0.0;
        vec2 pStepOffset = vec2(mul_x, mul_y);
        vec3 centralColor = texture2D(texture, v_TexCoordinate).rgb;

        // 24 sample offsets around the current texel
        blurCoordinates[0] = v_TexCoordinate.xy + pStepOffset * vec2(0.0, -10.0);
        blurCoordinates[1] = v_TexCoordinate.xy + pStepOffset * vec2(0.0, 10.0);
        blurCoordinates[2] = v_TexCoordinate.xy + pStepOffset * vec2(-10.0, 0.0);
        blurCoordinates[3] = v_TexCoordinate.xy + pStepOffset * vec2(10.0, 0.0);
        blurCoordinates[4] = v_TexCoordinate.xy + pStepOffset * vec2(5.0, -8.0);
        blurCoordinates[5] = v_TexCoordinate.xy + pStepOffset * vec2(5.0, 8.0);
        blurCoordinates[6] = v_TexCoordinate.xy + pStepOffset * vec2(-5.0, 8.0);
        blurCoordinates[7] = v_TexCoordinate.xy + pStepOffset * vec2(-5.0, -8.0);
        blurCoordinates[8] = v_TexCoordinate.xy + pStepOffset * vec2(8.0, -5.0);
        blurCoordinates[9] = v_TexCoordinate.xy + pStepOffset * vec2(8.0, 5.0);
        blurCoordinates[10] = v_TexCoordinate.xy + pStepOffset * vec2(-8.0, 5.0);
        blurCoordinates[11] = v_TexCoordinate.xy + pStepOffset * vec2(-8.0, -5.0);
        blurCoordinates[12] = v_TexCoordinate.xy + pStepOffset * vec2(0.0, -6.0);
        blurCoordinates[13] = v_TexCoordinate.xy + pStepOffset * vec2(0.0, 6.0);
        blurCoordinates[14] = v_TexCoordinate.xy + pStepOffset * vec2(6.0, 0.0);
        blurCoordinates[15] = v_TexCoordinate.xy + pStepOffset * vec2(-6.0, 0.0);
        blurCoordinates[16] = v_TexCoordinate.xy + pStepOffset * vec2(-4.0, -4.0);
        blurCoordinates[17] = v_TexCoordinate.xy + pStepOffset * vec2(-4.0, 4.0);
        blurCoordinates[18] = v_TexCoordinate.xy + pStepOffset * vec2(4.0, -4.0);
        blurCoordinates[19] = v_TexCoordinate.xy + pStepOffset * vec2(4.0, 4.0);
        blurCoordinates[20] = v_TexCoordinate.xy + pStepOffset * vec2(-2.0, -2.0);
        blurCoordinates[21] = v_TexCoordinate.xy + pStepOffset * vec2(-2.0, 2.0);
        blurCoordinates[22] = v_TexCoordinate.xy + pStepOffset * vec2(2.0, -2.0);
        blurCoordinates[23] = v_TexCoordinate.xy + pStepOffset * vec2(2.0, 2.0);

        // weighted green-channel sum: center weight 22, samples 0-11 weight 1, 12-19 weight 2, 20-23 weight 3
        float sampleColor = centralColor.g * 22.0;
        sampleColor += texture2D(texture, blurCoordinates[0]).g;
        sampleColor += texture2D(texture, blurCoordinates[1]).g;
        sampleColor += texture2D(texture, blurCoordinates[2]).g;
        sampleColor += texture2D(texture, blurCoordinates[3]).g;
        sampleColor += texture2D(texture, blurCoordinates[4]).g;
        sampleColor += texture2D(texture, blurCoordinates[5]).g;
        sampleColor += texture2D(texture, blurCoordinates[6]).g;
        sampleColor += texture2D(texture, blurCoordinates[7]).g;
        sampleColor += texture2D(texture, blurCoordinates[8]).g;
        sampleColor += texture2D(texture, blurCoordinates[9]).g;
        sampleColor += texture2D(texture, blurCoordinates[10]).g;
        sampleColor += texture2D(texture, blurCoordinates[11]).g;
        sampleColor += texture2D(texture, blurCoordinates[12]).g * 2.0;
        sampleColor += texture2D(texture, blurCoordinates[13]).g * 2.0;
        sampleColor += texture2D(texture, blurCoordinates[14]).g * 2.0;
        sampleColor += texture2D(texture, blurCoordinates[15]).g * 2.0;
        sampleColor += texture2D(texture, blurCoordinates[16]).g * 2.0;
        sampleColor += texture2D(texture, blurCoordinates[17]).g * 2.0;
        sampleColor += texture2D(texture, blurCoordinates[18]).g * 2.0;
        sampleColor += texture2D(texture, blurCoordinates[19]).g * 2.0;
        sampleColor += texture2D(texture, blurCoordinates[20]).g * 3.0;
        sampleColor += texture2D(texture, blurCoordinates[21]).g * 3.0;
        sampleColor += texture2D(texture, blurCoordinates[22]).g * 3.0;
        sampleColor += texture2D(texture, blurCoordinates[23]).g * 3.0;

        sampleColor = sampleColor / 62.0; // total weight: 22 + 12*1 + 8*2 + 4*3 = 62

        float highPass = centralColor.g - sampleColor + 0.5; // high-pass detail, re-centered around 0.5

        // sharpen the high-pass response with five hard-light passes
        for (int i = 0; i < 5; i++)
        {
            highPass = hardLight(highPass);
        }
        float luminance = dot(centralColor, W);

        float alpha = pow(luminance, pParams); // with pParams = 0.0 this is always 1.0

        vec3 smoothColor = centralColor + (centralColor - vec3(highPass)) * alpha * 0.1;

        gl_FragColor = vec4(mix(smoothColor.rgb, max(smoothColor, centralColor), alpha), 1.0);
    }
}
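A pitfall worth flagging: section 4 below queries uniform locations for mWidth/mHeight and feeds them with glUniform1i, but the shader above declares both as const, so glGetUniformLocation() returns -1 and those calls are silently ignored — the constants 640.0/480.0 are what actually take effect. If the dimensions should follow the real surface size, a sketch (assuming the two const declarations are changed to "uniform float mWidth;" and "uniform float mHeight;"):

    int widthHandle  = GLES20.glGetUniformLocation(shaderProgram, "mWidth");
    int heightHandle = GLES20.glGetUniformLocation(shaderProgram, "mHeight");
    GLES20.glUseProgram(shaderProgram);
    GLES20.glUniform1f(widthHandle, (float) width);   // float uniforms need glUniform1f, not glUniform1i
    GLES20.glUniform1f(heightHandle, (float) height);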
3. Shader utility class

public class HelpUtils
{
    private static final String TAG = "ShaderHelper";

    public static int compileShader(final int shaderType, final String shaderSource)
    {
        int shaderHandle = GLES20.glCreateShader(shaderType);
        if (shaderHandle != 0)
        {
            // Pass in the shader source.
            GLES20.glShaderSource(shaderHandle, shaderSource);
            // Compile the shader.
            GLES20.glCompileShader(shaderHandle);
            // Get the compilation status.
            final int[] compileStatus = new int[1];
            GLES20.glGetShaderiv(shaderHandle, GLES20.GL_COMPILE_STATUS, compileStatus, 0);
            // If the compilation failed, delete the shader.
            if (compileStatus[0] == 0)
            {
                Log.e(TAG, "Error compiling shader: " + GLES20.glGetShaderInfoLog(shaderHandle));
                GLES20.glDeleteShader(shaderHandle);
                shaderHandle = 0;
            }
        }
        if (shaderHandle == 0){
            throw new RuntimeException("Error creating shader.");
        }
        return shaderHandle;
    }

    public static int createAndLinkProgram(final int vertexShaderHandle, final int fragmentShaderHandle, final String[] attributes)
    {
        int programHandle = GLES20.glCreateProgram();
        if (programHandle != 0)
        {
            // Attach the vertex shader to the program.
            GLES20.glAttachShader(programHandle, vertexShaderHandle);
            // Attach the fragment shader to the program.
            GLES20.glAttachShader(programHandle, fragmentShaderHandle);
            // Bind attributes
            if (attributes != null){
                final int size = attributes.length;
                for (int i = 0; i < size; i++){
                    GLES20.glBindAttribLocation(programHandle, i, attributes[i]);
                }
            }
            // Link the two shaders together into a program.
            GLES20.glLinkProgram(programHandle);
            // Get the link status.
            final int[] linkStatus = new int[1];
            GLES20.glGetProgramiv(programHandle, GLES20.GL_LINK_STATUS, linkStatus, 0);

            // If the link failed, delete the program.
            if (linkStatus[0] == 0) {
                Log.e(TAG, "Error linking program: " + GLES20.glGetProgramInfoLog(programHandle));
                GLES20.glDeleteProgram(programHandle);
                programHandle = 0;
            }
        }
        if (programHandle == 0){
            throw new RuntimeException("Error creating program.");
        }
        return programHandle;
    }

    public static int loadTexture(final Context context, final int resourceId) {
        final int[] textureHandle = new int[1];
        GLES20.glGenTextures(1, textureHandle, 0);
        if (textureHandle[0] != 0) {
            final BitmapFactory.Options options = new BitmapFactory.Options();
            options.inScaled = false; // No pre-scaling

            // Read in the resource
            final Bitmap bitmap = BitmapFactory.decodeResource(
                    context.getResources(), resourceId, options);

            // Bind to the texture in OpenGL
            GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textureHandle[0]);
            // Set filtering
            GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D,
                    GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_NEAREST);
            GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D,
                    GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_NEAREST);

            // Load the bitmap into the bound texture.
            GLUtils.texImage2D(GLES20.GL_TEXTURE_2D, 0, bitmap, 0);
            // Recycle the bitmap, since its data has been loaded into OpenGL.
            bitmap.recycle();
        }

        if (textureHandle[0] == 0) {
            throw new RuntimeException("Error loading texture.");
        }
        return textureHandle[0];
    }

    public static String readTextFileFromRawResource(final Context context,
            final int resourceId)
    {
        final InputStream inputStream = context.getResources().openRawResource(
                resourceId);
        final InputStreamReader inputStreamReader = new InputStreamReader(
                inputStream);
        final BufferedReader bufferedReader = new BufferedReader(
                inputStreamReader);

        String nextLine;
        final StringBuilder body = new StringBuilder();

        try{
            while ((nextLine = bufferedReader.readLine()) != null){
                body.append(nextLine);
                body.append('\n');
            }
        }
        catch (IOException e){
            return null;
        }

        return body.toString();
    }

    // Load shader source from a file in the assets directory
    public static String loadFromAssetsFile(String fname, Resources r)
    {
        String result = null;
        try {
            InputStream in = r.getAssets().open(fname);
            int ch = 0;
            ByteArrayOutputStream baos = new ByteArrayOutputStream();
            while ((ch = in.read()) != -1) {
                baos.write(ch);
            }
            byte[] buff = baos.toByteArray();
            baos.close();
            in.close();
            result = new String(buff, "UTF-8");
            result = result.replaceAll("\\r\\n", "\n");
        }
        catch(Exception e){
            e.printStackTrace();
        }
        return result;
    }
}
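Typical usage of these helpers, matching what setupGraphics() does in section 4 (R.raw.vetext_sharder and R.raw.fragment_sharder are the demo's raw resources holding the two shader sources):

    String vertexSource   = HelpUtils.readTextFileFromRawResource(context, R.raw.vetext_sharder);
    String fragmentSource = HelpUtils.readTextFileFromRawResource(context, R.raw.fragment_sharder);
    int vertexShader   = HelpUtils.compileShader(GLES20.GL_VERTEX_SHADER, vertexSource);
    int fragmentShader = HelpUtils.compileShader(GLES20.GL_FRAGMENT_SHADER, fragmentSource);
    int program = HelpUtils.createAndLinkProgram(vertexShader, fragmentShader,
            new String[]{"texture", "vPosition", "vTexCoordinate", "textureTransform"});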
4. GLViewMediaActivity — the renderer/driver class, implementing GLSurfaceView.Renderer
A Surface is created from the SurfaceTexture videoTexture (which was built from textures[0]), so the whole camera-frame rendering pipeline is:
camera frame -> Surface -> videoTexture / videoTexture.updateTexImage() -> GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, textures[0]) ->
GLES20.glDrawElements()

public class GLViewMediaActivity extends Activity implements TextureView.SurfaceTextureListener, GLSurfaceView.Renderer, SurfaceTexture.OnFrameAvailableListener {
    public static final String videoPath = Environment.getExternalStorageDirectory() + "/live.mp4";
    public static final String TAG = "GLViewMediaActivity";
    private static float squareCoords[] = {
            -1.0f, 1.0f,  // top left
            -1.0f, -1.0f, // bottom left
            1.0f, -1.0f,  // bottom right
            1.0f, 1.0f    // top right
    };
    private static short drawOrder[] = {0, 1, 2, 0, 2, 3}; // vertex indices of the two triangles forming the full-screen quad
    private float textureCoords[] = {
            0.0f, 1.0f, 0.0f, 1.0f,
            0.0f, 0.0f, 0.0f, 1.0f,
            1.0f, 0.0f, 0.0f, 1.0f,
            1.0f, 1.0f, 0.0f, 1.0f
    };

    private int[] textures = new int[1];
    private int width, height;
    private int shaderProgram;
    private FloatBuffer vertexBuffer;
    private FloatBuffer textureBuffer;
    private ShortBuffer drawListBuffer;
    private float[] videoTextureTransform = new float[16];
    private SurfaceTexture videoTexture;
    private GLSurfaceView glView;

    private Context context;
    private RelativeLayout previewLayout = null;

    private boolean frameAvailable = false;
    int textureParamHandle;
    int textureCoordinateHandle;
    int positionHandle;
    int textureTranformHandle;

    public int mRatio;
    public float ratio = 0.5f;
    public int mColorFlag = 0;
    public int xyFlag = 0;
    TextureView mPreviewView;
    CameraCaptureSession mSession;
    CaptureRequest.Builder mPreviewBuilder;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_main_1);
        context = this;
        glView = new GLSurfaceView(this);

        previewLayout = (RelativeLayout) findViewById(R.id.previewLayout);
        //RelativeLayout.LayoutParams layoutParams = new RelativeLayout.LayoutParams(640, 480);
        previewLayout.addView(glView); //, layoutParams);
        mPreviewView = (TextureView) findViewById(R.id.id_textureview);
        mPreviewView.setSurfaceTextureListener(this);

        glView.setEGLContextClientVersion(2);
        glView.setRenderer(this);
        //glView.setRenderMode(GLSurfaceView.RENDERMODE_WHEN_DIRTY);

        SeekBar seekBar = (SeekBar) findViewById(R.id.id_seekBar);
        seekBar.setOnSeekBarChangeListener(new SeekBar.OnSeekBarChangeListener() {
            @Override
            public void onProgressChanged(SeekBar seekBar, int progress, boolean fromUser) {
                ratio = progress / 100.0f; // blend factor between the overlay texture and the external camera texture
            }

            @Override
            public void onStartTrackingTouch(SeekBar seekBar) {
            }

            @Override
            public void onStopTrackingTouch(SeekBar seekBar) {
            }
        });

        Button btn_color = (Button) findViewById(R.id.btn_color);
        Button btn_mirror = (Button) findViewById(R.id.btn_mirror);

        btn_color.setOnClickListener(new OnClickListener() {
            @Override
            public void onClick(View v) {
                // cycle through the filter types
                if (mColorFlag == 0) {
                    mColorFlag = 7;
                    Toast.makeText(GLViewMediaActivity.this, "Saturation adjust!", Toast.LENGTH_SHORT).show();
                } else if (mColorFlag == 7) {
                    mColorFlag = 1;
                    Toast.makeText(GLViewMediaActivity.this, "Gray Color!", Toast.LENGTH_SHORT).show();
                } else if (mColorFlag == 1) {
                    mColorFlag = 2;
                    Toast.makeText(GLViewMediaActivity.this, "Warm Color!", Toast.LENGTH_SHORT).show();
                } else if (mColorFlag == 2) {
                    mColorFlag = 3;
                    Toast.makeText(GLViewMediaActivity.this, "Cool Color!", Toast.LENGTH_SHORT).show();
                } else if (mColorFlag == 3) {
                    mColorFlag = 4;
                    Toast.makeText(GLViewMediaActivity.this, "Amplify!", Toast.LENGTH_SHORT).show();
                } else if (mColorFlag == 4) {
                    mColorFlag = 5;
                    Toast.makeText(GLViewMediaActivity.this, "Vague!", Toast.LENGTH_SHORT).show();
                } else if (mColorFlag == 5) {
                    mColorFlag = 6;
                    Toast.makeText(GLViewMediaActivity.this, "Beauty!", Toast.LENGTH_SHORT).show();
                } else if (mColorFlag == 6) {
                    mColorFlag = 0;
                    Toast.makeText(GLViewMediaActivity.this, "Original Color!", Toast.LENGTH_SHORT).show();
                }
            }
        });

        btn_mirror.setOnClickListener(new OnClickListener() {
            @Override
            public void onClick(View v) {
                // cycle through X mirror / Y mirror / normal
                if (xyFlag == 0) {
                    Toast.makeText(GLViewMediaActivity.this, "X Mirror!", Toast.LENGTH_SHORT).show();
                    xyFlag = 1;
                } else if (xyFlag == 1) {
                    xyFlag = 2;
                    Toast.makeText(GLViewMediaActivity.this, "Y Mirror!", Toast.LENGTH_SHORT).show();
                } else if (xyFlag == 2) {
                    xyFlag = 0;
                    Toast.makeText(GLViewMediaActivity.this, "Normal!", Toast.LENGTH_SHORT).show();
                }
            }
        });
    }

    @Override
    protected void onPause() {
        super.onPause();
    }

    @Override
    protected void onDestroy() {
        super.onDestroy();
    }

    @Override
    public void onSurfaceTextureAvailable(SurfaceTexture surface, int width, int height) {
        CameraManager cameraManager = (CameraManager) getSystemService(CAMERA_SERVICE);
        try {
            Log.i(TAG, "onSurfaceTextureAvailable: width = " + width + ", height = " + height);
            String[] CameraIdList = cameraManager.getCameraIdList();
            CameraCharacteristics characteristics = cameraManager.getCameraCharacteristics(CameraIdList[0]);
            characteristics.get(CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL);
            // Android 6.0+ runtime permission check
            if (ActivityCompat.checkSelfPermission(this, Manifest.permission.CAMERA) != PackageManager.PERMISSION_GRANTED) {
                return;
            }
            //startCodec();
            cameraManager.openCamera(CameraIdList[0], mCameraDeviceStateCallback, null);
        } catch (CameraAccessException e) {
            e.printStackTrace();
        }
    }

    @Override
    public void onSurfaceTextureSizeChanged(SurfaceTexture surface, int width, int height) {}

    @Override
    public boolean onSurfaceTextureDestroyed(SurfaceTexture surface) {
        return false;
    }

    @Override
    public void onSurfaceTextureUpdated(SurfaceTexture surface) {}

    CameraDevice mCameraDevice;
    private CameraDevice.StateCallback mCameraDeviceStateCallback = new CameraDevice.StateCallback() {
        @Override
        public void onOpened(CameraDevice camera) {
            Log.i(TAG, " CameraDevice.StateCallback onOpened ");
            try {
                mCameraDevice = camera;
                startPreview(camera);
            } catch (CameraAccessException e) {
                e.printStackTrace();
            }
        }

        @Override
        public void onDisconnected(CameraDevice camera) {
            if (null != mCameraDevice) {
                mCameraDevice.close();
                GLViewMediaActivity.this.mCameraDevice = null;
            }
        }

        @Override
        public void onError(CameraDevice camera, int error) {}
    };

    private void startPreview(CameraDevice camera) throws CameraAccessException {
        SurfaceTexture texture = mPreviewView.getSurfaceTexture();
        texture.setDefaultBufferSize(mPreviewView.getWidth(), mPreviewView.getHeight());
        Surface surface = new Surface(texture); // TextureView -> SurfaceTexture -> Surface

        Surface surface0 = new Surface(videoTexture); // a Surface created from the SurfaceTexture; videoTexture was built from textures[0], so the whole camera-frame rendering pipeline is: camera frame -> Surface -> videoTexture / videoTexture.updateTexImage() -> GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, textures[0])

        Log.i(TAG, " startPreview ");
        try {
            mPreviewBuilder = camera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW); //CameraDevice.TEMPLATE_STILL_CAPTURE
        } catch (CameraAccessException e) {
            e.printStackTrace();
        }
        // add the preview output Surfaces: camera frame -> Surface
        mPreviewBuilder.addTarget(surface);
        mPreviewBuilder.addTarget(surface0);
        camera.createCaptureSession(Arrays.asList(surface, surface0), mSessionStateCallback, null);
    }

    // 1. CameraCaptureSession.StateCallback
    private CameraCaptureSession.StateCallback mSessionStateCallback = new CameraCaptureSession.StateCallback() {
        @Override
        public void onConfigured(CameraCaptureSession session) {
            try {
                Log.i(TAG, " onConfigured ");
                //session.capture(mPreviewBuilder.build(), mSessionCaptureCallback, mHandler);
                mSession = session;
                // continuous auto-focus
                mPreviewBuilder.set(CaptureRequest.CONTROL_AF_MODE, CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_PICTURE);
                // auto-exposure with automatic flash
                mPreviewBuilder.set(CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_ON_AUTO_FLASH);
                //int rotation = getWindowManager().getDefaultDisplay().getRotation();
                //mPreviewBuilder.set(CaptureRequest.JPEG_ORIENTATION, ORIENTATIONS.get(rotation));
                session.setRepeatingRequest(mPreviewBuilder.build(), null, null); //null
            } catch (CameraAccessException e) {
                e.printStackTrace();
            }
        }

        @Override
        public void onConfigureFailed(CameraCaptureSession session) {}
    };

    int callback_time;
    // 2. CameraCaptureSession.CaptureCallback
    private CameraCaptureSession.CaptureCallback mSessionCaptureCallback = new CameraCaptureSession.CaptureCallback() {
        @Override
        public void onCaptureCompleted(CameraCaptureSession session, CaptureRequest request, TotalCaptureResult result) {
            //Toast.makeText(GLViewMediaActivity.this, "picture success!", Toast.LENGTH_SHORT).show();
            callback_time++;
            Log.i(TAG, " CaptureCallback = " + callback_time);
        }

        @Override
        public void onCaptureProgressed(CameraCaptureSession session, CaptureRequest request, CaptureResult partialResult) {
            Toast.makeText(GLViewMediaActivity.this, "picture failed!", Toast.LENGTH_SHORT).show();
        }
    };

    public int initTexture(int drawableId)
    {
        // generate a texture id
        int[] textures = new int[1];
        GLES20.glGenTextures
        (
                1,        // number of texture ids to generate
                textures, // array that receives the ids
                0         // offset into the array
        );
        int textureId = textures[0];
        Log.i(TAG, " initTexture textureId = " + textureId);

        GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textureId);
        GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_NEAREST);
        GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);
        GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE);
        GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE);

        // load the image
        InputStream is = this.getResources().openRawResource(drawableId);
        Bitmap bitmapTmp;
        try {
            bitmapTmp = BitmapFactory.decodeStream(is);
        } finally {
            try {
                is.close();
            }
            catch (IOException e) {
                e.printStackTrace();
            }
        }
        // upload the bitmap into the texture
        GLUtils.texImage2D
        (
                GLES20.GL_TEXTURE_2D, // texture target; GL_TEXTURE_2D here
                0,                    // mipmap level; 0 is the base image (other levels are used for mipmapping)
                bitmapTmp,            // the texture image
                0                     // border size
        );
        bitmapTmp.recycle(); // the pixel data has been uploaded to OpenGL, so the bitmap can be recycled
        return textureId;
    }

    int textureIdOne;
    int textureHandle;

    @Override
    public void onSurfaceCreated(GL10 gl, EGLConfig config) {
        setupGraphics();
        setupVertexBuffer();
        setupTexture();
        textureIdOne = initTexture(R.drawable.bg); // create the overlay-image texture
    }

    @Override
    public void onSurfaceChanged(GL10 gl, int width, int height) {
        this.width = width;
        this.height = height;
        GLES20.glViewport(0, 0, width, height);
        setSize(width, height); // set up the model/view/projection matrices from the new width and height
    }

    @Override
    public void onDrawFrame(GL10 gl) {
        synchronized (this) {
            if (frameAvailable) {
                videoTexture.updateTexImage(); // update the SurfaceTexture image; only after this can the bound GLES11Ext.GL_TEXTURE_EXTERNAL_OES texture render the new frame
                videoTexture.getTransformMatrix(videoTextureTransform); // fetch the SurfaceTexture's texture transform matrix
                frameAvailable = false;
            }
        }
        GLES20.glClearColor(0.0f, 0.0f, 0.0f, 0.0f); // set the clear color
        GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT | GLES20.GL_DEPTH_BUFFER_BIT);
        // GL_COLOR_BUFFER_BIT clears the color buffer
        // GL_DEPTH_BUFFER_BIT clears the depth buffer (every pixel's depth is reset to the maximum, normally the far clipping plane)
        drawTexture();
    }

    @Override
    public void onFrameAvailable(SurfaceTexture surfaceTexture) {
        synchronized (this) {
            frameAvailable = true;
        }
    }

    private float[] mViewMatrix = new float[16];
    private float[] mProjectMatrix = new float[16];
    private float[] mModelMatrix = new float[16];
    private float[] mModelMatrix0 = new float[16];
    private float[] matrix = new float[16];
    private float[] matrix0 = new float[16];
    private int gHWidth;
    private int gHHeight;

    public void setSize(int width, int height) {
        float ratio = (float) width / height;
        // projection matrix -- the view frustum
        Matrix.frustumM(mProjectMatrix, 0, -ratio, ratio, -1, 1, 1, 3);
        // view matrix -- camera position / look-at target / up vector
        Matrix.setLookAtM(mViewMatrix, 0,
                0.0f, 0.0f, 1.0f,  // eye
                0.0f, 0.0f, 0.0f,  // center
                0f, 1.0f, 0.0f);   // up
        // model matrix -- the object's own position and orientation
        Matrix.setIdentityM(mModelMatrix, 0);
        Matrix.setIdentityM(mModelMatrix0, 0);
        //Matrix.scaleM(mModelMatrix, 0, 2, 2, 2);

        Matrix.multiplyMM(matrix, 0, mProjectMatrix, 0, mViewMatrix, 0); // matrix multiplication
        Matrix.multiplyMM(matrix0, 0, mProjectMatrix, 0, mViewMatrix, 0);

        // note: the two products above are immediately overwritten, so uProjMatrix/uProjMatrix0
        // end up holding pure mirror matrices (flipped identity model matrices)
        matrix = flip(mModelMatrix, true, false);
        matrix0 = flip(mModelMatrix0, false, true);
    }

    public static float[] rotate(float[] m, float angle) {
        Matrix.rotateM(m, 0, angle, 0, 0, 1);
        return m;
    }

    // mirror by scaling with -1 along the chosen axes
    public float[] flip(float[] m, boolean x, boolean y) {
        if (x || y) {
            Matrix.scaleM(m, 0, x ? -1 : 1, y ? -1 : 1, 1);
        }
        return m;
    }

    private void setupGraphics() {
        final String vertexShader = HelpUtils.readTextFileFromRawResource(context, R.raw.vetext_sharder);
        final String fragmentShader = HelpUtils.readTextFileFromRawResource(context, R.raw.fragment_sharder);

        final int vertexShaderHandle = HelpUtils.compileShader(GLES20.GL_VERTEX_SHADER, vertexShader);
        final int fragmentShaderHandle = HelpUtils.compileShader(GLES20.GL_FRAGMENT_SHADER, fragmentShader);
        shaderProgram = HelpUtils.createAndLinkProgram(vertexShaderHandle, fragmentShaderHandle,
                new String[]{"texture", "vPosition", "vTexCoordinate", "textureTransform"});

        GLES20.glUseProgram(shaderProgram);
        textureParamHandle = GLES20.glGetUniformLocation(shaderProgram, "texture"); // external camera texture sampler
        textureCoordinateHandle = GLES20.glGetAttribLocation(shaderProgram, "vTexCoordinate"); // vertex texture coordinates
        positionHandle = GLES20.glGetAttribLocation(shaderProgram, "vPosition"); // vertex positions
        textureTranformHandle = GLES20.glGetUniformLocation(shaderProgram, "textureTransform");

        textureHandle = GLES20.glGetUniformLocation(shaderProgram, "texture0"); // handle of the overlay-image texture sampler
        mRatio = GLES20.glGetUniformLocation(shaderProgram, "mratio"); // blend factor
        gHWidth = GLES20.glGetUniformLocation(shaderProgram, "mWidth"); // viewport width/height
        gHHeight = GLES20.glGetUniformLocation(shaderProgram, "mHeight");
        // note: the fragment shader declares mWidth/mHeight as consts, not uniforms, so these
        // locations are -1 and the two calls below are silently ignored (width/height are also
        // still 0 here, since onSurfaceChanged has not run yet) -- see the note under section 2

        GLES20.glUniform1i(gHWidth, width);
        GLES20.glUniform1i(gHHeight, height);
    }

    private void setupVertexBuffer() {
        // draw-order (index) buffer
        ByteBuffer dlb = ByteBuffer.allocateDirect(drawOrder.length * 2);
        dlb.order(ByteOrder.nativeOrder()); // use the native byte order
        drawListBuffer = dlb.asShortBuffer();
        drawListBuffer.put(drawOrder);
        drawListBuffer.position(0);

        // vertex positions
        ByteBuffer bb = ByteBuffer.allocateDirect(squareCoords.length * 4);
        bb.order(ByteOrder.nativeOrder()); // use the native byte order
        vertexBuffer = bb.asFloatBuffer();
        vertexBuffer.put(squareCoords);
        vertexBuffer.position(0);

        // texture coordinates
        ByteBuffer texturebb = ByteBuffer.allocateDirect(textureCoords.length * 4);
        texturebb.order(ByteOrder.nativeOrder()); // use the native byte order
        textureBuffer = texturebb.asFloatBuffer();
        textureBuffer.put(textureCoords);
        textureBuffer.position(0);
    }

    private void setupTexture() {
        // generate the camera texture
        GLES20.glActiveTexture(GLES20.GL_TEXTURE0); // activate (enable) the texture unit
        GLES20.glGenTextures(1, textures, 0); // generate a texture id
        checkGlError("Texture generate");

        GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, textures[0]);
        // bind the texture id to a target of the active texture unit; possible targets include GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_CUBE_MAP, GLES11Ext.GL_TEXTURE_EXTERNAL_OES, etc.
        checkGlError("Texture bind");

        videoTexture = new SurfaceTexture(textures[0]); // create the SurfaceTexture from the texture id
        videoTexture.setOnFrameAvailableListener(this);
    }

    private void drawTexture() {
        int mHProjMatrix = GLES20.glGetUniformLocation(shaderProgram, "uProjMatrix");
        GLES20.glUniformMatrix4fv(mHProjMatrix, 1, false, matrix, 0);

        int mHProjMatrix0 = GLES20.glGetUniformLocation(shaderProgram, "uProjMatrix0");
        GLES20.glUniformMatrix4fv(mHProjMatrix0, 1, false, matrix0, 0);

        int mXyFlag = GLES20.glGetUniformLocation(shaderProgram, "xyFlag"); // mirror mode (X or Y mirror), applied by left-multiplying the vertex position with the corresponding transform matrix, e.g. uProjMatrix*vPosition
        GLES20.glUniform1i(mXyFlag, xyFlag);

        int mColorFlagHandle = GLES20.glGetUniformLocation(shaderProgram, "colorFlag"); // filter type: saturation / grayscale / warm-cool / magnifier / blur / beauty / texture blending
        GLES20.glUniform1i(mColorFlagHandle, mColorFlag);

        // vertex attributes generally include position, color, normal, and texture coordinates
        GLES20.glEnableVertexAttribArray(positionHandle); // enable the vertex position attribute array
        GLES20.glVertexAttribPointer(positionHandle, 2, GLES20.GL_FLOAT, false, 0, vertexBuffer); // bind the vertex position data

        GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
        GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, textures[0]); // the camera image texture
        GLES20.glUniform1i(textureParamHandle, 0);

        GLES20.glActiveTexture(GLES20.GL_TEXTURE1);
        GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textureIdOne); // the overlay image texture
        GLES20.glUniform1i(textureHandle, 1);

        GLES20.glEnableVertexAttribArray(textureCoordinateHandle);
        GLES20.glVertexAttribPointer(textureCoordinateHandle, 4, GLES20.GL_FLOAT, false, 0, textureBuffer);

        GLES20.glUniformMatrix4fv(textureTranformHandle, 1, false, videoTextureTransform, 0); // the GL_TEXTURE_EXTERNAL_OES texture's transform matrix
        GLES20.glUniform1f(mRatio, ratio); // texture blend factor

        GLES20.glDrawElements(GLES20.GL_TRIANGLE_STRIP, drawOrder.length, GLES20.GL_UNSIGNED_SHORT, drawListBuffer); // draw the quad using the vertex index buffer
        GLES20.glDisableVertexAttribArray(positionHandle);
        GLES20.glDisableVertexAttribArray(textureCoordinateHandle);
    }

    public void checkGlError(String op) {
        int error;
        while ((error = GLES20.glGetError()) != GLES20.GL_NO_ERROR) {
            Log.e("SurfaceTest", op + ": glError " + GLUtils.getEGLErrorString(error));
        }
    }

}
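One thing to note: onSurfaceTextureAvailable() above simply returns when CAMERA permission has not been granted, so on Android 6.0+ the preview never starts until something requests the permission. A minimal sketch of doing that in onCreate() (not part of the original demo; the request code 1 is arbitrary, and the camera open should be re-triggered from onRequestPermissionsResult()):

    if (ActivityCompat.checkSelfPermission(this, Manifest.permission.CAMERA)
            != PackageManager.PERMISSION_GRANTED) {
        ActivityCompat.requestPermissions(this, new String[]{Manifest.permission.CAMERA}, 1);
    }

Also, the commented-out glView.setRenderMode(GLSurfaceView.RENDERMODE_WHEN_DIRTY) line in onCreate() hints at a worthwhile optimization: with the dirty render mode plus a glView.requestRender() call in onFrameAvailable(), the quad is redrawn only when the camera actually delivers a new frame rather than continuously.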
Demo effects (screenshots are in the original post):
texture blending
magnifier
blur
warm tone
cool tone
beauty
mirror

---------------------
Author: keen_zuxwang
Source: CSDN
Original: https://blog.csdn.net/keen_zuxwang/article/details/78363464
Copyright notice: this is the author's original article; please include a link to the original post when reposting.
