WebGL Learning (12): Manipulating the Scene with the Mouse
<!DOCTYPE HTML>
<html lang="en">
<head>
<title>Listing 7-3 and 7-4, Texturing and Lighting With the Phong Reflection Model.</title>
<script src="./lib/webgl-debug.js"></script>
<script src="./lib/glMatrix.js"></script>
<script src="./lib/webgl-utils.js"></script>
<meta charset="utf-8">
<script id="shader-vs" type="x-shader/x-vertex">
// Vertex shader implemented to perform lighting according to the
// Phong reflection model. Forwards texture coordinates to fragment
// shader.
attribute vec3 aVertexPosition;
attribute vec3 aVertexNormal;
attribute vec2 aTextureCoordinates;

uniform mat4 uMVMatrix;
uniform mat4 uPMatrix;
uniform mat3 uNMatrix;

uniform vec3 uLightPosition;
uniform vec3 uAmbientLightColor;
uniform vec3 uDiffuseLightColor;
uniform vec3 uSpecularLightColor;

varying vec2 vTextureCoordinates;
varying vec3 vLightWeighting;

const float shininess = 32.0;

void main() {
// Get the vertex position in eye coordinates
vec4 vertexPositionEye4 = uMVMatrix * vec4(aVertexPosition, 1.0);
vec3 vertexPositionEye3 = vertexPositionEye4.xyz / vertexPositionEye4.w;

// Calculate the vector (l) to the light source
vec3 vectorToLightSource = normalize(uLightPosition - vertexPositionEye3);

// Transform the normal (n) to eye coordinates
vec3 normalEye = normalize(uNMatrix * aVertexNormal);

// Calculate n dot l for diffuse lighting
float diffuseLightWeighting = max(dot(normalEye, vectorToLightSource), 0.0);

// Calculate the reflection vector (r) that is needed for specular light
vec3 reflectionVector = normalize(reflect(-vectorToLightSource, normalEye));

// The camera in eye coordinates is located at the origin and is pointing
// along the negative z-axis. Calculate viewVector (v) in eye coordinates as:
// (0.0, 0.0, 0.0) - vertexPositionEye3
vec3 viewVectorEye = -normalize(vertexPositionEye3);
float rdotv = max(dot(reflectionVector, viewVectorEye), 0.0);
float specularLightWeighting = pow(rdotv, shininess);

// Sum up the three reflection components and send the result to the fragment shader
vLightWeighting = uAmbientLightColor +
uDiffuseLightColor * diffuseLightWeighting +
uSpecularLightColor * specularLightWeighting;

// Finally transform the geometry
gl_Position = uPMatrix * uMVMatrix * vec4(aVertexPosition, 1.0);
vTextureCoordinates = aTextureCoordinates;
}
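// Summary: this shader evaluates the Phong reflection model once per vertex
// (Gouraud-style shading). The weight interpolated to the fragment shader is
//   vLightWeighting = ambient
//                   + diffuse  * max(n . l, 0)
//                   + specular * max(r . v, 0)^shininess
// with n, l, r and v all expressed in eye coordinates.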
</script>

<!-- Fragment shader -->
<script id="shader-fs" type="x-shader/x-fragment">
precision mediump float;

varying vec2 vTextureCoordinates;
varying vec3 vLightWeighting;
uniform sampler2D uSampler;

void main() {
vec4 texelColor = texture2D(uSampler, vTextureCoordinates);

// Combine texturing and lighting: vLightWeighting already contains the
// ambient, diffuse and specular contributions computed in the vertex shader
gl_FragColor = vec4(vLightWeighting.rgb * texelColor.rgb, texelColor.a);
}
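// Because the lighting weight is computed per vertex and then interpolated,
// specular highlights can look coarse on large triangles such as the floor;
// this is an inherent property of the per-vertex (Gouraud) approach used here.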
</script>

<script type="text/javascript">
// globals
var gl;
var pwgl = {};
// Keep track of ongoing image loads to be able to handle lost context
pwgl.ongoingImageLoads = [];
var canvas;

function createGLContext(canvas) {
var names = ["webgl", "experimental-webgl"];
var context = null;
for (var i=0; i < names.length; i++) {
try {
context = canvas.getContext(names[i]);
} catch(e) {}
if (context) {
break;
}
}
if (context) {
context.viewportWidth = canvas.width;
context.viewportHeight = canvas.height;
} else {
alert("Failed to create WebGL context!");
}
return context;
}

function loadShaderFromDOM(id) {
var shaderScript = document.getElementById(id);

// If we don't find an element with the specified id, we do an early exit
if (!shaderScript) {
return null;
}

// Loop through the children for the found DOM element and
// build up the shader source code as a string
var shaderSource = "";
var currentChild = shaderScript.firstChild;
while (currentChild) {
if (currentChild.nodeType == 3) { // 3 corresponds to TEXT_NODE
shaderSource += currentChild.textContent;
}
currentChild = currentChild.nextSibling;
}

var shader;
if (shaderScript.type == "x-shader/x-fragment") {
shader = gl.createShader(gl.FRAGMENT_SHADER);
} else if (shaderScript.type == "x-shader/x-vertex") {
shader = gl.createShader(gl.VERTEX_SHADER);
} else {
return null;
}

gl.shaderSource(shader, shaderSource);
gl.compileShader(shader);

if (!gl.getShaderParameter(shader, gl.COMPILE_STATUS) &&
!gl.isContextLost()) {
alert(gl.getShaderInfoLog(shader));
return null;
}
return shader;
}

function setupShaders() {
var vertexShader = loadShaderFromDOM("shader-vs");
var fragmentShader = loadShaderFromDOM("shader-fs");

var shaderProgram = gl.createProgram();
gl.attachShader(shaderProgram, vertexShader);
gl.attachShader(shaderProgram, fragmentShader);
gl.linkProgram(shaderProgram);

if (!gl.getProgramParameter(shaderProgram, gl.LINK_STATUS) &&
!gl.isContextLost()) {
alert("Failed to link shaders: " + gl.getProgramInfoLog(shaderProgram));
}

gl.useProgram(shaderProgram);

pwgl.vertexPositionAttributeLoc = gl.getAttribLocation(shaderProgram, "aVertexPosition");
pwgl.vertexNormalAttributeLoc = gl.getAttribLocation(shaderProgram, "aVertexNormal");
pwgl.vertexTextureAttributeLoc = gl.getAttribLocation(shaderProgram, "aTextureCoordinates");

pwgl.uniformMVMatrixLoc = gl.getUniformLocation(shaderProgram, "uMVMatrix");
pwgl.uniformProjMatrixLoc = gl.getUniformLocation(shaderProgram, "uPMatrix");
pwgl.uniformNormalMatrixLoc = gl.getUniformLocation(shaderProgram, "uNMatrix");
pwgl.uniformSamplerLoc = gl.getUniformLocation(shaderProgram, "uSampler");
pwgl.uniformLightPositionLoc = gl.getUniformLocation(shaderProgram, "uLightPosition");
pwgl.uniformAmbientLightColorLoc = gl.getUniformLocation(shaderProgram, "uAmbientLightColor");
pwgl.uniformDiffuseLightColorLoc = gl.getUniformLocation(shaderProgram, "uDiffuseLightColor");
pwgl.uniformSpecularLightColorLoc = gl.getUniformLocation(shaderProgram, "uSpecularLightColor");

gl.enableVertexAttribArray(pwgl.vertexPositionAttributeLoc);
gl.enableVertexAttribArray(pwgl.vertexNormalAttributeLoc);
gl.enableVertexAttribArray(pwgl.vertexTextureAttributeLoc);

pwgl.modelViewMatrix = mat4.create();
pwgl.projectionMatrix = mat4.create();
pwgl.modelViewMatrixStack = [];
}

function pushModelViewMatrix() {
var copyToPush = mat4.create(pwgl.modelViewMatrix);
pwgl.modelViewMatrixStack.push(copyToPush);
}

function popModelViewMatrix() {
if (pwgl.modelViewMatrixStack.length == 0) {
throw "Error popModelViewMatrix() - Stack was empty ";
}
pwgl.modelViewMatrix = pwgl.modelViewMatrixStack.pop();
}

function setupFloorBuffers() {
pwgl.floorVertexPositionBuffer = gl.createBuffer();
gl.bindBuffer(gl.ARRAY_BUFFER, pwgl.floorVertexPositionBuffer);

var floorVertexPosition = [
// Plane in y=0
5.0, 0.0, 5.0, //v0
5.0, 0.0, -5.0, //v1
-5.0, 0.0, -5.0, //v2
-5.0, 0.0, 5.0]; //v3

gl.bufferData(gl.ARRAY_BUFFER, new Float32Array(floorVertexPosition),
gl.STATIC_DRAW);

pwgl.FLOOR_VERTEX_POS_BUF_ITEM_SIZE = 3;
pwgl.FLOOR_VERTEX_POS_BUF_NUM_ITEMS = 4;

// Specify the floor normals to be able to do lighting calculations
pwgl.floorVertexNormalBuffer = gl.createBuffer();
gl.bindBuffer(gl.ARRAY_BUFFER, pwgl.floorVertexNormalBuffer);

var floorVertexNormals = [
0.0, 1.0, 0.0, //v0
0.0, 1.0, 0.0, //v1
0.0, 1.0, 0.0, //v2
0.0, 1.0, 0.0]; //v3

gl.bufferData(gl.ARRAY_BUFFER, new Float32Array(floorVertexNormals),
gl.STATIC_DRAW);

pwgl.FLOOR_VERTEX_NORMAL_BUF_ITEM_SIZE = 3;
pwgl.FLOOR_VERTEX_NORMAL_BUF_NUM_ITEMS = 4;

// Setup texture coordinate buffer
pwgl.floorVertexTextureCoordinateBuffer = gl.createBuffer();
gl.bindBuffer(gl.ARRAY_BUFFER, pwgl.floorVertexTextureCoordinateBuffer);
var floorVertexTextureCoordinates = [
2.0, 0.0,
2.0, 2.0,
0.0, 2.0,
0.0, 0.0
];

gl.bufferData(gl.ARRAY_BUFFER, new Float32Array(floorVertexTextureCoordinates),
gl.STATIC_DRAW);

pwgl.FLOOR_VERTEX_TEX_COORD_BUF_ITEM_SIZE = 2;
pwgl.FLOOR_VERTEX_TEX_COORD_BUF_NUM_ITEMS = 4;

// Setup index buffer
pwgl.floorVertexIndexBuffer = gl.createBuffer();
gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, pwgl.floorVertexIndexBuffer);
var floorVertexIndices = [0, 1, 2, 3];

gl.bufferData(gl.ELEMENT_ARRAY_BUFFER, new Uint16Array(floorVertexIndices),
gl.STATIC_DRAW);

pwgl.FLOOR_VERTEX_INDEX_BUF_ITEM_SIZE = 1;
pwgl.FLOOR_VERTEX_INDEX_BUF_NUM_ITEMS = 4;
}

function setupCubeBuffers() {
pwgl.cubeVertexPositionBuffer = gl.createBuffer();
gl.bindBuffer(gl.ARRAY_BUFFER, pwgl.cubeVertexPositionBuffer);

var cubeVertexPosition = [
// Front face
1.0, 1.0, 1.0, //v0
-1.0, 1.0, 1.0, //v1
-1.0, -1.0, 1.0, //v2
1.0, -1.0, 1.0, //v3

// Back face
1.0, 1.0, -1.0, //v4
-1.0, 1.0, -1.0, //v5
-1.0, -1.0, -1.0, //v6
1.0, -1.0, -1.0, //v7

// Left face
-1.0, 1.0, 1.0, //v8
-1.0, 1.0, -1.0, //v9
-1.0, -1.0, -1.0, //v10
-1.0, -1.0, 1.0, //v11

// Right face
1.0, 1.0, 1.0, //12
1.0, -1.0, 1.0, //13
1.0, -1.0, -1.0, //14
1.0, 1.0, -1.0, //v15

// Top face
1.0, 1.0, 1.0, //v16
1.0, 1.0, -1.0, //v17
-1.0, 1.0, -1.0, //v18
-1.0, 1.0, 1.0, //v19

// Bottom face
1.0, -1.0, 1.0, //v20
1.0, -1.0, -1.0, //v21
-1.0, -1.0, -1.0, //v22
-1.0, -1.0, 1.0, //v23
];

gl.bufferData(gl.ARRAY_BUFFER, new Float32Array(cubeVertexPosition),
gl.STATIC_DRAW);

pwgl.CUBE_VERTEX_POS_BUF_ITEM_SIZE = 3;
pwgl.CUBE_VERTEX_POS_BUF_NUM_ITEMS = 24;

// Specify the normal of every cube face to be able to do lighting calculations
pwgl.cubeVertexNormalBuffer = gl.createBuffer();
gl.bindBuffer(gl.ARRAY_BUFFER, pwgl.cubeVertexNormalBuffer);
var cubeVertexNormals = [
// Front face
0.0, 0.0, 1.0, //v0
0.0, 0.0, 1.0, //v1
0.0, 0.0, 1.0, //v2
0.0, 0.0, 1.0, //v3

// Back face
0.0, 0.0, -1.0, //v4
0.0, 0.0, -1.0, //v5
0.0, 0.0, -1.0, //v6
0.0, 0.0, -1.0, //v7

// Left face
-1.0, 0.0, 0.0, //v8
-1.0, 0.0, 0.0, //v9
-1.0, 0.0, 0.0, //v10
-1.0, 0.0, 0.0, //v11

// Right face
1.0, 0.0, 0.0, //12
1.0, 0.0, 0.0, //13
1.0, 0.0, 0.0, //14
1.0, 0.0, 0.0, //v15

// Top face
0.0, 1.0, 0.0, //v16
0.0, 1.0, 0.0, //v17
0.0, 1.0, 0.0, //v18
0.0, 1.0, 0.0, //v19

// Bottom face
0.0, -1.0, 0.0, //v20
0.0, -1.0, 0.0, //v21
0.0, -1.0, 0.0, //v22
0.0, -1.0, 0.0, //v23
];

gl.bufferData(gl.ARRAY_BUFFER, new Float32Array(cubeVertexNormals),
gl.STATIC_DRAW);

pwgl.CUBE_VERTEX_NORMAL_BUF_ITEM_SIZE = 3;
pwgl.CUBE_VERTEX_NORMAL_BUF_NUM_ITEMS = 24;

// Setup buffer with texture coordinates
pwgl.cubeVertexTextureCoordinateBuffer = gl.createBuffer();
gl.bindBuffer(gl.ARRAY_BUFFER, pwgl.cubeVertexTextureCoordinateBuffer);
var textureCoordinates = [
//Front face
0.0, 0.0, //v0
1.0, 0.0, //v1
1.0, 1.0, //v2
0.0, 1.0, //v3

// Back face
0.0, 1.0, //v4
1.0, 1.0, //v5
1.0, 0.0, //v6
0.0, 0.0, //v7

// Left face
0.0, 1.0, //v8
1.0, 1.0, //v9
1.0, 0.0, //v10
0.0, 0.0, //v11

// Right face
0.0, 1.0, //v12
1.0, 1.0, //v13
1.0, 0.0, //v14
0.0, 0.0, //v15

// Top face
0.0, 1.0, //v16
1.0, 1.0, //v17
1.0, 0.0, //v18
0.0, 0.0, //v19

// Bottom face
0.0, 1.0, //v20
1.0, 1.0, //v21
1.0, 0.0, //v22
0.0, 0.0, //v23
];

gl.bufferData(gl.ARRAY_BUFFER, new Float32Array(textureCoordinates),
gl.STATIC_DRAW);

pwgl.CUBE_VERTEX_TEX_COORD_BUF_ITEM_SIZE = 2;
pwgl.CUBE_VERTEX_TEX_COORD_BUF_NUM_ITEMS = 24;

pwgl.cubeVertexIndexBuffer = gl.createBuffer();
gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, pwgl.cubeVertexIndexBuffer);
var cubeVertexIndices = [
0, 1, 2, 0, 2, 3, // Front face
4, 6, 5, 4, 7, 6, // Back face
8, 9, 10, 8, 10, 11, // Left face
12, 13, 14, 12, 14, 15, // Right face
16, 17, 18, 16, 18, 19, // Top face
20, 22, 21, 20, 23, 22 // Bottom face
];
gl.bufferData(gl.ELEMENT_ARRAY_BUFFER, new Uint16Array(cubeVertexIndices),
gl.STATIC_DRAW);
pwgl.CUBE_VERTEX_INDEX_BUF_ITEM_SIZE = 1;
pwgl.CUBE_VERTEX_INDEX_BUF_NUM_ITEMS = 36;
}

function textureFinishedLoading(image, texture) {
gl.bindTexture(gl.TEXTURE_2D, texture);
gl.pixelStorei(gl.UNPACK_FLIP_Y_WEBGL, true);

gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, gl.RGBA, gl.UNSIGNED_BYTE, image);
gl.generateMipmap(gl.TEXTURE_2D);

gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.LINEAR);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.MIRRORED_REPEAT);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.MIRRORED_REPEAT);
gl.bindTexture(gl.TEXTURE_2D, null);
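// Note: gl.generateMipmap() builds the mipmap chain, but with
// TEXTURE_MIN_FILTER set to gl.LINEAR only the base level is sampled.
// A filter such as gl.LINEAR_MIPMAP_LINEAR would be needed to actually
// use the mipmaps during minification.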
}

function loadImageForTexture(url, texture) {
var image = new Image();
image.onload = function() {
pwgl.ongoingImageLoads.splice(pwgl.ongoingImageLoads.indexOf(image), 1);
textureFinishedLoading(image, texture);
}
pwgl.ongoingImageLoads.push(image);
image.src = url;
}

function setupTextures() {
// Texture for the table
pwgl.woodTexture = gl.createTexture();
loadImageForTexture("./resources/wood_128x128.jpg", pwgl.woodTexture);

// Texture for the floor
pwgl.groundTexture = gl.createTexture();
loadImageForTexture("./resources/wood_floor_256.jpg", pwgl.groundTexture);

// Texture for the box on the table
pwgl.boxTexture = gl.createTexture();
loadImageForTexture("./resources/wicker_256.jpg", pwgl.boxTexture);

// Texture for a colored cube (loaded here but not used by draw() in this listing)
pwgl.colorCube = gl.createTexture();
loadImageForTexture("./resources/xiuxiuba.bmp", pwgl.colorCube);
}

function setupBuffers() {
setupFloorBuffers();
setupCubeBuffers();
}

// Set the light position and the ambient, diffuse and specular light colors
function setupLights() {
gl.uniform3fv(pwgl.uniformLightPositionLoc, [0.0, 20.0, 0.0]);
gl.uniform3fv(pwgl.uniformAmbientLightColorLoc, [0.2, 0.2, 0.2]);
gl.uniform3fv(pwgl.uniformDiffuseLightColorLoc, [0.7, 0.7, 0.7]);
gl.uniform3fv(pwgl.uniformSpecularLightColorLoc, [0.8, 0.8, 0.8]);
}

function uploadModelViewMatrixToShader() {
gl.uniformMatrix4fv(pwgl.uniformMVMatrixLoc, false, pwgl.modelViewMatrix);
}

function uploadProjectionMatrixToShader() {
gl.uniformMatrix4fv(pwgl.uniformProjMatrixLoc,
false, pwgl.projectionMatrix);
}

// Upload the normal matrix to the shader
function uploadNormalMatrixToShader() {
var normalMatrix = mat3.create();
// Invert the upper-left 3x3 part of the model-view matrix...
mat4.toInverseMat3(pwgl.modelViewMatrix, normalMatrix);
// ...and transpose it. The inverse transpose keeps normals perpendicular
// to their surfaces even under non-uniform scaling.
mat3.transpose(normalMatrix);
// Pass the normal matrix to the shader
gl.uniformMatrix3fv(pwgl.uniformNormalMatrixLoc, false, normalMatrix);
}

function drawFloor() {
// Bind position buffer
gl.bindBuffer(gl.ARRAY_BUFFER, pwgl.floorVertexPositionBuffer);
gl.vertexAttribPointer(pwgl.vertexPositionAttributeLoc,
pwgl.FLOOR_VERTEX_POS_BUF_ITEM_SIZE,
gl.FLOAT, false, 0, 0);

// Bind normal buffer
gl.bindBuffer(gl.ARRAY_BUFFER, pwgl.floorVertexNormalBuffer);
gl.vertexAttribPointer(pwgl.vertexNormalAttributeLoc,
pwgl.FLOOR_VERTEX_NORMAL_BUF_ITEM_SIZE,
gl.FLOAT, false, 0, 0);

// Bind texture coordinate buffer
gl.bindBuffer(gl.ARRAY_BUFFER, pwgl.floorVertexTextureCoordinateBuffer);
gl.vertexAttribPointer(pwgl.vertexTextureAttributeLoc,
pwgl.FLOOR_VERTEX_TEX_COORD_BUF_ITEM_SIZE,
gl.FLOAT, false, 0, 0);

gl.activeTexture(gl.TEXTURE0);
gl.bindTexture(gl.TEXTURE_2D, pwgl.groundTexture);

// Bind index buffer and draw the floor
gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, pwgl.floorVertexIndexBuffer);
gl.drawElements(gl.TRIANGLE_FAN, pwgl.FLOOR_VERTEX_INDEX_BUF_NUM_ITEMS,
gl.UNSIGNED_SHORT, 0);
}

function drawCube(texture) {
// Bind position buffer
gl.bindBuffer(gl.ARRAY_BUFFER, pwgl.cubeVertexPositionBuffer);
gl.vertexAttribPointer(pwgl.vertexPositionAttributeLoc,
pwgl.CUBE_VERTEX_POS_BUF_ITEM_SIZE,
gl.FLOAT, false, 0, 0);

// Bind normal buffer
gl.bindBuffer(gl.ARRAY_BUFFER, pwgl.cubeVertexNormalBuffer);
gl.vertexAttribPointer(pwgl.vertexNormalAttributeLoc,
pwgl.CUBE_VERTEX_NORMAL_BUF_ITEM_SIZE,
gl.FLOAT, false, 0, 0);

// Bind texture coordinate buffer
gl.bindBuffer(gl.ARRAY_BUFFER, pwgl.cubeVertexTextureCoordinateBuffer);
gl.vertexAttribPointer(pwgl.vertexTextureAttributeLoc,
pwgl.CUBE_VERTEX_TEX_COORD_BUF_ITEM_SIZE,
gl.FLOAT, false, 0, 0);

gl.activeTexture(gl.TEXTURE0);
gl.bindTexture(gl.TEXTURE_2D, texture);

// Bind index buffer and draw the cube
gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, pwgl.cubeVertexIndexBuffer);
gl.drawElements(gl.TRIANGLES, pwgl.CUBE_VERTEX_INDEX_BUF_NUM_ITEMS,
gl.UNSIGNED_SHORT, 0);
}

function drawTable() {
// Draw a simple table by modifying the modelview matrix
// (translate and scale) and then use the function drawCube()
// to draw a table top and four table legs.

pushModelViewMatrix();
mat4.translate(pwgl.modelViewMatrix, [0.0, 1.0, 0.0], pwgl.modelViewMatrix);
mat4.scale(pwgl.modelViewMatrix, [2.0, 0.1, 2.0], pwgl.modelViewMatrix);
uploadModelViewMatrixToShader();
uploadNormalMatrixToShader();
// Draw the actual cube (now scaled to a cuboid) with woodTexture
drawCube(pwgl.woodTexture);
popModelViewMatrix();

// Draw the table legs
for (var i=-1; i<=1; i+=2) {
for (var j= -1; j<=1; j+=2) {
pushModelViewMatrix();
mat4.translate(pwgl.modelViewMatrix, [i*1.9, -0.1, j*1.9], pwgl.modelViewMatrix);
mat4.scale(pwgl.modelViewMatrix, [0.1, 1.0, 0.1], pwgl.modelViewMatrix);
uploadModelViewMatrixToShader();
uploadNormalMatrixToShader();
drawCube(pwgl.woodTexture);
popModelViewMatrix();
}
}
}

function draw(currentTime) {
pwgl.requestId = requestAnimFrame(draw);

// Use the same clock as pwgl.previousFrameTimeStamp (Date.now()) so the
// FPS calculation below always compares consistent time stamps
currentTime = Date.now();

// Update FPS if a second or more has passed since last FPS update
if(currentTime - pwgl.previousFrameTimeStamp >= 1000) {
pwgl.fpsCounter.innerHTML = pwgl.nbrOfFramesForFPS;
pwgl.nbrOfFramesForFPS = 0;
pwgl.previousFrameTimeStamp = currentTime;
}

gl.viewport(0, 0, gl.viewportWidth, gl.viewportHeight);
gl.clear(gl.COLOR_BUFFER_BIT | gl.DEPTH_BUFFER_BIT);
mat4.perspective(60, gl.viewportWidth / gl.viewportHeight,
1, 100.0, pwgl.projectionMatrix);
mat4.identity(pwgl.modelViewMatrix);
mat4.lookAt([8, 12, 8],[0, 0, 0], [0, 1,0], pwgl.modelViewMatrix);
// Slow automatic rotation of the whole scene around the y-axis
mat4.rotateY(pwgl.modelViewMatrix, pwgl.yRot, pwgl.modelViewMatrix);
pwgl.yRot += 0.01;

// Mouse drag rotation. currentAngle is accumulated in degrees in
// handleMouseMove(), so convert to radians for the glMatrix rotation calls
mat4.rotateX(pwgl.modelViewMatrix, pwgl.currentAngle[0] * Math.PI / 180, pwgl.modelViewMatrix);
mat4.rotateY(pwgl.modelViewMatrix, pwgl.currentAngle[1] * Math.PI / 180, pwgl.modelViewMatrix);

// Mouse wheel zoom: translate the scene along the z-axis
mat4.translate(pwgl.modelViewMatrix, [0, 0, pwgl.zTri], pwgl.modelViewMatrix);

uploadModelViewMatrixToShader();
uploadProjectionMatrixToShader();
uploadNormalMatrixToShader();
gl.uniform1i(pwgl.uniformSamplerLoc, 0);

drawFloor();

// Draw table
pushModelViewMatrix();
mat4.translate(pwgl.modelViewMatrix, [0.0, 1.1, 0.0], pwgl.modelViewMatrix);
uploadModelViewMatrixToShader();
uploadNormalMatrixToShader();
drawTable();
popModelViewMatrix();

// Calculate the position for the box that is initially on top of
// the table but will then be moved during the animation
pushModelViewMatrix();
if (pwgl.animationStartTime === undefined) {
pwgl.animationStartTime = currentTime;
}
// Update the position of the box
if (pwgl.y < 5) {
// First move the box vertically from its original position on top of
// the table (where y = 2.7) to 5 units above the floor (y = 5).
// Let this movement take 3 seconds
pwgl.y = 2.7 + (currentTime - pwgl.animationStartTime)/3000 * (5.0-2.7);
}
else {
// Then move the box in a circle where one revolution takes 2 seconds
pwgl.angle = (currentTime - pwgl.animationStartTime)/2000*2*Math.PI % (2*Math.PI);
pwgl.x = Math.cos(pwgl.angle) * pwgl.circleRadius;
pwgl.z = Math.sin(pwgl.angle) * pwgl.circleRadius;
}

mat4.translate(pwgl.modelViewMatrix, [pwgl.x, pwgl.y, pwgl.z], pwgl.modelViewMatrix);
mat4.scale(pwgl.modelViewMatrix, [0.5, 0.5, 0.5], pwgl.modelViewMatrix);
uploadModelViewMatrixToShader();
uploadNormalMatrixToShader();
drawCube(pwgl.boxTexture);
popModelViewMatrix();

// Update number of drawn frames to be able to count fps
pwgl.nbrOfFramesForFPS++;
}

function handleContextLost(event) {
event.preventDefault();
cancelRequestAnimFrame(pwgl.requestId);

// Ignore all ongoing image loads by removing
// their onload handler
for (var i = 0; i < pwgl.ongoingImageLoads.length; i++) {
pwgl.ongoingImageLoads[i].onload = undefined;
}
pwgl.ongoingImageLoads = [];
}

function init() {
// Initialization that is performed both during the first startup and when
// the event webglcontextrestored is received is included in this function.
setupShaders();
setupBuffers();
setupLights();
setupTextures();
gl.clearColor(0.0, 0.0, 0.0, 1.0);
gl.enable(gl.DEPTH_TEST);

// Initialize some variables for the moving box
pwgl.x = 0.0;
pwgl.y = 2.7;
pwgl.z = 0.0;
pwgl.circleRadius = 4.0;
pwgl.angle = 0;

// Rotation angle around the y-axis
pwgl.yRot = 0;
// Translation distance along the z-axis (mouse wheel zoom)
pwgl.zTri = 0;

// Initialize some variables related to the animation
pwgl.animationStartTime = undefined;
pwgl.nbrOfFramesForFPS = 0;
pwgl.previousFrameTimeStamp = Date.now();
}

function handleContextRestored(event) {
init();
pwgl.requestId = requestAnimFrame(draw, canvas);
}

// Mouse event handling
var dragging = false;       // Dragging or not
var lastX = -1, lastY = -1; // Last position of the mouse
function handleMouseDown(ev) {
var x = ev.clientX, y = ev.clientY;
// Start dragging only if the mouse press is inside the <canvas>
var rect = ev.target.getBoundingClientRect();
if (rect.left <= x && x < rect.right && rect.top <= y && y < rect.bottom) {
lastX = x; lastY = y;
dragging = true;
}
}
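// Note: if the mouse button is released outside the canvas, the mouseup
// handler below never fires and the drag keeps going. A common remedy (not
// part of this listing) is to register handleMouseUp on the document as well:
//
//   document.addEventListener('mouseup', handleMouseUp, false);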
function handleMouseMove(ev) {
var x = ev.clientX, y = ev.clientY;
if (dragging) {
var factor = 100/canvas.height; // The rotation ratio
var dx = factor * (x - lastX);
var dy = factor * (y - lastY);
// Limit x-axis rotation angle to -90 to 90 degrees
pwgl.currentAngle[0] = Math.max(Math.min(pwgl.currentAngle[0] + dy, 90.0), -90.0);
pwgl.currentAngle[1] = pwgl.currentAngle[1] + dx;
}
lastX = x, lastY = y;
}
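// With factor = 100 / canvas.height, dragging across the full height of the
// canvas changes the accumulated angle by roughly 100 degrees; draw()
// converts these degree values to radians before calling mat4.rotateX/rotateY.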
function handleMouseUp() {
dragging = false;
}

/*
 * In non-Firefox browsers the wheel event reports wheelDelta = 120 for one
 * notch of scrolling up and -120 for scrolling down, while Firefox reports
 * detail = -3 for scrolling up and 3 for scrolling down.
 */
// Mouse wheel handler
var scrollFunc = function (e) {
e = e || window.event;

var t1 = document.getElementById("wheelDelta");
var t2 = document.getElementById("detail");

if (e.wheelDelta) { // IE/Opera/Safari/Chrome
t1.value = "IE/Opera/Safari/Chrome";
t2.value = e.wheelDelta;
if (e.wheelDelta > 0) { // scrolled up
pwgl.zTri += 1.0;
} else {
pwgl.zTri -= 1.0;
}
} else if (e.detail) { // Firefox
t1.value = "Firefox";
t2.value = e.detail;
if (e.detail < 0) { // scrolled up
pwgl.zTri += 1.0;
} else {
pwgl.zTri -= 1.0;
}
}
// ScrollText(direct);
};
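// In current browsers both mousewheel and DOMMouseScroll are superseded by the
// standardized 'wheel' event. A minimal alternative handler (an assumption,
// not part of the original listing) could look like this:
//
//   canvas.addEventListener('wheel', function (e) {
//     e.preventDefault();
//     pwgl.zTri += (e.deltaY < 0) ? 1.0 : -1.0;  // wheel up -> move forward
//   }, false);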
/* Register the mouse wheel events */
if (document.addEventListener) {
document.addEventListener('DOMMouseScroll', scrollFunc, false); // W3C/Firefox
}
window.onmousewheel = document.onmousewheel = scrollFunc; // IE/Opera/Chrome/Safari

function startup() {
canvas = document.getElementById("myGLCanvas");
canvas = WebGLDebugUtils.makeLostContextSimulatingContext(canvas);

canvas.addEventListener('webglcontextlost', handleContextLost, false);
canvas.addEventListener('webglcontextrestored', handleContextRestored, false);
canvas.addEventListener('mousedown', handleMouseDown, false);
canvas.addEventListener('mousemove', handleMouseMove, false);
canvas.addEventListener('mouseup', handleMouseUp, false);

gl = createGLContext(canvas);
init();

// Current rotation angles (in degrees) controlled by dragging the mouse
pwgl.currentAngle = [0.0, 0.0];

pwgl.fpsCounter = document.getElementById("fps");

// Uncomment the three lines of code below to be able to test lost context
// window.addEventListener('mousedown', function() {
//   canvas.loseContext();
// });

// Draw the complete scene
draw();
}
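// startup() is invoked from <body onload="startup();"> below: it registers the
// context-lost/restored and mouse handlers, creates the WebGL context,
// initializes the scene state and then starts the draw() loop, which
// re-schedules itself through requestAnimFrame().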
</script>
</head>

<body onload="startup();">
<canvas id="myGLCanvas" width="500" height="500"></canvas>
<div id="fps-counter">
FPS: <span id="fps">--</span>
</div>
<p><label for="wheelDelta">Browser type:</label><input type="text" id="wheelDelta"/></p>
<p><label for="detail">Wheel value:</label><input type="text" id="detail"/></p>
</body>
</html>