How do I figure out the UV Mapping from a Ricoh Theta S Dual Fish Eye to a Three.js r71 SphereGeometry?

Published 2019-10-29 03:34

I am trying to reproduce the three.js panorama dualfisheye example using three.js r71.

I need to stick with r71 because I will eventually use this code with the Autodesk Forge Viewer, which is based on three.js r71.

I have made some progress, but I need help figuring out the UV mapping.

If you compare the result of the snippet below with the three.js panorama dualfisheye example from that link, there is obviously a problem.

var camera, scene, renderer;
var isUserInteracting = false,
  onMouseDownMouseX = 0, onMouseDownMouseY = 0,
  lon = 0, onMouseDownLon = 0,
  lat = 0, onMouseDownLat = 0,
  phi = 0, theta = 0,
  distance = 500;

init();
animate();

function init() {
  var container, mesh;
  container = document.getElementById('container');
  camera = new THREE.PerspectiveCamera(75, window.innerWidth / window.innerHeight, 1, 2000);
  scene = new THREE.Scene();
  // var geometry = new THREE.SphereBufferGeometry( 500, 60, 40 ).toNonIndexed();
  var geometry = new THREE.SphereGeometry(500, 60, 40);
  // invert the geometry on the x-axis so that all of the faces point inward
  // geometry.scale( - 1, 1, 1 );
  geometry.applyMatrix(new THREE.Matrix4().makeScale(-1, 1, 1));
  /*
  // Remap UVs
  // var normals = geometry.attributes.normal.array;
  var normals = [];
  geometry.faces.forEach(element => { normals.push(element.normal) });
  var uvs = geometry.faceVertexUvs
  // var uvs = geometry.attributes.uv.array;
  for (var i = 0, l = normals.length / 3; i < l; i++) {
    var x = normals[i * 3 + 0];
    var y = normals[i * 3 + 1];
    var z = normals[i * 3 + 2];
    if (i < l / 2) {
      var correction = (x == 0 && z == 0) ? 1 : (Math.acos(y) / Math.sqrt(x * x + z * z)) * (2 / Math.PI);
      uvs[i * 2 + 0] = x * (404 / 1920) * correction + (447 / 1920);
      uvs[i * 2 + 1] = z * (404 / 1080) * correction + (582 / 1080);
    } else {
      var correction = (x == 0 && z == 0) ? 1 : (Math.acos(- y) / Math.sqrt(x * x + z * z)) * (2 / Math.PI);
      uvs[i * 2 + 0] = - x * (404 / 1920) * correction + (1460 / 1920);
      uvs[i * 2 + 1] = z * (404 / 1080) * correction + (582 / 1080);
    }
  }
  */
  // geometry.rotateZ( - Math.PI / 2 );
  geometry.applyMatrix(new THREE.Matrix4().makeRotationZ(Math.PI / 2))
  THREE.ImageUtils.crossOrigin = '';
  var texture = THREE.ImageUtils.loadTexture('https://threejs.org/examples/textures/ricoh_theta_s.jpg');
  this.texture = texture;
  texture.format = THREE.RGBFormat;
  var material = new THREE.MeshBasicMaterial({ map: texture });
  material.map.repeat.set(1, 1);
  material.map.offset.set(0, 0);
  mesh = new THREE.Mesh(geometry, material);
  scene.add(mesh);
  renderer = new THREE.WebGLRenderer();
  renderer.setPixelRatio(window.devicePixelRatio);
  renderer.setSize(window.innerWidth, window.innerHeight);
  container.appendChild(renderer.domElement);
  document.addEventListener('mousedown', onDocumentMouseDown, false);
  document.addEventListener('mousemove', onDocumentMouseMove, false);
  document.addEventListener('mouseup', onDocumentMouseUp, false);
  document.addEventListener('wheel', onDocumentMouseWheel, false);
  window.addEventListener('resize', onWindowResize, false);
}

function onWindowResize() {
  camera.aspect = window.innerWidth / window.innerHeight;
  camera.updateProjectionMatrix();
  renderer.setSize(window.innerWidth, window.innerHeight);
}

function onDocumentMouseDown(event) {
  event.preventDefault();
  isUserInteracting = true;
  onPointerDownPointerX = event.clientX;
  onPointerDownPointerY = event.clientY;
  onPointerDownLon = lon;
  onPointerDownLat = lat;
}

function onDocumentMouseMove(event) {
  if (isUserInteracting === true) {
    lon = (onPointerDownPointerX - event.clientX) * 0.1 + onPointerDownLon;
    lat = (onPointerDownPointerY - event.clientY) * 0.1 + onPointerDownLat;
  }
}

function onDocumentMouseUp(event) {
  isUserInteracting = false;
}

function onDocumentMouseWheel(event) {
  distance += event.deltaY * 0.05;
  distance = THREE.Math.clamp(distance, 400, 1000);
}

function animate() {
  requestAnimationFrame(animate);
  update();
}

function update() {
  if (isUserInteracting === false) { lon += 0.1; }
  lat = Math.max(- 85, Math.min(85, lat));
  phi = THREE.Math.degToRad(90 - lat);
  theta = THREE.Math.degToRad(lon - 180);
  camera.position.x = distance * Math.sin(phi) * Math.cos(theta);
  camera.position.y = distance * Math.cos(phi);
  camera.position.z = distance * Math.sin(phi) * Math.sin(theta);
  camera.lookAt(scene.position);
  renderer.render(scene, camera);
}
  body { background-color: #000000; margin: 0px; overflow: hidden; } 
  <script src="https://cdnjs.cloudflare.com/ajax/libs/three.js/71/three.js"></script> <div id="container"></div> 

Thank you for your time.

Answer 1:

With Geometry (as opposed to BufferGeometry) the normals are stored per face (face.vertexNormals) as arrays of Vector3. The UVs are an array of arrays of arrays of Vector2s:

someVector2 = geometry.faceVertexUvs[setNdx][faceNdx][vertexNdx]
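For example, here is a minimal sketch of how those structures line up in r71, assuming a freshly created SphereGeometry like the one in the question (it only walks the data, it does not remap anything yet):

// Minimal sketch: how faces, vertex normals and UVs line up in three.js r71 Geometry.
var geometry = new THREE.SphereGeometry(500, 60, 40);
var uvs = geometry.faceVertexUvs[0];            // UV set 0: one entry per face
geometry.faces.forEach(function (face, faceNdx) {
  var faceUVs = uvs[faceNdx];                   // array of THREE.Vector2, one per face vertex
  for (var i = 0; i < 3; ++i) {
    var normal = face.vertexNormals[i];         // THREE.Vector3 for this face vertex
    var uv = faceUVs[i];                        // THREE.Vector2 you can modify in place
    // normal.x / normal.y / normal.z and uv.x / uv.y are what the remapping below works with
  }
});
geometry.uvsNeedUpdate = true;                  // only needed if the geometry has already been rendered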

var camera, scene, renderer;
var isUserInteracting = false,
  onMouseDownMouseX = 0, onMouseDownMouseY = 0,
  lon = 0, onMouseDownLon = 0,
  lat = 0, onMouseDownLat = 0,
  phi = 0, theta = 0,
  distance = 500;

init();
animate();

function init() {
  var container, mesh;
  container = document.getElementById('container');
  camera = new THREE.PerspectiveCamera(75, window.innerWidth / window.innerHeight, 1, 2000);
  scene = new THREE.Scene();
  var geometry = new THREE.SphereGeometry(500, 60, 40);
  // invert the geometry on the x-axis so that all of the faces point inward
  geometry.applyMatrix(new THREE.Matrix4().makeScale(-1, 1, 1));
  // Remap UVs
  var uvs = geometry.faceVertexUvs[0];
  geometry.faces.forEach((face, ndx) => {
    const faceUVs = uvs[ndx];
    for (var i = 0; i < 3; ++i) {
      const faceNormal = face.vertexNormals[i];
      var x = faceNormal.x;
      var y = faceNormal.y;
      var z = faceNormal.z;
      if (ndx < geometry.faces.length / 2) {
        var correction = (x == 0 && z == 0) ? 1 : (Math.acos(y) / Math.sqrt(x * x + z * z)) * (2 / Math.PI);
        faceUVs[i].x = x * (404 / 1920) * correction + (447 / 1920);
        faceUVs[i].y = z * (404 / 1080) * correction + (582 / 1080);
      } else {
        var correction = (x == 0 && z == 0) ? 1 : (Math.acos(- y) / Math.sqrt(x * x + z * z)) * (2 / Math.PI);
        faceUVs[i].x = - x * (404 / 1920) * correction + (1460 / 1920);
        faceUVs[i].y = z * (404 / 1080) * correction + (582 / 1080);
      }
    }
  });
  geometry.applyMatrix(new THREE.Matrix4().makeRotationZ(Math.PI / 2))
  THREE.ImageUtils.crossOrigin = '';
  var texture = THREE.ImageUtils.loadTexture('https://threejs.org/examples/textures/ricoh_theta_s.jpg');
  this.texture = texture;
  texture.format = THREE.RGBFormat;
  var material = new THREE.MeshBasicMaterial({ map: texture });
  material.map.repeat.set(1, 1);
  material.map.offset.set(0, 0);
  mesh = new THREE.Mesh(geometry, material);
  scene.add(mesh);
  renderer = new THREE.WebGLRenderer();
  renderer.setPixelRatio(window.devicePixelRatio);
  renderer.setSize(window.innerWidth, window.innerHeight);
  container.appendChild(renderer.domElement);
  document.addEventListener('mousedown', onDocumentMouseDown, false);
  document.addEventListener('mousemove', onDocumentMouseMove, false);
  document.addEventListener('mouseup', onDocumentMouseUp, false);
  document.addEventListener('wheel', onDocumentMouseWheel, false);
  window.addEventListener('resize', onWindowResize, false);
}

function onWindowResize() {
  camera.aspect = window.innerWidth / window.innerHeight;
  camera.updateProjectionMatrix();
  renderer.setSize(window.innerWidth, window.innerHeight);
}

function onDocumentMouseDown(event) {
  event.preventDefault();
  isUserInteracting = true;
  onPointerDownPointerX = event.clientX;
  onPointerDownPointerY = event.clientY;
  onPointerDownLon = lon;
  onPointerDownLat = lat;
}

function onDocumentMouseMove(event) {
  if (isUserInteracting === true) {
    lon = (onPointerDownPointerX - event.clientX) * 0.1 + onPointerDownLon;
    lat = (onPointerDownPointerY - event.clientY) * 0.1 + onPointerDownLat;
  }
}

function onDocumentMouseUp(event) {
  isUserInteracting = false;
}

function onDocumentMouseWheel(event) {
  distance += event.deltaY * 0.05;
  distance = THREE.Math.clamp(distance, 400, 1000);
}

function animate() {
  requestAnimationFrame(animate);
  update();
}

function update() {
  if (isUserInteracting === false) { lon += 0.1; }
  lat = Math.max(- 85, Math.min(85, lat));
  phi = THREE.Math.degToRad(90 - lat);
  theta = THREE.Math.degToRad(lon - 180);
  camera.position.x = distance * Math.sin(phi) * Math.cos(theta);
  camera.position.y = distance * Math.cos(phi);
  camera.position.z = distance * Math.sin(phi) * Math.sin(theta);
  camera.lookAt(scene.position);
  renderer.render(scene, camera);
}
 body { background-color: #000000; margin: 0px; overflow: hidden; } 
 <script src="https://cdnjs.cloudflare.com/ajax/libs/three.js/71/three.js"></script> <div id="container"></div> 
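For what it's worth, the remapping loop can be read as a per-lens fisheye projection: Math.acos(y) is the angle between the vertex direction and the lens axis, and multiplying by 2/PI scales that angle so the edge of the fisheye circle is reached at 90 degrees (an equidistant-style fisheye model). Here is a minimal standalone sketch of just that mapping, using the same constants as the code above (assumed to be a 1920x1080 Theta S frame with fisheye circles of radius about 404 px centered near (447, 582) and (1460, 582)):

// Sketch of the per-lens mapping used in the remap above; directionToFisheyeUV is a
// hypothetical helper name, not part of the original code or of three.js.
function directionToFisheyeUV(x, y, z, frontLens) {
  var angle = Math.acos(frontLens ? y : -y);     // angle from the lens axis
  var radial = Math.sqrt(x * x + z * z);         // distance from the axis in the xz-plane
  // Scale so that angle == PI/2 lands on the edge of the fisheye circle.
  var correction = (radial === 0) ? 1 : (angle / radial) * (2 / Math.PI);
  var centerU = (frontLens ? 447 : 1460) / 1920;
  var u = (frontLens ? x : -x) * (404 / 1920) * correction + centerU;
  var v = z * (404 / 1080) * correction + (582 / 1080);
  return new THREE.Vector2(u, v);
}

The frontLens flag corresponds to the ndx < geometry.faces.length / 2 test in the loop above: the first half of the sphere's faces is textured from one fisheye circle, the second half from the other.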

Let me point out that I figured this out by running the sample, opening devtools in Chrome, setting a breakpoint, and inspecting the variables.

Here are the UVs (devtools screenshot):

And here are the vertex normals (devtools screenshot):
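If you want to do the same inspection without a breakpoint, logging the first face is enough to see the shape of both structures. A small sketch (the exact values will vary with the sphere parameters):

// Quick inspection sketch: log the r71 Geometry structures the answer relies on.
var geometry = new THREE.SphereGeometry(500, 60, 40);
console.log(geometry.faceVertexUvs[0][0]);       // [Vector2, Vector2, Vector2] for face 0
console.log(geometry.faces[0].vertexNormals);    // [Vector3, Vector3, Vector3] for face 0
console.log(geometry.faces.length, geometry.faceVertexUvs[0].length); // same face count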



Source: How do I figure out the UV Mapping from Ricoh Theta S Dual Fish Eye to a Three.js r71 SphereGeometry