moved equirect-detection to src-fragment + cleaned up animation + updated RFC

parent 39463736e1
commit b8777c8daa

11 changed files with 1184 additions and 1311 deletions
@@ -31,7 +31,7 @@
     <canvas id="qrcode" style="display:none" width="300" height="300"></canvas>
     <a-scene light="defaultLightsEnabled: false">
-      <a-entity id="player" wasd-controls look-controls>
+      <a-entity id="player">
         <a-entity id="left-hand" laser-controls="hand: left" raycaster="objects:.ray" blink-controls="cameraRig:#player; teleportOrigin: #camera; collisionEntities: #floor">
           <a-entity rotation="-90 0 0" position="0 0.1 0" id="navigator">
             <a-entity id="back" xrf-button="label: <; width:0.05; action: history.back()" position="-0.025 0 0" class="ray"></a-entity>
@@ -39,7 +39,7 @@
           </a-entity>
         </a-entity>
         <a-entity id="right-hand" laser-controls="hand: right" raycaster="objects:.ray" blink-controls="cameraRig:#player; teleportOrigin: #camera; collisionEntities: #floor"></a-entity>
-        <a-entity camera="fov:90" position="0 1.6 0" id="camera"></a-entity>
+        <a-entity camera="fov:90" wasd-controls look-controls position="0 1.6 0" id="camera"></a-entity>
       </a-entity>

       <a-entity id="home" xrf="index.gltf#pos=0,0,0"></a-entity>
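The movement and look controls migrate from the #player rig to the camera entity, so blink-controls can teleport the rig while look-controls only rotates the child camera. A minimal sketch of the resulting hierarchy (assumes the example scene above is loaded; illustrative, not part of this commit):

    // sketch: with wasd/look-controls on #camera instead of #player,
    // the rig can be repositioned without fighting the control components
    const rig = document.querySelector('#player').object3D
    const cam = document.querySelector('#camera').object3D
    console.log( cam.parent === rig ) // true: detectCameraRig() below walks this parent chain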
BIN example/assets/equirect.jpg (new file, 688 KiB; binary file not shown)

File diff suppressed because one or more lines are too long
@@ -23,15 +23,6 @@ window.AFRAME.registerComponent('xrf', {
       })
-      if( !XRF.camera ) throw 'xrfragment: no camera detected, please declare <a-entity camera..> ABOVE entities with xrf-attributes'
-
-      // override the camera-related XR Fragments so the camera-rig is affected
-      let camOverride = (xrf,v,opts) => {
-        opts.camera = document.querySelector('[camera]').object3D.parent
-        xrf(v,opts)
-      }
-
-      xrf.pos = camOverride
-      xrf.href = camOverride

       // in order to set the rotation programmatically
       // we need to disable look-controls
       xrf.rot = (xrf,v,opts) => {
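The camOverride hack disappears here because rig detection now happens framework-agnostically in xrf.detectCameraRig (see the core diff below). As for the remaining "disable look-controls" comment, one way to do that with stock A-Frame is sketched below; this is an assumption about the approach, not the literal body of xrf.rot:

    // sketch: toggle look-controls off so a programmatic rotation
    // is not overwritten on the next frame (re-enable afterwards if desired)
    const cam = document.querySelector('[camera]')
    cam.setAttribute('look-controls', 'enabled', false)
    cam.object3D.rotation.set( 0, Math.PI/2, 0 )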
@@ -6,6 +6,7 @@ var xrf = {}
 xrf.init = function(opts){
   opts = opts || {}
   xrf.Parser.debug = xrf.debug
+  xrf.detectCameraRig(opts)
   for ( let i in opts ) xrf[i] = opts[i]
   xrf.emit('init',opts)
   return xrf
@@ -16,6 +17,20 @@ xrf.query = function(){
   alert("queries are not implemented (yet) for this particular framework")
 }

+xrf.detectCameraRig = function(opts){
+  if( opts.camera ){ // detect rig (if any)
+    let getCam = ((cam) => () => cam)(opts.camera)
+    let offsetY = 0
+    while( opts.camera.parent.type != "Scene" ){
+      offsetY += opts.camera.position.y
+      opts.camera = opts.camera.parent
+      opts.camera.getCam = getCam
+      opts.camera.updateProjectionMatrix = () => opts.camera.getCam().updateProjectionMatrix()
+    }
+    opts.camera.offsetY = offsetY
+  }
+}
+
 xrf.roundrobin = (frag, store) => {
   if( !frag.args || frag.args.length == 0 ) return 0
   if( !store.rr ) store.rr = {}
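detectCameraRig walks up from the supplied camera until it reaches the scene, accumulating the vertical offset and redirecting camera calls to the rig, so the rest of xrf can treat the rig as "the camera". A minimal sketch of what a consumer ends up with (plain three.js; the names are illustrative):

    // sketch: a camera nested in a rig group, as A-Frame or a manual setup produces
    const scene  = new THREE.Scene()
    const rig    = new THREE.Group()
    const camera = new THREE.PerspectiveCamera(90, 1, 0.1, 1000)
    camera.position.y = 1.6
    rig.add(camera)
    scene.add(rig)

    xrf.init({ camera })
    // after init, xrf.camera points at the rig:
    //   xrf.camera.getCam()  -> the real PerspectiveCamera
    //   xrf.camera.offsetY   -> 1.6, used later to compensate VR/AR head height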
@@ -19,8 +19,8 @@ xrf.patchRenderer = function(renderer){
   renderer.xr.addEventListener( 'sessionstart', () => xrf.baseReferenceSpace = renderer.xr.getReferenceSpace() );
   renderer.xr.enabled = true;
   renderer.render = ((render) => function(scene,camera){
     if( xrf.model && xrf.model.render )
       xrf.model.render(scene,camera)
+    let time = xrf.model && xrf.model.clock ? xrf.model.clock.getDelta() : 0
     xrf.emit('render',{scene,camera,time}) // allow fragments to do something at renderframe
     render(scene,camera)
   })(renderer.render.bind(renderer))
 }
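Because the patched render loop now passes the clock delta along, fragments can animate per frame without owning the loop. A sketch of a consumer (the 'render' listeners further down in this commit follow exactly this shape):

    // sketch: subscribing to the per-frame event emitted by the patched renderer
    xrf.addEventListener('render', ({ scene, camera, time }) => {
      // 'time' is the delta since the last frame (0 until a model/clock exists)
    })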
@@ -47,29 +47,7 @@ xrf.parseModel = function(model,url){
   model.file = file
   // eval embedded XR fragments
   model.scene.traverse( (mesh) => xrf.hashbus.pub.mesh(mesh,model) )
-  // add animations
-  model.clock = new xrf.THREE.Clock();
-  model.mixer = new xrf.THREE.AnimationMixer(model.scene)
-  model.animations.map( (anim) => {
-    anim.action = model.mixer.clipAction( anim )
-    //anim.action.setLoop(0)
-    anim.action.play()
-  })
-
-  let tmp = new xrf.THREE.Vector3()
-  model.render = function(){
-    let time = model.clock.getDelta()
-    model.mixer.update( time )
-
-    // update focusline
-    xrf.focusLine.material.color.r = (1.0 + Math.sin( model.clock.getElapsedTime()*10 ))/2
-    xrf.focusLine.material.dashSize = 0.2 + 0.02*Math.sin( model.clock.getElapsedTime() )
-    xrf.focusLine.material.gapSize = 0.1 + 0.02*Math.sin( model.clock.getElapsedTime() *3 )
-    xrf.focusLine.material.opacity = (0.25 + 0.15*Math.sin( model.clock.getElapsedTime() * 3 )) * xrf.focusLine.opacity;
-    if( xrf.focusLine.opacity > 0.0 ) xrf.focusLine.opacity -= time*0.2
-    if( xrf.focusLine.opacity < 0.0 ) xrf.focusLine.opacity = 0
-  }
-
   xrf.emit('parseModel',{model,url,file})
 }

 xrf.getLastModel = () => xrf.model.last
@@ -39,55 +39,8 @@ xrf.frag.href = function(v, opts){
       scale: new THREE.Vector3(),
       quat: new THREE.Quaternion()
     }
-    // detect equirectangular image
-    let texture = mesh.material && mesh.material.map ? mesh.material.map : null
-    if( texture && texture.source.data.height == texture.source.data.width/2 ){
-      texture.mapping = THREE.ClampToEdgeWrapping
-      texture.needsUpdate = true
-
-      // poor man's equi-portal
-      mesh.material = new THREE.ShaderMaterial( {
-        side: THREE.DoubleSide,
-        uniforms: {
-          pano: { value: texture },
-          selected: { value: false },
-        },
-        vertexShader: `
-          vec3 portalPosition;
-          varying vec3 vWorldPosition;
-          varying float vDistanceToCenter;
-          varying float vDistance;
-          void main() {
-            vDistanceToCenter = clamp(length(position - vec3(0.0, 0.0, 0.0)), 0.0, 1.0);
-            portalPosition = (modelMatrix * vec4(0.0, 0.0, 0.0, 1.0)).xyz;
-            vDistance = length(portalPosition - cameraPosition);
-            vWorldPosition = (modelMatrix * vec4(position, 1.0)).xyz;
-            gl_Position = projectionMatrix * modelViewMatrix * vec4(position, 1.0);
-          }
-        `,
-        fragmentShader: `
-          #define RECIPROCAL_PI2 0.15915494
-          uniform sampler2D pano;
-          uniform bool selected;
-          varying float vDistanceToCenter;
-          varying float vDistance;
-          varying vec3 vWorldPosition;
-          void main() {
-            vec3 direction = normalize(vWorldPosition - cameraPosition );
-            vec2 sampleUV;
-            sampleUV.y = -clamp(direction.y * 0.5 + 0.5, 0.0, 1.0);
-            sampleUV.x = atan(direction.z, -direction.x) * -RECIPROCAL_PI2;
-            sampleUV.x += 0.33; // adjust focus to AFRAME's a-scene.components.screenshot.capture()
-            vec4 color = texture2D(pano, sampleUV);
-            // Convert color to grayscale (lazy lite approach to not having to match tonemapping/shaderstacking of THREE.js)
-            float luminance = 0.2126 * color.r + 0.7152 * color.g + 0.0722 * color.b;
-            vec4 grayscale_color = selected ? color : vec4(vec3(luminance) + vec3(0.33), color.a);
-            gl_FragColor = grayscale_color;
-          }
-        `,
-      });
-      mesh.material.needsUpdate = true
-    }else if( mesh.material){ mesh.material = mesh.material.clone() }
+    mesh.material = mesh.material.clone() // we need this so we can individually highlight meshes

     let click = mesh.userData.XRF.href.exec = (e) => {
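href no longer special-cases 2:1 textures; per the commit message, that detection now lives with the src fragment (see the src/image diff below). For reference, the predicate is just the aspect-ratio check, sketched here standalone (assumes a loaded three.js texture):

    // sketch: a texture twice as wide as it is tall is treated as equirectangular
    const isEquirect = (texture) =>
      texture && texture.source.data.height == texture.source.data.width / 2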
@@ -117,3 +117,17 @@ xrf.addEventListener('href', (opts) => {
   let frag = xrf.URI.parse( opts.xrf.string, xrf.XRF.NAVIGATOR | xrf.XRF.PV_OVERRIDE | xrf.XRF.METADATA )
   xrf.frag.updatePredefinedView({frag,scene:xrf.scene,href:opts.xrf})
 })
+
+xrf.addEventListener('render', (opts) => {
+  let model = xrf.model
+  if( !model || !model.clock ) return
+  // update focusline
+  let {time} = opts
+  xrf.focusLine.material.color.r = (1.0 + Math.sin( model.clock.getElapsedTime()*10 ))/2
+  xrf.focusLine.material.dashSize = 0.2 + 0.02*Math.sin( model.clock.getElapsedTime() )
+  xrf.focusLine.material.gapSize = 0.1 + 0.02*Math.sin( model.clock.getElapsedTime() *3 )
+  xrf.focusLine.material.opacity = (0.25 + 0.15*Math.sin( model.clock.getElapsedTime() * 3 )) * xrf.focusLine.opacity;
+  if( xrf.focusLine.opacity > 0.0 ) xrf.focusLine.opacity -= time*0.2
+  if( xrf.focusLine.opacity < 0.0 ) xrf.focusLine.opacity = 0
+})
@@ -45,7 +45,7 @@ xrf.frag.src = function(v, opts){
   let mimetype = res.headers.get('Content-type')
   if( url.replace(/#.*/,'').match(/\.(gltf|glb)$/) ) mimetype = 'gltf'
   //if( url.match(/\.(fbx|stl|obj)$/) ) mimetype =
-  opts = { ...opts, src, frag }
+  opts = { ...opts, src, frag, mimetype }
   return xrf.frag.src.type[ mimetype ] ? xrf.frag.src.type[ mimetype ](url,opts) : xrf.frag.src.type.unknown(url,opts)
 })
 .then( (model) => {
@@ -132,6 +132,7 @@ xrf.frag.src.filterScene = (scene,opts) => {
   if( frag.q ){
     src = scene.clone(true);
     xrf.frag.q.filter(src,frag)
+    console.dir(src)
   }
   src.traverse( (m) => {
     if( m.userData && (m.userData.src || m.userData.href) ) return ; // prevent infinite recursion
@@ -152,6 +153,6 @@ xrf.frag.src.type = {}

 xrf.frag.src.type['unknown'] = function( url, opts ){
   return new Promise( (resolve,reject) => {
-    reject(`${url} mimetype not found or supported (yet)`)
+    reject(`${url} mimetype '${opts.mimetype}' not found or supported (yet)`)
   })
 }
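Handlers are looked up by mimetype in xrf.frag.src.type and return a Promise, so third-party loaders can slot in; the mimetype now also travels along in opts. A hypothetical registration (the 'model/obj' key and loader body are illustrative, not part of this commit):

    // sketch: registering a src-handler for a hypothetical mimetype
    xrf.frag.src.type['model/obj'] = function( url, opts ){
      return new Promise( (resolve, reject) => {
        // load the asset here, then resolve with something model-like,
        // or reject(`${url} could not be loaded`)
      })
    }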
@@ -7,18 +7,59 @@
 xrf.frag.src.type['image/png'] = function(url,opts){
   let {mesh} = opts
   let restrictTo3DBoundingBox = mesh.geometry
-  const texture = new THREE.TextureLoader().load( url );
-  texture.colorSpace = THREE.SRGBColorSpace;
-
-  //const geometry = new THREE.BoxGeometry();
-  const material = new THREE.MeshBasicMaterial({
-    map: texture,
-    transparent: url.match(/(png|gif)/) ? true : false,
+  let renderEquirect = (texture) => {
+    console.dir(texture)
+    texture.mapping = THREE.EquirectangularReflectionMapping
+    texture.needsUpdate = true
+    texture.wrapS = THREE.RepeatWrapping;
+    texture.wrapT = THREE.RepeatWrapping;
+    texture.magFilter = THREE.NearestFilter
+    texture.minFilter = THREE.NearestFilter
+
+    // poor man's equi-portal
+    mesh.material = new THREE.ShaderMaterial( {
       side: THREE.DoubleSide,
-    color: 0xFFFFFF,
-    opacity:1
+      uniforms: {
+        pano: { value: texture },
+        selected: { value: false },
+      },
+      vertexShader: `
+        vec3 portalPosition;
+        varying vec3 vWorldPosition;
+        varying float vDistanceToCenter;
+        varying float vDistance;
+        void main() {
+          vDistanceToCenter = clamp(length(position - vec3(0.0, 0.0, 0.0)), 0.0, 1.0);
+          portalPosition = (modelMatrix * vec4(0.0, 0.0, 0.0, 1.0)).xyz;
+          vDistance = length(portalPosition - cameraPosition);
+          vWorldPosition = (modelMatrix * vec4(position, 1.0)).xyz;
+          gl_Position = projectionMatrix * modelViewMatrix * vec4(position, 1.0);
+        }
+      `,
+      fragmentShader: `
+        #define RECIPROCAL_PI2 0.15915494
+        uniform sampler2D pano;
+        uniform bool selected;
+        varying float vDistanceToCenter;
+        varying float vDistance;
+        varying vec3 vWorldPosition;
+        void main() {
+          vec3 direction = normalize(vWorldPosition - cameraPosition );
+          vec2 sampleUV;
+          sampleUV.y = clamp(direction.y * 0.5 + 0.5, 0.0, 1.0);
+          sampleUV.x = atan(direction.z, -direction.x) * -RECIPROCAL_PI2;
+          sampleUV.x += 0.33; // adjust focus to AFRAME's a-scene.components.screenshot.capture()
+          vec4 color = texture2D(pano, sampleUV);
+          vec4 selected_color = selected ? color*vec4(1.5) : color;
+          gl_FragColor = selected_color;
+        }
+      `,
+    });
+    mesh.material.needsUpdate = true
+  }
+
+  let renderImage = (texture) => {
+    // stretch image by pinning uv-coordinates to corners
+    if( mesh.geometry ){
+      if( mesh.geometry.attributes.uv ){ // buffergeometries
@@ -35,8 +76,30 @@ xrf.frag.src.type['image/png'] = function(url,opts){
       //}
       }
     }
-  mesh.material = material
+    //const geometry = new THREE.BoxGeometry();
+    mesh.material = new THREE.MeshBasicMaterial({
+      map: texture,
+      transparent: url.match(/(png|gif)/) ? true : false,
+      side: THREE.DoubleSide,
+      color: 0xFFFFFF,
+      opacity:1
+    });
+  }
-}
-xrf.frag.src.type['image/gif'] = xrf.frag.src.type['image/png']
-xrf.frag.src.type['image/jpg'] = xrf.frag.src.type['image/png']
+
+  let onLoad = (texture) => {
+    texture.colorSpace = THREE.SRGBColorSpace;
+    // detect equirectangular image
+    if( texture && texture.source.data.height == texture.source.data.width/2 ){
+      renderEquirect(texture)
+    }else{
+      renderImage(texture)
+    }
+  }
+
+  new THREE.TextureLoader().load( url, onLoad, null, console.error );
+}
+
+xrf.frag.src.type['image/gif'] = xrf.frag.src.type['image/png']
+xrf.frag.src.type['image/jpeg'] = xrf.frag.src.type['image/png']
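The equirect check moved into the loader's onLoad callback because the image dimensions are only known after the texture has loaded; the synchronous return value of TextureLoader.load() has no pixel data yet. A sketch of that constraint:

    // sketch: why the 2:1 check must wait for onLoad
    new THREE.TextureLoader().load( url, (texture) => {
      const equirect = texture.source.data.height == texture.source.data.width / 2
      console.log( equirect ? 'render as equi-portal' : 'render as flat image' )
    })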
@@ -4,12 +4,7 @@ xrf.frag.t = function(v, opts){
   if( !model.animations || model.animations[0] == undefined ) return console.warn('no animation in scene')

   model.mixer.t = v
-  let duration = model.animations[0].duration
-  let frames = model.animations[0].tracks[0].times.length
   let mixer = model.mixer
-  mixer.loop = mixer.loop || {frameStart:0,frameStop:99999999,speed: 1}
-  mixer.loop.fps = frames / duration
-
   xrf.frag.t.calculateLoop( v, mixer.loop, mixer.loop.fps )

   // update speed
@@ -22,7 +17,9 @@ xrf.frag.t = function(v, opts){
     // (re)trigger audio
   }

+  //if( v.x != 0 ) xrf.emit('play',v) *TODO* touchend/usergesture
   // play animations
+  mixer.play( v.x == 1 )

   if( v.y > 0 || v.z > 0 ) updateTime( mixer.loop.timeStart )

   // update loop when needed
@@ -52,3 +49,62 @@ xrf.frag.t.calculateLoop = (t,obj,fps) => {
   obj.timeStart = obj.frameStart / (fps * obj.speedAbs)
   obj.timeStop = obj.frameStop / (fps * obj.speedAbs)
 }
+
+if( document.location.hash.match(/t=/) ){
+  let url = document.location.href
+  let playAfterUserGesture = () => {
+    xrf.hashbus.pub(url) // re-post t fragment on the hashbus again
+    window.removeEventListener('click',playAfterUserGesture)
+    window.removeEventListener('touchstart',playAfterUserGesture)
+  }
+  window.addEventListener('click', playAfterUserGesture )
+  window.addEventListener('touchstart', playAfterUserGesture )
+}
+
+xrf.addEventListener('parseModel', (opts) => {
+  let {model,file,url} = opts
+  // add animations
+  model.clock = new xrf.THREE.Clock();
+  let mixer = new xrf.THREE.AnimationMixer(model.scene)
+  mixer.play = (play) => {
+    mixer.isPlaying = play
+    model.animations.map( (anim) => {
+      anim.action = mixer.clipAction( anim )
+      anim.action.setLoop(THREE.LoopOnce,0)
+      if( play === false) anim.action.stop()
+      else anim.action.play()
+    })
+    xrf.emit( play === false ? 'stop' : 'play',{play})
+  }
+  mixer.stop = () => {
+    mixer.play(false)
+  }
+  mixer.duration = model.animations.length ? model.animations[0].duration : 1
+  mixer.frames = model.animations.length ? model.animations[0].tracks[0].times.length : 1
+  mixer.loop = mixer.loop || {frameStart:0,frameStop:99999999,speed: 1}
+  mixer.loop.fps = mixer.frames / mixer.duration
+  model.mixer = mixer
+})
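The mixer gains explicit play/stop plumbing here, so the t fragment (and the user-gesture replay above) can drive all clips at once. A sketch of driving it by hand, assuming a model has already been parsed:

    // sketch: the patched mixer can be driven directly
    const mixer = xrf.model.mixer
    mixer.play(true)   // starts every clipAction and emits the 'play' event
    mixer.stop()       // shorthand for mixer.play(false), emits 'stop'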
+
+xrf.addEventListener('render', (opts) => {
+  let model = xrf.model
+  let {time} = opts
+  if( !model || !model.clock ) return
+  model.mixer.update( time )
+
+  // update camera if possible
+  if( model.cameras.length && model.mixer.isPlaying ){
+
+    let cam = xrf.camera.getCam()
+    // cam.fov = model.cameras[0].fov (why is blender not exporting radians?)
+    cam.far = model.cameras[0].far
+    cam.near = model.cameras[0].near
+
+    let rig = xrf.camera
+    rig.position.copy( model.cameras[0].position )
+    rig.position.y -= rig.offsetY // VR/AR compensate camera rig
+    //rig.rotation.copy( model.cameras[0].rotation )
+
+    rig.updateProjectionMatrix()
+  }
+})