media fragments work now

This commit is contained in:
Leon van Kammen 2024-02-13 17:10:24 +00:00
parent 1d4cf30654
commit e9be997182
19 changed files with 188 additions and 65 deletions

Binary file not shown.

Binary file not shown.

View file

@ -0,0 +1,18 @@
// Fragment shader (WebGL1 / GLSL ES 1.0, uses gl_FragColor):
// takes the interpolated per-vertex color and makes the red and blue
// channels oscillate along the x axis over time, forcing green to zero.
precision mediump float;
precision mediump int;
// animation time uniform, supplied each frame by the host application
uniform float time;
// interpolated vertex position, forwarded by the vertex shader
varying vec3 vPosition;
// interpolated per-vertex color, forwarded by the vertex shader
varying vec4 vColor;
void main() {
vec4 color = vec4( vColor );
// phase-shifted sin/cos waves produce a moving red/blue gradient
color.r += sin( vPosition.x * 10.0 + time ) * 0.5;
color.g = 0.0;
color.b += cos( vPosition.x * 10.0 + time ) * 0.5;
gl_FragColor = color;
}

View file

@ -0,0 +1,20 @@
// Vertex shader: standard model-view-projection transform; forwards the
// object-space position and vertex color to the fragment shader.
// The matrices and attributes are declared explicitly here (as is done
// with THREE.RawShaderMaterial) instead of being injected by three.js.
precision mediump float;
precision mediump int;
uniform mat4 modelViewMatrix; // optional
uniform mat4 projectionMatrix; // optional
// per-vertex inputs supplied by the geometry buffers
attribute vec3 position;
attribute vec4 color;
// outputs interpolated across the triangle for the fragment shader
varying vec3 vPosition;
varying vec4 vColor;
void main() {
vPosition = position;
vColor = color;
gl_Position = projectionMatrix * modelViewMatrix * vec4( position, 1.0 );
}

View file

@ -37,7 +37,7 @@ window.AFRAME.registerComponent('xrf', {
}
})
aScene.renderer.toneMapping = THREE.ACESFilmicToneMapping;
aScene.renderer.toneMappingExposure = 1.5;
aScene.renderer.toneMappingExposure = 1.25;
if( !XRF.camera ) throw 'xrfragment: no camera detected, please declare <a-entity camera..> ABOVE entities with xrf-attributes'
// this is just for convenience (not part of spec): hide/show stuff based on VR/AR tags in 3D model
@ -76,6 +76,12 @@ window.AFRAME.registerComponent('xrf', {
let com = blinkControls.components['blink-controls']
if( com ) com.update({collisionEntities:true})
else console.warn("xrfragments: blink-controls is not mounted, please run manually: $('[blink-controls]).components['blink-controls'].update({collisionEntities:true})")
blinkControls.addEventListener('teleported', (e) => {
if( e.detail.newPosition.z < 0){
console.warn('teleported to negative Z-value: https://github.com/jure/aframe-blink-controls/issues/30')
}
})
}
// give headset users way to debug without a cumbersome usb-tapdance

View file

@ -11,6 +11,7 @@ xrf.init = function(opts){
console.log("add #debug=[0-9] to URL to see XR Fragment debuglog")
xrf.debug = parseInt( ( document.location.hash.match(/debug=([0-9])/) || [0,'0'] )[1] )
}
if( xrf.debug != undefined ) xrf.stats()
xrf.Parser.debug = xrf.debug
xrf.detectCameraRig(opts)
@ -51,6 +52,16 @@ xrf.roundrobin = (frag, store) => {
return store.rr[label].index = 0
}
xrf.stats = () => {
  // Mount four stats.js panels (0:fps, 1:ms, 2:mb, 3:custom), stacked
  // vertically, so headset users can inspect performance without devtools.
  // Bookmarklet adapted from https://github.com/zlgenuine/threejs_stats
  // FIX: the original fetched stats.min.js from rawgit.com, which was shut
  // down in 2019 — the script could never load. jsDelivr serves the same
  // file straight from the GitHub repo.
  (function(){
    for( let i = 0; i < 4; i++ ){
      const script = document.createElement('script')
      script.onload = function(){
        const stats = new Stats()
        stats.showPanel( i )
        stats.dom.style.marginTop = `${i*48}px` // stack panels below each other
        document.body.appendChild(stats.dom)
        // refresh the panel every animation frame
        requestAnimationFrame(function loop(){ stats.update(); requestAnimationFrame(loop) })
      }
      script.src = '//cdn.jsdelivr.net/gh/mrdoob/stats.js/build/stats.min.js'
      document.head.appendChild(script)
    }
  })()
}
xrf.hasTag = (tag,tags) => String(tags).match( new RegExp(`(^| )${tag}( |$)`,`g`) )
// map library functions to xrf

View file

@ -7,7 +7,7 @@ xrf.init = ((init) => function(opts){
opts.scene.add(scene)
opts.scene = scene
init(opts)
if( opts.loaders ) Object.values(opts.loaders).map( xrf.patchLoader )
//if( opts.loaders ) Object.values(opts.loaders).map( xrf.patchLoader )
xrf.patchRenderer(opts)
xrf.navigator.init()
@ -32,21 +32,6 @@ xrf.patchRenderer = function(opts){
}
// Monkeypatch a THREE loader class so every model loaded through it is also
// fed to the XR Fragment parser after the caller's own onLoad callback ran.
xrf.patchLoader = function(loader){
// guard: aliased loader entries can share a prototype — patch only once
if( loader.prototype.load.xrf_patched ) return // prevent patching aliased loaders twice
// wrap the original load(); the IIFE captures the unpatched implementation
loader.prototype.load = ((load) => function(url, onLoad, onProgress, onError){
load.call( this,
url,
(model) => {
onLoad(model);
// run XRF metadata parsing after the caller saw the model
xrf.parseModel(model,url)
},
onProgress,
onError)
})(loader.prototype.load)
// mark the wrapper so a second patchLoader() call becomes a no-op
loader.prototype.load.xrf_patched = true
}
xrf.getFile = (url) => url.split("/").pop().replace(/#.*/,'')
// parseModel event is essential for src.js to hook into embedded loaded models

View file

@ -53,7 +53,7 @@ xrf.navigator.to = (url,flags,loader,data) => {
if( xrf.model ) xrf.navigator.pushState( `${dir}${file}`, hash )
xrf.model = model
if( !model.isXRF ) xrf.emit('parseModel',{model,url,file}) // loader.load() does this automatically (but not loader.parse)
if( !model.isXRF ) xrf.parseModel(model,url) // this marks the model as an XRF model
if(xrf.debug ) model.animations.map( (a) => console.log("anim: "+a.name) )
@ -96,7 +96,9 @@ xrf.navigator.init = () => {
window.addEventListener('popstate', function (event){
if( !xrf.navigator.updateHash.active ){ // ignore programmatic hash updates (causes infinite recursion)
xrf.navigator.to( document.location.search.substr(1) + document.location.hash )
if( !document.location.hash.match(/pos=/) ){
history.back() // go back until we find a position
}else xrf.navigator.to( document.location.search.substr(1) + document.location.hash )
}
})
@ -153,6 +155,9 @@ xrf.navigator.updateHash = (hash,opts) => {
// Push a browser-history entry for the given file + hash, then notify
// listeners via the 'pushState' event. Skipped when the page is already
// showing its default document.location.search state.
// NOTE(review): history.forward() when the hash lacks pos= mirrors the
// history.back() in the popstate handler — TODO confirm the pair cannot
// bounce back and forth when no entry with a pos= exists.
xrf.navigator.pushState = (file,hash) => {
if( file == document.location.search.substr(1) ) return // page is in its default state
if( !hash.match(/pos=/) ){
history.forward() // go forward until we find a position
}
window.history.pushState({},`${file}#${hash}`, document.location.pathname + `?${file}#${hash}` )
xrf.emit('pushState', {file, hash} )
}

View file

@ -1,7 +1,7 @@
xrf.getCollisionMeshes = () => {
let meshes = []
xrf.scene.traverse( (n) => {
if( !n.userData.href && !n.userData.src && xrf.hasNoMaterial(n) ){
if( n.type == 'Mesh' && !n.userData.href && !n.userData.src && xrf.hasNoMaterial(n) ){
meshes.push(n)
}
})

View file

@ -4,8 +4,12 @@
xrf.addEventListener('parseModel', (opts) => {
let {model,url,file} = opts
if( model.isSRC || opts.isSRC ) return // ignore SRC models
xrf.URI.vars = new Proxy({},{
set(me,k,v){ me[k] = v },
set(me,k,v){
if( k.match(/^(name)$/) ) return
me[k] = v
},
get(me,k ){
if( k == '__object' ){
let obj = {}
@ -33,21 +37,23 @@ xrf.addEventListener('dynamicKeyValue', (opts) => {
let {id,match,v} = opts
if( !v.is( xrf.XRF.CUSTOMFRAG) ) return // only process custom frags from here
if( v.string.match(/(<|>)/) ) return // ignore filter values
// check if fragment is an objectname
if( match.length > 0 ){
xrf.frag.dynamic.material(v,opts)
}else{
if( !xrf.URI.vars[ v.string ] ) return console.warn(`'${v.string}' metadata not found in scene`) // only assign to known values
xrf.URI.vars[ id ] = xrf.URI.vars[ v.string ] // update var
if( xrf.debug ) console.log(`URI.vars[${id}]='${v.string}'`)
xrf.scene.traverse( (n) => { // reflect new changes
if( n.userData && n.userData.src && n.userData.srcTemplate && n.userData.srcTemplate.match(`{${id}}`) ){
let srcNewFragments = xrf.frag.src.expandURI( n ).replace(/.*#/,'')
console.log(`URI.vars[${id}] => updating ${n.name} => ${srcNewFragments}`)
let frag = xrf.hashbus.pub( srcNewFragments, n )
}
})
}
if( !xrf.URI.vars[ v.string ] ) return console.warn(`'${v.string}' metadata not found in scene`) // only assign to known values
xrf.URI.vars[ id ] = xrf.URI.vars[ v.string ] // update var
if( xrf.debug ) console.log(`URI.vars[${id}]='${v.string}'`)
xrf.scene.traverse( (n) => { // reflect new changes
if( n.userData && n.userData.src && n.userData.srcTemplate && n.userData.srcTemplate.match(`{${id}}`) ){
let srcNewFragments = xrf.frag.src.expandURI( n ).replace(/.*#/,'')
console.log(`URI.vars[${id}] => updating ${n.name} => ${srcNewFragments}`)
let frag = xrf.hashbus.pub( srcNewFragments, n )
}
})
})

View file

@ -1,12 +1,15 @@
xrf.addEventListener('dynamicKey', (opts) => {
const doFilter = (opts) => {
let {scene,id,match,v} = opts
if( v.filter ){
let frags = {}
frags[ v.filter.key ] = v
xrf.filter.scene({frag:frags,scene})
}
})
}
xrf.addEventListener('dynamicKey', doFilter )
xrf.addEventListener('dynamicKeyValue', doFilter )
// spec: https://xrfragment.org/#filters
xrf.filter = function(query, cb){

View file

@ -1,21 +1,26 @@
xrf.frag.dynamic.material = function(v,opts){
let {match} = opts
setMaterial = (mesh,material,reset) => {
if( !mesh.materialOriginal ) mesh.materialOriginal = mesh.material
if( reset ) mesh.material = mesh.materialOriginal
else mesh.material = material
}
// update material in case of <tag_or_object>[*]=<materialname>
let material
xrf.scene.traverse( (n) => n.material && (n.material.name == v.string) && (material = n.material) )
if( !material && !v.reset ) return // nothing to do
if( material ) xrf.frag.dynamic.material.setMatch(match,material,v)
xrf.frag.dynamic.material.setMatch(match,material,v)
}
// Swap a mesh's material (or restore the original when `reset` is set),
// keeping the current visibility flag intact across the swap.
xrf.frag.dynamic.material.setMaterial = function(mesh,material,reset){
  // remember the very first material so a later reset can restore it
  if( !mesh.materialOriginal ) mesh.materialOriginal = mesh.material
  const wasVisible = mesh.material.visible // remember visibility
  mesh.material = reset ? mesh.materialOriginal : material
  mesh.material.visible = wasVisible
}
xrf.frag.dynamic.material.setMatch = function(match,material,v){
const setMaterial = xrf.frag.dynamic.material.setMaterial
match.map( (m) => {
for( let i in m.types ){
let type = m.types[i]

View file

@ -56,7 +56,7 @@ xrf.frag.href = function(v, opts){
let selected = mesh.userData.XRF.href.selected = (state) => () => {
if( mesh.selected == state ) return // nothing changed
console.log("state="+(selected?'selected':'unselected'))
xrf.interactive.objects.map( (o) => {
let newState = o.name == mesh.name ? state : false
if( o.material ){
@ -93,6 +93,47 @@ xrf.frag.href = function(v, opts){
}, 0, mesh )
}
xrf.addEventListener('audioInited', function(opts){
  // Wire up UI sound effects for href interactions once the WebAudio
  // listener exists. Sample URLs can be overridden via opts.audio.<action>.
  let {THREE,listener} = opts
  opts.audio = opts.audio || {}
  opts.audio.click    = opts.audio.click    || '/example/assets/audio/click.wav'
  opts.audio.hover    = opts.audio.hover    || '/example/assets/audio/hover.wav'
  opts.audio.teleport = opts.audio.teleport || '/example/assets/audio/teleport.wav'
  let audio = xrf.frag.href.audio = {}
  // FIX: `actions` was an implicit global (missing declaration)
  const actions = ['click','hover','teleport']
  actions.map( (action) => {
    const audioLoader = new THREE.AudioLoader();
    audio[action] = new THREE.Audio( xrf.camera.listener )
    audioLoader.load( opts.audio[action], function( buffer ) {
      audio[action].setBuffer( buffer );
    })
  });
  // play hover/click samples on href interactions; stop() first so a rapid
  // re-trigger restarts the sample instead of being ignored
  xrf.addEventListener('href', (opts) => {
    let v = opts.xrf
    if( opts.selected ){
      xrf.frag.href.audio.hover.stop()
      xrf.frag.href.audio.hover.play()
      return
    }
    if( opts.click ){
      xrf.frag.href.audio.click.stop()
      xrf.frag.href.audio.click.play()
      return
    }
  })
  // teleport sound replaces any still-playing click when navigation starts
  xrf.addEventListener('navigateLoading', (e) => {
    xrf.frag.href.audio.click.stop()
    xrf.frag.href.audio.teleport.stop()
    xrf.frag.href.audio.teleport.play()
  })
})
/**
* > above solutions were abducted from [[this|https://i.imgur.com/E3En0gJ.png]] and [[this|https://i.imgur.com/lpnTz3A.png]] survey result
*

View file

@ -13,7 +13,7 @@ xrf.frag.src = function(v, opts){
if(xrf.debug) console.log(`src.js: instancing ${opts.isLocal?'local':'remote'} object ${url}`)
if( opts.isLocal ){
xrf.frag.src.localSRC(url,srcFrag,opts) // local
xrf.frag.src.localSRC(url,srcFrag,opts) // local
}else xrf.frag.src.externalSRC(url,srcFrag,opts) // external file
xrf.hashbus.pub( url.replace(/.*#/,''), mesh) // eval src-url fragments
@ -32,7 +32,7 @@ xrf.frag.src.addModel = (model,url,frag,opts) => {
if( mesh.material && mesh.userData.src ) mesh.material.visible = false // hide placeholder object
//enableSourcePortation(scene)
if( xrf.frag.src.renderAsPortal(mesh) ){
if( opts.isPortal ){
// only add remote objects, because
// local scene-objects are already added to scene
xrf.portalNonEuclidian({...opts,model,scene:model.scene})
@ -91,15 +91,16 @@ xrf.frag.src.externalSRC = (url,frag,opts) => {
xrf.frag.src.localSRC = (url,frag,opts) => {
let {model,mesh,scene} = opts
setTimeout( () => {
//setTimeout( (mesh,scene) => {
if( mesh.material ) mesh.material = mesh.material.clone() // clone, so we can individually highlight meshes
let _model = {
animations: model.animations,
scene: scene.clone() // *TODO* opts.isPortal ? scene : scene.clone()
scene: scene.clone()
// scene: opts.isPortal ? scene : scene.clone()
}
_model.scenes = [_model.scene]
xrf.frag.src.addModel(_model,url,frag, opts) // current file
},500 )
//},1000,mesh,scene )
}
// scale embedded XR fragments https://xrfragment.org/#scaling%20of%20instanced%20objects
@ -136,7 +137,7 @@ xrf.frag.src.scale = function(scene, opts, url){
xrf.frag.src.filterScene = (scene,opts) => {
let { mesh, model, camera, renderer, THREE, hashbus, frag} = opts
scene = xrf.filter.scene({scene,frag,reparent:true}) // *TODO* ,copyScene: opts.isPortal})
scene = xrf.filter.scene({scene,frag,reparent:true,copyScene: opts.isPortal})
if( !opts.isLocal ){
scene.traverse( (m) => {

View file

@ -11,13 +11,7 @@ let loadAudio = (mimetype) => function(url,opts){
let {urlObj,dir,file,hash,ext} = xrf.parseUrl(url)
let frag = xrf.URI.parse( url )
/* WebAudio: setup context via THREEjs */
if( !camera.listener ){
camera.listener = new THREE.AudioListener();
// *FIXME* camera vs camerarig conflict
(camera.getCam ? camera.getCam() : camera).add( camera.listener );
}
xrf.init.audio()
let isPositionalAudio = !(mesh.position.x == 0 && mesh.position.y == 0 && mesh.position.z == 0)
const audioLoader = new THREE.AudioLoader();
let sound = isPositionalAudio ? new THREE.PositionalAudio( camera.listener)
@ -93,11 +87,31 @@ let loadAudio = (mimetype) => function(url,opts){
})
}
// Lazily create the global THREE.AudioListener on the (rig) camera and
// announce it via the 'audioInited' event. Safe to call repeatedly: the
// listener (and the event) only fire on the first call.
xrf.init.audio = (opts) => {
let camera = xrf.camera
/* WebAudio: setup context via THREEjs */
if( !camera.listener ){
camera.listener = new THREE.AudioListener();
// *FIXME* camera vs camerarig conflict
(camera.getCam ? camera.getCam() : camera).add( camera.listener );
// let dependants (e.g. href click/hover sounds) hook onto the listener
xrf.emit('audioInited',{listener:camera.listener, ...opts})
}
}
xrf.addEventListener('init', xrf.init.audio )
// stop playing audio when loading another scene
xrf.addEventListener('reset', () => {
xrf.scene.traverse( (n) => n.audio && (n.audio.playXRF({x:0,y:0})) && (n.audio.remove()) )
xrf.scene.traverse( (n) => {
if( n.media && n.media.audio ){
if( n.media.audio.stop ) n.media.audio.stop()
if( n.media.audio.remove ) n.media.audio.remove()
}
})
})
let audioMimeTypes = [
'audio/x-wav',
'audio/wav',

View file

@ -33,6 +33,14 @@ xrf.frag.src.type['image/png'] = function(url,opts){
mesh.material.map = texture
mesh.material.needsUpdate = true
mesh.needsUpdate = true
//// *TODO* update clones in portals or dont clone scene of portals..
//xrf.scene.traverse( (n) => {
// if( n.userData.src == mesh.userData.src && mesh.uuid != n.uuid ){
// n.material = mesh.material
// n.material.needsUpdate = true
// }
//})
}
let onLoad = (texture) => {

View file

@ -3,7 +3,6 @@
xrf.portalNonEuclidian = function(opts){
let { frag, mesh, model, camera, scene, renderer} = opts
mesh.portal = {
pos: mesh.position.clone(),
posWorld: new xrf.THREE.Vector3(),

View file

@ -41,7 +41,7 @@ let loadVideo = (mimetype) => function(url,opts){
}
}
if( mediafragment == 's' ){
video.playbackRate = Math.abs( video.speed ) // html5 video does not support reverseplay :/
video.playbackRate = Math.abs( v.x ) // html5 video does not support reverseplay :/
}
if( mediafragment == 'loop' ){
video.looping = true
@ -51,7 +51,7 @@ let loadVideo = (mimetype) => function(url,opts){
// stop playing audio when loading another scene
xrf.addEventListener('reset', () => {
xrf.scene.traverse( (n) => n.video && (n.video.playXRF({x:0,y:0})) && (n.video.remove()) )
xrf.scene.traverse( (n) => n.media && n.media.video && (n.media.video.pause()) && (n.media.video.remove()) )
})
let videoMimeTypes = [

View file

@ -1,4 +1,4 @@
// this is the global #t mediafragment handler (which affects the 3D animation)
// this is the global #t mediafragment handler (which affects the 3D animation)
xrf.frag.t = function(v, opts){
let { frag, mesh, model, camera, scene, renderer, THREE} = opts
@ -15,7 +15,7 @@ xrf.frag.t = function(v, opts){
console.warn('no animations found in model')
return xrf.emit( v.x == 0 ? 'stop' : 'play',{isPlaying: v.x != 0 })
}
xrf.mixers.map ( (mixer) => {
mixer.t = v
@ -91,13 +91,12 @@ xrf.addEventListener('parseModel', (opts) => {
action.setLoop( xrf.THREE.LoopOnce, )
action.timeScale = mixer.timeScale
action.enabled = true
if( t && t.x === 0 ) action.play()
if( t && t.x != undefined ) action.play()
}
})
mixer.setTime(mixer.loop.timeStart)
mixer.time = Math.abs( mixer.loop.timeStart )
mixer.update(0)
mixer.checkZombies( model.animations)
}
// monkeypatch: update loop when needed
@ -119,6 +118,8 @@ xrf.addEventListener('parseModel', (opts) => {
mixer.update.patched = true
}
mixer.checkZombies( model.animations)
// calculate total duration/frame based on longest animation
mixer.duration = 0
if( model.animations.length ){