both aframe and threejs events now bubble up

Leon van Kammen 2024-02-12 17:21:40 +00:00
parent b321096eb9
commit a47f8b60ed
9 changed files with 82 additions and 60 deletions

View file

@@ -84,6 +84,9 @@ window.AFRAME.registerComponent('xrf', {
}
})
xrf.addEventListener('navigateError', (opts) => {
AFRAME.fade.out()
})
xrf.addEventListener('navigateLoading', (opts) => {
let p = opts.promise()
@@ -112,16 +115,19 @@ window.AFRAME.registerComponent('xrf', {
// raycaster can find & execute it
AFRAME.XRF.clickableMeshToEntity = (opts) => {
let {mesh,clickHandler} = opts;
let el = document.createElement("a-entity")
el.setAttribute("xrf-get",mesh.name ) // turn into AFRAME entity
el.setAttribute("class","ray") // expose to raycaster
el.setAttribute("pressable", '') // detect hand-controller click
// respond to cursor via laser-controls (https://aframe.io/docs/1.4.0/components/laser-controls.html)
el.addEventListener("click", clickHandler )
el.addEventListener("mouseenter", mesh.userData.XRF.href.selected(true) )
el.addEventListener("mouseleave", mesh.userData.XRF.href.selected(false) )
el.addEventListener("pressedstarted", clickHandler )
$('a-scene').appendChild(el)
let createEl = function(c){
let el = document.createElement("a-entity")
el.setAttribute("xrf-get",c.name ) // turn into AFRAME entity
el.setAttribute("class","ray") // expose to raycaster
el.setAttribute("pressable", '') // detect hand-controller click
// respond to cursor via laser-controls (https://aframe.io/docs/1.4.0/components/laser-controls.html)
el.addEventListener("click", clickHandler )
el.addEventListener("mouseenter", mesh.userData.XRF.href.selected(true) )
el.addEventListener("mouseleave", mesh.userData.XRF.href.selected(false) )
el.addEventListener("pressedstarted", clickHandler )
$('a-scene').appendChild(el)
}
createEl(mesh)
}
xrf.addEventListener('interactionReady', AFRAME.XRF.clickableMeshToEntity )
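The refactor above wraps the proxy-entity creation in createEl(), so every clickable mesh gets its own a-entity carrying the .ray class. As a purely illustrative sketch (not part of this commit), such entities only receive click/mouseenter events once a cursor or laser-controls raycaster is configured to test that class:

// illustrative only: a mouse cursor whose raycaster targets the ".ray"
// proxy entities created by clickableMeshToEntity above
let cursor = document.createElement('a-entity')
cursor.setAttribute('cursor', 'rayOrigin: mouse')   // click via mouse instead of gaze
cursor.setAttribute('raycaster', 'objects: .ray')   // only intersect the proxy entities
document.querySelector('a-scene').appendChild(cursor)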

View file

@@ -75,10 +75,17 @@ xrf.navigator.to = (url,flags,loader,data) => {
xrf.emit('navigateLoaded',{url,model})
resolve(model)
}
if( data ){ // file upload
loader.parse(data, "", onLoad )
}else loader.load(url, onLoad )
}else{
try{
loader.load(url, onLoad )
}catch(e){
console.error(e)
xrf.emit('navigateError',{url})
}
}
})
})
})
@@ -116,6 +123,7 @@ xrf.navigator.setupNavigateFallbacks = () => {
xrf.addEventListener('navigate', (opts) => {
let {url} = opts
let {urlObj,dir,file,hash,ext} = xrf.parseUrl(url)
// handle http links
if( url.match(/^http/) && !xrf.loaders[ext] ){
let inIframe
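With the try/catch above, xrf.navigator.to() now emits navigateError instead of failing silently when a loader throws. A minimal sketch of a listener on the receiving end (the logging and wording are assumptions; only the event name and opts.url come from this commit):

// hypothetical consumer of the new event
xrf.addEventListener('navigateError', (opts) => {
  console.error(`navigation to ${opts.url} failed`)
  // the AFRAME glue above reacts to the same event by calling AFRAME.fade.out()
})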

View file

@@ -30,14 +30,7 @@ xrf.interactiveGroup = function(THREE,renderer,camera){
const raycaster = new Raycaster();
const tempMatrix = new Matrix4();
let dispatchEvent = (object,_event) => {
object.dispatchEvent(_event)
// bubble up
object.traverseAncestors( (n) => n.userData && n.userData.href && n.dispatchEvent(_event) )
}
// Pointer Events
const element = renderer.domElement;
function onPointerEvent( event ) {
@@ -62,12 +55,12 @@ xrf.interactiveGroup = function(THREE,renderer,camera){
_event.type = event.type;
_event.data.set( uv.x, 1 - uv.y );
dispatchEvent( object, _event );
object.dispatchEvent( _event );
}else{
if( object.selected ) {
_event.type = 'mouseleave'
dispatchEvent( object, _event)
object.dispatchEvent( _event)
}
}
@@ -84,7 +77,7 @@ xrf.interactiveGroup = function(THREE,renderer,camera){
// WebXR Controller Events
// TODO: Dispatch pointerevents too
const events = {
const eventsMapper = {
'move': 'mousemove',
'select': 'click',
'selectstart': 'mousedown',
@@ -104,20 +97,22 @@ xrf.interactiveGroup = function(THREE,renderer,camera){
if ( intersections.length > 0 ) {
console.log(object.name)
const intersection = intersections[ 0 ];
object = intersection.object;
const uv = intersection.uv;
_event.type = events[ event.type ];
_event.type = eventsMapper[ event.type ];
_event.data.set( uv.x, 1 - uv.y );
dispatchEvent( object, _event );
object.dispatchEvent( _event );
}else{
if( object.selected ) {
_event.type = 'mouseleave'
dispatchEvent( object, _event)
object.dispatchEvent(_event)
}
}
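The dispatchEvent() helper that walked up the ancestor chain has been removed from interactiveGroup: events are dispatched on the intersected object only, and bubbling now happens where it is needed (the href click handler further down). For reference, a standalone sketch of that bubbling pattern in plain three.js (the helper name is illustrative):

// bubble an event from a mesh to every ancestor carrying href metadata,
// mirroring what the href fragment handler now does itself
let bubbleUp = (object, type) => {
  object.dispatchEvent({ type, data: {} })
  object.traverseAncestors((n) => {
    if (n.userData && n.userData.href) n.dispatchEvent({ type, data: {} })
  })
}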

View file

@@ -38,18 +38,15 @@ xrf.addEventListener('dynamicKeyValue', (opts) => {
if( match.length > 0 ){
xrf.frag.dynamic.material(v,opts)
}else{
if( !xrf.URI.vars[ v.string ] ) return // only assign to known values
if( !xrf.URI.vars[ v.string ] ) return console.warn(`'${v.string}' metadata not found in scene`) // only assign to known values
xrf.URI.vars[ id ] = xrf.URI.vars[ v.string ] // update var
if( xrf.debug ) console.log(`URI.vars[${id}]='${v.string}'`)
xrf.scene.traverse( (n) => { // reflect new changes
if( n.userData && n.userData.src && n.userData.srcTemplate ){
let srcOldFragments = n.userData.src.replace(/.*#/,'')
if( n.userData && n.userData.src && n.userData.srcTemplate && n.userData.srcTemplate.match(`{${id}}`) ){
let srcNewFragments = xrf.frag.src.expandURI( n ).replace(/.*#/,'')
if( srcOldFragments != srcNewFragments ){
console.log(`URI.vars[${id}] => updating ${n.name}`)
let frag = xrf.hashbus.pub( srcNewFragments, n )
}
console.log(`URI.vars[${id}] => updating ${n.name} => ${srcNewFragments}`)
let frag = xrf.hashbus.pub( srcNewFragments, n )
}
})
}
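The dynamicKeyValue handler now warns when a value is not a known URI variable and only re-publishes nodes whose srcTemplate actually references the changed id. To make the templating idea concrete, here is a simplified, hypothetical stand-in for what xrf.frag.src.expandURI() does with such templates (the real function lives elsewhere in the codebase):

// hypothetical helper: substitute {var} placeholders with values from URI vars
let expandTemplate = (template, vars) =>
  template.replace(/\{([^}]+)\}/g, (m, key) =>
    vars[key] !== undefined ? String(vars[key]) : m   // leave unknown vars untouched
  )

// e.g. expandTemplate('painting.glb#t={start}', { start: '10,20' })
//      => 'painting.glb#t=10,20'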

View file

@@ -35,6 +35,9 @@ xrf.frag.href = function(v, opts){
let click = mesh.userData.XRF.href.exec = (e) => {
// bubble up!
mesh.traverseAncestors( (n) => n.userData && n.userData.href && n.dispatchEvent({type:e.type,data:{}}) )
let lastPos = `pos=${camera.position.x.toFixed(2)},${camera.position.y.toFixed(2)},${camera.position.z.toFixed(2)}`
xrf
.emit('href',{click:true,mesh,xrf:v}) // let all listeners agree
@@ -46,19 +49,14 @@ xrf.frag.href = function(v, opts){
const flags = isLocal ? xrf.XRF.PV_OVERRIDE : undefined
let toFrag = xrf.URI.parse( v.string, xrf.XRF.NAVIGATOR | xrf.XRF.PV_OVERRIDE | xrf.XRF.METADATA )
// always commit current location in case of teleport (keep a trail of last positions before we navigate)
//if( isLocal && !hasPos ){
// xrf.hashbus.pub( v.string, xrf.model ) // publish to hashbus
//}else{
//if( !e.nocommit && !document.location.hash.match(lastPos) ) xrf.navigator.updateHash(`#${lastPos}`)
xrf.navigator.to(v.string) // let's surf
//}
xrf.navigator.to(v.string) // let's surf
})
.catch( console.error )
}
let selected = mesh.userData.XRF.href.selected = (state) => () => {
if( mesh.selected == state ) return // nothing changed
console.log("state="+(selected?'selected':'unselected'))
xrf.interactive.objects.map( (o) => {
let newState = o.name == mesh.name ? state : false
if( o.material ){
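The href click handler now bubbles the event to ancestors with href metadata (the traverseAncestors line above) and records the camera position as a pos= string before navigating. Shown in isolation, the same position expression as a small helper (assuming camera is the active three.js camera):

// serialize the current camera position as an XR Fragment breadcrumb
let posFragment = (camera) =>
  `pos=${camera.position.x.toFixed(2)},${camera.position.y.toFixed(2)},${camera.position.z.toFixed(2)}`

// e.g. '#' + posFragment(camera) could be pushed to the hash to keep a trail
// of last positions before navigating, as the comment above suggests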

View file

@@ -3,7 +3,7 @@ xrf.frag.loop = function(v, opts){
// handle object media players
if( mesh && mesh.media ){
for( let i in mesh.media ) mesh.media[i].pub(v)
for( let i in mesh.media ) mesh.media[i].set("loop",v)
return
}

View file

@@ -3,7 +3,7 @@ xrf.frag.s = function(v, opts){
// handle object media players
if( mesh && mesh.media ){
for( let i in mesh.media ) mesh.media[i].pub(v)
for( let i in mesh.media ) mesh.media[i].set("s",v)
return
}
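Both the loop and the s (speed) fragment handlers now talk to media objects through a single set(key, value) interface instead of re-publishing the raw fragment. A sketch of the shape such a wrapper could take around an HTML5 media element (names and the value shape {x, y} are illustrative; the real implementations are in the audio/video loaders below):

// illustrative media wrapper: every fragment key funnels through set()
let makeMedia = (el) => ({
  set: (mediafragment, v) => {
    if (mediafragment === 'loop') el.loop = true
    if (mediafragment === 's')    el.playbackRate = Math.abs(v.x)   // no reverse play in HTML5
    if (mediafragment === 't')    { el.currentTime = v.x; el.play() }
  }
})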

View file

@@ -24,18 +24,18 @@ let loadAudio = (mimetype) => function(url,opts){
: new THREE.Audio( camera.listener )
mesh.media = mesh.media || {}
mesh.media.audio = { play: () => mesh.media.audio.autoplay = true }
mesh.media.audio = { set: (mediafragment,v) => mesh.media.audio[mediafragment] = v }
audioLoader.load( url.replace(/#.*/,''), function( buffer ) {
sound.setBuffer( buffer );
sound.setLoop(false);
sound.setVolume(1.0);
sound.setVolume( 1.0 )
if( isPositionalAudio ){
sound.setRefDistance( mesh.scale.x);
sound.setRolloffFactor(20.0)
//sound.setDirectionalCone( 360, 360, 0.01 );
}
}else sound.setVolume( mesh.scale.x )
mesh.add(sound)
@@ -44,13 +44,17 @@ let loadAudio = (mimetype) => function(url,opts){
sound[mediafragment] = v
if( mediafragment == 't'){
sound.pause()
if( sound.isPlaying && v.y != undefined && v.x == v.y ) return
if( sound.isPlaying && v.y != undefined && v.x == v.y ){
sound.offset = v.x * buffer.sampleRate ;
sound.pause()
return
}else sound.stop()
// apply embedded audio/video samplerate/fps or global mixer fps
sound.setLoopStart(v.x * buffer.sampleRate );
sound.setLoopEnd(v.y * buffer.sampleRate );
sound.offset = v.x * buffer.sampleRate ;
sound.setLoopStart(v.x);
sound.setLoopEnd(v.y || buffer.duration);
sound.offset = v.x;
sound.play()
}
@@ -66,25 +70,36 @@ let loadAudio = (mimetype) => function(url,opts){
sound.setLoop( v.loop )
sound.play()
}
debugger
}catch(e){ console.warn(e) }
}
// autoplay if user already requested play (before the sound was loaded)
let autoplay = mesh.media.audio && mesh.media.audio.autoplay
let lazySet = {}
let mediaFragments = ['t','loop','s']
mediaFragments.map( (f) => mesh.media.audio[f] && (lazySet[f] = mesh.media.audio[f]) )
mesh.media.audio = sound
if( autoplay ){
xrf.hashbus.pub(mesh.media.audio.autoplay)
}
});
}
// autoplay if user already requested play (before the sound was loaded)
mediaFragments.map( (f) => {
if( lazySet[f] ) mesh.media.audio.set(f, lazySet[f] )
})
});
// apply Media fragments from URL
(['t','loop','s']).map( (f) => {
if( frag[f] ){
mesh.media.audio.set( f, frag[f] )
}
})
}
// stop playing audio when loading another scene
xrf.addEventListener('reset', () => {
xrf.scene.traverse( (n) => n.audio && (n.audio.playXRF({x:0,y:0})) && (n.audio.remove()) )
})
let audioMimeTypes = [
'audio/x-wav',
'audio/wav',
'audio/mpeg',
'audio/mp3',
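The audio loader now queues fragment values that arrive before the buffer is ready (the lazySet object) and replays them once loading finishes, and loop start/end are given in seconds instead of being multiplied by the sample rate. The lazy-apply idea in isolation, with assumed names and assuming the loaded object exposes the same set() interface:

// buffer fragment values until the real sound object exists
let pending = {}
let media = { set: (key, value) => { pending[key] = value } }    // placeholder before load

let onLoaded = (sound) => {
  media = sound                                                  // real object takes over
  Object.keys(pending).forEach((k) => media.set(k, pending[k]))  // replay queued fragments
}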

View file

@@ -33,16 +33,19 @@ let loadVideo = (mimetype) => function(url,opts){
if( mediafragment == 't'){
video.pause()
if( t.x !== undefined && t.x == t.y ) return // stop paused
if( v.x !== undefined && v.x == v.y ) return // stop paused
else{
video.currentTime = t.x
video.time = t.x
video.currentTime = v.x
video.time = v.x
video.play()
}
}
if( mediafragment == 's' ){
video.playbackRate = Math.abs( video.speed ) // html5 video does not support reverseplay :/
}
if( mediafragment == 'loop' ){
video.looping = true
}
}
}
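The video handler mirrors the audio changes: the t branch now reads the fragment value from v rather than the previous t variable, s maps to playbackRate (HTML5 video cannot play in reverse, hence Math.abs), and loop flags the element as looping. A compact sketch of applying a #t=10,20 style fragment to a plain video element, assuming the {x, y} value shape used above:

// apply a t (time) fragment to an HTML5 video element
let applyTimeFragment = (video, v) => {
  video.pause()
  if (v.x !== undefined && v.x === v.y) return   // t=10,10 means "stay paused"
  video.currentTime = v.x
  video.play()
}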