added build

This commit is contained in:
Leon van Kammen 2023-11-06 11:50:11 +01:00
parent 4afed5d0c0
commit fa42f0585c
3 changed files with 126 additions and 15 deletions

View File

@@ -909,11 +909,10 @@ xrf.reset = () => {
return true
};
let nodes = []
xrf.scene.traverse( (n) => n.audio ? n.audio.remove() : false )
xrf.scene.traverse( (child) => child.isXRF ? nodes.push(child) : false )
nodes.map( disposeObject ) // leave non-XRF objects intact
xrf.interactive = xrf.InteractiveGroup( xrf.THREE, xrf.renderer, xrf.camera)
if( xrf.audio ) xrf.audio.map( (a) => a.remove() )
xrf.audio = []
xrf.add( xrf.interactive )
xrf.layers = 0
xrf.emit('reset',{})
@@ -1834,8 +1833,9 @@ let loadAudio = (mimetype) => function(url,opts){
}
}
mesh.add(sound)
xrf.audio.push(sound)
});
mesh.audio = sound
}
let audioMimeTypes = [
@@ -1851,7 +1851,7 @@ audioMimeTypes.map( (mimetype) => xrf.frag.src.type[ mimetype ] = loadAudio(mim
// listen to t XR fragment changes
xrf.addEventListener('t', (opts) => {
let t = opts.frag.t
xrf.audio.map( (a) => a.playXRF(t) )
xrf.scene.traverse( (n) => n.audio && n.audio.playXRF ? n.audio.playXRF(t) : false )
})
/*
* mimetype: model/gltf+json
@@ -1887,7 +1887,6 @@ xrf.frag.src.type['image/png'] = function(url,opts){
let restrictTo3DBoundingBox = mesh.geometry
let renderEquirect = (texture) => {
console.dir(texture)
texture.mapping = THREE.EquirectangularReflectionMapping
texture.needsUpdate = true
texture.wrapS = THREE.RepeatWrapping;
@@ -1980,6 +1979,44 @@ xrf.frag.src.type['image/png'] = function(url,opts){
xrf.frag.src.type['image/gif'] = xrf.frag.src.type['image/png']
xrf.frag.src.type['image/jpeg'] = xrf.frag.src.type['image/png']
let loadVideo = (mimetype) => function(url,opts){
let {mesh,src,camera} = opts
let {urlObj,dir,file,hash,ext} = xrf.parseUrl(url)
let frag = xrf.URI.parse( url )
let video = mesh.video = document.createElement('video')
video.setAttribute("crossOrigin","anonymous")
video.setAttribute("playsinline",'')
video.addEventListener('loadedmetadata', function(){
let texture = new THREE.VideoTexture( video );
texture.colorSpace = THREE.SRGBColorSpace;
let mat = new xrf.THREE.MeshBasicMaterial()
mat.map = texture
mesh.material = mat
})
video.src = url
video.playXRF = (t) => {
if( t.x == 0 ) video.pause()
else{
video.playbackRate = Math.abs( t.x ) // html5 video does not support reverseplay :/
video.play()
}
if( t.y != undefined ) video.currentTime = t.y
}
}
let videoMimeTypes = [
'video/ogg',
'video/mp4'
]
videoMimeTypes.map( (mimetype) => xrf.frag.src.type[ mimetype ] = loadVideo(mimetype) )
// listen to t XR fragment changes
xrf.addEventListener('t', (opts) => {
let t = opts.frag.t
xrf.scene.traverse( (n) => n.video ? n.video.playXRF(t) : false )
})
window.AFRAME.registerComponent('xrf', {
schema: {
},
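// A minimal sketch (not part of this commit) of the convention the hunks above rely on:
// media handles now live on the mesh itself (mesh.audio / mesh.video) instead of a global
// xrf.audio array, and both the 't' listener and xrf.reset() discover them by traversing
// the scene. The mimetype key 'audio/x-custom' below is hypothetical.
xrf.frag.src.type['audio/x-custom'] = function(url,opts){
  let {mesh} = opts
  mesh.audio = {
    playXRF: (t) => { /* pause when t.x == 0, otherwise play at Math.abs(t.x) */ },
    remove:  ()  => { /* called by xrf.reset() while cleaning up the scene    */ }
  }
}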

View File

@@ -909,11 +909,10 @@ xrf.reset = () => {
return true
};
let nodes = []
xrf.scene.traverse( (n) => n.audio ? n.audio.remove() : false )
xrf.scene.traverse( (child) => child.isXRF ? nodes.push(child) : false )
nodes.map( disposeObject ) // leave non-XRF objects intact
xrf.interactive = xrf.InteractiveGroup( xrf.THREE, xrf.renderer, xrf.camera)
if( xrf.audio ) xrf.audio.map( (a) => a.remove() )
xrf.audio = []
xrf.add( xrf.interactive )
xrf.layers = 0
xrf.emit('reset',{})
@@ -1834,8 +1833,9 @@ let loadAudio = (mimetype) => function(url,opts){
}
}
mesh.add(sound)
xrf.audio.push(sound)
});
mesh.audio = sound
}
let audioMimeTypes = [
@@ -1851,7 +1851,7 @@ audioMimeTypes.map( (mimetype) => xrf.frag.src.type[ mimetype ] = loadAudio(mim
// listen to t XR fragment changes
xrf.addEventListener('t', (opts) => {
let t = opts.frag.t
xrf.audio.map( (a) => a.playXRF(t) )
xrf.scene.traverse( (n) => n.audio && n.audio.playXRF ? n.audio.playXRF(t) : false )
})
/*
* mimetype: model/gltf+json
@@ -1887,7 +1887,6 @@ xrf.frag.src.type['image/png'] = function(url,opts){
let restrictTo3DBoundingBox = mesh.geometry
let renderEquirect = (texture) => {
console.dir(texture)
texture.mapping = THREE.EquirectangularReflectionMapping
texture.needsUpdate = true
texture.wrapS = THREE.RepeatWrapping;
@@ -1980,3 +1979,41 @@ xrf.frag.src.type['image/png'] = function(url,opts){
xrf.frag.src.type['image/gif'] = xrf.frag.src.type['image/png']
xrf.frag.src.type['image/jpeg'] = xrf.frag.src.type['image/png']
let loadVideo = (mimetype) => function(url,opts){
let {mesh,src,camera} = opts
let {urlObj,dir,file,hash,ext} = xrf.parseUrl(url)
let frag = xrf.URI.parse( url )
let video = mesh.video = document.createElement('video')
video.setAttribute("crossOrigin","anonymous")
video.setAttribute("playsinline",'')
video.addEventListener('loadedmetadata', function(){
let texture = new THREE.VideoTexture( video );
texture.colorSpace = THREE.SRGBColorSpace;
let mat = new xrf.THREE.MeshBasicMaterial()
mat.map = texture
mesh.material = mat
})
video.src = url
video.playXRF = (t) => {
if( t.x == 0 ) video.pause()
else{
video.playbackRate = Math.abs( t.x ) // html5 video does not support reverseplay :/
video.play()
}
if( t.y != undefined ) video.currentTime = t.y
}
}
let videoMimeTypes = [
'video/ogg',
'video/mp4'
]
videoMimeTypes.map( (mimetype) => xrf.frag.src.type[ mimetype ] = loadVideo(mimetype) )
// listen to t XR fragment changes
xrf.addEventListener('t', (opts) => {
let t = opts.frag.t
xrf.scene.traverse( (n) => n.video ? n.video.playXRF(t) : false )
})
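// A minimal usage sketch of the playXRF(t) contract above, assuming the URI parser yields
// t as {x: <speed>, y: <seconds>} (the parsing itself is not shown in this diff):
mesh.video.playXRF({ x: 0 })         // t.x == 0      -> video.pause()
mesh.video.playXRF({ x: 1 })         // |t.x| as rate -> play at normal speed
mesh.video.playXRF({ x: 2, y: 10 })  // t.y defined   -> seek to 10 seconds, play at 2x
// negative t.x only changes the rate's magnitude: HTML5 video cannot play in reverse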

View File

@@ -909,11 +909,10 @@ xrf.reset = () => {
return true
};
let nodes = []
xrf.scene.traverse( (n) => n.audio ? n.audio.remove() : false )
xrf.scene.traverse( (child) => child.isXRF ? nodes.push(child) : false )
nodes.map( disposeObject ) // leave non-XRF objects intact
xrf.interactive = xrf.InteractiveGroup( xrf.THREE, xrf.renderer, xrf.camera)
if( xrf.audio ) xrf.audio.map( (a) => a.remove() )
xrf.audio = []
xrf.add( xrf.interactive )
xrf.layers = 0
xrf.emit('reset',{})
@@ -1834,8 +1833,9 @@ let loadAudio = (mimetype) => function(url,opts){
}
}
mesh.add(sound)
xrf.audio.push(sound)
});
mesh.audio = sound
}
let audioMimeTypes = [
@@ -1851,7 +1851,7 @@ audioMimeTypes.map( (mimetype) => xrf.frag.src.type[ mimetype ] = loadAudio(mim
// listen to t XR fragment changes
xrf.addEventListener('t', (opts) => {
let t = opts.frag.t
xrf.audio.map( (a) => a.playXRF(t) )
xrf.scene.traverse( (n) => n.audio && n.audio.playXRF ? n.audio.playXRF(t) : false )
})
/*
* mimetype: model/gltf+json
@@ -1887,7 +1887,6 @@ xrf.frag.src.type['image/png'] = function(url,opts){
let restrictTo3DBoundingBox = mesh.geometry
let renderEquirect = (texture) => {
console.dir(texture)
texture.mapping = THREE.EquirectangularReflectionMapping
texture.needsUpdate = true
texture.wrapS = THREE.RepeatWrapping;
@@ -1980,4 +1979,42 @@ xrf.frag.src.type['image/png'] = function(url,opts){
xrf.frag.src.type['image/gif'] = xrf.frag.src.type['image/png']
xrf.frag.src.type['image/jpeg'] = xrf.frag.src.type['image/png']
let loadVideo = (mimetype) => function(url,opts){
let {mesh,src,camera} = opts
let {urlObj,dir,file,hash,ext} = xrf.parseUrl(url)
let frag = xrf.URI.parse( url )
let video = mesh.video = document.createElement('video')
video.setAttribute("crossOrigin","anonymous")
video.setAttribute("playsinline",'')
video.addEventListener('loadedmetadata', function(){
let texture = new THREE.VideoTexture( video );
texture.colorSpace = THREE.SRGBColorSpace;
let mat = new xrf.THREE.MeshBasicMaterial()
mat.map = texture
mesh.material = mat
})
video.src = url
video.playXRF = (t) => {
if( t.x == 0 ) video.pause()
else{
video.playbackRate = Math.abs( t.x ) // html5 video does not support reverseplay :/
video.play()
}
if( t.y != undefined ) video.currentTime = t.y
}
}
let videoMimeTypes = [
'video/ogg',
'video/mp4'
]
videoMimeTypes.map( (mimetype) => xrf.frag.src.type[ mimetype ] = loadVideo(mimetype) )
// listen to t XR fragment changes
xrf.addEventListener('t', (opts) => {
let t = opts.frag.t
xrf.scene.traverse( (n) => n.video ? n.video.playXRF(t) : false )
})
export default xrf;
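// A minimal sketch of extending the mimetype registration pattern shown above; 'video/webm'
// is an assumed addition, not something this commit registers:
let extraVideoMimeTypes = [ 'video/webm' ]
extraVideoMimeTypes.map( (mimetype) => xrf.frag.src.type[ mimetype ] = loadVideo(mimetype) )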