// components/ar-tracker/ar-tracker.ts
Component({
  /**
   * Component properties
   */
  properties: {},

  lifetimes: {
    async attached() {
      console.log('data', this.data)
    },
    async detached() {
      console.error('detached')
      // Release cached asset/component references so they can be garbage collected.
      this.video1 = null
      this.video12 = null
      this.video2 = null
      this.animator1 = null
      this.animator2 = null
      this.animator3 = null
      // destroy() returns undefined, so it cannot be chained with `&&` before nulling.
      if (this.innerAudioContext1) {
        this.innerAudioContext1.destroy()
        this.innerAudioContext1 = null
      }
      this.setData({
        isStartPlay1: false,
        isStartPlay2: false,
        isStartPlay3: false,
      })
    }
  },

  /**
   * Initial component data
   */
  data: {
    loaded: false,
    isStartPlay1: false,
    isStartAudioPlay1: false,
    isStartPlay2: false,
    isStartPlay3: false,
  },

  /**
   * Component methods
   */
  methods: {
    // Fired when the xr-scene is ready; caches the scene and prepares the audio context.
    handleReady({ detail }) {
      const xrScene = this.scene = detail.value;
      this.mat = new (wx.getXrFrameSystem().Matrix4)();
      console.log('xr-scene', xrScene)
      this.triggerEvent('ready')

      this.innerAudioContext1 = wx.createInnerAudioContext({
        // Whether to use WebAudio as the underlying audio driver (off by default).
        // Recommended for short, frequently played clips for better performance,
        // but it increases memory usage, so keep it off for long audio.
        useWebAudioImplement: false
      })
      this.innerAudioContext1.src = 'https://houseoss.4dkankan.com/mini-ar-test/AR/01/audio1.MP3'
      // onPlay/onPause/onEnded are registration methods; they must be called with a
      // callback rather than assigned to.
      this.innerAudioContext1.onPlay(() => {
        this.setData({ isStartAudioPlay1: true })
      })
      this.innerAudioContext1.onPause(() => {
        this.setData({ isStartAudioPlay1: false })
      })
      this.innerAudioContext1.onEnded(() => {
        this.setData({ isStartAudioPlay1: false })
      })
      console.log('this.innerAudioContext1', this.innerAudioContext1)
    },

    handleARReady() {
      console.log('handleARReady')
    },

    // Forward asset loading progress (0-100) to the parent page.
    handleAssetsProgress({ detail }) {
      const progress = Math.floor(detail.value.progress * 100)
      console.log('progress', progress)
      this.triggerEvent('progress', progress)
    },

    handleAssetsLoaded({ detail }) {
      console.log('assets loaded', detail.value);
      this.triggerEvent('loaded')
      this.setData({ loaded: true })
    },

    // Item 1: bind the video-texture materials to the glTF primitives and cache the animator.
    handleItem1Loaded({ detail }) {
      const el = detail.value.target;
      console.warn('item1 load', el)
      const gltf = el.getComponent("gltf");
      const video = this.scene.assets.getAsset("video-texture", "item1-v");
      const video12 = this.scene.assets.getAsset("video-texture", "item12-v");
      const newMat = this.scene.assets.getAsset("material", "item1-m");
      const newMat2 = this.scene.assets.getAsset("material", "item12-m");
      this.video1 = video
      this.video12 = video12
      // When the first video ends, start the audio clip and the follow-up video.
      this.video1.onEnd = () => {
        this.innerAudioContext1 && this.innerAudioContext1.play()
        this.video12 && this.video12.play();
      }
      const animator = el.getComponent("animator");
      this.animator1 = animator

      // getPrimitivesByNodeName
      const othergltf = this.scene.assets.getAsset("gltf", "item1")
      // console.error('meshes', gltf.meshes)
      // Mesh names in the model:
      // mesh_video01_16054map
      // mesh_video_16024video1
      // mesh_video02_16030video2
      console.log('1', gltf.getPrimitivesByMeshName('mesh_video_16024video1'))
      console.log('2', gltf.getPrimitivesByMeshName('mesh_video02_16030video2'))
      console.error('othergltf', othergltf)
      for (const mesh of othergltf.model.meshes) {
        // mesh.material = newMat
        console.warn('mesh', mesh.name)
      }
      for (const mesh of gltf.getPrimitivesByNodeName('node_video1_-1630')) {
        mesh.material = newMat
        console.warn('mesh1', mesh)
      }
      for (const mesh of gltf.getPrimitivesByNodeName('node_video2_-1646')) {
        mesh.material = newMat2
        console.warn('mesh2', mesh)
      }
    },

    // Item 2: replace the material of the "video" node with the video-texture material.
    handleItem2Loaded({ detail }) {
      const el = detail.value.target;
      console.warn('item2 load')
      const gltf = el.getComponent("gltf");
      const video = this.scene.assets.getAsset("video-texture", "item2-v");
      const newMat = this.scene.assets.getAsset("material", "item2-m");
      this.video2 = video
      for (const mesh of gltf.getPrimitivesByNodeName("video")) {
        console.error('item2 has a "video" node')
        mesh.material = newMat
      }
    },

    // Item 3: only the animator is needed.
    handleItem3Loaded({ detail }) {
      const el = detail.value.target;
      console.warn('item3 load')
      const gltf = el.getComponent("gltf");
      const animator = el.getComponent("animator");
      this.animator3 = animator
      console.warn('animator3', this.animator3)
    },

    // Start (or resume) playback for the given tracker.
    play(type) {
      if (!this.data.loaded) return

      if (type === 1) {
        console.warn('play 1')
        if (this.video1) {
          this.data.isStartPlay1 ? this.video1.resume() : this.video1.play()
          // this.data.isStartAudioPlay1 ? this.innerAudioContext1.resume() : this.innerAudioContext1.play()
        }
        if (this.animator1) {
          // this.data.isStartPlay1 ? this.animator1.resume('dd') : this.animator1.play('dd')
          this.animator1.play('dd')
        }
        this.setData({ isStartPlay1: true })
      }
      if (type === 2) {
        console.warn('play 2')
        if (this.video2) {
          this.data.isStartPlay2 ? this.video2.resume() : this.video2.play()
        }
        this.setData({ isStartPlay2: true })
      }
      if (type === 3) {
        console.warn('play 3')
        if (this.animator3) {
          if (!this.data.isStartPlay3) {
            this.animator3.play('Group21137Action', { loop: 0 });
            this.animator3.play('polySurface1Action', { loop: 0 });
            this.animator3.play('vlAction', { loop: 0 });
          } else {
            this.animator3.resume('Group21137Action')
            this.animator3.resume('polySurface1Action')
            this.animator3.resume('vlAction')
          }
        }
        this.setData({ isStartPlay3: true })
      }
    },

    // Pause playback for the given tracker when it loses the target.
    pause(type) {
      if (type === 1) {
        console.warn('pause 1')
        if (this.innerAudioContext1 && this.data.isStartAudioPlay1) {
          this.innerAudioContext1.pause()
        }
        // if (this.animator1) {
        //   this.animator1.pause('Animation')
        // }
      }
      if (type === 2) {
        console.warn('pause 2')
        if (this.video2) {
          this.video2.pause()
        }
      }
    },

    handleARTrackerState1({ detail }) {
      // The event value is the `ARTracker` instance.
      const tracker = detail.value;
      // Read the current state and error message; state 2 means the target is being tracked.
      const { state, errorMessage } = tracker;
      if (state === 2) {
        this.play(1)
      } else {
        this.data.isStartPlay1 && this.pause(1)
      }
    },

    handleARTrackerState2({ detail }) {
      // The event value is the `ARTracker` instance.
      const tracker = detail.value;
      // Read the current state and error message.
      const { state, errorMessage } = tracker;
      if (state === 2) {
        this.play(2);
      } else {
        this.pause(2);
      }
    },

    handleARTrackerState3({ detail }) {
      // The event value is the `ARTracker` instance.
      const tracker = detail.value;
      // Read the current state and error message.
      const { state, errorMessage } = tracker;
      if (state === 2) {
        this.play(3);
      } else {
        this.pause(3);
      }
    }
  }
})