diff --git a/adapters/common/engine/Audio.js b/adapters/common/engine/Audio.js
index 60f22698..0b97f3bd 100644
--- a/adapters/common/engine/Audio.js
+++ b/adapters/common/engine/Audio.js
@@ -7,7 +7,9 @@ if (Audio) {
         let elem = this._src._nativeAsset;
         // Reuse dom audio element
         if (!this._element) {
-            this._element = __globalAdapter.createInnerAudioContext();
+            this._element = CC_WORKER_AUDIO_SYSTEM
+                ? new WorkerAudio()
+                : __globalAdapter.createInnerAudioContext();
         }
         this._element.src = elem.src;
     },
diff --git a/adapters/platforms/wechat/res/workers/audio-worker.js b/adapters/platforms/wechat/res/workers/audio-worker.js
index 9970d62e..5df876b5 100644
--- a/adapters/platforms/wechat/res/workers/audio-worker.js
+++ b/adapters/platforms/wechat/res/workers/audio-worker.js
@@ -1,9 +1,95 @@
+const { main } = require("./ipc-worker.js");
+const { CC_WORKER_AUDIO_SYSTEM_SYNC_INTERVAL } = globalThis;
+
 var audio_worker = {
     map: {},
-    create(callback, sn) {
-        this.map[sn] = worker.createInnerAudioContext();
-    },
+    timer: null,
+
+    create(callback, id) {
+        this.map[id] = {
+            audio: worker.createInnerAudioContext({ useWebAudioImplement: true }),
+            cache: {
+                duration: 0,
+                currentTime: 0,
+                paused: true,
+            },
+            callbacks: {},
+        };
+        if (!this.timer) {
+            this.timer = setInterval(this.ensureUpdate.bind(this), CC_WORKER_AUDIO_SYSTEM_SYNC_INTERVAL);
+        }
+    },
+
+    call(callback, id, type, arg) {
+        const audio = this.map[id].audio;
+        switch (type) {
+            case 0:
+                audio.play();
+                break;
+            case 1:
+                audio.pause();
+                break;
+            case 2:
+                audio.seek(arg);
+                break;
+            case 3:
+                audio.stop();
+                break;
+            case 4:
+                audio.src = arg;
+                break;
+            case 5:
+                audio.loop = arg;
+                break;
+            case 6:
+                audio.volume = arg;
+                break;
+
+            default:
+                break;
+        }
+    },
+
+    ensureUpdate() {
+        // struct: [id, duration, currentTime, paused, ...id2, duration2]
+        const infos = [];
+        for (const id in this.map) {
+            const data = this.map[id];
+            const audio = data.audio;
+            const cache = data.cache;
+            if (
+                audio.duration !== cache.duration
+                || audio.currentTime !== cache.currentTime
+                || audio.paused !== cache.paused
+            ) {
+                cache.duration = audio.duration;
+                cache.currentTime = audio.currentTime;
+                cache.paused = audio.paused;
+                infos.push(id, cache.duration, cache.currentTime, cache.paused);
+            }
+        }
+        if (infos.length > 0) {
+            main.audioAdapter.onUpdate(infos);
+        }
+    },
+
+    on(callback, id, type) {
+        const data = this.map[id];
+        data.audio["on" + type]((data.callbacks[type] = res => {
+            main.audioAdapter.onCallback(id, type, res);
+        }));
+    },
+
+    off(callback, id, type) {
+        const data = this.map[id];
+        data.audio["off" + type](data.callbacks[type]);
+        delete data.callbacks[type];
+    },
+
+    destroy(callback, id) {
+        this.map[id].audio.destroy();
+        delete this.map[id];
+    },
 };
 
 module.exports = audio_worker;
diff --git a/adapters/platforms/wechat/worker/audio.js b/adapters/platforms/wechat/worker/audio.js
index 00452915..611cd095 100644
--- a/adapters/platforms/wechat/worker/audio.js
+++ b/adapters/platforms/wechat/worker/audio.js
@@ -2,74 +2,144 @@ let _id = 0;
 
 class WorkerAudio {
     id = ++_id;
+    callbacks = {};
 
     get src() {
-
+        return this._src;
     }
     set src(str) {
-
+        if (this._src !== str) {
+            this._src = str;
+            audioWorkerAdapter.call(this.id, 4, str);
+        }
     }
+    _src = "";
 
     get loop() {
-
+        return this._loop;
     }
     set loop(v) {
-
+        if (this._loop !== v) {
+            this._loop = v;
+            audioWorkerAdapter.call(this.id, 5, v);
+        }
     }
     _loop = false;
 
     get volume() {
-
+        return this._volume;
     }
     set volume(v) {
-
+        if (this._volume !== v) {
+            this._volume = v;
+            audioWorkerAdapter.call(this.id, 6, v);
+        }
     }
     _volume = 1;
 
-    // Read-only, synced one-way from the worker
+    // Read-only, synced one-way from the worker; since the sync is asynchronous, some values are simulated locally first
     duration = 0;
     currentTime = 0;
     paused = true;
 
     constructor() {
-
-    }
-
-    get src() {
-
-    }
-    set src(clip) {
-
+        audioWorkerAdapter.create(this);
     }
 
     play() {
-
+        this.paused = false;
+        audioWorkerAdapter.call(this.id, 0, null);
    }
     pause() {
-
+        this.paused = true;
+        audioWorkerAdapter.call(this.id, 1, null);
     }
-    seek() {
-
+    seek(position) {
+        this.paused = false;
+        this.currentTime = position;
+        audioWorkerAdapter.call(this.id, 2, position);
     }
     stop() {
-
+        this.paused = true;
+        audioWorkerAdapter.call(this.id, 3, null);
     }
     destroy() {
-
+        this.paused = true;
+        audioWorkerAdapter.destroy(this.id);
     }
-
 }
 
+[
+    "Canplay",
+    "Ended",
+    "Error",
+    "Pause",
+    "Play",
+    "Seeked",
+    "Seeking",
+    "Stop",
+    "TimeUpdate",
+    "Waiting",
+].forEach(name => {
+    WorkerAudio.prototype["on" + name] = function (callback) {
+        audioWorkerAdapter.on(this.id, name, callback);
+    };
+    WorkerAudio.prototype["off" + name] = function (callback) {
+        audioWorkerAdapter.off(this.id, name, callback);
+    };
+});
+
 var audioWorkerAdapter = {
-    on(id, callback) {
-
-    },
-    off(id, callback) {
-
-    },
+    audios: {},
+
+    create(audio) {
+        this.audios[audio.id] = audio;
+        worker.audio.create([audio.id]);
+    },
+
+    call(id, type, arg) {
+        worker.audio.call([id, type, arg]);
+    },
+
+    on(id, type, callback) {
+        this.audios[id].callbacks[type] = callback;
+        worker.audio.on([id, type]);
+    },
+
+    off(id, type, callback) {
+        delete this.audios[id].callbacks[type];
+        worker.audio.off([id, type]);
+    },
+
+    onCallback(args, cmdId, callback) {
+        const id = args[0];
+        const type = args[1];
+        const data = args[2];
+        this.audios[id].callbacks[type](data);
+    },
+
+    onUpdate(args, cmdId, callback) {
+        // struct: [id, duration, currentTime, paused, ...id2, duration2]
+        const infos = args[0];
+        for (let i = 0; i < infos.length; i += 4) {
+            const id = infos[i];
+            const duration = infos[i + 1];
+            const currentTime = infos[i + 2];
+            const paused = infos[i + 3];
+            const audio = this.audios[id];
+            audio.duration = duration;
+            audio.currentTime = currentTime;
+            audio.paused = paused;
+        }
+    },
+
+    destroy(id) {
+        worker.audio.destroy([id]);
+        delete this.audios[id];
+    },
 };
diff --git a/adapters/platforms/wechat/wrapper/builtin/Audio.js b/adapters/platforms/wechat/wrapper/builtin/Audio.js
index a96fffa0..f3abb554 100644
--- a/adapters/platforms/wechat/wrapper/builtin/Audio.js
+++ b/adapters/platforms/wechat/wrapper/builtin/Audio.js
@@ -25,7 +25,9 @@ export default class Audio extends HTMLAudioElement {
 
         this.readyState = HAVE_NOTHING
 
-        const innerAudioContext = wx.createInnerAudioContext()
+        const innerAudioContext = CC_WORKER_AUDIO_SYSTEM
+            ? new WorkerAudio()
+            : wx.createInnerAudioContext()
 
         _innerAudioContextMap[this._$sn] = innerAudioContext
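
A minimal usage sketch (reviewer note, not part of the patch): how engine-side code could drive the WorkerAudio proxy once CC_WORKER_AUDIO_SYSTEM is enabled and WorkerAudio is visible to that code. The clip path and the timeout are illustrative assumptions; the call types, event names, and sync behaviour come from the diff above.

    // Main thread, with CC_WORKER_AUDIO_SYSTEM enabled (illustrative values).
    const audio = new WorkerAudio();
    audio.src = "assets/bgm.mp3";   // forwarded to the worker as call type 4 (src)
    audio.loop = true;              // call type 5 (loop)
    audio.volume = 0.8;             // call type 6 (volume)
    audio.onPlay(() => console.log("worker reported Play"));
    audio.play();                   // paused flips to false immediately (simulated locally)
    // duration/currentTime keep their simulated values until the worker's ensureUpdate tick
    // (every CC_WORKER_AUDIO_SYSTEM_SYNC_INTERVAL ms) pushes real values back through onUpdate.
    setTimeout(() => console.log(audio.duration, audio.currentTime), 1000);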