Cocos web builds: audio playback issues on iOS with screen lock and foreground/background switching

Published: 2025-07-31
  1. When returning to the foreground, a delayed suspend followed by a resume of the audio context fixes most cases on iOS where audio fails to recover after switching between background and foreground:
if (cc.sys.isBrowser && cc.sys.os === cc.sys.OS_IOS && cc.sys.isMobile) {
	cc.game.on(cc.game.EVENT_GAME_INITED, () => {
		cc.game.on(cc.game.EVENT_SHOW, () => {
			// The engine's shared WebAudio context; this is what gets stuck after backgrounding on iOS
			let audioContext = cc.sys.__audioSupport.context;
			setTimeout(() => {
				audioContext.suspend();
			}, 50);
			setTimeout(() => {
				audioContext.resume();
			}, 100);
		});
	});
}
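For reference, here is a minimal verification sketch (my own addition, not part of the original fix): it logs the AudioContext state when the game comes back to the foreground and again after the delayed resume, so you can confirm the context actually returns to 'running'.
// Verification sketch: log the AudioContext state on EVENT_SHOW and again after a delayed resume.
cc.game.on(cc.game.EVENT_SHOW, () => {
	let ctx = cc.sys.__audioSupport.context;
	console.log('state on show:', ctx.state); // typically 'interrupted' or 'suspended' on iOS
	setTimeout(() => {
		ctx.resume().then(() => {
			// If the workaround worked, the state should now be 'running'.
			console.log('state after resume:', ctx.state);
		});
	}, 100);
});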
  2. If audio still fails to recover, then when audio is played again, suspend the audio context once and then resume it before playback (done by overriding part of CCAudio.js):
/* Audio override (partial rewrite of CCAudio.js)
 * @Description: Mainly used to fix the iOS audio anomaly (no sound after switching to the background); this script must be marked as a plugin in the Creator editor
 * @Author: vcom_ls 2670813470@qq.com
 * @Date: 2025-02-28 10:48:08
 * @LastEditors: vcom_ls 2670813470@qq.com
 * @LastEditTime: 2025-03-04 17:55:14
 * @FilePath: \MyVcom\assets\CC\CCAudioManager\AudioOverriding.js
 * @Copyright (c) 2025 by vcom_ls, All Rights Reserved.
 */

let touchBinded = false;
let touchPlayList = [
	//{ instance: Audio, offset: 0, audio: audio }
];
cc._Audio.prototype._createElement = function () {
	let elem = this._src._nativeAsset;
	if (elem instanceof HTMLAudioElement) {
		// Reuse dom audio element
		if (!this._element) {
			this._element = document.createElement('audio');
		}
		this._element.src = elem.src;
	} else {
		this._element = new WebAudioElement(elem, this);
	}
};
// Override the engine Audio prototype methods so they use the local touchPlayList / WebAudioElement defined in this file.
cc._Audio.prototype.play = function () {
	let self = this;
	this._src &&
		this._src._ensureLoaded(function () {
			// marked as playing so it will playOnLoad
			self._state = 1; // Audio.State.PLAYING
			// TODO: move to audio event listeners
			self._bindEnded();
			let playPromise = self._element.play();
			// dom audio throws an error if pause audio immediately after playing
			if (window.Promise && playPromise instanceof Promise) {
				playPromise.catch(function (err) {
					// do nothing
				});
			}
			self._touchToPlay();
		});
};
cc._Audio.prototype._touchToPlay = function () {
	// # same start
	// LoadMode comes from the engine's CCAudioClip.js; use the public cc.AudioClip.LoadMode enum since this file is a plugin
	// if (this._src && this._src.loadMode === LoadMode.DOM_AUDIO && this._element.paused) {
	if (this._src && this._src.loadMode === cc.AudioClip.LoadMode.DOM_AUDIO && this._element.paused) {
		touchPlayList.push({ instance: this, offset: 0, audio: this._element });
	}
	// # same end

	if (touchBinded) return;
	touchBinded = true;

	let touchEventName = 'ontouchend' in window ? 'touchend' : 'mousedown';
	// Listen for a touch/click on the game canvas and play any queued audio when it fires.
	cc.game.canvas.addEventListener(touchEventName, function () {
		let item;
		while ((item = touchPlayList.pop())) {
			item.audio.play(item.offset);
		}
	});
};
cc._Audio.prototype.stop = function () {
	let self = this;
	this._src &&
		this._src._ensureLoaded(function () {
			self._element.pause();
			self._element.currentTime = 0;
			// remove touchPlayList
			for (let i = 0; i < touchPlayList.length; i++) {
				if (touchPlayList[i].instance === self) {
					touchPlayList.splice(i, 1);
					break;
				}
			}
			self._unbindEnded();
			self.emit('stop');
			self._state = 3; // Audio.State.STOPPED
		});
};

let TIME_CONSTANT;
if (cc.sys.browserType === cc.sys.BROWSER_TYPE_EDGE || cc.sys.browserType === cc.sys.BROWSER_TYPE_BAIDU || cc.sys.browserType === cc.sys.BROWSER_TYPE_UC) {
	TIME_CONSTANT = 0.01;
} else {
	TIME_CONSTANT = 0;
}
// Encapsulated WebAudio interface
let WebAudioElement = function (buffer, audio) {
	this._audio = audio;
	this._context = cc.sys.__audioSupport.context;
	this._buffer = buffer;

	this._gainObj = this._context['createGain']();
	this.volume = 1;

	this._gainObj['connect'](this._context['destination']);
	this._loop = false;
	// Timestamp on the audio context's timeline at which playback started.
	this._startTime = -1;
	// The currently playing AudioBufferSourceNode
	this._currentSource = null;
	// How much of the audio has already been played
	this.playedLength = 0;

	this._currentTimer = null;

	this._endCallback = function () {
		if (this.onended) {
			this.onended(this);
		}
	}.bind(this);
};

let isHide = false; // whether the game has been switched to the background
(function (proto) {
	proto.play = function (offset) {
		// # add start
		if (isHide && cc.sys.isBrowser && cc.sys.os === cc.sys.OS_IOS && cc.sys.isMobile) {
			isHide = false;
			cc.sys.__audioSupport.context.suspend();
		}
		// # add end
		// If repeat play, you need to stop before an audio
		if (this._currentSource && !this.paused) {
			this._currentSource.onended = null;
			this._currentSource.stop(0);
			this.playedLength = 0;
		}

		let audio = this._context['createBufferSource']();
		audio.buffer = this._buffer;
		audio['connect'](this._gainObj);
		audio.loop = this._loop;

		this._startTime = this._context.currentTime;
		offset = offset || this.playedLength;
		if (offset) {
			this._startTime -= offset;
		}
		let duration = this._buffer.duration;

		let startTime = offset;
		let endTime;
		if (this._loop) {
			if (audio.start) audio.start(0, startTime);
			else if (audio['noteGrainOn']) audio['noteGrainOn'](0, startTime);
			else audio['noteOn'](0, startTime);
		} else {
			endTime = duration - offset;
			if (audio.start) audio.start(0, startTime, endTime);
			else if (audio['noteGrainOn']) audio['noteGrainOn'](0, startTime, endTime);
			else audio['noteOn'](0, startTime, endTime);
		}

		this._currentSource = audio;

		audio.onended = this._endCallback;

		// If the current audio context time stamp is 0 and audio context state is suspended
		// There may be a need to touch events before you can actually start playing audio
		if ((!audio.context.state || audio.context.state === 'suspended') && this._context.currentTime === 0) {
			let self = this;
			clearTimeout(this._currentTimer);
			this._currentTimer = setTimeout(function () {
				if (self._context.currentTime === 0) {
					touchPlayList.push({
						instance: self._audio,
						offset: offset,
						audio: self,
					});
				}
			}, 10);
		}

		if (cc.sys.os === cc.sys.OS_IOS && cc.sys.isBrowser && cc.sys.isMobile) {
			// Audio context is suspended when you unplug the earphones,
			// and is interrupted when the app enters background.
			// Both make the audioBufferSource unplayable.

			// # diff start
			// if ((audio.context.state === 'suspended' && this._context.currentTime !== 0) || audio.context.state === 'interrupted') {
			// reference: https://developer.mozilla.org/en-US/docs/Web/API/AudioContext/resume
			audio.context.resume();
			// }
			// # diff end
		}
	};

	proto.pause = function () {
		clearTimeout(this._currentTimer);
		if (this.paused) return;
		// Record how long the audio has been playing
		this.playedLength = this._context.currentTime - this._startTime;
		// If longer than the audio's duration, take the remainder
		this.playedLength %= this._buffer.duration;
		let audio = this._currentSource;
		if (audio) {
			if (audio.onended) {
				audio.onended._binded = false;
				audio.onended = null;
			}
			audio.stop(0);
		}
		this._currentSource = null;
		this._startTime = -1;
	};

	Object.defineProperty(proto, 'paused', {
		get: function () {
			// If the current audio is a loop, paused is false
			if (this._currentSource && this._currentSource.loop) return false;

			// startTime default is -1
			if (this._startTime === -1) return true;

			// Current time -  Start playing time > Audio duration
			return this._context.currentTime - this._startTime > this._buffer.duration;
		},
		enumerable: true,
		configurable: true,
	});

	Object.defineProperty(proto, 'loop', {
		get: function () {
			return this._loop;
		},
		set: function (bool) {
			if (this._currentSource) this._currentSource.loop = bool;

			return (this._loop = bool);
		},
		enumerable: true,
		configurable: true,
	});

	Object.defineProperty(proto, 'volume', {
		get: function () {
			return this._volume;
		},
		set: function (num) {
			this._volume = num;
			// https://www.chromestatus.com/features/5287995770929152
			if (this._gainObj.gain.setTargetAtTime) {
				try {
					this._gainObj.gain.setTargetAtTime(num, this._context.currentTime, TIME_CONSTANT);
				} catch (e) {
					// Some other unknown browsers may crash if TIME_CONSTANT is 0
					this._gainObj.gain.setTargetAtTime(num, this._context.currentTime, 0.01);
				}
			} else {
				this._gainObj.gain.value = num;
			}

			if (cc.sys.os === cc.sys.OS_IOS && !this.paused && this._currentSource) {
				// On iOS the current WebAudio source must be stopped and restarted for the new volume to take effect
				this._currentSource.onended = null;
				this.pause();
				this.play();
			}
		},
		enumerable: true,
		configurable: true,
	});

	Object.defineProperty(proto, 'currentTime', {
		get: function () {
			if (this.paused) {
				return this.playedLength;
			}
			// Record how long the audio has been playing
			this.playedLength = this._context.currentTime - this._startTime;
			// If longer than the audio's duration, take the remainder
			this.playedLength %= this._buffer.duration;
			return this.playedLength;
		},
		set: function (num) {
			if (!this.paused) {
				this.pause();
				this.playedLength = num;
				this.play();
			} else {
				this.playedLength = num;
			}
			return num;
		},
		enumerable: true,
		configurable: true,
	});

	Object.defineProperty(proto, 'duration', {
		get: function () {
			return this._buffer.duration;
		},
		enumerable: true,
		configurable: true,
	});
})(WebAudioElement.prototype);

// # add start
if (cc.sys.isBrowser && cc.sys.os === cc.sys.OS_IOS && cc.sys.isMobile) {
	cc.game.on(cc.game.EVENT_GAME_INITED, () => {
		cc.game.on(cc.game.EVENT_HIDE, () => {
			// 'suspended': the audio context is paused,
			// 'running': the audio context is running,
			// 'closed': the audio context has been closed,
			// 'interrupted': audio has been interrupted.
			let audioContext = cc.sys.__audioSupport.context;
			let state = audioContext.state;
			console.log('hide', state, new Date().getTime());

			//// Did not work in testing; kept for reference
			// if (state === 'running') {
			// 	audioContext.suspend();
			// }
			// Mark that the game has gone to the background so the audio state is reset on the next play()
			isHide = true;
		});
		cc.game.on(cc.game.EVENT_SHOW, () => {
			// 'suspended': the audio context is paused,
			// 'running': the audio context is running,
			// 'closed': the audio context has been closed,
			// 'interrupted': audio has been interrupted.
			let audioContext = cc.sys.__audioSupport.context;
			let state = audioContext.state;
			console.log('show', state, new Date().getTime());

			//// Did not work in testing; kept for reference
			// if (state === 'interrupted' || state === 'suspended') {
			// 	audioContext
			// 		.resume()
			// 		.then(() => {
			// 			console.log('trying to resume the audio context');
			// 		})
			// 		.catch((error) => {
			// 			console.error('failed to resume the audio context:', error);
			// 		});
			// }

			setTimeout(() => {
				audioContext.suspend();
			}, 50);
			setTimeout(() => {
				audioContext.resume();
			}, 100);
		});
	});
}
// # add end

A quick summary: after finding the problem, my first plan was to handle the logic strictly according to the audio context state, but testing showed this had no effect (interested readers can try it for themselves). I also added logging for the foreground/background switches and found that iOS does not behave like Android, where "hide" is printed when the app goes to the background and "show" when it returns to the foreground; sometimes two "hide" logs appear at once, and the timestamps show that "hide" and "show" are printed almost simultaneously, at a time that is clearly not when the app actually went to the background. My guess is that on iOS, "EVENT_HIDE" and "EVENT_SHOW" are only dispatched one after the other once the app has already returned to the foreground (this is only a guess and cannot be guaranteed). That led me to handle suspending and resuming the audio manually, which is the first method above.
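For reference, a minimal standalone sketch of the kind of logging used to observe this: it simply prints the event name, the AudioContext state and a timestamp, just as the handlers in the plugin above do.
// Logging sketch: register once (e.g. after EVENT_GAME_INITED) and watch the console while backgrounding the page.
cc.game.on(cc.game.EVENT_HIDE, () => {
	console.log('hide', cc.sys.__audioSupport.context.state, Date.now());
});
cc.game.on(cc.game.EVENT_SHOW, () => {
	console.log('show', cc.sys.__audioSupport.context.state, Date.now());
});
// On Android, 'hide' and 'show' appear at the moment of switching; on iOS (as described above)
// they can be printed almost simultaneously, only after the app returns to the foreground.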

The second method is a safeguard (in case resuming still fails because of the browser's security mechanism that forbids autoplaying audio without user interaction): after the app has been to the background, the first time audio is played again, call "suspend" once and then "resume" once on the audio context to restore playback.
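As a further hedge against that autoplay policy, not part of the original rewrite but a common pattern that fits the touchend handling already used in CCAudio.js, the shared context can also be resumed on the next user touch. A minimal sketch:
// Sketch: resume the engine's shared AudioContext on the next touch, tying recovery to a user gesture.
let resumeOnTouch = function () {
	let audioContext = cc.sys.__audioSupport.context;
	if (audioContext.state !== 'running') {
		audioContext.resume();
	}
	cc.game.canvas.removeEventListener('touchend', resumeOnTouch);
};
cc.game.canvas.addEventListener('touchend', resumeOnTouch);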

In my own tests, the delayed suspend/resume alone already solves most of the silent-audio problems on iOS web builds (if you use the second method, remember to mark the script as a plugin in the Creator editor).

