feat: Support autoplay audio tag

--story=1017623 --user=刘瑞斌 [Application] - During voice playback, when an audio file is encountered, play the file content directly https://www.tapd.cn/57709429/s/1642154
CaptainB 2025-01-06 15:43:24 +08:00 committed by 刘瑞斌
parent d9df013e33
commit 6b23fcd11c
2 changed files with 91 additions and 34 deletions
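
The core of this change: answer text can now embed <audio> tags, so playback splits the text on those tags and plays plain-text and audio segments in order instead of reading the raw markup aloud. A minimal standalone sketch of the splitting step (the sample string and URL are hypothetical, not from the commit):

// The capturing group in split() keeps the matched <audio> tags in the result.
const sample = 'Hello.<audio src="https://example.com/a.mp3"></audio>Bye.'
const parts = sample.split(/(<audio[^>]*><\/audio>)/)
// parts: ['Hello.', '<audio src="https://example.com/a.mp3"></audio>', 'Bye.']
for (const part of parts) {
  if (part.includes('<audio')) {
    // Pull out the src attribute the same way the diff does.
    console.log('audio segment:', part.match(/src="([^"]*)"/)?.[1] || '')
  } else if (part) {
    console.log('text segment:', part)
  }
}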

View File

@@ -76,7 +76,7 @@
       </span>
     </div>
     <!-- Render up front, otherwise playback won't work -->
-    <audio ref="audioPlayer" controls hidden="hidden"></audio>
+    <audio ref="audioPlayer" v-for="item in audioList" :key="item" controls hidden="hidden"></audio>
 </template>
 <script setup lang="ts">
 import { onMounted, ref } from 'vue'
@@ -110,11 +110,13 @@ const props = withDefaults(
 const emit = defineEmits(['update:data', 'regeneration'])
-const audioPlayer = ref<HTMLAudioElement | null>(null)
+const audioPlayer = ref<HTMLAudioElement[] | null>([])
 const audioPlayerStatus = ref(false)
 const buttonData = ref(props.data)
 const loading = ref(false)
 const utterance = ref<SpeechSynthesisUtterance | null>(null)
+const audioList = ref<string[]>([])
+const currentAudioIndex = ref(0)

 function regeneration() {
   emit('regeneration')
@@ -170,8 +172,29 @@ const playAnswerText = (text: string) => {
   text = markdownToPlainText(text)
   // console.log(text)
   audioPlayerStatus.value = true
-  if (props.tts_type === 'BROWSER') {
-    if (text !== utterance.value?.text) {
+  //
+  audioList.value = text.split(/(<audio[^>]*><\/audio>)/)
+  playAnswerTextPart()
+}
+
+const playAnswerTextPart = () => {
+  // console.log(audioList.value, currentAudioIndex.value)
+  if (currentAudioIndex.value === audioList.value.length) {
+    audioPlayerStatus.value = false
+    currentAudioIndex.value = 0
+    return
+  }
+  if (audioList.value[currentAudioIndex.value].includes('<audio')) {
+    if (audioPlayer.value) {
+      audioPlayer.value[currentAudioIndex.value].src = audioList.value[currentAudioIndex.value].match(/src="([^"]*)"/)?.[1] || ''
+      audioPlayer.value[currentAudioIndex.value].play() //
+      audioPlayer.value[currentAudioIndex.value].onended = () => {
+        currentAudioIndex.value += 1
+        playAnswerTextPart()
+      }
+    }
+  } else if (props.tts_type === 'BROWSER') {
+    if (audioList.value[currentAudioIndex.value] !== utterance.value?.text) {
       window.speechSynthesis.cancel()
     }
     if (window.speechSynthesis.paused) {
@@ -179,10 +202,11 @@ const playAnswerText = (text: string) => {
       return
     }
     // SpeechSynthesisUtterance
-    utterance.value = new SpeechSynthesisUtterance(text)
+    utterance.value = new SpeechSynthesisUtterance(audioList.value[currentAudioIndex.value])
     utterance.value.onend = () => {
-      audioPlayerStatus.value = false
       utterance.value = null
+      currentAudioIndex.value += 1
+      playAnswerTextPart()
     }
     utterance.value.onerror = () => {
       audioPlayerStatus.value = false
@@ -190,15 +214,14 @@ const playAnswerText = (text: string) => {
     }
     //
     window.speechSynthesis.speak(utterance.value)
-  }
-  if (props.tts_type === 'TTS') {
+  } else if (props.tts_type === 'TTS') {
     //
-    if (audioPlayer.value?.src) {
-      audioPlayer.value?.play()
+    if (audioPlayer.value && audioPlayer.value[currentAudioIndex.value]?.src) {
+      audioPlayer.value[currentAudioIndex.value].play()
       return
     }
     applicationApi
-      .postTextToSpeech((props.applicationId as string) || (id as string), { text: text }, loading)
+      .postTextToSpeech((props.applicationId as string) || (id as string), { text: audioList.value[currentAudioIndex.value] }, loading)
       .then(async (res: any) => {
         if (res.type === 'application/json') {
           const text = await res.text()
@@ -219,11 +242,12 @@ const playAnswerText = (text: string) => {
         // link.click()
         // audioPlayer DOM
-        if (audioPlayer.value instanceof HTMLAudioElement) {
-          audioPlayer.value.src = url
-          audioPlayer.value.play() //
-          audioPlayer.value.onended = () => {
-            audioPlayerStatus.value = false
+        if (audioPlayer.value) {
+          audioPlayer.value[currentAudioIndex.value].src = url
+          audioPlayer.value[currentAudioIndex.value].play() //
+          audioPlayer.value[currentAudioIndex.value].onended = () => {
+            currentAudioIndex.value += 1
+            playAnswerTextPart()
           }
         } else {
           console.error('audioPlayer.value is not an instance of HTMLAudioElement')
@@ -238,7 +262,11 @@ const playAnswerText = (text: string) => {
 const pausePlayAnswerText = () => {
   audioPlayerStatus.value = false
   if (props.tts_type === 'TTS') {
-    audioPlayer.value?.pause()
+    if (audioPlayer.value) {
+      audioPlayer.value?.forEach((item) => {
+        item.pause()
+      })
+    }
   }
   if (props.tts_type === 'BROWSER') {
     window.speechSynthesis.pause()
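
The heart of the diff above (and of the second file below) is a sequential playback loop: play segment i, advance to i + 1 when it ends, reset when the list is exhausted. A condensed, framework-free sketch of that loop, with assumed names and without the component's status flags or the server-side TTS branch:

// Play each segment in order; audio tags go to per-segment <audio>
// elements, plain text goes to the browser's speech synthesis.
function playParts(parts: string[], players: HTMLAudioElement[]) {
  let i = 0
  const playNext = () => {
    if (i === parts.length) return // every segment has finished
    const part = parts[i]
    if (part.includes('<audio')) {
      const el = players[i]
      el.src = part.match(/src="([^"]*)"/)?.[1] || ''
      el.onended = () => { i += 1; playNext() }
      el.play()
    } else {
      const u = new SpeechSynthesisUtterance(part)
      u.onend = () => { i += 1; playNext() }
      window.speechSynthesis.speak(u)
    }
  }
  playNext()
}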

View File

@@ -54,7 +54,7 @@
   <EditContentDialog ref="EditContentDialogRef" @refresh="refreshContent" />
   <EditMarkDialog ref="EditMarkDialogRef" @refresh="refreshMark" />
   <!-- Render up front, otherwise playback won't work -->
-  <audio ref="audioPlayer" controls hidden="hidden"></audio>
+  <audio ref="audioPlayer" v-for="item in audioList" :key="item" controls hidden="hidden"></audio>
     </div>
   </div>
 </template>
@@ -88,7 +88,7 @@ const props = defineProps({
 const emit = defineEmits(['update:data'])
-const audioPlayer = ref<HTMLAudioElement | null>(null)
+const audioPlayer = ref<HTMLAudioElement[] | null>(null)
 const EditContentDialogRef = ref()
 const EditMarkDialogRef = ref()
@@ -96,6 +96,8 @@ const EditMarkDialogRef = ref()
 const buttonData = ref(props.data)
 const loading = ref(false)
 const utterance = ref<SpeechSynthesisUtterance | null>(null)
+const audioList = ref<string[]>([])
+const currentAudioIndex = ref(0)

 function editContent(data: any) {
   EditContentDialogRef.value.open(data)
@@ -149,8 +151,29 @@ const playAnswerText = (text: string) => {
   text = markdownToPlainText(text)
   // console.log(text)
   audioPlayerStatus.value = true
-  if (props.tts_type === 'BROWSER') {
-    if (text !== utterance.value?.text) {
+  //
+  audioList.value = text.split(/(<audio[^>]*><\/audio>)/)
+  playAnswerTextPart()
+}
+
+const playAnswerTextPart = () => {
+  // console.log(audioList.value, currentAudioIndex.value)
+  if (currentAudioIndex.value === audioList.value.length) {
+    audioPlayerStatus.value = false
+    currentAudioIndex.value = 0
+    return
+  }
+  if (audioList.value[currentAudioIndex.value].includes('<audio')) {
+    if (audioPlayer.value) {
+      audioPlayer.value[currentAudioIndex.value].src = audioList.value[currentAudioIndex.value].match(/src="([^"]*)"/)?.[1] || ''
+      audioPlayer.value[currentAudioIndex.value].play() //
+      audioPlayer.value[currentAudioIndex.value].onended = () => {
+        currentAudioIndex.value += 1
+        playAnswerTextPart()
+      }
+    }
+  } else if (props.tts_type === 'BROWSER') {
+    if (audioList.value[currentAudioIndex.value] !== utterance.value?.text) {
       window.speechSynthesis.cancel()
     }
     if (window.speechSynthesis.paused) {
@@ -158,10 +181,11 @@ const playAnswerText = (text: string) => {
       return
     }
     // SpeechSynthesisUtterance
-    utterance.value = new SpeechSynthesisUtterance(text)
+    utterance.value = new SpeechSynthesisUtterance(audioList.value[currentAudioIndex.value])
     utterance.value.onend = () => {
-      audioPlayerStatus.value = false
       utterance.value = null
+      currentAudioIndex.value += 1
+      playAnswerTextPart()
     }
     utterance.value.onerror = () => {
       audioPlayerStatus.value = false
@@ -169,15 +193,14 @@ const playAnswerText = (text: string) => {
     }
     //
     window.speechSynthesis.speak(utterance.value)
-  }
-  if (props.tts_type === 'TTS') {
+  } else if (props.tts_type === 'TTS') {
     //
-    if (audioPlayer.value?.src) {
-      audioPlayer.value?.play()
+    if (audioPlayer.value && audioPlayer.value[currentAudioIndex.value]?.src) {
+      audioPlayer.value[currentAudioIndex.value].play()
       return
     }
     applicationApi
-      .postTextToSpeech(id || (props.applicationId as string), { text: text }, loading)
+      .postTextToSpeech((props.applicationId as string) || (id as string), { text: audioList.value[currentAudioIndex.value] }, loading)
       .then(async (res: any) => {
         if (res.type === 'application/json') {
           const text = await res.text()
@@ -198,11 +221,12 @@ const playAnswerText = (text: string) => {
         // link.click()
         // audioPlayer DOM
-        if (audioPlayer.value instanceof HTMLAudioElement) {
-          audioPlayer.value.src = url
-          audioPlayer.value.play() //
-          audioPlayer.value.onended = () => {
-            audioPlayerStatus.value = false
+        if (audioPlayer.value) {
+          audioPlayer.value[currentAudioIndex.value].src = url
+          audioPlayer.value[currentAudioIndex.value].play() //
+          audioPlayer.value[currentAudioIndex.value].onended = () => {
+            currentAudioIndex.value += 1
+            playAnswerTextPart()
          }
        } else {
          console.error('audioPlayer.value is not an instance of HTMLAudioElement')
@@ -217,13 +241,18 @@ const playAnswerText = (text: string) => {
 const pausePlayAnswerText = () => {
   audioPlayerStatus.value = false
   if (props.tts_type === 'TTS') {
-    audioPlayer.value?.pause()
+    if (audioPlayer.value) {
+      audioPlayer.value?.forEach((item) => {
+        item.pause()
+      })
+    }
   }
   if (props.tts_type === 'BROWSER') {
     window.speechSynthesis.pause()
   }
 }

 function refreshMark() {
   buttonData.value.improve_paragraph_id_list = []
   emit('update:data', buttonData.value)
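
Design note on the pause path: because the template now renders one <audio> element per segment (the v-for over audioList), pausing can no longer target a single player, so both files switch to iterating every element. Reduced to its essentials, assuming the same refs as above:

// Pause all per-segment players; speech synthesis is paused separately
// in the BROWSER branch.
audioPlayer.value?.forEach((el) => el.pause())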