ali / CharIP-Electron

Commit afe81980
authored Dec 06, 2023 by ali

feat: fix the issue where the video does not pause after TTS audio playback ends; handle the corresponding reply state.
parent 83f308a7
Showing 3 changed files with 57 additions and 28 deletions
poster.jpg     src/renderer/public/libai/poster.jpg   +0  −0
ShowVideo.vue  src/renderer/screens/ShowVideo.vue     +55 −27
video.ts       src/renderer/store/video.ts            +2  −1
src/renderer/public/libai/poster.jpg (new file, mode 0 → 100644, binary, 86.3 KB)
src/renderer/screens/ShowVideo.vue
@@ -21,17 +21,16 @@ const iconMicrophone = new URL('/images/microphone-input.svg', import.meta.url).

 const recordVolume = ref(0)
 const url = route.query.url as string
 const role = useVideo.list.find((i) => i.url === url)
-const microphoneState = ref<'waitInput' | 'input' | 'loading' | 'disabled'>('waitInput')
+const microphoneState = ref<'waitInput' | 'input' | 'loading' | 'disabled' | 'reply'>('waitInput')
 const videoElement = ref<HTMLVideoElement | null>(null)
+const videoElement2 = ref<HTMLVideoElement | null>(null)
+const videos = [videoElement, videoElement2];

 onMounted(() => {
   // init();
 })

-async function init() {
-  const videoEle = videoElement.value
-}

 router.beforeEach((g) => {
   if (!g.query.url) return router.push('/error')
 })
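
Note: the widened union adds a 'reply' state for the time the avatar is answering. The transitions that are visible in this commit's hunks can be summarized as follows (a summary sketch, not an authoritative state chart; transitions set elsewhere in the component are not listed):

// Microphone/UI states used by ShowVideo.vue after this change.
type MicrophoneState = 'waitInput' | 'input' | 'loading' | 'disabled' | 'reply'

// Transitions visible in this diff:
//   onAsr() starts handling a question         -> 'loading'
//   a QA entry matches and its video plays     -> 'reply'
//   the answer video fires 'ended'             -> 'input'
//   a TTS chunk starts playing                 -> 'reply'
//   the TTS queue drains or runTTSTask throws  -> 'input'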
@@ -184,7 +183,13 @@ async function startVoskWsAudioInput() {

   source.connect(processor)
   processor.connect(audioContext.destination)
-  processor.onaudioprocess = (audioDataChunk) => postAudio(audioDataChunk)
+  processor.onaudioprocess = (audioDataChunk) => {
+    if (
+      microphoneState.value === 'loading' ||
+      microphoneState.value === 'disabled' ||
+      microphoneState.value === 'reply'
+    ) {
+      return;
+    }
+    postAudio(audioDataChunk);
+  }

   await analyzeMicrophoneVolume(mediaStream, (val) => {
     recordVolume.value = val
@@ -249,29 +254,41 @@ function endAudioInput() {
   }
 }

-function setVideoUrl(url: string) {
-  const videoEle = videoElement.value as HTMLVideoElement
-  if (!videoEle) return
+const canplay = () => {
+  videos[1].value!.style.opacity = '1';
+  videos[0].value!.style.opacity = '0';
+  videos[0].value!.pause();
+  videos[1].value!.play();
+  videos[1].value!.removeEventListener('canplay', canplay);
+  videos.unshift(videos.pop()!);
+}

-  videoEle.src = url
-  videoEle.load()
-  videoEle.play()
+function loadVideo(url: string) {
+  videos[1].value!.src = url
+  videos[1].value!.style.opacity = '0';
+  videos[1].value!.addEventListener('canplay', canplay);
 }

 async function onAsr(question: string) {
   endAudioInput()
   console.log('---------------->', question)
-  if (!role) return
+  if (!role) return;
+
+  microphoneState.value = 'loading';

   question = question.replace(/\s/g, '')
   for (let i = 0; i < role.qa.length; i++) {
     const { q, url } = role.qa[i]
     console.log(question + ' : ' + q)
     if (q.includes(question)) {
-      const videoEle = videoElement.value as HTMLVideoElement
-      videoEle && (videoEle.loop = false)
-      videoEle && (videoEle.muted = false)
-      setVideoUrl(url)
+      loadVideo(url)
+      microphoneState.value = 'reply';
+
+      const videoEle = videos[1].value
+      videoEle!.loop = false
+      videoEle!.muted = false
+      videoEle!.onended = () => {
+        videoEle!.onended = null;
+        microphoneState.value = 'input';
+        // whether re-initialization is needed here
+      }

       return
     }
   }
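
The replacement of setVideoUrl with loadVideo plus a canplay handler is the heart of the fix: the answer clip is preloaded into the hidden second <video>, faded in only once it is ready to play, and the two elements are then rotated so that index 0 is always the visible one. A standalone sketch of that double-buffer pattern, under the assumption of two refs equivalent to the diff's videoElement/videoElement2 (front, back and swapTo are illustrative names, not from the commit):

import { ref, type Ref } from 'vue'

const front: Ref<HTMLVideoElement | null> = ref(null)
const back: Ref<HTMLVideoElement | null> = ref(null)
const buffers = [front, back] // buffers[0] is always the visible element

function swapTo(url: string) {
  const next = buffers[1].value!
  next.src = url
  next.style.opacity = '0'
  next.addEventListener('canplay', function onCanPlay() {
    next.removeEventListener('canplay', onCanPlay)
    buffers[0].value!.pause()           // stop the clip being replaced
    buffers[0].value!.style.opacity = '0'
    next.style.opacity = '1'            // reveal the preloaded clip
    void next.play()
    buffers.unshift(buffers.pop()!)     // rotate: the new clip becomes buffers[0]
  })
}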
@@ -358,6 +375,7 @@ async function runTTSTask(tasks: string[]) {
       runAudioPlay()
     }
   } catch (error) {
+    microphoneState.value = 'input'
     console.error(error)
   }
@@ -371,20 +389,21 @@ async function runAudioPlay() {
   isPlayRunning = true
   const audio = ttsAudios.shift()
-  const videoEle = videoElement.value as HTMLVideoElement;

   if (!audio) {
     isPlayRunning = false;
-    videoEle.pause();
+    videos[0].value!.pause();
+    microphoneState.value = 'input';
     return
   }

   audio.onended = () => {
     isPlayRunning = false
-    videoEle && (videoEle.loop = true)
-    videoEle && (videoEle.muted = true)
-    setVideoUrl(new URL('/libai/10.mp4', import.meta.url).href)
+    loadVideo(new URL('/libai/10.mp4', import.meta.url).href)
+    videos[1].value!.loop = true
+    videos[1].value!.muted = true
     runAudioPlay()
   }

-  await audio.play()
+  await audio.play();
+  microphoneState.value = 'reply';
 }

 // eslint-disable-next-line no-unused-vars
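
runAudioPlay drains the ttsAudios queue one clip at a time by chaining onended, and with this change it also pauses the visible video and returns the UI to 'input' when the queue is empty. A reduced sketch of that drain loop, assuming the same ttsAudios, videos and microphoneState as in the diff (declared ambiently here only so the sketch type-checks on its own):

declare const ttsAudios: HTMLAudioElement[]
declare const videos: { value: HTMLVideoElement | null }[]
declare const microphoneState: { value: string }

let isPlayRunning = false

function drainTtsQueue(): void {
  if (isPlayRunning) return
  const audio = ttsAudios.shift()
  if (!audio) {
    // Queue empty: stop the talking-head clip and hand control back to the user.
    videos[0].value!.pause()
    microphoneState.value = 'input'
    return
  }
  isPlayRunning = true
  audio.onended = () => {
    isPlayRunning = false
    drainTtsQueue() // chain to the next clip, if any
  }
  void audio.play()
  microphoneState.value = 'reply'
}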
@@ -400,6 +419,7 @@ async function xfTTS(text: string) {
   })

   console.log('----------------> tts:', res)
 }
 </script>

 <template>
@@ -408,7 +428,8 @@ async function xfTTS(text: string) {
     class="d-flex justify-center align-center"
     :style="{ background: '#000' }"
   >
-    <video id="videoElement" ref="videoElement" :src="url" class="video-ele"></video>
+    <video id="videoElement" ref="videoElement" :src="url" class="video-ele active"></video>
+    <video id="videoElement2" ref="videoElement2" class="video-ele2"></video>
   </div>
   <div class="voice">
@@ -417,12 +438,13 @@ async function xfTTS(text: string) {
       color="#fff"
       variant="elevated"
       size="x-large"
-      :disabled="microphoneState === 'loading' || microphoneState === 'disabled'"
+      :disabled="
+        microphoneState === 'loading' || microphoneState === 'disabled' || microphoneState === 'reply'
+      "
       @pointerdown="startVoskWsAudioInput"
     >
       <v-icon v-if="microphoneState === 'waitInput'" icon="mdi-microphone"></v-icon>
       <v-icon v-if="microphoneState === 'loading'" icon="mdi-microphone-settings"></v-icon>
       <v-icon v-if="microphoneState === 'disabled'" icon="mdi-microphone-off"></v-icon>
+      <v-icon v-if="microphoneState === 'reply'" icon="mdi-message-reply-text-outline"></v-icon>
       <template v-if="microphoneState === 'input'">
         <img width="30" height="30" :src="iconMicrophone" alt="" srcset="" />
@@ -483,8 +505,14 @@ async function xfTTS(text: string) {
   border-radius: 36%;
 }

-.video-ele {
+.video-ele,
+.video-ele2 {
   position: absolute;
   width: 100%;
   height: 100%;
+  opacity: 0;
 }
+
+.video-ele.active,
+.video-ele2.active {
+  opacity: 1;
+}

 .q-list {
src/renderer/store/video.ts
 import { defineStore } from 'pinia'

 type IVideo = {
-  list: { url: string; name: string; qa: { url: string; q: string; a: string }[] }[]
+  list: { url: string; poster: string; name: string; qa: { url: string; q: string; a: string }[] }[]
 }

 const useVideoStore = defineStore('video', {
@@ -11,6 +11,7 @@ const useVideoStore = defineStore('video', {
     list: [
       {
         url: new URL('/libai/wait.mp4', import.meta.url).href,
+        poster: new URL('/libai/poster.jpg', import.meta.url).href,
         name: '李白',
         qa: [
           {
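
For reference, this is the store shape that ShowVideo.vue consumes: the route's url selects a role from list, a recognized question is matched against that role's qa entries, and the new poster field carries a preview image for the role's wait-loop video. A small usage sketch (the import path and the currentUrl/question/playAnswer declarations are assumptions for illustration):

import { useVideoStore } from '../store/video' // path assumed

declare const currentUrl: string
declare const question: string
declare function playAnswer(url: string): void

const store = useVideoStore()

// Pick the role whose wait-loop video matches the current route.
const role = store.list.find((item) => item.url === currentUrl)

// Match a recognized question against the role's canned QA entries,
// mirroring the q.includes(question) check in onAsr.
const hit = role?.qa.find((entry) => entry.q.includes(question))
if (hit) {
  playAnswer(hit.url) // hit.url is the answer clip, hit.a the answer text
}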