ali / CharIP-Electron · Commits

Commit 8d52bf08, authored Dec 04, 2023 by ali
Parent: 25325063

fix: 修复打包资源路径读取错误问题 (fix incorrect resource path resolution in the packaged build)

Showing 11 changed files with 255 additions and 200 deletions (+255 −200)
Changed files:

  src/main/IPCs.ts                                     +13   −8
  src/preload/index.ts                                 +8    −2
  src/renderer/components/layout/DefaultLayout.vue     +11   −1
  src/renderer/components/layout/HeaderLayout.vue      +8    −13
  src/renderer/plugins/tts/FetchTTS.ts                 +2    −3
  src/renderer/screens/PhotoScreen.vue                 +1    −1
  src/renderer/screens/ShowPhoto.vue                   +130  −109
  src/renderer/screens/ShowVideo.vue                   +53   −43
  src/renderer/screens/VideoScreen.vue                 +17   −10
  src/renderer/store/index.ts                          +1    −1
  src/renderer/store/settings.ts                       +11   −9
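Taken together, the diffs below make the main process broadcast the app's base URL (APP_INDEX_URL_DEV in development, APP_INDEX_URL_PROD in the packaged build) over a new mesGetFilePath / msgReceivedFilePath IPC pair; the renderer stores it as settings.filePath and prefixes static asset URLs with it. A minimal sketch of that prefixing pattern, for orientation only (the helper name resolveAsset is not in the diff):

// Sketch of the asset-resolution pattern this commit repeats in the renderer screens.
// settings.filePath holds the index URL sent by the main process; asset paths such as
// item.url ('/images/...') are resolved relative to it instead of the web root.
const resolveAsset = (filePath: string, assetUrl: string): string =>
  filePath + './..' + assetUrl

// Template usage as it appears in the diff:  :src="settings.filePath + './..' + item.url"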
src/main/IPCs.ts  (view file @ 8d52bf08)

@@ -8,6 +8,14 @@ export default class IPCs {
   static browserWindows: Map<string, BrowserWindow[]> = new Map()

   static initialize(window: BrowserWindow): void {
+    ipcMain.on('mesGetFilePath', () => {
+      if (Constants.IS_DEV_ENV) {
+        window.webContents.send('msgReceivedFilePath', `${Constants.APP_INDEX_URL_DEV}`)
+      } else {
+        window.webContents.send('msgReceivedFilePath', Constants.APP_INDEX_URL_PROD)
+      }
+    })
+
     // Get application version
     ipcMain.on('msgRequestGetVersion', () => {
       window.webContents.send('msgReceivedVersion', Constants.APP_VERSION)

@@ -72,21 +80,18 @@ export default class IPCs {
       }
     )

     ipcMain.on('openDevTools', async (event, isOpen: boolean) => {
       if (isOpen) {
-        window.webContents.openDevTools();
+        window.webContents.openDevTools()
         IPCs.browserWindows.forEach((wins) => {
-          wins.forEach(w => {
+          wins.forEach((w) => {
             !w.isDestroyed() && w.webContents.openDevTools()
           })
         })
       } else {
-        window.webContents.closeDevTools();
+        window.webContents.closeDevTools()
         IPCs.browserWindows.forEach((wins) => {
-          wins.forEach(w => {
+          wins.forEach((w) => {
             !w.isDestroyed() && w.webContents.closeDevTools()
           })
         })
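The devtools toggle iterates IPCs.browserWindows, a map of window lists keyed by string; how windows get registered into that map is not part of this commit. A purely hypothetical helper, only to illustrate the shape the map implies (the import path and function name are assumptions):

// Hypothetical helper (not in this commit): register a BrowserWindow under a key
// so the 'openDevTools' handler above can reach it later.
import { BrowserWindow } from 'electron'
import IPCs from './IPCs'

export function registerWindow(key: string, win: BrowserWindow): void {
  const wins = IPCs.browserWindows.get(key) ?? []
  wins.push(win)
  IPCs.browserWindows.set(key, wins)
}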
src/preload/index.ts  (view file @ 8d52bf08)

 import { contextBridge, ipcRenderer } from 'electron'

 // Whitelist of valid channels used for IPC communication (Send message from Renderer to Main)
-const mainAvailChannels: string[] = ['msgRequestGetVersion', 'msgOpenExternalLink', 'openWindow', 'openDevTools']
-const rendererAvailChannels: string[] = ['msgReceivedVersion']
+const mainAvailChannels: string[] = [
+  'mesGetFilePath',
+  'msgRequestGetVersion',
+  'msgOpenExternalLink',
+  'openWindow',
+  'openDevTools'
+]
+const rendererAvailChannels: string[] = ['msgReceivedVersion', 'msgReceivedFilePath']

 contextBridge.exposeInMainWorld('mainApi', {
   send: (channel: string, ...data: any[]): void => {
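The diff is cut off inside exposeInMainWorld; only the channel lists above are confirmed by the commit. A sketch of the usual whitelist-checked bridge such a preload exposes, written here as an assumption (mainAvailChannels and rendererAvailChannels are the constants defined above):

// Sketch, assuming the common whitelist-checked bridge pattern.
import { contextBridge, ipcRenderer } from 'electron'

contextBridge.exposeInMainWorld('mainApi', {
  send: (channel: string, ...data: any[]): void => {
    if (mainAvailChannels.includes(channel)) {
      ipcRenderer.send(channel, ...data) // Renderer -> Main
    }
  },
  receive: (channel: string, listener: (...args: any[]) => void): void => {
    if (rendererAvailChannels.includes(channel)) {
      ipcRenderer.on(channel, listener) // Main -> Renderer, listener gets (event, ...args)
    }
  }
})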
src/renderer/components/layout/DefaultLayout.vue  (view file @ 8d52bf08)

@@ -2,13 +2,23 @@
 import HeaderLayout from '@/renderer/components/layout/HeaderLayout.vue'
 import { ref } from 'vue'
 import { useRouter } from 'vue-router'
+import useStore from '@/renderer/store'
+import { storeToRefs } from 'pinia'

+const { settings } = useStore()
+const setting = storeToRefs(settings)
 const router = useRouter()
 const isHeader = ref(true)

-router.beforeEach((guard) => {
+router.beforeEach(async (guard) => {
   isHeader.value = typeof guard.meta.isHeader === 'boolean' ? (guard.meta.isHeader as boolean) : true
+  window.mainApi.send('mesGetFilePath')
+  window.mainApi.receive('msgReceivedFilePath', (event: Event, path: string) => {
+    setting.filePath.value = path
+  })
 })
 </script>
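The window.mainApi calls rely on the bridge exposed by the preload script; the global typing is not shown in this commit. A hypothetical ambient declaration, just to make the shape used above explicit:

// Hypothetical typing (not in this diff) for the preload bridge used in DefaultLayout.vue.
declare global {
  interface Window {
    mainApi: {
      send: (channel: string, ...data: any[]) => void
      receive: (channel: string, listener: (...args: any[]) => void) => void
    }
  }
}
export {}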
src/renderer/components/layout/HeaderLayout.vue  (view file @ 8d52bf08)

@@ -10,7 +10,6 @@ const route: any = useRoute()
 const { settings } = useStore()
 const setting = storeToRefs(settings)
-settings.tts === 'xf_tts' && settings.getSource()

 const handleRoute = (path: string): void => {

@@ -53,19 +52,17 @@ async function changeSource() {
   console.log(res)
 }

-const voskModelLoading = ref(false);
-async function changeVoskModel(){
-  voskModelLoading.value = true;
-  await settings.downLoadVoskModel();
-  voskModelLoading.value = false;
+const voskModelLoading = ref(false)
+async function changeVoskModel() {
+  voskModelLoading.value = true
+  await settings.downLoadVoskModel()
+  voskModelLoading.value = false
 }
-changeVoskModel();
+changeVoskModel()

 async function changeOpenDevTools() {
-  await window.mainApi.send('openDevTools', setting.isOpenDevTools.value);
+  await window.mainApi.send('openDevTools', setting.isOpenDevTools.value)
 }
 </script>

 <template>
   <v-app-bar color="#d71b1b" density="compact" class="header">

@@ -146,14 +143,13 @@ async function changeOpenDevTools() {
           ></v-text-field>
           <v-text-field
-            style="margin-top: 22px;"
+            style="margin-top: 22px"
             label="LLM 地址"
             :rules="[(value) => !!value || 'LLM 地址必填']"
             hide-details="auto"
             :model-value="setting.llmUrl"
           ></v-text-field>
           <v-switch
             v-model="setting.isFullscreen.value"
             hide-details

@@ -170,7 +166,6 @@ async function changeOpenDevTools() {
             :label="`是否打开 devTool: ${setting.isOpenDevTools.value}`"
             @update:model-value="changeOpenDevTools"
           ></v-switch>
         </v-form>
       </v-sheet>
src/renderer/plugins/tts/FetchTTS.ts  (view file @ 8d52bf08)

@@ -35,8 +35,7 @@ export async function audioAiTTS({
   return res.data
 }

-export async function localTTS({ url, text }: { url: string; text: string; }){
+export async function localTTS({ url, text }: { url: string; text: string }) {
   const resp = await fetch(url, {
     headers: {
       accept: 'application/json, text/plain, */*',

@@ -53,4 +52,4 @@ export async function localTTS({ url, text }: { url: string; text: string }) {
   if (res.results.length < 1) throw new Error(JSON.stringify(res))
   return res.results
-}
\ No newline at end of file
+}
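For reference, the call sites elsewhere in this commit use localTTS like this. A usage sketch; the '@' import alias and the sample text are assumptions, while settings.ttsHost and the call shape come straight from the ShowPhoto.vue / ShowVideo.vue diffs:

// Usage sketch matching the call sites in ShowPhoto.vue / ShowVideo.vue.
import { localTTS } from '@/renderer/plugins/tts'
import useStore from '@/renderer/store'

const { settings } = useStore()
// settings.ttsHost is the TTS endpoint configured in the settings store
const results = await localTTS({ url: settings.ttsHost, text: '你好' })
console.log('----------------> TTS:', results)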
src/renderer/screens/PhotoScreen.vue  (view file @ 8d52bf08)

@@ -83,7 +83,7 @@ function removePhoto(index: number) {
           :width="200"
           aspect-ratio="1/1"
           cover
-          :src="item.url"
+          :src="settings.filePath + './..' + item.url"
           @click="handleOpen($event, item.url)"
         ></v-img>
         <v-btn
src/renderer/screens/ShowPhoto.vue  (view file @ 8d52bf08)

@@ -9,7 +9,7 @@ import type {
 } from '@/renderer/plugins/asr/index'
 import { audioAiTTS, localTTS } from '../plugins/tts'
 import useStore from '@/renderer/store'
-import flvjs from 'flv.js';
+import flvjs from 'flv.js'

 const router = useRouter()
 const route = useRoute()

@@ -19,92 +19,100 @@ const sampleRate = 48000
 const recordVolume = ref(0)

 const url = route.query.url as string
 const microphoneState = ref<'waitInput' | 'input' | 'loading' | 'disabled'>('waitInput')

-const videoElement = ref<HTMLVideoElement | null>(null);
-const can = ref<HTMLCanvasElement | null>(null);
-let flvPlayer: flvjs.Player | null = null;
+const videoElement = ref<HTMLVideoElement | null>(null)
+const can = ref<HTMLCanvasElement | null>(null)
+let flvPlayer: flvjs.Player | null = null

 onMounted(() => {
-  init();
-});
+  init()
+})

 function loadImg(): Promise<HTMLImageElement> {
-  const img = new Image();
-  img.src = url;
+  const img = new Image()
+  img.src = settings.filePath + './..' + url
   return new Promise((resolve, reject) => {
-    img.onload = () => resolve(img);
-    img.onerror = reject;
+    img.onload = () => resolve(img)
+    img.onerror = reject
   })
 }

-async function init(){
-  const img = await loadImg();
-  const videoEle = videoElement.value;
-  const canvasEle = can.value;
+async function init() {
+  const img = await loadImg()
+  const videoEle = videoElement.value
+  const canvasEle = can.value
   const ctx = canvasEle && canvasEle.getContext('2d')
-  if (!videoEle || !canvasEle || !ctx) return;
+  if (!videoEle || !canvasEle || !ctx) return
   draw(ctx, img)
-  canvasEle.width = img.naturalWidth;
-  canvasEle.height = img.naturalHeight;
+  canvasEle.width = img.naturalWidth
+  canvasEle.height = img.naturalHeight

   // initPlayer(videoEle);
-  const fps = 1000 / 30;
-  let lastTime = Date.now();
+  const fps = 1000 / 30
+  let lastTime = Date.now()
   const updateFrame = () => {
     if (Date.now() - lastTime > fps) {
       draw(ctx, img, videoEle, {
-        "width": 579,
-        "height": 579,
-        "center": {
-          "x": 295,
-          "y": 168
+        width: 579,
+        height: 579,
+        center: {
+          x: 295,
+          y: 168
         },
-        "r_w": 304,
-        "r_h": 304
-      });
-      lastTime = Date.now();
+        r_w: 304,
+        r_h: 304
+      })
+      lastTime = Date.now()
     }
-    requestAnimationFrame(updateFrame);
+    requestAnimationFrame(updateFrame)
   }
-  requestAnimationFrame(updateFrame);
+  requestAnimationFrame(updateFrame)
 }

-function draw(ctx: CanvasRenderingContext2D, img: HTMLImageElement, liveVideo?: HTMLVideoElement, videoInfo?: {
-  center: { x: number; y: number; };
-  width: number;
-  height: number;
-  r_w: number;
-  r_h: number;
-}) {
-  ctx.clearRect(0, 0, img.naturalWidth, img.naturalHeight);
-  ctx.drawImage(img, 0, 0, img.naturalWidth, img.naturalHeight);
-  if (liveVideo && videoInfo) {
-    const { center, r_w, r_h } = videoInfo;
-    ctx.drawImage(liveVideo, center.x - r_w / 2, center.y - r_h / 2, r_w, r_h);
+function draw(
+  ctx: CanvasRenderingContext2D,
+  img: HTMLImageElement,
+  liveVideo?: HTMLVideoElement,
+  videoInfo?: {
+    center: { x: number; y: number }
+    width: number
+    height: number
+    r_w: number
+    r_h: number
+  }
+) {
+  ctx.clearRect(0, 0, img.naturalWidth, img.naturalHeight)
+  ctx.drawImage(img, 0, 0, img.naturalWidth, img.naturalHeight)
+  if (liveVideo && videoInfo) {
+    const { center, r_w, r_h } = videoInfo
+    ctx.drawImage(liveVideo, center.x - r_w / 2, center.y - r_h / 2, r_w, r_h)
   }
 }

 // eslint-disable-next-line no-unused-vars
-async function initPlayer(videoEle: HTMLVideoElement){
-  flvPlayer = flvjs.createPlayer({
-    url: 'http://127.0.0.1:7001/live/movie.flv',
-    type: "flv",
-    isLive: true,
-    cors: true
-  }, {
-    // enableWorker: true,
-    enableStashBuffer: false,
-    stashInitialSize: 128,
-  });
-  flvPlayer.attachMediaElement(videoEle);
-  flvPlayer.load();
-  await flvPlayer.play();
+async function initPlayer(videoEle: HTMLVideoElement) {
+  flvPlayer = flvjs.createPlayer(
+    {
+      url: 'http://127.0.0.1:7001/live/movie.flv',
+      type: 'flv',
+      isLive: true,
+      cors: true
+    },
+    {
+      // enableWorker: true,
+      enableStashBuffer: false,
+      stashInitialSize: 128
+    }
+  )
+  flvPlayer.attachMediaElement(videoEle)
+  flvPlayer.load()
+  await flvPlayer.play()
 }

 router.beforeEach((g) => {

@@ -119,7 +127,7 @@ async function initVosk({
   partialResult?: (string) => void
 }) {
   const channel = new MessageChannel()
-  const model = await settings.downLoadVoskModel();
+  const model = await settings.downLoadVoskModel()
   const recognizer = new model.KaldiRecognizer(sampleRate)
   model.registerPort(channel.port1)

@@ -170,14 +178,14 @@ const inputContext: {
   audioContext2?: AudioContext
   scriptProcessorNode?: ScriptProcessorNode
   model?: Model
-  ws?: WebSocket;
+  ws?: WebSocket
 } = {}

 async function startAudioInput() {
   if (microphoneState.value === 'loading') return
   if (microphoneState.value === 'input') {
-    endAudioInput();
+    endAudioInput()
     return
   }

@@ -239,41 +247,44 @@ function endAudioInput() {
 }

 async function onAsr(question: string) {
-  const ws = await initSocket();
-  inputContext.ws = ws;
+  const ws = await initSocket()
+  inputContext.ws = ws

-  let sliceAnswer = '';
-  let answer = '';
-  const answerArray: string[] = [];
-  let isTime = true;
+  let sliceAnswer = ''
+  let answer = ''
+  const answerArray: string[] = []
+  let isTime = true

   ws.onmessage = (message) => {
     try {
-      const { text, event } = JSON.parse(message.data) as {
-        event: string;
-        message_num: number;
-        text: string;
-      }
+      const { text, event } = JSON.parse(message.data) as {
+        event: string
+        message_num: number
+        text: string
+      }

-      if (event === 'stream_end'){
+      if (event === 'stream_end') {
         answerArray.push(sliceAnswer)
-        sliceAnswer = '';
-        inputContext.ws?.close();
-        console.log('----------------> answer: ', answer);
-        return;
+        sliceAnswer = ''
+        inputContext.ws?.close()
+        console.log('----------------> answer: ', answer)
+        return
       }

-      answer += text;
-      isTime && console.time('sliceAnswer');
-      isTime = false;
-      sliceAnswer += text;
+      answer += text
+      isTime && console.time('sliceAnswer')
+      isTime = false
+      sliceAnswer += text

       if (/[。,?!;,.?!;]/.test(text) && sliceAnswer.length >= 20) {
-        console.timeEnd('sliceAnswer');
+        console.timeEnd('sliceAnswer')
         answerArray.push(sliceAnswer)
-        runTTSTask(answerArray);
-        sliceAnswer = '';
-        isTime = true;
+        runTTSTask(answerArray)
+        sliceAnswer = ''
+        isTime = true
       }
     } catch (error) {
       console.log('返回答案错误 -----> ' + JSON.stringify(error))
     }
   }

@@ -281,33 +292,33 @@ async function onAsr(question: string) {
   ws.send(JSON.stringify({ prompt: question, historys_list: [] }))
 }

 function initSocket(): Promise<WebSocket> {
-  const ws = new WebSocket(settings.llmUrl);
+  const ws = new WebSocket(settings.llmUrl)
   return new Promise((resolve, reject) => {
-    ws.onopen = () => resolve(ws);
-    ws.onerror = reject;
-  });
+    ws.onopen = () => resolve(ws)
+    ws.onerror = reject
+  })
 }

-let isTTSRunning = false;
+let isTTSRunning = false
 async function runTTSTask(tasks: string[]) {
-  if (isTTSRunning) return;
-  isTTSRunning = true;
+  if (isTTSRunning) return
+  isTTSRunning = true

   try {
     while (tasks.length) {
       const task = tasks.shift()
-      if (!task) break;
-      console.time(task + ' TTS: ');
-      const res = await localTTS({ url: settings.ttsHost, text: task });
-      console.log('----------------> TTS:', res);
-      console.timeEnd(task + ' TTS: ');
+      if (!task) break
+      console.time(task + ' TTS: ')
+      const res = await localTTS({ url: settings.ttsHost, text: task })
+      console.log('----------------> TTS:', res)
+      console.timeEnd(task + ' TTS: ')
     }
   } catch (error) {
-    console.error(error);
+    console.error(error)
   }

-  isTTSRunning = false;
+  isTTSRunning = false
 }

 // eslint-disable-next-line no-unused-vars

@@ -323,7 +334,6 @@ async function xfTTS(text: string) {
   })
   console.log('----------------> tts:', res)
 }
 </script>

 <template>

@@ -333,8 +343,13 @@ async function xfTTS(text: string) {
     :style="{ background: '#000' }"
   >
+    <!-- <v-img v-if="url" aspect-ratio="9/16" :src="url" @load="initPlayer()"></v-img> -->
-    <canvas id="can" ref="can" style="width: 100%; height: 100%;"></canvas>
-    <video id="videoElement" ref="videoElement" class="video-ele" :style="{ top: 0, left: 0, width: '250px', height: '250px' }">
-    </video>
+    <canvas id="can" ref="can" style="width: 100%; height: 100%"></canvas>
+    <video
+      id="videoElement"
+      ref="videoElement"
+      class="video-ele"
+      :style="{ top: 0, left: 0, width: '250px', height: '250px' }"
+    >
+    </video>
   </div>
   <div class="voice">

@@ -351,7 +366,13 @@ async function xfTTS(text: string) {
           <v-icon v-if="microphoneState === 'disabled'" icon="mdi-microphone-off"></v-icon>
           <template v-if="microphoneState === 'input'">
-            <img width="30" height="30" src="/images/microphone-input.svg" alt="" srcset="" />
+            <img
+              width="30"
+              height="30"
+              :src="settings.filePath + './..' + '/images/microphone-input.svg'"
+              alt=""
+              srcset=""
+            />
             <div class="progress">
               <span class="volume"
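A note on the redraw loop in init(): despite the variable name, fps = 1000 / 30 is a frame interval of about 33 ms, and the Date.now() - lastTime > fps check throttles the requestAnimationFrame loop to roughly 30 redraws per second. A standalone sketch of the same throttling pattern (render stands in for the draw() call and is the only name not taken from the diff):

// Sketch of the ~30 fps throttle used in init().
const frameInterval = 1000 / 30 // ms between redraws (named `fps` in the diff)
let lastTime = Date.now()
const render = () => {
  // per-frame work, e.g. draw(ctx, img, videoEle, { ... }) in ShowPhoto.vue
}
const updateFrame = () => {
  if (Date.now() - lastTime > frameInterval) {
    render()
    lastTime = Date.now()
  }
  requestAnimationFrame(updateFrame)
}
requestAnimationFrame(updateFrame)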
src/renderer/screens/ShowVideo.vue  (view file @ 8d52bf08)

@@ -18,19 +18,18 @@ const sampleRate = 48000
 const recordVolume = ref(0)

 const url = route.query.url as string
-const role = useVideo.list.find(i => i.url === url);
+const role = useVideo.list.find((i) => i.url === url)
 const microphoneState = ref<'waitInput' | 'input' | 'loading' | 'disabled'>('waitInput')

-const videoElement = ref<HTMLVideoElement | null>(null);
+const videoElement = ref<HTMLVideoElement | null>(null)

 onMounted(() => {
   // init();
-});
+})

-async function init(){
-  const videoEle = videoElement.value;
+async function init() {
+  const videoEle = videoElement.value
 }

 router.beforeEach((g) => {
   if (!g.query.url) return router.push('/error')
 })

@@ -43,7 +42,7 @@ async function initVosk({
   partialResult?: (string) => void
 }) {
   const channel = new MessageChannel()
-  const model = await settings.downLoadVoskModel();
+  const model = await settings.downLoadVoskModel()
   const recognizer = new model.KaldiRecognizer(sampleRate)
   model.registerPort(channel.port1)

@@ -94,14 +93,14 @@ const inputContext: {
   audioContext2?: AudioContext
   scriptProcessorNode?: ScriptProcessorNode
   model?: Model
-  ws?: WebSocket;
+  ws?: WebSocket
 } = {}

 async function startAudioInput() {
   if (microphoneState.value === 'loading') return
   if (microphoneState.value === 'input') {
-    endAudioInput();
+    endAudioInput()
     return
   }

@@ -163,50 +162,50 @@ function endAudioInput() {
 }

 async function onAsr(question: string) {
-  endAudioInput();
-  console.log('---------------->', question);
-  const videoEle = videoElement.value as HTMLVideoElement;
-  if (!role || !videoEle) return;
+  endAudioInput()
+  console.log('---------------->', question)
+  const videoEle = videoElement.value as HTMLVideoElement
+  if (!role || !videoEle) return

-  question = question.replace(/\s/g, '');
+  question = question.replace(/\s/g, '')
   for (let i = 0; i < role.qa.length; i++) {
-    const { q, url } = role.qa[i];
-    console.log(question + ' : ' + q);
+    const { q, url } = role.qa[i]
+    console.log(question + ' : ' + q)
     if (q.includes(question)) {
-      videoEle.src = url;
-      videoEle.load();
-      videoEle.play();
+      videoEle.src = settings.filePath + './..' + url
+      videoEle.load()
+      videoEle.play()
     }
   }
 }

 function initSocket(): Promise<WebSocket> {
-  const ws = new WebSocket(settings.llmUrl);
+  const ws = new WebSocket(settings.llmUrl)
   return new Promise((resolve, reject) => {
-    ws.onopen = () => resolve(ws);
-    ws.onerror = reject;
-  });
+    ws.onopen = () => resolve(ws)
+    ws.onerror = reject
+  })
 }

-let isTTSRunning = false;
+let isTTSRunning = false
 async function runTTSTask(tasks: string[]) {
-  if (isTTSRunning) return;
-  isTTSRunning = true;
+  if (isTTSRunning) return
+  isTTSRunning = true

   try {
     while (tasks.length) {
       const task = tasks.shift()
-      if (!task) break;
-      console.time(task + ' TTS: ');
-      const res = await localTTS({ url: settings.ttsHost, text: task });
-      console.log('----------------> TTS:', res);
-      console.timeEnd(task + ' TTS: ');
+      if (!task) break
+      console.time(task + ' TTS: ')
+      const res = await localTTS({ url: settings.ttsHost, text: task })
+      console.log('----------------> TTS:', res)
+      console.timeEnd(task + ' TTS: ')
     }
   } catch (error) {
-    console.error(error);
+    console.error(error)
   }

-  isTTSRunning = false;
+  isTTSRunning = false
 }

 // eslint-disable-next-line no-unused-vars

@@ -222,7 +221,6 @@ async function xfTTS(text: string) {
   })
   console.log('----------------> tts:', res)
 }
 </script>

 <template>

@@ -231,7 +229,12 @@ async function xfTTS(text: string) {
     class="d-flex justify-center align-center"
     :style="{ background: '#000' }"
   >
-    <video id="videoElement" ref="videoElement" :src="url" class="video-ele"></video>
+    <video
+      id="videoElement"
+      ref="videoElement"
+      :src="settings.filePath + './..' + url"
+      class="video-ele"
+    ></video>
   </div>
   <div class="voice">

@@ -248,7 +251,13 @@ async function xfTTS(text: string) {
           <v-icon v-if="microphoneState === 'disabled'" icon="mdi-microphone-off"></v-icon>
           <template v-if="microphoneState === 'input'">
-            <img width="30" height="30" src="/images/microphone-input.svg" alt="" srcset="" />
+            <img
+              width="30"
+              height="30"
+              :src="settings.filePath + './..' + '/images/microphone-input.svg'"
+              alt=""
+              srcset=""
+            />
             <div class="progress">
               <span class="volume"

@@ -267,7 +276,8 @@ async function xfTTS(text: string) {
         <v-chip
           v-for="(item, index) in role?.qa"
           :key="index"
-          class="mb-2 chip" color="white"
+          class="mb-2 chip"
+          color="white"
           variant="outlined"
           @click="onAsr(item.q)"
         >

@@ -318,6 +328,6 @@ async function xfTTS(text: string) {
 }

 .chip {
-  cursor: pointer;;
+  cursor: pointer;
 }
 </style>
src/renderer/screens/VideoScreen.vue  (view file @ 8d52bf08)

@@ -22,12 +22,12 @@ async function handleOpen(event: Event, url: string) {
 }

 function handleEnter(e: Event) {
-  const target = e.target as HTMLVideoElement;
-  target.play();
+  const target = e.target as HTMLVideoElement
+  target.play()
 }

 function handleLeave(e: Event) {
-  const target = e.target as HTMLVideoElement;
-  target.pause();
+  const target = e.target as HTMLVideoElement
+  target.pause()
 }

 // const validateURL = (url: string) => {

@@ -65,7 +65,6 @@ function handleLeave(e: Event) {
 // function removePhoto(index: number) {
 //   video.list.value.splice(index, 1)
 // }
 </script>

 <template>

@@ -78,7 +77,7 @@ function handleLeave(e: Event) {
       validate-on="blur lazy"
     ></v-text-field>
   </v-container> -->
   <v-container class="d-flex flex-wrap">
     <v-sheet
       v-for="item in video.list.value"
       :key="item.url"

@@ -88,7 +87,15 @@ function handleLeave(e: Event) {
       class="video-wrap d-flex spacing-playground pa-6 mr-4 mt-4"
       rounded
     >
-      <video class="video-item" loop :src="item.url" muted @click="handleOpen($event,item.url)" @pointerenter="handleEnter" @pointerleave="handleLeave"></video>
+      <video
+        class="video-item"
+        loop
+        :src="settings.filePath + './..' + item.url"
+        muted
+        @click="handleOpen($event, item.url)"
+        @pointerenter="handleEnter"
+        @pointerleave="handleLeave"
+      ></video>
       <!-- <v-btn
         density="compact"
         elevation="1"

@@ -107,7 +114,7 @@ function handleLeave(e: Event) {
 .video-wrap {
   position: relative;
 }

 .video-wrap:hover .video-overlay {
   opacity: 1;
 }

 .video-overlay {

@@ -116,7 +123,7 @@ function handleLeave(e: Event) {
   left: 0;
   width: 100%;
   height: 100%;
   background: rgba(0, 0, 0, 0.4);
   display: flex;
   justify-content: center;
   align-items: center;

@@ -126,4 +133,4 @@ function handleLeave(e: Event) {
 .overlay-hover {
   opacity: 1 !important;
 }
-</style>
\ No newline at end of file
+</style>
src/renderer/store/index.ts  (view file @ 8d52bf08)

@@ -6,6 +6,6 @@ export default function useStore() {
   return {
     settings: useSettings(),
     photo: usePhoto(),
-    video: useVideo(),
+    video: useVideo()
   }
 }
src/renderer/store/settings.ts  (view file @ 8d52bf08)

 import { defineStore } from 'pinia'
 import { Vosk } from '@/renderer/plugins/asr/index'
 import type { Model } from '@/renderer/plugins/asr/index'

-const voskModelMap: Map<string, Model | null> = new Map();
+const voskModelMap: Map<string, Model | null> = new Map()

 export type ISettings = {
+  filePath: string
   asr: 'vosk_asr' | 'xf_asr'
   voskModels: string[]
   voskSelectModel: string

@@ -30,6 +29,7 @@ const useSettingsStore = defineStore('settings', {
   persist: true,
   state: () =>
     ({
+      filePath: '',
       asr: 'vosk_asr',
       tts: 'xf_tts',
       voskModels: [

@@ -53,7 +53,7 @@ const useSettingsStore = defineStore('settings', {
       selectSource: '',
       isFullscreen: 'no',
       isOpenDevTools: false,
-      llmUrl: 'ws://192.168.50.50:9001/api/v1/stream',
+      llmUrl: 'ws://192.168.50.50:9001/api/v1/stream'
     }) as ISettings,
   getters: {},
   actions: {

@@ -69,14 +69,16 @@ const useSettingsStore = defineStore('settings', {
       this.source = res.data
     },
     async downLoadVoskModel() {
-      if (voskModelMap.has(this.$state.voskSelectModel)){
+      if (voskModelMap.has(this.$state.voskSelectModel)) {
        return voskModelMap.get(this.$state.voskSelectModel) as Model
       }

-      const model = await Vosk.createModel(`https://resources.laihua.com/2023-11-29/${this.$state.voskSelectModel}`);
-      voskModelMap.set(this.$state.voskSelectModel, model);
+      const model = await Vosk.createModel(
+        `https://resources.laihua.com/2023-11-29/${this.$state.voskSelectModel}`
+      )
+      voskModelMap.set(this.$state.voskSelectModel, model)

-      return model;
+      return model
     }
   }
 })
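Because the store is declared with persist: true, the new filePath field is written to persisted state in addition to being refreshed on every navigation by DefaultLayout.vue. A small sketch of reading it from a component, assuming the same store setup the other diffs use (the asset path here is the one from the template diffs):

// Sketch: reading the persisted filePath from a component.
import useStore from '@/renderer/store'

const { settings } = useStore()
// settings.filePath is APP_INDEX_URL_DEV or APP_INDEX_URL_PROD, sent by the main process
const assetSrc = settings.filePath + './..' + '/images/microphone-input.svg'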