Commit a5aa32b4 authored by ali's avatar ali

feat: 照片数字人直播接入

parent 9b2696f3
...@@ -20,6 +20,7 @@ ...@@ -20,6 +20,7 @@
"editor.tabSize": 2, "editor.tabSize": 2,
"cSpell.words": [ "cSpell.words": [
"flvjs", "flvjs",
"superres",
"Vosk" "Vosk"
], ],
"editor.inlineSuggest.showToolbar": "always" "editor.inlineSuggest.showToolbar": "always"
......
import type { HWLLSPlayer } from '@/renderer/utils/HWLLS_SDK_Web_2.3.0/export'; import type { HWLLSPlayer } from '@/renderer/utils/HWLLS_SDK_Web_2.3.0/export'
declare global { declare global {
// eslint-disable-next-line no-unused-vars // eslint-disable-next-line no-unused-vars
......
...@@ -12,7 +12,6 @@ ...@@ -12,7 +12,6 @@
"axios": "^1.6.2", "axios": "^1.6.2",
"electron-store": "^8.1.0", "electron-store": "^8.1.0",
"EventEmitter": "^1.0.0", "EventEmitter": "^1.0.0",
"events": "^3.3.0",
"flv.js": "^1.6.2", "flv.js": "^1.6.2",
"pinia": "^2.1.7", "pinia": "^2.1.7",
"pinia-plugin-persistedstate": "^3.2.0", "pinia-plugin-persistedstate": "^3.2.0",
...@@ -5552,14 +5551,6 @@ ...@@ -5552,14 +5551,6 @@
"node": ">=0.12" "node": ">=0.12"
} }
}, },
"node_modules/events": {
"version": "3.3.0",
"resolved": "https://registry.npmmirror.com/events/-/events-3.3.0.tgz",
"integrity": "sha512-mQw+2fkQbALzQ7V0MY0IqdnXNOeTtP4r0lN9z7AAawCXgqea7bDii20AYrIBrFd/Hx0M2Ocz6S111CaFkUcb0Q==",
"engines": {
"node": ">=0.8.x"
}
},
"node_modules/exit-hook": { "node_modules/exit-hook": {
"version": "1.1.1", "version": "1.1.1",
"resolved": "https://registry.npmmirror.com/exit-hook/-/exit-hook-1.1.1.tgz", "resolved": "https://registry.npmmirror.com/exit-hook/-/exit-hook-1.1.1.tgz",
......
import { BrowserWindow, ipcMain, shell, BrowserWindowConstructorOptions, app } from 'electron' import { BrowserWindow, ipcMain, shell, BrowserWindowConstructorOptions, app } from 'electron'
import Constants from './utils/Constants' import Constants from './utils/Constants'
import fs from 'fs'
import http from './utils/http'
/* /*
* IPC Communications * IPC Communications
...@@ -7,6 +9,12 @@ import Constants from './utils/Constants' ...@@ -7,6 +9,12 @@ import Constants from './utils/Constants'
export default class IPCs { export default class IPCs {
static browserWindows: Map<string, BrowserWindow[]> = new Map() static browserWindows: Map<string, BrowserWindow[]> = new Map()
// Read a local file synchronously and return its raw contents (Buffer).
// Used by the 'fileUpload' IPC handler below to load a generated audio file
// from disk before uploading it. Synchronous read blocks the main process;
// acceptable here for small audio clips — NOTE(review): confirm file sizes stay small.
static readFile(path) {
const file = fs.readFileSync(path)
return file
}
static initialize(window: BrowserWindow): void { static initialize(window: BrowserWindow): void {
ipcMain.on('mesGetUserData', () => { ipcMain.on('mesGetUserData', () => {
window.webContents.send('msgReceivedUserData', app.getPath('userData')) window.webContents.send('msgReceivedUserData', app.getPath('userData'))
...@@ -78,6 +86,9 @@ export default class IPCs { ...@@ -78,6 +86,9 @@ export default class IPCs {
await win.loadURL(url) await win.loadURL(url)
// Initialize IPC Communication
IPCs.initializeChildWindow(win)
if (!IPCs.browserWindows.has(url)) { if (!IPCs.browserWindows.has(url)) {
IPCs.browserWindows.set(url, []) IPCs.browserWindows.set(url, [])
} }
...@@ -106,4 +117,20 @@ export default class IPCs { ...@@ -106,4 +117,20 @@ export default class IPCs {
} }
}) })
} }
// Wire IPC handlers for a newly created child window.
// 'fileUpload': reads the local file at `path`, wraps it as a WAV blob, uploads it
// to the laihua file-upload endpoint, then forwards the server response to the
// renderer via 'msgReceivedFileUploadResponse'.
// NOTE(review): ipcMain.on registers a NEW handler every time a child window is
// created and never removes it — with multiple child windows, every handler fires
// for each 'fileUpload' message and each sends a response to its own window.
// Consider ipcMain.removeListener on window close, or a single shared handler.
static initializeChildWindow(window: BrowserWindow) {
ipcMain.on('fileUpload', async (event, path: string) => {
const content = IPCs.readFile(path)
const formData = new FormData()
// assumes uploaded files are WAV audio (TTS output) — TODO confirm for other callers
const blob = new Blob([content], { type: 'audio/wav' })
formData.append('file', blob)
const response = await http({
url: 'https://beta.laihua.com/api/upload/file',
method: 'POST',
data: formData
})
window.webContents.send('msgReceivedFileUploadResponse', response)
})
}
} }
import axios from 'axios'
import type { AxiosRequestConfig } from 'axios'

// Shared axios instance so defaults/interceptors can be configured in one place.
export const axiosInstance = axios.create()

/**
 * Normalized envelope for API responses.
 * On success the server body is returned as-is (it is expected to match this shape);
 * on failure `error` is true and `code`/`message`/`data` carry whatever diagnostic
 * information was available.
 */
export interface ApiResult<T = unknown> {
  error?: boolean
  code?: number
  message?: string
  msg?: string
  data?: T
  [k: string]: any
}

/**
 * Thin wrapper around axios that never throws: all failures are reported
 * through the `error` flag of the returned ApiResult so callers can use a
 * single `if (result.error)` check.
 *
 * @param input standard axios request config (url, method, data, …)
 * @returns the server body on HTTP 200, otherwise an error result
 */
export default async function http<T>(input: AxiosRequestConfig): Promise<ApiResult<T>> {
  try {
    const response = await axiosInstance(input)
    if (response.status === 200) {
      return response.data
    }
    // Non-200 without a throw: surface the status so callers can diagnose it.
    return { error: true, code: response.status }
  } catch (error) {
    // Previously this branch omitted `error: true`, so thrown failures
    // (network errors, non-2xx when axios throws) looked like successes
    // to callers that only check `result.error`.
    return {
      error: true,
      // NOTE(review): axios puts the HTTP status at response.status, not
      // response.code — kept the original lookup chain for compatibility,
      // but `code` here usually resolves to error.code (e.g. 'ECONNABORTED').
      code: (error as any).response?.code || (error as any).code,
      message: (error as any)?.message,
      data: (error as any)?.response?.data
    }
  }
}
...@@ -8,13 +8,15 @@ const mainAvailChannels: string[] = [ ...@@ -8,13 +8,15 @@ const mainAvailChannels: string[] = [
'openWindow', 'openWindow',
'openDevTools', 'openDevTools',
'mesGetUserData', 'mesGetUserData',
'mesGetAppData' 'mesGetAppData',
'fileUpload'
] ]
const rendererAvailChannels: string[] = [ const rendererAvailChannels: string[] = [
'msgReceivedVersion', 'msgReceivedVersion',
'msgReceivedFilePath', 'msgReceivedFilePath',
'msgReceivedUserData', 'msgReceivedUserData',
'msgReceivedAppData' 'msgReceivedAppData',
'msgReceivedFileUploadResponse'
] ]
contextBridge.exposeInMainWorld('mainApi', { contextBridge.exposeInMainWorld('mainApi', {
......
...@@ -45,6 +45,12 @@ const asrItems = ref([ ...@@ -45,6 +45,12 @@ const asrItems = ref([
'vosk_ws' 'vosk_ws'
// 'Whisper Api' // 'Whisper Api'
]) ])
// Candidate photo-digital-human live-streaming servers offered in the
// settings "直播地址" (live host) dropdown; the chosen value is persisted
// in setting.liveHost.
const liveHosts = ref([
'http://111.229.216.162:9000',
'http://124.221.182.173:9000',
'http://110.42.214.59:9000',
'http://122.51.32.12:9000'
])
const asrSelect = ref(setting.asr) const asrSelect = ref(setting.asr)
const source = computed(() => { const source = computed(() => {
...@@ -186,6 +192,15 @@ function clear() { ...@@ -186,6 +192,15 @@ function clear() {
:model-value="setting.llmUrl" :model-value="setting.llmUrl"
></v-text-field> ></v-text-field>
<!-- Live-host selector; validation message fixed: it previously said
     "请选择音色" (select a voice timbre), copy-pasted from the TTS field,
     but this field selects the live-stream server address. -->
<v-select
  v-model="setting.liveHost.value"
  style="margin-top: 22px"
  :items="liveHosts"
  :rules="[(v) => !!v || '请选择直播地址']"
  label="直播地址"
  required
></v-select>
<v-slider <v-slider
v-model="setting.llmToTTSSliceLength.value" v-model="setting.llmToTTSSliceLength.value"
label="TTS 分句长度" label="TTS 分句长度"
......
...@@ -6,6 +6,6 @@ ...@@ -6,6 +6,6 @@
<body> <body>
<div id="app"></div> <div id="app"></div>
</body> </body>
<script src="./utils/HWLLS_SDK_Web_2.3.0/lib/HWLLSPlayer.js"></script> <script src="./HWLLSPlayer.js"></script>
<script type="module" src="./main.ts"></script> <script type="module" src="./main.ts"></script>
</html> </html>
import EventEmitter from 'EventEmitter'; import EventEmitter from 'EventEmitter'
/** /**
* *
...@@ -30,41 +30,41 @@ import EventEmitter from 'EventEmitter'; ...@@ -30,41 +30,41 @@ import EventEmitter from 'EventEmitter';
* *
*/ */
export type StartPlayOptions = { export type StartPlayOptions = {
objectFit?: 'contain' | 'cover' | 'fill'; objectFit?: 'contain' | 'cover' | 'fill'
muted?: boolean; muted?: boolean
sessionId?: string; sessionId?: string
showLoading?: boolean; showLoading?: boolean
autoPlay?: boolean; autoPlay?: boolean
poster?: { poster?: {
url?: string; url?: string
mode?: 'fill' | 'crop'; mode?: 'fill' | 'crop'
startEnable?: boolean; startEnable?: boolean
pauseEnable: boolean; pauseEnable: boolean
}; }
}; }
// 自定义事件类型 // 自定义事件类型
export type HwEventType = 'videoStart' | 'audioStart' | 'audioBroken' | 'videoBroken' | 'error'; // 场景页切换 export type HwEventType = 'videoStart' | 'audioStart' | 'audioBroken' | 'videoBroken' | 'error' // 场景页切换
export type HwEventTypeData<T extends HwEventType> = { export type HwEventTypeData<T extends HwEventType> = {
videoStart: []; videoStart: []
audioStart: []; audioStart: []
audioBroken: []; audioBroken: []
videoBroken: []; videoBroken: []
error: [{ code: number; message: string }]; error: [{ code: number; message: string }]
}[T]; }[T]
export type HwEventTypeFn<T extends HwEventType> = { export type HwEventTypeFn<T extends HwEventType> = {
// eslint-disable-next-line no-unused-vars // eslint-disable-next-line no-unused-vars
[K in T]: (...args: HwEventTypeData<T>) => void; [K in T]: (...args: HwEventTypeData<T>) => void
}[T]; }[T]
export class HwWebRTC extends EventEmitter { export class HwWebRTC extends EventEmitter {
elementId = ''; elementId = ''
startPlayOptions: StartPlayOptions | null = null; startPlayOptions: StartPlayOptions | null = null
client: any = null; client: any = null
constructor(id: string, log: 'none' | 'error' | 'warn' | 'info' | 'debug' = 'none') { constructor(id: string, log: 'none' | 'error' | 'warn' | 'info' | 'debug' = 'none') {
super(); super()
this.elementId = id; this.elementId = id
// setLogLevel(log); // setLogLevel(log);
} }
...@@ -75,7 +75,7 @@ export class HwWebRTC extends EventEmitter { ...@@ -75,7 +75,7 @@ export class HwWebRTC extends EventEmitter {
* @returns 是否成功 * @returns 是否成功
*/ */
emit<T extends HwEventType>(event: T, ...args: HwEventTypeData<T>): boolean { emit<T extends HwEventType>(event: T, ...args: HwEventTypeData<T>): boolean {
return super.emit(event, ...args); return super.emit(event, ...args)
} }
/** /**
...@@ -86,16 +86,16 @@ export class HwWebRTC extends EventEmitter { ...@@ -86,16 +86,16 @@ export class HwWebRTC extends EventEmitter {
*/ */
on<T extends HwEventType>(event: T, fn: HwEventTypeFn<T>): this { on<T extends HwEventType>(event: T, fn: HwEventTypeFn<T>): this {
// fn 可能确实只有一个参数, 只能使用as // fn 可能确实只有一个参数, 只能使用as
return super.on(event, fn as (...args: any[]) => void); return super.on(event, fn as (...args: any[]) => void)
} }
/** /**
* 预处理:获取浏览器的版本号、检查兼容性 * 预处理:获取浏览器的版本号、检查兼容性
*/ */
static async isBrowserSupport() { static async isBrowserSupport() {
let check = false; let check = false
check = await window.HWLLSPlayer.checkSystemRequirements(); check = await window.HWLLSPlayer.checkSystemRequirements()
return check; return check
} }
/** /**
...@@ -109,51 +109,51 @@ export class HwWebRTC extends EventEmitter { ...@@ -109,51 +109,51 @@ export class HwWebRTC extends EventEmitter {
objectFit: 'contain' objectFit: 'contain'
} }
) { ) {
if (this.client) this.destroyed(); if (this.client) this.destroyed()
this.startPlayOptions = options; this.startPlayOptions = options
this.client = window.HWLLSPlayer.createClient('webrtc'); this.client = window.HWLLSPlayer.createClient('webrtc')
await this.client.startPlay(url, { await this.client.startPlay(url, {
elementId: this.elementId, elementId: this.elementId,
...this.startPlayOptions ...this.startPlayOptions
}); })
this.client.enableStreamStateDetection(true, 2); this.client.enableStreamStateDetection(true, 2)
this._bindEvents(); this._bindEvents()
} }
private _bindEvents() { private _bindEvents() {
this.client.on('video-start', () => { this.client.on('video-start', () => {
this.emit('videoStart'); this.emit('videoStart')
}); })
this.client.on('audio-start', () => { this.client.on('audio-start', () => {
this.emit('audioStart'); this.emit('audioStart')
}); })
this.client.on('audio-broken', () => { this.client.on('audio-broken', () => {
this.emit('audioBroken'); this.emit('audioBroken')
}); })
this.client.on('video-broken', () => { this.client.on('video-broken', () => {
this.emit('videoBroken'); this.emit('videoBroken')
}); })
// this.client.on('audio-recovery', () => { // this.client.on('audio-recovery', () => {
// logManage.log('----------------> audio-recovery', 1); // logManage.log('----------------> audio-recovery', 1);
// }); // });
// this.client.on('video-recovery', () => { // this.client.on('video-recovery', () => {
// logManage.log('----------------> video-recovery', 1); // logManage.log('----------------> video-recovery', 1);
// }); // });
this.client.on('Error', (error: any) => this.emit('error', error)); this.client.on('Error', (error: any) => this.emit('error', error))
} }
/** /**
* 停止播放:停止播放请求 * 停止播放:停止播放请求
*/ */
stopPlay() { stopPlay() {
this.client && this.client.stopPlay(); this.client && this.client.stopPlay()
} }
/** /**
* 后处理:销毁客户端等。 * 后处理:销毁客户端等。
*/ */
destroyed() { destroyed() {
this.client?.offAllEvents(); this.client?.offAllEvents()
this.client?.destoryClient(); this.client?.destoryClient()
} }
} }
This diff is collapsed.
This diff is collapsed.
...@@ -53,7 +53,7 @@ async function appendPhoto(url: string) { ...@@ -53,7 +53,7 @@ async function appendPhoto(url: string) {
return '图片加载失败!' return '图片加载失败!'
} }
photo.list.value.push({ url }) photo.list.value.push({ url, liveUrl: url })
urlValue.value = '' urlValue.value = ''
return true return true
......
...@@ -11,21 +11,21 @@ import type { ...@@ -11,21 +11,21 @@ import type {
import { audioAiTTS, localTTS } from '../plugins/tts' import { audioAiTTS, localTTS } from '../plugins/tts'
import useStore from '@/renderer/store' import useStore from '@/renderer/store'
import flvjs from 'flv.js' import flvjs from 'flv.js'
import { PhotoRole } from '@/renderer/plugins/live/PhotoRole'; import { PhotoAnswer, PhotoRole } from '@/renderer/plugins/live/PhotoRole'
const router = useRouter() const router = useRouter()
const route = useRoute() const route = useRoute()
const { settings } = useStore() const { settings, photo } = useStore()
let sampleRate = 48000 let sampleRate = 48000
const bufferSize = 8192 const bufferSize = 8192
const iconMicrophone = new URL('/images/microphone-input.svg', import.meta.url).href const iconMicrophone = new URL('/images/microphone-input.svg', import.meta.url).href
const recordVolume = ref(0) const recordVolume = ref(0)
const url = route.query.url as string const url = route.query.url as string
const microphoneState = ref<'waitInput' | 'input' | 'loading' | 'disabled'>('waitInput') const microphoneState = ref<'waitInput' | 'input' | 'loading' | 'disabled' | 'reply'>('waitInput')
const videoElement = ref<HTMLVideoElement | null>(null) const videoElement = ref<HTMLVideoElement | null>(null)
const can = ref<HTMLCanvasElement | null>(null) const can = ref<HTMLCanvasElement | null>(null)
let photoRole: PhotoRole | null = null; let photoRole: PhotoRole | null = null
let flvPlayer: flvjs.Player | null = null let flvPlayer: flvjs.Player | null = null
onMounted(() => { onMounted(() => {
...@@ -42,40 +42,55 @@ function loadImg(): Promise<HTMLImageElement> { ...@@ -42,40 +42,55 @@ function loadImg(): Promise<HTMLImageElement> {
} }
async function init() { async function init() {
microphoneState.value = 'loading'
const img = await loadImg() const img = await loadImg()
const videoEle = videoElement.value const videoEle = videoElement.value
const canvasEle = can.value const canvasEle = can.value
const ctx = canvasEle && canvasEle.getContext('2d') const ctx = canvasEle && canvasEle.getContext('2d')
if (!videoEle || !canvasEle || !ctx) return if (!videoEle || !canvasEle || !ctx) return
draw(ctx, img)
canvasEle.width = img.naturalWidth canvasEle.width = img.naturalWidth
canvasEle.height = img.naturalHeight canvasEle.height = img.naturalHeight
photoRole = new PhotoRole(url, canvasEle); const item = photo.list.find((i) => i.url === url)
photoRole = new PhotoRole(settings.liveHost, `${item?.liveUrl}`, canvasEle)
photoRole.on('asyncAnswer', (ans) => {
if (ans.playState === 'playing') {
microphoneState.value = 'reply'
return
}
if (
microphoneState.value === 'reply' &&
ans.playState === 'pause' &&
photoRole!.taskQueueLength === 0 &&
answerArray.length === 0
) {
microphoneState.value = 'input'
}
})
// initPlayer(videoEle); // initPlayer(videoEle);
try {
await photoRole.init()
} catch (error) {
console.error(error)
return
}
microphoneState.value = 'waitInput'
const fps = 1000 / 30 const fps = 1000 / 30
let lastTime = Date.now() let lastTime = Date.now()
const updateFrame = () => { const updateFrame = () => {
if (Date.now() - lastTime > fps) { if (Date.now() - lastTime > fps) {
draw(ctx, img, videoEle, { photoRole?.draw()
width: 579,
height: 579,
center: {
x: 295,
y: 168
},
r_w: 304,
r_h: 304
})
lastTime = Date.now() lastTime = Date.now()
} }
requestAnimationFrame(updateFrame) requestAnimationFrame(updateFrame)
} }
requestAnimationFrame(updateFrame) requestAnimationFrame(updateFrame)
await photoRole.initLive();
} }
function draw( function draw(
...@@ -258,7 +273,7 @@ async function startVoskWsAudioInput() { ...@@ -258,7 +273,7 @@ async function startVoskWsAudioInput() {
} }
await initVoskWS() await initVoskWS()
sampleRate = 8000 sampleRate = 16000
const mediaStream = await navigator.mediaDevices.getUserMedia({ const mediaStream = await navigator.mediaDevices.getUserMedia({
audio: { audio: {
echoCancellation: true, echoCancellation: true,
...@@ -275,7 +290,17 @@ async function startVoskWsAudioInput() { ...@@ -275,7 +290,17 @@ async function startVoskWsAudioInput() {
source.connect(processor) source.connect(processor)
processor.connect(audioContext.destination) processor.connect(audioContext.destination)
processor.onaudioprocess = (audioDataChunk) => postAudio(audioDataChunk) processor.onaudioprocess = (audioDataChunk) => {
if (
microphoneState.value === 'loading' ||
microphoneState.value === 'disabled' ||
microphoneState.value === 'reply'
) {
return
}
postAudio(audioDataChunk)
}
await analyzeMicrophoneVolume(mediaStream, (val) => { await analyzeMicrophoneVolume(mediaStream, (val) => {
recordVolume.value = val recordVolume.value = val
...@@ -340,16 +365,20 @@ function endAudioInput() { ...@@ -340,16 +365,20 @@ function endAudioInput() {
} }
} }
const answerArray: { text: string; isLast: boolean }[] = []
async function onAsr(question: string) { async function onAsr(question: string) {
console.log('---------------->question: ', question) console.log('---------------->question: ', question)
endAudioInput()
microphoneState.value = 'loading'
const ws = await initLLMSocket() const ws = await initLLMSocket()
inputContext.ws = ws inputContext.ws = ws
let sliceAnswer = '' let sliceAnswer = ''
let answer = '' let answer = ''
const answerArray: string[] = [] answerArray.length = 0
let isTime = true let isTime = true
photoRole!.answerArgs = new PhotoAnswer()
ws.onmessage = (message) => { ws.onmessage = (message) => {
try { try {
...@@ -360,18 +389,17 @@ async function onAsr(question: string) { ...@@ -360,18 +389,17 @@ async function onAsr(question: string) {
} }
if (event === 'stream_end') { if (event === 'stream_end') {
answerArray.push(sliceAnswer) answerArray.push({ text: sliceAnswer, isLast: true })
runTTSTask(answerArray)
sliceAnswer = ''
answerArray.push(sliceAnswer)
sliceAnswer = '' sliceAnswer = ''
runTTSTask(answerArray)
inputContext.ws?.close() inputContext.ws?.close()
console.log('----------------> answer: ', answer) console.log('----------------> answer: ', answer)
return return
} }
answer += text answer += text
photoRole!.answerArgs!.answer += answer
photoRole!.answerArgs!._typingAnswer.push(answer)
isTime && console.time('sliceAnswer') isTime && console.time('sliceAnswer')
isTime = false isTime = false
...@@ -381,7 +409,7 @@ async function onAsr(question: string) { ...@@ -381,7 +409,7 @@ async function onAsr(question: string) {
sliceAnswer += t sliceAnswer += t
if (/[。,?!;,.?!;]/.test(t) && sliceAnswer.length >= settings.llmToTTSSliceLength) { if (/[。,?!;,.?!;]/.test(t) && sliceAnswer.length >= settings.llmToTTSSliceLength) {
console.timeEnd('sliceAnswer') console.timeEnd('sliceAnswer')
answerArray.push(sliceAnswer) answerArray.push({ text: sliceAnswer, isLast: true })
runTTSTask(answerArray) runTTSTask(answerArray)
sliceAnswer = '' sliceAnswer = ''
isTime = true isTime = true
...@@ -405,7 +433,7 @@ function initLLMSocket(): Promise<WebSocket> { ...@@ -405,7 +433,7 @@ function initLLMSocket(): Promise<WebSocket> {
} }
let isTTSRunning = false let isTTSRunning = false
async function runTTSTask(tasks: string[]) { async function runTTSTask(tasks: { text: string; isLast: boolean }[]) {
if (isTTSRunning) return if (isTTSRunning) return
isTTSRunning = true isTTSRunning = true
...@@ -413,20 +441,24 @@ async function runTTSTask(tasks: string[]) { ...@@ -413,20 +441,24 @@ async function runTTSTask(tasks: string[]) {
while (tasks.length) { while (tasks.length) {
const task = tasks.shift() const task = tasks.shift()
if (!task) break if (!task) break
if (task.length < 1) continue if (task.text.trim().length < 1) continue
console.time(task + ' TTS: ') console.time(task + ' TTS: ')
const res = await localTTS({ const res = await localTTS({
url: settings.ttsHost, url: settings.ttsHost,
text: task, text: task.text,
audio_path: settings.userData audio_path: settings.userData
}) })
console.log('----------------> TTS:', res[0].text) console.log('----------------> TTS:', res[0].text)
console.timeEnd(task + ' TTS: ') console.timeEnd(task + ' TTS: ')
const audio = new Audio(`file://${res[0].text}`) console.log('---------------->', res[0].text)
audio.load()
ttsAudios.push(audio) const audioPath = await uploadFile({ filePath: res[0].text })
runAudioPlay() photoRole?.enQueue({
taskId: photoRole.sessionId,
audioUrl: `https://resources.laihua.com/${audioPath}`,
isLast: task.isLast
})
} }
} catch (error) { } catch (error) {
console.error(error) console.error(error)
...@@ -435,6 +467,21 @@ async function runTTSTask(tasks: string[]) { ...@@ -435,6 +467,21 @@ async function runTTSTask(tasks: string[]) {
isTTSRunning = false isTTSRunning = false
} }
// Upload a local file via the main process: sends the path over the
// 'fileUpload' IPC channel and resolves with the uploaded file's server
// filename from the 'msgReceivedFileUploadResponse' reply.
// Rejects with the stringified result when the server answers non-200.
// NOTE(review): each call registers a new 'msgReceivedFileUploadResponse'
// listener that is never removed — repeated/concurrent uploads accumulate
// listeners and may cross-resolve with each other's responses; verify
// mainApi exposes a removeListener/once variant and use it here.
// NOTE(review): the promise never settles if the main process fails to
// reply — consider a timeout.
function uploadFile({ filePath }: { filePath: string }) {
return new Promise<string>((resolve, reject) => {
window.mainApi.receive(
'msgReceivedFileUploadResponse',
(event: Event, result: { code: number; data: null | { filename: string } }) => {
if (result.code !== 200) {
return reject(JSON.stringify(result))
}
resolve(result.data?.filename || '')
}
)
window.mainApi.send('fileUpload', filePath)
})
}
const ttsAudios: HTMLAudioElement[] = [] const ttsAudios: HTMLAudioElement[] = []
let isPlayRunning = false let isPlayRunning = false
async function runAudioPlay() { async function runAudioPlay() {
...@@ -452,7 +499,6 @@ async function runAudioPlay() { ...@@ -452,7 +499,6 @@ async function runAudioPlay() {
} }
await audio.play() await audio.play()
} }
</script> </script>
<template> <template>
...@@ -477,12 +523,17 @@ async function runAudioPlay() { ...@@ -477,12 +523,17 @@ async function runAudioPlay() {
color="#fff" color="#fff"
variant="elevated" variant="elevated"
size="x-large" size="x-large"
:disabled="microphoneState === 'loading' || microphoneState === 'disabled'" :disabled="
microphoneState === 'loading' ||
microphoneState === 'disabled' ||
microphoneState === 'reply'
"
@pointerdown="startVoskWsAudioInput" @pointerdown="startVoskWsAudioInput"
> >
<v-icon v-if="microphoneState === 'waitInput'" icon="mdi-microphone"></v-icon> <v-icon v-if="microphoneState === 'waitInput'" icon="mdi-microphone"></v-icon>
<v-icon v-if="microphoneState === 'loading'" icon="mdi-microphone-settings"></v-icon> <v-icon v-if="microphoneState === 'loading'" icon="mdi-microphone-settings"></v-icon>
<v-icon v-if="microphoneState === 'disabled'" icon="mdi-microphone-off"></v-icon> <v-icon v-if="microphoneState === 'disabled'" icon="mdi-microphone-off"></v-icon>
<v-icon v-if="microphoneState === 'reply'" icon="mdi-message-reply-text-outline"></v-icon>
<template v-if="microphoneState === 'input'"> <template v-if="microphoneState === 'input'">
<img width="30" height="30" :src="iconMicrophone" alt="" srcset="" /> <img width="30" height="30" :src="iconMicrophone" alt="" srcset="" />
......
import { defineStore } from 'pinia' import { defineStore } from 'pinia'
type IPhoto = { type IPhoto = {
list: { url: string }[] list: { url: string; liveUrl: string }[]
} }
const usePhotoStore = defineStore('photo', { const usePhotoStore = defineStore('photo', {
...@@ -10,22 +10,33 @@ const usePhotoStore = defineStore('photo', { ...@@ -10,22 +10,33 @@ const usePhotoStore = defineStore('photo', {
({ ({
list: [ list: [
{ {
url: new URL('/images/photo/1.png', import.meta.url).href url: new URL('/images/photo/1.png', import.meta.url).href,
liveUrl:
'https://resources.laihua.com/2023-12-14/11772300-9a47-11ee-84b0-fbd08f47254f.png'
}, },
{ {
url: new URL('/images/photo/2.png', import.meta.url).href url: new URL('/images/photo/2.png', import.meta.url).href,
liveUrl:
'https://resources.laihua.com/2023-12-14/32b3e530-9a47-11ee-8702-5ddbbcc07698.png'
}, },
{ {
url: new URL('/images/photo/3.png', import.meta.url).href url: new URL('/images/photo/3.png', import.meta.url).href,
liveUrl:
'https://resources.laihua.com/2023-12-14/55060f00-9a47-11ee-8702-5ddbbcc07698.png'
}, },
{ {
url: new URL('/images/photo/4.png', import.meta.url).href url: new URL('/images/photo/4.png', import.meta.url).href,
liveUrl:
'https://resources.laihua.com/2023-12-14/81a0d220-9a47-11ee-84b0-fbd08f47254f.png'
}, },
{ {
url: new URL('/2023-11-2/93ffb6a7-ae93-4918-944e-877016ba266b.png', import.meta.url).href url: new URL('/2023-11-2/93ffb6a7-ae93-4918-944e-877016ba266b.png', import.meta.url).href,
liveUrl: 'https://resources.laihua.com/2023-11-2/93ffb6a7-ae93-4918-944e-877016ba266b.png'
}, },
{ {
url: new URL('/2023-11-2/6fa9a127-2ce5-43ea-a543-475bf9354eda.png', import.meta.url).href url: new URL('/2023-11-2/6fa9a127-2ce5-43ea-a543-475bf9354eda.png', import.meta.url).href,
liveUrl:
'https://resources.laihua.com/2023-12-14/b7523e40-9a47-11ee-84b0-fbd08f47254f.png'
} }
] ]
}) as IPhoto, }) as IPhoto,
......
...@@ -27,6 +27,7 @@ export type ISettings = { ...@@ -27,6 +27,7 @@ export type ISettings = {
llmUrl: string llmUrl: string
llmToTTSSliceLength: number llmToTTSSliceLength: number
voskWsLUrl: string voskWsLUrl: string
liveHost: string
} }
const useSettingsStore = defineStore('settings', { const useSettingsStore = defineStore('settings', {
...@@ -61,7 +62,8 @@ const useSettingsStore = defineStore('settings', { ...@@ -61,7 +62,8 @@ const useSettingsStore = defineStore('settings', {
isOpenDevTools: false, isOpenDevTools: false,
llmUrl: 'ws://127.0.0.1:9899/api/v1/stream', llmUrl: 'ws://127.0.0.1:9899/api/v1/stream',
llmToTTSSliceLength: 20, llmToTTSSliceLength: 20,
voskWsLUrl: 'ws://127.0.0.1:2700' voskWsLUrl: 'ws://127.0.0.1:2700',
liveHost: 'http://122.51.32.12:9000'
}) as ISettings, }) as ISettings,
getters: {}, getters: {},
actions: { actions: {
......
export * as HWLLSPlayer from './lib/HWLLSPlayer'; export * as HWLLSPlayer from './lib/HWLLSPlayer'
\ No newline at end of file
declare const _default: { declare const _default: {
getVersion: any; getVersion: any
checkSystemRequirements: any; checkSystemRequirements: any
setParameter: any; setParameter: any
createClient: any; createClient: any
saveLog: any; saveLog: any
setLogLevel: any; setLogLevel: any
uploadLog: any; uploadLog: any
}; }
export { _default as default }; export { _default as default }
import axios from 'axios'; import axios from 'axios'
import type { AxiosRequestConfig } from 'axios'; import type { AxiosRequestConfig } from 'axios'
export const axiosInstance = axios.create(); export const axiosInstance = axios.create()
export interface ApiResult<T = unknown> { export interface ApiResult<T = unknown> {
error?: boolean; error?: boolean
code?: number; code?: number
message?: string; message?: string
msg?: string; msg?: string
data?: T; data?: T
[k: string]: any; [k: string]: any
} }
export default async function http<T>(input: AxiosRequestConfig): Promise<ApiResult<T>> { export default async function http<T>(input: AxiosRequestConfig): Promise<ApiResult<T>> {
try { try {
const response = await axiosInstance(input); const response = await axiosInstance(input)
if (response.status === 200) { if (response.status === 200) {
return response.data; return response.data
} }
return { error: true }; return { error: true }
} catch (error) { } catch (error) {
return { return {
code: (error as any).response?.code || (error as any).code, code: (error as any).response?.code || (error as any).code,
data: (error as any)?.response?.data data: (error as any)?.response?.data
}; }
} }
} }
...@@ -9,10 +9,10 @@ export default class Utils { ...@@ -9,10 +9,10 @@ export default class Utils {
static guid() { static guid() {
function S4() { function S4() {
return (((1 + Math.random()) * 0x10000) | 0).toString(16).substring(1); return (((1 + Math.random()) * 0x10000) | 0).toString(16).substring(1)
} }
return S4() + S4() + '-' + S4() + '-' + S4() + '-' + S4() + '-' + S4() + S4() + S4(); return S4() + S4() + '-' + S4() + '-' + S4() + '-' + S4() + '-' + S4() + S4() + S4()
} }
} }
export const { getCurrentLocale, openExternal, guid} = Utils export const { getCurrentLocale, openExternal, guid } = Utils
...@@ -27,5 +27,12 @@ ...@@ -27,5 +27,12 @@
"path": "./tsconfig.node.json" "path": "./tsconfig.node.json"
} }
], ],
"exclude": ["node_modules", "dist", "rollup.config.js", "*.json", "*.js", "src/renderer/utils/HWLLS_SDK_Web_2.3.0/**'"] "exclude": [
"node_modules",
"dist",
"rollup.config.js",
"*.json",
"*.js",
"src/renderer/utils/HWLLS_SDK_Web_2.3.0/**'"
]
} }
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment