Commit a5aa32b4 authored by ali

feat: integrate photo digital-human live streaming

parent 9b2696f3
...@@ -20,6 +20,7 @@ ...@@ -20,6 +20,7 @@
"editor.tabSize": 2, "editor.tabSize": 2,
"cSpell.words": [ "cSpell.words": [
"flvjs", "flvjs",
"superres",
"Vosk" "Vosk"
], ],
"editor.inlineSuggest.showToolbar": "always" "editor.inlineSuggest.showToolbar": "always"
......
import type { HWLLSPlayer } from '@/renderer/utils/HWLLS_SDK_Web_2.3.0/export'; import type { HWLLSPlayer } from '@/renderer/utils/HWLLS_SDK_Web_2.3.0/export'
declare global { declare global {
// eslint-disable-next-line no-unused-vars // eslint-disable-next-line no-unused-vars
interface Window { interface Window {
HWLLSPlayer: HWLLSPlayer HWLLSPlayer: HWLLSPlayer
} }
} }
\ No newline at end of file
...@@ -12,7 +12,6 @@ ...@@ -12,7 +12,6 @@
"axios": "^1.6.2", "axios": "^1.6.2",
"electron-store": "^8.1.0", "electron-store": "^8.1.0",
"EventEmitter": "^1.0.0", "EventEmitter": "^1.0.0",
"events": "^3.3.0",
"flv.js": "^1.6.2", "flv.js": "^1.6.2",
"pinia": "^2.1.7", "pinia": "^2.1.7",
"pinia-plugin-persistedstate": "^3.2.0", "pinia-plugin-persistedstate": "^3.2.0",
...@@ -5552,14 +5551,6 @@ ...@@ -5552,14 +5551,6 @@
"node": ">=0.12" "node": ">=0.12"
} }
}, },
"node_modules/events": {
"version": "3.3.0",
"resolved": "https://registry.npmmirror.com/events/-/events-3.3.0.tgz",
"integrity": "sha512-mQw+2fkQbALzQ7V0MY0IqdnXNOeTtP4r0lN9z7AAawCXgqea7bDii20AYrIBrFd/Hx0M2Ocz6S111CaFkUcb0Q==",
"engines": {
"node": ">=0.8.x"
}
},
"node_modules/exit-hook": { "node_modules/exit-hook": {
"version": "1.1.1", "version": "1.1.1",
"resolved": "https://registry.npmmirror.com/exit-hook/-/exit-hook-1.1.1.tgz", "resolved": "https://registry.npmmirror.com/exit-hook/-/exit-hook-1.1.1.tgz",
......
...@@ -74,4 +74,4 @@ ...@@ -74,4 +74,4 @@
"vue-tsc": "^1.8.22", "vue-tsc": "^1.8.22",
"xvfb-maybe": "^0.2.1" "xvfb-maybe": "^0.2.1"
} }
} }
\ No newline at end of file
import { BrowserWindow, ipcMain, shell, BrowserWindowConstructorOptions, app } from 'electron' import { BrowserWindow, ipcMain, shell, BrowserWindowConstructorOptions, app } from 'electron'
import Constants from './utils/Constants' import Constants from './utils/Constants'
import fs from 'fs'
import http from './utils/http'
/* /*
* IPC Communications * IPC Communications
...@@ -7,6 +9,12 @@ import Constants from './utils/Constants' ...@@ -7,6 +9,12 @@ import Constants from './utils/Constants'
export default class IPCs { export default class IPCs {
static browserWindows: Map<string, BrowserWindow[]> = new Map() static browserWindows: Map<string, BrowserWindow[]> = new Map()
// Read a local file
static readFile(path) {
const file = fs.readFileSync(path)
return file
}
static initialize(window: BrowserWindow): void { static initialize(window: BrowserWindow): void {
ipcMain.on('mesGetUserData', () => { ipcMain.on('mesGetUserData', () => {
window.webContents.send('msgReceivedUserData', app.getPath('userData')) window.webContents.send('msgReceivedUserData', app.getPath('userData'))
...@@ -78,6 +86,9 @@ export default class IPCs { ...@@ -78,6 +86,9 @@ export default class IPCs {
await win.loadURL(url) await win.loadURL(url)
// Initialize IPC Communication
IPCs.initializeChildWindow(win)
if (!IPCs.browserWindows.has(url)) { if (!IPCs.browserWindows.has(url)) {
IPCs.browserWindows.set(url, []) IPCs.browserWindows.set(url, [])
} }
...@@ -106,4 +117,20 @@ export default class IPCs { ...@@ -106,4 +117,20 @@ export default class IPCs {
} }
}) })
} }
static initializeChildWindow(window: BrowserWindow) {
ipcMain.on('fileUpload', async (event, path: string) => {
const content = IPCs.readFile(path)
const formData = new FormData()
const blob = new Blob([content], { type: 'audio/wav' })
formData.append('file', blob)
const response = await http({
url: 'https://beta.laihua.com/api/upload/file',
method: 'POST',
data: formData
})
window.webContents.send('msgReceivedFileUploadResponse', response)
})
}
} }
import axios from 'axios'
import type { AxiosRequestConfig } from 'axios'
export const axiosInstance = axios.create()
export interface ApiResult<T = unknown> {
error?: boolean
code?: number
message?: string
msg?: string
data?: T
[k: string]: any
}
export default async function http<T>(input: AxiosRequestConfig): Promise<ApiResult<T>> {
try {
const response = await axiosInstance(input)
if (response.status === 200) {
return response.data
}
return { error: true }
} catch (error) {
return {
code: (error as any).response?.code || (error as any).code,
data: (error as any)?.response?.data
}
}
}
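The wrapper above never throws on HTTP errors; callers inspect error/code themselves. A minimal usage sketch, assuming the same live-service /status endpoint that PhotoRole calls later in this diff (the response fields are an assumption, not a documented contract):

import http from './utils/http'

// Poll the live task status once and normalize failures into exceptions.
// Host, endpoint and field names mirror the calls in PhotoRole.ts below.
async function getLiveStatus(host: string, taskId: string) {
  const resp = await http({
    method: 'GET',
    url: `${host}/status`,
    params: { taskId }
  })
  if (resp.error || (resp.code && resp.code !== 200)) {
    throw new Error(resp.msg || 'status request failed')
  }
  // ApiResult carries an index signature, so service-specific fields are readable without a cast.
  return resp.status as 'init' | 'ready' | 'wait' | 'closing' | 'pushing'
}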
...@@ -8,13 +8,15 @@ const mainAvailChannels: string[] = [ ...@@ -8,13 +8,15 @@ const mainAvailChannels: string[] = [
'openWindow', 'openWindow',
'openDevTools', 'openDevTools',
'mesGetUserData', 'mesGetUserData',
'mesGetAppData' 'mesGetAppData',
'fileUpload'
] ]
const rendererAvailChannels: string[] = [ const rendererAvailChannels: string[] = [
'msgReceivedVersion', 'msgReceivedVersion',
'msgReceivedFilePath', 'msgReceivedFilePath',
'msgReceivedUserData', 'msgReceivedUserData',
'msgReceivedAppData' 'msgReceivedAppData',
'msgReceivedFileUploadResponse'
] ]
contextBridge.exposeInMainWorld('mainApi', { contextBridge.exposeInMainWorld('mainApi', {
......
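The contextBridge implementation itself is untouched by this commit; as an assumption based on how window.mainApi.send/receive is used in the renderer below, the two whitelists above typically gate ipcRenderer roughly like this:

import { contextBridge, ipcRenderer } from 'electron'

// Hypothetical sketch of the bridge these channel whitelists feed; the real
// implementation lives in the unchanged part of this preload file.
contextBridge.exposeInMainWorld('mainApi', {
  send: (channel: string, ...args: unknown[]): void => {
    if (mainAvailChannels.includes(channel)) ipcRenderer.send(channel, ...args)
  },
  receive: (channel: string, fn: (...args: unknown[]) => void): void => {
    if (rendererAvailChannels.includes(channel)) {
      ipcRenderer.on(channel, (event, ...args) => fn(event, ...args))
    }
  }
})

This is why both the new 'fileUpload' request channel and its 'msgReceivedFileUploadResponse' reply channel must be whitelisted for the upload round trip to work.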
...@@ -45,6 +45,12 @@ const asrItems = ref([ ...@@ -45,6 +45,12 @@ const asrItems = ref([
'vosk_ws' 'vosk_ws'
// 'Whisper Api' // 'Whisper Api'
]) ])
const liveHosts = ref([
'http://111.229.216.162:9000',
'http://124.221.182.173:9000',
'http://110.42.214.59:9000',
'http://122.51.32.12:9000'
])
const asrSelect = ref(setting.asr) const asrSelect = ref(setting.asr)
const source = computed(() => { const source = computed(() => {
...@@ -186,6 +192,15 @@ function clear() { ...@@ -186,6 +192,15 @@ function clear() {
:model-value="setting.llmUrl" :model-value="setting.llmUrl"
></v-text-field> ></v-text-field>
<v-select
v-model="setting.liveHost.value"
style="margin-top: 22px"
:items="liveHosts"
:rules="[(v) => !!v || '请选择音色']"
label="直播地址"
required
></v-select>
<v-slider <v-slider
v-model="setting.llmToTTSSliceLength.value" v-model="setting.llmToTTSSliceLength.value"
label="TTS 分句长度" label="TTS 分句长度"
......
...@@ -6,6 +6,6 @@ ...@@ -6,6 +6,6 @@
<body> <body>
<div id="app"></div> <div id="app"></div>
</body> </body>
<script src="./utils/HWLLS_SDK_Web_2.3.0/lib/HWLLSPlayer.js"></script> <script src="./HWLLSPlayer.js"></script>
<script type="module" src="./main.ts"></script> <script type="module" src="./main.ts"></script>
</html> </html>
import EventEmitter from 'EventEmitter'; import EventEmitter from 'EventEmitter'
/** /**
* *
...@@ -30,41 +30,41 @@ import EventEmitter from 'EventEmitter'; ...@@ -30,41 +30,41 @@ import EventEmitter from 'EventEmitter';
* *
*/ */
export type StartPlayOptions = { export type StartPlayOptions = {
objectFit?: 'contain' | 'cover' | 'fill'; objectFit?: 'contain' | 'cover' | 'fill'
muted?: boolean; muted?: boolean
sessionId?: string; sessionId?: string
showLoading?: boolean; showLoading?: boolean
autoPlay?: boolean; autoPlay?: boolean
poster?: { poster?: {
url?: string; url?: string
mode?: 'fill' | 'crop'; mode?: 'fill' | 'crop'
startEnable?: boolean; startEnable?: boolean
pauseEnable: boolean; pauseEnable: boolean
}; }
}; }
// Custom event types // Custom event types
export type HwEventType = 'videoStart' | 'audioStart' | 'audioBroken' | 'videoBroken' | 'error'; // scene page switching export type HwEventType = 'videoStart' | 'audioStart' | 'audioBroken' | 'videoBroken' | 'error' // scene page switching
export type HwEventTypeData<T extends HwEventType> = { export type HwEventTypeData<T extends HwEventType> = {
videoStart: []; videoStart: []
audioStart: []; audioStart: []
audioBroken: []; audioBroken: []
videoBroken: []; videoBroken: []
error: [{ code: number; message: string }]; error: [{ code: number; message: string }]
}[T]; }[T]
export type HwEventTypeFn<T extends HwEventType> = { export type HwEventTypeFn<T extends HwEventType> = {
// eslint-disable-next-line no-unused-vars // eslint-disable-next-line no-unused-vars
[K in T]: (...args: HwEventTypeData<T>) => void; [K in T]: (...args: HwEventTypeData<T>) => void
}[T]; }[T]
export class HwWebRTC extends EventEmitter { export class HwWebRTC extends EventEmitter {
elementId = ''; elementId = ''
startPlayOptions: StartPlayOptions | null = null; startPlayOptions: StartPlayOptions | null = null
client: any = null; client: any = null
constructor(id: string, log: 'none' | 'error' | 'warn' | 'info' | 'debug' = 'none') { constructor(id: string, log: 'none' | 'error' | 'warn' | 'info' | 'debug' = 'none') {
super(); super()
this.elementId = id; this.elementId = id
// setLogLevel(log); // setLogLevel(log);
} }
...@@ -75,7 +75,7 @@ export class HwWebRTC extends EventEmitter { ...@@ -75,7 +75,7 @@ export class HwWebRTC extends EventEmitter {
* @returns Whether the call succeeded * @returns Whether the call succeeded
*/ */
emit<T extends HwEventType>(event: T, ...args: HwEventTypeData<T>): boolean { emit<T extends HwEventType>(event: T, ...args: HwEventTypeData<T>): boolean {
return super.emit(event, ...args); return super.emit(event, ...args)
} }
/** /**
...@@ -86,16 +86,16 @@ export class HwWebRTC extends EventEmitter { ...@@ -86,16 +86,16 @@ export class HwWebRTC extends EventEmitter {
*/ */
on<T extends HwEventType>(event: T, fn: HwEventTypeFn<T>): this { on<T extends HwEventType>(event: T, fn: HwEventTypeFn<T>): this {
// fn may genuinely take a single argument, so a cast via 'as' is unavoidable // fn may genuinely take a single argument, so a cast via 'as' is unavoidable
return super.on(event, fn as (...args: any[]) => void); return super.on(event, fn as (...args: any[]) => void)
} }
/** /**
* Pre-check: read the browser version and verify compatibility * Pre-check: read the browser version and verify compatibility
*/ */
static async isBrowserSupport() { static async isBrowserSupport() {
let check = false; let check = false
check = await window.HWLLSPlayer.checkSystemRequirements(); check = await window.HWLLSPlayer.checkSystemRequirements()
return check; return check
} }
/** /**
...@@ -109,51 +109,51 @@ export class HwWebRTC extends EventEmitter { ...@@ -109,51 +109,51 @@ export class HwWebRTC extends EventEmitter {
objectFit: 'contain' objectFit: 'contain'
} }
) { ) {
if (this.client) this.destroyed(); if (this.client) this.destroyed()
this.startPlayOptions = options; this.startPlayOptions = options
this.client = window.HWLLSPlayer.createClient('webrtc'); this.client = window.HWLLSPlayer.createClient('webrtc')
await this.client.startPlay(url, { await this.client.startPlay(url, {
elementId: this.elementId, elementId: this.elementId,
...this.startPlayOptions ...this.startPlayOptions
}); })
this.client.enableStreamStateDetection(true, 2); this.client.enableStreamStateDetection(true, 2)
this._bindEvents(); this._bindEvents()
} }
private _bindEvents() { private _bindEvents() {
this.client.on('video-start', () => { this.client.on('video-start', () => {
this.emit('videoStart'); this.emit('videoStart')
}); })
this.client.on('audio-start', () => { this.client.on('audio-start', () => {
this.emit('audioStart'); this.emit('audioStart')
}); })
this.client.on('audio-broken', () => { this.client.on('audio-broken', () => {
this.emit('audioBroken'); this.emit('audioBroken')
}); })
this.client.on('video-broken', () => { this.client.on('video-broken', () => {
this.emit('videoBroken'); this.emit('videoBroken')
}); })
// this.client.on('audio-recovery', () => { // this.client.on('audio-recovery', () => {
// logManage.log('----------------> audio-recovery', 1); // logManage.log('----------------> audio-recovery', 1);
// }); // });
// this.client.on('video-recovery', () => { // this.client.on('video-recovery', () => {
// logManage.log('----------------> video-recovery', 1); // logManage.log('----------------> video-recovery', 1);
// }); // });
this.client.on('Error', (error: any) => this.emit('error', error)); this.client.on('Error', (error: any) => this.emit('error', error))
} }
/** /**
* Stop playback: issue the stop-play request * Stop playback: issue the stop-play request
*/ */
stopPlay() { stopPlay() {
this.client && this.client.stopPlay(); this.client && this.client.stopPlay()
} }
/** /**
* Teardown: destroy the client, etc. * Teardown: destroy the client, etc.
*/ */
destroyed() { destroyed() {
this.client?.offAllEvents(); this.client?.offAllEvents()
this.client?.destoryClient(); this.client?.destoryClient()
} }
} }
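A minimal, self-contained sketch of driving the wrapper above (the container id and pull URL are placeholders; the global HWLLSPlayer script referenced in the index.html change must already be loaded):

import { HwWebRTC } from '@/renderer/plugins/live/HwWebRTC'

async function playLowLatencyStream(pullUrl: string) {
  if (!(await HwWebRTC.isBrowserSupport())) throw new Error('WebRTC is not supported')

  // 'webRTCContainer' is the id of an existing (possibly hidden) DOM element, as PhotoRole does below.
  const rtc = new HwWebRTC('webRTCContainer')
  rtc.on('videoStart', () => console.log('first video frame rendered'))
  rtc.on('error', (err) => console.error(err.code, err.message))

  await rtc.startPlay(pullUrl, { objectFit: 'contain', muted: false })

  // Return a cleanup callback for the caller.
  return () => {
    rtc.stopPlay()
    rtc.destroyed()
  }
}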
/* eslint-disable camelcase */ /* eslint-disable camelcase */
import http from '@/renderer/utils/http'; import http from '@/renderer/utils/http'
import { HwWebRTC } from './HwWebRTC'; import { HwWebRTC } from './HwWebRTC'
import { guid } from '@/renderer/utils/index'; import { guid } from '@/renderer/utils/index'
import EventEmitter from 'EventEmitter'; import EventEmitter from 'EventEmitter'
const HOST = 'http://122.51.32.12:9000';
type LiveOptions = { type LiveOptions = {
speaker?: string; speaker?: string
languageCode?: string; languageCode?: string
text?: string; text?: string
audioUrl?: string | null; audioUrl?: string | null
taskId: string; taskId: string
imgUrl: string; imgUrl: string
speed?: number; pushUrl: string
}; pullUrl: string
speed?: number
bitrate: number
superres: boolean
isLast: boolean
}
export class PhotoAnswer { export class PhotoAnswer {
question = ''; question = ''
answer = ''; answer = ''
/** Split the answer so the digital-human live stream is synthesized segment by segment */ /** Split the answer so the digital-human live stream is synthesized segment by segment */
_sliceAnswer = ''; _sliceAnswer = ''
/** Buffered answer chunks used to drive the typing-output effect */ /** Buffered answer chunks used to drive the typing-output effect */
_typingAnswer: string[] = []; _typingAnswer: string[] = []
/** Mirrors answer; consumed externally to render the typing-output effect */ /** Mirrors answer; consumed externally to render the typing-output effect */
asyncAnswer = ''; asyncAnswer = ''
/** Digital-human playback state */ /** Digital-human playback state */
playState: 'playing' | 'pause' = 'pause'; playState: 'playing' | 'pause' = 'pause'
/** Whether the full answer has been received */ /** Whether the full answer has been received */
answerEnd = false; answerEnd = false
play() { play() {}
}
stop() { stop() {}
}
pause() { pause() {}
}
destroy() { destroy() {}
}
} }
// Custom event types // Custom event types
...@@ -50,250 +49,272 @@ export type PhotoEventType = ...@@ -50,250 +49,272 @@ export type PhotoEventType =
| 'audioBroken' | 'audioBroken'
| 'videoBroken' | 'videoBroken'
| 'liveStatusTrace' | 'liveStatusTrace'
| 'asyncAnswer'; | 'asyncAnswer'
export type PhotoEventTypeData<T extends PhotoEventType> = { export type PhotoEventTypeData<T extends PhotoEventType> = {
videoStart: []; videoStart: []
audioStart: []; audioStart: []
audioBroken: []; audioBroken: []
videoBroken: []; videoBroken: []
liveStatusTrace: ['init' | 'ready' | 'wait' | 'closing' | 'pushing']; liveStatusTrace: ['init' | 'ready' | 'wait' | 'closing' | 'pushing']
asyncAnswer: [PhotoAnswer]; asyncAnswer: [PhotoAnswer]
}[T]; }[T]
export type PhotoEventTypeFn<T extends PhotoEventType> = { export type PhotoEventTypeFn<T extends PhotoEventType> = {
// eslint-disable-next-line no-unused-vars // eslint-disable-next-line no-unused-vars
[K in T]: (...args: PhotoEventTypeData<T>) => void; [K in T]: (...args: PhotoEventTypeData<T>) => void
}[T]; }[T]
export class PhotoRole extends EventEmitter { export class PhotoRole extends EventEmitter {
readonly view: HTMLCanvasElement; readonly host: string
readonly ctx: CanvasRenderingContext2D; readonly view: HTMLCanvasElement
private _webRTCContainer: HTMLDivElement = document.createElement('div'); readonly ctx: CanvasRenderingContext2D
private _rtc: HwWebRTC | null = null; private _webRTCContainer: HTMLDivElement = document.createElement('div')
private _image: HTMLImageElement = new Image(); private _rtc: HwWebRTC | null = null
private _rtcVideo: HTMLVideoElement | null = null; private _image: HTMLImageElement = new Image()
private _rtcVideo: HTMLVideoElement | null = null
private _rtcVideoInfo: { private _rtcVideoInfo: {
center: { center: {
x: number; x: number
y: number; y: number
}; }
width: number; width: number
height: number; height: number
r_w: number; r_w: number
r_h: number; r_h: number
} | null = null; } | null = null
private _liveStatus: 'init' | 'ready' | 'wait' | 'closing' | 'pushing' = 'closing'; private _liveStatus: 'init' | 'ready' | 'wait' | 'closing' | 'pushing' = 'closing'
private _pollTimeout = -1; private _pollTimeout = -1
answerArgs: PhotoAnswer | null = null; answerArgs: PhotoAnswer | null = null
readonly url: string; readonly url: string
readonly sessionId = guid(); readonly sessionId = guid()
constructor(url: string, view: HTMLCanvasElement) { constructor(host: string, url: string, view: HTMLCanvasElement) {
super(); super()
this.url = url; this.host = host
this.view = view; this.url = url
this.ctx = view.getContext('2d') as CanvasRenderingContext2D; this.view = view
this.ctx = view.getContext('2d') as CanvasRenderingContext2D
// this._dialogSession = new Dialog({ }
// type: 'photo',
// callbacks: { private _liveTaskQueue: LiveOptions[] = []
// chatConnect: (ans) => { private _isLiveTaskRunning = false
// this.answerArgs = ans; get taskQueueLength(): number {
// console.time('chat'); return this._liveTaskQueue.length
// },
// chatEnd: (ans) => {
// this._enQueue(this.sessionId, ans._sliceAnswer);
// ans._sliceAnswer = '';
// ans.answerEnd = true;
// console.timeEnd('chat');
// console.log('----------------> answer.length: ', ans.answer.length);
// },
// chatMessage: (message, ans) => {
// ans._typingAnswer.push(message);
// ans._sliceAnswer += message;
// if (/[。,?!;,.?!;]/.test(message) && ans._sliceAnswer.length >= 40) {
// this._enQueue(this.sessionId, ans._sliceAnswer);
// ans._sliceAnswer = '';
// }
// }
// }
// });
} }
private _liveTaskQueue: LiveOptions[] = []; enQueue({ taskId, audioUrl, isLast }: { taskId: string; audioUrl: string; isLast: boolean }) {
private _isLiveTaskRunning = false; if (audioUrl.length < 1) return
enQueue(taskId: string, text: string) {
if (text.length < 1) return;
this._liveTaskQueue.push({ this._liveTaskQueue.push({
imgUrl: this.url, imgUrl: this.url,
taskId, taskId,
pushUrl: `rtmp://push.laihua.com/web/${taskId}`,
pullUrl: `webrtc://pull.laihua.com/web/${taskId}`,
speaker: 'speaker', speaker: 'speaker',
languageCode: 'zh-CN', languageCode: 'zh-CN',
text, audioUrl,
speed: 10 speed: 10,
}); bitrate: 2000,
this._runTask(); superres: false,
isLast
})
this._runTask()
} }
private async _runTask() { private async _runTask() {
if (this._isLiveTaskRunning) return; if (this._isLiveTaskRunning) return
this._isLiveTaskRunning = true; this._isLiveTaskRunning = true
this.off('liveStatusTrace', this._liveStatusTrace); // @ts-ignore
this.on('liveStatusTrace', this._liveStatusTrace); this.off('liveStatusTrace', this._liveStatusTrace)
this.on('liveStatusTrace', this._liveStatusTrace)
try { try {
while (this._liveTaskQueue.length) { while (this._liveTaskQueue.length) {
const task = this._liveTaskQueue.shift() as LiveOptions; const task = this._liveTaskQueue.shift() as LiveOptions
console.time(task.text); console.time(task.text)
if (this._liveStatus === 'closing') await this.initLive(); if (this._liveStatus === 'closing') await this.initLive()
await this._createLive(task); console.log('----------------> append', task)
console.timeEnd(task.text); await this._appendLive(task)
console.timeEnd(task.text)
} }
} catch (error) { } catch (error) {
console.error(error); console.error(error)
} }
this._isLiveTaskRunning = false; this._isLiveTaskRunning = false
} }
private async _liveStatusTrace() { private async _liveStatusTrace() {
if (!this.answerArgs) return; if (!this.answerArgs) return
if (this._liveStatus === 'pushing') { if (this._liveStatus === 'pushing') {
this.answerArgs.playState = 'playing'; this.answerArgs.playState = 'playing'
this._typingOutAnswer(); this._typingOutAnswer()
return; return
} }
if (this._liveStatus === 'wait') { if (this._liveStatus === 'wait') {
this.answerArgs.playState = 'pause'; this.answerArgs.playState = 'pause'
this.emit('asyncAnswer', this.answerArgs) this.emit('asyncAnswer', this.answerArgs)
} }
} }
private _typingRunner = false; private _typingRunner = false
private async _typingOutAnswer() { private async _typingOutAnswer() {
if (!this.answerArgs || this._typingRunner) return; if (!this.answerArgs || this._typingRunner) return
this._typingRunner = true; this._typingRunner = true
// The delay keeps the typed-out text roughly in sync with the audio while in the playing state // The delay keeps the typed-out text roughly in sync with the audio while in the playing state
await new Promise((resolve) => setTimeout(resolve, 2000)); await new Promise((resolve) => setTimeout(resolve, 2000))
while (this.answerArgs._typingAnswer.length) { while (this.answerArgs._typingAnswer.length) {
this.answerArgs.asyncAnswer += this.answerArgs._typingAnswer.shift(); this.answerArgs.asyncAnswer += this.answerArgs._typingAnswer.shift()
this.emit('asyncAnswer', this.answerArgs) this.emit('asyncAnswer', this.answerArgs)
await new Promise((resolve) => setTimeout(resolve, 100)); await new Promise((resolve) => setTimeout(resolve, 100))
} }
this._typingRunner = false; this._typingRunner = false
} }
private _play(url: string) { private _play(url: string) {
return new Promise<void>((resolve) => { return new Promise<void>((resolve) => {
// if (!this._rtc) return; if (!this._rtc) return
// let isPlaying = false; let isPlaying = false
// let timeout = -1; let timeout = -1
// this._rtc.once('videoStart', () => { // @ts-ignore
// isPlaying = true; this._rtc.once('videoStart', () => {
// clearTimeout(timeout); isPlaying = true
// resolve(); clearTimeout(timeout)
// }); resolve()
})
// const keepCalling = async () => {
// if (isPlaying) return; const keepCalling = async () => {
// try { if (isPlaying) return
// await this._rtc?.startPlay(url); try {
// if (!isPlaying) { await this._rtc?.startPlay(url)
// timeout = setTimeout(keepCalling, 400) as unknown as number; if (!isPlaying) {
// } timeout = setTimeout(keepCalling, 400) as unknown as number
// } catch (error) { }
// timeout = setTimeout(keepCalling, 200) as unknown as number; } catch (error) {
// } timeout = setTimeout(keepCalling, 200) as unknown as number
// }; }
// keepCalling(); }
}); keepCalling()
})
} }
draw() { draw() {
this.ctx.clearRect(0, 0, this._image.naturalWidth, this._image.naturalHeight); this.ctx.clearRect(0, 0, this._image.naturalWidth, this._image.naturalHeight)
this.ctx.drawImage(this._image, 0, 0, this._image.naturalWidth, this._image.naturalHeight); this.ctx.drawImage(this._image, 0, 0, this._image.naturalWidth, this._image.naturalHeight)
if (this._rtcVideo && this._rtcVideoInfo) { if (this._rtcVideo && this._rtcVideoInfo) {
const { center, r_w, r_h } = this._rtcVideoInfo; const { center, r_w, r_h } = this._rtcVideoInfo
this.ctx.drawImage(this._rtcVideo, center.x - r_w / 2, center.y - r_h / 2, r_w, r_h); this.ctx.drawImage(this._rtcVideo, center.x - r_w / 2, center.y - r_h / 2, r_w, r_h)
} }
} }
private async _createLive(options: LiveOptions) { private async _createLive(options: LiveOptions) {
console.log('---------------->', options)
const resp = (await http({
method: 'POST',
url: `${this.host}/create`,
data: { ...options } // ip: 'http://116.63.168.14:9000'
})) as {
code: number
taskId: string
pullUrl: string
imgInfo: {
center: { x: number; y: number }
r_w: number
r_h: number
width: number
height: number
}
msg?: string
}
if (resp.code && resp.code !== 200) {
throw new Error(resp.msg)
}
if (!resp.pullUrl || !resp.taskId) {
throw new Error(`Field is empty: taskId: ${resp.taskId}, pullUrl: ${resp.pullUrl}`)
}
return resp
}
private async _appendLive(options: LiveOptions) {
const resp = (await http({ const resp = (await http({
method: 'POST', method: 'POST',
url: `${HOST}/create`, url: `${this.host}/append`,
data: { ...options, isSdk: 1 } // ip: 'http://116.63.168.14:9000' data: { ...options } // ip: 'http://116.63.168.14:9000'
})) as { })) as {
code: number; code: number
taskId: string; taskId: string
pullUrl: string; pullUrl: string
imgInfo: { imgInfo: {
center: { x: number; y: number }; center: { x: number; y: number }
r_w: number; r_w: number
r_h: number; r_h: number
width: number; width: number
height: number; height: number
}; }
msg?: string; msg?: string
}; }
if (resp.code && resp.code !== 200) { if (resp.code && resp.code !== 200) {
throw new Error(resp.msg); throw new Error(resp.msg)
} }
if (!resp.pullUrl || !resp.taskId) { if (!resp.pullUrl || !resp.taskId) {
throw new Error(`Field is empty: taskId: ${resp.taskId}, pullUrl: ${resp.pullUrl}`); throw new Error(`Field is empty: taskId: ${resp.taskId}, pullUrl: ${resp.pullUrl}`)
} }
return resp; return resp
} }
private async _pushLive(taskId: string) { private async _pushLive(taskId: string) {
const resp = (await http({ const resp = (await http({
method: 'POST', method: 'POST',
url: `${HOST}/push`, url: `${this.host}/push`,
data: { taskId } data: { taskId }
})) as { })) as {
code: number; code: number
taskId: string; taskId: string
msg?: string; msg?: string
}; }
if (resp.code && resp.code !== 200) { if (resp.code && resp.code !== 200) {
throw new Error(resp.msg); throw new Error(resp.msg)
} }
return resp; return resp
} }
private async _getLiveStatus(taskId: string) { private async _getLiveStatus(taskId: string) {
const resp = (await http({ const resp = (await http({
method: 'GET', method: 'GET',
url: `${HOST}/status`, url: `${this.host}/status`,
params: { taskId } params: { taskId }
})) as { })) as {
code: number; code: number
status: 'init' | 'ready' | 'wait' | 'closing' | 'pushing'; status: 'init' | 'ready' | 'wait' | 'closing' | 'pushing'
msg?: string; msg?: string
}; }
if (resp.code && resp.code !== 200) { if (resp.code && resp.code !== 200) {
throw new Error(resp.msg); throw new Error(resp.msg)
} }
return resp; return resp
} }
private _checkStatus( private _checkStatus(
...@@ -301,82 +322,87 @@ export class PhotoRole extends EventEmitter { ...@@ -301,82 +322,87 @@ export class PhotoRole extends EventEmitter {
checkStatus: 'init' | 'ready' | 'wait' | 'closing' | 'pushing' checkStatus: 'init' | 'ready' | 'wait' | 'closing' | 'pushing'
) { ) {
return new Promise<boolean>((resolve) => { return new Promise<boolean>((resolve) => {
let count = 0; let count = 0
const keepCalling = async () => { const keepCalling = async () => {
if (count >= 10) return resolve(false); if (count >= 10) return resolve(false)
count++; count++
const { status } = await this._getLiveStatus(taskId); const { status } = await this._getLiveStatus(taskId)
if (status === checkStatus) return resolve(true); if (status === checkStatus) return resolve(true)
setTimeout(keepCalling, 1000) as unknown as number; setTimeout(keepCalling, 1000) as unknown as number
}; }
keepCalling(); keepCalling()
}); })
} }
private _pollStatus(taskId: string, callbacks: () => void) { private _pollStatus(taskId: string, callbacks: () => void) {
const keepCalling = async () => { const keepCalling = async () => {
try { try {
const { status } = await this._getLiveStatus(taskId); const { status } = await this._getLiveStatus(taskId)
this._liveStatus = status; this._liveStatus = status
} catch (error) { } catch (error) {
this._liveStatus = 'closing'; this._liveStatus = 'closing'
} }
this._pollTimeout = setTimeout(keepCalling, 200) as unknown as number; this._pollTimeout = setTimeout(keepCalling, 1000) as unknown as number
callbacks(); callbacks()
}; }
clearTimeout(this._pollTimeout); clearTimeout(this._pollTimeout)
keepCalling(); keepCalling()
} }
async initLive() { async initLive() {
console.time('init'); console.time('init')
console.time('init-_createLive'); console.time('init-_createLive')
const { pullUrl, taskId, imgInfo } = await this._createLive({ const { pullUrl, taskId, imgInfo } = await this._createLive({
imgUrl: this.url, imgUrl: this.url,
pushUrl: `rtmp://push.laihua.com/web/${this.sessionId}`,
pullUrl: `webrtc://pull.laihua.com/web/${this.sessionId}`,
taskId: this.sessionId, taskId: this.sessionId,
speaker: 'speaker', speaker: 'speaker',
languageCode: 'zh-CN', languageCode: 'zh-CN',
audioUrl: '' audioUrl: '',
}); bitrate: 2000,
console.timeEnd('init-_createLive'); superres: false,
isLast: true
this._rtcVideoInfo = imgInfo; })
console.time('play'); console.timeEnd('init-_createLive')
await this._checkStatus(taskId, 'ready');
await this._play(pullUrl); this._rtcVideoInfo = imgInfo
await this._pushLive(taskId); console.time('play')
console.timeEnd('play'); await this._checkStatus(taskId, 'ready')
this._rtcVideo = this._webRTCContainer.querySelector('video'); await this._play(pullUrl)
await this._pushLive(taskId)
console.timeEnd('play')
this._rtcVideo = this._webRTCContainer.querySelector('video')
// The delay keeps the typed-out text roughly in sync with the audio while in the playing state // The delay keeps the typed-out text roughly in sync with the audio while in the playing state
const initTime = new Date().getTime(); const initTime = new Date().getTime()
this._pollStatus(this.sessionId, () => { this._pollStatus(this.sessionId, () => {
if (new Date().getTime() - initTime < 1000) return; if (new Date().getTime() - initTime < 1000) return
console.log('---------------->', this._liveStatus); console.log('---------------->', this._liveStatus)
this._liveStatus === 'closing' && clearTimeout(this._pollTimeout); this._liveStatus === 'closing' && clearTimeout(this._pollTimeout)
this.emit('liveStatusTrace', this._liveStatus); this.emit('liveStatusTrace', this._liveStatus)
}); })
console.timeEnd('init'); console.timeEnd('init')
} }
private _bindEvents() { private _bindEvents() {
// this._rtc?.on('videoStart', () => { this._rtc?.on('videoStart', () => {
// this.emit('videoStart'); this.emit('videoStart')
// }); })
// this._rtc?.on('audioStart', () => { this._rtc?.on('audioStart', () => {
// this.emit('audioStart'); this.emit('audioStart')
// }); })
// this._rtc?.on('audioBroken', () => { this._rtc?.on('audioBroken', () => {
// this.emit('audioBroken'); this.emit('audioBroken')
// }); })
// this._rtc?.on('videoBroken', () => { this._rtc?.on('videoBroken', () => {
// this.emit('videoBroken'); this.emit('videoBroken')
// }); })
} }
/** /**
...@@ -386,7 +412,7 @@ export class PhotoRole extends EventEmitter { ...@@ -386,7 +412,7 @@ export class PhotoRole extends EventEmitter {
* @returns Whether the call succeeded * @returns Whether the call succeeded
*/ */
emit<T extends PhotoEventType>(event: T, ...args: PhotoEventTypeData<T>): boolean { emit<T extends PhotoEventType>(event: T, ...args: PhotoEventTypeData<T>): boolean {
return super.emit(event, ...args); return super.emit(event, ...args)
} }
/** /**
...@@ -397,47 +423,48 @@ export class PhotoRole extends EventEmitter { ...@@ -397,47 +423,48 @@ export class PhotoRole extends EventEmitter {
*/ */
on<T extends PhotoEventType>(event: T, fn: PhotoEventTypeFn<T>): this { on<T extends PhotoEventType>(event: T, fn: PhotoEventTypeFn<T>): this {
// fn may genuinely take a single argument, so a cast via 'as' is unavoidable // fn may genuinely take a single argument, so a cast via 'as' is unavoidable
return super.on(event, fn as (...args: any[]) => void); return super.on(event, fn as (...args: any[]) => void, this)
} }
get id(): string { get id(): string {
return this.sessionId; return this.sessionId
} }
async init() { async init() {
if (!(await HwWebRTC.isBrowserSupport())) { if (!(await HwWebRTC.isBrowserSupport())) {
throw new Error('WebRTC is not supported'); throw new Error('WebRTC is not supported')
} }
this._webRTCContainer.style.height = '256px'; this._webRTCContainer.style.height = '256px'
this._webRTCContainer.style.width = '256px'; this._webRTCContainer.style.width = '256px'
document.body.appendChild(this._webRTCContainer); document.body.appendChild(this._webRTCContainer)
this._image.src = this.url; this._image.src = this.url
await new Promise((resolve, reject) => { await new Promise((resolve, reject) => {
this._image.onload = resolve; this._image.onload = resolve
this._image.onerror = reject; this._image.onerror = reject
}); })
this.ctx.canvas.width = this._image.naturalWidth; this.ctx.canvas.width = this._image.naturalWidth
this.ctx.canvas.height = this._image.naturalHeight; this.ctx.canvas.height = this._image.naturalHeight
this.draw()
this._webRTCContainer.id = 'webRTCContainer'; this._webRTCContainer.id = 'webRTCContainer'
this._webRTCContainer.style.display = 'none'; this._webRTCContainer.style.display = 'none'
this._rtc = new HwWebRTC(this._webRTCContainer.id); this._rtc = new HwWebRTC(this._webRTCContainer.id)
this._bindEvents(); this._bindEvents()
await this.initLive(); await this.initLive()
// this._pollStatus(this.sessionId); // this._pollStatus(this.sessionId);
} }
destroy() { destroy() {
this._webRTCContainer && document.body.removeChild(this._webRTCContainer); this._webRTCContainer && document.body.removeChild(this._webRTCContainer)
this._rtc?.stopPlay(); this._rtc?.stopPlay()
this._rtc?.destroyed(); this._rtc?.destroyed()
clearTimeout(this._pollTimeout); clearTimeout(this._pollTimeout)
return { return {
code: 1 code: 1
}; }
} }
} }
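Putting PhotoRole together end to end, a condensed sketch of how the renderer view later in this diff uses it (host, image URL and audio URL below are placeholders):

import { PhotoAnswer, PhotoRole } from '@/renderer/plugins/live/PhotoRole'

async function startPhotoLive(canvas: HTMLCanvasElement) {
  const role = new PhotoRole('http://122.51.32.12:9000', 'https://example.com/photo.png', canvas)

  role.on('asyncAnswer', (ans) => {
    // asyncAnswer grows while the live task is pushing, producing the typing effect.
    console.log(ans.playState, ans.asyncAnswer)
  })

  await role.init() // create the live task, start pulling the WebRTC stream, begin status polling

  // The caller drives rendering: composite the photo plus the RTC video onto the canvas each frame.
  const render = () => {
    role.draw()
    requestAnimationFrame(render)
  }
  requestAnimationFrame(render)

  // Each synthesized audio slice is appended to the running task.
  role.answerArgs = new PhotoAnswer()
  role.enQueue({
    taskId: role.sessionId,
    audioUrl: 'https://example.com/answer-slice.wav', // placeholder
    isLast: true
  })

  return role
}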
This source diff could not be displayed because it is too large. You can view the blob instead.
...@@ -53,7 +53,7 @@ async function appendPhoto(url: string) { ...@@ -53,7 +53,7 @@ async function appendPhoto(url: string) {
return '图片加载失败!' return '图片加载失败!'
} }
photo.list.value.push({ url }) photo.list.value.push({ url, liveUrl: url })
urlValue.value = '' urlValue.value = ''
return true return true
......
...@@ -11,21 +11,21 @@ import type { ...@@ -11,21 +11,21 @@ import type {
import { audioAiTTS, localTTS } from '../plugins/tts' import { audioAiTTS, localTTS } from '../plugins/tts'
import useStore from '@/renderer/store' import useStore from '@/renderer/store'
import flvjs from 'flv.js' import flvjs from 'flv.js'
import { PhotoRole } from '@/renderer/plugins/live/PhotoRole'; import { PhotoAnswer, PhotoRole } from '@/renderer/plugins/live/PhotoRole'
const router = useRouter() const router = useRouter()
const route = useRoute() const route = useRoute()
const { settings } = useStore() const { settings, photo } = useStore()
let sampleRate = 48000 let sampleRate = 48000
const bufferSize = 8192 const bufferSize = 8192
const iconMicrophone = new URL('/images/microphone-input.svg', import.meta.url).href const iconMicrophone = new URL('/images/microphone-input.svg', import.meta.url).href
const recordVolume = ref(0) const recordVolume = ref(0)
const url = route.query.url as string const url = route.query.url as string
const microphoneState = ref<'waitInput' | 'input' | 'loading' | 'disabled'>('waitInput') const microphoneState = ref<'waitInput' | 'input' | 'loading' | 'disabled' | 'reply'>('waitInput')
const videoElement = ref<HTMLVideoElement | null>(null) const videoElement = ref<HTMLVideoElement | null>(null)
const can = ref<HTMLCanvasElement | null>(null) const can = ref<HTMLCanvasElement | null>(null)
let photoRole: PhotoRole | null = null; let photoRole: PhotoRole | null = null
let flvPlayer: flvjs.Player | null = null let flvPlayer: flvjs.Player | null = null
onMounted(() => { onMounted(() => {
...@@ -42,40 +42,55 @@ function loadImg(): Promise<HTMLImageElement> { ...@@ -42,40 +42,55 @@ function loadImg(): Promise<HTMLImageElement> {
} }
async function init() { async function init() {
microphoneState.value = 'loading'
const img = await loadImg() const img = await loadImg()
const videoEle = videoElement.value const videoEle = videoElement.value
const canvasEle = can.value const canvasEle = can.value
const ctx = canvasEle && canvasEle.getContext('2d') const ctx = canvasEle && canvasEle.getContext('2d')
if (!videoEle || !canvasEle || !ctx) return if (!videoEle || !canvasEle || !ctx) return
draw(ctx, img)
canvasEle.width = img.naturalWidth canvasEle.width = img.naturalWidth
canvasEle.height = img.naturalHeight canvasEle.height = img.naturalHeight
photoRole = new PhotoRole(url, canvasEle); const item = photo.list.find((i) => i.url === url)
photoRole = new PhotoRole(settings.liveHost, `${item?.liveUrl}`, canvasEle)
photoRole.on('asyncAnswer', (ans) => {
if (ans.playState === 'playing') {
microphoneState.value = 'reply'
return
}
if (
microphoneState.value === 'reply' &&
ans.playState === 'pause' &&
photoRole!.taskQueueLength === 0 &&
answerArray.length === 0
) {
microphoneState.value = 'input'
}
})
// initPlayer(videoEle); // initPlayer(videoEle);
try {
await photoRole.init()
} catch (error) {
console.error(error)
return
}
microphoneState.value = 'waitInput'
const fps = 1000 / 30 const fps = 1000 / 30
let lastTime = Date.now() let lastTime = Date.now()
const updateFrame = () => { const updateFrame = () => {
if (Date.now() - lastTime > fps) { if (Date.now() - lastTime > fps) {
draw(ctx, img, videoEle, { photoRole?.draw()
width: 579,
height: 579,
center: {
x: 295,
y: 168
},
r_w: 304,
r_h: 304
})
lastTime = Date.now() lastTime = Date.now()
} }
requestAnimationFrame(updateFrame) requestAnimationFrame(updateFrame)
} }
requestAnimationFrame(updateFrame) requestAnimationFrame(updateFrame)
await photoRole.initLive();
} }
function draw( function draw(
...@@ -258,7 +273,7 @@ async function startVoskWsAudioInput() { ...@@ -258,7 +273,7 @@ async function startVoskWsAudioInput() {
} }
await initVoskWS() await initVoskWS()
sampleRate = 8000 sampleRate = 16000
const mediaStream = await navigator.mediaDevices.getUserMedia({ const mediaStream = await navigator.mediaDevices.getUserMedia({
audio: { audio: {
echoCancellation: true, echoCancellation: true,
...@@ -275,7 +290,17 @@ async function startVoskWsAudioInput() { ...@@ -275,7 +290,17 @@ async function startVoskWsAudioInput() {
source.connect(processor) source.connect(processor)
processor.connect(audioContext.destination) processor.connect(audioContext.destination)
processor.onaudioprocess = (audioDataChunk) => postAudio(audioDataChunk) processor.onaudioprocess = (audioDataChunk) => {
if (
microphoneState.value === 'loading' ||
microphoneState.value === 'disabled' ||
microphoneState.value === 'reply'
) {
return
}
postAudio(audioDataChunk)
}
await analyzeMicrophoneVolume(mediaStream, (val) => { await analyzeMicrophoneVolume(mediaStream, (val) => {
recordVolume.value = val recordVolume.value = val
...@@ -340,16 +365,20 @@ function endAudioInput() { ...@@ -340,16 +365,20 @@ function endAudioInput() {
} }
} }
const answerArray: { text: string; isLast: boolean }[] = []
async function onAsr(question: string) { async function onAsr(question: string) {
console.log('---------------->question: ', question) console.log('---------------->question: ', question)
endAudioInput()
microphoneState.value = 'loading'
const ws = await initLLMSocket() const ws = await initLLMSocket()
inputContext.ws = ws inputContext.ws = ws
let sliceAnswer = '' let sliceAnswer = ''
let answer = '' let answer = ''
const answerArray: string[] = [] answerArray.length = 0
let isTime = true let isTime = true
photoRole!.answerArgs = new PhotoAnswer()
ws.onmessage = (message) => { ws.onmessage = (message) => {
try { try {
...@@ -360,18 +389,17 @@ async function onAsr(question: string) { ...@@ -360,18 +389,17 @@ async function onAsr(question: string) {
} }
if (event === 'stream_end') { if (event === 'stream_end') {
answerArray.push(sliceAnswer) answerArray.push({ text: sliceAnswer, isLast: true })
runTTSTask(answerArray)
sliceAnswer = ''
answerArray.push(sliceAnswer)
sliceAnswer = '' sliceAnswer = ''
runTTSTask(answerArray)
inputContext.ws?.close() inputContext.ws?.close()
console.log('----------------> answer: ', answer) console.log('----------------> answer: ', answer)
return return
} }
answer += text answer += text
photoRole!.answerArgs!.answer += answer
photoRole!.answerArgs!._typingAnswer.push(answer)
isTime && console.time('sliceAnswer') isTime && console.time('sliceAnswer')
isTime = false isTime = false
...@@ -381,7 +409,7 @@ async function onAsr(question: string) { ...@@ -381,7 +409,7 @@ async function onAsr(question: string) {
sliceAnswer += t sliceAnswer += t
if (/[。,?!;,.?!;]/.test(t) && sliceAnswer.length >= settings.llmToTTSSliceLength) { if (/[。,?!;,.?!;]/.test(t) && sliceAnswer.length >= settings.llmToTTSSliceLength) {
console.timeEnd('sliceAnswer') console.timeEnd('sliceAnswer')
answerArray.push(sliceAnswer) answerArray.push({ text: sliceAnswer, isLast: true })
runTTSTask(answerArray) runTTSTask(answerArray)
sliceAnswer = '' sliceAnswer = ''
isTime = true isTime = true
...@@ -405,7 +433,7 @@ function initLLMSocket(): Promise<WebSocket> { ...@@ -405,7 +433,7 @@ function initLLMSocket(): Promise<WebSocket> {
} }
let isTTSRunning = false let isTTSRunning = false
async function runTTSTask(tasks: string[]) { async function runTTSTask(tasks: { text: string; isLast: boolean }[]) {
if (isTTSRunning) return if (isTTSRunning) return
isTTSRunning = true isTTSRunning = true
...@@ -413,20 +441,24 @@ async function runTTSTask(tasks: string[]) { ...@@ -413,20 +441,24 @@ async function runTTSTask(tasks: string[]) {
while (tasks.length) { while (tasks.length) {
const task = tasks.shift() const task = tasks.shift()
if (!task) break if (!task) break
if (task.length < 1) continue if (task.text.trim().length < 1) continue
console.time(task + ' TTS: ') console.time(task + ' TTS: ')
const res = await localTTS({ const res = await localTTS({
url: settings.ttsHost, url: settings.ttsHost,
text: task, text: task.text,
audio_path: settings.userData audio_path: settings.userData
}) })
console.log('----------------> TTS:', res[0].text) console.log('----------------> TTS:', res[0].text)
console.timeEnd(task + ' TTS: ') console.timeEnd(task + ' TTS: ')
const audio = new Audio(`file://${res[0].text}`) console.log('---------------->', res[0].text)
audio.load()
ttsAudios.push(audio) const audioPath = await uploadFile({ filePath: res[0].text })
runAudioPlay() photoRole?.enQueue({
taskId: photoRole.sessionId,
audioUrl: `https://resources.laihua.com/${audioPath}`,
isLast: task.isLast
})
} }
} catch (error) { } catch (error) {
console.error(error) console.error(error)
...@@ -435,6 +467,21 @@ async function runTTSTask(tasks: string[]) { ...@@ -435,6 +467,21 @@ async function runTTSTask(tasks: string[]) {
isTTSRunning = false isTTSRunning = false
} }
function uploadFile({ filePath }: { filePath: string }) {
return new Promise<string>((resolve, reject) => {
window.mainApi.receive(
'msgReceivedFileUploadResponse',
(event: Event, result: { code: number; data: null | { filename: string } }) => {
if (result.code !== 200) {
return reject(JSON.stringify(result))
}
resolve(result.data?.filename || '')
}
)
window.mainApi.send('fileUpload', filePath)
})
}
const ttsAudios: HTMLAudioElement[] = [] const ttsAudios: HTMLAudioElement[] = []
let isPlayRunning = false let isPlayRunning = false
async function runAudioPlay() { async function runAudioPlay() {
...@@ -452,7 +499,6 @@ async function runAudioPlay() { ...@@ -452,7 +499,6 @@ async function runAudioPlay() {
} }
await audio.play() await audio.play()
} }
</script> </script>
<template> <template>
...@@ -477,12 +523,17 @@ async function runAudioPlay() { ...@@ -477,12 +523,17 @@ async function runAudioPlay() {
color="#fff" color="#fff"
variant="elevated" variant="elevated"
size="x-large" size="x-large"
:disabled="microphoneState === 'loading' || microphoneState === 'disabled'" :disabled="
microphoneState === 'loading' ||
microphoneState === 'disabled' ||
microphoneState === 'reply'
"
@pointerdown="startVoskWsAudioInput" @pointerdown="startVoskWsAudioInput"
> >
<v-icon v-if="microphoneState === 'waitInput'" icon="mdi-microphone"></v-icon> <v-icon v-if="microphoneState === 'waitInput'" icon="mdi-microphone"></v-icon>
<v-icon v-if="microphoneState === 'loading'" icon="mdi-microphone-settings"></v-icon> <v-icon v-if="microphoneState === 'loading'" icon="mdi-microphone-settings"></v-icon>
<v-icon v-if="microphoneState === 'disabled'" icon="mdi-microphone-off"></v-icon> <v-icon v-if="microphoneState === 'disabled'" icon="mdi-microphone-off"></v-icon>
<v-icon v-if="microphoneState === 'reply'" icon="mdi-message-reply-text-outline"></v-icon>
<template v-if="microphoneState === 'input'"> <template v-if="microphoneState === 'input'">
<img width="30" height="30" :src="iconMicrophone" alt="" srcset="" /> <img width="30" height="30" :src="iconMicrophone" alt="" srcset="" />
......
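For readers following the component above, the new reply pipeline per answer slice is: local TTS renders a WAV, the main process uploads it over the new fileUpload IPC channel, and the returned resource is queued onto the photo live task. A condensed sketch with the component's own helpers passed in as parameters (import paths assume the @/renderer alias used elsewhere in this diff; the resource host is the one hard-coded above):

import { localTTS } from '@/renderer/plugins/tts'
import type { PhotoRole } from '@/renderer/plugins/live/PhotoRole'

// One TTS -> upload -> enQueue round trip for a single answer slice.
async function speakSlice(
  role: PhotoRole,
  ttsHost: string,
  userData: string,
  uploadFile: (args: { filePath: string }) => Promise<string>,
  text: string,
  isLast: boolean
) {
  const res = await localTTS({ url: ttsHost, text, audio_path: userData })
  const remoteName = await uploadFile({ filePath: res[0].text })
  role.enQueue({
    taskId: role.sessionId,
    audioUrl: `https://resources.laihua.com/${remoteName}`,
    isLast
  })
}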
import { defineStore } from 'pinia' import { defineStore } from 'pinia'
type IPhoto = { type IPhoto = {
list: { url: string }[] list: { url: string; liveUrl: string }[]
} }
const usePhotoStore = defineStore('photo', { const usePhotoStore = defineStore('photo', {
...@@ -10,22 +10,33 @@ const usePhotoStore = defineStore('photo', { ...@@ -10,22 +10,33 @@ const usePhotoStore = defineStore('photo', {
({ ({
list: [ list: [
{ {
url: new URL('/images/photo/1.png', import.meta.url).href url: new URL('/images/photo/1.png', import.meta.url).href,
liveUrl:
'https://resources.laihua.com/2023-12-14/11772300-9a47-11ee-84b0-fbd08f47254f.png'
}, },
{ {
url: new URL('/images/photo/2.png', import.meta.url).href url: new URL('/images/photo/2.png', import.meta.url).href,
liveUrl:
'https://resources.laihua.com/2023-12-14/32b3e530-9a47-11ee-8702-5ddbbcc07698.png'
}, },
{ {
url: new URL('/images/photo/3.png', import.meta.url).href url: new URL('/images/photo/3.png', import.meta.url).href,
liveUrl:
'https://resources.laihua.com/2023-12-14/55060f00-9a47-11ee-8702-5ddbbcc07698.png'
}, },
{ {
url: new URL('/images/photo/4.png', import.meta.url).href url: new URL('/images/photo/4.png', import.meta.url).href,
liveUrl:
'https://resources.laihua.com/2023-12-14/81a0d220-9a47-11ee-84b0-fbd08f47254f.png'
}, },
{ {
url: new URL('/2023-11-2/93ffb6a7-ae93-4918-944e-877016ba266b.png', import.meta.url).href url: new URL('/2023-11-2/93ffb6a7-ae93-4918-944e-877016ba266b.png', import.meta.url).href,
liveUrl: 'https://resources.laihua.com/2023-11-2/93ffb6a7-ae93-4918-944e-877016ba266b.png'
}, },
{ {
url: new URL('/2023-11-2/6fa9a127-2ce5-43ea-a543-475bf9354eda.png', import.meta.url).href url: new URL('/2023-11-2/6fa9a127-2ce5-43ea-a543-475bf9354eda.png', import.meta.url).href,
liveUrl:
'https://resources.laihua.com/2023-12-14/b7523e40-9a47-11ee-84b0-fbd08f47254f.png'
} }
] ]
}) as IPhoto, }) as IPhoto,
......
...@@ -27,6 +27,7 @@ export type ISettings = { ...@@ -27,6 +27,7 @@ export type ISettings = {
llmUrl: string llmUrl: string
llmToTTSSliceLength: number llmToTTSSliceLength: number
voskWsLUrl: string voskWsLUrl: string
liveHost: string
} }
const useSettingsStore = defineStore('settings', { const useSettingsStore = defineStore('settings', {
...@@ -61,7 +62,8 @@ const useSettingsStore = defineStore('settings', { ...@@ -61,7 +62,8 @@ const useSettingsStore = defineStore('settings', {
isOpenDevTools: false, isOpenDevTools: false,
llmUrl: 'ws://127.0.0.1:9899/api/v1/stream', llmUrl: 'ws://127.0.0.1:9899/api/v1/stream',
llmToTTSSliceLength: 20, llmToTTSSliceLength: 20,
voskWsLUrl: 'ws://127.0.0.1:2700' voskWsLUrl: 'ws://127.0.0.1:2700',
liveHost: 'http://122.51.32.12:9000'
}) as ISettings, }) as ISettings,
getters: {}, getters: {},
actions: { actions: {
......
export * as HWLLSPlayer from './lib/HWLLSPlayer'; export * as HWLLSPlayer from './lib/HWLLSPlayer'
\ No newline at end of file
declare const _default: { declare const _default: {
getVersion: any; getVersion: any
checkSystemRequirements: any; checkSystemRequirements: any
setParameter: any; setParameter: any
createClient: any; createClient: any
saveLog: any; saveLog: any
setLogLevel: any; setLogLevel: any
uploadLog: any; uploadLog: any
}; }
export { _default as default }; export { _default as default }
This source diff could not be displayed because it is too large. You can view the blob instead.
import axios from 'axios'; import axios from 'axios'
import type { AxiosRequestConfig } from 'axios'; import type { AxiosRequestConfig } from 'axios'
export const axiosInstance = axios.create(); export const axiosInstance = axios.create()
export interface ApiResult<T = unknown> { export interface ApiResult<T = unknown> {
error?: boolean; error?: boolean
code?: number; code?: number
message?: string; message?: string
msg?: string; msg?: string
data?: T; data?: T
[k: string]: any; [k: string]: any
} }
export default async function http<T>(input: AxiosRequestConfig): Promise<ApiResult<T>> { export default async function http<T>(input: AxiosRequestConfig): Promise<ApiResult<T>> {
try { try {
const response = await axiosInstance(input); const response = await axiosInstance(input)
if (response.status === 200) { if (response.status === 200) {
return response.data; return response.data
} }
return { error: true }; return { error: true }
} catch (error) { } catch (error) {
return { return {
code: (error as any).response?.code || (error as any).code, code: (error as any).response?.code || (error as any).code,
data: (error as any)?.response?.data data: (error as any)?.response?.data
}; }
} }
} }
...@@ -9,10 +9,10 @@ export default class Utils { ...@@ -9,10 +9,10 @@ export default class Utils {
static guid() { static guid() {
function S4() { function S4() {
return (((1 + Math.random()) * 0x10000) | 0).toString(16).substring(1); return (((1 + Math.random()) * 0x10000) | 0).toString(16).substring(1)
} }
return S4() + S4() + '-' + S4() + '-' + S4() + '-' + S4() + '-' + S4() + S4() + S4(); return S4() + S4() + '-' + S4() + '-' + S4() + '-' + S4() + '-' + S4() + S4() + S4()
} }
} }
export const { getCurrentLocale, openExternal, guid} = Utils export const { getCurrentLocale, openExternal, guid } = Utils
...@@ -27,5 +27,12 @@ ...@@ -27,5 +27,12 @@
"path": "./tsconfig.node.json" "path": "./tsconfig.node.json"
} }
], ],
"exclude": ["node_modules", "dist", "rollup.config.js", "*.json", "*.js", "src/renderer/utils/HWLLS_SDK_Web_2.3.0/**'"] "exclude": [
"node_modules",
"dist",
"rollup.config.js",
"*.json",
"*.js",
"src/renderer/utils/HWLLS_SDK_Web_2.3.0/**'"
]
} }