Commit a5aa32b4 authored by ali's avatar ali

feat: 照片数字人直播接入

parent 9b2696f3
......@@ -20,6 +20,7 @@
"editor.tabSize": 2,
"cSpell.words": [
"flvjs",
"superres",
"Vosk"
],
"editor.inlineSuggest.showToolbar": "always"
......
import type { HWLLSPlayer } from '@/renderer/utils/HWLLS_SDK_Web_2.3.0/export';
import type { HWLLSPlayer } from '@/renderer/utils/HWLLS_SDK_Web_2.3.0/export'
declare global {
// eslint-disable-next-line no-unused-vars
interface Window {
HWLLSPlayer: HWLLSPlayer
}
}
\ No newline at end of file
}
......@@ -12,7 +12,6 @@
"axios": "^1.6.2",
"electron-store": "^8.1.0",
"EventEmitter": "^1.0.0",
"events": "^3.3.0",
"flv.js": "^1.6.2",
"pinia": "^2.1.7",
"pinia-plugin-persistedstate": "^3.2.0",
......@@ -5552,14 +5551,6 @@
"node": ">=0.12"
}
},
"node_modules/events": {
"version": "3.3.0",
"resolved": "https://registry.npmmirror.com/events/-/events-3.3.0.tgz",
"integrity": "sha512-mQw+2fkQbALzQ7V0MY0IqdnXNOeTtP4r0lN9z7AAawCXgqea7bDii20AYrIBrFd/Hx0M2Ocz6S111CaFkUcb0Q==",
"engines": {
"node": ">=0.8.x"
}
},
"node_modules/exit-hook": {
"version": "1.1.1",
"resolved": "https://registry.npmmirror.com/exit-hook/-/exit-hook-1.1.1.tgz",
......
......@@ -74,4 +74,4 @@
"vue-tsc": "^1.8.22",
"xvfb-maybe": "^0.2.1"
}
}
\ No newline at end of file
}
import { BrowserWindow, ipcMain, shell, BrowserWindowConstructorOptions, app } from 'electron'
import Constants from './utils/Constants'
import fs from 'fs'
import http from './utils/http'
/*
* IPC Communications
......@@ -7,6 +9,12 @@ import Constants from './utils/Constants'
export default class IPCs {
static browserWindows: Map<string, BrowserWindow[]> = new Map()
// 读取本地文件
static readFile(path) {
const file = fs.readFileSync(path)
return file
}
static initialize(window: BrowserWindow): void {
ipcMain.on('mesGetUserData', () => {
window.webContents.send('msgReceivedUserData', app.getPath('userData'))
......@@ -78,6 +86,9 @@ export default class IPCs {
await win.loadURL(url)
// Initialize IPC Communication
IPCs.initializeChildWindow(win)
if (!IPCs.browserWindows.has(url)) {
IPCs.browserWindows.set(url, [])
}
......@@ -106,4 +117,20 @@ export default class IPCs {
}
})
}
// Register per-child-window IPC handlers.
// 'fileUpload': upload a local file (recorded WAV) to the laihua file service
// on behalf of the renderer, then send the server response back over IPC.
static initializeChildWindow(window: BrowserWindow) {
  ipcMain.on('fileUpload', async (event, path: string) => {
    try {
      const content = IPCs.readFile(path)
      const formData = new FormData()
      const blob = new Blob([content], { type: 'audio/wav' })
      formData.append('file', blob)
      const response = await http({
        url: 'https://beta.laihua.com/api/upload/file',
        method: 'POST',
        data: formData
      })
      window.webContents.send('msgReceivedFileUploadResponse', response)
    } catch (error) {
      // readFile throws synchronously on a missing/unreadable path; without
      // this catch the rejection is unhandled and the renderer's uploadFile
      // promise never settles. A non-200 code matches the renderer's
      // `result.code !== 200` rejection check.
      window.webContents.send('msgReceivedFileUploadResponse', {
        code: -1,
        data: null,
        message: (error as Error).message
      })
    }
  })
}
}
import axios from 'axios'
import type { AxiosRequestConfig } from 'axios'
// Shared axios instance so defaults/interceptors can be configured in one place.
export const axiosInstance = axios.create()
// Normalized response envelope returned by `http`. On HTTP 200 this is the
// server's JSON body as-is (which may itself carry code/message/msg/data);
// on failure the catch branch below fills in code/data instead.
export interface ApiResult<T = unknown> {
error?: boolean
code?: number
message?: string
msg?: string
data?: T
// Server bodies are heterogeneous across endpoints; allow extra keys.
[k: string]: any
}
/**
 * Thin wrapper around the shared axios instance that never rejects:
 * failures are folded into the ApiResult shape so callers can check
 * `code` / `error` instead of wrapping every call in try/catch.
 *
 * @param input axios request config (url, method, data, params, ...)
 * @returns the server's JSON body on HTTP 200, otherwise an ApiResult
 *          with `error: true` and the best-available status/message.
 */
export default async function http<T>(input: AxiosRequestConfig): Promise<ApiResult<T>> {
  try {
    const response = await axiosInstance(input)
    if (response.status === 200) {
      return response.data
    }
    // Non-200 that did not throw (custom validateStatus): surface the status.
    return { error: true, code: response.status }
  } catch (error) {
    // Axios exposes the HTTP status on error.response.status (there is no
    // `response.code`); fall back to a code in the response body, then to the
    // transport-level error code (e.g. 'ECONNABORTED').
    const err = error as any
    return {
      error: true,
      code: err.response?.status ?? err.response?.data?.code ?? err.code,
      message: err.message,
      data: err?.response?.data
    }
  }
}
......@@ -8,13 +8,15 @@ const mainAvailChannels: string[] = [
'openWindow',
'openDevTools',
'mesGetUserData',
'mesGetAppData'
'mesGetAppData',
'fileUpload'
]
const rendererAvailChannels: string[] = [
'msgReceivedVersion',
'msgReceivedFilePath',
'msgReceivedUserData',
'msgReceivedAppData'
'msgReceivedAppData',
'msgReceivedFileUploadResponse'
]
contextBridge.exposeInMainWorld('mainApi', {
......
......@@ -45,6 +45,12 @@ const asrItems = ref([
'vosk_ws'
// 'Whisper Api'
])
// Candidate photo-avatar live-streaming service endpoints offered in the
// settings "直播地址" dropdown below.
const liveHosts = ref([
'http://111.229.216.162:9000',
'http://124.221.182.173:9000',
'http://110.42.214.59:9000',
'http://122.51.32.12:9000'
])
const asrSelect = ref(setting.asr)
const source = computed(() => {
......@@ -186,6 +192,15 @@ function clear() {
:model-value="setting.llmUrl"
></v-text-field>
<!-- Live-host selector; validation message must match the field (was a
     copy-paste of the voice selector's "请选择音色"). -->
<v-select
v-model="setting.liveHost.value"
style="margin-top: 22px"
:items="liveHosts"
:rules="[(v) => !!v || '请选择直播地址']"
label="直播地址"
required
></v-select>
<v-slider
v-model="setting.llmToTTSSliceLength.value"
label="TTS 分句长度"
......
......@@ -6,6 +6,6 @@
<body>
<div id="app"></div>
</body>
<script src="./utils/HWLLS_SDK_Web_2.3.0/lib/HWLLSPlayer.js"></script>
<script src="./HWLLSPlayer.js"></script>
<script type="module" src="./main.ts"></script>
</html>
import EventEmitter from 'EventEmitter';
import EventEmitter from 'EventEmitter'
/**
*
......@@ -30,41 +30,41 @@ import EventEmitter from 'EventEmitter';
*
*/
export type StartPlayOptions = {
objectFit?: 'contain' | 'cover' | 'fill';
muted?: boolean;
sessionId?: string;
showLoading?: boolean;
autoPlay?: boolean;
objectFit?: 'contain' | 'cover' | 'fill'
muted?: boolean
sessionId?: string
showLoading?: boolean
autoPlay?: boolean
poster?: {
url?: string;
mode?: 'fill' | 'crop';
startEnable?: boolean;
pauseEnable: boolean;
};
};
url?: string
mode?: 'fill' | 'crop'
startEnable?: boolean
pauseEnable: boolean
}
}
// 自定义事件类型
export type HwEventType = 'videoStart' | 'audioStart' | 'audioBroken' | 'videoBroken' | 'error'; // 场景页切换
export type HwEventType = 'videoStart' | 'audioStart' | 'audioBroken' | 'videoBroken' | 'error' // 场景页切换
export type HwEventTypeData<T extends HwEventType> = {
videoStart: [];
audioStart: [];
audioBroken: [];
videoBroken: [];
error: [{ code: number; message: string }];
}[T];
videoStart: []
audioStart: []
audioBroken: []
videoBroken: []
error: [{ code: number; message: string }]
}[T]
export type HwEventTypeFn<T extends HwEventType> = {
// eslint-disable-next-line no-unused-vars
[K in T]: (...args: HwEventTypeData<T>) => void;
}[T];
[K in T]: (...args: HwEventTypeData<T>) => void
}[T]
export class HwWebRTC extends EventEmitter {
elementId = '';
startPlayOptions: StartPlayOptions | null = null;
client: any = null;
elementId = ''
startPlayOptions: StartPlayOptions | null = null
client: any = null
constructor(id: string, log: 'none' | 'error' | 'warn' | 'info' | 'debug' = 'none') {
super();
this.elementId = id;
super()
this.elementId = id
// setLogLevel(log);
}
......@@ -75,7 +75,7 @@ export class HwWebRTC extends EventEmitter {
* @returns 是否成功
*/
emit<T extends HwEventType>(event: T, ...args: HwEventTypeData<T>): boolean {
return super.emit(event, ...args);
return super.emit(event, ...args)
}
/**
......@@ -86,16 +86,16 @@ export class HwWebRTC extends EventEmitter {
*/
on<T extends HwEventType>(event: T, fn: HwEventTypeFn<T>): this {
// fn 可能确实只有一个参数, 只能使用as
return super.on(event, fn as (...args: any[]) => void);
return super.on(event, fn as (...args: any[]) => void)
}
/**
* 预处理:获取浏览器的版本号、检查兼容性
*/
static async isBrowserSupport() {
let check = false;
check = await window.HWLLSPlayer.checkSystemRequirements();
return check;
let check = false
check = await window.HWLLSPlayer.checkSystemRequirements()
return check
}
/**
......@@ -109,51 +109,51 @@ export class HwWebRTC extends EventEmitter {
objectFit: 'contain'
}
) {
if (this.client) this.destroyed();
this.startPlayOptions = options;
this.client = window.HWLLSPlayer.createClient('webrtc');
if (this.client) this.destroyed()
this.startPlayOptions = options
this.client = window.HWLLSPlayer.createClient('webrtc')
await this.client.startPlay(url, {
elementId: this.elementId,
...this.startPlayOptions
});
this.client.enableStreamStateDetection(true, 2);
this._bindEvents();
})
this.client.enableStreamStateDetection(true, 2)
this._bindEvents()
}
private _bindEvents() {
this.client.on('video-start', () => {
this.emit('videoStart');
});
this.emit('videoStart')
})
this.client.on('audio-start', () => {
this.emit('audioStart');
});
this.emit('audioStart')
})
this.client.on('audio-broken', () => {
this.emit('audioBroken');
});
this.emit('audioBroken')
})
this.client.on('video-broken', () => {
this.emit('videoBroken');
});
this.emit('videoBroken')
})
// this.client.on('audio-recovery', () => {
// logManage.log('----------------> audio-recovery', 1);
// });
// this.client.on('video-recovery', () => {
// logManage.log('----------------> video-recovery', 1);
// });
this.client.on('Error', (error: any) => this.emit('error', error));
this.client.on('Error', (error: any) => this.emit('error', error))
}
/**
* 停止播放:停止播放请求
*/
stopPlay() {
this.client && this.client.stopPlay();
this.client && this.client.stopPlay()
}
/**
* 后处理:销毁客户端等。
*/
destroyed() {
this.client?.offAllEvents();
this.client?.destoryClient();
this.client?.offAllEvents()
this.client?.destoryClient()
}
}
/* eslint-disable camelcase */
import http from '@/renderer/utils/http';
import { HwWebRTC } from './HwWebRTC';
import { guid } from '@/renderer/utils/index';
import EventEmitter from 'EventEmitter';
const HOST = 'http://122.51.32.12:9000';
import http from '@/renderer/utils/http'
import { HwWebRTC } from './HwWebRTC'
import { guid } from '@/renderer/utils/index'
import EventEmitter from 'EventEmitter'
type LiveOptions = {
speaker?: string;
languageCode?: string;
text?: string;
audioUrl?: string | null;
taskId: string;
imgUrl: string;
speed?: number;
};
speaker?: string
languageCode?: string
text?: string
audioUrl?: string | null
taskId: string
imgUrl: string
pushUrl: string
pullUrl: string
speed?: number
bitrate: number
superres: boolean
isLast: boolean
}
export class PhotoAnswer {
question = '';
answer = '';
question = ''
answer = ''
/** 将答案分割,一段一段合成数字人直播流 */
_sliceAnswer = '';
_sliceAnswer = ''
/** 保存答案数组,用于实现打字输出效果 */
_typingAnswer: string[] = [];
_typingAnswer: string[] = []
/** 与 answer 内容一致,外部实现打字输出效果 */
asyncAnswer = '';
asyncAnswer = ''
/** 数字人播放状态 */
playState: 'playing' | 'pause' = 'pause';
playState: 'playing' | 'pause' = 'pause'
/** 答案是否接收完毕 */
answerEnd = false;
answerEnd = false
play() {
}
play() {}
stop() {
}
stop() {}
pause() {
}
pause() {}
destroy() {
}
destroy() {}
}
// 自定义事件类型
......@@ -50,250 +49,272 @@ export type PhotoEventType =
| 'audioBroken'
| 'videoBroken'
| 'liveStatusTrace'
| 'asyncAnswer';
| 'asyncAnswer'
export type PhotoEventTypeData<T extends PhotoEventType> = {
videoStart: [];
audioStart: [];
audioBroken: [];
videoBroken: [];
liveStatusTrace: ['init' | 'ready' | 'wait' | 'closing' | 'pushing'];
asyncAnswer: [PhotoAnswer];
}[T];
videoStart: []
audioStart: []
audioBroken: []
videoBroken: []
liveStatusTrace: ['init' | 'ready' | 'wait' | 'closing' | 'pushing']
asyncAnswer: [PhotoAnswer]
}[T]
export type PhotoEventTypeFn<T extends PhotoEventType> = {
// eslint-disable-next-line no-unused-vars
[K in T]: (...args: PhotoEventTypeData<T>) => void;
}[T];
[K in T]: (...args: PhotoEventTypeData<T>) => void
}[T]
export class PhotoRole extends EventEmitter {
readonly view: HTMLCanvasElement;
readonly ctx: CanvasRenderingContext2D;
private _webRTCContainer: HTMLDivElement = document.createElement('div');
private _rtc: HwWebRTC | null = null;
private _image: HTMLImageElement = new Image();
private _rtcVideo: HTMLVideoElement | null = null;
readonly host: string
readonly view: HTMLCanvasElement
readonly ctx: CanvasRenderingContext2D
private _webRTCContainer: HTMLDivElement = document.createElement('div')
private _rtc: HwWebRTC | null = null
private _image: HTMLImageElement = new Image()
private _rtcVideo: HTMLVideoElement | null = null
private _rtcVideoInfo: {
center: {
x: number;
y: number;
};
width: number;
height: number;
r_w: number;
r_h: number;
} | null = null;
private _liveStatus: 'init' | 'ready' | 'wait' | 'closing' | 'pushing' = 'closing';
private _pollTimeout = -1;
answerArgs: PhotoAnswer | null = null;
readonly url: string;
readonly sessionId = guid();
constructor(url: string, view: HTMLCanvasElement) {
super();
this.url = url;
this.view = view;
this.ctx = view.getContext('2d') as CanvasRenderingContext2D;
// this._dialogSession = new Dialog({
// type: 'photo',
// callbacks: {
// chatConnect: (ans) => {
// this.answerArgs = ans;
// console.time('chat');
// },
// chatEnd: (ans) => {
// this._enQueue(this.sessionId, ans._sliceAnswer);
// ans._sliceAnswer = '';
// ans.answerEnd = true;
// console.timeEnd('chat');
// console.log('----------------> answer.length: ', ans.answer.length);
// },
// chatMessage: (message, ans) => {
// ans._typingAnswer.push(message);
// ans._sliceAnswer += message;
// if (/[。,?!;,.?!;]/.test(message) && ans._sliceAnswer.length >= 40) {
// this._enQueue(this.sessionId, ans._sliceAnswer);
// ans._sliceAnswer = '';
// }
// }
// }
// });
x: number
y: number
}
width: number
height: number
r_w: number
r_h: number
} | null = null
private _liveStatus: 'init' | 'ready' | 'wait' | 'closing' | 'pushing' = 'closing'
private _pollTimeout = -1
answerArgs: PhotoAnswer | null = null
readonly url: string
readonly sessionId = guid()
constructor(host: string, url: string, view: HTMLCanvasElement) {
super()
this.host = host
this.url = url
this.view = view
this.ctx = view.getContext('2d') as CanvasRenderingContext2D
}
private _liveTaskQueue: LiveOptions[] = []
private _isLiveTaskRunning = false
// Number of live-synthesis tasks still queued — exposed so the UI can tell
// when the avatar has finished speaking (queue drained).
get taskQueueLength(): number {
return this._liveTaskQueue.length
}
private _liveTaskQueue: LiveOptions[] = [];
private _isLiveTaskRunning = false;
enQueue(taskId: string, text: string) {
if (text.length < 1) return;
enQueue({ taskId, audioUrl, isLast }: { taskId: string; audioUrl: string; isLast: boolean }) {
if (audioUrl.length < 1) return
this._liveTaskQueue.push({
imgUrl: this.url,
taskId,
pushUrl: `rtmp://push.laihua.com/web/${taskId}`,
pullUrl: `webrtc://pull.laihua.com/web/${taskId}`,
speaker: 'speaker',
languageCode: 'zh-CN',
text,
speed: 10
});
this._runTask();
audioUrl,
speed: 10,
bitrate: 2000,
superres: false,
isLast
})
this._runTask()
}
private async _runTask() {
if (this._isLiveTaskRunning) return;
this._isLiveTaskRunning = true;
if (this._isLiveTaskRunning) return
this._isLiveTaskRunning = true
this.off('liveStatusTrace', this._liveStatusTrace);
this.on('liveStatusTrace', this._liveStatusTrace);
// @ts-ignore
this.off('liveStatusTrace', this._liveStatusTrace)
this.on('liveStatusTrace', this._liveStatusTrace)
try {
while (this._liveTaskQueue.length) {
const task = this._liveTaskQueue.shift() as LiveOptions;
console.time(task.text);
if (this._liveStatus === 'closing') await this.initLive();
const task = this._liveTaskQueue.shift() as LiveOptions
console.time(task.text)
if (this._liveStatus === 'closing') await this.initLive()
await this._createLive(task);
console.log('----------------> append', task)
console.timeEnd(task.text);
await this._appendLive(task)
console.timeEnd(task.text)
}
} catch (error) {
console.error(error);
console.error(error)
}
this._isLiveTaskRunning = false;
this._isLiveTaskRunning = false
}
private async _liveStatusTrace() {
if (!this.answerArgs) return;
if (!this.answerArgs) return
if (this._liveStatus === 'pushing') {
this.answerArgs.playState = 'playing';
this._typingOutAnswer();
return;
this.answerArgs.playState = 'playing'
this._typingOutAnswer()
return
}
if (this._liveStatus === 'wait') {
this.answerArgs.playState = 'pause';
this.answerArgs.playState = 'pause'
this.emit('asyncAnswer', this.answerArgs)
}
}
private _typingRunner = false;
private _typingRunner = false
private async _typingOutAnswer() {
if (!this.answerArgs || this._typingRunner) return;
this._typingRunner = true;
if (!this.answerArgs || this._typingRunner) return
this._typingRunner = true
// 加延迟是为了 playing 状态时,能跟声音保持相对同步
await new Promise((resolve) => setTimeout(resolve, 2000));
await new Promise((resolve) => setTimeout(resolve, 2000))
while (this.answerArgs._typingAnswer.length) {
this.answerArgs.asyncAnswer += this.answerArgs._typingAnswer.shift();
this.answerArgs.asyncAnswer += this.answerArgs._typingAnswer.shift()
this.emit('asyncAnswer', this.answerArgs)
await new Promise((resolve) => setTimeout(resolve, 100));
await new Promise((resolve) => setTimeout(resolve, 100))
}
this._typingRunner = false;
this._typingRunner = false
}
private _play(url: string) {
return new Promise<void>((resolve) => {
// if (!this._rtc) return;
// let isPlaying = false;
// let timeout = -1;
// this._rtc.once('videoStart', () => {
// isPlaying = true;
// clearTimeout(timeout);
// resolve();
// });
// const keepCalling = async () => {
// if (isPlaying) return;
// try {
// await this._rtc?.startPlay(url);
// if (!isPlaying) {
// timeout = setTimeout(keepCalling, 400) as unknown as number;
// }
// } catch (error) {
// timeout = setTimeout(keepCalling, 200) as unknown as number;
// }
// };
// keepCalling();
});
if (!this._rtc) return
let isPlaying = false
let timeout = -1
// @ts-ignore
this._rtc.once('videoStart', () => {
isPlaying = true
clearTimeout(timeout)
resolve()
})
const keepCalling = async () => {
if (isPlaying) return
try {
await this._rtc?.startPlay(url)
if (!isPlaying) {
timeout = setTimeout(keepCalling, 400) as unknown as number
}
} catch (error) {
timeout = setTimeout(keepCalling, 200) as unknown as number
}
}
keepCalling()
})
}
draw() {
this.ctx.clearRect(0, 0, this._image.naturalWidth, this._image.naturalHeight);
this.ctx.drawImage(this._image, 0, 0, this._image.naturalWidth, this._image.naturalHeight);
this.ctx.clearRect(0, 0, this._image.naturalWidth, this._image.naturalHeight)
this.ctx.drawImage(this._image, 0, 0, this._image.naturalWidth, this._image.naturalHeight)
if (this._rtcVideo && this._rtcVideoInfo) {
const { center, r_w, r_h } = this._rtcVideoInfo;
this.ctx.drawImage(this._rtcVideo, center.x - r_w / 2, center.y - r_h / 2, r_w, r_h);
const { center, r_w, r_h } = this._rtcVideoInfo
this.ctx.drawImage(this._rtcVideo, center.x - r_w / 2, center.y - r_h / 2, r_w, r_h)
}
}
// Create a new live-streaming task for this photo avatar on the configured
// live host. POSTs the task options to `${host}/create`; the service replies
// with the task id, the WebRTC pull URL, and `imgInfo` describing where the
// generated face video sits inside the source image.
// Throws when the service reports a non-200 code or omits required fields.
private async _createLive(options: LiveOptions) {
console.log('---------------->', options)
// NOTE(review): `http` never rejects, so server errors surface via code/msg.
const resp = (await http({
method: 'POST',
url: `${this.host}/create`,
data: { ...options } // ip: 'http://116.63.168.14:9000'
})) as {
code: number
taskId: string
pullUrl: string
imgInfo: {
center: { x: number; y: number }
r_w: number
r_h: number
width: number
height: number
}
msg?: string
}
if (resp.code && resp.code !== 200) {
throw new Error(resp.msg)
}
if (!resp.pullUrl || !resp.taskId) {
throw new Error(`Field is empty: taskId: ${resp.taskId}, pullUrl: ${resp.pullUrl}`)
}
return resp
}
private async _appendLive(options: LiveOptions) {
const resp = (await http({
method: 'POST',
url: `${HOST}/create`,
data: { ...options, isSdk: 1 } // ip: 'http://116.63.168.14:9000'
url: `${this.host}/append`,
data: { ...options } // ip: 'http://116.63.168.14:9000'
})) as {
code: number;
taskId: string;
pullUrl: string;
code: number
taskId: string
pullUrl: string
imgInfo: {
center: { x: number; y: number };
r_w: number;
r_h: number;
width: number;
height: number;
};
msg?: string;
};
center: { x: number; y: number }
r_w: number
r_h: number
width: number
height: number
}
msg?: string
}
if (resp.code && resp.code !== 200) {
throw new Error(resp.msg);
throw new Error(resp.msg)
}
if (!resp.pullUrl || !resp.taskId) {
throw new Error(`Field is empty: taskId: ${resp.taskId}, pullUrl: ${resp.pullUrl}`);
throw new Error(`Field is empty: taskId: ${resp.taskId}, pullUrl: ${resp.pullUrl}`)
}
return resp;
return resp
}
private async _pushLive(taskId: string) {
const resp = (await http({
method: 'POST',
url: `${HOST}/push`,
url: `${this.host}/push`,
data: { taskId }
})) as {
code: number;
taskId: string;
msg?: string;
};
code: number
taskId: string
msg?: string
}
if (resp.code && resp.code !== 200) {
throw new Error(resp.msg);
throw new Error(resp.msg)
}
return resp;
return resp
}
private async _getLiveStatus(taskId: string) {
const resp = (await http({
method: 'GET',
url: `${HOST}/status`,
url: `${this.host}/status`,
params: { taskId }
})) as {
code: number;
status: 'init' | 'ready' | 'wait' | 'closing' | 'pushing';
msg?: string;
};
code: number
status: 'init' | 'ready' | 'wait' | 'closing' | 'pushing'
msg?: string
}
if (resp.code && resp.code !== 200) {
throw new Error(resp.msg);
throw new Error(resp.msg)
}
return resp;
return resp
}
private _checkStatus(
......@@ -301,82 +322,87 @@ export class PhotoRole extends EventEmitter {
checkStatus: 'init' | 'ready' | 'wait' | 'closing' | 'pushing'
) {
return new Promise<boolean>((resolve) => {
let count = 0;
let count = 0
const keepCalling = async () => {
if (count >= 10) return resolve(false);
count++;
if (count >= 10) return resolve(false)
count++
const { status } = await this._getLiveStatus(taskId);
if (status === checkStatus) return resolve(true);
const { status } = await this._getLiveStatus(taskId)
if (status === checkStatus) return resolve(true)
setTimeout(keepCalling, 1000) as unknown as number;
};
keepCalling();
});
setTimeout(keepCalling, 1000) as unknown as number
}
keepCalling()
})
}
private _pollStatus(taskId: string, callbacks: () => void) {
const keepCalling = async () => {
try {
const { status } = await this._getLiveStatus(taskId);
this._liveStatus = status;
const { status } = await this._getLiveStatus(taskId)
this._liveStatus = status
} catch (error) {
this._liveStatus = 'closing';
this._liveStatus = 'closing'
}
this._pollTimeout = setTimeout(keepCalling, 200) as unknown as number;
callbacks();
};
this._pollTimeout = setTimeout(keepCalling, 1000) as unknown as number
callbacks()
}
clearTimeout(this._pollTimeout);
keepCalling();
clearTimeout(this._pollTimeout)
keepCalling()
}
async initLive() {
console.time('init');
console.time('init')
console.time('init-_createLive');
console.time('init-_createLive')
const { pullUrl, taskId, imgInfo } = await this._createLive({
imgUrl: this.url,
pushUrl: `rtmp://push.laihua.com/web/${this.sessionId}`,
pullUrl: `webrtc://pull.laihua.com/web/${this.sessionId}`,
taskId: this.sessionId,
speaker: 'speaker',
languageCode: 'zh-CN',
audioUrl: ''
});
console.timeEnd('init-_createLive');
this._rtcVideoInfo = imgInfo;
console.time('play');
await this._checkStatus(taskId, 'ready');
await this._play(pullUrl);
await this._pushLive(taskId);
console.timeEnd('play');
this._rtcVideo = this._webRTCContainer.querySelector('video');
audioUrl: '',
bitrate: 2000,
superres: false,
isLast: true
})
console.timeEnd('init-_createLive')
this._rtcVideoInfo = imgInfo
console.time('play')
await this._checkStatus(taskId, 'ready')
await this._play(pullUrl)
await this._pushLive(taskId)
console.timeEnd('play')
this._rtcVideo = this._webRTCContainer.querySelector('video')
// 加延迟是为了 playing 状态时,能跟声音保持相对同步
const initTime = new Date().getTime();
const initTime = new Date().getTime()
this._pollStatus(this.sessionId, () => {
if (new Date().getTime() - initTime < 1000) return;
console.log('---------------->', this._liveStatus);
this._liveStatus === 'closing' && clearTimeout(this._pollTimeout);
this.emit('liveStatusTrace', this._liveStatus);
});
if (new Date().getTime() - initTime < 1000) return
console.log('---------------->', this._liveStatus)
this._liveStatus === 'closing' && clearTimeout(this._pollTimeout)
this.emit('liveStatusTrace', this._liveStatus)
})
console.timeEnd('init');
console.timeEnd('init')
}
private _bindEvents() {
// this._rtc?.on('videoStart', () => {
// this.emit('videoStart');
// });
// this._rtc?.on('audioStart', () => {
// this.emit('audioStart');
// });
// this._rtc?.on('audioBroken', () => {
// this.emit('audioBroken');
// });
// this._rtc?.on('videoBroken', () => {
// this.emit('videoBroken');
// });
this._rtc?.on('videoStart', () => {
this.emit('videoStart')
})
this._rtc?.on('audioStart', () => {
this.emit('audioStart')
})
this._rtc?.on('audioBroken', () => {
this.emit('audioBroken')
})
this._rtc?.on('videoBroken', () => {
this.emit('videoBroken')
})
}
/**
......@@ -386,7 +412,7 @@ export class PhotoRole extends EventEmitter {
* @returns 是否成功
*/
emit<T extends PhotoEventType>(event: T, ...args: PhotoEventTypeData<T>): boolean {
return super.emit(event, ...args);
return super.emit(event, ...args)
}
/**
......@@ -397,47 +423,48 @@ export class PhotoRole extends EventEmitter {
*/
on<T extends PhotoEventType>(event: T, fn: PhotoEventTypeFn<T>): this {
// fn 可能确实只有一个参数, 只能使用as
return super.on(event, fn as (...args: any[]) => void);
return super.on(event, fn as (...args: any[]) => void, this)
}
get id(): string {
return this.sessionId;
return this.sessionId
}
async init() {
if (!(await HwWebRTC.isBrowserSupport())) {
throw new Error('WebRTC is not supported');
throw new Error('WebRTC is not supported')
}
this._webRTCContainer.style.height = '256px';
this._webRTCContainer.style.width = '256px';
document.body.appendChild(this._webRTCContainer);
this._webRTCContainer.style.height = '256px'
this._webRTCContainer.style.width = '256px'
document.body.appendChild(this._webRTCContainer)
this._image.src = this.url;
this._image.src = this.url
await new Promise((resolve, reject) => {
this._image.onload = resolve;
this._image.onerror = reject;
});
this._image.onload = resolve
this._image.onerror = reject
})
this.ctx.canvas.width = this._image.naturalWidth;
this.ctx.canvas.height = this._image.naturalHeight;
this.ctx.canvas.width = this._image.naturalWidth
this.ctx.canvas.height = this._image.naturalHeight
this.draw()
this._webRTCContainer.id = 'webRTCContainer';
this._webRTCContainer.style.display = 'none';
this._rtc = new HwWebRTC(this._webRTCContainer.id);
this._bindEvents();
this._webRTCContainer.id = 'webRTCContainer'
this._webRTCContainer.style.display = 'none'
this._rtc = new HwWebRTC(this._webRTCContainer.id)
this._bindEvents()
await this.initLive();
await this.initLive()
// this._pollStatus(this.sessionId);
}
destroy() {
this._webRTCContainer && document.body.removeChild(this._webRTCContainer);
this._rtc?.stopPlay();
this._rtc?.destroyed();
clearTimeout(this._pollTimeout);
this._webRTCContainer && document.body.removeChild(this._webRTCContainer)
this._rtc?.stopPlay()
this._rtc?.destroyed()
clearTimeout(this._pollTimeout)
return {
code: 1
};
}
}
}
This source diff could not be displayed because it is too large. You can view the blob instead.
......@@ -53,7 +53,7 @@ async function appendPhoto(url: string) {
return '图片加载失败!'
}
photo.list.value.push({ url })
photo.list.value.push({ url, liveUrl: url })
urlValue.value = ''
return true
......
......@@ -11,21 +11,21 @@ import type {
import { audioAiTTS, localTTS } from '../plugins/tts'
import useStore from '@/renderer/store'
import flvjs from 'flv.js'
import { PhotoRole } from '@/renderer/plugins/live/PhotoRole';
import { PhotoAnswer, PhotoRole } from '@/renderer/plugins/live/PhotoRole'
const router = useRouter()
const route = useRoute()
const { settings } = useStore()
const { settings, photo } = useStore()
let sampleRate = 48000
const bufferSize = 8192
const iconMicrophone = new URL('/images/microphone-input.svg', import.meta.url).href
const recordVolume = ref(0)
const url = route.query.url as string
const microphoneState = ref<'waitInput' | 'input' | 'loading' | 'disabled'>('waitInput')
const microphoneState = ref<'waitInput' | 'input' | 'loading' | 'disabled' | 'reply'>('waitInput')
const videoElement = ref<HTMLVideoElement | null>(null)
const can = ref<HTMLCanvasElement | null>(null)
let photoRole: PhotoRole | null = null;
let photoRole: PhotoRole | null = null
let flvPlayer: flvjs.Player | null = null
onMounted(() => {
......@@ -42,40 +42,55 @@ function loadImg(): Promise<HTMLImageElement> {
}
async function init() {
microphoneState.value = 'loading'
const img = await loadImg()
const videoEle = videoElement.value
const canvasEle = can.value
const ctx = canvasEle && canvasEle.getContext('2d')
if (!videoEle || !canvasEle || !ctx) return
draw(ctx, img)
canvasEle.width = img.naturalWidth
canvasEle.height = img.naturalHeight
photoRole = new PhotoRole(url, canvasEle);
const item = photo.list.find((i) => i.url === url)
photoRole = new PhotoRole(settings.liveHost, `${item?.liveUrl}`, canvasEle)
photoRole.on('asyncAnswer', (ans) => {
if (ans.playState === 'playing') {
microphoneState.value = 'reply'
return
}
if (
microphoneState.value === 'reply' &&
ans.playState === 'pause' &&
photoRole!.taskQueueLength === 0 &&
answerArray.length === 0
) {
microphoneState.value = 'input'
}
})
// initPlayer(videoEle);
try {
await photoRole.init()
} catch (error) {
console.error(error)
return
}
microphoneState.value = 'waitInput'
const fps = 1000 / 30
let lastTime = Date.now()
const updateFrame = () => {
if (Date.now() - lastTime > fps) {
draw(ctx, img, videoEle, {
width: 579,
height: 579,
center: {
x: 295,
y: 168
},
r_w: 304,
r_h: 304
})
photoRole?.draw()
lastTime = Date.now()
}
requestAnimationFrame(updateFrame)
}
requestAnimationFrame(updateFrame)
await photoRole.initLive();
}
function draw(
......@@ -258,7 +273,7 @@ async function startVoskWsAudioInput() {
}
await initVoskWS()
sampleRate = 8000
sampleRate = 16000
const mediaStream = await navigator.mediaDevices.getUserMedia({
audio: {
echoCancellation: true,
......@@ -275,7 +290,17 @@ async function startVoskWsAudioInput() {
source.connect(processor)
processor.connect(audioContext.destination)
processor.onaudioprocess = (audioDataChunk) => postAudio(audioDataChunk)
processor.onaudioprocess = (audioDataChunk) => {
if (
microphoneState.value === 'loading' ||
microphoneState.value === 'disabled' ||
microphoneState.value === 'reply'
) {
return
}
postAudio(audioDataChunk)
}
await analyzeMicrophoneVolume(mediaStream, (val) => {
recordVolume.value = val
......@@ -340,16 +365,20 @@ function endAudioInput() {
}
}
const answerArray: { text: string; isLast: boolean }[] = []
async function onAsr(question: string) {
console.log('---------------->question: ', question)
endAudioInput()
microphoneState.value = 'loading'
const ws = await initLLMSocket()
inputContext.ws = ws
let sliceAnswer = ''
let answer = ''
const answerArray: string[] = []
answerArray.length = 0
let isTime = true
photoRole!.answerArgs = new PhotoAnswer()
ws.onmessage = (message) => {
try {
......@@ -360,18 +389,17 @@ async function onAsr(question: string) {
}
if (event === 'stream_end') {
answerArray.push(sliceAnswer)
runTTSTask(answerArray)
sliceAnswer = ''
answerArray.push(sliceAnswer)
answerArray.push({ text: sliceAnswer, isLast: true })
sliceAnswer = ''
runTTSTask(answerArray)
inputContext.ws?.close()
console.log('----------------> answer: ', answer)
return
}
answer += text
photoRole!.answerArgs!.answer += answer
photoRole!.answerArgs!._typingAnswer.push(answer)
isTime && console.time('sliceAnswer')
isTime = false
......@@ -381,7 +409,7 @@ async function onAsr(question: string) {
sliceAnswer += t
if (/[。,?!;,.?!;]/.test(t) && sliceAnswer.length >= settings.llmToTTSSliceLength) {
console.timeEnd('sliceAnswer')
answerArray.push(sliceAnswer)
answerArray.push({ text: sliceAnswer, isLast: true })
runTTSTask(answerArray)
sliceAnswer = ''
isTime = true
......@@ -405,7 +433,7 @@ function initLLMSocket(): Promise<WebSocket> {
}
let isTTSRunning = false
async function runTTSTask(tasks: string[]) {
async function runTTSTask(tasks: { text: string; isLast: boolean }[]) {
if (isTTSRunning) return
isTTSRunning = true
......@@ -413,20 +441,24 @@ async function runTTSTask(tasks: string[]) {
while (tasks.length) {
const task = tasks.shift()
if (!task) break
if (task.length < 1) continue
if (task.text.trim().length < 1) continue
console.time(task + ' TTS: ')
const res = await localTTS({
url: settings.ttsHost,
text: task,
text: task.text,
audio_path: settings.userData
})
console.log('----------------> TTS:', res[0].text)
console.timeEnd(task + ' TTS: ')
const audio = new Audio(`file://${res[0].text}`)
audio.load()
ttsAudios.push(audio)
runAudioPlay()
console.log('---------------->', res[0].text)
const audioPath = await uploadFile({ filePath: res[0].text })
photoRole?.enQueue({
taskId: photoRole.sessionId,
audioUrl: `https://resources.laihua.com/${audioPath}`,
isLast: task.isLast
})
}
} catch (error) {
console.error(error)
......@@ -435,6 +467,21 @@ async function runTTSTask(tasks: string[]) {
isTTSRunning = false
}
// Ask the main process (via the 'fileUpload' IPC channel) to upload a local
// file and resolve with the uploaded file's name on the server.
// NOTE(review): a new 'msgReceivedFileUploadResponse' listener is registered
// on every call and never removed — if mainApi.receive accumulates listeners,
// earlier promises may settle repeatedly / leak. Verify mainApi's semantics.
function uploadFile({ filePath }: { filePath: string }) {
return new Promise<string>((resolve, reject) => {
window.mainApi.receive(
'msgReceivedFileUploadResponse',
(event: Event, result: { code: number; data: null | { filename: string } }) => {
if (result.code !== 200) {
return reject(JSON.stringify(result))
}
// Server returns the stored filename; fall back to '' if absent.
resolve(result.data?.filename || '')
}
)
window.mainApi.send('fileUpload', filePath)
})
}
const ttsAudios: HTMLAudioElement[] = []
let isPlayRunning = false
async function runAudioPlay() {
......@@ -452,7 +499,6 @@ async function runAudioPlay() {
}
await audio.play()
}
</script>
<template>
......@@ -477,12 +523,17 @@ async function runAudioPlay() {
color="#fff"
variant="elevated"
size="x-large"
:disabled="microphoneState === 'loading' || microphoneState === 'disabled'"
:disabled="
microphoneState === 'loading' ||
microphoneState === 'disabled' ||
microphoneState === 'reply'
"
@pointerdown="startVoskWsAudioInput"
>
<v-icon v-if="microphoneState === 'waitInput'" icon="mdi-microphone"></v-icon>
<v-icon v-if="microphoneState === 'loading'" icon="mdi-microphone-settings"></v-icon>
<v-icon v-if="microphoneState === 'disabled'" icon="mdi-microphone-off"></v-icon>
<v-icon v-if="microphoneState === 'reply'" icon="mdi-message-reply-text-outline"></v-icon>
<template v-if="microphoneState === 'input'">
<img width="30" height="30" :src="iconMicrophone" alt="" srcset="" />
......
import { defineStore } from 'pinia'
// Photo store state shape.
// url: local preview asset bundled with the app;
// liveUrl: CDN copy of the same image used by the live photo-avatar service.
type IPhoto = {
  list: { url: string; liveUrl: string }[]
}
const usePhotoStore = defineStore('photo', {
......@@ -10,22 +10,33 @@ const usePhotoStore = defineStore('photo', {
({
  // Built-in photo choices: local bundled preview plus the CDN URL the
  // live-streaming service consumes.
  list: [
    {
      url: new URL('/images/photo/1.png', import.meta.url).href,
      liveUrl:
        'https://resources.laihua.com/2023-12-14/11772300-9a47-11ee-84b0-fbd08f47254f.png'
    },
    {
      url: new URL('/images/photo/2.png', import.meta.url).href,
      liveUrl:
        'https://resources.laihua.com/2023-12-14/32b3e530-9a47-11ee-8702-5ddbbcc07698.png'
    },
    {
      url: new URL('/images/photo/3.png', import.meta.url).href,
      liveUrl:
        'https://resources.laihua.com/2023-12-14/55060f00-9a47-11ee-8702-5ddbbcc07698.png'
    },
    {
      url: new URL('/images/photo/4.png', import.meta.url).href,
      liveUrl:
        'https://resources.laihua.com/2023-12-14/81a0d220-9a47-11ee-84b0-fbd08f47254f.png'
    },
    {
      url: new URL('/2023-11-2/93ffb6a7-ae93-4918-944e-877016ba266b.png', import.meta.url).href,
      liveUrl: 'https://resources.laihua.com/2023-11-2/93ffb6a7-ae93-4918-944e-877016ba266b.png'
    },
    {
      url: new URL('/2023-11-2/6fa9a127-2ce5-43ea-a543-475bf9354eda.png', import.meta.url).href,
      liveUrl:
        'https://resources.laihua.com/2023-12-14/b7523e40-9a47-11ee-84b0-fbd08f47254f.png'
    }
  ]
}) as IPhoto,
......
......@@ -27,6 +27,7 @@ export type ISettings = {
llmUrl: string
llmToTTSSliceLength: number
voskWsLUrl: string
liveHost: string
}
const useSettingsStore = defineStore('settings', {
......@@ -61,7 +62,8 @@ const useSettingsStore = defineStore('settings', {
isOpenDevTools: false,
llmUrl: 'ws://127.0.0.1:9899/api/v1/stream',
llmToTTSSliceLength: 20,
voskWsLUrl: 'ws://127.0.0.1:2700',
// Photo digital-human live service endpoint (introduced by this feature)
liveHost: 'http://122.51.32.12:9000'
}) as ISettings,
getters: {},
actions: {
......
export * as HWLLSPlayer from './lib/HWLLSPlayer';
\ No newline at end of file
export * as HWLLSPlayer from './lib/HWLLSPlayer'
declare const _default: {
getVersion: any;
checkSystemRequirements: any;
setParameter: any;
createClient: any;
saveLog: any;
setLogLevel: any;
uploadLog: any;
};
export { _default as default };
getVersion: any
checkSystemRequirements: any
setParameter: any
createClient: any
saveLog: any
setLogLevel: any
uploadLog: any
}
export { _default as default }
This source diff could not be displayed because it is too large. You can view the blob instead.
import axios from 'axios';
import type { AxiosRequestConfig } from 'axios';
import axios from 'axios'
import type { AxiosRequestConfig } from 'axios'
export const axiosInstance = axios.create();
export const axiosInstance = axios.create()
/**
 * Generic envelope returned by the backend API.
 * `error` is set locally by the http() wrapper on non-200 responses;
 * unexpected extra fields are tolerated via the index signature.
 */
export interface ApiResult<T = unknown> {
  error?: boolean
  code?: number
  message?: string
  msg?: string
  data?: T
  [k: string]: any
}
export default async function http<T>(input: AxiosRequestConfig): Promise<ApiResult<T>> {
try {
const response = await axiosInstance(input);
const response = await axiosInstance(input)
if (response.status === 200) {
return response.data;
return response.data
}
return { error: true };
return { error: true }
} catch (error) {
return {
code: (error as any).response?.code || (error as any).code,
data: (error as any)?.response?.data
};
}
}
}
......@@ -9,10 +9,10 @@ export default class Utils {
static guid() {
function S4() {
return (((1 + Math.random()) * 0x10000) | 0).toString(16).substring(1);
return (((1 + Math.random()) * 0x10000) | 0).toString(16).substring(1)
}
return S4() + S4() + '-' + S4() + '-' + S4() + '-' + S4() + '-' + S4() + S4() + S4();
return S4() + S4() + '-' + S4() + '-' + S4() + '-' + S4() + '-' + S4() + S4() + S4()
}
}
export const { getCurrentLocale, openExternal, guid} = Utils
export const { getCurrentLocale, openExternal, guid } = Utils
......@@ -27,5 +27,12 @@
"path": "./tsconfig.node.json"
}
],
"exclude": ["node_modules", "dist", "rollup.config.js", "*.json", "*.js", "src/renderer/utils/HWLLS_SDK_Web_2.3.0/**'"]
"exclude": [
"node_modules",
"dist",
"rollup.config.js",
"*.json",
"*.js",
    "src/renderer/utils/HWLLS_SDK_Web_2.3.0/**"
]
}
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment