Commit adee6933 authored by ali

feat: AnswerBox

parent d9ea3ea9
<script setup lang="ts">
import { ref } from 'vue';
import { ref, watch } from 'vue';
interface Props {
visible?: boolean;
@@ -18,7 +18,7 @@ interface EmitType {
}
const emits = defineEmits<EmitType>();
withDefaults(defineProps<Props>(), {
const props = withDefaults(defineProps<Props>(), {
answerData() {
return [];
},
@@ -50,10 +50,17 @@ function playVideo() {
}
const loadingImg = new URL('/images/Ellipsis.gif', import.meta.url).href;
const scroll = ref<HTMLDivElement | null>(null);
watch(props.answerData, (val) => {
if (scroll.value) {
scroll.value.scrollTop = 9999;
}
})
</script>
<template>
<div
:style="{ width: '170px' }"
:style="{ width: '30%' }"
class="ans-layout"
>
<div
@@ -70,6 +77,7 @@ const loadingImg = new URL('/images/Ellipsis.gif', import.meta.url).href;
>
<div
class="scroll"
ref="scroll"
:style="{ 'max-height': '249.6px' }"
>
<div
@@ -132,6 +140,7 @@ const loadingImg = new URL('/images/Ellipsis.gif', import.meta.url).href;
</ul>
</div>
<!-- waiting GIF -->
</div>
<div
v-if="loading"
class="text-waitImage"
@@ -141,7 +150,6 @@ const loadingImg = new URL('/images/Ellipsis.gif', import.meta.url).href;
</div>
</div>
</div>
</div>
<teleport to="body">
<div
v-if="isPreview"
@@ -192,7 +200,7 @@ const loadingImg = new URL('/images/Ellipsis.gif', import.meta.url).href;
padding: 16px;
}
.text-box {
margin-bottom: 15px;
/* margin-bottom: 15px; */
}
.img-box,
.video-box {
......
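Note: the new `scroll` ref and the `watch` on `props.answerData` above auto-scroll the answer list whenever new chunks arrive; writing `scrollTop = 9999` is a shortcut for "jump to the end". A minimal sketch of the same idea that waits for the DOM update and uses the element's real `scrollHeight` instead of a magic number (assumes `props` comes from `withDefaults(defineProps<Props>(), ...)` as in the component above):

import { ref, watch, nextTick } from 'vue';

// Sketch: scroll the answer list to the bottom after each update (ref/prop names from the component above).
const scroll = ref<HTMLDivElement | null>(null);
watch(
  () => props.answerData,
  async () => {
    await nextTick();                        // wait for the new items to render
    const el = scroll.value;
    if (el) el.scrollTop = el.scrollHeight;  // scrollHeight always reaches the real end
  },
  { deep: true }
);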
@@ -177,7 +177,7 @@ export class PhotoRole extends EventEmitter {
this._typingRunner = true
// The delay keeps the typing roughly in sync with the audio while in the playing state
await new Promise((resolve) => setTimeout(resolve, 2000))
await new Promise((resolve) => setTimeout(resolve, 1000))
while (this.answerArgs._typingAnswer.length) {
this.answerArgs.asyncAnswer += this.answerArgs._typingAnswer.shift()
......
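Note: the PhotoRole hunk shortens the start-up delay of the typing runner from 2000 ms to 1000 ms so the character-by-character animation begins closer to when the audio starts playing. A hedged sketch of such a runner, draining `_typingAnswer` into `asyncAnswer` one character at a time (the method name and the per-character pacing are assumptions, not taken from this diff):

private async _runTyping() {
  this._typingRunner = true
  // Short start-up delay keeps the typing roughly in sync with audio playback.
  await new Promise((resolve) => setTimeout(resolve, 1000))
  while (this.answerArgs._typingAnswer.length) {
    // Move one pending character into the visible answer.
    this.answerArgs.asyncAnswer += this.answerArgs._typingAnswer.shift()
    await new Promise((resolve) => setTimeout(resolve, 50)) // illustrative per-character delay
  }
  this._typingRunner = false
}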
@@ -126,6 +126,7 @@ async function onAsyncAnswer(ans: PhotoAnswer) {
if (microphoneState.value === 'reply' && ans.playState === 'pause' && (await checkSteps())) {
microphoneState.value = 'input'
inputContext.answerProp.value.loading = false
}
}
@@ -320,7 +321,8 @@ async function onQ(question: string) {
console.log('----------------> question: ', question)
microphoneState.value = 'loading'
inputContext.answerProp.value.question = question;
inputContext.answerProp.value.question = question.replace(/\s/g, '');
inputContext.answerProp.value.answerData.length = 0;
inputContext.answerProp.value.loading = true;
const { pose, stepResolve, stepReject } = createStep()
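Note: in `onQ`, the question is normalized by stripping all whitespace, and the answer panel is reset before the new round starts: `answerData.length = 0` clears the previous chunks in place (so any deep watcher on that same array, like the auto-scroll in AnswerBox, keeps firing), and `loading = true` shows the waiting GIF. A minimal sketch of that reset, assuming the `answerProp` shape shown in this diff:

// Sketch: reset the answer panel for a new question (shape of answerProp assumed from the diff).
function resetAnswerPanel(question: string) {
  inputContext.answerProp.value.question = question.replace(/\s/g, '') // drop all whitespace
  inputContext.answerProp.value.answerData.length = 0                  // clear in place, keep the same array reference
  inputContext.answerProp.value.loading = true                         // show the waiting GIF until the stream ends
}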
@@ -393,7 +395,6 @@ async function llmLoop(question: string) {
const audioList = results[0].audio_list as string[]
if (audioList.length === 0) continue
const isEnd = audioList.at(-1) === 'stream_end'
inputContext.answerProp.value.loading = !isEnd
if (isEnd) audioList.pop()
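Note: `llmLoop` now keeps the waiting GIF visible until the backend signals the end of the audio stream; the last entry of `audio_list` is a 'stream_end' sentinel, and it is popped before the list is processed. An annotated restatement of that check (names from the diff, comments added here):

const audioList = results[0].audio_list as string[]
if (audioList.length === 0) continue            // nothing new in this poll round
const isEnd = audioList.at(-1) === 'stream_end' // backend appends a sentinel when the stream is done
inputContext.answerProp.value.loading = !isEnd  // hide the waiting GIF only after the final chunk
if (isEnd) audioList.pop()                      // drop the sentinel so it is not treated as a real audio file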
@@ -412,8 +413,8 @@ async function llmLoop(question: string) {
}
console.log(results[0].text[index + i] + ':' + newList[i])
photoRole!.answerArgs!.answer += newList[i]
photoRole!.answerArgs!._typingAnswer.push(...newList[i].split(''))
photoRole!.answerArgs!.answer += results[0].text[index + i]
photoRole!.answerArgs!._typingAnswer.push(...results[0].text[index + i].split(''))
const path = await uploadFile({ filePath: newList[i] })
photoRole?.enQueue({
......
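Note: the last hunk fixes what gets typed out. Each iteration now appends the streamed text chunk `results[0].text[index + i]` to the answer and to the typing queue, while `newList[i]` is only the audio file that gets uploaded and queued for playback, so the audio path no longer leaks into the displayed text. A sketch of that pairing (the loop bounds and the enqueue payload are assumptions; the real fields are not shown in this diff):

for (let i = 0; i < newList.length; i++) {
  const textChunk = results[0].text[index + i]            // text shown to the user
  photoRole!.answerArgs!.answer += textChunk
  photoRole!.answerArgs!._typingAnswer.push(...textChunk.split(''))

  const path = await uploadFile({ filePath: newList[i] }) // audio is uploaded, never displayed
  photoRole?.enQueue({ /* playback task built from `path`; fields not shown in this diff */ })
}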