forked from tuvn86/webapp-conversation
feat: support streaming
This commit is contained in:
parent
cfd0c9532f
commit
a8be513d4b
4 changed files with 4 additions and 31 deletions
|
|
@@ -1,6 +1,5 @@
|
|||
import { type NextRequest } from 'next/server'
|
||||
import { client, getInfo } from '@/app/api/utils/common'
|
||||
import { OpenAIStream } from '@/app/api/utils/stream'
|
||||
|
||||
export async function POST(request: NextRequest) {
|
||||
const body = await request.json()
|
||||
|
|
@@ -12,6 +11,5 @@ export async function POST(request: NextRequest) {
|
|||
} = body
|
||||
const { user } = getInfo(request)
|
||||
const res = await client.createChatMessage(inputs, query, user, responseMode, conversationId)
|
||||
const stream = await OpenAIStream(res as any)
|
||||
return new Response(stream as any)
|
||||
return new Response(res.data as any)
|
||||
}
|
||||
|
|
|
|||
Loading…
Add table
Add a link
Reference in a new issue