From be64b3469f0bd2ab592b573370f1f3b9a8ccc201 Mon Sep 17 00:00:00 2001
From: SillyLossy
Date: Sat, 27 May 2023 21:42:28 +0300
Subject: [PATCH] Properly fallback when w.ai model doesn't support streaming

---
 public/scripts/openai.js | 5 ++++-
 1 file changed, 4 insertions(+), 1 deletion(-)

diff --git a/public/scripts/openai.js b/public/scripts/openai.js
index e62bba356..974076df0 100644
--- a/public/scripts/openai.js
+++ b/public/scripts/openai.js
@@ -610,7 +610,10 @@ async function sendOpenAIRequest(type, openai_msgs_tosend, signal) {
         try {
             if (stream) {
-                generatePromise.then(() => { finished = true; }).catch(handleWindowError);
+                generatePromise.then((res) => {
+                    content = res[0]?.message?.content;
+                    finished = true;
+                }).catch(handleWindowError);
                 return windowStreamingFunction;
             } else {
                 const result = await generatePromise;
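
Note: below is a minimal sketch, for illustration only, of how the patched
.then() handler interacts with the streaming consumer. Only the identifiers
content, finished, generatePromise, handleWindowError, and
windowStreamingFunction come from the hunk above; the generator body is an
assumption about its shape, not the project's actual implementation. When the
window.ai model does not support streaming, no incremental callbacks ever
fire, so the resolved promise is the only place the completion text appears;
copying it into content before setting finished lets the generator still
yield the full reply exactly once before it returns.

    // Assumed shape of the streaming generator that the fallback feeds.
    let content = '';
    let finished = false;

    const windowStreamingFunction = async function* () {
        while (true) {
            // Yield whatever text has arrived so far, whether it came from
            // streaming callbacks or from the non-streaming fallback.
            yield content;

            if (finished) {
                // The fallback assigns content before finished, so the
                // yield above already carried the full completion.
                return;
            }

            // Poll instead of busy-waiting while the model is generating.
            await new Promise((resolve) => setTimeout(resolve, 100));
        }
    };

A caller can then consume the generator uniformly, e.g.
for await (const text of windowStreamingFunction()) { ... },
and sees the same interface whether the text arrived incrementally or all at
once via the non-streaming fallback.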