diff --git a/public/scripts/openai.js b/public/scripts/openai.js
index 3eb1b506a..f803da07b 100644
--- a/public/scripts/openai.js
+++ b/public/scripts/openai.js
@@ -2098,7 +2098,7 @@ function getStreamingReply(data) {
     if (oai_settings.chat_completion_source === chat_completion_sources.CLAUDE) {
         return data?.delta?.text || '';
     } else if (oai_settings.chat_completion_source === chat_completion_sources.MAKERSUITE) {
-        return data?.candidates?.[0]?.content?.parts?.[0]?.text || '';
+        return data?.candidates?.[0]?.content?.parts?.map(x => x.text)?.join('\n\n') || '';
     } else if (oai_settings.chat_completion_source === chat_completion_sources.COHERE) {
         return data?.delta?.message?.content?.text || data?.delta?.message?.tool_plan || '';
     } else {
@@ -2110,7 +2110,7 @@
  * parseChatCompletionLogprobs converts the response data returned from a chat
  * completions-like source into an array of TokenLogprobs found in the response.
  * @param {Object} data - response data from a chat completions-like source
- * @returns {import('logprobs.js').TokenLogprobs[] | null} converted logprobs
+ * @returns {import('./logprobs.js').TokenLogprobs[] | null} converted logprobs
  */
 function parseChatCompletionLogprobs(data) {
     if (!data) {
@@ -2139,7 +2139,7 @@ function parseChatCompletionLogprobs(data) {
  * completion API and converts into the structure used by the Token Probabilities
  * view.
  * @param {{content: { token: string, logprob: number, top_logprobs: { token: string, logprob: number }[] }[]}} logprobs
- * @returns {import('logprobs.js').TokenLogprobs[] | null} converted logprobs
+ * @returns {import('./logprobs.js').TokenLogprobs[] | null} converted logprobs
  */
 function parseOpenAIChatLogprobs(logprobs) {
     const { content } = logprobs ?? {};
@@ -2167,7 +2167,7 @@ function parseOpenAIChatLogprobs(logprobs) {
  * completion API and converts into the structure used by the Token Probabilities
  * view.
  * @param {{tokens: string[], token_logprobs: number[], top_logprobs: { token: string, logprob: number }[][]}} logprobs
- * @returns {import('logprobs.js').TokenLogprobs[] | null} converted logprobs
+ * @returns {import('./logprobs.js').TokenLogprobs[] | null} converted logprobs
  */
 function parseOpenAITextLogprobs(logprobs) {
     const { tokens, token_logprobs, top_logprobs } = logprobs ?? {};
diff --git a/src/endpoints/backends/chat-completions.js b/src/endpoints/backends/chat-completions.js
index c8a76c85d..af7bd1816 100644
--- a/src/endpoints/backends/chat-completions.js
+++ b/src/endpoints/backends/chat-completions.js
@@ -367,15 +367,15 @@ async function sendMakerSuiteRequest(request, response) {
         }
 
         const responseContent = candidates[0].content ?? candidates[0].output;
-        const responseText = typeof responseContent === 'string' ? responseContent : responseContent?.parts?.[0]?.text;
+        console.log('Google AI Studio response:', responseContent);
+
+        const responseText = typeof responseContent === 'string' ? responseContent : responseContent?.parts?.map(part => part.text)?.join('\n\n');
         if (!responseText) {
             let message = 'Google AI Studio Candidate text empty';
             console.log(message, generateResponseJson);
             return response.send({ error: { message } });
         }
 
-        console.log('Google AI Studio response:', responseText);
-
         // Wrap it back to OAI format
         const reply = { choices: [{ 'message': { 'content': responseText } }] };
         return response.send(reply);
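
For reviewers, the functional change is the same in both files: a Google AI Studio candidate can carry more than one entry in content.parts, and the old code read only parts[0], silently dropping the rest; the new code joins the text of every part with a blank line. A minimal before/after sketch of that behavior (the payload is a mock in the shape of an AI Studio candidate; the text values are invented for the example):

    // Mock candidate shaped like a Google AI Studio response (values invented).
    const data = {
        candidates: [{
            content: {
                parts: [
                    { text: 'First part of the reply.' },
                    { text: 'Second part of the reply.' },
                ],
            },
        }],
    };

    // Old behavior: only the first part survives.
    const before = data?.candidates?.[0]?.content?.parts?.[0]?.text || '';
    // => 'First part of the reply.'

    // New behavior: every part is kept, separated by a blank line.
    const after = data?.candidates?.[0]?.content?.parts?.map(x => x.text)?.join('\n\n') || '';
    // => 'First part of the reply.\n\nSecond part of the reply.'

The '\n\n' separator renders each part as its own paragraph in the assembled reply, both in the streaming path (getStreamingReply) and in the non-streaming server path (sendMakerSuiteRequest).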