Gemini: Fix cross-chunk parsing of multipart replies

This commit is contained in:
Cohee
2024-12-27 23:15:09 +02:00
parent 77f5f00151
commit 39cfb35c1a
2 changed files with 7 additions and 7 deletions

View File

@ -2098,7 +2098,7 @@ function getStreamingReply(data) {
if (oai_settings.chat_completion_source === chat_completion_sources.CLAUDE) {
return data?.delta?.text || '';
} else if (oai_settings.chat_completion_source === chat_completion_sources.MAKERSUITE) {
-return data?.candidates?.[0]?.content?.parts?.[0]?.text || '';
+return data?.candidates?.[0]?.content?.parts?.map(x => x.text)?.join('\n\n') || '';
} else if (oai_settings.chat_completion_source === chat_completion_sources.COHERE) {
return data?.delta?.message?.content?.text || data?.delta?.message?.tool_plan || '';
} else {
@ -2110,7 +2110,7 @@ function getStreamingReply(data) {
* parseChatCompletionLogprobs converts the response data returned from a chat
* completions-like source into an array of TokenLogprobs found in the response.
* @param {Object} data - response data from a chat completions-like source
- * @returns {import('logprobs.js').TokenLogprobs[] | null} converted logprobs
+ * @returns {import('./logprobs.js').TokenLogprobs[] | null} converted logprobs
*/
function parseChatCompletionLogprobs(data) {
if (!data) {
@ -2139,7 +2139,7 @@ function parseChatCompletionLogprobs(data) {
* completion API and converts into the structure used by the Token Probabilities
* view.
* @param {{content: { token: string, logprob: number, top_logprobs: { token: string, logprob: number }[] }[]}} logprobs
- * @returns {import('logprobs.js').TokenLogprobs[] | null} converted logprobs
+ * @returns {import('./logprobs.js').TokenLogprobs[] | null} converted logprobs
*/
function parseOpenAIChatLogprobs(logprobs) {
const { content } = logprobs ?? {};
@ -2167,7 +2167,7 @@ function parseOpenAIChatLogprobs(logprobs) {
* completion API and converts into the structure used by the Token Probabilities
* view.
* @param {{tokens: string[], token_logprobs: number[], top_logprobs: { token: string, logprob: number }[][]}} logprobs
- * @returns {import('logprobs.js').TokenLogprobs[] | null} converted logprobs
+ * @returns {import('./logprobs.js').TokenLogprobs[] | null} converted logprobs
*/
function parseOpenAITextLogprobs(logprobs) {
const { tokens, token_logprobs, top_logprobs } = logprobs ?? {};