From 3412d1ce1f3b030c148ec4c7741e2856afbef17b Mon Sep 17 00:00:00 2001
From: qvink
Date: Mon, 17 Mar 2025 20:44:32 -0600
Subject: [PATCH 1/5] adding option to cleanUpMessage() to not include the user prompt bias, and removing the user prompt bias from generateRaw() results.

---
 public/script.js | 6 ++++--
 1 file changed, 4 insertions(+), 2 deletions(-)

diff --git a/public/script.js b/public/script.js
index e50621cf4..023d867cd 100644
--- a/public/script.js
+++ b/public/script.js
@@ -3611,7 +3611,8 @@ export async function generateRaw(prompt, api, instructOverride, quietToLoud, sy
         throw new Error(data.response);
     }
 
-    const message = cleanUpMessage(extractMessageFromData(data), false, false, true);
+    // format result, exclude user prompt bias
+    const message = cleanUpMessage(extractMessageFromData(data), false, false, true, null, false);
     if (!message) {
         throw new Error('No message generated');
     }
@@ -5883,13 +5884,14 @@ function extractMultiSwipes(data, type) {
     return swipes;
 }
 
-export function cleanUpMessage(getMessage, isImpersonate, isContinue, displayIncompleteSentences = false, stoppingStrings = null) {
+export function cleanUpMessage(getMessage, isImpersonate, isContinue, displayIncompleteSentences = false, stoppingStrings = null, include_user_prompt_bias=true) {
     if (!getMessage) {
         return '';
     }
 
     // Add the prompt bias before anything else
     if (
+        include_user_prompt_bias &&
         power_user.user_prompt_bias &&
         !isImpersonate &&
         !isContinue &&

From eb52872b134f10bae6cb89d1f72f4bcb360068e2 Mon Sep 17 00:00:00 2001
From: GhostXia <33112711+GhostXia@users.noreply.github.com>
Date: Tue, 18 Mar 2025 17:19:22 +0800
Subject: [PATCH 2/5] change 01ai endpoint

---
 src/endpoints/backends/chat-completions.js | 2 +-
 src/endpoints/openai.js                    | 2 +-
 2 files changed, 2 insertions(+), 2 deletions(-)

diff --git a/src/endpoints/backends/chat-completions.js b/src/endpoints/backends/chat-completions.js
index 9cd784a0c..4d2eee774 100644
--- a/src/endpoints/backends/chat-completions.js
+++ b/src/endpoints/backends/chat-completions.js
@@ -49,7 +49,7 @@ const API_COHERE_V2 = 'https://api.cohere.ai/v2';
 const API_PERPLEXITY = 'https://api.perplexity.ai';
 const API_GROQ = 'https://api.groq.com/openai/v1';
 const API_MAKERSUITE = 'https://generativelanguage.googleapis.com';
-const API_01AI = 'https://api.01.ai/v1';
+const API_01AI = 'https://api.lingyiwanwu.com/v1';
 const API_BLOCKENTROPY = 'https://api.blockentropy.ai/v1';
 const API_AI21 = 'https://api.ai21.com/studio/v1';
 const API_NANOGPT = 'https://nano-gpt.com/api/v1';

diff --git a/src/endpoints/openai.js b/src/endpoints/openai.js
index 14ee69fb8..c8f1701ce 100644
--- a/src/endpoints/openai.js
+++ b/src/endpoints/openai.js
@@ -116,7 +116,7 @@ router.post('/caption-image', async (request, response) => {
     }
 
     if (request.body.api === 'zerooneai') {
-        apiUrl = 'https://api.01.ai/v1/chat/completions';
+        apiUrl = 'https://api.lingyiwanwu.com/v1/chat/completions';
     }
 
     if (request.body.api === 'groq') {

From e2eec77a1930ec1b8a30b18358141861a2ee5fd9 Mon Sep 17 00:00:00 2001
From: Cohee <18619528+Cohee1207@users.noreply.github.com>
Date: Tue, 18 Mar 2025 19:00:01 +0200
Subject: [PATCH 3/5] Update HTML link

---
 public/index.html | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/public/index.html b/public/index.html
index a122b0082..59bcca949 100644
--- a/public/index.html
+++ b/public/index.html
@@ -3417,7 +3417,7 @@
 [hunk body not recoverable: the HTML markup was stripped during extraction; the changed line updates the "01.AI API Key" link in public/index.html]
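Context for PATCH 1/5 (the flag is renamed in PATCH 4/5 below): the new trailing argument lets callers opt out of prepending power_user.user_prompt_bias to the cleaned message. A minimal sketch of the call-site difference follows; the variable names are illustrative only, the cleanUpMessage() arguments are taken from the patch itself.

// Illustrative only: omitting the new flag keeps the old behaviour (bias is prepended).
const chatText = cleanUpMessage(extractMessageFromData(data), isImpersonate, isContinue);

// generateRaw() now passes false for the new trailing flag, so the user prompt bias
// is no longer attached to raw generation results.
const rawText = cleanUpMessage(extractMessageFromData(data), false, false, true, null, false);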
From 6e4bd00ef88e799d74aa5fa9c751fa88939327a3 Mon Sep 17 00:00:00 2001
From: Cohee <18619528+Cohee1207@users.noreply.github.com>
Date: Tue, 18 Mar 2025 19:09:57 +0200
Subject: [PATCH 4/5] Update parameter naming

---
 public/script.js | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/public/script.js b/public/script.js
index 023d867cd..35d09ce13 100644
--- a/public/script.js
+++ b/public/script.js
@@ -5884,14 +5884,14 @@ function extractMultiSwipes(data, type) {
     return swipes;
 }
 
-export function cleanUpMessage(getMessage, isImpersonate, isContinue, displayIncompleteSentences = false, stoppingStrings = null, include_user_prompt_bias=true) {
+export function cleanUpMessage(getMessage, isImpersonate, isContinue, displayIncompleteSentences = false, stoppingStrings = null, includeUserPromptBias = true) {
     if (!getMessage) {
         return '';
     }
 
     // Add the prompt bias before anything else
     if (
-        include_user_prompt_bias &&
+        includeUserPromptBias &&
         power_user.user_prompt_bias &&
         !isImpersonate &&
         !isContinue &&

From b6c1c9a40dbe7fce1e2ce37ee44f261de3086002 Mon Sep 17 00:00:00 2001
From: Cohee <18619528+Cohee1207@users.noreply.github.com>
Date: Tue, 18 Mar 2025 19:53:02 +0200
Subject: [PATCH 5/5] MistralAI: Add new models

---
 public/index.html                               | 8 ++++++++
 public/scripts/extensions/caption/settings.html | 3 +++
 public/scripts/openai.js                        | 4 ++--
 3 files changed, 13 insertions(+), 2 deletions(-)

diff --git a/public/index.html b/public/index.html
index 59bcca949..ec0e088a3 100644
--- a/public/index.html
+++ b/public/index.html
@@ -3193,6 +3193,7 @@
 [hunk body not recoverable: HTML stripped during extraction; adds one new MistralAI model entry]
@@ -3208,13 +3209,20 @@
 [hunk body not recoverable: HTML stripped during extraction; adds seven new MistralAI model entries]

diff --git a/public/scripts/extensions/caption/settings.html b/public/scripts/extensions/caption/settings.html
index 95e53304b..457ac13b5 100644
--- a/public/scripts/extensions/caption/settings.html
+++ b/public/scripts/extensions/caption/settings.html
@@ -42,6 +42,9 @@
 [hunk body not recoverable: HTML stripped during extraction; adds three new MistralAI model entries to the captioning settings]

diff --git a/public/scripts/openai.js b/public/scripts/openai.js
index 3adf18615..80d1e9ed7 100644
--- a/public/scripts/openai.js
+++ b/public/scripts/openai.js
@@ -4437,9 +4437,9 @@ async function onModelChange() {
     if (oai_settings.chat_completion_source === chat_completion_sources.MISTRALAI) {
         if (oai_settings.max_context_unlocked) {
             $('#openai_max_context').attr('max', unlocked_max);
-        } else if (oai_settings.mistralai_model.includes('codestral-mamba')) {
+        } else if (['codestral-latest', 'codestral-mamba-2407', 'codestral-2411-rc5', 'codestral-2412', 'codestral-2501'].includes(oai_settings.mistralai_model)) {
             $('#openai_max_context').attr('max', max_256k);
-        } else if (['mistral-large-2407', 'mistral-large-2411', 'mistral-large-latest'].includes(oai_settings.mistralai_model)) {
+        } else if (['mistral-large-2407', 'mistral-large-2411', 'mistral-large-pixtral-2411', 'mistral-large-latest'].includes(oai_settings.mistralai_model)) {
             $('#openai_max_context').attr('max', max_128k);
         } else if (oai_settings.mistralai_model.includes('mistral-nemo')) {
             $('#openai_max_context').attr('max', max_128k);
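Context for PATCH 5/5: the openai.js change amounts to a MistralAI model-name to maximum-context mapping inside onModelChange(). A condensed restatement as a standalone sketch; the helper name and the literal token counts are illustrative only, since the real code uses SillyTavern's existing unlocked_max, max_256k, and max_128k constants and has further branches not shown in the hunk.

// Illustrative helper, not part of the patch: summarizes the updated MistralAI branch.
function mistralMaxContext(model, maxContextUnlocked) {
    const max_256k = 256 * 1024; // stand-in values for readability
    const max_128k = 128 * 1024;
    if (maxContextUnlocked) return Number.MAX_SAFE_INTEGER; // stands in for unlocked_max
    if (['codestral-latest', 'codestral-mamba-2407', 'codestral-2411-rc5', 'codestral-2412', 'codestral-2501'].includes(model)) return max_256k;
    if (['mistral-large-2407', 'mistral-large-2411', 'mistral-large-pixtral-2411', 'mistral-large-latest'].includes(model)) return max_128k;
    if (model.includes('mistral-nemo')) return max_128k;
    return max_128k; // fallback here only; the real function continues with branches outside this hunk
}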