diff --git a/public/index.html b/public/index.html
index 70bebe18a..95fb3884b 100644
--- a/public/index.html
+++ b/public/index.html
@@ -617,7 +617,7 @@
complete.
-
+
Top P
@@ -1710,7 +1710,7 @@
-
+
@@ -1904,6 +1904,7 @@
+
+
+
diff --git a/public/script.js b/public/script.js
index 94ecdd3b1..3bb0c09b6 100644
--- a/public/script.js
+++ b/public/script.js
@@ -2049,7 +2049,8 @@ function baseChatReplace(value, name1, name2) {
}
function isStreamingEnabled() {
- return ((main_api == 'openai' && oai_settings.stream_openai && oai_settings.chat_completion_source !== chat_completion_sources.SCALE && oai_settings.chat_completion_source !== chat_completion_sources.AI21)
+ const noStreamSources = [chat_completion_sources.SCALE, chat_completion_sources.AI21, chat_completion_sources.PALM];
+ return ((main_api == 'openai' && oai_settings.stream_openai && !noStreamSources.includes(oai_settings.chat_completion_source))
|| (main_api == 'kobold' && kai_settings.streaming_kobold && kai_flags.can_use_streaming)
|| (main_api == 'novel' && nai_settings.streaming_novel)
|| (main_api == 'textgenerationwebui' && textgenerationwebui_settings.streaming));
@@ -4633,6 +4634,7 @@ function changeMainAPI() {
case chat_completion_sources.CLAUDE:
case chat_completion_sources.OPENAI:
case chat_completion_sources.AI21:
+ case chat_completion_sources.PALM:
default:
setupChatCompletionPromptManager(oai_settings);
break;
@@ -6762,7 +6764,12 @@ function connectAPISlash(_, text) {
selected: 'openai',
source: 'ai21',
button: '#api_button_openai',
- }
+ },
+ 'palm': {
+ selected: 'openai',
+ source: 'palm',
+ button: '#api_button_openai',
+ },
};
const apiConfig = apiMap[text];
@@ -6995,7 +7002,7 @@ jQuery(async function () {
}
registerSlashCommand('dupe', DupeChar, [], "– duplicates the currently selected character", true, true);
- registerSlashCommand('api', connectAPISlash, [], "(kobold, horde, novel, ooba, oai, claude, windowai, ai21) – connect to an API", true, true);
+ registerSlashCommand('api', connectAPISlash, [], "(kobold, horde, novel, ooba, oai, claude, windowai, ai21, palm) – connect to an API", true, true);
registerSlashCommand('impersonate', doImpersonate, ['imp'], "- calls an impersonation response", true, true);
registerSlashCommand('delchat', doDeleteChat, [], "- deletes the current chat", true, true);
registerSlashCommand('closechat', doCloseChat, [], "- closes the current chat", true, true);
diff --git a/public/scripts/RossAscends-mods.js b/public/scripts/RossAscends-mods.js
index f5492e17b..2380bd094 100644
--- a/public/scripts/RossAscends-mods.js
+++ b/public/scripts/RossAscends-mods.js
@@ -415,6 +415,7 @@ function RA_autoconnect(PrevApi) {
|| (oai_settings.chat_completion_source == chat_completion_sources.WINDOWAI)
|| (secret_state[SECRET_KEYS.OPENROUTER] && oai_settings.chat_completion_source == chat_completion_sources.OPENROUTER)
|| (secret_state[SECRET_KEYS.AI21] && oai_settings.chat_completion_source == chat_completion_sources.AI21)
+ || (secret_state[SECRET_KEYS.PALM] && oai_settings.chat_completion_source == chat_completion_sources.PALM)
) {
$("#api_button_openai").click();
}
diff --git a/public/scripts/openai.js b/public/scripts/openai.js
index 34faf9aac..5f29055c8 100644
--- a/public/scripts/openai.js
+++ b/public/scripts/openai.js
@@ -161,6 +161,7 @@ export const chat_completion_sources = {
SCALE: 'scale',
OPENROUTER: 'openrouter',
AI21: 'ai21',
+ PALM: 'palm',
};
const prefixMap = selected_group ? {
@@ -381,7 +382,7 @@ function setupChatCompletionPromptManager(openAiSettings) {
promptManager.tryGenerate = () => {
if (characters[this_chid]) {
return Generate('normal', {}, true);
- } else{
+ } else {
return Promise.resolve();
}
}
@@ -671,7 +672,7 @@ function populateChatCompletion(prompts, chatCompletion, { bias, quietPrompt, ty
// Authors Note
if (prompts.has('authorsNote')) {
- const authorsNote = prompts.get('authorsNote') ;
+ const authorsNote = prompts.get('authorsNote');
if (authorsNote.position) {
chatCompletion.insert(Message.fromPrompt(authorsNote), 'main', authorsNote.position);
@@ -726,7 +727,7 @@ function populateChatCompletion(prompts, chatCompletion, { bias, quietPrompt, ty
* @param {string} personaDescription
* @returns {Object} prompts - The prepared and merged system and user-defined prompts.
*/
-function preparePromptsForChatCompletion({Scenario, charPersonality, name2, worldInfoBefore, worldInfoAfter, charDescription, quietPrompt, bias, extensionPrompts, systemPromptOverride, jailbreakPromptOverride, personaDescription} = {}) {
+function preparePromptsForChatCompletion({ Scenario, charPersonality, name2, worldInfoBefore, worldInfoAfter, charDescription, quietPrompt, bias, extensionPrompts, systemPromptOverride, jailbreakPromptOverride, personaDescription } = {}) {
const scenarioText = Scenario ? `[Circumstances and context of the dialogue: ${Scenario}]` : '';
const charPersonalityText = charPersonality ? `[${name2}'s personality: ${charPersonality}]` : ''
const groupNudge = `[Write the next reply only as ${name2}]`;
@@ -882,7 +883,8 @@ function prepareOpenAIMessages({
extensionPrompts,
systemPromptOverride,
jailbreakPromptOverride,
- personaDescription});
+ personaDescription
+ });
// Fill the chat completion with as much context as the budget allows
populateChatCompletion(prompts, chatCompletion, { bias, quietPrompt, type, cyclePrompt });
@@ -1055,6 +1057,8 @@ function getChatCompletionModel() {
return oai_settings.windowai_model;
case chat_completion_sources.SCALE:
return '';
+ case chat_completion_sources.PALM:
+ return '';
case chat_completion_sources.OPENROUTER:
return oai_settings.openrouter_model !== openrouter_website_model ? oai_settings.openrouter_model : null;
case chat_completion_sources.AI21:
@@ -1182,16 +1186,18 @@ async function sendOpenAIRequest(type, openai_msgs_tosend, signal) {
const isOpenRouter = oai_settings.chat_completion_source == chat_completion_sources.OPENROUTER;
const isScale = oai_settings.chat_completion_source == chat_completion_sources.SCALE;
const isAI21 = oai_settings.chat_completion_source == chat_completion_sources.AI21;
+ const isPalm = oai_settings.chat_completion_source == chat_completion_sources.PALM;
const isTextCompletion = oai_settings.chat_completion_source == chat_completion_sources.OPENAI && textCompletionModels.includes(oai_settings.openai_model);
const isQuiet = type === 'quiet';
+ const isImpersonate = type === 'impersonate';
const stream = oai_settings.stream_openai && !isQuiet && !isScale && !isAI21;
- if (isAI21) {
+ if (isAI21 || isPalm) {
const joinedMsgs = openai_msgs_tosend.reduce((acc, obj) => {
const prefix = prefixMap[obj.role];
return acc + (prefix ? (selected_group ? "\n" : prefix + " ") : "") + obj.content + "\n";
}, "");
- openai_msgs_tosend = substituteParams(joinedMsgs);
+ openai_msgs_tosend = substituteParams(joinedMsgs) + (isImpersonate ? `${name1}:` : `${name2}:`);
}
// If we're using the window.ai extension, use that instead
@@ -1256,6 +1262,13 @@ async function sendOpenAIRequest(type, openai_msgs_tosend, signal) {
generate_data['api_url_scale'] = oai_settings.api_url_scale;
}
+ if (isPalm) {
+ const nameStopString = isImpersonate ? `\n${name2}:` : `\n${name1}:`;
+ generate_data['use_palm'] = true;
+ generate_data['top_k'] = Number(oai_settings.top_k_openai);
+ generate_data['stop'] = [nameStopString, oai_settings.new_chat_prompt, ...getCustomStoppingStrings()];
+ }
+
if (isAI21) {
generate_data['use_ai21'] = true;
generate_data['top_k'] = Number(oai_settings.top_k_openai);
@@ -2052,7 +2065,8 @@ async function getStatusOpen() {
return resultCheckStatusOpen();
}
- if (oai_settings.chat_completion_source == chat_completion_sources.SCALE || oai_settings.chat_completion_source == chat_completion_sources.CLAUDE || oai_settings.chat_completion_source == chat_completion_sources.AI21) {
+ const noValidateSources = [chat_completion_sources.SCALE, chat_completion_sources.CLAUDE, chat_completion_sources.AI21, chat_completion_sources.PALM];
+ if (noValidateSources.includes(oai_settings.chat_completion_source)) {
let status = 'Unable to verify key; press "Test Message" to validate.';
setOnlineStatus(status);
return resultCheckStatusOpen();
@@ -2676,6 +2690,17 @@ async function onModelChange() {
$('#openai_max_context').val(oai_settings.openai_max_context).trigger('input');
}
+ if (oai_settings.chat_completion_source == chat_completion_sources.PALM) {
+ if (oai_settings.max_context_unlocked) {
+ $('#openai_max_context').attr('max', unlocked_max);
+ } else {
+ $('#openai_max_context').attr('max', palm2_max);
+ }
+
+ oai_settings.openai_max_context = Math.min(Number($('#openai_max_context').attr('max')), oai_settings.openai_max_context);
+ $('#openai_max_context').val(oai_settings.openai_max_context).trigger('input');
+ }
+
if (oai_settings.chat_completion_source == chat_completion_sources.OPENROUTER) {
if (oai_settings.max_context_unlocked) {
$('#openai_max_context').attr('max', unlocked_max);
@@ -2856,7 +2881,19 @@ async function onConnectButtonClick(e) {
console.log("No cookie set for Scale");
return;
}
+ }
+ if (oai_settings.chat_completion_source == chat_completion_sources.PALM) {
+ const api_key_palm = String($('#api_key_palm').val()).trim();
+
+ if (api_key_palm.length) {
+ await writeSecret(SECRET_KEYS.PALM, api_key_palm);
+ }
+
+ if (!secret_state[SECRET_KEYS.PALM]) {
+ console.log('No secret key saved for PALM');
+ return;
+ }
}
if (oai_settings.chat_completion_source == chat_completion_sources.CLAUDE) {
@@ -2924,6 +2961,9 @@ function toggleChatCompletionForms() {
else if (oai_settings.chat_completion_source == chat_completion_sources.SCALE) {
$('#model_scale_select').trigger('change');
}
+ else if (oai_settings.chat_completion_source == chat_completion_sources.PALM) {
+ $('#model_palm_select').trigger('change');
+ }
else if (oai_settings.chat_completion_source == chat_completion_sources.OPENROUTER) {
$('#model_openrouter_select').trigger('change');
}
@@ -3226,6 +3266,7 @@ $(document).ready(async function () {
$("#model_claude_select").on("change", onModelChange);
$("#model_windowai_select").on("change", onModelChange);
$("#model_scale_select").on("change", onModelChange);
+ $("#model_palm_select").on("change", onModelChange);
$("#model_openrouter_select").on("change", onModelChange);
$("#model_ai21_select").on("change", onModelChange);
$("#settings_perset_openai").on("change", onSettingsPresetChange);
diff --git a/public/scripts/secrets.js b/public/scripts/secrets.js
index c29cd2067..4408ba4ec 100644
--- a/public/scripts/secrets.js
+++ b/public/scripts/secrets.js
@@ -10,6 +10,7 @@ export const SECRET_KEYS = {
SCALE: 'api_key_scale',
AI21: 'api_key_ai21',
SCALE_COOKIE: 'scale_cookie',
+ PALM: 'api_key_palm',
}
const INPUT_MAP = {
@@ -22,6 +23,7 @@ const INPUT_MAP = {
[SECRET_KEYS.SCALE]: '#api_key_scale',
[SECRET_KEYS.AI21]: '#api_key_ai21',
[SECRET_KEYS.SCALE_COOKIE]: '#scale_cookie',
+ [SECRET_KEYS.PALM]: '#api_key_palm',
}
async function clearSecret() {
diff --git a/server.js b/server.js
index ef4271843..cac751a54 100644
--- a/server.js
+++ b/server.js
@@ -182,7 +182,7 @@ const AVATAR_WIDTH = 400;
const AVATAR_HEIGHT = 600;
const jsonParser = express.json({ limit: '100mb' });
const urlencodedParser = express.urlencoded({ extended: true, limit: '100mb' });
-const { DIRECTORIES, UPLOADS_PATH } = require('./src/constants');
+const { DIRECTORIES, UPLOADS_PATH, PALM_SAFETY } = require('./src/constants');
// CSRF Protection //
if (cliArguments.disableCsrf === false) {
@@ -1299,11 +1299,11 @@ app.post("/getstats", jsonParser, function (request, response) {
/**
* Endpoint: POST /recreatestats
- *
+ *
* Triggers the recreation of statistics from chat files.
* - If successful: returns a 200 OK status.
* - On failure: returns a 500 Internal Server Error status.
- *
+ *
* @param {Object} request - Express request object.
* @param {Object} response - Express response object.
*/
@@ -2926,6 +2926,69 @@ async function sendClaudeRequest(request, response) {
}
}
+/**
+ * @param {express.Request} request
+ * @param {express.Response} response
+ */
+async function sendPalmRequest(request, response) {
+ const api_key_palm = readSecret(SECRET_KEYS.PALM);
+
+ if (!api_key_palm) {
+ return response.status(401).send({ error: true });
+ }
+
+ const body = {
+ prompt: {
+ text: request.body.messages,
+ },
+ stopSequences: request.body.stop,
+ safetySettings: PALM_SAFETY,
+ temperature: request.body.temperature,
+ topP: request.body.top_p,
+ topK: request.body.top_k || undefined,
+ maxOutputTokens: request.body.max_tokens,
+        candidateCount: 1,
+ };
+
+ console.log('Palm request:', body);
+
+ try {
+ const controller = new AbortController();
+ request.socket.removeAllListeners('close');
+ request.socket.on('close', function () {
+ controller.abort();
+ });
+
+ const generateResponse = await fetch(`https://generativelanguage.googleapis.com/v1beta2/models/text-bison-001:generateText?key=${api_key_palm}`, {
+ body: JSON.stringify(body),
+ method: "POST",
+ headers: {
+ "Content-Type": "application/json"
+ },
+ signal: controller.signal,
+ timeout: 0,
+ });
+
+ if (!generateResponse.ok) {
+ console.log(`Palm API returned error: ${generateResponse.status} ${generateResponse.statusText} ${await generateResponse.text()}`);
+ return response.status(generateResponse.status).send({ error: true });
+ }
+
+ const generateResponseJson = await generateResponse.json();
+        const responseText = generateResponseJson?.candidates?.[0]?.output;
+ console.log('Palm response:', responseText);
+
+ // Wrap it back to OAI format
+ const reply = { choices: [{ "message": { "content": responseText, } }] };
+ return response.send(reply);
+ } catch (error) {
+ console.log('Error communicating with Palm API: ', error);
+ if (!response.headersSent) {
+ return response.status(500).send({ error: true });
+ }
+ }
+}
+
app.post("/generate_openai", jsonParser, function (request, response_generate_openai) {
if (!request.body) return response_generate_openai.status(400).send({ error: true });
@@ -2941,6 +3004,10 @@ app.post("/generate_openai", jsonParser, function (request, response_generate_op
return sendAI21Request(request, response_generate_openai);
}
+ if (request.body.use_palm) {
+ return sendPalmRequest(request, response_generate_openai);
+ }
+
let api_url;
let api_key_openai;
let headers;
diff --git a/src/constants.js b/src/constants.js
index 4b93c207e..142cebde2 100644
--- a/src/constants.js
+++ b/src/constants.js
@@ -102,10 +102,42 @@ const UNSAFE_EXTENSIONS = [
".ws",
];
+const PALM_SAFETY = [
+ {
+ category: "HARM_CATEGORY_UNSPECIFIED",
+ threshold: "BLOCK_NONE"
+ },
+ {
+ category: "HARM_CATEGORY_DEROGATORY",
+ threshold: "BLOCK_NONE"
+ },
+ {
+ category: "HARM_CATEGORY_TOXICITY",
+ threshold: "BLOCK_NONE"
+ },
+ {
+ category: "HARM_CATEGORY_VIOLENCE",
+ threshold: "BLOCK_NONE"
+ },
+ {
+ category: "HARM_CATEGORY_SEXUAL",
+ threshold: "BLOCK_NONE"
+ },
+ {
+ category: "HARM_CATEGORY_MEDICAL",
+ threshold: "BLOCK_NONE"
+ },
+ {
+ category: "HARM_CATEGORY_DANGEROUS",
+ threshold: "BLOCK_NONE"
+ }
+];
+
const UPLOADS_PATH = './uploads';
module.exports = {
DIRECTORIES,
UNSAFE_EXTENSIONS,
UPLOADS_PATH,
+ PALM_SAFETY,
}
diff --git a/src/secrets.js b/src/secrets.js
index a422f36a7..6b4fcdf21 100644
--- a/src/secrets.js
+++ b/src/secrets.js
@@ -19,6 +19,7 @@ const SECRET_KEYS = {
SCALE_COOKIE: 'scale_cookie',
ONERING_URL: 'oneringtranslator_url',
DEEPLX_URL: 'deeplx_url',
+ PALM: 'api_key_palm',
}
/**