(WIP) PaLM API

This commit is contained in:
Cohee 2023-09-23 20:48:56 +03:00
parent 5275e0c904
commit a081f78bd8
8 changed files with 182 additions and 17 deletions

View File

@ -617,7 +617,7 @@
complete.</span> complete.</span>
</div> </div>
</div> </div>
<div class="range-block" data-source="openai,claude,windowai,openrouter,ai21,scale"> <div class="range-block" data-source="openai,claude,windowai,openrouter,ai21,scale,palm">
<div class="range-block-title" data-i18n="Temperature"> <div class="range-block-title" data-i18n="Temperature">
Temperature Temperature
</div> </div>
@ -677,7 +677,7 @@
</div> </div>
</div> </div>
</div> </div>
<div data-newbie-hidden class="range-block" data-source="claude,openrouter,ai21"> <div data-newbie-hidden class="range-block" data-source="claude,openrouter,ai21,palm">
<div class="range-block-title" data-i18n="Top K"> <div class="range-block-title" data-i18n="Top K">
Top K Top K
</div> </div>
@ -692,7 +692,7 @@
</div> </div>
</div> </div>
</div> </div>
<div data-newbie-hidden class="range-block" data-source="openai,claude,openrouter,ai21,scale"> <div data-newbie-hidden class="range-block" data-source="openai,claude,openrouter,ai21,scale,palm">
<div class="range-block-title" data-i18n="Top-p"> <div class="range-block-title" data-i18n="Top-p">
Top P Top P
</div> </div>
@ -1710,7 +1710,7 @@
<option value="koboldhorde"><span data-i18n="KoboldAI Horde">KoboldAI Horde</span></option> <option value="koboldhorde"><span data-i18n="KoboldAI Horde">KoboldAI Horde</span></option>
<option value="textgenerationwebui"><span data-i18n="Text Gen WebUI (ooba/Mancer)">Text Gen WebUI (ooba/Mancer)</span></option> <option value="textgenerationwebui"><span data-i18n="Text Gen WebUI (ooba/Mancer)">Text Gen WebUI (ooba/Mancer)</span></option>
<option value="novel"><span data-i18n="NovelAI">NovelAI</span></option> <option value="novel"><span data-i18n="NovelAI">NovelAI</span></option>
<option value="openai"><span data-i18n="Chat Completion (OpenAI, Claude, Window/OpenRouter, Scale, AI21)">Chat Completion (OpenAI, Claude, Window, OpenRouter, Scale, AI21)</span></option> <option value="openai"><span data-i18n="Chat Completion (OpenAI, Claude, Window/OpenRouter, Scale, AI21)">Chat Completion (OpenAI, Claude, Window, OpenRouter, Scale, AI21, PaLM)</span></option>
</select> </select>
</div> </div>
<div id="kobold_horde" style="position: relative;"> <!-- shows the kobold settings --> <div id="kobold_horde" style="position: relative;"> <!-- shows the kobold settings -->
@ -1904,6 +1904,7 @@
<option value="claude">Claude</option> <option value="claude">Claude</option>
<option value="scale">Scale</option> <option value="scale">Scale</option>
<option value="ai21">AI21</option> <option value="ai21">AI21</option>
<option value="palm">Google PaLM 2</option>
</select> </select>
<form id="openai_form" data-source="openai" action="javascript:void(null);" method="post" enctype="multipart/form-data"> <form id="openai_form" data-source="openai" action="javascript:void(null);" method="post" enctype="multipart/form-data">
<h4><span data-i18n="OpenAI API key">OpenAI API key</span></h4> <h4><span data-i18n="OpenAI API key">OpenAI API key</span></h4>
@ -2124,6 +2125,19 @@
</div> </div>
</form> </form>
<!-- Connection settings shown when the "palm" chat completion source is selected -->
<form id="palm_form" data-source="palm" action="javascript:void(null);" method="post" enctype="multipart/form-data">
<h4 data-i18n="PaLM API Key">PaLM API Key</h4>
<div class="flex-container">
<input id="api_key_palm" name="api_key_palm" class="text_pole flex1" maxlength="500" value="" type="text" autocomplete="off">
<div title="Clear your API key" data-i18n="[title]Clear your API key" class="menu_button fa-solid fa-circle-xmark clear-api-key" data-key="api_key_palm"></div>
</div>
<div data-for="api_key_palm" class="neutral_warning">
For privacy reasons, your API key will be hidden after you reload the page.
</div>
<!-- Its only purpose is to trigger max context size check -->
<!-- PaLM exposes no model picker; this hidden select exists so the shared onModelChange handler fires for this source too -->
<select id="model_palm_select" class="displayNone"></select>
</form>
<div class="flex-container flex"> <div class="flex-container flex">
<div id="api_button_openai" class="menu_button menu_button_icon" type="submit" data-i18n="Connect">Connect</div> <div id="api_button_openai" class="menu_button menu_button_icon" type="submit" data-i18n="Connect">Connect</div>
<div data-source="openrouter" id="openrouter_authorize" class="menu_button menu_button_icon" title="Get your OpenRouter API token using OAuth flow. You will be redirected to openrouter.ai" data-i18n="[title]Get your OpenRouter API token using OAuth flow. You will be redirected to openrouter.ai">Authorize</div> <div data-source="openrouter" id="openrouter_authorize" class="menu_button menu_button_icon" title="Get your OpenRouter API token using OAuth flow. You will be redirected to openrouter.ai" data-i18n="[title]Get your OpenRouter API token using OAuth flow. You will be redirected to openrouter.ai">Authorize</div>

View File

@ -2049,7 +2049,8 @@ function baseChatReplace(value, name1, name2) {
} }
function isStreamingEnabled() { function isStreamingEnabled() {
return ((main_api == 'openai' && oai_settings.stream_openai && oai_settings.chat_completion_source !== chat_completion_sources.SCALE && oai_settings.chat_completion_source !== chat_completion_sources.AI21) const noStreamSources = [chat_completion_sources.SCALE, chat_completion_sources.AI21, chat_completion_sources.PALM];
return ((main_api == 'openai' && oai_settings.stream_openai && !noStreamSources.includes(oai_settings.chat_completion_source))
|| (main_api == 'kobold' && kai_settings.streaming_kobold && kai_flags.can_use_streaming) || (main_api == 'kobold' && kai_settings.streaming_kobold && kai_flags.can_use_streaming)
|| (main_api == 'novel' && nai_settings.streaming_novel) || (main_api == 'novel' && nai_settings.streaming_novel)
|| (main_api == 'textgenerationwebui' && textgenerationwebui_settings.streaming)); || (main_api == 'textgenerationwebui' && textgenerationwebui_settings.streaming));
@ -4633,6 +4634,7 @@ function changeMainAPI() {
case chat_completion_sources.CLAUDE: case chat_completion_sources.CLAUDE:
case chat_completion_sources.OPENAI: case chat_completion_sources.OPENAI:
case chat_completion_sources.AI21: case chat_completion_sources.AI21:
case chat_completion_sources.PALM:
default: default:
setupChatCompletionPromptManager(oai_settings); setupChatCompletionPromptManager(oai_settings);
break; break;
@ -6762,7 +6764,12 @@ function connectAPISlash(_, text) {
selected: 'openai', selected: 'openai',
source: 'ai21', source: 'ai21',
button: '#api_button_openai', button: '#api_button_openai',
} },
'palm': {
selected: 'openai',
source: 'palm',
button: '#api_button_openai',
},
}; };
const apiConfig = apiMap[text]; const apiConfig = apiMap[text];
@ -6995,7 +7002,7 @@ jQuery(async function () {
} }
registerSlashCommand('dupe', DupeChar, [], " duplicates the currently selected character", true, true); registerSlashCommand('dupe', DupeChar, [], " duplicates the currently selected character", true, true);
registerSlashCommand('api', connectAPISlash, [], "(kobold, horde, novel, ooba, oai, claude, windowai, ai21) connect to an API", true, true); registerSlashCommand('api', connectAPISlash, [], "(kobold, horde, novel, ooba, oai, claude, windowai, ai21, palm) connect to an API", true, true);
registerSlashCommand('impersonate', doImpersonate, ['imp'], "- calls an impersonation response", true, true); registerSlashCommand('impersonate', doImpersonate, ['imp'], "- calls an impersonation response", true, true);
registerSlashCommand('delchat', doDeleteChat, [], "- deletes the current chat", true, true); registerSlashCommand('delchat', doDeleteChat, [], "- deletes the current chat", true, true);
registerSlashCommand('closechat', doCloseChat, [], "- closes the current chat", true, true); registerSlashCommand('closechat', doCloseChat, [], "- closes the current chat", true, true);

View File

@ -415,6 +415,7 @@ function RA_autoconnect(PrevApi) {
|| (oai_settings.chat_completion_source == chat_completion_sources.WINDOWAI) || (oai_settings.chat_completion_source == chat_completion_sources.WINDOWAI)
|| (secret_state[SECRET_KEYS.OPENROUTER] && oai_settings.chat_completion_source == chat_completion_sources.OPENROUTER) || (secret_state[SECRET_KEYS.OPENROUTER] && oai_settings.chat_completion_source == chat_completion_sources.OPENROUTER)
|| (secret_state[SECRET_KEYS.AI21] && oai_settings.chat_completion_source == chat_completion_sources.AI21) || (secret_state[SECRET_KEYS.AI21] && oai_settings.chat_completion_source == chat_completion_sources.AI21)
|| (secret_state[SECRET_KEYS.PALM] && oai_settings.chat_completion_source == chat_completion_sources.PALM)
) { ) {
$("#api_button_openai").click(); $("#api_button_openai").click();
} }

View File

@ -161,6 +161,7 @@ export const chat_completion_sources = {
SCALE: 'scale', SCALE: 'scale',
OPENROUTER: 'openrouter', OPENROUTER: 'openrouter',
AI21: 'ai21', AI21: 'ai21',
PALM: 'palm',
}; };
const prefixMap = selected_group ? { const prefixMap = selected_group ? {
@ -882,7 +883,8 @@ function prepareOpenAIMessages({
extensionPrompts, extensionPrompts,
systemPromptOverride, systemPromptOverride,
jailbreakPromptOverride, jailbreakPromptOverride,
personaDescription}); personaDescription
});
// Fill the chat completion with as much context as the budget allows // Fill the chat completion with as much context as the budget allows
populateChatCompletion(prompts, chatCompletion, { bias, quietPrompt, type, cyclePrompt }); populateChatCompletion(prompts, chatCompletion, { bias, quietPrompt, type, cyclePrompt });
@ -1055,6 +1057,8 @@ function getChatCompletionModel() {
return oai_settings.windowai_model; return oai_settings.windowai_model;
case chat_completion_sources.SCALE: case chat_completion_sources.SCALE:
return ''; return '';
case chat_completion_sources.PALM:
return '';
case chat_completion_sources.OPENROUTER: case chat_completion_sources.OPENROUTER:
return oai_settings.openrouter_model !== openrouter_website_model ? oai_settings.openrouter_model : null; return oai_settings.openrouter_model !== openrouter_website_model ? oai_settings.openrouter_model : null;
case chat_completion_sources.AI21: case chat_completion_sources.AI21:
@ -1182,16 +1186,18 @@ async function sendOpenAIRequest(type, openai_msgs_tosend, signal) {
const isOpenRouter = oai_settings.chat_completion_source == chat_completion_sources.OPENROUTER; const isOpenRouter = oai_settings.chat_completion_source == chat_completion_sources.OPENROUTER;
const isScale = oai_settings.chat_completion_source == chat_completion_sources.SCALE; const isScale = oai_settings.chat_completion_source == chat_completion_sources.SCALE;
const isAI21 = oai_settings.chat_completion_source == chat_completion_sources.AI21; const isAI21 = oai_settings.chat_completion_source == chat_completion_sources.AI21;
const isPalm = oai_settings.chat_completion_source == chat_completion_sources.PALM;
const isTextCompletion = oai_settings.chat_completion_source == chat_completion_sources.OPENAI && textCompletionModels.includes(oai_settings.openai_model); const isTextCompletion = oai_settings.chat_completion_source == chat_completion_sources.OPENAI && textCompletionModels.includes(oai_settings.openai_model);
const isQuiet = type === 'quiet'; const isQuiet = type === 'quiet';
const isImpersonate = type === 'impersonate';
const stream = oai_settings.stream_openai && !isQuiet && !isScale && !isAI21; const stream = oai_settings.stream_openai && !isQuiet && !isScale && !isAI21;
if (isAI21) { if (isAI21 || isPalm) {
const joinedMsgs = openai_msgs_tosend.reduce((acc, obj) => { const joinedMsgs = openai_msgs_tosend.reduce((acc, obj) => {
const prefix = prefixMap[obj.role]; const prefix = prefixMap[obj.role];
return acc + (prefix ? (selected_group ? "\n" : prefix + " ") : "") + obj.content + "\n"; return acc + (prefix ? (selected_group ? "\n" : prefix + " ") : "") + obj.content + "\n";
}, ""); }, "");
openai_msgs_tosend = substituteParams(joinedMsgs); openai_msgs_tosend = substituteParams(joinedMsgs) + (isImpersonate ? `${name1}:` : `${name2}:`);
} }
// If we're using the window.ai extension, use that instead // If we're using the window.ai extension, use that instead
@ -1256,6 +1262,13 @@ async function sendOpenAIRequest(type, openai_msgs_tosend, signal) {
generate_data['api_url_scale'] = oai_settings.api_url_scale; generate_data['api_url_scale'] = oai_settings.api_url_scale;
} }
if (isPalm) {
const nameStopString = isImpersonate ? `\n${name2}:` : `\n${name1}:`;
generate_data['use_palm'] = true;
generate_data['top_k'] = Number(oai_settings.top_k_openai);
generate_data['stop'] = [nameStopString, oai_settings.new_chat_prompt, ...getCustomStoppingStrings()];
}
if (isAI21) { if (isAI21) {
generate_data['use_ai21'] = true; generate_data['use_ai21'] = true;
generate_data['top_k'] = Number(oai_settings.top_k_openai); generate_data['top_k'] = Number(oai_settings.top_k_openai);
@ -2052,7 +2065,8 @@ async function getStatusOpen() {
return resultCheckStatusOpen(); return resultCheckStatusOpen();
} }
if (oai_settings.chat_completion_source == chat_completion_sources.SCALE || oai_settings.chat_completion_source == chat_completion_sources.CLAUDE || oai_settings.chat_completion_source == chat_completion_sources.AI21) { const noValidateSources = [chat_completion_sources.SCALE, chat_completion_sources.CLAUDE, chat_completion_sources.AI21, chat_completion_sources.PALM];
if (noValidateSources.includes(oai_settings.chat_completion_source)) {
let status = 'Unable to verify key; press "Test Message" to validate.'; let status = 'Unable to verify key; press "Test Message" to validate.';
setOnlineStatus(status); setOnlineStatus(status);
return resultCheckStatusOpen(); return resultCheckStatusOpen();
@ -2676,6 +2690,17 @@ async function onModelChange() {
$('#openai_max_context').val(oai_settings.openai_max_context).trigger('input'); $('#openai_max_context').val(oai_settings.openai_max_context).trigger('input');
} }
if (oai_settings.chat_completion_source == chat_completion_sources.PALM) {
if (oai_settings.max_context_unlocked) {
$('#openai_max_context').attr('max', unlocked_max);
} else {
$('#openai_max_context').attr('max', palm2_max);
}
oai_settings.openai_max_context = Math.min(Number($('#openai_max_context').attr('max')), oai_settings.openai_max_context);
$('#openai_max_context').val(oai_settings.openai_max_context).trigger('input');
}
if (oai_settings.chat_completion_source == chat_completion_sources.OPENROUTER) { if (oai_settings.chat_completion_source == chat_completion_sources.OPENROUTER) {
if (oai_settings.max_context_unlocked) { if (oai_settings.max_context_unlocked) {
$('#openai_max_context').attr('max', unlocked_max); $('#openai_max_context').attr('max', unlocked_max);
@ -2856,7 +2881,19 @@ async function onConnectButtonClick(e) {
console.log("No cookie set for Scale"); console.log("No cookie set for Scale");
return; return;
} }
}
if (oai_settings.chat_completion_source == chat_completion_sources.PALM) {
const api_key_palm = String($('#api_key_palm').val()).trim();
if (api_key_palm.length) {
await writeSecret(SECRET_KEYS.PALM, api_key_palm);
}
if (!secret_state[SECRET_KEYS.PALM]) {
console.log('No secret key saved for PALM');
return;
}
} }
if (oai_settings.chat_completion_source == chat_completion_sources.CLAUDE) { if (oai_settings.chat_completion_source == chat_completion_sources.CLAUDE) {
@ -2924,6 +2961,9 @@ function toggleChatCompletionForms() {
else if (oai_settings.chat_completion_source == chat_completion_sources.SCALE) { else if (oai_settings.chat_completion_source == chat_completion_sources.SCALE) {
$('#model_scale_select').trigger('change'); $('#model_scale_select').trigger('change');
} }
else if (oai_settings.chat_completion_source == chat_completion_sources.PALM) {
$('#model_palm_select').trigger('change');
}
else if (oai_settings.chat_completion_source == chat_completion_sources.OPENROUTER) { else if (oai_settings.chat_completion_source == chat_completion_sources.OPENROUTER) {
$('#model_openrouter_select').trigger('change'); $('#model_openrouter_select').trigger('change');
} }
@ -3226,6 +3266,7 @@ $(document).ready(async function () {
$("#model_claude_select").on("change", onModelChange); $("#model_claude_select").on("change", onModelChange);
$("#model_windowai_select").on("change", onModelChange); $("#model_windowai_select").on("change", onModelChange);
$("#model_scale_select").on("change", onModelChange); $("#model_scale_select").on("change", onModelChange);
$("#model_palm_select").on("change", onModelChange);
$("#model_openrouter_select").on("change", onModelChange); $("#model_openrouter_select").on("change", onModelChange);
$("#model_ai21_select").on("change", onModelChange); $("#model_ai21_select").on("change", onModelChange);
$("#settings_perset_openai").on("change", onSettingsPresetChange); $("#settings_perset_openai").on("change", onSettingsPresetChange);

View File

@ -10,6 +10,7 @@ export const SECRET_KEYS = {
SCALE: 'api_key_scale', SCALE: 'api_key_scale',
AI21: 'api_key_ai21', AI21: 'api_key_ai21',
SCALE_COOKIE: 'scale_cookie', SCALE_COOKIE: 'scale_cookie',
PALM: 'api_key_palm',
} }
const INPUT_MAP = { const INPUT_MAP = {
@ -22,6 +23,7 @@ const INPUT_MAP = {
[SECRET_KEYS.SCALE]: '#api_key_scale', [SECRET_KEYS.SCALE]: '#api_key_scale',
[SECRET_KEYS.AI21]: '#api_key_ai21', [SECRET_KEYS.AI21]: '#api_key_ai21',
[SECRET_KEYS.SCALE_COOKIE]: '#scale_cookie', [SECRET_KEYS.SCALE_COOKIE]: '#scale_cookie',
[SECRET_KEYS.PALM]: '#api_key_palm',
} }
async function clearSecret() { async function clearSecret() {

View File

@ -182,7 +182,7 @@ const AVATAR_WIDTH = 400;
const AVATAR_HEIGHT = 600; const AVATAR_HEIGHT = 600;
const jsonParser = express.json({ limit: '100mb' }); const jsonParser = express.json({ limit: '100mb' });
const urlencodedParser = express.urlencoded({ extended: true, limit: '100mb' }); const urlencodedParser = express.urlencoded({ extended: true, limit: '100mb' });
const { DIRECTORIES, UPLOADS_PATH } = require('./src/constants'); const { DIRECTORIES, UPLOADS_PATH, PALM_SAFETY } = require('./src/constants');
// CSRF Protection // // CSRF Protection //
if (cliArguments.disableCsrf === false) { if (cliArguments.disableCsrf === false) {
@ -2926,6 +2926,69 @@ async function sendClaudeRequest(request, response) {
} }
} }
/**
 * Sends a text completion request to the Google PaLM 2 API (text-bison-001)
 * and relays the result back to the client wrapped in an OpenAI-compatible shape.
 * Responds 401 when no PaLM API key is stored, the upstream status on API errors,
 * and 500 on network failures or when the API returns no candidates.
 * @param {express.Request} request - Body carries `messages` (a single flattened
 *   prompt string despite the name), `stop`, `temperature`, `top_p`, `top_k`, `max_tokens`.
 * @param {express.Response} response
 */
async function sendPalmRequest(request, response) {
    const api_key_palm = readSecret(SECRET_KEYS.PALM);

    if (!api_key_palm) {
        return response.status(401).send({ error: true });
    }

    const body = {
        prompt: {
            text: request.body.messages,
        },
        stopSequences: request.body.stop,
        safetySettings: PALM_SAFETY,
        temperature: request.body.temperature,
        topP: request.body.top_p,
        // Omit topK entirely when unset/zero so the API falls back to its default.
        topK: request.body.top_k || undefined,
        maxOutputTokens: request.body.max_tokens,
        candidate_count: 1,
    };

    console.log('Palm request:', body);

    try {
        // Abort the upstream request if the client disconnects mid-generation.
        const controller = new AbortController();
        request.socket.removeAllListeners('close');
        request.socket.on('close', function () {
            controller.abort();
        });

        const generateResponse = await fetch(`https://generativelanguage.googleapis.com/v1beta2/models/text-bison-001:generateText?key=${api_key_palm}`, {
            body: JSON.stringify(body),
            method: "POST",
            headers: {
                "Content-Type": "application/json"
            },
            signal: controller.signal,
            timeout: 0,
        });

        if (!generateResponse.ok) {
            console.log(`Palm API returned error: ${generateResponse.status} ${generateResponse.statusText} ${await generateResponse.text()}`);
            return response.status(generateResponse.status).send({ error: true });
        }

        const generateResponseJson = await generateResponse.json();
        // The API omits "candidates" entirely when every output is blocked by the
        // safety filters, so chain optionally on each step instead of indexing blindly.
        const responseText = generateResponseJson?.candidates?.[0]?.output;

        if (responseText === undefined) {
            // Surface the empty result as an error instead of returning an OAI
            // reply with undefined content, which downstream code can't render.
            console.log('Palm API returned no candidates:', generateResponseJson);
            return response.status(500).send({ error: true });
        }

        console.log('Palm response:', responseText);

        // Wrap it back to OAI format
        const reply = { choices: [{ "message": { "content": responseText, } }] };
        return response.send(reply);
    } catch (error) {
        console.log('Error communicating with Palm API: ', error);
        if (!response.headersSent) {
            return response.status(500).send({ error: true });
        }
    }
}
app.post("/generate_openai", jsonParser, function (request, response_generate_openai) { app.post("/generate_openai", jsonParser, function (request, response_generate_openai) {
if (!request.body) return response_generate_openai.status(400).send({ error: true }); if (!request.body) return response_generate_openai.status(400).send({ error: true });
@ -2941,6 +3004,10 @@ app.post("/generate_openai", jsonParser, function (request, response_generate_op
return sendAI21Request(request, response_generate_openai); return sendAI21Request(request, response_generate_openai);
} }
if (request.body.use_palm) {
return sendPalmRequest(request, response_generate_openai);
}
let api_url; let api_url;
let api_key_openai; let api_key_openai;
let headers; let headers;

View File

@ -102,10 +102,42 @@ const UNSAFE_EXTENSIONS = [
".ws", ".ws",
]; ];
// Permissive PaLM safety settings: disable upstream blocking for every harm
// category, leaving content filtering entirely to the client application.
const PALM_SAFETY = [
    "HARM_CATEGORY_UNSPECIFIED",
    "HARM_CATEGORY_DEROGATORY",
    "HARM_CATEGORY_TOXICITY",
    "HARM_CATEGORY_VIOLENCE",
    "HARM_CATEGORY_SEXUAL",
    "HARM_CATEGORY_MEDICAL",
    "HARM_CATEGORY_DANGEROUS",
].map((category) => ({ category, threshold: "BLOCK_NONE" }));
const UPLOADS_PATH = './uploads'; const UPLOADS_PATH = './uploads';
module.exports = { module.exports = {
DIRECTORIES, DIRECTORIES,
UNSAFE_EXTENSIONS, UNSAFE_EXTENSIONS,
UPLOADS_PATH, UPLOADS_PATH,
PALM_SAFETY,
} }

View File

@ -19,6 +19,7 @@ const SECRET_KEYS = {
SCALE_COOKIE: 'scale_cookie', SCALE_COOKIE: 'scale_cookie',
ONERING_URL: 'oneringtranslator_url', ONERING_URL: 'oneringtranslator_url',
DEEPLX_URL: 'deeplx_url', DEEPLX_URL: 'deeplx_url',
PALM: 'api_key_palm',
} }
/** /**