[RAW / UNTESTED / NO STREAMING] Native Claude API supported

This commit is contained in:
Cohee
2023-06-06 19:16:46 +03:00
parent 8ed477a96b
commit 960bc32340
7 changed files with 827 additions and 646 deletions

View File

@ -6,7 +6,7 @@
"guikoboldaisettings":"GUI KoboldAI Settings", "guikoboldaisettings":"GUI KoboldAI Settings",
"novelaipreserts":"NovelAI Presets", "novelaipreserts":"NovelAI Presets",
"default":"Default", "default":"Default",
"openaipresets":"OpenAI Presets", "openaipresets":"Chat Completion Presets",
"text gen webio(ooba) presets":"Text Gen WebUI (ooba) presets", "text gen webio(ooba) presets":"Text Gen WebUI (ooba) presets",
"poe.com api settings":"Poe.com API Settings", "poe.com api settings":"Poe.com API Settings",
"response legth(tokens)":" Response Length (tokens)", "response legth(tokens)":" Response Length (tokens)",
@ -19,7 +19,7 @@
"Encoder Rep. Pen.":"Encoder Rep. Pen.", "Encoder Rep. Pen.":"Encoder Rep. Pen.",
"No Repeat Ngram Size":"No Repeat Ngram Size", "No Repeat Ngram Size":"No Repeat Ngram Size",
"Min Length":"Min Length", "Min Length":"Min Length",
"OpenAI Reverse Proxy":"OpenAI Reverse Proxy", "OpenAI Reverse Proxy":"OpenAI / Claude Reverse Proxy",
"Alternative server URL (leave empty to use the default value).":"Alternative server URL (leave empty to use the default value).", "Alternative server URL (leave empty to use the default value).":"Alternative server URL (leave empty to use the default value).",
"Remove your real OAI API Key from the API panel BEFORE typing anything into this box":"Remove your real OAI API Key from the API panel BEFORE typing anything into this box.", "Remove your real OAI API Key from the API panel BEFORE typing anything into this box":"Remove your real OAI API Key from the API panel BEFORE typing anything into this box.",
"We cannot provide support for problems encountered while using an unofficial OpenAI proxy":"We cannot provide support for problems encountered while using an unofficial OpenAI proxy.", "We cannot provide support for problems encountered while using an unofficial OpenAI proxy":"We cannot provide support for problems encountered while using an unofficial OpenAI proxy.",

View File

@ -134,7 +134,7 @@
</div> </div>
<div id="openai_api-presets"> <div id="openai_api-presets">
<div> <div>
<h4><span data-i18n="openaipresets">OpenAI Presets</span></h4> <h4><span data-i18n="openaipresets">Chat Completion Presets</span></h4>
<div class="openai_preset_buttons"> <div class="openai_preset_buttons">
<select id="settings_perset_openai"> <select id="settings_perset_openai">
<option value="gui" data-i18n="default">Default</option> <option value="gui" data-i18n="default">Default</option>
@ -443,7 +443,7 @@
<div id="range_block_openai"> <div id="range_block_openai">
<div class="range-block"> <div class="range-block">
<div class="range-block-title justifyLeft" data-i18n="OpenAI Reverse Proxy"> <div class="range-block-title justifyLeft" data-i18n="OpenAI Reverse Proxy">
OpenAI Reverse Proxy OpenAI / Claude Reverse Proxy
</div> </div>
<div class="toggle-description justifyLeft"> <div class="toggle-description justifyLeft">
<span data-i18n="Alternative server URL (leave empty to use the default value)."> <span data-i18n="Alternative server URL (leave empty to use the default value).">
@ -1186,7 +1186,7 @@
<option value="koboldhorde">KoboldAI Horde</option> <option value="koboldhorde">KoboldAI Horde</option>
<option value="textgenerationwebui">Text Gen WebUI (ooba)</option> <option value="textgenerationwebui">Text Gen WebUI (ooba)</option>
<option value="novel">NovelAI</option> <option value="novel">NovelAI</option>
<option value="openai">OpenAI</option> <option value="openai">Chat Completion (OpenAI, Claude, Window.ai)</option>
<option value="poe">Poe</option> <option value="poe">Poe</option>
</select> </select>
</div> </div>
@ -1336,15 +1336,16 @@
</div> </div>
</div> </div>
<div id="openai_api" style="display: none;position: relative;"> <div id="openai_api" style="display: none;position: relative;">
<label for="use_window_ai" class="checkbox_label"> <h3>
<input id="use_window_ai" type="checkbox" /> Chat Completion Source
Use Window.ai </h3>
<a href="https://docs.sillytavern.app/usage/guidebook/#windowai" class="notes-link" target="_blank"> <select id="chat_completion_source">
<span class="note-link-span">?</span> <option value="openai">OpenAI</option>
</a> <option value="windowai">Window.ai</option>
</label> <option value="claude">Claude</option>
<form id="openai_form" action="javascript:void(null);" method="post" enctype="multipart/form-data"> </select>
<h4>API key </h4> <form id="openai_form">
<h4>OpenAI API key</h4>
<span> <span>
<ol> <ol>
<li> <li>
@ -1360,12 +1361,6 @@
</div> </div>
<div class="neutral_warning">For privacy reasons, your API key will be hidden after you <div class="neutral_warning">For privacy reasons, your API key will be hidden after you
reload the page.</div> reload the page.</div>
<input id="api_button_openai" class="menu_button" type="submit" value="Connect">
<div id="api_loading_openai" class=" api-load-icon fa-solid fa-hourglass fa-spin"></div>
<div class="online_status4">
<div class="online_status_indicator4"></div>
<div class="online_status_text4">No connection...</div>
</div>
<div> <div>
<h4 data-i18n="OpenAI Model">OpenAI Model</h4> <h4 data-i18n="OpenAI Model">OpenAI Model</h4>
<select id="model_openai_select"> <select id="model_openai_select">
@ -1380,6 +1375,31 @@
<a id="openai_api_usage" href="javascript:void(0);">View API Usage Metrics</a> <a id="openai_api_usage" href="javascript:void(0);">View API Usage Metrics</a>
</div> </div>
</form> </form>
<form id="claude_form">
<h4>Claude API Key</h4>
<div class="flex-container">
<input id="api_key_claude" name="api_key_claude" class="text_pole flex1" maxlength="500" value="" type="text" autocomplete="off">
<div title="Clear your API key" class="menu_button fa-solid fa-circle-xmark clear-api-key" data-key="api_key_claude"></div>
</div>
<div class="neutral_warning">For privacy reasons, your API key will be hidden after you
reload the page.</div>
<div>
<h4 data-i18n="Claude Model">Claude Model</h4>
<select id="model_claude_select">
<option value="claude-v1">claude-v1</option>
<option value="claude-v1-100k">claude-v1-100k</option>
<option value="claude-instant-v1">claude-instant-v1</option>
<option value="claude-instant-v1-100k">claude-instant-v1-100k</option>
</select>
</div>
</form>
<input id="api_button_openai" class="menu_button" type="submit" value="Connect">
<div id="api_loading_openai" class=" api-load-icon fa-solid fa-hourglass fa-spin"></div>
<div class="online_status4">
<div class="online_status_indicator4"></div>
<div class="online_status_text4">No connection...</div>
</div>
<br> <br>
</div> </div>
<div id="poe_api"> <div id="poe_api">

View File

@ -80,6 +80,7 @@ import {
is_get_status_openai, is_get_status_openai,
openai_messages_count, openai_messages_count,
getTokenCountOpenAI, getTokenCountOpenAI,
chat_completion_sources,
} from "./scripts/openai.js"; } from "./scripts/openai.js";
import { import {
@ -1561,7 +1562,7 @@ function appendToStoryString(value, prefix) {
} }
function isStreamingEnabled() { function isStreamingEnabled() {
return ((main_api == 'openai' && oai_settings.stream_openai) return ((main_api == 'openai' && oai_settings.stream_openai && oai_settings.chat_completion_source !== chat_completion_sources.CLAUDE)
|| (main_api == 'poe' && poe_settings.streaming) || (main_api == 'poe' && poe_settings.streaming)
|| (main_api == 'textgenerationwebui' && textgenerationwebui_settings.streaming)) || (main_api == 'textgenerationwebui' && textgenerationwebui_settings.streaming))
&& !isMultigenEnabled(); // Multigen has a quasi-streaming mode which breaks the real streaming && !isMultigenEnabled(); // Multigen has a quasi-streaming mode which breaks the real streaming
@ -3822,7 +3823,7 @@ function changeMainAPI() {
main_api = selectedVal; main_api = selectedVal;
online_status = "no_connection"; online_status = "no_connection";
if (main_api == 'openai' && oai_settings.use_window_ai) { if (main_api == 'openai' && oai_settings.chat_completion_source == chat_completion_sources.WINDOWAI) {
$('#api_button_openai').trigger('click'); $('#api_button_openai').trigger('click');
} }

View File

@ -21,13 +21,14 @@ import {
send_on_enter_options, send_on_enter_options,
} from "./power-user.js"; } from "./power-user.js";
import { LoadLocal, SaveLocal, ClearLocal, CheckLocal, LoadLocalBool } from "./f-localStorage.js"; import { LoadLocal, SaveLocal, CheckLocal, LoadLocalBool } from "./f-localStorage.js";
import { selected_group, is_group_generating, getGroupAvatar, groups } from "./group-chats.js"; import { selected_group, is_group_generating, getGroupAvatar, groups } from "./group-chats.js";
import { import {
SECRET_KEYS, SECRET_KEYS,
secret_state, secret_state,
} from "./secrets.js"; } from "./secrets.js";
import { sortByCssOrder } from "./utils.js"; import { sortByCssOrder } from "./utils.js";
import { chat_completion_sources, oai_settings } from "./openai.js";
var NavToggle = document.getElementById("nav-toggle"); var NavToggle = document.getElementById("nav-toggle");
@ -388,7 +389,7 @@ function RA_autoconnect(PrevApi) {
} }
break; break;
case 'openai': case 'openai':
if (secret_state[SECRET_KEYS.OPENAI]) { if (secret_state[SECRET_KEYS.OPENAI] || secret_state[SECRET_KEYS.CLAUDE] || oai_settings.chat_completion_source == chat_completion_sources.WINDOWAI) {
$("#api_button_openai").click(); $("#api_button_openai").click();
} }
break; break;

View File

@ -81,11 +81,19 @@ const default_bias_presets = {
const gpt3_max = 4095; const gpt3_max = 4095;
const gpt4_max = 8191; const gpt4_max = 8191;
const gpt4_32k_max = 32767; const gpt4_32k_max = 32767;
const claude_max = 7500;
const claude_100k_max = 99000;
const unlocked_max = 100 * 1024; const unlocked_max = 100 * 1024;
let biasCache = undefined; let biasCache = undefined;
const tokenCache = {}; const tokenCache = {};
// Identifiers for the supported chat completion backends. Values are the
// persisted setting strings, so they must stay stable across versions.
// Frozen so this shared module-level constant cannot be mutated at runtime.
export const chat_completion_sources = Object.freeze({
    OPENAI: 'openai',
    WINDOWAI: 'windowai',
    CLAUDE: 'claude',
});
const default_settings = { const default_settings = {
preset_settings_openai: 'Default', preset_settings_openai: 'Default',
temp_openai: 0.9, temp_openai: 0.9,
@ -108,10 +116,11 @@ const default_settings = {
bias_presets: default_bias_presets, bias_presets: default_bias_presets,
wi_format: default_wi_format, wi_format: default_wi_format,
openai_model: 'gpt-3.5-turbo', openai_model: 'gpt-3.5-turbo',
claude_model: 'claude-instant-v1',
jailbreak_system: false, jailbreak_system: false,
reverse_proxy: '', reverse_proxy: '',
legacy_streaming: false, legacy_streaming: false,
use_window_ai: false, chat_completion_source: chat_completion_sources.OPENAI,
max_context_unlocked: false, max_context_unlocked: false,
}; };
@ -137,10 +146,11 @@ const oai_settings = {
bias_presets: default_bias_presets, bias_presets: default_bias_presets,
wi_format: default_wi_format, wi_format: default_wi_format,
openai_model: 'gpt-3.5-turbo', openai_model: 'gpt-3.5-turbo',
claude_model: 'claude-instant-v1',
jailbreak_system: false, jailbreak_system: false,
reverse_proxy: '', reverse_proxy: '',
legacy_streaming: false, legacy_streaming: false,
use_window_ai: false, chat_completion_source: chat_completion_sources.OPENAI,
max_context_unlocked: false, max_context_unlocked: false,
}; };
@ -505,6 +515,7 @@ function tryParseStreamingError(str) {
checkQuotaError(data); checkQuotaError(data);
if (data.error) { if (data.error) {
toastr.error(response.statusText, 'API returned an error');
throw new Error(data); throw new Error(data);
} }
} }
@ -624,24 +635,27 @@ async function sendOpenAIRequest(type, openai_msgs_tosend, signal) {
} }
let logit_bias = {}; let logit_bias = {};
const stream = type !== 'quiet' && oai_settings.stream_openai; const isClaude = oai_settings.chat_completion_source == chat_completion_sources.CLAUDE;
const stream = type !== 'quiet' && oai_settings.stream_openai && !isClaude;
// If we're using the window.ai extension, use that instead // If we're using the window.ai extension, use that instead
// Doesn't support logit bias yet // Doesn't support logit bias yet
if (oai_settings.use_window_ai) { if (oai_settings.chat_completion_source == chat_completion_sources.WINDOWAI) {
return sendWindowAIRequest(openai_msgs_tosend, signal, stream); return sendWindowAIRequest(openai_msgs_tosend, signal, stream);
} }
if (oai_settings.bias_preset_selected if (oai_settings.bias_preset_selected
&& !isClaude // Claude doesn't support logit bias
&& Array.isArray(oai_settings.bias_presets[oai_settings.bias_preset_selected]) && Array.isArray(oai_settings.bias_presets[oai_settings.bias_preset_selected])
&& oai_settings.bias_presets[oai_settings.bias_preset_selected].length) { && oai_settings.bias_presets[oai_settings.bias_preset_selected].length) {
logit_bias = biasCache || await calculateLogitBias(); logit_bias = biasCache || await calculateLogitBias();
biasCache = logit_bias; biasCache = logit_bias;
} }
const model = isClaude ? oai_settings.claude_model : oai_settings.openai_model;
const generate_data = { const generate_data = {
"messages": openai_msgs_tosend, "messages": openai_msgs_tosend,
"model": oai_settings.openai_model, "model": model,
"temperature": parseFloat(oai_settings.temp_openai), "temperature": parseFloat(oai_settings.temp_openai),
"frequency_penalty": parseFloat(oai_settings.freq_pen_openai), "frequency_penalty": parseFloat(oai_settings.freq_pen_openai),
"presence_penalty": parseFloat(oai_settings.pres_pen_openai), "presence_penalty": parseFloat(oai_settings.pres_pen_openai),
@ -650,6 +664,7 @@ async function sendOpenAIRequest(type, openai_msgs_tosend, signal) {
"stream": stream, "stream": stream,
"reverse_proxy": oai_settings.reverse_proxy, "reverse_proxy": oai_settings.reverse_proxy,
"logit_bias": logit_bias, "logit_bias": logit_bias,
"use_claude": isClaude,
}; };
const generate_url = '/generate_openai'; const generate_url = '/generate_openai';
@ -709,6 +724,7 @@ async function sendOpenAIRequest(type, openai_msgs_tosend, signal) {
checkQuotaError(data); checkQuotaError(data);
if (data.error) { if (data.error) {
toastr.error(response.statusText, 'API returned an error');
throw new Error(data); throw new Error(data);
} }
@ -833,10 +849,17 @@ function countTokens(messages, full = false) {
token_count += cachedCount; token_count += cachedCount;
} }
else { else {
let model = oai_settings.openai_model;
// We don't have a Claude tokenizer for JS yet. Turbo 3.5 should be able to handle this.
if (oai_settings.chat_completion_source == chat_completion_sources.CLAUDE) {
model = 'gpt-3.5-turbo';
}
jQuery.ajax({ jQuery.ajax({
async: false, async: false,
type: 'POST', // type: 'POST', //
url: `/tokenize_openai?model=${oai_settings.openai_model}`, url: `/tokenize_openai?model=${model}`,
data: JSON.stringify([message]), data: JSON.stringify([message]),
dataType: "json", dataType: "json",
contentType: "application/json", contentType: "application/json",
@ -882,10 +905,10 @@ function loadOpenAISettings(data, settings) {
oai_settings.bias_preset_selected = settings.bias_preset_selected ?? default_settings.bias_preset_selected; oai_settings.bias_preset_selected = settings.bias_preset_selected ?? default_settings.bias_preset_selected;
oai_settings.bias_presets = settings.bias_presets ?? default_settings.bias_presets; oai_settings.bias_presets = settings.bias_presets ?? default_settings.bias_presets;
oai_settings.legacy_streaming = settings.legacy_streaming ?? default_settings.legacy_streaming; oai_settings.legacy_streaming = settings.legacy_streaming ?? default_settings.legacy_streaming;
oai_settings.use_window_ai = settings.use_window_ai ?? default_settings.use_window_ai;
oai_settings.max_context_unlocked = settings.max_context_unlocked ?? default_settings.max_context_unlocked; oai_settings.max_context_unlocked = settings.max_context_unlocked ?? default_settings.max_context_unlocked;
oai_settings.nsfw_avoidance_prompt = settings.nsfw_avoidance_prompt ?? default_settings.nsfw_avoidance_prompt; oai_settings.nsfw_avoidance_prompt = settings.nsfw_avoidance_prompt ?? default_settings.nsfw_avoidance_prompt;
oai_settings.wi_format = settings.wi_format ?? default_settings.wi_format; oai_settings.wi_format = settings.wi_format ?? default_settings.wi_format;
oai_settings.chat_completion_source = settings.chat_completion_source ?? default_settings.chat_completion_source;
if (settings.nsfw_toggle !== undefined) oai_settings.nsfw_toggle = !!settings.nsfw_toggle; if (settings.nsfw_toggle !== undefined) oai_settings.nsfw_toggle = !!settings.nsfw_toggle;
if (settings.keep_example_dialogue !== undefined) oai_settings.keep_example_dialogue = !!settings.keep_example_dialogue; if (settings.keep_example_dialogue !== undefined) oai_settings.keep_example_dialogue = !!settings.keep_example_dialogue;
@ -897,7 +920,8 @@ function loadOpenAISettings(data, settings) {
$('#stream_toggle').prop('checked', oai_settings.stream_openai); $('#stream_toggle').prop('checked', oai_settings.stream_openai);
$(`#model_openai_select option[value="${oai_settings.openai_model}"`).attr('selected', true).trigger('change'); $(`#model_openai_select option[value="${oai_settings.openai_model}"`).attr('selected', true);
$(`#model_claude_select option[value="${oai_settings.claude_model}"`).attr('selected', true);
$('#openai_max_context').val(oai_settings.openai_max_context); $('#openai_max_context').val(oai_settings.openai_max_context);
$('#openai_max_context_counter').text(`${oai_settings.openai_max_context}`); $('#openai_max_context_counter').text(`${oai_settings.openai_max_context}`);
@ -951,14 +975,13 @@ function loadOpenAISettings(data, settings) {
} }
$('#openai_logit_bias_preset').trigger('change'); $('#openai_logit_bias_preset').trigger('change');
$('#use_window_ai').prop('checked', oai_settings.use_window_ai); $('#chat_completion_source').val(oai_settings.chat_completion_source).trigger('change');
$('#oai_max_context_unlocked').prop('checked', oai_settings.max_context_unlocked); $('#oai_max_context_unlocked').prop('checked', oai_settings.max_context_unlocked);
$('#openai_form').toggle(!oai_settings.use_window_ai);
} }
async function getStatusOpen() { async function getStatusOpen() {
if (is_get_status_openai) { if (is_get_status_openai) {
if (oai_settings.use_window_ai) { if (oai_settings.chat_completion_source == chat_completion_sources.WINDOWAI) {
let status; let status;
if ('ai' in window) { if ('ai' in window) {
@ -973,6 +996,12 @@ async function getStatusOpen() {
return resultCheckStatusOpen(); return resultCheckStatusOpen();
} }
if (oai_settings.chat_completion_source == chat_completion_sources.CLAUDE) {
let status = 'I can\'t validate your key, but I hope it is legit.';
setOnlineStatus(status);
return resultCheckStatusOpen();
}
let data = { let data = {
reverse_proxy: oai_settings.reverse_proxy, reverse_proxy: oai_settings.reverse_proxy,
}; };
@ -1063,6 +1092,8 @@ async function saveOpenAIPreset(name, settings) {
max_context_unlocked: settings.max_context_unlocked, max_context_unlocked: settings.max_context_unlocked,
nsfw_avoidance_prompt: settings.nsfw_avoidance_prompt, nsfw_avoidance_prompt: settings.nsfw_avoidance_prompt,
wi_format: settings.wi_format, wi_format: settings.wi_format,
claude_model: settings.claude_model,
chat_completion_source: settings.chat_completion_source,
}; };
const savePresetSettings = await fetch(`/savepreset_openai?name=${name}`, { const savePresetSettings = await fetch(`/savepreset_openai?name=${name}`, {
@ -1310,12 +1341,14 @@ function onSettingsPresetChange() {
const updateCheckbox = (selector, value) => $(selector).prop('checked', value).trigger('input'); const updateCheckbox = (selector, value) => $(selector).prop('checked', value).trigger('input');
const settingsToUpdate = { const settingsToUpdate = {
chat_completion_source: ['#chat_completion_source', 'chat_completion_source', false],
temperature: ['#temp_openai', 'temp_openai', false], temperature: ['#temp_openai', 'temp_openai', false],
frequency_penalty: ['#freq_pen_openai', 'freq_pen_openai', false], frequency_penalty: ['#freq_pen_openai', 'freq_pen_openai', false],
presence_penalty: ['#pres_pen_openai', 'pres_pen_openai', false], presence_penalty: ['#pres_pen_openai', 'pres_pen_openai', false],
top_p: ['#top_p_openai', 'top_p_openai', false], top_p: ['#top_p_openai', 'top_p_openai', false],
max_context_unlocked: ['#oai_max_context_unlocked', 'max_context_unlocked', true], max_context_unlocked: ['#oai_max_context_unlocked', 'max_context_unlocked', true],
openai_model: ['#model_openai_select', 'openai_model', false], openai_model: ['#model_openai_select', 'openai_model', false],
claude_model: ['#model_claude_select', 'claude_model', false],
openai_max_context: ['#openai_max_context', 'openai_max_context', false], openai_max_context: ['#openai_max_context', 'openai_max_context', false],
openai_max_tokens: ['#openai_max_tokens', 'openai_max_tokens', false], openai_max_tokens: ['#openai_max_tokens', 'openai_max_tokens', false],
nsfw_toggle: ['#nsfw_toggle', 'nsfw_toggle', true], nsfw_toggle: ['#nsfw_toggle', 'nsfw_toggle', true],
@ -1352,6 +1385,24 @@ function onSettingsPresetChange() {
function onModelChange() { function onModelChange() {
const value = $(this).val(); const value = $(this).val();
if (oai_settings.chat_completion_source == chat_completion_sources.CLAUDE) {
oai_settings.claude_model = value;
if (oai_settings.max_context_unlocked) {
$('#openai_max_context').attr('max', unlocked_max);
}
else if (value.endsWith('100k')) {
$('#openai_max_context').attr('max', claude_100k_max);
}
else {
$('#openai_max_context').attr('max', claude_max);
oai_settings.openai_max_context = Math.max(oai_settings.openai_max_context, claude_max);
$('#openai_max_context').val(oai_settings.openai_max_context).trigger('input');
}
$('#openai_reverse_proxy').attr('placeholder', 'https://api.anthropic.com/v1');
} else {
oai_settings.openai_model = value; oai_settings.openai_model = value;
if (oai_settings.max_context_unlocked) { if (oai_settings.max_context_unlocked) {
@ -1369,6 +1420,9 @@ function onModelChange() {
$('#openai_max_context').val(oai_settings.openai_max_context).trigger('input'); $('#openai_max_context').val(oai_settings.openai_max_context).trigger('input');
} }
$('#openai_reverse_proxy').attr('placeholder', 'https://api.openai.com/v1');
}
saveSettingsDebounced(); saveSettingsDebounced();
} }
@ -1396,12 +1450,26 @@ function onReverseProxyInput() {
async function onConnectButtonClick(e) { async function onConnectButtonClick(e) {
e.stopPropagation(); e.stopPropagation();
if (oai_settings.use_window_ai) { if (oai_settings.chat_completion_source == chat_completion_sources.WINDOWAI) {
is_get_status_openai = true; is_get_status_openai = true;
is_api_button_press_openai = true; is_api_button_press_openai = true;
return await getStatusOpen(); return await getStatusOpen();
} }
if (oai_settings.chat_completion_source == chat_completion_sources.CLAUDE) {
const api_key_claude = $('#api_key_claude').val().trim();
if (api_key_claude.length) {
await writeSecret(SECRET_KEYS.CLAUDE, api_key_claude);
}
if (!secret_state[SECRET_KEYS.CLAUDE]) {
console.log('No secret key saved for Claude');
return;
}
}
if (oai_settings.chat_completion_source == chat_completion_sources.OPENAI) {
const api_key_openai = $('#api_key_openai').val().trim(); const api_key_openai = $('#api_key_openai').val().trim();
if (api_key_openai.length) { if (api_key_openai.length) {
@ -1412,6 +1480,7 @@ async function onConnectButtonClick(e) {
console.log('No secret key saved for OpenAI'); console.log('No secret key saved for OpenAI');
return; return;
} }
}
$("#api_loading_openai").css("display", 'inline-block'); $("#api_loading_openai").css("display", 'inline-block');
$("#api_button_openai").css("display", 'none'); $("#api_button_openai").css("display", 'none');
@ -1421,6 +1490,17 @@ async function onConnectButtonClick(e) {
await getStatusOpen(); await getStatusOpen();
} }
// Shows only the API form matching the active chat completion source, then
// re-fires the matching model dropdown's change handler so dependent UI
// (max context slider, reverse proxy placeholder) stays in sync.
function toggleChatCompletionForms() {
    const source = oai_settings.chat_completion_source;
    const isClaude = source == chat_completion_sources.CLAUDE;

    $("#claude_form").toggle(isClaude);
    $("#openai_form").toggle(source == chat_completion_sources.OPENAI);

    const modelSelect = isClaude ? '#model_claude_select' : '#model_openai_select';
    $(modelSelect).trigger('change');
}
$(document).ready(function () { $(document).ready(function () {
$(document).on('input', '#temp_openai', function () { $(document).on('input', '#temp_openai', function () {
oai_settings.temp_openai = $(this).val(); oai_settings.temp_openai = $(this).val();
@ -1589,9 +1669,9 @@ $(document).ready(function () {
saveSettingsDebounced(); saveSettingsDebounced();
}); });
$('#use_window_ai').on('input', function () { $('#chat_completion_source').on('change', function () {
oai_settings.use_window_ai = !!$(this).prop('checked'); oai_settings.chat_completion_source = $(this).find(":selected").val();
$('#openai_form').toggle(!oai_settings.use_window_ai); toggleChatCompletionForms();
setOnlineStatus('no_connection'); setOnlineStatus('no_connection');
resultCheckStatusOpen(); resultCheckStatusOpen();
$('#api_button_openai').trigger('click'); $('#api_button_openai').trigger('click');
@ -1607,6 +1687,7 @@ $(document).ready(function () {
$("#api_button_openai").on("click", onConnectButtonClick); $("#api_button_openai").on("click", onConnectButtonClick);
$("#openai_reverse_proxy").on("input", onReverseProxyInput); $("#openai_reverse_proxy").on("input", onReverseProxyInput);
$("#model_openai_select").on("change", onModelChange); $("#model_openai_select").on("change", onModelChange);
$("#model_claude_select").on("change", onModelChange);
$("#settings_perset_openai").on("change", onSettingsPresetChange); $("#settings_perset_openai").on("change", onSettingsPresetChange);
$("#new_oai_preset").on("click", onNewPresetClick); $("#new_oai_preset").on("click", onNewPresetClick);
$("#delete_oai_preset").on("click", onDeletePresetClick); $("#delete_oai_preset").on("click", onDeletePresetClick);

View File

@ -5,6 +5,7 @@ export const SECRET_KEYS = {
OPENAI: 'api_key_openai', OPENAI: 'api_key_openai',
POE: 'api_key_poe', POE: 'api_key_poe',
NOVEL: 'api_key_novel', NOVEL: 'api_key_novel',
CLAUDE: 'api_key_claude',
} }
const INPUT_MAP = { const INPUT_MAP = {
@ -12,6 +13,7 @@ const INPUT_MAP = {
[SECRET_KEYS.OPENAI]: '#api_key_openai', [SECRET_KEYS.OPENAI]: '#api_key_openai',
[SECRET_KEYS.POE]: '#poe_token', [SECRET_KEYS.POE]: '#poe_token',
[SECRET_KEYS.NOVEL]: '#api_key_novel', [SECRET_KEYS.NOVEL]: '#api_key_novel',
[SECRET_KEYS.CLAUDE]: '#api_key_claude',
} }
async function clearSecret() { async function clearSecret() {

View File

@ -109,6 +109,7 @@ const poe = require('./src/poe-client');
let api_server = "http://0.0.0.0:5000"; let api_server = "http://0.0.0.0:5000";
let api_novelai = "https://api.novelai.net"; let api_novelai = "https://api.novelai.net";
let api_openai = "https://api.openai.com/v1"; let api_openai = "https://api.openai.com/v1";
let api_claude = "https://api.anthropic.com/v1";
let main_api = "kobold"; let main_api = "kobold";
let response_generate_novel; let response_generate_novel;
@ -2694,14 +2695,88 @@ app.post("/deletepreset_openai", jsonParser, function (request, response) {
return response.send({ error: true }); return response.send({ error: true });
}); });
// Prompt Conversion script taken from RisuAI by @kwaroran (GPLv3).
// Flattens an OpenAI-style message array into Anthropic's single-string
// prompt format ("\n\nHuman: ...\n\nAssistant: ..."). Messages with an
// unrecognized role are appended with no prefix, and the prompt always
// ends with an open "\n\nAssistant: " turn for the model to complete.
function convertClaudePrompt(messages) {
    const prefixByRole = new Map([
        ["assistant", "\n\nAssistant: "],
        ["user", "\n\nHuman: "],
        ["system", "\n\nSystem: "],
    ]);

    const parts = messages.map((message) => {
        const prefix = prefixByRole.get(message.role) ?? '';
        return prefix + message.content;
    });

    return parts.join('') + '\n\nAssistant: ';
}
// Forwards an OpenAI-formatted /generate_openai request to the Anthropic
// Claude text-completion endpoint and wraps the reply back into OAI shape.
// Responds 401 when no Claude key is stored, mirrors Claude's status code
// on an API-level error, and 500 on a network/parse failure (previously a
// thrown fetch error left the request hanging as an unhandled rejection).
async function sendClaudeRequest(request, response) {
    const fetch = require('node-fetch').default;
    const api_url = new URL(request.body.reverse_proxy || api_claude).toString();
    const api_key_claude = readSecret(SECRET_KEYS.CLAUDE);

    if (!api_key_claude) {
        return response.status(401).send({ error: true });
    }

    try {
        // Abort the upstream call if the client disconnects mid-generation.
        const controller = new AbortController();
        request.socket.removeAllListeners('close');
        request.socket.on('close', function () {
            controller.abort();
        });

        const requestPrompt = convertClaudePrompt(request.body.messages);
        console.log('Claude request:', requestPrompt);

        const generateResponse = await fetch(api_url + '/complete', {
            method: "POST",
            signal: controller.signal,
            body: JSON.stringify({
                // NOTE(review): convertClaudePrompt already emits "\n\nHuman:" /
                // "\n\nSystem:" prefixes, so prepending another "\n\nHuman: "
                // may duplicate the opening tag — confirm this is intended.
                prompt: "\n\nHuman: " + requestPrompt,
                model: request.body.model,
                max_tokens_to_sample: request.body.max_tokens,
                stop_sequences: ["\n\nHuman:", "\n\nSystem:", "\n\nAssistant:"],
                temperature: request.body.temperature,
            }),
            headers: {
                "Content-Type": "application/json",
                "x-api-key": api_key_claude,
            }
        });

        if (!generateResponse.ok) {
            console.log(`Claude API returned error: ${generateResponse.status} ${generateResponse.statusText} ${await generateResponse.text()}`);
            return response.status(generateResponse.status).send({ error: true });
        }

        const generateResponseJson = await generateResponse.json();
        const responseText = generateResponseJson.completion;
        console.log('Claude response:', responseText);

        // Wrap it back to OAI format
        const reply = { choices: [{ "message": { "content": responseText, } }] };
        return response.send(reply);
    } catch (error) {
        // Network failure, abort, or invalid JSON from the upstream API.
        console.log('Error communicating with Claude:', error);
        if (!response.headersSent) {
            return response.status(500).send({ error: true });
        }
    }
}
app.post("/generate_openai", jsonParser, function (request, response_generate_openai) { app.post("/generate_openai", jsonParser, function (request, response_generate_openai) {
if (!request.body) return response_generate_openai.sendStatus(400); if (!request.body) return response_generate_openai.status(400).send({ error: true });
if (request.body.use_claude) {
return sendClaudeRequest(request, response_generate_openai);
}
const api_url = new URL(request.body.reverse_proxy || api_openai).toString(); const api_url = new URL(request.body.reverse_proxy || api_openai).toString();
const api_key_openai = readSecret(SECRET_KEYS.OPENAI); const api_key_openai = readSecret(SECRET_KEYS.OPENAI);
if (!api_key_openai) { if (!api_key_openai) {
return response_generate_openai.sendStatus(401); return response_generate_openai.status(401).send({ error: true });
} }
const controller = new AbortController(); const controller = new AbortController();
@ -3009,6 +3084,7 @@ const SECRET_KEYS = {
OPENAI: 'api_key_openai', OPENAI: 'api_key_openai',
POE: 'api_key_poe', POE: 'api_key_poe',
NOVEL: 'api_key_novel', NOVEL: 'api_key_novel',
CLAUDE: 'api_key_claude',
} }
function migrateSecrets() { function migrateSecrets() {