scale cookie method

Commit 7ecc0295dc (parent 7528a3828a)
Mirror of https://github.com/SillyTavern/SillyTavern.git
@@ -667,7 +667,7 @@
     Max prompt cost: <span id="openrouter_max_prompt_cost">Unknown</span>
 </div>
 <hr>
-<div class="range-block" data-source="openai,claude,windowai,openrouter,ai21">
+<div class="range-block" data-source="openai,claude,windowai,openrouter,ai21,scale">
     <div class="range-block-title" data-i18n="Temperature">
         Temperature
     </div>
@@ -2000,18 +2000,34 @@
 </form>
 
 <form id="scale_form" data-source="scale" action="javascript:void(null);" method="post" enctype="multipart/form-data">
-    <h4>Scale API Key</h4>
-    <div class="flex-container">
-        <input id="api_key_scale" name="api_key_scale" class="text_pole flex1" maxlength="500" value="" autocomplete="off">
-        <div title="Clear your API key" data-i18n="[title]Clear your API key" class="menu_button fa-solid fa-circle-xmark clear-api-key" data-key="api_key_scale"></div>
-    </div>
-    <div data-for="api_key_scale" class="neutral_warning">
-        For privacy reasons, your API key will be hidden after you reload the page.
-    </div>
-    <h4>Scale API URL</h4>
-    <input id="api_url_scale" name="api_url_scale" class="text_pole" maxlength="500" value="" autocomplete="off" placeholder="https://dashboard.scale.com/spellbook/api/v2/deploy/xxxxxxx">
+    <div id="normal_scale_form">
+        <h4>Scale API Key</h4>
+        <div class="flex-container">
+            <input id="api_key_scale" name="api_key_scale" class="text_pole flex1" maxlength="500" value="" autocomplete="off">
+            <div title="Clear your API key" data-i18n="[title]Clear your API key" class="menu_button fa-solid fa-circle-xmark clear-api-key" data-key="api_key_scale"></div>
+        </div>
+        <div data-for="api_key_scale" class="neutral_warning">
+            For privacy reasons, your API key will be hidden after you reload the page.
+        </div>
+        <h4>Scale API URL</h4>
+        <input id="api_url_scale" name="api_url_scale" class="text_pole" maxlength="500" value="" autocomplete="off" placeholder="https://dashboard.scale.com/spellbook/api/v2/deploy/xxxxxxx">
+    </div>
+    <div id="alt_scale_form">
+        <h4>Scale Cookie (_jwt)</h4>
+        <div class="flex-container">
+            <input id="scale_cookie" name="scale_cookie" class="text_pole flex1" maxlength="500" value="" autocomplete="off">
+            <div title="Clear your cookie" data-i18n="[title]Clear your cookie" class="menu_button fa-solid fa-circle-xmark clear-api-key" data-key="scale_cookie"></div>
+        </div>
+        <div data-for="scale_cookie" class="neutral_warning">
+            For privacy reasons, your cookie will be hidden after you reload the page.
+        </div>
+    </div>
     <!-- Its only purpose is to trigger max context size check -->
     <select id="model_scale_select" class="displayNone"></select>
+    <label for="scale-alt" class="checkbox_label">
+        <input id="scale-alt" type="checkbox" checked>
+        <span data-i18n="Alt Method">Alt Method</span>
+    </label>
 </form>
 
 <form id="ai21_form" data-source="ai21" action="javascript:void(null);" method="post" enctype="multipart/form-data">
@@ -4356,4 +4372,4 @@
 </script>
 </body>
 
-</html>
+</html>
@@ -474,7 +474,7 @@ function RA_autoconnect(PrevApi) {
         case 'openai':
             if (((secret_state[SECRET_KEYS.OPENAI] || oai_settings.reverse_proxy) && oai_settings.chat_completion_source == chat_completion_sources.OPENAI)
                 || ((secret_state[SECRET_KEYS.CLAUDE] || oai_settings.reverse_proxy) && oai_settings.chat_completion_source == chat_completion_sources.CLAUDE)
-                || (secret_state[SECRET_KEYS.SCALE] && oai_settings.chat_completion_source == chat_completion_sources.SCALE)
+                || ((secret_state[SECRET_KEYS.SCALE] || secret_state[SECRET_KEYS.SCALE_COOKIE]) && oai_settings.chat_completion_source == chat_completion_sources.SCALE)
                 || (oai_settings.chat_completion_source == chat_completion_sources.WINDOWAI)
                 || (secret_state[SECRET_KEYS.OPENROUTER] && oai_settings.chat_completion_source == chat_completion_sources.OPENROUTER)
                 || (secret_state[SECRET_KEYS.AI21] && oai_settings.chat_completion_source == chat_completion_sources.AI21)
@@ -110,8 +110,8 @@ const max_4k = 4095;
 const max_8k = 8191;
 const max_16k = 16383;
 const max_32k = 32767;
-const scale_max = 7900; // Probably more. Save some for the system prompt defined on Scale site.
-const claude_max = 8000; // We have a proper tokenizer, so theoretically could be larger (up to 9k)
+const scale_max = 8191; // Probably more. Save some for the system prompt defined on Scale site.
+const claude_max = 9000; // We have a proper tokenizer, so theoretically could be larger (up to 9k)
 const palm2_max = 7500; // The real context window is 8192, spare some for padding due to using turbo tokenizer
 const claude_100k_max = 99000;
 let ai21_max = 9200; //can easily fit 9k gpt tokens because j2's tokenizer is efficient af
@@ -219,6 +219,7 @@ const default_settings = {
     proxy_password: '',
     assistant_prefill: '',
     use_ai21_tokenizer: false,
+    use_alt_scale: true,
 };
 
 const oai_settings = {
@@ -260,6 +261,7 @@ const oai_settings = {
     proxy_password: '',
     assistant_prefill: '',
     use_ai21_tokenizer: false,
+    use_alt_scale: true,
 };
 
 let openai_setting_names;
@@ -1062,6 +1064,35 @@ function saveModelList(data) {
     }
 }
 
+async function sendAltScaleRequest(openai_msgs_tosend, signal) {
+    const generate_url = '/generate_altscale';
+
+    let firstMsg = substituteParams(openai_msgs_tosend[0].content);
+    let subsequentMsgs = openai_msgs_tosend.slice(1);
+
+    const joinedMsgs = subsequentMsgs.reduce((acc, obj) => {
+        return acc + obj.role + ": " + obj.content + "\n";
+    }, "");
+    openai_msgs_tosend = substituteParams(joinedMsgs);
+    console.log(openai_msgs_tosend)
+
+    const generate_data = {
+        sysprompt: firstMsg,
+        prompt: openai_msgs_tosend,
+        temp: parseFloat(oai_settings.temp_openai),
+        max_tokens: parseFloat(oai_settings.openai_max_tokens),
+    }
+
+    const response = await fetch(generate_url, {
+        method: 'POST',
+        body: JSON.stringify(generate_data),
+        headers: getRequestHeaders(),
+        signal: signal
+    });
+    const data = await response.json();
+    return data.output;
+}
+
 async function sendOpenAIRequest(type, openai_msgs_tosend, signal) {
     // Provide default abort signal
     if (!signal) {
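Reviewer note (not part of the diff): a minimal sketch of how the new sendAltScaleRequest helper is exercised, assuming it runs inside an async function in the same module with an OpenAI-style {role, content} message array already in hand. The first message becomes the Scale system prompt; the rest are flattened into "role: content" lines and posted to /generate_altscale, which answers with the completion text in data.output.

    // Sketch only — the message array and abort controller below are illustrative.
    const controller = new AbortController();
    const messages = [
        { role: 'system', content: 'You are a helpful assistant.' }, // becomes sysprompt
        { role: 'user', content: 'Hello there!' },                   // flattened into the prompt
    ];
    const reply = await sendAltScaleRequest(messages, controller.signal);
    console.log(reply); // plain completion text returned by /generate_altscale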
@@ -1092,6 +1123,10 @@ async function sendOpenAIRequest(type, openai_msgs_tosend, signal) {
         openai_msgs_tosend = substituteParams(joinedMsgs);
     }
 
+    if (isScale && !!$('#scale-alt').prop('checked')) {
+        return sendAltScaleRequest(openai_msgs_tosend, signal)
+    }
+
     // If we're using the window.ai extension, use that instead
     // Doesn't support logit bias yet
     if (oai_settings.chat_completion_source == chat_completion_sources.WINDOWAI) {
@@ -1934,6 +1969,7 @@ function loadOpenAISettings(data, settings) {
     if (settings.names_in_completion !== undefined) oai_settings.names_in_completion = !!settings.names_in_completion;
     if (settings.openai_model !== undefined) oai_settings.openai_model = settings.openai_model;
     if (settings.use_ai21_tokenizer !== undefined) oai_settings.use_ai21_tokenizer = !!settings.use_ai21_tokenizer;
+    if (settings.use_alt_scale !== undefined) { oai_settings.use_alt_scale = !!settings.use_alt_scale; updateScaleForm(); }
     $('#stream_toggle').prop('checked', oai_settings.stream_openai);
     $('#api_url_scale').val(oai_settings.api_url_scale);
     $('#openai_proxy_password').val(oai_settings.proxy_password);
@@ -1963,6 +1999,7 @@ function loadOpenAISettings(data, settings) {
     $('#openai_show_external_models').prop('checked', oai_settings.show_external_models);
     $('#openai_external_category').toggle(oai_settings.show_external_models);
     $('#use_ai21_tokenizer').prop('checked', oai_settings.use_ai21_tokenizer);
+    $('#scale-alt').prop('checked', oai_settings.use_alt_scale);
     if (settings.impersonation_prompt !== undefined) oai_settings.impersonation_prompt = settings.impersonation_prompt;
 
     $('#impersonation_prompt_textarea').val(oai_settings.impersonation_prompt);
@@ -2160,6 +2197,7 @@ async function saveOpenAIPreset(name, settings, triggerUi = true) {
         show_external_models: settings.show_external_models,
         assistant_prefill: settings.assistant_prefill,
         use_ai21_tokenizer: settings.use_ai21_tokenizer,
+        use_alt_scale: settings.use_alt_scale,
     };
 
     const savePresetSettings = await fetch(`/savepreset_openai?name=${name}`, {
@@ -2496,6 +2534,7 @@ function onSettingsPresetChange() {
         proxy_password: ['#openai_proxy_password', 'proxy_password', false],
         assistant_prefill: ['#claude_assistant_prefill', 'assistant_prefill', false],
         use_ai21_tokenizer: ['#use_ai21_tokenizer', 'use_ai21_tokenizer', false],
+        use_alt_scale: ['#use_alt_scale', 'use_alt_scale', false],
     };
 
     const presetName = $('#settings_perset_openai').find(":selected").text();
@@ -2791,20 +2830,31 @@ async function onConnectButtonClick(e) {
 
     if (oai_settings.chat_completion_source == chat_completion_sources.SCALE) {
         const api_key_scale = $('#api_key_scale').val().trim();
+        const scale_cookie = $('#scale_cookie').val().trim();
 
         if (api_key_scale.length) {
             await writeSecret(SECRET_KEYS.SCALE, api_key_scale);
         }
 
-        if (!oai_settings.api_url_scale) {
+        if (scale_cookie.length) {
+            await writeSecret(SECRET_KEYS.SCALE_COOKIE, scale_cookie);
+        }
+
+        if (!oai_settings.api_url_scale && !oai_settings.use_alt_scale) {
             console.log('No API URL saved for Scale');
             return;
         }
 
-        if (!secret_state[SECRET_KEYS.SCALE]) {
+        if (!secret_state[SECRET_KEYS.SCALE] && !oai_settings.use_alt_scale) {
             console.log('No secret key saved for Scale');
             return;
         }
+
+        if (!secret_state[SECRET_KEYS.SCALE_COOKIE] && oai_settings.use_alt_scale) {
+            console.log("No cookie set for Scale");
+            return;
+        }
+
     }
 
     if (oai_settings.chat_completion_source == chat_completion_sources.CLAUDE) {
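Reviewer note (not part of the diff): the reworked connect checks above amount to two mutually exclusive requirements, restated here as a hypothetical helper purely for review purposes — normal mode needs the SCALE API key plus a deployment URL, while the alt (cookie) mode only needs the SCALE_COOKIE secret.

    // Hypothetical restatement of the new validation, not committed code.
    function canConnectToScale({ useAltScale, hasApiKey, hasApiUrl, hasCookie }) {
        return useAltScale
            ? hasCookie               // alt method: SECRET_KEYS.SCALE_COOKIE must be set
            : hasApiKey && hasApiUrl; // normal method: SECRET_KEYS.SCALE and api_url_scale must be set
    }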
@@ -2914,11 +2964,27 @@ function onProxyPasswordShowClick() {
     $(this).toggleClass('fa-eye-slash fa-eye');
 }
 
+function updateScaleForm() {
+    if (oai_settings.use_alt_scale) {
+        $('#normal_scale_form').css('display', 'none');
+        $('#alt_scale_form').css('display', '');
+    } else {
+        $('#normal_scale_form').css('display', '');
+        $('#alt_scale_form').css('display', 'none');
+    }
+}
+
 $(document).ready(async function () {
     await loadTokenCache();
 
     $('#test_api_button').on('click', testApiConnection);
 
+    $('#scale-alt').on('change', function () {
+        oai_settings.use_alt_scale = !!$('#scale-alt').prop('checked');
+        saveSettingsDebounced();
+        updateScaleForm();
+    });
+
     $(document).on('input', '#temp_openai', function () {
         oai_settings.temp_openai = $(this).val();
         $('#temp_counter_openai').text(Number($(this).val()).toFixed(2));
@@ -9,6 +9,7 @@ export const SECRET_KEYS = {
     OPENROUTER: 'api_key_openrouter',
     SCALE: 'api_key_scale',
     AI21: 'api_key_ai21',
+    SCALE_COOKIE: 'scale_cookie',
 }
 
 const INPUT_MAP = {
@@ -20,6 +21,7 @@ const INPUT_MAP = {
     [SECRET_KEYS.OPENROUTER]: '#api_key_openrouter',
     [SECRET_KEYS.SCALE]: '#api_key_scale',
     [SECRET_KEYS.AI21]: '#api_key_ai21',
+    [SECRET_KEYS.SCALE_COOKIE]: '#scale_cookie',
 }
 
 async function clearSecret() {
server.js (72 changes)
@@ -3231,6 +3231,75 @@ async function sendScaleRequest(request, response) {
     }
 }
 
+app.post("/generate_altscale", jsonParser, function (request, response_generate_scale) {
+    if(!request.body) return response_generate_scale.sendStatus(400);
+
+    fetch('https://dashboard.scale.com/spellbook/api/trpc/v2.variant.run', {
+        method: 'POST',
+        headers: {
+            'Content-Type': 'application/json',
+            'cookie': `_jwt=${readSecret(SECRET_KEYS.SCALE_COOKIE)}`,
+        },
+        body: JSON.stringify({
+            json: {
+                variant: {
+                    name: 'New Variant',
+                    appId: '',
+                    taxonomy: null
+                },
+                prompt: {
+                    id: '',
+                    template: '{{input}}\n',
+                    exampleVariables: {},
+                    variablesSourceDataId: null,
+                    systemMessage: request.body.sysprompt
+                },
+                modelParameters: {
+                    id: '',
+                    modelId: 'GPT4',
+                    modelType: 'OpenAi',
+                    maxTokens: request.body.max_tokens,
+                    temperature: request.body.temp,
+                    stop: null,
+                    suffix: null,
+                    topP: null,
+                    logprobs: null,
+                    logitBias: null
+                },
+                inputs: [
+                    {
+                        index: '-1',
+                        valueByName: {
+                            input: request.body.prompt
+                        }
+                    }
+                ]
+            },
+            meta: {
+                values: {
+                    'variant.taxonomy': ['undefined'],
+                    'prompt.variablesSourceDataId': ['undefined'],
+                    'modelParameters.stop': ['undefined'],
+                    'modelParameters.suffix': ['undefined'],
+                    'modelParameters.topP': ['undefined'],
+                    'modelParameters.logprobs': ['undefined'],
+                    'modelParameters.logitBias': ['undefined']
+                }
+            }
+        })
+    })
+        .then(response => response.json())
+        .then(data => {
+            console.log(data.result.data.json.outputs[0])
+            return response_generate_scale.send({output: data.result.data.json.outputs[0]});
+        })
+        .catch((error) => {
+            console.error('Error:', error)
+            return response_generate_scale.send({error: true})
+        });
+
+});
+
 async function sendClaudeRequest(request, response) {
     const fetch = require('node-fetch').default;
 
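Reviewer note (not part of the diff): a sketch of exercising the new /generate_altscale route locally, assuming SillyTavern's default localhost port and omitting any CSRF header the real UI attaches via getRequestHeaders(). The body fields mirror what sendAltScaleRequest sends; the route replies with { output } on success or { error: true } on failure.

    // Sketch only — the port and the bare headers are assumptions, not part of the commit.
    const fetch = require('node-fetch').default;

    async function tryAltScale() {
        const res = await fetch('http://127.0.0.1:8000/generate_altscale', {
            method: 'POST',
            headers: { 'Content-Type': 'application/json' },
            body: JSON.stringify({
                sysprompt: 'You are a helpful assistant.',
                prompt: 'user: Hello!\n',
                temp: 0.7,
                max_tokens: 300,
            }),
        });
        const data = await res.json();
        console.log(data.output ?? data); // { output: "..." } or { error: true }
    }

    tryAltScale();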
@@ -3917,7 +3986,8 @@ const SECRET_KEYS = {
     DEEPL: 'deepl',
     OPENROUTER: 'api_key_openrouter',
     SCALE: 'api_key_scale',
-    AI21: 'api_key_ai21'
+    AI21: 'api_key_ai21',
+    SCALE_COOKIE: 'scale_cookie',
 }
 
 function migrateSecrets() {