Merge branch 'staging' into neo-server

Cohee 2024-04-20 02:56:05 +03:00
commit b3bbec83b6
16 changed files with 256 additions and 19 deletions

View File

@ -535,6 +535,10 @@
"filename": "presets/context/Command R.json",
"type": "context"
},
{
"filename": "presets/context/Llama 3 Instruct.json",
"type": "context"
},
{
"filename": "presets/instruct/Adventure.json",
"type": "instruct"
@ -623,6 +627,10 @@
"filename": "presets/instruct/Command R.json",
"type": "instruct"
},
{
"filename": "presets/instruct/Llama 3 Instruct.json",
"type": "instruct"
},
{
"filename": "presets/moving-ui/Default.json",
"type": "moving_ui"

View File

@ -0,0 +1,12 @@
{
"story_string": "<|start_header_id|>system<|end_header_id|>\n\n{{#if system}}{{system}}\n{{/if}}{{#if wiBefore}}{{wiBefore}}\n{{/if}}{{#if description}}{{description}}\n{{/if}}{{#if personality}}{{char}}'s personality: {{personality}}\n{{/if}}{{#if scenario}}Scenario: {{scenario}}\n{{/if}}{{#if wiAfter}}{{wiAfter}}\n{{/if}}{{#if persona}}{{persona}}\n{{/if}}{{trim}}<|eot_id|>",
"example_separator": "",
"chat_start": "",
"use_stop_strings": false,
"allow_jailbreak": false,
"always_force_name2": true,
"trim_sentences": false,
"include_newline": false,
"single_line": false,
"name": "Llama 3 Instruct"
}
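For illustration, with a hypothetical character card that only has a description, personality, and scenario filled in (no system prompt, World Info, or persona), the story_string above renders to a single Llama 3 system block along these lines; {{trim}} drops the trailing newline before the end-of-turn token:

<|start_header_id|>system<|end_header_id|>

Alice is a cheerful tavern keeper in a small coastal town.
Alice's personality: warm, talkative, fiercely loyal
Scenario: A traveler arrives at the tavern on a stormy night.<|eot_id|>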

View File

@ -0,0 +1,24 @@
{
"system_prompt": "You are an expert actor that can fully immerse yourself into any role given. You do not break character for any reason, even if someone tries addressing you as an AI or language model. Currently your role is {{char}}, which is described in detail below. As {{char}}, continue the exchange with {{user}}.",
"input_sequence": "<|start_header_id|>user<|end_header_id|>\n\n",
"output_sequence": "<|start_header_id|>assistant<|end_header_id|>\n\n",
"last_output_sequence": "",
"system_sequence": "<|start_header_id|>system<|end_header_id|>\n\n",
"stop_sequence": "<|eot_id|>",
"wrap": false,
"macro": true,
"names": true,
"names_force_groups": true,
"activation_regex": "",
"system_sequence_prefix": "",
"system_sequence_suffix": "",
"first_output_sequence": "",
"skip_examples": false,
"output_suffix": "<|eot_id|>",
"input_suffix": "<|eot_id|>",
"system_suffix": "<|eot_id|>",
"user_alignment_message": "",
"system_same_as_user": false,
"last_system_sequence": "",
"name": "Llama 3 Instruct"
}
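Combined with the context template, these sequences reproduce the standard Llama 3 Instruct chat format: every turn opens with a role header and closes with <|eot_id|>, and wrap: false keeps turns back to back. A hypothetical single exchange would be assembled roughly like this (with names: true, each message body would additionally be prefixed with the speaker's name); the trailing assistant header is where generation starts:

<|start_header_id|>system<|end_header_id|>

You are an expert actor that can fully immerse yourself into any role given. [...]<|eot_id|><|start_header_id|>user<|end_header_id|>

Hello, who are you?<|eot_id|><|start_header_id|>assistant<|end_header_id|>
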

View File

@ -0,0 +1,3 @@
<svg width="400" height="400" viewBox="0 0 400 400" fill="none" xmlns="http://www.w3.org/2000/svg">
<path fill-rule="evenodd" clip-rule="evenodd" d="M101.008 42L190.99 124.905L190.99 124.886L190.99 42.1913H208.506L208.506 125.276L298.891 42V136.524L336 136.524V272.866H299.005V357.035L208.506 277.525L208.506 357.948H190.99L190.99 278.836L101.11 358V272.866H64V136.524H101.008V42ZM177.785 153.826H81.5159V255.564H101.088V223.472L177.785 153.826ZM118.625 231.149V319.392L190.99 255.655L190.99 165.421L118.625 231.149ZM209.01 254.812V165.336L281.396 231.068V272.866H281.489V318.491L209.01 254.812ZM299.005 255.564H318.484V153.826L222.932 153.826L299.005 222.751V255.564ZM281.375 136.524V81.7983L221.977 136.524L281.375 136.524ZM177.921 136.524H118.524V81.7983L177.921 136.524Z" />
</svg>


View File

@ -471,7 +471,7 @@
</span>
</div>
</div>
<div class="range-block" data-source="openai,claude,windowai,openrouter,ai21,scale,makersuite,mistralai,custom,cohere">
<div class="range-block" data-source="openai,claude,windowai,openrouter,ai21,scale,makersuite,mistralai,custom,cohere,perplexity">
<div class="range-block-title" data-i18n="Temperature">
Temperature
</div>
@ -484,7 +484,7 @@
</div>
</div>
</div>
<div data-newbie-hidden class="range-block" data-source="openai,openrouter,ai21,custom,cohere">
<div data-newbie-hidden class="range-block" data-source="openai,openrouter,ai21,custom,cohere,perplexity">
<div class="range-block-title" data-i18n="Frequency Penalty">
Frequency Penalty
</div>
@ -497,7 +497,7 @@
</div>
</div>
</div>
<div data-newbie-hidden class="range-block" data-source="openai,openrouter,ai21,custom,cohere">
<div data-newbie-hidden class="range-block" data-source="openai,openrouter,ai21,custom,cohere,perplexity">
<div class="range-block-title" data-i18n="Presence Penalty">
Presence Penalty
</div>
@ -523,7 +523,7 @@
</div>
</div>
</div>
<div data-newbie-hidden class="range-block" data-source="claude,openrouter,ai21,makersuite,cohere">
<div data-newbie-hidden class="range-block" data-source="claude,openrouter,ai21,makersuite,cohere,perplexity">
<div class="range-block-title" data-i18n="Top K">
Top K
</div>
@ -536,7 +536,7 @@
</div>
</div>
</div>
<div data-newbie-hidden class="range-block" data-source="openai,claude,openrouter,ai21,scale,makersuite,mistralai,custom,cohere">
<div data-newbie-hidden class="range-block" data-source="openai,claude,openrouter,ai21,scale,makersuite,mistralai,custom,cohere,perplexity">
<div class="range-block-title" data-i18n="Top-p">
Top P
</div>
@ -2307,6 +2307,7 @@
<option value="makersuite">Google MakerSuite</option>
<option value="mistralai">MistralAI</option>
<option value="openrouter">OpenRouter</option>
<option value="perplexity">Perplexity</option>
<option value="scale">Scale</option>
<option value="windowai">Window AI</option>
</optgroup>
@ -2705,6 +2706,33 @@
</select>
</div>
</form>
<div id="perplexity_form" data-source="perplexity">
<h4 data-i18n="Perplexity API Key">Perplexity API Key</h4>
<div class="flex-container">
<input id="api_key_perplexity" name="api_key_perplexity" class="text_pole flex1" maxlength="500" value="" type="text" autocomplete="off">
<div title="Clear your API key" data-i18n="[title]Clear your API key" class="menu_button fa-solid fa-circle-xmark clear-api-key" data-key="api_key_perplexity"></div>
</div>
<div data-for="api_key_perplexity" class="neutral_warning">
For privacy reasons, your API key will be hidden after you reload the page.
</div>
<h4 data-i18n="Perplexity Model">Perplexity Model</h4>
<select id="model_perplexity_select">
<optgroup label="Perplexity Models">
<option value="sonar-small-chat">sonar-small-chat</option>
<option value="sonar-small-online">sonar-small-online</option>
<option value="sonar-medium-chat">sonar-medium-chat</option>
<option value="sonar-medium-online">sonar-medium-online</option>
</optgroup>
<optgroup label="Open-Source Models">
<option value="llama-3-8b-instruct">llama-3-8b-instruct</option>
<option value="llama-3-70b-instruct">llama-3-70b-instruct</option>
<option value="codellama-70b-instruct">codellama-70b-instruct</option>
<option value="mistral-7b-instruct">mistral-7b-instruct (v0.2)</option>
<option value="mixtral-8x7b-instruct">mixtral-8x7b-instruct</option>
<option value="mixtral-8x22b-instruct">mixtral-8x22b-instruct</option>
</optgroup>
</select>
</div>
<form id="cohere_form" data-source="cohere" action="javascript:void(null);" method="post" enctype="multipart/form-data">
<h4 data-i18n="Cohere API Key">Cohere API Key</h4>
<div class="flex-container">

View File

@ -5657,7 +5657,7 @@ async function getChat() {
contentType: 'application/json',
});
if (response[0] !== undefined) {
chat.push(...response);
chat.splice(0, chat.length, ...response);
chat_create_date = chat[0]['create_date'];
chat_metadata = chat[0]['chat_metadata'] ?? {};
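Swapping push for splice matters here because the chat array is shared by reference across modules: splice replaces its contents in place instead of appending the loaded messages after whatever was already there. A minimal sketch with hypothetical arrays:

const chat = ['stale message'];
const response = ['loaded 1', 'loaded 2'];
// chat.push(...response);                 // would yield ['stale message', 'loaded 1', 'loaded 2']
chat.splice(0, chat.length, ...response);  // yields ['loaded 1', 'loaded 2'] in the same array object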
@ -8208,10 +8208,15 @@ const CONNECT_API_MAP = {
source: chat_completion_sources.CUSTOM,
},
'cohere': {
selected: 'cohere',
selected: 'openai',
button: '#api_button_openai',
source: chat_completion_sources.COHERE,
},
'perplexity': {
selected: 'openai',
button: '#api_button_openai',
source: chat_completion_sources.PERPLEXITY,
},
'infermaticai': {
selected: 'textgenerationwebui',
button: '#api_button_textgenerationwebui',
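These map entries are keyed by the argument of the /api slash command, so with the new entry (assuming that wiring is unchanged) the source can also be switched from the chat input:

/api perplexity

which selects the Chat Completion API, sets the completion source to Perplexity, and clicks #api_button_openai to connect.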

View File

@ -376,6 +376,7 @@ function RA_autoconnect(PrevApi) {
|| (secret_state[SECRET_KEYS.MAKERSUITE] && oai_settings.chat_completion_source == chat_completion_sources.MAKERSUITE)
|| (secret_state[SECRET_KEYS.MISTRALAI] && oai_settings.chat_completion_source == chat_completion_sources.MISTRALAI)
|| (secret_state[SECRET_KEYS.COHERE] && oai_settings.chat_completion_source == chat_completion_sources.COHERE)
|| (secret_state[SECRET_KEYS.PERPLEXITY] && oai_settings.chat_completion_source == chat_completion_sources.PERPLEXITY)
|| (isValidUrl(oai_settings.custom_url) && oai_settings.chat_completion_source == chat_completion_sources.CUSTOM)
) {
$('#api_button_openai').trigger('click');

View File

@ -1084,6 +1084,7 @@ async function getExpressionLabel(text) {
case EXPRESSION_API.llm: {
const expressionsList = await getExpressionsList();
const prompt = await getLlmPrompt(expressionsList);
eventSource.once(event_types.TEXT_COMPLETION_SETTINGS_READY, onTextGenSettingsReady);
const emotionResponse = await generateQuietPrompt(prompt, false, false);
return parseLlmResponse(emotionResponse, expressionsList);
}
@ -1964,7 +1965,6 @@ function migrateSettings() {
});
eventSource.on(event_types.MOVABLE_PANELS_RESET, updateVisualNovelModeDebounced);
eventSource.on(event_types.GROUP_UPDATED, updateVisualNovelModeDebounced);
eventSource.on(event_types.TEXT_COMPLETION_SETTINGS_READY, onTextGenSettingsReady);
registerSlashCommand('sprite', setSpriteSlashCommand, ['emote'], '<span class="monospace">(spriteId)</span> force sets the sprite for the current character', true, true);
registerSlashCommand('spriteoverride', setSpriteSetCommand, ['costume'], '<span class="monospace">(optional folder)</span> sets an override sprite folder for the current character. If the name starts with a slash or a backslash, selects a sub-folder in the character-named folder. Empty value to reset to default.', true, true);
registerSlashCommand('lastsprite', (_, value) => lastExpression[value.trim()] ?? '', [], '<span class="monospace">(charName)</span> Returns the last set sprite / expression for the named character.', true, true);

View File

@ -172,6 +172,7 @@ export const chat_completion_sources = {
MISTRALAI: 'mistralai',
CUSTOM: 'custom',
COHERE: 'cohere',
PERPLEXITY: 'perplexity',
};
const character_names_behavior = {
@ -238,6 +239,7 @@ const default_settings = {
ai21_model: 'j2-ultra',
mistralai_model: 'mistral-medium-latest',
cohere_model: 'command-r',
perplexity_model: 'llama-3-70b-instruct',
custom_model: '',
custom_url: '',
custom_include_body: '',
@ -310,6 +312,7 @@ const oai_settings = {
ai21_model: 'j2-ultra',
mistralai_model: 'mistral-medium-latest',
cohere_model: 'command-r',
perplexity_model: 'llama-3-70b-instruct',
custom_model: '',
custom_url: '',
custom_include_body: '',
@ -1427,6 +1430,8 @@ function getChatCompletionModel() {
return oai_settings.custom_model;
case chat_completion_sources.COHERE:
return oai_settings.cohere_model;
case chat_completion_sources.PERPLEXITY:
return oai_settings.perplexity_model;
default:
throw new Error(`Unknown chat completion source: ${oai_settings.chat_completion_source}`);
}
@ -1652,6 +1657,7 @@ async function sendOpenAIRequest(type, messages, signal) {
const isMistral = oai_settings.chat_completion_source == chat_completion_sources.MISTRALAI;
const isCustom = oai_settings.chat_completion_source == chat_completion_sources.CUSTOM;
const isCohere = oai_settings.chat_completion_source == chat_completion_sources.COHERE;
const isPerplexity = oai_settings.chat_completion_source == chat_completion_sources.PERPLEXITY;
const isTextCompletion = (isOAI && textCompletionModels.includes(oai_settings.openai_model)) || (isOpenRouter && oai_settings.openrouter_force_instruct && power_user.instruct.enabled);
const isQuiet = type === 'quiet';
const isImpersonate = type === 'impersonate';
@ -1799,6 +1805,16 @@ async function sendOpenAIRequest(type, messages, signal) {
generate_data['websearch'] = oai_settings.websearch_cohere;
}
if (isPerplexity) {
generate_data['top_k'] = Number(oai_settings.top_k_openai);
// Normalize the value: for Perplexity, 1 == disabled, whereas 0 is the usual disabled state in OpenAI-style APIs.
generate_data['frequency_penalty'] = Math.max(0, Number(oai_settings.freq_pen_openai)) + 1;
generate_data['presence_penalty'] = Number(oai_settings.pres_pen_openai);
// Perplexity's otherwise OpenAI-compatible API does not support the 'stop' parameter, so it is removed.
delete generate_data['stop'];
}
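A quick worked example of the frequency penalty normalization above (slider values are hypothetical):

// OpenAI-style slider value           -> value sent to Perplexity
// freq_pen_openai =  0   (disabled)   -> Math.max(0,  0)   + 1 = 1    (Perplexity's disabled value)
// freq_pen_openai =  0.3              -> Math.max(0,  0.3) + 1 = 1.3
// freq_pen_openai = -0.5 (negative)   -> Math.max(0, -0.5) + 1 = 1    (clamped to 0, then shifted)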
if ((isOAI || isOpenRouter || isMistral || isCustom || isCohere) && oai_settings.seed >= 0) {
generate_data['seed'] = oai_settings.seed;
}
@ -1869,7 +1885,7 @@ function getStreamingReply(data) {
} else if (oai_settings.chat_completion_source == chat_completion_sources.MAKERSUITE) {
return data?.candidates?.[0]?.content?.parts?.[0]?.text || '';
} else {
return data.choices[0]?.delta?.content || data.choices[0]?.message?.content || data.choices[0]?.text || '';
return data.choices[0]?.delta?.content ?? data.choices[0]?.message?.content ?? data.choices[0]?.text ?? '';
}
}
@ -2665,6 +2681,7 @@ function loadOpenAISettings(data, settings) {
oai_settings.ai21_model = settings.ai21_model ?? default_settings.ai21_model;
oai_settings.mistralai_model = settings.mistralai_model ?? default_settings.mistralai_model;
oai_settings.cohere_model = settings.cohere_model ?? default_settings.cohere_model;
oai_settings.perplexity_model = settings.perplexity_model ?? default_settings.perplexity_model;
oai_settings.custom_model = settings.custom_model ?? default_settings.custom_model;
oai_settings.custom_url = settings.custom_url ?? default_settings.custom_url;
oai_settings.custom_include_body = settings.custom_include_body ?? default_settings.custom_include_body;
@ -2730,6 +2747,8 @@ function loadOpenAISettings(data, settings) {
$(`#model_mistralai_select option[value="${oai_settings.mistralai_model}"`).attr('selected', true);
$('#model_cohere_select').val(oai_settings.cohere_model);
$(`#model_cohere_select option[value="${oai_settings.cohere_model}"`).attr('selected', true);
$('#model_perplexity_select').val(oai_settings.perplexity_model);
$(`#model_perplexity_select option[value="${oai_settings.perplexity_model}"`).attr('selected', true);
$('#custom_model_id').val(oai_settings.custom_model);
$('#custom_api_url_text').val(oai_settings.custom_url);
$('#openai_max_context').val(oai_settings.openai_max_context);
@ -2879,7 +2898,7 @@ async function getStatusOpen() {
return resultCheckStatus();
}
const noValidateSources = [chat_completion_sources.SCALE, chat_completion_sources.CLAUDE, chat_completion_sources.AI21, chat_completion_sources.MAKERSUITE];
const noValidateSources = [chat_completion_sources.SCALE, chat_completion_sources.CLAUDE, chat_completion_sources.AI21, chat_completion_sources.MAKERSUITE, chat_completion_sources.PERPLEXITY];
if (noValidateSources.includes(oai_settings.chat_completion_source)) {
let status = 'Unable to verify key; press "Test Message" to validate.';
setOnlineStatus(status);
@ -2970,6 +2989,7 @@ async function saveOpenAIPreset(name, settings, triggerUi = true) {
ai21_model: settings.ai21_model,
mistralai_model: settings.mistralai_model,
cohere_model: settings.cohere_model,
perplexity_model: settings.perplexity_model,
custom_model: settings.custom_model,
custom_url: settings.custom_url,
custom_include_body: settings.custom_include_body,
@ -3362,6 +3382,7 @@ function onSettingsPresetChange() {
ai21_model: ['#model_ai21_select', 'ai21_model', false],
mistralai_model: ['#model_mistralai_select', 'mistralai_model', false],
cohere_model: ['#model_cohere_select', 'cohere_model', false],
perplexity_model: ['#model_perplexity_select', 'perplexity_model', false],
custom_model: ['#custom_model_id', 'custom_model', false],
custom_url: ['#custom_api_url_text', 'custom_url', false],
custom_include_body: ['#custom_include_body', 'custom_include_body', false],
@ -3585,6 +3606,11 @@ async function onModelChange() {
oai_settings.cohere_model = value;
}
if ($(this).is('#model_perplexity_select')) {
console.log('Perplexity model changed to', value);
oai_settings.perplexity_model = value;
}
if (value && $(this).is('#model_custom_select')) {
console.log('Custom model changed to', value);
oai_settings.custom_model = value;
@ -3730,6 +3756,28 @@ async function onModelChange() {
$('#temp_openai').attr('max', claude_max_temp).val(oai_settings.temp_openai).trigger('input');
}
if (oai_settings.chat_completion_source === chat_completion_sources.PERPLEXITY) {
if (oai_settings.max_context_unlocked) {
$('#openai_max_context').attr('max', unlocked_max);
}
else if (['sonar-small-chat', 'sonar-medium-chat', 'codellama-70b-instruct', 'mistral-7b-instruct', 'mixtral-8x7b-instruct', 'mixtral-8x22b-instruct'].includes(oai_settings.perplexity_model)) {
$('#openai_max_context').attr('max', max_16k);
}
else if (['llama-3-8b-instruct', 'llama-3-70b-instruct'].includes(oai_settings.perplexity_model)) {
$('#openai_max_context').attr('max', max_8k);
}
else if (['sonar-small-online', 'sonar-medium-online'].includes(oai_settings.perplexity_model)) {
$('#openai_max_context').attr('max', 12000);
}
else {
$('#openai_max_context').attr('max', max_4k);
}
oai_settings.openai_max_context = Math.min(Number($('#openai_max_context').attr('max')), oai_settings.openai_max_context);
$('#openai_max_context').val(oai_settings.openai_max_context).trigger('input');
oai_settings.temp_openai = Math.min(oai_max_temp, oai_settings.temp_openai);
$('#temp_openai').attr('max', oai_max_temp).val(oai_settings.temp_openai).trigger('input');
}
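A worked example of the clamping (the saved value is hypothetical):

// A preset saved with another source had openai_max_context = 32768.
// Switching to 'sonar-small-online'   -> slider max becomes 12000, stored value clamped to 12000.
// Switching to 'llama-3-70b-instruct' -> slider max becomes max_8k, stored value clamped accordingly.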
if (oai_settings.chat_completion_source == chat_completion_sources.AI21) {
if (oai_settings.max_context_unlocked) {
$('#openai_max_context').attr('max', unlocked_max);
@ -3937,6 +3985,19 @@ async function onConnectButtonClick(e) {
}
}
if (oai_settings.chat_completion_source == chat_completion_sources.PERPLEXITY) {
const api_key_perplexity = String($('#api_key_perplexity').val()).trim();
if (api_key_perplexity.length) {
await writeSecret(SECRET_KEYS.PERPLEXITY, api_key_perplexity);
}
if (!secret_state[SECRET_KEYS.PERPLEXITY]) {
console.log('No secret key saved for Perplexity');
return;
}
}
startStatusLoading();
saveSettingsDebounced();
await getStatusOpen();
@ -3975,6 +4036,9 @@ function toggleChatCompletionForms() {
else if (oai_settings.chat_completion_source == chat_completion_sources.COHERE) {
$('#model_cohere_select').trigger('change');
}
else if (oai_settings.chat_completion_source == chat_completion_sources.PERPLEXITY) {
$('#model_perplexity_select').trigger('change');
}
else if (oai_settings.chat_completion_source == chat_completion_sources.CUSTOM) {
$('#model_custom_select').trigger('change');
}
@ -4644,6 +4708,7 @@ $(document).ready(async function () {
$('#model_ai21_select').on('change', onModelChange);
$('#model_mistralai_select').on('change', onModelChange);
$('#model_cohere_select').on('change', onModelChange);
$('#model_perplexity_select').on('change', onModelChange);
$('#model_custom_select').on('change', onModelChange);
$('#settings_preset_openai').on('change', onSettingsPresetChange);
$('#new_oai_preset').on('click', onNewPresetClick);

View File

@ -35,7 +35,7 @@ import {
} from './instruct-mode.js';
import { registerSlashCommand } from './slash-commands.js';
import { tags } from './tags.js';
import { tag_map, tags } from './tags.js';
import { tokenizers } from './tokenizers.js';
import { BIAS_CACHE } from './logit-bias.js';
import { renderTemplateAsync } from './templates.js';
@ -2327,9 +2327,65 @@ function doNewChat() {
}, 1);
}
async function doRandomChat() {
/**
* Finds the ID of the tag with the given name.
* @param {string} name The tag name to look up.
* @returns {string|undefined} The ID of the matching tag, or undefined if no live tag matches.
*/
function findTagIdByName(name) {
const matchTypes = [
(a, b) => a === b,
(a, b) => a.startsWith(b),
(a, b) => a.includes(b),
];
// Only consider tags that are assigned to at least one character in the tag_map
const liveTagIds = new Set(Object.values(tag_map).flat());
const liveTags = tags.filter(x => liveTagIds.has(x.id));
const exactNameMatchIndex = liveTags.map(x => x.name.toLowerCase()).indexOf(name.toLowerCase());
if (exactNameMatchIndex !== -1) {
return liveTags[exactNameMatchIndex].id;
}
for (const matchType of matchTypes) {
const index = liveTags.findIndex(x => matchType(x.name.toLowerCase(), name.toLowerCase()));
if (index !== -1) {
return liveTags[index].id;
}
}
}
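The helper tries match types in order of strictness. For hypothetical live tags named 'Fantasy' and 'Fan Fiction':

// findTagIdByName('fantasy')  -> id of 'Fantasy'       (exact, case-insensitive)
// findTagIdByName('fiction')  -> id of 'Fan Fiction'   (substring match; no exact or prefix hit)
// findTagIdByName('fan')      -> id of whichever of the two comes first in tags (both are prefix matches)
// findTagIdByName('horror')   -> undefined             (no live tag matches)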
async function doRandomChat(_, tagName) {
/**
* Gets the ID of a random character.
* @returns {string|undefined} The order index of the randomly selected character, or undefined if none could be found.
*/
function getRandomCharacterId() {
if (!tagName) {
return Math.floor(Math.random() * characters.length).toString();
}
const tagId = findTagIdByName(tagName);
const taggedCharacters = Object.entries(tag_map)
.filter(x => x[1].includes(tagId)) // Get only records that include the tag
.map(x => x[0]) // Map the character avatar
.filter(x => characters.find(y => y.avatar === x)); // Filter out characters that don't exist
const randomCharacter = taggedCharacters[Math.floor(Math.random() * taggedCharacters.length)];
const randomIndex = characters.findIndex(x => x.avatar === randomCharacter);
if (randomIndex === -1) {
return;
}
return randomIndex.toString();
}
resetSelectedGroup();
const characterId = Math.floor(Math.random() * characters.length).toString();
const characterId = getRandomCharacterId();
if (!characterId) {
toastr.error('No characters found');
return;
}
setCharacterId(characterId);
setActiveCharacter(characters[characterId]?.avatar);
setActiveGroup(null);
@ -3522,7 +3578,7 @@ $(document).ready(() => {
registerSlashCommand('vn', toggleWaifu, [], ' swaps Visual Novel Mode On/Off', false, true);
registerSlashCommand('newchat', doNewChat, [], ' start a new chat with current character', true, true);
registerSlashCommand('random', doRandomChat, [], ' start a new chat with a random character', true, true);
registerSlashCommand('random', doRandomChat, [], '<span class="monospace">(optional tag name)</span> start a new chat with a random character. If an argument is provided, only considers characters that have the specified tag.', true, true);
registerSlashCommand('delmode', doDelMode, ['del'], '<span class="monospace">(optional number)</span> enter message deletion mode, and auto-deletes last N messages if numeric argument is provided', true, true);
registerSlashCommand('cut', doMesCut, [], '<span class="monospace">(number or range)</span> cuts the specified message or continuous chunk from the chat, e.g. <tt>/cut 0-10</tt>. Ranges are inclusive! Returns the text of cut messages separated by a newline.', true, true);
registerSlashCommand('resetpanels', doResetPanels, ['resetui'], ' resets UI panels to original state.', true, true);
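With the optional tag argument, the updated command works either way (the tag name is hypothetical):

/random            start a new chat with any random character
/random vampire    start a new chat with a random character that has the "vampire" tag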

View File

@ -24,6 +24,7 @@ export const SECRET_KEYS = {
KOBOLDCPP: 'api_key_koboldcpp',
LLAMACPP: 'api_key_llamacpp',
COHERE: 'api_key_cohere',
PERPLEXITY: 'api_key_perplexity',
};
const INPUT_MAP = {
@ -49,6 +50,7 @@ const INPUT_MAP = {
[SECRET_KEYS.KOBOLDCPP]: '#api_key_koboldcpp',
[SECRET_KEYS.LLAMACPP]: '#api_key_llamacpp',
[SECRET_KEYS.COHERE]: '#api_key_cohere',
[SECRET_KEYS.PERPLEXITY]: '#api_key_perplexity',
};
async function clearSecret() {

View File

@ -47,7 +47,7 @@ import { autoSelectPersona } from './personas.js';
import { addEphemeralStoppingString, chat_styles, flushEphemeralStoppingStrings, power_user } from './power-user.js';
import { textgen_types, textgenerationwebui_settings } from './textgen-settings.js';
import { decodeTextTokens, getFriendlyTokenizerName, getTextTokens, getTokenCountAsync } from './tokenizers.js';
import { delay, isFalseBoolean, isTrueBoolean, stringToRange, trimToEndSentence, trimToStartSentence, waitUntilCondition } from './utils.js';
import { debounce, delay, isFalseBoolean, isTrueBoolean, stringToRange, trimToEndSentence, trimToStartSentence, waitUntilCondition } from './utils.js';
import { registerVariableCommands, resolveVariable } from './variables.js';
import { background_settings } from './backgrounds.js';
@ -1682,6 +1682,7 @@ function modelCallback(_, model) {
{ id: 'model_mistralai_select', api: 'openai', type: chat_completion_sources.MISTRALAI },
{ id: 'model_custom_select', api: 'openai', type: chat_completion_sources.CUSTOM },
{ id: 'model_cohere_select', api: 'openai', type: chat_completion_sources.COHERE },
{ id: 'model_perplexity_select', api: 'openai', type: chat_completion_sources.PERPLEXITY },
{ id: 'model_novel_select', api: 'novel', type: null },
{ id: 'horde_model', api: 'koboldhorde', type: null },
];
@ -1863,11 +1864,24 @@ export async function executeSlashCommands(text, unescape = false) {
return { interrupt, newText, pipe: pipeResult };
}
/**
* @param {JQuery<HTMLElement>} textarea
*/
function setSlashCommandAutocomplete(textarea) {
const nativeElement = textarea.get(0);
let width = 0;
function setItemWidth() {
width = nativeElement.offsetWidth - 5;
}
const setWidthDebounced = debounce(setItemWidth);
$(window).on('resize', () => setWidthDebounced());
textarea.autocomplete({
source: (input, output) => {
// Only show for slash commands (requiring at least 1 letter after the slash) and if there's no space
if (!input.term.startsWith('/') || input.term.includes(' ') || input.term === '/') {
if (!input.term.startsWith('/') || input.term.includes(' ')) {
output([]);
return;
}
@ -1877,7 +1891,7 @@ function setSlashCommandAutocomplete(textarea) {
.keys(parser.helpStrings) // Get all slash commands
.filter(x => x.startsWith(slashCommand)) // Filter by the input
.sort((a, b) => a.localeCompare(b)) // Sort alphabetically
.slice(0, 5) // Limit to 5 results
.slice(0, 50) // Limit to 50 results
.map(x => ({ label: parser.helpStrings[x], value: `/${x} ` })); // Map to the help string
output(result); // Return the results
@ -1891,10 +1905,11 @@ function setSlashCommandAutocomplete(textarea) {
});
textarea.autocomplete('instance')._renderItem = function (ul, item) {
const width = $(textarea).innerWidth();
const content = $('<div></div>').html(item.label);
return $('<li>').width(width).append(content).appendTo(ul);
};
setItemWidth();
}
jQuery(function () {

View File

@ -497,6 +497,15 @@ export function getTokenizerModel() {
return oai_settings.custom_model;
}
if (oai_settings.chat_completion_source === chat_completion_sources.PERPLEXITY) {
if (oai_settings.perplexity_model.includes('llama')) {
return llamaTokenizer;
}
if (oai_settings.perplexity_model.includes('mistral')) {
return mistralTokenizer;
}
}
// Default to the GPT-3.5 Turbo tokenizer
return turboTokenizer;
}
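Under this substring heuristic, the Perplexity models in the new dropdown resolve as follows:

// llama-3-8b-instruct, llama-3-70b-instruct, codellama-70b-instruct   -> llamaTokenizer
// mistral-7b-instruct                                                  -> mistralTokenizer
// mixtral-8x7b-instruct, mixtral-8x22b-instruct                        -> turboTokenizer ('mixtral' does not contain 'mistral')
// sonar-small-chat, sonar-small-online, sonar-medium-chat/online       -> turboTokenizer (default)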

View File

@ -191,6 +191,7 @@ const CHAT_COMPLETION_SOURCES = {
MISTRALAI: 'mistralai',
CUSTOM: 'custom',
COHERE: 'cohere',
PERPLEXITY: 'perplexity',
};
const UPLOADS_PATH = './uploads';

View File

@ -14,6 +14,7 @@ const API_OPENAI = 'https://api.openai.com/v1';
const API_CLAUDE = 'https://api.anthropic.com/v1';
const API_MISTRAL = 'https://api.mistral.ai/v1';
const API_COHERE = 'https://api.cohere.ai/v1';
const API_PERPLEXITY = 'https://api.perplexity.ai';
/**
* Applies a post-processing step to the generated messages.
@ -439,7 +440,7 @@ async function sendAI21Request(request, response) {
} else {
console.log(r.completions[0].data.text);
}
const reply = { choices: [{ 'message': { 'content': r.completions[0].data.text } }] };
const reply = { choices: [{ 'message': { 'content': r.completions?.[0]?.data?.text } }] };
return response.send(reply);
})
.catch(err => {
@ -896,6 +897,12 @@ router.post('/generate', jsonParser, function (request, response) {
request.body.char_name,
request.body.user_name);
}
} else if (request.body.chat_completion_source === CHAT_COMPLETION_SOURCES.PERPLEXITY) {
apiUrl = API_PERPLEXITY;
apiKey = readSecret(SECRET_KEYS.PERPLEXITY);
headers = {};
bodyParams = {};
request.body.messages = postProcessPrompt(request.body.messages, 'claude', request.body.char_name, request.body.user_name);
} else {
console.log('This chat completion source is not supported yet.');
return response.status(400).send({ error: true });
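For reference, Perplexity exposes an OpenAI-compatible chat completions endpoint, and the 'claude' prompt post-processing merges consecutive same-role messages, presumably because Perplexity also expects user and assistant roles to alternate. Assuming the shared request path below this branch appends /chat/completions and a Bearer token, as it does for the other OpenAI-compatible sources, the upstream call looks roughly like this sketch (model, key variable, and message contents are placeholders, not the actual server code):

const apiKey = process.env.PERPLEXITY_API_KEY; // stands in for readSecret(SECRET_KEYS.PERPLEXITY)
const upstream = await fetch('https://api.perplexity.ai/chat/completions', {
    method: 'POST',
    headers: {
        'Content-Type': 'application/json',
        'Authorization': `Bearer ${apiKey}`,
    },
    body: JSON.stringify({
        model: 'llama-3-70b-instruct',
        messages: [
            { role: 'system', content: 'You are an expert actor...' },
            { role: 'user', content: 'Hello!' },
        ],
        temperature: 1,
        top_k: 0,
        frequency_penalty: 1, // already shifted to Perplexity's scale by the client
        stream: true,
    }),
});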

View File

@ -36,6 +36,7 @@ const SECRET_KEYS = {
KOBOLDCPP: 'api_key_koboldcpp',
LLAMACPP: 'api_key_llamacpp',
COHERE: 'api_key_cohere',
PERPLEXITY: 'api_key_perplexity',
};
// These are the keys that are safe to expose, even if allowKeysExposure is false