Mirror of https://github.com/SillyTavern/SillyTavern.git, synced 2025-01-20 21:41:32 +01:00

Merge branch 'staging' of https://github.com/Cohee1207/SillyTavern into staging

Commit: 174c178485
@@ -5,7 +5,7 @@
     <title>SillyTavern</title>
     <base href="/">
     <meta charset="utf-8">
-    <meta name="viewport" content="width=device-width, viewport-fit=cover, initial-scale=1, maximum-scale=1.0, user-scalable=no">
+    <meta name="viewport" content="width=device-width, viewport-fit=cover, initial-scale=1, maximum-scale=1.0, user-scalable=no, interactive-widget=resizes-content">
    <meta name="apple-mobile-web-app-capable" content="yes">
    <meta name="darkreader-lock">
    <meta name="robots" content="noindex, nofollow" />
@@ -1278,6 +1278,28 @@
                </div>
            </div>
        </div>
+
+        <div data-newbie-hidden data-tg-type="koboldcpp" id="xtc_block" class="wide100p">
+            <h4 class="wide100p textAlignCenter">
+                <label data-i18n="Exclude Top Choices (XTC)">Exclude Top Choices (XTC)</label>
+                <a href="https://github.com/oobabooga/text-generation-webui/pull/6335" target="_blank">
+                    <div class=" fa-solid fa-circle-info opacity50p"></div>
+                </a>
+            </h4>
+            <div class="flex-container flexFlowRow gap10px flexShrink">
+                <div class="alignitemscenter flex-container flexFlowColumn flexBasis48p flexGrow flexShrink gap0">
+                    <small data-i18n="Threshold">Threshold</small>
+                    <input class="neo-range-slider" type="range" id="xtc_threshold_textgenerationwebui" name="volume" min="0" max="0.5" step="0.01" />
+                    <input class="neo-range-input" type="number" min="0" max="0.5" step="0.01" data-for="xtc_threshold_textgenerationwebui" id="xtc_threshold_counter_textgenerationwebui">
+                </div>
+                <div class="alignitemscenter flex-container flexFlowColumn flexBasis48p flexGrow flexShrink gap0">
+                    <small data-i18n="Probability">Probability</small>
+                    <input class="neo-range-slider" type="range" id="xtc_probability_textgenerationwebui" name="volume" min="0" max="1" step="0.01" />
+                    <input class="neo-range-input" type="number" min="0" max="1" step="0.01" data-for="xtc_probability_textgenerationwebui" id="xtc_probability_counter_textgenerationwebui">
+                </div>
+            </div>
+        </div>
+
        <!-- Enable for llama.cpp when the PR is merged: https://github.com/ggerganov/llama.cpp/pull/6839 -->
        <div data-newbie-hidden data-tg-type="ooba, koboldcpp" id="dryBlock" class="wide100p">
            <h4 class="wide100p textAlignCenter" title="DRY penalizes tokens that would extend the end of the input into a sequence that has previously occurred in the input. Set multiplier to 0 to disable." data-i18n="[title]DRY_Repetition_Penalty_desc">
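For orientation, the two sliders added above only surface parameters; the sampling behaviour itself lives in the backend described in the linked text-generation-webui PR. As that PR describes it, XTC activates with probability `xtc_probability` on a given sampling step and, when it does, removes every candidate token whose probability is at or above `xtc_threshold` except the least likely of them, which steers generation away from the most predictable continuations. A minimal illustrative sketch of that rule (plain JavaScript, not code from this repository; the function name and token shape are invented):

// Sketch only: tokens is an array of { id, prob } sorted by descending prob.
function applyXtc(tokens, threshold, probability, rng = Math.random) {
    if (rng() >= probability) return tokens;              // XTC skipped on this step
    const topChoices = tokens.filter(t => t.prob >= threshold);
    if (topChoices.length < 2) return tokens;             // nothing to exclude
    const keepId = topChoices[topChoices.length - 1].id;  // least likely "top choice" survives
    return tokens.filter(t => t.prob < threshold || t.id === keepId);
}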
@@ -1823,7 +1845,7 @@
                    <div class="fa-solid fa-clock-rotate-left"></div>
                </div>
            </div>
-            <textarea id="claude_human_sysprompt_textarea" class="text_pole textarea_compact" rows="4" maxlength="10000" data-i18n="[placeholder]Human message" placeholder="Human message, instruction, etc. Adds nothing when empty, i.e. requires a new prompt with the role 'user'."></textarea>
+            <textarea id="claude_human_sysprompt_textarea" class="text_pole textarea_compact" rows="4" data-i18n="[placeholder]Human message" placeholder="Human message, instruction, etc. Adds nothing when empty, i.e. requires a new prompt with the role 'user'."></textarea>
        </div>
    </div>
</div>
@@ -2931,6 +2953,8 @@
                <option value="command">command</option>
                <option value="command-r">command-r</option>
                <option value="command-r-plus">command-r-plus</option>
+                <option value="command-r-08-2024">command-r-08-2024</option>
+                <option value="command-r-plus-08-2024">command-r-plus-08-2024</option>
            </optgroup>
            <optgroup label="Nightly">
                <option value="command-light-nightly">command-light-nightly</option>

public/script.js (120 changed lines)
@@ -488,14 +488,6 @@ let default_user_name = 'User';
 export let name1 = default_user_name;
 export let name2 = 'SillyTavern System';
 export let chat = [];
-let safetychat = [
-    {
-        name: systemUserName,
-        is_user: false,
-        create_date: 0,
-        mes: 'You deleted a character/chat and arrived back here for safety reasons! Pick another character!',
-    },
-];
 let chatSaveTimeout;
 let importFlashTimeout;
 export let isChatSaving = false;
@@ -594,6 +586,17 @@ export const extension_prompt_roles = {

 export const MAX_INJECTION_DEPTH = 1000;

+const SAFETY_CHAT = [
+    {
+        name: systemUserName,
+        force_avatar: system_avatar,
+        is_system: true,
+        is_user: false,
+        create_date: 0,
+        mes: 'You deleted a character/chat and arrived back here for safety reasons! Pick another character!',
+    },
+];
+
 export let system_messages = {};

 async function getSystemMessages() {
@@ -3760,7 +3763,7 @@ export async function Generate(type, { automatic_trigger, force_name2, quiet_pro
     }

     let examplesString = '';
-    let chatString = '';
+    let chatString = addChatsPreamble(addChatsSeparator(''));
     let cyclePrompt = '';

     async function getMessagesTokenCount() {
@@ -3769,10 +3772,10 @@ export async function Generate(type, { automatic_trigger, force_name2, quiet_pro
             storyString,
             afterScenarioAnchor,
             examplesString,
-            chatString,
-            quiet_prompt,
-            cyclePrompt,
             userAlignmentMessage,
+            chatString,
+            modifyLastPromptLine(''),
+            cyclePrompt,
         ].join('').replace(/\r/gm, '');
         return getTokenCountAsync(encodeString, power_user.token_padding);
     }
@@ -3803,8 +3806,8 @@ export async function Generate(type, { automatic_trigger, force_name2, quiet_pro
        }

        tokenCount += await getTokenCountAsync(item.replace(/\r/gm, ''));
-        chatString = item + chatString;
        if (tokenCount < this_max_context) {
+            chatString = chatString + item;
            arrMes[index] = item;
            lastAddedIndex = Math.max(lastAddedIndex, index);
        } else {
@@ -3830,8 +3833,8 @@ export async function Generate(type, { automatic_trigger, force_name2, quiet_pro
        }

        tokenCount += await getTokenCountAsync(item.replace(/\r/gm, ''));
-        chatString = item + chatString;
        if (tokenCount < this_max_context) {
+            chatString = chatString + item;
            arrMes[i] = item;
            lastAddedIndex = Math.max(lastAddedIndex, i);
        } else {
@@ -4028,15 +4031,16 @@ export async function Generate(type, { automatic_trigger, force_name2, quiet_pro
     async function checkPromptSize() {
         console.debug('---checking Prompt size');
         setPromptString();
+        const jointMessages = mesSend.map((e) => `${e.extensionPrompts.join('')}${e.message}`).join('');
         const prompt = [
             beforeScenarioAnchor,
             storyString,
             afterScenarioAnchor,
             mesExmString,
-            mesSend.map((e) => `${e.extensionPrompts.join('')}${e.message}`).join(''),
-            '\n',
+            addChatsPreamble(addChatsSeparator(jointMessages)),
+            modifyLastPromptLine(''),
             generatedPromptCache,
             quiet_prompt,
         ].join('').replace(/\r/gm, '');
         let thisPromptContextSize = await getTokenCountAsync(prompt, power_user.token_padding);

@@ -5679,7 +5683,7 @@ export function resetChatState() {
     // replaces deleted charcter name with system user since it will be displayed next.
     name2 = systemUserName;
     // sets up system user to tell user about having deleted a character
-    chat = [...safetychat];
+    chat.splice(0, chat.length, ...SAFETY_CHAT);
     // resets chat metadata
     chat_metadata = {};
     // resets the characters array, forcing getcharacters to reset
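A note on why this line uses splice instead of reassignment: `chat` is a module-level array, and other parts of the app can hold direct references to it (anything that captured it earlier, such as event handlers or extension code). Mutating it in place keeps every such holder looking at the same, freshly reset array, whereas assigning a new array would leave captured references pointing at stale data. A standalone sketch of the difference (not SillyTavern code):

// Resetting in place vs. reassigning.
let chat = [{ mes: 'old message' }];
const SAFETY_CHAT = [{ mes: 'You deleted a character/chat!' }];

const capturedRef = chat;                     // anything that grabbed the array earlier
chat.splice(0, chat.length, ...SAFETY_CHAT);  // same array object, new contents
console.log(capturedRef === chat);            // true: the captured reference stays in sync

// chat = [...SAFETY_CHAT];                   // new object; capturedRef would now be stale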
@@ -8840,72 +8844,74 @@ export async function handleDeleteCharacter(this_chid, delete_chats) {
 /**
  * Deletes a character completely, including associated chats if specified
  *
- * @param {string} characterKey - The key (avatar) of the character to be deleted
+ * @param {string|string[]} characterKey - The key (avatar) of the character to be deleted
  * @param {Object} [options] - Optional parameters for the deletion
  * @param {boolean} [options.deleteChats=true] - Whether to delete associated chats or not
  * @return {Promise<void>} - A promise that resolves when the character is successfully deleted
  */
 export async function deleteCharacter(characterKey, { deleteChats = true } = {}) {
-    const character = characters.find(x => x.avatar == characterKey);
-    if (!character) {
-        toastr.warning(`Character ${characterKey} not found. Cannot be deleted.`);
-        return;
+    if (!Array.isArray(characterKey)) {
+        characterKey = [characterKey];
     }

-    const chid = characters.indexOf(character);
-    const pastChats = await getPastCharacterChats(chid);
-
-    const msg = { avatar_url: character.avatar, delete_chats: deleteChats };
-
-    const response = await fetch('/api/characters/delete', {
-        method: 'POST',
-        headers: getRequestHeaders(),
-        body: JSON.stringify(msg),
-        cache: 'no-cache',
-    });
-
-    if (!response.ok) {
-        throw new Error(`Failed to delete character: ${response.status} ${response.statusText}`);
-    }
-
-    await removeCharacterFromUI(character.name, character.avatar);
-
-    if (deleteChats) {
-        for (const chat of pastChats) {
-            const name = chat.file_name.replace('.jsonl', '');
-            await eventSource.emit(event_types.CHAT_DELETED, name);
+    for (const key of characterKey) {
+        const character = characters.find(x => x.avatar == key);
+        if (!character) {
+            toastr.warning(`Character ${key} not found. Skipping deletion.`);
+            continue;
+        }
+
+        const chid = characters.indexOf(character);
+        const pastChats = await getPastCharacterChats(chid);
+
+        const msg = { avatar_url: character.avatar, delete_chats: deleteChats };
+
+        const response = await fetch('/api/characters/delete', {
+            method: 'POST',
+            headers: getRequestHeaders(),
+            body: JSON.stringify(msg),
+            cache: 'no-cache',
+        });
+
+        if (!response.ok) {
+            toastr.error(`${response.status} ${response.statusText}`, 'Failed to delete character');
+            continue;
+        }
+
+        delete tag_map[character.avatar];
+        select_rm_info('char_delete', character.name);
+
+        if (deleteChats) {
+            for (const chat of pastChats) {
+                const name = chat.file_name.replace('.jsonl', '');
+                await eventSource.emit(event_types.CHAT_DELETED, name);
+            }
         }
+
+        await eventSource.emit(event_types.CHARACTER_DELETED, { id: chid, character: character });
     }

-    eventSource.emit(event_types.CHARACTER_DELETED, { id: this_chid, character: characters[this_chid] });
+    await removeCharacterFromUI();
 }

 /**
  * Function to delete a character from UI after character deletion API success.
  * It manages necessary UI changes such as closing advanced editing popup, unsetting
  * character ID, resetting characters array and chat metadata, deselecting character's tab
- * panel, removing character name from navigation tabs, clearing chat, removing character's
- * avatar from tag_map, fetching updated list of characters and updating the 'deleted
- * character' message.
+ * panel, removing character name from navigation tabs, clearing chat, fetching updated list of characters.
  * It also ensures to save the settings after all the operations.
- *
- * @param {string} name - The name of the character to be deleted.
- * @param {string} avatar - The avatar URL of the character to be deleted.
- * @param {boolean} reloadCharacters - Whether the character list should be refreshed after deletion.
  */
-async function removeCharacterFromUI(name, avatar, reloadCharacters = true) {
+async function removeCharacterFromUI() {
     await clearChat();
     $('#character_cross').click();
     this_chid = undefined;
     characters.length = 0;
     name2 = systemUserName;
-    chat = [...safetychat];
+    chat.splice(0, chat.length, ...SAFETY_CHAT);
     chat_metadata = {};
     $(document.getElementById('rm_button_selected_ch')).children('h2').text('');
     this_chid = undefined;
-    delete tag_map[avatar];
-    if (reloadCharacters) await getCharacters();
-    select_rm_info('char_delete', name);
+    await getCharacters();
     await printMessages();
     saveSettingsDebounced();
 }
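The net effect of the rework above is that `deleteCharacter` now accepts either a single avatar key or an array of them, and a failure for one character logs a toast and moves on instead of aborting the whole batch. A hedged usage sketch (the avatar file names are invented):

// Hypothetical avatar keys; real values come from characters[...].avatar.
await deleteCharacter('seraphina.png');                                   // single character
await deleteCharacter(['alice.png', 'bob.png'], { deleteChats: false });  // bulk delete, keep chats
// An unknown key only produces a warning toast; the loop continues with the rest.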
@@ -108,14 +108,12 @@ class CharacterContextMenu
     * Delete one or more characters,
     * opens a popup.
     *
-    * @param {number} characterId
+    * @param {string|string[]} characterKey
     * @param {boolean} [deleteChats]
     * @returns {Promise<void>}
     */
-    static delete = async (characterId, deleteChats = false) => {
-        const character = CharacterContextMenu.#getCharacter(characterId);
-
-        await deleteCharacter(character.avatar, { deleteChats: deleteChats });
+    static delete = async (characterKey, deleteChats = false) => {
+        await deleteCharacter(characterKey, { deleteChats: deleteChats });
     };

     static #getCharacter = (characterId) => characters[characterId] ?? null;
@@ -344,7 +342,7 @@ class BulkTagPopupHandler
        const mutualTags = this.getMutualTags();

        for (const characterId of this.characterIds) {
-            for(const tag of mutualTags) {
+            for (const tag of mutualTags) {
                removeTagFromMap(tag.id, characterId);
            }
        }
@@ -599,8 +597,7 @@ class BulkEditOverlay

                this.container.removeEventListener('mouseup', cancelHold);
                this.container.removeEventListener('touchend', cancelHold);
-            },
-            BulkEditOverlay.longPressDelay);
+            }, BulkEditOverlay.longPressDelay);
    };

    handleLongPressEnd = (event) => {
@@ -847,11 +844,14 @@ class BulkEditOverlay
        const deleteChats = document.getElementById('del_char_checkbox').checked ?? false;

        showLoader();
-        toastr.info('We\'re deleting your characters, please wait...', 'Working on it');
-        return Promise.allSettled(characterIds.map(async characterId => CharacterContextMenu.delete(characterId, deleteChats)))
-            .then(() => getCharacters())
+        const toast = toastr.info('We\'re deleting your characters, please wait...', 'Working on it');
+        const avatarList = characterIds.map(id => characters[id]?.avatar).filter(a => a);
+        return CharacterContextMenu.delete(avatarList, deleteChats)
            .then(() => this.browseState())
-            .finally(() => hideLoader());
+            .finally(() => {
+                toastr.clear(toast);
+                hideLoader();
+            });
    });

    // At this moment the popup is already changed in the dom, but not yet closed/resolved. We build the avatar list here
@@ -78,6 +78,19 @@ export function getKayraMaxContextTokens()
     return null;
 }

+export function getKayraMaxResponseTokens() {
+    switch (novel_data?.tier) {
+        case 1:
+            return 100;
+        case 2:
+            return 100;
+        case 3:
+            return 150;
+    }
+
+    return maximum_output_length;
+}
+
 export function getNovelTier() {
     return nai_tiers[novel_data?.tier] ?? 'no_connection';
 }
@@ -438,12 +451,14 @@ export function getNovelGenerationData(finalPrompt, settings, maxLength, isImper
        console.log(finalPrompt);
    }

+    const adjustedMaxLength = nai_settings.model_novel.includes('kayra') ? getKayraMaxResponseTokens() : maximum_output_length;
+
    return {
        'input': finalPrompt,
        'model': nai_settings.model_novel,
        'use_string': true,
        'temperature': Number(nai_settings.temperature),
-        'max_length': maxLength < maximum_output_length ? maxLength : maximum_output_length,
+        'max_length': maxLength < adjustedMaxLength ? maxLength : adjustedMaxLength,
        'min_length': Number(nai_settings.min_length),
        'tail_free_sampling': Number(nai_settings.tail_free_sampling),
        'repetition_penalty': Number(nai_settings.repetition_penalty),
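To make the two NovelAI hunks above concrete: the new tier lookup caps Kayra's response length for lower subscription tiers, and the request builder then sends the smaller of the requested length and that cap (the ternary is equivalent to Math.min). A small illustration; the 250-token default is an assumption about maximum_output_length, and the tier-to-cap mapping simply mirrors the switch above:

// Illustration only, not code from nai-settings.js.
const maximum_output_length = 250;             // assumed default
const kayraTierCaps = { 1: 100, 2: 100, 3: 150 };

function clampKayraResponse(requestedLength, tier) {
    const cap = kayraTierCaps[tier] ?? maximum_output_length;
    return Math.min(requestedLength, cap);     // value sent as 'max_length'
}

console.log(clampKayraResponse(300, 3));       // 150: the tier cap wins
console.log(clampKayraResponse(120, 3));       // 120: the requested length wins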
@@ -133,7 +133,7 @@ const max_2mil = 2000 * 1000;
 const scale_max = 8191;
 const claude_max = 9000; // We have a proper tokenizer, so theoretically could be larger (up to 9k)
 const claude_100k_max = 99000;
-const unlocked_max = max_200k;
+const unlocked_max = max_2mil;
 const oai_max_temp = 2.0;
 const claude_max_temp = 1.0;
 const openrouter_website_model = 'OR_Website';
@@ -4191,7 +4191,7 @@ async function onModelChange()
    else if (['command-light-nightly', 'command-nightly'].includes(oai_settings.cohere_model)) {
        $('#openai_max_context').attr('max', max_8k);
    }
-    else if (['command-r', 'command-r-plus'].includes(oai_settings.cohere_model)) {
+    else if (oai_settings.cohere_model.includes('command-r')) {
        $('#openai_max_context').attr('max', max_128k);
    }
    else if (['c4ai-aya-23'].includes(oai_settings.cohere_model)) {
@@ -188,6 +188,8 @@ const settings = {
     custom_model: '',
     bypass_status_check: false,
     openrouter_allow_fallbacks: true,
+    xtc_threshold: 0.1,
+    xtc_probability: 0,
 };

 export let textgenerationwebui_banned_in_macros = [];
@@ -263,6 +265,8 @@ export const setting_names = [
     'custom_model',
     'bypass_status_check',
     'openrouter_allow_fallbacks',
+    'xtc_threshold',
+    'xtc_probability',
 ];

 const DYNATEMP_BLOCK = document.getElementById('dynatemp_block_ooba');
@@ -718,6 +722,8 @@ jQuery(function () {
        'dry_multiplier_textgenerationwebui': 0,
        'dry_base_textgenerationwebui': 1.75,
        'dry_penalty_last_n_textgenerationwebui': 0,
+        'xtc_threshold_textgenerationwebui': 0.1,
+        'xtc_probability_textgenerationwebui': 0,
    };

    for (const [id, value] of Object.entries(inputs)) {
@@ -1156,6 +1162,8 @@ export function getTextGenGenerationData(finalPrompt, maxTokens, isImpersonate,
        'api_server': getTextGenServer(),
        'legacy_api': settings.legacy_api && (settings.type === OOBA || settings.type === APHRODITE),
        'sampler_order': settings.type === textgen_types.KOBOLDCPP ? settings.sampler_order : undefined,
+        'xtc_threshold': settings.xtc_threshold,
+        'xtc_probability': settings.xtc_probability,
    };
    const nonAphroditeParams = {
        'rep_pen': settings.rep_pen,
@@ -4777,8 +4777,10 @@ jQuery(() => {
        world_info_min_activations = Number($(this).val());
        $('#world_info_min_activations_counter').val(world_info_min_activations);

-        if (world_info_min_activations !== 0) {
+        if (world_info_min_activations !== 0 && world_info_max_recursion_steps !== 0) {
            $('#world_info_max_recursion_steps').val(0).trigger('input');
+            flashHighlight($('#world_info_max_recursion_steps').parent()); // flash the other control to show it has changed
+            console.info('[WI] Max recursion steps set to 0, as min activations is set to', world_info_min_activations);
        } else {
            saveSettings();
        }
@@ -4840,8 +4842,10 @@ jQuery(() => {
    $('#world_info_max_recursion_steps').on('input', function () {
        world_info_max_recursion_steps = Number($(this).val());
        $('#world_info_max_recursion_steps_counter').val(world_info_max_recursion_steps);
-        if (world_info_max_recursion_steps !== 0) {
+        if (world_info_max_recursion_steps !== 0 && world_info_min_activations !== 0) {
            $('#world_info_min_activations').val(0).trigger('input');
+            flashHighlight($('#world_info_min_activations').parent()); // flash the other control to show it has changed
+            console.info('[WI] Min activations set to 0, as max recursion steps is set to', world_info_max_recursion_steps);
        } else {
            saveSettings();
        }
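For readers following the World Info handlers above: Min Activations and Max Recursion Steps are treated as mutually exclusive, so editing one zeroes the other. The added `&& ... !== 0` checks make the reset, the highlight flash, and the console notice fire only when the other control actually holds a non-zero value; otherwise the handler just saves settings. A condensed sketch of that behaviour with plain variables in place of the jQuery controls (invented function name):

// Sketch: two mutually exclusive numeric settings.
let minActivations = 0;
let maxRecursionSteps = 2;

function onMinActivationsInput(value) {
    minActivations = value;
    if (minActivations !== 0 && maxRecursionSteps !== 0) {
        maxRecursionSteps = 0;                 // zero the conflicting setting
        console.info('Max recursion steps reset to 0');
    } else {
        console.info('Nothing to reset, just save');
    }
}

onMinActivationsInput(3);  // resets maxRecursionSteps to 0
onMinActivationsInput(5);  // maxRecursionSteps is already 0, so no reset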
@@ -572,6 +572,11 @@ async function sendCohereRequest(request, response)
        search_queries_only: false,
    };

+    const canDoSafetyMode = String(request.body.model).endsWith('08-2024');
+    if (canDoSafetyMode) {
+        requestBody.safety_mode = 'NONE';
+    }
+
    console.log('Cohere request:', requestBody);

    const config = {
@@ -6,6 +6,7 @@ const { readAllChunks, extractFileFromZipBuffer, forwardFetchResponse } = requir
 const { jsonParser } = require('../express-common');

 const API_NOVELAI = 'https://api.novelai.net';
+const TEXT_NOVELAI = 'https://text.novelai.net';
 const IMAGE_NOVELAI = 'https://image.novelai.net';

 // Ban bracket generation, plus defaults
@@ -155,7 +156,7 @@ router.post('/generate', jsonParser, async function (req, res) {
        'repetition_penalty_slope': req.body.repetition_penalty_slope,
        'repetition_penalty_frequency': req.body.repetition_penalty_frequency,
        'repetition_penalty_presence': req.body.repetition_penalty_presence,
-        'repetition_penalty_whitelist': isNewModel ? repPenaltyAllowList : null,
+        'repetition_penalty_whitelist': isNewModel ? repPenaltyAllowList.flat() : null,
        'top_a': req.body.top_a,
        'top_p': req.body.top_p,
        'top_k': req.body.top_k,
@@ -178,9 +179,7 @@
    };

    // Tells the model to stop generation at '>'
-    if ('theme_textadventure' === req.body.prefix &&
-        (true === req.body.model.includes('clio') ||
-            true === req.body.model.includes('kayra'))) {
+    if ('theme_textadventure' === req.body.prefix && isNewModel) {
        data.parameters.eos_token_id = 49405;
    }

@@ -193,7 +192,8 @@
    };

    try {
-        const url = req.body.streaming ? `${API_NOVELAI}/ai/generate-stream` : `${API_NOVELAI}/ai/generate`;
+        const baseURL = req.body.model.includes('kayra') ? TEXT_NOVELAI : API_NOVELAI;
+        const url = req.body.streaming ? `${baseURL}/ai/generate-stream` : `${baseURL}/ai/generate`;
        const response = await fetch(url, { method: 'POST', timeout: 0, ...args });

        if (req.body.streaming) {