Merge branch 'staging' into tags-as-folders-enhancements

commit 32049b7e1d

public/context/DreamGen Role-Play V1.json (new file)
@@ -0,0 +1,11 @@
+{
+    "story_string": "<|im_start|>system\n{{#if system}}{{system}}\n\n\n{{/if}}## Overall plot description:\n\n{{#if scenario}}{{scenario}}{{else}}Conversation between {{char}} and {{user}}.{{/if}}{{#if wiBefore}}\n\n{{wiBefore}}{{/if}}\n\n\n## Characters:\n\n### {{char}}\n\n{{#if description}}{{description}}\n\n{{/if}}{{#if personality}}{{personality}}\n\n{{/if}}### {{user}}\n\n{{#if persona}}{{persona}}{{else}}{{user}} is the protagonist of the role-play.{{/if}}{{#if wiAfter}}\n\n{{wiAfter}}{{/if}}{{#if mesExamples}}\n\n{{mesExamples}}{{/if}}",
+    "example_separator": "",
+    "chat_start": "",
+    "use_stop_strings": false,
+    "always_force_name2": false,
+    "trim_sentences": true,
+    "include_newline": false,
+    "single_line": false,
+    "name": "DreamGen Role-Play V1"
+}
public/img/dreamgen.svg (new file, 408 B)
@@ -0,0 +1 @@
+<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 24 24" fill="currentColor" aria-hidden="true" data-slot="icon"><path fill-rule="evenodd" d="M9.528 1.718a.75.75 0 0 1 .162.819A8.97 8.97 0 0 0 9 6a9 9 0 0 0 9 9 8.97 8.97 0 0 0 3.463-.69.75.75 0 0 1 .981.98 10.503 10.503 0 0 1-9.694 6.46c-5.799 0-10.5-4.7-10.5-10.5 0-4.368 2.667-8.112 6.46-9.694a.75.75 0 0 1 .818.162Z" clip-rule="evenodd"></path></svg>
@@ -1271,7 +1271,7 @@
 <input class="neo-range-slider" type="range" id="no_repeat_ngram_size_textgenerationwebui" name="volume" min="0" max="20" step="1">
 <input class="neo-range-input" type="number" min="0" max="20" step="1" data-for="no_repeat_ngram_size_textgenerationwebui" id="no_repeat_ngram_size_counter_textgenerationwebui">
 </div>
-<div data-newbie-hidden data-tg-type="ooba" class="alignitemscenter flex-container flexFlowColumn flexBasis48p flexGrow flexShrink gap0">
+<div data-newbie-hidden data-tg-type="ooba, dreamgen" class="alignitemscenter flex-container flexFlowColumn flexBasis48p flexGrow flexShrink gap0">
 <small data-i18n="Min Length">Min Length</small>
 <input class="neo-range-slider" type="range" id="min_length_textgenerationwebui" name="volume" min="0" max="2000" step="1" />
 <input class="neo-range-input" type="number" min="0" max="2000" step="1" data-for="min_length_textgenerationwebui" id="min_length_counter_textgenerationwebui">
@@ -1784,7 +1784,7 @@
 account for faster queue times</a>
 </li>
 <li>
-<a target="_blank" href="https://github.com/db0/AI-Horde-Worker#readme" data-i18n="Learn how to contribute your idle GPU cycles to the Horde">Learn
+<a target="_blank" href="https://github.com/Haidra-Org/horde-worker-reGen?tab=readme-ov-file#ai-horde-worker-regen" data-i18n="Learn how to contribute your idle GPU cycles to the Horde">Learn
 how to contribute your idle GPU cycles to the Horde</a>
 </li>
 </ul>
@@ -1897,15 +1897,16 @@
 <h4 data-i18n="API Type">API Type</h4>
 <select id="textgen_type">
 <option value="ooba" data-i18n="Default (oobabooga)">Default (oobabooga)</option>
-<option value="mancer">Mancer</option>
 <option value="aphrodite">Aphrodite</option>
-<option value="tabby">TabbyAPI</option>
+<option value="dreamgen">DreamGen</option>
+<option value="infermaticai">InfermaticAI</option>
 <option value="koboldcpp">KoboldCpp</option>
 <option value="llamacpp">llama.cpp</option>
-<option value="openrouter">OpenRouter</option>
+<option value="mancer">Mancer</option>
 <option value="ollama">Ollama</option>
+<option value="openrouter">OpenRouter</option>
+<option value="tabby">TabbyAPI</option>
 <option value="togetherai">TogetherAI</option>
-<option value="infermaticai">InfermaticAI</option>
 </select>
 </div>
 <div data-tg-type="togetherai" class="flex-container flexFlowColumn">
@@ -1968,6 +1969,29 @@
 </select>
 </div>
 </div>
+<div data-tg-type="dreamgen" class="flex-container flexFlowColumn">
+<h4 data-i18n="DreamGen API key">
+DreamGen API key
+<a href="https://dreamgen.com/account/api-keys" class="notes-link" target="_blank">
+<span class="fa-solid fa-circle-question note-link-span"></span>
+</a>
+</h4>
+<div class="flex-container">
+<input id="api_key_dreamgen" name="api_key_dreamgen" class="text_pole flex1" maxlength="500" value="" type="text" autocomplete="off">
+<div title="Clear your API key" data-i18n="[title]Clear your API key" class="menu_button fa-solid fa-circle-xmark clear-api-key" data-key="api_key_dreamgen"></div>
+</div>
+<div data-for="api_key_dreamgen" class="neutral_warning">
+For privacy reasons, your API key will be hidden after you reload the page.
+</div>
+<div>
+<h4 data-i18n="DreamGen Model">DreamGen Model</h4>
+<select id="model_dreamgen_select">
+<option>
+-- Connect to the API --
+</option>
+</select>
+</div>
+</div>
 <div data-tg-type="mancer" class="flex-container flexFlowColumn">
 <div class="flex-container flexFlowColumn">
 </div>
public/instruct/DreamGen Role-Play V1.json (new file)
@@ -0,0 +1,18 @@
+{
+    "system_prompt": "You are an intelligent, skilled, versatile writer.\n\nYour task is to write a role-play based on the information below.",
+    "input_sequence": "<|im_end|>\n<|im_start|>text names= {{user}}\n",
+    "output_sequence": "<|im_end|>\n<|im_start|>text names= {{char}}\n",
+    "first_output_sequence": "",
+    "last_output_sequence": "",
+    "system_sequence_prefix": "",
+    "system_sequence_suffix": "",
+    "stop_sequence": "",
+    "separator_sequence": "",
+    "wrap": false,
+    "macro": true,
+    "names": false,
+    "names_force_groups": false,
+    "activation_regex": "",
+    "skip_examples": false,
+    "name": "DreamGen Role-Play V1"
+}
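Together with the context template above, these sequences produce DreamGen's ChatML-style role-play prompt. As a rough illustration only (the names Alice and Bob and the message text are invented, and the real prompt also contains the scenario, character, and persona sections from the story_string), a single exchange renders approximately as:

<|im_start|>system
You are an intelligent, skilled, versatile writer.

Your task is to write a role-play based on the information below.
...
<|im_end|>
<|im_start|>text names= Alice
Hi there!<|im_end|>
<|im_start|>text names= Bob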
@@ -22,7 +22,7 @@ import {
 parseTabbyLogprobs,
 } from './scripts/textgen-settings.js';

-const { MANCER, TOGETHERAI, OOBA, APHRODITE, OLLAMA, INFERMATICAI, OPENROUTER } = textgen_types;
+const { MANCER, TOGETHERAI, OOBA, APHRODITE, OLLAMA, INFERMATICAI, DREAMGEN, OPENROUTER } = textgen_types;

 import {
 world_info,
@@ -204,7 +204,7 @@ import { createPersona, initPersonas, selectCurrentPersona, setPersonaDescriptio
 import { getBackgrounds, initBackgrounds, loadBackgroundSettings, background_settings } from './scripts/backgrounds.js';
 import { hideLoader, showLoader } from './scripts/loader.js';
 import { BulkEditOverlay, CharacterContextMenu } from './scripts/BulkEditOverlay.js';
-import { loadMancerModels, loadOllamaModels, loadTogetherAIModels, loadInfermaticAIModels, loadOpenRouterModels, loadAphroditeModels } from './scripts/textgen-models.js';
+import { loadMancerModels, loadOllamaModels, loadTogetherAIModels, loadInfermaticAIModels, loadOpenRouterModels, loadAphroditeModels, loadDreamGenModels } from './scripts/textgen-models.js';
 import { appendFileContent, hasPendingFileAttachment, populateFileAttachment, decodeStyleTags, encodeStyleTags } from './scripts/chats.js';
 import { initPresetManager } from './scripts/preset-manager.js';
 import { evaluateMacros } from './scripts/macros.js';
@@ -1074,6 +1074,9 @@ async function getStatusTextgen() {
 } else if (textgen_settings.type === INFERMATICAI) {
 loadInfermaticAIModels(data?.data);
 online_status = textgen_settings.infermaticai_model;
+} else if (textgen_settings.type === DREAMGEN) {
+loadDreamGenModels(data?.data);
+online_status = textgen_settings.dreamgen_model;
 } else if (textgen_settings.type === OPENROUTER) {
 loadOpenRouterModels(data?.data);
 online_status = textgen_settings.openrouter_model;
@@ -1406,12 +1409,18 @@ function getCharacterSource(chId = this_chid) {
 return '';
 }

-const chubId = characters[this_chid]?.data?.extensions?.chub?.full_path;
+const chubId = characters[chId]?.data?.extensions?.chub?.full_path;

 if (chubId) {
 return `https://chub.ai/characters/${chubId}`;
 }

+const pygmalionId = characters[chId]?.data?.extensions?.pygmalion_id;
+
+if (pygmalionId) {
+return `https://pygmalion.chat/${pygmalionId}`;
+}
+
 return '';
 }

@@ -6013,7 +6022,7 @@ async function saveSettings(type) {
 }

 export function setGenerationParamsFromPreset(preset) {
-const needsUnlock = preset.max_length > MAX_CONTEXT_DEFAULT || preset.genamt > MAX_RESPONSE_DEFAULT;
+const needsUnlock = (preset.max_length ?? max_context) > MAX_CONTEXT_DEFAULT || (preset.genamt ?? amount_gen) > MAX_RESPONSE_DEFAULT;
 $('#max_context_unlocked').prop('checked', needsUnlock).trigger('change');

 if (preset.genamt !== undefined) {
@@ -7837,6 +7846,11 @@ const CONNECT_API_MAP = {
 button: '#api_button_textgenerationwebui',
 type: textgen_types.INFERMATICAI,
 },
+'dreamgen': {
+selected: 'textgenerationwebui',
+button: '#api_button_textgenerationwebui',
+type: textgen_types.DREAMGEN,
+},
 'openrouter-text': {
 selected: 'textgenerationwebui',
 button: '#api_button_textgenerationwebui',
@@ -8773,6 +8787,11 @@ jQuery(async function () {
 await writeSecret(SECRET_KEYS.INFERMATICAI, infermaticAIKey);
 }

+const dreamgenKey = String($('#api_key_dreamgen').val()).trim();
+if (dreamgenKey.length) {
+await writeSecret(SECRET_KEYS.DREAMGEN, dreamgenKey);
+}
+
 const openRouterKey = String($('#api_key_openrouter-tg').val()).trim();
 if (openRouterKey.length) {
 await writeSecret(SECRET_KEYS.OPENROUTER, openRouterKey);
@@ -325,8 +325,9 @@ function RA_autoconnect(PrevApi) {
 case 'textgenerationwebui':
 if ((textgen_settings.type === textgen_types.MANCER && secret_state[SECRET_KEYS.MANCER])
 || (textgen_settings.type === textgen_types.TOGETHERAI && secret_state[SECRET_KEYS.TOGETHERAI])
-|| (textgen_settings.type === textgen_types.INFERMATICAI && secret_state[SECRET_KEYS.INFERMATICAI]
-|| (textgen_settings.type === textgen_types.OPENROUTER && secret_state[SECRET_KEYS.OPENROUTER]))
+|| (textgen_settings.type === textgen_types.INFERMATICAI && secret_state[SECRET_KEYS.INFERMATICAI])
+|| (textgen_settings.type === textgen_types.DREAMGEN && secret_state[SECRET_KEYS.DREAMGEN])
+|| (textgen_settings.type === textgen_types.OPENROUTER && secret_state[SECRET_KEYS.OPENROUTER])
 ) {
 $('#api_button_textgenerationwebui').trigger('click');
 }
@@ -4,7 +4,7 @@
 <div class="sd_comfy_workflow_editor_content">
 <div class="flex-container flexFlowColumn sd_comfy_workflow_editor_workflow_container">
 <label for="sd_comfy_workflow_editor_workflow">Workflow (JSON)</label>
-<textarea id="sd_comfy_workflow_editor_workflow" class="text_pole wide100p textarea_compact flex1" placeholder="Put the ComfyUI's workflow (JSON) here and replace the variable settings with placeholders."></textarea>
+<textarea id="sd_comfy_workflow_editor_workflow" class="text_pole wide100p textarea_compact flex1" placeholder="Insert your ComfyUI workflow here by copying the JSON data obtained via the 'Save (API Format)' option. This option becomes available after enabling 'Dev Mode' in the settings. Remember to replace specific values within your workflow with placeholders as required for your use case."></textarea>
 </div>
 <div class="sd_comfy_workflow_editor_placeholder_container">
 <div>Placeholders</div>
@@ -1603,6 +1603,8 @@ async function sendOpenAIRequest(type, messages, signal) {
 'stop': getCustomStoppingStrings(openai_max_stop_strings),
 'chat_completion_source': oai_settings.chat_completion_source,
 'n': canMultiSwipe ? oai_settings.n : undefined,
+'user_name': name1,
+'char_name': name2,
 };

 // Empty array will produce a validation error
@@ -315,6 +315,7 @@ class PresetManager {
 'custom_model',
 'bypass_status_check',
 'infermaticai_model',
+'dreamgen_model',
 'openrouter_model',
 'max_tokens_second',
 ];
@@ -17,6 +17,7 @@ export const SECRET_KEYS = {
 MISTRALAI: 'api_key_mistralai',
 TOGETHERAI: 'api_key_togetherai',
 INFERMATICAI: 'api_key_infermaticai',
+DREAMGEN: 'api_key_dreamgen',
 CUSTOM: 'api_key_custom',
 OOBA: 'api_key_ooba',
 };
@@ -39,6 +40,7 @@ const INPUT_MAP = {
 [SECRET_KEYS.TOGETHERAI]: '#api_key_togetherai',
 [SECRET_KEYS.OOBA]: '#api_key_ooba',
 [SECRET_KEYS.INFERMATICAI]: '#api_key_infermaticai',
+[SECRET_KEYS.DREAMGEN]: '#api_key_dreamgen',
 };

 async function clearSecret() {
@@ -1,11 +1,12 @@
-import { callPopup, getRequestHeaders, setGenerationParamsFromPreset } from '../script.js';
 import { isMobile } from './RossAscends-mods.js';
+import { callPopup, getRequestHeaders, setGenerationParamsFromPreset } from '../script.js';
 import { textgenerationwebui_settings as textgen_settings, textgen_types } from './textgen-settings.js';
 import { tokenizers } from './tokenizers.js';

 let mancerModels = [];
 let togetherModels = [];
 let infermaticAIModels = [];
+let dreamGenModels = [];
 let aphroditeModels = [];
 export let openRouterModels = [];

@@ -82,6 +83,32 @@ export async function loadInfermaticAIModels(data) {
 }
 }

+export async function loadDreamGenModels(data) {
+if (!Array.isArray(data)) {
+console.error('Invalid DreamGen models data', data);
+return;
+}
+
+dreamGenModels = data;
+
+if (!data.find(x => x.id === textgen_settings.dreamgen_model)) {
+textgen_settings.dreamgen_model = data[0]?.id || '';
+}
+
+$('#model_dreamgen_select').empty();
+for (const model of data) {
+if (model.display_type === 'image') {
+continue;
+}
+
+const option = document.createElement('option');
+option.value = model.id;
+option.text = model.id;
+option.selected = model.id === textgen_settings.dreamgen_model;
+$('#model_dreamgen_select').append(option);
+}
+}
+
 export async function loadMancerModels(data) {
 if (!Array.isArray(data)) {
 console.error('Invalid Mancer models data', data);
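As a usage sketch (not part of the commit), loadDreamGenModels consumes the model array that getStatusTextgen passes in from the DreamGen models endpoint; only the id and display_type fields are read, so the object shapes below are assumptions for illustration:

// Hypothetical payload; only `id` and `display_type` matter to the loader.
const exampleModels = [
    { id: 'opus-v1-xl/text', display_type: 'text' },
    { id: 'opus-v1-sm/text', display_type: 'text' },
    { id: 'some-image-model', display_type: 'image' }, // image models are skipped
];
await loadDreamGenModels(exampleModels);
// #model_dreamgen_select now lists the text models, and textgen_settings.dreamgen_model
// is reset to the first entry if its previous value is no longer available.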
@@ -173,6 +200,13 @@ function onInfermaticAIModelSelect() {
 setGenerationParamsFromPreset({ max_length: model.context_length });
 }

+function onDreamGenModelSelect() {
+const modelName = String($('#model_dreamgen_select').val());
+textgen_settings.dreamgen_model = modelName;
+$('#api_button_textgenerationwebui').trigger('click');
+// TODO(DreamGen): Consider returning max_tokens from API and setting it here.
+}
+
 function onOllamaModelSelect() {
 const modelId = String($('#ollama_model').val());
 textgen_settings.ollama_model = modelId;
@@ -240,6 +274,20 @@ function getInfermaticAIModelTemplate(option) {
 `));
 }

+function getDreamGenModelTemplate(option) {
+const model = dreamGenModels.find(x => x.id === option?.element?.value);
+
+if (!option.id || !model) {
+return option.text;
+}
+
+return $((`
+<div class="flex-container flexFlowColumn">
+<div><strong>${DOMPurify.sanitize(model.id)}</strong></div>
+</div>
+`));
+}
+
 function getOpenRouterModelTemplate(option) {
 const model = openRouterModels.find(x => x.id === option?.element?.value);

@@ -327,10 +375,25 @@ export function getCurrentOpenRouterModelTokenizer() {
 }
 }

+export function getCurrentDreamGenModelTokenizer() {
+const modelId = textgen_settings.dreamgen_model;
+const model = dreamGenModels.find(x => x.id === modelId);
+if (model.id.startsWith('opus-v1-sm')) {
+return tokenizers.MISTRAL;
+} else if (model.id.startsWith('opus-v1-lg')) {
+return tokenizers.YI;
+} else if (model.id.startsWith('opus-v1-xl')) {
+return tokenizers.LLAMA;
+} else {
+return tokenizers.MISTRAL;
+}
+}
+
 jQuery(function () {
 $('#mancer_model').on('change', onMancerModelSelect);
 $('#model_togetherai_select').on('change', onTogetherModelSelect);
 $('#model_infermaticai_select').on('change', onInfermaticAIModelSelect);
+$('#model_dreamgen_select').on('change', onDreamGenModelSelect);
 $('#ollama_model').on('change', onOllamaModelSelect);
 $('#openrouter_model').on('change', onOpenRouterModelSelect);
 $('#ollama_download_model').on('click', downloadOllamaModel);
@@ -364,6 +427,13 @@ jQuery(function () {
 width: '100%',
 templateResult: getInfermaticAIModelTemplate,
 });
+$('#model_dreamgen_select').select2({
+placeholder: 'Select a model',
+searchInputPlaceholder: 'Search models...',
+searchInputCssClass: 'text_pole',
+width: '100%',
+templateResult: getDreamGenModelTemplate,
+});
 $('#openrouter_model').select2({
 placeholder: 'Select a model',
 searchInputPlaceholder: 'Search models...',
@@ -4,7 +4,6 @@ import {
 getRequestHeaders,
 getStoppingStrings,
-max_context,
 online_status,
 saveSettingsDebounced,
 setGenerationParamsFromPreset,
 setOnlineStatus,
@@ -14,7 +13,7 @@ import { BIAS_CACHE, createNewLogitBiasEntry, displayLogitBias, getLogitBiasList

 import { power_user, registerDebugFunction } from './power-user.js';
 import EventSourceStream from './sse-stream.js';
-import { getCurrentOpenRouterModelTokenizer } from './textgen-models.js';
+import { getCurrentDreamGenModelTokenizer, getCurrentOpenRouterModelTokenizer } from './textgen-models.js';
 import { SENTENCEPIECE_TOKENIZERS, TEXTGEN_TOKENIZERS, getTextTokens, tokenizers } from './tokenizers.js';
 import { getSortableDelay, onlyUnique } from './utils.js';

@@ -35,10 +34,11 @@ export const textgen_types = {
 LLAMACPP: 'llamacpp',
 OLLAMA: 'ollama',
 INFERMATICAI: 'infermaticai',
+DREAMGEN: 'dreamgen',
 OPENROUTER: 'openrouter',
 };

-const { MANCER, APHRODITE, TABBY, TOGETHERAI, OOBA, OLLAMA, LLAMACPP, INFERMATICAI, OPENROUTER } = textgen_types;
+const { MANCER, APHRODITE, TABBY, TOGETHERAI, OOBA, OLLAMA, LLAMACPP, INFERMATICAI, DREAMGEN, OPENROUTER } = textgen_types;

 const LLAMACPP_DEFAULT_ORDER = [
 'top_k',
@@ -71,6 +71,7 @@ const MANCER_SERVER_DEFAULT = 'https://neuro.mancer.tech';
 let MANCER_SERVER = localStorage.getItem(MANCER_SERVER_KEY) ?? MANCER_SERVER_DEFAULT;
 let TOGETHERAI_SERVER = 'https://api.together.xyz';
 let INFERMATICAI_SERVER = 'https://api.totalgpt.ai';
+let DREAMGEN_SERVER = 'https://dreamgen.com';
 let OPENROUTER_SERVER = 'https://openrouter.ai/api';

 const SERVER_INPUTS = {
@@ -143,6 +144,7 @@ const settings = {
 ollama_model: '',
 openrouter_model: 'openrouter/auto',
 aphrodite_model: '',
+dreamgen_model: 'opus-v1-xl/text',
 legacy_api: false,
 sampler_order: KOBOLDCPP_ORDER,
 logit_bias: [],
@@ -247,6 +249,10 @@ export function getTextGenServer() {
 return INFERMATICAI_SERVER;
 }

+if (settings.type === DREAMGEN) {
+return DREAMGEN_SERVER;
+}
+
 if (settings.type === OPENROUTER) {
 return OPENROUTER_SERVER;
 }
@@ -275,7 +281,7 @@ async function selectPreset(name) {
 function formatTextGenURL(value) {
 try {
 // Mancer/Together/InfermaticAI doesn't need any formatting (it's hardcoded)
-if (settings.type === MANCER || settings.type === TOGETHERAI || settings.type === INFERMATICAI || settings.type === OPENROUTER) {
+if (settings.type === MANCER || settings.type === TOGETHERAI || settings.type === INFERMATICAI || settings.type === DREAMGEN || settings.type === OPENROUTER) {
 return value;
 }

@@ -312,6 +318,10 @@ function getTokenizerForTokenIds() {
 return getCurrentOpenRouterModelTokenizer();
 }

+if (settings.type === DREAMGEN) {
+return getCurrentDreamGenModelTokenizer();
+}
+
 return tokenizers.LLAMA;
 }

@@ -937,6 +947,10 @@ function getModel() {
 return settings.infermaticai_model;
 }

+if (settings.type === DREAMGEN) {
+return settings.dreamgen_model;
+}
+
 if (settings.type === OPENROUTER) {
 return settings.openrouter_model;
 }
@@ -976,6 +990,7 @@ export function getTextGenGenerationData(finalPrompt, maxTokens, isImpersonate,
 'presence_penalty': settings.presence_pen,
 'top_k': settings.top_k,
 'min_length': settings.type === OOBA ? settings.min_length : undefined,
+'minimum_message_content_tokens': settings.type === DREAMGEN ? settings.min_length : undefined,
 'min_tokens': settings.min_length,
 'num_beams': settings.type === OOBA ? settings.num_beams : undefined,
 'length_penalty': settings.length_penalty,
@@ -5,9 +5,9 @@ import { groups, selected_group } from './group-chats.js';
 import { getStringHash } from './utils.js';
 import { kai_flags } from './kai-settings.js';
 import { textgen_types, textgenerationwebui_settings as textgen_settings, getTextGenServer } from './textgen-settings.js';
-import { getCurrentOpenRouterModelTokenizer, openRouterModels } from './textgen-models.js';
+import { getCurrentDreamGenModelTokenizer, getCurrentOpenRouterModelTokenizer, openRouterModels } from './textgen-models.js';

-const { OOBA, TABBY, KOBOLDCPP, APHRODITE, LLAMACPP, OPENROUTER } = textgen_types;
+const { OOBA, TABBY, KOBOLDCPP, APHRODITE, LLAMACPP, OPENROUTER, DREAMGEN } = textgen_types;

 export const CHARACTERS_PER_TOKEN_RATIO = 3.35;
 const TOKENIZER_WARNING_KEY = 'tokenizationWarningShown';
@@ -206,6 +206,9 @@ export function getTokenizerBestMatch(forApi) {
 if (forApi === 'textgenerationwebui' && textgen_settings.type === OPENROUTER) {
 return getCurrentOpenRouterModelTokenizer();
 }
+if (forApi === 'textgenerationwebui' && textgen_settings.type === DREAMGEN) {
+return getCurrentDreamGenModelTokenizer();
+}
 }

 return tokenizers.LLAMA;
@@ -27,6 +27,14 @@ function getInfermaticAIHeaders() {
 }) : {};
 }

+function getDreamGenHeaders() {
+const apiKey = readSecret(SECRET_KEYS.DREAMGEN);
+
+return apiKey ? ({
+'Authorization': `Bearer ${apiKey}`,
+}) : {};
+}
+
 function getOpenRouterHeaders() {
 const apiKey = readSecret(SECRET_KEYS.OPENROUTER);
 const baseHeaders = { ...OPENROUTER_HEADERS };
@@ -98,6 +106,9 @@ function setAdditionalHeaders(request, args, server) {
 case TEXTGEN_TYPES.INFERMATICAI:
 headers = getInfermaticAIHeaders();
 break;
+case TEXTGEN_TYPES.DREAMGEN:
+headers = getDreamGenHeaders();
+break;
 case TEXTGEN_TYPES.OPENROUTER:
 headers = getOpenRouterHeaders();
 break;
@@ -177,6 +177,7 @@ const TEXTGEN_TYPES = {
 LLAMACPP: 'llamacpp',
 OLLAMA: 'ollama',
 INFERMATICAI: 'infermaticai',
+DREAMGEN: 'dreamgen',
 OPENROUTER: 'openrouter',
 };

@@ -192,6 +193,23 @@ const INFERMATICAI_KEYS = [
 'stop',
 ];

+// https://dreamgen.com/docs/api#openai-text
+const DREAMGEN_KEYS = [
+'model',
+'prompt',
+'max_tokens',
+'temperature',
+'top_p',
+'top_k',
+'min_p',
+'repetition_penalty',
+'frequency_penalty',
+'presence_penalty',
+'stop',
+'stream',
+'minimum_message_content_tokens'
+];
+
 // https://docs.together.ai/reference/completions
 const TOGETHERAI_KEYS = [
 'model',
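To make the whitelist concrete, here is a hedged sketch (all values invented for illustration) of what a completion body looks like after the /generate route, shown further below, strips it down to DREAMGEN_KEYS with lodash:

const filtered = _.pickBy(requestBody, (_, key) => DREAMGEN_KEYS.includes(key));
// filtered might then contain, for example:
// {
//   model: 'opus-v1-xl/text',
//   prompt: '<|im_start|>system\n...',
//   max_tokens: 300,
//   temperature: 0.8,
//   stop: ['<|im_end|>'],
//   stream: true,
//   minimum_message_content_tokens: 0,
// }
// Any other sampler fields sent by the client are dropped before forwarding.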
@@ -263,6 +281,7 @@ module.exports = {
 TOGETHERAI_KEYS,
 OLLAMA_KEYS,
 INFERMATICAI_KEYS,
+DREAMGEN_KEYS,
 OPENROUTER_HEADERS,
 OPENROUTER_KEYS,
 };
@@ -35,7 +35,7 @@ async function sendClaudeRequest(request, response) {
 controller.abort();
 });
 let use_system_prompt = (request.body.model.startsWith('claude-2') || request.body.model.startsWith('claude-3')) && request.body.claude_use_sysprompt;
-let converted_prompt = convertClaudeMessages(request.body.messages, request.body.assistant_prefill, use_system_prompt, request.body.human_sysprompt_message);
+let converted_prompt = convertClaudeMessages(request.body.messages, request.body.assistant_prefill, use_system_prompt, request.body.human_sysprompt_message, request.body.char_name, request.body.user_name);
 // Add custom stop sequences
 const stopSequences = ['\n\nHuman:', '\n\nSystem:', '\n\nAssistant:'];
 if (Array.isArray(request.body.stop)) {
@@ -4,7 +4,7 @@ const _ = require('lodash');
 const Readable = require('stream').Readable;

 const { jsonParser } = require('../../express-common');
-const { TEXTGEN_TYPES, TOGETHERAI_KEYS, OLLAMA_KEYS, INFERMATICAI_KEYS, OPENROUTER_KEYS } = require('../../constants');
+const { TEXTGEN_TYPES, TOGETHERAI_KEYS, OLLAMA_KEYS, INFERMATICAI_KEYS, OPENROUTER_KEYS, DREAMGEN_KEYS } = require('../../constants');
 const { forwardFetchResponse, trimV1 } = require('../../util');
 const { setAdditionalHeaders } = require('../../additional-headers');

@@ -110,6 +110,9 @@ router.post('/status', jsonParser, async function (request, response) {
 case TEXTGEN_TYPES.OPENROUTER:
 url += '/v1/models';
 break;
+case TEXTGEN_TYPES.DREAMGEN:
+url += '/api/openai/v1/models';
+break;
 case TEXTGEN_TYPES.MANCER:
 url += '/oai/v1/models';
 break;
@@ -238,6 +241,9 @@ router.post('/generate', jsonParser, async function (request, response) {
 case TEXTGEN_TYPES.INFERMATICAI:
 url += '/v1/completions';
 break;
+case TEXTGEN_TYPES.DREAMGEN:
+url += '/api/openai/v1/completions';
+break;
 case TEXTGEN_TYPES.MANCER:
 url += '/oai/v1/completions';
 break;
@@ -273,6 +279,13 @@ router.post('/generate', jsonParser, async function (request, response) {
 args.body = JSON.stringify(request.body);
 }

+if (request.body.api_type === TEXTGEN_TYPES.DREAMGEN) {
+request.body = _.pickBy(request.body, (_, key) => DREAMGEN_KEYS.includes(key));
+// NOTE: DreamGen sometimes gets confused by the unusual formatting in the character cards.
+request.body.stop?.push('### User', '## User');
+args.body = JSON.stringify(request.body);
+}
+
 if (request.body.api_type === TEXTGEN_TYPES.OPENROUTER) {
 request.body = _.pickBy(request.body, (_, key) => OPENROUTER_KEYS.includes(key));
 args.body = JSON.stringify(request.body);
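Putting the routing together: with DREAMGEN_SERVER set to https://dreamgen.com in textgen-settings.js and the Authorization header supplied by getDreamGenHeaders, the branch above ultimately forwards a request roughly equivalent to this sketch (the real route reuses its shared args object and streaming plumbing, so this is only an illustration, not the exact call):

const response = await fetch('https://dreamgen.com/api/openai/v1/completions', {
    method: 'POST',
    headers: {
        'Content-Type': 'application/json',
        'Authorization': `Bearer ${dreamGenApiKey}`, // stored under SECRET_KEYS.DREAMGEN
    },
    body: JSON.stringify(filteredBody), // the DREAMGEN_KEYS-filtered body from above
});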
@@ -77,8 +77,10 @@ function convertClaudePrompt(messages, addAssistantPostfix, addAssistantPrefill,
 * @param {string} prefillString User determined prefill string
 * @param {boolean} useSysPrompt See if we want to use a system prompt
 * @param {string} humanMsgFix Add Human message between system prompt and assistant.
+ * @param {string} charName Character name
+ * @param {string} userName User name
 */
-function convertClaudeMessages(messages, prefillString, useSysPrompt, humanMsgFix) {
+function convertClaudeMessages(messages, prefillString, useSysPrompt, humanMsgFix, charName = '', userName = '') {
 let systemPrompt = '';
 if (useSysPrompt) {
 // Collect all the system messages up until the first instance of a non-system message, and then remove them from the messages array.
@@ -87,6 +89,12 @@ function convertClaudeMessages(messages, prefillString, useSysPrompt, humanMsgFi
 if (messages[i].role !== 'system') {
 break;
 }
+if (userName && messages[i].name === 'example_user') {
+messages[i].content = `${userName}: ${messages[i].content}`;
+}
+if (charName && messages[i].name === 'example_assistant') {
+messages[i].content = `${charName}: ${messages[i].content}`;
+}
 systemPrompt += `${messages[i].content}\n\n`;
 }

@@ -103,6 +111,12 @@ function convertClaudeMessages(messages, prefillString, useSysPrompt, humanMsgFi
 // Now replace all further messages that have the role 'system' with the role 'user'. (or all if we're not using one)
 messages.forEach((message) => {
 if (message.role === 'system') {
+if (userName && message.name === 'example_user') {
+message.content = `${userName}: ${message.content}`;
+}
+if (charName && message.name === 'example_assistant') {
+message.content = `${charName}: ${message.content}`;
+}
 message.role = 'user';
 }
 });
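The practical effect of the new charName/userName parameters is that example dialogue carried as system messages keeps its speaker labels when flattened for Claude. A small illustrative sketch (names and message text invented):

const messages = [
    { role: 'system', name: 'example_user', content: 'Hi there!' },
    { role: 'system', name: 'example_assistant', content: 'Hello, how can I help?' },
];
convertClaudeMessages(messages, '', false, '', 'Bob', 'Alice');
// The example lines are prefixed with the speaker ('Alice: Hi there!' / 'Bob: Hello, how can I help?'),
// re-tagged as user-role messages, and then merged with adjacent turns as before.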
@@ -164,7 +178,7 @@ function convertClaudeMessages(messages, prefillString, useSysPrompt, humanMsgFi
 if (prefillString) {
 mergedMessages.push({
 role: 'assistant',
-content: prefillString,
+content: prefillString.trimEnd(),
 });
 }

@@ -31,6 +31,7 @@ const SECRET_KEYS = {
 CUSTOM: 'api_key_custom',
 OOBA: 'api_key_ooba',
 INFERMATICAI: 'api_key_infermaticai',
+DREAMGEN: 'api_key_dreamgen',
 };

 // These are the keys that are safe to expose, even if allowKeysExposure is false