Support more parameters for Infermatic

parent b188c176fd
commit 41ab90bb8e
@@ -1141,7 +1141,7 @@
 <div class="fa-solid fa-circle-info opacity50p" title="Customize displayed samplers or add custom samplers." data-i18n="[title]Customize displayed samplers or add custom samplers."></div>
 </small>
 </div>
-<div data-newbie-hidden data-tg-type="mancer, vllm, aphrodite, tabby" class="flex-container flexFlowColumn alignitemscenter flexBasis100p flexGrow flexShrink gap0">
+<div data-newbie-hidden data-tg-type="mancer, vllm, aphrodite, tabby, infermaticai" class="flex-container flexFlowColumn alignitemscenter flexBasis100p flexGrow flexShrink gap0">
 <small data-i18n="Multiple swipes per generation">Multiple swipes per generation</small>
 <input type="number" id="n_textgenerationwebui" class="text_pole textAlignCenter" min="1" value="1" step="1" />
 </div>
@@ -1263,7 +1263,7 @@
 <input class="neo-range-slider" type="range" id="skew_textgenerationwebui" name="volume" min="-5" max="5" step="0.01" />
 <input class="neo-range-input" type="number" min="-5" max="5" step="0.01" data-for="skew_textgenerationwebui" id="skew_counter_textgenerationwebui">
 </div>
-<div data-newbie-hidden data-tg-type="mancer, ooba, tabby, dreamgen" class="alignitemscenter flex-container flexFlowColumn flexBasis30p flexGrow flexShrink gap0">
+<div data-newbie-hidden data-tg-type="mancer, ooba, tabby, dreamgen, infermaticai" class="alignitemscenter flex-container flexFlowColumn flexBasis30p flexGrow flexShrink gap0">
 <small data-i18n="Min Length">Min Length</small>
 <input class="neo-range-slider" type="range" id="min_length_textgenerationwebui" name="volume" min="0" max="2000" step="1" />
 <input class="neo-range-input" type="number" min="0" max="2000" step="1" data-for="min_length_textgenerationwebui" id="min_length_counter_textgenerationwebui">
@@ -1446,7 +1446,7 @@
 <div class="fa-solid fa-circle-info opacity50p " data-i18n="[title]Ban the eos_token. This forces the model to never end the generation prematurely" title="Ban the eos_token. This forces the model to never end the generation prematurely."></div>
 </label>
 </label>
-<label data-tg-type="vllm, aphrodite" class="checkbox_label" for="ignore_eos_token_textgenerationwebui">
+<label data-tg-type="vllm, aphrodite, infermaticai" class="checkbox_label" for="ignore_eos_token_textgenerationwebui">
 <input type="checkbox" id="ignore_eos_token_textgenerationwebui" />
 <small data-i18n="Ignore EOS Token">Ignore EOS Token
 <div class="fa-solid fa-circle-info opacity50p " data-i18n="[title]Ignore the EOS Token even if it generates." title="Ignore the EOS Token even if it generates."></div>
@@ -1471,13 +1471,13 @@
 </label>
 </label>

-<label data-tg-type="vllm, aphrodite" class="checkbox_label" for="spaces_between_special_tokens_textgenerationwebui">
+<label data-tg-type="vllm, aphrodite, infermaticai" class="checkbox_label" for="spaces_between_special_tokens_textgenerationwebui">
 <input type="checkbox" id="spaces_between_special_tokens_textgenerationwebui" />
 <small data-i18n="Spaces Between Special Tokens">Spaces Between Special Tokens</small>
 </label>
 </div>
 </div>
-<div data-tg-type="mancer, ooba, koboldcpp, vllm, aphrodite, llamacpp, ollama" data-newbie-hidden class="flex-container flexFlowColumn alignitemscenter flexBasis48p flexGrow flexShrink gap0">
+<div data-tg-type="mancer, ooba, koboldcpp, vllm, aphrodite, llamacpp, ollama, infermaticai" data-newbie-hidden class="flex-container flexFlowColumn alignitemscenter flexBasis48p flexGrow flexShrink gap0">
 <small data-i18n="Seed" class="textAlignCenter">Seed</small>
 <input type="number" id="seed_textgenerationwebui" class="text_pole textAlignCenter" min="-1" value="-1" maxlength="100" />
 </div>
@@ -5032,6 +5032,7 @@ function parseAndSaveLogprobs(data, continueFrom) {
 logprobs = data?.completion_probabilities?.map(x => parseTextgenLogprobs(x.content, [x])) || null;
 } break;
 case textgen_types.VLLM:
+case textgen_types.INFERMATICAI:
 case textgen_types.APHRODITE:
 case textgen_types.MANCER:
 case textgen_types.TABBY: {
@@ -5088,7 +5089,7 @@ function extractMultiSwipes(data, type) {
 return swipes;
 }

-if (main_api === 'openai' || (main_api === 'textgenerationwebui' && [MANCER, VLLM, APHRODITE, TABBY].includes(textgen_settings.type))) {
+if (main_api === 'openai' || (main_api === 'textgenerationwebui' && [MANCER, VLLM, APHRODITE, TABBY, INFERMATICAI].includes(textgen_settings.type))) {
 if (!Array.isArray(data.choices)) {
 return swipes;
 }
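The hunk above lets Infermatic responses participate in multi-swipe extraction: when a request returns more than one choice, the first choice is the main reply and the remaining ones are offered as alternative swipes. A minimal, self-contained sketch of that idea (illustrative only, not SillyTavern's actual extractMultiSwipes):

// Illustrative sketch: with n > 1, the first choice is the main reply and the
// remaining choices become extra swipes. Names and shapes are assumptions.
function splitChoices(data) {
    if (!Array.isArray(data?.choices) || data.choices.length <= 1) {
        return { main: data?.choices?.[0]?.text ?? '', swipes: [] };
    }
    const [first, ...rest] = data.choices;
    return { main: first.text, swipes: rest.map(choice => choice.text) };
}

console.log(splitChoices({ choices: [{ text: 'Reply A' }, { text: 'Reply B' }] }));
// -> { main: 'Reply A', swipes: ['Reply B'] }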
@@ -905,6 +905,7 @@ export function parseTextgenLogprobs(token, logprobs) {
 case VLLM:
 case APHRODITE:
 case MANCER:
+case INFERMATICAI:
 case OOBA: {
 /** @type {Record<string, number>[]} */
 const topLogprobs = logprobs.top_logprobs;
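For context, the branch that INFERMATICAI now falls through to reads top_logprobs as an array of token-to-logprob maps, per the Record<string, number>[] annotation above. A hedged sketch of turning that shape into sorted candidate lists; the sample tokens and values are made up for illustration:

// Sketch based on the `Record<string, number>[]` annotation: each entry maps a
// candidate token to its log-probability; this converts the payload into
// candidate lists sorted from most to least likely.
function toCandidates(topLogprobs) {
    return (topLogprobs ?? []).map(entry =>
        Object.entries(entry)
            .map(([token, logprob]) => ({ token, logprob }))
            .sort((a, b) => b.logprob - a.logprob),
    );
}

console.log(toCandidates([
    { ' the': -0.11, ' a': -2.5 },
    { ' cat': -0.4, ' dog': -1.2 },
]));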
@@ -1020,7 +1021,7 @@ export function isJsonSchemaSupported() {
 }

 function getLogprobsNumber() {
-if (settings.type === VLLM) {
+if (settings.type === VLLM || settings.type === INFERMATICAI) {
 return 5;
 }
@@ -1124,7 +1125,7 @@ export function getTextGenGenerationData(finalPrompt, maxTokens, isImpersonate,
 'best_of': canMultiSwipe ? settings.n : 1,
 'ignore_eos': settings.ignore_eos_token,
 'spaces_between_special_tokens': settings.spaces_between_special_tokens,
-'seed': settings.seed,
+'seed': settings.seed >= 0 ? settings.seed : undefined,
 };
 const aphroditeParams = {
 'n': canMultiSwipe ? settings.n : 1,
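The seed change treats the UI default of -1 as "no fixed seed": a negative value becomes undefined, so the key is dropped from the serialized request and the backend picks its own random seed. A small sketch of the effect; the helper name is illustrative:

// Sketch: a negative seed (the UI default of -1) is treated as "random" and
// omitted from the request body, since JSON.stringify drops undefined values.
function buildSeedParam(settings) {
    return { seed: settings.seed >= 0 ? settings.seed : undefined };
}

console.log(JSON.stringify(buildSeedParam({ seed: -1 }))); // {}
console.log(JSON.stringify(buildSeedParam({ seed: 42 }))); // {"seed":42}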
@@ -1162,6 +1163,7 @@ export function getTextGenGenerationData(finalPrompt, maxTokens, isImpersonate,

 switch (settings.type) {
 case VLLM:
+case INFERMATICAI:
 params = Object.assign(params, vllmParams);
 break;
@@ -224,6 +224,17 @@ const INFERMATICAI_KEYS = [
 'repetition_penalty',
 'stream',
 'stop',
+'presence_penalty',
+'frequency_penalty',
+'min_p',
+'seed',
+'ignore_eos',
+'n',
+'best_of',
+'min_tokens',
+'spaces_between_special_tokens',
+'skip_special_tokens',
+'logprobs',
 ];

 // https://dreamgen.com/docs/api#openai-text
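INFERMATICAI_KEYS reads as a whitelist of sampler parameters allowed through to the Infermatic API; the code that applies the list is not part of this diff. Under that assumption, a sketch of how such a whitelist is commonly applied before forwarding a request body; pick() and the sample payload are illustrative only:

// Sketch: keep only whitelisted keys from the outgoing request body.
function pick(body, allowedKeys) {
    return Object.fromEntries(
        Object.entries(body).filter(([key]) => allowedKeys.includes(key)),
    );
}

const allowed = ['temperature', 'seed', 'ignore_eos', 'n', 'best_of', 'logprobs'];
const outgoing = pick({ temperature: 0.7, seed: 42, sampler_order: [6, 0], logprobs: 5 }, allowed);
console.log(outgoing); // { temperature: 0.7, seed: 42, logprobs: 5 }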
@@ -340,7 +340,7 @@ router.post('/generate', jsonParser, async function (request, response) {

 // Map InfermaticAI response to OAI completions format
 if (apiType === TEXTGEN_TYPES.INFERMATICAI) {
-data['choices'] = (data?.choices || []).map(choice => ({ text: choice?.message?.content || choice.text }));
+data['choices'] = (data?.choices || []).map(choice => ({ text: choice?.message?.content || choice.text, logprobs: choice?.logprobs, index: choice?.index }));
 }

 return response.send(data);
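The updated mapping keeps logprobs and index when reshaping Infermatic's chat-style choices into the OAI text-completions form, which is what the logprobs parsing added earlier relies on. A self-contained sketch of the same transform; the sample response is made up for illustration:

// Sketch: reshape a chat-style choice (message.content) into the OAI
// text-completions form while preserving logprobs and index.
function mapChoices(data) {
    return (data?.choices || []).map(choice => ({
        text: choice?.message?.content || choice.text,
        logprobs: choice?.logprobs,
        index: choice?.index,
    }));
}

console.log(mapChoices({
    choices: [{ index: 0, message: { content: 'Hello!' }, logprobs: { top_logprobs: [] } }],
}));
// -> [{ text: 'Hello!', logprobs: { top_logprobs: [] }, index: 0 }]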