Add stopping strings to Horde

Cohee
2023-10-24 16:23:32 +03:00
parent 1bf6d6d9f2
commit 52537904f7
3 changed files with 31 additions and 18 deletions

View File

@@ -118,7 +118,7 @@ import {
     checkHordeStatus,
     getHordeModels,
     adjustHordeGenerationParams,
-    MIN_AMOUNT_GEN,
+    MIN_LENGTH,
 } from "./scripts/horde.js";
 import {
@@ -2323,8 +2323,9 @@ export async function generateRaw(prompt, api) {
             if (preset_settings === 'gui') {
                 generateData = { prompt: prompt, gui_settings: true, max_length: amount_gen, max_context_length: max_context, };
             } else {
+                const isHorde = api === 'koboldhorde';
                 const koboldSettings = koboldai_settings[koboldai_setting_names[preset_settings]];
-                generateData = getKoboldGenerationData(prompt, koboldSettings, amount_gen, max_context, false, 'quiet');
+                generateData = getKoboldGenerationData(prompt, koboldSettings, amount_gen, max_context, isHorde, 'quiet');
             }
             break;
         case 'novel':
@@ -3073,13 +3074,13 @@ async function Generate(type, { automatic_trigger, force_name2, resolve, reject,
     // Include the entire guidance scale object
     const cfgValues = cfgGuidanceScale && cfgGuidanceScale?.value !== 1 ? ({ guidanceScale: cfgGuidanceScale, negativePrompt: negativePrompt }) : null;
 
-    let this_amount_gen = Number(amount_gen); // how many tokens the AI will be requested to generate
+    let maxLength = Number(amount_gen); // how many tokens the AI will be requested to generate
     let thisPromptBits = [];
 
     // TODO: Make this a switch
     if (main_api == 'koboldhorde' && horde_settings.auto_adjust_response_length) {
-        this_amount_gen = Math.min(this_amount_gen, adjustedParams.maxLength);
-        this_amount_gen = Math.max(this_amount_gen, MIN_AMOUNT_GEN); // prevent validation errors
+        maxLength = Math.min(maxLength, adjustedParams.maxLength);
+        maxLength = Math.max(maxLength, MIN_LENGTH); // prevent validation errors
     }
 
     let generate_data;
@@ -3087,22 +3088,23 @@ async function Generate(type, { automatic_trigger, force_name2, resolve, reject,
         generate_data = {
             prompt: finalPrompt,
             gui_settings: true,
-            max_length: amount_gen,
+            max_length: maxLength,
             max_context_length: max_context,
         };
 
         if (preset_settings != 'gui') {
-            const this_settings = koboldai_settings[koboldai_setting_names[preset_settings]];
+            const isHorde = main_api == 'koboldhorde';
+            const presetSettings = koboldai_settings[koboldai_setting_names[preset_settings]];
             const maxContext = (adjustedParams && horde_settings.auto_adjust_context_length) ? adjustedParams.maxContextLength : max_context;
-            generate_data = getKoboldGenerationData(finalPrompt, this_settings, this_amount_gen, maxContext, isImpersonate, type);
+            generate_data = getKoboldGenerationData(finalPrompt, presetSettings, maxLength, maxContext, isHorde, type);
         }
     }
     else if (main_api == 'textgenerationwebui') {
-        generate_data = getTextGenGenerationData(finalPrompt, this_amount_gen, isImpersonate, cfgValues);
+        generate_data = getTextGenGenerationData(finalPrompt, maxLength, isImpersonate, cfgValues);
     }
     else if (main_api == 'novel') {
-        const this_settings = novelai_settings[novelai_setting_names[nai_settings.preset_settings_novel]];
-        generate_data = getNovelGenerationData(finalPrompt, this_settings, this_amount_gen, isImpersonate, cfgValues);
+        const presetSettings = novelai_settings[novelai_setting_names[nai_settings.preset_settings_novel]];
+        generate_data = getNovelGenerationData(finalPrompt, presetSettings, maxLength, isImpersonate, cfgValues);
     }
     else if (main_api == 'openai') {
         let [prompt, counts] = prepareOpenAIMessages({
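Note on the call sites above: getKoboldGenerationData no longer receives isImpersonate directly; both callers now pass an isHorde flag in that position, and the function derives impersonation from the type argument itself (see the third file below). A minimal sketch of the new calling convention, using names from this diff with illustrative surrounding values:

    // Sketch (not part of the commit): the updated call shape after this change.
    const isHorde = main_api == 'koboldhorde';   // the 5th argument is now a Horde flag
    const presetSettings = koboldai_settings[koboldai_setting_names[preset_settings]];
    const generate_data = getKoboldGenerationData(
        finalPrompt, presetSettings, maxLength, maxContext, isHorde, type,
    );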

View File

@@ -19,7 +19,7 @@ export {
     loadHordeSettings,
     adjustHordeGenerationParams,
     getHordeModels,
-    MIN_AMOUNT_GEN,
+    MIN_LENGTH,
 }
 
 let models = [];
@@ -33,7 +33,7 @@ let horde_settings = {
 const MAX_RETRIES = 240;
 const CHECK_INTERVAL = 5000;
-const MIN_AMOUNT_GEN = 16;
+const MIN_LENGTH = 16;
 const getRequestArgs = () => ({
     method: "GET",
     headers: {
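MIN_AMOUNT_GEN is renamed to MIN_LENGTH but keeps its value of 16; it remains the floor applied to the requested response length before a Horde request is submitted, so the payload passes Horde-side validation. A small sketch of how the exported constant is consumed by the clamp shown in the first file (the helper wrapper below is hypothetical, not part of the commit):

    import { MIN_LENGTH } from "./scripts/horde.js";

    // Hypothetical helper mirroring the clamp in Generate(): keep the requested
    // length within the workers' advertised maximum, but never below MIN_LENGTH.
    function clampHordeResponseLength(requested, adjustedParams) {
        const maxLength = Math.min(Number(requested), adjustedParams.maxLength);
        return Math.max(maxLength, MIN_LENGTH); // prevent Horde validation errors
    }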

View File

@@ -90,15 +90,26 @@ export function loadKoboldSettings(preset) {
     }
 }
 
-export function getKoboldGenerationData(finalPrompt, this_settings, this_amount_gen, this_max_context, isImpersonate, type) {
-    const sampler_order = kai_settings.sampler_order || this_settings.sampler_order;
+/**
+ * Gets the Kobold generation data.
+ * @param {string} finalPrompt Final text prompt.
+ * @param {object} settings Settings preset object.
+ * @param {number} maxLength Maximum length.
+ * @param {number} maxContextLength Maximum context length.
+ * @param {boolean} isHorde True if the generation is for a horde, false otherwise.
+ * @param {string} type Generation type.
+ * @returns {object} Kobold generation data.
+ */
+export function getKoboldGenerationData(finalPrompt, settings, maxLength, maxContextLength, isHorde, type) {
+    const isImpersonate = type === 'impersonate';
+    const sampler_order = kai_settings.sampler_order || settings.sampler_order;
     let generate_data = {
         prompt: finalPrompt,
         gui_settings: false,
         sampler_order: sampler_order,
-        max_context_length: Number(this_max_context),
-        max_length: this_amount_gen,
+        max_context_length: Number(maxContextLength),
+        max_length: maxLength,
         rep_pen: Number(kai_settings.rep_pen),
         rep_pen_range: Number(kai_settings.rep_pen_range),
         rep_pen_slope: kai_settings.rep_pen_slope,
@@ -117,7 +128,7 @@ export function getKoboldGenerationData(finalPrompt, this_settings, this_amount_
         s7: sampler_order[6],
         use_world_info: false,
         singleline: kai_settings.single_line,
-        stop_sequence: kai_flags.can_use_stop_sequence ? getStoppingStrings(isImpersonate) : undefined,
+        stop_sequence: (kai_flags.can_use_stop_sequence || isHorde) ? getStoppingStrings(isImpersonate) : undefined,
         streaming: kai_settings.streaming_kobold && kai_flags.can_use_streaming && type !== 'quiet',
         can_abort: kai_flags.can_use_streaming,
         mirostat: kai_flags.can_use_mirostat ? kai_settings.mirostat : undefined,
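This last hunk is what the commit title refers to: with isHorde passed in, stop_sequence is filled from getStoppingStrings() for Horde generations even when the can_use_stop_sequence capability flag is not set. A condensed sketch of the resulting behaviour, trimmed to the relevant field (the example stopping strings are illustrative, not taken from the code):

    // Sketch: how the stop_sequence field is resolved after this commit.
    const isImpersonate = type === 'impersonate';
    const stop_sequence = (kai_flags.can_use_stop_sequence || isHorde)
        ? getStoppingStrings(isImpersonate)   // e.g. ["\nUser:", "\nAssistant:"] — illustrative
        : undefined;                          // omitted entirely when neither condition holds

    const generate_data = { /* ...fields from the diff above... */ stop_sequence };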