Merge remote-tracking branch 'upstream/staging' into staging

This commit is contained in:
DonMoralez
2023-12-23 00:25:57 +02:00
7 changed files with 322 additions and 55 deletions

View File

@@ -2,7 +2,6 @@ import {
saveSettingsDebounced,
callPopup,
setGenerationProgress,
CLIENT_VERSION,
getRequestHeaders,
max_context,
amount_gen,
@@ -34,19 +33,96 @@ let horde_settings = {
const MAX_RETRIES = 480;
const CHECK_INTERVAL = 2500;
const MIN_LENGTH = 16;
const getRequestArgs = () => ({
method: 'GET',
headers: {
'Client-Agent': CLIENT_VERSION,
},
});
async function getWorkers(workerType) {
const response = await fetch('https://horde.koboldai.net/api/v2/workers?type=text', getRequestArgs());
/**
 * Gets the available workers from Horde.
 * @param {boolean} force Do a force refresh of the workers
 * @returns {Promise<Array>} Array of workers
 * @throws {Error} If the request fails
 */
async function getWorkers(force) {
    const response = await fetch('/api/horde/text-workers', {
        method: 'POST',
        headers: getRequestHeaders(),
        body: JSON.stringify({ force }),
    });

    // Fail loudly on an error response instead of handing callers an error
    // payload that they would treat as a worker list (matches getTaskStatus).
    if (!response.ok) {
        throw new Error(`Failed to get workers: ${response.statusText}`);
    }

    const data = await response.json();
    return data;
}
/**
 * Gets the available models from Horde.
 * @param {boolean} force Do a force refresh of the models
 * @returns {Promise<Array>} Array of models
 * @throws {Error} If the request fails
 */
async function getModels(force) {
    const response = await fetch('/api/horde/text-models', {
        method: 'POST',
        headers: getRequestHeaders(),
        body: JSON.stringify({ force }),
    });

    // Fail loudly on an error response instead of handing callers an error
    // payload that they would treat as a model list (matches getTaskStatus).
    if (!response.ok) {
        throw new Error(`Failed to get models: ${response.statusText}`);
    }

    const data = await response.json();
    return data;
}
/**
 * Gets the status of a Horde task.
 * @param {string} taskId Task ID
 * @returns {Promise<Object>} Task status
 * @throws {Error} If the status request fails
 */
async function getTaskStatus(taskId) {
    const statusResponse = await fetch('/api/horde/task-status', {
        body: JSON.stringify({ taskId }),
        headers: getRequestHeaders(),
        method: 'POST',
    });

    if (statusResponse.ok === false) {
        throw new Error(`Failed to get task status: ${statusResponse.statusText}`);
    }

    return await statusResponse.json();
}
/**
 * Cancels a Horde task.
 * @param {string} taskId Task ID
 * @throws {Error} If the cancellation request fails
 */
async function cancelTask(taskId) {
    const cancelResponse = await fetch('/api/horde/cancel-task', {
        body: JSON.stringify({ taskId }),
        headers: getRequestHeaders(),
        method: 'POST',
    });

    if (cancelResponse.ok === false) {
        throw new Error(`Failed to cancel task: ${cancelResponse.statusText}`);
    }
}
/**
 * Checks if Horde is online.
 * @returns {Promise<boolean>} True if Horde is online, false otherwise
 */
async function checkHordeStatus() {
    try {
        const statusResponse = await fetch('/api/horde/status', {
            method: 'POST',
            headers: getRequestHeaders(),
        });

        if (statusResponse.ok) {
            const { ok } = await statusResponse.json();
            return ok;
        }

        // Non-2xx responses are treated as "offline" rather than errors.
        return false;
    } catch (error) {
        // Network failures also mean Horde is unreachable; report offline.
        console.error(error);
        return false;
    }
}
function validateHordeModel() {
let selectedModels = models.filter(m => horde_settings.models.includes(m.name));
@@ -60,7 +136,7 @@ function validateHordeModel() {
async function adjustHordeGenerationParams(max_context_length, max_length) {
console.log(max_context_length, max_length);
const workers = await getWorkers();
const workers = await getWorkers(false);
let maxContextLength = max_context_length;
let maxLength = max_length;
let availableWorkers = [];
@@ -126,10 +202,7 @@ async function generateHorde(prompt, params, signal, reportProgress) {
const response = await fetch('/api/horde/generate-text', {
method: 'POST',
headers: {
...getRequestHeaders(),
'Client-Agent': CLIENT_VERSION,
},
headers: getRequestHeaders(),
body: JSON.stringify(payload),
});
@@ -146,24 +219,17 @@ async function generateHorde(prompt, params, signal, reportProgress) {
throw new Error(`Horde generation failed: ${reason}`);
}
const task_id = responseJson.id;
const taskId = responseJson.id;
let queue_position_first = null;
console.log(`Horde task id = ${task_id}`);
console.log(`Horde task id = ${taskId}`);
for (let retryNumber = 0; retryNumber < MAX_RETRIES; retryNumber++) {
if (signal.aborted) {
fetch(`https://horde.koboldai.net/api/v2/generate/text/status/${task_id}`, {
method: 'DELETE',
headers: {
'Client-Agent': CLIENT_VERSION,
},
});
cancelTask(taskId);
throw new Error('Request aborted');
}
const statusCheckResponse = await fetch(`https://horde.koboldai.net/api/v2/generate/text/status/${task_id}`, getRequestArgs());
const statusCheckJson = await statusCheckResponse.json();
const statusCheckJson = await getTaskStatus(taskId);
console.log(statusCheckJson);
if (statusCheckJson.faulted === true) {
@@ -202,18 +268,13 @@ async function generateHorde(prompt, params, signal, reportProgress) {
throw new Error('Horde timeout');
}
async function checkHordeStatus() {
const response = await fetch('https://horde.koboldai.net/api/v2/status/heartbeat', getRequestArgs());
return response.ok;
}
async function getHordeModels() {
/**
* Displays the available models in the Horde model selection dropdown.
* @param {boolean} force Force refresh of the models
*/
async function getHordeModels(force) {
$('#horde_model').empty();
const response = await fetch('https://horde.koboldai.net/api/v2/status/models?type=text', getRequestArgs());
models = await response.json();
models.sort((a, b) => {
return b.performance - a.performance;
});
models = (await getModels(force)).sort((a, b) => b.performance - a.performance);
for (const model of models) {
const option = document.createElement('option');
option.value = model.name;
@@ -299,7 +360,7 @@ jQuery(function () {
await writeSecret(SECRET_KEYS.HORDE, key);
});
$('#horde_refresh').on('click', getHordeModels);
$('#horde_refresh').on('click', () => getHordeModels(true));
$('#horde_kudos').on('click', showKudos);
// Not needed on mobile

View File

@@ -240,6 +240,7 @@ const default_settings = {
squash_system_messages: false,
image_inlining: false,
bypass_status_check: false,
continue_prefill: false,
seed: -1,
};
@@ -304,6 +305,7 @@ const oai_settings = {
squash_system_messages: false,
image_inlining: false,
bypass_status_check: false,
continue_prefill: false,
seed: -1,
};
@@ -662,12 +664,20 @@ async function populateChatHistory(messages, prompts, chatCompletion, type = nul
let continueMessage = null;
const instruct = isOpenRouterWithInstruct();
if (type === 'continue' && cyclePrompt && !instruct) {
const continuePrompt = new Prompt({
identifier: 'continueNudge',
role: 'system',
content: oai_settings.continue_nudge_prompt.replace('{{lastChatMessage}}', cyclePrompt),
system_prompt: true,
});
const promptObject = oai_settings.continue_prefill ?
{
identifier: 'continueNudge',
role: 'assistant',
content: cyclePrompt,
system_prompt: true,
} :
{
identifier: 'continueNudge',
role: 'system',
content: oai_settings.continue_nudge_prompt.replace('{{lastChatMessage}}', cyclePrompt),
system_prompt: true,
};
const continuePrompt = new Prompt(promptObject);
const preparedPrompt = promptManager.preparePrompt(continuePrompt);
continueMessage = Message.fromPrompt(preparedPrompt);
chatCompletion.reserveBudget(continueMessage);
@@ -2379,6 +2389,7 @@ function loadOpenAISettings(data, settings) {
oai_settings.new_example_chat_prompt = settings.new_example_chat_prompt ?? default_settings.new_example_chat_prompt;
oai_settings.continue_nudge_prompt = settings.continue_nudge_prompt ?? default_settings.continue_nudge_prompt;
oai_settings.squash_system_messages = settings.squash_system_messages ?? default_settings.squash_system_messages;
oai_settings.continue_prefill = settings.continue_prefill ?? default_settings.continue_prefill;
if (settings.wrap_in_quotes !== undefined) oai_settings.wrap_in_quotes = !!settings.wrap_in_quotes;
if (settings.names_in_completion !== undefined) oai_settings.names_in_completion = !!settings.names_in_completion;
@@ -2433,6 +2444,7 @@ function loadOpenAISettings(data, settings) {
$('#openrouter_force_instruct').prop('checked', oai_settings.openrouter_force_instruct);
$('#openrouter_group_models').prop('checked', oai_settings.openrouter_group_models);
$('#squash_system_messages').prop('checked', oai_settings.squash_system_messages);
$('#continue_prefill').prop('checked', oai_settings.continue_prefill);
if (settings.impersonation_prompt !== undefined) oai_settings.impersonation_prompt = settings.impersonation_prompt;
$('#impersonation_prompt_textarea').val(oai_settings.impersonation_prompt);
@@ -2598,6 +2610,10 @@ async function saveOpenAIPreset(name, settings, triggerUi = true) {
ai21_model: settings.ai21_model,
mistralai_model: settings.mistralai_model,
custom_model: settings.custom_model,
custom_url: settings.custom_url,
custom_include_body: settings.custom_include_body,
custom_exclude_body: settings.custom_exclude_body,
custom_include_headers: settings.custom_include_headers,
google_model: settings.google_model,
temperature: settings.temp_openai,
frequency_penalty: settings.freq_pen_openai,
@@ -2640,6 +2656,8 @@ async function saveOpenAIPreset(name, settings, triggerUi = true) {
use_alt_scale: settings.use_alt_scale,
squash_system_messages: settings.squash_system_messages,
image_inlining: settings.image_inlining,
bypass_status_check: settings.bypass_status_check,
continue_prefill: settings.continue_prefill,
seed: settings.seed,
};
@@ -3011,6 +3029,7 @@ function onSettingsPresetChange() {
use_alt_scale: ['#use_alt_scale', 'use_alt_scale', true],
squash_system_messages: ['#squash_system_messages', 'squash_system_messages', true],
image_inlining: ['#openai_image_inlining', 'image_inlining', true],
continue_prefill: ['#continue_prefill', 'continue_prefill', true],
seed: ['#seed_openai', 'seed', false],
};
@@ -3591,17 +3610,17 @@ function onCustomizeParametersClick() {
</div>
</div>`);
template.find('#custom_include_body').val(oai_settings.custom_include_body).on('input', function() {
template.find('#custom_include_body').val(oai_settings.custom_include_body).on('input', function () {
oai_settings.custom_include_body = String($(this).val());
saveSettingsDebounced();
});
template.find('#custom_exclude_body').val(oai_settings.custom_exclude_body).on('input', function() {
template.find('#custom_exclude_body').val(oai_settings.custom_exclude_body).on('input', function () {
oai_settings.custom_exclude_body = String($(this).val());
saveSettingsDebounced();
});
template.find('#custom_include_headers').val(oai_settings.custom_include_headers).on('input', function() {
template.find('#custom_include_headers').val(oai_settings.custom_include_headers).on('input', function () {
oai_settings.custom_include_headers = String($(this).val());
saveSettingsDebounced();
});
@@ -3940,6 +3959,11 @@ $(document).ready(async function () {
saveSettingsDebounced();
});
$('#continue_prefill').on('input', function () {
oai_settings.continue_prefill = !!$(this).prop('checked');
saveSettingsDebounced();
});
$('#seed_openai').on('input', function () {
oai_settings.seed = Number($(this).val());
saveSettingsDebounced();