Merge branch 'staging' into parser-followup-2

This commit is contained in:
LenAnderson
2024-07-18 18:08:10 -04:00
9 changed files with 173 additions and 28 deletions

View File

@ -2560,6 +2560,10 @@
<option value="gpt-4o">gpt-4o</option>
<option value="gpt-4o-2024-05-13">gpt-4o-2024-05-13</option>
</optgroup>
<optgroup label="gpt-4o-mini">
<option value="gpt-4o-mini">gpt-4o-mini</option>
<option value="gpt-4o-mini-2024-07-18">gpt-4o-mini-2024-07-18</option>
</optgroup>
<optgroup label="GPT-4 Turbo">
<option value="gpt-4-turbo">gpt-4-turbo</option>
<option value="gpt-4-turbo-2024-04-09">gpt-4-turbo-2024-04-09</option>
@ -2821,15 +2825,19 @@
<h4 data-i18n="MistralAI Model">MistralAI Model</h4>
<select id="model_mistralai_select">
<optgroup label="Latest">
<option value="open-mistral-nemo">open-mistral-nemo</option>
<option value="open-mistral-7b">open-mistral-7b</option>
<option value="open-mixtral-8x7b">open-mixtral-8x7b</option>
<option value="open-mixtral-8x22b">open-mixtral-8x22b</option>
<option value="open-codestral-mamba">open-codestral-mamba</option>
<option value="mistral-small-latest">mistral-small-latest</option>
<option value="mistral-medium-latest">mistral-medium-latest</option>
<option value="mistral-large-latest">mistral-large-latest</option>
<option value="codestral-latest">codestral-latest</option>
<option value="codestral-mamba-latest">codestral-mamba-latest</option>
</optgroup>
<optgroup label="Sub-versions">
<option value="open-mistral-nemo-2407">open-mistral-nemo-2407</option>
<option value="open-mixtral-8x22b-2404">open-mixtral-8x22b-2404</option>
<option value="mistral-tiny-2312">mistral-tiny-2312</option>
<option value="mistral-small-2312">mistral-small-2312</option>
@ -2837,6 +2845,7 @@
<option value="mistral-medium-2312">mistral-medium-2312</option>
<option value="mistral-large-2402">mistral-large-2402</option>
<option value="codestral-2405">codestral-2405</option>
<option value="codestral-mamba-2407">codestral-mamba-2407</option>
</optgroup>
</select>
</div>

View File

@ -35,6 +35,7 @@
<option data-type="openai" value="gpt-4-vision-preview">gpt-4-vision-preview</option>
<option data-type="openai" value="gpt-4-turbo">gpt-4-turbo</option>
<option data-type="openai" value="gpt-4o">gpt-4o</option>
<option data-type="openai" value="gpt-4o-mini">gpt-4o-mini</option>
<option data-type="anthropic" value="claude-3-5-sonnet-20240620">claude-3-5-sonnet-20240620</option>
<option data-type="anthropic" value="claude-3-opus-20240229">claude-3-opus-20240229</option>
<option data-type="anthropic" value="claude-3-sonnet-20240229">claude-3-sonnet-20240229</option>
@ -44,6 +45,7 @@
<option data-type="openrouter" value="openai/gpt-4-vision-preview">openai/gpt-4-vision-preview</option>
<option data-type="openrouter" value="openai/gpt-4o">openai/gpt-4o</option>
<option data-type="openrouter" value="openai/gpt-4-turbo">openai/gpt-4-turbo</option>
<option data-type="openrouter" value="openai/gpt-4o-mini">openai/gpt-4o-mini</option>
<option data-type="openrouter" value="haotian-liu/llava-13b">haotian-liu/llava-13b</option>
<option data-type="openrouter" value="fireworks/firellava-13b">fireworks/firellava-13b</option>
<option data-type="openrouter" value="anthropic/claude-3.5-sonnet">anthropic/claude-3.5-sonnet</option>

View File

@ -399,7 +399,14 @@ export class SlashCommandHandler {
enumProvider: localEnumProviders.qrIds,
}),
],
helpString: 'Deletes a Quick Reply from the specified set. If no label is provided, the entire set is deleted.',
unnamedArgumentList: [
SlashCommandArgument.fromProps({
description: 'label',
typeList: [ARGUMENT_TYPE.STRING],
enumProvider: localEnumProviders.qrEntries,
}),
],
helpString: 'Deletes a Quick Reply from the specified set. (Label must be provided via named or unnamed argument)',
}));
SlashCommandParser.addCommandObject(SlashCommand.fromProps({ name: 'qr-contextadd',
callback: (args, name) => {

View File

@ -2289,24 +2289,34 @@ async function generatePicture(initiator, args, trigger, message, callback) {
}
const dimensions = setTypeSpecificDimensions(generationType);
const abortController = new AbortController();
let negativePromptPrefix = args?.negative || '';
let imagePath = '';
const stopListener = () => abortController.abort('Aborted by user');
const mesStop = document.getElementById('mes_stop');
try {
const combineNegatives = (prefix) => { negativePromptPrefix = combinePrefixes(negativePromptPrefix, prefix); };
const prompt = await getPrompt(generationType, message, trigger, quietPrompt, combineNegatives);
console.log('Processed image prompt:', prompt);
mesStop?.addEventListener('click', stopListener);
context.deactivateSendButtons();
hideSwipeButtons();
imagePath = await sendGenerationRequest(generationType, prompt, negativePromptPrefix, characterName, callback, initiator);
if (typeof args?._abortController?.addEventListener === 'function') {
args._abortController.addEventListener('abort', stopListener);
}
imagePath = await sendGenerationRequest(generationType, prompt, negativePromptPrefix, characterName, callback, initiator, abortController.signal);
} catch (err) {
console.trace(err);
throw new Error('SD prompt text generation failed.');
}
finally {
restoreOriginalDimensions(dimensions);
mesStop?.removeEventListener('click', stopListener);
context.activateSendButtons();
showSwipeButtons();
}
@ -2521,9 +2531,10 @@ async function generatePrompt(quietPrompt) {
* @param {string} characterName Name of the character
* @param {function} callback Callback function to be called after image generation
* @param {string} initiator The initiator of the image generation
* @param {AbortSignal} signal Abort signal to cancel the request
* @returns
*/
async function sendGenerationRequest(generationType, prompt, additionalNegativePrefix, characterName, callback, initiator) {
async function sendGenerationRequest(generationType, prompt, additionalNegativePrefix, characterName, callback, initiator, signal) {
const noCharPrefix = [generationMode.FREE, generationMode.BACKGROUND, generationMode.USER, generationMode.USER_MULTIMODAL, generationMode.FREE_EXTENDED];
const prefix = noCharPrefix.includes(generationType)
? extension_settings.sd.prompt_prefix
@ -2541,37 +2552,37 @@ async function sendGenerationRequest(generationType, prompt, additionalNegativeP
try {
switch (extension_settings.sd.source) {
case sources.extras:
result = await generateExtrasImage(prefixedPrompt, negativePrompt);
result = await generateExtrasImage(prefixedPrompt, negativePrompt, signal);
break;
case sources.horde:
result = await generateHordeImage(prefixedPrompt, negativePrompt);
result = await generateHordeImage(prefixedPrompt, negativePrompt, signal);
break;
case sources.vlad:
result = await generateAutoImage(prefixedPrompt, negativePrompt);
result = await generateAutoImage(prefixedPrompt, negativePrompt, signal);
break;
case sources.drawthings:
result = await generateDrawthingsImage(prefixedPrompt, negativePrompt);
result = await generateDrawthingsImage(prefixedPrompt, negativePrompt, signal);
break;
case sources.auto:
result = await generateAutoImage(prefixedPrompt, negativePrompt);
result = await generateAutoImage(prefixedPrompt, negativePrompt, signal);
break;
case sources.novel:
result = await generateNovelImage(prefixedPrompt, negativePrompt);
result = await generateNovelImage(prefixedPrompt, negativePrompt, signal);
break;
case sources.openai:
result = await generateOpenAiImage(prefixedPrompt);
result = await generateOpenAiImage(prefixedPrompt, signal);
break;
case sources.comfy:
result = await generateComfyImage(prefixedPrompt, negativePrompt);
result = await generateComfyImage(prefixedPrompt, negativePrompt, signal);
break;
case sources.togetherai:
result = await generateTogetherAIImage(prefixedPrompt, negativePrompt);
result = await generateTogetherAIImage(prefixedPrompt, negativePrompt, signal);
break;
case sources.pollinations:
result = await generatePollinationsImage(prefixedPrompt, negativePrompt);
result = await generatePollinationsImage(prefixedPrompt, negativePrompt, signal);
break;
case sources.stability:
result = await generateStabilityImage(prefixedPrompt, negativePrompt);
result = await generateStabilityImage(prefixedPrompt, negativePrompt, signal);
break;
}
@ -2600,12 +2611,14 @@ async function sendGenerationRequest(generationType, prompt, additionalNegativeP
* Generates an image using the TogetherAI API.
* @param {string} prompt - The main instruction used to guide the image generation.
* @param {string} negativePrompt - The instruction used to restrict the image generation.
* @param {AbortSignal} signal - An AbortSignal object that can be used to cancel the request.
* @returns {Promise<{format: string, data: string}>} - A promise that resolves when the image generation and processing are complete.
*/
async function generateTogetherAIImage(prompt, negativePrompt) {
async function generateTogetherAIImage(prompt, negativePrompt, signal) {
const result = await fetch('/api/sd/together/generate', {
method: 'POST',
headers: getRequestHeaders(),
signal: signal,
body: JSON.stringify({
prompt: prompt,
negative_prompt: negativePrompt,
@ -2630,12 +2643,14 @@ async function generateTogetherAIImage(prompt, negativePrompt) {
* Generates an image using the Pollinations API.
* @param {string} prompt - The main instruction used to guide the image generation.
* @param {string} negativePrompt - The instruction used to restrict the image generation.
* @param {AbortSignal} signal - An AbortSignal object that can be used to cancel the request.
* @returns {Promise<{format: string, data: string}>} - A promise that resolves when the image generation and processing are complete.
*/
async function generatePollinationsImage(prompt, negativePrompt) {
async function generatePollinationsImage(prompt, negativePrompt, signal) {
const result = await fetch('/api/sd/pollinations/generate', {
method: 'POST',
headers: getRequestHeaders(),
signal: signal,
body: JSON.stringify({
prompt: prompt,
negative_prompt: negativePrompt,
@ -2662,9 +2677,10 @@ async function generatePollinationsImage(prompt, negativePrompt) {
*
* @param {string} prompt - The main instruction used to guide the image generation.
* @param {string} negativePrompt - The instruction used to restrict the image generation.
* @param {AbortSignal} signal - An AbortSignal object that can be used to cancel the request.
* @returns {Promise<{format: string, data: string}>} - A promise that resolves when the image generation and processing are complete.
*/
async function generateExtrasImage(prompt, negativePrompt) {
async function generateExtrasImage(prompt, negativePrompt, signal) {
const url = new URL(getApiUrl());
url.pathname = '/api/image';
const result = await doExtrasFetch(url, {
@ -2672,6 +2688,7 @@ async function generateExtrasImage(prompt, negativePrompt) {
headers: {
'Content-Type': 'application/json',
},
signal: signal,
body: JSON.stringify({
prompt: prompt,
sampler: extension_settings.sd.sampler,
@ -2739,9 +2756,10 @@ function getClosestAspectRatio(width, height) {
* Generates an image using Stability AI.
* @param {string} prompt - The main instruction used to guide the image generation.
* @param {string} negativePrompt - The instruction used to restrict the image generation.
* @param {AbortSignal} signal - An AbortSignal object that can be used to cancel the request.
* @returns {Promise<{format: string, data: string}>} - A promise that resolves when the image generation and processing are complete.
*/
async function generateStabilityImage(prompt, negativePrompt) {
async function generateStabilityImage(prompt, negativePrompt, signal) {
const IMAGE_FORMAT = 'png';
const PROMPT_LIMIT = 10000;
@ -2749,6 +2767,7 @@ async function generateStabilityImage(prompt, negativePrompt) {
const response = await fetch('/api/sd/stability/generate', {
method: 'POST',
headers: getRequestHeaders(),
signal: signal,
body: JSON.stringify({
model: extension_settings.sd.model,
payload: {
@ -2783,12 +2802,14 @@ async function generateStabilityImage(prompt, negativePrompt) {
*
* @param {string} prompt - The main instruction used to guide the image generation.
* @param {string} negativePrompt - The instruction used to restrict the image generation.
* @param {AbortSignal} signal - An AbortSignal object that can be used to cancel the request.
* @returns {Promise<{format: string, data: string}>} - A promise that resolves when the image generation and processing are complete.
*/
async function generateHordeImage(prompt, negativePrompt) {
async function generateHordeImage(prompt, negativePrompt, signal) {
const result = await fetch('/api/horde/generate-image', {
method: 'POST',
headers: getRequestHeaders(),
signal: signal,
body: JSON.stringify({
prompt: prompt,
sampler: extension_settings.sd.sampler,
@ -2821,13 +2842,15 @@ async function generateHordeImage(prompt, negativePrompt) {
*
* @param {string} prompt - The main instruction used to guide the image generation.
* @param {string} negativePrompt - The instruction used to restrict the image generation.
* @param {AbortSignal} signal - An AbortSignal object that can be used to cancel the request.
* @returns {Promise<{format: string, data: string}>} - A promise that resolves when the image generation and processing are complete.
*/
async function generateAutoImage(prompt, negativePrompt) {
async function generateAutoImage(prompt, negativePrompt, signal) {
const isValidVae = extension_settings.sd.vae && !['N/A', placeholderVae].includes(extension_settings.sd.vae);
const result = await fetch('/api/sd/generate', {
method: 'POST',
headers: getRequestHeaders(),
signal: signal,
body: JSON.stringify({
...getSdRequestBody(),
prompt: prompt,
@ -2875,12 +2898,14 @@ async function generateAutoImage(prompt, negativePrompt) {
*
* @param {string} prompt - The main instruction used to guide the image generation.
* @param {string} negativePrompt - The instruction used to restrict the image generation.
* @param {AbortSignal} signal - An AbortSignal object that can be used to cancel the request.
* @returns {Promise<{format: string, data: string}>} - A promise that resolves when the image generation and processing are complete.
*/
async function generateDrawthingsImage(prompt, negativePrompt) {
async function generateDrawthingsImage(prompt, negativePrompt, signal) {
const result = await fetch('/api/sd/drawthings/generate', {
method: 'POST',
headers: getRequestHeaders(),
signal: signal,
body: JSON.stringify({
...getSdRequestBody(),
prompt: prompt,
@ -2914,14 +2939,16 @@ async function generateDrawthingsImage(prompt, negativePrompt) {
*
* @param {string} prompt - The main instruction used to guide the image generation.
* @param {string} negativePrompt - The instruction used to restrict the image generation.
* @param {AbortSignal} signal - An AbortSignal object that can be used to cancel the request.
* @returns {Promise<{format: string, data: string}>} - A promise that resolves when the image generation and processing are complete.
*/
async function generateNovelImage(prompt, negativePrompt) {
async function generateNovelImage(prompt, negativePrompt, signal) {
const { steps, width, height, sm, sm_dyn } = getNovelParams();
const result = await fetch('/api/novelai/generate-image', {
method: 'POST',
headers: getRequestHeaders(),
signal: signal,
body: JSON.stringify({
prompt: prompt,
model: extension_settings.sd.model,
@ -3010,7 +3037,13 @@ function getNovelParams() {
return { steps, width, height, sm, sm_dyn };
}
async function generateOpenAiImage(prompt) {
/**
* Generates an image in OpenAI API using the provided prompt and configuration settings.
* @param {string} prompt - The main instruction used to guide the image generation.
* @param {AbortSignal} signal - An AbortSignal object that can be used to cancel the request.
* @returns {Promise<{format: string, data: string}>} - A promise that resolves when the image generation and processing are complete.
*/
async function generateOpenAiImage(prompt, signal) {
const dalle2PromptLimit = 1000;
const dalle3PromptLimit = 4000;
@ -3045,6 +3078,7 @@ async function generateOpenAiImage(prompt) {
const result = await fetch('/api/openai/generate-image', {
method: 'POST',
headers: getRequestHeaders(),
signal: signal,
body: JSON.stringify({
prompt: prompt,
model: extension_settings.sd.model,
@ -3070,9 +3104,10 @@ async function generateOpenAiImage(prompt) {
*
* @param {string} prompt - The main instruction used to guide the image generation.
* @param {string} negativePrompt - The instruction used to restrict the image generation.
* @param {AbortSignal} signal - An AbortSignal object that can be used to cancel the request.
* @returns {Promise<{format: string, data: string}>} - A promise that resolves when the image generation and processing are complete.
*/
async function generateComfyImage(prompt, negativePrompt) {
async function generateComfyImage(prompt, negativePrompt, signal) {
const placeholders = [
'model',
'vae',
@ -3133,6 +3168,7 @@ async function generateComfyImage(prompt, negativePrompt) {
const promptResult = await fetch('/api/sd/comfy/generate', {
method: 'POST',
headers: getRequestHeaders(),
signal: signal,
body: JSON.stringify({
url: extension_settings.sd.comfy_url,
prompt: `{
@ -3245,7 +3281,7 @@ async function onComfyNewWorkflowClick() {
if (!name) {
return;
}
if (!name.toLowerCase().endsWith('.json')) {
if (!String(name).toLowerCase().endsWith('.json')) {
name += '.json';
}
extension_settings.sd.comfy_workflow = name;
@ -3431,6 +3467,7 @@ async function moduleWorker() {
}
setInterval(moduleWorker, UPDATE_INTERVAL);
let buttonAbortController = null;
async function sdMessageButton(e) {
function setBusyIcon(isBusy) {
@ -3450,11 +3487,13 @@ async function sdMessageButton(e) {
const hasSavedNegative = message?.extra?.negative;
if ($icon.hasClass(busyClass)) {
buttonAbortController?.abort('Aborted by user');
console.log('Previous image is still being generated...');
return;
}
let dimensions = null;
buttonAbortController = new AbortController();
try {
setBusyIcon(true);
@ -3466,7 +3505,7 @@ async function sdMessageButton(e) {
const generationType = message?.extra?.generationType ?? generationMode.FREE;
console.log('Regenerating an image, using existing prompt:', prompt);
dimensions = setTypeSpecificDimensions(generationType);
await sendGenerationRequest(generationType, prompt, negative, characterFileName, saveGeneratedImage, initiators.action);
await sendGenerationRequest(generationType, prompt, negative, characterFileName, saveGeneratedImage, initiators.action, buttonAbortController?.signal);
}
else {
console.log('doing /sd raw last');

View File

@ -125,6 +125,7 @@ const max_32k = 32767;
const max_64k = 65535;
const max_128k = 128 * 1000;
const max_200k = 200 * 1000;
const max_256k = 256 * 1000;
const max_1mil = 1000 * 1000;
const scale_max = 8191;
const claude_max = 9000; // We have a proper tokenizer, so theoretically could be larger (up to 9k)
@ -3992,7 +3993,7 @@ async function onModelChange() {
if ($(this).is('#model_mistralai_select')) {
// Upgrade old mistral models to new naming scheme
// would have done this in loadOpenAISettings, but it wasn't updating on preset change?
if (value === 'mistral-medium' || value === 'mistral-small' || value === 'mistral-tiny') {
if (value === 'mistral-medium' || value === 'mistral-small') {
value = value + '-latest';
} else if (value === '') {
value = default_settings.mistralai_model;
@ -4139,6 +4140,10 @@ async function onModelChange() {
if (oai_settings.chat_completion_source === chat_completion_sources.MISTRALAI) {
if (oai_settings.max_context_unlocked) {
$('#openai_max_context').attr('max', unlocked_max);
} else if (oai_settings.mistralai_model.includes('codestral-mamba')) {
$('#openai_max_context').attr('max', max_256k);
} else if (oai_settings.mistralai_model.includes('mistral-nemo')) {
$('#openai_max_context').attr('max', max_128k);
} else if (oai_settings.mistralai_model.includes('mixtral-8x22b')) {
$('#openai_max_context').attr('max', max_64k);
} else {
@ -4628,8 +4633,10 @@ export function isImageInliningSupported() {
'gemini-1.5-pro-latest',
'gemini-pro-vision',
'claude-3',
'claude-3-5',
'gpt-4-turbo',
'gpt-4o',
'gpt-4o-mini',
];
switch (oai_settings.chat_completion_source) {

View File

@ -0,0 +1,36 @@
/**
 * Minimal EventTarget-compatible base class for objects that cannot extend
 * the native EventTarget directly. Supports add/remove/dispatch; listener
 * options and preventDefault are not supported.
 * @abstract
 * @implements {EventTarget}
 */
export class AbstractEventTarget {
    constructor() {
        /** @type {Object.<string, Function[]>} map of event type to registered callbacks */
        this.listeners = {};
    }

    /**
     * Registers a callback for the given event type.
     * Unlike the native EventTarget, duplicate registrations are kept.
     * @param {string} type Event type to listen for
     * @param {Function} callback Listener invoked with the dispatched event
     * @param {object} [_options] Ignored (native EventTarget signature compatibility)
     */
    addEventListener(type, callback, _options) {
        if (!this.listeners[type]) {
            this.listeners[type] = [];
        }
        this.listeners[type].push(callback);
    }

    /**
     * Invokes all listeners registered for the event's type.
     * @param {{type: string}} event Event (or event-like object with a `type`) to dispatch
     * @returns {boolean} Always true (cancellation via preventDefault is not supported)
     */
    dispatchEvent(event) {
        const callbacks = this.listeners[event.type];
        if (!callbacks || callbacks.length === 0) {
            return true;
        }
        // Iterate over a snapshot: a listener that removes itself (or a
        // sibling) during dispatch must not cause other listeners to be
        // skipped mid-dispatch.
        for (const listener of [...callbacks]) {
            listener(event);
        }
        return true;
    }

    /**
     * Removes a previously registered callback for the given event type.
     * No-op if the callback was never registered.
     * @param {string} type Event type the callback was registered for
     * @param {Function} callback The exact callback reference to remove
     * @param {object} [_options] Ignored (native EventTarget signature compatibility)
     */
    removeEventListener(type, callback, _options) {
        if (!this.listeners[type]) {
            return;
        }
        const index = this.listeners[type].indexOf(callback);
        if (index !== -1) {
            this.listeners[type].splice(index, 1);
        }
    }
}

View File

@ -1,22 +1,28 @@
export class SlashCommandAbortController {
import { AbstractEventTarget } from './AbstractEventTarget.js';
/**
 * Abort controller for slash command execution.
 * Mirrors the native AbortController shape (an `abort()` plus a `signal`),
 * and additionally supports pausing/continuing; state changes are announced
 * via 'abort' / 'pause' / 'continue' events.
 */
export class SlashCommandAbortController extends AbstractEventTarget {
    /**@type {SlashCommandAbortSignal}*/ signal;

    constructor() {
        super();
        this.signal = new SlashCommandAbortSignal();
    }

    /**
     * Marks the signal as aborted and notifies 'abort' listeners.
     * @param {string} reason Human-readable reason for the abort
     * @param {boolean} isQuiet Whether the abort should be handled quietly
     */
    abort(reason = 'No reason.', isQuiet = false) {
        Object.assign(this.signal, { isQuiet, aborted: true, reason });
        this.dispatchEvent(new Event('abort'));
    }

    /**
     * Marks the signal as paused and notifies 'pause' listeners.
     * @param {string} reason Human-readable reason for the pause
     */
    pause(reason = 'No reason.') {
        Object.assign(this.signal, { paused: true, reason });
        this.dispatchEvent(new Event('pause'));
    }

    /**
     * Clears the paused state and notifies 'continue' listeners.
     * @param {string} reason Human-readable reason for resuming
     */
    continue(reason = 'No reason.') {
        Object.assign(this.signal, { paused: false, reason });
        this.dispatchEvent(new Event('continue'));
    }
}

View File

@ -339,7 +339,18 @@ router.post('/generate-image', jsonParser, async (request, response) => {
return response.sendStatus(400);
}
console.log('Horde image generation request:', generation);
const controller = new AbortController();
request.socket.removeAllListeners('close');
request.socket.on('close', function () {
console.log('Horde image generation request aborted.');
controller.abort();
if (generation.id) ai_horde.deleteImageGenerationRequest(generation.id);
});
for (let attempt = 0; attempt < MAX_ATTEMPTS; attempt++) {
controller.signal.throwIfAborted();
await delay(CHECK_INTERVAL);
const check = await ai_horde.getImageGenerationCheck(generation.id);
console.log(check);

View File

@ -323,6 +323,17 @@ router.post('/generate', jsonParser, async (request, response) => {
const url = new URL(request.body.url);
url.pathname = '/sdapi/v1/txt2img';
const controller = new AbortController();
request.socket.removeAllListeners('close');
request.socket.on('close', function () {
if (!response.writableEnded) {
const url = new URL(request.body.url);
url.pathname = '/sdapi/v1/interrupt';
fetch(url, { method: 'POST', headers: { 'Authorization': getBasicAuthHeader(request.body.auth) } });
}
controller.abort();
});
const result = await fetch(url, {
method: 'POST',
body: JSON.stringify(request.body),
@ -331,6 +342,8 @@ router.post('/generate', jsonParser, async (request, response) => {
'Authorization': getBasicAuthHeader(request.body.auth),
},
timeout: 0,
// @ts-ignore
signal: controller.signal,
});
if (!result.ok) {
@ -556,6 +569,17 @@ comfy.post('/generate', jsonParser, async (request, response) => {
const url = new URL(request.body.url);
url.pathname = '/prompt';
const controller = new AbortController();
request.socket.removeAllListeners('close');
request.socket.on('close', function () {
if (!response.writableEnded && !item) {
const interruptUrl = new URL(request.body.url);
interruptUrl.pathname = '/interrupt';
fetch(interruptUrl, { method: 'POST', headers: { 'Authorization': getBasicAuthHeader(request.body.auth) } });
}
controller.abort();
});
const promptResult = await fetch(url, {
method: 'POST',
body: request.body.prompt,
@ -581,6 +605,9 @@ comfy.post('/generate', jsonParser, async (request, response) => {
}
await delay(100);
}
if (item.status.status_str === 'error') {
throw new Error('ComfyUI generation did not succeed.');
}
const imgInfo = Object.keys(item.outputs).map(it => item.outputs[it].images).flat()[0];
const imgUrl = new URL(request.body.url);
imgUrl.pathname = '/view';
@ -592,6 +619,7 @@ comfy.post('/generate', jsonParser, async (request, response) => {
const imgBuffer = await imgResponse.buffer();
return response.send(imgBuffer.toString('base64'));
} catch (error) {
console.log(error);
return response.sendStatus(500);
}
});