-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
@@ -5007,8 +5014,8 @@
-
-
+
+
Content
diff --git a/public/scripts/extensions/caption/index.js b/public/scripts/extensions/caption/index.js
index fff8a798a..8534fd0f6 100644
--- a/public/scripts/extensions/caption/index.js
+++ b/public/scripts/extensions/caption/index.js
@@ -30,7 +30,7 @@ function migrateSettings() {
if (extension_settings.caption.source === 'openai') {
extension_settings.caption.source = 'multimodal';
extension_settings.caption.multimodal_api = 'openai';
- extension_settings.caption.multimodal_model = 'gpt-4-vision-preview';
+ extension_settings.caption.multimodal_model = 'gpt-4-turbo';
}
if (!extension_settings.caption.multimodal_api) {
@@ -38,7 +38,7 @@ function migrateSettings() {
}
if (!extension_settings.caption.multimodal_model) {
- extension_settings.caption.multimodal_model = 'gpt-4-vision-preview';
+ extension_settings.caption.multimodal_model = 'gpt-4-turbo';
}
if (!extension_settings.caption.prompt) {
@@ -369,6 +369,7 @@ jQuery(function () {
Model
Source
@@ -158,6 +159,25 @@
+
CFG Scale ()
Sampling steps ()
diff --git a/public/scripts/openai.js b/public/scripts/openai.js
index 19268a005..98fa51b3d 100644
--- a/public/scripts/openai.js
+++ b/public/scripts/openai.js
@@ -186,6 +186,11 @@ const continue_postfix_types = {
DOUBLE_NEWLINE: '\n\n',
};
+const custom_prompt_post_processing_types = {
+ NONE: '',
+ CLAUDE: 'claude',
+};
+
const prefixMap = selected_group ? {
assistant: '',
user: '',
@@ -262,6 +267,7 @@ const default_settings = {
continue_prefill: false,
names_behavior: character_names_behavior.NONE,
continue_postfix: continue_postfix_types.SPACE,
+ custom_prompt_post_processing: custom_prompt_post_processing_types.NONE,
seed: -1,
n: 1,
};
@@ -331,6 +337,7 @@ const oai_settings = {
continue_prefill: false,
names_behavior: character_names_behavior.NONE,
continue_postfix: continue_postfix_types.SPACE,
+ custom_prompt_post_processing: custom_prompt_post_processing_types.NONE,
seed: -1,
n: 1,
};
@@ -1743,6 +1750,7 @@ async function sendOpenAIRequest(type, messages, signal) {
generate_data['custom_include_body'] = oai_settings.custom_include_body;
generate_data['custom_exclude_body'] = oai_settings.custom_exclude_body;
generate_data['custom_include_headers'] = oai_settings.custom_include_headers;
+ generate_data['custom_prompt_post_processing'] = oai_settings.custom_prompt_post_processing;
}
if (isCohere) {
@@ -2625,6 +2633,7 @@ function loadOpenAISettings(data, settings) {
oai_settings.custom_include_body = settings.custom_include_body ?? default_settings.custom_include_body;
oai_settings.custom_exclude_body = settings.custom_exclude_body ?? default_settings.custom_exclude_body;
oai_settings.custom_include_headers = settings.custom_include_headers ?? default_settings.custom_include_headers;
+ oai_settings.custom_prompt_post_processing = settings.custom_prompt_post_processing ?? default_settings.custom_prompt_post_processing;
oai_settings.google_model = settings.google_model ?? default_settings.google_model;
oai_settings.chat_completion_source = settings.chat_completion_source ?? default_settings.chat_completion_source;
oai_settings.api_url_scale = settings.api_url_scale ?? default_settings.api_url_scale;
@@ -2770,6 +2779,8 @@ function loadOpenAISettings(data, settings) {
$('#chat_completion_source').val(oai_settings.chat_completion_source).trigger('change');
$('#oai_max_context_unlocked').prop('checked', oai_settings.max_context_unlocked);
+ $('#custom_prompt_post_processing').val(oai_settings.custom_prompt_post_processing);
+ $(`#custom_prompt_post_processing option[value="${oai_settings.custom_prompt_post_processing}"]`).attr('selected', true);
}
function setNamesBehaviorControls() {
@@ -2924,6 +2935,7 @@ async function saveOpenAIPreset(name, settings, triggerUi = true) {
custom_include_body: settings.custom_include_body,
custom_exclude_body: settings.custom_exclude_body,
custom_include_headers: settings.custom_include_headers,
+ custom_prompt_post_processing: settings.custom_prompt_post_processing,
google_model: settings.google_model,
temperature: settings.temp_openai,
frequency_penalty: settings.freq_pen_openai,
@@ -3313,6 +3325,7 @@ function onSettingsPresetChange() {
custom_include_body: ['#custom_include_body', 'custom_include_body', false],
custom_exclude_body: ['#custom_exclude_body', 'custom_exclude_body', false],
custom_include_headers: ['#custom_include_headers', 'custom_include_headers', false],
+ custom_prompt_post_processing: ['#custom_prompt_post_processing', 'custom_prompt_post_processing', false],
google_model: ['#model_google_select', 'google_model', false],
openai_max_context: ['#openai_max_context', 'openai_max_context', false],
openai_max_tokens: ['#openai_max_tokens', 'openai_max_tokens', false],
@@ -4025,12 +4038,13 @@ export function isImageInliningSupported() {
'gemini-1.0-pro-vision-latest',
'gemini-1.5-pro-latest',
'gemini-pro-vision',
- 'claude-3'
+ 'claude-3',
+ 'gpt-4-turbo',
];
switch (oai_settings.chat_completion_source) {
case chat_completion_sources.OPENAI:
- return visionSupportedModels.some(model => oai_settings.openai_model.includes(model));
+ return visionSupportedModels.some(model => oai_settings.openai_model.includes(model) && !oai_settings.openai_model.includes('gpt-4-turbo-preview'));
case chat_completion_sources.MAKERSUITE:
return visionSupportedModels.some(model => oai_settings.google_model.includes(model));
case chat_completion_sources.CLAUDE:
@@ -4493,6 +4507,11 @@ $(document).ready(async function () {
saveSettingsDebounced();
});
+ $('#custom_prompt_post_processing').on('change', function () {
+ oai_settings.custom_prompt_post_processing = String($(this).val());
+ saveSettingsDebounced();
+ });
+
$('#names_behavior').on('input', function () {
oai_settings.names_behavior = Number($(this).val());
setNamesBehaviorControls();
diff --git a/public/scripts/textgen-settings.js b/public/scripts/textgen-settings.js
index f871434a3..4f8156a38 100644
--- a/public/scripts/textgen-settings.js
+++ b/public/scripts/textgen-settings.js
@@ -568,7 +568,7 @@ jQuery(function () {
const json_schema_string = String($(this).val());
try {
- settings.json_schema = JSON.parse(json_schema_string ?? '{}');
+ settings.json_schema = JSON.parse(json_schema_string || '{}');
} catch {
// Ignore errors from here
}
diff --git a/public/style.css b/public/style.css
index 8953009ee..cc45b92d0 100644
--- a/public/style.css
+++ b/public/style.css
@@ -986,9 +986,7 @@ body.reduced-motion #bg_custom {
}
.avatars_inline .avatar {
- margin-top: calc(var(--avatar-base-border-radius));
- margin-left: calc(var(--avatar-base-border-radius));
- margin-bottom: calc(var(--avatar-base-border-radius));
+ margin: calc(var(--avatar-base-border-radius));
}
.avatars_inline .avatar:last-of-type {
@@ -1101,8 +1099,8 @@ select {
@media screen and (min-width: 1001px) {
#description_textarea {
- height: 30vh;
- height: 30svh;
+ height: 29vh;
+ height: 29svh;
}
#firstmessage_textarea {
@@ -1189,7 +1187,7 @@ input[type="file"] {
#right-nav-panel-tabs {
display: flex;
align-items: center;
- gap: 10px;
+ gap: 5px;
overflow: hidden;
width: 100%;
}
@@ -1199,7 +1197,7 @@ input[type="file"] {
align-items: center;
flex-wrap: wrap;
gap: 10px;
- padding: 0 5px;
+ padding: 0px 10px 0px 5px;
}
#right-nav-panel-tabs .right_menu_button,
@@ -1653,6 +1651,7 @@ input[type=search]:focus::-webkit-search-cancel-button {
.missing-avatar.inline_avatar {
padding: unset;
border-radius: var(--avatar-base-border-radius-round);
+ width: fit-content;
}
/*applies to char list and mes_text char display name*/
@@ -2001,6 +2000,19 @@ grammarly-extension {
justify-content: center;
align-items: center;
align-self: center !important;
+ width: 100%;
+ height: 100%;
+ /* Avoids cutting off the box shadow on the avatar*/
+ margin: 10px;
+}
+
+#avatar_controls {
+ height: 100%;
+ width: 100%;
+ flex-grow: 1;
+ justify-content: flex-end;
+ flex-flow: column;
+ padding: 5px 5px 10px 0;
}
#description_div,
@@ -2211,11 +2223,11 @@ grammarly-extension {
font-weight: bold;
padding: 5px;
margin: 0;
- height: 26px;
filter: grayscale(0.5);
text-align: center;
font-size: 17px;
aspect-ratio: 1 / 1;
+ flex: 0.075;
}
.menu_button:hover,
@@ -2634,7 +2646,11 @@ input[type="range"]::-webkit-slider-thumb {
color: var(--SmartThemeBodyColor);
}
-#char-management-dropdown,
+#char-management-dropdown {
+ height: auto;
+ margin-bottom: 0;
+}
+
#tagInput {
height: 26px;
margin-bottom: 0;
diff --git a/src/endpoints/backends/chat-completions.js b/src/endpoints/backends/chat-completions.js
index 8f3bea56a..a7ebad661 100644
--- a/src/endpoints/backends/chat-completions.js
+++ b/src/endpoints/backends/chat-completions.js
@@ -15,6 +15,23 @@ const API_CLAUDE = 'https://api.anthropic.com/v1';
const API_MISTRAL = 'https://api.mistral.ai/v1';
const API_COHERE = 'https://api.cohere.ai/v1';
+/**
+ * Applies a post-processing step to the generated messages.
+ * @param {object[]} messages Messages to post-process
+ * @param {string} type Prompt conversion type
+ * @param {string} charName Character name
+ * @param {string} userName User name
+ * @returns
+ */
+function postProcessPrompt(messages, type, charName, userName) {
+ switch (type) {
+ case 'claude':
+ return convertClaudeMessages(messages, '', false, '', charName, userName).messages;
+ default:
+ return messages;
+ }
+}
+
/**
* Ollama strikes back. Special boy #2's steaming routine.
* Wrap this abomination into proper SSE stream, again.
@@ -524,9 +541,8 @@ async function sendMistralAIRequest(request, response) {
/**
* Sends a request to Cohere API.
- * @param {import('express').Request} request
- * @param {import('express').Response} response
- * @returns {Promise<void>}
+ * @param {express.Request} request Express request
+ * @param {express.Response} response Express response
*/
async function sendCohereRequest(request, response) {
const apiKey = readSecret(request.user.directories, SECRET_KEYS.COHERE);
@@ -856,6 +872,15 @@ router.post('/generate', jsonParser, function (request, response) {
mergeObjectWithYaml(bodyParams, request.body.custom_include_body);
mergeObjectWithYaml(headers, request.body.custom_include_headers);
+
+ if (request.body.custom_prompt_post_processing) {
+ console.log('Applying custom prompt post-processing of type', request.body.custom_prompt_post_processing);
+ request.body.messages = postProcessPrompt(
+ request.body.messages,
+ request.body.custom_prompt_post_processing,
+ request.body.char_name,
+ request.body.user_name);
+ }
} else {
console.log('This chat completion source is not supported yet.');
return response.status(400).send({ error: true });
diff --git a/src/endpoints/backends/text-completions.js b/src/endpoints/backends/text-completions.js
index e02bfbc49..a9fa14cc5 100644
--- a/src/endpoints/backends/text-completions.js
+++ b/src/endpoints/backends/text-completions.js
@@ -473,6 +473,76 @@ llamacpp.post('/caption-image', jsonParser, async function (request, response) {
}
});
+llamacpp.post('/props', jsonParser, async function (request, response) {
+ try {
+ if (!request.body.server_url) {
+ return response.sendStatus(400);
+ }
+
+ console.log('LlamaCpp props request:', request.body);
+ const baseUrl = trimV1(request.body.server_url);
+
+ const fetchResponse = await fetch(`${baseUrl}/props`, {
+ method: 'GET',
+ timeout: 0,
+ });
+
+ if (!fetchResponse.ok) {
+ console.log('LlamaCpp props error:', fetchResponse.status, fetchResponse.statusText);
+ return response.status(500).send({ error: true });
+ }
+
+ const data = await fetchResponse.json();
+ console.log('LlamaCpp props response:', data);
+
+ return response.send(data);
+
+ } catch (error) {
+ console.error(error);
+ return response.status(500);
+ }
+});
+
+llamacpp.post('/slots', jsonParser, async function (request, response) {
+ try {
+ if (!request.body.server_url) {
+ return response.sendStatus(400);
+ }
+ if (!/^\d+$/.test(request.body.id_slot)) {
+ return response.sendStatus(400);
+ }
+ if (!/^(erase|restore|save)$/.test(request.body.action)) {
+ return response.sendStatus(400);
+ }
+
+ console.log('LlamaCpp slots request:', request.body);
+ const baseUrl = trimV1(request.body.server_url);
+
+ const fetchResponse = await fetch(`${baseUrl}/slots/${request.body.id_slot}?action=${request.body.action}`, {
+ method: 'POST',
+ headers: { 'Content-Type': 'application/json' },
+ timeout: 0,
+ body: JSON.stringify({
+ filename: `${request.body.filename}`,
+ }),
+ });
+
+ if (!fetchResponse.ok) {
+ console.log('LlamaCpp slots error:', fetchResponse.status, fetchResponse.statusText);
+ return response.status(500).send({ error: true });
+ }
+
+ const data = await fetchResponse.json();
+ console.log('LlamaCpp slots response:', data);
+
+ return response.send(data);
+
+ } catch (error) {
+ console.error(error);
+ return response.status(500);
+ }
+});
+
router.use('/ollama', ollama);
router.use('/llamacpp', llamacpp);
diff --git a/src/endpoints/stable-diffusion.js b/src/endpoints/stable-diffusion.js
index ed2c15b82..55669adc2 100644
--- a/src/endpoints/stable-diffusion.js
+++ b/src/endpoints/stable-diffusion.js
@@ -709,8 +709,47 @@ drawthings.post('/generate', jsonParser, async (request, response) => {
}
});
+const pollinations = express.Router();
+
+pollinations.post('/generate', jsonParser, async (request, response) => {
+ try {
+ const promptUrl = new URL(`https://image.pollinations.ai/prompt/${encodeURIComponent(request.body.prompt)}`);
+ const params = new URLSearchParams({
+ model: String(request.body.model),
+ negative_prompt: String(request.body.negative_prompt),
+ seed: String(Math.floor(Math.random() * 10_000_000)),
+ enhance: String(request.body.enhance ?? false),
+ refine: String(request.body.refine ?? false),
+ width: String(request.body.width ?? 1024),
+ height: String(request.body.height ?? 1024),
+ nologo: String(true),
+ nofeed: String(true),
+ referer: 'sillytavern',
+ });
+ promptUrl.search = params.toString();
+
+ console.log('Pollinations request URL:', promptUrl.toString());
+
+ const result = await fetch(promptUrl);
+
+ if (!result.ok) {
+ console.log('Pollinations returned an error.', result.status, result.statusText);
+ throw new Error('Pollinations request failed.');
+ }
+
+ const buffer = await result.buffer();
+ const base64 = buffer.toString('base64');
+
+ return response.send({ image: base64 });
+ } catch (error) {
+ console.log(error);
+ return response.sendStatus(500);
+ }
+});
+
router.use('/comfy', comfy);
router.use('/together', together);
router.use('/drawthings', drawthings);
+router.use('/pollinations', pollinations);
module.exports = { router };