Compare commits

...

12 Commits

Author · SHA1 · Message · Date
Cohee · 20ab6193ab · Send multiple image swipes per prompt (#3635) · 2025-03-16 21:17:53 +02:00
bmen25124 · d42a81f97c · New connection manager events, ConnectionManagerRequestService (#3603) · 2025-03-16 16:58:34 +02:00
Cohee · 46b9bb7854 · Merge pull request #3695 from Succubyss/gpt_tokenizer_tweak (gpt-4.5 detection tweak) · 2025-03-16 02:42:55 +02:00
Succubyss · fff1dd59c3 · minor 4.5 detection tweak · 2025-03-15 19:27:42 -05:00
Cohee · f88e95d9be · Merge branch 'release' into staging · 2025-03-16 02:25:20 +02:00
Cohee · 248132dd89 · Set debug level to unshallow warnings · 2025-03-16 02:25:07 +02:00
Cohee · ead37defeb · Merge pull request #3694 from SillyTavern/workflow-hotfixes (Workflow hotfixes) · 2025-03-16 01:06:19 +02:00
Wolfsblvt · f18cb91ef9 · on push, check all pushed commits - duh · 2025-03-16 00:02:24 +01:00
Wolfsblvt · 892fe7bd34 · Workflows ensure explicit versions of actions · 2025-03-16 00:02:24 +01:00
Wolfsblvt · 0126e5e5a3 · Add explicit workflow permissions · 2025-03-16 00:02:24 +01:00
Cohee · b8afa96de5 · Replace link to docs about regex flags · 2025-03-15 23:07:58 +02:00
Cohee · 9c42391706 · Uncomment cookieSecret config removal · 2025-03-15 23:04:02 +02:00
25 changed files with 874 additions and 148 deletions

View File

@@ -7,6 +7,10 @@ on:
issue_comment:
types: [created]
permissions:
contents: read
issues: write
jobs:
label-on-content:
name: 🏷️ Label Issues by Content
@@ -16,7 +20,7 @@ jobs:
- name: Checkout Repository
# Checkout
# https://github.com/marketplace/actions/checkout
uses: actions/checkout@v4
uses: actions/checkout@v4.2.2
- name: Auto-Label Issues (Based on Issue Content)
# only auto label based on issue content once, on open (to prevent re-labeling removed labels)
@@ -24,7 +28,7 @@ jobs:
# Issue Labeler
# https://github.com/marketplace/actions/regex-issue-labeler
uses: github/issue-labeler@v3
uses: github/issue-labeler@v3.4
with:
configuration-path: .github/issues-auto-labels.yml
enable-versioned-regex: 0
@@ -39,7 +43,7 @@ jobs:
if: contains(fromJSON('["👩‍💻 Good First Issue", "🙏 Help Wanted", "🪲 Confirmed", "⚠️ High Priority", "❕ Medium Priority", "💤 Low Priority"]'), github.event.label.name)
# 🤖 Issues Helper
# https://github.com/marketplace/actions/issues-helper
uses: actions-cool/issues-helper@v3
uses: actions-cool/issues-helper@v3.6.0
with:
actions: 'add-labels'
token: ${{ secrets.BOT_GITHUB_TOKEN || secrets.GITHUB_TOKEN }}
@@ -49,7 +53,7 @@ jobs:
if: contains(fromJSON('["✅ Done", "✅ Done (staging)", "⚰️ Stale", "❌ wontfix"]'), github.event.label.name)
# 🤖 Issues Helper
# https://github.com/marketplace/actions/issues-helper
uses: actions-cool/issues-helper@v3
uses: actions-cool/issues-helper@v3.6.0
with:
actions: 'remove-labels'
token: ${{ secrets.BOT_GITHUB_TOKEN || secrets.GITHUB_TOKEN }}
@@ -59,7 +63,7 @@ jobs:
if: contains(fromJSON('["❌ wontfix","👍 Approved","👩‍💻 Good First Issue"]'), github.event.label.name)
# 🤖 Issues Helper
# https://github.com/marketplace/actions/issues-helper
uses: actions-cool/issues-helper@v3
uses: actions-cool/issues-helper@v3.6.0
with:
actions: 'remove-labels'
token: ${{ secrets.BOT_GITHUB_TOKEN || secrets.GITHUB_TOKEN }}
@@ -69,7 +73,7 @@ jobs:
if: github.event.label.name == '🪲 Confirmed'
# 🤖 Issues Helper
# https://github.com/marketplace/actions/issues-helper
uses: actions-cool/issues-helper@v3
uses: actions-cool/issues-helper@v3.6.0
with:
actions: 'remove-labels'
token: ${{ secrets.BOT_GITHUB_TOKEN || secrets.GITHUB_TOKEN }}
@@ -85,7 +89,7 @@ jobs:
- name: Remove Stale Label
# 🤖 Issues Helper
# https://github.com/marketplace/actions/issues-helper
uses: actions-cool/issues-helper@v3
uses: actions-cool/issues-helper@v3.6.0
with:
actions: 'remove-labels'
token: ${{ secrets.BOT_GITHUB_TOKEN || secrets.GITHUB_TOKEN }}
@@ -101,12 +105,12 @@ jobs:
- name: Checkout Repository
# Checkout
# https://github.com/marketplace/actions/checkout
uses: actions/checkout@v4
uses: actions/checkout@v4.2.2
- name: Post Issue Comments Based on Labels
# Label Commenter
# https://github.com/marketplace/actions/label-commenter
uses: peaceiris/actions-label-commenter@v1
uses: peaceiris/actions-label-commenter@v1.10.0
with:
config_file: .github/issues-auto-comments.yml
github_token: ${{ secrets.BOT_GITHUB_TOKEN || secrets.GITHUB_TOKEN }}

View File

@@ -6,6 +6,10 @@ on:
- staging
- release
permissions:
contents: read
issues: write
jobs:
# This runs commits to staging/release, reading the commit messages. Check `pr-auto-manager.yml`:`update-linked-issues` for PR-linked updates.
update-linked-issues:
@@ -16,12 +20,12 @@ jobs:
- name: Checkout Repository
# Checkout
# https://github.com/marketplace/actions/checkout
uses: actions/checkout@v4
uses: actions/checkout@v4.2.2
- name: Extract Linked Issues from Commit Message
id: extract_issues
run: |
ISSUES=$(git log -1 --pretty=%B | grep -oiE '(close|closes|closed|fix|fixes|fixed|resolve|resolves|resolved) #([0-9]+)' | awk '{print $2}' | tr -d '#' | jq -R -s -c 'split("\n")[:-1]')
ISSUES=$(git log ${{ github.event.before }}..${{ github.event.after }} --pretty=%B | grep -oiE '(close|closes|closed|fix|fixes|fixed|resolve|resolves|resolved) #([0-9]+)' | awk '{print $2}' | tr -d '#' | jq -R -s -c 'split("\n")[:-1]')
echo "issues=$ISSUES" >> $GITHUB_ENV
- name: Label Linked Issues
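
Note (illustrative sketch, not part of the diff): the step now scans every commit in the push range instead of only the last commit. A minimal JavaScript version of the same closing-keyword extraction, using hypothetical commit messages:

// Mirrors the grep -oiE pattern used in the workflow step above.
const CLOSING_KEYWORD = /(close|closes|closed|fix|fixes|fixed|resolve|resolves|resolved) #([0-9]+)/gi;

function extractLinkedIssues(commitMessages) {
    const issues = [];
    for (const message of commitMessages) {
        for (const match of message.matchAll(CLOSING_KEYWORD)) {
            issues.push(match[2]); // capture group 2 holds the issue number
        }
    }
    return issues;
}

// The old `git log -1` saw only the last message; the new range sees all pushed commits.
extractLinkedIssues([
    'Fix avatar crash\n\nFixes #1234',
    'Polish tooltips (closes #5678)',
]); // ['1234', '5678']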

View File

@@ -6,6 +6,11 @@ on:
schedule:
- cron: '0 0 * * *' # Runs every day at midnight UTC
permissions:
contents: read
issues: write
pull-requests: write
jobs:
mark-inactivity:
name: ⏳ Mark Issues/PRs without Activity
@@ -15,7 +20,7 @@ jobs:
- name: Mark Issues/PRs without Activity
# Close Stale Issues and PRs
# https://github.com/marketplace/actions/close-stale-issues
uses: actions/stale@v9
uses: actions/stale@v9.1.0
with:
repo-token: ${{ secrets.BOT_GITHUB_TOKEN || secrets.GITHUB_TOKEN }}
days-before-stale: 183
@@ -49,7 +54,7 @@ jobs:
- name: Mark Issues/PRs Awaiting User Response
# Close Stale Issues and PRs
# https://github.com/marketplace/actions/close-stale-issues
uses: actions/stale@v9
uses: actions/stale@v9.1.0
with:
repo-token: ${{ secrets.BOT_GITHUB_TOKEN || secrets.GITHUB_TOKEN }}
days-before-stale: 7
@@ -76,7 +81,7 @@ jobs:
- name: Mark Issues with Alternative Exists
# Close Stale Issues and PRs
# https://github.com/marketplace/actions/close-stale-issues
uses: actions/stale@v9
uses: actions/stale@v9.1.0
with:
repo-token: ${{ secrets.BOT_GITHUB_TOKEN || secrets.GITHUB_TOKEN }}
days-before-stale: 7

View File

@@ -6,6 +6,11 @@ on:
pull_request_target:
types: [closed]
permissions:
contents: read
issues: write
pull-requests: write
jobs:
remove-labels:
name: 🗑️ Remove Pending Labels on Close
@@ -15,7 +20,7 @@ jobs:
- name: Remove Pending Labels on Close
# 🤖 Issues Helper
# https://github.com/marketplace/actions/issues-helper
uses: actions-cool/issues-helper@v3
uses: actions-cool/issues-helper@v3.6.0
with:
actions: remove-labels
token: ${{ secrets.BOT_GITHUB_TOKEN || secrets.GITHUB_TOKEN }}

View File

@@ -6,6 +6,11 @@ on:
pull_request_target:
types: [opened]
permissions:
contents: read
issues: write
pull-requests: write
jobs:
label-maintainer:
name: 🏷️ Label if Author is a Repo Maintainer
@@ -16,7 +21,7 @@ jobs:
- name: Label if Author is a Repo Maintainer
# 🤖 Issues Helper
# https://github.com/marketplace/actions/issues-helper
uses: actions-cool/issues-helper@v3
uses: actions-cool/issues-helper@v3.6.0
with:
actions: 'add-labels'
token: ${{ secrets.BOT_GITHUB_TOKEN || secrets.GITHUB_TOKEN }}

View File

@@ -6,6 +6,10 @@ on:
pull_request_review_comment:
types: [created]
permissions:
contents: read
pull-requests: write
jobs:
label-by-size:
name: 🏷️ Label PR by Size
@@ -15,7 +19,7 @@ jobs:
- name: Label PR Size
# Pull Request Size Labeler
# https://github.com/marketplace/actions/pull-request-size-labeler
uses: codelytv/pr-size-labeler@v1
uses: codelytv/pr-size-labeler@v1.10.2
with:
GITHUB_TOKEN: ${{ secrets.BOT_GITHUB_TOKEN || secrets.GITHUB_TOKEN }}
xs_label: '🟩 ⬤○○○○'
@@ -43,12 +47,12 @@ jobs:
- name: Checkout Repository
# Checkout
# https://github.com/marketplace/actions/checkout
uses: actions/checkout@v4
uses: actions/checkout@v4.2.2
- name: Apply Labels Based on Branch Name and Target Branch
# Pull Request Labeler
# https://github.com/marketplace/actions/labeler
uses: actions/labeler@v5
uses: actions/labeler@v5.0.0
with:
configuration-path: .github/pr-auto-labels-by-branch.yml
repo-token: ${{ secrets.BOT_GITHUB_TOKEN || secrets.GITHUB_TOKEN }}
@@ -61,12 +65,12 @@ jobs:
- name: Checkout Repository
# Checkout
# https://github.com/marketplace/actions/checkout
uses: actions/checkout@v4
uses: actions/checkout@v4.2.2
- name: Apply Labels Based on Changed Files
# Pull Request Labeler
# https://github.com/marketplace/actions/labeler
uses: actions/labeler@v5
uses: actions/labeler@v5.0.0
with:
configuration-path: .github/pr-auto-labels-by-files.yml
repo-token: ${{ secrets.BOT_GITHUB_TOKEN || secrets.GITHUB_TOKEN }}
@@ -75,13 +79,13 @@ jobs:
name: 🗑️ Remove Stale Label on Comment
runs-on: ubuntu-latest
# Only runs when this is not done by the github actions bot
if: github.actor != 'github-actions[bot]'
if: github.event_name == 'pull_request_review_comment' && github.actor != 'github-actions[bot]'
steps:
- name: Remove Stale Label
# 🤖 Issues Helper
# https://github.com/marketplace/actions/issues-helper
uses: actions-cool/issues-helper@v3
uses: actions-cool/issues-helper@v3.6.0
with:
actions: 'remove-labels'
token: ${{ secrets.BOT_GITHUB_TOKEN || secrets.GITHUB_TOKEN }}
@@ -95,12 +99,18 @@ jobs:
# Run, even if the previous jobs were skipped/failed
if: always()
# Override permissions, as this needs to write a check
permissions:
checks: write
contents: read
pull-requests: read
steps:
- name: Check Merge Blocking
# GitHub Script
# https://github.com/marketplace/actions/github-scriptLabels
# https://github.com/marketplace/actions/github-script
id: label-check
uses: actions/github-script@v7
uses: actions/github-script@v7.0.1
with:
script: |
const prLabels = context.payload.pull_request.labels.map(label => label.name);
@@ -143,12 +153,12 @@ jobs:
- name: Checkout Repository
# Checkout
# https://github.com/marketplace/actions/checkout
uses: actions/checkout@v4
uses: actions/checkout@v4.2.2
- name: Post PR Comments Based on Labels
# Label Commenter for PRs
# https://github.com/marketplace/actions/label-commenter
uses: peaceiris/actions-label-commenter@v1
uses: peaceiris/actions-label-commenter@v1.10.0
with:
config_file: .github/pr-auto-comments.yml
github_token: ${{ secrets.BOT_GITHUB_TOKEN || secrets.GITHUB_TOKEN }}

View File

@@ -7,6 +7,10 @@ on:
pull_request_target:
types: [synchronize]
permissions:
contents: read
pull-requests: write
jobs:
check-merge-conflicts:
name: ⚔️ Check Merge Conflicts
@@ -16,7 +20,7 @@ jobs:
- name: Check Merge Conflicts
# Label Conflicting Pull Requests
# https://github.com/marketplace/actions/label-conflicting-pull-requests
uses: eps1lon/actions-label-merge-conflict@v3
uses: eps1lon/actions-label-merge-conflict@v3.0.3
with:
dirtyLabel: '🚫 Merge Conflicts'
repoToken: ${{ secrets.BOT_GITHUB_TOKEN || secrets.GITHUB_TOKEN }}

View File

@@ -101,15 +101,12 @@ const keyMigrationMap = [
newKey: 'performance.memoryCacheCapacity',
migrate: (value) => `${value}mb`,
},
// uncomment one release after 1.12.13
/*
{
oldKey: 'cookieSecret',
newKey: 'cookieSecret',
migrate: () => void 0,
remove: true,
},
*/
];
/**

public/global.d.ts (vendored) · 4 lines changed
View File

@@ -1,7 +1,11 @@
import libs from './lib';
import getContext from './scripts/st-context';
import { power_user } from './scripts/power-user';
declare global {
// Custom types
declare type InstructSettings = typeof power_user.instruct;
// Global namespace modules
interface Window {
ai: any;

View File

@@ -6432,10 +6432,13 @@
<div class="mes_reasoning"></div>
</details>
<div class="mes_text"></div>
<div class="mes_img_container">
<div class="mes_img_container" data-img-prompt-type="">
<div class="mes_img_controls">
<div title="Enlarge" class="right_menu_button fa-lg fa-solid fa-magnifying-glass mes_img_enlarge" data-i18n="[title]Enlarge"></div>
<div title="Caption" class="right_menu_button fa-lg fa-solid fa-envelope-open-text mes_img_caption" data-i18n="[title]Caption"></div>
<div title="Include just this swipe in prompts" class="right_menu_button fa-lg fa-solid fa-1 mes_img_prompt_type" data-img-prompt-type="one" data-i18n="[title]Include just this swipe in prompts"></div>
<div title="Include all swipes in prompts" class="right_menu_button fa-lg fa-solid fa-infinity mes_img_prompt_type" data-img-prompt-type="all" data-i18n="[title]Include all swipes in prompts"></div>
<div title="Exclude this image from prompts" class="right_menu_button fa-lg fa-solid fa-0 mes_img_prompt_type" data-img-prompt-type="none" data-i18n="[title]Exclude this image from prompts"></div>
<div title="Delete" class="right_menu_button fa-lg fa-solid fa-trash-can mes_img_delete" data-i18n="[title]Delete"></div>
</div>
<div class="mes_img_swipes">

View File

@@ -243,7 +243,7 @@ import { getBackgrounds, initBackgrounds, loadBackgroundSettings, background_set
import { hideLoader, showLoader } from './scripts/loader.js';
import { BulkEditOverlay, CharacterContextMenu } from './scripts/BulkEditOverlay.js';
import { loadFeatherlessModels, loadMancerModels, loadOllamaModels, loadTogetherAIModels, loadInfermaticAIModels, loadOpenRouterModels, loadVllmModels, loadAphroditeModels, loadDreamGenModels, initTextGenModels, loadTabbyModels, loadGenericModels } from './scripts/textgen-models.js';
import { appendFileContent, hasPendingFileAttachment, populateFileAttachment, decodeStyleTags, encodeStyleTags, isExternalMediaAllowed, getCurrentEntityId, preserveNeutralChat, restoreNeutralChat } from './scripts/chats.js';
import { appendFileContent, hasPendingFileAttachment, populateFileAttachment, decodeStyleTags, encodeStyleTags, isExternalMediaAllowed, getCurrentEntityId, preserveNeutralChat, restoreNeutralChat, IMAGE_PROMPT_TYPE } from './scripts/chats.js';
import { getPresetManager, initPresetManager } from './scripts/preset-manager.js';
import { evaluateMacros, getLastMessageId, initMacros } from './scripts/macros.js';
import { currentUser, setUserControls } from './scripts/user.js';
@@ -514,6 +514,9 @@ export const event_types = {
ONLINE_STATUS_CHANGED: 'online_status_changed',
IMAGE_SWIPED: 'image_swiped',
CONNECTION_PROFILE_LOADED: 'connection_profile_loaded',
CONNECTION_PROFILE_CREATED: 'connection_profile_created',
CONNECTION_PROFILE_DELETED: 'connection_profile_deleted',
CONNECTION_PROFILE_UPDATED: 'connection_profile_updated',
TOOL_CALLS_PERFORMED: 'tool_calls_performed',
TOOL_CALLS_RENDERED: 'tool_calls_rendered',
};
@@ -2317,6 +2320,7 @@ export function updateMessageBlock(messageId, message, { rerenderMessage = true
export function appendMediaToMessage(mes, messageElement, adjustScroll = true) {
// Add image to message
if (mes.extra?.image) {
const promptType = mes.extra.image_prompt_type ?? IMAGE_PROMPT_TYPE.ONE;
const container = messageElement.find('.mes_img_container');
const chatHeight = $('#chat').prop('scrollHeight');
const image = messageElement.find('.mes_img');
@@ -2336,6 +2340,7 @@ export function appendMediaToMessage(mes, messageElement, adjustScroll = true) {
container.addClass('img_extra');
image.toggleClass('img_inline', isInline);
text.toggleClass('displayNone', !isInline);
container.attr('data-img-prompt-type', promptType);
const imageSwipes = mes.extra.image_swipes;
if (Array.isArray(imageSwipes) && imageSwipes.length > 0) {
@@ -6861,14 +6866,14 @@ export function buildAvatarList(block, entities, { templateId = 'inline_avatar_t
*/
export async function unshallowCharacter(characterId) {
if (characterId === undefined) {
console.warn('Undefined character cannot be unshallowed');
console.debug('Undefined character cannot be unshallowed');
return;
}
/** @type {import('./scripts/char-data.js').v1CharData} */
const character = characters[characterId];
if (!character) {
console.warn('Character not found:', characterId);
console.debug('Character not found:', characterId);
return;
}
@@ -6879,7 +6884,7 @@ export async function unshallowCharacter(characterId) {
const avatar = character.avatar;
if (!avatar) {
console.warn('Character has no avatar field:', characterId);
console.debug('Character has no avatar field:', characterId);
return;
}
@@ -9196,6 +9201,17 @@ function swipe_right(_event, { source, repeated } = {}) {
}
}
/**
* @typedef {object} ConnectAPIMap
* @property {string} selected - API name (e.g. "textgenerationwebui", "openai")
* @property {string?} [button] - CSS selector for the API button
* @property {string?} [type] - API type, mostly used by text completion. (e.g. "openrouter")
* @property {string?} [source] - API source, mostly used by chat completion. (e.g. "openai")
*/
/**
* @type {Record<string, ConnectAPIMap>}
*/
export const CONNECT_API_MAP = {
// Default APIs not contined inside text gen / chat gen
'kobold': {

View File

@@ -85,6 +85,16 @@ const converters = {
'application/vnd.oasis.opendocument.spreadsheet': extractTextFromOffice,
};
/**
* @enum {string}
* @readonly
*/
export const IMAGE_PROMPT_TYPE = Object.freeze({
ALL: 'all',
ONE: 'one',
NONE: 'none',
});
/**
* Finds a matching key in the converters object.
* @param {string} type MIME type
@@ -201,8 +211,45 @@ export async function populateFileAttachment(message, inputId = 'file_form_input
// If file is image
if (file.type.startsWith('image/')) {
let addSwipe = false;
if (message.extra.image) {
const popupResult = await callGenericPopup(t`This message already has an image attached. Replace it?`, POPUP_TYPE.TEXT, '', {
okButton: t`Add swipe`,
customButtons: [
{
text: t`Replace image`,
appendAtEnd: true,
result: POPUP_RESULT.CUSTOM1,
},
{
text: t`Cancel`,
appendAtEnd: true,
result: POPUP_RESULT.CANCELLED,
},
],
});
if (!popupResult) {
return;
}
addSwipe = popupResult === POPUP_RESULT.AFFIRMATIVE;
}
const extension = file.type.split('/')[1];
const imageUrl = await saveBase64AsFile(base64Data, name2, fileNamePrefix, extension);
if (addSwipe) {
if (!message.extra.image_swipes) {
message.extra.image_swipes = [];
}
if (message.extra.image && !message.extra.image_swipes.includes(message.extra.image)) {
message.extra.image_swipes.push(message.extra.image);
}
message.extra.image_swipes.push(imageUrl);
}
message.extra.image = imageUrl;
message.extra.inline_image = true;
} else {
@@ -575,6 +622,40 @@ export function isExternalMediaAllowed() {
return !power_user.forbid_external_media;
}
function switchMessageImagePromptType() {
const mesBlock = $(this).closest('.mes');
const mesId = mesBlock.attr('mesid');
const message = chat[mesId];
if (!message) {
return;
}
if (!message.extra) {
message.extra = {};
}
const existingType = message.extra.image_prompt_type ?? IMAGE_PROMPT_TYPE.ONE;
const typeValues = Object.values(IMAGE_PROMPT_TYPE);
const nextType = typeValues[(typeValues.findIndex(type => type === existingType) + 1) % typeValues.length];
message.extra.image_prompt_type = nextType;
appendMediaToMessage(message, mesBlock);
saveChatDebounced();
switch (nextType) {
case IMAGE_PROMPT_TYPE.ALL:
toastr.info(t`All image swipes will be sent in prompt for this message`);
break;
case IMAGE_PROMPT_TYPE.ONE:
toastr.info(t`Only the shown image swipe will be sent in prompt for this message`);
break;
case IMAGE_PROMPT_TYPE.NONE:
toastr.info(t`No images will be sent in prompt for this message`);
break;
}
}
async function enlargeMessageImage() {
const mesBlock = $(this).closest('.mes');
const mesId = mesBlock.attr('mesid');
@@ -1605,6 +1686,7 @@ jQuery(function () {
$(document).on('click', '.mes_img_enlarge', enlargeMessageImage);
$(document).on('click', '.mes_img_delete', deleteMessageImage);
$(document).on('click', '.mes_img_prompt_type', switchMessageImagePromptType);
$('#file_form_input').on('change', async () => {
const fileInput = document.getElementById('file_form_input');
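
Note (illustrative sketch, not part of the diff): clicking the new mes_img_prompt_type button cycles the per-message setting through the IMAGE_PROMPT_TYPE values defined above. A minimal standalone version of that cycle:

const IMAGE_PROMPT_TYPE = Object.freeze({ ALL: 'all', ONE: 'one', NONE: 'none' });

function nextImagePromptType(current = IMAGE_PROMPT_TYPE.ONE) {
    const values = Object.values(IMAGE_PROMPT_TYPE); // ['all', 'one', 'none']
    return values[(values.indexOf(current) + 1) % values.length];
}

nextImagePromptType('one');  // 'none'
nextImagePromptType('none'); // 'all'
nextImagePromptType('all');  // 'one'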

View File

@@ -1,20 +1,20 @@
import { getPresetManager } from './preset-manager.js';
import { extractMessageFromData, getGenerateUrl, getRequestHeaders } from '../script.js';
import { getTextGenServer } from './textgen-settings.js';
import { extractReasoningFromData } from './reasoning.js';
import { formatInstructModeChat, formatInstructModePrompt, names_behavior_types } from './instruct-mode.js';
// #region Type Definitions
/**
* @typedef {Object} TextCompletionRequestBase
* @property {string} prompt - The text prompt for completion
* @property {number} max_tokens - Maximum number of tokens to generate
* @property {string} [model] - Optional model name
* @property {string} api_type - Type of API to use
* @property {string} [api_server] - Optional API server URL
* @property {number} [temperature] - Optional temperature parameter
* @property {number} [min_p] - Optional min_p parameter
*/
/** @typedef {Record<string, any> & TextCompletionRequestBase} TextCompletionRequest */
/**
* @typedef {Object} TextCompletionPayloadBase
* @property {string} prompt - The text prompt for completion
@@ -44,6 +44,13 @@ import { getTextGenServer } from './textgen-settings.js';
*/
/** @typedef {Record<string, any> & ChatCompletionPayloadBase} ChatCompletionPayload */
/**
* @typedef {Object} ExtractedData
* @property {string} content - Extracted content.
* @property {string} reasoning - Extracted reasoning.
*/
// #endregion
/**
@@ -53,11 +60,11 @@ export class TextCompletionService {
static TYPE = 'textgenerationwebui';
/**
* @param {TextCompletionRequest} custom
* @param {Record<string, any> & TextCompletionRequestBase & {prompt: string}} custom
* @returns {TextCompletionPayload}
*/
static createRequestData({ prompt, max_tokens, model, api_type, api_server, temperature, ...props }) {
return {
static createRequestData({ prompt, max_tokens, model, api_type, api_server, temperature, min_p, ...props }) {
const payload = {
...props,
prompt,
max_tokens,
@@ -66,15 +73,25 @@ export class TextCompletionService {
api_type,
api_server: api_server ?? getTextGenServer(api_type),
temperature,
min_p,
stream: false,
};
// Remove undefined values to avoid API errors
Object.keys(payload).forEach(key => {
if (payload[key] === undefined) {
delete payload[key];
}
});
return payload;
}
/**
* Sends a text completion request to the specified server
* @param {TextCompletionPayload} data Request data
* @param {boolean?} extractData Extract message from the response. Default true
* @returns {Promise<string | any>} Extracted data or the raw response
* @returns {Promise<ExtractedData | any>} Extracted data or the raw response
* @throws {Error}
*/
static async sendRequest(data, extractData = true) {
@@ -91,31 +108,150 @@ export class TextCompletionService {
throw json;
}
return extractData ? extractMessageFromData(json, this.TYPE) : json;
if (!extractData) {
return json;
}
return {
content: extractMessageFromData(json, this.TYPE),
reasoning: extractReasoningFromData(json, {
mainApi: this.TYPE,
textGenType: data.api_type,
ignoreShowThoughts: true,
}),
};
}
/**
* @param {string} presetName
* @param {TextCompletionRequest} custom
* @param {boolean?} extractData Extract message from the response. Default true
* @returns {Promise<string | any>} Extracted data or the raw response
* Process and send a text completion request with optional preset & instruct
* @param {Record<string, any> & TextCompletionRequestBase & {prompt: (ChatCompletionMessage & {ignoreInstruct?: boolean})[] |string}} custom
* @param {Object} options - Configuration options
* @param {string?} [options.presetName] - Name of the preset to use for generation settings
* @param {string?} [options.instructName] - Name of instruct preset for message formatting
* @param {boolean} extractData - Whether to extract structured data from response
* @returns {Promise<ExtractedData | any>} Extracted data or the raw response
* @throws {Error}
*/
static async sendRequestWithPreset(presetName, custom, extractData = true) {
const presetManager = getPresetManager(this.TYPE);
if (!presetManager) {
throw new Error('Preset manager not found');
static async processRequest(
custom,
options = {},
extractData = true,
) {
const { presetName, instructName } = options;
let requestData = { ...custom };
const prompt = custom.prompt;
// Apply generation preset if specified
if (presetName) {
const presetManager = getPresetManager(this.TYPE);
if (presetManager) {
const preset = presetManager.getCompletionPresetByName(presetName);
if (preset) {
// Convert preset to payload and merge with custom parameters
const presetPayload = this.presetToGeneratePayload(preset, {});
requestData = { ...presetPayload, ...requestData };
} else {
console.warn(`Preset "${presetName}" not found, continuing with default settings`);
}
} else {
console.warn('Preset manager not found, continuing with default settings');
}
}
const preset = presetManager.getCompletionPresetByName(presetName);
if (!preset) {
throw new Error('Preset not found');
// Handle instruct formatting if requested
if (Array.isArray(prompt) && instructName) {
const instructPresetManager = getPresetManager('instruct');
let instructPreset = instructPresetManager?.getCompletionPresetByName(instructName);
if (instructPreset) {
// Clone the preset to avoid modifying the original
instructPreset = structuredClone(instructPreset);
instructPreset.macro = false;
instructPreset.names_behavior = names_behavior_types.NONE;
// Format messages using instruct formatting
const formattedMessages = [];
for (const message of prompt) {
let messageContent = message.content;
if (!message.ignoreInstruct) {
messageContent = formatInstructModeChat(
message.role,
message.content,
message.role === 'user',
false,
undefined,
undefined,
undefined,
undefined,
instructPreset,
);
// Add prompt formatting for the last message
if (message === prompt[prompt.length - 1]) {
messageContent += formatInstructModePrompt(
undefined,
false,
undefined,
undefined,
undefined,
false,
false,
instructPreset,
);
}
}
formattedMessages.push(messageContent);
}
requestData.prompt = formattedMessages.join('');
if (instructPreset.output_suffix) {
requestData.stop = [instructPreset.output_suffix];
requestData.stopping_strings = [instructPreset.output_suffix];
}
} else {
console.warn(`Instruct preset "${instructName}" not found, using basic formatting`);
requestData.prompt = prompt.map(x => x.content).join('\n\n');
}
} else if (typeof prompt === 'string') {
requestData.prompt = prompt;
} else {
requestData.prompt = prompt.map(x => x.content).join('\n\n');
}
const data = this.createRequestData({ ...preset, ...custom });
// @ts-ignore
const data = this.createRequestData(requestData);
return await this.sendRequest(data, extractData);
}
/**
* Converts a preset to a valid text completion payload.
* Only supports temperature.
* @param {Object} preset - The preset configuration
* @param {Object} customPreset - Additional parameters to override preset values
* @returns {Object} - Formatted payload for text completion API
*/
static presetToGeneratePayload(preset, customPreset = {}) {
if (!preset || typeof preset !== 'object') {
throw new Error('Invalid preset: must be an object');
}
// Merge preset with custom parameters
const settings = { ...preset, ...customPreset };
// Initialize base payload with common parameters
let payload = {
'temperature': settings.temp ? Number(settings.temp) : undefined,
'min_p': settings.min_p ? Number(settings.min_p) : undefined,
};
// Remove undefined values to avoid API errors
Object.keys(payload).forEach(key => {
if (payload[key] === undefined) {
delete payload[key];
}
});
return payload;
}
}
/**
@@ -129,7 +265,7 @@ export class ChatCompletionService {
* @returns {ChatCompletionPayload}
*/
static createRequestData({ messages, model, chat_completion_source, max_tokens, temperature, ...props }) {
return {
const payload = {
...props,
messages,
model,
@@ -138,13 +274,22 @@ export class ChatCompletionService {
temperature,
stream: false,
};
// Remove undefined values to avoid API errors
Object.keys(payload).forEach(key => {
if (payload[key] === undefined) {
delete payload[key];
}
});
return payload;
}
/**
* Sends a chat completion request
* @param {ChatCompletionPayload} data Request data
* @param {boolean?} extractData Extract message from the response. Default true
* @returns {Promise<string | any>} Extracted data or the raw response
* @returns {Promise<ExtractedData | any>} Extracted data or the raw response
* @throws {Error}
*/
static async sendRequest(data, extractData = true) {
@@ -161,29 +306,82 @@ export class ChatCompletionService {
throw json;
}
return extractData ? extractMessageFromData(json, this.TYPE) : json;
if (!extractData) {
return json;
}
return {
content: extractMessageFromData(json, this.TYPE),
reasoning: extractReasoningFromData(json, {
mainApi: this.TYPE,
textGenType: data.chat_completion_source,
ignoreShowThoughts: true,
}),
};
}
/**
* @param {string} presetName
* Process and send a chat completion request with optional preset
* @param {ChatCompletionPayload} custom
* @param {boolean} extractData Extract message from the response. Default true
* @returns {Promise<string | any>} Extracted data or the raw response
* @param {Object} options - Configuration options
* @param {string?} [options.presetName] - Name of the preset to use for generation settings
* @param {boolean} extractData - Whether to extract structured data from response
* @returns {Promise<ExtractedData | any>} Extracted data or the raw response
* @throws {Error}
*/
static async sendRequestWithPreset(presetName, custom, extractData = true) {
const presetManager = getPresetManager(this.TYPE);
if (!presetManager) {
throw new Error('Preset manager not found');
static async processRequest(custom, options, extractData = true) {
const { presetName } = options;
let requestData = { ...custom };
// Apply generation preset if specified
if (presetName) {
const presetManager = getPresetManager(this.TYPE);
if (presetManager) {
const preset = presetManager.getCompletionPresetByName(presetName);
if (preset) {
// Convert preset to payload and merge with custom parameters
const presetPayload = this.presetToGeneratePayload(preset, {});
requestData = { ...presetPayload, ...requestData };
} else {
console.warn(`Preset "${presetName}" not found, continuing with default settings`);
}
} else {
console.warn('Preset manager not found, continuing with default settings');
}
}
const preset = presetManager.getCompletionPresetByName(presetName);
if (!preset) {
throw new Error('Preset not found');
}
const data = this.createRequestData({ ...preset, ...custom });
const data = this.createRequestData(requestData);
return await this.sendRequest(data, extractData);
}
/**
* Converts a preset to a valid chat completion payload
* Only supports temperature.
* @param {Object} preset - The preset configuration
* @param {Object} customParams - Additional parameters to override preset values
* @returns {Object} - Formatted payload for chat completion API
*/
static presetToGeneratePayload(preset, customParams = {}) {
if (!preset || typeof preset !== 'object') {
throw new Error('Invalid preset: must be an object');
}
// Merge preset with custom parameters
const settings = { ...preset, ...customParams };
// Initialize base payload with common parameters
const payload = {
temperature: settings.temperature ? Number(settings.temperature) : undefined,
};
// Remove undefined values to avoid API errors
Object.keys(payload).forEach(key => {
if (payload[key] === undefined) {
delete payload[key];
}
});
return payload;
}
}
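
Note (illustrative usage sketch, not part of the diff): the old sendRequestWithPreset methods are replaced by processRequest, which tolerates missing presets and can apply instruct formatting client-side. Preset, instruct, model names and backend identifiers below are assumptions:

import { TextCompletionService, ChatCompletionService } from './custom-request.js';

// Text completion: an array prompt is instruct-formatted when instructName is provided.
const text = await TextCompletionService.processRequest(
    {
        prompt: [
            { role: 'system', content: 'You are a concise assistant.' },
            { role: 'user', content: 'Name three moons of Jupiter.' },
        ],
        max_tokens: 128,
        api_type: 'koboldcpp', // assumption: any supported text completion backend type
    },
    { presetName: 'MyPreset', instructName: 'ChatML' }, // unknown names fall back with a console warning
    true, // extractData: resolves to { content, reasoning }
);

// Chat completion: only a generation preset can be applied.
const chat = await ChatCompletionService.processRequest(
    {
        messages: [{ role: 'user', content: 'Hello!' }],
        max_tokens: 64,
        model: 'gpt-4o-mini',
        chat_completion_source: 'openai',
    },
    { presetName: 'MyChatPreset' },
);
console.log(text.content, text.reasoning, chat.content);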

View File

@@ -1,4 +1,4 @@
import { Fuse } from '../../../lib.js';
import { DOMPurify, Fuse } from '../../../lib.js';
import { event_types, eventSource, main_api, saveSettingsDebounced } from '../../../script.js';
import { extension_settings, renderExtensionTemplateAsync } from '../../extensions.js';
@@ -267,11 +267,16 @@ async function createConnectionProfile(forceName = null) {
});
const isNameTaken = (n) => extension_settings.connectionManager.profiles.some(p => p.name === n);
const suggestedName = getUniqueName(collapseSpaces(`${profile.api ?? ''} ${profile.model ?? ''} - ${profile.preset ?? ''}`), isNameTaken);
const name = forceName ?? await callGenericPopup(template, POPUP_TYPE.INPUT, suggestedName, { rows: 2 });
let name = forceName ?? await callGenericPopup(template, POPUP_TYPE.INPUT, suggestedName, { rows: 2 });
// If it's cancelled, it will be false
if (!name) {
return null;
}
name = DOMPurify.sanitize(String(name));
if (!name) {
toastr.error('Name cannot be empty.');
return null;
}
if (isNameTaken(name) || name === NONE) {
toastr.error('A profile with the same name already exists.');
@@ -303,7 +308,8 @@ async function deleteConnectionProfile() {
return;
}
const name = extension_settings.connectionManager.profiles[index].name;
const profile = extension_settings.connectionManager.profiles[index];
const name = profile.name;
const confirm = await Popup.show.confirm(t`Are you sure you want to delete the selected profile?`, name);
if (!confirm) {
@@ -313,6 +319,8 @@ async function deleteConnectionProfile() {
extension_settings.connectionManager.profiles.splice(index, 1);
extension_settings.connectionManager.selectedProfile = null;
saveSettingsDebounced();
await eventSource.emit(event_types.CONNECTION_PROFILE_DELETED, profile);
}
/**
@@ -512,6 +520,7 @@ async function renderDetailsContent(detailsContent) {
saveSettingsDebounced();
renderConnectionProfiles(profiles);
await renderDetailsContent(detailsContent);
await eventSource.emit(event_types.CONNECTION_PROFILE_CREATED, profile);
await eventSource.emit(event_types.CONNECTION_PROFILE_LOADED, profile.name);
});
@@ -523,9 +532,11 @@ async function renderDetailsContent(detailsContent) {
console.log('No profile selected');
return;
}
const oldProfile = structuredClone(profile);
await updateConnectionProfile(profile);
await renderDetailsContent(detailsContent);
saveSettingsDebounced();
await eventSource.emit(event_types.CONNECTION_PROFILE_UPDATED, oldProfile, profile);
await eventSource.emit(event_types.CONNECTION_PROFILE_LOADED, profile.name);
toastr.success('Connection profile updated', '', { timeOut: 1500 });
});
@@ -559,7 +570,7 @@ async function renderDetailsContent(detailsContent) {
return acc;
}, {});
const template = $(await renderExtensionTemplateAsync(MODULE_NAME, 'edit', { name: profile.name, settings }));
const newName = await callGenericPopup(template, POPUP_TYPE.INPUT, profile.name, {
let newName = await callGenericPopup(template, POPUP_TYPE.INPUT, profile.name, {
rows: 2,
customButtons: [{
text: t`Save and Update`,
@@ -571,9 +582,15 @@ async function renderDetailsContent(detailsContent) {
}],
});
// If it's cancelled, it will be false
if (!newName) {
return;
}
newName = DOMPurify.sanitize(String(newName));
if (!newName) {
toastr.error('Name cannot be empty.');
return;
}
if (profile.name !== newName && extension_settings.connectionManager.profiles.some(p => p.name === newName)) {
toastr.error('A profile with the same name already exists.');
@@ -584,6 +601,7 @@ async function renderDetailsContent(detailsContent) {
return Object.entries(FANCY_NAMES).find(x => x[1] === String($(this).val()))?.[0];
}).get();
const oldProfile = structuredClone(profile);
if (newExcludeList.length !== profile.exclude.length || !newExcludeList.every(e => profile.exclude.includes(e))) {
profile.exclude = newExcludeList;
for (const command of newExcludeList) {
@@ -598,10 +616,11 @@ async function renderDetailsContent(detailsContent) {
if (profile.name !== newName) {
toastr.success('Connection profile renamed.');
profile.name = String(newName);
profile.name = newName;
}
saveSettingsDebounced();
await eventSource.emit(event_types.CONNECTION_PROFILE_UPDATED, oldProfile, profile);
renderConnectionProfiles(profiles);
await renderDetailsContent(detailsContent);
});
@@ -704,6 +723,7 @@ async function renderDetailsContent(detailsContent) {
saveSettingsDebounced();
renderConnectionProfiles(profiles);
await renderDetailsContent(detailsContent);
await eventSource.emit(event_types.CONNECTION_PROFILE_CREATED, profile);
return profile.name;
},
}));
@@ -718,9 +738,11 @@ async function renderDetailsContent(detailsContent) {
toastr.warning('No profile selected.');
return '';
}
const oldProfile = structuredClone(profile);
await updateConnectionProfile(profile);
await renderDetailsContent(detailsContent);
saveSettingsDebounced();
await eventSource.emit(event_types.CONNECTION_PROFILE_UPDATED, oldProfile, profile);
return profile.name;
},
}));
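
Note (illustrative sketch, not part of the diff): other extensions can now react to the new profile lifecycle events, mirroring the subscriptions ConnectionManagerRequestService sets up in a later file:

const { eventSource, eventTypes } = SillyTavern.getContext();

eventSource.on(eventTypes.CONNECTION_PROFILE_CREATED, (profile) => {
    console.log('Profile created:', profile.name);
});
eventSource.on(eventTypes.CONNECTION_PROFILE_UPDATED, (oldProfile, newProfile) => {
    console.log('Profile updated:', oldProfile.name, '->', newProfile.name);
});
eventSource.on(eventTypes.CONNECTION_PROFILE_DELETED, (profile) => {
    console.log('Profile deleted:', profile.name);
});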

View File

@@ -18,8 +18,7 @@
<div id="regex_info_block_wrapper">
<div id="regex_info_block" class="info-block"></div>
<!-- TODO replace 3rd-party link with our own docs when it's done -->
<a id="regex_info_block_flags_hint" href="http://developer.mozilla.org/en-US/docs/Web/JavaScript/Guide/Regular_expressions#advanced_searching_with_flags" target="_blank" rel="noopener noreferrer">
<a id="regex_info_block_flags_hint" href="https://docs.sillytavern.app/extensions/regex/#flags" target="_blank" rel="noopener noreferrer">
<i class="fa-solid fa-circle-info" title="Click here to learn more about regex flags."></i>
</a>
</div>

View File

@@ -1,5 +1,6 @@
import { getRequestHeaders } from '../../script.js';
import { CONNECT_API_MAP, getRequestHeaders } from '../../script.js';
import { extension_settings, openThirdPartyExtensionMenu } from '../extensions.js';
import { t } from '../i18n.js';
import { oai_settings } from '../openai.js';
import { SECRET_KEYS, secret_state } from '../secrets.js';
import { textgen_types, textgenerationwebui_settings } from '../textgen-settings.js';
@@ -273,3 +274,309 @@ export async function getWebLlmContextSize() {
const model = await engine.getCurrentModelInfo();
return model?.context_size;
}
/**
* It uses the profiles to send a generate request to the API. Doesn't support streaming.
*/
export class ConnectionManagerRequestService {
static defaultSendRequestParams = {
extractData: true,
includePreset: true,
includeInstruct: true,
};
static getAllowedTypes() {
return {
openai: t`Chat Completion`,
textgenerationwebui: t`Text Completion`,
};
}
/**
* @param {string} profileId
* @param {string | (import('../custom-request.js').ChatCompletionMessage & {ignoreInstruct?: boolean})[]} prompt
* @param {number} maxTokens
* @param {{extractData?: boolean, includePreset?: boolean, includeInstruct?: boolean}} custom - default values are true
* @returns {Promise<import('../custom-request.js').ExtractedData | any>} Extracted data or the raw response
*/
static async sendRequest(profileId, prompt, maxTokens, custom = this.defaultSendRequestParams) {
const { extractData, includePreset, includeInstruct } = { ...this.defaultSendRequestParams, ...custom };
const context = SillyTavern.getContext();
if (context.extensionSettings.disabledExtensions.includes('connection-manager')) {
throw new Error('Connection Manager is not available');
}
const profile = context.extensionSettings.connectionManager.profiles.find((p) => p.id === profileId);
const selectedApiMap = this.validateProfile(profile);
try {
switch (selectedApiMap.selected) {
case 'openai': {
if (!selectedApiMap.source) {
throw new Error(`API type ${selectedApiMap.selected} does not support chat completions`);
}
const messages = Array.isArray(prompt) ? prompt : [{ role: 'user', content: prompt }];
return await context.ChatCompletionService.processRequest({
messages,
max_tokens: maxTokens,
model: profile.model,
chat_completion_source: selectedApiMap.source,
}, {
presetName: includePreset ? profile.preset : undefined,
}, extractData);
}
case 'textgenerationwebui': {
if (!selectedApiMap.type) {
throw new Error(`API type ${selectedApiMap.selected} does not support text completions`);
}
return await context.TextCompletionService.processRequest({
prompt,
max_tokens: maxTokens,
model: profile.model,
api_type: selectedApiMap.type,
api_server: profile['api-url'],
}, {
instructName: includeInstruct ? profile.instruct : undefined,
presetName: includePreset ? profile.preset : undefined,
}, extractData);
}
default: {
throw new Error(`Unknown API type ${selectedApiMap.selected}`);
}
}
} catch (error) {
throw new Error('API request failed', { cause: error });
}
}
/**
* Respects allowed types.
* @returns {import('./connection-manager/index.js').ConnectionProfile[]}
*/
static getSupportedProfiles() {
const context = SillyTavern.getContext();
if (context.extensionSettings.disabledExtensions.includes('connection-manager')) {
throw new Error('Connection Manager is not available');
}
const profiles = context.extensionSettings.connectionManager.profiles;
return profiles.filter((p) => this.isProfileSupported(p));
}
/**
* @param {import('./connection-manager/index.js').ConnectionProfile?} [profile]
* @returns {boolean}
*/
static isProfileSupported(profile) {
if (!profile) {
return false;
}
const apiMap = CONNECT_API_MAP[profile.api];
if (!Object.hasOwn(this.getAllowedTypes(), apiMap.selected)) {
return false;
}
// Some providers not need model, like koboldcpp. But I don't want to check by provider.
switch (apiMap.selected) {
case 'openai':
return !!apiMap.source;
case 'textgenerationwebui':
return !!apiMap.type;
}
return false;
}
/**
* @param {import('./connection-manager/index.js').ConnectionProfile?} [profile]
* @return {import('../../script.js').ConnectAPIMap}
* @throws {Error}
*/
static validateProfile(profile) {
if (!profile) {
throw new Error('Could not find profile.');
}
if (!profile.api) {
throw new Error('Select a connection profile that has an API');
}
const context = SillyTavern.getContext();
const selectedApiMap = context.CONNECT_API_MAP[profile.api];
if (!selectedApiMap) {
throw new Error(`Unknown API type ${profile.api}`);
}
if (!Object.hasOwn(this.getAllowedTypes(), selectedApiMap.selected)) {
throw new Error(`API type ${selectedApiMap.selected} is not supported. Supported types: ${Object.values(this.getAllowedTypes()).join(', ')}`);
}
return selectedApiMap;
}
/**
* Create profiles dropdown and updates select element accordingly. Use onChange, onCreate, unUpdate, onDelete callbacks for custom behaviour. e.g updating extension settings.
* @param {string} selector
* @param {string} initialSelectedProfileId
* @param {(profile?: import('./connection-manager/index.js').ConnectionProfile) => Promise<void> | void} onChange - 3 cases. 1- When user selects new profile. 2- When user deletes selected profile. 3- When user updates selected profile.
* @param {(profile: import('./connection-manager/index.js').ConnectionProfile) => Promise<void> | void} onCreate
* @param {(oldProfile: import('./connection-manager/index.js').ConnectionProfile, newProfile: import('./connection-manager/index.js').ConnectionProfile) => Promise<void> | void} unUpdate
* @param {(profile: import('./connection-manager/index.js').ConnectionProfile) => Promise<void> | void} onDelete
*/
static handleDropdown(
selector,
initialSelectedProfileId,
onChange = () => { },
onCreate = () => { },
unUpdate = () => { },
onDelete = () => { },
) {
const context = SillyTavern.getContext();
if (context.extensionSettings.disabledExtensions.includes('connection-manager')) {
throw new Error('Connection Manager is not available');
}
/**
* @type {JQuery<HTMLSelectElement>}
*/
const dropdown = $(selector);
if (!dropdown || !dropdown.length) {
throw new Error(`Could not find dropdown with selector ${selector}`);
}
dropdown.empty();
// Create default option using document.createElement
const defaultOption = document.createElement('option');
defaultOption.value = '';
defaultOption.textContent = 'Select a Connection Profile';
defaultOption.dataset.i18n = 'Select a Connection Profile';
dropdown.append(defaultOption);
const profiles = context.extensionSettings.connectionManager.profiles;
// Create optgroups using document.createElement
const groups = {};
for (const [apiType, groupLabel] of Object.entries(this.getAllowedTypes())) {
const optgroup = document.createElement('optgroup');
optgroup.label = groupLabel;
groups[apiType] = optgroup;
}
const sortedProfilesByGroup = {};
for (const apiType of Object.keys(this.getAllowedTypes())) {
sortedProfilesByGroup[apiType] = [];
}
for (const profile of profiles) {
if (this.isProfileSupported(profile)) {
const apiMap = CONNECT_API_MAP[profile.api];
if (sortedProfilesByGroup[apiMap.selected]) {
sortedProfilesByGroup[apiMap.selected].push(profile);
}
}
}
// Sort each group alphabetically and add to dropdown
for (const [apiType, groupProfiles] of Object.entries(sortedProfilesByGroup)) {
if (groupProfiles.length === 0) continue;
groupProfiles.sort((a, b) => a.name.localeCompare(b.name));
const group = groups[apiType];
for (const profile of groupProfiles) {
const option = document.createElement('option');
option.value = profile.id;
option.textContent = profile.name;
group.appendChild(option);
}
}
for (const group of Object.values(groups)) {
if (group.children.length > 0) {
dropdown.append(group);
}
}
const selectedProfile = profiles.find((p) => p.id === initialSelectedProfileId);
if (selectedProfile) {
dropdown.val(selectedProfile.id);
}
context.eventSource.on(context.eventTypes.CONNECTION_PROFILE_CREATED, async (profile) => {
const isSupported = this.isProfileSupported(profile);
if (!isSupported) {
return;
}
const group = groups[CONNECT_API_MAP[profile.api].selected];
const option = document.createElement('option');
option.value = profile.id;
option.textContent = profile.name;
group.appendChild(option);
await onCreate(profile);
});
context.eventSource.on(context.eventTypes.CONNECTION_PROFILE_UPDATED, async (oldProfile, newProfile) => {
const currentSelected = dropdown.val();
const isSelectedProfile = currentSelected === oldProfile.id;
await unUpdate(oldProfile, newProfile);
if (!this.isProfileSupported(newProfile)) {
if (isSelectedProfile) {
dropdown.val('');
dropdown.trigger('change');
}
return;
}
const group = groups[CONNECT_API_MAP[newProfile.api].selected];
const oldOption = group.querySelector(`option[value="${oldProfile.id}"]`);
if (oldOption) {
oldOption.remove();
}
const option = document.createElement('option');
option.value = newProfile.id;
option.textContent = newProfile.name;
group.appendChild(option);
if (isSelectedProfile) {
// Ackchyually, we don't need to reselect but what if id changes? It is not possible for now I couldn't stop myself.
dropdown.val(newProfile.id);
dropdown.trigger('change');
}
});
context.eventSource.on(context.eventTypes.CONNECTION_PROFILE_DELETED, async (profile) => {
const currentSelected = dropdown.val();
const isSelectedProfile = currentSelected === profile.id;
if (!this.isProfileSupported(profile)) {
return;
}
const group = groups[CONNECT_API_MAP[profile.api].selected];
const optionToRemove = group.querySelector(`option[value="${profile.id}"]`);
if (optionToRemove) {
optionToRemove.remove();
}
if (isSelectedProfile) {
dropdown.val('');
dropdown.trigger('change');
}
await onDelete(profile);
});
dropdown.on('change', async () => {
const profileId = dropdown.val();
const profile = context.extensionSettings.connectionManager.profiles.find((p) => p.id === profileId);
await onChange(profile);
});
}
}
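
Note (illustrative usage sketch, not part of the diff): a one-off request through a saved connection profile, assuming at least one supported profile exists:

const ctx = SillyTavern.getContext();

async function askViaProfile(question) {
    const [profile] = ctx.ConnectionManagerRequestService.getSupportedProfiles();
    if (!profile) throw new Error('No supported connection profiles configured');

    const result = await ctx.ConnectionManagerRequestService.sendRequest(
        profile.id,
        [{ role: 'user', content: question }],
        256, // maxTokens
        { extractData: true, includePreset: true, includeInstruct: true },
    );
    return result.content;
}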

View File

@@ -2337,10 +2337,10 @@ function processReply(str) {
str = str.replaceAll('“', '');
str = str.replaceAll('\n', ', ');
str = str.normalize('NFD');
// Strip out non-alphanumeric characters barring model syntax exceptions
str = str.replace(/[^a-zA-Z0-9.,:_(){}<>[\]\-'|#]+/g, ' ');
str = str.replace(/\s+/g, ' '); // Collapse multiple whitespaces into one
str = str.trim();
@@ -3234,7 +3234,7 @@ function getNovelParams() {
extension_settings.sd.scheduler = 'karras';
}
if (extension_settings.sd.sampler === 'ddim' ||
if (extension_settings.sd.sampler === 'ddim' ||
['nai-diffusion-4-curated-preview', 'nai-diffusion-4-full'].includes(extension_settings.sd.model)) {
sm = false;
sm_dyn = false;
@@ -4000,6 +4000,7 @@ async function onImageSwiped({ message, element, direction }) {
}
const currentIndex = swipes.indexOf(message.extra.image);
const canGenerate = !!message.extra.title;
if (currentIndex === -1) {
console.warn('Current image not found in the swipes');
@@ -4015,8 +4016,17 @@ async function onImageSwiped({ message, element, direction }) {
appendMediaToMessage(message, element, false);
}
// Wrap around at the end if the image is missing a prompt
if (direction === 'right' && !canGenerate) {
const newIndex = currentIndex === swipes.length - 1 ? 0 : currentIndex + 1;
message.extra.image = swipes[newIndex];
// Update the image in the message
appendMediaToMessage(message, element, false);
}
// Switch to next image or generate a new one if at the end
if (direction === 'right') {
if (direction === 'right' && canGenerate) {
const newIndex = currentIndex === swipes.length - 1 ? swipes.length : currentIndex + 1;
if (newIndex === swipes.length) {

View File

@@ -320,59 +320,61 @@ export const force_output_sequence = {
* @param {string} name1 User name.
* @param {string} name2 Character name.
* @param {boolean|number} forceOutputSequence Force to use first/last output sequence (if configured).
* @param {InstructSettings} customInstruct Custom instruct mode settings.
* @returns {string} Formatted instruct mode chat message.
*/
export function formatInstructModeChat(name, mes, isUser, isNarrator, forceAvatar, name1, name2, forceOutputSequence) {
let includeNames = isNarrator ? false : power_user.instruct.names_behavior === names_behavior_types.ALWAYS;
export function formatInstructModeChat(name, mes, isUser, isNarrator, forceAvatar, name1, name2, forceOutputSequence, customInstruct = null) {
const instruct = structuredClone(customInstruct ?? power_user.instruct);
let includeNames = isNarrator ? false : instruct.names_behavior === names_behavior_types.ALWAYS;
if (!isNarrator && power_user.instruct.names_behavior === names_behavior_types.FORCE && ((selected_group && name !== name1) || (forceAvatar && name !== name1))) {
if (!isNarrator && instruct.names_behavior === names_behavior_types.FORCE && ((selected_group && name !== name1) || (forceAvatar && name !== name1))) {
includeNames = true;
}
function getPrefix() {
if (isNarrator) {
return power_user.instruct.system_same_as_user ? power_user.instruct.input_sequence : power_user.instruct.system_sequence;
return instruct.system_same_as_user ? instruct.input_sequence : instruct.system_sequence;
}
if (isUser) {
if (forceOutputSequence === force_output_sequence.FIRST) {
return power_user.instruct.first_input_sequence || power_user.instruct.input_sequence;
return instruct.first_input_sequence || instruct.input_sequence;
}
if (forceOutputSequence === force_output_sequence.LAST) {
return power_user.instruct.last_input_sequence || power_user.instruct.input_sequence;
return instruct.last_input_sequence || instruct.input_sequence;
}
return power_user.instruct.input_sequence;
return instruct.input_sequence;
}
if (forceOutputSequence === force_output_sequence.FIRST) {
return power_user.instruct.first_output_sequence || power_user.instruct.output_sequence;
return instruct.first_output_sequence || instruct.output_sequence;
}
if (forceOutputSequence === force_output_sequence.LAST) {
return power_user.instruct.last_output_sequence || power_user.instruct.output_sequence;
return instruct.last_output_sequence || instruct.output_sequence;
}
return power_user.instruct.output_sequence;
return instruct.output_sequence;
}
function getSuffix() {
if (isNarrator) {
return power_user.instruct.system_same_as_user ? power_user.instruct.input_suffix : power_user.instruct.system_suffix;
return instruct.system_same_as_user ? instruct.input_suffix : instruct.system_suffix;
}
if (isUser) {
return power_user.instruct.input_suffix;
return instruct.input_suffix;
}
return power_user.instruct.output_suffix;
return instruct.output_suffix;
}
let prefix = getPrefix() || '';
let suffix = getSuffix() || '';
if (power_user.instruct.macro) {
if (instruct.macro) {
prefix = substituteParams(prefix, name1, name2);
prefix = prefix.replace(/{{name}}/gi, name || 'System');
@@ -380,11 +382,11 @@ export function formatInstructModeChat(name, mes, isUser, isNarrator, forceAvata
suffix = suffix.replace(/{{name}}/gi, name || 'System');
}
if (!suffix && power_user.instruct.wrap) {
if (!suffix && instruct.wrap) {
suffix = '\n';
}
const separator = power_user.instruct.wrap ? '\n' : '';
const separator = instruct.wrap ? '\n' : '';
// Don't include the name if it's empty
const textArray = includeNames && name ? [prefix, `${name}: ${mes}` + suffix] : [prefix, mes + suffix];
@@ -504,30 +506,32 @@ export function formatInstructModeExamples(mesExamplesArray, name1, name2) {
* @param {string} name2 Character name.
* @param {boolean} isQuiet Is quiet mode generation.
* @param {boolean} isQuietToLoud Is quiet to loud generation.
* @param {InstructSettings} customInstruct Custom instruct settings.
* @returns {string} Formatted instruct mode last prompt line.
*/
export function formatInstructModePrompt(name, isImpersonate, promptBias, name1, name2, isQuiet, isQuietToLoud) {
const includeNames = name && (power_user.instruct.names_behavior === names_behavior_types.ALWAYS || (!!selected_group && power_user.instruct.names_behavior === names_behavior_types.FORCE)) && !(isQuiet && !isQuietToLoud);
export function formatInstructModePrompt(name, isImpersonate, promptBias, name1, name2, isQuiet, isQuietToLoud, customInstruct = null) {
const instruct = structuredClone(customInstruct ?? power_user.instruct);
const includeNames = name && (instruct.names_behavior === names_behavior_types.ALWAYS || (!!selected_group && instruct.names_behavior === names_behavior_types.FORCE)) && !(isQuiet && !isQuietToLoud);
function getSequence() {
// User impersonation prompt
if (isImpersonate) {
return power_user.instruct.input_sequence;
return instruct.input_sequence;
}
// Neutral / system / quiet prompt
// Use a special quiet instruct sequence if defined, or assistant's output sequence otherwise
if (isQuiet && !isQuietToLoud) {
return power_user.instruct.last_system_sequence || power_user.instruct.output_sequence;
return instruct.last_system_sequence || instruct.output_sequence;
}
// Quiet in-character prompt
if (isQuiet && isQuietToLoud) {
return power_user.instruct.last_output_sequence || power_user.instruct.output_sequence;
return instruct.last_output_sequence || instruct.output_sequence;
}
// Default AI response
return power_user.instruct.last_output_sequence || power_user.instruct.output_sequence;
return instruct.last_output_sequence || instruct.output_sequence;
}
let sequence = getSequence() || '';
@@ -536,21 +540,21 @@ export function formatInstructModePrompt(name, isImpersonate, promptBias, name1,
// A hack for Mistral's formatting that has a normal output sequence ending with a space
if (
includeNames &&
power_user.instruct.last_output_sequence &&
power_user.instruct.output_sequence &&
sequence === power_user.instruct.last_output_sequence &&
/\s$/.test(power_user.instruct.output_sequence) &&
!/\s$/.test(power_user.instruct.last_output_sequence)
instruct.last_output_sequence &&
instruct.output_sequence &&
sequence === instruct.last_output_sequence &&
/\s$/.test(instruct.output_sequence) &&
!/\s$/.test(instruct.last_output_sequence)
) {
nameFiller = power_user.instruct.output_sequence.slice(-1);
nameFiller = instruct.output_sequence.slice(-1);
}
if (power_user.instruct.macro) {
if (instruct.macro) {
sequence = substituteParams(sequence, name1, name2);
sequence = sequence.replace(/{{name}}/gi, name || 'System');
}
const separator = power_user.instruct.wrap ? '\n' : '';
const separator = instruct.wrap ? '\n' : '';
let text = includeNames ? (separator + sequence + separator + nameFiller + `${name}:`) : (separator + sequence);
// Quiet prompt already has a newline at the end
@@ -562,7 +566,7 @@ export function formatInstructModePrompt(name, isImpersonate, promptBias, name1,
text += (includeNames ? promptBias : (separator + promptBias.trimStart()));
}
return (power_user.instruct.wrap ? text.trimEnd() : text) + (includeNames ? '' : separator);
return (instruct.wrap ? text.trimEnd() : text) + (includeNames ? '' : separator);
}
/**

View File

@@ -75,6 +75,7 @@ import { Popup, POPUP_RESULT } from './popup.js';
import { t } from './i18n.js';
import { ToolManager } from './tool-calling.js';
import { accountStorage } from './util/AccountStorage.js';
import { IMAGE_PROMPT_TYPE } from './chats.js';
export {
openai_messages_count,
@@ -560,7 +561,17 @@ function setOpenAIMessages(chat) {
const name = chat[j]['name'];
const image = chat[j]?.extra?.image;
const invocations = chat[j]?.extra?.tool_invocations;
messages[i] = { 'role': role, 'content': content, name: name, 'image': image, 'invocations': invocations };
const imagePromptType = chat[j]?.extra?.image_prompt_type ?? IMAGE_PROMPT_TYPE.ONE;
const imageSwipes = chat[j]?.extra?.image_swipes ?? [];
messages[i] = {
role,
content,
name,
invocations,
image,
imagePromptType,
imageSwipes,
};
j++;
}
@@ -845,7 +856,7 @@ async function populateChatHistory(messages, prompts, chatCompletion, type = nul
}
if (imageInlining && chatPrompt.image) {
await chatMessage.addImage(chatPrompt.image);
await chatMessage.addImage(chatPrompt.image, chatPrompt.imagePromptType, chatPrompt.imageSwipes);
}
if (canUseTools && Array.isArray(chatPrompt.invocations)) {
@@ -2048,7 +2059,7 @@ async function sendOpenAIRequest(type, messages, signal) {
delete generate_data.stop;
delete generate_data.logprobs;
}
if (isOAI && oai_settings.openai_model.includes('gpt-4.5-preview') || isOpenRouter && oai_settings.openrouter_model.includes('gpt-4.5-preview')) {
if (isOAI && oai_settings.openai_model.includes('gpt-4.5') || isOpenRouter && oai_settings.openrouter_model.includes('gpt-4.5')) {
delete generate_data.logprobs;
}
@@ -2602,38 +2613,55 @@ class Message {
/**
* Adds an image to the message.
* @param {string} image Image URL or Data URL.
* @param {string} sourceImage Image URL or Data URL.
* @param {string} imagePromptType Type of image prompt.
* @param {string[]} imageSwipes Swipes for the image.
* @returns {Promise<void>}
*/
async addImage(image) {
async addImage(sourceImage, imagePromptType = IMAGE_PROMPT_TYPE.ONE, imageSwipes = []) {
const quality = oai_settings.inline_image_quality || default_settings.inline_image_quality;
const textContent = this.content;
const isDataUrl = isDataURL(image);
if (!isDataUrl) {
try {
const response = await fetch(image, { method: 'GET', cache: 'force-cache' });
if (!response.ok) throw new Error('Failed to fetch image');
const blob = await response.blob();
image = await getBase64Async(blob);
} catch (error) {
console.error('Image adding skipped', error);
const sourceImages = [];
switch (imagePromptType) {
case IMAGE_PROMPT_TYPE.NONE:
return;
}
case IMAGE_PROMPT_TYPE.ONE:
sourceImages.push(sourceImage);
break;
case IMAGE_PROMPT_TYPE.ALL:
sourceImages.push(...imageSwipes);
break;
}
image = await this.compressImage(image);
const quality = oai_settings.inline_image_quality || default_settings.inline_image_quality;
this.content = [
{ type: 'text', text: textContent },
{ type: 'image_url', image_url: { 'url': image, 'detail': quality } },
];
try {
const tokens = await this.getImageTokenCost(image, quality);
this.tokens += tokens;
} catch (error) {
this.tokens += Message.tokensPerImage;
console.error('Failed to get image token cost', error);
for (let image of sourceImages) {
const isDataUrl = isDataURL(image);
if (!isDataUrl) {
try {
const response = await fetch(image, { method: 'GET', cache: 'force-cache' });
if (!response.ok) throw new Error('Failed to fetch image');
const blob = await response.blob();
image = await getBase64Async(blob);
} catch (error) {
console.error('Image adding skipped', error);
continue;
}
}
image = await this.compressImage(image);
this.content.push({ type: 'image_url', image_url: { url: image, detail: quality } });
try {
const tokens = await this.getImageTokenCost(image, quality);
this.tokens += tokens;
} catch (error) {
this.tokens += Message.tokensPerImage;
console.error('Failed to get image token cost', error);
}
}
}
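
Note (simplified sketch, not the actual Message.addImage code): which image URLs each prompt type resolves to before they are fetched, compressed and appended as image_url content parts:

function selectImagesForPrompt(message) {
    const type = message.extra?.image_prompt_type ?? 'one';
    switch (type) {
        case 'none': return [];
        case 'all': return message.extra?.image_swipes ?? [];
        default: return message.extra?.image ? [message.extra.image] : []; // 'one'
    }
}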

View File

@@ -218,7 +218,9 @@ let power_user = {
system_sequence: '',
system_suffix: '',
last_system_sequence: '',
first_input_sequence: '',
first_output_sequence: '',
last_input_sequence: '',
last_output_sequence: '',
system_sequence_prefix: '',
system_sequence_suffix: '',

View File

@@ -57,19 +57,24 @@ function toggleReasoningAutoExpand() {
* @param {object} data Response data
* @returns {string} Extracted reasoning
*/
export function extractReasoningFromData(data) {
switch (main_api) {
export function extractReasoningFromData(data, {
mainApi = null,
ignoreShowThoughts = false,
textGenType = null,
chatCompletionSource = null
} = {}) {
switch (mainApi ?? main_api) {
case 'textgenerationwebui':
switch (textgenerationwebui_settings.type) {
switch (textGenType ?? textgenerationwebui_settings.type) {
case textgen_types.OPENROUTER:
return data?.choices?.[0]?.reasoning ?? '';
}
break;
case 'openai':
if (!oai_settings.show_thoughts) break;
if (!ignoreShowThoughts && !oai_settings.show_thoughts) break;
switch (oai_settings.chat_completion_source) {
switch (chatCompletionSource ?? oai_settings.chat_completion_source) {
case chat_completion_sources.DEEPSEEK:
return data?.choices?.[0]?.message?.reasoning_content ?? '';
case chat_completion_sources.OPENROUTER:

View File

@@ -80,6 +80,7 @@ import { timestampToMoment, uuidv4 } from './utils.js';
import { getGlobalVariable, getLocalVariable, setGlobalVariable, setLocalVariable } from './variables.js';
import { convertCharacterBook, loadWorldInfo, saveWorldInfo, updateWorldInfoList } from './world-info.js';
import { ChatCompletionService, TextCompletionService } from './custom-request.js';
import { ConnectionManagerRequestService } from './extensions/shared.js';
import { updateReasoningUI, parseReasoningFromString } from './reasoning.js';
export function getContext() {
@@ -215,6 +216,7 @@ export function getContext() {
clearChat,
ChatCompletionService,
TextCompletionService,
ConnectionManagerRequestService,
updateReasoningUI,
parseReasoningFromString,
unshallowCharacter,

View File

@@ -86,7 +86,7 @@ const OOBA_DEFAULT_ORDER = [
'encoder_repetition_penalty',
'no_repeat_ngram',
];
const APHRODITE_DEFAULT_ORDER = [
export const APHRODITE_DEFAULT_ORDER = [
'dry',
'penalties',
'no_repeat_ngram',

View File

@@ -5098,6 +5098,16 @@ body:not(.sd) .mes_img_swipes {
max-width: 100% !important;
}
.mes_img_container div[data-img-prompt-type] {
display: none;
}
.mes_img_container[data-img-prompt-type="none"] div[data-img-prompt-type="none"],
.mes_img_container[data-img-prompt-type="one"] div[data-img-prompt-type="one"],
.mes_img_container[data-img-prompt-type="all"] div[data-img-prompt-type="all"] {
display: initial;
}
/* Align the content of this span to the right */
.delete-button {
margin-right: 10px;

View File

@@ -411,7 +411,7 @@ export function getTokenizerModel(requestModel) {
return 'gpt-4o';
}
if (requestModel.includes('gpt-4.5-preview')) {
if (requestModel.includes('gpt-4.5')) {
return 'gpt-4o';
}