Merge branch 'staging' into integrity

Cohee
2025-03-16 21:28:37 +02:00
19 changed files with 751 additions and 135 deletions


@@ -7,6 +7,10 @@ on:
issue_comment:
types: [created]
permissions:
contents: read
issues: write
jobs:
label-on-content:
name: 🏷️ Label Issues by Content
@@ -16,7 +20,7 @@ jobs:
- name: Checkout Repository
# Checkout
# https://github.com/marketplace/actions/checkout
uses: actions/checkout@v4
uses: actions/checkout@v4.2.2
- name: Auto-Label Issues (Based on Issue Content)
# only auto label based on issue content once, on open (to prevent re-labeling removed labels)
@@ -24,7 +28,7 @@ jobs:
# Issue Labeler
# https://github.com/marketplace/actions/regex-issue-labeler
uses: github/issue-labeler@v3
uses: github/issue-labeler@v3.4
with:
configuration-path: .github/issues-auto-labels.yml
enable-versioned-regex: 0
@@ -39,7 +43,7 @@ jobs:
if: contains(fromJSON('["👩‍💻 Good First Issue", "🙏 Help Wanted", "🪲 Confirmed", "⚠️ High Priority", "❕ Medium Priority", "💤 Low Priority"]'), github.event.label.name)
# 🤖 Issues Helper
# https://github.com/marketplace/actions/issues-helper
uses: actions-cool/issues-helper@v3
uses: actions-cool/issues-helper@v3.6.0
with:
actions: 'add-labels'
token: ${{ secrets.BOT_GITHUB_TOKEN || secrets.GITHUB_TOKEN }}
@@ -49,7 +53,7 @@ jobs:
if: contains(fromJSON('["✅ Done", "✅ Done (staging)", "⚰️ Stale", "❌ wontfix"]'), github.event.label.name)
# 🤖 Issues Helper
# https://github.com/marketplace/actions/issues-helper
uses: actions-cool/issues-helper@v3
uses: actions-cool/issues-helper@v3.6.0
with:
actions: 'remove-labels'
token: ${{ secrets.BOT_GITHUB_TOKEN || secrets.GITHUB_TOKEN }}
@@ -59,7 +63,7 @@ jobs:
if: contains(fromJSON('["❌ wontfix","👍 Approved","👩‍💻 Good First Issue"]'), github.event.label.name)
# 🤖 Issues Helper
# https://github.com/marketplace/actions/issues-helper
uses: actions-cool/issues-helper@v3
uses: actions-cool/issues-helper@v3.6.0
with:
actions: 'remove-labels'
token: ${{ secrets.BOT_GITHUB_TOKEN || secrets.GITHUB_TOKEN }}
@@ -69,7 +73,7 @@ jobs:
if: github.event.label.name == '🪲 Confirmed'
# 🤖 Issues Helper
# https://github.com/marketplace/actions/issues-helper
uses: actions-cool/issues-helper@v3
uses: actions-cool/issues-helper@v3.6.0
with:
actions: 'remove-labels'
token: ${{ secrets.BOT_GITHUB_TOKEN || secrets.GITHUB_TOKEN }}
@@ -85,7 +89,7 @@ jobs:
- name: Remove Stale Label
# 🤖 Issues Helper
# https://github.com/marketplace/actions/issues-helper
uses: actions-cool/issues-helper@v3
uses: actions-cool/issues-helper@v3.6.0
with:
actions: 'remove-labels'
token: ${{ secrets.BOT_GITHUB_TOKEN || secrets.GITHUB_TOKEN }}
@@ -101,12 +105,12 @@ jobs:
- name: Checkout Repository
# Checkout
# https://github.com/marketplace/actions/checkout
uses: actions/checkout@v4
uses: actions/checkout@v4.2.2
- name: Post Issue Comments Based on Labels
# Label Commenter
# https://github.com/marketplace/actions/label-commenter
uses: peaceiris/actions-label-commenter@v1
uses: peaceiris/actions-label-commenter@v1.10.0
with:
config_file: .github/issues-auto-comments.yml
github_token: ${{ secrets.BOT_GITHUB_TOKEN || secrets.GITHUB_TOKEN }}


@@ -6,6 +6,10 @@ on:
- staging
- release
permissions:
contents: read
issues: write
jobs:
# This runs on commits to staging/release, reading the commit messages. See `pr-auto-manager.yml`:`update-linked-issues` for PR-linked updates.
update-linked-issues:
@@ -16,12 +20,12 @@ jobs:
- name: Checkout Repository
# Checkout
# https://github.com/marketplace/actions/checkout
uses: actions/checkout@v4
uses: actions/checkout@v4.2.2
- name: Extract Linked Issues from Commit Message
id: extract_issues
run: |
ISSUES=$(git log -1 --pretty=%B | grep -oiE '(close|closes|closed|fix|fixes|fixed|resolve|resolves|resolved) #([0-9]+)' | awk '{print $2}' | tr -d '#' | jq -R -s -c 'split("\n")[:-1]')
ISSUES=$(git log ${{ github.event.before }}..${{ github.event.after }} --pretty=%B | grep -oiE '(close|closes|closed|fix|fixes|fixed|resolve|resolves|resolved) #([0-9]+)' | awk '{print $2}' | tr -d '#' | jq -R -s -c 'split("\n")[:-1]')
echo "issues=$ISSUES" >> $GITHUB_ENV
- name: Label Linked Issues


@@ -6,6 +6,11 @@ on:
schedule:
- cron: '0 0 * * *' # Runs every day at midnight UTC
permissions:
contents: read
issues: write
pull-requests: write
jobs:
mark-inactivity:
name: ⏳ Mark Issues/PRs without Activity
@@ -15,7 +20,7 @@ jobs:
- name: Mark Issues/PRs without Activity
# Close Stale Issues and PRs
# https://github.com/marketplace/actions/close-stale-issues
uses: actions/stale@v9
uses: actions/stale@v9.1.0
with:
repo-token: ${{ secrets.BOT_GITHUB_TOKEN || secrets.GITHUB_TOKEN }}
days-before-stale: 183
@@ -49,7 +54,7 @@ jobs:
- name: Mark Issues/PRs Awaiting User Response
# Close Stale Issues and PRs
# https://github.com/marketplace/actions/close-stale-issues
uses: actions/stale@v9
uses: actions/stale@v9.1.0
with:
repo-token: ${{ secrets.BOT_GITHUB_TOKEN || secrets.GITHUB_TOKEN }}
days-before-stale: 7
@@ -76,7 +81,7 @@ jobs:
- name: Mark Issues with Alternative Exists
# Close Stale Issues and PRs
# https://github.com/marketplace/actions/close-stale-issues
uses: actions/stale@v9
uses: actions/stale@v9.1.0
with:
repo-token: ${{ secrets.BOT_GITHUB_TOKEN || secrets.GITHUB_TOKEN }}
days-before-stale: 7


@@ -6,6 +6,11 @@ on:
pull_request_target:
types: [closed]
permissions:
contents: read
issues: write
pull-requests: write
jobs:
remove-labels:
name: 🗑️ Remove Pending Labels on Close
@@ -15,7 +20,7 @@ jobs:
- name: Remove Pending Labels on Close
# 🤖 Issues Helper
# https://github.com/marketplace/actions/issues-helper
uses: actions-cool/issues-helper@v3
uses: actions-cool/issues-helper@v3.6.0
with:
actions: remove-labels
token: ${{ secrets.BOT_GITHUB_TOKEN || secrets.GITHUB_TOKEN }}


@@ -6,6 +6,11 @@ on:
pull_request_target:
types: [opened]
permissions:
contents: read
issues: write
pull-requests: write
jobs:
label-maintainer:
name: 🏷️ Label if Author is a Repo Maintainer
@@ -16,7 +21,7 @@ jobs:
- name: Label if Author is a Repo Maintainer
# 🤖 Issues Helper
# https://github.com/marketplace/actions/issues-helper
uses: actions-cool/issues-helper@v3
uses: actions-cool/issues-helper@v3.6.0
with:
actions: 'add-labels'
token: ${{ secrets.BOT_GITHUB_TOKEN || secrets.GITHUB_TOKEN }}


@@ -6,6 +6,10 @@ on:
pull_request_review_comment:
types: [created]
permissions:
contents: read
pull-requests: write
jobs:
label-by-size:
name: 🏷️ Label PR by Size
@@ -15,7 +19,7 @@ jobs:
- name: Label PR Size
# Pull Request Size Labeler
# https://github.com/marketplace/actions/pull-request-size-labeler
uses: codelytv/pr-size-labeler@v1
uses: codelytv/pr-size-labeler@v1.10.2
with:
GITHUB_TOKEN: ${{ secrets.BOT_GITHUB_TOKEN || secrets.GITHUB_TOKEN }}
xs_label: '🟩 ⬤○○○○'
@@ -43,12 +47,12 @@ jobs:
- name: Checkout Repository
# Checkout
# https://github.com/marketplace/actions/checkout
uses: actions/checkout@v4
uses: actions/checkout@v4.2.2
- name: Apply Labels Based on Branch Name and Target Branch
# Pull Request Labeler
# https://github.com/marketplace/actions/labeler
uses: actions/labeler@v5
uses: actions/labeler@v5.0.0
with:
configuration-path: .github/pr-auto-labels-by-branch.yml
repo-token: ${{ secrets.BOT_GITHUB_TOKEN || secrets.GITHUB_TOKEN }}
@@ -61,12 +65,12 @@ jobs:
- name: Checkout Repository
# Checkout
# https://github.com/marketplace/actions/checkout
uses: actions/checkout@v4
uses: actions/checkout@v4.2.2
- name: Apply Labels Based on Changed Files
# Pull Request Labeler
# https://github.com/marketplace/actions/labeler
uses: actions/labeler@v5
uses: actions/labeler@v5.0.0
with:
configuration-path: .github/pr-auto-labels-by-files.yml
repo-token: ${{ secrets.BOT_GITHUB_TOKEN || secrets.GITHUB_TOKEN }}
@@ -75,13 +79,13 @@ jobs:
name: 🗑️ Remove Stale Label on Comment
runs-on: ubuntu-latest
# Only runs when this is not done by the github actions bot
if: github.actor != 'github-actions[bot]'
if: github.event_name == 'pull_request_review_comment' && github.actor != 'github-actions[bot]'
steps:
- name: Remove Stale Label
# 🤖 Issues Helper
# https://github.com/marketplace/actions/issues-helper
uses: actions-cool/issues-helper@v3
uses: actions-cool/issues-helper@v3.6.0
with:
actions: 'remove-labels'
token: ${{ secrets.BOT_GITHUB_TOKEN || secrets.GITHUB_TOKEN }}
@@ -95,12 +99,18 @@ jobs:
# Run, even if the previous jobs were skipped/failed
if: always()
# Override permissions, as this needs to write a check
permissions:
checks: write
contents: read
pull-requests: read
steps:
- name: Check Merge Blocking
# GitHub Script
# https://github.com/marketplace/actions/github-scriptLabels
# https://github.com/marketplace/actions/github-script
id: label-check
uses: actions/github-script@v7
uses: actions/github-script@v7.0.1
with:
script: |
const prLabels = context.payload.pull_request.labels.map(label => label.name);
@@ -143,12 +153,12 @@ jobs:
- name: Checkout Repository
# Checkout
# https://github.com/marketplace/actions/checkout
uses: actions/checkout@v4
uses: actions/checkout@v4.2.2
- name: Post PR Comments Based on Labels
# Label Commenter for PRs
# https://github.com/marketplace/actions/label-commenter
uses: peaceiris/actions-label-commenter@v1
uses: peaceiris/actions-label-commenter@v1.10.0
with:
config_file: .github/pr-auto-comments.yml
github_token: ${{ secrets.BOT_GITHUB_TOKEN || secrets.GITHUB_TOKEN }}


@@ -7,6 +7,10 @@ on:
pull_request_target:
types: [synchronize]
permissions:
contents: read
pull-requests: write
jobs:
check-merge-conflicts:
name: ⚔️ Check Merge Conflicts
@@ -16,7 +20,7 @@ jobs:
- name: Check Merge Conflicts
# Label Conflicting Pull Requests
# https://github.com/marketplace/actions/label-conflicting-pull-requests
uses: eps1lon/actions-label-merge-conflict@v3
uses: eps1lon/actions-label-merge-conflict@v3.0.3
with:
dirtyLabel: '🚫 Merge Conflicts'
repoToken: ${{ secrets.BOT_GITHUB_TOKEN || secrets.GITHUB_TOKEN }}

public/global.d.ts (vendored)

@@ -1,7 +1,11 @@
import libs from './lib';
import getContext from './scripts/st-context';
import { power_user } from './scripts/power-user';
declare global {
// Custom types
declare type InstructSettings = typeof power_user.instruct;
// Global namespace modules
interface Window {
ai: any;


@@ -515,6 +515,9 @@ export const event_types = {
ONLINE_STATUS_CHANGED: 'online_status_changed',
IMAGE_SWIPED: 'image_swiped',
CONNECTION_PROFILE_LOADED: 'connection_profile_loaded',
CONNECTION_PROFILE_CREATED: 'connection_profile_created',
CONNECTION_PROFILE_DELETED: 'connection_profile_deleted',
CONNECTION_PROFILE_UPDATED: 'connection_profile_updated',
TOOL_CALLS_PERFORMED: 'tool_calls_performed',
TOOL_CALLS_RENDERED: 'tool_calls_rendered',
};
@@ -1371,8 +1374,11 @@ export function resultCheckStatus() {
* If the character ID doesn't exist, if the chat is being saved, or if a group is being generated, this function does nothing.
* If the character is different from the currently selected one, it will clear the chat and reset any selected character or group.
* @param {number} id The ID of the character to switch to.
* @param {object} [options] Options for the switch.
* @param {boolean} [options.switchMenu=true] Whether to switch the right menu to the character edit menu if the character is already selected.
* @returns {Promise<void>} A promise that resolves when the character is switched.
*/
export async function selectCharacterById(id) {
export async function selectCharacterById(id, { switchMenu = true } = {}) {
if (characters[id] === undefined) {
return;
}
@@ -1401,9 +1407,9 @@ export async function selectCharacterById(id) {
}
} else {
//if clicked on character that was already selected
selected_button = 'character_edit';
switchMenu && (selected_button = 'character_edit');
await unshallowCharacter(this_chid);
select_selected_character(this_chid);
select_selected_character(this_chid, { switchMenu });
}
}
@@ -1788,6 +1794,7 @@ export async function getCharacters() {
body: JSON.stringify({}),
});
if (response.ok === true) {
const previousAvatar = this_chid !== undefined ? characters[this_chid]?.avatar : null;
characters.splice(0, characters.length);
const getData = await response.json();
for (let i = 0; i < getData.length; i++) {
@@ -1801,8 +1808,16 @@ export async function getCharacters() {
characters[i]['chat'] = String(characters[i]['chat']);
}
if (this_chid !== undefined) {
$('#avatar_url_pole').val(characters[this_chid].avatar);
if (previousAvatar) {
const newCharacterId = characters.findIndex(x => x.avatar === previousAvatar);
if (newCharacterId >= 0) {
setCharacterId(newCharacterId);
await selectCharacterById(newCharacterId, { switchMenu: false });
} else {
await Popup.show.text(t`ERROR: The active character is no longer available.`, t`The page will be refreshed to prevent data loss. Press "OK" to continue.`);
return location.reload();
}
}
await getGroups();
@@ -6528,6 +6543,8 @@ export async function renameCharacter(name = null, { silent = false, renameChats
await eventSource.emit(event_types.CHARACTER_RENAMED, oldAvatar, newAvatar);
// Unload current character
setCharacterId(undefined);
// Reload characters list
await getCharacters();
@@ -6536,7 +6553,6 @@ export async function renameCharacter(name = null, { silent = false, renameChats
if (newChId !== -1) {
// Select the character after the renaming
setCharacterId(undefined);
await selectCharacterById(newChId);
// Async delay to update UI
@@ -6871,14 +6887,14 @@ export function buildAvatarList(block, entities, { templateId = 'inline_avatar_t
*/
export async function unshallowCharacter(characterId) {
if (characterId === undefined) {
console.warn('Undefined character cannot be unshallowed');
console.debug('Undefined character cannot be unshallowed');
return;
}
/** @type {import('./scripts/char-data.js').v1CharData} */
const character = characters[characterId];
if (!character) {
console.warn('Character not found:', characterId);
console.debug('Character not found:', characterId);
return;
}
@@ -6889,7 +6905,7 @@ export async function unshallowCharacter(characterId) {
const avatar = character.avatar;
if (!avatar) {
console.warn('Character has no avatar field:', characterId);
console.debug('Character has no avatar field:', characterId);
return;
}
@@ -7895,14 +7911,19 @@ export function select_rm_info(type, charId, previousCharId = null) {
}
}
export function select_selected_character(chid) {
/**
* Selects the right menu for displaying the character editor.
* @param {number|string} chid Character array index
* @param {object} [param1] Options for the switch
* @param {boolean} [param1.switchMenu=true] Whether to switch the menu
*/
export function select_selected_character(chid, { switchMenu = true } = {}) {
//character select
//console.log('select_selected_character() -- starting with input of -- ' + chid + ' (name:' + characters[chid].name + ')');
select_rm_create();
setMenuType('character_edit');
select_rm_create({ switchMenu });
switchMenu && setMenuType('character_edit');
$('#delete_button').css('display', 'flex');
$('#export_button').css('display', 'flex');
var display_name = characters[chid].name;
//create text poles
$('#rm_button_back').css('display', 'none');
@@ -7917,7 +7938,7 @@ export function select_selected_character(chid) {
// Don't update the navbar name if we're peeking the group member defs
if (!selected_group) {
$('#rm_button_selected_ch').children('h2').text(display_name);
$('#rm_button_selected_ch').children('h2').text(characters[chid].name);
}
$('#add_avatar_button').val('');
@@ -7948,22 +7969,20 @@ export function select_selected_character(chid) {
$('#chat_import_avatar_url').val(characters[chid].avatar);
$('#chat_import_character_name').val(characters[chid].name);
$('#character_json_data').val(characters[chid].json_data);
let this_avatar = default_avatar;
if (characters[chid].avatar != 'none') {
this_avatar = getThumbnailUrl('avatar', characters[chid].avatar);
}
updateFavButtonState(characters[chid].fav || characters[chid].fav == 'true');
$('#avatar_load_preview').attr('src', this_avatar);
$('#name_div').removeClass('displayBlock');
$('#name_div').addClass('displayNone');
$('#renameCharButton').css('display', '');
const avatarUrl = characters[chid].avatar != 'none' ? getThumbnailUrl('avatar', characters[chid].avatar) : default_avatar;
$('#avatar_load_preview').attr('src', avatarUrl);
$('.open_alternate_greetings').data('chid', chid);
$('#set_character_world').data('chid', chid);
setWorldInfoButtonClass(chid);
checkEmbeddedWorld(chid);
$('#name_div').removeClass('displayBlock');
$('#name_div').addClass('displayNone');
$('#renameCharButton').css('display', '');
$('#form_create').attr('actiontype', 'editcharacter');
$('.form_create_bottom_buttons_block .chat_lorebook_button').show();
@@ -7975,8 +7994,13 @@ export function select_selected_character(chid) {
saveSettingsDebounced();
}
function select_rm_create() {
setMenuType('create');
/**
* Selects the right menu for creating a new character.
* @param {object} [options] Options for the switch
* @param {boolean} [options.switchMenu=true] Whether to switch the menu
*/
function select_rm_create({ switchMenu = true } = {}) {
switchMenu && setMenuType('create');
//console.log('select_rm_Create() -- selected button: '+selected_button);
if (selected_button == 'create') {
@@ -7986,7 +8010,7 @@ function select_rm_create() {
}
}
selectRightMenuWithAnimation('rm_ch_create_block');
switchMenu && selectRightMenuWithAnimation('rm_ch_create_block');
$('#set_chat_scenario').hide();
$('#delete_button_div').css('display', 'none');
@@ -9209,6 +9233,17 @@ function swipe_right(_event, { source, repeated } = {}) {
}
}
/**
* @typedef {object} ConnectAPIMap
* @property {string} selected - API name (e.g. "textgenerationwebui", "openai")
* @property {string?} [button] - CSS selector for the API button
* @property {string?} [type] - API type, mostly used by text completion. (e.g. "openrouter")
* @property {string?} [source] - API source, mostly used by chat completion. (e.g. "openai")
*/
/**
* @type {Record<string, ConnectAPIMap>}
*/
export const CONNECT_API_MAP = {
// Default APIs not contained inside text gen / chat gen
'kobold': {

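For illustration, entries in this map follow the ConnectAPIMap shape documented above. The two sketched entries below are assumptions (values and selectors are illustrative, not copied from the real map): one chat completion API keyed by `source`, one text completion API keyed by `type`.

// Hypothetical entries shaped per ConnectAPIMap (illustrative values only):
const exampleConnectApiEntries = {
    'openai': {
        selected: 'openai',               // main API bucket (chat completion)
        source: 'openai',                 // chat_completion_source used by requests
        button: '#api_button_openai',     // assumed CSS selector for the API button
    },
    'koboldcpp': {
        selected: 'textgenerationwebui',  // main API bucket (text completion)
        type: 'koboldcpp',                // api_type used by text completion requests
    },
};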

@@ -1,20 +1,20 @@
import { getPresetManager } from './preset-manager.js';
import { extractMessageFromData, getGenerateUrl, getRequestHeaders } from '../script.js';
import { getTextGenServer } from './textgen-settings.js';
import { extractReasoningFromData } from './reasoning.js';
import { formatInstructModeChat, formatInstructModePrompt, names_behavior_types } from './instruct-mode.js';
// #region Type Definitions
/**
* @typedef {Object} TextCompletionRequestBase
* @property {string} prompt - The text prompt for completion
* @property {number} max_tokens - Maximum number of tokens to generate
* @property {string} [model] - Optional model name
* @property {string} api_type - Type of API to use
* @property {string} [api_server] - Optional API server URL
* @property {number} [temperature] - Optional temperature parameter
* @property {number} [min_p] - Optional min_p parameter
*/
/** @typedef {Record<string, any> & TextCompletionRequestBase} TextCompletionRequest */
/**
* @typedef {Object} TextCompletionPayloadBase
* @property {string} prompt - The text prompt for completion
@@ -44,6 +44,13 @@ import { getTextGenServer } from './textgen-settings.js';
*/
/** @typedef {Record<string, any> & ChatCompletionPayloadBase} ChatCompletionPayload */
/**
* @typedef {Object} ExtractedData
* @property {string} content - Extracted content.
* @property {string} reasoning - Extracted reasoning.
*/
// #endregion
/**
@@ -53,11 +60,11 @@ export class TextCompletionService {
static TYPE = 'textgenerationwebui';
/**
* @param {TextCompletionRequest} custom
* @param {Record<string, any> & TextCompletionRequestBase & {prompt: string}} custom
* @returns {TextCompletionPayload}
*/
static createRequestData({ prompt, max_tokens, model, api_type, api_server, temperature, ...props }) {
return {
static createRequestData({ prompt, max_tokens, model, api_type, api_server, temperature, min_p, ...props }) {
const payload = {
...props,
prompt,
max_tokens,
@@ -66,15 +73,25 @@ export class TextCompletionService {
api_type,
api_server: api_server ?? getTextGenServer(api_type),
temperature,
min_p,
stream: false,
};
// Remove undefined values to avoid API errors
Object.keys(payload).forEach(key => {
if (payload[key] === undefined) {
delete payload[key];
}
});
return payload;
}
/**
* Sends a text completion request to the specified server
* @param {TextCompletionPayload} data Request data
* @param {boolean?} extractData Extract message from the response. Default true
* @returns {Promise<string | any>} Extracted data or the raw response
* @returns {Promise<ExtractedData | any>} Extracted data or the raw response
* @throws {Error}
*/
static async sendRequest(data, extractData = true) {
@@ -91,31 +108,150 @@ export class TextCompletionService {
throw json;
}
return extractData ? extractMessageFromData(json, this.TYPE) : json;
if (!extractData) {
return json;
}
return {
content: extractMessageFromData(json, this.TYPE),
reasoning: extractReasoningFromData(json, {
mainApi: this.TYPE,
textGenType: data.api_type,
ignoreShowThoughts: true,
}),
};
}
/**
* @param {string} presetName
* @param {TextCompletionRequest} custom
* @param {boolean?} extractData Extract message from the response. Default true
* @returns {Promise<string | any>} Extracted data or the raw response
* Process and send a text completion request with optional preset & instruct
* @param {Record<string, any> & TextCompletionRequestBase & {prompt: (ChatCompletionMessage & {ignoreInstruct?: boolean})[] |string}} custom
* @param {Object} options - Configuration options
* @param {string?} [options.presetName] - Name of the preset to use for generation settings
* @param {string?} [options.instructName] - Name of instruct preset for message formatting
* @param {boolean} extractData - Whether to extract structured data from response
* @returns {Promise<ExtractedData | any>} Extracted data or the raw response
* @throws {Error}
*/
static async sendRequestWithPreset(presetName, custom, extractData = true) {
const presetManager = getPresetManager(this.TYPE);
if (!presetManager) {
throw new Error('Preset manager not found');
static async processRequest(
custom,
options = {},
extractData = true,
) {
const { presetName, instructName } = options;
let requestData = { ...custom };
const prompt = custom.prompt;
// Apply generation preset if specified
if (presetName) {
const presetManager = getPresetManager(this.TYPE);
if (presetManager) {
const preset = presetManager.getCompletionPresetByName(presetName);
if (preset) {
// Convert preset to payload and merge with custom parameters
const presetPayload = this.presetToGeneratePayload(preset, {});
requestData = { ...presetPayload, ...requestData };
} else {
console.warn(`Preset "${presetName}" not found, continuing with default settings`);
}
} else {
console.warn('Preset manager not found, continuing with default settings');
}
}
const preset = presetManager.getCompletionPresetByName(presetName);
if (!preset) {
throw new Error('Preset not found');
// Handle instruct formatting if requested
if (Array.isArray(prompt) && instructName) {
const instructPresetManager = getPresetManager('instruct');
let instructPreset = instructPresetManager?.getCompletionPresetByName(instructName);
if (instructPreset) {
// Clone the preset to avoid modifying the original
instructPreset = structuredClone(instructPreset);
instructPreset.macro = false;
instructPreset.names_behavior = names_behavior_types.NONE;
// Format messages using instruct formatting
const formattedMessages = [];
for (const message of prompt) {
let messageContent = message.content;
if (!message.ignoreInstruct) {
messageContent = formatInstructModeChat(
message.role,
message.content,
message.role === 'user',
false,
undefined,
undefined,
undefined,
undefined,
instructPreset,
);
// Add prompt formatting for the last message
if (message === prompt[prompt.length - 1]) {
messageContent += formatInstructModePrompt(
undefined,
false,
undefined,
undefined,
undefined,
false,
false,
instructPreset,
);
}
}
formattedMessages.push(messageContent);
}
requestData.prompt = formattedMessages.join('');
if (instructPreset.output_suffix) {
requestData.stop = [instructPreset.output_suffix];
requestData.stopping_strings = [instructPreset.output_suffix];
}
} else {
console.warn(`Instruct preset "${instructName}" not found, using basic formatting`);
requestData.prompt = prompt.map(x => x.content).join('\n\n');
}
} else if (typeof prompt === 'string') {
requestData.prompt = prompt;
} else {
requestData.prompt = prompt.map(x => x.content).join('\n\n');
}
const data = this.createRequestData({ ...preset, ...custom });
// @ts-ignore
const data = this.createRequestData(requestData);
return await this.sendRequest(data, extractData);
}
/**
* Converts a preset to a valid text completion payload.
* Only supports temperature and min_p.
* @param {Object} preset - The preset configuration
* @param {Object} customPreset - Additional parameters to override preset values
* @returns {Object} - Formatted payload for text completion API
*/
static presetToGeneratePayload(preset, customPreset = {}) {
if (!preset || typeof preset !== 'object') {
throw new Error('Invalid preset: must be an object');
}
// Merge preset with custom parameters
const settings = { ...preset, ...customPreset };
// Initialize base payload with common parameters
let payload = {
'temperature': settings.temp ? Number(settings.temp) : undefined,
'min_p': settings.min_p ? Number(settings.min_p) : undefined,
};
// Remove undefined values to avoid API errors
Object.keys(payload).forEach(key => {
if (payload[key] === undefined) {
delete payload[key];
}
});
return payload;
}
}
/**
@@ -129,7 +265,7 @@ export class ChatCompletionService {
* @returns {ChatCompletionPayload}
*/
static createRequestData({ messages, model, chat_completion_source, max_tokens, temperature, ...props }) {
return {
const payload = {
...props,
messages,
model,
@@ -138,13 +274,22 @@ export class ChatCompletionService {
temperature,
stream: false,
};
// Remove undefined values to avoid API errors
Object.keys(payload).forEach(key => {
if (payload[key] === undefined) {
delete payload[key];
}
});
return payload;
}
/**
* Sends a chat completion request
* @param {ChatCompletionPayload} data Request data
* @param {boolean?} extractData Extract message from the response. Default true
* @returns {Promise<string | any>} Extracted data or the raw response
* @returns {Promise<ExtractedData | any>} Extracted data or the raw response
* @throws {Error}
*/
static async sendRequest(data, extractData = true) {
@@ -161,29 +306,82 @@ export class ChatCompletionService {
throw json;
}
return extractData ? extractMessageFromData(json, this.TYPE) : json;
if (!extractData) {
return json;
}
return {
content: extractMessageFromData(json, this.TYPE),
reasoning: extractReasoningFromData(json, {
mainApi: this.TYPE,
textGenType: data.chat_completion_source,
ignoreShowThoughts: true,
}),
};
}
/**
* @param {string} presetName
* Process and send a chat completion request with optional preset
* @param {ChatCompletionPayload} custom
* @param {boolean} extractData Extract message from the response. Default true
* @returns {Promise<string | any>} Extracted data or the raw response
* @param {Object} options - Configuration options
* @param {string?} [options.presetName] - Name of the preset to use for generation settings
* @param {boolean} extractData - Whether to extract structured data from response
* @returns {Promise<ExtractedData | any>} Extracted data or the raw response
* @throws {Error}
*/
static async sendRequestWithPreset(presetName, custom, extractData = true) {
const presetManager = getPresetManager(this.TYPE);
if (!presetManager) {
throw new Error('Preset manager not found');
static async processRequest(custom, options, extractData = true) {
const { presetName } = options;
let requestData = { ...custom };
// Apply generation preset if specified
if (presetName) {
const presetManager = getPresetManager(this.TYPE);
if (presetManager) {
const preset = presetManager.getCompletionPresetByName(presetName);
if (preset) {
// Convert preset to payload and merge with custom parameters
const presetPayload = this.presetToGeneratePayload(preset, {});
requestData = { ...presetPayload, ...requestData };
} else {
console.warn(`Preset "${presetName}" not found, continuing with default settings`);
}
} else {
console.warn('Preset manager not found, continuing with default settings');
}
}
const preset = presetManager.getCompletionPresetByName(presetName);
if (!preset) {
throw new Error('Preset not found');
}
const data = this.createRequestData({ ...preset, ...custom });
const data = this.createRequestData(requestData);
return await this.sendRequest(data, extractData);
}
/**
* Converts a preset to a valid chat completion payload
* Only supports temperature.
* @param {Object} preset - The preset configuration
* @param {Object} customParams - Additional parameters to override preset values
* @returns {Object} - Formatted payload for chat completion API
*/
static presetToGeneratePayload(preset, customParams = {}) {
if (!preset || typeof preset !== 'object') {
throw new Error('Invalid preset: must be an object');
}
// Merge preset with custom parameters
const settings = { ...preset, ...customParams };
// Initialize base payload with common parameters
const payload = {
temperature: settings.temperature ? Number(settings.temperature) : undefined,
};
// Remove undefined values to avoid API errors
Object.keys(payload).forEach(key => {
if (payload[key] === undefined) {
delete payload[key];
}
});
return payload;
}
}

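A minimal usage sketch of the new processRequest entry points that replace sendRequestWithPreset. The preset, instruct template, model, and backend names are placeholders; with extractData left at its default of true, both calls resolve to an { content, reasoning } object.

// Assumed to run inside an async function, in a module importing both services from './custom-request.js'.
const textResult = await TextCompletionService.processRequest(
    {
        prompt: [
            { role: 'system', content: 'You are a concise assistant.' },
            { role: 'user', content: 'Summarize the scene in one sentence.' },
        ],
        max_tokens: 200,
        api_type: 'koboldcpp',            // placeholder text completion backend
    },
    { presetName: 'My Text Preset', instructName: 'My Instruct Template' },
);
console.log(textResult.content, textResult.reasoning);

const chatResult = await ChatCompletionService.processRequest(
    {
        messages: [{ role: 'user', content: 'Hello!' }],
        model: 'gpt-4o-mini',             // placeholder model name
        chat_completion_source: 'openai', // placeholder source
        max_tokens: 200,
    },
    { presetName: 'My Chat Preset' },     // only temperature is taken from the preset
);
console.log(chatResult.content);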

@@ -1,4 +1,4 @@
import { Fuse } from '../../../lib.js';
import { DOMPurify, Fuse } from '../../../lib.js';
import { event_types, eventSource, main_api, saveSettingsDebounced } from '../../../script.js';
import { extension_settings, renderExtensionTemplateAsync } from '../../extensions.js';
@@ -267,11 +267,16 @@ async function createConnectionProfile(forceName = null) {
});
const isNameTaken = (n) => extension_settings.connectionManager.profiles.some(p => p.name === n);
const suggestedName = getUniqueName(collapseSpaces(`${profile.api ?? ''} ${profile.model ?? ''} - ${profile.preset ?? ''}`), isNameTaken);
const name = forceName ?? await callGenericPopup(template, POPUP_TYPE.INPUT, suggestedName, { rows: 2 });
let name = forceName ?? await callGenericPopup(template, POPUP_TYPE.INPUT, suggestedName, { rows: 2 });
// If it's cancelled, it will be false
if (!name) {
return null;
}
name = DOMPurify.sanitize(String(name));
if (!name) {
toastr.error('Name cannot be empty.');
return null;
}
if (isNameTaken(name) || name === NONE) {
toastr.error('A profile with the same name already exists.');
@@ -303,7 +308,8 @@ async function deleteConnectionProfile() {
return;
}
const name = extension_settings.connectionManager.profiles[index].name;
const profile = extension_settings.connectionManager.profiles[index];
const name = profile.name;
const confirm = await Popup.show.confirm(t`Are you sure you want to delete the selected profile?`, name);
if (!confirm) {
@@ -313,6 +319,8 @@ async function deleteConnectionProfile() {
extension_settings.connectionManager.profiles.splice(index, 1);
extension_settings.connectionManager.selectedProfile = null;
saveSettingsDebounced();
await eventSource.emit(event_types.CONNECTION_PROFILE_DELETED, profile);
}
/**
@@ -512,6 +520,7 @@ async function renderDetailsContent(detailsContent) {
saveSettingsDebounced();
renderConnectionProfiles(profiles);
await renderDetailsContent(detailsContent);
await eventSource.emit(event_types.CONNECTION_PROFILE_CREATED, profile);
await eventSource.emit(event_types.CONNECTION_PROFILE_LOADED, profile.name);
});
@@ -523,9 +532,11 @@ async function renderDetailsContent(detailsContent) {
console.log('No profile selected');
return;
}
const oldProfile = structuredClone(profile);
await updateConnectionProfile(profile);
await renderDetailsContent(detailsContent);
saveSettingsDebounced();
await eventSource.emit(event_types.CONNECTION_PROFILE_UPDATED, oldProfile, profile);
await eventSource.emit(event_types.CONNECTION_PROFILE_LOADED, profile.name);
toastr.success('Connection profile updated', '', { timeOut: 1500 });
});
@@ -559,7 +570,7 @@ async function renderDetailsContent(detailsContent) {
return acc;
}, {});
const template = $(await renderExtensionTemplateAsync(MODULE_NAME, 'edit', { name: profile.name, settings }));
const newName = await callGenericPopup(template, POPUP_TYPE.INPUT, profile.name, {
let newName = await callGenericPopup(template, POPUP_TYPE.INPUT, profile.name, {
rows: 2,
customButtons: [{
text: t`Save and Update`,
@@ -571,9 +582,15 @@ async function renderDetailsContent(detailsContent) {
}],
});
// If it's cancelled, it will be false
if (!newName) {
return;
}
newName = DOMPurify.sanitize(String(newName));
if (!newName) {
toastr.error('Name cannot be empty.');
return;
}
if (profile.name !== newName && extension_settings.connectionManager.profiles.some(p => p.name === newName)) {
toastr.error('A profile with the same name already exists.');
@@ -584,6 +601,7 @@ async function renderDetailsContent(detailsContent) {
return Object.entries(FANCY_NAMES).find(x => x[1] === String($(this).val()))?.[0];
}).get();
const oldProfile = structuredClone(profile);
if (newExcludeList.length !== profile.exclude.length || !newExcludeList.every(e => profile.exclude.includes(e))) {
profile.exclude = newExcludeList;
for (const command of newExcludeList) {
@@ -598,10 +616,11 @@ async function renderDetailsContent(detailsContent) {
if (profile.name !== newName) {
toastr.success('Connection profile renamed.');
profile.name = String(newName);
profile.name = newName;
}
saveSettingsDebounced();
await eventSource.emit(event_types.CONNECTION_PROFILE_UPDATED, oldProfile, profile);
renderConnectionProfiles(profiles);
await renderDetailsContent(detailsContent);
});
@@ -704,6 +723,7 @@ async function renderDetailsContent(detailsContent) {
saveSettingsDebounced();
renderConnectionProfiles(profiles);
await renderDetailsContent(detailsContent);
await eventSource.emit(event_types.CONNECTION_PROFILE_CREATED, profile);
return profile.name;
},
}));
@@ -718,9 +738,11 @@ async function renderDetailsContent(detailsContent) {
toastr.warning('No profile selected.');
return '';
}
const oldProfile = structuredClone(profile);
await updateConnectionProfile(profile);
await renderDetailsContent(detailsContent);
saveSettingsDebounced();
await eventSource.emit(event_types.CONNECTION_PROFILE_UPDATED, oldProfile, profile);
return profile.name;
},
}));

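Because the manager now emits the connection profile lifecycle events added to event_types, other code can react to created, updated, and deleted profiles. A minimal sketch; the import path matches this extension's own imports.

import { eventSource, event_types } from '../../../script.js';

eventSource.on(event_types.CONNECTION_PROFILE_CREATED, (profile) => {
    console.log('Connection profile created:', profile.name);
});
eventSource.on(event_types.CONNECTION_PROFILE_UPDATED, (oldProfile, newProfile) => {
    console.log('Connection profile updated:', oldProfile.name, '->', newProfile.name);
});
eventSource.on(event_types.CONNECTION_PROFILE_DELETED, (profile) => {
    console.log('Connection profile deleted:', profile.name);
});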

@@ -1,5 +1,6 @@
import { getRequestHeaders } from '../../script.js';
import { CONNECT_API_MAP, getRequestHeaders } from '../../script.js';
import { extension_settings, openThirdPartyExtensionMenu } from '../extensions.js';
import { t } from '../i18n.js';
import { oai_settings } from '../openai.js';
import { SECRET_KEYS, secret_state } from '../secrets.js';
import { textgen_types, textgenerationwebui_settings } from '../textgen-settings.js';
@@ -273,3 +274,309 @@ export async function getWebLlmContextSize() {
const model = await engine.getCurrentModelInfo();
return model?.context_size;
}
/**
* Uses connection profiles to send a generation request to the API. Does not support streaming.
*/
export class ConnectionManagerRequestService {
static defaultSendRequestParams = {
extractData: true,
includePreset: true,
includeInstruct: true,
};
static getAllowedTypes() {
return {
openai: t`Chat Completion`,
textgenerationwebui: t`Text Completion`,
};
}
/**
* @param {string} profileId
* @param {string | (import('../custom-request.js').ChatCompletionMessage & {ignoreInstruct?: boolean})[]} prompt
* @param {number} maxTokens
* @param {{extractData?: boolean, includePreset?: boolean, includeInstruct?: boolean}} custom - default values are true
* @returns {Promise<import('../custom-request.js').ExtractedData | any>} Extracted data or the raw response
*/
static async sendRequest(profileId, prompt, maxTokens, custom = this.defaultSendRequestParams) {
const { extractData, includePreset, includeInstruct } = { ...this.defaultSendRequestParams, ...custom };
const context = SillyTavern.getContext();
if (context.extensionSettings.disabledExtensions.includes('connection-manager')) {
throw new Error('Connection Manager is not available');
}
const profile = context.extensionSettings.connectionManager.profiles.find((p) => p.id === profileId);
const selectedApiMap = this.validateProfile(profile);
try {
switch (selectedApiMap.selected) {
case 'openai': {
if (!selectedApiMap.source) {
throw new Error(`API type ${selectedApiMap.selected} does not support chat completions`);
}
const messages = Array.isArray(prompt) ? prompt : [{ role: 'user', content: prompt }];
return await context.ChatCompletionService.processRequest({
messages,
max_tokens: maxTokens,
model: profile.model,
chat_completion_source: selectedApiMap.source,
}, {
presetName: includePreset ? profile.preset : undefined,
}, extractData);
}
case 'textgenerationwebui': {
if (!selectedApiMap.type) {
throw new Error(`API type ${selectedApiMap.selected} does not support text completions`);
}
return await context.TextCompletionService.processRequest({
prompt,
max_tokens: maxTokens,
model: profile.model,
api_type: selectedApiMap.type,
api_server: profile['api-url'],
}, {
instructName: includeInstruct ? profile.instruct : undefined,
presetName: includePreset ? profile.preset : undefined,
}, extractData);
}
default: {
throw new Error(`Unknown API type ${selectedApiMap.selected}`);
}
}
} catch (error) {
throw new Error('API request failed', { cause: error });
}
}
/**
* Respects allowed types.
* @returns {import('./connection-manager/index.js').ConnectionProfile[]}
*/
static getSupportedProfiles() {
const context = SillyTavern.getContext();
if (context.extensionSettings.disabledExtensions.includes('connection-manager')) {
throw new Error('Connection Manager is not available');
}
const profiles = context.extensionSettings.connectionManager.profiles;
return profiles.filter((p) => this.isProfileSupported(p));
}
/**
* @param {import('./connection-manager/index.js').ConnectionProfile?} [profile]
* @returns {boolean}
*/
static isProfileSupported(profile) {
if (!profile) {
return false;
}
const apiMap = CONNECT_API_MAP[profile.api];
if (!Object.hasOwn(this.getAllowedTypes(), apiMap.selected)) {
return false;
}
// Some providers, like koboldcpp, don't need a model, but we don't check per provider.
switch (apiMap.selected) {
case 'openai':
return !!apiMap.source;
case 'textgenerationwebui':
return !!apiMap.type;
}
return false;
}
/**
* @param {import('./connection-manager/index.js').ConnectionProfile?} [profile]
* @return {import('../../script.js').ConnectAPIMap}
* @throws {Error}
*/
static validateProfile(profile) {
if (!profile) {
throw new Error('Could not find profile.');
}
if (!profile.api) {
throw new Error('Select a connection profile that has an API');
}
const context = SillyTavern.getContext();
const selectedApiMap = context.CONNECT_API_MAP[profile.api];
if (!selectedApiMap) {
throw new Error(`Unknown API type ${profile.api}`);
}
if (!Object.hasOwn(this.getAllowedTypes(), selectedApiMap.selected)) {
throw new Error(`API type ${selectedApiMap.selected} is not supported. Supported types: ${Object.values(this.getAllowedTypes()).join(', ')}`);
}
return selectedApiMap;
}
/**
* Creates the profiles dropdown and updates the select element accordingly. Use the onChange, onCreate, unUpdate, onDelete callbacks for custom behaviour, e.g. updating extension settings.
* @param {string} selector
* @param {string} initialSelectedProfileId
* @param {(profile?: import('./connection-manager/index.js').ConnectionProfile) => Promise<void> | void} onChange - Fired in 3 cases: when the user selects a new profile, when the user deletes the selected profile, and when the user updates the selected profile.
* @param {(profile: import('./connection-manager/index.js').ConnectionProfile) => Promise<void> | void} onCreate
* @param {(oldProfile: import('./connection-manager/index.js').ConnectionProfile, newProfile: import('./connection-manager/index.js').ConnectionProfile) => Promise<void> | void} unUpdate
* @param {(profile: import('./connection-manager/index.js').ConnectionProfile) => Promise<void> | void} onDelete
*/
static handleDropdown(
selector,
initialSelectedProfileId,
onChange = () => { },
onCreate = () => { },
unUpdate = () => { },
onDelete = () => { },
) {
const context = SillyTavern.getContext();
if (context.extensionSettings.disabledExtensions.includes('connection-manager')) {
throw new Error('Connection Manager is not available');
}
/**
* @type {JQuery<HTMLSelectElement>}
*/
const dropdown = $(selector);
if (!dropdown || !dropdown.length) {
throw new Error(`Could not find dropdown with selector ${selector}`);
}
dropdown.empty();
// Create default option using document.createElement
const defaultOption = document.createElement('option');
defaultOption.value = '';
defaultOption.textContent = 'Select a Connection Profile';
defaultOption.dataset.i18n = 'Select a Connection Profile';
dropdown.append(defaultOption);
const profiles = context.extensionSettings.connectionManager.profiles;
// Create optgroups using document.createElement
const groups = {};
for (const [apiType, groupLabel] of Object.entries(this.getAllowedTypes())) {
const optgroup = document.createElement('optgroup');
optgroup.label = groupLabel;
groups[apiType] = optgroup;
}
const sortedProfilesByGroup = {};
for (const apiType of Object.keys(this.getAllowedTypes())) {
sortedProfilesByGroup[apiType] = [];
}
for (const profile of profiles) {
if (this.isProfileSupported(profile)) {
const apiMap = CONNECT_API_MAP[profile.api];
if (sortedProfilesByGroup[apiMap.selected]) {
sortedProfilesByGroup[apiMap.selected].push(profile);
}
}
}
// Sort each group alphabetically and add to dropdown
for (const [apiType, groupProfiles] of Object.entries(sortedProfilesByGroup)) {
if (groupProfiles.length === 0) continue;
groupProfiles.sort((a, b) => a.name.localeCompare(b.name));
const group = groups[apiType];
for (const profile of groupProfiles) {
const option = document.createElement('option');
option.value = profile.id;
option.textContent = profile.name;
group.appendChild(option);
}
}
for (const group of Object.values(groups)) {
if (group.children.length > 0) {
dropdown.append(group);
}
}
const selectedProfile = profiles.find((p) => p.id === initialSelectedProfileId);
if (selectedProfile) {
dropdown.val(selectedProfile.id);
}
context.eventSource.on(context.eventTypes.CONNECTION_PROFILE_CREATED, async (profile) => {
const isSupported = this.isProfileSupported(profile);
if (!isSupported) {
return;
}
const group = groups[CONNECT_API_MAP[profile.api].selected];
const option = document.createElement('option');
option.value = profile.id;
option.textContent = profile.name;
group.appendChild(option);
await onCreate(profile);
});
context.eventSource.on(context.eventTypes.CONNECTION_PROFILE_UPDATED, async (oldProfile, newProfile) => {
const currentSelected = dropdown.val();
const isSelectedProfile = currentSelected === oldProfile.id;
await unUpdate(oldProfile, newProfile);
if (!this.isProfileSupported(newProfile)) {
if (isSelectedProfile) {
dropdown.val('');
dropdown.trigger('change');
}
return;
}
const group = groups[CONNECT_API_MAP[newProfile.api].selected];
const oldOption = group.querySelector(`option[value="${oldProfile.id}"]`);
if (oldOption) {
oldOption.remove();
}
const option = document.createElement('option');
option.value = newProfile.id;
option.textContent = newProfile.name;
group.appendChild(option);
if (isSelectedProfile) {
// Technically we don't need to reselect, but do it anyway in case the id ever changes (not possible for now).
dropdown.val(newProfile.id);
dropdown.trigger('change');
}
});
context.eventSource.on(context.eventTypes.CONNECTION_PROFILE_DELETED, async (profile) => {
const currentSelected = dropdown.val();
const isSelectedProfile = currentSelected === profile.id;
if (!this.isProfileSupported(profile)) {
return;
}
const group = groups[CONNECT_API_MAP[profile.api].selected];
const optionToRemove = group.querySelector(`option[value="${profile.id}"]`);
if (optionToRemove) {
optionToRemove.remove();
}
if (isSelectedProfile) {
dropdown.val('');
dropdown.trigger('change');
}
await onDelete(profile);
});
dropdown.on('change', async () => {
const profileId = dropdown.val();
const profile = context.extensionSettings.connectionManager.profiles.find((p) => p.id === profileId);
await onChange(profile);
});
}
}

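A hedged usage sketch for third-party extension code; the selector, settings key, and prompt are placeholders, and sendRequest resolves to { content, reasoning } because extractData defaults to true.

// Assumed to run inside an async extension init function.
const context = SillyTavern.getContext();
const service = context.ConnectionManagerRequestService;

// One-off request through the first supported profile, if any exist.
const profiles = service.getSupportedProfiles();
if (profiles.length > 0) {
    const result = await service.sendRequest(profiles[0].id, 'Write a haiku about rain.', 150);
    console.log(result.content, result.reasoning);
}

// Populate a <select> with supported profiles and react to selection changes.
service.handleDropdown(
    '#my_extension_profile',                          // placeholder selector
    context.extensionSettings.myExtension?.profileId, // placeholder setting holding a profile id
    (profile) => console.log('Selected profile:', profile?.name),
);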

@@ -320,59 +320,61 @@ export const force_output_sequence = {
* @param {string} name1 User name.
* @param {string} name2 Character name.
* @param {boolean|number} forceOutputSequence Force to use first/last output sequence (if configured).
* @param {InstructSettings} customInstruct Custom instruct mode settings.
* @returns {string} Formatted instruct mode chat message.
*/
export function formatInstructModeChat(name, mes, isUser, isNarrator, forceAvatar, name1, name2, forceOutputSequence) {
let includeNames = isNarrator ? false : power_user.instruct.names_behavior === names_behavior_types.ALWAYS;
export function formatInstructModeChat(name, mes, isUser, isNarrator, forceAvatar, name1, name2, forceOutputSequence, customInstruct = null) {
const instruct = structuredClone(customInstruct ?? power_user.instruct);
let includeNames = isNarrator ? false : instruct.names_behavior === names_behavior_types.ALWAYS;
if (!isNarrator && power_user.instruct.names_behavior === names_behavior_types.FORCE && ((selected_group && name !== name1) || (forceAvatar && name !== name1))) {
if (!isNarrator && instruct.names_behavior === names_behavior_types.FORCE && ((selected_group && name !== name1) || (forceAvatar && name !== name1))) {
includeNames = true;
}
function getPrefix() {
if (isNarrator) {
return power_user.instruct.system_same_as_user ? power_user.instruct.input_sequence : power_user.instruct.system_sequence;
return instruct.system_same_as_user ? instruct.input_sequence : instruct.system_sequence;
}
if (isUser) {
if (forceOutputSequence === force_output_sequence.FIRST) {
return power_user.instruct.first_input_sequence || power_user.instruct.input_sequence;
return instruct.first_input_sequence || instruct.input_sequence;
}
if (forceOutputSequence === force_output_sequence.LAST) {
return power_user.instruct.last_input_sequence || power_user.instruct.input_sequence;
return instruct.last_input_sequence || instruct.input_sequence;
}
return power_user.instruct.input_sequence;
return instruct.input_sequence;
}
if (forceOutputSequence === force_output_sequence.FIRST) {
return power_user.instruct.first_output_sequence || power_user.instruct.output_sequence;
return instruct.first_output_sequence || instruct.output_sequence;
}
if (forceOutputSequence === force_output_sequence.LAST) {
return power_user.instruct.last_output_sequence || power_user.instruct.output_sequence;
return instruct.last_output_sequence || instruct.output_sequence;
}
return power_user.instruct.output_sequence;
return instruct.output_sequence;
}
function getSuffix() {
if (isNarrator) {
return power_user.instruct.system_same_as_user ? power_user.instruct.input_suffix : power_user.instruct.system_suffix;
return instruct.system_same_as_user ? instruct.input_suffix : instruct.system_suffix;
}
if (isUser) {
return power_user.instruct.input_suffix;
return instruct.input_suffix;
}
return power_user.instruct.output_suffix;
return instruct.output_suffix;
}
let prefix = getPrefix() || '';
let suffix = getSuffix() || '';
if (power_user.instruct.macro) {
if (instruct.macro) {
prefix = substituteParams(prefix, name1, name2);
prefix = prefix.replace(/{{name}}/gi, name || 'System');
@@ -380,11 +382,11 @@ export function formatInstructModeChat(name, mes, isUser, isNarrator, forceAvata
suffix = suffix.replace(/{{name}}/gi, name || 'System');
}
if (!suffix && power_user.instruct.wrap) {
if (!suffix && instruct.wrap) {
suffix = '\n';
}
const separator = power_user.instruct.wrap ? '\n' : '';
const separator = instruct.wrap ? '\n' : '';
// Don't include the name if it's empty
const textArray = includeNames && name ? [prefix, `${name}: ${mes}` + suffix] : [prefix, mes + suffix];
@@ -504,30 +506,32 @@ export function formatInstructModeExamples(mesExamplesArray, name1, name2) {
* @param {string} name2 Character name.
* @param {boolean} isQuiet Is quiet mode generation.
* @param {boolean} isQuietToLoud Is quiet to loud generation.
* @param {InstructSettings} customInstruct Custom instruct settings.
* @returns {string} Formatted instruct mode last prompt line.
*/
export function formatInstructModePrompt(name, isImpersonate, promptBias, name1, name2, isQuiet, isQuietToLoud) {
const includeNames = name && (power_user.instruct.names_behavior === names_behavior_types.ALWAYS || (!!selected_group && power_user.instruct.names_behavior === names_behavior_types.FORCE)) && !(isQuiet && !isQuietToLoud);
export function formatInstructModePrompt(name, isImpersonate, promptBias, name1, name2, isQuiet, isQuietToLoud, customInstruct = null) {
const instruct = structuredClone(customInstruct ?? power_user.instruct);
const includeNames = name && (instruct.names_behavior === names_behavior_types.ALWAYS || (!!selected_group && instruct.names_behavior === names_behavior_types.FORCE)) && !(isQuiet && !isQuietToLoud);
function getSequence() {
// User impersonation prompt
if (isImpersonate) {
return power_user.instruct.input_sequence;
return instruct.input_sequence;
}
// Neutral / system / quiet prompt
// Use a special quiet instruct sequence if defined, or assistant's output sequence otherwise
if (isQuiet && !isQuietToLoud) {
return power_user.instruct.last_system_sequence || power_user.instruct.output_sequence;
return instruct.last_system_sequence || instruct.output_sequence;
}
// Quiet in-character prompt
if (isQuiet && isQuietToLoud) {
return power_user.instruct.last_output_sequence || power_user.instruct.output_sequence;
return instruct.last_output_sequence || instruct.output_sequence;
}
// Default AI response
return power_user.instruct.last_output_sequence || power_user.instruct.output_sequence;
return instruct.last_output_sequence || instruct.output_sequence;
}
let sequence = getSequence() || '';
@@ -536,21 +540,21 @@ export function formatInstructModePrompt(name, isImpersonate, promptBias, name1,
// A hack for Mistral's formatting that has a normal output sequence ending with a space
if (
includeNames &&
power_user.instruct.last_output_sequence &&
power_user.instruct.output_sequence &&
sequence === power_user.instruct.last_output_sequence &&
/\s$/.test(power_user.instruct.output_sequence) &&
!/\s$/.test(power_user.instruct.last_output_sequence)
instruct.last_output_sequence &&
instruct.output_sequence &&
sequence === instruct.last_output_sequence &&
/\s$/.test(instruct.output_sequence) &&
!/\s$/.test(instruct.last_output_sequence)
) {
nameFiller = power_user.instruct.output_sequence.slice(-1);
nameFiller = instruct.output_sequence.slice(-1);
}
if (power_user.instruct.macro) {
if (instruct.macro) {
sequence = substituteParams(sequence, name1, name2);
sequence = sequence.replace(/{{name}}/gi, name || 'System');
}
const separator = power_user.instruct.wrap ? '\n' : '';
const separator = instruct.wrap ? '\n' : '';
let text = includeNames ? (separator + sequence + separator + nameFiller + `${name}:`) : (separator + sequence);
// Quiet prompt already has a newline at the end
@@ -562,7 +566,7 @@ export function formatInstructModePrompt(name, isImpersonate, promptBias, name1,
text += (includeNames ? promptBias : (separator + promptBias.trimStart()));
}
return (power_user.instruct.wrap ? text.trimEnd() : text) + (includeNames ? '' : separator);
return (instruct.wrap ? text.trimEnd() : text) + (includeNames ? '' : separator);
}
/**

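Since both formatters now accept a customInstruct preset, callers can build an instruct-formatted prompt from settings other than power_user.instruct. A minimal sketch, assumed to run in a module that already imports power_user and these two helpers; the sequence overrides are illustrative.

// Clone the current settings and override sequences for a one-off prompt build.
const instruct = structuredClone(power_user.instruct);
instruct.input_sequence = '<|user|>';       // illustrative override
instruct.output_sequence = '<|assistant|>'; // illustrative override

const userTurn = formatInstructModeChat('Alice', 'Hello!', true, false, undefined, 'Alice', 'Bot', false, instruct);
const promptTail = formatInstructModePrompt('Bot', false, '', 'Alice', 'Bot', false, false, instruct);
const prompt = userTurn + promptTail;       // instruct-formatted text ready for a completion request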

@@ -2048,7 +2048,7 @@ async function sendOpenAIRequest(type, messages, signal) {
delete generate_data.stop;
delete generate_data.logprobs;
}
if (isOAI && oai_settings.openai_model.includes('gpt-4.5-preview') || isOpenRouter && oai_settings.openrouter_model.includes('gpt-4.5-preview')) {
if (isOAI && oai_settings.openai_model.includes('gpt-4.5') || isOpenRouter && oai_settings.openrouter_model.includes('gpt-4.5')) {
delete generate_data.logprobs;
}


@@ -218,7 +218,9 @@ let power_user = {
system_sequence: '',
system_suffix: '',
last_system_sequence: '',
first_input_sequence: '',
first_output_sequence: '',
last_input_sequence: '',
last_output_sequence: '',
system_sequence_prefix: '',
system_sequence_suffix: '',


@@ -57,19 +57,24 @@ function toggleReasoningAutoExpand() {
* @param {object} data Response data
* @returns {string} Extracted reasoning
*/
export function extractReasoningFromData(data) {
switch (main_api) {
export function extractReasoningFromData(data, {
mainApi = null,
ignoreShowThoughts = false,
textGenType = null,
chatCompletionSource = null
} = {}) {
switch (mainApi ?? main_api) {
case 'textgenerationwebui':
switch (textgenerationwebui_settings.type) {
switch (textGenType ?? textgenerationwebui_settings.type) {
case textgen_types.OPENROUTER:
return data?.choices?.[0]?.reasoning ?? '';
}
break;
case 'openai':
if (!oai_settings.show_thoughts) break;
if (!ignoreShowThoughts && !oai_settings.show_thoughts) break;
switch (oai_settings.chat_completion_source) {
switch (chatCompletionSource ?? oai_settings.chat_completion_source) {
case chat_completion_sources.DEEPSEEK:
return data?.choices?.[0]?.message?.reasoning_content ?? '';
case chat_completion_sources.OPENROUTER:

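The new options let callers extract reasoning from a raw response without depending on the globally selected API. A minimal sketch, assuming `json` is a chat completion response object and that the module imports chat_completion_sources alongside this function (this mirrors how custom-request.js now calls it with ignoreShowThoughts: true).

const reasoning = extractReasoningFromData(json, {
    mainApi: 'openai',                                      // treat the response as chat completion
    chatCompletionSource: chat_completion_sources.DEEPSEEK, // source for this call only
    ignoreShowThoughts: true,                               // return reasoning even if "show thoughts" is off
});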

@@ -80,6 +80,7 @@ import { timestampToMoment, uuidv4 } from './utils.js';
import { getGlobalVariable, getLocalVariable, setGlobalVariable, setLocalVariable } from './variables.js';
import { convertCharacterBook, loadWorldInfo, saveWorldInfo, updateWorldInfoList } from './world-info.js';
import { ChatCompletionService, TextCompletionService } from './custom-request.js';
import { ConnectionManagerRequestService } from './extensions/shared.js';
import { updateReasoningUI, parseReasoningFromString } from './reasoning.js';
export function getContext() {
@@ -215,6 +216,7 @@ export function getContext() {
clearChat,
ChatCompletionService,
TextCompletionService,
ConnectionManagerRequestService,
updateReasoningUI,
parseReasoningFromString,
unshallowCharacter,


@@ -86,7 +86,7 @@ const OOBA_DEFAULT_ORDER = [
'encoder_repetition_penalty',
'no_repeat_ngram',
];
const APHRODITE_DEFAULT_ORDER = [
export const APHRODITE_DEFAULT_ORDER = [
'dry',
'penalties',
'no_repeat_ngram',


@@ -411,7 +411,7 @@ export function getTokenizerModel(requestModel) {
return 'gpt-4o';
}
if (requestModel.includes('gpt-4.5-preview')) {
if (requestModel.includes('gpt-4.5')) {
return 'gpt-4o';
}