Mirror of https://github.com/SillyTavern/SillyTavern.git, synced 2025-06-05 21:59:27 +02:00
Merge branch 'staging' of https://github.com/Cohee1207/SillyTavern into staging

@@ -1,6 +1,6 @@
 {
     "temp": 0,
-    "rep_pen": 1.1,
+    "rep_pen": 1.18,
     "rep_pen_range": 2048,
     "streaming_kobold": true,
     "top_p": 0,
@@ -8,7 +8,7 @@
     "top_k": 1,
     "typical": 1,
     "tfs": 1,
-    "rep_pen_slope": 0.2,
+    "rep_pen_slope": 0,
     "single_line": false,
     "sampler_order": [
         6,

@@ -1,13 +1,13 @@
 {
-    "temp": 1,
-    "top_p": 1,
-    "top_k": 50,
+    "temp": 0,
+    "top_p": 0,
+    "top_k": 1,
     "typical_p": 1,
     "top_a": 0,
     "tfs": 1,
     "epsilon_cutoff": 0,
     "eta_cutoff": 0,
-    "rep_pen": 1,
+    "rep_pen": 1.18,
     "rep_pen_range": 0,
     "no_repeat_ngram_size": 0,
     "penalty_alpha": 0,

@@ -434,7 +434,7 @@
 </div>
 <div class="range-block-range-and-counter">
     <div class="range-block-range">
-        <input type="range" id="temp_textgenerationwebui" name="volume" min="0.1" max="2.0" step="0.01">
+        <input type="range" id="temp_textgenerationwebui" name="volume" min="0.0" max="2.0" step="0.01">
     </div>
     <div class="range-block-counter">
         <div contenteditable="true" data-for="temp_textgenerationwebui" id="temp_counter_textgenerationwebui">

@@ -2,7 +2,7 @@
     "input_sequence": "### Instruction:",
     "macro": true,
     "name": "Roleplay",
-    "names": false,
+    "names": true,
     "output_sequence": "### Response (2 paragraphs, engaging, natural, authentic, descriptive, creative):",
     "separator_sequence": "",
     "stop_sequence": "",

@@ -510,13 +510,13 @@ PromptManagerModule.prototype.init = function (moduleConfiguration, serviceSetti
     eventSource.on(event_types.MESSAGE_RECEIVED, () => this.renderDebounced());
 
     // Re-render when chatcompletion settings change
-    eventSource.on(event_types.CHATCOMPLETION_SOURCE_CHANGED, () => this.render());
-    eventSource.on(event_types.CHATCOMPLETION_MODEL_CHANGED, () => this.render());
+    eventSource.on(event_types.CHATCOMPLETION_SOURCE_CHANGED, () => this.renderDebounced());
+    eventSource.on(event_types.CHATCOMPLETION_MODEL_CHANGED, () => this.renderDebounced());
 
     // Re-render when the character changes.
     eventSource.on('chatLoaded', (event) => {
         this.handleCharacterSelected(event)
-        this.saveServiceSettings().then(() => this.render());
+        this.saveServiceSettings().then(() => this.renderDebounced());
     });
 
     // Re-render when the character gets edited.
@@ -528,13 +528,13 @@ PromptManagerModule.prototype.init = function (moduleConfiguration, serviceSetti
     // Re-render when the group changes.
     eventSource.on('groupSelected', (event) => {
         this.handleGroupSelected(event)
-        this.saveServiceSettings().then(() => this.render());
+        this.saveServiceSettings().then(() => this.renderDebounced());
     });
 
     // Sanitize settings after character has been deleted.
     eventSource.on('characterDeleted', (event) => {
         this.handleCharacterDeleted(event)
-        this.saveServiceSettings().then(() => this.render());
+        this.saveServiceSettings().then(() => this.renderDebounced());
     });
 
     // Trigger re-render when token settings are changed
@@ -562,7 +562,7 @@ PromptManagerModule.prototype.init = function (moduleConfiguration, serviceSetti
     document.getElementById(this.configuration.prefix + 'prompt_manager_popup_close_button').addEventListener('click', closeAndClearPopup);
 
     // Re-render prompt manager on openai preset change
-    eventSource.on(event_types.OAI_PRESET_CHANGED, settings => this.render());
+    eventSource.on(event_types.OAI_PRESET_CHANGED, settings => this.renderDebounced());
 
     // Close popup on preset change
     eventSource.on(event_types.OAI_PRESET_CHANGED, () => {
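
Note: the render() to renderDebounced() swaps above collapse bursts of events (preset, source, and model changes can fire in quick succession) into a single re-render. A minimal sketch of the debounce pattern follows; the helper below is illustrative only, and not necessarily how PromptManager.js actually defines renderDebounced (the real code may use a shared utility instead).

    // Illustrative debounce wrapper: rapid successive calls trigger fn once,
    // after a short quiet period.
    function debounce(fn, waitMs = 250) {
        let timer = null;
        return function (...args) {
            clearTimeout(timer);
            timer = setTimeout(() => fn.apply(this, args), waitMs);
        };
    }

    // Hypothetical wiring that matches the call sites in the diff:
    // this.renderDebounced = debounce(() => this.render(), 250);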

@@ -45,6 +45,7 @@ import {
 } from "./secrets.js";
 
 import {
+    IndexedDBStore,
     delay,
     download,
     getFileText,
@@ -120,7 +121,39 @@ const openrouter_website_model = 'OR_Website';
 
 let biasCache = undefined;
 let model_list = [];
-const tokenCache = {};
+const objectStore = new IndexedDBStore('SillyTavern', 'chat_completions');
+const tokenCache = await loadTokenCache();
+
+async function loadTokenCache() {
+    try {
+        console.debug('Chat Completions: loading token cache from IndexedDB')
+        return await objectStore.get('tokenCache') || {};
+    } catch (e) {
+        console.log('Chat Completions: unable to load token cache from IndexedDB, using default value', e);
+        return {};
+    }
+}
+
+async function saveTokenCache() {
+    try {
+        console.debug('Chat Completions: saving token cache to IndexedDB')
+        await objectStore.put('tokenCache', tokenCache);
+    } catch (e) {
+        console.log('Chat Completions: unable to save token cache to IndexedDB', e);
+    }
+}
+
+async function resetTokenCache() {
+    try {
+        console.debug('Chat Completions: resetting token cache in IndexedDB');
+        Object.keys(tokenCache).forEach(key => delete tokenCache[key]);
+        await objectStore.delete('tokenCache');
+    } catch (e) {
+        console.log('Chat Completions: unable to reset token cache in IndexedDB', e);
+    }
+}
+
+window['resetTokenCache'] = resetTokenCache;
 
 export const chat_completion_sources = {
     OPENAI: 'openai',
@@ -795,6 +828,8 @@ function prepareOpenAIMessages({
 
     const chat = chatCompletion.getChat();
     openai_messages_count = chat.filter(x => x?.role === "user" || x?.role === "assistant")?.length || 0;
+    // Save token cache to IndexedDB storage (async, no need to await)
+    saveTokenCache();
 
     return [chat, promptManager.tokenHandler.counts];
 }
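
Note: in the hunks above, IndexedDB is only the persistence layer. tokenCache stays a plain in-memory object that is loaded once at module start, mutated while prompts are prepared, and flushed with saveTokenCache() after a chat is built. A hedged sketch of that read-through pattern follows; the key shape and helper name are assumptions for illustration, not the exact structure used in openai.js.

    // Hypothetical read-through accessor over the in-memory tokenCache object.
    // computeTokens is whatever expensive counting call the caller provides.
    function getCachedTokenCount(model, messageHash, computeTokens) {
        tokenCache[model] = tokenCache[model] || {};
        if (tokenCache[model][messageHash] === undefined) {
            tokenCache[model][messageHash] = computeTokens();
        }
        return tokenCache[model][messageHash];
    }
    // saveTokenCache() later persists the whole object under the 'tokenCache' key.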

@@ -375,15 +375,18 @@ export class IndexedDBStore {
         this.dbName = dbName;
         this.storeName = storeName;
         this.db = null;
+        this.version = Date.now();
     }
 
     async open() {
         return new Promise((resolve, reject) => {
-            const request = indexedDB.open(this.dbName);
+            const request = indexedDB.open(this.dbName, this.version);
 
             request.onupgradeneeded = (event) => {
                 const db = event.target.result;
+                if (!db.objectStoreNames.contains(this.storeName)) {
                     db.createObjectStore(this.storeName, { keyPath: null, autoIncrement: false });
+                }
             };
 
             request.onsuccess = (event) => {
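
Note: the hunk above only shows the constructor and open() changes, but the token-cache code relies on promise-based get(), put(), and delete() methods on IndexedDBStore. A minimal sketch of what such class methods typically look like over the raw IndexedDB API follows, assuming open() resolves with the database handle; the bodies are an assumption for illustration, not this commit's exact implementation.

    // Assumed shape of the accessors used by openai.js; each method belongs
    // inside the IndexedDBStore class, wraps one transaction on the store,
    // and resolves when the underlying IDBRequest completes.
    async get(key) {
        const db = this.db ?? (this.db = await this.open());
        return new Promise((resolve, reject) => {
            const request = db.transaction(this.storeName, 'readonly')
                .objectStore(this.storeName)
                .get(key);
            request.onsuccess = () => resolve(request.result);
            request.onerror = () => reject(request.error);
        });
    }

    async put(key, object) {
        const db = this.db ?? (this.db = await this.open());
        return new Promise((resolve, reject) => {
            // keyPath is null and autoIncrement is false in the diff above,
            // so keys are supplied explicitly (out-of-line keys).
            const request = db.transaction(this.storeName, 'readwrite')
                .objectStore(this.storeName)
                .put(object, key);
            request.onsuccess = () => resolve();
            request.onerror = () => reject(request.error);
        });
    }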