Merge branch 'staging' of https://github.com/Cohee1207/SillyTavern into staging

This commit is contained in:
RossAscends 2023-08-14 05:33:48 +09:00
commit 3f87874afe
7 changed files with 55 additions and 17 deletions

View File

@ -1,6 +1,6 @@
{
"temp": 0,
"rep_pen": 1.1,
"rep_pen": 1.18,
"rep_pen_range": 2048,
"streaming_kobold": true,
"top_p": 0,
@ -8,7 +8,7 @@
"top_k": 1,
"typical": 1,
"tfs": 1,
"rep_pen_slope": 0.2,
"rep_pen_slope": 0,
"single_line": false,
"sampler_order": [
6,

View File

@ -1,13 +1,13 @@
{
"temp": 1,
"top_p": 1,
"top_k": 50,
"temp": 0,
"top_p": 0,
"top_k": 1,
"typical_p": 1,
"top_a": 0,
"tfs": 1,
"epsilon_cutoff": 0,
"eta_cutoff": 0,
"rep_pen": 1,
"rep_pen": 1.18,
"rep_pen_range": 0,
"no_repeat_ngram_size": 0,
"penalty_alpha": 0,

View File

@ -434,7 +434,7 @@
</div>
<div class="range-block-range-and-counter">
<div class="range-block-range">
<input type="range" id="temp_textgenerationwebui" name="volume" min="0.1" max="2.0" step="0.01">
<input type="range" id="temp_textgenerationwebui" name="volume" min="0.0" max="2.0" step="0.01">
</div>
<div class="range-block-counter">
<div contenteditable="true" data-for="temp_textgenerationwebui" id="temp_counter_textgenerationwebui">

View File

@ -2,7 +2,7 @@
"input_sequence": "### Instruction:",
"macro": true,
"name": "Roleplay",
"names": false,
"names": true,
"output_sequence": "### Response (2 paragraphs, engaging, natural, authentic, descriptive, creative):",
"separator_sequence": "",
"stop_sequence": "",

View File

@ -510,13 +510,13 @@ PromptManagerModule.prototype.init = function (moduleConfiguration, serviceSetti
eventSource.on(event_types.MESSAGE_RECEIVED, () => this.renderDebounced());
// Re-render when chatcompletion settings change
eventSource.on(event_types.CHATCOMPLETION_SOURCE_CHANGED, () => this.render());
eventSource.on(event_types.CHATCOMPLETION_MODEL_CHANGED, () => this.render());
eventSource.on(event_types.CHATCOMPLETION_SOURCE_CHANGED, () => this.renderDebounced());
eventSource.on(event_types.CHATCOMPLETION_MODEL_CHANGED, () => this.renderDebounced());
// Re-render when the character changes.
eventSource.on('chatLoaded', (event) => {
this.handleCharacterSelected(event)
this.saveServiceSettings().then(() => this.render());
this.saveServiceSettings().then(() => this.renderDebounced());
});
// Re-render when the character gets edited.
@ -528,13 +528,13 @@ PromptManagerModule.prototype.init = function (moduleConfiguration, serviceSetti
// Re-render when the group changes.
eventSource.on('groupSelected', (event) => {
this.handleGroupSelected(event)
this.saveServiceSettings().then(() => this.render());
this.saveServiceSettings().then(() => this.renderDebounced());
});
// Sanitize settings after character has been deleted.
eventSource.on('characterDeleted', (event) => {
this.handleCharacterDeleted(event)
this.saveServiceSettings().then(() => this.render());
this.saveServiceSettings().then(() => this.renderDebounced());
});
// Trigger re-render when token settings are changed
@ -562,7 +562,7 @@ PromptManagerModule.prototype.init = function (moduleConfiguration, serviceSetti
document.getElementById(this.configuration.prefix + 'prompt_manager_popup_close_button').addEventListener('click', closeAndClearPopup);
// Re-render prompt manager on openai preset change
eventSource.on(event_types.OAI_PRESET_CHANGED, settings => this.render());
eventSource.on(event_types.OAI_PRESET_CHANGED, settings => this.renderDebounced());
// Close popup on preset change
eventSource.on(event_types.OAI_PRESET_CHANGED, () => {

View File

@ -45,6 +45,7 @@ import {
} from "./secrets.js";
import {
IndexedDBStore,
delay,
download,
getFileText,
@ -120,7 +121,39 @@ const openrouter_website_model = 'OR_Website';
let biasCache = undefined;
let model_list = [];
const tokenCache = {};
const objectStore = new IndexedDBStore('SillyTavern', 'chat_completions');
const tokenCache = await loadTokenCache();
/**
 * Loads the chat-completion token cache from the module-level IndexedDB store.
 * Best-effort: if the read fails (IndexedDB unavailable, blocked, or errored),
 * the error is logged and an empty object is returned so callers always
 * receive a usable cache object.
 * @returns {Promise<object>} The persisted token cache, or `{}` on failure.
 */
async function loadTokenCache() {
    try {
        console.debug('Chat Completions: loading token cache from IndexedDB');
        // `??` instead of `||`: only fall back when the store genuinely has
        // no entry (null/undefined), never on a falsy-but-present value.
        return (await objectStore.get('tokenCache')) ?? {};
    } catch (e) {
        console.log('Chat Completions: unable to load token cache from IndexedDB, using default value', e);
        return {};
    }
}
/**
 * Persists the in-memory token cache to the module-level IndexedDB store
 * under the fixed key 'tokenCache'.
 * Best-effort: storage failures are logged and swallowed so that message
 * preparation is never blocked by a persistence error.
 * @returns {Promise<void>}
 */
async function saveTokenCache() {
    try {
        console.debug('Chat Completions: saving token cache to IndexedDB');
        await objectStore.put('tokenCache', tokenCache);
    } catch (e) {
        console.log('Chat Completions: unable to save token cache to IndexedDB', e);
    }
}
/**
 * Clears the token cache both in memory and in IndexedDB.
 * Exposed on `window` as a manual maintenance hook for the devtools console.
 * Best-effort: storage failures are logged and swallowed; the in-memory
 * cache is emptied regardless of whether the IndexedDB delete succeeds.
 * @returns {Promise<void>}
 */
async function resetTokenCache() {
    try {
        console.debug('Chat Completions: resetting token cache in IndexedDB');
        // Empty the shared cache object in place (rather than reassigning)
        // so every module holding a reference to `tokenCache` sees the reset.
        Object.keys(tokenCache).forEach(key => delete tokenCache[key]);
        await objectStore.delete('tokenCache');
    } catch (e) {
        console.log('Chat Completions: unable to reset token cache in IndexedDB', e);
    }
}

// Manual debugging hook: callable as `resetTokenCache()` from the browser console.
window['resetTokenCache'] = resetTokenCache;
export const chat_completion_sources = {
OPENAI: 'openai',
@ -795,6 +828,8 @@ function prepareOpenAIMessages({
const chat = chatCompletion.getChat();
openai_messages_count = chat.filter(x => x?.role === "user" || x?.role === "assistant")?.length || 0;
// Save token cache to IndexedDB storage (async, no need to await)
saveTokenCache();
return [chat, promptManager.tokenHandler.counts];
}

View File

@ -375,15 +375,18 @@ export class IndexedDBStore {
this.dbName = dbName;
this.storeName = storeName;
this.db = null;
this.version = Date.now();
}
async open() {
return new Promise((resolve, reject) => {
const request = indexedDB.open(this.dbName);
const request = indexedDB.open(this.dbName, this.version);
request.onupgradeneeded = (event) => {
const db = event.target.result;
if (!db.objectStoreNames.contains(this.storeName)) {
db.createObjectStore(this.storeName, { keyPath: null, autoIncrement: false });
}
};
request.onsuccess = (event) => {