Introduction of additional helper classes, refactoring

maver 2023-06-14 22:36:14 +02:00
parent 18641ea3d2
commit 8ae2c80358
2 changed files with 225 additions and 92 deletions

View File

@@ -1,7 +1,54 @@
 import {countTokens} from "./openai.js";
 import {DraggablePromptListModule as DraggableList} from "./DraggableList.js";
 import {eventSource, substituteParams} from "../script.js";
-import {TokenHandler} from "./openai.js";
+import {IdentifierNotFoundError, TokenHandler} from "./openai.js";
+
+class Prompt {
+    identifier; role; content; name; system_prompt;
+
+    constructor({identifier, role, content, name, system_prompt} = {}) {
+        this.identifier = identifier;
+        this.role = role;
+        this.content = content;
+        this.name = name;
+        this.system_prompt = system_prompt;
+    }
+}
+
+class PromptCollection {
+    collection = [];
+
+    constructor(...prompts) {
+        for(let prompt of prompts) {
+            if(!(prompt instanceof Prompt)) {
+                throw new Error('Only Prompt instances can be added to PromptCollection');
+            }
+        }
+
+        this.collection.push(...prompts);
+    }
+
+    add(...prompts) {
+        for(let prompt of prompts) {
+            if(!(prompt instanceof Prompt)) {
+                throw new Error('Only Prompt instances can be added to PromptCollection');
+            }
+        }
+
+        this.collection.push(...prompts);
+    }
+
+    get(identifier) {
+        const index = this.index(identifier);
+        if (0 > index) throw new IdentifierNotFoundError(identifier);
+        return this.collection[index];
+    }
+
+    index (identifier){
+        return this.collection.findIndex(prompt => prompt.identifier === identifier);
+    }
+
+    has(identifier) {
+        return this.collection.some(message => message.identifier === identifier);
+    }
+}
 
 function PromptManagerModule() {
     this.configuration = {
@@ -18,7 +65,6 @@ function PromptManagerModule() {
 
     this.activeCharacter = null;
     this.tokenHandler = null;
-    this.totalActiveTokens = 0;
 
     this.handleToggle = () => { };
    this.handleEdit = () => { };
@@ -81,8 +127,11 @@ PromptManagerModule.prototype.init = function (moduleConfiguration, serviceSetti
         const promptId = event.target.dataset.pmPrompt;
         const prompt = this.getPromptById(promptId);
 
-        if (null === prompt) this.addPrompt(prompt, promptId);
-        else this.updatePrompt(prompt);
+        if (null === prompt){
+            this.addPrompt({}, promptId);
+        } else {
+            this.updatePrompt(prompt);
+        }
 
         this.hideEditForm();
         this.clearEditForm(prompt);
@@ -414,8 +463,8 @@ PromptManagerModule.prototype.getPromptIndexById = function (identifier) {
  */
 PromptManagerModule.prototype.preparePrompt = function (prompt) {
     const groupMembers = this.getActiveGroupCharacters();
-    if (0 < groupMembers.length) return {role: prompt.role, content: substituteParams(prompt.content ?? '', null, null, groupMembers.join(', '))}
-    return {role: prompt.role, content: substituteParams(prompt.content ?? '')};
+    if (0 < groupMembers.length) return {role: prompt.role || 'system', content: substituteParams(prompt.content ?? '', null, null, groupMembers.join(', '))}
+    return new Prompt(prompt);
 }
 
 /**
@@ -459,16 +508,15 @@ PromptManagerModule.prototype.clearEditForm = function () {
  * Generates and returns a new ChatCompletion object based on the active character's prompt list.
  * @returns {Object} A ChatCompletion object
  */
-PromptManagerModule.prototype.getOrderedPromptList = function () {
+PromptManagerModule.prototype.getPromptCollection = function () {
     const promptList = this.getPromptListByCharacter(this.activeCharacter);
-    const assembledPromptList = [];
+    const promptCollection = new PromptCollection();
 
     promptList.forEach(entry => {
-        const chatMessage = this.preparePrompt(this.getPromptById(entry.identifier))
-        if (true === entry.enabled) assembledPromptList.push({identifier: entry.identifier, ...chatMessage});
+        if (true === entry.enabled) promptCollection.add(this.preparePrompt(this.getPromptById(entry.identifier)));
     })
 
-    return assembledPromptList;
+    return promptCollection
 }
 
 // Empties, then re-assembles the container containing the prompt list.
@@ -827,5 +875,6 @@ export {
     PromptManagerModule,
     openAiDefaultPrompts,
     openAiDefaultPromptLists,
-    defaultPromptManagerSettings
+    defaultPromptManagerSettings,
+    Prompt
 };
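
For orientation, the new helpers compose as follows: a minimal sketch, assuming it runs in the module scope of the prompt manager file above (this commit exports Prompt, while PromptCollection stays module-internal). The identifiers are illustrative.

const main = new Prompt({identifier: 'main', role: 'system', content: 'You are a helpful assistant.', system_prompt: true});
const jailbreak = new Prompt({identifier: 'jailbreak', role: 'system', content: '', system_prompt: true});

const prompts = new PromptCollection(main);
prompts.add(jailbreak);

prompts.has('main');            // true
prompts.index('jailbreak');     // 1
prompts.get('main').content;    // 'You are a helpful assistant.'
// prompts.get('unknown');      // throws IdentifierNotFoundError
// prompts.add({role: 'user'}); // throws: only Prompt instances are accepted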

View File

@@ -367,50 +367,70 @@ function formatWorldInfo(value) {
     return stringFormat(oai_settings.wi_format, value);
 }
 
-async function prepareOpenAIMessages({ name2, charDescription, charPersonality, Scenario, worldInfoBefore, worldInfoAfter, bias, type, quietPrompt, extensionPrompts, cyclePrompt } = {}) {
-    const promptList = promptManager.getOrderedPromptList();
-    const promptIndex = (identifier) => promptList.findIndex(prompt => prompt.identifier === identifier);
-    const getMessageWithIndex = (identifier) => {
-        const index = promptIndex(identifier);
-        const prompt = promptList[index];
-        const msg = new Message(prompt.role, prompt.content, prompt.identifier);
-        return {message: msg, index: index}
-    }
-
-    const chatCompletion = ChatCompletion.new();
+async function prepareOpenAIMessages({
+    name2,
+    charDescription,
+    charPersonality,
+    Scenario,
+    worldInfoBefore,
+    worldInfoAfter,
+    bias,
+    type,
+    quietPrompt,
+    extensionPrompts,
+    cyclePrompt
+} = {}) {
+    const prompts = promptManager.getPromptCollection();
+
+    const chatCompletion = new ChatCompletion();
     chatCompletion.tokenBudget = promptManager.serviceSettings.openai_max_context - promptManager.serviceSettings.amount_gen;
+    if (power_user.console_log_prompts) chatCompletion.enableLogging();
 
-    const main = getMessageWithIndex('main');
-    const nsfw = getMessageWithIndex('nsfw');
-    const jailbreak = getMessageWithIndex('jailbreak');
-
-    const worldInfoBeforeMsg = new Message('system', formatWorldInfo(worldInfoBefore), 'worldInfoBefore');
-    const worldInfoAfterMsg = new Message('system', formatWorldInfo(worldInfoAfter), 'worldInfoAfter');
-    const charDescriptionMsg = new Message('system', substituteParams(charDescription), 'charDescription');
-    const charPersonalityMsg = new Message('system', `${name2}'s personality: ${substituteParams(charPersonality)}`, 'charPersonality');
-    const scenarioMsg = new Message('system', `Circumstances and context of the dialogue: ${substituteParams(Scenario)}`, 'scenario');
-
-    chatCompletion
-        .add(main.message, main.index)
-        .add(nsfw.message, nsfw.index)
-        .add(jailbreak.message, jailbreak.index)
-        .add(worldInfoBeforeMsg, promptIndex('worldInfoBefore'))
-        .add(worldInfoAfterMsg, promptIndex('worldInfoAfter'))
-        .add(charDescriptionMsg, promptIndex('charDescription'))
-        .add(charPersonalityMsg, promptIndex('charPersonality'))
-        .add(scenarioMsg, promptIndex('scenario'));
+    // Helper functions
+    const createMessageCollection = (role, content, identifier) => MessageCollection.fromPrompt(new Prompt({role, content, identifier}));
+    const addMessageToChatCompletion = (role, content, identifier) => {
+        chatCompletion.add(createMessageCollection(role, content, identifier), prompts.index(identifier));
+    };
+
+    addMessageToChatCompletion('system', formatWorldInfo(worldInfoBefore), 'worldInfoBefore');
+    addMessageToChatCompletion('system', formatWorldInfo(worldInfoAfter), 'worldInfoAfter');
+    addMessageToChatCompletion('system', substituteParams(charDescription), 'charDescription');
+    addMessageToChatCompletion('system', `${name2}'s personality: ${substituteParams(charPersonality)}`, 'charPersonality');
+    addMessageToChatCompletion('system', `Circumstances and context of the dialogue: ${substituteParams(Scenario)}`, 'scenario');
+
+    const optionalSystemPrompts = ['nsfw', 'jailbreak'];
+    const userPrompts = prompts.collection
+        .filter((prompt) => false === prompt.system_prompt)
+        .reduce((acc, prompt) => {
+            acc.push(prompt.identifier)
+            return acc;
+        }, []);
+
+    // Add optional prompts if they exist
+    [...optionalSystemPrompts, ...userPrompts].forEach(identifier => {
+        if (prompts.has(identifier)) {
+            chatCompletion.add(MessageCollection.fromPrompt(prompts.get(identifier)), prompts.index(identifier));
+        }
+    });
 
     // Chat History
-    const startNewChatPrompt = selected_group ? '[Start a new group chat. Group members: ${names}]' : '[Start a new Chat]';
-    chatCompletion.add(new Message('system', startNewChatPrompt, 'newMainChat' ), promptIndex('newMainChat'));
+    const startNewChat = selected_group ? '[Start a new group chat. Group members: ${names}]' : '[Start a new Chat]';
+    chatCompletion.add(new MessageCollection('chatHistory'), prompts.index('chatHistory'));
+    chatCompletion.insert(new Message('system', startNewChat, 'newMainChat'), 'chatHistory');
 
-    const chatHistoryIndex = promptIndex('chatHistory');
+    // Insert chat messages
     [...openai_msgs].reverse().forEach((prompt, index) => {
-        const message = new Message(prompt.role, prompt.content, 'chatHistory-' + index);
-        if (chatCompletion.canAfford(message)) chatCompletion.insert(message, chatHistoryIndex);
+        const chatMessage = new Message(prompt.role, prompt.content, 'chatHistory-' + index);
+        if (chatCompletion.canAfford(chatMessage)) {
+            chatCompletion.insert(chatMessage, 'chatHistory');
+        }
     });
 
+    // Insert chat message examples if there's enough budget
+    //ToDo
+
     const chat = chatCompletion.getChat();
     openai_messages_count = chat.filter(x => x.role === "user" || x.role === "assistant").length;
@@ -1058,8 +1078,8 @@ class TokenBudgetExceededError extends Error {
 }
 
 class Message {
-    identifier; role; content; tokens;
-    constructor(role, content, identifier = null) {
+    tokens; identifier; role; content;
+    constructor(role, content, identifier) {
         this.identifier = identifier;
         this.role = role;
         this.content = content;
@@ -1067,9 +1087,14 @@ class Message {
     }
 
     getTokens() {return this.tokens};
+
+    static fromPrompt(prompt) {
+        return new Message(prompt.role, prompt.content, prompt.identifier);
+    }
 }
 
-class MessageCollection extends Array {
+class MessageCollection {
+    collection = [];
     identifier;
     constructor(identifier, ...items) {
         for(let item of items) {
@@ -1077,12 +1102,17 @@ class MessageCollection extends Array {
                 throw new Error('Only Message and MessageCollection instances can be added to MessageCollection');
             }
         }
 
-        super(...items);
+        this.collection.push(...items);
         this.identifier = identifier;
     }
 
     getTokens() {
-        return this.reduce((tokens, message) => tokens + message.getTokens(), 0);
+        return this.collection.reduce((tokens, message) => tokens + message.getTokens(), 0);
     }
+
+    static fromPrompt(prompt) {
+        return new MessageCollection(prompt.identifier, Message.fromPrompt(prompt));
+    }
 }
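
As an aside, the reworked Message and MessageCollection pair now wraps an inner array instead of extending Array, and both gain a fromPrompt() factory, so a nested collection can report the combined token count of its members. A rough sketch of the intent, assuming the module scope of this file; the token values are stubbed by hand because their actual population happens outside these hunks.

const system = Message.fromPrompt(new Prompt({role: 'system', content: 'You are a helpful assistant.', identifier: 'main'}));
const greeting = new Message('assistant', 'Hello there.', 'chatHistory-0');
system.tokens = 9;    // stand-in values; how tokens are filled in is not part of these hunks
greeting.tokens = 4;

const history = new MessageCollection('chatHistory', system, greeting);
history.getTokens();  // 13, the sum over its members
history.identifier;   // 'chatHistory'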
@@ -1092,56 +1122,110 @@ class MessageCollection extends Array {
  *
  * @see https://platform.openai.com/docs/guides/gpt/chat-completions-api
  */
-const ChatCompletion = {
-    new() {
-        return {
-            tokenBudget: 4095,
-            messages: new MessageCollection(),
-
-            add(message, position = null) {
-                if (!(message instanceof Message)) throw Error('Invalid argument type');
-                if (!message.content) return this;
-                if (false === this.canAfford(message)) throw new TokenBudgetExceededError(message.identifier);
-                if (message instanceof MessageCollection) message.forEach(item => this.add(item, position));
-
-                if (position) this.messages[position] = message;
-                else this.messages.push(message);
-
-                this.tokenBudget -= message.getTokens();
-
-                this.log(`Added ${message.identifier}. Remaining tokens: ${this.tokenBudget}`);
-
-                return this;
-            },
-            insert(message, position) {
-                if (!(message instanceof Message)) throw Error('Invalid argument type');
-                if (!message.content) return this;
-                if (false === this.canAfford(message)) throw new TokenBudgetExceededError(message.identifier);
-
-                this.messages.splice(position, 0, message)
-
-                this.tokenBudget -= message.getTokens();
-
-                this.log(`Added ${message.identifier}. Remaining tokens: ${this.tokenBudget}`);
-            },
-            canAfford(message) {
-                return 0 < this.tokenBudget - message.getTokens();
-            },
-            log(output) {
-                if (power_user.console_log_prompts) console.log('[ChatCompletion] ' + output);
-            },
-            getTotalTokenCount() {
-                return this.messages.getTokens();
-            },
-            getChat() {
-                return this.messages.reduce((chat, message) => {
-                    if (message.content) chat.push({role: message.role, content: message.content});
-                    return chat;
-                }, []);
-            },
-        }
-    }
-};
+class ChatCompletion {
+    constructor() {
+        this.tokenBudget = 0;
+        this.messages = new MessageCollection();
+        this.loggingEnabled = false;
+    }
+
+    add(collection, position = null) {
+        this.validateMessageCollection(collection);
+        this.checkTokenBudget(collection, collection.identifier);
+
+        if (position) {
+            this.messages.collection[position] = collection;
+        } else {
+            this.messages.collection.push(collection);
+        }
+
+        this.decreaseTokenBudgetBy(collection.getTokens());
+
+        this.log(`Added ${collection.identifier}. Remaining tokens: ${this.tokenBudget}`);
+
+        return this;
+    }
+
+    insert(message, identifier) {
+        this.validateMessage(message);
+        this.checkTokenBudget(message, message.identifier);
+
+        const index = this.findMessageIndex(identifier);
+        if (message.content) {
+            this.messages.collection[index].collection.push(message);
+            this.decreaseTokenBudgetBy(message.getTokens());
+            this.log(`Added ${message.identifier}. Remaining tokens: ${this.tokenBudget}`);
+        }
+    }
+
+    canAfford(message) {
+        return 0 < this.tokenBudget - message.getTokens();
+    }
+
+    has(identifier) {
+        return this.messages.collection.some(message => message.identifier === identifier);
+    }
+
+    getTotalTokenCount() {
+        return this.messages.getTokens();
+    }
+
+    getChat() {
+        const chat = [];
+        for (let item of this.messages.collection) {
+            if (item instanceof MessageCollection) {
+                const messages = item.collection.reduce((acc, message) => {
+                    if (message.content) acc.push({role: message.role, content: message.content});
+                    return acc;
+                }, []);
+                chat.push(...messages);
+            }
+        }
+        return chat;
+    }
+
+    log(output) {
+        if (this.loggingEnabled) console.log('[ChatCompletion] ' + output);
+    }
+
+    enableLogging() {
+        this.loggingEnabled = true;
+    }
+
+    disableLogging() {
+        this.loggingEnabled = false;
+    }
+
+    // Move validation to its own method for readability
+    validateMessageCollection(collection) {
+        if (!(collection instanceof MessageCollection)) {
+            throw new Error('Argument must be an instance of MessageCollection');
+        }
+    }
+
+    validateMessage(message) {
+        if (!(message instanceof Message)) {
+            throw new Error('Argument must be an instance of Message');
+        }
+    }
+
+    checkTokenBudget(message, identifier) {
+        if (!this.canAfford(message)) {
+            throw new TokenBudgetExceededError(identifier);
+        }
+    }
+
+    decreaseTokenBudgetBy(tokens) {
+        this.tokenBudget -= tokens;
+    }
+
+    findMessageIndex(identifier) {
+        const index = this.messages.collection.findIndex(item => item?.identifier === identifier);
+        if (index < 0) {
+            throw new IdentifierNotFoundError(identifier);
+        }
+        return index;
+    }
+}
 
 export function getTokenizerModel() {
     // OpenAI models always provide their own tokenizer
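
Taken together, the reworked ChatCompletion is driven the way prepareOpenAIMessages above drives it: reserve one MessageCollection per prompt, then insert individual messages into a named collection while the token budget allows it. A condensed sketch, again assuming the module scope of this file and Message token counts that are already populated.

const completion = new ChatCompletion();
completion.tokenBudget = 4096;
completion.enableLogging();

// In the real code the position comes from prompts.index(identifier).
completion.add(MessageCollection.fromPrompt(new Prompt({role: 'system', content: 'You are a helpful assistant.', identifier: 'main'})), 0);
completion.add(new MessageCollection('chatHistory'), 1);

const userMessage = new Message('user', 'Hello!', 'chatHistory-0');
if (completion.canAfford(userMessage)) completion.insert(userMessage, 'chatHistory');

completion.getTotalTokenCount(); // tokens consumed so far
completion.getChat();            // flattened [{role, content}, ...] ready for the API payload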