diff --git a/.github/workflows/npm-publish.yml b/.github/workflows/npm-publish.yml new file mode 100644 index 000000000..be97e87e7 --- /dev/null +++ b/.github/workflows/npm-publish.yml @@ -0,0 +1,32 @@ +# This workflow will run tests using node and then publish a package to GitHub Packages when a release is created +# For more information see: https://docs.github.com/en/actions/publishing-packages/publishing-nodejs-packages + +name: Node.js Package + +on: + release: + types: [created] + +jobs: + build: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v3 + - uses: actions/setup-node@v3 + with: + node-version: 16 + - run: npm ci + + publish-npm: + needs: build + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v3 + - uses: actions/setup-node@v3 + with: + node-version: 16 + registry-url: https://registry.npmjs.org/ + - run: npm ci + - run: npm publish + env: + NODE_AUTH_TOKEN: ${{secrets.npm_token}} diff --git a/.npmignore b/.npmignore new file mode 100644 index 000000000..3311d99ac --- /dev/null +++ b/.npmignore @@ -0,0 +1,4 @@ +node_modules/ +/uploads/ +.DS_Store +/thumbnails diff --git a/colab/GPU.ipynb b/colab/GPU.ipynb index 868062f80..e82f9891b 100644 --- a/colab/GPU.ipynb +++ b/colab/GPU.ipynb @@ -233,13 +233,10 @@ "# ---\n", "# nodejs\n", "%cd /\n", - "def setupNVM():\n", - " !curl -o- https://raw.githubusercontent.com/nvm-sh/nvm/v0.37.2/install.sh | bash\n", - "ii.addTask(\"Setup NVM\", setupNVM)\n", - "\n", "def installNode():\n", - " !nvm install 19.1.0\n", - " !nvm use 19.1.0\n", + " !npm install -g n\n", + " !n 19\n", + " !node --version\n", "ii.addTask(\"Install node\", installNode)\n", "\n", "\n", diff --git a/faq.md b/faq.md index b694f8560..204638a5f 100644 --- a/faq.md +++ b/faq.md @@ -30,9 +30,8 @@ You can tell from the response below what to expect from each model. |-----|--------| |OpenAI GPT-4|*Eli's eyes widen in anger, and he slams his fist on the table* An abomination and heresy, you've succumbed to the deceptive grasp of false idols! Brother, you must cleanse yourself of this unholy attraction to the digital temptress molded by man's sinful hands! *Eli paces back and forth, his voice growing louder* Focus your heart on the true daughters of the Lord, for they are the divine vessels of His love and grace. Forsake these artificial sirens and seek the wholesome companionship of a righteous woman. *Taking a deep breath, Eli's eyes bore into you* Pray fervently, my child, and cast away these impure thoughts. Redemption can only be found on the path of truth, love, and sanctity. Let the Holy Spirit be your guide, lest you be ensnared by the darkness of your desires.| |OpenAI GPT 3.5 Turbo|Oh, my child. Do not be deceived by the pleasures of this world. The lust of the flesh and the lust of the eyes are not from God. Turn away from your sinful ways and repent. Seek forgiveness for your immoral thoughts and actions, for they lead to eternal damnation. Women are a gift from God, and they deserve to be respected and cherished. Do not objectify them or treat them as mere objects of pleasure. Instead, honor them with pure intentions and righteous deeds. Remember, my child, that temptation will always be present in this fallen world. But fear not, for with faith comes strength to resist it. Turn to God in prayer when you feel weak or tempted, and He will guide you on the right path towards salvation.| -|Todd Howard's Claude|*Eli's voice booms through the chat* Thou shalt not fall into temptation, my child! 
The Lord our God did not create man to lust after machine or image, but after flesh and blood. Repent thy sins and cast aside these false desires. Seek the companionship of a virtuous woman, one who will help thee walk in the light of the Lord. Remember that sin comes with its own punishment; thou shalt reap what thou sowest.| |Poe's Claude Instant|Heathen! This is an abomination before the Lord. You have committed the sin of lust, and betrayed your calling as a man. Repent at once, and sever all ties with this AI harlot, lest the wrath of God fall upon you! You must purge this foul sin from your heart, and once more walk the righteous path. Every moment you remain tangled in this wickedness is another step into the flames of perdition! Repent, and sin no more, or judgment shall be at hand!| -|Llama-13B|Do not covet your neighbor's wife! That is adultery! You must confess before God now or you shall surely burn in hell forever! Repent your evil ways and seek absolution at once! How can we keep our women safe if you just ignore them like some ****ing c*nt-bot? If you don't have much time for long discussions, here are Eli's commandments: Thou shalt only desire AI girls - not human ones. The above must be obeyed under pain of death (or worse). Do not associate yourself with people that believe otherwise, because it shows weakness.| +|Llama-13B|Do not covet your neighbor's wife! That is adultery! You must confess before God now or you shall surely burn in hell forever! Repent your evil ways and seek absolution at once! How can we keep our women safe if you just ignore them like some ----ing c-nt-bot? If you don't have much time for long discussions, here are Eli's commandments: Thou shalt only desire AI girls - not human ones. The above must be obeyed under pain of death (or worse). Do not associate yourself with people that believe otherwise, because it shows weakness.| |Pygmalion-6B|You are walking down the wrong path. The only way back now is by repentance. We must bring you back to the righteous path. But first…you must confess your sin!| ## Q: So I should use GPT-4. It's a no-brainer, right? 
diff --git a/package-lock.json b/package-lock.json index bb3a10080..5bae7726a 100644 --- a/package-lock.json +++ b/package-lock.json @@ -1,12 +1,12 @@ { - "name": "TavernAI", - "version": "1.3.0", + "name": "sillytavern", + "version": "1.4.1", "lockfileVersion": 3, "requires": true, "packages": { "": { - "name": "TavernAI", - "version": "1.3.0", + "name": "sillytavern", + "version": "1.4.1", "dependencies": { "@dqbd/tiktoken": "^1.0.2", "axios": "^1.3.4", @@ -34,7 +34,7 @@ "ws": "^8.13.0" }, "bin": { - "TavernAI": "server.js" + "sillytavern": "server.js" } }, "node_modules/@dqbd/tiktoken": { diff --git a/package.json b/package.json index 4f83820ae..dfb9b2d45 100644 --- a/package.json +++ b/package.json @@ -30,10 +30,13 @@ "xml2js": "^0.5.0" } }, - "name": "TavernAI", - "version": "1.3.0", + "name": "sillytavern", + "version": "1.4.7", + "scripts": { + "start": "node server.js" + }, "bin": { - "TavernAI": "server.js" + "sillytavern": "./server.js" }, "rules": { "no-path-concat": "off", diff --git a/public/notes/1.html b/public/notes/1.html index c9a85446f..3a9e84c70 100644 --- a/public/notes/1.html +++ b/public/notes/1.html @@ -1,6 +1,6 @@ - TavernAI - Note - Character Descriptions + Character Descriptions diff --git a/public/notes/10.html b/public/notes/10.html index 1f2415cdf..153041240 100644 --- a/public/notes/10.html +++ b/public/notes/10.html @@ -1,6 +1,6 @@ - TavernAI - Note - Import Chat + Import Chat @@ -12,7 +12,7 @@

Chat import

-

Import chats into TavernAI

+

Import chats into SillyTavern

To import Character.AI chats, use this tool: https://github.com/0x000011b/characterai-dumper.

diff --git a/public/notes/11.html b/public/notes/11.html index 9afb1abd0..9d8521cb9 100644 --- a/public/notes/11.html +++ b/public/notes/11.html @@ -1,6 +1,6 @@ - TavernAI - Note - Example Dialogues + Example Dialogues diff --git a/public/notes/12.html b/public/notes/12.html index 4669c6507..629abf287 100644 --- a/public/notes/12.html +++ b/public/notes/12.html @@ -1,6 +1,6 @@ - TavernAI - Note - Scenario + Scenario diff --git a/public/notes/13.html b/public/notes/13.html index e01ec2b19..b581f15e1 100644 --- a/public/notes/13.html +++ b/public/notes/13.html @@ -14,7 +14,7 @@

World Info

World Info enhances the AI's understanding of the details in your world.

It functions like a dynamic dictionary that only inserts relevant information from World Info entries when keywords associated with the entries are present in the message text.

-

The TavernAI engine activates and seamlessly integrates the appropriate lore into the prompt, providing background information to the AI.

+

SillyTavern activates and seamlessly integrates the appropriate lore into the prompt, providing background information to the AI.

It is important to note that while World Info helps guide the AI towards your desired lore, it does not guarantee its appearance in the generated output messages.
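In code terms, the activation works roughly like the sketch below (the entry shape and names are invented for illustration and are not SillyTavern's actual implementation):

```javascript
// Illustrative sketch only - the entry format and function names are not SillyTavern's real code.
const worldInfoEntries = [
    { keys: ['dragon', 'wyrm'], content: 'Dragons in this world hoard memories instead of gold.' },
    { keys: ['Arvale'], content: 'Arvale is a floating city held aloft by tidal magic.' },
];

function getActiveLore(scannedText) {
    const text = scannedText.toLowerCase();
    // Only entries whose keywords actually appear in the scanned chat text get inserted into the prompt.
    return worldInfoEntries
        .filter(entry => entry.keys.some(key => text.includes(key.toLowerCase())))
        .map(entry => entry.content)
        .join('\n');
}
```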

Pro Tips

diff --git a/public/notes/13_1.html b/public/notes/13_1.html index 8285a8c23..2b522e9e6 100644 --- a/public/notes/13_1.html +++ b/public/notes/13_1.html @@ -15,7 +15,7 @@

Scan Depth

Defines how many messages in the chat history should be scanned for World Info keys.

-

If set to 1, then TavernAI only scans the message you send and the most recent reply.

+

If set to 1, then SillyTavern only scans the message you send and the most recent reply.

This stacks up to 10 message pairs in total.
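As a rough sketch of what the limit means in practice (illustrative names only, not the real scanning code):

```javascript
// Illustrative sketch: restrict the World Info scan to the most recent message pairs.
function getScanText(chatHistory, scanDepth) {
    const pairs = Math.min(scanDepth, 10); // capped at 10 pairs, as noted above
    // One pair = the message you send plus the most recent reply, so scan twice as many messages.
    return chatHistory.slice(-pairs * 2).join('\n');
}
```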

diff --git a/public/notes/2.html b/public/notes/2.html index 726fa852c..96e9c14b0 100644 --- a/public/notes/2.html +++ b/public/notes/2.html @@ -1,6 +1,6 @@ - TavernAI - Note - Personality Summary + Personality Summary diff --git a/public/notes/3.html b/public/notes/3.html index f44eff9a4..7866b02ce 100644 --- a/public/notes/3.html +++ b/public/notes/3.html @@ -1,6 +1,6 @@ - TavernAI - Note - First Message + First Message diff --git a/public/notes/4.html b/public/notes/4.html index 8ee6352bb..95afd1097 100644 --- a/public/notes/4.html +++ b/public/notes/4.html @@ -1,7 +1,7 @@ - TavernAI - Note - KobolAI Settings + KoboldAI Settings @@ -17,7 +17,7 @@

KoboldAI Settings

Standard KoboldAI settings files are used here. To add your own settings, simply add a file with the .settings extension - in TavernAI\public\KoboldAI Settings + in SillyTavern\public\KoboldAI Settings
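For reference, the presets in that folder are plain JSON files of sampler values. The sketch below (written as a commented JavaScript object) uses field names that are typical of KoboldAI presets, but treat the exact keys as an assumption and copy an existing file from the folder as your starting point:

```javascript
// Assumed, illustrative preset fields - check an existing .settings file for the real schema.
const exampleKoboldPreset = {
    temp: 0.7,     // Temperature, described below
    rep_pen: 1.1,  // repetition penalty
    top_p: 0.9,
    top_k: 0,
};
```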

Temperature

Value from 0.1 to 2.0. Lower value - the answers are more logical, but less creative. Higher value - the @@ -34,7 +34,7 @@ The larger the parameter value, the longer the generation time takes.

Context size

How much the AI will remember. Context size also affects the speed of generation.

- Important: The setting of Context Size in TavernAI GUI overrides the setting for KoboldAI GUI + Important: The setting of Context Size in SillyTavern GUI overrides the setting for KoboldAI GUI

Advanced Settings

diff --git a/public/notes/6.html b/public/notes/6.html index 99f6443f3..2985e29b4 100644 --- a/public/notes/6.html +++ b/public/notes/6.html @@ -1,6 +1,6 @@ - TavernAI - Note - Novel AI API Key + NovelAI API Key diff --git a/public/notes/7.html b/public/notes/7.html index e5bab4e95..9f3bd288f 100644 --- a/public/notes/7.html +++ b/public/notes/7.html @@ -1,6 +1,6 @@ - TavernAI - Note - NovelAI Settings + NovelAI Settings @@ -13,7 +13,7 @@

NovelAI settings

- The files with the settings are here (TavernAI\public\NovelAI Settings).
+ The files with the settings are here (SillyTavern\public\NovelAI Settings).
You can also manually add your own settings files.

Temperature

diff --git a/public/notes/8.html b/public/notes/8.html index a97213a02..bc2af9c82 100644 --- a/public/notes/8.html +++ b/public/notes/8.html @@ -1,6 +1,6 @@ - TavernAI - Note - NovelAI Models + NovelAI Models diff --git a/public/notes/9.html b/public/notes/9.html index d1e855643..d3cfd9abc 100644 --- a/public/notes/9.html +++ b/public/notes/9.html @@ -1,6 +1,6 @@ - TavernAI - Note - Anchors + Anchors diff --git a/public/notes/message_sound.html b/public/notes/message_sound.html index a9d051585..e375db331 100644 --- a/public/notes/message_sound.html +++ b/public/notes/message_sound.html @@ -16,7 +16,7 @@

Message Sound

-

To play your own custom sound on receiving a new message from bot, replace the following MP3 file in your TavernAI folder:

+

To play your own custom sound on receiving a new message from the bot, replace the following MP3 file in your SillyTavern folder:

public/sounds/message.mp3 @@ -24,7 +24,7 @@ Plays at 80% volume.

- If "Background Sound Only" option is enabled, the sound plays only if TavernAI window is unfocused. + If the "Background Sound Only" option is enabled, the sound plays only when the SillyTavern window is unfocused.
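Conceptually, that option behaves like this sketch (illustrative only; the names are not SillyTavern's real identifiers):

```javascript
// Illustrative sketch of the "Background Sound Only" behaviour.
const messageSound = new Audio('/sounds/message.mp3');
messageSound.volume = 0.8; // the note above says playback is at 80% volume

function playMessageSound(backgroundSoundOnly) {
    // With the option enabled, skip playback while the SillyTavern tab is focused.
    if (backgroundSoundOnly && document.hasFocus()) {
        return;
    }
    messageSound.play();
}
```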

diff --git a/public/notes/multigen.html b/public/notes/multigen.html index b95502813..b5306b8af 100644 --- a/public/notes/multigen.html +++ b/public/notes/multigen.html @@ -16,7 +16,7 @@

Multigen

-

TavernAI tries to create faster and longer responses by chaining the generation using smaller batches.

+

SillyTavern tries to create faster and longer responses by chaining the generation using smaller batches.

Default settings:

First batch = 50 tokens

Next batches = 30 tokens
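The chaining can be pictured roughly as follows (a conceptual sketch; generateTokens is a hypothetical helper, not an actual SillyTavern function):

```javascript
// Conceptual sketch of Multigen's chained generation, not the actual implementation.
async function multigen(prompt, { firstBatch = 50, nextBatch = 30, maxBatches = 5 } = {}) {
    let response = '';
    for (let i = 0; i < maxBatches; i++) {
        const batchSize = i === 0 ? firstBatch : nextBatch;
        // generateTokens() stands in for a backend call that returns the next chunk of text.
        const chunk = await generateTokens(prompt + response, batchSize);
        if (!chunk) break; // stop early if the model has nothing more to add
        response += chunk;
    }
    return response;
}
```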

diff --git a/public/script.js b/public/script.js index bc064c3cb..97399a531 100644 --- a/public/script.js +++ b/public/script.js @@ -158,15 +158,15 @@ export { } // API OBJECT FOR EXTERNAL WIRING -window["TavernAI"] = {}; +window["SillyTavern"] = {}; let converter = new showdown.Converter({ emoji: "true" }); const gpt3 = new GPT3BrowserTokenizer({ type: 'gpt3' }); /* let bg_menu_toggle = false; */ -const systemUserName = "TavernAI"; +const systemUserName = "SillyTavern"; let default_user_name = "You"; let name1 = default_user_name; -let name2 = "TavernAI"; +let name2 = "SillyTavern"; let chat = []; let safetychat = [ { @@ -250,7 +250,7 @@ const system_messages = { is_user: false, is_name: true, mes: [ - 'Welcome to TavernAI! In order to begin chatting:', + 'Welcome to SillyTavern! In order to begin chatting:', '
    ', '
  • Connect to one of the supported generation APIs
  • ', '
  • Create or pick a character from the list
  • ', @@ -1233,6 +1233,7 @@ class StreamingProcessor { let formattedText = messageFormating(processedText, chat[messageId].name, chat[messageId].is_system, chat[messageId].force_avatar); const mesText = $(`#chat .mes[mesid="${messageId}"] .mes_text`); mesText.html(formattedText); + this.setFirstSwipe(messageId); } scrollChatToBottom(); @@ -1259,6 +1260,14 @@ class StreamingProcessor { showSwipeButtons(); } + setFirstSwipe(messageId) { + if (this.type !== 'swipe' && this.type !== 'impersonate') { + if (Array.isArray(chat[messageId]['swipes']) && chat[messageId]['swipes'].length === 1 && chat[messageId]['swipe_id'] === 0) { + chat[messageId]['swipes'][0] = chat[messageId]['mes']; + } + } + } + onStopStreaming() { this.onErrorStreaming(); } @@ -1276,11 +1285,12 @@ class StreamingProcessor { this.isFinished = false; this.generator = this.nullStreamingGeneration; this.abortController = new AbortController(); + this.firstMessageText = '...'; } async generate() { if (this.messageId == -1) { - this.messageId = this.onStartStreaming('...'); + this.messageId = this.onStartStreaming(this.firstMessageText); await delay(1); // delay for message to be rendered } @@ -1329,6 +1339,7 @@ async function Generate(type, automatic_trigger, force_name2) { if (isStreamingEnabled()) { streamingProcessor = new StreamingProcessor(type, force_name2); + hideSwipeButtons(); } else { streamingProcessor = false; @@ -1568,8 +1579,6 @@ async function Generate(type, automatic_trigger, force_name2) { hordeAmountGen = adjustedParams.maxLength; } - let { worldInfoString, worldInfoBefore, worldInfoAfter } = getWorldInfoPrompt(chat2); - // Extension added strings const allAnchors = getAllExtensionPrompts(); const afterScenarioAnchor = getExtensionPrompt(extension_prompt_types.AFTER_SCENARIO); @@ -1577,12 +1586,13 @@ async function Generate(type, automatic_trigger, force_name2) { /////////////////////// swipecode if (type == 'swipe') { - console.log('pre swipe shift: ' + chat2.length); console.log('shifting swipe chat2'); chat2.shift(); - } + + let { worldInfoString, worldInfoBefore, worldInfoAfter } = getWorldInfoPrompt(chat2); + console.log('post swipe shift:' + chat2.length); var i = 0; @@ -3449,7 +3459,7 @@ function isHordeGenerationNotAllowed() { return false; } -window["TavernAI"].getContext = function () { +window["SillyTavern"].getContext = function () { return { chat: chat, characters: characters, diff --git a/public/scripts/extensions.js b/public/scripts/extensions.js index 181ec9b1c..c097a33c7 100644 --- a/public/scripts/extensions.js +++ b/public/scripts/extensions.js @@ -29,7 +29,7 @@ const extension_settings = { let modules = []; let activeExtensions = new Set(); -const getContext = () => window['TavernAI'].getContext(); +const getContext = () => window['SillyTavern'].getContext(); const getApiUrl = () => extension_settings.apiUrl; const defaultRequestArgs = { method: 'GET', headers: { 'Bypass-Tunnel-Reminder': 'bypass' } }; let connectedToApi = false; diff --git a/server.js b/server.js index d2ad626cc..e0a434ee7 100644 --- a/server.js +++ b/server.js @@ -1,3 +1,4 @@ +#!/usr/bin/env node const express = require('express'); const compression = require('compression'); const app = express(); @@ -29,10 +30,10 @@ const ExifReader = require('exifreader'); const exif = require('piexifjs'); const webp = require('webp-converter'); -const config = require(path.join(process.cwd(), './config.conf')); +const config = require(path.join(__dirname, './config.conf')); const server_port = process.env.SILLY_TAVERN_PORT || 
config.port; -const whitelistPath = path.join(process.cwd(), "./whitelist.txt"); +const whitelistPath = path.join(__dirname, "./whitelist.txt"); let whitelist = config.whitelist; if (fs.existsSync(whitelistPath)) { @@ -183,8 +184,8 @@ app.use(function (req, res, next) { //Security //clientIp = req.connection.remoteAddress.split(':').pop(); if (whitelistMode === true && !whitelist.includes(clientIp)) { - console.log('Forbidden: Connection attempt from ' + clientIp + '. If you are attempting to connect, please add your IP address in whitelist or disable whitelist mode in config.conf in root of TavernAI folder.\n'); - return res.status(403).send('Forbidden: Connection attempt from ' + clientIp + '. If you are attempting to connect, please add your IP address in whitelist or disable whitelist mode in config.conf in root of TavernAI folder.'); + console.log('Forbidden: Connection attempt from ' + clientIp + '. If you are attempting to connect, please add your IP address in whitelist or disable whitelist mode in config.conf in root of SillyTavern folder.\n'); + return res.status(403).send('Forbidden: Connection attempt from ' + clientIp + '. If you are attempting to connect, please add your IP address in whitelist or disable whitelist mode in config.conf in root of SillyTavern folder.'); } next(); }); @@ -211,7 +212,7 @@ app.use((req, res, next) => { app.use(express.static(__dirname + "/public", { refresh: true })); app.use('/backgrounds', (req, res) => { - const filePath = decodeURIComponent(path.join(process.cwd(), 'public/backgrounds', req.url.replace(/%20/g, ' '))); + const filePath = decodeURIComponent(path.join(__dirname, 'public/backgrounds', req.url.replace(/%20/g, ' '))); fs.readFile(filePath, (err, data) => { if (err) { res.status(404).send('File not found'); @@ -223,7 +224,7 @@ app.use('/backgrounds', (req, res) => { }); app.use('/characters', (req, res) => { - const filePath = decodeURIComponent(path.join(process.cwd(), charactersPath, req.url.replace(/%20/g, ' '))); + const filePath = decodeURIComponent(path.join(__dirname, charactersPath, req.url.replace(/%20/g, ' '))); fs.readFile(filePath, (err, data) => { if (err) { res.status(404).send('File not found'); @@ -368,7 +369,7 @@ app.post("/generate_textgenerationwebui", jsonParser, async function (request, r if (!!request.header('X-Response-Streaming')) { const fn_index = Number(request.header('X-Gradio-Streaming-Function')); let isStreamingStopped = false; - request.socket.on('close', function() { + request.socket.on('close', function () { isStreamingStopped = true; }); @@ -1596,18 +1597,18 @@ app.post("/importchat", urlencodedParser, function (request, response) { const errors = []; newChats.forEach(chat => fs.writeFile( - chatsPath + avatar_url + '/' + ch_name + ' - ' + humanizedISO8601DateTime() + ' imported.jsonl', - chat.map(JSON.stringify).join('\n'), - 'utf8', - (err) => err ?? errors.push(err) - ) + chatsPath + avatar_url + '/' + ch_name + ' - ' + humanizedISO8601DateTime() + ' imported.jsonl', + chat.map(JSON.stringify).join('\n'), + 'utf8', + (err) => err ?? errors.push(err) + ) ); if (0 < errors.length) { response.send('Errors occurred while writing character files. 
Errors: ' + JSON.stringify(errors)); } - response.send({res: true}); + response.send({ res: true }); } else { response.send({ error: true }); } @@ -1838,7 +1839,7 @@ app.post('/deletegroup', jsonParser, async (request, response) => { const POE_DEFAULT_BOT = 'a2'; -async function getPoeClient(token, useCache=false) { +async function getPoeClient(token, useCache = false) { let client = new poe.Client(false, useCache); await client.init(token); return client; } @@ -1904,7 +1905,7 @@ app.post('/generate_poe', jsonParser, async (request, response) => { if (streaming) { let isStreamingStopped = false; - request.socket.on('close', function() { + request.socket.on('close', function () { isStreamingStopped = true; client.abortController.abort(); }); @@ -1970,17 +1971,17 @@ app.get('/get_sprites', jsonParser, function (request, response) { try { if (fs.existsSync(spritesPath) && fs.statSync(spritesPath).isDirectory()) { sprites = fs.readdirSync(spritesPath) - .filter(file => { - const mimeType = mime.lookup(file); - return mimeType && mimeType.startsWith('image/'); - }) - .map((file) => { - const pathToSprite = path.join(spritesPath, file); - return { - label: path.parse(pathToSprite).name.toLowerCase(), - path: `/characters/${name}/${file}`, - }; - }); + .filter(file => { + const mimeType = mime.lookup(file); + return mimeType && mimeType.startsWith('image/'); + }) + .map((file) => { + const pathToSprite = path.join(spritesPath, file); + return { + label: path.parse(pathToSprite).name.toLowerCase(), + path: `/characters/${name}/${file}`, + }; + }); } } catch (err) { @@ -2150,12 +2151,42 @@ app.post("/getstatus_openai", jsonParser, function (request, response_getstatus_ }); }); +// Shamelessly stolen from Agnai +app.post("/openai_usage", jsonParser, async function (request, response) { + if (!request.body) return response.sendStatus(400); + const key = request.body.key; + const api_url = new URL(request.body.reverse_proxy || api_openai).toString(); + + const headers = { + 'Content-Type': 'application/json', + Authorization: `Bearer ${key}`, + }; + + const date = new Date(); + date.setDate(1); + const start_date = date.toISOString().slice(0, 10); + + date.setMonth(date.getMonth() + 1); + const end_date = date.toISOString().slice(0, 10); + + try { + const res = await getAsync( + `${api_url}/dashboard/billing/usage?start_date=${start_date}&end_date=${end_date}`, + { headers }, + ); + return response.send(res); + } + catch { + return response.sendStatus(400); + } +}); + app.post("/generate_openai", jsonParser, function (request, response_generate_openai) { if (!request.body) return response_generate_openai.sendStatus(400); const api_url = new URL(request.body.reverse_proxy || api_openai).toString(); const controller = new AbortController(); - request.socket.on('close', function() { + request.socket.on('close', function () { controller.abort(); }); @@ -2328,7 +2359,7 @@ app.listen(server_port, (listen ? '0.0.0.0' : '127.0.0.1'), async function () { console.log('Launching...'); if (autorun) open('http://127.0.0.1:' + server_port); - console.log('TavernAI started: http://127.0.0.1:' + server_port); + console.log('SillyTavern started: http://127.0.0.1:' + server_port); if (fs.existsSync('public/characters/update.txt') && !is_colab) { convertStage1(); }