Merge branch 'staging' of https://github.com/Cohee1207/SillyTavern into staging

RossAscends
2023-09-16 12:50:11 +09:00
30 changed files with 701 additions and 1049 deletions


@ -41,8 +41,6 @@ SillyTavern 本身并无用处,因为它只是一个用户聊天界面。你
<https://rentry.org/STAI-Termux> <https://rentry.org/STAI-Termux>
Termux 不支持**.Webp 字符卡的导入/导出。请使用 JSON 或 PNG 格式**。
## 有问题或建议? ## 有问题或建议?
### 我们现在有了 Discord 社区 ### 我们现在有了 Discord 社区

.github/readme.md (3 changed lines)

@@ -41,8 +41,6 @@ Since Tavern is only a user interface, it has tiny hardware requirements, it will
 <https://rentry.org/STAI-Termux>
-**.webp character cards import/export is not supported in Termux. Use either JSON or PNG formats instead.**
 ## Questions or suggestions?
 ### We now have a community Discord server
@@ -71,7 +69,6 @@ Get in touch with the developers directly:
 * [Oobabooga's TextGen WebUI](https://github.com/oobabooga/text-generation-webui) API connection
 * [AI Horde](https://horde.koboldai.net/) connection
 * Prompt generation formatting tweaking
-* webp character card interoperability (PNG is still an internal format)
 ## Extensions

.github/workflows/docker-publish.yml (new file, 45 added lines)

@@ -0,0 +1,45 @@
+# This workflow will publish a docker image for every full release to the GitHub package repository
+name: Create Docker Image on Release
+
+on:
+  release:
+    # Only runs on full releases not pre releases
+    types: [released]
+
+env:
+  # This should allow creation of docker images even in forked repositories
+  # Image name may not contain uppercase characters, so we can not use the repository name
+  # Creates a string like: ghcr.io/SillyTavern/sillytavern
+  image_name: ghcr.io/${{ github.repository_owner }}/sillytavern
+
+jobs:
+  build:
+    runs-on: ubuntu-latest
+
+    steps:
+      - name: Checkout
+        uses: actions/checkout@v3
+
+      # Build docker image using dockerfile and tag it with branch name
+      # Assumes branch name is the version number
+      - name: Build the Docker image
+        run: |
+          docker build . --file Dockerfile --tag $image_name:${{ github.ref_name }}
+
+      # Login into package repository as the person who created the release
+      - name: Login to GitHub Container Registry
+        uses: docker/login-action@v1
+        with:
+          registry: ghcr.io
+          username: ${{ github.actor }}
+          password: ${{ secrets.GITHUB_TOKEN }}
+
+      # Assumes release is the latest and marks image as such
+      - name: Docker Tag and Push
+        run: |
+          docker tag $image_name:${{ github.ref_name }} $image_name:latest
+          docker push $image_name:${{ github.ref_name }}
+          docker push $image_name:latest


@@ -13,7 +13,7 @@ ENTRYPOINT [ "tini", "--" ]
 WORKDIR ${APP_HOME}
 # Install app dependencies
-COPY package*.json ./
+COPY package*.json post-install.js ./
 RUN \
 echo "*** Install npm packages ***" && \
 npm install && npm cache clean --force


@@ -75,9 +75,6 @@
 "always_force_name2": true,
 "user_prompt_bias": "",
 "show_user_prompt_bias": true,
-"multigen": false,
-"multigen_first_chunk": 50,
-"multigen_next_chunks": 30,
 "markdown_escape_strings": "",
 "fast_ui_mode": false,
 "avatar_style": 0,

package-lock.json (generated, 35 changed lines)

@@ -1,12 +1,12 @@
 {
 "name": "sillytavern",
-"version": "1.10.3",
+"version": "1.10.4",
 "lockfileVersion": 3,
 "requires": true,
 "packages": {
 "": {
 "name": "sillytavern",
-"version": "1.10.3",
+"version": "1.10.4",
 "hasInstallScript": true,
 "license": "AGPL-3.0",
 "dependencies": {
@@ -21,7 +21,6 @@
 "cors": "^2.8.5",
 "csrf-csrf": "^2.2.3",
 "device-detector-js": "^3.0.3",
-"exifreader": "^4.12.0",
 "express": "^4.18.2",
 "google-translate-api-browser": "^3.0.1",
 "gpt3-tokenizer": "^1.1.5",
@@ -35,7 +34,6 @@
 "multer": "^1.4.5-lts.1",
 "node-fetch": "^2.6.11",
 "open": "^8.4.2",
-"piexifjs": "^1.0.6",
 "png-chunk-text": "^1.0.0",
 "png-chunks-encode": "^1.0.0",
 "png-chunks-extract": "^1.0.0",
@@ -45,7 +43,6 @@
 "simple-git": "^3.19.1",
 "uniqolor": "^1.1.0",
 "vectra": "^0.2.2",
-"webp-converter": "2.3.2",
 "write-file-atomic": "^5.0.1",
 "ws": "^8.13.0",
 "yargs": "^17.7.1",
@@ -968,15 +965,6 @@
 "integrity": "sha512-9E61voMP4+Rze02jlTXud++Htpjyyk8vw5Hyw9FGRrmhHQg2GqbuOfwf5Klrb8vTxc2XWI3EfO7RUHMpxTj26A==",
 "peer": true
 },
-"node_modules/@xmldom/xmldom": {
-"version": "0.8.9",
-"resolved": "https://registry.npmjs.org/@xmldom/xmldom/-/xmldom-0.8.9.tgz",
-"integrity": "sha512-4VSbbcMoxc4KLjb1gs96SRmi7w4h1SF+fCoiK0XaQX62buCc1G5d0DC5bJ9xJBNPDSVCmIrcl8BiYxzjrqaaJA==",
-"optional": true,
-"engines": {
-"node": ">=10.0.0"
-}
-},
 "node_modules/accepts": {
 "version": "1.3.8",
 "resolved": "https://registry.npmjs.org/accepts/-/accepts-1.3.8.tgz",
@@ -1793,15 +1781,6 @@
 "resolved": "https://registry.npmjs.org/exif-parser/-/exif-parser-0.1.12.tgz",
 "integrity": "sha512-c2bQfLNbMzLPmzQuOr8fy0csy84WmwnER81W88DzTp9CYNPJ6yzOj2EZAh9pywYpqHnshVLHQJ8WzldAyfY+Iw=="
 },
-"node_modules/exifreader": {
-"version": "4.13.0",
-"resolved": "https://registry.npmjs.org/exifreader/-/exifreader-4.13.0.tgz",
-"integrity": "sha512-IhJBpyXDLbCdgzVHkthadOvrMiZOR2XS7POVp0b5JoVfScRoCJ6YazZ+stTkbDTE5TRTP44bE5RKsujckAs45Q==",
-"hasInstallScript": true,
-"optionalDependencies": {
-"@xmldom/xmldom": "^0.8.8"
-}
-},
 "node_modules/expand-template": {
 "version": "2.0.3",
 "resolved": "https://registry.npmjs.org/expand-template/-/expand-template-2.0.3.tgz",
@@ -3097,11 +3076,6 @@
 "url": "https://github.com/sponsors/jonschlinkert"
 }
 },
-"node_modules/piexifjs": {
-"version": "1.0.6",
-"resolved": "https://registry.npmjs.org/piexifjs/-/piexifjs-1.0.6.tgz",
-"integrity": "sha512-0wVyH0cKohzBQ5Gi2V1BuxYpxWfxF3cSqfFXfPIpl5tl9XLS5z4ogqhUCD20AbHi0h9aJkqXNJnkVev6gwh2ag=="
-},
 "node_modules/pixelmatch": {
 "version": "4.0.2",
 "resolved": "https://registry.npmjs.org/pixelmatch/-/pixelmatch-4.0.2.tgz",
@@ -4183,11 +4157,6 @@
 "resolved": "https://registry.npmjs.org/webidl-conversions/-/webidl-conversions-3.0.1.tgz",
 "integrity": "sha512-2JAn3z8AR6rjK8Sm8orRC0h/bcl/DqL7tRPdGZ4I1CjdF+EaMLmYxBHyXuKL849eucPFhvBoxMsflfOb8kxaeQ=="
 },
-"node_modules/webp-converter": {
-"version": "2.3.2",
-"resolved": "https://registry.npmjs.org/webp-converter/-/webp-converter-2.3.2.tgz",
-"integrity": "sha512-9kQ9Q/MPzUV2mye8Tv7vA6vDIPk77rI4AWWm2vSaCyGAEsxqyVZYeVU2MSJY5fLkf6u7G5K343vLxKubOxz16Q=="
-},
 "node_modules/whatwg-fetch": {
 "version": "3.6.18",
 "resolved": "https://registry.npmjs.org/whatwg-fetch/-/whatwg-fetch-3.6.18.tgz",


@@ -11,7 +11,6 @@
 "cors": "^2.8.5",
 "csrf-csrf": "^2.2.3",
 "device-detector-js": "^3.0.3",
-"exifreader": "^4.12.0",
 "express": "^4.18.2",
 "google-translate-api-browser": "^3.0.1",
 "gpt3-tokenizer": "^1.1.5",
@@ -25,7 +24,6 @@
 "multer": "^1.4.5-lts.1",
 "node-fetch": "^2.6.11",
 "open": "^8.4.2",
-"piexifjs": "^1.0.6",
 "png-chunk-text": "^1.0.0",
 "png-chunks-encode": "^1.0.0",
 "png-chunks-extract": "^1.0.0",
@@ -35,7 +33,6 @@
 "simple-git": "^3.19.1",
 "uniqolor": "^1.1.0",
 "vectra": "^0.2.2",
-"webp-converter": "2.3.2",
 "write-file-atomic": "^5.0.1",
 "ws": "^8.13.0",
 "yargs": "^17.7.1",
@@ -53,7 +50,7 @@
 "type": "git",
 "url": "https://github.com/SillyTavern/SillyTavern.git"
 },
-"version": "1.10.3",
+"version": "1.10.4",
 "scripts": {
 "start": "node server.js",
 "start-multi": "node server.js --disableCsrf",


@@ -158,9 +158,6 @@
 "Disabled for all models": "对所有模型禁用",
 "Automatic (based on model name)": "自动(基于型号名称)",
 "Enabled for all models": "所有模型启用",
-"Multigen": "Multigen",
-"First chunk (tokens)": "第一个区块Tokens",
-"Next chunks (tokens)": "接下来的区块Tokens",
 "Anchors Order": "锚点顺序",
 "Character then Style": "字符然后样式",
 "Style then Character": "样式然后字符",
@@ -284,7 +281,6 @@
 "Regenerate": "重新生成",
 "PNG": "PNG",
 "JSON": "JSON",
-"WEBP": "WEBP",
 "presets": "预设",
 "Message Sound": "AI 消息提示音",
 "Author's Note": "作者注释",
@@ -711,9 +707,6 @@
 "Disabled for all models": "すべてのモデルで無効",
 "Automatic (based on model name)": "自動(モデル名に基づく)",
 "Enabled for all models": "すべてのモデルで有効",
-"Multigen": "マルチジェン",
-"First chunk (tokens)": "最初のチャンク(トークン)",
-"Next chunks (tokens)": "次のチャンク(トークン)",
 "Anchors Order": "アンカーオーダー",
 "Character then Style": "キャラクター、次にスタイル",
 "Style then Character": "スタイル、次にキャラクター",
@@ -836,7 +829,6 @@
 "Regenerate": "再生成",
 "PNG": "PNG",
 "JSON": "JSON",
-"WEBP": "WEBP",
 "presets": "プリセット",
 "Message Sound": "メッセージ音",
 "Author's Note": "作者の注記",
@@ -1266,9 +1258,6 @@
 "Disabled for all models": "모든 모델에 비활성화",
 "Automatic (based on model name)": "모델 서식 자동탐지",
 "Enabled for all models": "모든 모델에 활성화",
-"Multigen": "다수답변 생성",
-"First chunk (tokens)": "첫 말뭉치(토큰수)",
-"Next chunks (tokens)": "다음 말뭉치(토큰수)",
 "Anchors Order": "Anchors Order",
 "Character then Style": "캐릭터 다음 스타일",
 "Style then Character": "스타일 다음 캐릭터",
@@ -1392,7 +1381,6 @@
 "Regenerate": "재생성",
 "PNG": "PNG",
 "JSON": "JSON",
-"WEBP": "WEBP",
 "presets": "기본설정",
 "Message Sound": "메시지 효과음",
 "Author's Note": "글쓴이 쪽지",
@@ -1876,9 +1864,6 @@
 "Disabled for all models": "Выключено для всех моделей",
 "Automatic (based on model name)": "Автоматически (выбор по названию модели)",
 "Enabled for all models": "Включить для всех моделей",
-"Multigen": "Мултиген",
-"First chunk (tokens)": "Первый отрезок (в токенах)",
-"Next chunks (tokens)": "Следующий отрезок (в токенах)",
 "Anchors Order": "Порядок Anchors",
 "Character then Style": "Персонаж после Стиля",
 "Style then Character": "Стиль после Персонажа",
@@ -2016,7 +2001,6 @@
 "Regenerate": "Повторная генерация",
 "PNG": "PNG",
 "JSON": "JSON",
-"WEBP": "WEBP",
 "presets": "Предустановки",
 "Message Sound": "Звук сообщения",
 "Author's Note": "Авторские заметки",
@@ -2454,9 +2438,6 @@
 "Disabled for all models": "Disabilita per tutti i modelli",
 "Automatic (based on model name)": "Automatico (basato sul nome del modello)",
 "Enabled for all models": "Abilita per tutti i modelli",
-"Multigen": "Multigen",
-"First chunk (tokens)": "Primo pacchetto in Token",
-"Next chunks (tokens)": "Pacchetto successivo in Token",
 "Anchors Order": "Anchors Order",
 "Character then Style": "Prima il personaggio, successivamente lo stile",
 "Style then Character": "Prima lo stile, successivamente il personaggio",
@@ -2580,7 +2561,6 @@
 "Regenerate": "Rigenera",
 "PNG": "PNG",
 "JSON": "JSON",
-"WEBP": "WEBP",
 "presets": "preset",
 "Message Sound": "Suono del messaggio",
 "Author's Note": "Note d'autore",
@@ -3133,9 +3113,6 @@
 "Disabled for all models": "Uitgeschakeld voor alle modellen",
 "Automatic (based on model name)": "Automatisch (op basis van modelnaam)",
 "Enabled for all models": "Ingeschakeld voor alle modellen",
-"Multigen": "Multigen",
-"First chunk (tokens)": "Eerste stuk (tokens)",
-"Next chunks (tokens)": "Volgende stukken (tokens)",
 "Anchors Order": "Ankersvolgorde",
 "Character then Style": "Personage dan Stijl",
 "Style then Character": "Stijl dan Personage",
@@ -3259,7 +3236,6 @@
 "Regenerate": "Regenereren",
 "PNG": "PNG",
 "JSON": "JSON",
-"WEBP": "WEBP",
 "presets": "sjablonen",
 "Message Sound": "Berichtgeluid",
 "Author's Note": "Notitie van auteur",
@@ -3696,6 +3672,6 @@
 "API Key": "Clave API",
 "Get it here:": "Consíguela aquí:",
 "View my Kudos": "Ver mis Kudos",
 "Models": "Modelos IA"
 }
 }


@@ -2474,25 +2474,27 @@
 </div>
 <div data-newbie-hidden>
 <h4>
-<span data-i18n="Multigen">Multigen</span>
-<a href="https://docs.sillytavern.app/usage/core-concepts/advancedformatting/#multigen" class="notes-link" target="_blank">
-<span class="note-link-span">?</span>
-</a>
+<span data-i18n="Auto-Continue">Auto-Continue</span>
 </h4>
-<label class="checkbox_label" for="multigen">
-<input id="multigen" type="checkbox" />
-<span data-i18n="Enabled">
-Enabled
-</span>
-</label>
-<div class="multigen_settings_block">
-<label for="multigen_1st_chunk">
-<small><span data-i18n="First chunk (tokens)">First chunk (tokens)</span></small>
-<input id="multigen_first_chunk" type="number" class="text_pole textarea_compact" min="1" max="512" />
-</label>
-<label for="multigen_next_chunk">
-<small><span data-i18n="Next chunks (tokens)">Next chunks (tokens)</span></small>
-<input id="multigen_next_chunks" type="number" class="text_pole textarea_compact" min="1" max="512" />
+<div class="flex-container">
+<label class="checkbox_label" for="auto_continue_enabled">
+<input id="auto_continue_enabled" type="checkbox" />
+<span data-i18n="Enabled">
+Enabled
+</span>
+</label>
+<label class="checkbox_label" for="auto_continue_allow_chat_completions">
+<input id="auto_continue_allow_chat_completions" type="checkbox" />
+<span data-i18n="Allow for Chat Completion APIs">
+Allow for Chat Completion APIs
+</span>
+</label>
+</div>
+<div class="auto_continue_settings_block">
+<label for="auto_continue_target_length">
+<span data-i18n="Target length (tokens)">Target length (tokens)</span>
+<input id="auto_continue_target_length" type="number" class="text_pole textarea_compact" min="0" max="1024" />
 </label>
 </div>
 </div>
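
For orientation only, not part of the commit: a minimal sketch of the settings object the new controls above appear to bind to. The field names mirror the element IDs in this hunk and the power_user.auto_continue checks in the script.js changes further down; the default values here are assumptions.

// Illustrative sketch, not from the repository: likely shape of the auto-continue settings.
const autoContinueSettings = {
    enabled: false,                 // "#auto_continue_enabled"
    allow_chat_completions: false,  // "#auto_continue_allow_chat_completions"
    target_length: 400,             // "#auto_continue_target_length", in tokens (default assumed)
};
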
@@ -3499,7 +3501,7 @@
 <div id="rm_character_import" class="right_menu" style="display: none;">
 <form id="form_import" action="javascript:void(null);" method="post" enctype="multipart/form-data">
-<input multiple type="file" id="character_import_file" accept=".json, image/png, image/webp" name="avatar">
+<input multiple type="file" id="character_import_file" accept=".json, image/png" name="avatar">
 <input id="character_import_file_type" name="file_type" class="text_pole" maxlength="999" size="2" value="" autocomplete="off">
 </form>
 </div>
@@ -4462,7 +4464,6 @@
 <div id="export_format_popup" class="list-group">
 <div class="export_format list-group-item" data-format="png">PNG</div>
 <div class="export_format list-group-item" data-format="json">JSON</div>
-<div class="export_format list-group-item" data-format="webp">WEBP</div>
 </div>
 <div id="zoomed_avatar_template" class="template_element">


@@ -322,7 +322,6 @@ let safetychat = [
 {
 name: systemUserName,
 is_user: false,
-is_name: true,
 create_date: 0,
 mes: "You deleted a character/chat and arrived back here for safety reasons! Pick another character!",
 },
@@ -398,7 +397,6 @@ function getSystemMessages() {
 force_avatar: system_avatar,
 is_user: false,
 is_system: true,
-is_name: true,
 mes: renderTemplate("help"),
 },
 slash_commands: {
@@ -406,7 +404,6 @@
 force_avatar: system_avatar,
 is_user: false,
 is_system: true,
-is_name: true,
 mes: '',
 },
 hotkeys: {
@@ -414,7 +411,6 @@
 force_avatar: system_avatar,
 is_user: false,
 is_system: true,
-is_name: true,
 mes: renderTemplate("hotkeys"),
 },
 formatting: {
@@ -422,7 +418,6 @@
 force_avatar: system_avatar,
 is_user: false,
 is_system: true,
-is_name: true,
 mes: renderTemplate("formatting"),
 },
 macros:
@@ -430,7 +425,6 @@
 force_avatar: system_avatar,
 is_user: false,
 is_system: true,
-is_name: true,
 mes: renderTemplate("macros"),
 },
 welcome:
@@ -439,7 +433,6 @@
 force_avatar: system_avatar,
 is_user: false,
 is_system: true,
-is_name: true,
 mes: renderTemplate("welcome"),
 },
 group: {
@@ -447,7 +440,6 @@
 force_avatar: system_avatar,
 is_user: false,
 is_system: true,
-is_name: true,
 is_group: true,
 mes: "Group chat created. Say 'Hi' to lovely people!",
 },
@@ -456,7 +448,6 @@
 force_avatar: system_avatar,
 is_user: false,
 is_system: true,
-is_name: true,
 mes: "No one hears you. <b>Hint&#58;</b> add more members to the group!",
 },
 generic: {
@@ -464,7 +455,6 @@
 force_avatar: system_avatar,
 is_user: false,
 is_system: true,
-is_name: true,
 mes: "Generic system message. User `text` parameter to override the contents",
 },
 bookmark_created: {
@@ -472,7 +462,6 @@
 force_avatar: system_avatar,
 is_user: false,
 is_system: true,
-is_name: true,
 mes: `Bookmark created! Click here to open the bookmark chat: <a class="bookmark_link" file_name="{0}" href="javascript:void(null);">{1}</a>`,
 },
 bookmark_back: {
@@ -480,7 +469,6 @@
 force_avatar: system_avatar,
 is_user: false,
 is_system: true,
-is_name: true,
 mes: `Click here to return to the previous chat: <a class="bookmark_link" file_name="{0}" href="javascript:void(null);">Return</a>`,
 },
 };
@@ -662,9 +650,7 @@ export let user_avatar = "you.png";
 export var amount_gen = 80; //default max length of AI generated responses
 var max_context = 2048;
-var tokens_already_generated = 0;
 var message_already_generated = "";
-var cycle_count_generation = 0;
 var swipes = true;
 let extension_prompts = {};
@@ -2060,8 +2046,7 @@ function isStreamingEnabled() {
 return ((main_api == 'openai' && oai_settings.stream_openai && oai_settings.chat_completion_source !== chat_completion_sources.SCALE && oai_settings.chat_completion_source !== chat_completion_sources.AI21)
 || (main_api == 'kobold' && kai_settings.streaming_kobold && kai_flags.can_use_streaming)
 || (main_api == 'novel' && nai_settings.streaming_novel)
-|| (main_api == 'textgenerationwebui' && textgenerationwebui_settings.streaming))
-&& !isMultigenEnabled(); // Multigen has a quasi-streaming mode which breaks the real streaming
+|| (main_api == 'textgenerationwebui' && textgenerationwebui_settings.streaming));
 }
 function showStopButton() {
@@ -2128,9 +2113,6 @@ class StreamingProcessor {
 const isContinue = this.type == "continue";
 text = this.removePrefix(text);
 let processedText = cleanUpMessage(text, isImpersonate, isContinue, !isFinal);
-let result = extractNameFromMessage(processedText, this.force_name2, isImpersonate);
-let isName = result.this_mes_is_name;
-processedText = result.getMessage;
 // Predict unbalanced asterisks / quotes during streaming
 const charsToBalance = ['*', '"'];
@@ -2149,7 +2131,6 @@
 // Don't waste time calculating token count for streaming
 let currentTokenCount = isFinal && power_user.message_token_count_enabled ? getTokenCount(processedText, 0) : 0;
 const timePassed = formatGenerationTimer(this.timeStarted, currentTime, currentTokenCount);
-chat[messageId]['is_name'] = isName;
 chat[messageId]['mes'] = processedText;
 chat[messageId]['gen_started'] = this.timeStarted;
 chat[messageId]['gen_finished'] = currentTime;
@@ -2311,7 +2292,6 @@ class StreamingProcessor {
 async function Generate(type, { automatic_trigger, force_name2, resolve, reject, quiet_prompt, force_chid, signal } = {}, dryRun = false) {
 //console.log('Generate entered');
 setGenerationProgress(0);
-tokens_already_generated = 0;
 generation_started = new Date();
 // Don't recreate abort controller if signal is passed
@@ -2324,17 +2304,6 @@ async function Generate(type, { automatic_trigger, force_name2, resolve, reject,
 const isImpersonate = type == "impersonate";
 message_already_generated = isImpersonate ? `${name1}: ` : `${name2}: `;
-// Name for the multigen prefix
-const magName = isImpersonate ? name1 : name2;
-if (isInstruct) {
-message_already_generated = formatInstructModePrompt(magName, isImpersonate, '', name1, name2);
-} else {
-message_already_generated = `${magName}: `;
-}
-// To trim after multigen ended
-const magFirst = message_already_generated;
 const interruptedByCommand = processCommands($("#send_textarea").val(), type);
@@ -2356,12 +2325,6 @@
 return;
 }
-if (main_api == 'kobold' && kai_settings.streaming_kobold && power_user.multigen) {
-toastr.error('Multigen is not supported with Kobold streaming enabled. Disable streaming in "AI Response Configuration" or multigen in "Advanced Formatting" to proceed.', undefined, { timeOut: 10000, preventDuplicates: true, });
-is_send_press = false;
-return;
-}
 if (isHordeGenerationNotAllowed()) {
 is_send_press = false;
 return;
@@ -2523,7 +2486,7 @@
 console.log(`Core/all messages: ${coreChat.length}/${chat.length}`);
 // kingbri MARK: - Make sure the prompt bias isn't the same as the user bias
-if ((promptBias && !isUserPromptBias) || power_user.always_force_name2) {
+if ((promptBias && !isUserPromptBias) || power_user.always_force_name2 || main_api == 'novel') {
 force_name2 = true;
 }
@@ -2605,7 +2568,7 @@
 const storyString = renderStoryString(storyStringParams);
 if (main_api === 'openai') {
-message_already_generated = ''; // OpenAI doesn't have multigen
+message_already_generated = '';
 setOpenAIMessages(coreChat);
 setOpenAIMessageExamples(mesExamplesArray);
 }
@@ -2699,10 +2662,8 @@
 // Save reply does add cycle text to the prompt, so it's not needed here
 streamingProcessor && (streamingProcessor.firstMessageText = '');
 message_already_generated = continue_mag;
-tokens_already_generated = 1; // Multigen copium
 }
-// Multigen rewrites the type and I don't know why
 const originalType = type;
 runGenerate(cyclePrompt);
@@ -2771,13 +2732,13 @@
 }
 // Get instruct mode line
-if (isInstruct && tokens_already_generated === 0) {
+if (isInstruct && !isContinue) {
 const name = isImpersonate ? name1 : name2;
 lastMesString += formatInstructModePrompt(name, isImpersonate, promptBias, name1, name2);
 }
 // Get non-instruct impersonation line
-if (!isInstruct && isImpersonate && tokens_already_generated === 0) {
+if (!isInstruct && isImpersonate && !isContinue) {
 const name = name1;
 if (!lastMesString.endsWith('\n')) {
 lastMesString += '\n';
@@ -2787,7 +2748,7 @@
 // Add character's name
 // Force name append on continue
-if (!isInstruct && force_name2 && (tokens_already_generated === 0 || isContinue)) {
+if (!isInstruct && force_name2) {
 if (!lastMesString.endsWith('\n')) {
 lastMesString += '\n';
 }
@@ -2900,14 +2861,12 @@
 });
 // TODO: Move zero-depth anchor append to work like CFG and bias appends
-if (zeroDepthAnchor && zeroDepthAnchor.length) {
-if (!isMultigenEnabled() || tokens_already_generated == 0) {
-console.log(/\s/.test(finalMesSend[finalMesSend.length - 1].message.slice(-1)))
-finalMesSend[finalMesSend.length - 1].message +=
-/\s/.test(finalMesSend[finalMesSend.length - 1].message.slice(-1))
-? zeroDepthAnchor
-: `${zeroDepthAnchor}`;
-}
+if (zeroDepthAnchor?.length && !isContinue) {
+console.log(/\s/.test(finalMesSend[finalMesSend.length - 1].message.slice(-1)))
+finalMesSend[finalMesSend.length - 1].message +=
+/\s/.test(finalMesSend[finalMesSend.length - 1].message.slice(-1))
+? zeroDepthAnchor
+: `${zeroDepthAnchor}`;
 }
 let cfgPrompt = {};
@@ -2929,7 +2888,7 @@
 // Add prompt bias after everything else
 // Always run with continue
-if (!isInstruct && !isImpersonate && (tokens_already_generated === 0 || isContinue)) {
+if (!isInstruct && !isImpersonate) {
 if (promptBias.trim().length !== 0) {
 finalMesSend[finalMesSend.length - 1].message +=
 /\s/.test(finalMesSend[finalMesSend.length - 1].message.slice(-1))
@@ -2978,11 +2937,6 @@
 let this_amount_gen = Number(amount_gen); // how many tokens the AI will be requested to generate
 let this_settings = koboldai_settings[koboldai_setting_names[preset_settings]];
-if (isMultigenEnabled() && type !== 'quiet') {
-// if nothing has been generated yet..
-this_amount_gen = getMultigenAmount();
-}
 let thisPromptBits = [];
 // TODO: Make this a switch
@@ -3133,6 +3087,7 @@
 if (isStreamingEnabled() && type !== 'quiet') {
 hideSwipeButtons();
 let getMessage = await streamingProcessor.generate();
+let messageChunk = cleanUpMessage(getMessage, isImpersonate, isContinue, false);
 if (isContinue) {
 getMessage = continue_mag + getMessage;
@@ -3141,10 +3096,13 @@
 if (streamingProcessor && !streamingProcessor.isStopped && streamingProcessor.isFinished) {
 await streamingProcessor.onFinishStreaming(streamingProcessor.messageId, getMessage);
 streamingProcessor = null;
+triggerAutoContinue(messageChunk, isImpersonate);
 }
 }
 async function onSuccess(data) {
+let messageChunk = '';
 if (data.error == 'dryRun') {
 generatedPromptCache = '';
 resolve();
@@ -3157,48 +3115,7 @@
 let title = extractTitleFromData(data);
 kobold_horde_model = title;
-// to make it continue generating so long as it's under max_amount and hasn't signaled
-// an end to the character's response via typing "You:" or adding "<endoftext>"
-if (isMultigenEnabled() && type !== 'quiet') {
-message_already_generated += getMessage;
-promptBias = '';
-let this_mes_is_name;
-({ this_mes_is_name, getMessage } = extractNameFromMessage(getMessage, force_name2, isImpersonate));
-if (!isImpersonate) {
-if (tokens_already_generated == 0) {
-console.debug("New message");
-({ type, getMessage } = await saveReply(type, getMessage, this_mes_is_name, title));
-}
-else {
-console.debug("Should append message");
-({ type, getMessage } = await saveReply('append', getMessage, this_mes_is_name, title));
-}
-} else {
-let chunk = cleanUpMessage(message_already_generated, true, isContinue, true);
-let extract = extractNameFromMessage(chunk, force_name2, isImpersonate);
-$('#send_textarea').val(extract.getMessage).trigger('input');
-}
-if (shouldContinueMultigen(getMessage, isImpersonate, isInstruct)) {
-hideSwipeButtons();
-tokens_already_generated += this_amount_gen; // add new gen amt to any prev gen counter..
-getMessage = message_already_generated;
-// if any tokens left to generate
-if (getMultigenAmount() > 0) {
-runGenerate(getMessage);
-console.debug('returning to make generate again');
-return;
-}
-}
-tokens_already_generated = 0;
-generatedPromptCache = "";
-const substringStart = originalType !== 'continue' ? magFirst.length : 0;
-getMessage = message_already_generated.substring(substringStart);
-}
+messageChunk = cleanUpMessage(getMessage, isImpersonate, isContinue, false);
 if (isContinue) {
 getMessage = continue_mag + getMessage;
@@ -3208,8 +3125,6 @@
 const displayIncomplete = type == 'quiet';
 getMessage = cleanUpMessage(getMessage, isImpersonate, isContinue, displayIncomplete);
-let this_mes_is_name;
-({ this_mes_is_name, getMessage } = extractNameFromMessage(getMessage, force_name2, isImpersonate));
 if (getMessage.length > 0) {
 if (isImpersonate) {
 $('#send_textarea').val(getMessage).trigger('input');
@@ -3220,12 +3135,12 @@
 resolve(getMessage);
 }
 else {
-// Without streaming we'll be having a full message on continuation. Treat it as a multigen last chunk.
-if (!isMultigenEnabled() && originalType !== 'continue') {
-({ type, getMessage } = await saveReply(type, getMessage, this_mes_is_name, title));
+// Without streaming we'll be having a full message on continuation. Treat it as a last chunk.
+if (originalType !== 'continue') {
+({ type, getMessage } = await saveReply(type, getMessage, true, title));
 }
 else {
-({ type, getMessage } = await saveReply('appendFinal', getMessage, this_mes_is_name, title));
+({ type, getMessage } = await saveReply('appendFinal', getMessage, true, title));
 }
 }
 activateSendButtons();
@@ -3298,6 +3213,7 @@
 setGenerationProgress(0);
 if (type !== 'quiet') {
+triggerAutoContinue(messageChunk, isImpersonate);
 resolve();
 }
 };
@@ -3330,6 +3246,61 @@ function getNextMessageId(type) {
 return type == 'swipe' ? Number(count_view_mes - 1) : Number(count_view_mes);
 }
+/**
+ *
+ * @param {string} messageChunk
+ * @param {boolean} isImpersonate
+ * @returns {void}
+ */
+export function triggerAutoContinue(messageChunk, isImpersonate) {
+if (selected_group) {
+console.log('Auto-continue is disabled for group chat');
+return;
+}
+if (power_user.auto_continue.enabled && !is_send_press) {
+if (power_user.auto_continue.target_length <= 0) {
+console.log('Auto-continue target length is 0, not triggering auto-continue');
+return;
+}
+if (main_api === 'openai' && !power_user.auto_continue.allow_chat_completions) {
+console.log('Auto-continue for OpenAI is disabled by user.');
+return;
+}
+if (isImpersonate) {
+console.log('Continue for impersonation is not implemented yet');
+return;
+}
+const textareaText = String($('#send_textarea').val());
+const USABLE_LENGTH = 5;
+if (textareaText.length > 0) {
+console.log('Not triggering auto-continue because user input is not empty');
+return;
+}
+if (messageChunk.trim().length > USABLE_LENGTH && chat.length) {
+const lastMessage = chat[chat.length - 1];
+const messageLength = getTokenCount(lastMessage.mes);
+const shouldAutoContinue = messageLength < power_user.auto_continue.target_length;
+if (shouldAutoContinue) {
+console.log(`Triggering auto-continue. Message tokens: ${messageLength}. Target tokens: ${power_user.auto_continue.target_length}. Message chunk: ${messageChunk}`);
+$("#option_continue").trigger('click');
+} else {
+console.log(`Not triggering auto-continue. Message tokens: ${messageLength}. Target tokens: ${power_user.auto_continue.target_length}`);
+return;
+}
+} else {
+console.log('Last generated chunk was empty, not triggering auto-continue');
+return;
+}
+}
+}
 export function getBiasStrings(textareaText, type) {
 if (type == 'impersonate' || type == 'continue') {
 return { messageBias: '', promptBias: '', isUserPromptBias: false };
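
To summarize the gating in the new triggerAutoContinue above: a condensed, self-contained sketch (not code from the repository) of the decision it makes once a reply has finished. It folds the token-count comparison and the USABLE_LENGTH guard into one helper and leaves out the group-chat, impersonation, Chat Completion, and empty-textarea checks shown in the diff.

// Condensed sketch of the auto-continue decision above (illustrative only).
function shouldAutoContinue(lastMessageTokens, lastChunk, settings) {
    if (!settings.enabled) return false;            // feature toggle
    if (settings.target_length <= 0) return false;  // no target configured
    if (lastChunk.trim().length <= 5) return false; // USABLE_LENGTH guard from the diff
    // keep clicking "continue" until the message reaches the target token count
    return lastMessageTokens < settings.target_length;
}

// e.g. shouldAutoContinue(120, ' and then she said', { enabled: true, target_length: 400 }) -> true
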
@@ -3373,7 +3344,7 @@ function formatMessageHistoryItem(chatItem, isInstruct, forceOutputSequence) {
 const isNarratorType = chatItem?.extra?.type === system_message_types.NARRATOR;
 const characterName = (selected_group || chatItem.force_avatar) ? chatItem.name : name2;
 const itemName = chatItem.is_user ? chatItem['name'] : characterName;
-const shouldPrependName = (chatItem.is_name || chatItem.force_avatar || selected_group) && !isNarratorType;
+const shouldPrependName = !isNarratorType;
 let textResult = shouldPrependName ? `${itemName}: ${chatItem.mes}\n` : `${chatItem.mes}\n`;
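
With the is_name flag gone, every non-narrator history item now gets a speaker prefix. A tiny illustration of the textResult template above, using made-up data:

// Illustration only: how the template above renders a history item.
const chatItem = { is_user: true, name: 'Alice', mes: 'Hello there.' };
const shouldPrependName = true; // any non-narrator message
const textResult = shouldPrependName
    ? `${chatItem.name}: ${chatItem.mes}\n`
    : `${chatItem.mes}\n`;
// textResult === "Alice: Hello there.\n"
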
@@ -3396,7 +3367,6 @@ export async function sendMessageAsUser(textareaText, messageBias) {
 chat[chat.length] = {};
 chat[chat.length - 1]['name'] = name1;
 chat[chat.length - 1]['is_user'] = true;
-chat[chat.length - 1]['is_name'] = true;
 chat[chat.length - 1]['send_date'] = getMessageTimeStamp();
 chat[chat.length - 1]['mes'] = substituteParams(textareaText);
 chat[chat.length - 1]['extra'] = {};
@@ -3507,35 +3477,6 @@ function appendZeroDepthAnchor(force_name2, zeroDepthAnchor, finalPrompt) {
 return finalPrompt;
 }
-function getMultigenAmount() {
-let this_amount_gen = Number(amount_gen);
-if (tokens_already_generated === 0) {
-// if the max gen setting is > 50...(
-if (Number(amount_gen) >= power_user.multigen_first_chunk) {
-// then only try to make 50 this cycle..
-this_amount_gen = power_user.multigen_first_chunk;
-}
-else {
-// otherwise, make as much as the max amount request.
-this_amount_gen = Number(amount_gen);
-}
-}
-// if we already received some generated text...
-else {
-// if the remaining tokens to be made is less than next potential cycle count
-if (Number(amount_gen) - tokens_already_generated < power_user.multigen_next_chunks) {
-// subtract already generated amount from the desired max gen amount
-this_amount_gen = Number(amount_gen) - tokens_already_generated;
-}
-else {
-// otherwise make the standard cycle amount (first 50, and 30 after that)
-this_amount_gen = power_user.multigen_next_chunks;
-}
-}
-return this_amount_gen;
-}
 async function DupeChar() {
 if (!this_chid) {
 toastr.warning('You must first select a character to duplicate!')
@@ -3754,50 +3695,11 @@ function getGenerateUrl() {
 } else if (main_api == 'textgenerationwebui') {
 generate_url = '/generate_textgenerationwebui';
 } else if (main_api == 'novel') {
-generate_url = '/generate_novelai';
+generate_url = '/api/novelai/generate';
 }
 return generate_url;
 }
-function shouldContinueMultigen(getMessage, isImpersonate, isInstruct) {
-if (isInstruct && power_user.instruct.stop_sequence) {
-if (message_already_generated.indexOf(power_user.instruct.stop_sequence) !== -1) {
-return false;
-}
-}
-// stopping name string
-const nameString = isImpersonate ? `${name2}:` : `${name1}:`;
-// if there is no 'You:' in the response msg
-const doesNotContainName = message_already_generated.indexOf(nameString) === -1;
-//if there is no <endoftext> stamp in the response msg
-const isNotEndOfText = message_already_generated.indexOf('<|endoftext|>') === -1;
-//if the gen'd msg is less than the max response length..
-const notReachedMax = tokens_already_generated < Number(amount_gen);
-//if we actually have gen'd text at all...
-const msgHasText = getMessage.length > 0;
-return doesNotContainName && isNotEndOfText && notReachedMax && msgHasText;
-}
-function extractNameFromMessage(getMessage, force_name2, isImpersonate) {
-const nameToTrim = isImpersonate ? name1 : name2;
-let this_mes_is_name = true;
-if (getMessage.startsWith(nameToTrim + ":")) {
-getMessage = getMessage.replace(nameToTrim + ':', '');
-getMessage = getMessage.trimStart();
-} else {
-this_mes_is_name = false;
-}
-if (force_name2 || power_user.instruct.enabled)
-this_mes_is_name = true;
-if (isImpersonate) {
-getMessage = getMessage.trim();
-}
-return { this_mes_is_name, getMessage };
-}
 function throwCircuitBreakerError() {
 callPopup(`Could not extract reply in ${MAX_GENERATION_LOOPS} attempts. Try generating again`, 'text');
 generate_loop_counter = 0;
@@ -3876,7 +3778,7 @@ function cleanUpMessage(getMessage, isImpersonate, isContinue, displayIncomplete
 if (nameToTrim && getMessage.indexOf(`${nameToTrim}:`) == 0) {
 getMessage = getMessage.substr(0, getMessage.indexOf(`${nameToTrim}:`));
 }
-if (nameToTrim && getMessage.indexOf(`\n${nameToTrim}:`) > 0) {
+if (nameToTrim && getMessage.indexOf(`\n${nameToTrim}:`) >= 0) {
 getMessage = getMessage.substr(0, getMessage.indexOf(`\n${nameToTrim}:`));
 }
 if (getMessage.indexOf('<|endoftext|>') != -1) {
@@ -3947,10 +3849,22 @@ function cleanUpMessage(getMessage, isImpersonate, isContinue, displayIncomplete
 if (power_user.auto_fix_generated_markdown) {
 getMessage = fixMarkdown(getMessage, false);
 }
+const nameToTrim2 = isImpersonate ? name1 : name2;
+if (getMessage.startsWith(nameToTrim2 + ":")) {
+getMessage = getMessage.replace(nameToTrim2 + ':', '');
+getMessage = getMessage.trimStart();
+}
+if (isImpersonate) {
+getMessage = getMessage.trim();
+}
 return getMessage;
 }
-async function saveReply(type, getMessage, this_mes_is_name, title) {
+async function saveReply(type, getMessage, _, title) {
 if (type != 'append' && type != 'continue' && type != 'appendFinal' && chat.length && (chat[chat.length - 1]['swipe_id'] === undefined ||
 chat[chat.length - 1]['is_user'])) {
 type = 'normal';
@@ -4023,7 +3937,6 @@ async function saveReply(type, getMessage, this_mes_is_name, title) {
 chat[chat.length - 1]['extra'] = {};
 chat[chat.length - 1]['name'] = name2;
 chat[chat.length - 1]['is_user'] = false;
-chat[chat.length - 1]['is_name'] = this_mes_is_name;
 chat[chat.length - 1]['send_date'] = getMessageTimeStamp();
 chat[chat.length - 1]["extra"]["api"] = getGeneratingApi();
 chat[chat.length - 1]["extra"]["model"] = getGeneratingModel();
@@ -4045,7 +3958,6 @@
 if (characters[this_chid].avatar != 'none') {
 avatarImg = getThumbnailUrl('avatar', characters[this_chid].avatar);
 }
-chat[chat.length - 1]['is_name'] = true;
 chat[chat.length - 1]['force_avatar'] = avatarImg;
 chat[chat.length - 1]['original_avatar'] = characters[this_chid].avatar;
 chat[chat.length - 1]['extra']['gen_id'] = group_generation_id;
@@ -4135,10 +4047,6 @@ function extractImageFromMessage(getMessage) {
 return { getMessage, image, title };
 }
-export function isMultigenEnabled() {
-return power_user.multigen && (main_api == 'textgenerationwebui' || main_api == 'kobold' || main_api == 'koboldhorde' || main_api == 'novel');
-}
 export function activateSendButtons() {
 is_send_press = false;
 $("#send_but").removeClass("displayNone");
@@ -4537,7 +4445,6 @@ function getFirstMessage() {
 name: name2,
 is_user: false,
 is_system: false,
-is_name: true,
 send_date: getMessageTimeStamp(),
 mes: getRegexedString(firstMes, regex_placement.AI_OUTPUT),
 extra: {},
@@ -5120,11 +5027,11 @@ function updateMessage(div) {
 const mes = chat[this_edit_mes_id];
 let regexPlacement;
-if (mes.is_name && mes.is_user) {
+if (mes.is_user) {
 regexPlacement = regex_placement.USER_INPUT;
-} else if (mes.is_name && mes.name === name2) {
+} else if (mes.name === name2) {
 regexPlacement = regex_placement.AI_OUTPUT;
-} else if (mes.is_name && mes.name !== name2 || mes.extra?.type === "narrator") {
+} else if (mes.name !== name2 || mes.extra?.type === "narrator") {
 regexPlacement = regex_placement.SLASH_COMMAND;
 }
@@ -5476,7 +5383,6 @@ function select_rm_info(type, charId, previousCharId = null) {
 $('#rm_print_characters_pagination').pagination('go', page);
 waitUntilCondition(() => document.querySelector(selector) !== null).then(() => {
-const parent = $('#rm_print_characters_block');
 const element = $(selector).parent();
 if (element.length === 0) {
@@ -5484,7 +5390,8 @@
 return;
 }
-parent.scrollTop(element.position().top + parent.scrollTop());
+const scrollOffset = element.offset().top - element.parent().offset().top;
+element.parent().scrollTop(scrollOffset);
 element.addClass('flash animated');
 setTimeout(function () {
 element.removeClass('flash animated');
@@ -5508,12 +5415,12 @@
 const perPage = Number(localStorage.getItem('Characters_PerPage'));
 const page = Math.floor(charIndex / perPage) + 1;
 $('#rm_print_characters_pagination').pagination('go', page);
-const parent = $('#rm_print_characters_block');
 const selector = `#rm_print_characters_block [grid="${charId}"]`;
 try {
 waitUntilCondition(() => document.querySelector(selector) !== null).then(() => {
 const element = $(selector);
-parent.scrollTop(element.position().top + parent.scrollTop());
+const scrollOffset = element.offset().top - element.parent().offset().top;
+element.parent().scrollTop(scrollOffset);
 $(element).addClass('flash animated');
 setTimeout(function () {
 $(element).removeClass('flash animated');
@@ -6573,13 +6480,6 @@ const swipe_right = () => {
 return;
 }
-// if (chat.length == 1) {
-// if (chat[0]['swipe_id'] !== undefined && chat[0]['swipe_id'] == chat[0]['swipes'].length - 1) {
-// toastr.info('Add more alternative greetings to swipe through', 'That\'s all for now');
-// return;
-// }
-// }
 const swipe_duration = 200;
 const swipe_range = 700;
 //console.log(swipe_range);
@@ -6829,7 +6729,6 @@ export function processDroppedFiles(files) {
 const allowedMimeTypes = [
 'application/json',
 'image/png',
-'image/webp',
 ];
 for (const file of files) {
@@ -6845,7 +6744,7 @@ function importCharacter(file) {
 const ext = file.name.match(/\.(\w+)$/);
 if (
 !ext ||
-(ext[1].toLowerCase() != "json" && ext[1].toLowerCase() != "png" && ext[1] != "webp")
+(ext[1].toLowerCase() != "json" && ext[1].toLowerCase() != "png")
 ) {
 return;
 }


@@ -280,7 +280,6 @@ async function convertSoloToGroupChat() {
 message.name = character.name;
 message.original_avatar = character.avatar;
 message.force_avatar = getThumbnailUrl('avatar', character.avatar);
-message.is_name = true;
 // Allow regens of a single message in group
 if (typeof message.extra !== 'object') {


@@ -53,7 +53,6 @@ async function sendCaptionedMessage(caption, image) {
 const message = {
 name: context.name1,
 is_user: true,
-is_name: true,
 send_date: getMessageTimeStamp(),
 mes: messageText,
 extra: {


@@ -159,7 +159,7 @@ async function generateHypeBot() {
 abortController = new AbortController();
-const response = await fetch('/generate_novelai', {
+const response = await fetch('/api/novelai/generate', {
 headers: getRequestHeaders(),
 body: JSON.stringify(parameters),
 method: 'POST',


@ -508,7 +508,6 @@ async function onSelectInjectFile(e) {
meta: JSON.stringify({ meta: JSON.stringify({
name: file.name, name: file.name,
is_user: false, is_user: false,
is_name: false,
is_system: false, is_system: false,
send_date: humanizedDateTime(), send_date: humanizedDateTime(),
mes: m, mes: m,
@ -686,7 +685,6 @@ window.chromadb_interceptGeneration = async (chat, maxContext) => {
const charname = context.name2;
newChat.push(
{
is_name: false,
is_user: false,
mes: `[Use these past chat exchanges to inform ${charname}'s next response:`,
name: "system",
@ -696,7 +694,6 @@ window.chromadb_interceptGeneration = async (chat, maxContext) => {
newChat.push(...queriedMessages.map(m => m.meta).filter(onlyUnique).map(JSON.parse));
newChat.push(
{
is_name: false,
is_user: false,
mes: `]\n`,
name: "system",
@ -752,7 +749,6 @@ window.chromadb_interceptGeneration = async (chat, maxContext) => {
newChat.push(
{
is_name: false,
is_user: false,
mes: recallStart,
name: "system",
@ -762,7 +758,6 @@ window.chromadb_interceptGeneration = async (chat, maxContext) => {
newChat.push(...queriedMessages.map(m => m.meta).filter(onlyUnique).map(JSON.parse));
newChat.push(
{
is_name: false,
is_user: false,
mes: recallEnd + `\n`,
name: "system",


@ -165,7 +165,6 @@ async function processTranscript(transcript) {
const message = {
name: context.name1,
is_user: true,
is_name: true,
send_date: getMessageTimeStamp(),
mes: messageText,
};


@ -1246,7 +1246,6 @@ async function sendMessage(prompt, image) {
name: context.groupId ? systemUserName : context.name2,
is_user: false,
is_system: true,
is_name: true,
send_date: getMessageTimeStamp(),
mes: context.groupId ? p(messageText) : messageText,
extra: {


@ -1,4 +1,4 @@
import { callPopup, cancelTtsPlay, eventSource, event_types, isMultigenEnabled, is_send_press, saveSettingsDebounced } from '../../../script.js'
import { callPopup, cancelTtsPlay, eventSource, event_types, saveSettingsDebounced } from '../../../script.js'
import { ModuleWorkerWrapper, doExtrasFetch, extension_settings, getApiUrl, getContext, modules } from '../../extensions.js'
import { escapeRegex, getStringHash } from '../../utils.js'
import { EdgeTtsProvider } from './edge.js'
@ -117,11 +117,6 @@ async function moduleWorker() {
return
}
// Multigen message is currently being generated
if (is_send_press && isMultigenEnabled()) {
return;
}
// Chat changed
if (
context.chatId !== lastChatId


@ -170,7 +170,7 @@ class NovelTtsProvider {
async fetchTtsGeneration(inputText, voiceId) {
console.info(`Generating new TTS for voice_id ${voiceId}`)
const response = await fetch(`/novel_tts`,
const response = await fetch(`/api/novelai/generate-voice`,
{
method: 'POST',
headers: getRequestHeaders(),


@ -51,7 +51,6 @@ import {
menu_type,
select_selected_character,
cancelTtsPlay,
isMultigenEnabled,
displayPastChats,
sendMessageAsUser,
getBiasStrings,
@ -206,7 +205,6 @@ function getFirstCharacterMessage(character) {
mes["is_user"] = false; mes["is_user"] = false;
mes["is_system"] = false; mes["is_system"] = false;
mes["name"] = character.name; mes["name"] = character.name;
mes["is_name"] = true;
mes["send_date"] = getMessageTimeStamp(); mes["send_date"] = getMessageTimeStamp();
mes["original_avatar"] = character.avatar; mes["original_avatar"] = character.avatar;
mes["extra"] = { "gen_id": Date.now() * Math.random() * 1000000 }; mes["extra"] = { "gen_id": Date.now() * Math.random() * 1000000 };
@ -577,7 +575,7 @@ async function generateGroupWrapper(by_auto_mode, type = null, params = {}) {
await Generate(generateType, { automatic_trigger: by_auto_mode, ...(params || {}) });
if (type !== "swipe" && type !== "impersonate" && !isMultigenEnabled() && !isStreamingEnabled()) {
if (type !== "swipe" && type !== "impersonate" && !isStreamingEnabled()) {
// update indicator and scroll down
typingIndicator
.find(".typing_indicator_name")
@ -593,7 +591,7 @@ async function generateGroupWrapper(by_auto_mode, type = null, params = {}) {
}
// if not swipe - check if message generated already
if (generateType === "group_chat" && !isMultigenEnabled() && chat.length == messagesBefore) {
if (generateType === "group_chat" && chat.length == messagesBefore) {
await delay(100);
}
// if swipe - see if message changed
@ -606,13 +604,6 @@ async function generateGroupWrapper(by_auto_mode, type = null, params = {}) {
break;
}
}
else if (isMultigenEnabled()) {
if (isGenerationDone) {
break;
} else {
await delay(100);
}
}
else {
if (lastMessageText === chat[chat.length - 1].mes) {
await delay(100);
@ -631,13 +622,6 @@ async function generateGroupWrapper(by_auto_mode, type = null, params = {}) {
break;
}
}
else if (isMultigenEnabled()) {
if (isGenerationDone) {
break;
} else {
await delay(100);
}
}
else {
if (!$("#send_textarea").val() || $("#send_textarea").val() == userInput) {
await delay(100);
@ -654,14 +638,6 @@ async function generateGroupWrapper(by_auto_mode, type = null, params = {}) {
await delay(100);
}
}
else if (isMultigenEnabled()) {
if (isGenerationDone) {
messagesBefore++;
break;
} else {
await delay(100);
}
}
else if (isStreamingEnabled()) {
if (streamingProcessor && !streamingProcessor.isFinished) {
await delay(100);


@ -11,6 +11,7 @@ import { getTextTokens, tokenizers } from "./tokenizers.js";
import {
getSortableDelay,
getStringHash,
onlyUnique,
uuidv4,
} from "./utils.js";
@ -87,7 +88,7 @@ export function getNovelUnlimitedImageGeneration() {
}
export async function loadNovelSubscriptionData() {
const result = await fetch('/getstatus_novelai', {
const result = await fetch('/api/novelai/status', {
method: 'POST',
headers: getRequestHeaders(),
});
@ -402,7 +403,7 @@ function getBadWordPermutations(text) {
// Ditto + leading space
result.push(` ${text.toLowerCase()}`);
return result;
return result.filter(onlyUnique);
}
export function getNovelGenerationData(finalPrompt, this_settings, this_amount_gen, isImpersonate, cfgValues) {
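getBadWordPermutations now deduplicates its output with onlyUnique, imported from utils.js above; a minimal sketch, assuming the usual filter-predicate implementation:
// Assumed implementation of onlyUnique: an Array.prototype.filter predicate
// that keeps only the first occurrence of each value.
function onlyUnique(value, index, array) {
    return array.indexOf(value) === index;
}

// e.g. ['word', ' word', 'word'].filter(onlyUnique) -> ['word', ' word']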
@ -679,7 +680,7 @@ function tryParseStreamingError(decoded) {
export async function generateNovelWithStreaming(generate_data, signal) {
generate_data.streaming = nai_settings.streaming_novel;
const response = await fetch('/generate_novelai', {
const response = await fetch('/api/novelai/generate', {
headers: getRequestHeaders(),
body: JSON.stringify(generate_data),
method: 'POST',


@ -123,6 +123,32 @@ const j2_max_pres = 5.0;
const openrouter_website_model = 'OR_Website';
const openai_max_stop_strings = 4;
const textCompletionModels = [
"text-davinci-003",
"text-davinci-002",
"text-davinci-001",
"text-curie-001",
"text-babbage-001",
"text-ada-001",
"code-davinci-002",
"code-davinci-001",
"code-cushman-002",
"code-cushman-001",
"text-davinci-edit-001",
"code-davinci-edit-001",
"text-embedding-ada-002",
"text-similarity-davinci-001",
"text-similarity-curie-001",
"text-similarity-babbage-001",
"text-similarity-ada-001",
"text-search-davinci-doc-001",
"text-search-curie-doc-001",
"text-search-babbage-doc-001",
"text-search-ada-doc-001",
"code-search-babbage-code-001",
"code-search-ada-code-001",
];
let biasCache = undefined;
let model_list = [];
@ -1123,7 +1149,7 @@ async function sendOpenAIRequest(type, openai_msgs_tosend, signal) {
const isOpenRouter = oai_settings.chat_completion_source == chat_completion_sources.OPENROUTER;
const isScale = oai_settings.chat_completion_source == chat_completion_sources.SCALE;
const isAI21 = oai_settings.chat_completion_source == chat_completion_sources.AI21;
const isTextCompletion = oai_settings.chat_completion_source == chat_completion_sources.OPENAI && (oai_settings.openai_model.startsWith('text-') || oai_settings.openai_model.startsWith('code-'));
const isTextCompletion = oai_settings.chat_completion_source == chat_completion_sources.OPENAI && textCompletionModels.includes(oai_settings.openai_model);
const isQuiet = type === 'quiet';
const stream = oai_settings.stream_openai && !isQuiet && !isScale && !isAI21;


@ -93,9 +93,11 @@ let power_user = {
always_force_name2: false,
user_prompt_bias: '',
show_user_prompt_bias: true,
multigen: false,
multigen_first_chunk: 50,
multigen_next_chunks: 30,
auto_continue: {
enabled: false,
allow_chat_completions: false,
target_length: 400,
},
markdown_escape_strings: '',
ui_mode: ui_mode.POWER,
@ -848,9 +850,9 @@ function loadPowerUserSettings(settings, data) {
$("#noShadowsmode").prop("checked", power_user.noShadows); $("#noShadowsmode").prop("checked", power_user.noShadows);
$("#start_reply_with").val(power_user.user_prompt_bias); $("#start_reply_with").val(power_user.user_prompt_bias);
$("#chat-show-reply-prefix-checkbox").prop("checked", power_user.show_user_prompt_bias); $("#chat-show-reply-prefix-checkbox").prop("checked", power_user.show_user_prompt_bias);
$("#multigen").prop("checked", power_user.multigen); $("#auto_continue_enabled").prop("checked", power_user.auto_continue.enabled);
$("#multigen_first_chunk").val(power_user.multigen_first_chunk); $("#auto_continue_allow_chat_completions").prop("checked", power_user.auto_continue.allow_chat_completions);
$("#multigen_next_chunks").val(power_user.multigen_next_chunks); $("#auto_continue_target_length").val(power_user.auto_continue.target_length);
$("#play_message_sound").prop("checked", power_user.play_message_sound); $("#play_message_sound").prop("checked", power_user.play_message_sound);
$("#play_sound_unfocused").prop("checked", power_user.play_sound_unfocused); $("#play_sound_unfocused").prop("checked", power_user.play_sound_unfocused);
$("#never_resize_avatars").prop("checked", power_user.never_resize_avatars); $("#never_resize_avatars").prop("checked", power_user.never_resize_avatars);
@ -1816,8 +1818,18 @@ $(document).ready(() => {
saveSettingsDebounced();
})
$("#multigen").change(function () {
power_user.multigen = $(this).prop("checked");
$("#auto_continue_enabled").on('change', function () {
power_user.auto_continue.enabled = $(this).prop("checked");
saveSettingsDebounced();
});
$("#auto_continue_allow_chat_completions").on('change', function () {
power_user.auto_continue.allow_chat_completions = !!$(this).prop('checked');
saveSettingsDebounced();
});
$("#auto_continue_target_length").on('input', function () {
power_user.auto_continue.target_length = Number($(this).val());
saveSettingsDebounced();
});
@ -1986,16 +1998,6 @@ $(document).ready(() => {
saveSettingsDebounced();
});
$("#multigen_first_chunk").on('input', function () {
power_user.multigen_first_chunk = Number($(this).val());
saveSettingsDebounced();
});
$("#multigen_next_chunks").on('input', function () {
power_user.multigen_next_chunks = Number($(this).val());
saveSettingsDebounced();
});
$('#auto_swipe').on('input', function () {
power_user.auto_swipe = !!$(this).prop('checked');
saveSettingsDebounced();
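Taken together, the three multigen fields give way to a nested auto_continue object; a sketch of the resulting settings shape and the controls wired above:
// Shape of the settings block that replaces the multigen fields.
power_user.auto_continue = {
    enabled: false,                // checkbox #auto_continue_enabled
    allow_chat_completions: false, // checkbox #auto_continue_allow_chat_completions
    target_length: 400,            // number input #auto_continue_target_length
};
saveSettingsDebounced(); // persisted the same way as the other power-user options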


@ -325,7 +325,6 @@ async function sendMessageAs(_, text) {
const message = {
name: name,
is_user: false,
is_name: true,
is_system: isSystem,
send_date: getMessageTimeStamp(),
mes: substituteParams(mesText),
@ -357,7 +356,6 @@ async function sendNarratorMessage(_, text) {
const message = {
name: name,
is_user: false,
is_name: false,
is_system: isSystem,
send_date: getMessageTimeStamp(),
mes: substituteParams(text.trim()),
@ -384,7 +382,6 @@ async function sendCommentMessage(_, text) {
const message = {
name: COMMENT_NAME_DEFAULT,
is_user: false,
is_name: true,
is_system: true,
send_date: getMessageTimeStamp(),
mes: substituteParams(text.trim()),


@ -2436,18 +2436,18 @@ input[type="range"]::-webkit-slider-thumb {
#anchor_checkbox label,
#power-user-option-checkboxes label,
.checkbox_label,
.multigen_settings_block {
.auto_continue_settings_block {
display: flex;
flex-direction: row;
column-gap: 5px;
align-items: center;
}
.multigen_settings_block {
.auto_continue_settings_block {
margin-top: 10px;
}
.multigen_settings_block label {
.auto_continue_settings_block label {
flex: 1;
display: flex;
flex-direction: column;

server.js

@ -10,7 +10,7 @@ const readline = require('readline');
const util = require('util');
const { Readable } = require('stream');
const { finished } = require('stream/promises');
const { TextEncoder, TextDecoder } = require('util');
const { TextDecoder } = require('util');
// cli/fs related library imports
const open = require('open');
@ -40,14 +40,11 @@ const json5 = require('json5');
const WebSocket = require('ws');
// image processing related library imports
const exif = require('piexifjs');
const encode = require('png-chunks-encode');
const extract = require('png-chunks-extract');
const jimp = require('jimp');
const mime = require('mime-types');
const PNGtext = require('png-chunk-text');
const webp = require('webp-converter');
const yauzl = require('yauzl');
// tokenizing related library imports
const { SentencePieceProcessor } = require("@agnai/sentencepiece-js");
@ -65,10 +62,9 @@ util.inspect.defaultOptions.maxStringLength = null;
const basicAuthMiddleware = require('./src/middleware/basicAuthMiddleware');
const characterCardParser = require('./src/character-card-parser.js');
const contentManager = require('./src/content-manager');
const novelai = require('./src/novelai');
const statsHelpers = require('./statsHelpers.js');
const { writeSecret, readSecret, readSecretState, migrateSecrets, SECRET_KEYS, getAllSecrets } = require('./src/secrets');
const { delay, getVersion } = require('./src/util');
const { delay, getVersion, getImageBuffers } = require('./src/util');
// Work around a node v20.0.0, v20.1.0, and v20.2.0 bug. The issue was fixed in v20.3.0.
// https://github.com/nodejs/node/issues/47822#issuecomment-1564708870
@ -109,8 +105,6 @@ const app = express();
app.use(compression());
app.use(responseTime());
const utf8Encode = new TextEncoder();
// impoort from statsHelpers.js
const config = require(path.join(process.cwd(), './config.conf'));
@ -133,7 +127,6 @@ const enableExtensions = config.enableExtensions;
const listen = config.listen;
const allowKeysExposure = config.allowKeysExposure;
const API_NOVELAI = "https://api.novelai.net";
const API_OPENAI = "https://api.openai.com/v1";
const API_CLAUDE = "https://api.anthropic.com/v1";
@ -242,6 +235,35 @@ function countClaudeTokens(tokenizer, messages) {
const tokenizersCache = {};
/**
* @type {import('@dqbd/tiktoken').TiktokenModel[]}
*/
const textCompletionModels = [
"text-davinci-003",
"text-davinci-002",
"text-davinci-001",
"text-curie-001",
"text-babbage-001",
"text-ada-001",
"code-davinci-002",
"code-davinci-001",
"code-cushman-002",
"code-cushman-001",
"text-davinci-edit-001",
"code-davinci-edit-001",
"text-embedding-ada-002",
"text-similarity-davinci-001",
"text-similarity-curie-001",
"text-similarity-babbage-001",
"text-similarity-ada-001",
"text-search-davinci-doc-001",
"text-search-curie-doc-001",
"text-search-babbage-doc-001",
"text-search-ada-doc-001",
"code-search-babbage-code-001",
"code-search-ada-code-001",
];
function getTokenizerModel(requestModel) {
if (requestModel.includes('claude')) {
return 'claude';
@ -259,7 +281,7 @@ function getTokenizerModel(requestModel) {
return 'gpt-3.5-turbo';
}
if (requestModel.startsWith('text-') || requestModel.startsWith('code-')) {
if (textCompletionModels.includes(requestModel)) {
return requestModel;
}
@ -1097,7 +1119,7 @@ app.post("/renamecharacter", jsonParser, async function (request, response) {
try {
// Read old file, replace name int it
const rawOldData = await charaRead(oldAvatarPath);
if (rawOldData === false || rawOldData === undefined) throw new Error("Failed to read character file");
if (rawOldData === undefined) throw new Error("Failed to read character file");
const oldData = getCharaCardV2(json5.parse(rawOldData));
_.set(oldData, 'data.name', newName);
@ -1344,7 +1366,7 @@ const calculateDataSize = (data) => {
const processCharacter = async (item, i) => {
try {
const img_data = await charaRead(charactersPath + item);
if (img_data === false || img_data === undefined) throw new Error("Failed to read character file");
if (img_data === undefined) throw new Error("Failed to read character file");
let jsonObject = getCharaCardV2(json5.parse(img_data));
jsonObject.avatar = item;
@ -1853,162 +1875,6 @@ function getImages(path) {
.sort(Intl.Collator().compare);
}
//***********Novel.ai API
app.post("/getstatus_novelai", jsonParser, async function (request, response_getstatus_novel) {
if (!request.body) return response_getstatus_novel.sendStatus(400);
const api_key_novel = readSecret(SECRET_KEYS.NOVEL);
if (!api_key_novel) {
return response_getstatus_novel.sendStatus(401);
}
try {
const response = await fetch(API_NOVELAI + "/user/subscription", {
method: 'GET',
headers: {
'Content-Type': 'application/json',
'Authorization': "Bearer " + api_key_novel,
},
});
if (response.ok) {
const data = await response.json();
return response_getstatus_novel.send(data);
} else if (response.status == 401) {
console.log('NovelAI Access Token is incorrect.');
return response_getstatus_novel.send({ error: true });
}
else {
console.log('NovelAI returned an error:', response.statusText);
return response_getstatus_novel.send({ error: true });
}
} catch (error) {
console.log(error);
return response_getstatus_novel.send({ error: true });
}
});
app.post("/generate_novelai", jsonParser, async function (request, response_generate_novel) {
if (!request.body) return response_generate_novel.sendStatus(400);
const api_key_novel = readSecret(SECRET_KEYS.NOVEL);
if (!api_key_novel) {
return response_generate_novel.sendStatus(401);
}
const controller = new AbortController();
request.socket.removeAllListeners('close');
request.socket.on('close', function () {
controller.abort();
});
const isNewModel = (request.body.model.includes('clio') || request.body.model.includes('kayra'));
const badWordsList = novelai.getBadWordsList(request.body.model);
// Add customized bad words for Clio and Kayra
if (isNewModel && Array.isArray(request.body.bad_words_ids)) {
for (const badWord of request.body.bad_words_ids) {
if (Array.isArray(badWord) && badWord.every(x => Number.isInteger(x))) {
badWordsList.push(badWord);
}
}
}
// Add default biases for dinkus and asterism
const logit_bias_exp = isNewModel ? novelai.logitBiasExp.slice() : [];
if (Array.isArray(logit_bias_exp) && Array.isArray(request.body.logit_bias_exp)) {
logit_bias_exp.push(...request.body.logit_bias_exp);
}
const data = {
"input": request.body.input,
"model": request.body.model,
"parameters": {
"use_string": request.body.use_string ?? true,
"temperature": request.body.temperature,
"max_length": request.body.max_length,
"min_length": request.body.min_length,
"tail_free_sampling": request.body.tail_free_sampling,
"repetition_penalty": request.body.repetition_penalty,
"repetition_penalty_range": request.body.repetition_penalty_range,
"repetition_penalty_slope": request.body.repetition_penalty_slope,
"repetition_penalty_frequency": request.body.repetition_penalty_frequency,
"repetition_penalty_presence": request.body.repetition_penalty_presence,
"repetition_penalty_whitelist": isNewModel ? novelai.repPenaltyAllowList : null,
"top_a": request.body.top_a,
"top_p": request.body.top_p,
"top_k": request.body.top_k,
"typical_p": request.body.typical_p,
"mirostat_lr": request.body.mirostat_lr,
"mirostat_tau": request.body.mirostat_tau,
"cfg_scale": request.body.cfg_scale,
"cfg_uc": request.body.cfg_uc,
"phrase_rep_pen": request.body.phrase_rep_pen,
"stop_sequences": request.body.stop_sequences,
"bad_words_ids": badWordsList,
"logit_bias_exp": logit_bias_exp,
"generate_until_sentence": request.body.generate_until_sentence,
"use_cache": request.body.use_cache,
"return_full_text": request.body.return_full_text,
"prefix": request.body.prefix,
"order": request.body.order
}
};
console.log(util.inspect(data, { depth: 4 }))
const args = {
body: JSON.stringify(data),
headers: { "Content-Type": "application/json", "Authorization": "Bearer " + api_key_novel },
signal: controller.signal,
};
try {
const url = request.body.streaming ? `${API_NOVELAI}/ai/generate-stream` : `${API_NOVELAI}/ai/generate`;
const response = await fetch(url, { method: 'POST', timeout: 0, ...args });
if (request.body.streaming) {
// Pipe remote SSE stream to Express response
response.body.pipe(response_generate_novel);
request.socket.on('close', function () {
if (response.body instanceof Readable) response.body.destroy(); // Close the remote stream
response_generate_novel.end(); // End the Express response
});
response.body.on('end', function () {
console.log("Streaming request finished");
response_generate_novel.end();
});
} else {
if (!response.ok) {
const text = await response.text();
let message = text;
console.log(`Novel API returned error: ${response.status} ${response.statusText} ${text}`);
try {
const data = JSON.parse(text);
message = data.message;
}
catch {
// ignore
}
return response_generate_novel.status(response.status).send({ error: { message } });
}
const data = await response.json();
console.log(data);
return response_generate_novel.send(data);
}
} catch (error) {
return response_generate_novel.send({ error: true });
}
});
app.post("/getallchatsofcharacter", jsonParser, function (request, response) { app.post("/getallchatsofcharacter", jsonParser, function (request, response) {
if (!request.body) return response.sendStatus(400); if (!request.body) return response.sendStatus(400);
@ -2182,26 +2048,13 @@ app.post("/importcharacter", urlencodedParser, async function (request, response
} else {
try {
var img_data = await charaRead(uploadPath, format);
if (img_data === false || img_data === undefined) throw new Error('Failed to read character data');
if (img_data === undefined) throw new Error('Failed to read character data');
let jsonData = json5.parse(img_data);
jsonData.name = sanitize(jsonData.data?.name || jsonData.name);
png_name = getPngName(jsonData.name);
if (format == 'webp') {
try {
let convertedPath = path.join(UPLOADS_PATH, path.basename(uploadPath, ".webp") + ".png")
await webp.dwebp(uploadPath, convertedPath, "-o");
fs.unlinkSync(uploadPath);
uploadPath = convertedPath;
}
catch {
console.error('WEBP image conversion failed. Using the default character image.');
uploadPath = defaultAvatarPath;
}
}
if (jsonData.spec !== undefined) {
console.log('Found a v2 character file.');
importRisuSprites(jsonData);
@ -2381,7 +2234,7 @@ app.post("/exportcharacter", jsonParser, async function (request, response) {
case 'json': {
try {
let json = await charaRead(filename);
if (json === false || json === undefined) return response.sendStatus(400);
if (json === undefined) return response.sendStatus(400);
let jsonObject = getCharaCardV2(json5.parse(json));
return response.type('json').send(jsonObject)
}
@ -2389,39 +2242,6 @@ app.post("/exportcharacter", jsonParser, async function (request, response) {
return response.sendStatus(400);
}
}
case 'webp': {
try {
let json = await charaRead(filename);
if (json === false || json === undefined) return response.sendStatus(400);
let stringByteArray = utf8Encode.encode(json).toString();
let inputWebpPath = path.join(UPLOADS_PATH, `${Date.now()}_input.webp`);
let outputWebpPath = path.join(UPLOADS_PATH, `${Date.now()}_output.webp`);
let metadataPath = path.join(UPLOADS_PATH, `${Date.now()}_metadata.exif`);
let metadata =
{
"Exif": {
[exif.ExifIFD.UserComment]: stringByteArray,
},
};
const exifString = exif.dump(metadata);
writeFileAtomicSync(metadataPath, exifString, 'binary');
await webp.cwebp(filename, inputWebpPath, '-q 95');
await webp.webpmux_add(inputWebpPath, outputWebpPath, metadataPath, 'exif');
response.sendFile(outputWebpPath, { root: process.cwd() }, () => {
fs.rmSync(inputWebpPath);
fs.rmSync(metadataPath);
fs.rmSync(outputWebpPath);
});
return;
}
catch (err) {
console.log(err);
return response.sendStatus(400);
}
}
}
return response.sendStatus(400);
@ -2479,7 +2299,6 @@ app.post("/importchat", urlencodedParser, function (request, response) {
(message) => ({
name: message.src.is_human ? user_name : ch_name,
is_user: message.src.is_human,
is_name: true,
send_date: humanizedISO8601DateTime(),
mes: message.text,
})
@ -2524,7 +2343,6 @@ app.post("/importchat", urlencodedParser, function (request, response) {
const userMessage = {
name: user_name,
is_user: true,
is_name: true,
send_date: humanizedISO8601DateTime(),
mes: arr[0],
};
@ -2534,7 +2352,6 @@ app.post("/importchat", urlencodedParser, function (request, response) {
const charMessage = {
name: ch_name,
is_user: false,
is_name: true,
send_date: humanizedISO8601DateTime(),
mes: arr[1],
};
@ -3598,7 +3415,7 @@ app.post("/generate_openai", jsonParser, function (request, response_generate_op
bodyParams['stop'] = request.body.stop;
}
const isTextCompletion = Boolean(request.body.model && (request.body.model.startsWith('text-') || request.body.model.startsWith('code-')));
const isTextCompletion = Boolean(request.body.model && textCompletionModels.includes(request.body.model));
const textPrompt = isTextCompletion ? convertChatMLPrompt(request.body.messages) : '';
const endpointUrl = isTextCompletion ? `${api_url}/completions` : `${api_url}/chat/completions`;
@ -3627,7 +3444,7 @@ app.post("/generate_openai", jsonParser, function (request, response_generate_op
"frequency_penalty": request.body.frequency_penalty, "frequency_penalty": request.body.frequency_penalty,
"top_p": request.body.top_p, "top_p": request.body.top_p,
"top_k": request.body.top_k, "top_k": request.body.top_k,
"stop": request.body.stop, "stop": isTextCompletion === false ? request.body.stop : undefined,
"logit_bias": request.body.logit_bias, "logit_bias": request.body.logit_bias,
...bodyParams, ...bodyParams,
}), }),
@ -4080,8 +3897,6 @@ const setupTasks = async function () {
contentManager.checkForNewContent();
cleanUploads();
await convertWebp();
[spp_llama, spp_nerd, spp_nerd_v2, claude_tokenizer] = await Promise.all([
loadSentencepieceTokenizer('src/sentencepiece/tokenizer.model'),
loadSentencepieceTokenizer('src/sentencepiece/nerdstash.model'),
@ -4141,47 +3956,6 @@ if (true === cliArguments.ssl) {
);
}
async function convertWebp() {
const files = fs.readdirSync(directories.characters).filter(e => e.endsWith(".webp"));
if (!files.length) {
return;
}
console.log(`${files.length} WEBP files will be automatically converted.`);
for (const file of files) {
try {
const source = path.join(directories.characters, file);
const dest = path.join(directories.characters, path.basename(file, ".webp") + ".png");
if (fs.existsSync(dest)) {
console.log(`${dest} already exists. Delete ${source} manually`);
continue;
}
console.log(`Read... ${source}`);
const data = await charaRead(source);
console.log(`Convert... ${source} -> ${dest}`);
await webp.dwebp(source, dest, "-o");
console.log(`Write... ${dest}`);
const success = await charaWrite(dest, data, path.parse(dest).name);
if (!success) {
console.log(`Failure on ${source} -> ${dest}`);
continue;
}
console.log(`Remove... ${source}`);
fs.rmSync(source);
} catch (err) {
console.log(err);
}
}
}
function backupSettings() {
const MAX_BACKUPS = 25;
@ -4264,136 +4038,6 @@ app.post('/viewsecrets', jsonParser, async (_, response) => {
}
});
app.post('/api/novelai/generate-image', jsonParser, async (request, response) => {
if (!request.body) {
return response.sendStatus(400);
}
const key = readSecret(SECRET_KEYS.NOVEL);
if (!key) {
return response.sendStatus(401);
}
try {
console.log('NAI Diffusion request:', request.body);
const generateUrl = `${API_NOVELAI}/ai/generate-image`;
const generateResult = await fetch(generateUrl, {
method: 'POST',
headers: {
'Authorization': `Bearer ${key}`,
'Content-Type': 'application/json',
},
body: JSON.stringify({
action: 'generate',
input: request.body.prompt,
model: request.body.model ?? 'nai-diffusion',
parameters: {
negative_prompt: request.body.negative_prompt ?? '',
height: request.body.height ?? 512,
width: request.body.width ?? 512,
scale: request.body.scale ?? 9,
seed: Math.floor(Math.random() * 9999999999),
sampler: request.body.sampler ?? 'k_dpmpp_2m',
steps: request.body.steps ?? 28,
n_samples: 1,
// NAI handholding for prompts
ucPreset: 0,
qualityToggle: false,
},
}),
});
if (!generateResult.ok) {
console.log('NovelAI returned an error.', generateResult.statusText);
return response.sendStatus(500);
}
const archiveBuffer = await generateResult.arrayBuffer();
const imageBuffer = await extractFileFromZipBuffer(archiveBuffer, '.png');
const originalBase64 = imageBuffer.toString('base64');
// No upscaling
if (isNaN(request.body.upscale_ratio) || request.body.upscale_ratio <= 1) {
return response.send(originalBase64);
}
try {
console.debug('Upscaling image...');
const upscaleUrl = `${API_NOVELAI}/ai/upscale`;
const upscaleResult = await fetch(upscaleUrl, {
method: 'POST',
headers: {
'Authorization': `Bearer ${key}`,
'Content-Type': 'application/json',
},
body: JSON.stringify({
image: originalBase64,
height: request.body.height,
width: request.body.width,
scale: request.body.upscale_ratio,
}),
});
if (!upscaleResult.ok) {
throw new Error('NovelAI returned an error.');
}
const upscaledArchiveBuffer = await upscaleResult.arrayBuffer();
const upscaledImageBuffer = await extractFileFromZipBuffer(upscaledArchiveBuffer, '.png');
const upscaledBase64 = upscaledImageBuffer.toString('base64');
return response.send(upscaledBase64);
} catch (error) {
console.warn('NovelAI generated an image, but upscaling failed. Returning original image.');
return response.send(originalBase64)
}
} catch (error) {
console.log(error);
return response.sendStatus(500);
}
});
app.post('/novel_tts', jsonParser, async (request, response) => {
const token = readSecret(SECRET_KEYS.NOVEL);
if (!token) {
return response.sendStatus(401);
}
const text = request.body.text;
const voice = request.body.voice;
if (!text || !voice) {
return response.sendStatus(400);
}
try {
const url = `${API_NOVELAI}/ai/generate-voice?text=${encodeURIComponent(text)}&voice=-1&seed=${encodeURIComponent(voice)}&opus=false&version=v2`;
const result = await fetch(url, {
method: 'GET',
headers: {
'Authorization': `Bearer ${token}`,
'Accept': 'audio/mpeg',
},
timeout: 0,
});
if (!result.ok) {
return response.sendStatus(result.status);
}
const chunks = await readAllChunks(result.body);
const buffer = Buffer.concat(chunks);
response.setHeader('Content-Type', 'audio/mpeg');
return response.send(buffer);
}
catch (error) {
console.error(error);
return response.sendStatus(500);
}
});
app.post('/delete_sprite', jsonParser, async (request, response) => {
const label = request.body.label;
const name = request.body.name;
@ -4551,45 +4195,6 @@ app.post('/import_custom', jsonParser, async (request, response) => {
}
});
/**
* Extracts a file with given extension from an ArrayBuffer containing a ZIP archive.
* @param {ArrayBuffer} archiveBuffer Buffer containing a ZIP archive
* @param {string} fileExtension File extension to look for
* @returns {Promise<Buffer>} Buffer containing the extracted file
*/
async function extractFileFromZipBuffer(archiveBuffer, fileExtension) {
return await new Promise((resolve, reject) => yauzl.fromBuffer(Buffer.from(archiveBuffer), { lazyEntries: true }, (err, zipfile) => {
if (err) {
reject(err);
}
zipfile.readEntry();
zipfile.on('entry', (entry) => {
if (entry.fileName.endsWith(fileExtension)) {
console.log(`Extracting ${entry.fileName}`);
zipfile.openReadStream(entry, (err, readStream) => {
if (err) {
reject(err);
} else {
const chunks = [];
readStream.on('data', (chunk) => {
chunks.push(chunk);
});
readStream.on('end', () => {
const buffer = Buffer.concat(chunks);
resolve(buffer);
zipfile.readEntry(); // Continue to the next entry
});
}
});
} else {
zipfile.readEntry();
}
});
}));
}
async function downloadChubLorebook(id) {
const result = await fetch('https://api.chub.ai/api/lorebooks/download', {
method: 'POST',
@ -4737,78 +4342,6 @@ function importRisuSprites(data) {
}
}
async function readAllChunks(readableStream) {
return new Promise((resolve, reject) => {
// Consume the readable stream
const chunks = [];
readableStream.on('data', (chunk) => {
chunks.push(chunk);
});
readableStream.on('end', () => {
//console.log('Finished reading the stream.');
resolve(chunks);
});
readableStream.on('error', (error) => {
console.error('Error while reading the stream:', error);
reject();
});
});
}
async function getImageBuffers(zipFilePath) {
return new Promise((resolve, reject) => {
// Check if the zip file exists
if (!fs.existsSync(zipFilePath)) {
reject(new Error('File not found'));
return;
}
const imageBuffers = [];
yauzl.open(zipFilePath, { lazyEntries: true }, (err, zipfile) => {
if (err) {
reject(err);
} else {
zipfile.readEntry();
zipfile.on('entry', (entry) => {
const mimeType = mime.lookup(entry.fileName);
if (mimeType && mimeType.startsWith('image/') && !entry.fileName.startsWith('__MACOSX')) {
console.log(`Extracting ${entry.fileName}`);
zipfile.openReadStream(entry, (err, readStream) => {
if (err) {
reject(err);
} else {
const chunks = [];
readStream.on('data', (chunk) => {
chunks.push(chunk);
});
readStream.on('end', () => {
imageBuffers.push([path.parse(entry.fileName).base, Buffer.concat(chunks)]);
zipfile.readEntry(); // Continue to the next entry
});
}
});
} else {
zipfile.readEntry(); // Continue to the next entry
}
});
zipfile.on('end', () => {
resolve(imageBuffers);
});
zipfile.on('error', (err) => {
reject(err);
});
}
});
});
}
/**
* This function extracts the extension information from the manifest file.
* @param {string} extensionPath - The path of the extension folder
@ -5248,6 +4781,9 @@ app.post('/get_character_assets_list', jsonParser, async (request, response) =>
}
});
// NovelAI generation
require('./src/novelai').registerEndpoints(app, jsonParser);
// Stable Diffusion generation
require('./src/stable-diffusion').registerEndpoints(app, jsonParser);
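The NovelAI routes now register themselves the same way the Stable Diffusion module does; a sketch of the pattern with a hypothetical module:
// src/example-module.js — hypothetical module following the same registration pattern.
function registerEndpoints(app, jsonParser) {
    app.post('/api/example/do-thing', jsonParser, async (request, response) => {
        if (!request.body) return response.sendStatus(400);
        return response.send({ ok: true });
    });
}

module.exports = { registerEndpoints };

// server.js side (mirroring the lines above):
// require('./src/example-module').registerEndpoints(app, jsonParser);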


@ -1,56 +1,12 @@
const fs = require('fs');
const json5 = require('json5');
const ExifReader = require('exifreader');
const extract = require('png-chunks-extract');
const PNGtext = require('png-chunk-text');
const utf8Decode = new TextDecoder('utf-8', { ignoreBOM: true });
const parse = async (cardUrl, format) => {
let fileFormat;
let fileFormat = format === undefined ? 'png' : format;
if (format === undefined) {
if (cardUrl.indexOf('.webp') !== -1)
fileFormat = 'webp';
else
fileFormat = 'png';
}
else
fileFormat = format;
switch (fileFormat) {
case 'webp':
try {
const exif_data = await ExifReader.load(fs.readFileSync(cardUrl));
let char_data;
if (exif_data['UserComment']['description']) {
let description = exif_data['UserComment']['description'];
if (description === 'Undefined' && exif_data['UserComment'].value && exif_data['UserComment'].value.length === 1) {
description = exif_data['UserComment'].value[0];
}
try {
json5.parse(description);
char_data = description;
} catch {
const byteArr = description.split(",").map(Number);
const uint8Array = new Uint8Array(byteArr);
const char_data_string = utf8Decode.decode(uint8Array);
char_data = char_data_string;
}
}
else {
console.log('No description found in EXIF data.');
return false;
}
return char_data;
}
catch (err) {
console.log(err);
return false;
}
case 'png':
const buffer = fs.readFileSync(cardUrl);
const chunks = extract(buffer);


@ -1,6 +1,14 @@
const fetch = require('node-fetch').default;
const util = require('util');
const { Readable } = require('stream');
const { readSecret, SECRET_KEYS } = require('./secrets');
const { readAllChunks, extractFileFromZipBuffer } = require('./util');
const API_NOVELAI = "https://api.novelai.net";
// Ban bracket generation, plus defaults
const badWordsList = [
[3], [49356], [1431], [31715], [34387], [20765], [30702], [10691], [49333], [1266],
[19438], [43145], [26523], [41471], [2936], [85, 85], [49332], [7286], [1115]
]
@ -38,7 +46,7 @@ const logitBiasExp = [
]
const hypeBotLogitBiasExp = [
{ "sequence": [8162], "bias": -0.12, "ensure_sequence_finish": false, "generate_once": false},
{ "sequence": [8162], "bias": -0.12, "ensure_sequence_finish": false, "generate_once": false },
{ "sequence": [46256, 224], "bias": -0.12, "ensure_sequence_finish": false, "generate_once": false }
];
@ -57,11 +65,297 @@ function getBadWordsList(model) {
return list.slice();
}
/**
* Registers NovelAI API endpoints.
* @param {import('express').Express} app - Express app
* @param {any} jsonParser - JSON parser middleware
*/
function registerEndpoints(app, jsonParser) {
app.post("/api/novelai/status", jsonParser, async function (req, res) {
if (!req.body) return res.sendStatus(400);
const api_key_novel = readSecret(SECRET_KEYS.NOVEL);
if (!api_key_novel) {
return res.sendStatus(401);
}
try {
const response = await fetch(API_NOVELAI + "/user/subscription", {
method: 'GET',
headers: {
'Content-Type': 'application/json',
'Authorization': "Bearer " + api_key_novel,
},
});
if (response.ok) {
const data = await response.json();
return res.send(data);
} else if (response.status == 401) {
console.log('NovelAI Access Token is incorrect.');
return res.send({ error: true });
}
else {
console.log('NovelAI returned an error:', response.statusText);
return res.send({ error: true });
}
} catch (error) {
console.log(error);
return res.send({ error: true });
}
});
app.post("/api/novelai/generate", jsonParser, async function (req, res) {
if (!req.body) return res.sendStatus(400);
const api_key_novel = readSecret(SECRET_KEYS.NOVEL);
if (!api_key_novel) {
return res.sendStatus(401);
}
const controller = new AbortController();
req.socket.removeAllListeners('close');
req.socket.on('close', function () {
controller.abort();
});
const isNewModel = (req.body.model.includes('clio') || req.body.model.includes('kayra'));
const badWordsList = getBadWordsList(req.body.model);
// Add customized bad words for Clio and Kayra
if (isNewModel && Array.isArray(req.body.bad_words_ids)) {
for (const badWord of req.body.bad_words_ids) {
if (Array.isArray(badWord) && badWord.every(x => Number.isInteger(x))) {
badWordsList.push(badWord);
}
}
}
// Add default biases for dinkus and asterism
const logit_bias_exp = isNewModel ? logitBiasExp.slice() : [];
if (Array.isArray(logit_bias_exp) && Array.isArray(req.body.logit_bias_exp)) {
logit_bias_exp.push(...req.body.logit_bias_exp);
}
const data = {
"input": req.body.input,
"model": req.body.model,
"parameters": {
"use_string": req.body.use_string ?? true,
"temperature": req.body.temperature,
"max_length": req.body.max_length,
"min_length": req.body.min_length,
"tail_free_sampling": req.body.tail_free_sampling,
"repetition_penalty": req.body.repetition_penalty,
"repetition_penalty_range": req.body.repetition_penalty_range,
"repetition_penalty_slope": req.body.repetition_penalty_slope,
"repetition_penalty_frequency": req.body.repetition_penalty_frequency,
"repetition_penalty_presence": req.body.repetition_penalty_presence,
"repetition_penalty_whitelist": isNewModel ? repPenaltyAllowList : null,
"top_a": req.body.top_a,
"top_p": req.body.top_p,
"top_k": req.body.top_k,
"typical_p": req.body.typical_p,
"mirostat_lr": req.body.mirostat_lr,
"mirostat_tau": req.body.mirostat_tau,
"cfg_scale": req.body.cfg_scale,
"cfg_uc": req.body.cfg_uc,
"phrase_rep_pen": req.body.phrase_rep_pen,
"stop_sequences": req.body.stop_sequences,
"bad_words_ids": badWordsList,
"logit_bias_exp": logit_bias_exp,
"generate_until_sentence": req.body.generate_until_sentence,
"use_cache": req.body.use_cache,
"return_full_text": req.body.return_full_text,
"prefix": req.body.prefix,
"order": req.body.order
}
};
console.log(util.inspect(data, { depth: 4 }))
const args = {
body: JSON.stringify(data),
headers: { "Content-Type": "application/json", "Authorization": "Bearer " + api_key_novel },
signal: controller.signal,
};
try {
const url = req.body.streaming ? `${API_NOVELAI}/ai/generate-stream` : `${API_NOVELAI}/ai/generate`;
const response = await fetch(url, { method: 'POST', timeout: 0, ...args });
if (req.body.streaming) {
// Pipe remote SSE stream to Express response
response.body.pipe(res);
req.socket.on('close', function () {
if (response.body instanceof Readable) response.body.destroy(); // Close the remote stream
res.end(); // End the Express response
});
response.body.on('end', function () {
console.log("Streaming request finished");
res.end();
});
} else {
if (!response.ok) {
const text = await response.text();
let message = text;
console.log(`Novel API returned error: ${response.status} ${response.statusText} ${text}`);
try {
const data = JSON.parse(text);
message = data.message;
}
catch {
// ignore
}
return res.status(response.status).send({ error: { message } });
}
const data = await response.json();
console.log(data);
return res.send(data);
}
} catch (error) {
return res.send({ error: true });
}
});
app.post('/api/novelai/generate-image', jsonParser, async (request, response) => {
if (!request.body) {
return response.sendStatus(400);
}
const key = readSecret(SECRET_KEYS.NOVEL);
if (!key) {
return response.sendStatus(401);
}
try {
console.log('NAI Diffusion request:', request.body);
const generateUrl = `${API_NOVELAI}/ai/generate-image`;
const generateResult = await fetch(generateUrl, {
method: 'POST',
headers: {
'Authorization': `Bearer ${key}`,
'Content-Type': 'application/json',
},
body: JSON.stringify({
action: 'generate',
input: request.body.prompt,
model: request.body.model ?? 'nai-diffusion',
parameters: {
negative_prompt: request.body.negative_prompt ?? '',
height: request.body.height ?? 512,
width: request.body.width ?? 512,
scale: request.body.scale ?? 9,
seed: Math.floor(Math.random() * 9999999999),
sampler: request.body.sampler ?? 'k_dpmpp_2m',
steps: request.body.steps ?? 28,
n_samples: 1,
// NAI handholding for prompts
ucPreset: 0,
qualityToggle: false,
},
}),
});
if (!generateResult.ok) {
console.log('NovelAI returned an error.', generateResult.statusText);
return response.sendStatus(500);
}
const archiveBuffer = await generateResult.arrayBuffer();
const imageBuffer = await extractFileFromZipBuffer(archiveBuffer, '.png');
const originalBase64 = imageBuffer.toString('base64');
// No upscaling
if (isNaN(request.body.upscale_ratio) || request.body.upscale_ratio <= 1) {
return response.send(originalBase64);
}
try {
console.debug('Upscaling image...');
const upscaleUrl = `${API_NOVELAI}/ai/upscale`;
const upscaleResult = await fetch(upscaleUrl, {
method: 'POST',
headers: {
'Authorization': `Bearer ${key}`,
'Content-Type': 'application/json',
},
body: JSON.stringify({
image: originalBase64,
height: request.body.height,
width: request.body.width,
scale: request.body.upscale_ratio,
}),
});
if (!upscaleResult.ok) {
throw new Error('NovelAI returned an error.');
}
const upscaledArchiveBuffer = await upscaleResult.arrayBuffer();
const upscaledImageBuffer = await extractFileFromZipBuffer(upscaledArchiveBuffer, '.png');
const upscaledBase64 = upscaledImageBuffer.toString('base64');
return response.send(upscaledBase64);
} catch (error) {
console.warn('NovelAI generated an image, but upscaling failed. Returning original image.');
return response.send(originalBase64)
}
} catch (error) {
console.log(error);
return response.sendStatus(500);
}
});
app.post('/api/novelai/generate-voice', jsonParser, async (request, response) => {
const token = readSecret(SECRET_KEYS.NOVEL);
if (!token) {
return response.sendStatus(401);
}
const text = request.body.text;
const voice = request.body.voice;
if (!text || !voice) {
return response.sendStatus(400);
}
try {
const url = `${API_NOVELAI}/ai/generate-voice?text=${encodeURIComponent(text)}&voice=-1&seed=${encodeURIComponent(voice)}&opus=false&version=v2`;
const result = await fetch(url, {
method: 'GET',
headers: {
'Authorization': `Bearer ${token}`,
'Accept': 'audio/mpeg',
},
timeout: 0,
});
if (!result.ok) {
return response.sendStatus(result.status);
}
const chunks = await readAllChunks(result.body);
const buffer = Buffer.concat(chunks);
response.setHeader('Content-Type', 'audio/mpeg');
return response.send(buffer);
}
catch (error) {
console.error(error);
return response.sendStatus(500);
}
});
}
module.exports = {
badWordsList,
registerEndpoints,
repPenaltyAllowList,
logitBiasExp,
hypeBotBadWordsList,
hypeBotLogitBiasExp,
getBadWordsList,
};


@ -1,7 +1,10 @@
const path = require('path');
const fs = require('fs');
const child_process = require('child_process');
const commandExistsSync = require('command-exists').sync;
const _ = require('lodash');
const yauzl = require('yauzl');
const mime = require('mime-types');
/**
* Returns the config object from the config.conf file.
@ -77,10 +80,133 @@ function delay(ms) {
return new Promise(resolve => setTimeout(resolve, ms));
}
/**
* Extracts a file with given extension from an ArrayBuffer containing a ZIP archive.
* @param {ArrayBuffer} archiveBuffer Buffer containing a ZIP archive
* @param {string} fileExtension File extension to look for
* @returns {Promise<Buffer>} Buffer containing the extracted file
*/
async function extractFileFromZipBuffer(archiveBuffer, fileExtension) {
return await new Promise((resolve, reject) => yauzl.fromBuffer(Buffer.from(archiveBuffer), { lazyEntries: true }, (err, zipfile) => {
if (err) {
reject(err);
}
zipfile.readEntry();
zipfile.on('entry', (entry) => {
if (entry.fileName.endsWith(fileExtension)) {
console.log(`Extracting ${entry.fileName}`);
zipfile.openReadStream(entry, (err, readStream) => {
if (err) {
reject(err);
} else {
const chunks = [];
readStream.on('data', (chunk) => {
chunks.push(chunk);
});
readStream.on('end', () => {
const buffer = Buffer.concat(chunks);
resolve(buffer);
zipfile.readEntry(); // Continue to the next entry
});
}
});
} else {
zipfile.readEntry();
}
});
}));
}
/**
* Extracts all images from a ZIP archive.
* @param {string} zipFilePath Path to the ZIP archive
* @returns {Promise<[string, Buffer][]>} Array of image buffers
*/
async function getImageBuffers(zipFilePath) {
return new Promise((resolve, reject) => {
// Check if the zip file exists
if (!fs.existsSync(zipFilePath)) {
reject(new Error('File not found'));
return;
}
const imageBuffers = [];
yauzl.open(zipFilePath, { lazyEntries: true }, (err, zipfile) => {
if (err) {
reject(err);
} else {
zipfile.readEntry();
zipfile.on('entry', (entry) => {
const mimeType = mime.lookup(entry.fileName);
if (mimeType && mimeType.startsWith('image/') && !entry.fileName.startsWith('__MACOSX')) {
console.log(`Extracting ${entry.fileName}`);
zipfile.openReadStream(entry, (err, readStream) => {
if (err) {
reject(err);
} else {
const chunks = [];
readStream.on('data', (chunk) => {
chunks.push(chunk);
});
readStream.on('end', () => {
imageBuffers.push([path.parse(entry.fileName).base, Buffer.concat(chunks)]);
zipfile.readEntry(); // Continue to the next entry
});
}
});
} else {
zipfile.readEntry(); // Continue to the next entry
}
});
zipfile.on('end', () => {
resolve(imageBuffers);
});
zipfile.on('error', (err) => {
reject(err);
});
}
});
});
}
/**
* Gets all chunks of data from the given readable stream.
* @param {any} readableStream Readable stream to read from
* @returns {Promise<Buffer[]>} Array of chunks
*/
async function readAllChunks(readableStream) {
return new Promise((resolve, reject) => {
// Consume the readable stream
const chunks = [];
readableStream.on('data', (chunk) => {
chunks.push(chunk);
});
readableStream.on('end', () => {
//console.log('Finished reading the stream.');
resolve(chunks);
});
readableStream.on('error', (error) => {
console.error('Error while reading the stream:', error);
reject();
});
});
}
module.exports = {
getConfig,
getConfigValue,
getVersion,
getBasicAuthHeader,
extractFileFromZipBuffer,
getImageBuffers,
readAllChunks,
delay,
};
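For reference, a minimal usage sketch of the ZIP and stream helpers now exported from src/util.js (inputs hypothetical):
const { extractFileFromZipBuffer, readAllChunks, getImageBuffers } = require('./src/util');

// archiveBuffer: an ArrayBuffer containing a ZIP archive; someReadableStream: any readable stream (both hypothetical inputs).
async function demo(archiveBuffer, someReadableStream) {
    // Pull the first .png entry out of the in-memory ZIP archive.
    const pngBuffer = await extractFileFromZipBuffer(archiveBuffer, '.png');

    // Drain a readable stream into an array of chunks, then join them.
    const chunks = await readAllChunks(someReadableStream);
    const body = Buffer.concat(chunks);

    // List [fileName, Buffer] pairs for every image inside a ZIP on disk (hypothetical path).
    const images = await getImageBuffers('./uploads/sprites.zip');
    return { pngBuffer, body, images };
}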


@ -1,117 +0,0 @@
import fs from 'fs';
import jimp from 'jimp';
import extract from 'png-chunks-extract';
import encode from 'png-chunks-encode';
import PNGtext from 'png-chunk-text';
import ExifReader from 'exifreader';
import webp from 'webp-converter';
import path from 'path';
async function charaRead(img_url, input_format){
let format;
if(input_format === undefined){
if(img_url.indexOf('.webp') !== -1){
format = 'webp';
}else{
format = 'png';
}
}else{
format = input_format;
}
switch(format){
case 'webp':
const exif_data = await ExifReader.load(fs.readFileSync(img_url));
const char_data = exif_data['UserComment']['description'];
if (char_data === 'Undefined' && exif_data['UserComment'].value && exif_data['UserComment'].value.length === 1) {
return exif_data['UserComment'].value[0];
}
return char_data;
case 'png':
const buffer = fs.readFileSync(img_url);
const chunks = extract(buffer);
const textChunks = chunks.filter(function (chunk) {
return chunk.name === 'tEXt';
}).map(function (chunk) {
//console.log(text.decode(chunk.data));
return PNGtext.decode(chunk.data);
});
var base64DecodedData = Buffer.from(textChunks[0].text, 'base64').toString('utf8');
return base64DecodedData;//textChunks[0].text;
//console.log(textChunks[0].keyword); // 'hello'
//console.log(textChunks[0].text); // 'world'
default:
break;
}
}
async function charaWrite(img_url, data, target_img, response = undefined, mes = 'ok') {
try {
// Read the image, resize, and save it as a PNG into the buffer
webp
const rawImg = await jimp.read(img_url);
const image = await rawImg.cover(400, 600).getBufferAsync(jimp.MIME_PNG);
// Get the chunks
const chunks = extract(image);
const tEXtChunks = chunks.filter(chunk => chunk.create_date === 'tEXt');
// Remove all existing tEXt chunks
for (let tEXtChunk of tEXtChunks) {
chunks.splice(chunks.indexOf(tEXtChunk), 1);
}
// Add new chunks before the IEND chunk
const base64EncodedData = Buffer.from(data, 'utf8').toString('base64');
chunks.splice(-1, 0, PNGtext.encode('chara', base64EncodedData));
//chunks.splice(-1, 0, text.encode('lorem', 'ipsum'));
fs.writeFileSync(target_img, new Buffer.from(encode(chunks)));
if (response !== undefined) response.send(mes);
return true;
} catch (err) {
console.log(err);
if (response !== undefined) response.status(500).send(err);
return false;
}
}
(async function() {
const spath = process.argv[2]
const dpath = process.argv[3] || spath
const files = fs.readdirSync(spath).filter(e => e.endsWith(".webp"))
if (!files.length) {
console.log("Nothing to convert.")
return
}
try { fs.mkdirSync(dpath) } catch {}
for(const f of files) {
const source = path.join(spath, f),
dest = path.join(dpath, path.basename(f, ".webp") + ".png")
console.log(`Read... ${source}`)
const data = await charaRead(source)
console.log(`Convert... ${source} -> ${dest}`)
await webp.dwebp(source, dest, "-o")
console.log(`Write... ${dest}`)
const success = await charaWrite(dest, data, path.parse(dest).name);
if (!success) {
console.log(`Failure on ${source} -> ${dest}`);
continue;
}
console.log(`Remove... ${source}`)
fs.rmSync(source)
}
})()


@ -1,10 +0,0 @@
{
"dependencies": {
"exifreader": "^4.12.0",
"jimp": "^0.22.7",
"png-chunk-text": "^1.0.0",
"png-chunks-encode": "^1.0.0",
"png-chunks-extract": "^1.0.0",
"webp-converter": "^2.3.3"
}
}