Mirror of https://github.com/KoboldAI/KoboldAI-Client.git (synced 2025-06-05 21:59:24 +02:00)

Merge remote-tracking branch 'remotes/origin/vars-rename' into UI2
1  .gitattributes (vendored)
@@ -1,2 +1,3 @@
*.min.lua linguist-vendored
*documentation.html linguist-vendored
/static/swagger-ui/* linguist-vendored
2  .gitignore (vendored)
@@ -25,6 +25,8 @@ softprompts
models
!models/models go here.txt
Uninstall
flask_session
accelerate-disk-cache
.ipynb_checkpoints

# Ignore PyCharm project files.
@@ -11,11 +11,15 @@ IF EXIST "Uninstall\unins000.exe" (
start Uninstall\unins000.exe
exit
) ELSE (
echo This will remove all KoboldAI folders that do not contain user data
pause
GOTO UNINSTALL
echo This will remove all KoboldAI folders that do not contain user data.
echo DO NOT CONTINUE IF KOBOLDAI IS NOT IN ITS OWN FOLDER! OTHERWISE YOUR OTHER DATA IN THIS FOLDER WILL BE DELETED AS WELL!
pause
set /P D=Type DELETE if you wish to continue the uninstallation:
)

IF %D%==DELETE GOTO UNINSTALL
exit

:UNINSTALL
echo Uninstallation in progress, please wait...
set DM=Y
3572  aiserver.py (file diff suppressed because it is too large)
13  bridge.lua
@@ -165,7 +165,7 @@ return function(_python, _bridged)
---@field num_outputs integer
---@field feedback string
---@field is_config_file_open boolean
local kobold = setmetatable({API_VERSION = 1.1}, metawrapper)
local kobold = setmetatable({API_VERSION = 1.2}, metawrapper)
local KoboldLib_mt = setmetatable({}, metawrapper)
local KoboldLib_getters = setmetatable({}, metawrapper)
local KoboldLib_setters = setmetatable({}, metawrapper)
@@ -505,6 +505,7 @@ return function(_python, _bridged)
elseif entries.name == "KoboldWorldInfoEntry" then
_entries = {entries}
else
_entries = {}
for k, v in pairs(entries) do
if type(v) == "table" and v.name == "KoboldWorldInfoEntry" and v:is_valid() then
_entries[k] = v.uid
@@ -725,11 +726,11 @@ return function(_python, _bridged)
if k == "content" then
if rawget(t, "_num") == 0 then
if bridged.koboldai_vars.gamestarted then
local prompt = koboldbridge.userstate == "genmod" and bridged.vars._prompt or bridged.koboldai_vars.prompt
local prompt = koboldbridge.userstate == "genmod" and bridged.koboldai_vars._prompt or bridged.koboldai_vars.prompt
return prompt
end
end
local actions = koboldbridge.userstate == "genmod" and bridged.vars._actions or bridged.koboldai_vars.actions
local actions = koboldbridge.userstate == "genmod" and bridged.koboldai_vars._actions or bridged.koboldai_vars.actions
return _python.as_attrgetter(actions).get(math.tointeger(rawget(t, "_num")) - 1)
end
end
@@ -751,7 +752,7 @@ return function(_python, _bridged)
error("Attempted to set the prompt chunk's content to the empty string; this is not allowed")
return
end
local actions = koboldbridge.userstate == "genmod" and bridged.vars._actions or bridged.koboldai_vars.actions
local actions = koboldbridge.userstate == "genmod" and bridged.koboldai_vars._actions or bridged.koboldai_vars.actions
if _k ~= 0 and _python.as_attrgetter(actions).get(_k-1) == nil then
return
end
@@ -776,7 +777,7 @@ return function(_python, _bridged)

---@return fun(): KoboldStoryChunk, table, nil
function KoboldStory:forward_iter()
local actions = koboldbridge.userstate == "genmod" and bridged.vars._actions or bridged.koboldai_vars.actions
local actions = koboldbridge.userstate == "genmod" and bridged.koboldai_vars._actions or bridged.koboldai_vars.actions
local nxt, iterator = _python.iter(actions)
local run_once = false
local function f()
@@ -804,7 +805,7 @@ return function(_python, _bridged)

---@return fun(): KoboldStoryChunk, table, nil
function KoboldStory:reverse_iter()
local actions = koboldbridge.userstate == "genmod" and bridged.vars._actions or bridged.koboldai_vars.actions
local actions = koboldbridge.userstate == "genmod" and bridged.koboldai_vars._actions or bridged.koboldai_vars.actions
local nxt, iterator = _python.iter(_python.builtins.reversed(actions))
local last_run = false
local function f()
@@ -41,7 +41,7 @@
"\n",
"For more information about KoboldAI check our our Github readme : https://github.com/KoboldAI/KoboldAI-Client/blob/main/readme.md\n",
"\n",
"For the larger AI models (That are typically more coherent) check out our [TPU edition](https://colab.research.google.com/github/KoboldAI/KoboldAI-Client/blob/main/colab/TPU.ipynb)"
"For the larger AI models (That are typically more coherent) check out our **[TPU edition](https://colab.research.google.com/github/KoboldAI/KoboldAI-Client/blob/main/colab/TPU.ipynb)**!"
]
},
{
@@ -65,23 +65,56 @@
"cellView": "form"
},
"source": [
"#@title <b><-- Click this to start KoboldAI</b>\n",
"#@title <b><-- Select your model below and then click this to start KoboldAI</b>\n",
"#@markdown You can find a description of the models below along with instructions on how to start KoboldAI.\n",
"\n",
"Model = \"KoboldAI/fairseq-dense-2.7B-Nerys\" #@param [\"KoboldAI/fairseq-dense-2.7B-Nerys\", \"KoboldAI/GPT-Neo-2.7B-Janeway\", \"KoboldAI/GPT-Neo-2.7B-AID\", \"KoboldAI/GPT-Neo-2.7B-Picard\", \"KoboldAI/GPT-Neo-2.7B-Horni-LN\", \"KoboldAI/GPT-Neo-2.7B-Horni\", \"KoboldAI/GPT-Neo-2.7B-Shinen\", \"EleutherAI/gpt-neo-2.7B\"] {allow-input: true}\n",
"Model = \"Nerys 2.7B\" #@param [\"Nerys 2.7B\", \"Janeway 2.7B\", \"Picard 2.7B\", \"AID 2.7B\", \"Horni LN 2.7B\", \"Horni 2.7B\", \"Shinen 2.7B\", \"Neo 2.7B\"] {allow-input: true}\n",
"Version = \"Official\" #@param [\"Official\", \"United\"] {allow-input: true}\n",
|
||||
"Provider = \"Localtunnel\" #@param [\"Localtunnel\", \"Cloudflare\"]\n",
|
||||
"Provider = \"Cloudflare\" #@param [\"Localtunnel\", \"Cloudflare\"]\n",
|
||||
"\n",
|
||||
"!nvidia-smi\n",
|
||||
"from google.colab import drive\n",
|
||||
"drive.mount('/content/drive/')\n",
|
||||
"\n",
|
||||
"if Model == \"Nerys 2.7B\":\n",
|
||||
" Model = \"KoboldAI/fairseq-dense-2.7B-Nerys\"\n",
|
||||
" path = \"\"\n",
|
||||
" download = \"\"\n",
|
||||
"elif Model == \"Janeway 2.7B\":\n",
|
||||
" Model = \"KoboldAI/GPT-Neo-2.7B-Janeway\"\n",
|
||||
" path = \"\"\n",
|
||||
" download = \"\"\n",
|
||||
"elif Model == \"Picard 2.7B\":\n",
|
||||
" Model = \"KoboldAI/GPT-Neo-2.7B-Picard\"\n",
|
||||
" path = \"\"\n",
|
||||
" download = \"\"\n",
|
||||
"elif Model == \"AID 2.7B\":\n",
|
||||
" Model = \"KoboldAI/GPT-Neo-2.7B-AID\"\n",
|
||||
" path = \"\"\n",
|
||||
" download = \"\"\n",
|
||||
"elif Model == \"Horni LN 2.7B\":\n",
|
||||
" Model = \"KoboldAI/GPT-Neo-2.7B-Horni-LN\"\n",
|
||||
" path = \"\"\n",
|
||||
" download = \"\"\n",
|
||||
"elif Model == \"Horni 2.7B\":\n",
|
||||
" Model = \"KoboldAI/GPT-Neo-2.7B-Horni\"\n",
|
||||
" path = \"\"\n",
|
||||
" download = \"\"\n",
|
||||
"elif Model == \"Shinen 2.7B\":\n",
|
||||
" Model = \"KoboldAI/GPT-Neo-2.7B-Shinen\"\n",
|
||||
" path = \"\"\n",
|
||||
" download = \"\"\n",
|
||||
"elif Model == \"Neo 2.7B\":\n",
|
||||
" Model = \"EleutherAI/gpt-neo-2.7B\"\n",
|
||||
" path = \"\"\n",
|
||||
" download = \"\"\n",
|
||||
"\n",
|
||||
"if Provider == \"Localtunnel\":\n",
|
||||
" tunnel = \"--localtunnel yes\"\n",
|
||||
"else:\n",
|
||||
" tunnel = \"\"\n",
|
||||
"\n",
|
||||
"!wget https://henk.tech/ckds -O - | bash /dev/stdin -m $Model -g $Version $tunnel"
|
||||
"!wget https://koboldai.org/ckds -O - | bash /dev/stdin -m $Model -g $Version $tunnel"
|
||||
],
|
||||
"execution_count": null,
|
||||
"outputs": []
|
||||
@@ -92,27 +125,25 @@
"# GPU Edition Model Descriptions\n",
"| Model | Size | Style | Description |\n",
"| --- | --- | --- | --- |\n",
"| [Fairseq-Dense-2.7B-Nerys](https://huggingface.co/KoboldAI/fairseq-dense-2.7B-Nerys) by Mr Seeker | 2.7B | Novel/Adventure | Nerys is a hybrid model based on Pike (A newer Janeway), on top of the Pike dataset you also get some Light Novels, Adventure mode support and a little bit of shinen thrown in the mix. The end result is a very diverse model that is heavily biased towards SFW novel writing, but one that can go beyond its novel training and make for an excellent adventure model to. Adventure mode is best played from a second person perspective, but can be played in first or third person as well. Novel writing can be done best from the first or third person. |\n",
"| [GPT-Neo-2.7B-Janeway](https://huggingface.co/KoboldAI/GPT-Neo-2.7B-Janeway) by Mr Seeker | 2.7B | Novel | Janeway is a model created from Picard's dataset combined with a brand new collection of ebooks. This model is trained on 20% more content than Picard and has been trained on literature from various genres. Although the model is mainly focussed on SFW, romantic scenes might involve a degree of nudity. |\n",
"| [GPT-Neo-2.7B-Picard](https://huggingface.co/KoboldAI/GPT-Neo-2.7B-Picard) by Mr Seeker | 2.7B | Novel | Picard is a model trained for SFW Novels based on GPT-Neo-2.7B. It is focused on Novel style writing without the NSFW bias. While the name suggests a sci-fi model this model is designed for Novels of a variety of genre's. It is meant to be used in KoboldAI's regular mode. |\n",
"| [GPT-Neo-2.7B-AID](https://huggingface.co/KoboldAI/GPT-Neo-2.7B-AID) by melastacho | 2.7B | Adventure | Also know as Adventure 2.7B this is a clone of the AI Dungeon Classic model and is best known for the epic wackey adventures that AI Dungeon Classic players love. |\n",
"| [GPT-Neo-2.7B-Horni-LN](https://huggingface.co/KoboldAI/GPT-Neo-2.7B-Horni-LN) by finetune | 2.7B | Novel | This model is based on GPT-Neo-2.7B-Horni and retains its NSFW knowledge, but was then further biased towards SFW novel stories. If you seek a balance between a SFW Novel model and a NSFW model this model should be a good choice. |\n",
"| [GPT-Neo-2.7B-Horni](https://huggingface.co/KoboldAI/GPT-Neo-2.7B-Horni) by finetune | 2.7B | NSFW | This model is tuned on Literotica to produce a Novel style model biased towards NSFW content. Can still be used for SFW stories but will have a bias towards NSFW content. It is meant to be used in KoboldAI's regular mode. |\n",
"| [GPT-Neo-2.7B-Shinen](https://huggingface.co/KoboldAI/GPT-Neo-2.7B-Shinen) by Mr Seeker | 2.7B | NSFW | Shinen is an alternative to the Horni model designed to be more explicit. If Horni is to tame for you shinen might produce better results. While it is a Novel model it is unsuitable for SFW stories due to its heavy NSFW bias. Shinen will not hold back. It is meant to be used in KoboldAI's regular mode. |\n",
"| [GPT-Neo-2.7B](https://huggingface.co/EleutherAI/gpt-neo-2.7B) by EleutherAI | 2.7B | Generic | This is the base model for all the other 2.7B models, it is best used when you have a use case that we have no other models available for, such as writing blog articles or programming. It can also be a good basis for the experience of some of the softprompts if your softprompt is not about a subject the other models cover. |\n",
"| [Nerys 2.7B](https://huggingface.co/KoboldAI/fairseq-dense-2.7B-Nerys) by Mr Seeker | 2.7B | Novel/Adventure | Nerys is a hybrid model based on Pike (A newer Janeway), on top of the Pike dataset you also get some Light Novels, Adventure mode support and a little bit of Shinen thrown in the mix. The end result is a very diverse model that is heavily biased towards SFW novel writing, but one that can go beyond its novel training and make for an excellent adventure model to. Adventure mode is best played from a second person perspective, but can be played in first or third person as well. Novel writing can be done best from the first or third person. |\n",
"| [Janeway 2.7B](https://huggingface.co/KoboldAI/GPT-Neo-2.7B-Janeway) by Mr Seeker | 2.7B | Novel | Janeway is a model created from Picard's dataset combined with a brand new collection of ebooks. This model is trained on 20% more content than Picard and has been trained on literature from various genres. Although the model is mainly focussed on SFW, romantic scenes might involve a degree of nudity. |\n",
"| [Picard 2.7B](https://huggingface.co/KoboldAI/GPT-Neo-2.7B-Picard) by Mr Seeker | 2.7B | Novel | Picard is a model trained for SFW Novels based on Neo 2.7B. It is focused on Novel style writing without the NSFW bias. While the name suggests a sci-fi model this model is designed for Novels of a variety of genre's. It is meant to be used in KoboldAI's regular mode. |\n",
"| [AID 2.7B](https://huggingface.co/KoboldAI/GPT-Neo-2.7B-AID) by melastacho | 2.7B | Adventure | Also know as Adventure 2.7B this is a clone of the AI Dungeon Classic model and is best known for the epic wackey adventures that AI Dungeon Classic players love. |\n",
"| [Horni LN 2.7B](https://huggingface.co/KoboldAI/GPT-Neo-2.7B-Horni-LN) by finetune | 2.7B | Novel | This model is based on Horni 2.7B and retains its NSFW knowledge, but was then further biased towards SFW novel stories. If you seek a balance between a SFW Novel model and a NSFW model this model should be a good choice. |\n",
"| [Horni 2.7B](https://huggingface.co/KoboldAI/GPT-Neo-2.7B-Horni) by finetune | 2.7B | NSFW | This model is tuned on Literotica to produce a Novel style model biased towards NSFW content. Can still be used for SFW stories but will have a bias towards NSFW content. It is meant to be used in KoboldAI's regular mode. |\n",
"| [Shinen 2.7B](https://huggingface.co/KoboldAI/GPT-Neo-2.7B-Shinen) by Mr Seeker | 2.7B | NSFW | Shinen is an alternative to the Horni model designed to be more explicit. If Horni is to tame for you Shinen might produce better results. While it is a Novel model it is unsuitable for SFW stories due to its heavy NSFW bias. Shinen will not hold back. It is meant to be used in KoboldAI's regular mode. |\n",
"| [Neo 2.7B](https://huggingface.co/EleutherAI/gpt-neo-2.7B) by EleutherAI | 2.7B | Generic | This is the base model for all the other 2.7B models, it is best used when you have a use case that we have no other models available for, such as writing blog articles or programming. It can also be a good basis for the experience of some of the softprompts if your softprompt is not about a subject the other models cover. |\n",
"\n",
"# [TPU Edition Model Descriptions](https://colab.research.google.com/github/KoboldAI/KoboldAI-Client/blob/main/colab/TPU.ipynb)\n",
"\n",
"| Model | Size | Style | Description |\n",
"| --- | --- | --- | --- |\n",
"| [Nerys](https://huggingface.co/KoboldAI/fairseq-dense-13B-Nerys) by Mr Seeker | 13B | Novel/Adventure | Nerys is a hybrid model based on Pike (A newer Janeway), on top of the Pike dataset you also get some Light Novels, Adventure mode support and a little bit of shinen thrown in the mix. The end result is a very diverse model that is heavily biased towards SFW novel writing, but one that can go beyond its novel training and make for an excellent adventure model to. Adventure mode is best played from a second person perspective, but can be played in first or third person as well. Novel writing can be done best from the first or third person. |\n",
"| [Nerys](https://huggingface.co/KoboldAI/fairseq-dense-13B-Nerys) by Mr Seeker | 13B | Novel/Adventure | Nerys is a hybrid model based on Pike (A newer Janeway), on top of the Pike dataset you also get some Light Novels, Adventure mode support and a little bit of Shinen thrown in the mix. The end result is a very diverse model that is heavily biased towards SFW novel writing, but one that can go beyond its novel training and make for an excellent adventure model to. Adventure mode is best played from a second person perspective, but can be played in first or third person as well. Novel writing can be done best from the first or third person. |\n",
"| [Janeway](https://huggingface.co/KoboldAI/fairseq-dense-13B-Janeway) by Mr Seeker | 13B | Novel | Janeway is a model created from Picard's dataset combined with a brand new collection of ebooks. This model is trained on 20% more content than Picard and has been trained on literature from various genres. Although the model is mainly focussed on SFW, romantic scenes might involve a degree of nudity. |\n",
"| [Shinen](https://huggingface.co/KoboldAI/fairseq-dense-13B-Shinen) by Mr Seeker | 13B | NSFW | Shinen is an NSFW model designed to be more explicit. Trained on a variety of stories from the website Sexstories it contains many different kinks. |\n",
"| [Skein](https://huggingface.co/KoboldAI/GPT-J-6B-Skein) by VE\\_FORBRYDERNE | 6B | Adventure | Skein is best used with Adventure mode enabled, it consists of a 4 times larger adventure dataset than the Adventure model making it excellent for text adventure gaming. On top of that it also consists of light novel training further expanding its knowledge and writing capabilities. It can be used with the You filter bias if you wish to write Novels with it, but dedicated Novel models can perform better for this task. |\n",
"| [Adventure](https://huggingface.co/KoboldAI/GPT-J-6B-Adventure) by VE\\_FORBRYDERNE | 6B | Adventure | Adventure is a 6B model designed to mimick the behavior of AI Dungeon. It is exclusively for Adventure Mode and can take you on the epic and wackey adventures that AI Dungeon players love. It also features the many tropes of AI Dungeon as it has been trained on very similar data. It must be used in second person (You). |\n",
"| [Lit](https://huggingface.co/hakurei/lit-6B) by Haru | 6B | NSFW | Lit is a great NSFW model trained by Haru on both a large set of Literotica stories and high quality novels along with tagging support. Creating a high quality model for your NSFW stories. This model is exclusively a novel model and is best used in third person. |\n",
"| [Convo](https://huggingface.co/hitomi-team/convo-6B) by Hitomi Team | 6B | Chatbot | Convo-6B is a GPT-J 6B model fine-tuned on a collection of high quality open source datasets which amount to 6 million messages. The primary goal of the model is to provide improved performance and generalization when generating multi-turn dialogue for characters that were not present from within the fine tuning data. The prompted performance has especially improved over the predecessor model [C1-6B](https://huggingface.co/hakurei/c1-6B). |\n",
"| [C1](https://huggingface.co/hakurei/c1-6B) by Haru | 6B | Chatbot | C1 has been trained on various internet chatrooms, it makes the basis for an interesting chatbot model and has been optimized to be used in the Chatmode. |\n",
"| Neo(X) by EleutherAI | 20B | Generic | NeoX is the largest EleutherAI model currently available, being a generic model it is not particularly trained towards anything and can do a variety of writing, Q&A and coding tasks. 20B's performance is closely compared to the 13B models and it is worth trying both especially if you have a task that does not involve english writing. Its behavior will be similar to the GPT-J-6B model since they are trained on the same dataset but with more sensitivity towards repetition penalty and with more knowledge. |\n",
"| [Fairseq Dense](https://huggingface.co/KoboldAI/fairseq-dense-13B) | 13B | Generic | Trained by Facebook Researchers this model stems from the MOE research project within Fairseq. This particular version has been converted by us for use in KoboldAI. It is known to be on par with the larger 20B model from EleutherAI and considered as better for pop culture and language tasks. Because the model has never seen a new line (enter) it may perform worse on formatting and paragraphing. |\n",
"| [GPT-J-6B](https://huggingface.co/EleutherAI/gpt-j-6B) by EleutherAI | 6B | Generic | This model serves as the basis for most other 6B models (Some being based on Fairseq Dense instead). Being trained on the Pile and not biased towards anything in particular it is suitable for a variety of tasks such as writing, Q&A and coding tasks. You will likely get better result with larger generic models or finetuned models. |\n",
@@ -123,9 +154,9 @@
"| Novel | For regular story writing, not compatible with Adventure mode or other specialty modes. |\n",
"| NSFW | Indicates that the model is strongly biased towards NSFW content and is not suitable for children, work environments or livestreaming. Most NSFW models are also Novel models in nature. |\n",
"| Adventure | These models are excellent for people willing to play KoboldAI like a Text Adventure game and are meant to be used with Adventure mode enabled. Even if you wish to use it as a Novel style model you should always have Adventure mode on and set it to story. These models typically have a strong bias towards the use of the word You and without Adventure mode enabled break the story flow and write actions on your behalf. |\n",
"| Chatbot | These models are specifically trained for chatting and are best used with the Chatmode enabled. Typically trained on either public chatrooms or private chats. |\n",
"| Generic | Generic models are not trained towards anything specific, typically used as a basis for other tasks and models. They can do everything the other models can do, but require much more handholding to work properly. Generic models are an ideal basis for tasks that we have no specific model for, or for experiencing a softprompt in its raw form. |\n",
"\n",
"---\n",
"# How to start KoboldAI in 7 simple steps\n",
"Using KoboldAI on Google Colab is easy! Simply follow these steps to get started:\n",
"1. Mobile phone? Tap the play button below next to \"<--- Tap this if you play on mobile\" to reveal an audio player, play the silent audio to keep the tab alive so Google will not shut you down when your using KoboldAI. If no audio player is revealed your phone browser does not support Google Colab in the mobile view, go to your browser menu and enable Desktop mode before you continue.\n",
@@ -143,4 +174,4 @@
}
}
]
}
}
@@ -32,7 +32,9 @@
"\n",
"Further down the page you can find descriptions of the models, and tips to get the most out of your Google Colab experience.\n",
"\n",
"Make sure to keep this page open while you are using KoboldAI, and check back regularly to see if you got a Captcha. Failure to complete the captcha's in time can result in termination of your session or a lower priority towards the TPUs."
"Make sure to keep this page open while you are using KoboldAI, and check back regularly to see if you got a Captcha. Failure to complete the captcha's in time can result in termination of your session or a lower priority towards the TPUs.\n",
"\n",
"Firefox users need to disable the enhanced tracking protection or use a different browser in order to be able to use Google Colab without errors (This is not something we can do anything about, the cookie blocker breaks the Google Drive integration because it uses different domains)."
],
"metadata": {
"id": "zrLGxVCEaqZx"
@@ -64,10 +66,9 @@
"#@title <b><-- Select your model below and then click this to start KoboldAI</b>\n",
"#@markdown You can find a description of the models below along with instructions on how to start KoboldAI.\n",
"\n",
"#@title <b><-- Click this to start KoboldAI</b>\n",
"Model = \"Nerys 13B\" #@param [\"Nerys 13B\", \"Janeway 13B\", \"Shinen 13B\", \"Skein 6B\", \"Janeway 6B\", \"Adventure 6B\", \"Shinen 6B\", \"Lit 6B\", \"NeoX 20B\", \"facebook/opt-13b\", \"KoboldAI/fairseq-dense-13B\", \"EleutherAI/gpt-j-6B\"] {allow-input: true}\n",
"Model = \"Nerys 13B V2\" #@param [\"Nerys 13B V2\", \"Janeway 13B\", \"Shinen 13B\", \"Skein 6B\", \"Janeway 6B\", \"Adventure 6B\", \"Shinen 6B\", \"Lit 6B\", \"NeoX 20B\", \"OPT 13B\", \"Fairseq Dense 13B\", \"GPT-J-6B\"] {allow-input: true}\n",
"Version = \"Official\" #@param [\"Official\", \"United\"] {allow-input: true}\n",
"Provider = \"Localtunnel\" #@param [\"Localtunnel\", \"Cloudflare\"]\n",
"Provider = \"Cloudflare\" #@param [\"Localtunnel\", \"Cloudflare\"]\n",
"\n",
"import os\n",
"try:\n",
@@ -84,8 +85,8 @@
" Model = \"KoboldAI/fairseq-dense-13B-Janeway\"\n",
" path = \"\"\n",
" download = \"\"\n",
"elif Model == \"Nerys 13B\":\n",
" Model = \"KoboldAI/fairseq-dense-13B-Nerys\"\n",
"elif Model == \"Nerys 13B V2\":\n",
" Model = \"KoboldAI/fairseq-dense-13B-Nerys-v2\"\n",
" path = \"\"\n",
" download = \"\"\n",
"elif Model == \"Shinen 13B\":\n",
@@ -93,13 +94,9 @@
" path = \"\"\n",
" download = \"\"\n",
"elif Model == \"NeoX 20B\":\n",
" Model = \"TPUMeshTransformerGPTNeoX\"\n",
" path = \" -p gpt-neox-20b-jax\"\n",
" location = \"colab\"\n",
" download = \" -a https://storage.henk.tech/KoboldAI/neox-20b.txt\"\n",
" extract = \"\"\n",
" Drive = \"Unextracted (Less Space)\"\n",
" ![[ -f /content/drive/MyDrive/KoboldAI/settings/gpt-neox-20b-jax.settings ]] || echo -e \"{\\n \\\"apikey\\\": \\\"\\\",\\n \\\"andepth\\\": 3,\\n \\\"temp\\\": 0.5,\\n \\\"top_p\\\": 0.9,\\n \\\"top_k\\\": 0,\\n \\\"tfs\\\": 1.0,\\n \\\"rep_pen\\\": 1.03,\\n \\\"genamt\\\": 80,\\n \\\"max_length\\\": 2048,\\n \\\"ikgen\\\": 200,\\n \\\"formatoptns\\\": {\\n \\\"frmttriminc\\\": true,\\n \\\"frmtrmblln\\\": false,\\n \\\"frmtrmspch\\\": false,\\n \\\"frmtadsnsp\\\": false\\n },\\n \\\"numseqs\\\": 1,\\n \\\"widepth\\\": 3,\\n \\\"useprompt\\\": true,\\n \\\"adventure\\\": false\\n}\" > /content/drive/MyDrive/KoboldAI/settings/gpt-neox-20b-jax.settings\n",
" Model = \"EleutherAI/gpt-neox-20b\"\n",
" path = \"\"\n",
" download = \"\"\n",
"elif Model == \"Skein 6B\":\n",
" Model = \"KoboldAI/GPT-J-6B-Skein\"\n",
" path = \"\"\n",
@@ -120,14 +117,18 @@
" Model = \"KoboldAI/GPT-J-6B-Shinen\"\n",
" path = \"\"\n",
" download = \"\"\n",
"elif Model == \"Convo 6B\":\n",
" Model = \"hitomi-team/convo-6B\"\n",
"elif Model == \"OPT 13B\":\n",
" Model = \"facebook/opt-13b\"\n",
" path = \"\"\n",
" download = \"\"\n",
"elif Model == \"C1 6B\":\n",
" Model = \"hakurei/c1-6B\"\n",
"elif Model == \"Fairseq Dense 13B\":\n",
" Model = \"KoboldAI/fairseq-dense-13B\"\n",
" path = \"\"\n",
" download = \"\"\n",
"elif Model == \"GPT-J-6B\":\n",
" Model = \"EleutherAI/gpt-j-6B\"\n",
" path = \"\"\n",
" download = \"\"\n",
"else:\n",
" path = \"\"\n",
" download = \"\"\n",
@@ -137,17 +138,17 @@
"else:\n",
" tunnel = \"\"\n",
"\n",
"!wget https://henk.tech/ckds -O - | bash /dev/stdin $path$download -m $Model -g $Version $tunnel"
"!wget https://koboldai.org/ckds -O - | bash /dev/stdin $path$download -m $Model -g $Version $tunnel"
]
},
{
"cell_type": "markdown",
"source": [
"## TPU Edition Model Descriptions\n",
"# TPU Edition Model Descriptions\n",
"\n",
"| Model | Size | Style | Description |\n",
"| --- | --- | --- | --- |\n",
"| [Nerys](https://huggingface.co/KoboldAI/fairseq-dense-13B-Nerys) by Mr Seeker | 13B | Novel/Adventure | Nerys is a hybrid model based on Pike (A newer Janeway), on top of the Pike dataset you also get some Light Novels, Adventure mode support and a little bit of shinen thrown in the mix. The end result is a very diverse model that is heavily biased towards SFW novel writing, but one that can go beyond its novel training and make for an excellent adventure model to. Adventure mode is best played from a second person perspective, but can be played in first or third person as well. Novel writing can be done best from the first or third person. |\n",
"| [Nerys](https://huggingface.co/KoboldAI/fairseq-dense-13B-Nerys) by Mr Seeker | 13B | Novel/Adventure | Nerys is a hybrid model based on Pike (A newer Janeway), on top of the Pike dataset you also get some Light Novels, Adventure mode support and a little bit of Shinen thrown in the mix. The end result is a very diverse model that is heavily biased towards SFW novel writing, but one that can go beyond its novel training and make for an excellent adventure model to. Adventure mode is best played from a second person perspective, but can be played in first or third person as well. Novel writing can be done best from the first or third person. |\n",
"| [Janeway](https://huggingface.co/KoboldAI/fairseq-dense-13B-Janeway) by Mr Seeker | 13B | Novel | Janeway is a model created from Picard's dataset combined with a brand new collection of ebooks. This model is trained on 20% more content than Picard and has been trained on literature from various genres. Although the model is mainly focussed on SFW, romantic scenes might involve a degree of nudity. |\n",
"| [Shinen](https://huggingface.co/KoboldAI/fairseq-dense-13B-Shinen) by Mr Seeker | 13B | NSFW | Shinen is an NSFW model designed to be more explicit. Trained on a variety of stories from the website Sexstories it contains many different kinks. |\n",
"| [Skein](https://huggingface.co/KoboldAI/GPT-J-6B-Skein) by VE\\_FORBRYDERNE | 6B | Adventure | Skein is best used with Adventure mode enabled, it consists of a 4 times larger adventure dataset than the Adventure model making it excellent for text adventure gaming. On top of that it also consists of light novel training further expanding its knowledge and writing capabilities. It can be used with the You filter bias if you wish to write Novels with it, but dedicated Novel models can perform better for this task. |\n",
@@ -162,35 +163,47 @@
"\n",
"| Model | Size | Style | Description |\n",
"| --- | --- | --- | --- |\n",
"| [Fairseq-Dense-2.7B-Nerys](https://huggingface.co/KoboldAI/fairseq-dense-2.7B-Nerys) by Mr Seeker | 2.7B | Novel/Adventure | Nerys is a hybrid model based on Pike (A newer Janeway), on top of the Pike dataset you also get some Light Novels, Adventure mode support and a little bit of shinen thrown in the mix. The end result is a very diverse model that is heavily biased towards SFW novel writing, but one that can go beyond its novel training and make for an excellent adventure model to. Adventure mode is best played from a second person perspective, but can be played in first or third person as well. Novel writing can be done best from the first or third person. |\n",
"| [GPT-Neo-2.7B-Janeway](https://huggingface.co/KoboldAI/GPT-Neo-2.7B-Janeway) by Mr Seeker | 2.7B | Novel | Janeway is a model created from Picard's dataset combined with a brand new collection of ebooks. This model is trained on 20% more content than Picard and has been trained on literature from various genres. Although the model is mainly focussed on SFW, romantic scenes might involve a degree of nudity. |\n",
"| [GPT-Neo-2.7B-Picard](https://huggingface.co/KoboldAI/GPT-Neo-2.7B-Picard) by Mr Seeker | 2.7B | Novel | Picard is a model trained for SFW Novels based on GPT-Neo-2.7B. It is focused on Novel style writing without the NSFW bias. While the name suggests a sci-fi model this model is designed for Novels of a variety of genre's. It is meant to be used in KoboldAI's regular mode. |\n",
"| [GPT-Neo-2.7B-AID](https://huggingface.co/KoboldAI/GPT-Neo-2.7B-AID) by melastacho | 2.7B | Adventure | Also know as Adventure 2.7B this is a clone of the AI Dungeon Classic model and is best known for the epic wackey adventures that AI Dungeon Classic players love. |\n",
"| [GPT-Neo-2.7B-Horni-LN](https://huggingface.co/KoboldAI/GPT-Neo-2.7B-Horni-LN) by finetune | 2.7B | Novel | This model is based on GPT-Neo-2.7B-Horni and retains its NSFW knowledge, but was then further biased towards SFW novel stories. If you seek a balance between a SFW Novel model and a NSFW model this model should be a good choice. |\n",
"| [GPT-Neo-2.7B-Horni](https://huggingface.co/KoboldAI/GPT-Neo-2.7B-Horni) by finetune | 2.7B | NSFW | This model is tuned on Literotica to produce a Novel style model biased towards NSFW content. Can still be used for SFW stories but will have a bias towards NSFW content. It is meant to be used in KoboldAI's regular mode. |\n",
"| [GPT-Neo-2.7B-Shinen](https://huggingface.co/KoboldAI/GPT-Neo-2.7B-Shinen) by Mr Seeker | 2.7B | NSFW | Shinen is an alternative to the Horni model designed to be more explicit. If Horni is to tame for you shinen might produce better results. While it is a Novel model it is unsuitable for SFW stories due to its heavy NSFW bias. Shinen will not hold back. It is meant to be used in KoboldAI's regular mode. |\n",
"| [GPT-Neo-2.7B](https://huggingface.co/EleutherAI/gpt-neo-2.7B) by EleutherAI | 2.7B | Generic | This is the base model for all the other 2.7B models, it is best used when you have a use case that we have no other models available for, such as writing blog articles or programming. It can also be a good basis for the experience of some of the softprompts if your softprompt is not about a subject the other models cover. |\n",
"| [Nerys 2.7B](https://huggingface.co/KoboldAI/fairseq-dense-2.7B-Nerys) by Mr Seeker | 2.7B | Novel/Adventure | Nerys is a hybrid model based on Pike (A newer Janeway), on top of the Pike dataset you also get some Light Novels, Adventure mode support and a little bit of Shinen thrown in the mix. The end result is a very diverse model that is heavily biased towards SFW novel writing, but one that can go beyond its novel training and make for an excellent adventure model to. Adventure mode is best played from a second person perspective, but can be played in first or third person as well. Novel writing can be done best from the first or third person. |\n",
"| [Janeway 2.7B](https://huggingface.co/KoboldAI/GPT-Neo-2.7B-Janeway) by Mr Seeker | 2.7B | Novel | Janeway is a model created from Picard's dataset combined with a brand new collection of ebooks. This model is trained on 20% more content than Picard and has been trained on literature from various genres. Although the model is mainly focussed on SFW, romantic scenes might involve a degree of nudity. |\n",
"| [Picard 2.7B](https://huggingface.co/KoboldAI/GPT-Neo-2.7B-Picard) by Mr Seeker | 2.7B | Novel | Picard is a model trained for SFW Novels based on Neo 2.7B. It is focused on Novel style writing without the NSFW bias. While the name suggests a sci-fi model this model is designed for Novels of a variety of genre's. It is meant to be used in KoboldAI's regular mode. |\n",
"| [AID 2.7B](https://huggingface.co/KoboldAI/GPT-Neo-2.7B-AID) by melastacho | 2.7B | Adventure | Also know as Adventure 2.7B this is a clone of the AI Dungeon Classic model and is best known for the epic wackey adventures that AI Dungeon Classic players love. |\n",
"| [Horni LN 2.7B](https://huggingface.co/KoboldAI/GPT-Neo-2.7B-Horni-LN) by finetune | 2.7B | Novel | This model is based on Horni 2.7B and retains its NSFW knowledge, but was then further biased towards SFW novel stories. If you seek a balance between a SFW Novel model and a NSFW model this model should be a good choice. |\n",
"| [Horni 2.7B](https://huggingface.co/KoboldAI/GPT-Neo-2.7B-Horni) by finetune | 2.7B | NSFW | This model is tuned on Literotica to produce a Novel style model biased towards NSFW content. Can still be used for SFW stories but will have a bias towards NSFW content. It is meant to be used in KoboldAI's regular mode. |\n",
"| [Shinen 2.7B](https://huggingface.co/KoboldAI/GPT-Neo-2.7B-Shinen) by Mr Seeker | 2.7B | NSFW | Shinen is an alternative to the Horni model designed to be more explicit. If Horni is to tame for you Shinen might produce better results. While it is a Novel model it is unsuitable for SFW stories due to its heavy NSFW bias. Shinen will not hold back. It is meant to be used in KoboldAI's regular mode. |\n",
"| [Neo 2.7B](https://huggingface.co/EleutherAI/gpt-neo-2.7B) by EleutherAI | 2.7B | Generic | This is the base model for all the other 2.7B models, it is best used when you have a use case that we have no other models available for, such as writing blog articles or programming. It can also be a good basis for the experience of some of the softprompts if your softprompt is not about a subject the other models cover. |\n",
"\n",
"| Style | Description |\n",
"| --- | --- |\n",
"| Novel | For regular story writing, not compatible with Adventure mode or other specialty modes. |\n",
"| NSFW | Indicates that the model is strongly biased towards NSFW content and is not suitable for children, work environments or livestreaming. Most NSFW models are also Novel models in nature. |\n",
"| Adventure | These models are excellent for people willing to play KoboldAI like a Text Adventure game and are meant to be used with Adventure mode enabled. Even if you wish to use it as a Novel style model you should always have Adventure mode on and set it to story. These models typically have a strong bias towards the use of the word You and without Adventure mode enabled break the story flow and write actions on your behalf. |\n",
"| Chatbot | These models are specifically trained for chatting and are best used with the Chatmode enabled. Typically trained on either public chatrooms or private chats. |\n",
"| Generic | Generic models are not trained towards anything specific, typically used as a basis for other tasks and models. They can do everything the other models can do, but require much more handholding to work properly. Generic models are an ideal basis for tasks that we have no specific model for, or for experiencing a softprompt in its raw form. |\n",
"\n",
"---\n",
"## Tips to get the most out of Google Colab\n",
"# Tips to get the most out of Google Colab\n",
"- Google will occationally show a Captcha, typically after it has been open for 30 minutes but it can be more frequent if you often use Colab. Make sure to do these properly, or you risk getting your instance shut down and getting a lower priority towards the TPU's.\n",
"- KoboldAI uses Google Drive to store your files and settings, if you wish to upload a softprompt or userscript this can be done directly on the Google Drive website. You can also use this to download backups of your KoboldAI related files or upload models of your own.\n",
"- Don't want to save your stories on Google Drive for privacy reasons? Do not use KoboldAI's save function and instead click Download as .json, this will automatically download the story to your own computer without ever touching Google's harddrives. You can load this back trough the Load from file option.\n",
"- Google shut your instance down unexpectedly? You can still make use of the Download as .json button to recover your story as long as you did not close the KoboldAI window. You can then load this back up in your next session.\n",
"- Done with KoboldAI? Go to the Runtime menu, click on Manage Sessions and terminate your open sessions that you no longer need. This trick can help you maintain higher priority towards getting a TPU.\n",
"- Models stored on Google Drive typically load faster than models we need to download from the internet."
"- Done with KoboldAI? Go to the Runtime menu, click on Manage Sessions and terminate your open sessions that you no longer need. This trick can help you maintain higher priority towards getting a TPU."
],
"metadata": {
"id": "i0-9ARA3c4Fx"
}
},
{
"cell_type": "code",
"source": [
"#@title <b>Model Cleaner</b>\n",
"#@markdown Out of space? Run this to remove all cached models (Google Drive models are not effected).\n",
"!rm /content/KoboldAI-Client/cache/*\n"
],
"metadata": {
"cellView": "form",
"id": "QQZSmoNol04V"
},
"execution_count": null,
"outputs": []
}
],
"metadata": {
@@ -212,4 +225,4 @@
},
"nbformat": 4,
"nbformat_minor": 0
}
}
@@ -1,76 +0,0 @@
{
"nbformat": 4,
"nbformat_minor": 0,
"metadata": {
"colab": {
"name": "ColabKobold Code",
"provenance": [],
"authorship_tag": "ABX9TyOuIHmyxj4U9dipAib4hfIi",
"include_colab_link": true
},
"kernelspec": {
"name": "python3",
"display_name": "Python 3"
},
"language_info": {
"name": "python"
},
"accelerator": "TPU"
},
"cells": [
{
"cell_type": "markdown",
"metadata": {
"id": "view-in-github",
"colab_type": "text"
},
"source": [
"<a href=\"https://colab.research.google.com/github/henk717/KoboldAI/blob/united/colab/vscode.ipynb\" target=\"_parent\"><img src=\"https://colab.research.google.com/assets/colab-badge.svg\" alt=\"Open In Colab\"/></a>"
]
},
{
"cell_type": "markdown",
"source": [
"# ColabKobold VSCode Edition\n",
"This is a special edition of ColabKobold aimed at developers, it will not start a KoboldAI instance for you to play KoboldAI and instead will launch a fully functional copy of VSCode for easy development.\n",
"\n",
"Few things of note:\n",
"1. Make sure the desired (or no) accelertor is selected on Colab, you do not want a TPU ban for not using it.\n",
"1. The Version can be replaced with your github URL and appended with -b for the branch for example \"https://github.com/henk717/koboldai -b united\" dependencies will automatically be installed from requirements.txt or requirements_mtj.txt.\n",
"1. With the args you can specify launch options for the KoboldAI Deployment Script, this way you can easily preinstall models to your development instance so you have a model to test with. To install TPU requirements specify the -m TPUMeshTransformerGPTJ argument.\n",
"1. You will need an Ngrok auth token which you can obtain here : https://dashboard.ngrok.com/get-started/your-authtoken\n",
"1. KoboldAI is installed in /content/koboldai-client opening this folder is enough to automatically get full git history and revision support. Also keep in mind that it mounts your Google Drive, be careful comitting directly from this instance.\n",
"1. With Ctrl + Shift + ` you can get a terminal to launch KoboldAI with your own parameters, launching with --colab is recommended.\n",
"\n",
"# [If you are not a developer and are looking to use KoboldAI click here](https://henk.tech/colabkobold)"
],
"metadata": {
"id": "hMRnGz42Xsy3"
}
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"id": "40B1QvI3Xv02"
},
"outputs": [],
"source": [
"#@title VSCode Server\n",
"Version = \"United\" #@param [\"Official\", \"United\"] {allow-input: true}\n",
"Args = \"-m TPUMeshTransformerGPTJ -a https://api.wandb.ai/files/ve-forbryderne/skein/files/gpt-j-6b-skein-jax/aria2.txt\" #@param {type:\"string\"}\n",
"Authtoken = \"\" #@param {type:\"string\"}\n",
|
||||
"\n",
|
||||
"from google.colab import drive\n",
|
||||
"drive.mount('/content/drive/')\n",
|
||||
"\n",
|
||||
"!wget https://henk.tech/ckds -O - | bash /dev/stdin -g $Version -i only $Args\n",
|
||||
"\n",
|
||||
"!pip install colabcode\n",
|
||||
"!pip install 'flask>=2.1.0'\n",
|
||||
"from colabcode import ColabCode\n",
|
||||
"ColabCode(authtoken=Authtoken)"
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
1  customsettings_template.json (new file)
@@ -0,0 +1 @@
{"aria2_port":null, "breakmodel":null, "breakmodel_disklayers":null, "breakmodel_gpulayers":null, "breakmodel_layers":null, "colab":null, "configname":null, "cpu":null, "host":null, "localtunnel":null, "lowmem":null, "model":null, "ngrok":null, "no_aria2":null, "noaimenu":null, "nobreakmodel":null, "override_delete":null, "override_rename":null, "path":null, "port":null, "quiet":null, "remote":null, "revision":null, "savemodel":null, "unblock":null}
8  docker-standalone/Dockerfile (new file)
@@ -0,0 +1,8 @@
FROM debian
RUN apt update && apt install wget aria2 git bzip2 -y
RUN git clone https://github.com/henk717/koboldai /opt/koboldai
WORKDIR /opt/koboldai
RUN ./install_requirements.sh cuda
COPY docker-helper.sh /opt/koboldai/docker-helper.sh
EXPOSE 5000/tcp
CMD /opt/koboldai/docker-helper.sh
17  docker-standalone/Readme.txt (new file)
@@ -0,0 +1,17 @@
These are the source files for the official versions of the standalone docker and are provided for completeness.
Using these files you will not use any of the local modifications you make, instead it will use the latest github version of KoboldAI as the basis.

If you wish to run KoboldAI containerised with access to the local directory you can do so using docker-cuda.sh or docker-rocm.sh instead.

We do not support ROCm in the standalone docker as it is intended for cloud deployment on CUDA systems.
If you wish to build a ROCm version instead, you can do so by modifying the Dockerfile and changing the install_requirements.sh from cuda to rocm.

Similarly you need to modify the Dockerfile to specify which branch of KoboldAI the docker is being built for.

Usage:
This docker will automatically assume the persistent volume is mounted to /content and will by default not store models there.
The following environment variables exist to adjust the behavior if desired.

KOBOLDAI_DATADIR=/content , this can be used to specify a different default location for your stories, settings, userscripts, etc in case your provider does not let you change the mounted folder path.
KOBOLDAI_MODELDIR= , This variable can be used to make model storage persistent, it can be the same location as your datadir but this is not required.
KOBOLDAI_ARGS= , This variable is built in KoboldAI and can be used to override the default launch options. Right now the docker by default will launch in remote mode, with output hidden from the logs and file management enabled.
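For orientation only, here is a minimal sketch of how the standalone image and these variables might be wired together; the image tag, the host path and the exact docker build/run invocation are assumptions for illustration, not something prescribed by these files:

# Build the image from the docker-standalone folder (the tag name here is a placeholder).
docker build -t koboldai-standalone docker-standalone/
# Run it with the persistent volume mounted to /content and the web interface (EXPOSE 5000/tcp) published.
# KOBOLDAI_MODELDIR is optional; set it only if model storage should also persist.
# GPU passthrough (e.g. --gpus all with the NVIDIA container toolkit) would typically be added for CUDA use.
docker run -p 5000:5000 \
    -v /path/on/host:/content \
    -e KOBOLDAI_DATADIR=/content \
    -e KOBOLDAI_MODELDIR=/content \
    koboldai-standalone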
47  docker-standalone/docker-helper.sh (new file)
@@ -0,0 +1,47 @@
#!/bin/bash
cd /opt/koboldai
git pull
#./install_requirements.sh cuda

if [[ ! -v KOBOLDAI_DATADIR ]];then
mkdir /content
KOBOLDAI_DATADIR=/content
fi

mkdir $KOBOLDAI_DATADIR/stories
if [[ ! -v KOBOLDAI_MODELDIR ]];then
mkdir $KOBOLDAI_MODELDIR/models
fi
mkdir $KOBOLDAI_DATADIR/settings
mkdir $KOBOLDAI_DATADIR/softprompts
mkdir $KOBOLDAI_DATADIR/userscripts
#mkdir $KOBOLDAI_MODELDIR/cache

cp -rn stories/* $KOBOLDAI_DATADIR/stories/
cp -rn userscripts/* $KOBOLDAI_DATADIR/userscripts/
cp -rn softprompts/* $KOBOLDAI_DATADIR/softprompts/

rm stories
rm -rf stories/
rm userscripts
rm -rf userscripts/
rm softprompts
rm -rf softprompts/

if [[ ! -v KOBOLDAI_MODELDIR ]];then
rm models
rm -rf models/
#rm cache
#rm -rf cache/
fi

ln -s $KOBOLDAI_DATADIR/stories/ stories
ln -s $KOBOLDAI_DATADIR/settings/ settings
ln -s $KOBOLDAI_DATADIR/softprompts/ softprompts
ln -s $KOBOLDAI_DATADIR/userscripts/ userscripts
if [[ ! -v KOBOLDAI_MODELDIR ]];then
ln -s $KOBOLDAI_MODELDIR/models/ models
#ln -s $KOBOLDAI_MODELDIR/cache/ cache
fi

PYTHONUNBUFFERED=1 ./play.sh --remote --quiet --override_delete --override_rename
@@ -6,6 +6,7 @@ channels:
dependencies:
- colorama
- flask-socketio
- flask-session
- pytorch
- cudatoolkit=11.1
- tensorflow-gpu
@@ -15,6 +16,8 @@ dependencies:
- bleach=4.1.0
- pip
- git=2.35.1
- marshmallow>=3.13
- apispec-webframeworks
- pip:
- git+https://github.com/finetuneanon/transformers@gpt-neo-localattention3-rp-b
- flask-cloudflared

@@ -6,6 +6,7 @@ channels:
dependencies:
- colorama
- flask-socketio
- flask-session
- pytorch=1.11.*
- python=3.8.*
- cudatoolkit=11.1
@@ -16,6 +17,8 @@ dependencies:
- git=2.35.1
- sentencepiece
- protobuf
- marshmallow>=3.13
- apispec-webframeworks
- pip:
- flask-cloudflared
- flask-ngrok

@@ -5,12 +5,15 @@ channels:
dependencies:
- colorama
- flask-socketio
- flask-session
- python=3.8.*
- eventlet
- markdown
- bleach=4.1.0
- pip
- git=2.35.1
- marshmallow>=3.13
- apispec-webframeworks
- pip:
- --find-links https://download.pytorch.org/whl/rocm4.2/torch_stable.html
- torch

@@ -5,6 +5,7 @@ channels:
dependencies:
- colorama
- flask-socketio
- flask-session
- python=3.8.*
- eventlet
- markdown
@@ -13,6 +14,8 @@ dependencies:
- git=2.35.1
- sentencepiece
- protobuf
- marshmallow>=3.13
- apispec-webframeworks
- pip:
- --find-links https://download.pytorch.org/whl/rocm4.2/torch_stable.html
- torch==1.10.*
@@ -21,7 +21,7 @@ gensettingstf = [
"id": "settemp",
"min": 0.1,
"max": 2.0,
"step": 0.05,
"step": 0.01,
"default": 0.5,
"tooltip": "Randomness of sampling. High values can increase creativity but may make text less sensible. Lower values will make text more predictable but can become repetitious.",
"menu_path": "Settings",
@@ -36,7 +36,7 @@ gensettingstf = [
"id": "settopp",
"min": 0.0,
"max": 1.0,
"step": 0.05,
"step": 0.01,
"default": 0.9,
"tooltip": "Used to discard unlikely text in the sampling process. Lower values will make text more predictable but can become repetitious. (Put this value on 1 to disable its effect)",
"menu_path": "Settings",
@@ -67,7 +67,7 @@ gensettingstf = [
"id": "settfs",
"min": 0.0,
"max": 1.0,
"step": 0.05,
"step": 0.01,
"default": 1.0,
"tooltip": "Alternative sampling method; it is recommended to disable top_p and top_k (set top_p to 1 and top_k to 0) if using this. 0.95 is thought to be a good value. (Put this value on 1 to disable its effect)",
"menu_path": "Settings",
@@ -82,7 +82,7 @@ gensettingstf = [
"id": "settypical",
"min": 0.0,
"max": 1.0,
"step": 0.05,
"step": 0.01,
"default": 1.0,
"tooltip": "Alternative sampling method described in the paper \"Typical Decoding for Natural Language Generation\" (10.48550/ARXIV.2202.00666). The paper suggests 0.2 as a good value for this setting. Set this setting to 1 to disable its effect.",
"menu_path": "Settings",
@@ -315,6 +315,17 @@ gensettingstf = [
"classname": "user",
"name": "nogenmod"
},
{
"uitype": "toggle",
"unit": "bool",
"label": "Full Determinism",
"id": "setfulldeterminism",
"min": 0,
"max": 1,
"step": 1,
"default": 0,
"tooltip": "Causes generation to be fully deterministic -- the model will always output the same thing as long as your story, settings and RNG seed are the same. If this is off, only the sequence of outputs that the model makes will be deterministic."
},
{
"uitype": "toggle",
"unit": "bool",
@@ -579,9 +590,9 @@ formatcontrols = [{
"tooltip": "Remove special characters (@,#,%,^, etc)"
},
{
"label": "Add sentence spacing",
"label": "Automatic spacing",
"id": "frmtadsnsp",
"tooltip": "If the last action ended with punctuation, add a space to the beginning of the next action."
"tooltip": "Add spaces automatically if needed"
},
{
"label": "Single Line",
@@ -1,16 +1,11 @@
@echo off
title KoboldAI Runtime Installer (MicroMamba)
echo Please choose one of the following transformers options
echo 1. Official Transformers (Recommended)
echo 2. Finetune Transformers (For old 6B models)
echo.

echo Errors? Rerun this as admin so it can add the needed LongPathsEnabled registery tweak.
echo Installer failed or crashed? Run it again so it can continue.
echo Only Windows 10 and higher officially supported, older Windows installations can't handle the paths.
echo.

SET /P B=Type the number of the desired option and then press ENTER:

Reg add "HKLM\SYSTEM\CurrentControlSet\Control\FileSystem" /v "LongPathsEnabled" /t REG_DWORD /d "1" /f 2>nul
cd /D %~dp0

@@ -19,7 +14,7 @@ if exist miniconda3\ (
echo This is required if you are switching modes, or if you get dependency errors in the game.
echo 1. Yes
echo 2. No
SET /P D=Type the number of the desired option and then press ENTER:
SET /P D=Type the number of the desired option and then press ENTER:
) ELSE (
SET D=Workaround
)
@@ -30,7 +25,7 @@ echo Which installation mode would you like?
echo 1. Temporary Drive Letter (Mounts the folder as drive B:, more stable and portable)
echo 2. Subfolder (Traditional method, can't run in folder paths that contain spaces)
echo.
SET /P M=Type the number of the desired option and then press ENTER:
SET /P M=Type the number of the desired option and then press ENTER:
IF %M%==1 GOTO drivemap
IF %M%==2 GOTO subfolder
ECHO Incorrect choice
@@ -40,7 +35,7 @@ GOTO MODE
:drivemap
echo 3 > loader.settings
subst B: /D >nul
mkdir miniconda3
mkdir miniconda3
subst B: miniconda3
SET TEMP=B:\
SET TMP=B:\
@@ -49,8 +44,7 @@ copy loader.settings B:\loader.settings
copy disconnect-kobold-drive.bat B:\disconnect-kobold-drive.bat
B:
umamba.exe create -r B:\python\ -n base
IF %B%==1 umamba.exe install --no-shortcuts -r B:\python\ -n base -f "%~dp0\environments\huggingface.yml" -y --always-copy
IF %B%==2 umamba.exe install --no-shortcuts -r B:\python\ -n base -f "%~dp0\environments\finetuneanon.yml" -y --always-copy
umamba.exe install --no-shortcuts -r B:\python\ -n base -f "%~dp0\environments\huggingface.yml" -y --always-copy
umamba.exe -r B:\ clean -a -y
rd B:\Python\pkgs /S /Q
subst B: /d
@@ -62,8 +56,7 @@ echo 2 > loader.settings
SET TEMP=%~DP0MINICONDA3
SET TMP=%~DP0MINICONDA3
umamba.exe create -r miniconda3\ -n base
IF %B%==1 umamba.exe install --no-shortcuts -r miniconda3 -n base -f environments\huggingface.yml -y --always-copy
IF %B%==2 umamba.exe install --no-shortcuts -r miniconda3 -n base -f environments\finetuneanon.yml -y --always-copy
umamba.exe install --no-shortcuts -r miniconda3 -n base -f environments\huggingface.yml -y --always-copy
umamba.exe clean -a -y
rd miniconda3\Python\pkgs /S /Q
pause
30
maps/bloom.json
Normal file
@@ -0,0 +1,30 @@
|
||||
{
|
||||
"mtj_compat": "bloom",
|
||||
"mtj_pe": "alibi",
|
||||
"mtj_config_map": {
|
||||
"d_model": "n_embed",
|
||||
"n_heads": "num_attention_heads",
|
||||
"layers": "n_layer"
|
||||
},
|
||||
"static_weights": {
|
||||
"word_embeddings.weight": {"mtj": {"module": "embedding_shard/~/linear", "param": "w", "transforms": ["no_transpose", "vocab_pad"]}},
|
||||
"word_embeddings_layernorm.weight": {"mtj": {"module": "embedding_shard/~/replicated_layer_norm", "param": "scale"}},
|
||||
"word_embeddings_layernorm.bias": {"mtj": {"module": "embedding_shard/~/replicated_layer_norm", "param": "offset"}},
|
||||
"ln_f.weight": {"mtj": {"module": "projection_shard/~/replicated_layer_norm", "param": "scale"}},
|
||||
"ln_f.bias": {"mtj": {"module": "projection_shard/~/replicated_layer_norm", "param": "offset"}}
|
||||
},
|
||||
"layer_weights": {
|
||||
"h.{layer}.self_attention.query_key_value.weight": {"mtj": {"module": "layer_{layer}/~/combined_qkv", "param": "w"}},
|
||||
"h.{layer}.self_attention.query_key_value.bias": {"mtj": {"module": "layer_{layer}/~/combined_qkv", "param": "b"}},
|
||||
"h.{layer}.self_attention.dense.weight": {"mtj": {"module": "layer_{layer}/~/linear_3", "param": "w"}},
|
||||
"h.{layer}.self_attention.dense.bias": {"mtj": {"module": "layer_{layer}/~/linear_3", "param": "b", "transforms": ["divide_by_shards"]}},
|
||||
"h.{layer}.mlp.dense_h_to_4h.weight": {"mtj": {"module": "layer_{layer}/~/linear_4", "param": "w"}},
|
||||
"h.{layer}.mlp.dense_h_to_4h.bias": {"mtj": {"module": "layer_{layer}/~/linear_4", "param": "b"}},
|
||||
"h.{layer}.mlp.dense_4h_to_h.weight": {"mtj": {"module": "layer_{layer}/~/linear_5", "param": "w"}},
|
||||
"h.{layer}.mlp.dense_4h_to_h.bias": {"mtj": {"module": "layer_{layer}/~/linear_5", "param": "b", "transforms": ["divide_by_shards"]}},
|
||||
"h.{layer}.input_layernorm.weight": {"mtj": {"module": "layer_{layer}/~/replicated_layer_norm", "param": "scale"}},
|
||||
"h.{layer}.input_layernorm.bias": {"mtj": {"module": "layer_{layer}/~/replicated_layer_norm", "param": "offset"}},
|
||||
"h.{layer}.post_attention_layernorm.weight": {"mtj": {"module": "layer_{layer}/~/replicated_layer_norm_1", "param": "scale"}},
|
||||
"h.{layer}.post_attention_layernorm.bias": {"mtj": {"module": "layer_{layer}/~/replicated_layer_norm_1", "param": "offset"}}
|
||||
}
|
||||
}
|
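The new maps/bloom.json above tells the TPU (MTJ) backend how to route each HuggingFace weight name to an MTJ module and parameter, with `{layer}` acting as a per-layer template and `transforms` naming optional post-processing steps such as `no_transpose` or `divide_by_shards`. The conversion itself happens in the Python backend and is not part of this diff; the sketch below only illustrates, under that assumption, how such `{layer}` templates could be expanded for a given layer count.

```javascript
// Illustrative only: expand "{layer}" templates from a map like maps/bloom.json.
// The real conversion is done by KoboldAI's Python TPU backend.
const layerWeights = {
  "h.{layer}.input_layernorm.weight":
    { mtj: { module: "layer_{layer}/~/replicated_layer_norm", param: "scale" } },
};

function expandLayerTemplates(templates, numLayers) {
  const expanded = {};
  for (const [name, spec] of Object.entries(templates)) {
    for (let layer = 0; layer < numLayers; layer++) {
      expanded[name.replaceAll("{layer}", String(layer))] = {
        mtj: {
          module: spec.mtj.module.replaceAll("{layer}", String(layer)),
          param: spec.mtj.param,
          transforms: spec.mtj.transforms || [],
        },
      };
    }
  }
  return expanded;
}

// expandLayerTemplates(layerWeights, 2) yields entries for
// "h.0.input_layernorm.weight" and "h.1.input_layernorm.weight".
```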
60
readme.md
@@ -8,7 +8,7 @@ Stories can be played like a Novel, a text adventure game or used as a chatbot w
|
||||
|
||||
### Adventure mode
|
||||
|
||||
By default KoboldAI will run in a generic mode optimized for writing, but with the right model you can play this like AI Dungeon without any issues. You can enable this in the settings and bring your own prompt, try generating a random prompt or download one of the prompts available at [prompts.aidg.club](https://prompts.aidg.club) .
|
||||
By default KoboldAI will run in a generic mode optimized for writing, but with the right model you can play this like AI Dungeon without any issues. You can enable this in the settings and bring your own prompt, try generating a random prompt or download one of the prompts available at [/aids/ Prompts](https://aetherroom.club/).
|
||||
|
||||
The gameplay will be slightly different from AI Dungeon's because we adopted the gameplay style of the Unleashed fork, giving you full control over all the characters; we do not automatically adapt your sentences behind the scenes. This means you can more reliably control characters that are not you.
|
||||
|
||||
@@ -21,7 +21,7 @@ If you want to do this with your friends we advise using the main character as Y
|
||||
|
||||
### Writing assistant
|
||||
|
||||
If you want to use KoboldAI as a writing assistant this is best done in the regular mode with a model optimized for Novels. These models do not make the assumption that there is a You character and focus on Novel like writing. For writing these will often give you better results than Adventure or Generic models. That said, if you give it a good introduction to the story large generic models like 6B can be used if a more specific model is not available for what you wish to write. You can also try to use models that are not specific to what you wish to do, for example a NSFW Novel model for a SFW story if a SFW model is unavailable. This will mean you will have to correct the model more often because of its bias, but can still produce good enough results if it is familiar enough with your topic.
|
||||
If you want to use KoboldAI as a writing assistant this is best done in the regular mode with a model optimized for Novels. These models do not assume that there is a You character and focus on Novel-like writing. For writing these will often give you better results than Adventure or Generic models. That said, if you give it a good introduction to the story, large generic models like 13B can be used if a more specific model is not available for what you wish to write. You can also try models that are not specific to what you wish to do, for example an NSFW Novel model for a SFW story if a SFW model is unavailable. This will mean you will have to correct the model more often because of its bias, but it can still produce good enough results if it is familiar enough with your topic.
|
||||
|
||||
### Chatbot Mode
|
||||
|
||||
@@ -48,18 +48,16 @@ If you would like to play KoboldAI online for free on a powerful computer you ca
|
||||
|
||||
Each edition features different models and requires different hardware to run, this means that if you are unable to obtain a TPU or a GPU you might still be able to use the other version. The models you can use are listed underneath the edition. To open a Colab click the big link featuring the editions name.
|
||||
|
||||
### [Click here for the TPU Edition Colab](https://colab.research.google.com/github/KoboldAI/KoboldAI-Client/blob/main/colab/TPU.ipynb)
|
||||
## [TPU Edition Model Descriptions](https://colab.research.google.com/github/KoboldAI/KoboldAI-Client/blob/main/colab/TPU.ipynb)
|
||||
|
||||
| Model | Size | Style | Description |
|
||||
| --- | --- | --- | --- |
|
||||
| [Nerys](https://huggingface.co/KoboldAI/fairseq-dense-13B-Nerys) by Mr Seeker | 13B | Novel/Adventure | Nerys is a hybrid model based on Pike (A newer Janeway), on top of the Pike dataset you also get some Light Novels, Adventure mode support and a little bit of shinen thrown in the mix. The end result is a very diverse model that is heavily biased towards SFW novel writing, but one that can go beyond its novel training and make for an excellent adventure model to. Adventure mode is best played from a second person perspective, but can be played in first or third person as well. Novel writing can be done best from the first or third person. |
|
||||
| [Nerys](https://huggingface.co/KoboldAI/fairseq-dense-13B-Nerys) by Mr Seeker | 13B | Novel/Adventure | Nerys is a hybrid model based on Pike (a newer Janeway); on top of the Pike dataset you also get some Light Novels, Adventure mode support and a little bit of Shinen thrown in the mix. The end result is a very diverse model that is heavily biased towards SFW novel writing, but one that can go beyond its novel training and make for an excellent adventure model too. Adventure mode is best played from a second person perspective, but can be played in first or third person as well. Novel writing can be done best from the first or third person. |
|
||||
| [Janeway](https://huggingface.co/KoboldAI/fairseq-dense-13B-Janeway) by Mr Seeker | 13B | Novel | Janeway is a model created from Picard's dataset combined with a brand new collection of ebooks. This model is trained on 20% more content than Picard and has been trained on literature from various genres. Although the model is mainly focussed on SFW, romantic scenes might involve a degree of nudity. |
|
||||
| [Shinen](https://huggingface.co/KoboldAI/fairseq-dense-13B-Shinen) by Mr Seeker | 13B | NSFW | Shinen is an NSFW model designed to be more explicit. Trained on a variety of stories from the website Sexstories it contains many different kinks. |
|
||||
| [Skein](https://huggingface.co/KoboldAI/GPT-J-6B-Skein) by VE\_FORBRYDERNE | 6B | Adventure | Skein is best used with Adventure mode enabled, it consists of a 4 times larger adventure dataset than the Adventure model making it excellent for text adventure gaming. On top of that it also consists of light novel training further expanding its knowledge and writing capabilities. It can be used with the You filter bias if you wish to write Novels with it, but dedicated Novel models can perform better for this task. |
|
||||
| [Adventure](https://huggingface.co/KoboldAI/GPT-J-6B-Adventure) by VE\_FORBRYDERNE | 6B | Adventure | Adventure is a 6B model designed to mimic the behavior of AI Dungeon. It is exclusively for Adventure Mode and can take you on the epic and wacky adventures that AI Dungeon players love. It also features the many tropes of AI Dungeon as it has been trained on very similar data. It must be used in second person (You). |
|
||||
| [Lit](https://huggingface.co/hakurei/lit-6B) by Haru | 6B | NSFW | Lit is a great NSFW model trained by Haru on both a large set of Literotica stories and high quality novels along with tagging support. Creating a high quality model for your NSFW stories. This model is exclusively a novel model and is best used in third person. |
|
||||
| [Convo](https://huggingface.co/hitomi-team/convo-6B) by Hitomi Team | 6B | Chatbot | Convo-6B is a GPT-J 6B model fine-tuned on a collection of high quality open source datasets which amount to 6 million messages. The primary goal of the model is to provide improved performance and generalization when generating multi-turn dialogue for characters that were not present from within the fine tuning data. The prompted performance has especially improved over the predecessor model [C1-6B](https://huggingface.co/hakurei/c1-6B). |
|
||||
| [C1](https://huggingface.co/hakurei/c1-6B) by Haru | 6B | Chatbot | C1 has been trained on various internet chatrooms, it makes the basis for an interesting chatbot model and has been optimized to be used in the Chatmode. |
|
||||
| Neo(X) by EleutherAI | 20B | Generic | NeoX is the largest EleutherAI model currently available; being a generic model it is not particularly trained towards anything and can do a variety of writing, Q&A and coding tasks. 20B's performance is closely comparable to the 13B models and it is worth trying both, especially if you have a task that does not involve English writing. Its behavior will be similar to the GPT-J-6B model since they are trained on the same dataset, but with more sensitivity towards repetition penalty and with more knowledge. |
|
||||
| [Fairseq Dense](https://huggingface.co/KoboldAI/fairseq-dense-13B) | 13B | Generic | Trained by Facebook researchers, this model stems from the MOE research project within Fairseq. This particular version has been converted by us for use in KoboldAI. It is known to be on par with the larger 20B model from EleutherAI and considered better for pop culture and language tasks. Because the model has never seen a new line (enter) it may perform worse on formatting and paragraphing. |
|
||||
| [GPT-J-6B](https://huggingface.co/EleutherAI/gpt-j-6B) by EleutherAI | 6B | Generic | This model serves as the basis for most other 6B models (some being based on Fairseq Dense instead). Being trained on the Pile and not biased towards anything in particular, it is suitable for a variety of tasks such as writing, Q&A and coding tasks. You will likely get better results with larger generic models or finetuned models. |
|
||||
@@ -68,21 +66,23 @@ Each edition features different models and requires different hardware to run, t
|
||||
|
||||
| Model | Size | Style | Description |
|
||||
| --- | --- | --- | --- |
|
||||
| [Fairseq-Dense-2.7B-Nerys](https://huggingface.co/KoboldAI/fairseq-dense-2.7B-Nerys) by Mr Seeker | 2.7B | Novel/Adventure | Nerys is a hybrid model based on Pike (A newer Janeway), on top of the Pike dataset you also get some Light Novels, Adventure mode support and a little bit of shinen thrown in the mix. The end result is a very diverse model that is heavily biased towards SFW novel writing, but one that can go beyond its novel training and make for an excellent adventure model to. Adventure mode is best played from a second person perspective, but can be played in first or third person as well. Novel writing can be done best from the first or third person. |
|
||||
| [GPT-Neo-2.7B-Janeway](https://huggingface.co/KoboldAI/GPT-Neo-2.7B-Janeway) by Mr Seeker | 2.7B | Novel | Janeway is a model created from Picard's dataset combined with a brand new collection of ebooks. This model is trained on 20% more content than Picard and has been trained on literature from various genres. Although the model is mainly focussed on SFW, romantic scenes might involve a degree of nudity. |
|
||||
| [GPT-Neo-2.7B-Picard](https://huggingface.co/KoboldAI/GPT-Neo-2.7B-Picard) by Mr Seeker | 2.7B | Novel | Picard is a model trained for SFW Novels based on GPT-Neo-2.7B. It is focused on Novel style writing without the NSFW bias. While the name suggests a sci-fi model this model is designed for Novels of a variety of genre's. It is meant to be used in KoboldAI's regular mode. |
|
||||
| [GPT-Neo-2.7B-AID](https://huggingface.co/KoboldAI/GPT-Neo-2.7B-AID) by melastacho | 2.7B | Adventure | Also know as Adventure 2.7B this is a clone of the AI Dungeon Classic model and is best known for the epic wackey adventures that AI Dungeon Classic players love. |
|
||||
| [GPT-Neo-2.7B-Horni-LN](https://huggingface.co/KoboldAI/GPT-Neo-2.7B-Horni-LN) by finetune | 2.7B | Novel | This model is based on GPT-Neo-2.7B-Horni and retains its NSFW knowledge, but was then further biased towards SFW novel stories. If you seek a balance between a SFW Novel model and a NSFW model this model should be a good choice. |
|
||||
| [GPT-Neo-2.7B-Horni](https://huggingface.co/KoboldAI/GPT-Neo-2.7B-Horni) by finetune | 2.7B | NSFW | This model is tuned on Literotica to produce a Novel style model biased towards NSFW content. Can still be used for SFW stories but will have a bias towards NSFW content. It is meant to be used in KoboldAI's regular mode. |
|
||||
| [GPT-Neo-2.7B-Shinen](https://huggingface.co/KoboldAI/GPT-Neo-2.7B-Shinen) by Mr Seeker | 2.7B | NSFW | Shinen is an alternative to the Horni model designed to be more explicit. If Horni is to tame for you shinen might produce better results. While it is a Novel model it is unsuitable for SFW stories due to its heavy NSFW bias. Shinen will not hold back. It is meant to be used in KoboldAI's regular mode. |
|
||||
| [GPT-Neo-2.7B](https://huggingface.co/EleutherAI/gpt-neo-2.7B) by EleutherAI | 2.7B | Generic | This is the base model for all the other 2.7B models, it is best used when you have a use case that we have no other models available for, such as writing blog articles or programming. It can also be a good basis for the experience of some of the softprompts if your softprompt is not about a subject the other models cover. |
|
||||
| [Nerys 2.7B](https://huggingface.co/KoboldAI/fairseq-dense-2.7B-Nerys) by Mr Seeker | 2.7B | Novel/Adventure | Nerys is a hybrid model based on Pike (a newer Janeway); on top of the Pike dataset you also get some Light Novels, Adventure mode support and a little bit of Shinen thrown in the mix. The end result is a very diverse model that is heavily biased towards SFW novel writing, but one that can go beyond its novel training and make for an excellent adventure model too. Adventure mode is best played from a second person perspective, but can be played in first or third person as well. Novel writing can be done best from the first or third person. |
|
||||
| [Janeway 2.7B](https://huggingface.co/KoboldAI/GPT-Neo-2.7B-Janeway) by Mr Seeker | 2.7B | Novel | Janeway is a model created from Picard's dataset combined with a brand new collection of ebooks. This model is trained on 20% more content than Picard and has been trained on literature from various genres. Although the model is mainly focussed on SFW, romantic scenes might involve a degree of nudity. |
|
||||
| [Picard 2.7B](https://huggingface.co/KoboldAI/GPT-Neo-2.7B-Picard) by Mr Seeker | 2.7B | Novel | Picard is a model trained for SFW Novels based on Neo 2.7B. It is focused on Novel style writing without the NSFW bias. While the name suggests a sci-fi model, this model is designed for Novels of a variety of genres. It is meant to be used in KoboldAI's regular mode. |
|
||||
| [AID 2.7B](https://huggingface.co/KoboldAI/GPT-Neo-2.7B-AID) by melastacho | 2.7B | Adventure | Also known as Adventure 2.7B, this is a clone of the AI Dungeon Classic model and is best known for the epic wacky adventures that AI Dungeon Classic players love. |
|
||||
| [Horni LN 2.7B](https://huggingface.co/KoboldAI/GPT-Neo-2.7B-Horni-LN) by finetune | 2.7B | Novel | This model is based on Horni 2.7B and retains its NSFW knowledge, but was then further biased towards SFW novel stories. If you seek a balance between a SFW Novel model and a NSFW model this model should be a good choice. |
|
||||
| [Horni 2.7B](https://huggingface.co/KoboldAI/GPT-Neo-2.7B-Horni) by finetune | 2.7B | NSFW | This model is tuned on Literotica to produce a Novel style model biased towards NSFW content. Can still be used for SFW stories but will have a bias towards NSFW content. It is meant to be used in KoboldAI's regular mode. |
|
||||
| [Shinen 2.7B](https://huggingface.co/KoboldAI/GPT-Neo-2.7B-Shinen) by Mr Seeker | 2.7B | NSFW | Shinen is an alternative to the Horni model designed to be more explicit. If Horni is too tame for you, Shinen might produce better results. While it is a Novel model it is unsuitable for SFW stories due to its heavy NSFW bias. Shinen will not hold back. It is meant to be used in KoboldAI's regular mode. |
|
||||
| [Neo 2.7B](https://huggingface.co/EleutherAI/gpt-neo-2.7B) by EleutherAI | 2.7B | Generic | This is the base model for all the other 2.7B models, it is best used when you have a use case that we have no other models available for, such as writing blog articles or programming. It can also be a good basis for the experience of some of the softprompts if your softprompt is not about a subject the other models cover. |
|
||||
|
||||
| Style | Description |
|
||||
### Styles
|
||||
|
||||
| Type | Description |
|
||||
| --- | --- |
|
||||
| Novel | For regular story writing, not compatible with Adventure mode or other specialty modes. |
|
||||
| NSFW | Indicates that the model is strongly biased towards NSFW content and is not suitable for children, work environments or livestreaming. Most NSFW models are also Novel models in nature. |
|
||||
| Adventure | These models are excellent for people willing to play KoboldAI like a Text Adventure game and are meant to be used with Adventure mode enabled. Even if you wish to use it as a Novel style model you should always have Adventure mode on and set it to story. These models typically have a strong bias towards the use of the word You and without Adventure mode enabled break the story flow and write actions on your behalf. |
|
||||
| Chatbot | These models are specifically trained for chatting and are best used with the Chatmode enabled. Typically trained on either public chatrooms or private chats. |
|
||||
| Adventure | These models are excellent for people willing to play KoboldAI like a Text Adventure game and are meant to be used with Adventure mode enabled. Even if you wish to use it as a Novel Type model you should always have Adventure mode on and set it to story. These models typically have a strong bias towards the use of the word You and without Adventure mode enabled break the story flow and write actions on your behalf. |
|
||||
| Hybrid | Hybrid models are a blend between different Types; for example, they are trained on both Novel stories and Adventure stories. These models are great variety models that you can use for multiple different play types and modes, but depending on your usage you may need to enable Adventure Mode or the You bias (in userscripts). |
|
||||
| Generic | Generic models are not trained towards anything specific, typically used as a basis for other tasks and models. They can do everything the other models can do, but require much more handholding to work properly. Generic models are an ideal basis for tasks that we have no specific model for, or for experiencing a softprompt in its raw form. |
|
||||
|
||||
## Tips to get the most out of Google Colab
|
||||
@@ -94,28 +94,6 @@ Each edition features different models and requires different hardware to run, t
|
||||
* Done with KoboldAI? Go to the Runtime menu, click on Manage Sessions and terminate your open sessions that you no longer need. This trick can help you maintain higher priority towards getting a TPU.
|
||||
* Models stored on Google Drive typically load faster than models we need to download from the internet.
|
||||
|
||||
### [Click here for the GPU Edition Colab](https://colab.research.google.com/github/KoboldAI/KoboldAI-Client/blob/main/colab/GPU.ipynb)
|
||||
|
||||
| Model | Size | Type | Description |
|
||||
| --- | --- | --- | --- |
|
||||
| [GPT-Neo-2.7B-Picard](https://huggingface.co/KoboldAI/GPT-Neo-2.7B-Picard) by Mr Seeker | 2.7B GPU | Novel | Picard is a model trained for SFW Novels based on GPT-Neo-2.7B. It is focused on Novel Type writing without the NSFW bias. While the name suggests a sci-fi model, this model is designed for Novels of a variety of genres. It is meant to be used in KoboldAI's regular mode. |
|
||||
| [GPT-Neo-2.7B-AID](https://huggingface.co/KoboldAI/GPT-Neo-2.7B-AID) by melastacho | 2.7B GPU | Adventure | Also known as Adventure 2.7B, this is a clone of the AI Dungeon Classic model and is best known for the epic wacky adventures that AI Dungeon Classic players love. |
|
||||
| [GPT-Neo-2.7B-Horni-LN](https://huggingface.co/KoboldAI/GPT-Neo-2.7B-Horni-LN) by finetune | 2.7B GPU | Novel | This model is based on GPT-Neo-2.7B-Horni and retains its NSFW knowledge, but was then further biased towards SFW novel stories. If you seek a balance between a SFW Novel model and a NSFW model this model should be a good choice. |
|
||||
| [GPT-Neo-2.7B-Horni](https://huggingface.co/KoboldAI/GPT-Neo-2.7B-Horni) by finetune | 2.7B GPU | NSFW | This model is tuned on Literotica to produce a Novel Type model biased towards NSFW content. Can still be used for SFW stories but will have a bias towards NSFW content. It is meant to be used in KoboldAI's regular mode. |
|
||||
| [GPT-Neo-2.7B-Shinen](https://huggingface.co/KoboldAI/GPT-Neo-2.7B-Shinen) by Mr Seeker | 2.7B GPU | NSFW | Shinen is an alternative to the Horni model designed to be more explicit. If Horni is too tame for you, Shinen might produce better results. While it is a Novel model it is unsuitable for SFW stories due to its heavy NSFW bias. Shinen will not hold back. It is meant to be used in KoboldAI's regular mode. |
|
||||
| [GPT-Neo-2.7B](https://huggingface.co/EleutherAI/gpt-neo-2.7B) by EleutherAI | 2.7B GPU | Generic | This is the base model for all the other 2.7B models, it is best used when you have a use case that we have no other models available for, such as writing blog articles or programming. It can also be a good basis for the experience of some of the softprompts if your softprompt is not about a subject the other models cover. |
|
||||
|
||||
### Model Types
|
||||
|
||||
| Type | Description |
|
||||
| --- | --- |
|
||||
| Novel | For regular story writing, not compatible with Adventure mode or other specialty modes. |
|
||||
| NSFW | Indicates that the model is strongly biased towards NSFW content and is not suitable for children, work environments or livestreaming. Most NSFW models are also Novel models in nature. |
|
||||
| Adventure | These models are excellent for people willing to play KoboldAI like a Text Adventure game and are meant to be used with Adventure mode enabled. Even if you wish to use it as a Novel Type model you should always have Adventure mode on and set it to story. These models typically have a strong bias towards the use of the word You and without Adventure mode enabled break the story flow and write actions on your behalf. |
|
||||
| Chatbot | These models are specifically trained for chatting and are best used with the Chatmode enabled. Typically trained on either public chatrooms or private chats. |
|
||||
| Hybrid | Hybrid models are a blend between different Types; for example, they are trained on both Novel stories and Adventure stories. These models are great variety models that you can use for multiple different play types and modes, but depending on your usage you may need to enable Adventure Mode or the You bias (in userscripts). |
|
||||
| Generic | Generic models are not trained towards anything specific, typically used as a basis for other tasks and models. They can do everything the other models can do, but require much more handholding to work properly. Generic models are an ideal basis for tasks that we have no specific model for, or for experiencing a softprompt in its raw form. |
|
||||
|
||||
## Install KoboldAI on your own computer
|
||||
|
||||
KoboldAI has a large number of dependencies you will need to install on your computer; unfortunately, Python does not make it easy for us to provide instructions that work for everyone. The instructions below will work on most computers, but if you have multiple versions of Python installed, conflicts can occur.
|
||||
@@ -195,11 +173,11 @@ If you get these errors you either did not select the correct folder for your cu
|
||||
|
||||
Softprompts (also known as Modules in other products) are addons that can change the output of existing models. For example you may load a softprompt that biases the AI towards a certain subject and style like transcripts from your favorite TV show.
|
||||
|
||||
Since these softprompts are often based on existing franchises we currently do not bundle any of them with KoboldAI due to copyright concerns (We do not want to put the entire project at risk). Instead look at community resources like #softprompts on the [KoboldAI Discord](https://discord.gg/XuQWadgU9k) or the [community hosted mirror](https://storage.henk.tech/KoboldAI/softprompts/) .
|
||||
Since these softprompts are often based on existing franchises we currently do not bundle any of them with KoboldAI due to copyright concerns (We do not want to put the entire project at risk). Instead look at community resources like #softprompts on the [KoboldAI Discord](https://discord.gg/XuQWadgU9k) or the [community hosted mirror](https://storage.henk.tech/KoboldAI/softprompts/).
|
||||
|
||||
That way we are better protected from any DMCA claims as things can be taken down easier than directly on Github. If you have a copyright free softprompt that you made from scratch and is not based on existing IP that you would like to see officially bundled with KoboldAI issue a pull request with your softprompt.
|
||||
|
||||
Training softprompts can be done for free with the [mtj-softtuner colab](https://colab.research.google.com/github/VE-FORBRYDERNE/mtj-softtuner/blob/main/mtj-softtuner.ipynb) , in that case you can leave most of the settings default. Your source data needs to be a folder with text files that are UTF-8 formatted and contain Unix line endings.
|
||||
Training softprompts can be done for free with the [Easy Softprompt Tuner](https://colab.research.google.com/gist/henk717/281fd57ebd2e88d852ef9dcc3f29bebf/easy-softprompt-tuner.ipynb#sandboxMode=true); in that case you can leave most of the settings at their defaults. Your source data needs to be a folder with text files that are UTF-8 formatted and contain Unix line endings.
|
||||
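As a small, hypothetical helper for getting a dataset folder into that shape (a Node.js sketch, not part of KoboldAI or the tuner), the snippet below rewrites every .txt file as UTF-8 with Unix (LF) line endings; the folder path is a placeholder.

```javascript
// Hypothetical helper: rewrite every .txt file in a folder with LF line
// endings and UTF-8 encoding, as the softprompt tuner expects.
const fs = require("fs");
const path = require("path");

function normalizeDataset(folder) {
  for (const name of fs.readdirSync(folder)) {
    if (!name.endsWith(".txt")) continue;
    const file = path.join(folder, name);
    const text = fs.readFileSync(file, "utf8"); // decode as UTF-8
    const unix = text.replace(/\r\n?/g, "\n");  // CRLF / CR -> LF
    fs.writeFileSync(file, unix, "utf8");
  }
}

normalizeDataset("./my-softprompt-data"); // placeholder path
```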
|
||||
## Userscripts
|
||||
|
||||
|
@@ -13,4 +13,6 @@ sentencepiece
|
||||
protobuf
|
||||
accelerate
|
||||
python-socketio[client]
|
||||
flask_session
|
||||
flask_session
|
||||
marshmallow>=3.13
|
||||
apispec-webframeworks
|
||||
|
@@ -18,4 +18,6 @@ lupa==1.10
|
||||
markdown
|
||||
bleach==4.1.0
|
||||
python-socketio[client]
|
||||
flask_session
|
||||
flask-session
|
||||
marshmallow>=3.13
|
||||
apispec-webframeworks
|
||||
|
@@ -78,6 +78,8 @@ var rs_accept;
|
||||
var rs_close;
|
||||
var seqselmenu;
|
||||
var seqselcontents;
|
||||
var stream_preview;
|
||||
var token_prob_container;
|
||||
|
||||
var storyname = null;
|
||||
var memorymode = false;
|
||||
@@ -87,6 +89,7 @@ var wiscroll = 0;
|
||||
var editmode = false;
|
||||
var connected = false;
|
||||
var newly_loaded = true;
|
||||
var all_modified_chunks = new Set();
|
||||
var modified_chunks = new Set();
|
||||
var empty_chunks = new Set();
|
||||
var gametext_bound = false;
|
||||
@@ -102,6 +105,7 @@ var gamestate = "";
|
||||
var gamesaved = true;
|
||||
var modelname = null;
|
||||
var model = "";
|
||||
var ignore_stream = false;
|
||||
|
||||
// This is true iff [we're in macOS and the browser is Safari] or [we're in iOS]
|
||||
var using_webkit_patch = true;
|
||||
@@ -129,6 +133,7 @@ var adventure = false;
|
||||
var chatmode = false;
|
||||
|
||||
var sliders_throttle = getThrottle(200);
|
||||
var submit_throttle = null;
|
||||
|
||||
//=================================================================//
|
||||
// METHODS
|
||||
@@ -507,6 +512,16 @@ function addWiLine(ob) {
|
||||
$(".wisortable-excluded-dynamic").removeClass("wisortable-excluded-dynamic");
|
||||
$(this).parent().css("max-height", "").find(".wicomment").find(".form-control").css("max-height", "");
|
||||
});
|
||||
|
||||
for (const wientry of document.getElementsByClassName("wientry")) {
|
||||
// If we are uninitialized, skip.
|
||||
if ($(wientry).closest(".wilistitem-uninitialized").length) continue;
|
||||
|
||||
// add() will not add if the class is already present
|
||||
wientry.classList.add("tokens-counted");
|
||||
}
|
||||
|
||||
registerTokenCounters();
|
||||
}
|
||||
|
||||
function addWiFolder(uid, ob) {
|
||||
@@ -830,6 +845,7 @@ function exitMemoryMode() {
|
||||
button_actmem.html("Memory");
|
||||
show([button_actback, button_actfwd, button_actretry, button_actwi]);
|
||||
input_text.val("");
|
||||
updateInputBudget(input_text[0]);
|
||||
// Hide Author's Note field
|
||||
anote_menu.slideUp("fast");
|
||||
}
|
||||
@@ -886,11 +902,25 @@ function formatChunkInnerText(chunk) {
|
||||
}
|
||||
|
||||
function dosubmit(disallow_abort) {
|
||||
beginStream();
|
||||
submit_start = Date.now();
|
||||
var txt = input_text.val().replace(/\u00a0/g, " ");
|
||||
if((disallow_abort || gamestate !== "wait") && !memorymode && !gamestarted && ((!adventure || !action_mode) && txt.trim().length == 0)) {
|
||||
return;
|
||||
}
|
||||
chunkOnFocusOut("override");
|
||||
// Wait for editor changes to be applied before submitting
|
||||
submit_throttle = getThrottle(70);
|
||||
submit_throttle.txt = txt;
|
||||
submit_throttle.disallow_abort = disallow_abort;
|
||||
submit_throttle(0, _dosubmit);
|
||||
}
|
||||
|
||||
function _dosubmit() {
|
||||
beginStream();
|
||||
var txt = submit_throttle.txt;
|
||||
var disallow_abort = submit_throttle.disallow_abort;
|
||||
submit_throttle = null;
|
||||
input_text.val("");
|
||||
hideMessage();
|
||||
hidegenseqs();
|
||||
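In the hunk above, `dosubmit` no longer sends the input directly: it stashes the text on a fresh throttle object and lets `_dosubmit` fire once pending editor changes have been flushed (the editor sync code elsewhere in this diff calls `submit_throttle(0, _dosubmit)` whenever it pushes an edit). The actual `getThrottle` helper is defined elsewhere in application.js and is not shown here; the sketch below is only an assumed approximation of how such a debounce-style helper could behave, matching the usage visible in the diff.

```javascript
// Assumed sketch of a getThrottle-style helper: returns a function that
// schedules `fn` after `delay` ms, resetting the timer per `id` so rapid
// repeat calls collapse into one invocation.
function getThrottle(delay) {
  const timers = {};
  return function (id, fn) {
    if (timers[id]) clearTimeout(timers[id]);
    timers[id] = setTimeout(function () {
      delete timers[id];
      fn();
    }, delay);
  };
}

// Usage mirroring the diff above: stash data on the throttle, then fire it.
// var submit_throttle = getThrottle(70);
// submit_throttle.txt = "Hello";
// submit_throttle(0, _dosubmit);
```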
@@ -1030,6 +1060,18 @@ function buildLoadModelList(ar, menu, breadcrumbs, showdelete) {
|
||||
if (breadcrumbs.length > 0) {
|
||||
$("#loadmodellistbreadcrumbs").append("<hr size='1'>")
|
||||
}
|
||||
//If we're in the custom load menu (we need to send the path data back in that case)
|
||||
if(['NeoCustom', 'GPT2Custom'].includes(menu)) {
|
||||
$("#loadmodel"+i).off("click").on("click", (function () {
|
||||
return function () {
|
||||
socket.send({'cmd': 'selectmodel', 'data': $(this).attr("name"), 'path': $(this).attr("pretty_name")});
|
||||
highlightLoadLine($(this));
|
||||
}
|
||||
})(i));
|
||||
$("#custommodelname").removeClass("hidden");
|
||||
$("#custommodelname")[0].setAttribute("menu", menu);
|
||||
}
|
||||
|
||||
for(i=0; i<ar.length; i++) {
|
||||
if (Array.isArray(ar[i][0])) {
|
||||
full_path = ar[i][0][0];
|
||||
@@ -1043,11 +1085,12 @@ function buildLoadModelList(ar, menu, breadcrumbs, showdelete) {
|
||||
html = "<div class=\"flex\">\
|
||||
<div class=\"loadlistpadding\"></div>"
|
||||
//if the menu item is a link to another menu
|
||||
if(ar[i][3]) {
|
||||
console.log(ar[i]);
|
||||
if((ar[i][3]) || (['Load a model from its directory', 'Load an old GPT-2 model (eg CloverEdition)'].includes(ar[i][0]))) {
|
||||
html = html + "<span class=\"loadlisticon loadmodellisticon-folder oi oi-folder allowed\" aria-hidden=\"true\"></span>"
|
||||
} else {
|
||||
//this is a model
|
||||
html = html + "<div class=\"loadlistpadding\"></div>"
|
||||
html = html + "<div class=\"loadlisticon oi oi-caret-right allowed\"></div> "
|
||||
}
|
||||
|
||||
//now let's do the delete icon if applicable
|
||||
@@ -1065,6 +1108,7 @@ function buildLoadModelList(ar, menu, breadcrumbs, showdelete) {
|
||||
</div>"
|
||||
loadmodelcontent.append(html);
|
||||
//If this is a menu
|
||||
console.log(ar[i]);
|
||||
if(ar[i][3]) {
|
||||
$("#loadmodel"+i).off("click").on("click", (function () {
|
||||
return function () {
|
||||
@@ -1072,27 +1116,29 @@ function buildLoadModelList(ar, menu, breadcrumbs, showdelete) {
|
||||
disableButtons([load_model_accept]);
|
||||
}
|
||||
})(i));
|
||||
//If we're in the custom load menu (we need to send the path data back in that case)
|
||||
} else if(['NeoCustom', 'GPT2Custom'].includes(menu)) {
|
||||
$("#loadmodel"+i).off("click").on("click", (function () {
|
||||
return function () {
|
||||
socket.send({'cmd': 'selectmodel', 'data': $(this).attr("name"), 'path': $(this).attr("pretty_name")});
|
||||
highlightLoadLine($(this));
|
||||
}
|
||||
})(i));
|
||||
$("#custommodelname").removeClass("hidden");
|
||||
$("#custommodelname")[0].setAttribute("menu", menu);
|
||||
//Normal load
|
||||
} else {
|
||||
$("#loadmodel"+i).off("click").on("click", (function () {
|
||||
return function () {
|
||||
$("#use_gpu_div").addClass("hidden");
|
||||
$("#modelkey").addClass("hidden");
|
||||
$("#modellayers").addClass("hidden");
|
||||
socket.send({'cmd': 'selectmodel', 'data': $(this).attr("name")});
|
||||
highlightLoadLine($(this));
|
||||
}
|
||||
})(i));
|
||||
if (['NeoCustom', 'GPT2Custom'].includes(menu)) {
|
||||
$("#loadmodel"+i).off("click").on("click", (function () {
|
||||
return function () {
|
||||
$("#use_gpu_div").addClass("hidden");
|
||||
$("#modelkey").addClass("hidden");
|
||||
$("#modellayers").addClass("hidden");
|
||||
socket.send({'cmd': 'selectmodel', 'data': $(this).attr("name"), 'path': $(this).attr("pretty_name")});
|
||||
highlightLoadLine($(this));
|
||||
}
|
||||
})(i));
|
||||
} else {
|
||||
$("#loadmodel"+i).off("click").on("click", (function () {
|
||||
return function () {
|
||||
$("#use_gpu_div").addClass("hidden");
|
||||
$("#modelkey").addClass("hidden");
|
||||
$("#modellayers").addClass("hidden");
|
||||
socket.send({'cmd': 'selectmodel', 'data': $(this).attr("name")});
|
||||
highlightLoadLine($(this));
|
||||
}
|
||||
})(i));
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -1522,14 +1568,30 @@ function chunkOnTextInput(event) {
|
||||
r.deleteContents();
|
||||
}
|
||||
|
||||
// In Chrome the added <br/> will go outside of the chunks if we press
|
||||
// In Chrome and Safari the added <br/> will go outside of the chunks if we press
|
||||
// enter at the end of the story in the editor, so this is here
|
||||
// to put the <br/> back in the right place
|
||||
var br = $("#_EDITOR_LINEBREAK_")[0];
|
||||
if(br.parentNode === game_text[0]) {
|
||||
var parent = br.previousSibling;
|
||||
if(br.previousSibling.nodeType !== 1) {
|
||||
parent = br.previousSibling.previousSibling;
|
||||
br.previousSibling.previousSibling.appendChild(br.previousSibling);
|
||||
}
|
||||
if(parent.lastChild.tagName === "BR") {
|
||||
parent.lastChild.remove(); // Chrome and Safari also insert an extra <br/> in this case for some reason so we need to remove it
|
||||
if(using_webkit_patch) {
|
||||
// Safari on iOS has a bug where it selects all text in the last chunk of the story when this happens so we collapse the selection to the end of the chunk in that case
|
||||
setTimeout(function() {
|
||||
var s = getSelection();
|
||||
var r = s.getRangeAt(0);
|
||||
r.selectNodeContents(parent);
|
||||
r.collapse(false);
|
||||
s.removeAllRanges();
|
||||
s.addRange(r);
|
||||
}, 2);
|
||||
}
|
||||
}
|
||||
br.previousSibling.appendChild(br);
|
||||
r.selectNodeContents(br.parentNode);
|
||||
s.removeAllRanges();
|
||||
@@ -1711,22 +1773,29 @@ function applyChunkDeltas(nodes) {
|
||||
var chunks = Array.from(buildChunkSetFromNodeArray(nodes));
|
||||
for(var i = 0; i < chunks.length; i++) {
|
||||
modified_chunks.add(chunks[i]);
|
||||
all_modified_chunks.add(chunks[i]);
|
||||
}
|
||||
setTimeout(function() {
|
||||
var chunks = Array.from(modified_chunks);
|
||||
var selected_chunks = buildChunkSetFromNodeArray(getSelectedNodes());
|
||||
for(var i = 0; i < chunks.length; i++) {
|
||||
var chunk = document.getElementById("n" + chunks[i]);
|
||||
if(chunk && formatChunkInnerText(chunk).length != 0 && chunks[i] != '0') {
|
||||
if(chunk && formatChunkInnerText(chunk).trim().length != 0 && chunks[i] != '0') {
|
||||
if(!selected_chunks.has(chunks[i])) {
|
||||
modified_chunks.delete(chunks[i]);
|
||||
socket.send({'cmd': 'inlineedit', 'chunk': chunks[i], 'data': formatChunkInnerText(chunk)});
|
||||
if(submit_throttle !== null) {
|
||||
submit_throttle(0, _dosubmit);
|
||||
}
|
||||
}
|
||||
empty_chunks.delete(chunks[i]);
|
||||
} else {
|
||||
if(!selected_chunks.has(chunks[i])) {
|
||||
modified_chunks.delete(chunks[i]);
|
||||
socket.send({'cmd': 'inlineedit', 'chunk': chunks[i], 'data': ''});
|
||||
socket.send({'cmd': 'inlineedit', 'chunk': chunks[i], 'data': formatChunkInnerText(chunk)});
|
||||
if(submit_throttle !== null) {
|
||||
submit_throttle(0, _dosubmit);
|
||||
}
|
||||
}
|
||||
empty_chunks.add(chunks[i]);
|
||||
}
|
||||
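As the hunk above shows, the editor pushes its changes to the server over the socket: a chunk with new text is sent as an `inlineedit` command, while removals (see restorePrompt and deleteEmptyChunks further down) use `inlinedelete`, and any pending submission throttle is flushed afterwards. A reconstruction of those two message shapes, inferred only from the `socket.send` calls in this diff; chunk numbers and text are illustrative.

```javascript
// Inferred editor-sync message shapes, based on the socket.send calls in this diff.
const editMsg   = { cmd: "inlineedit", chunk: "4", data: "She opened the door." }; // chunk 4 rewritten
const deleteMsg = { cmd: "inlinedelete", data: "5" };                              // chunk 5 removed
```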
@@ -1748,6 +1817,9 @@ function syncAllModifiedChunks(including_selected_chunks=false) {
|
||||
empty_chunks.delete(chunks[i]);
|
||||
}
|
||||
socket.send({'cmd': 'inlineedit', 'chunk': chunks[i], 'data': data});
|
||||
if(submit_throttle !== null) {
|
||||
submit_throttle(0, _dosubmit);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -1800,10 +1872,16 @@ function restorePrompt() {
|
||||
if(this.innerText.trim().length) {
|
||||
saved_prompt = this.innerText.trim();
|
||||
socket.send({'cmd': 'inlinedelete', 'data': this.getAttribute("n")});
|
||||
if(submit_throttle !== null) {
|
||||
submit_throttle(0, _dosubmit);
|
||||
}
|
||||
this.parentNode.removeChild(this);
|
||||
return false;
|
||||
}
|
||||
socket.send({'cmd': 'inlinedelete', 'data': this.getAttribute("n")});
|
||||
if(submit_throttle !== null) {
|
||||
submit_throttle(0, _dosubmit);
|
||||
}
|
||||
this.parentNode.removeChild(this);
|
||||
});
|
||||
}
|
||||
@@ -1818,6 +1896,9 @@ function restorePrompt() {
|
||||
modified_chunks.delete('0');
|
||||
empty_chunks.delete('0');
|
||||
socket.send({'cmd': 'inlineedit', 'chunk': '0', 'data': saved_prompt});
|
||||
if(submit_throttle !== null) {
|
||||
submit_throttle(0, _dosubmit);
|
||||
}
|
||||
}
|
||||
|
||||
function deleteEmptyChunks() {
|
||||
@@ -1829,13 +1910,21 @@ function deleteEmptyChunks() {
|
||||
restorePrompt();
|
||||
} else {
|
||||
socket.send({'cmd': 'inlinedelete', 'data': chunks[i]});
|
||||
if(submit_throttle !== null) {
|
||||
submit_throttle(0, _dosubmit);
|
||||
}
|
||||
}
|
||||
}
|
||||
if(modified_chunks.has('0')) {
|
||||
modified_chunks.delete(chunks[i]);
|
||||
socket.send({'cmd': 'inlineedit', 'chunk': chunks[i], 'data': formatChunkInnerText(document.getElementById("n0"))});
|
||||
if(submit_throttle !== null) {
|
||||
submit_throttle(0, _dosubmit);
|
||||
}
|
||||
}
|
||||
if(gamestarted) {
|
||||
saved_prompt = formatChunkInnerText($("#n0")[0]);
|
||||
}
|
||||
saved_prompt = formatChunkInnerText($("#n0")[0]);
|
||||
}
|
||||
|
||||
function highlightEditingChunks() {
|
||||
@@ -1858,6 +1947,70 @@ function highlightEditingChunks() {
|
||||
}
|
||||
}
|
||||
|
||||
function cleanupChunkWhitespace() {
|
||||
unbindGametext();
|
||||
|
||||
var chunks = Array.from(all_modified_chunks);
|
||||
for(var i = 0; i < chunks.length; i++) {
|
||||
var original_chunk = document.getElementById("n" + chunks[i]);
|
||||
if(original_chunk === null || original_chunk.innerText.trim().length === 0) {
|
||||
all_modified_chunks.delete(chunks[i]);
|
||||
modified_chunks.delete(chunks[i]);
|
||||
empty_chunks.add(chunks[i]);
|
||||
}
|
||||
}
|
||||
|
||||
// Merge empty chunks with the next chunk
|
||||
var chunks = Array.from(empty_chunks);
|
||||
chunks.sort(function(a, b) {return parseInt(a) - parseInt(b);});
|
||||
for(var i = 0; i < chunks.length; i++) {
|
||||
if(chunks[i] == "0") {
|
||||
continue;
|
||||
}
|
||||
var original_chunk = document.getElementById("n" + chunks[i]);
|
||||
if(original_chunk === null) {
|
||||
continue;
|
||||
}
|
||||
var chunk = original_chunk.nextSibling;
|
||||
while(chunk) {
|
||||
if(chunk.tagName === "CHUNK") {
|
||||
break;
|
||||
}
|
||||
chunk = chunk.nextSibling;
|
||||
}
|
||||
if(chunk) {
|
||||
chunk.innerText = original_chunk.innerText + chunk.innerText;
|
||||
if(original_chunk.innerText.length != 0 && !modified_chunks.has(chunk.getAttribute("n"))) {
|
||||
modified_chunks.add(chunk.getAttribute("n"));
|
||||
}
|
||||
}
|
||||
original_chunk.innerText = "";
|
||||
}
|
||||
// Move whitespace at the end of non-empty chunks into the beginning of the next non-empty chunk
|
||||
var chunks = Array.from(all_modified_chunks);
|
||||
chunks.sort(function(a, b) {return parseInt(a) - parseInt(b);});
|
||||
for(var i = 0; i < chunks.length; i++) {
|
||||
var original_chunk = document.getElementById("n" + chunks[i]);
|
||||
var chunk = original_chunk.nextSibling;
|
||||
while(chunk) {
|
||||
if(chunk.tagName === "CHUNK" && !empty_chunks.has(chunk.getAttribute("n"))) {
|
||||
break;
|
||||
}
|
||||
chunk = chunk.nextSibling;
|
||||
}
|
||||
var ln = original_chunk.innerText.trimEnd().length;
|
||||
if (chunk) {
|
||||
chunk.innerText = original_chunk.innerText.substring(ln) + chunk.innerText;
|
||||
if(ln != original_chunk.innerText.length && !modified_chunks.has(chunk.getAttribute("n"))) {
|
||||
modified_chunks.add(chunk.getAttribute("n"));
|
||||
}
|
||||
}
|
||||
original_chunk.innerText = original_chunk.innerText.substring(0, ln);
|
||||
}
|
||||
|
||||
bindGametext();
|
||||
}
|
||||
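The new cleanupChunkWhitespace pass first folds chunks that became empty into the following chunk, then moves any trailing whitespace of a chunk onto the start of the next non-empty one, so no chunk sent to the server ends in stray spaces or newlines. The snippet below is illustrative only, applying the same trailing-whitespace rule to plain strings instead of the live chunk DOM nodes.

```javascript
// Illustrative only: the "move trailing whitespace into the next chunk" rule
// applied to plain strings rather than <chunk> elements.
function moveTrailingWhitespace(parts) {
  const out = parts.slice();
  for (let i = 0; i < out.length - 1; i++) {
    const trimmedLength = out[i].trimEnd().length;
    const overflow = out[i].substring(trimmedLength); // trailing spaces/newlines
    out[i] = out[i].substring(0, trimmedLength);
    out[i + 1] = overflow + out[i + 1];
  }
  return out;
}

console.log(moveTrailingWhitespace(["He said  ", "hello."]));
// -> [ 'He said', '  hello.' ]
```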
|
||||
// This gets run every time the text in a chunk is edited
|
||||
// or a chunk is deleted
|
||||
function chunkOnDOMMutate(mutations, observer) {
|
||||
@@ -1929,13 +2082,15 @@ function chunkOnKeyDownSelectionChange(event) {
|
||||
// This gets run when you defocus the editor by clicking
|
||||
// outside of the editor or by pressing escape or tab
|
||||
function chunkOnFocusOut(event) {
|
||||
if(!gametext_bound || !allowedit || event.target !== game_text[0]) {
|
||||
if(event !== "override" && (!gametext_bound || !allowedit || event.target !== game_text[0])) {
|
||||
return;
|
||||
}
|
||||
setTimeout(function() {
|
||||
if(document.activeElement === game_text[0] || game_text[0].contains(document.activeElement)) {
|
||||
return;
|
||||
}
|
||||
cleanupChunkWhitespace();
|
||||
all_modified_chunks = new Set();
|
||||
syncAllModifiedChunks(true);
|
||||
setTimeout(function() {
|
||||
var blurred = game_text[0] !== document.activeElement;
|
||||
@@ -1959,6 +2114,20 @@ function unbindGametext() {
|
||||
gametext_bound = false;
|
||||
}
|
||||
|
||||
function beginStream() {
|
||||
ignore_stream = false;
|
||||
token_prob_container[0].innerHTML = "";
|
||||
}
|
||||
|
||||
function endStream() {
|
||||
// Clear stream, the real text is about to be displayed.
|
||||
ignore_stream = true;
|
||||
if (stream_preview) {
|
||||
stream_preview.remove();
|
||||
stream_preview = null;
|
||||
}
|
||||
}
|
||||
|
||||
function update_gpu_layers() {
|
||||
var gpu_layers
|
||||
gpu_layers = 0;
|
||||
@@ -1987,6 +2156,45 @@ function RemoveAllButFirstOption(selectElement) {
|
||||
}
|
||||
}
|
||||
|
||||
function interpolateRGB(color0, color1, t) {
|
||||
return [
|
||||
color0[0] + ((color1[0] - color0[0]) * t),
|
||||
color0[1] + ((color1[1] - color0[1]) * t),
|
||||
color0[2] + ((color1[2] - color0[2]) * t),
|
||||
]
|
||||
}
|
||||
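interpolateRGB linearly blends two RGB triples by the factor t; the streamtoken handler further down uses it to shade probability rows from white towards green as the score rises. A small usage example, assuming the function above is in scope:

```javascript
// Example: the colour used for a token with probability score 0.25, matching
// how the streamtoken handler below calls interpolateRGB.
let rgb = interpolateRGB([255, 255, 255], [0, 255, 0], 0.25).map(Math.round);
console.log(`rgb(${rgb.join(", ")})`); // -> "rgb(191, 255, 191)"
```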
|
||||
function updateInputBudget(inputElement) {
|
||||
let data = {"unencoded": inputElement.value, "field": inputElement.id};
|
||||
|
||||
if (inputElement.id === "anoteinput") {
|
||||
data["anotetemplate"] = $("#anotetemplate").val();
|
||||
}
|
||||
|
||||
socket.send({"cmd": "getfieldbudget", "data": data});
|
||||
}
|
||||
|
||||
function registerTokenCounters() {
|
||||
// Add token counters to all input containers with the class of "tokens-counted",
|
||||
// if a token counter is not already a child of said container.
|
||||
for (const el of document.getElementsByClassName("tokens-counted")) {
|
||||
if (el.getElementsByClassName("input-token-usage").length) continue;
|
||||
|
||||
let span = document.createElement("span");
|
||||
span.classList.add("input-token-usage");
|
||||
span.innerText = "?/? Tokens";
|
||||
el.appendChild(span);
|
||||
|
||||
let inputElement = el.querySelector("input, textarea");
|
||||
|
||||
inputElement.addEventListener("input", function() {
|
||||
updateInputBudget(this);
|
||||
});
|
||||
|
||||
updateInputBudget(inputElement);
|
||||
}
|
||||
}
|
||||
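updateInputBudget and registerTokenCounters implement the client half of the token-budget display: every counted field sends its raw text to the server as a `getfieldbudget` command, and the `showfieldbudget` handler further down fills the "X/Y Tokens" span from the reply. The shapes below are reconstructed from this diff alone; the field id, text and token counts are illustrative and the server's exact payload may differ.

```javascript
// Assumed message shapes for the token-budget round trip, inferred from the
// client code in this diff. Values are illustrative only.
const request = {
  cmd: "getfieldbudget",
  data: {
    unencoded: "Also, there is a dragon.",       // raw field text
    field: "anoteinput",                         // element id, as in updateInputBudget
    anotetemplate: "[Author's note: <|>]",       // assumed template value; only sent for this field
  },
};

const reply = {
  cmd: "showfieldbudget",
  data: { field: "anoteinput", length: 9, max: 2048 }, // illustrative numbers
};

// The handler renders `${length ?? "?"}/${max ?? "?"} Tokens`, or clears the
// counter entirely when max is null.
console.log(`${reply.data.length ?? "?"}/${reply.data.max ?? "?"} Tokens`);
```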
|
||||
//=================================================================//
|
||||
// READY/RUNTIME
|
||||
//=================================================================//
|
||||
@@ -2078,9 +2286,16 @@ $(document).ready(function(){
|
||||
rs_close = $("#btn_rsclose");
|
||||
seqselmenu = $("#seqselmenu");
|
||||
seqselcontents = $("#seqselcontents");
|
||||
token_prob_container = $("#token_prob_container");
|
||||
token_prob_menu = $("#token_prob_menu");
|
||||
|
||||
// Connect to SocketIO server
|
||||
socket = io.connect(window.document.origin, {transports: ['polling', 'websocket'], closeOnBeforeunload: false, query:{"ui": "1"}});
|
||||
socket.on('load_popup', function(data){load_popup(data);});
|
||||
socket.on('popup_items', function(data){popup_items(data);});
|
||||
socket.on('popup_breadcrumbs', function(data){popup_breadcrumbs(data);});
|
||||
socket.on('popup_edit_file', function(data){popup_edit_file(data);});
|
||||
socket.on('error_popup', function(data){error_popup(data);});
|
||||
|
||||
socket.on('from_server', function(msg) {
|
||||
//console.log(msg);
|
||||
@@ -2130,6 +2345,75 @@ $(document).ready(function(){
|
||||
active_element.focus();
|
||||
})();
|
||||
$("body").addClass("connected");
|
||||
} else if (msg.cmd == "streamtoken") {
|
||||
// Sometimes the stream_token messages will come in too late, after
|
||||
// we have received the full text. This leads to some stray tokens
|
||||
// appearing after the output. To combat this, we only allow tokens
|
||||
// to be displayed after requesting and before receiving text.
|
||||
if (ignore_stream) return;
|
||||
|
||||
let streamingEnabled = $("#setoutputstreaming")[0].checked;
|
||||
let probabilitiesEnabled = $("#setshowprobs")[0].checked;
|
||||
|
||||
if (!streamingEnabled && !probabilitiesEnabled) return;
|
||||
|
||||
if (!stream_preview && streamingEnabled) {
|
||||
stream_preview = document.createElement("span");
|
||||
game_text.append(stream_preview);
|
||||
}
|
||||
|
||||
for (const token of msg.data) {
|
||||
if (streamingEnabled) stream_preview.innerText += token.decoded;
|
||||
|
||||
if (probabilitiesEnabled) {
|
||||
// Probability display
|
||||
let probDiv = document.createElement("div");
|
||||
probDiv.classList.add("token-probs");
|
||||
|
||||
let probTokenSpan = document.createElement("span");
|
||||
probTokenSpan.classList.add("token-probs-header");
|
||||
probTokenSpan.innerText = token.decoded.replaceAll("\n", "\\n");
|
||||
probDiv.appendChild(probTokenSpan);
|
||||
|
||||
let probTable = document.createElement("table");
|
||||
let probTBody = document.createElement("tbody");
|
||||
probTable.appendChild(probTBody);
|
||||
|
||||
for (const probToken of token.probabilities) {
|
||||
let tr = document.createElement("tr");
|
||||
let rgb = interpolateRGB(
|
||||
[255, 255, 255],
|
||||
[0, 255, 0],
|
||||
probToken.score
|
||||
).map(Math.round);
|
||||
let color = `rgb(${rgb.join(", ")})`;
|
||||
|
||||
if (probToken.decoded === token.decoded) {
|
||||
tr.classList.add("token-probs-final-token");
|
||||
}
|
||||
|
||||
let tds = {};
|
||||
|
||||
for (const property of ["tokenId", "decoded", "score"]) {
|
||||
let td = document.createElement("td");
|
||||
td.style.color = color;
|
||||
tds[property] = td;
|
||||
tr.appendChild(td);
|
||||
}
|
||||
|
||||
tds.tokenId.innerText = probToken.tokenId;
|
||||
tds.decoded.innerText = probToken.decoded.toString().replaceAll("\n", "\\n");
|
||||
tds.score.innerText = (probToken.score * 100).toFixed(2) + "%";
|
||||
|
||||
probTBody.appendChild(tr);
|
||||
}
|
||||
|
||||
probDiv.appendChild(probTable);
|
||||
token_prob_container.append(probDiv);
|
||||
}
|
||||
}
|
||||
|
||||
scrollToBottom();
|
||||
} else if(msg.cmd == "updatescreen") {
|
||||
var _gamestarted = gamestarted;
|
||||
gamestarted = msg.gamestarted;
|
||||
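The `streamtoken` branch above expects `msg.data` to be a list of generated tokens, each with its decoded text and, when probability display is on, the candidate tokens that were considered. Below is a hedged reconstruction of that payload, inferred purely from the fields the handler reads; the server side is not part of this diff and all values are illustrative.

```javascript
// Inferred shape of a single streamtoken message, based on the fields the
// handler above reads (token.decoded, token.probabilities[*].tokenId/decoded/score).
const msg = {
  cmd: "streamtoken",
  data: [
    {
      decoded: " the",
      probabilities: [
        { tokenId: 262, decoded: " the", score: 0.41 }, // illustrative values
        { tokenId: 257, decoded: " a",   score: 0.22 },
        { tokenId: 607, decoded: " her", score: 0.09 },
      ],
    },
  ],
};

// The streaming preview text is just the concatenation of decoded tokens:
const previewText = msg.data.map(function (t) { return t.decoded; }).join("");
```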
@@ -2140,6 +2424,7 @@ $(document).ready(function(){
|
||||
unbindGametext();
|
||||
allowedit = gamestarted && $("#allowediting").prop('checked');
|
||||
game_text.attr('contenteditable', allowedit);
|
||||
all_modified_chunks = new Set();
|
||||
modified_chunks = new Set();
|
||||
empty_chunks = new Set();
|
||||
game_text.html(msg.data);
|
||||
@@ -2159,6 +2444,7 @@ $(document).ready(function(){
|
||||
scrollToBottom();
|
||||
} else if(msg.cmd == "updatechunk") {
|
||||
hideMessage();
|
||||
game_text.attr('contenteditable', allowedit);
|
||||
if (typeof submit_start !== 'undefined') {
|
||||
$("#runtime")[0].innerHTML = `Generation time: ${Math.round((Date.now() - submit_start)/1000)} sec`;
|
||||
delete submit_start;
|
||||
@@ -2178,7 +2464,11 @@ $(document).ready(function(){
|
||||
} else if (!empty_chunks.has(index.toString())) {
|
||||
// Append at the end
|
||||
unbindGametext();
|
||||
var lc = game_text[0].lastChild;
|
||||
|
||||
// game_text can contain things other than chunks (stream
|
||||
// preview), so we use querySelector to get the last chunk.
|
||||
var lc = game_text[0].querySelector("chunk:last-of-type");
|
||||
|
||||
if(lc.tagName === "CHUNK" && lc.lastChild !== null && lc.lastChild.tagName === "BR") {
|
||||
lc.removeChild(lc.lastChild);
|
||||
}
|
||||
@@ -2194,7 +2484,11 @@ $(document).ready(function(){
|
||||
var element = game_text.children('#n' + index);
|
||||
if(element.length) {
|
||||
unbindGametext();
|
||||
if((element[0].nextSibling === null || element[0].nextSibling.nodeType !== 1 || element[0].nextSibling.tagName !== "CHUNK") && element[0].previousSibling !== null && element[0].previousSibling.tagName === "CHUNK") {
|
||||
if(
|
||||
(element[0].nextSibling === null || element[0].nextSibling.nodeType !== 1 || element[0].nextSibling.tagName !== "CHUNK")
|
||||
&& element[0].previousSibling !== null
|
||||
&& element[0].previousSibling.tagName === "CHUNK"
|
||||
) {
|
||||
element[0].previousSibling.appendChild(document.createElement("br"));
|
||||
}
|
||||
element.remove(); // Remove the chunk
|
||||
@@ -2204,6 +2498,7 @@ $(document).ready(function(){
|
||||
} else if(msg.cmd == "setgamestate") {
|
||||
// Enable or Disable buttons
|
||||
if(msg.data == "ready") {
|
||||
endStream();
|
||||
enableSendBtn();
|
||||
enableButtons([button_actmem, button_actwi, button_actback, button_actfwd, button_actretry]);
|
||||
hideWaitAnimation();
|
||||
@@ -2243,6 +2538,7 @@ $(document).ready(function(){
|
||||
memorytext = msg.data;
|
||||
input_text.val(msg.data);
|
||||
}
|
||||
updateInputBudget(input_text[0]);
|
||||
} else if(msg.cmd == "setmemory") {
|
||||
memorytext = msg.data;
|
||||
if(memorymode) {
|
||||
@@ -2364,6 +2660,7 @@ $(document).ready(function(){
|
||||
} else if(msg.cmd == "setanote") {
|
||||
// Set contents of Author's Note field
|
||||
anote_input.val(msg.data);
|
||||
updateInputBudget(anote_input[0]);
|
||||
} else if(msg.cmd == "setanotetemplate") {
|
||||
// Set contents of Author's Note Template field
|
||||
$("#anotetemplate").val(msg.data);
|
||||
@@ -2390,6 +2687,17 @@ $(document).ready(function(){
|
||||
} else if(msg.cmd == "updatesingleline") {
|
||||
// Update toggle state
|
||||
$("#singleline").prop('checked', msg.data).change();
|
||||
} else if(msg.cmd == "updateoutputstreaming") {
|
||||
// Update toggle state
|
||||
$("#setoutputstreaming").prop('checked', msg.data).change();
|
||||
} else if(msg.cmd == "updateshowprobs") {
|
||||
$("#setshowprobs").prop('checked', msg.data).change();
|
||||
|
||||
if(msg.data) {
|
||||
token_prob_menu.removeClass("hidden");
|
||||
} else {
|
||||
token_prob_menu.addClass("hidden");
|
||||
}
|
||||
} else if(msg.cmd == "allowtoggle") {
|
||||
// Allow toggle change states to propagate
|
||||
allowtoggle = msg.data;
|
||||
@@ -2564,6 +2872,9 @@ $(document).ready(function(){
|
||||
} else if(msg.cmd == "updatenogenmod") {
|
||||
// Update toggle state
|
||||
$("#setnogenmod").prop('checked', msg.data).change();
|
||||
} else if(msg.cmd == "updatefulldeterminism") {
|
||||
// Update toggle state
|
||||
$("#setfulldeterminism").prop('checked', msg.data).change();
|
||||
} else if(msg.cmd == "runs_remotely") {
|
||||
remote = true;
|
||||
hide([button_savetofile, button_import, button_importwi]);
|
||||
@@ -2589,6 +2900,8 @@ $(document).ready(function(){
|
||||
if (msg.key) {
|
||||
$("#modelkey").removeClass("hidden");
|
||||
$("#modelkey")[0].value = msg.key_value;
|
||||
//if we're in the API list, disable to load button until the model is selected (after the API Key is entered)
|
||||
disableButtons([load_model_accept]);
|
||||
} else {
|
||||
$("#modelkey").addClass("hidden");
|
||||
|
||||
@@ -2626,6 +2939,7 @@ $(document).ready(function(){
|
||||
}
|
||||
} else if(msg.cmd == 'oai_engines') {
|
||||
$("#oaimodel").removeClass("hidden")
|
||||
enableButtons([load_model_accept]);
|
||||
selected_item = 0;
|
||||
length = $("#oaimodel")[0].options.length;
|
||||
for (let i = 0; i < length; i++) {
|
||||
@@ -2648,6 +2962,7 @@ $(document).ready(function(){
|
||||
$("#showmodelnamecontainer").removeClass("hidden");
|
||||
} else if(msg.cmd == 'hide_model_name') {
|
||||
$("#showmodelnamecontainer").addClass("hidden");
|
||||
location.reload();
|
||||
//console.log("Closing window");
|
||||
} else if(msg.cmd == 'model_load_status') {
|
||||
$("#showmodelnamecontent").html("<div class=\"flex\"><div class=\"loadlistpadding\"></div><div class=\"loadlistitem\" style='align: left'>" + msg.data + "</div></div>");
|
||||
@@ -2661,7 +2976,18 @@ $(document).ready(function(){
|
||||
opt.innerHTML = engine[1];
|
||||
$("#oaimodel")[0].appendChild(opt);
|
||||
}
|
||||
} else if(msg.cmd == 'showfieldbudget') {
|
||||
let inputElement = document.getElementById(msg.data.field);
|
||||
let tokenBudgetElement = inputElement.parentNode.getElementsByClassName("input-token-usage")[0];
|
||||
if (msg.data.max === null) {
|
||||
tokenBudgetElement.innerText = "";
|
||||
} else {
|
||||
let tokenLength = msg.data.length ?? "?";
|
||||
let tokenMax = msg.data.max ?? "?";
|
||||
tokenBudgetElement.innerText = `${tokenLength}/${tokenMax} Tokens`;
|
||||
}
|
||||
}
|
||||
enableButtons([load_model_accept]);
|
||||
});
|
||||
|
||||
socket.on('disconnect', function() {
|
||||
@@ -2691,6 +3017,12 @@ $(document).ready(function(){
|
||||
chunkOnFocusOut
|
||||
);
|
||||
mutation_observer = new MutationObserver(chunkOnDOMMutate);
|
||||
$("#gamescreen").on('click', function(e) {
|
||||
if(this !== e.target) {
|
||||
return;
|
||||
}
|
||||
document.activeElement.blur();
|
||||
});
|
||||
|
||||
// This is required for the editor to work correctly in Firefox on desktop
|
||||
// because the gods of HTML and JavaScript say so
|
||||
@@ -2776,6 +3108,7 @@ $(document).ready(function(){
|
||||
});
|
||||
|
||||
button_actretry.on("click", function(ev) {
|
||||
beginStream();
|
||||
hideMessage();
|
||||
socket.send({'cmd': 'retry', 'chatname': chatmode ? chat_name.val() : undefined, 'data': ''});
|
||||
hidegenseqs();
|
||||
@@ -3022,6 +3355,7 @@ $(document).ready(function(){
|
||||
});
|
||||
|
||||
rs_accept.on("click", function(ev) {
|
||||
beginStream();
|
||||
hideMessage();
|
||||
socket.send({'cmd': 'rndgame', 'memory': $("#rngmemory").val(), 'data': topic.val()});
|
||||
hideRandomStoryPopup();
|
||||
@@ -3095,4 +3429,287 @@ $(document).ready(function(){
			return true;
		}
	});

	// Shortcuts
	$(window).keydown(function (ev) {
		// Only ctrl prefixed (for now)
		if (!ev.ctrlKey) return;

		let handled = true;
		switch (ev.key) {
			// Ctrl+Z - Back
			case "z":
				button_actback.click();
				break;
			// Ctrl+Y - Forward
			case "y":
				button_actfwd.click();
				break;
			// Ctrl+E - Retry
			case "e":
				button_actretry.click();
				break;
			default:
				handled = false;
		}

		if (handled) ev.preventDefault();
	});

	$("#anotetemplate").on("input", function() {
		updateInputBudget(anote_input[0]);
	})

	registerTokenCounters();

	updateInputBudget(input_text[0]);

});

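Editor's note: the shortcut handler is a plain switch over ev.key gated on ev.ctrlKey, with preventDefault only when a case matched. For reference, an equivalent dispatch-table form of the same three bindings; this is a sketch against the buttons already defined above, not part of the commit.

	// Same behaviour as the switch above, expressed as a lookup table (illustrative refactor only).
	const ctrlShortcuts = {
		"z": () => button_actback.click(),   // Ctrl+Z - Back
		"y": () => button_actfwd.click(),    // Ctrl+Y - Forward
		"e": () => button_actretry.click(),  // Ctrl+E - Retry
	};
	$(window).keydown(function (ev) {
		if (!ev.ctrlKey) return;
		const action = ctrlShortcuts[ev.key];
		if (action) {
			action();
			ev.preventDefault();
		}
	});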
var popup_deleteable = false;
var popup_editable = false;
var popup_renameable = false;

function load_popup(data) {
	document.getElementById('spcontainer').classList.add('hidden');
	document.getElementById('uscontainer').classList.add('hidden');
	popup_deleteable = data.deleteable;
	popup_editable = data.editable;
	popup_renameable = data.renameable;
	var popup = document.getElementById("popup");
	var popup_title = document.getElementById("popup_title");
	popup_title.textContent = data.popup_title;
	var popup_list = document.getElementById("popup_list");
	//first, let's clear out our existing data
	while (popup_list.firstChild) {
		popup_list.removeChild(popup_list.firstChild);
	}
	var breadcrumbs = document.getElementById('popup_breadcrumbs');
	while (breadcrumbs.firstChild) {
		breadcrumbs.removeChild(breadcrumbs.firstChild);
	}

	if (data.upload) {
		const dropArea = document.getElementById('popup_list');
		dropArea.addEventListener('dragover', (event) => {
			event.stopPropagation();
			event.preventDefault();
			// Style the drag-and-drop as a "copy file" operation.
			event.dataTransfer.dropEffect = 'copy';
		});

		dropArea.addEventListener('drop', (event) => {
			event.stopPropagation();
			event.preventDefault();
			const fileList = event.dataTransfer.files;
			for (file of fileList) {
				reader = new FileReader();
				reader.onload = function (event) {
					socket.emit("upload_file", {'filename': file.name, "data": event.target.result});
				};
				reader.readAsArrayBuffer(file);
			}
		});
	} else {

	}

	popup.classList.remove("hidden");

	//adjust accept button
	if (data.call_back == "") {
		document.getElementById("popup_accept").classList.add("hidden");
	} else {
		document.getElementById("popup_accept").classList.remove("hidden");
		var accept = document.getElementById("popup_accept");
		accept.classList.add("disabled");
		accept.setAttribute("emit", data.call_back);
		accept.setAttribute("selected_value", "");
		accept.onclick = function () {
			socket.emit(this.emit, this.getAttribute("selected_value"));
			document.getElementById("popup").classList.add("hidden");
		};
	}

}

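Editor's note: load_popup() is driven entirely by the fields it reads off its argument. The payload shape, reconstructed from those reads; every value below is illustrative, including the call_back event name.

	load_popup({
		popup_title: "Select a file to load",  // header text
		deleteable: true,                      // whether popup_items() renders delete icons
		editable: false,                       // whether popup_items() renders edit icons
		renameable: true,                      // whether popup_items() renders rename icons
		upload: true,                          // enables drag-and-drop upload onto the list
		call_back: "popup_accept_event"        // socket event emitted by the Accept button; "" hides Accept
	});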
function popup_items(data) {
	var popup_list = document.getElementById('popup_list');
	//first, let's clear out our existing data
	while (popup_list.firstChild) {
		popup_list.removeChild(popup_list.firstChild);
	}
	document.getElementById('popup_upload_input').value = "";

	for (item of data) {
		var list_item = document.createElement("span");
		list_item.classList.add("item");

		//create the folder icon
		var folder_icon = document.createElement("span");
		folder_icon.classList.add("folder_icon");
		if (item[0]) {
			folder_icon.classList.add("oi");
			folder_icon.setAttribute('data-glyph', "folder");
		}
		list_item.append(folder_icon);

		//create the edit icon
		var edit_icon = document.createElement("span");
		edit_icon.classList.add("edit_icon");
		if ((popup_editable) && !(item[0])) {
			edit_icon.classList.add("oi");
			edit_icon.setAttribute('data-glyph', "spreadsheet");
			edit_icon.title = "Edit"
			edit_icon.id = item[1];
			edit_icon.onclick = function () {
				socket.emit("popup_edit", this.id);
			};
		}
		list_item.append(edit_icon);

		//create the rename icon
		var rename_icon = document.createElement("span");
		rename_icon.classList.add("rename_icon");
		if ((popup_renameable) && !(item[0])) {
			rename_icon.classList.add("oi");
			rename_icon.setAttribute('data-glyph', "pencil");
			rename_icon.title = "Rename"
			rename_icon.id = item[1];
			rename_icon.setAttribute("filename", item[2]);
			rename_icon.onclick = function () {
				var new_name = prompt("Please enter new filename for \n"+ this.getAttribute("filename"));
				if (new_name != null) {
					socket.emit("popup_rename", {"file": this.id, "new_name": new_name});
				}
			};
		}
		list_item.append(rename_icon);

		//create the delete icon
		var delete_icon = document.createElement("span");
		delete_icon.classList.add("delete_icon");
		if (popup_deleteable) {
			delete_icon.classList.add("oi");
			delete_icon.setAttribute('data-glyph', "x");
			delete_icon.title = "Delete"
			delete_icon.id = item[1];
			delete_icon.setAttribute("folder", item[0]);
			delete_icon.onclick = function () {
				if (this.getAttribute("folder") == "true") {
					if (window.confirm("Do you really want to delete this folder and ALL files under it?")) {
						socket.emit("popup_delete", this.id);
					}
				} else {
					if (window.confirm("Do you really want to delete this file?")) {
						socket.emit("popup_delete", this.id);
					}
				}
			};
		}
		list_item.append(delete_icon);

		//create the actual item
		var popup_item = document.createElement("span");
		popup_item.classList.add("file");
		popup_item.id = item[1];
		popup_item.setAttribute("folder", item[0]);
		popup_item.setAttribute("valid", item[3]);
		popup_item.textContent = item[2];
		popup_item.onclick = function () {
			var accept = document.getElementById("popup_accept");
			if (this.getAttribute("valid") == "true") {
				accept.classList.remove("disabled");
				accept.setAttribute("selected_value", this.id);
			} else {
				console.log("not valid");
				accept.setAttribute("selected_value", "");
				accept.classList.add("disabled");
				if (this.getAttribute("folder") == "true") {
					console.log("folder");
					socket.emit("popup_change_folder", this.id);
				}
			}
		};
		list_item.append(popup_item);

		popup_list.append(list_item);

	}
}

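Editor's note: each entry popup_items() receives is indexed positionally (item[0] through item[3]). A sketch of that tuple layout, with illustrative values:

	// [is_folder, id/path sent back to the server, display name, selectable]
	popup_items([
		[true,  "stories/archive",       "archive",       false],  // folder: clicking it emits popup_change_folder
		[false, "stories/my_story.json", "my_story.json", true]    // file: clicking it arms the Accept button
	]);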
function popup_breadcrumbs(data) {
	var breadcrumbs = document.getElementById('popup_breadcrumbs')
	while (breadcrumbs.firstChild) {
		breadcrumbs.removeChild(breadcrumbs.firstChild);
	}

	for (item of data) {
		var button = document.createElement("button");
		button.id = item[0];
		button.textContent = item[1];
		button.classList.add("breadcrumbitem");
		button.onclick = function () {
			socket.emit("popup_change_folder", this.id);
		};
		breadcrumbs.append(button);
		var span = document.createElement("span");
		span.textContent = "\\";
		breadcrumbs.append(span);
	}
}

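Editor's note: popup_breadcrumbs() renders one clickable segment per [folder id, label] pair, separated by backslashes. An illustrative call (ids and labels assumed):

	popup_breadcrumbs([
		["stories",            "stories"],
		["stories/adventures", "adventures"]   // clicking a segment emits popup_change_folder with its id
	]);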
function popup_edit_file(data) {
	var popup_list = document.getElementById('popup_list');
	var accept = document.getElementById("popup_accept");
	accept.classList.add("btn-secondary");
	accept.classList.remove("btn-primary");
	accept.textContent = "Save";
	//first, let's clear out our existing data
	while (popup_list.firstChild) {
		popup_list.removeChild(popup_list.firstChild);
	}
	var accept = document.getElementById("popup_accept");
	accept.setAttribute("selected_value", "");
	accept.onclick = function () {
		var textarea = document.getElementById("filecontents");
		socket.emit("popup_change_file", {"file": textarea.getAttribute("filename"), "data": textarea.value});
		document.getElementById("popup").classList.add("hidden");
		this.classList.add("hidden");
	};

	var textarea = document.createElement("textarea");
	textarea.classList.add("fullwidth");
	textarea.rows = 25;
	textarea.id = "filecontents"
	textarea.setAttribute("filename", data.file);
	textarea.value = data.text;
	textarea.onblur = function () {
		var accept = document.getElementById("popup_accept");
		accept.classList.remove("hidden");
		accept.classList.remove("btn-secondary");
		accept.classList.add("btn-primary");
	};
	popup_list.append(textarea);

}

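Editor's note: popup_edit_file() fills the popup with a textarea and re-targets the Accept button as Save, which emits "popup_change_file" with the edited text. The payload it takes, with the path and contents shown here purely as examples:

	popup_edit_file({
		file: "userscripts/example.lua",                   // stored on the textarea's filename attribute
		text: "-- current file contents shown for editing"
	});
	// Pressing Save then emits {"file": "userscripts/example.lua", "data": <edited text>} on "popup_change_file".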
function error_popup(data) {
	alert(data);
}

function upload_file(file_box) {
	var fileList = file_box.files;
	for (file of fileList) {
		reader = new FileReader();
		reader.onload = function (event) {
			socket.emit("upload_file", {'filename': file.name, "data": event.target.result});
		};
		reader.readAsArrayBuffer(file);
	}
}

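Editor's note: in both upload paths (the drop handler in load_popup() and upload_file() above), the loop variable file (and reader) are undeclared and therefore shared implicit globals, so with multiple files every onload callback can see the last file in the list and report the wrong filename. A closure-safe variant of the same upload, as a sketch; the function name is illustrative, not part of the commit:

	function upload_file_safe(file_box) {
		for (const file of file_box.files) {          // block-scoped: each onload closes over its own file
			const reader = new FileReader();
			reader.onload = function (event) {
				socket.emit("upload_file", {'filename': file.name, "data": event.target.result});
			};
			reader.readAsArrayBuffer(file);
		}
	}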
@@ -4,6 +4,7 @@ body {

chunk {
	color: #ffffff;
	white-space: pre-wrap;
}

#gametext.adventure action {
@@ -290,7 +291,7 @@ body.connected #formatmenu, #formatmenu.always-available {
	align-items: center;
}

#popup {
#popup_old {
	width: 75%;
	min-width: 500px;
	max-width: 1000px;
@@ -1545,4 +1546,179 @@ body.connected .popupfooter, .popupfooter.always-available {
|
||||
|
||||
.change .menubar3 {
|
||||
transform: translate(0px, -6px) rotate(45deg);
|
||||
}
|
||||
|
||||
|
||||
/*---------------------------------- Popup -------------------------------------------------*/
|
||||
.new_popup {
|
||||
position: absolute;
|
||||
top: 10vh;
|
||||
left: 10%;
|
||||
z-index: 999;
|
||||
width: 80%;
|
||||
height: 80vh;
|
||||
background-color: black;
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
background-color: #474B4F;
|
||||
color: white;
|
||||
}
|
||||
|
||||
.new_popup .title {
|
||||
width: 100%;
|
||||
background-color: #337AB7;
|
||||
text-align: center;
|
||||
font-size: 1.3em;
|
||||
}
|
||||
|
||||
.new_popup .popup_list_area {
|
||||
height: 70vh;
|
||||
overflow-x: hidden;
|
||||
}
|
||||
.new_popup .item {
|
||||
width: 100%;
|
||||
background-color: #262626;
|
||||
padding: 2px;
|
||||
display: grid;
|
||||
grid-template-areas: "folder_icon delete_icon edit_icon rename_icon file";
|
||||
grid-template-columns: 20px 20px 20px 20px auto;
|
||||
|
||||
}
|
||||
|
||||
.new_popup .item .folder_icon {
|
||||
grid-area: folder_icon;
|
||||
}
|
||||
|
||||
.new_popup .item .edit_icon {
|
||||
grid-area: edit_icon;
|
||||
}
|
||||
|
||||
.new_popup .item .rename_icon {
|
||||
grid-area: rename_icon;
|
||||
}
|
||||
|
||||
.new_popup .item .delete_icon {
|
||||
grid-area: delete_icon;
|
||||
}
|
||||
|
||||
.new_popup .item .file {
|
||||
grid-area: file;
|
||||
}
|
||||
|
||||
.new_popup .item .file:hover {
|
||||
background-color: #688f1f;
|
||||
}
|
||||
|
||||
.new_popup textarea {
|
||||
grid-area: textarea;
|
||||
background-color: #404040;
|
||||
color: white;
|
||||
resize: none;
|
||||
width: 100%;
|
||||
}
|
||||
|
||||
.new_popup .popup_load_cancel {
|
||||
text-align: center;
|
||||
background-color: #285070;
|
||||
}
|
||||
|
||||
.popup_load_cancel_button {
|
||||
vertical-align: bottom;
|
||||
display: inline;
|
||||
}
|
||||
|
||||
.popup_load_cancel_button.btn-secondary {
|
||||
color: rgb(51, 51, 51);
|
||||
background-color: #686c68;
|
||||
}
|
||||
|
||||
.breadcrumbitem {
|
||||
padding: 5px 10px 5px 10px;
|
||||
color: #ffffff;
|
||||
background-color: transparent;
|
||||
border: none;
|
||||
|
||||
-moz-transition: background-color 0.25s ease-in;
|
||||
-o-transition: background-color 0.25s ease-in;
|
||||
-webkit-transition: background-color 0.25s ease-in;
|
||||
transition: background-color 0.25s ease-in;
|
||||
}
|
||||
|
||||
.breadcrumbitem:hover {
|
||||
cursor: pointer;
|
||||
background-color: #688f1f;
|
||||
}
|
||||
|
||||
#token_prob_menu {
|
||||
color: white;
|
||||
background-color: #262626;
|
||||
}
|
||||
|
||||
.token-probs {
|
||||
display: inline-block;
|
||||
text-align: center;
|
||||
margin-right: 5px;
|
||||
}
|
||||
|
||||
.token-probs > table {
|
||||
width: 100%;
|
||||
}
|
||||
|
||||
.token-probs > table > tbody > tr > td {
|
||||
border: 1px solid #262626;
|
||||
border-collapse: collapse;
|
||||
padding: 2px 15px;
|
||||
}
|
||||
|
||||
.token-probs > table > tbody > tr {
|
||||
background-color: #3e3e3e;
|
||||
}
|
||||
|
||||
.token-probs > table > tbody > tr:nth-child(2n) {
|
||||
background-color: #575757;
|
||||
}
|
||||
|
||||
.token-probs-final-token {
|
||||
font-weight: bold;
|
||||
text-decoration: underline;
|
||||
}
|
||||
|
||||
.token-probs-final-token > td {
|
||||
background: #5c8a5a;
|
||||
}
|
||||
|
||||
.token-probs-header {
|
||||
display: block;
|
||||
}
|
||||
|
||||
#token_prob_container {
|
||||
overflow-x: auto;
|
||||
white-space: nowrap;
|
||||
}
|
||||
|
||||
.tokens-counted {
|
||||
position: relative;
|
||||
}
|
||||
|
||||
.input-token-usage {
|
||||
color: white;
|
||||
position: absolute;
|
||||
font-size: 10px;
|
||||
bottom: 2px;
|
||||
right: 5px;
|
||||
|
||||
-webkit-user-select: none;
|
||||
-moz-user-select: none;
|
||||
-ms-user-select: none;
|
||||
user-select: none;
|
||||
}
|
||||
|
||||
/* Override needed here due to the 10px right padding on inputrowleft; add 10 px. */
|
||||
#inputrowleft > .input-token-usage {
|
||||
right: 15px;
|
||||
bottom: 1px;
|
||||
}
|
||||
|
||||
.wientry > .input-token-usage {
|
||||
bottom: 8px;
|
||||
}
|
202
static/swagger-ui/LICENSE
vendored
Normal file
@@ -0,0 +1,202 @@
|
||||
|
||||
Apache License
|
||||
Version 2.0, January 2004
|
||||
http://www.apache.org/licenses/
|
||||
|
||||
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
|
||||
|
||||
1. Definitions.
|
||||
|
||||
"License" shall mean the terms and conditions for use, reproduction,
|
||||
and distribution as defined by Sections 1 through 9 of this document.
|
||||
|
||||
"Licensor" shall mean the copyright owner or entity authorized by
|
||||
the copyright owner that is granting the License.
|
||||
|
||||
"Legal Entity" shall mean the union of the acting entity and all
|
||||
other entities that control, are controlled by, or are under common
|
||||
control with that entity. For the purposes of this definition,
|
||||
"control" means (i) the power, direct or indirect, to cause the
|
||||
direction or management of such entity, whether by contract or
|
||||
otherwise, or (ii) ownership of fifty percent (50%) or more of the
|
||||
outstanding shares, or (iii) beneficial ownership of such entity.
|
||||
|
||||
"You" (or "Your") shall mean an individual or Legal Entity
|
||||
exercising permissions granted by this License.
|
||||
|
||||
"Source" form shall mean the preferred form for making modifications,
|
||||
including but not limited to software source code, documentation
|
||||
source, and configuration files.
|
||||
|
||||
"Object" form shall mean any form resulting from mechanical
|
||||
transformation or translation of a Source form, including but
|
||||
not limited to compiled object code, generated documentation,
|
||||
and conversions to other media types.
|
||||
|
||||
"Work" shall mean the work of authorship, whether in Source or
|
||||
Object form, made available under the License, as indicated by a
|
||||
copyright notice that is included in or attached to the work
|
||||
(an example is provided in the Appendix below).
|
||||
|
||||
"Derivative Works" shall mean any work, whether in Source or Object
|
||||
form, that is based on (or derived from) the Work and for which the
|
||||
editorial revisions, annotations, elaborations, or other modifications
|
||||
represent, as a whole, an original work of authorship. For the purposes
|
||||
of this License, Derivative Works shall not include works that remain
|
||||
separable from, or merely link (or bind by name) to the interfaces of,
|
||||
the Work and Derivative Works thereof.
|
||||
|
||||
"Contribution" shall mean any work of authorship, including
|
||||
the original version of the Work and any modifications or additions
|
||||
to that Work or Derivative Works thereof, that is intentionally
|
||||
submitted to Licensor for inclusion in the Work by the copyright owner
|
||||
or by an individual or Legal Entity authorized to submit on behalf of
|
||||
the copyright owner. For the purposes of this definition, "submitted"
|
||||
means any form of electronic, verbal, or written communication sent
|
||||
to the Licensor or its representatives, including but not limited to
|
||||
communication on electronic mailing lists, source code control systems,
|
||||
and issue tracking systems that are managed by, or on behalf of, the
|
||||
Licensor for the purpose of discussing and improving the Work, but
|
||||
excluding communication that is conspicuously marked or otherwise
|
||||
designated in writing by the copyright owner as "Not a Contribution."
|
||||
|
||||
"Contributor" shall mean Licensor and any individual or Legal Entity
|
||||
on behalf of whom a Contribution has been received by Licensor and
|
||||
subsequently incorporated within the Work.
|
||||
|
||||
2. Grant of Copyright License. Subject to the terms and conditions of
|
||||
this License, each Contributor hereby grants to You a perpetual,
|
||||
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
||||
copyright license to reproduce, prepare Derivative Works of,
|
||||
publicly display, publicly perform, sublicense, and distribute the
|
||||
Work and such Derivative Works in Source or Object form.
|
||||
|
||||
3. Grant of Patent License. Subject to the terms and conditions of
|
||||
this License, each Contributor hereby grants to You a perpetual,
|
||||
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
||||
(except as stated in this section) patent license to make, have made,
|
||||
use, offer to sell, sell, import, and otherwise transfer the Work,
|
||||
where such license applies only to those patent claims licensable
|
||||
by such Contributor that are necessarily infringed by their
|
||||
Contribution(s) alone or by combination of their Contribution(s)
|
||||
with the Work to which such Contribution(s) was submitted. If You
|
||||
institute patent litigation against any entity (including a
|
||||
cross-claim or counterclaim in a lawsuit) alleging that the Work
|
||||
or a Contribution incorporated within the Work constitutes direct
|
||||
or contributory patent infringement, then any patent licenses
|
||||
granted to You under this License for that Work shall terminate
|
||||
as of the date such litigation is filed.
|
||||
|
||||
4. Redistribution. You may reproduce and distribute copies of the
|
||||
Work or Derivative Works thereof in any medium, with or without
|
||||
modifications, and in Source or Object form, provided that You
|
||||
meet the following conditions:
|
||||
|
||||
(a) You must give any other recipients of the Work or
|
||||
Derivative Works a copy of this License; and
|
||||
|
||||
(b) You must cause any modified files to carry prominent notices
|
||||
stating that You changed the files; and
|
||||
|
||||
(c) You must retain, in the Source form of any Derivative Works
|
||||
that You distribute, all copyright, patent, trademark, and
|
||||
attribution notices from the Source form of the Work,
|
||||
excluding those notices that do not pertain to any part of
|
||||
the Derivative Works; and
|
||||
|
||||
(d) If the Work includes a "NOTICE" text file as part of its
|
||||
distribution, then any Derivative Works that You distribute must
|
||||
include a readable copy of the attribution notices contained
|
||||
within such NOTICE file, excluding those notices that do not
|
||||
pertain to any part of the Derivative Works, in at least one
|
||||
of the following places: within a NOTICE text file distributed
|
||||
as part of the Derivative Works; within the Source form or
|
||||
documentation, if provided along with the Derivative Works; or,
|
||||
within a display generated by the Derivative Works, if and
|
||||
wherever such third-party notices normally appear. The contents
|
||||
of the NOTICE file are for informational purposes only and
|
||||
do not modify the License. You may add Your own attribution
|
||||
notices within Derivative Works that You distribute, alongside
|
||||
or as an addendum to the NOTICE text from the Work, provided
|
||||
that such additional attribution notices cannot be construed
|
||||
as modifying the License.
|
||||
|
||||
You may add Your own copyright statement to Your modifications and
|
||||
may provide additional or different license terms and conditions
|
||||
for use, reproduction, or distribution of Your modifications, or
|
||||
for any such Derivative Works as a whole, provided Your use,
|
||||
reproduction, and distribution of the Work otherwise complies with
|
||||
the conditions stated in this License.
|
||||
|
||||
5. Submission of Contributions. Unless You explicitly state otherwise,
|
||||
any Contribution intentionally submitted for inclusion in the Work
|
||||
by You to the Licensor shall be under the terms and conditions of
|
||||
this License, without any additional terms or conditions.
|
||||
Notwithstanding the above, nothing herein shall supersede or modify
|
||||
the terms of any separate license agreement you may have executed
|
||||
with Licensor regarding such Contributions.
|
||||
|
||||
6. Trademarks. This License does not grant permission to use the trade
|
||||
names, trademarks, service marks, or product names of the Licensor,
|
||||
except as required for reasonable and customary use in describing the
|
||||
origin of the Work and reproducing the content of the NOTICE file.
|
||||
|
||||
7. Disclaimer of Warranty. Unless required by applicable law or
|
||||
agreed to in writing, Licensor provides the Work (and each
|
||||
Contributor provides its Contributions) on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
|
||||
implied, including, without limitation, any warranties or conditions
|
||||
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
|
||||
PARTICULAR PURPOSE. You are solely responsible for determining the
|
||||
appropriateness of using or redistributing the Work and assume any
|
||||
risks associated with Your exercise of permissions under this License.
|
||||
|
||||
8. Limitation of Liability. In no event and under no legal theory,
|
||||
whether in tort (including negligence), contract, or otherwise,
|
||||
unless required by applicable law (such as deliberate and grossly
|
||||
negligent acts) or agreed to in writing, shall any Contributor be
|
||||
liable to You for damages, including any direct, indirect, special,
|
||||
incidental, or consequential damages of any character arising as a
|
||||
result of this License or out of the use or inability to use the
|
||||
Work (including but not limited to damages for loss of goodwill,
|
||||
work stoppage, computer failure or malfunction, or any and all
|
||||
other commercial damages or losses), even if such Contributor
|
||||
has been advised of the possibility of such damages.
|
||||
|
||||
9. Accepting Warranty or Additional Liability. While redistributing
|
||||
the Work or Derivative Works thereof, You may choose to offer,
|
||||
and charge a fee for, acceptance of support, warranty, indemnity,
|
||||
or other liability obligations and/or rights consistent with this
|
||||
License. However, in accepting such obligations, You may act only
|
||||
on Your own behalf and on Your sole responsibility, not on behalf
|
||||
of any other Contributor, and only if You agree to indemnify,
|
||||
defend, and hold each Contributor harmless for any liability
|
||||
incurred by, or claims asserted against, such Contributor by reason
|
||||
of your accepting any such warranty or additional liability.
|
||||
|
||||
END OF TERMS AND CONDITIONS
|
||||
|
||||
APPENDIX: How to apply the Apache License to your work.
|
||||
|
||||
To apply the Apache License to your work, attach the following
|
||||
boilerplate notice, with the fields enclosed by brackets "[]"
|
||||
replaced with your own identifying information. (Don't include
|
||||
the brackets!) The text should be enclosed in the appropriate
|
||||
comment syntax for the file format. We also recommend that a
|
||||
file or class name and description of purpose be included on the
|
||||
same "printed page" as the copyright notice for easier
|
||||
identification within third-party archives.
|
||||
|
||||
Copyright [yyyy] [name of copyright owner]
|
||||
|
||||
Licensed under the Apache License, Version 2.0 (the "License");
|
||||
you may not use this file except in compliance with the License.
|
||||
You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing, software
|
||||
distributed under the License is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
853
static/swagger-ui/SwaggerDark.css
vendored
Normal file
@@ -0,0 +1,853 @@
|
||||
/*!
|
||||
* MIT License
|
||||
*
|
||||
* Copyright (c) 2020 Romans Pokrovskis
|
||||
*
|
||||
* Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
* of this software and associated documentation files (the "Software"), to deal
|
||||
* in the Software without restriction, including without limitation the rights
|
||||
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
* copies of the Software, and to permit persons to whom the Software is
|
||||
* furnished to do so, subject to the following conditions:
|
||||
*
|
||||
* The above copyright notice and this permission notice shall be included in all
|
||||
* copies or substantial portions of the Software.
|
||||
*
|
||||
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
* SOFTWARE.
|
||||
*/
|
||||
|
||||
a { color: #8c8cfa; }
|
||||
|
||||
::-webkit-scrollbar-track-piece { background-color: rgba(255, 255, 255, .2) !important; }
|
||||
|
||||
::-webkit-scrollbar-track { background-color: rgba(255, 255, 255, .3) !important; }
|
||||
|
||||
::-webkit-scrollbar-thumb { background-color: rgba(255, 255, 255, .5) !important; }
|
||||
|
||||
embed[type="application/pdf"] { filter: invert(90%); }
|
||||
|
||||
html {
|
||||
background: #1f1f1f !important;
|
||||
box-sizing: border-box;
|
||||
filter: contrast(100%) brightness(100%) saturate(100%);
|
||||
overflow-y: scroll;
|
||||
}
|
||||
|
||||
body {
|
||||
background: #1f1f1f;
|
||||
background-color: #1f1f1f;
|
||||
background-image: none !important;
|
||||
}
|
||||
|
||||
button, input, select, textarea {
|
||||
background-color: #1f1f1f;
|
||||
color: #bfbfbf;
|
||||
}
|
||||
|
||||
font, html { color: #bfbfbf; }
|
||||
|
||||
.swagger-ui, .swagger-ui section h3 { color: #b5bac9; }
|
||||
|
||||
.swagger-ui a { background-color: transparent; }
|
||||
|
||||
.swagger-ui mark {
|
||||
background-color: #664b00;
|
||||
color: #bfbfbf;
|
||||
}
|
||||
|
||||
.swagger-ui legend { color: inherit; }
|
||||
|
||||
.swagger-ui .debug * { outline: #e6da99 solid 1px; }
|
||||
|
||||
.swagger-ui .debug-white * { outline: #fff solid 1px; }
|
||||
|
||||
.swagger-ui .debug-black * { outline: #bfbfbf solid 1px; }
|
||||
|
||||
.swagger-ui .debug-grid { background: url(data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAAgAAAAICAYAAADED76LAAAAGXRFWHRTb2Z0d2FyZQBBZG9iZSBJbWFnZVJlYWR5ccllPAAAAyhpVFh0WE1MOmNvbS5hZG9iZS54bXAAAAAAADw/eHBhY2tldCBiZWdpbj0i77u/IiBpZD0iVzVNME1wQ2VoaUh6cmVTek5UY3prYzlkIj8+IDx4OnhtcG1ldGEgeG1sbnM6eD0iYWRvYmU6bnM6bWV0YS8iIHg6eG1wdGs9IkFkb2JlIFhNUCBDb3JlIDUuNi1jMTExIDc5LjE1ODMyNSwgMjAxNS8wOS8xMC0wMToxMDoyMCAgICAgICAgIj4gPHJkZjpSREYgeG1sbnM6cmRmPSJodHRwOi8vd3d3LnczLm9yZy8xOTk5LzAyLzIyLXJkZi1zeW50YXgtbnMjIj4gPHJkZjpEZXNjcmlwdGlvbiByZGY6YWJvdXQ9IiIgeG1sbnM6eG1wTU09Imh0dHA6Ly9ucy5hZG9iZS5jb20veGFwLzEuMC9tbS8iIHhtbG5zOnN0UmVmPSJodHRwOi8vbnMuYWRvYmUuY29tL3hhcC8xLjAvc1R5cGUvUmVzb3VyY2VSZWYjIiB4bWxuczp4bXA9Imh0dHA6Ly9ucy5hZG9iZS5jb20veGFwLzEuMC8iIHhtcE1NOkRvY3VtZW50SUQ9InhtcC5kaWQ6MTRDOTY4N0U2N0VFMTFFNjg2MzZDQjkwNkQ4MjgwMEIiIHhtcE1NOkluc3RhbmNlSUQ9InhtcC5paWQ6MTRDOTY4N0Q2N0VFMTFFNjg2MzZDQjkwNkQ4MjgwMEIiIHhtcDpDcmVhdG9yVG9vbD0iQWRvYmUgUGhvdG9zaG9wIENDIDIwMTUgKE1hY2ludG9zaCkiPiA8eG1wTU06RGVyaXZlZEZyb20gc3RSZWY6aW5zdGFuY2VJRD0ieG1wLmlpZDo3NjcyQkQ3NjY3QzUxMUU2QjJCQ0UyNDA4MTAwMjE3MSIgc3RSZWY6ZG9jdW1lbnRJRD0ieG1wLmRpZDo3NjcyQkQ3NzY3QzUxMUU2QjJCQ0UyNDA4MTAwMjE3MSIvPiA8L3JkZjpEZXNjcmlwdGlvbj4gPC9yZGY6UkRGPiA8L3g6eG1wbWV0YT4gPD94cGFja2V0IGVuZD0iciI/PsBS+GMAAAAjSURBVHjaYvz//z8DLsD4gcGXiYEAGBIKGBne//fFpwAgwAB98AaF2pjlUQAAAABJRU5ErkJggg==) 0 0; }
|
||||
|
||||
.swagger-ui .debug-grid-16 { background: url(data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAABAAAAAQCAYAAAAf8/9hAAAAGXRFWHRTb2Z0d2FyZQBBZG9iZSBJbWFnZVJlYWR5ccllPAAAAyhpVFh0WE1MOmNvbS5hZG9iZS54bXAAAAAAADw/eHBhY2tldCBiZWdpbj0i77u/IiBpZD0iVzVNME1wQ2VoaUh6cmVTek5UY3prYzlkIj8+IDx4OnhtcG1ldGEgeG1sbnM6eD0iYWRvYmU6bnM6bWV0YS8iIHg6eG1wdGs9IkFkb2JlIFhNUCBDb3JlIDUuNi1jMTExIDc5LjE1ODMyNSwgMjAxNS8wOS8xMC0wMToxMDoyMCAgICAgICAgIj4gPHJkZjpSREYgeG1sbnM6cmRmPSJodHRwOi8vd3d3LnczLm9yZy8xOTk5LzAyLzIyLXJkZi1zeW50YXgtbnMjIj4gPHJkZjpEZXNjcmlwdGlvbiByZGY6YWJvdXQ9IiIgeG1sbnM6eG1wTU09Imh0dHA6Ly9ucy5hZG9iZS5jb20veGFwLzEuMC9tbS8iIHhtbG5zOnN0UmVmPSJodHRwOi8vbnMuYWRvYmUuY29tL3hhcC8xLjAvc1R5cGUvUmVzb3VyY2VSZWYjIiB4bWxuczp4bXA9Imh0dHA6Ly9ucy5hZG9iZS5jb20veGFwLzEuMC8iIHhtcE1NOkRvY3VtZW50SUQ9InhtcC5kaWQ6ODYyRjhERDU2N0YyMTFFNjg2MzZDQjkwNkQ4MjgwMEIiIHhtcE1NOkluc3RhbmNlSUQ9InhtcC5paWQ6ODYyRjhERDQ2N0YyMTFFNjg2MzZDQjkwNkQ4MjgwMEIiIHhtcDpDcmVhdG9yVG9vbD0iQWRvYmUgUGhvdG9zaG9wIENDIDIwMTUgKE1hY2ludG9zaCkiPiA8eG1wTU06RGVyaXZlZEZyb20gc3RSZWY6aW5zdGFuY2VJRD0ieG1wLmlpZDo3NjcyQkQ3QTY3QzUxMUU2QjJCQ0UyNDA4MTAwMjE3MSIgc3RSZWY6ZG9jdW1lbnRJRD0ieG1wLmRpZDo3NjcyQkQ3QjY3QzUxMUU2QjJCQ0UyNDA4MTAwMjE3MSIvPiA8L3JkZjpEZXNjcmlwdGlvbj4gPC9yZGY6UkRGPiA8L3g6eG1wbWV0YT4gPD94cGFja2V0IGVuZD0iciI/PvCS01IAAABMSURBVHjaYmR4/5+BFPBfAMFm/MBgx8RAGWCn1AAmSg34Q6kBDKMGMDCwICeMIemF/5QawEipAWwUhwEjMDvbAWlWkvVBwu8vQIABAEwBCph8U6c0AAAAAElFTkSuQmCC) 0 0; }
|
||||
|
||||
.swagger-ui .debug-grid-8-solid { background: url(data:image/jpeg;base64,/9j/4QAYRXhpZgAASUkqAAgAAAAAAAAAAAAAAP/sABFEdWNreQABAAQAAAAAAAD/4QMxaHR0cDovL25zLmFkb2JlLmNvbS94YXAvMS4wLwA8P3hwYWNrZXQgYmVnaW49Iu+7vyIgaWQ9Ilc1TTBNcENlaGlIenJlU3pOVGN6a2M5ZCI/PiA8eDp4bXBtZXRhIHhtbG5zOng9ImFkb2JlOm5zOm1ldGEvIiB4OnhtcHRrPSJBZG9iZSBYTVAgQ29yZSA1LjYtYzExMSA3OS4xNTgzMjUsIDIwMTUvMDkvMTAtMDE6MTA6MjAgICAgICAgICI+IDxyZGY6UkRGIHhtbG5zOnJkZj0iaHR0cDovL3d3dy53My5vcmcvMTk5OS8wMi8yMi1yZGYtc3ludGF4LW5zIyI+IDxyZGY6RGVzY3JpcHRpb24gcmRmOmFib3V0PSIiIHhtbG5zOnhtcD0iaHR0cDovL25zLmFkb2JlLmNvbS94YXAvMS4wLyIgeG1sbnM6eG1wTU09Imh0dHA6Ly9ucy5hZG9iZS5jb20veGFwLzEuMC9tbS8iIHhtbG5zOnN0UmVmPSJodHRwOi8vbnMuYWRvYmUuY29tL3hhcC8xLjAvc1R5cGUvUmVzb3VyY2VSZWYjIiB4bXA6Q3JlYXRvclRvb2w9IkFkb2JlIFBob3Rvc2hvcCBDQyAyMDE1IChNYWNpbnRvc2gpIiB4bXBNTTpJbnN0YW5jZUlEPSJ4bXAuaWlkOkIxMjI0OTczNjdCMzExRTZCMkJDRTI0MDgxMDAyMTcxIiB4bXBNTTpEb2N1bWVudElEPSJ4bXAuZGlkOkIxMjI0OTc0NjdCMzExRTZCMkJDRTI0MDgxMDAyMTcxIj4gPHhtcE1NOkRlcml2ZWRGcm9tIHN0UmVmOmluc3RhbmNlSUQ9InhtcC5paWQ6QjEyMjQ5NzE2N0IzMTFFNkIyQkNFMjQwODEwMDIxNzEiIHN0UmVmOmRvY3VtZW50SUQ9InhtcC5kaWQ6QjEyMjQ5NzI2N0IzMTFFNkIyQkNFMjQwODEwMDIxNzEiLz4gPC9yZGY6RGVzY3JpcHRpb24+IDwvcmRmOlJERj4gPC94OnhtcG1ldGE+IDw/eHBhY2tldCBlbmQ9InIiPz7/7gAOQWRvYmUAZMAAAAAB/9sAhAAbGhopHSlBJiZBQi8vL0JHPz4+P0dHR0dHR0dHR0dHR0dHR0dHR0dHR0dHR0dHR0dHR0dHR0dHR0dHR0dHR0dHAR0pKTQmND8oKD9HPzU/R0dHR0dHR0dHR0dHR0dHR0dHR0dHR0dHR0dHR0dHR0dHR0dHR0dHR0dHR0dHR0dHR0f/wAARCAAIAAgDASIAAhEBAxEB/8QAWQABAQAAAAAAAAAAAAAAAAAAAAYBAQEAAAAAAAAAAAAAAAAAAAIEEAEBAAMBAAAAAAAAAAAAAAABADECA0ERAAEDBQAAAAAAAAAAAAAAAAARITFBUWESIv/aAAwDAQACEQMRAD8AoOnTV1QTD7JJshP3vSM3P//Z) 0 0 #1c1c21; }
|
||||
|
||||
.swagger-ui .debug-grid-16-solid { background: url(data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAABAAAAAQCAIAAACQkWg2AAAAGXRFWHRTb2Z0d2FyZQBBZG9iZSBJbWFnZVJlYWR5ccllPAAAAyhpVFh0WE1MOmNvbS5hZG9iZS54bXAAAAAAADw/eHBhY2tldCBiZWdpbj0i77u/IiBpZD0iVzVNME1wQ2VoaUh6cmVTek5UY3prYzlkIj8+IDx4OnhtcG1ldGEgeG1sbnM6eD0iYWRvYmU6bnM6bWV0YS8iIHg6eG1wdGs9IkFkb2JlIFhNUCBDb3JlIDUuNi1jMTExIDc5LjE1ODMyNSwgMjAxNS8wOS8xMC0wMToxMDoyMCAgICAgICAgIj4gPHJkZjpSREYgeG1sbnM6cmRmPSJodHRwOi8vd3d3LnczLm9yZy8xOTk5LzAyLzIyLXJkZi1zeW50YXgtbnMjIj4gPHJkZjpEZXNjcmlwdGlvbiByZGY6YWJvdXQ9IiIgeG1sbnM6eG1wPSJodHRwOi8vbnMuYWRvYmUuY29tL3hhcC8xLjAvIiB4bWxuczp4bXBNTT0iaHR0cDovL25zLmFkb2JlLmNvbS94YXAvMS4wL21tLyIgeG1sbnM6c3RSZWY9Imh0dHA6Ly9ucy5hZG9iZS5jb20veGFwLzEuMC9zVHlwZS9SZXNvdXJjZVJlZiMiIHhtcDpDcmVhdG9yVG9vbD0iQWRvYmUgUGhvdG9zaG9wIENDIDIwMTUgKE1hY2ludG9zaCkiIHhtcE1NOkluc3RhbmNlSUQ9InhtcC5paWQ6NzY3MkJEN0U2N0M1MTFFNkIyQkNFMjQwODEwMDIxNzEiIHhtcE1NOkRvY3VtZW50SUQ9InhtcC5kaWQ6NzY3MkJEN0Y2N0M1MTFFNkIyQkNFMjQwODEwMDIxNzEiPiA8eG1wTU06RGVyaXZlZEZyb20gc3RSZWY6aW5zdGFuY2VJRD0ieG1wLmlpZDo3NjcyQkQ3QzY3QzUxMUU2QjJCQ0UyNDA4MTAwMjE3MSIgc3RSZWY6ZG9jdW1lbnRJRD0ieG1wLmRpZDo3NjcyQkQ3RDY3QzUxMUU2QjJCQ0UyNDA4MTAwMjE3MSIvPiA8L3JkZjpEZXNjcmlwdGlvbj4gPC9yZGY6UkRGPiA8L3g6eG1wbWV0YT4gPD94cGFja2V0IGVuZD0iciI/Pve6J3kAAAAzSURBVHjaYvz//z8D0UDsMwMjSRoYP5Gq4SPNbRjVMEQ1fCRDg+in/6+J1AJUxsgAEGAA31BAJMS0GYEAAAAASUVORK5CYII=) 0 0 #1c1c21; }
|
||||
|
||||
.swagger-ui .b--black { border-color: #000; }
|
||||
|
||||
.swagger-ui .b--near-black { border-color: #121212; }
|
||||
|
||||
.swagger-ui .b--dark-gray { border-color: #333; }
|
||||
|
||||
.swagger-ui .b--mid-gray { border-color: #545454; }
|
||||
|
||||
.swagger-ui .b--gray { border-color: #787878; }
|
||||
|
||||
.swagger-ui .b--silver { border-color: #999; }
|
||||
|
||||
.swagger-ui .b--light-silver { border-color: #6e6e6e; }
|
||||
|
||||
.swagger-ui .b--moon-gray { border-color: #4d4d4d; }
|
||||
|
||||
.swagger-ui .b--light-gray { border-color: #2b2b2b; }
|
||||
|
||||
.swagger-ui .b--near-white { border-color: #242424; }
|
||||
|
||||
.swagger-ui .b--white { border-color: #1c1c21; }
|
||||
|
||||
.swagger-ui .b--white-90 { border-color: rgba(28, 28, 33, .9); }
|
||||
|
||||
.swagger-ui .b--white-80 { border-color: rgba(28, 28, 33, .8); }
|
||||
|
||||
.swagger-ui .b--white-70 { border-color: rgba(28, 28, 33, .7); }
|
||||
|
||||
.swagger-ui .b--white-60 { border-color: rgba(28, 28, 33, .6); }
|
||||
|
||||
.swagger-ui .b--white-50 { border-color: rgba(28, 28, 33, .5); }
|
||||
|
||||
.swagger-ui .b--white-40 { border-color: rgba(28, 28, 33, .4); }
|
||||
|
||||
.swagger-ui .b--white-30 { border-color: rgba(28, 28, 33, .3); }
|
||||
|
||||
.swagger-ui .b--white-20 { border-color: rgba(28, 28, 33, .2); }
|
||||
|
||||
.swagger-ui .b--white-10 { border-color: rgba(28, 28, 33, .1); }
|
||||
|
||||
.swagger-ui .b--white-05 { border-color: rgba(28, 28, 33, .05); }
|
||||
|
||||
.swagger-ui .b--white-025 { border-color: rgba(28, 28, 33, .024); }
|
||||
|
||||
.swagger-ui .b--white-0125 { border-color: rgba(28, 28, 33, .01); }
|
||||
|
||||
.swagger-ui .b--black-90 { border-color: rgba(0, 0, 0, .9); }
|
||||
|
||||
.swagger-ui .b--black-80 { border-color: rgba(0, 0, 0, .8); }
|
||||
|
||||
.swagger-ui .b--black-70 { border-color: rgba(0, 0, 0, .7); }
|
||||
|
||||
.swagger-ui .b--black-60 { border-color: rgba(0, 0, 0, .6); }
|
||||
|
||||
.swagger-ui .b--black-50 { border-color: rgba(0, 0, 0, .5); }
|
||||
|
||||
.swagger-ui .b--black-40 { border-color: rgba(0, 0, 0, .4); }
|
||||
|
||||
.swagger-ui .b--black-30 { border-color: rgba(0, 0, 0, .3); }
|
||||
|
||||
.swagger-ui .b--black-20 { border-color: rgba(0, 0, 0, .2); }
|
||||
|
||||
.swagger-ui .b--black-10 { border-color: rgba(0, 0, 0, .1); }
|
||||
|
||||
.swagger-ui .b--black-05 { border-color: rgba(0, 0, 0, .05); }
|
||||
|
||||
.swagger-ui .b--black-025 { border-color: rgba(0, 0, 0, .024); }
|
||||
|
||||
.swagger-ui .b--black-0125 { border-color: rgba(0, 0, 0, .01); }
|
||||
|
||||
.swagger-ui .b--dark-red { border-color: #bc2f36; }
|
||||
|
||||
.swagger-ui .b--red { border-color: #c83932; }
|
||||
|
||||
.swagger-ui .b--light-red { border-color: #ab3c2b; }
|
||||
|
||||
.swagger-ui .b--orange { border-color: #cc6e33; }
|
||||
|
||||
.swagger-ui .b--purple { border-color: #5e2ca5; }
|
||||
|
||||
.swagger-ui .b--light-purple { border-color: #672caf; }
|
||||
|
||||
.swagger-ui .b--dark-pink { border-color: #ab2b81; }
|
||||
|
||||
.swagger-ui .b--hot-pink { border-color: #c03086; }
|
||||
|
||||
.swagger-ui .b--pink { border-color: #8f2464; }
|
||||
|
||||
.swagger-ui .b--light-pink { border-color: #721d4d; }
|
||||
|
||||
.swagger-ui .b--dark-green { border-color: #1c6e50; }
|
||||
|
||||
.swagger-ui .b--green { border-color: #279b70; }
|
||||
|
||||
.swagger-ui .b--light-green { border-color: #228762; }
|
||||
|
||||
.swagger-ui .b--navy { border-color: #0d1d35; }
|
||||
|
||||
.swagger-ui .b--dark-blue { border-color: #20497e; }
|
||||
|
||||
.swagger-ui .b--blue { border-color: #4380d0; }
|
||||
|
||||
.swagger-ui .b--light-blue { border-color: #20517e; }
|
||||
|
||||
.swagger-ui .b--lightest-blue { border-color: #143a52; }
|
||||
|
||||
.swagger-ui .b--washed-blue { border-color: #0c312d; }
|
||||
|
||||
.swagger-ui .b--washed-green { border-color: #0f3d2c; }
|
||||
|
||||
.swagger-ui .b--washed-red { border-color: #411010; }
|
||||
|
||||
.swagger-ui .b--transparent { border-color: transparent; }
|
||||
|
||||
.swagger-ui .b--gold, .swagger-ui .b--light-yellow, .swagger-ui .b--washed-yellow, .swagger-ui .b--yellow { border-color: #664b00; }
|
||||
|
||||
.swagger-ui .shadow-1 { box-shadow: rgba(0, 0, 0, .2) 0 0 4px 2px; }
|
||||
|
||||
.swagger-ui .shadow-2 { box-shadow: rgba(0, 0, 0, .2) 0 0 8px 2px; }
|
||||
|
||||
.swagger-ui .shadow-3 { box-shadow: rgba(0, 0, 0, .2) 2px 2px 4px 2px; }
|
||||
|
||||
.swagger-ui .shadow-4 { box-shadow: rgba(0, 0, 0, .2) 2px 2px 8px 0; }
|
||||
|
||||
.swagger-ui .shadow-5 { box-shadow: rgba(0, 0, 0, .2) 4px 4px 8px 0; }
|
||||
|
||||
@media screen and (min-width: 30em) {
|
||||
.swagger-ui .shadow-1-ns { box-shadow: rgba(0, 0, 0, .2) 0 0 4px 2px; }
|
||||
|
||||
.swagger-ui .shadow-2-ns { box-shadow: rgba(0, 0, 0, .2) 0 0 8px 2px; }
|
||||
|
||||
.swagger-ui .shadow-3-ns { box-shadow: rgba(0, 0, 0, .2) 2px 2px 4px 2px; }
|
||||
|
||||
.swagger-ui .shadow-4-ns { box-shadow: rgba(0, 0, 0, .2) 2px 2px 8px 0; }
|
||||
|
||||
.swagger-ui .shadow-5-ns { box-shadow: rgba(0, 0, 0, .2) 4px 4px 8px 0; }
|
||||
}
|
||||
|
||||
@media screen and (max-width: 60em) and (min-width: 30em) {
|
||||
.swagger-ui .shadow-1-m { box-shadow: rgba(0, 0, 0, .2) 0 0 4px 2px; }
|
||||
|
||||
.swagger-ui .shadow-2-m { box-shadow: rgba(0, 0, 0, .2) 0 0 8px 2px; }
|
||||
|
||||
.swagger-ui .shadow-3-m { box-shadow: rgba(0, 0, 0, .2) 2px 2px 4px 2px; }
|
||||
|
||||
.swagger-ui .shadow-4-m { box-shadow: rgba(0, 0, 0, .2) 2px 2px 8px 0; }
|
||||
|
||||
.swagger-ui .shadow-5-m { box-shadow: rgba(0, 0, 0, .2) 4px 4px 8px 0; }
|
||||
}
|
||||
|
||||
@media screen and (min-width: 60em) {
|
||||
.swagger-ui .shadow-1-l { box-shadow: rgba(0, 0, 0, .2) 0 0 4px 2px; }
|
||||
|
||||
.swagger-ui .shadow-2-l { box-shadow: rgba(0, 0, 0, .2) 0 0 8px 2px; }
|
||||
|
||||
.swagger-ui .shadow-3-l { box-shadow: rgba(0, 0, 0, .2) 2px 2px 4px 2px; }
|
||||
|
||||
.swagger-ui .shadow-4-l { box-shadow: rgba(0, 0, 0, .2) 2px 2px 8px 0; }
|
||||
|
||||
.swagger-ui .shadow-5-l { box-shadow: rgba(0, 0, 0, .2) 4px 4px 8px 0; }
|
||||
}
|
||||
|
||||
.swagger-ui .black-05 { color: rgba(191, 191, 191, .05); }
|
||||
|
||||
.swagger-ui .bg-black-05 { background-color: rgba(0, 0, 0, .05); }
|
||||
|
||||
.swagger-ui .black-90, .swagger-ui .hover-black-90:focus, .swagger-ui .hover-black-90:hover { color: rgba(191, 191, 191, .9); }
|
||||
|
||||
.swagger-ui .black-80, .swagger-ui .hover-black-80:focus, .swagger-ui .hover-black-80:hover { color: rgba(191, 191, 191, .8); }
|
||||
|
||||
.swagger-ui .black-70, .swagger-ui .hover-black-70:focus, .swagger-ui .hover-black-70:hover { color: rgba(191, 191, 191, .7); }
|
||||
|
||||
.swagger-ui .black-60, .swagger-ui .hover-black-60:focus, .swagger-ui .hover-black-60:hover { color: rgba(191, 191, 191, .6); }
|
||||
|
||||
.swagger-ui .black-50, .swagger-ui .hover-black-50:focus, .swagger-ui .hover-black-50:hover { color: rgba(191, 191, 191, .5); }
|
||||
|
||||
.swagger-ui .black-40, .swagger-ui .hover-black-40:focus, .swagger-ui .hover-black-40:hover { color: rgba(191, 191, 191, .4); }
|
||||
|
||||
.swagger-ui .black-30, .swagger-ui .hover-black-30:focus, .swagger-ui .hover-black-30:hover { color: rgba(191, 191, 191, .3); }
|
||||
|
||||
.swagger-ui .black-20, .swagger-ui .hover-black-20:focus, .swagger-ui .hover-black-20:hover { color: rgba(191, 191, 191, .2); }
|
||||
|
||||
.swagger-ui .black-10, .swagger-ui .hover-black-10:focus, .swagger-ui .hover-black-10:hover { color: rgba(191, 191, 191, .1); }
|
||||
|
||||
.swagger-ui .hover-white-90:focus, .swagger-ui .hover-white-90:hover, .swagger-ui .white-90 { color: rgba(255, 255, 255, .9); }
|
||||
|
||||
.swagger-ui .hover-white-80:focus, .swagger-ui .hover-white-80:hover, .swagger-ui .white-80 { color: rgba(255, 255, 255, .8); }
|
||||
|
||||
.swagger-ui .hover-white-70:focus, .swagger-ui .hover-white-70:hover, .swagger-ui .white-70 { color: rgba(255, 255, 255, .7); }
|
||||
|
||||
.swagger-ui .hover-white-60:focus, .swagger-ui .hover-white-60:hover, .swagger-ui .white-60 { color: rgba(255, 255, 255, .6); }
|
||||
|
||||
.swagger-ui .hover-white-50:focus, .swagger-ui .hover-white-50:hover, .swagger-ui .white-50 { color: rgba(255, 255, 255, .5); }
|
||||
|
||||
.swagger-ui .hover-white-40:focus, .swagger-ui .hover-white-40:hover, .swagger-ui .white-40 { color: rgba(255, 255, 255, .4); }
|
||||
|
||||
.swagger-ui .hover-white-30:focus, .swagger-ui .hover-white-30:hover, .swagger-ui .white-30 { color: rgba(255, 255, 255, .3); }
|
||||
|
||||
.swagger-ui .hover-white-20:focus, .swagger-ui .hover-white-20:hover, .swagger-ui .white-20 { color: rgba(255, 255, 255, .2); }
|
||||
|
||||
.swagger-ui .hover-white-10:focus, .swagger-ui .hover-white-10:hover, .swagger-ui .white-10 { color: rgba(255, 255, 255, .1); }
|
||||
|
||||
.swagger-ui .hover-moon-gray:focus, .swagger-ui .hover-moon-gray:hover, .swagger-ui .moon-gray { color: #ccc; }
|
||||
|
||||
.swagger-ui .hover-light-gray:focus, .swagger-ui .hover-light-gray:hover, .swagger-ui .light-gray { color: #ededed; }
|
||||
|
||||
.swagger-ui .hover-near-white:focus, .swagger-ui .hover-near-white:hover, .swagger-ui .near-white { color: #f5f5f5; }
|
||||
|
||||
.swagger-ui .dark-red, .swagger-ui .hover-dark-red:focus, .swagger-ui .hover-dark-red:hover { color: #e6999d; }
|
||||
|
||||
.swagger-ui .hover-red:focus, .swagger-ui .hover-red:hover, .swagger-ui .red { color: #e69d99; }
|
||||
|
||||
.swagger-ui .hover-light-red:focus, .swagger-ui .hover-light-red:hover, .swagger-ui .light-red { color: #e6a399; }
|
||||
|
||||
.swagger-ui .hover-orange:focus, .swagger-ui .hover-orange:hover, .swagger-ui .orange { color: #e6b699; }
|
||||
|
||||
.swagger-ui .gold, .swagger-ui .hover-gold:focus, .swagger-ui .hover-gold:hover { color: #e6d099; }
|
||||
|
||||
.swagger-ui .hover-yellow:focus, .swagger-ui .hover-yellow:hover, .swagger-ui .yellow { color: #e6da99; }
|
||||
|
||||
.swagger-ui .hover-light-yellow:focus, .swagger-ui .hover-light-yellow:hover, .swagger-ui .light-yellow { color: #ede6b6; }
|
||||
|
||||
.swagger-ui .hover-purple:focus, .swagger-ui .hover-purple:hover, .swagger-ui .purple { color: #b99ae4; }
|
||||
|
||||
.swagger-ui .hover-light-purple:focus, .swagger-ui .hover-light-purple:hover, .swagger-ui .light-purple { color: #bb99e6; }
|
||||
|
||||
.swagger-ui .dark-pink, .swagger-ui .hover-dark-pink:focus, .swagger-ui .hover-dark-pink:hover { color: #e699cc; }
|
||||
|
||||
.swagger-ui .hot-pink, .swagger-ui .hover-hot-pink:focus, .swagger-ui .hover-hot-pink:hover, .swagger-ui .hover-pink:focus, .swagger-ui .hover-pink:hover, .swagger-ui .pink { color: #e699c7; }
|
||||
|
||||
.swagger-ui .hover-light-pink:focus, .swagger-ui .hover-light-pink:hover, .swagger-ui .light-pink { color: #edb6d5; }
|
||||
|
||||
.swagger-ui .dark-green, .swagger-ui .green, .swagger-ui .hover-dark-green:focus, .swagger-ui .hover-dark-green:hover, .swagger-ui .hover-green:focus, .swagger-ui .hover-green:hover { color: #99e6c9; }
|
||||
|
||||
.swagger-ui .hover-light-green:focus, .swagger-ui .hover-light-green:hover, .swagger-ui .light-green { color: #a1e8ce; }
|
||||
|
||||
.swagger-ui .hover-navy:focus, .swagger-ui .hover-navy:hover, .swagger-ui .navy { color: #99b8e6; }
|
||||
|
||||
.swagger-ui .blue, .swagger-ui .dark-blue, .swagger-ui .hover-blue:focus, .swagger-ui .hover-blue:hover, .swagger-ui .hover-dark-blue:focus, .swagger-ui .hover-dark-blue:hover { color: #99bae6; }
|
||||
|
||||
.swagger-ui .hover-light-blue:focus, .swagger-ui .hover-light-blue:hover, .swagger-ui .light-blue { color: #a9cbea; }
|
||||
|
||||
.swagger-ui .hover-lightest-blue:focus, .swagger-ui .hover-lightest-blue:hover, .swagger-ui .lightest-blue { color: #d6e9f5; }
|
||||
|
||||
.swagger-ui .hover-washed-blue:focus, .swagger-ui .hover-washed-blue:hover, .swagger-ui .washed-blue { color: #f7fdfc; }
|
||||
|
||||
.swagger-ui .hover-washed-green:focus, .swagger-ui .hover-washed-green:hover, .swagger-ui .washed-green { color: #ebfaf4; }
|
||||
|
||||
.swagger-ui .hover-washed-yellow:focus, .swagger-ui .hover-washed-yellow:hover, .swagger-ui .washed-yellow { color: #fbf9ef; }
|
||||
|
||||
.swagger-ui .hover-washed-red:focus, .swagger-ui .hover-washed-red:hover, .swagger-ui .washed-red { color: #f9e7e7; }
|
||||
|
||||
.swagger-ui .color-inherit, .swagger-ui .hover-inherit:focus, .swagger-ui .hover-inherit:hover { color: inherit; }
|
||||
|
||||
.swagger-ui .bg-black-90, .swagger-ui .hover-bg-black-90:focus, .swagger-ui .hover-bg-black-90:hover { background-color: rgba(0, 0, 0, .9); }
|
||||
|
||||
.swagger-ui .bg-black-80, .swagger-ui .hover-bg-black-80:focus, .swagger-ui .hover-bg-black-80:hover { background-color: rgba(0, 0, 0, .8); }
|
||||
|
||||
.swagger-ui .bg-black-70, .swagger-ui .hover-bg-black-70:focus, .swagger-ui .hover-bg-black-70:hover { background-color: rgba(0, 0, 0, .7); }
|
||||
|
||||
.swagger-ui .bg-black-60, .swagger-ui .hover-bg-black-60:focus, .swagger-ui .hover-bg-black-60:hover { background-color: rgba(0, 0, 0, .6); }
|
||||
|
||||
.swagger-ui .bg-black-50, .swagger-ui .hover-bg-black-50:focus, .swagger-ui .hover-bg-black-50:hover { background-color: rgba(0, 0, 0, .5); }
|
||||
|
||||
.swagger-ui .bg-black-40, .swagger-ui .hover-bg-black-40:focus, .swagger-ui .hover-bg-black-40:hover { background-color: rgba(0, 0, 0, .4); }
|
||||
|
||||
.swagger-ui .bg-black-30, .swagger-ui .hover-bg-black-30:focus, .swagger-ui .hover-bg-black-30:hover { background-color: rgba(0, 0, 0, .3); }
|
||||
|
||||
.swagger-ui .bg-black-20, .swagger-ui .hover-bg-black-20:focus, .swagger-ui .hover-bg-black-20:hover { background-color: rgba(0, 0, 0, .2); }
|
||||
|
||||
.swagger-ui .bg-white-90, .swagger-ui .hover-bg-white-90:focus, .swagger-ui .hover-bg-white-90:hover { background-color: rgba(28, 28, 33, .9); }
|
||||
|
||||
.swagger-ui .bg-white-80, .swagger-ui .hover-bg-white-80:focus, .swagger-ui .hover-bg-white-80:hover { background-color: rgba(28, 28, 33, .8); }
|
||||
|
||||
.swagger-ui .bg-white-70, .swagger-ui .hover-bg-white-70:focus, .swagger-ui .hover-bg-white-70:hover { background-color: rgba(28, 28, 33, .7); }
|
||||
|
||||
.swagger-ui .bg-white-60, .swagger-ui .hover-bg-white-60:focus, .swagger-ui .hover-bg-white-60:hover { background-color: rgba(28, 28, 33, .6); }
|
||||
|
||||
.swagger-ui .bg-white-50, .swagger-ui .hover-bg-white-50:focus, .swagger-ui .hover-bg-white-50:hover { background-color: rgba(28, 28, 33, .5); }
|
||||
|
||||
.swagger-ui .bg-white-40, .swagger-ui .hover-bg-white-40:focus, .swagger-ui .hover-bg-white-40:hover { background-color: rgba(28, 28, 33, .4); }
|
||||
|
||||
.swagger-ui .bg-white-30, .swagger-ui .hover-bg-white-30:focus, .swagger-ui .hover-bg-white-30:hover { background-color: rgba(28, 28, 33, .3); }
|
||||
|
||||
.swagger-ui .bg-white-20, .swagger-ui .hover-bg-white-20:focus, .swagger-ui .hover-bg-white-20:hover { background-color: rgba(28, 28, 33, .2); }
|
||||
|
||||
.swagger-ui .bg-black, .swagger-ui .hover-bg-black:focus, .swagger-ui .hover-bg-black:hover { background-color: #000; }
|
||||
|
||||
.swagger-ui .bg-near-black, .swagger-ui .hover-bg-near-black:focus, .swagger-ui .hover-bg-near-black:hover { background-color: #121212; }
|
||||
|
||||
.swagger-ui .bg-dark-gray, .swagger-ui .hover-bg-dark-gray:focus, .swagger-ui .hover-bg-dark-gray:hover { background-color: #333; }
|
||||
|
||||
.swagger-ui .bg-mid-gray, .swagger-ui .hover-bg-mid-gray:focus, .swagger-ui .hover-bg-mid-gray:hover { background-color: #545454; }
|
||||
|
||||
.swagger-ui .bg-gray, .swagger-ui .hover-bg-gray:focus, .swagger-ui .hover-bg-gray:hover { background-color: #787878; }
|
||||
|
||||
.swagger-ui .bg-silver, .swagger-ui .hover-bg-silver:focus, .swagger-ui .hover-bg-silver:hover { background-color: #999; }
|
||||
|
||||
.swagger-ui .bg-white, .swagger-ui .hover-bg-white:focus, .swagger-ui .hover-bg-white:hover { background-color: #1c1c21; }
|
||||
|
||||
.swagger-ui .bg-transparent, .swagger-ui .hover-bg-transparent:focus, .swagger-ui .hover-bg-transparent:hover { background-color: transparent; }
|
||||
|
||||
.swagger-ui .bg-dark-red, .swagger-ui .hover-bg-dark-red:focus, .swagger-ui .hover-bg-dark-red:hover { background-color: #bc2f36; }
|
||||
|
||||
.swagger-ui .bg-red, .swagger-ui .hover-bg-red:focus, .swagger-ui .hover-bg-red:hover { background-color: #c83932; }
|
||||
|
||||
.swagger-ui .bg-light-red, .swagger-ui .hover-bg-light-red:focus, .swagger-ui .hover-bg-light-red:hover { background-color: #ab3c2b; }
|
||||
|
||||
.swagger-ui .bg-orange, .swagger-ui .hover-bg-orange:focus, .swagger-ui .hover-bg-orange:hover { background-color: #cc6e33; }
|
||||
|
||||
.swagger-ui .bg-gold, .swagger-ui .bg-light-yellow, .swagger-ui .bg-washed-yellow, .swagger-ui .bg-yellow, .swagger-ui .hover-bg-gold:focus, .swagger-ui .hover-bg-gold:hover, .swagger-ui .hover-bg-light-yellow:focus, .swagger-ui .hover-bg-light-yellow:hover, .swagger-ui .hover-bg-washed-yellow:focus, .swagger-ui .hover-bg-washed-yellow:hover, .swagger-ui .hover-bg-yellow:focus, .swagger-ui .hover-bg-yellow:hover { background-color: #664b00; }
|
||||
|
||||
.swagger-ui .bg-purple, .swagger-ui .hover-bg-purple:focus, .swagger-ui .hover-bg-purple:hover { background-color: #5e2ca5; }
|
||||
|
||||
.swagger-ui .bg-light-purple, .swagger-ui .hover-bg-light-purple:focus, .swagger-ui .hover-bg-light-purple:hover { background-color: #672caf; }
|
||||
|
||||
.swagger-ui .bg-dark-pink, .swagger-ui .hover-bg-dark-pink:focus, .swagger-ui .hover-bg-dark-pink:hover { background-color: #ab2b81; }
|
||||
|
||||
.swagger-ui .bg-hot-pink, .swagger-ui .hover-bg-hot-pink:focus, .swagger-ui .hover-bg-hot-pink:hover { background-color: #c03086; }
|
||||
|
||||
.swagger-ui .bg-pink, .swagger-ui .hover-bg-pink:focus, .swagger-ui .hover-bg-pink:hover { background-color: #8f2464; }
|
||||
|
||||
.swagger-ui .bg-light-pink, .swagger-ui .hover-bg-light-pink:focus, .swagger-ui .hover-bg-light-pink:hover { background-color: #721d4d; }
|
||||
|
||||
.swagger-ui .bg-dark-green, .swagger-ui .hover-bg-dark-green:focus, .swagger-ui .hover-bg-dark-green:hover { background-color: #1c6e50; }
|
||||
|
||||
.swagger-ui .bg-green, .swagger-ui .hover-bg-green:focus, .swagger-ui .hover-bg-green:hover { background-color: #279b70; }
|
||||
|
||||
.swagger-ui .bg-light-green, .swagger-ui .hover-bg-light-green:focus, .swagger-ui .hover-bg-light-green:hover { background-color: #228762; }
|
||||
|
||||
.swagger-ui .bg-navy, .swagger-ui .hover-bg-navy:focus, .swagger-ui .hover-bg-navy:hover { background-color: #0d1d35; }
|
||||
|
||||
.swagger-ui .bg-dark-blue, .swagger-ui .hover-bg-dark-blue:focus, .swagger-ui .hover-bg-dark-blue:hover { background-color: #20497e; }
|
||||
|
||||
.swagger-ui .bg-blue, .swagger-ui .hover-bg-blue:focus, .swagger-ui .hover-bg-blue:hover { background-color: #4380d0; }
|
||||
|
||||
.swagger-ui .bg-light-blue, .swagger-ui .hover-bg-light-blue:focus, .swagger-ui .hover-bg-light-blue:hover { background-color: #20517e; }
|
||||
|
||||
.swagger-ui .bg-lightest-blue, .swagger-ui .hover-bg-lightest-blue:focus, .swagger-ui .hover-bg-lightest-blue:hover { background-color: #143a52; }
|
||||
|
||||
.swagger-ui .bg-washed-blue, .swagger-ui .hover-bg-washed-blue:focus, .swagger-ui .hover-bg-washed-blue:hover { background-color: #0c312d; }
|
||||
|
||||
.swagger-ui .bg-washed-green, .swagger-ui .hover-bg-washed-green:focus, .swagger-ui .hover-bg-washed-green:hover { background-color: #0f3d2c; }
|
||||
|
||||
.swagger-ui .bg-washed-red, .swagger-ui .hover-bg-washed-red:focus, .swagger-ui .hover-bg-washed-red:hover { background-color: #411010; }
|
||||
|
||||
.swagger-ui .bg-inherit, .swagger-ui .hover-bg-inherit:focus, .swagger-ui .hover-bg-inherit:hover { background-color: inherit; }
|
||||
|
||||
.swagger-ui .shadow-hover { transition: all .5s cubic-bezier(.165, .84, .44, 1) 0s; }
|
||||
|
||||
.swagger-ui .shadow-hover::after {
|
||||
border-radius: inherit;
|
||||
box-shadow: rgba(0, 0, 0, .2) 0 0 16px 2px;
|
||||
content: "";
|
||||
height: 100%;
|
||||
left: 0;
|
||||
opacity: 0;
|
||||
position: absolute;
|
||||
top: 0;
|
||||
transition: opacity .5s cubic-bezier(.165, .84, .44, 1) 0s;
|
||||
width: 100%;
|
||||
z-index: -1;
|
||||
}
|
||||
|
||||
.swagger-ui .bg-animate, .swagger-ui .bg-animate:focus, .swagger-ui .bg-animate:hover { transition: background-color .15s ease-in-out 0s; }
|
||||
|
||||
.swagger-ui .nested-links a {
|
||||
color: #99bae6;
|
||||
transition: color .15s ease-in 0s;
|
||||
}
|
||||
|
||||
.swagger-ui .nested-links a:focus, .swagger-ui .nested-links a:hover {
|
||||
color: #a9cbea;
|
||||
transition: color .15s ease-in 0s;
|
||||
}
|
||||
|
||||
.swagger-ui .opblock-tag {
|
||||
border-bottom: 1px solid rgba(58, 64, 80, .3);
|
||||
color: #b5bac9;
|
||||
transition: all .2s ease 0s;
|
||||
}
|
||||
|
||||
.swagger-ui .opblock-tag svg, .swagger-ui section.models h4 svg { transition: all .4s ease 0s; }
|
||||
|
||||
.swagger-ui .opblock {
|
||||
border: 1px solid #000;
|
||||
border-radius: 4px;
|
||||
box-shadow: rgba(0, 0, 0, .19) 0 0 3px;
|
||||
margin: 0 0 15px;
|
||||
}
|
||||
|
||||
.swagger-ui .opblock .tab-header .tab-item.active h4 span::after { background: gray; }
|
||||
|
||||
.swagger-ui .opblock.is-open .opblock-summary { border-bottom: 1px solid #000; }
|
||||
|
||||
.swagger-ui .opblock .opblock-section-header {
|
||||
background: rgba(28, 28, 33, .8);
|
||||
box-shadow: rgba(0, 0, 0, .1) 0 1px 2px;
|
||||
}
|
||||
|
||||
.swagger-ui .opblock .opblock-section-header > label > span { padding: 0 10px 0 0; }
|
||||
|
||||
.swagger-ui .opblock .opblock-summary-method {
|
||||
background: #000;
|
||||
color: #fff;
|
||||
text-shadow: rgba(0, 0, 0, .1) 0 1px 0;
|
||||
}
|
||||
|
||||
.swagger-ui .opblock.opblock-post {
|
||||
background: rgba(72, 203, 144, .1);
|
||||
border-color: #48cb90;
|
||||
}
|
||||
|
||||
.swagger-ui .opblock.opblock-post .opblock-summary-method, .swagger-ui .opblock.opblock-post .tab-header .tab-item.active h4 span::after { background: #48cb90; }
|
||||
|
||||
.swagger-ui .opblock.opblock-post .opblock-summary { border-color: #48cb90; }
|
||||
|
||||
.swagger-ui .opblock.opblock-put {
|
||||
background: rgba(213, 157, 88, .1);
|
||||
border-color: #d59d58;
|
||||
}
|
||||
|
||||
.swagger-ui .opblock.opblock-put .opblock-summary-method, .swagger-ui .opblock.opblock-put .tab-header .tab-item.active h4 span::after { background: #d59d58; }
|
||||
|
||||
.swagger-ui .opblock.opblock-put .opblock-summary { border-color: #d59d58; }
|
||||
|
||||
.swagger-ui .opblock.opblock-delete {
|
||||
background: rgba(200, 50, 50, .1);
|
||||
border-color: #c83232;
|
||||
}
|
||||
|
||||
.swagger-ui .opblock.opblock-delete .opblock-summary-method, .swagger-ui .opblock.opblock-delete .tab-header .tab-item.active h4 span::after { background: #c83232; }
|
||||
|
||||
.swagger-ui .opblock.opblock-delete .opblock-summary { border-color: #c83232; }
|
||||
|
||||
.swagger-ui .opblock.opblock-get {
|
||||
background: rgba(42, 105, 167, .1);
|
||||
border-color: #2a69a7;
|
||||
}
|
||||
|
||||
.swagger-ui .opblock.opblock-get .opblock-summary-method, .swagger-ui .opblock.opblock-get .tab-header .tab-item.active h4 span::after { background: #2a69a7; }
|
||||
|
||||
.swagger-ui .opblock.opblock-get .opblock-summary { border-color: #2a69a7; }
|
||||
|
||||
.swagger-ui .opblock.opblock-patch {
|
||||
background: rgba(92, 214, 188, .1);
|
||||
border-color: #5cd6bc;
|
||||
}
|
||||
|
||||
.swagger-ui .opblock.opblock-patch .opblock-summary-method, .swagger-ui .opblock.opblock-patch .tab-header .tab-item.active h4 span::after { background: #5cd6bc; }
|
||||
|
||||
.swagger-ui .opblock.opblock-patch .opblock-summary { border-color: #5cd6bc; }
|
||||
|
||||
.swagger-ui .opblock.opblock-head {
|
||||
background: rgba(140, 63, 207, .1);
|
||||
border-color: #8c3fcf;
|
||||
}
|
||||
|
||||
.swagger-ui .opblock.opblock-head .opblock-summary-method, .swagger-ui .opblock.opblock-head .tab-header .tab-item.active h4 span::after { background: #8c3fcf; }
|
||||
|
||||
.swagger-ui .opblock.opblock-head .opblock-summary { border-color: #8c3fcf; }
|
||||
|
||||
.swagger-ui .opblock.opblock-options {
|
||||
background: rgba(36, 89, 143, .1);
|
||||
border-color: #24598f;
|
||||
}
|
||||
|
||||
.swagger-ui .opblock.opblock-options .opblock-summary-method, .swagger-ui .opblock.opblock-options .tab-header .tab-item.active h4 span::after { background: #24598f; }
|
||||
|
||||
.swagger-ui .opblock.opblock-options .opblock-summary { border-color: #24598f; }
|
||||
|
||||
.swagger-ui .opblock.opblock-deprecated {
|
||||
background: rgba(46, 46, 46, .1);
|
||||
border-color: #2e2e2e;
|
||||
opacity: .6;
|
||||
}
|
||||
|
||||
.swagger-ui .opblock.opblock-deprecated .opblock-summary-method, .swagger-ui .opblock.opblock-deprecated .tab-header .tab-item.active h4 span::after { background: #2e2e2e; }
|
||||
|
||||
.swagger-ui .opblock.opblock-deprecated .opblock-summary { border-color: #2e2e2e; }
|
||||
|
||||
.swagger-ui .filter .operation-filter-input { border: 2px solid #2b3446; }
|
||||
|
||||
.swagger-ui .tab li:first-of-type::after { background: rgba(0, 0, 0, .2); }
|
||||
|
||||
.swagger-ui .download-contents {
|
||||
background: #7c8192;
|
||||
color: #fff;
|
||||
}
|
||||
|
||||
.swagger-ui .scheme-container {
|
||||
background: #1c1c21;
|
||||
box-shadow: rgba(0, 0, 0, .15) 0 1px 2px 0;
|
||||
}
|
||||
|
||||
.swagger-ui .loading-container .loading::before {
|
||||
animation: 1s linear 0s infinite normal none running rotation, .5s ease 0s 1 normal none running opacity;
|
||||
border-color: rgba(0, 0, 0, .6) rgba(84, 84, 84, .1) rgba(84, 84, 84, .1);
|
||||
}
|
||||
|
||||
.swagger-ui .response-control-media-type--accept-controller select { border-color: #196619; }
|
||||
|
||||
.swagger-ui .response-control-media-type__accept-message { color: #99e699; }
|
||||
|
||||
.swagger-ui .version-pragma__message code { background-color: #3b3b3b; }
|
||||
|
||||
.swagger-ui .btn {
|
||||
background: 0 0;
|
||||
border: 2px solid gray;
|
||||
box-shadow: rgba(0, 0, 0, .1) 0 1px 2px;
|
||||
color: #b5bac9;
|
||||
}
|
||||
|
||||
.swagger-ui .btn:hover { box-shadow: rgba(0, 0, 0, .3) 0 0 5px; }
|
||||
|
||||
.swagger-ui .btn.authorize, .swagger-ui .btn.cancel {
|
||||
background-color: transparent;
|
||||
border-color: #a72a2a;
|
||||
color: #e69999;
|
||||
}
|
||||
|
||||
.swagger-ui .btn.authorize {
|
||||
border-color: #48cb90;
|
||||
color: #9ce3c3;
|
||||
}
|
||||
|
||||
.swagger-ui .btn.authorize svg { fill: #9ce3c3; }
|
||||
|
||||
.swagger-ui .btn.execute {
|
||||
background-color: #5892d5;
|
||||
border-color: #5892d5;
|
||||
color: #fff;
|
||||
}
|
||||
|
||||
.swagger-ui .copy-to-clipboard { background: #7c8192; }
|
||||
|
||||
.swagger-ui .copy-to-clipboard button { background: url("data:image/svg+xml;charset=utf-8,<svg xmlns=\"http://www.w3.org/2000/svg\" width=\"16\" height=\"16\" aria-hidden=\"true\"><path fill=\"%23fff\" fill-rule=\"evenodd\" d=\"M2 13h4v1H2v-1zm5-6H2v1h5V7zm2 3V8l-3 3 3 3v-2h5v-2H9zM4.5 9H2v1h2.5V9zM2 12h2.5v-1H2v1zm9 1h1v2c-.02.28-.11.52-.3.7-.19.18-.42.28-.7.3H1c-.55 0-1-.45-1-1V4c0-.55.45-1 1-1h3c0-1.11.89-2 2-2 1.11 0 2 .89 2 2h3c.55 0 1 .45 1 1v5h-1V6H1v9h10v-2zM2 5h8c0-.55-.45-1-1-1H8c-.55 0-1-.45-1-1s-.45-1-1-1-1 .45-1 1-.45 1-1 1H3c-.55 0-1 .45-1 1z\"/></svg>") 50% center no-repeat; }
|
||||
|
||||
.swagger-ui select {
|
||||
background: url("data:image/svg+xml;charset=utf-8,<svg xmlns=\"http://www.w3.org/2000/svg\" viewBox=\"0 0 20 20\"><path d=\"M13.418 7.859a.695.695 0 01.978 0 .68.68 0 010 .969l-3.908 3.83a.697.697 0 01-.979 0l-3.908-3.83a.68.68 0 010-.969.695.695 0 01.978 0L10 11l3.418-3.141z\"/></svg>") right 10px center/20px no-repeat #212121;
|
||||
background: url(data:image/svg+xml;base64,PD94bWwgdmVyc2lvbj0iMS4wIiBlbmNvZGluZz0iVVRGLTgiIHN0YW5kYWxvbmU9Im5vIj8+CjxzdmcKICAgeG1sbnM6ZGM9Imh0dHA6Ly9wdXJsLm9yZy9kYy9lbGVtZW50cy8xLjEvIgogICB4bWxuczpjYz0iaHR0cDovL2NyZWF0aXZlY29tbW9ucy5vcmcvbnMjIgogICB4bWxuczpyZGY9Imh0dHA6Ly93d3cudzMub3JnLzE5OTkvMDIvMjItcmRmLXN5bnRheC1ucyMiCiAgIHhtbG5zOnN2Zz0iaHR0cDovL3d3dy53My5vcmcvMjAwMC9zdmciCiAgIHhtbG5zPSJodHRwOi8vd3d3LnczLm9yZy8yMDAwL3N2ZyIKICAgeG1sbnM6c29kaXBvZGk9Imh0dHA6Ly9zb2RpcG9kaS5zb3VyY2Vmb3JnZS5uZXQvRFREL3NvZGlwb2RpLTAuZHRkIgogICB4bWxuczppbmtzY2FwZT0iaHR0cDovL3d3dy5pbmtzY2FwZS5vcmcvbmFtZXNwYWNlcy9pbmtzY2FwZSIKICAgaW5rc2NhcGU6dmVyc2lvbj0iMS4wICg0MDM1YTRmYjQ5LCAyMDIwLTA1LTAxKSIKICAgc29kaXBvZGk6ZG9jbmFtZT0iZG93bmxvYWQuc3ZnIgogICBpZD0ic3ZnNCIKICAgdmVyc2lvbj0iMS4xIgogICB2aWV3Qm94PSIwIDAgMjAgMjAiPgogIDxtZXRhZGF0YQogICAgIGlkPSJtZXRhZGF0YTEwIj4KICAgIDxyZGY6UkRGPgogICAgICA8Y2M6V29yawogICAgICAgICByZGY6YWJvdXQ9IiI+CiAgICAgICAgPGRjOmZvcm1hdD5pbWFnZS9zdmcreG1sPC9kYzpmb3JtYXQ+CiAgICAgICAgPGRjOnR5cGUKICAgICAgICAgICByZGY6cmVzb3VyY2U9Imh0dHA6Ly9wdXJsLm9yZy9kYy9kY21pdHlwZS9TdGlsbEltYWdlIiAvPgogICAgICA8L2NjOldvcms+CiAgICA8L3JkZjpSREY+CiAgPC9tZXRhZGF0YT4KICA8ZGVmcwogICAgIGlkPSJkZWZzOCIgLz4KICA8c29kaXBvZGk6bmFtZWR2aWV3CiAgICAgaW5rc2NhcGU6Y3VycmVudC1sYXllcj0ic3ZnNCIKICAgICBpbmtzY2FwZTp3aW5kb3ctbWF4aW1pemVkPSIxIgogICAgIGlua3NjYXBlOndpbmRvdy15PSItOSIKICAgICBpbmtzY2FwZTp3aW5kb3cteD0iLTkiCiAgICAgaW5rc2NhcGU6Y3k9IjEwIgogICAgIGlua3NjYXBlOmN4PSIxMCIKICAgICBpbmtzY2FwZTp6b29tPSI0MS41IgogICAgIHNob3dncmlkPSJmYWxzZSIKICAgICBpZD0ibmFtZWR2aWV3NiIKICAgICBpbmtzY2FwZTp3aW5kb3ctaGVpZ2h0PSIxMDAxIgogICAgIGlua3NjYXBlOndpbmRvdy13aWR0aD0iMTkyMCIKICAgICBpbmtzY2FwZTpwYWdlc2hhZG93PSIyIgogICAgIGlua3NjYXBlOnBhZ2VvcGFjaXR5PSIwIgogICAgIGd1aWRldG9sZXJhbmNlPSIxMCIKICAgICBncmlkdG9sZXJhbmNlPSIxMCIKICAgICBvYmplY3R0b2xlcmFuY2U9IjEwIgogICAgIGJvcmRlcm9wYWNpdHk9IjEiCiAgICAgYm9yZGVyY29sb3I9IiM2NjY2NjYiCiAgICAgcGFnZWNvbG9yPSIjZmZmZmZmIiAvPgogIDxwYXRoCiAgICAgc3R5bGU9ImZpbGw6I2ZmZmZmZiIKICAgICBpZD0icGF0aDIiCiAgICAgZD0iTTEzLjQxOCA3Ljg1OWEuNjk1LjY5NSAwIDAxLjk3OCAwIC42OC42OCAwIDAxMCAuOTY5bC0zLjkwOCAzLjgzYS42OTcuNjk3IDAgMDEtLjk3OSAwbC0zLjkwOC0zLjgzYS42OC42OCAwIDAxMC0uOTY5LjY5NS42OTUgMCAwMS45NzggMEwxMCAxMWwzLjQxOC0zLjE0MXoiIC8+Cjwvc3ZnPgo=) right 10px center/20px no-repeat #1c1c21;
|
||||
border: 2px solid #41444e;
|
||||
}
|
||||
|
||||
.swagger-ui select[multiple] { background: #212121; }
|
||||
|
||||
.swagger-ui button.invalid, .swagger-ui input[type=email].invalid, .swagger-ui input[type=file].invalid, .swagger-ui input[type=password].invalid, .swagger-ui input[type=search].invalid, .swagger-ui input[type=text].invalid, .swagger-ui select.invalid, .swagger-ui textarea.invalid {
|
||||
background: #390e0e;
|
||||
border-color: #c83232;
|
||||
}
|
||||
|
||||
.swagger-ui input[type=email], .swagger-ui input[type=file], .swagger-ui input[type=password], .swagger-ui input[type=search], .swagger-ui input[type=text], .swagger-ui textarea {
|
||||
background: #1c1c21;
|
||||
border: 1px solid #404040;
|
||||
}
|
||||
|
||||
.swagger-ui textarea {
|
||||
background: rgba(28, 28, 33, .8);
|
||||
color: #b5bac9;
|
||||
}
|
||||
|
||||
.swagger-ui input[disabled], .swagger-ui select[disabled] {
|
||||
background-color: #1f1f1f;
|
||||
color: #bfbfbf;
|
||||
}
|
||||
|
||||
.swagger-ui textarea[disabled] {
|
||||
background-color: #41444e;
|
||||
color: #fff;
|
||||
}
|
||||
|
||||
.swagger-ui select[disabled] { border-color: #878787; }
|
||||
|
||||
.swagger-ui textarea:focus { border: 2px solid #2a69a7; }
|
||||
|
||||
.swagger-ui .checkbox input[type=checkbox] + label > .item {
|
||||
background: #303030;
|
||||
box-shadow: #303030 0 0 0 2px;
|
||||
}
|
||||
|
||||
.swagger-ui .checkbox input[type=checkbox]:checked + label > .item { background: url("data:image/svg+xml;charset=utf-8,<svg width=\"10\" height=\"8\" viewBox=\"3 7 10 8\" xmlns=\"http://www.w3.org/2000/svg\"><path fill=\"%2341474E\" fill-rule=\"evenodd\" d=\"M6.333 15L3 11.667l1.333-1.334 2 2L11.667 7 13 8.333z\"/></svg>") 50% center no-repeat #303030; }
|
||||
|
||||
.swagger-ui .dialog-ux .backdrop-ux { background: rgba(0, 0, 0, .8); }
|
||||
|
||||
.swagger-ui .dialog-ux .modal-ux {
|
||||
background: #1c1c21;
|
||||
border: 1px solid #2e2e2e;
|
||||
box-shadow: rgba(0, 0, 0, .2) 0 10px 30px 0;
|
||||
}
|
||||
|
||||
.swagger-ui .dialog-ux .modal-ux-header .close-modal { background: 0 0; }
|
||||
|
||||
.swagger-ui .model .deprecated span, .swagger-ui .model .deprecated td { color: #bfbfbf !important; }
|
||||
|
||||
.swagger-ui .model-toggle::after { background: url("data:image/svg+xml;charset=utf-8,<svg xmlns=\"http://www.w3.org/2000/svg\" width=\"24\" height=\"24\"><path d=\"M10 6L8.59 7.41 13.17 12l-4.58 4.59L10 18l6-6z\"/></svg>") 50% center/100% no-repeat; }
|
||||
|
||||
.swagger-ui .model-hint {
|
||||
background: rgba(0, 0, 0, .7);
|
||||
color: #ebebeb;
|
||||
}
|
||||
|
||||
.swagger-ui section.models { border: 1px solid rgba(58, 64, 80, .3); }
|
||||
|
||||
.swagger-ui section.models.is-open h4 { border-bottom: 1px solid rgba(58, 64, 80, .3); }
|
||||
|
||||
.swagger-ui section.models .model-container { background: rgba(0, 0, 0, .05); }
|
||||
|
||||
.swagger-ui section.models .model-container:hover { background: rgba(0, 0, 0, .07); }
|
||||
|
||||
.swagger-ui .model-box { background: rgba(0, 0, 0, .1); }
|
||||
|
||||
.swagger-ui .prop-type { color: #aaaad4; }
|
||||
|
||||
.swagger-ui table thead tr td, .swagger-ui table thead tr th {
|
||||
border-bottom: 1px solid rgba(58, 64, 80, .2);
|
||||
color: #b5bac9;
|
||||
}
|
||||
|
||||
.swagger-ui .parameter__name.required::after { color: rgba(230, 153, 153, .6); }
|
||||
|
||||
.swagger-ui .topbar .download-url-wrapper .select-label { color: #f0f0f0; }
|
||||
|
||||
.swagger-ui .topbar .download-url-wrapper .download-url-button {
|
||||
background: #63a040;
|
||||
color: #fff;
|
||||
}
|
||||
|
||||
.swagger-ui .info .title small { background: #7c8492; }
|
||||
|
||||
.swagger-ui .info .title small.version-stamp { background-color: #7a9b27; }
|
||||
|
||||
.swagger-ui .auth-container .errors {
|
||||
background-color: #350d0d;
|
||||
color: #b5bac9;
|
||||
}
|
||||
|
||||
.swagger-ui .errors-wrapper {
|
||||
background: rgba(200, 50, 50, .1);
|
||||
border: 2px solid #c83232;
|
||||
}
|
||||
|
||||
.swagger-ui .markdown code, .swagger-ui .renderedmarkdown code {
|
||||
background: rgba(0, 0, 0, .05);
|
||||
color: #c299e6;
|
||||
}
|
||||
|
||||
.swagger-ui .model-toggle:after { background: url(data:image/svg+xml;base64,PD94bWwgdmVyc2lvbj0iMS4wIiBlbmNvZGluZz0iVVRGLTgiIHN0YW5kYWxvbmU9Im5vIj8+CjxzdmcKICAgeG1sbnM6ZGM9Imh0dHA6Ly9wdXJsLm9yZy9kYy9lbGVtZW50cy8xLjEvIgogICB4bWxuczpjYz0iaHR0cDovL2NyZWF0aXZlY29tbW9ucy5vcmcvbnMjIgogICB4bWxuczpyZGY9Imh0dHA6Ly93d3cudzMub3JnLzE5OTkvMDIvMjItcmRmLXN5bnRheC1ucyMiCiAgIHhtbG5zOnN2Zz0iaHR0cDovL3d3dy53My5vcmcvMjAwMC9zdmciCiAgIHhtbG5zPSJodHRwOi8vd3d3LnczLm9yZy8yMDAwL3N2ZyIKICAgeG1sbnM6c29kaXBvZGk9Imh0dHA6Ly9zb2RpcG9kaS5zb3VyY2Vmb3JnZS5uZXQvRFREL3NvZGlwb2RpLTAuZHRkIgogICB4bWxuczppbmtzY2FwZT0iaHR0cDovL3d3dy5pbmtzY2FwZS5vcmcvbmFtZXNwYWNlcy9pbmtzY2FwZSIKICAgaW5rc2NhcGU6dmVyc2lvbj0iMS4wICg0MDM1YTRmYjQ5LCAyMDIwLTA1LTAxKSIKICAgc29kaXBvZGk6ZG9jbmFtZT0iZG93bmxvYWQyLnN2ZyIKICAgaWQ9InN2ZzQiCiAgIHZlcnNpb249IjEuMSIKICAgaGVpZ2h0PSIyNCIKICAgd2lkdGg9IjI0Ij4KICA8bWV0YWRhdGEKICAgICBpZD0ibWV0YWRhdGExMCI+CiAgICA8cmRmOlJERj4KICAgICAgPGNjOldvcmsKICAgICAgICAgcmRmOmFib3V0PSIiPgogICAgICAgIDxkYzpmb3JtYXQ+aW1hZ2Uvc3ZnK3htbDwvZGM6Zm9ybWF0PgogICAgICAgIDxkYzp0eXBlCiAgICAgICAgICAgcmRmOnJlc291cmNlPSJodHRwOi8vcHVybC5vcmcvZGMvZGNtaXR5cGUvU3RpbGxJbWFnZSIgLz4KICAgICAgPC9jYzpXb3JrPgogICAgPC9yZGY6UkRGPgogIDwvbWV0YWRhdGE+CiAgPGRlZnMKICAgICBpZD0iZGVmczgiIC8+CiAgPHNvZGlwb2RpOm5hbWVkdmlldwogICAgIGlua3NjYXBlOmN1cnJlbnQtbGF5ZXI9InN2ZzQiCiAgICAgaW5rc2NhcGU6d2luZG93LW1heGltaXplZD0iMSIKICAgICBpbmtzY2FwZTp3aW5kb3cteT0iLTkiCiAgICAgaW5rc2NhcGU6d2luZG93LXg9Ii05IgogICAgIGlua3NjYXBlOmN5PSIxMiIKICAgICBpbmtzY2FwZTpjeD0iMTIiCiAgICAgaW5rc2NhcGU6em9vbT0iMzQuNTgzMzMzIgogICAgIHNob3dncmlkPSJmYWxzZSIKICAgICBpZD0ibmFtZWR2aWV3NiIKICAgICBpbmtzY2FwZTp3aW5kb3ctaGVpZ2h0PSIxMDAxIgogICAgIGlua3NjYXBlOndpbmRvdy13aWR0aD0iMTkyMCIKICAgICBpbmtzY2FwZTpwYWdlc2hhZG93PSIyIgogICAgIGlua3NjYXBlOnBhZ2VvcGFjaXR5PSIwIgogICAgIGd1aWRldG9sZXJhbmNlPSIxMCIKICAgICBncmlkdG9sZXJhbmNlPSIxMCIKICAgICBvYmplY3R0b2xlcmFuY2U9IjEwIgogICAgIGJvcmRlcm9wYWNpdHk9IjEiCiAgICAgYm9yZGVyY29sb3I9IiM2NjY2NjYiCiAgICAgcGFnZWNvbG9yPSIjZmZmZmZmIiAvPgogIDxwYXRoCiAgICAgc3R5bGU9ImZpbGw6I2ZmZmZmZiIKICAgICBpZD0icGF0aDIiCiAgICAgZD0iTTEwIDZMOC41OSA3LjQxIDEzLjE3IDEybC00LjU4IDQuNTlMMTAgMThsNi02eiIgLz4KPC9zdmc+Cg==) 50% no-repeat; }
|
||||
|
||||
.swagger-ui .expand-operation svg, .swagger-ui section.models h4 svg { fill: #fff; }
|
||||
|
||||
::-webkit-scrollbar-track { background-color: #646464 !important; }
|
||||
|
||||
::-webkit-scrollbar-thumb {
|
||||
background-color: #242424 !important;
|
||||
border: 2px solid #3e4346 !important;
|
||||
}
|
||||
|
||||
::-webkit-scrollbar-button:vertical:start:decrement {
|
||||
background: linear-gradient(130deg, #696969 40%, rgba(255, 0, 0, 0) 41%), linear-gradient(230deg, #696969 40%, transparent 41%), linear-gradient(0deg, #696969 40%, transparent 31%);
|
||||
background-color: #b6b6b6;
|
||||
}
|
||||
|
||||
::-webkit-scrollbar-button:vertical:end:increment {
|
||||
background: linear-gradient(310deg, #696969 40%, transparent 41%), linear-gradient(50deg, #696969 40%, transparent 41%), linear-gradient(180deg, #696969 40%, transparent 31%);
|
||||
background-color: #b6b6b6;
|
||||
}
|
||||
|
||||
::-webkit-scrollbar-button:horizontal:end:increment {
|
||||
background: linear-gradient(210deg, #696969 40%, transparent 41%), linear-gradient(330deg, #696969 40%, transparent 41%), linear-gradient(90deg, #696969 30%, transparent 31%);
|
||||
background-color: #b6b6b6;
|
||||
}
|
||||
|
||||
::-webkit-scrollbar-button:horizontal:start:decrement {
|
||||
background: linear-gradient(30deg, #696969 40%, transparent 41%), linear-gradient(150deg, #696969 40%, transparent 41%), linear-gradient(270deg, #696969 30%, transparent 31%);
|
||||
background-color: #b6b6b6;
|
||||
}
|
||||
|
||||
::-webkit-scrollbar-button, ::-webkit-scrollbar-track-piece { background-color: #3e4346 !important; }
|
||||
|
||||
.swagger-ui .black, .swagger-ui .checkbox, .swagger-ui .dark-gray, .swagger-ui .download-url-wrapper .loading, .swagger-ui .errors-wrapper .errors small, .swagger-ui .fallback, .swagger-ui .filter .loading, .swagger-ui .gray, .swagger-ui .hover-black:focus, .swagger-ui .hover-black:hover, .swagger-ui .hover-dark-gray:focus, .swagger-ui .hover-dark-gray:hover, .swagger-ui .hover-gray:focus, .swagger-ui .hover-gray:hover, .swagger-ui .hover-light-silver:focus, .swagger-ui .hover-light-silver:hover, .swagger-ui .hover-mid-gray:focus, .swagger-ui .hover-mid-gray:hover, .swagger-ui .hover-near-black:focus, .swagger-ui .hover-near-black:hover, .swagger-ui .hover-silver:focus, .swagger-ui .hover-silver:hover, .swagger-ui .light-silver, .swagger-ui .markdown pre, .swagger-ui .mid-gray, .swagger-ui .model .property, .swagger-ui .model .property.primitive, .swagger-ui .model-title, .swagger-ui .near-black, .swagger-ui .parameter__extension, .swagger-ui .parameter__in, .swagger-ui .prop-format, .swagger-ui .renderedmarkdown pre, .swagger-ui .response-col_links .response-undocumented, .swagger-ui .response-col_status .response-undocumented, .swagger-ui .silver, .swagger-ui section.models h4, .swagger-ui section.models h5, .swagger-ui span.token-not-formatted, .swagger-ui span.token-string, .swagger-ui table.headers .header-example, .swagger-ui table.model tr.description, .swagger-ui table.model tr.extension { color: #bfbfbf; }
|
||||
|
||||
.swagger-ui .hover-white:focus, .swagger-ui .hover-white:hover, .swagger-ui .info .title small pre, .swagger-ui .topbar a, .swagger-ui .white { color: #fff; }
|
||||
|
||||
.swagger-ui .bg-black-10, .swagger-ui .hover-bg-black-10:focus, .swagger-ui .hover-bg-black-10:hover, .swagger-ui .stripe-dark:nth-child(2n + 1) { background-color: rgba(0, 0, 0, .1); }
|
||||
|
||||
.swagger-ui .bg-white-10, .swagger-ui .hover-bg-white-10:focus, .swagger-ui .hover-bg-white-10:hover, .swagger-ui .stripe-light:nth-child(2n + 1) { background-color: rgba(28, 28, 33, .1); }
|
||||
|
||||
.swagger-ui .bg-light-silver, .swagger-ui .hover-bg-light-silver:focus, .swagger-ui .hover-bg-light-silver:hover, .swagger-ui .striped--light-silver:nth-child(2n + 1) { background-color: #6e6e6e; }
|
||||
|
||||
.swagger-ui .bg-moon-gray, .swagger-ui .hover-bg-moon-gray:focus, .swagger-ui .hover-bg-moon-gray:hover, .swagger-ui .striped--moon-gray:nth-child(2n + 1) { background-color: #4d4d4d; }
|
||||
|
||||
.swagger-ui .bg-light-gray, .swagger-ui .hover-bg-light-gray:focus, .swagger-ui .hover-bg-light-gray:hover, .swagger-ui .striped--light-gray:nth-child(2n + 1) { background-color: #2b2b2b; }
|
||||
|
||||
.swagger-ui .bg-near-white, .swagger-ui .hover-bg-near-white:focus, .swagger-ui .hover-bg-near-white:hover, .swagger-ui .striped--near-white:nth-child(2n + 1) { background-color: #242424; }
|
||||
|
||||
.swagger-ui .opblock-tag:hover, .swagger-ui section.models h4:hover { background: rgba(0, 0, 0, .02); }
|
||||
|
||||
.swagger-ui .checkbox p, .swagger-ui .dialog-ux .modal-ux-content h4, .swagger-ui .dialog-ux .modal-ux-content p, .swagger-ui .dialog-ux .modal-ux-header h3, .swagger-ui .errors-wrapper .errors h4, .swagger-ui .errors-wrapper hgroup h4, .swagger-ui .info .base-url, .swagger-ui .info .title, .swagger-ui .info h1, .swagger-ui .info h2, .swagger-ui .info h3, .swagger-ui .info h4, .swagger-ui .info h5, .swagger-ui .info li, .swagger-ui .info p, .swagger-ui .info table, .swagger-ui .loading-container .loading::after, .swagger-ui .model, .swagger-ui .opblock .opblock-section-header h4, .swagger-ui .opblock .opblock-section-header > label, .swagger-ui .opblock .opblock-summary-description, .swagger-ui .opblock .opblock-summary-operation-id, .swagger-ui .opblock .opblock-summary-path, .swagger-ui .opblock .opblock-summary-path__deprecated, .swagger-ui .opblock-description-wrapper, .swagger-ui .opblock-description-wrapper h4, .swagger-ui .opblock-description-wrapper p, .swagger-ui .opblock-external-docs-wrapper, .swagger-ui .opblock-external-docs-wrapper h4, .swagger-ui .opblock-external-docs-wrapper p, .swagger-ui .opblock-tag small, .swagger-ui .opblock-title_normal, .swagger-ui .opblock-title_normal h4, .swagger-ui .opblock-title_normal p, .swagger-ui .parameter__name, .swagger-ui .parameter__type, .swagger-ui .response-col_links, .swagger-ui .response-col_status, .swagger-ui .responses-inner h4, .swagger-ui .responses-inner h5, .swagger-ui .scheme-container .schemes > label, .swagger-ui .scopes h2, .swagger-ui .servers > label, .swagger-ui .tab li, .swagger-ui label, .swagger-ui select, .swagger-ui table.headers td { color: #b5bac9; }
|
||||
|
||||
.swagger-ui .download-url-wrapper .failed, .swagger-ui .filter .failed, .swagger-ui .model-deprecated-warning, .swagger-ui .parameter__deprecated, .swagger-ui .parameter__name.required span, .swagger-ui table.model tr.property-row .star { color: #e69999; }
|
||||
|
||||
.swagger-ui .opblock-body pre.microlight, .swagger-ui textarea.curl {
|
||||
background: #41444e;
|
||||
border-radius: 4px;
|
||||
color: #fff;
|
||||
}
|
||||
|
||||
.swagger-ui .expand-methods svg, .swagger-ui .expand-methods:hover svg { fill: #bfbfbf; }
|
||||
|
||||
.swagger-ui .auth-container, .swagger-ui .dialog-ux .modal-ux-header { border-bottom: 1px solid #2e2e2e; }
|
||||
|
||||
.swagger-ui .topbar .download-url-wrapper .select-label select, .swagger-ui .topbar .download-url-wrapper input[type=text] { border: 2px solid #63a040; }
|
||||
|
||||
.swagger-ui .info a, .swagger-ui .info a:hover, .swagger-ui .scopes h2 a { color: #99bde6; }
|
||||
|
||||
/* Dark Scrollbar */
|
||||
::-webkit-scrollbar {
|
||||
width: 14px;
|
||||
height: 14px;
|
||||
}
|
||||
|
||||
::-webkit-scrollbar-button {
|
||||
background-color: #3e4346 !important;
|
||||
}
|
||||
|
||||
::-webkit-scrollbar-track {
|
||||
background-color: #646464 !important;
|
||||
}
|
||||
|
||||
::-webkit-scrollbar-track-piece {
|
||||
background-color: #3e4346 !important;
|
||||
}
|
||||
|
||||
::-webkit-scrollbar-thumb {
|
||||
height: 50px;
|
||||
background-color: #242424 !important;
|
||||
border: 2px solid #3e4346 !important;
|
||||
}
|
||||
|
||||
::-webkit-scrollbar-corner {}
|
||||
|
||||
::-webkit-resizer {}
|
||||
|
||||
::-webkit-scrollbar-button:vertical:start:decrement {
|
||||
background:
|
||||
linear-gradient(130deg, #696969 40%, rgba(255, 0, 0, 0) 41%),
|
||||
linear-gradient(230deg, #696969 40%, rgba(0, 0, 0, 0) 41%),
|
||||
linear-gradient(0deg, #696969 40%, rgba(0, 0, 0, 0) 31%);
|
||||
background-color: #b6b6b6;
|
||||
}
|
||||
|
||||
::-webkit-scrollbar-button:vertical:end:increment {
|
||||
background:
|
||||
linear-gradient(310deg, #696969 40%, rgba(0, 0, 0, 0) 41%),
|
||||
linear-gradient(50deg, #696969 40%, rgba(0, 0, 0, 0) 41%),
|
||||
linear-gradient(180deg, #696969 40%, rgba(0, 0, 0, 0) 31%);
|
||||
background-color: #b6b6b6;
|
||||
}
|
||||
|
||||
::-webkit-scrollbar-button:horizontal:end:increment {
|
||||
background:
|
||||
linear-gradient(210deg, #696969 40%, rgba(0, 0, 0, 0) 41%),
|
||||
linear-gradient(330deg, #696969 40%, rgba(0, 0, 0, 0) 41%),
|
||||
linear-gradient(90deg, #696969 30%, rgba(0, 0, 0, 0) 31%);
|
||||
background-color: #b6b6b6;
|
||||
}
|
||||
|
||||
::-webkit-scrollbar-button:horizontal:start:decrement {
|
||||
background:
|
||||
linear-gradient(30deg, #696969 40%, rgba(0, 0, 0, 0) 41%),
|
||||
linear-gradient(150deg, #696969 40%, rgba(0, 0, 0, 0) 41%),
|
||||
linear-gradient(270deg, #696969 30%, rgba(0, 0, 0, 0) 31%);
|
||||
background-color: #b6b6b6;
|
||||
}
|
17
static/swagger-ui/index.css
vendored
Normal file
@@ -0,0 +1,17 @@
/*! Swagger UI 4.13.2 | https://swagger.io/tools/swagger-ui/ | Apache License 2.0 (license file can be found at ./LICENSE) */
html {
    box-sizing: border-box;
    overflow: -moz-scrollbars-vertical;
    overflow-y: scroll;
}

*,
*:before,
*:after {
    box-sizing: inherit;
}

body {
    margin: 0;
    background: #fafafa;
}
79
static/swagger-ui/oauth2-redirect.html
vendored
Normal file
@@ -0,0 +1,79 @@
<!doctype html>
<html lang="en-US">
<head>
    <title>Swagger UI: OAuth2 Redirect</title>
</head>
<body>
<script>
    'use strict';
    function run () {
        var oauth2 = window.opener.swaggerUIRedirectOauth2;
        var sentState = oauth2.state;
        var redirectUrl = oauth2.redirectUrl;
        var isValid, qp, arr;

        if (/code|token|error/.test(window.location.hash)) {
            qp = window.location.hash.substring(1);
        } else {
            qp = location.search.substring(1);
        }

        arr = qp.split("&");
        arr.forEach(function (v,i,_arr) { _arr[i] = '"' + v.replace('=', '":"') + '"';});
        qp = qp ? JSON.parse('{' + arr.join() + '}',
                function (key, value) {
                    return key === "" ? value : decodeURIComponent(value);
                }
        ) : {};

        isValid = qp.state === sentState;

        if ((
            oauth2.auth.schema.get("flow") === "accessCode" ||
            oauth2.auth.schema.get("flow") === "authorizationCode" ||
            oauth2.auth.schema.get("flow") === "authorization_code"
        ) && !oauth2.auth.code) {
            if (!isValid) {
                oauth2.errCb({
                    authId: oauth2.auth.name,
                    source: "auth",
                    level: "warning",
                    message: "Authorization may be unsafe, passed state was changed in server. The passed state wasn't returned from auth server."
                });
            }

            if (qp.code) {
                delete oauth2.state;
                oauth2.auth.code = qp.code;
                oauth2.callback({auth: oauth2.auth, redirectUrl: redirectUrl});
            } else {
                let oauthErrorMsg;
                if (qp.error) {
                    oauthErrorMsg = "["+qp.error+"]: " +
                        (qp.error_description ? qp.error_description+ ". " : "no accessCode received from the server. ") +
                        (qp.error_uri ? "More info: "+qp.error_uri : "");
                }

                oauth2.errCb({
                    authId: oauth2.auth.name,
                    source: "auth",
                    level: "error",
                    message: oauthErrorMsg || "[Authorization failed]: no accessCode received from the server."
                });
            }
        } else {
            oauth2.callback({auth: oauth2.auth, token: qp, isValid: isValid, redirectUrl: redirectUrl});
        }
        window.close();
    }

    if (document.readyState !== 'loading') {
        run();
    } else {
        document.addEventListener('DOMContentLoaded', function () {
            run();
        });
    }
</script>
</body>
</html>
2
static/swagger-ui/swagger-ui-bundle.js
vendored
Normal file
File diff suppressed because one or more lines are too long
3
static/swagger-ui/swagger-ui.css
vendored
Normal file
File diff suppressed because one or more lines are too long
2
static/swagger-ui/swagger-ui.js
vendored
Normal file
File diff suppressed because one or more lines are too long
@@ -9,7 +9,8 @@
|
||||
<link rel="stylesheet" href="static/bootstrap.min.css">
|
||||
<link rel="stylesheet" href="static/bootstrap-toggle.min.css">
|
||||
<link rel="stylesheet" href="static/open-iconic-bootstrap.min.css">
|
||||
<link rel="stylesheet" href="static/custom.css?ver=1.18.1b">
|
||||
<link href="static/open-iconic/css/open-iconic.css" rel="stylesheet">
|
||||
<link rel="stylesheet" href="static/custom.css?ver=1.18.1c">
|
||||
|
||||
<script src="static/jquery-3.6.0.min.js"></script>
|
||||
<script src="static/jquery-ui.sortable.min.js"></script>
|
||||
@@ -17,7 +18,7 @@
|
||||
<script src="static/bootstrap.min.js"></script>
|
||||
<script src="static/bootstrap-toggle.min.js"></script>
|
||||
<script src="static/rangy-core.min.js"></script>
|
||||
<script src="static/application.js?ver=1.18.1c"></script>
|
||||
<script src="static/application.js?ver=1.18.1e"></script>
|
||||
<script src="static/favicon.js"></script>
|
||||
{% if flaskwebgui %}
|
||||
<script src="static/flask_web_gui.js"></script>
|
||||
@@ -38,12 +39,8 @@
|
||||
<div class="collapse navbar-collapse" id="navbarNavDropdown">
|
||||
<ul class="nav navbar-nav">
|
||||
{% if not hide_ai_menu %}
|
||||
<li class="nav-item dropdown">
|
||||
<a class="nav-link dropdown-toggle" href="#" data-toggle="dropdown" aria-haspopup="true" aria-expanded="false">AI</a>
|
||||
<div class="dropdown-menu">
|
||||
<a class="dropdown-item" href="#" id="btn_loadmodel">Load Model</a>
|
||||
<a class="dropdown-item" href="#" id="btn_showmodel">Model Info</a>
|
||||
</div>
|
||||
<li class="nav-item">
|
||||
<a class="nav-link" href="#" id="btn_loadmodel">AI</a>
|
||||
</li>
|
||||
{% endif %}
|
||||
<li class="nav-item dropdown">
|
||||
@@ -75,7 +72,7 @@
|
||||
<div class="dropdown-menu">
|
||||
<a class="dropdown-item" href="#" id="btn_import">AI Dungeon Adventure</a>
|
||||
<a class="dropdown-item" href="#" id="btn_importwi">AI Dungeon World Info</a>
|
||||
<a class="dropdown-item" href="#" id="btn_impaidg">aidg.club Prompt</a>
|
||||
<a class="dropdown-item" href="#" id="btn_impaidg">aetherroom.club Prompt</a>
|
||||
</div>
|
||||
</li>
|
||||
<li class="nav-item">
|
||||
@@ -123,6 +120,11 @@
|
||||
</div>
|
||||
<div class="row" id="formatmenu">
|
||||
</div>
|
||||
|
||||
<div id="token_prob_menu" class="row hidden">
|
||||
<div id="token_prob_container"></div>
|
||||
</div>
|
||||
|
||||
<div class="layer-container">
|
||||
<div class="layer-bottom row" id="gamescreen">
|
||||
<span id="gametext" contenteditable="true"><p>...</p></span>
|
||||
@@ -156,7 +158,7 @@
|
||||
<div id="inputrowmode">
|
||||
<button type="button" class="btn btn-secondary hidden" id="btnmode">Mode:<br/><b id="btnmode_label">Story</b></button>
|
||||
</div>
|
||||
<div id="inputrowleft">
|
||||
<div id="inputrowleft" class="tokens-counted">
|
||||
<textarea class="form-control" id="input_text" placeholder="Enter text here"></textarea>
|
||||
</div>
|
||||
<div id="inputrowright">
|
||||
@@ -169,7 +171,7 @@
|
||||
<div class="anotelabel no-padding">
|
||||
Author's Note
|
||||
</div>
|
||||
<div class="anotefield">
|
||||
<div class="anotefield tokens-counted">
|
||||
<textarea class="form-control" placeholder="Author's Note" id="anoteinput"></textarea>
|
||||
</div>
|
||||
</div>
|
||||
@@ -210,7 +212,7 @@
|
||||
</div>
|
||||
</div>
|
||||
<div class="hidden" id="popupcontainer">
|
||||
<div id="popup">
|
||||
<div id="popup_old">
|
||||
<div id="popuptitlebar">
|
||||
<div id="popuptitletext">Select an Adventure to Import</div>
|
||||
</div>
|
||||
@@ -233,7 +235,7 @@
|
||||
<div class="popuptitletext">Enter the Prompt Number</div>
|
||||
</div>
|
||||
<div class="aidgpopuplistheader">
|
||||
(4-digit number at the end of aidg.club URL)
|
||||
(4-digit number at the end of aetherroom.club URL)
|
||||
</div>
|
||||
<div class="aidgpopupcontent">
|
||||
<input class="form-control" type="text" placeholder="Prompt Number" id="aidgpromptnum">
|
||||
@@ -291,7 +293,7 @@
|
||||
<div id="loadmodellistbreadcrumbs">
|
||||
|
||||
</div>
|
||||
<div id="loadmodellistcontent" style="overflow: scroll; height: 300px;">
|
||||
<div id="loadmodellistcontent" style="overflow: auto; height: 300px;">
|
||||
</div>
|
||||
<div class="popupfooter">
|
||||
<input class="form-control hidden" type="text" placeholder="key" id="modelkey" onblur="socket.send({'cmd': 'OAI_Key_Update', 'key': $('#modelkey')[0].value});">
|
||||
@@ -340,6 +342,7 @@
|
||||
<div id="sppopup">
|
||||
<div class="popuptitlebar">
|
||||
<div class="popuptitletext">Select A Soft Prompt To Use</div>
|
||||
<button class="btn btn-primary" onclick="socket.emit('show_folder_soft_prompt', {});"><span class="oi" style="color: white;" data-glyph="folder"></span></button>
|
||||
</div>
|
||||
<div id="splistcontent">
|
||||
</div>
|
||||
@@ -353,6 +356,7 @@
|
||||
<div id="uspopup">
|
||||
<div class="popuptitlebar">
|
||||
<div class="popuptitletext">Select userscripts to load; drag-and-drop to reorder</div>
|
||||
<button class="btn btn-primary" onclick="socket.emit('show_folder_usersripts', {});"><span class="oi" style="color: white;" data-glyph="folder"></span></button>
|
||||
</div>
|
||||
<div class="usheadergrid">
|
||||
<div>[AVAILABLE]</div>
|
||||
@@ -441,10 +445,10 @@
|
||||
<div class="popuptitletext">Model Info</div>
|
||||
</div>
|
||||
<div id=showmodelnamecontent style="width:50%;">
|
||||
Read Only
|
||||
Model Info Missing
|
||||
</div>
|
||||
<div class="popupfooter" style="width:50% center;">
|
||||
<button type="button" class="btn btn-primary" onclick='$("#showmodelnamecontainer").addClass("hidden");'>OK</button>
|
||||
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
@@ -474,5 +478,26 @@
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<!------------- Pop-Up ------------------------------->
|
||||
<div class="popupcontainer hidden" id="popup">
|
||||
<div class="new_popup">
|
||||
<div style="height:100%;">
|
||||
<div class="title" id="popup_title">
|
||||
Popup Title
|
||||
</div>
|
||||
<div id="popup_breadcrumbs"></div>
|
||||
<div class="popup_list_area" id="popup_list"></div>
|
||||
<div class="popup_load_cancel hidden" id="popup_upload">
|
||||
<input type=file id="popup_upload_file">
|
||||
</div>
|
||||
<div style="background-color: black">Drag file(s) above or click here to Upload File<input id="popup_upload_input" type=file onchange="upload_file(this)"></div>
|
||||
<div class="popup_load_cancel" id="popup_load_cancel">
|
||||
<button class="btn btn-secondary popup_load_cancel_button" id="popup_accept">Load</button>
|
||||
<button class="btn btn-primary popup_load_cancel_button" id="popup_cancel" onclick='document.getElementById("popup").classList.add("hidden");'>Cancel</button>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</body>
|
||||
</html>
|
||||
|
35
templates/swagger-ui.html
Normal file
@@ -0,0 +1,35 @@
{# This is the HTML template for Swagger UI (the GUI for the API documentation at /api/latest/docs) #}
<!DOCTYPE html>
<html lang="en">
<head>
    <title>KoboldAI API</title>
    <meta charset="UTF-8">
    <link rel="stylesheet" type="text/css" href="/static/swagger-ui/swagger-ui.css" />
    <link rel="stylesheet" type="text/css" href="/static/swagger-ui/index.css" />
    <script>
        if (window.matchMedia && window.matchMedia("(prefers-color-scheme: dark)").matches) document.write('<link rel="stylesheet" type="text/css" href="/static/swagger-ui/SwaggerDark.css" />');
    </script>
</head>
<body>
    <div id="swagger-ui"></div>
    <script src="/static/swagger-ui/swagger-ui-bundle.js" charset="UTF-8"></script>
    <script>
        window.onload = function() {
            window.ui = SwaggerUIBundle({
                url: "{{ url }}",
                oauth2RedirectUrl: "/static/swagger-ui/oauth2-redirect.html",
                dom_id: "#swagger-ui",
                deepLinking: true,
                defaultModelsExpandDepth: 0, // Causes the "Schemas" section at the bottom to be collapsed by default
                presets: [
                    SwaggerUIBundle.presets.apis
                ],
                plugins: [
                    SwaggerUIBundle.plugins.DownloadUrl
                ],
                layout: "BaseLayout"
            });
        };
    </script>
</body>
</html>
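The template above expects the Flask server to pass a `url` variable pointing at the OpenAPI spec that Swagger UI should render. A minimal sketch of such a route is shown below; the endpoint name, URL rule, and spec path are illustrative assumptions and may not match what aiserver.py actually does.

```python
# Hypothetical sketch: serving templates/swagger-ui.html from Flask.
# The route path and the spec URL ("/api/latest/openapi.json") are assumptions,
# not taken from this commit.
from flask import Flask, render_template

app = Flask(__name__, template_folder="templates", static_folder="static")

@app.route("/api/latest/docs/")
def api_docs():
    # "url" tells SwaggerUIBundle which OpenAPI document to load and render
    return render_template("swagger-ui.html", url="/api/latest/openapi.json")
```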
@@ -52,7 +52,7 @@ import pickle
|
||||
import torch
|
||||
import utils
|
||||
from torch.nn import Module
|
||||
from typing import Any, Callable, Dict, Optional, Tuple, Type, Union
|
||||
from typing import Any, Callable, Dict, Optional, Tuple, Union
|
||||
|
||||
|
||||
_EXTRA_STATE_KEY_SUFFIX = '_extra_state'
|
||||
@@ -73,7 +73,7 @@ STORAGE_TYPE_MAP = {
|
||||
|
||||
|
||||
class LazyTensor:
|
||||
def __init__(self, storage_type: Type[torch._StorageBase], key: str, location: str, dtype: Optional[torch.dtype] = None, seek_offset: Optional[int] = None, shape: Optional[Tuple[int, ...]] = None, stride: Optional[Tuple[int, ...]] = None, requires_grad=False, backward_hooks: Any = None):
|
||||
def __init__(self, storage_type, key: str, location: str, dtype: Optional[torch.dtype] = None, seek_offset: Optional[int] = None, shape: Optional[Tuple[int, ...]] = None, stride: Optional[Tuple[int, ...]] = None, requires_grad=False, backward_hooks: Any = None):
|
||||
self.storage_type = storage_type
|
||||
self.key = key
|
||||
self.location = location
|
||||
|
@@ -56,6 +56,22 @@ from mesh_transformer.util import to_bf16
|
||||
|
||||
params: Dict[str, Any] = {}
|
||||
|
||||
__seed = random.randrange(sys.maxsize)
|
||||
rng = random.Random(__seed)
|
||||
|
||||
|
||||
def get_rng_seed():
|
||||
return __seed
|
||||
|
||||
def set_rng_seed(seed: int):
|
||||
global __seed, rng
|
||||
rng = random.Random(seed)
|
||||
__seed = seed
|
||||
return seed
|
||||
|
||||
def randomize_rng_seed():
|
||||
return set_rng_seed(random.randrange(sys.maxsize))
|
||||
|
||||
|
||||
def warper_callback(logits) -> np.array:
|
||||
raise NotImplementedError("`tpu_mtj_backend.warper_callback()` needs to be defined")
|
||||
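The seed helpers added above give the TPU backend a module-level `random.Random` instance, which the later hunks use in place of the global `random` module so that generations can be made repeatable. A minimal usage sketch, assuming `tpu_mtj_backend` is importable and a model has already been loaded:

```python
# Hedged sketch of the new seeding API; only get_rng_seed, set_rng_seed and
# randomize_rng_seed come from this commit, the surrounding flow is assumed.
import tpu_mtj_backend

print(tpu_mtj_backend.get_rng_seed())   # seed picked at import time
tpu_mtj_backend.set_rng_seed(1234)      # pin the PRNG; repeated generations reuse this seed
# ... call the backend's generate functions here; identical seeds yield identical PRNG keys
tpu_mtj_backend.randomize_rng_seed()    # switch back to a fresh random seed
```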
@@ -547,7 +563,7 @@ class PenalizingCausalTransformer(CausalTransformer):
|
||||
compiling_callback()
|
||||
numseqs = numseqs_aux.shape[0]
|
||||
# These are the tokens that we don't want the AI to ever write
|
||||
self.badwords = jnp.array(koboldai_vars.badwordsids).squeeze()
|
||||
badwords = jnp.array(koboldai_vars.badwordsids).squeeze()
|
||||
@hk.transform
|
||||
def generate_sample(context, ctx_length):
|
||||
# Give the initial context to the transformer
|
||||
@@ -605,7 +621,7 @@ class PenalizingCausalTransformer(CausalTransformer):
|
||||
# Remove any tokens in the badwords list by setting
|
||||
# their logits to negative infinity which effectively
|
||||
# makes their probabilities of being chosen zero
|
||||
logits = logits.at[self.badwords].set(-jnp.inf)
|
||||
logits = logits.at[badwords].set(-jnp.inf)
|
||||
# Use the sampler (kobold_sample_static) to pick one token
|
||||
# based on the logits array as a 0D uint32 array
|
||||
# (higher logit means higher probability of being
|
||||
@@ -728,7 +744,7 @@ class PenalizingCausalTransformer(CausalTransformer):
|
||||
assert not return_logits
|
||||
assert gen_length.ndim == 1
|
||||
assert soft_embeddings is not None
|
||||
key = hk.PRNGSequence(random.randint(0, 2 ** 60))
|
||||
key = hk.PRNGSequence(rng.randint(0, 2 ** 60))
|
||||
batch_size = ctx.shape[0]
|
||||
self.batch_size = batch_size
|
||||
_numseqs_aux = jnp.empty((batch_size, numseqs), dtype=np.uint32)
|
||||
@@ -776,7 +792,7 @@ class PenalizingCausalTransformer(CausalTransformer):
|
||||
return sample_data, n_generated, regeneration_required, halt
|
||||
def generate_static(self, ctx, ctx_length, gen_length, numseqs, sampler_options, return_logits=False, soft_embeddings=None):
|
||||
assert not return_logits
|
||||
key = hk.PRNGSequence(random.randint(0, 2 ** 60))
|
||||
key = hk.PRNGSequence(rng.randint(0, 2 ** 60))
|
||||
batch_size = ctx.shape[0]
|
||||
self.batch_size = batch_size
|
||||
started_compiling_callback()
|
||||
@@ -1025,7 +1041,7 @@ def load_model(path: str, driver_version="tpu_driver0.1_dev20210607", hf_checkpo
|
||||
elif "eos_token_id" in kwargs:
|
||||
pad_token_id = kwargs["eos_token_id"]
|
||||
|
||||
if not hasattr(vars, "sampler_order") or not koboldai_vars.sampler_order:
|
||||
if not hasattr(koboldai_vars, "sampler_order") or not koboldai_vars.sampler_order:
|
||||
koboldai_vars.sampler_order = utils.default_sampler_order.copy()
|
||||
|
||||
default_params = {
|
||||
@@ -1145,9 +1161,9 @@ def load_model(path: str, driver_version="tpu_driver0.1_dev20210607", hf_checkpo
|
||||
tpu_address = tpu_address.replace("grpc://", "")
|
||||
tpu_address_without_port = tpu_address.split(':', 1)[0]
|
||||
url = f'http://{tpu_address_without_port}:8475/requestversion/{driver_version}'
|
||||
requests.post(url)
|
||||
config.FLAGS.jax_xla_backend = "tpu_driver"
|
||||
config.FLAGS.jax_backend_target = "grpc://" + tpu_address
|
||||
requests.post(url)
|
||||
spinner.terminate()
|
||||
print()
|
||||
|
||||
@@ -1230,13 +1246,14 @@ def load_model(path: str, driver_version="tpu_driver0.1_dev20210607", hf_checkpo
|
||||
if utils.num_shards is not None:
|
||||
utils.current_shard += 1
|
||||
for key in sorted(model_dict.keys(), key=lambda k: (model_dict[k].key, model_dict[k].seek_offset)):
|
||||
model_spec_key = max((k for k in model_spec.keys() if key.endswith(k)), key=len, default=None)
|
||||
|
||||
# Some model weights are used by transformers but not by MTJ.
|
||||
# We have to materialize these weights anyways because
|
||||
# transformers will throw a tantrum otherwise. To attain
|
||||
# the least possible memory usage, we create them as meta
|
||||
# tensors, which don't take up any actual CPU or TPU memory.
|
||||
if key not in model_spec:
|
||||
if model_spec_key is None:
|
||||
model_dict[key] = torch.empty(model_dict[key].shape, dtype=model_dict[key].dtype, device="meta")
|
||||
utils.bar.update(1)
|
||||
continue
|
||||
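The `model_spec_key` lookup above matches each checkpoint key against the spec by longest suffix, falling back to `None` (and hence a meta tensor) when nothing matches. A small self-contained sketch of that selection logic, with invented keys purely for illustration:

```python
# Illustrative only: the dictionary contents are made up, but the max(...)
# expression mirrors the one used in load_model above.
model_spec = {"attn.w": {}, "mlp.w": {}}

def find_spec_key(key: str):
    # longest spec key that the checkpoint key ends with, or None
    return max((k for k in model_spec.keys() if key.endswith(k)), key=len, default=None)

print(find_spec_key("transformer.h.0.attn.w"))  # -> "attn.w"
print(find_spec_key("lm_head.bias"))            # -> None (materialized as a meta tensor)
```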
@@ -1251,7 +1268,7 @@ def load_model(path: str, driver_version="tpu_driver0.1_dev20210607", hf_checkpo
|
||||
if current_offset != model_dict[key].seek_offset:
|
||||
f.read(model_dict[key].seek_offset - current_offset)
|
||||
current_offset = model_dict[key].seek_offset
|
||||
spec = model_spec[key]
|
||||
spec = model_spec[model_spec_key]
|
||||
transforms = set(spec.get("transforms", ()))
|
||||
if not isinstance(model_dict[key], torch_lazy_loader.LazyTensor):
|
||||
error = f"Duplicate key {repr(key)}"
|
||||
|
@@ -1,6 +1,5 @@
|
||||
@echo off
|
||||
%~d0
|
||||
cd %~dp0
|
||||
cd /d %~dp0
|
||||
TITLE KoboldAI - Updater
|
||||
SET /P M=<loader.settings
|
||||
IF %M%==1 GOTO drivemap
|
||||
@@ -50,4 +49,9 @@ git remote add origin %origin%
|
||||
git fetch --all
|
||||
git checkout %branch% -f
|
||||
git reset --hard origin/%branch%
|
||||
IF %M%==1 umamba.exe install --no-shortcuts -r K:\python\ -n base -f "%~dp0\environments\huggingface.yml" -y --always-copy
|
||||
IF %M%==2 umamba.exe install --no-shortcuts -r miniconda3 -n base -f environments\huggingface.yml -y --always-copy
|
||||
IF %M%==3 umamba.exe install --no-shortcuts -r B:\python\ -n base -f "%~dp0\environments\huggingface.yml" -y --always-copy
|
||||
|
||||
|
||||
%windir%\system32\timeout -t 10
|
8
userscripts/api_documentation.html
vendored
@@ -470,11 +470,11 @@
|
||||
<pre class=" language-lua"><code class="prism language-lua"><span class="token keyword">local</span> entry <span class="token operator">=</span> kobold<span class="token punctuation">.</span>worldinfo<span class="token punctuation">[</span><span class="token number">5</span><span class="token punctuation">]</span> <span class="token comment">-- Retrieves fifth entry from top as a KoboldWorldInfoEntry</span>
|
||||
</code></pre>
|
||||
<p>You can use <code>ipairs</code> or a numeric loop to iterate from top to bottom:</p>
|
||||
<pre class=" language-lua"><code class="prism language-lua"><span class="token keyword">for</span> index<span class="token punctuation">,</span> entry <span class="token keyword">in</span> <span class="token function">ipairs</span><span class="token punctuation">(</span>kobold<span class="token punctuation">.</span>worldinfo<span class="token punctuation">)</span><span class="token punctuation">:</span>
|
||||
<pre class=" language-lua"><code class="prism language-lua"><span class="token keyword">for</span> index<span class="token punctuation">,</span> entry <span class="token keyword">in</span> <span class="token function">ipairs</span><span class="token punctuation">(</span>kobold<span class="token punctuation">.</span>worldinfo<span class="token punctuation">)</span> <span class="token keyword">do</span>
|
||||
<span class="token function">print</span><span class="token punctuation">(</span>index<span class="token punctuation">,</span> entry<span class="token punctuation">.</span>content<span class="token punctuation">)</span>
|
||||
<span class="token keyword">end</span>
|
||||
</code></pre>
|
||||
<pre class=" language-lua"><code class="prism language-lua"><span class="token keyword">for</span> index <span class="token operator">=</span> <span class="token number">1</span><span class="token punctuation">,</span> <span class="token operator">#</span>kobold<span class="token punctuation">.</span>worldinfo <span class="token keyword">do</span><span class="token punctuation">:</span>
|
||||
<pre class=" language-lua"><code class="prism language-lua"><span class="token keyword">for</span> index <span class="token operator">=</span> <span class="token number">1</span><span class="token punctuation">,</span> <span class="token operator">#</span>kobold<span class="token punctuation">.</span>worldinfo <span class="token keyword">do</span>
|
||||
<span class="token function">print</span><span class="token punctuation">(</span>index<span class="token punctuation">,</span> kobold<span class="token punctuation">.</span>worldinfo<span class="token punctuation">[</span>index<span class="token punctuation">]</span><span class="token punctuation">.</span>content<span class="token punctuation">)</span>
|
||||
<span class="token keyword">end</span>
|
||||
</code></pre>
|
||||
@@ -531,11 +531,11 @@
|
||||
<p>Can be indexed in amortized constant worst-case time and iterated over and has a <code>finduid</code> method just like <code>kobold.worldinfo</code>, but gets folders (as <code>KoboldWorldInfoFolder</code> objects) instead.</p>
|
||||
<pre class=" language-lua"><code class="prism language-lua"><span class="token keyword">local</span> folder <span class="token operator">=</span> kobold<span class="token punctuation">.</span>worldinfo<span class="token punctuation">.</span>folders<span class="token punctuation">[</span><span class="token number">5</span><span class="token punctuation">]</span> <span class="token comment">-- Retrieves fifth folder from top as a KoboldWorldInfoFolder</span>
|
||||
</code></pre>
|
||||
<pre class=" language-lua"><code class="prism language-lua"><span class="token keyword">for</span> index<span class="token punctuation">,</span> folder <span class="token keyword">in</span> <span class="token function">ipairs</span><span class="token punctuation">(</span>kobold<span class="token punctuation">.</span>worldinfo<span class="token punctuation">.</span>folders<span class="token punctuation">)</span><span class="token punctuation">:</span>
|
||||
<pre class=" language-lua"><code class="prism language-lua"><span class="token keyword">for</span> index<span class="token punctuation">,</span> folder <span class="token keyword">in</span> <span class="token function">ipairs</span><span class="token punctuation">(</span>kobold<span class="token punctuation">.</span>worldinfo<span class="token punctuation">.</span>folders<span class="token punctuation">)</span> <span class="token keyword">do</span>
|
||||
<span class="token function">print</span><span class="token punctuation">(</span>index<span class="token punctuation">,</span> folder<span class="token punctuation">.</span>name<span class="token punctuation">)</span>
|
||||
<span class="token keyword">end</span>
|
||||
</code></pre>
|
||||
<pre class=" language-lua"><code class="prism language-lua"><span class="token keyword">for</span> index <span class="token operator">=</span> <span class="token number">1</span><span class="token punctuation">,</span> <span class="token operator">#</span>kobold<span class="token punctuation">.</span>worldinfo<span class="token punctuation">.</span>folders <span class="token keyword">do</span><span class="token punctuation">:</span>
|
||||
<pre class=" language-lua"><code class="prism language-lua"><span class="token keyword">for</span> index <span class="token operator">=</span> <span class="token number">1</span><span class="token punctuation">,</span> <span class="token operator">#</span>kobold<span class="token punctuation">.</span>worldinfo<span class="token punctuation">.</span>folders <span class="token keyword">do</span>
|
||||
<span class="token function">print</span><span class="token punctuation">(</span>index<span class="token punctuation">,</span> kobold<span class="token punctuation">.</span>worldinfo<span class="token punctuation">.</span>folders<span class="token punctuation">[</span>index<span class="token punctuation">]</span><span class="token punctuation">.</span>name<span class="token punctuation">)</span>
|
||||
<span class="token keyword">end</span>
|
||||
</code></pre>
|
||||
|
@@ -503,13 +503,13 @@ local entry = kobold.worldinfo[5] -- Retrieves fifth entry from top as a Kobold
|
||||
You can use `ipairs` or a numeric loop to iterate from top to bottom:
|
||||
|
||||
```lua
|
||||
for index, entry in ipairs(kobold.worldinfo):
|
||||
for index, entry in ipairs(kobold.worldinfo) do
|
||||
print(index, entry.content)
|
||||
end
|
||||
```
|
||||
|
||||
```lua
|
||||
for index = 1, #kobold.worldinfo do:
|
||||
for index = 1, #kobold.worldinfo do
|
||||
print(index, kobold.worldinfo[index].content)
|
||||
end
|
||||
```
|
||||
@@ -587,13 +587,13 @@ local folder = kobold.worldinfo.folders[5] -- Retrieves fifth folder from top a
|
||||
```
|
||||
|
||||
```lua
|
||||
for index, folder in ipairs(kobold.worldinfo.folders):
|
||||
for index, folder in ipairs(kobold.worldinfo.folders) do
|
||||
print(index, folder.name)
|
||||
end
|
||||
```
|
||||
|
||||
```lua
|
||||
for index = 1, #kobold.worldinfo.folders do:
|
||||
for index = 1, #kobold.worldinfo.folders do
|
||||
print(index, kobold.worldinfo.folders[index].name)
|
||||
end
|
||||
```
|
||||
|
@@ -183,8 +183,8 @@ function userscript.genmod()
|
||||
max_overlap[i] = 0
|
||||
local s = {}
|
||||
local z = {[0] = 0}
|
||||
local l = 1
|
||||
local r = 1
|
||||
local l = 0
|
||||
local r = 0
|
||||
local n_s = math.min(n_tokens, bias_entry.n_tokens)
|
||||
local j = 0
|
||||
for k = 1, n_s do
|
||||
|
3
utils.py
@@ -175,10 +175,9 @@ def num_layers(config):
|
||||
from flask_socketio import emit
|
||||
class Send_to_socketio(object):
|
||||
def write(self, bar):
|
||||
#print("should be emitting: ", bar, end="")
|
||||
time.sleep(0.01)
|
||||
try:
|
||||
emit('from_server', {'cmd': 'model_load_status', 'data': bar.replace(" ", "&nbsp;")}, broadcast=True, room="UI_1")
|
||||
emit('from_server', {'cmd': 'model_load_status', 'data': bar.replace(" ", "&nbsp;")}, broadcast=True)
|
||||
except:
|
||||
pass
|
||||
|
||||
|