diff --git a/.gitattributes b/.gitattributes index d234b5e0..661ee711 100644 --- a/.gitattributes +++ b/.gitattributes @@ -1,2 +1,3 @@ *.min.lua linguist-vendored *documentation.html linguist-vendored +/static/swagger-ui/* linguist-vendored diff --git a/.gitignore b/.gitignore index 5b024bd8..90669874 100644 --- a/.gitignore +++ b/.gitignore @@ -15,6 +15,7 @@ bin __pycache__ *.log cache +accelerate-disk-cache userscripts !userscripts/examples !userscripts/kaipreset_*.lua @@ -24,6 +25,8 @@ softprompts models !models/models go here.txt Uninstall +flask_session +accelerate-disk-cache .ipynb_checkpoints # Ignore PyCharm project files. diff --git a/aiserver.py b/aiserver.py index 2742af83..38b8dc30 100644 --- a/aiserver.py +++ b/aiserver.py @@ -1,13 +1,13 @@ #!/usr/bin/python3 #==================================================================# # KoboldAI -# Version: 1.18.1 -# By: KoboldAIDev and the KoboldAI Community +# Version: 1.19.0 +# By: The KoboldAI Community #==================================================================# # External packages import eventlet -eventlet.monkey_patch(all=True, thread=False) +eventlet.monkey_patch(all=True, thread=False, os=False) import os os.system("") __file__ = os.path.dirname(os.path.realpath(__file__)) @@ -17,6 +17,8 @@ os.environ['TOKENIZERS_PARALLELISM'] = 'false' from eventlet import tpool import logging +from logger import logger, set_logger_verbosity, quiesce_logger + logging.getLogger("urllib3").setLevel(logging.ERROR) from os import path, getcwd @@ -35,8 +37,11 @@ import bleach import itertools import bisect import functools +import traceback +import inspect +import warnings from collections.abc import Iterable -from typing import Any, Callable, TypeVar, Tuple, Union, Dict, Set, List +from typing import Any, Callable, TypeVar, Tuple, Union, Dict, Set, List, Optional, Type import requests import html @@ -45,6 +50,7 @@ import sys import gc import lupa +import importlib # KoboldAI import fileops @@ -52,11 +58,23 @@ import gensettings from utils import debounce import utils import structures +import torch +from transformers import StoppingCriteria, GPT2Tokenizer, GPT2LMHeadModel, GPTNeoForCausalLM, GPTNeoModel, AutoModelForCausalLM, AutoTokenizer, PreTrainedModel, modeling_utils +from transformers import __version__ as transformers_version +import transformers +try: + from transformers.models.opt.modeling_opt import OPTDecoder +except: + pass +import transformers.generation_utils + +global tpu_mtj_backend if lupa.LUA_VERSION[:2] != (5, 4): - print(f"Please install lupa==1.10. You have lupa {lupa.__version__}.", file=sys.stderr) + logger.error(f"Please install lupa==1.10. 
You have lupa {lupa.__version__}.") +patch_causallm_patched = False # Make sure tqdm progress bars display properly in Colab from tqdm.auto import tqdm @@ -94,144 +112,157 @@ class colors: END = '\033[0m' UNDERLINE = '\033[4m' -# AI models -mainmenu = [ - ["Load a model from its directory", "NeoCustom", ""], - ["Load an old GPT-2 model (eg CloverEdition)", "GPT2Custom", ""], - ["Adventure Models", "adventurelist", ""], - ["Novel Models", "novellist", ""], - ["NSFW Models", "nsfwlist", ""], - ["Untuned OPT", "optlist", ""], - ["Untuned GPT-Neo/J", "gptneolist", ""], - ["Untuned Fairseq Dense", "fsdlist", ""], - ["Untuned Bloom", "bloomlist", ""], - ["Untuned XGLM", "xglmlist", ""], - ["Untuned GPT2", "gpt2list", ""], - ["Online Services", "apilist", ""], - ["Read Only (No AI)", "ReadOnly", ""] +# AI models Menu +# This is a dict of lists where they key is the menu name, and the list is the menu items. +# Each item takes the 4 elements, 1: Text to display, 2: Model Name (var.model) or menu name (Key name for another menu), +# 3: the memory requirement for the model, 4: if the item is a menu or not (True/False) +model_menu = { + 'mainmenu': [ + ["Load a model from its directory", "NeoCustom", "", False], + ["Load an old GPT-2 model (eg CloverEdition)", "GPT2Custom", "", False], + ["Adventure Models", "adventurelist", "", True], + ["Novel Models", "novellist", "", True], + ["NSFW Models", "nsfwlist", "", True], + ["Untuned OPT", "optlist", "", True], + ["Untuned GPT-Neo/J", "gptneolist", "", True], + ["Untuned Fairseq Dense", "fsdlist", "", True], + ["Untuned Bloom", "bloomlist", "", True], + ["Untuned XGLM", "xglmlist", "", True], + ["Untuned GPT2", "gpt2list", "", True], + ["Online Services", "apilist", "", True], + ["Read Only (No AI)", "ReadOnly", "", False] + ], + 'adventurelist': [ + ["Skein 20B", "KoboldAI/GPT-NeoX-20B-Skein", "64GB", False], + ["Nerys OPT 13B V2 (Hybrid)", "KoboldAI/OPT-13B-Nerys-v2", "32GB", False], + ["Nerys FSD 13B V2 (Hybrid)", "KoboldAI/fairseq-dense-13B-Nerys-v2", "32GB", False], + ["Nerys FSD 13B (Hybrid)", "KoboldAI/fairseq-dense-13B-Nerys", "32GB", False], + ["Skein 6B", "KoboldAI/GPT-J-6B-Skein", "16GB", False], + ["OPT Nerys 6B V2 (Hybrid)", "KoboldAI/OPT-6B-nerys-v2", "16GB", False], + ["Adventure 6B", "KoboldAI/GPT-J-6B-Adventure", "16GB", False], + ["Nerys FSD 2.7B (Hybrid)", "KoboldAI/fairseq-dense-2.7B-Nerys", "8GB", False], + ["Adventure 2.7B", "KoboldAI/GPT-Neo-2.7B-AID", "8GB", False], + ["Adventure 1.3B", "KoboldAI/GPT-Neo-1.3B-Adventure", "6GB", False], + ["Adventure 125M (Mia)", "Merry/AID-Neo-125M", "2GB", False], + ["Return to Main Menu", "mainmenu", "", True], + ], + 'novellist': [ + ["Nerys OPT 13B V2 (Hybrid)", "KoboldAI/OPT-13B-Nerys-v2", "32GB", False], + ["Nerys FSD 13B V2 (Hybrid)", "KoboldAI/fairseq-dense-13B-Nerys-v2", "32GB", False], + ["Janeway FSD 13B", "KoboldAI/fairseq-dense-13B-Janeway", "32GB", False], + ["Nerys FSD 13B (Hybrid)", "KoboldAI/fairseq-dense-13B-Nerys", "32GB", False], + ["OPT Nerys 6B V2 (Hybrid)", "KoboldAI/OPT-6B-nerys-v2", "16GB", False], + ["Janeway FSD 6.7B", "KoboldAI/fairseq-dense-6.7B-Janeway", "16GB", False], + ["Janeway Neo 6B", "KoboldAI/GPT-J-6B-Janeway", "16GB", False], + ["Janeway Neo 2.7B", "KoboldAI/GPT-Neo-2.7B-Janeway", "8GB", False], + ["Janeway FSD 2.7B", "KoboldAI/fairseq-dense-2.7B-Janeway", "8GB", False], + ["Nerys FSD 2.7B (Hybrid)", "KoboldAI/fairseq-dense-2.7B-Nerys", "8GB", False], + ["Horni-LN 2.7B", "KoboldAI/GPT-Neo-2.7B-Horni-LN", "8GB", False], + ["Picard 2.7B (Older Janeway)", 
"KoboldAI/GPT-Neo-2.7B-Picard", "8GB", False], + ["Return to Main Menu", "mainmenu", "", True], + ], + 'nsfwlist': [ + ["Erebus 20B (NSFW)", "KoboldAI/GPT-NeoX-20B-Erebus", "64GB", False], + ["Erebus 13B (NSFW)", "KoboldAI/OPT-13B-Erebus", "32GB", False], + ["Shinen FSD 13B (NSFW)", "KoboldAI/fairseq-dense-13B-Shinen", "32GB", False], + ["Erebus 6.7B (NSFW)", "KoboldAI/OPT-6.7B-Erebus", "16GB", False], + ["Shinen FSD 6.7B (NSFW)", "KoboldAI/fairseq-dense-6.7B-Shinen", "16GB", False], + ["Lit V2 6B (NSFW)", "hakurei/litv2-6B-rev3", "16GB", False], + ["Lit 6B (NSFW)", "hakurei/lit-6B", "16GB", False], + ["Shinen 6B (NSFW)", "KoboldAI/GPT-J-6B-Shinen", "16GB", False], + ["Erebus 2.7B (NSFW)", "KoboldAI/OPT-2.7B-Erebus", "8GB", False], + ["Horni 2.7B (NSFW)", "KoboldAI/GPT-Neo-2.7B-Horni", "8GB", False], + ["Shinen 2.7B (NSFW)", "KoboldAI/GPT-Neo-2.7B-Shinen", "8GB", False], + ["Return to Main Menu", "mainmenu", "", True], + ], + 'chatlist': [ + ["Convo 6B (Chatbot)", "hitomi-team/convo-6B", "16GB", False], + ["C1 6B (Chatbot)", "hakurei/c1-6B", "16GB", False], + ["C1 1.3B (Chatbot)", "iokru/c1-1.3B", "6GB", False], + ["Return to Main Menu", "mainmenu", "", True], + ], + 'gptneolist': [ + ["GPT-NeoX 20B", "EleutherAI/gpt-neox-20b", "64GB", False], + ["GPT-J 6B", "EleutherAI/gpt-j-6B", "16GB", False], + ["GPT-Neo 2.7B", "EleutherAI/gpt-neo-2.7B", "8GB", False], + ["GPT-Neo 1.3B", "EleutherAI/gpt-neo-1.3B", "6GB", False], + ["GPT-Neo 125M", "EleutherAI/gpt-neo-125M", "2GB", False], + ["Return to Main Menu", "mainmenu", "", True], + ], + 'gpt2list': [ + ["GPT-2 XL", "gpt2-xl", "6GB", False], + ["GPT-2 Large", "gpt2-large", "4GB", False], + ["GPT-2 Med", "gpt2-medium", "2GB", False], + ["GPT-2", "gpt2", "2GB", False], + ["Return to Main Menu", "mainmenu", "", True], + ], + 'bloomlist': [ + ["Bloom 176B", "bigscience/bloom", "", False], + ["Bloom 7.1B", "bigscience/bloom-7b1", "", False], + ["Bloom 3B", "bigscience/bloom-3b", "", False], + ["Bloom 1.7B", "bigscience/bloom-1b7", "", False], + ["Bloom 560M", "bigscience/bloom-560m", "", False], + ["Return to Main Menu", "mainmenu", "", True], + ], + 'optlist': [ + ["OPT 66B", "facebook/opt-66b", "128GB", False], + ["OPT 30B", "facebook/opt-30b", "64GB", False], + ["OPT 13B", "facebook/opt-13b", "32GB", False], + ["OPT 6.7B", "facebook/opt-6.7b", "16GB", False], + ["OPT 2.7B", "facebook/opt-2.7b", "8GB", False], + ["OPT 1.3B", "facebook/opt-1.3b", "4GB", False], + ["OPT 350M", "facebook/opt-350m", "2GB", False], + ["OPT 125M", "facebook/opt-125m", "1GB", False], + ["Return to Main Menu", "mainmenu", "", True], + ], + 'fsdlist': [ + ["Fairseq Dense 13B", "KoboldAI/fairseq-dense-13B", "32GB", False], + ["Fairseq Dense 6.7B", "KoboldAI/fairseq-dense-6.7B", "16GB", False], + ["Fairseq Dense 2.7B", "KoboldAI/fairseq-dense-2.7B", "8GB", False], + ["Fairseq Dense 1.3B", "KoboldAI/fairseq-dense-1.3B", "4GB", False], + ["Fairseq Dense 355M", "KoboldAI/fairseq-dense-355M", "2GB", False], + ["Fairseq Dense 125M", "KoboldAI/fairseq-dense-125M", "1GB", False], + ["Return to Main Menu", "mainmenu", "", True], + ], + 'xglmlist': [ + ["XGLM 4.5B (Larger Dataset)", "facebook/xglm-4.5B", "12GB", False], + ["XGLM 7.5B", "facebook/xglm-7.5B", "18GB", False], + ["XGLM 2.9B", "facebook/xglm-2.9B", "10GB", False], + ["XGLM 1.7B", "facebook/xglm-1.7B", "6GB", False], + ["XGLM 564M", "facebook/xglm-564M", "4GB", False], + ["Return to Main Menu", "mainmenu", "", True], + ], + 'apilist': [ + ["GooseAI API (requires API key)", "GooseAI", "", False], + ["OpenAI API (requires API 
key)", "OAI", "", False], + ["InferKit API (requires API key)", "InferKit", "", False], + # ["KoboldAI Server API (Old Google Colab)", "Colab", "", False], + ["KoboldAI API", "API", "", False], + ["KoboldAI Horde", "CLUSTER", "", False], + ["Return to Main Menu", "mainmenu", "", True], ] + } -adventurelist= [ - ["Nerys OPT 13B V2 (Hybrid)", "KoboldAI/OPT-13B-Nerys-v2", "32GB"], - ["Nerys FSD 13B V2 (Hybrid)", "KoboldAI/fairseq-dense-13B-Nerys-v2", "32GB"], - ["Nerys FSD 13B (Hybrid)", "KoboldAI/fairseq-dense-13B-Nerys", "32GB"], - ["Skein 6B", "KoboldAI/GPT-J-6B-Skein", "16GB"], - ["OPT Nerys 6B V2", "KoboldAI/OPT-6B-nerys-v2", "16GB"], - ["Adventure 6B", "KoboldAI/GPT-J-6B-Adventure", "16GB"], - ["Nerys FSD 2.7B (Hybrid)", "KoboldAI/fairseq-dense-2.7B-Nerys", "8GB"], - ["Adventure 2.7B", "KoboldAI/GPT-Neo-2.7B-AID", "8GB"], - ["Adventure 1.3B", "KoboldAI/GPT-Neo-1.3B-Adventure", "6GB"], - ["Adventure 125M (Mia)", "Merry/AID-Neo-125M", "2GB"], - ["Return to Main Menu", "Return", ""], -] +class TokenStreamQueue: + def __init__(self): + self.probability_buffer = None + self.queue = [] -novellist= [ - ["Nerys OPT 13B V2 (Hybrid)", "KoboldAI/OPT-13B-Nerys-v2", "32GB"], - ["Nerys FSD 13B V2 (Hybrid)", "KoboldAI/fairseq-dense-13B-Nerys-v2", "32GB"], - ["Janeway FSD 13B", "KoboldAI/fairseq-dense-13B-Janeway", "32GB"], - ["Nerys FSD 13B (Hybrid)", "KoboldAI/fairseq-dense-13B-Nerys", "32GB"], - ["OPT Nerys 6B V2", "KoboldAI/OPT-6B-nerys-v2", "16GB"], - ["Janeway FSD 6.7B", "KoboldAI/fairseq-dense-6.7B-Janeway", "16GB"], - ["Janeway Neo 6B", "KoboldAI/GPT-J-6B-Janeway", "16GB"], - ["Janeway Neo 2.7B", "KoboldAI/GPT-Neo-2.7B-Janeway", "8GB"], - ["Janeway FSD 2.7B", "KoboldAI/fairseq-dense-2.7B-Janeway", "8GB"], - ["Nerys FSD 2.7B (Hybrid)", "KoboldAI/fairseq-dense-2.7B-Nerys", "8GB"], - ["Horni-LN 2.7B", "KoboldAI/GPT-Neo-2.7B-Horni-LN", "8GB"], - ["Picard 2.7B (Older Janeway)", "KoboldAI/GPT-Neo-2.7B-Picard", "8GB"], - ["Return to Main Menu", "Return", ""], -] + def add_text(self, text): + self.queue.append({ + "decoded": text, + "probabilities": self.probability_buffer + }) + self.probability_buffer = None -nsfwlist= [ - ["Erebus 20B (NSFW)", "KoboldAI/GPT-NeoX-20B-Erebus", "64GB"], - ["Erebus 13B (NSFW)", "KoboldAI/OPT-13B-Erebus", "32GB"], - ["Shinen FSD 13B (NSFW)", "KoboldAI/fairseq-dense-13B-Shinen", "32GB"], - ["Erebus 6.7B (NSFW)", "KoboldAI/OPT-6.7B-Erebus", "16GB"], - ["Shinen FSD 6.7B (NSFW)", "KoboldAI/fairseq-dense-6.7B-Shinen", "16GB"], - ["Lit V2 6B (NSFW)", "hakurei/litv2-6B-rev3", "16GB"], - ["Lit 6B (NSFW)", "hakurei/lit-6B", "16GB"], - ["Shinen 6B (NSFW)", "KoboldAI/GPT-J-6B-Shinen", "16GB"], - ["Erebus 2.7B (NSFW)", "KoboldAI/OPT-2.7B-Erebus", "8GB"], - ["Horni 2.7B (NSFW)", "KoboldAI/GPT-Neo-2.7B-Horni", "8GB"], - ["Shinen 2.7B (NSFW)", "KoboldAI/GPT-Neo-2.7B-Shinen", "8GB"], - ["Return to Main Menu", "Return", ""], -] - -chatlist= [ - ["Convo 6B (Chatbot)", "hitomi-team/convo-6B", "16GB"], - ["C1 6B (Chatbot)", "hakurei/c1-6B", "16GB"], - ["C1 1.3B (Chatbot)", "iokru/c1-1.3B", "6GB"], - ["Return to Main Menu", "Return", ""], -] -gptneolist = [ - ["GPT-NeoX 20B", "EleutherAI/gpt-neox-20b", "64GB"], - ["GPT-J 6B", "EleutherAI/gpt-j-6B", "16GB"], - ["GPT-Neo 2.7B", "EleutherAI/gpt-neo-2.7B", "8GB"], - ["GPT-Neo 1.3B", "EleutherAI/gpt-neo-1.3B", "6GB"], - ["GPT-Neo 125M", "EleutherAI/gpt-neo-125M", "2GB"], - ["Return to Main Menu", "Return", ""], -] - -gpt2list = [ - ["GPT-2 XL", "gpt2-xl", "6GB"], - ["GPT-2 Large", "gpt2-large", "4GB"], - ["GPT-2 Med", "gpt2-medium", "2GB"], - 
["GPT-2", "gpt2", "2GB"], - ["Return to Main Menu", "Return", ""], - ] - -bloomlist = [ - ["Bloom 176B", "bigscience/bloom", ""], - ["Bloom 7.1B", "bigscience/bloom-7b1", ""], - ["Bloom 3B", "bigscience/bloom-3b", ""], - ["Bloom 1.7B", "bigscience/bloom-1b7", ""], - ["Bloom 560M", "bigscience/bloom-560m", ""], - ["Return to Main Menu", "Return", ""], - ] - -optlist = [ - ["OPT 66B", "facebook/opt-66b", "128GB"], - ["OPT 30B", "facebook/opt-30b", "64GB"], - ["OPT 13B", "facebook/opt-13b", "32GB"], - ["OPT 6.7B", "facebook/opt-6.7b", "16GB"], - ["OPT 2.7B", "facebook/opt-2.7b", "8GB"], - ["OPT 1.3B", "facebook/opt-1.3b", "4GB"], - ["OPT 350M", "facebook/opt-350m", "2GB"], - ["OPT 125M", "facebook/opt-125m", "1GB"], - ["Return to Main Menu", "Return", ""], - ] - -fsdlist = [ - ["Fairseq Dense 13B", "KoboldAI/fairseq-dense-13B", "32GB"], - ["Fairseq Dense 6.7B", "KoboldAI/fairseq-dense-6.7B", "16GB"], - ["Fairseq Dense 2.7B", "KoboldAI/fairseq-dense-2.7B", "8GB"], - ["Fairseq Dense 1.3B", "KoboldAI/fairseq-dense-1.3B", "4GB"], - ["Fairseq Dense 355M", "KoboldAI/fairseq-dense-355M", "2GB"], - ["Fairseq Dense 125M", "KoboldAI/fairseq-dense-125M", "1GB"], - ["Return to Main Menu", "Return", ""], - ] - -xglmlist = [ - ["XGLM 4.5B (Larger Dataset)", "facebook/xglm-4.5B", "12GB"], - ["XGLM 7.5B", "facebook/xglm-7.5B", "18GB"], - ["XGLM 2.9B", "facebook/xglm-2.9B", "10GB"], - ["XGLM 1.7B", "facebook/xglm-1.7B", "6GB"], - ["XGLM 564M", "facebook/xglm-564M", "4GB"], - ["Return to Main Menu", "Return", ""], - ] - -apilist = [ - ["GooseAI API (requires API key)", "GooseAI", ""], - ["OpenAI API (requires API key)", "OAI", ""], - ["InferKit API (requires API key)", "InferKit", ""], - ["KoboldAI Server API (Old Google Colab)", "Colab", ""], - ["Return to Main Menu", "Return", ""], -] # Variables class vars: lastact = "" # The last action received from the user submission = "" # Same as above, but after applying input formatting lastctx = "" # The last context submitted to the generator - model = "" # Model ID string chosen at startup + model = "ReadOnly" # Model ID string chosen at startup + online_model = "" # Used when Model ID is an online service, and there is a secondary option for the actual model name + model_selected = "" #selected model in UI model_type = "" # Model Type (Automatically taken from the model config) noai = False # Runs the script without starting up the transformers pipeline aibusy = False # Stops submissions while the AI is working @@ -249,6 +280,9 @@ class vars: tfs = 1.0 # Default generator tfs (tail-free sampling) typical = 1.0 # Default generator typical sampling threshold numseqs = 1 # Number of sequences to ask the generator to create + full_determinism = False # Whether or not full determinism is enabled + seed_specified = False # Whether or not the current RNG seed was specified by the user (in their settings file) + seed = None # The current RNG seed (as an int), or None if unknown gamestarted = False # Whether the game has started (disables UI elements) gamesaved = True # Whether or not current game is saved serverstarted = False # Whether or not the Flask server has started @@ -290,12 +324,13 @@ class vars: last_userscripts = [] # List of previous userscript filenames from the previous time userscripts were send via usstatitems corescript = "default.lua" # Filename of corescript to load # badwords = [] # Array of str/chr values that should be removed from output - badwordsids = [[13460], [6880], [50256], [42496], [4613], [17414], [22039], [16410], [27], [29], [38430], 
[37922], [15913], [24618], [28725], [58], [47175], [36937], [26700], [12878], [16471], [37981], [5218], [29795], [13412], [45160], [3693], [49778], [4211], [20598], [36475], [33409], [44167], [32406], [29847], [29342], [42669], [685], [25787], [7359], [3784], [5320], [33994], [33490], [34516], [43734], [17635], [24293], [9959], [23785], [21737], [28401], [18161], [26358], [32509], [1279], [38155], [18189], [26894], [6927], [14610], [23834], [11037], [14631], [26933], [46904], [22330], [25915], [47934], [38214], [1875], [14692], [41832], [13163], [25970], [29565], [44926], [19841], [37250], [49029], [9609], [44438], [16791], [17816], [30109], [41888], [47527], [42924], [23984], [49074], [33717], [31161], [49082], [30138], [31175], [12240], [14804], [7131], [26076], [33250], [3556], [38381], [36338], [32756], [46581], [17912], [49146]] # Tokenized array of badwords used to prevent AI artifacting + badwordsids = [] + badwordsids_default = [[13460], [6880], [50256], [42496], [4613], [17414], [22039], [16410], [27], [29], [38430], [37922], [15913], [24618], [28725], [58], [47175], [36937], [26700], [12878], [16471], [37981], [5218], [29795], [13412], [45160], [3693], [49778], [4211], [20598], [36475], [33409], [44167], [32406], [29847], [29342], [42669], [685], [25787], [7359], [3784], [5320], [33994], [33490], [34516], [43734], [17635], [24293], [9959], [23785], [21737], [28401], [18161], [26358], [32509], [1279], [38155], [18189], [26894], [6927], [14610], [23834], [11037], [14631], [26933], [46904], [22330], [25915], [47934], [38214], [1875], [14692], [41832], [13163], [25970], [29565], [44926], [19841], [37250], [49029], [9609], [44438], [16791], [17816], [30109], [41888], [47527], [42924], [23984], [49074], [33717], [31161], [49082], [30138], [31175], [12240], [14804], [7131], [26076], [33250], [3556], [38381], [36338], [32756], [46581], [17912], [49146]] # Tokenized array of badwords used to prevent AI artifacting badwordsids_neox = [[0], [1], [44162], [9502], [12520], [31841], [36320], [49824], [34417], [6038], [34494], [24815], [26635], [24345], [3455], [28905], [44270], [17278], [32666], [46880], [7086], [43189], [37322], [17778], [20879], [49821], [3138], [14490], [4681], [21391], [26786], [43134], [9336], [683], [48074], [41256], [19181], [29650], [28532], [36487], [45114], [46275], [16445], [15104], [11337], [1168], [5647], [29], [27482], [44965], [43782], [31011], [42944], [47389], [6334], [17548], [38329], [32044], [35487], [2239], [34761], [7444], [1084], [12399], [18990], [17636], [39083], [1184], [35830], [28365], [16731], [43467], [47744], [1138], [16079], [40116], [45564], [18297], [42368], [5456], [18022], [42696], [34476], [23505], [23741], [39334], [37944], [45382], [38709], [33440], [26077], [43600], [34418], [36033], [6660], [48167], [48471], [15775], [19884], [41533], [1008], [31053], [36692], [46576], [20095], [20629], [31759], [46410], [41000], [13488], [30952], [39258], [16160], [27655], [22367], [42767], [43736], [49694], [13811], [12004], [46768], [6257], [37471], [5264], [44153], [33805], [20977], [21083], [25416], [14277], [31096], [42041], [18331], [33376], [22372], [46294], [28379], [38475], [1656], [5204], [27075], [50001], [16616], [11396], [7748], [48744], [35402], [28120], [41512], [4207], [43144], [14767], [15640], [16595], [41305], [44479], [38958], [18474], [22734], [30522], [46267], [60], [13976], [31830], [48701], [39822], [9014], [21966], [31422], [28052], [34607], [2479], [3851], [32214], [44082], [45507], [3001], [34368], [34758], [13380], [38363], 
[4299], [46802], [30996], [12630], [49236], [7082], [8795], [5218], [44740], [9686], [9983], [45301], [27114], [40125], [1570], [26997], [544], [5290], [49193], [23781], [14193], [40000], [2947], [43781], [9102], [48064], [42274], [18772], [49384], [9884], [45635], [43521], [31258], [32056], [47686], [21760], [13143], [10148], [26119], [44308], [31379], [36399], [23983], [46694], [36134], [8562], [12977], [35117], [28591], [49021], [47093], [28653], [29013], [46468], [8605], [7254], [25896], [5032], [8168], [36893], [38270], [20499], [27501], [34419], [29547], [28571], [36586], [20871], [30537], [26842], [21375], [31148], [27618], [33094], [3291], [31789], [28391], [870], [9793], [41361], [47916], [27468], [43856], [8850], [35237], [15707], [47552], [2730], [41449], [45488], [3073], [49806], [21938], [24430], [22747], [20924], [46145], [20481], [20197], [8239], [28231], [17987], [42804], [47269], [29972], [49884], [21382], [46295], [36676], [34616], [3921], [26991], [27720], [46265], [654], [9855], [40354], [5291], [34904], [44342], [2470], [14598], [880], [19282], [2498], [24237], [21431], [16369], [8994], [44524], [45662], [13663], [37077], [1447], [37786], [30863], [42854], [1019], [20322], [4398], [12159], [44072], [48664], [31547], [18736], [9259], [31], [16354], [21810], [4357], [37982], [5064], [2033], [32871], [47446], [62], [22158], [37387], [8743], [47007], [17981], [11049], [4622], [37916], [36786], [35138], [29925], [14157], [18095], [27829], [1181], [22226], [5709], [4725], [30189], [37014], [1254], [11380], [42989], [696], [24576], [39487], [30119], [1092], [8088], [2194], [9899], [14412], [21828], [3725], [13544], [5180], [44679], [34398], [3891], [28739], [14219], [37594], [49550], [11326], [6904], [17266], [5749], [10174], [23405], [9955], [38271], [41018], [13011], [48392], [36784], [24254], [21687], [23734], [5413], [41447], [45472], [10122], [17555], [15830], [47384], [12084], [31350], [47940], [11661], [27988], [45443], [905], [49651], [16614], [34993], [6781], [30803], [35869], [8001], [41604], [28118], [46462], [46762], [16262], [17281], [5774], [10943], [5013], [18257], [6750], [4713], [3951], [11899], [38791], [16943], [37596], [9318], [18413], [40473], [13208], [16375]] badwordsids_opt = [[44717], [46613], [48513], [49923], [50185], [48755], [8488], [43303], [49659], [48601], [49817], [45405], [48742], [49925], [47720], [11227], [48937], [48784], [50017], [42248], [49310], [48082], [49895], [50025], [49092], [49007], [8061], [44226], [0], [742], [28578], [15698], [49784], [46679], [39365], [49281], [49609], [48081], [48906], [46161], [48554], [49670], [48677], [49721], [49632], [48610], [48462], [47457], [10975], [46077], [28696], [48709], [43839], [49798], [49154], [48203], [49625], [48395], [50155], [47161], [49095], [48833], [49420], [49666], [48443], [22176], [49242], [48651], [49138], [49750], [40389], [48021], [21838], [49070], [45333], [40862], [1], [49915], [33525], [49858], [50254], [44403], [48992], [48872], [46117], [49853], [47567], [50206], [41552], [50068], [48999], [49703], [49940], [49329], [47620], [49868], [49962], [2], [44082], [50236], [31274], [50260], [47052], [42645], [49177], [17523], [48691], [49900], [49069], [49358], [48794], [47529], [46479], [48457], [646], [49910], [48077], [48935], [46386], [48902], [49151], [48759], [49803], [45587], [48392], [47789], [48654], [49836], [49230], [48188], [50264], [46844], [44690], [48505], [50161], [27779], [49995], [41833], [50154], [49097], [48520], [50018], [8174], [50084], [49366], [49526], 
[50193], [7479], [49982], [3]] fp32_model = False # Whether or not the most recently loaded HF model was in fp32 format deletewi = None # Temporary storage for UID to delete - wirmvwhtsp = False # Whether to remove leading whitespace from WI entries + wirmvwhtsp = True # Whether to remove leading whitespace from WI entries widepth = 3 # How many historical actions to scan for WI hits mode = "play" # Whether the interface is in play, memory, or edit mode editln = 0 # Which line was last selected in Edit Mode @@ -306,11 +341,12 @@ class vars: colaburl = "" # Ngrok url for Google Colab mode apikey = "" # API key to use for InferKit API calls oaiapikey = "" # API key to use for OpenAI API calls - savedir = getcwd()+"\stories" + cluster_requested_models = [] # The models which we allow to generate during cluster mode + savedir = getcwd()+"\\stories" hascuda = False # Whether torch has detected CUDA on the system usegpu = False # Whether to launch pipeline with GPU support custmodpth = "" # Filesystem location of custom model to run - formatoptns = {'frmttriminc': True, 'frmtrmblln': False, 'frmtrmspch': False, 'frmtadsnsp': False, 'singleline': False} # Container for state of formatting options + formatoptns = {'frmttriminc': True, 'frmtrmblln': False, 'frmtrmspch': False, 'frmtadsnsp': True, 'singleline': False} # Container for state of formatting options importnum = -1 # Selection on import popup list importjs = {} # Temporary storage for import data loadselect = "" # Temporary storage for story filename to load @@ -351,17 +387,311 @@ class vars: rngpersist = False nogenmod = False welcome = False # Custom Welcome Text (False is default) - newlinemode = "n" + newlinemode = "ns" quiet = False # If set will suppress any story text from being printed to the console (will only be seen on the client web page) debug = False # If set to true, will send debug information to the client for display lazy_load = True # Whether or not to use torch_lazy_loader.py for transformers models in order to reduce CPU memory usage use_colab_tpu = os.environ.get("COLAB_TPU_ADDR", "") != "" or os.environ.get("TPU_NAME", "") != "" # Whether or not we're in a Colab TPU instance or Kaggle TPU instance and are going to use the TPU rather than the CPU + revision = None + standalone = False + api_tokenizer_id = None + disable_set_aibusy = False + disable_input_formatting = False + disable_output_formatting = False + output_streaming = True + token_stream_queue = TokenStreamQueue() # Queue for the token streaming + show_probs = False # Whether or not to show token probabilities + show_budget = False # Whether or not to show token probabilities + configname = None utils.vars = vars +class Send_to_socketio(object): + def write(self, bar): + print(bar, end="") + time.sleep(0.01) + try: + gui_msg = bar.replace(f"{colors.PURPLE}INIT{colors.END} | ","").replace(" ", " ") + emit('from_server', {'cmd': 'model_load_status', 'data': gui_msg}, broadcast=True) + except: + pass + +# Set logging level to reduce chatter from Flask +import logging +log = logging.getLogger('werkzeug') +log.setLevel(logging.ERROR) + +from flask import Flask, render_template, Response, request, copy_current_request_context, send_from_directory, session, jsonify, abort, redirect +from flask_socketio import SocketIO +from flask_socketio import emit as _emit +from flask_session import Session +from werkzeug.exceptions import HTTPException, NotFound, InternalServerError +import secrets +app = Flask(__name__, root_path=os.getcwd()) +app.secret_key = 
secrets.token_hex() +app.config['SESSION_TYPE'] = 'filesystem' +app.config['TEMPLATES_AUTO_RELOAD'] = True +socketio = SocketIO(app, async_method="eventlet") + +old_socketio_on = socketio.on +def new_socketio_on(*a, **k): + decorator = old_socketio_on(*a, **k) + def new_decorator(f): + @functools.wraps(f) + def g(*a, **k): + if args.no_ui: + return + return f(*a, **k) + return decorator(g) + return new_decorator +socketio.on = new_socketio_on + +def emit(*args, **kwargs): + try: + return _emit(*args, **kwargs) + except AttributeError: + return socketio.emit(*args, **kwargs) + +# marshmallow/apispec setup +from apispec import APISpec +from apispec.ext.marshmallow import MarshmallowPlugin +from apispec.ext.marshmallow.field_converter import make_min_max_attributes +from apispec_webframeworks.flask import FlaskPlugin +from marshmallow import Schema, fields, validate, EXCLUDE +from marshmallow.exceptions import ValidationError + +class KoboldSchema(Schema): + pass + +def new_make_min_max_attributes(validators, min_attr, max_attr) -> dict: + # Patched apispec function that creates "exclusiveMinimum"/"exclusiveMaximum" OpenAPI attributes insteaed of "minimum"/"maximum" when using validators.Range or validators.Length with min_inclusive=False or max_inclusive=False + attributes = {} + min_list = [validator.min for validator in validators if validator.min is not None] + max_list = [validator.max for validator in validators if validator.max is not None] + min_inclusive_list = [getattr(validator, "min_inclusive", True) for validator in validators if validator.min is not None] + max_inclusive_list = [getattr(validator, "max_inclusive", True) for validator in validators if validator.max is not None] + if min_list: + if min_attr == "minimum" and not min_inclusive_list[max(range(len(min_list)), key=min_list.__getitem__)]: + min_attr = "exclusiveMinimum" + attributes[min_attr] = max(min_list) + if max_list: + if min_attr == "maximum" and not max_inclusive_list[min(range(len(max_list)), key=max_list.__getitem__)]: + min_attr = "exclusiveMaximum" + attributes[max_attr] = min(max_list) + return attributes +make_min_max_attributes.__code__ = new_make_min_max_attributes.__code__ + +def api_format_docstring(f): + f.__doc__ = eval('f"""{}"""'.format(f.__doc__.replace("\\", "\\\\"))) + return f + +def api_catch_out_of_memory_errors(f): + @functools.wraps(f) + def decorated(*args, **kwargs): + try: + return f(*args, **kwargs) + except Exception as e: + if any (s in traceback.format_exc().lower() for s in ("out of memory", "not enough memory")): + for line in reversed(traceback.format_exc().split("\n")): + if any(s in line.lower() for s in ("out of memory", "not enough memory")) and line.count(":"): + line = line.split(":", 1)[1] + line = re.sub(r"\[.+?\] +data\.", "", line).strip() + raise KoboldOutOfMemoryError("KoboldAI ran out of memory: " + line, type="out_of_memory.gpu.cuda" if "cuda out of memory" in line.lower() else "out_of_memory.gpu.hip" if "hip out of memory" in line.lower() else "out_of_memory.tpu.hbm" if "memory space hbm" in line.lower() else "out_of_memory.cpu.default_memory_allocator" if "defaultmemoryallocator" in line.lower() else "out_of_memory.unknown.unknown") + raise KoboldOutOfMemoryError(type="out_of_memory.unknown.unknown") + raise e + return decorated + +def api_schema_wrap(f): + try: + input_schema: Type[Schema] = next(iter(inspect.signature(f).parameters.values())).annotation + except: + HAS_SCHEMA = False + else: + HAS_SCHEMA = inspect.isclass(input_schema) and 
issubclass(input_schema, Schema) + f = api_format_docstring(f) + f = api_catch_out_of_memory_errors(f) + @functools.wraps(f) + def decorated(*args, **kwargs): + if HAS_SCHEMA: + body = request.get_json() + schema = input_schema.from_dict(input_schema().load(body)) + response = f(schema, *args, **kwargs) + else: + response = f(*args, **kwargs) + if not isinstance(response, Response): + response = jsonify(response) + return response + return decorated + +@app.errorhandler(HTTPException) +def handler(e): + if request.path != "/api" and not request.path.startswith("/api/"): + return e + resp = jsonify(detail={"msg": str(e), "type": "generic.error_" + str(e.code)}) + if e.code == 405 and e.valid_methods is not None: + resp.headers["Allow"] = ", ".join(e.valid_methods) + return resp, e.code + +class KoboldOutOfMemoryError(HTTPException): + code = 507 + description = "KoboldAI ran out of memory." + type = "out_of_memory.unknown.unknown" + def __init__(self, *args, type=None, **kwargs): + super().__init__(*args, **kwargs) + if type is not None: + self.type = type +@app.errorhandler(KoboldOutOfMemoryError) +def handler(e): + if request.path != "/api" and not request.path.startswith("/api/"): + return InternalServerError() + return jsonify(detail={"type": e.type, "msg": e.description}), e.code + +@app.errorhandler(ValidationError) +def handler(e): + if request.path != "/api" and not request.path.startswith("/api/"): + return InternalServerError() + return jsonify(detail=e.messages), 422 + +@app.errorhandler(NotImplementedError) +def handler(e): + if request.path != "/api" and not request.path.startswith("/api/"): + return InternalServerError() + return jsonify(detail={"type": "not_implemented", "msg": str(e).strip()}), 501 + +api_versions: List[str] = [] + +class KoboldAPISpec(APISpec): + class KoboldFlaskPlugin(FlaskPlugin): + def __init__(self, api: "KoboldAPISpec", *args, **kwargs): + self._kobold_api_spec = api + super().__init__(*args, **kwargs) + + def path_helper(self, *args, **kwargs): + return super().path_helper(*args, **kwargs)[len(self._kobold_api_spec._prefixes[0]):] + + def __init__(self, *args, title: str = "KoboldAI API", openapi_version: str = "3.0.3", version: str = "1.0.0", prefixes: List[str] = None, **kwargs): + plugins = [KoboldAPISpec.KoboldFlaskPlugin(self), MarshmallowPlugin()] + self._prefixes = prefixes if prefixes is not None else [""] + self._kobold_api_spec_version = version + api_versions.append(version) + api_versions.sort(key=lambda x: [int(e) for e in x.split(".")]) + super().__init__(*args, title=title, openapi_version=openapi_version, version=version, plugins=plugins, servers=[{"url": self._prefixes[0]}], **kwargs) + for prefix in self._prefixes: + app.route(prefix, endpoint="~KoboldAPISpec~" + prefix)(lambda: redirect(request.path + "/docs/")) + app.route(prefix + "/", endpoint="~KoboldAPISpec~" + prefix + "/")(lambda: redirect("docs/")) + app.route(prefix + "/docs", endpoint="~KoboldAPISpec~" + prefix + "/docs")(lambda: redirect("docs/")) + app.route(prefix + "/docs/", endpoint="~KoboldAPISpec~" + prefix + "/docs/")(lambda: render_template("swagger-ui.html", url=self._prefixes[0] + "/openapi.json")) + app.route(prefix + "/openapi.json", endpoint="~KoboldAPISpec~" + prefix + "/openapi.json")(lambda: jsonify(self.to_dict())) + + def route(self, rule: str, methods=["GET"], **kwargs): + __F = TypeVar("__F", bound=Callable[..., Any]) + if "strict_slashes" not in kwargs: + kwargs["strict_slashes"] = False + def new_decorator(f: __F) -> __F: + @functools.wraps(f) + def 
g(*args, **kwargs): + global api_version + api_version = self._kobold_api_spec_version + try: + return f(*args, **kwargs) + finally: + api_version = None + for prefix in self._prefixes: + g = app.route(prefix + rule, methods=methods, **kwargs)(g) + with app.test_request_context(): + self.path(view=g, **kwargs) + return g + return new_decorator + + def get(self, rule: str, **kwargs): + return self.route(rule, methods=["GET"], **kwargs) + + def post(self, rule: str, **kwargs): + return self.route(rule, methods=["POST"], **kwargs) + + def put(self, rule: str, **kwargs): + return self.route(rule, methods=["PUT"], **kwargs) + + def patch(self, rule: str, **kwargs): + return self.route(rule, methods=["PATCH"], **kwargs) + + def delete(self, rule: str, **kwargs): + return self.route(rule, methods=["DELETE"], **kwargs) + +tags = [ + {"name": "info", "description": "Metadata about this API"}, + {"name": "generate", "description": "Text generation endpoints"}, + {"name": "model", "description": "Information about the current text generation model"}, + {"name": "story", "description": "Endpoints for managing the story in the KoboldAI GUI"}, + {"name": "world_info", "description": "Endpoints for managing the world info in the KoboldAI GUI"}, + {"name": "config", "description": "Allows you to get/set various setting values"}, +] + +api_version = None # This gets set automatically so don't change this value + +api_v1 = KoboldAPISpec( + version="1.1.4", + prefixes=["/api/v1", "/api/latest"], + tags=tags, +) + +# Returns the expected config filename for the current setup. +# If the model_name is specified, it returns what the settings file would be for that model +def get_config_filename(model_name = None): + if model_name: + return(f"settings/{model_name.replace('/', '_')}.settings") + elif args.configname: + return(f"settings/{args.configname.replace('/', '_')}.settings") + elif vars.configname != '': + return(f"settings/{vars.configname.replace('/', '_')}.settings") + else: + logger.warning(f"Empty configfile name sent back. 
Defaulting to ReadOnly") + return(f"settings/ReadOnly.settings") #==================================================================# # Function to get model selection at startup #==================================================================# +def sendModelSelection(menu="mainmenu", folder="./models"): + #If we send one of the manual load options, send back the list of model directories, otherwise send the menu + if menu in ('NeoCustom', 'GPT2Custom'): + (paths, breadcrumbs) = get_folder_path_info(folder) + if vars.host: + breadcrumbs = [] + menu_list = [[folder, menu, "", False] for folder in paths] + menu_list.append(["Return to Main Menu", "mainmenu", "", True]) + if os.path.abspath("{}/models".format(os.getcwd())) == os.path.abspath(folder): + showdelete=True + else: + showdelete=False + emit('from_server', {'cmd': 'show_model_menu', 'data': menu_list, 'menu': menu, 'breadcrumbs': breadcrumbs, "showdelete": showdelete}, broadcast=True) + else: + emit('from_server', {'cmd': 'show_model_menu', 'data': model_menu[menu], 'menu': menu, 'breadcrumbs': [], "showdelete": False}, broadcast=True) + +def get_folder_path_info(base): + if base == 'This PC': + breadcrumbs = [['This PC', 'This PC']] + paths = [["{}:\\".format(chr(i)), "{}:\\".format(chr(i))] for i in range(65, 91) if os.path.exists("{}:".format(chr(i)))] + else: + path = os.path.abspath(base) + if path[-1] == "\\": + path = path[:-1] + breadcrumbs = [] + for i in range(len(path.replace("/", "\\").split("\\"))): + breadcrumbs.append(["\\".join(path.replace("/", "\\").split("\\")[:i+1]), + path.replace("/", "\\").split("\\")[i]]) + if len(breadcrumbs) == 1: + breadcrumbs = [["{}:\\".format(chr(i)), "{}:\\".format(chr(i))] for i in range(65, 91) if os.path.exists("{}:".format(chr(i)))] + else: + if len([["{}:\\".format(chr(i)), "{}:\\".format(chr(i))] for i in range(65, 91) if os.path.exists("{}:".format(chr(i)))]) > 0: + breadcrumbs.insert(0, ['This PC', 'This PC']) + paths = [] + base_path = os.path.abspath(base) + for item in os.listdir(base_path): + if os.path.isdir(os.path.join(base_path, item)): + paths.append([os.path.join(base_path, item), item]) + # Paths/breadcrumbs is a list of lists, where the first element in the sublist is the full path and the second is the folder name + return (paths, breadcrumbs) + + def getModelSelection(modellist): print(" # Model\t\t\t\t\t\tVRAM\n ========================================================") i = 1 @@ -384,7 +714,7 @@ def getModelSelection(modellist): except Exception as e: if(vars.model == "Return"): getModelSelection(mainmenu) - + # If custom model was selected, get the filesystem location and store it if(vars.model == "NeoCustom" or vars.model == "GPT2Custom"): print("{0}Please choose the folder where pytorch_model.bin is located:{1}\n".format(colors.CYAN, colors.END)) @@ -399,6 +729,9 @@ def getModelSelection(modellist): print("{0}Select an AI model to continue:{1}\n".format(colors.CYAN, colors.END)) getModelSelection(mainmenu) +def check_if_dir_is_model(path): + return os.path.exists(os.path.join(path, 'config.json')) + #==================================================================# # Return all keys in tokenizer dictionary containing char #==================================================================# @@ -413,9 +746,8 @@ def getModelSelection(modellist): # Return Model Name #==================================================================# def getmodelname(): - if(args.configname): - modelname = args.configname - return modelname + if(vars.online_model != ''): + 
return(f"{vars.model}/{vars.online_model}") if(vars.model in ("NeoCustom", "GPT2Custom", "TPUMeshTransformerGPTJ", "TPUMeshTransformerGPTNeoX")): modelname = os.path.basename(os.path.normpath(vars.custmodpth)) return modelname @@ -441,15 +773,23 @@ def device_list(n_layers, primary=None, selected=None): print(f"{row_color}{colors.YELLOW + '->' + row_color if i == selected else ' '} {'(primary)' if i == primary else ' '*9} {i:3} {sep_color}|{row_color} {gpu_blocks[i]:3} {sep_color}|{row_color} {name}{colors.END}") row_color = colors.END sep_color = colors.YELLOW + if(utils.HAS_ACCELERATE): + print(f"{row_color}{colors.YELLOW + '->' + row_color if -1 == selected else ' '} {' '*9} N/A {sep_color}|{row_color} {breakmodel.disk_blocks:3} {sep_color}|{row_color} (Disk cache){colors.END}") print(f"{row_color} {' '*9} N/A {sep_color}|{row_color} {n_layers:3} {sep_color}|{row_color} (CPU){colors.END}") def device_config(config): global breakmodel, generator import breakmodel n_layers = utils.num_layers(config) - if(args.breakmodel_gpulayers is not None): + if args.cpu: + breakmodel.gpu_blocks = [0]*n_layers + return + elif(args.breakmodel_gpulayers is not None or (utils.HAS_ACCELERATE and args.breakmodel_disklayers is not None)): try: - breakmodel.gpu_blocks = list(map(int, args.breakmodel_gpulayers.split(','))) + if(not args.breakmodel_gpulayers): + breakmodel.gpu_blocks = [] + else: + breakmodel.gpu_blocks = list(map(int, args.breakmodel_gpulayers.split(','))) assert len(breakmodel.gpu_blocks) <= torch.cuda.device_count() s = n_layers for i in range(len(breakmodel.gpu_blocks)): @@ -460,15 +800,19 @@ def device_config(config): s -= breakmodel.gpu_blocks[i] assert sum(breakmodel.gpu_blocks) <= n_layers n_layers -= sum(breakmodel.gpu_blocks) + if(args.breakmodel_disklayers is not None): + assert args.breakmodel_disklayers <= n_layers + breakmodel.disk_blocks = args.breakmodel_disklayers + n_layers -= args.breakmodel_disklayers except: - print("WARNING: --breakmodel_gpulayers is malformatted. Please use the --help option to see correct usage of --breakmodel_gpulayers. Defaulting to all layers on device 0.", file=sys.stderr) + logger.warning("--breakmodel_gpulayers is malformatted. Please use the --help option to see correct usage of --breakmodel_gpulayers. 
Defaulting to all layers on device 0.") breakmodel.gpu_blocks = [n_layers] n_layers = 0 elif(args.breakmodel_layers is not None): breakmodel.gpu_blocks = [n_layers - max(0, min(n_layers, args.breakmodel_layers))] n_layers -= sum(breakmodel.gpu_blocks) elif(args.model is not None): - print("Breakmodel not specified, assuming GPU 0") + logger.info("Breakmodel not specified, assuming GPU 0") breakmodel.gpu_blocks = [n_layers] n_layers = 0 else: @@ -512,8 +856,22 @@ def device_config(config): print(f"{colors.RED}Please enter an integer between -1 and {n_layers}.{colors.END}") if(n_layers == 0): break - - print(colors.PURPLE + "\nFinal device configuration:") + + if(utils.HAS_ACCELERATE and n_layers > 0): + device_list(n_layers, primary=breakmodel.primary_device, selected=-1) + print(f"{colors.CYAN}\nHow many of the remaining{colors.YELLOW} {n_layers} {colors.CYAN}layers would you like to put into the disk cache?\nYou can also enter -1 to allocate all remaining layers to this device.{colors.END}\n") + while(True): + layerselect = input("# of layers> ") + if((layerselect.isnumeric() or layerselect.strip() == '-1') and -1 <= int(layerselect) <= n_layers): + layerselect = int(layerselect) + layerselect = n_layers if layerselect == -1 else layerselect + breakmodel.disk_blocks = layerselect + n_layers -= layerselect + break + else: + print(f"{colors.RED}Please enter an integer between -1 and {n_layers}.{colors.END}") + + logger.init_ok("Final device configuration:", status="Info") device_list(n_layers) # If all layers are on the same device, use the old GPU generation mode @@ -526,7 +884,9 @@ def device_config(config): return if(not breakmodel.gpu_blocks): - print("Nothing assigned to a GPU, reverting to CPU only mode") + logger.warning("Nothing assigned to a GPU, reverting to CPU only mode") + import breakmodel + breakmodel.primary_device = "cpu" vars.breakmodel = False vars.usegpu = False return @@ -534,7 +894,7 @@ def device_config(config): def move_model_to_devices(model): global generator - if(not vars.breakmodel): + if(not utils.HAS_ACCELERATE and not vars.breakmodel): if(vars.usegpu): model = model.half().to(vars.gpu_device) else: @@ -542,8 +902,33 @@ def move_model_to_devices(model): generator = model.generate return + import breakmodel + + if(utils.HAS_ACCELERATE): + import accelerate.utils + for key, value in model.state_dict().items(): + target_dtype = torch.float32 if breakmodel.primary_device == "cpu" else torch.float16 + if(value.dtype is not target_dtype): + accelerate.utils.set_module_tensor_to_device(model, key, target_dtype) + disk_blocks = breakmodel.disk_blocks + gpu_blocks = breakmodel.gpu_blocks + ram_blocks = len(utils.layers_module_names) - sum(gpu_blocks) + cumulative_gpu_blocks = tuple(itertools.accumulate(gpu_blocks)) + device_map = {} + for name in utils.layers_module_names: + layer = int(name.rsplit(".", 1)[1]) + device = ("disk" if layer < disk_blocks else "cpu") if layer < ram_blocks else bisect.bisect_right(cumulative_gpu_blocks, layer - ram_blocks) + device_map[name] = device + for name in utils.get_missing_module_names(model, list(device_map.keys())): + device_map[name] = breakmodel.primary_device + breakmodel.dispatch_model_ex(model, device_map, main_device=breakmodel.primary_device, offload_buffers=True, offload_dir="accelerate-disk-cache") + gc.collect() + generator = model.generate + return + model.half() gc.collect() + if(hasattr(model, "transformer")): model.transformer.wte.to(breakmodel.primary_device) model.transformer.ln_f.to(breakmodel.primary_device) @@ 
-595,7 +980,7 @@ def loadmodelsettings(): js = {} if vars.model_type == "xglm" or js.get("compat", "j") == "fairseq_lm": vars.newlinemode = "s" # Default to newline mode if using XGLM - if vars.model_type == "opt": + if vars.model_type == "opt" or vars.model_type == "bloom": vars.newlinemode = "ns" # Handle but don't convert newlines if using Fairseq models that have newlines trained in them vars.modelconfig = js if("badwordsids" in js): @@ -603,7 +988,10 @@ def loadmodelsettings(): if("nobreakmodel" in js): vars.nobreakmodel = js["nobreakmodel"] if("sampler_order" in js): - vars.sampler_order = js["sampler_order"] + sampler_order = vars.sampler_order + if(len(sampler_order) < 7): + sampler_order = [6] + sampler_order + vars.sampler_order = sampler_order if("temp" in js): vars.temp = js["temp"] if("top_p" in js): @@ -671,8 +1059,18 @@ def savesettings(): js["nopromptgen"] = vars.nopromptgen js["rngpersist"] = vars.rngpersist js["nogenmod"] = vars.nogenmod + js["fulldeterminism"] = vars.full_determinism js["autosave"] = vars.autosave js["welcome"] = vars.welcome + js["output_streaming"] = vars.output_streaming + js["show_probs"] = vars.show_probs + js["show_budget"] = vars.show_budget + + if(vars.seed_specified): + js["seed"] = vars.seed + else: + js["seed"] = None + js["newlinemode"] = vars.newlinemode js["antemplate"] = vars.setauthornotetemplate @@ -684,7 +1082,7 @@ def savesettings(): # Write it if not os.path.exists('settings'): os.mkdir('settings') - file = open("settings/" + getmodelname().replace('/', '_') + ".settings", "w") + file = open(get_config_filename(), "w") try: file.write(json.dumps(js, indent=3)) finally: @@ -695,7 +1093,7 @@ def savesettings(): #==================================================================# @debounce(2) def settingschanged(): - print("{0}Saving settings!{1}".format(colors.GREEN, colors.END)) + logger.info("Saving settings.") savesettings() #==================================================================# @@ -710,9 +1108,9 @@ def loadsettings(): processsettings(js) file.close() - if(path.exists("settings/" + getmodelname().replace('/', '_') + ".settings")): + if(path.exists(get_config_filename())): # Read file contents into JSON object - file = open("settings/" + getmodelname().replace('/', '_') + ".settings", "r") + file = open(get_config_filename(), "r") js = json.load(file) processsettings(js) @@ -721,35 +1119,41 @@ def loadsettings(): def processsettings(js): # Copy file contents to vars if("apikey" in js): - vars.apikey = js["apikey"] + # If the model is the HORDE, then previously saved API key in settings + # Will always override a new key set. 
+ if vars.model != "CLUSTER" or vars.apikey == '': + vars.apikey = js["apikey"] if("andepth" in js): - vars.andepth = js["andepth"] + vars.andepth = js["andepth"] if("sampler_order" in js): - vars.sampler_order = js["sampler_order"] + sampler_order = vars.sampler_order + if(len(sampler_order) < 7): + sampler_order = [6] + sampler_order + vars.sampler_order = sampler_order if("temp" in js): - vars.temp = js["temp"] + vars.temp = js["temp"] if("top_p" in js): - vars.top_p = js["top_p"] + vars.top_p = js["top_p"] if("top_k" in js): - vars.top_k = js["top_k"] + vars.top_k = js["top_k"] if("tfs" in js): - vars.tfs = js["tfs"] + vars.tfs = js["tfs"] if("typical" in js): - vars.typical = js["typical"] + vars.typical = js["typical"] if("top_a" in js): - vars.top_a = js["top_a"] + vars.top_a = js["top_a"] if("rep_pen" in js): - vars.rep_pen = js["rep_pen"] + vars.rep_pen = js["rep_pen"] if("rep_pen_slope" in js): vars.rep_pen_slope = js["rep_pen_slope"] if("rep_pen_range" in js): vars.rep_pen_range = js["rep_pen_range"] if("genamt" in js): - vars.genamt = js["genamt"] + vars.genamt = js["genamt"] if("max_length" in js): vars.max_length = js["max_length"] if("ikgen" in js): - vars.ikgen = js["ikgen"] + vars.ikgen = js["ikgen"] if("formatoptns" in js): vars.formatoptns = js["formatoptns"] if("numseqs" in js): @@ -772,12 +1176,29 @@ def processsettings(js): vars.rngpersist = js["rngpersist"] if("nogenmod" in js): vars.nogenmod = js["nogenmod"] + if("fulldeterminism" in js): + vars.full_determinism = js["fulldeterminism"] if("autosave" in js): vars.autosave = js["autosave"] if("newlinemode" in js): vars.newlinemode = js["newlinemode"] if("welcome" in js): vars.welcome = js["welcome"] + if("output_streaming" in js): + vars.output_streaming = js["output_streaming"] + if("show_probs" in js): + vars.show_probs = js["show_probs"] + if("show_budget" in js): + vars.show_budget = js["show_budget"] + + if("seed" in js): + vars.seed = js["seed"] + if(vars.seed is not None): + vars.seed_specified = True + else: + vars.seed_specified = False + else: + vars.seed_specified = False if("antemplate" in js): vars.setauthornotetemplate = js["antemplate"] @@ -804,12 +1225,22 @@ def processsettings(js): def check_for_sp_change(): while(True): - time.sleep(0.1) + time.sleep(0.05) + if(vars.sp_changed): with app.app_context(): emit('from_server', {'cmd': 'spstatitems', 'data': {vars.spfilename: vars.spmeta} if vars.allowsp and len(vars.spfilename) else {}}, namespace=None, broadcast=True) vars.sp_changed = False + if(vars.token_stream_queue.queue): + # If emit blocks, waiting for it to complete before clearing could + # introduce a race condition that drops tokens. 
+ queued_tokens = list(vars.token_stream_queue.queue) + vars.token_stream_queue.queue.clear() + socketio.emit("from_server", {"cmd": "streamtoken", "data": queued_tokens}, namespace=None, broadcast=True) + +socketio.start_background_task(check_for_sp_change) + def spRequest(filename): if(not vars.allowsp): raise RuntimeError("Soft prompts are not supported by your current model/backend") @@ -874,358 +1305,1344 @@ def spRequest(filename): #==================================================================# # Startup #==================================================================# +def general_startup(override_args=None): + global args + # Parsing Parameters + parser = argparse.ArgumentParser(description="KoboldAI Server") + parser.add_argument("--remote", action='store_true', help="Optimizes KoboldAI for Remote Play") + parser.add_argument("--noaimenu", action='store_true', help="Disables the ability to select the AI") + parser.add_argument("--ngrok", action='store_true', help="Optimizes KoboldAI for Remote Play using Ngrok") + parser.add_argument("--localtunnel", action='store_true', help="Optimizes KoboldAI for Remote Play using Localtunnel") + parser.add_argument("--host", action='store_true', help="Optimizes KoboldAI for Remote Play without using a proxy service") + parser.add_argument("--port", type=int, help="Specify the port on which the application will be joinable") + parser.add_argument("--aria2_port", type=int, help="Specify the port on which aria2's RPC interface will be open if aria2 is installed (defaults to 6799)") + parser.add_argument("--model", help="Specify the Model Type to skip the Menu") + parser.add_argument("--path", help="Specify the Path for local models (For model NeoCustom or GPT2Custom)") + parser.add_argument("--apikey", help="Specify the API key to use for online services") + parser.add_argument("--req_model", type=str, action='append', required=False, help="Which models which we allow to generate for us during cluster mode. Can be specified multiple times.") + parser.add_argument("--revision", help="Specify the model revision for huggingface models (can be a git branch/tag name or a git commit hash)") + parser.add_argument("--cpu", action='store_true', help="By default unattended launches are on the GPU use this option to force CPU usage.") + parser.add_argument("--breakmodel", action='store_true', help=argparse.SUPPRESS) + parser.add_argument("--breakmodel_layers", type=int, help=argparse.SUPPRESS) + parser.add_argument("--breakmodel_gpulayers", type=str, help="If using a model that supports hybrid generation, this is a comma-separated list that specifies how many layers to put on each GPU device. For example to put 8 layers on device 0, 9 layers on device 1 and 11 layers on device 2, use --breakmodel_gpulayers 8,9,11") + parser.add_argument("--breakmodel_disklayers", type=int, help="If using a model that supports hybrid generation, this is the number of layers to put in disk cache.") + parser.add_argument("--override_delete", action='store_true', help="Deleting stories from inside the browser is disabled if you are using --remote and enabled otherwise. Using this option will instead allow deleting stories if using --remote and prevent deleting stories otherwise.") + parser.add_argument("--override_rename", action='store_true', help="Renaming stories from inside the browser is disabled if you are using --remote and enabled otherwise. 
Using this option will instead allow renaming stories if using --remote and prevent renaming stories otherwise.") + parser.add_argument("--configname", help="Force a fixed configuration name to aid with config management.") + parser.add_argument("--colab", action='store_true', help="Optimize for Google Colab.") + parser.add_argument("--nobreakmodel", action='store_true', help="Disables Breakmodel support completely.") + parser.add_argument("--unblock", action='store_true', default=False, help="Unblocks the KoboldAI port to be accessible from other machines without optimizing for remote play (It is recommended to use --host instead)") + parser.add_argument("--quiet", action='store_true', default=False, help="If present will suppress any story related text from showing on the console") + parser.add_argument("--no_aria2", action='store_true', default=False, help="Prevents KoboldAI from using aria2 to download huggingface models more efficiently, in case aria2 is causing you issues") + parser.add_argument("--lowmem", action='store_true', help="Extra Low Memory loading for the GPU, slower but memory does not peak to twice the usage") + parser.add_argument("--savemodel", action='store_true', help="Saves the model to the models folder even if --colab is used (Allows you to save models to Google Drive)") + parser.add_argument("--customsettings", help="Preloads arguements from json file. You only need to provide the location of the json file. Use customsettings.json template file. It can be renamed if you wish so that you can store multiple configurations. Leave any settings you want as default as null. Any values you wish to set need to be in double quotation marks") + parser.add_argument("--no_ui", action='store_true', default=False, help="Disables the GUI and Socket.IO server while leaving the API server running.") + parser.add_argument('-v', '--verbosity', action='count', default=0, help="The default logging level is ERROR or higher. This value increases the amount of logging seen in your screen") + parser.add_argument('-q', '--quiesce', action='count', default=0, help="The default logging level is ERROR or higher. 
This value decreases the amount of logging seen in your screen") -# Parsing Parameters -parser = argparse.ArgumentParser(description="KoboldAI Server") -parser.add_argument("--remote", action='store_true', help="Optimizes KoboldAI for Remote Play") -parser.add_argument("--ngrok", action='store_true', help="Optimizes KoboldAI for Remote Play using Ngrok") -parser.add_argument("--localtunnel", action='store_true', help="Optimizes KoboldAI for Remote Play using Localtunnel") -parser.add_argument("--host", action='store_true', help="Optimizes KoboldAI for Remote Play without using a proxy service") -parser.add_argument("--port", type=int, help="Specify the port on which the application will be joinable") -parser.add_argument("--aria2_port", type=int, help="Specify the port on which aria2's RPC interface will be open if aria2 is installed (defaults to 6799)") -parser.add_argument("--model", help="Specify the Model Type to skip the Menu") -parser.add_argument("--path", help="Specify the Path for local models (For model NeoCustom or GPT2Custom)") -parser.add_argument("--revision", help="Specify the model revision for huggingface models (can be a git branch/tag name or a git commit hash)") -parser.add_argument("--cpu", action='store_true', help="By default unattended launches are on the GPU use this option to force CPU usage.") -parser.add_argument("--breakmodel", action='store_true', help=argparse.SUPPRESS) -parser.add_argument("--breakmodel_layers", type=int, help=argparse.SUPPRESS) -parser.add_argument("--breakmodel_gpulayers", type=str, help="If using a model that supports hybrid generation, this is a comma-separated list that specifies how many layers to put on each GPU device. For example to put 8 layers on device 0, 9 layers on device 1 and 11 layers on device 2, use --beakmodel_gpulayers 8,9,11") -parser.add_argument("--override_delete", action='store_true', help="Deleting stories from inside the browser is disabled if you are using --remote and enabled otherwise. Using this option will instead allow deleting stories if using --remote and prevent deleting stories otherwise.") -parser.add_argument("--override_rename", action='store_true', help="Renaming stories from inside the browser is disabled if you are using --remote and enabled otherwise. 
Using this option will instead allow renaming stories if using --remote and prevent renaming stories otherwise.") -parser.add_argument("--configname", help="Force a fixed configuration name to aid with config management.") -parser.add_argument("--colab", action='store_true', help="Optimize for Google Colab.") -parser.add_argument("--nobreakmodel", action='store_true', help="Disables Breakmodel support completely.") -parser.add_argument("--unblock", action='store_true', default=False, help="Unblocks the KoboldAI port to be accessible from other machines without optimizing for remote play (It is recommended to use --host instead)") -parser.add_argument("--quiet", action='store_true', default=False, help="If present will suppress any story related text from showing on the console") -parser.add_argument("--no_aria2", action='store_true', default=False, help="Prevents KoboldAI from using aria2 to download huggingface models more efficiently, in case aria2 is causing you issues") -parser.add_argument("--lowmem", action='store_true', help="Extra Low Memory loading for the GPU, slower but memory does not peak to twice the usage") -parser.add_argument("--savemodel", action='store_true', help="Saves the model to the models folder even if --colab is used (Allows you to save models to Google Drive)") -args: argparse.Namespace = None -if(os.environ.get("KOBOLDAI_ARGS") is not None): - import shlex - args = parser.parse_args(shlex.split(os.environ["KOBOLDAI_ARGS"])) -else: - args = parser.parse_args() - -vars.model = args.model; -vars.revision = args.revision - -if args.colab: - args.remote = True; - args.override_rename = True; - args.override_delete = True; - args.nobreakmodel = True; - args.quiet = True; - args.lowmem = True; - -if args.quiet: - vars.quiet = True - -if args.nobreakmodel: - vars.nobreakmodel = True; - -if args.remote: - vars.host = True; - -if args.ngrok: - vars.host = True; - -if args.localtunnel: - vars.host = True; - -if args.host: - vars.host = True; - -if args.cpu: - vars.use_colab_tpu = False - -vars.smandelete = vars.host == args.override_delete -vars.smanrename = vars.host == args.override_rename - -vars.aria2_port = args.aria2_port or 6799 - -# Select a model to run -if args.model: - print("Welcome to KoboldAI!\nYou have selected the following Model:", vars.model) - if args.path: - print("You have selected the following path for your Model :", args.path) - vars.custmodpth = args.path; - vars.colaburl = args.path + "/request"; # Lets just use the same parameter to keep it simple - -else: - print("{0}Welcome to the KoboldAI Server!\nListed RAM is the optimal VRAM and CPU ram can be up to twice the amount.\nMost models can run at less VRAM with reduced max tokens or less layers on the GPU.\nSelect an AI model to continue:{1}\n".format(colors.CYAN, colors.END)) - getModelSelection(mainmenu) - -# If transformers model was selected & GPU available, ask to use CPU or GPU -if(vars.model not in ["InferKit", "Colab", "OAI", "GooseAI" , "ReadOnly", "TPUMeshTransformerGPTJ", "TPUMeshTransformerGPTNeoX"]): - vars.allowsp = True - # Test for GPU support - import torch - - # Make model path the same as the model name to make this consistent with the other loading method if it isn't a known model type - # This code is not just a workaround for below, it is also used to make the behavior consistent with other loading methods - Henk717 - if(not vars.model in ["NeoCustom", "GPT2Custom"]): - vars.custmodpth = vars.model - elif(vars.model == "NeoCustom"): - vars.model = 
os.path.basename(os.path.normpath(vars.custmodpth)) - - # Get the model_type from the config or assume a model type if it isn't present - from transformers import AutoConfig - if(os.path.isdir(vars.custmodpth.replace('/', '_'))): - try: - model_config = AutoConfig.from_pretrained(vars.custmodpth.replace('/', '_'), revision=vars.revision, cache_dir="cache") - vars.model_type = model_config.model_type - except ValueError as e: - vars.model_type = "not_found" - elif(os.path.isdir("models/{}".format(vars.custmodpth.replace('/', '_')))): - try: - model_config = AutoConfig.from_pretrained("models/{}".format(vars.custmodpth.replace('/', '_')), revision=vars.revision, cache_dir="cache") - vars.model_type = model_config.model_type - except ValueError as e: - vars.model_type = "not_found" + #args: argparse.Namespace = None + if "pytest" in sys.modules and override_args is None: + args = parser.parse_args([]) + return + if override_args is not None: + import shlex + args = parser.parse_args(shlex.split(override_args)) + elif(os.environ.get("KOBOLDAI_ARGS") is not None): + import shlex + args = parser.parse_args(shlex.split(os.environ["KOBOLDAI_ARGS"])) else: - try: - model_config = AutoConfig.from_pretrained(vars.custmodpth, revision=vars.revision, cache_dir="cache") - vars.model_type = model_config.model_type - except ValueError as e: - vars.model_type = "not_found" - if(vars.model_type == "not_found" and vars.model == "NeoCustom"): - vars.model_type = "gpt_neo" - elif(vars.model_type == "not_found" and vars.model == "GPT2Custom"): - vars.model_type = "gpt2" - elif(vars.model_type == "not_found"): - print("WARNING: No model type detected, assuming Neo (If this is a GPT2 model use the other menu option or --model GPT2Custom)") - vars.model_type = "gpt_neo" + args = parser.parse_args() - if(vars.model_type == "opt"): - vars.badwordsids = vars.badwordsids_opt - if(vars.model_type == "gpt_neox"): - vars.badwordsids = vars.badwordsids_neox - -if(not vars.use_colab_tpu and vars.model not in ["InferKit", "Colab", "OAI", "GooseAI" , "ReadOnly", "TPUMeshTransformerGPTJ", "TPUMeshTransformerGPTNeoX"]): - loadmodelsettings() - loadsettings() - print("{0}Looking for GPU support...{1}".format(colors.PURPLE, colors.END), end="") - vars.hascuda = torch.cuda.is_available() - vars.bmsupported = vars.model_type in ("gpt_neo", "gptj", "xglm", "opt") and not vars.nobreakmodel - if(args.breakmodel is not None and args.breakmodel): - print("WARNING: --breakmodel is no longer supported. Breakmodel mode is now automatically enabled when --breakmodel_gpulayers is used (see --help for details).", file=sys.stderr) - if(args.breakmodel_layers is not None): - print("WARNING: --breakmodel_layers is deprecated. Use --breakmodel_gpulayers instead (see --help for details).", file=sys.stderr) - if(args.model and vars.bmsupported and not args.breakmodel_gpulayers and not args.breakmodel_layers): - print("WARNING: Model launched without the --breakmodel_gpulayers argument, defaulting to GPU only mode.", file=sys.stderr) - vars.bmsupported = False - if(not vars.bmsupported and (args.breakmodel_gpulayers is not None or args.breakmodel_layers is not None)): - print("WARNING: This model does not support hybrid generation. 
--breakmodel_gpulayers will be ignored.", file=sys.stderr) - if(vars.hascuda): - print("{0}FOUND!{1}".format(colors.GREEN, colors.END)) - else: - print("{0}NOT FOUND!{1}".format(colors.YELLOW, colors.END)) + set_logger_verbosity(args.verbosity) + quiesce_logger(args.quiesce) + if args.customsettings: + f = open (args.customsettings) + importedsettings = json.load(f) + for items in importedsettings: + if importedsettings[items] is not None: + setattr(args, items, importedsettings[items]) + f.close() - if args.model: - if(vars.hascuda): - genselected = True - vars.usegpu = True - vars.breakmodel = False - if(vars.bmsupported): - vars.usegpu = False - vars.breakmodel = True - if(args.cpu): - vars.usegpu = False - vars.breakmodel = False - elif(vars.hascuda): - if(vars.bmsupported): - genselected = True - vars.usegpu = False - vars.breakmodel = True + if args.no_ui: + def new_emit(*args, **kwargs): + return + old_emit = socketio.emit + socketio.emit = new_emit + + vars.model = args.model; + vars.revision = args.revision + + if args.apikey: + vars.apikey = args.apikey + if args.req_model: + vars.cluster_requested_models = args.req_model + + if args.colab: + args.remote = True; + args.override_rename = True; + args.override_delete = True; + args.nobreakmodel = True; + args.quiet = True; + args.lowmem = True; + args.noaimenu = True; + + if args.quiet: + vars.quiet = True + + if args.nobreakmodel: + vars.nobreakmodel = True; + + if args.remote: + vars.host = True; + + if args.ngrok: + vars.host = True; + + if args.localtunnel: + vars.host = True; + + if args.host: + vars.host = True; + + if args.cpu: + vars.use_colab_tpu = False + + vars.smandelete = vars.host == args.override_delete + vars.smanrename = vars.host == args.override_rename + + vars.aria2_port = args.aria2_port or 6799 + + #Now let's look to see if we are going to force a load of a model from a user selected folder + if(vars.model == "selectfolder"): + print("{0}Please choose the folder where pytorch_model.bin is located:{1}\n".format(colors.CYAN, colors.END)) + modpath = fileops.getdirpath(getcwd() + "/models", "Select Model Folder") + + if(modpath): + # Save directory to vars + vars.model = "NeoCustom" + vars.custmodpth = modpath + elif args.model: + logger.message(f"Welcome to KoboldAI!") + logger.message(f"You have selected the following Model: {vars.model}") + if args.path: + logger.message(f"You have selected the following path for your Model: {args.path}") + vars.custmodpth = args.path; + vars.colaburl = args.path + "/request"; # Lets just use the same parameter to keep it simple +#==================================================================# +# Load Model +#==================================================================# + +def tpumtjgetsofttokens(): + soft_tokens = None + if(vars.sp is None): + global np + if 'np' not in globals(): + import numpy as np + tensor = np.zeros((1, tpu_mtj_backend.params.get("d_embed", tpu_mtj_backend.params["d_model"])), dtype=np.float32) + rows = tensor.shape[0] + padding_amount = tpu_mtj_backend.params["seq"] - (tpu_mtj_backend.params["seq"] % -tpu_mtj_backend.params["cores_per_replica"]) - rows + tensor = np.pad(tensor, ((0, padding_amount), (0, 0))) + tensor = tensor.reshape( + tpu_mtj_backend.params["cores_per_replica"], + -1, + tpu_mtj_backend.params.get("d_embed", tpu_mtj_backend.params["d_model"]), + ) + vars.sp = tpu_mtj_backend.shard_xmap(tensor) + soft_tokens = np.arange( + tpu_mtj_backend.params["n_vocab"] + tpu_mtj_backend.params["n_vocab_padding"], + 
tpu_mtj_backend.params["n_vocab"] + tpu_mtj_backend.params["n_vocab_padding"] + vars.sp_length, + dtype=np.uint32 + ) + return soft_tokens + +def get_model_info(model, directory=""): + # if the model is in the api list + disk_blocks = 0 + key = False + breakmodel = False + gpu = False + layer_count = None + key_value = "" + break_values = [] + url = False + default_url = None + models_on_url = False + multi_online_models = False + gpu_count = torch.cuda.device_count() + gpu_names = [] + send_horde_models = False + for i in range(gpu_count): + gpu_names.append(torch.cuda.get_device_name(i)) + if model in ['Colab', 'API']: + url = True + elif model == 'CLUSTER': + models_on_url = True + url = True + key = True + default_url = 'https://koboldai.net' + multi_online_models = True + if path.exists(get_config_filename(model)): + with open(get_config_filename(model), "r") as file: + # Check if API key exists + js = json.load(file) + if("apikey" in js and js["apikey"] != ""): + # API key exists, grab it and close the file + key_value = js["apikey"] + elif 'oaiapikey' in js and js['oaiapikey'] != "": + key_value = js["oaiapikey"] + if 'url' in js and js['url'] != "": + url = js['url'] + if key_value != "": + send_horde_models = True + elif model in [x[1] for x in model_menu['apilist']]: + if path.exists(get_config_filename(model)): + with open(get_config_filename(model), "r") as file: + # Check if API key exists + js = json.load(file) + if("apikey" in js and js["apikey"] != ""): + # API key exists, grab it and close the file + key_value = js["apikey"] + elif 'oaiapikey' in js and js['oaiapikey'] != "": + key_value = js["oaiapikey"] + key = True + elif model == 'ReadOnly': + pass + elif not utils.HAS_ACCELERATE and not torch.cuda.is_available(): + pass + elif args.cpu: + pass + else: + layer_count = get_layer_count(model, directory=directory) + if layer_count is None: + breakmodel = False + gpu = True else: - print(" 1 - GPU\n 2 - CPU\n") - genselected = False - else: - genselected = False - - if(vars.hascuda): - while(genselected == False): - genselect = input("Mode> ") - if(genselect == ""): - vars.breakmodel = False - vars.usegpu = True - genselected = True - elif(genselect.isnumeric() and int(genselect) == 1): - if(vars.bmsupported): - vars.breakmodel = True - vars.usegpu = False - genselected = True - else: - vars.breakmodel = False - vars.usegpu = True - genselected = True - elif(genselect.isnumeric() and int(genselect) == 2): - vars.breakmodel = False - vars.usegpu = False - genselected = True + breakmodel = True + if model in ["NeoCustom", "GPT2Custom"]: + filename = "settings/{}.breakmodel".format(os.path.basename(os.path.normpath(directory))) else: - print("{0}Please enter a valid selection.{1}".format(colors.RED, colors.END)) - -# Ask for API key if InferKit was selected -if(vars.model == "InferKit"): - if(not path.exists("settings/" + getmodelname().replace('/', '_') + ".settings")): - # If the client settings file doesn't exist, create it - print("{0}Please enter your InferKit API key:{1}\n".format(colors.CYAN, colors.END)) - vars.apikey = input("Key> ") - # Write API key to file - os.makedirs('settings', exist_ok=True) - file = open("settings/" + getmodelname().replace('/', '_') + ".settings", "w") - try: - js = {"apikey": vars.apikey} - file.write(json.dumps(js, indent=3)) - finally: - file.close() - else: - # Otherwise open it up - file = open("settings/" + getmodelname().replace('/', '_') + ".settings", "r") - # Check if API key exists - js = json.load(file) - if("apikey" in js and 
js["apikey"] != ""): - # API key exists, grab it and close the file - vars.apikey = js["apikey"] - file.close() - else: - # Get API key, add it to settings object, and write it to disk - print("{0}Please enter your InferKit API key:{1}\n".format(colors.CYAN, colors.END)) - vars.apikey = input("Key> ") - js["apikey"] = vars.apikey - # Write API key to file - file = open("settings/" + getmodelname().replace('/', '_') + ".settings", "w") - try: - file.write(json.dumps(js, indent=3)) - finally: - file.close() - -# Swap OAI Server if GooseAI was selected -if(vars.model == "GooseAI"): - vars.oaiengines = "https://api.goose.ai/v1/engines" - vars.model = "OAI" - args.configname = "GooseAI" - -# Ask for API key if OpenAI was selected -if(vars.model == "OAI"): - if not args.configname: - args.configname = "OAI" - if(not path.exists("settings/" + getmodelname().replace('/', '_') + ".settings")): - # If the client settings file doesn't exist, create it - print("{0}Please enter your API key:{1}\n".format(colors.CYAN, colors.END)) - vars.oaiapikey = input("Key> ") - # Write API key to file - os.makedirs('settings', exist_ok=True) - file = open("settings/" + getmodelname().replace('/', '_') + ".settings", "w") - try: - js = {"oaiapikey": vars.oaiapikey} - file.write(json.dumps(js, indent=3)) - finally: - file.close() - else: - # Otherwise open it up - file = open("settings/" + getmodelname().replace('/', '_') + ".settings", "r") - # Check if API key exists - js = json.load(file) - if("oaiapikey" in js and js["oaiapikey"] != ""): - # API key exists, grab it and close the file - vars.oaiapikey = js["oaiapikey"] - file.close() - else: - # Get API key, add it to settings object, and write it to disk - print("{0}Please enter your API key:{1}\n".format(colors.CYAN, colors.END)) - vars.oaiapikey = input("Key> ") - js["oaiapikey"] = vars.oaiapikey - # Write API key to file - file = open("settings/" + getmodelname().replace('/', '_') + ".settings", "w") - try: - file.write(json.dumps(js, indent=3)) - finally: - file.close() + filename = "settings/{}.breakmodel".format(model.replace("/", "_")) + if path.exists(filename): + with open(filename, "r") as file: + data = file.read().split("\n")[:2] + if len(data) < 2: + data.append("0") + break_values, disk_blocks = data + break_values = break_values.split(",") + else: + break_values = [layer_count] + break_values += [0] * (gpu_count - len(break_values)) + #print("Model_info: {}".format({'cmd': 'selected_model_info', 'key_value': key_value, 'key':key, + # 'gpu':gpu, 'layer_count':layer_count, 'breakmodel':breakmodel, + # 'break_values': break_values, 'gpu_count': gpu_count, + # 'url': url, 'gpu_names': gpu_names})) + emit('from_server', {'cmd': 'selected_model_info', 'key_value': key_value, 'key':key, + 'gpu':gpu, 'layer_count':layer_count, 'breakmodel':breakmodel, + 'disk_break_value': disk_blocks, 'accelerate': utils.HAS_ACCELERATE, + 'break_values': break_values, 'gpu_count': gpu_count, 'multi_online_models': multi_online_models, + 'url': url, 'default_url': default_url, 'gpu_names': gpu_names, 'models_on_url': models_on_url}, broadcast=True) + if send_horde_models: + get_cluster_models({'key': key_value, 'url': default_url}) + elif key_value != "" and model in [x[1] for x in model_menu['apilist']] and model != 'CLUSTER': + get_oai_models(key_value) - if vars.custmodpth: - vars.oaiurl = vars.oaiengines + "/" + vars.custmodpth + "/completions" - args.configname = args.configname + "/" + vars.custmodpth - engselected = True - else: - # Get list of models from OAI - 
print("{0}Retrieving engine list...{1}".format(colors.PURPLE, colors.END), end="") - req = requests.get( - vars.oaiengines, - headers = { - 'Authorization': 'Bearer '+vars.oaiapikey - } - ) - if(req.status_code == 200): - print("{0}OK!{1}".format(colors.GREEN, colors.END)) - print("{0}Please select an engine to use:{1}\n".format(colors.CYAN, colors.END)) - engines = req.json()["data"] - # Print list of engines - i = 0 - for en in engines: - print(" {0} - {1} ({2})".format(i, en["id"], "\033[92mready\033[0m" if en["ready"] == True else "\033[91mnot ready\033[0m")) - i += 1 - # Get engine to use - print("") - engselected = False - while(engselected == False): - engine = input("Engine #> ") - if(engine.isnumeric() and int(engine) < len(engines)): - vars.oaiurl = vars.oaiengines + "/{0}/completions".format(engines[int(engine)]["id"]) - args.configname = args.configname + "/" + engines[int(engine)]["id"] - engselected = True - else: - print("{0}Please enter a valid selection.{1}".format(colors.RED, colors.END)) + +def get_layer_count(model, directory=""): + if(model not in ["InferKit", "Colab", "API", "CLUSTER", "OAI", "GooseAI" , "ReadOnly", "TPUMeshTransformerGPTJ"]): + if(model == "GPT2Custom"): + with open(os.path.join(directory, "config.json"), "r") as f: + model_config = json.load(f) + # Get the model_type from the config or assume a model type if it isn't present else: - # Something went wrong, print the message and quit since we can't initialize an engine - print("{0}ERROR!{1}".format(colors.RED, colors.END)) - print(req.json()) - quit() - -# Ask for ngrok url if Google Colab was selected -if(vars.model == "Colab"): - if(vars.colaburl == ""): - print("{0}NOTE: For the modern KoboldAI Colab's you open the links directly in your browser.\nThis option is only for the KoboldAI Server API, not all features are supported in this mode.\n".format(colors.YELLOW, colors.END)) - print("{0}Enter the URL of the server (For example a trycloudflare link):{1}\n".format(colors.CYAN, colors.END)) - vars.colaburl = input("URL> ") + "/request" - -if(vars.model == "ReadOnly"): - vars.noai = True - -# Set logging level to reduce chatter from Flask -import logging -log = logging.getLogger('werkzeug') -log.setLevel(logging.ERROR) - -# Start flask & SocketIO -print("{0}Initializing Flask... {1}".format(colors.PURPLE, colors.END), end="") -from flask import Flask, render_template, Response, request, copy_current_request_context -from flask_socketio import SocketIO, emit -app = Flask(__name__, root_path=os.getcwd()) -app.config['SECRET KEY'] = 'secret!' 
-socketio = SocketIO(app, async_method="eventlet") -socketio.start_background_task(check_for_sp_change) -print("{0}OK!{1}".format(colors.GREEN, colors.END)) - -# Start transformers and create pipeline -if(not vars.use_colab_tpu and vars.model not in ["InferKit", "Colab", "OAI", "GooseAI" , "ReadOnly", "TPUMeshTransformerGPTJ", "TPUMeshTransformerGPTNeoX"]): - if(not vars.noai): - print("{0}Initializing transformers, please wait...{1}".format(colors.PURPLE, colors.END)) - from transformers import StoppingCriteria, GPT2TokenizerFast, GPT2LMHeadModel, GPTNeoForCausalLM, GPTNeoModel, AutoModelForCausalLM, AutoTokenizer - for m in ("GPTJModel", "XGLMModel"): - try: - globals()[m] = getattr(__import__("transformers"), m) - except: - pass + if(directory): + model = directory + from transformers import AutoConfig + if(os.path.isdir(model.replace('/', '_'))): + model_config = AutoConfig.from_pretrained(model.replace('/', '_'), revision=vars.revision, cache_dir="cache") + elif(os.path.isdir("models/{}".format(model.replace('/', '_')))): + model_config = AutoConfig.from_pretrained("models/{}".format(model.replace('/', '_')), revision=vars.revision, cache_dir="cache") + elif(os.path.isdir(directory)): + model_config = AutoConfig.from_pretrained(directory, revision=vars.revision, cache_dir="cache") + else: + model_config = AutoConfig.from_pretrained(model, revision=vars.revision, cache_dir="cache") try: - from transformers.models.opt.modeling_opt import OPTDecoder + if ((utils.HAS_ACCELERATE and model_config.model_type != 'gpt2') or model_config.model_type in ("gpt_neo", "gptj", "xglm", "opt")) and not vars.nobreakmodel: + return utils.num_layers(model_config) + else: + return None except: - pass - import transformers.generation_utils - from transformers import __version__ as transformers_version + return None + else: + return None +def get_oai_models(key): + vars.oaiapikey = key + if vars.model_selected == 'OAI': + url = "https://api.openai.com/v1/engines" + elif vars.model_selected == 'GooseAI': + url = "https://api.goose.ai/v1/engines" + else: + return + + # Get list of models from OAI + logger.init("OAI Engines", status="Retrieving") + req = requests.get( + url, + headers = { + 'Authorization': 'Bearer '+key + } + ) + if(req.status_code == 200): + engines = req.json()["data"] + try: + engines = [[en["id"], "{} ({})".format(en['id'], "Ready" if en["ready"] == True else "Not Ready")] for en in engines] + except: + logger.error(engines) + raise + + online_model = "" + changed=False + + #Save the key + if not path.exists("settings"): + # If the client settings file doesn't exist, create it + # Write API key to file + os.makedirs('settings', exist_ok=True) + if path.exists(get_config_filename(vars.model_selected)): + with open(get_config_filename(vars.model_selected), "r") as file: + js = json.load(file) + if 'online_model' in js: + online_model = js['online_model'] + if "apikey" in js: + if js['apikey'] != key: + changed=True + else: + changed=True + if changed: + js={} + with open(get_config_filename(vars.model_selected), "w") as file: + js["apikey"] = key + file.write(json.dumps(js, indent=3)) + + logger.init_ok("OAI Engines", status="OK") + emit('from_server', {'cmd': 'oai_engines', 'data': engines, 'online_model': online_model}, broadcast=True) + else: + # Something went wrong, print the message and quit since we can't initialize an engine + logger.init_err("OAI Engines", status="Failed") + logger.error(req.json()) + emit('from_server', {'cmd': 'errmsg', 'data': req.json()}) + +def 
get_cluster_models(msg): + vars.oaiapikey = msg['key'] + vars.apikey = vars.oaiapikey + url = msg['url'] + # Get list of models from public cluster + logger.init("KAI Horde Models", status="Retrieving") + try: + req = requests.get("{}/api/v1/models".format(url)) + except requests.exceptions.ConnectionError: + logger.init_err("KAI Horde Models", status="Failed") + logger.error("Provided KoboldAI Horde URL unreachable") + emit('from_server', {'cmd': 'errmsg', 'data': "Provided KoboldAI Horde URL unreachable"}) + return + if(not req.ok): + # Something went wrong, print the message and quit since we can't initialize an engine + logger.init_err("KAI Horde Models", status="Failed") + logger.error(req.json()) + emit('from_server', {'cmd': 'errmsg', 'data': req.json()}) + return + + engines = req.json() + logger.debug(engines) + try: + engines = [[en, en] for en in engines] + except: + logger.error(engines) + raise + + online_model = "" + changed=False + + #Save the key + if not path.exists("settings"): + # If the client settings file doesn't exist, create it + # Write API key to file + os.makedirs('settings', exist_ok=True) + if path.exists(get_config_filename(vars.model_selected)): + with open(get_config_filename(vars.model_selected), "r") as file: + js = json.load(file) + if 'online_model' in js: + online_model = js['online_model'] + if "apikey" in js: + if js['apikey'] != vars.oaiapikey: + changed=True + else: + changed=True + if changed: + js={} + with open(get_config_filename(vars.model_selected), "w") as file: + js["apikey"] = vars.oaiapikey + js["url"] = url + file.write(json.dumps(js, indent=3)) + + logger.init_ok("KAI Horde Models", status="OK") + emit('from_server', {'cmd': 'oai_engines', 'data': engines, 'online_model': online_model}, broadcast=True) + + +# Function to patch transformers to use our soft prompt +def patch_causallm(model): + from torch.nn import Embedding + if(getattr(Embedding, "_koboldai_patch_causallm_model", None)): + Embedding._koboldai_patch_causallm_model = model + return model + old_embedding_call = Embedding.__call__ + def new_embedding_call(self, input_ids, *args, **kwargs): + if(Embedding._koboldai_patch_causallm_model.get_input_embeddings() is not self): + return old_embedding_call(self, input_ids, *args, **kwargs) + assert input_ids is not None + if(vars.sp is not None): + shifted_input_ids = input_ids - model.config.vocab_size + input_ids.clamp_(max=model.config.vocab_size-1) + inputs_embeds = old_embedding_call(self, input_ids, *args, **kwargs) + if(vars.sp is not None): + vars.sp = vars.sp.to(inputs_embeds.dtype).to(inputs_embeds.device) + inputs_embeds = torch.where( + (shifted_input_ids >= 0)[..., None], + vars.sp[shifted_input_ids.clamp(min=0)], + inputs_embeds, + ) + return inputs_embeds + Embedding.__call__ = new_embedding_call + Embedding._koboldai_patch_causallm_model = model + return model + +def patch_transformers_download(): + global transformers + import copy, requests, tqdm, time + class Send_to_socketio(object): + def write(self, bar): + bar = bar.replace("\r", "").replace("\n", "") + if bar != "": + try: + print(bar, end="\r") + emit('from_server', {'cmd': 'model_load_status', 'data': bar.replace(" ", " ")}, broadcast=True) + eventlet.sleep(seconds=0) + except: + pass + def http_get( + url: str, + temp_file, + proxies=None, + resume_size=0, + headers=None, + file_name=None, + ): + """ + Download remote file. Do not gobble up errors. 
+ """ + headers = copy.deepcopy(headers) + if resume_size > 0: + headers["Range"] = f"bytes={resume_size}-" + r = requests.get(url, stream=True, proxies=proxies, headers=headers) + transformers.utils.hub._raise_for_status(r) + content_length = r.headers.get("Content-Length") + total = resume_size + int(content_length) if content_length is not None else None + # `tqdm` behavior is determined by `utils.logging.is_progress_bar_enabled()` + # and can be set using `utils.logging.enable/disable_progress_bar()` + if url[-11:] != 'config.json': + progress = tqdm.tqdm( + unit="B", + unit_scale=True, + unit_divisor=1024, + total=total, + initial=resume_size, + desc=f"Downloading {file_name}" if file_name is not None else "Downloading", + file=Send_to_socketio(), + ) + for chunk in r.iter_content(chunk_size=1024): + if chunk: # filter out keep-alive new chunks + if url[-11:] != 'config.json': + progress.update(len(chunk)) + temp_file.write(chunk) + if url[-11:] != 'config.json': + progress.close() + + transformers.utils.hub.http_get = http_get + + +def patch_transformers(): + global transformers + + patch_transformers_download() + + old_from_pretrained = PreTrainedModel.from_pretrained.__func__ + @classmethod + def new_from_pretrained(cls, pretrained_model_name_or_path, *model_args, **kwargs): + vars.fp32_model = False + utils.num_shards = None + utils.current_shard = 0 + utils.from_pretrained_model_name = pretrained_model_name_or_path + utils.from_pretrained_index_filename = None + utils.from_pretrained_kwargs = kwargs + utils.bar = None + if not args.no_aria2: + utils.aria2_hook(pretrained_model_name_or_path, **kwargs) + return old_from_pretrained(cls, pretrained_model_name_or_path, *model_args, **kwargs) + PreTrainedModel.from_pretrained = new_from_pretrained + if(hasattr(modeling_utils, "get_checkpoint_shard_files")): + old_get_checkpoint_shard_files = modeling_utils.get_checkpoint_shard_files + def new_get_checkpoint_shard_files(pretrained_model_name_or_path, index_filename, *args, **kwargs): + utils.num_shards = utils.get_num_shards(index_filename) + utils.from_pretrained_index_filename = index_filename + return old_get_checkpoint_shard_files(pretrained_model_name_or_path, index_filename, *args, **kwargs) + modeling_utils.get_checkpoint_shard_files = new_get_checkpoint_shard_files + + # Some versions of transformers 4.17.0.dev0 are affected by + # https://github.com/huggingface/transformers/issues/15736 + # This is a workaround for those versions of transformers. 
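# Both version-specific workarounds below are guarded on the installed
# transformers release: the XGLM fix checks for an exact "4.17.0.dev0" match,
# while the later OPTForCausalLM fix checks a version range. A minimal, hedged
# sketch of the range-style guard (the function name and bounds here are
# illustrative and not part of this patch):
import packaging.version
from transformers import __version__ as transformers_version

def affected_by_range(lower="4.19.0.dev0", upper="4.20.0"):
    # True only for releases in [lower, upper); dev releases compare correctly
    # because packaging.version implements PEP 440 ordering.
    current = packaging.version.parse(transformers_version)
    return packaging.version.parse(lower) <= current < packaging.version.parse(upper)

# Hypothetical usage:
#     if affected_by_range():
#         apply_workaround()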
+ if(transformers_version == "4.17.0.dev0"): + try: + from transformers.models.xglm.modeling_xglm import XGLMSinusoidalPositionalEmbedding + except ImportError: + pass + else: + @torch.no_grad() + def new_forward(self, input_ids: torch.Tensor = None, inputs_embeds: torch.Tensor = None, past_key_values_length: int = 0): + bsz, seq_len = inputs_embeds.size()[:-1] + input_shape = inputs_embeds.size()[:-1] + sequence_length = input_shape[1] + position_ids = torch.arange( + past_key_values_length + self.padding_idx + 1, past_key_values_length + sequence_length + self.padding_idx + 1, dtype=torch.long, device=inputs_embeds.device + ).unsqueeze(0).expand(input_shape).contiguous() + max_pos = self.padding_idx + 1 + seq_len + past_key_values_length + if max_pos > self.weights.size(0): + self.make_weights(max_pos + self.offset, self.embedding_dim, self.padding_idx) + return self.weights.index_select(0, position_ids.view(-1)).view(bsz, seq_len, -1).detach() + XGLMSinusoidalPositionalEmbedding.forward = new_forward + + + # Fix a bug in OPTForCausalLM where self.lm_head is the wrong size + if(packaging.version.parse("4.19.0.dev0") <= packaging.version.parse(transformers_version) < packaging.version.parse("4.20.0")): + try: + from transformers import OPTForCausalLM, OPTModel + except ImportError: + pass + else: + # This is the same as the original __init__ but with + # config.hidden_size + # replaced with + # config.word_embed_proj_dim + def new_init(self, config): + super(OPTForCausalLM, self).__init__(config) + self.model = OPTModel(config) + self.lm_head = torch.nn.Linear(config.word_embed_proj_dim, config.vocab_size, bias=False) + self.post_init() + OPTForCausalLM.__init__ = new_init + + + # Patch transformers to use our custom logit warpers + from transformers import LogitsProcessorList, LogitsWarper, LogitsProcessor, TopKLogitsWarper, TopPLogitsWarper, TemperatureLogitsWarper, RepetitionPenaltyLogitsProcessor + from warpers import AdvancedRepetitionPenaltyLogitsProcessor, TailFreeLogitsWarper, TypicalLogitsWarper, TopALogitsWarper + + def dynamic_processor_wrap(cls, field_name, var_name, cond=None): + old_call = cls.__call__ + def new_call(self, *args, **kwargs): + if(not isinstance(field_name, str) and isinstance(field_name, Iterable)): + conds = [] + for f, v in zip(field_name, var_name): + conds.append(getattr(vars, v)) + setattr(self, f, conds[-1]) + else: + conds = getattr(vars, var_name) + setattr(self, field_name, conds) + assert len(args) == 2 + if(cond is None or cond(conds)): + return old_call(self, *args, **kwargs) + return args[1] + cls.__call__ = new_call + dynamic_processor_wrap(AdvancedRepetitionPenaltyLogitsProcessor, ("penalty", "penalty_slope", "penalty_range"), ("rep_pen", "rep_pen_slope", "rep_pen_range"), cond=lambda x: x[0] != 1.0) + dynamic_processor_wrap(TopKLogitsWarper, "top_k", "top_k", cond=lambda x: x > 0) + dynamic_processor_wrap(TopALogitsWarper, "top_a", "top_a", cond=lambda x: x > 0.0) + dynamic_processor_wrap(TopPLogitsWarper, "top_p", "top_p", cond=lambda x: x < 1.0) + dynamic_processor_wrap(TailFreeLogitsWarper, "tfs", "tfs", cond=lambda x: x < 1.0) + dynamic_processor_wrap(TypicalLogitsWarper, "typical", "typical", cond=lambda x: x < 1.0) + dynamic_processor_wrap(TemperatureLogitsWarper, "temperature", "temp", cond=lambda x: x != 1.0) + + class LuaLogitsProcessor(LogitsProcessor): + + def __init__(self): + pass + + def __call__(self, input_ids: torch.LongTensor, scores: torch.FloatTensor) -> torch.FloatTensor: + assert scores.ndim == 2 + assert input_ids.ndim 
== 2 + self.regeneration_required = False + self.halt = False + + if(vars.standalone): + return scores + + scores_shape = scores.shape + scores_list = scores.tolist() + vars.lua_koboldbridge.logits = vars.lua_state.table() + for r, row in enumerate(scores_list): + vars.lua_koboldbridge.logits[r+1] = vars.lua_state.table(*row) + vars.lua_koboldbridge.vocab_size = scores_shape[-1] + + execute_genmod() + + scores = torch.tensor( + tuple(tuple(row.values()) for row in vars.lua_koboldbridge.logits.values()), + device=scores.device, + dtype=scores.dtype, + ) + assert scores.shape == scores_shape + + return scores + + from torch.nn import functional as F + + class ProbabilityVisualizerLogitsProcessor(LogitsProcessor): + def __init__(self): + pass + + def __call__(self, input_ids: torch.LongTensor, scores: torch.FloatTensor) -> torch.FloatTensor: + assert scores.ndim == 2 + assert input_ids.ndim == 2 + + if vars.numseqs > 1 or not vars.show_probs: + return scores + + probs = F.softmax(scores, dim = -1).cpu().numpy()[0] + + token_prob_info = [] + for token_id, score in sorted(enumerate(probs), key=lambda x: x[1], reverse=True)[:8]: + token_prob_info.append({ + "tokenId": token_id, + "decoded": utils.decodenewlines(tokenizer.decode(token_id)), + "score": float(score), + }) + + vars.token_stream_queue.probability_buffer = token_prob_info + return scores + + def new_get_logits_processor(*args, **kwargs) -> LogitsProcessorList: + processors = new_get_logits_processor.old_get_logits_processor(*args, **kwargs) + processors.insert(0, LuaLogitsProcessor()) + processors.append(ProbabilityVisualizerLogitsProcessor()) + return processors + new_get_logits_processor.old_get_logits_processor = transformers.generation_utils.GenerationMixin._get_logits_processor + transformers.generation_utils.GenerationMixin._get_logits_processor = new_get_logits_processor + + class KoboldLogitsWarperList(LogitsProcessorList): + def __init__(self, beams: int = 1, **kwargs): + self.__warper_list: List[LogitsWarper] = [] + self.__warper_list.append(TopKLogitsWarper(top_k=1, min_tokens_to_keep=1 + (beams > 1))) + self.__warper_list.append(TopALogitsWarper(top_a=0.5, min_tokens_to_keep=1 + (beams > 1))) + self.__warper_list.append(TopPLogitsWarper(top_p=0.5, min_tokens_to_keep=1 + (beams > 1))) + self.__warper_list.append(TailFreeLogitsWarper(tfs=0.5, min_tokens_to_keep=1 + (beams > 1))) + self.__warper_list.append(TypicalLogitsWarper(typical=0.5, min_tokens_to_keep=1 + (beams > 1))) + self.__warper_list.append(TemperatureLogitsWarper(temperature=0.5)) + self.__warper_list.append(AdvancedRepetitionPenaltyLogitsProcessor()) + + def __call__(self, input_ids: torch.LongTensor, scores: torch.FloatTensor, *args, **kwargs): + sampler_order = vars.sampler_order[:] + if len(sampler_order) < 7: # Add repetition penalty at beginning if it's not present + sampler_order = [6] + sampler_order + for k in sampler_order: + scores = self.__warper_list[k](input_ids, scores, *args, **kwargs) + return scores + + def new_get_logits_warper(beams: int = 1,) -> LogitsProcessorList: + return KoboldLogitsWarperList(beams=beams) + + def new_sample(self, *args, **kwargs): + assert kwargs.pop("logits_warper", None) is not None + kwargs["logits_warper"] = new_get_logits_warper( + beams=1, + ) + if(vars.newlinemode == "s") or (vars.newlinemode == "ns"): + kwargs["eos_token_id"] = -1 + kwargs.setdefault("pad_token_id", 2) + return new_sample.old_sample(self, *args, **kwargs) + new_sample.old_sample = transformers.generation_utils.GenerationMixin.sample + 
transformers.generation_utils.GenerationMixin.sample = new_sample + + + # Allow bad words filter to ban <|endoftext|> token + import transformers.generation_logits_process + def new_init(self, bad_words_ids: List[List[int]], eos_token_id: int): + return new_init.old_init(self, bad_words_ids, -1) + new_init.old_init = transformers.generation_logits_process.NoBadWordsLogitsProcessor.__init__ + transformers.generation_logits_process.NoBadWordsLogitsProcessor.__init__ = new_init + + class TokenStreamer(StoppingCriteria): + # A StoppingCriteria is used here because it seems to run after + # everything has been evaluated score-wise. + def __init__(self, tokenizer): + self.tokenizer = tokenizer + + def __call__( + self, + input_ids: torch.LongTensor, + scores: torch.FloatTensor, + **kwargs, + ) -> bool: + # Do not intermingle multiple generations' outputs! + if vars.numseqs > 1: + return False + + if not (vars.show_probs or vars.output_streaming): + return False + + if vars.chatmode: + return False + tokenizer_text = utils.decodenewlines(tokenizer.decode(input_ids[0, -1])) + vars.token_stream_queue.add_text(tokenizer_text) + return False + + + # Sets up dynamic world info scanner + class DynamicWorldInfoScanCriteria(StoppingCriteria): + def __init__( + self, + tokenizer, + excluded_world_info: List[Set], + ): + self.regeneration_required = False + self.halt = False + self.tokenizer = tokenizer + self.excluded_world_info = excluded_world_info + def __call__( + self, + input_ids: torch.LongTensor, + scores: torch.FloatTensor, + **kwargs, + ) -> bool: + vars.generated_tkns += 1 + if(not vars.standalone and vars.lua_koboldbridge.generated_cols and vars.generated_tkns != vars.lua_koboldbridge.generated_cols): + raise RuntimeError(f"Inconsistency detected between KoboldAI Python and Lua backends ({vars.generated_tkns} != {vars.lua_koboldbridge.generated_cols})") + if(vars.abort or vars.generated_tkns >= vars.genamt): + self.regeneration_required = False + self.halt = False + return True + if(vars.standalone): + return False + + assert input_ids.ndim == 2 + assert len(self.excluded_world_info) == input_ids.shape[0] + self.regeneration_required = vars.lua_koboldbridge.regeneration_required + self.halt = not vars.lua_koboldbridge.generating + vars.lua_koboldbridge.regeneration_required = False + + for i in range(vars.numseqs): + vars.lua_koboldbridge.generated[i+1][vars.generated_tkns] = int(input_ids[i, -1].item()) + + if(not vars.dynamicscan): + return self.regeneration_required or self.halt + tail = input_ids[..., -vars.generated_tkns:] + for i, t in enumerate(tail): + decoded = utils.decodenewlines(tokenizer.decode(t)) + _, found = checkworldinfo(decoded, force_use_txt=True, actions=vars._actions) + found -= self.excluded_world_info[i] + if(len(found) != 0): + self.regeneration_required = True + break + return self.regeneration_required or self.halt + old_get_stopping_criteria = transformers.generation_utils.GenerationMixin._get_stopping_criteria + def new_get_stopping_criteria(self, *args, **kwargs): + stopping_criteria = old_get_stopping_criteria(self, *args, **kwargs) + global tokenizer + self.kai_scanner = DynamicWorldInfoScanCriteria( + tokenizer=tokenizer, + excluded_world_info=self.kai_scanner_excluded_world_info, + ) + token_streamer = TokenStreamer(tokenizer=tokenizer) + + stopping_criteria.insert(0, self.kai_scanner) + stopping_criteria.insert(0, token_streamer) + return stopping_criteria + transformers.generation_utils.GenerationMixin._get_stopping_criteria = new_get_stopping_criteria + 
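# For orientation: TokenStreamer and DynamicWorldInfoScanCriteria above both
# follow the standard transformers StoppingCriteria contract, where __call__
# receives the current input_ids and scores and returns True to halt
# generation. A minimal, hedged sketch of that contract with an illustrative
# token budget (not part of this patch):
import torch
from transformers import StoppingCriteria, StoppingCriteriaList

class MaxNewTokensSketch(StoppingCriteria):
    # Illustrative only: stop once `max_new_tokens` tokens follow the prompt.
    def __init__(self, prompt_length: int, max_new_tokens: int):
        self.prompt_length = prompt_length
        self.max_new_tokens = max_new_tokens

    def __call__(self, input_ids: torch.LongTensor, scores: torch.FloatTensor, **kwargs) -> bool:
        # input_ids gains one column per generated token
        return input_ids.shape[-1] - self.prompt_length >= self.max_new_tokens

# Hypothetical usage with any causal LM:
#     criteria = StoppingCriteriaList([MaxNewTokensSketch(ids.shape[-1], 80)])
#     model.generate(ids, stopping_criteria=criteria)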
+def reset_model_settings(): + vars.socketio = socketio + vars.max_length = 1024 # Maximum number of tokens to submit per action + vars.ikmax = 3000 # Maximum number of characters to submit to InferKit + vars.genamt = 80 # Amount of text for each action to generate + vars.ikgen = 200 # Number of characters for InferKit to generate + vars.rep_pen = 1.1 # Default generator repetition_penalty + vars.rep_pen_slope = 0.7 # Default generator repetition penalty slope + vars.rep_pen_range = 1024 # Default generator repetition penalty range + vars.temp = 0.5 # Default generator temperature + vars.top_p = 0.9 # Default generator top_p + vars.top_k = 0 # Default generator top_k + vars.top_a = 0.0 # Default generator top-a + vars.tfs = 1.0 # Default generator tfs (tail-free sampling) + vars.typical = 1.0 # Default generator typical sampling threshold + vars.numseqs = 1 # Number of sequences to ask the generator to create + vars.generated_tkns = 0 # If using a backend that supports Lua generation modifiers, how many tokens have already been generated, otherwise 0 + vars.badwordsids = [] + vars.fp32_model = False # Whether or not the most recently loaded HF model was in fp32 format + vars.modeldim = -1 # Embedding dimension of your model (e.g. it's 4096 for GPT-J-6B and 2560 for GPT-Neo-2.7B) + vars.sampler_order = [6, 0, 1, 2, 3, 4, 5] + vars.newlinemode = "n" + vars.revision = None + vars.lazy_load = True + + +def load_model(use_gpu=True, gpu_layers=None, disk_layers=None, initial_load=False, online_model="", use_breakmodel_args=False, breakmodel_args_default_to_cpu=False): + global model + global generator + global torch + global model_config + global GPT2Tokenizer + global tokenizer + if(initial_load): + use_breakmodel_args = True + reset_model_settings() + if not utils.HAS_ACCELERATE: + disk_layers = None + vars.noai = False + if not use_breakmodel_args: + set_aibusy(True) + if vars.model != 'ReadOnly': + emit('from_server', {'cmd': 'model_load_status', 'data': "Loading {}".format(vars.model)}, broadcast=True) + #Have to add a sleep so the server will send the emit for some reason + time.sleep(0.1) + if gpu_layers is not None: + args.breakmodel_gpulayers = gpu_layers + elif use_breakmodel_args: + gpu_layers = args.breakmodel_gpulayers + if breakmodel_args_default_to_cpu and gpu_layers is None: + gpu_layers = args.breakmodel_gpulayers = [] + if disk_layers is not None: + args.breakmodel_disklayers = int(disk_layers) + elif use_breakmodel_args: + disk_layers = args.breakmodel_disklayers + if breakmodel_args_default_to_cpu and disk_layers is None: + disk_layers = args.breakmodel_disklayers = 0 + + #We need to wipe out the existing model and refresh the cuda cache + model = None + generator = None + model_config = None + vars.online_model = '' + with torch.no_grad(): + with warnings.catch_warnings(): + warnings.filterwarnings("ignore", message="torch.distributed.reduce_op is deprecated") + for tensor in gc.get_objects(): + try: + if torch.is_tensor(tensor): + tensor.set_(torch.tensor((), device=tensor.device, dtype=tensor.dtype)) + except: + pass + gc.collect() + try: + torch.cuda.empty_cache() + except: + pass + + #Reload our badwords + vars.badwordsids = vars.badwordsids_default + + if online_model == "": + vars.configname = getmodelname() + #Let's set the GooseAI or OpenAI server URLs if that's applicable + else: + vars.online_model = online_model + # Swap OAI Server if GooseAI was selected + if(vars.model == "GooseAI"): + vars.oaiengines = "https://api.goose.ai/v1/engines" + vars.model = "OAI" + 
vars.configname = f"GooseAI_{online_model.replace('/', '_')}" + elif(vars.model == "CLUSTER") and type(online_model) is list: + if len(online_model) != 1: + vars.configname = vars.model + else: + vars.configname = f"{vars.model}_{online_model[0].replace('/', '_')}" + else: + vars.configname = f"{vars.model}_{online_model.replace('/', '_')}" + if path.exists(get_config_filename()): + changed=False + with open(get_config_filename(), "r") as file: + # Check if API key exists + js = json.load(file) + if 'online_model' in js: + if js['online_model'] != online_model: + changed=True + js['online_model'] = online_model + else: + changed=True + js['online_model'] = online_model + if changed: + with open(get_config_filename(), "w") as file: + file.write(json.dumps(js, indent=3)) + + # Swap OAI Server if GooseAI was selected + if(vars.model == "GooseAI"): + vars.oaiengines = "https://api.goose.ai/v1/engines" + vars.model = "OAI" + args.configname = "GooseAI" + "/" + online_model + elif vars.model != "CLUSTER": + args.configname = vars.model + "/" + online_model + vars.oaiurl = vars.oaiengines + "/{0}/completions".format(online_model) + + + # If transformers model was selected & GPU available, ask to use CPU or GPU + if(vars.model not in ["InferKit", "Colab", "API", "CLUSTER", "OAI", "GooseAI" , "ReadOnly", "TPUMeshTransformerGPTJ", "TPUMeshTransformerGPTNeoX"]): + vars.allowsp = True + # Test for GPU support + + # Make model path the same as the model name to make this consistent with the other loading method if it isn't a known model type + # This code is not just a workaround for below, it is also used to make the behavior consistent with other loading methods - Henk717 + if(not vars.model in ["NeoCustom", "GPT2Custom"]): + vars.custmodpth = vars.model + elif(vars.model == "NeoCustom"): + vars.model = os.path.basename(os.path.normpath(vars.custmodpth)) + + # Get the model_type from the config or assume a model type if it isn't present + from transformers import AutoConfig + if(os.path.isdir(vars.custmodpth.replace('/', '_'))): + try: + model_config = AutoConfig.from_pretrained(vars.custmodpth.replace('/', '_'), revision=vars.revision, cache_dir="cache") + vars.model_type = model_config.model_type + except ValueError as e: + vars.model_type = "not_found" + elif(os.path.isdir("models/{}".format(vars.custmodpth.replace('/', '_')))): + try: + model_config = AutoConfig.from_pretrained("models/{}".format(vars.custmodpth.replace('/', '_')), revision=vars.revision, cache_dir="cache") + vars.model_type = model_config.model_type + except ValueError as e: + vars.model_type = "not_found" + else: + try: + model_config = AutoConfig.from_pretrained(vars.custmodpth, revision=vars.revision, cache_dir="cache") + vars.model_type = model_config.model_type + except ValueError as e: + vars.model_type = "not_found" + if(vars.model_type == "not_found" and vars.model == "NeoCustom"): + vars.model_type = "gpt_neo" + elif(vars.model_type == "not_found" and vars.model == "GPT2Custom"): + vars.model_type = "gpt2" + elif(vars.model_type == "not_found"): + logger.warning("No model type detected, assuming Neo (If this is a GPT2 model use the other menu option or --model GPT2Custom)") + vars.model_type = "gpt_neo" + + if(not vars.use_colab_tpu and vars.model not in ["InferKit", "Colab", "API", "CLUSTER", "OAI", "GooseAI" , "ReadOnly", "TPUMeshTransformerGPTJ", "TPUMeshTransformerGPTNeoX"]): + loadmodelsettings() + loadsettings() + logger.init("GPU support", status="Searching") + vars.hascuda = torch.cuda.is_available() and not 
args.cpu + vars.bmsupported = ((utils.HAS_ACCELERATE and vars.model_type != 'gpt2') or vars.model_type in ("gpt_neo", "gptj", "xglm", "opt")) and not vars.nobreakmodel + if(args.breakmodel is not None and args.breakmodel): + logger.warning("--breakmodel is no longer supported. Breakmodel mode is now automatically enabled when --breakmodel_gpulayers is used (see --help for details).") + if(args.breakmodel_layers is not None): + logger.warning("--breakmodel_layers is deprecated. Use --breakmodel_gpulayers instead (see --help for details).") + if(args.model and vars.bmsupported and not args.breakmodel_gpulayers and not args.breakmodel_layers and (not utils.HAS_ACCELERATE or not args.breakmodel_disklayers)): + logger.warning("Model launched without the --breakmodel_gpulayers argument, defaulting to GPU only mode.") + vars.bmsupported = False + if(not vars.bmsupported and (args.breakmodel_gpulayers is not None or args.breakmodel_layers is not None or args.breakmodel_disklayers is not None)): + logger.warning("This model does not support hybrid generation. --breakmodel_gpulayers will be ignored.") + if(vars.hascuda): + logger.init_ok("GPU support", status="Found") + else: + logger.init_warn("GPU support", status="Not Found") + + if args.cpu: + vars.usegpu = False + gpu_layers = None + disk_layers = None + vars.breakmodel = False + elif vars.hascuda: + if(vars.bmsupported): + vars.usegpu = False + vars.breakmodel = True + else: + vars.breakmodel = False + vars.usegpu = use_gpu + + + # Ask for API key if InferKit was selected + if(vars.model == "InferKit"): + vars.apikey = vars.oaiapikey + + # Swap OAI Server if GooseAI was selected + if(vars.model == "GooseAI"): + vars.oaiengines = "https://api.goose.ai/v1/engines" + vars.model = "OAI" + vars.configname = "GooseAI" + + # Ask for API key if OpenAI was selected + if(vars.model == "OAI"): + if not vars.configname: + vars.configname = "OAI" + + if(vars.model == "ReadOnly"): + vars.noai = True + + # Start transformers and create pipeline + if(not vars.use_colab_tpu and vars.model not in ["InferKit", "Colab", "API", "CLUSTER", "OAI", "GooseAI" , "ReadOnly", "TPUMeshTransformerGPTJ", "TPUMeshTransformerGPTNeoX"]): + if(not vars.noai): + logger.init("Transformers", status='Starting') + for m in ("GPTJModel", "XGLMModel"): + try: + globals()[m] = getattr(__import__("transformers"), m) + except: + pass + + # Lazy loader + import torch_lazy_loader + def get_lazy_load_callback(n_layers, convert_to_float16=True): + if not vars.lazy_load: + return + + from tqdm.auto import tqdm + + global breakmodel + import breakmodel + + if utils.HAS_ACCELERATE: + import accelerate.utils + + if args.breakmodel_disklayers is not None: + breakmodel.disk_blocks = args.breakmodel_disklayers + + disk_blocks = breakmodel.disk_blocks + gpu_blocks = breakmodel.gpu_blocks + ram_blocks = ram_blocks = n_layers - sum(gpu_blocks) + cumulative_gpu_blocks = tuple(itertools.accumulate(gpu_blocks)) + + def lazy_load_callback(model_dict: Dict[str, Union[torch_lazy_loader.LazyTensor, torch.Tensor]], f, **_): + if lazy_load_callback.nested: + return + lazy_load_callback.nested = True + + device_map: Dict[str, Union[str, int]] = {} + + @functools.lru_cache(maxsize=None) + def get_original_key(key): + return max((original_key for original_key in utils.module_names if original_key.endswith(key)), key=len) + + for key, value in model_dict.items(): + original_key = get_original_key(key) + if isinstance(value, torch_lazy_loader.LazyTensor) and not any(original_key.startswith(n) for n in 
utils.layers_module_names): + device_map[key] = vars.gpu_device if vars.hascuda and vars.usegpu else "cpu" if not vars.hascuda or not vars.breakmodel else breakmodel.primary_device + else: + layer = int(max((n for n in utils.layers_module_names if original_key.startswith(n)), key=len).rsplit(".", 1)[1]) + device = vars.gpu_device if vars.hascuda and vars.usegpu else "disk" if layer < disk_blocks and layer < ram_blocks else "cpu" if not vars.hascuda or not vars.breakmodel else "shared" if layer < ram_blocks else bisect.bisect_right(cumulative_gpu_blocks, layer - ram_blocks) + device_map[key] = device + + if utils.num_shards is None or utils.current_shard == 0: + utils.offload_index = {} + if utils.HAS_ACCELERATE: + if os.path.isdir("accelerate-disk-cache"): + # Delete all of the files in the disk cache folder without deleting the folder itself to allow people to create symbolic links for this folder + # (the folder doesn't contain any subfolders so os.remove will do just fine) + for filename in os.listdir("accelerate-disk-cache"): + try: + os.remove(os.path.join("accelerate-disk-cache", filename)) + except OSError: + pass + os.makedirs("accelerate-disk-cache", exist_ok=True) + if utils.num_shards is not None: + num_tensors = len(utils.get_sharded_checkpoint_num_tensors(utils.from_pretrained_model_name, utils.from_pretrained_index_filename, **utils.from_pretrained_kwargs)) + else: + num_tensors = len(device_map) + utils.bar = tqdm(total=num_tensors, desc=f"{colors.PURPLE}INIT{colors.END} | Loading model tensors", file=Send_to_socketio()) + + with zipfile.ZipFile(f, "r") as z: + try: + last_storage_key = None + f = None + current_offset = 0 + able_to_pin_layers = True + if utils.num_shards is not None: + utils.current_shard += 1 + for key in sorted(device_map.keys(), key=lambda k: (model_dict[k].key, model_dict[k].seek_offset)): + storage_key = model_dict[key].key + if storage_key != last_storage_key or model_dict[key].seek_offset < current_offset: + last_storage_key = storage_key + if isinstance(f, zipfile.ZipExtFile): + f.close() + f = z.open(f"archive/data/{storage_key}") + current_offset = 0 + if current_offset != model_dict[key].seek_offset: + f.read(model_dict[key].seek_offset - current_offset) + current_offset = model_dict[key].seek_offset + device = device_map[key] + size = functools.reduce(lambda x, y: x * y, model_dict[key].shape, 1) + dtype = model_dict[key].dtype + nbytes = size if dtype is torch.bool else size * ((torch.finfo if dtype.is_floating_point else torch.iinfo)(dtype).bits >> 3) + #print(f"Transferring <{key}> to {f'({device.upper()})' if isinstance(device, str) else '[device ' + str(device) + ']'} ... 
", end="", flush=True) + model_dict[key] = model_dict[key].materialize(f, map_location="cpu") + if model_dict[key].dtype is torch.float32: + vars.fp32_model = True + if convert_to_float16 and breakmodel.primary_device != "cpu" and vars.hascuda and (vars.breakmodel or vars.usegpu) and model_dict[key].dtype is torch.float32: + model_dict[key] = model_dict[key].to(torch.float16) + if breakmodel.primary_device == "cpu" or (not vars.usegpu and not vars.breakmodel and model_dict[key].dtype is torch.float16): + model_dict[key] = model_dict[key].to(torch.float32) + if device == "shared": + model_dict[key] = model_dict[key].to("cpu").detach_() + if able_to_pin_layers and utils.HAS_ACCELERATE: + try: + model_dict[key] = model_dict[key].pin_memory() + except: + able_to_pin_layers = False + elif device == "disk": + accelerate.utils.offload_weight(model_dict[key], get_original_key(key), "accelerate-disk-cache", index=utils.offload_index) + model_dict[key] = model_dict[key].to("meta") + else: + model_dict[key] = model_dict[key].to(device) + #print("OK", flush=True) + current_offset += nbytes + utils.bar.update(1) + finally: + if utils.num_shards is None or utils.current_shard >= utils.num_shards: + if utils.offload_index: + for name, tensor in utils.named_buffers: + if name not in utils.offload_index: + accelerate.utils.offload_weight(tensor, name, "accelerate-disk-cache", index=utils.offload_index) + accelerate.utils.save_offload_index(utils.offload_index, "accelerate-disk-cache") + utils.bar.close() + utils.bar = None + lazy_load_callback.nested = False + if isinstance(f, zipfile.ZipExtFile): + f.close() + + lazy_load_callback.nested = False + return lazy_load_callback + + + def get_hidden_size_from_model(model): + return model.get_input_embeddings().embedding_dim + + def maybe_low_cpu_mem_usage() -> Dict[str, Any]: + if(packaging.version.parse(transformers_version) < packaging.version.parse("4.11.0")): + logger.warning(f"Please upgrade to transformers 4.11.0 for lower RAM usage. 
You have transformers {transformers_version}.") + return {} + return {"low_cpu_mem_usage": True} + + @contextlib.contextmanager + def maybe_use_float16(always_use=False): + if(always_use or (vars.hascuda and args.lowmem and (vars.usegpu or vars.breakmodel))): + original_dtype = torch.get_default_dtype() + torch.set_default_dtype(torch.float16) + yield True + torch.set_default_dtype(original_dtype) + else: + yield False + + # If custom GPT2 model was chosen + if(vars.model_type == "gpt2"): + vars.lazy_load = False + if os.path.exists(vars.custmodpth): + model_config = open(vars.custmodpth + "/config.json", "r") + elif os.path.exists(os.path.join("models/", vars.custmodpth)): + config_path = os.path.join("models/", vars.custmodpth) + config_path = os.path.join(config_path, "config.json").replace("\\", "//") + model_config = open(config_path, "r") + #js = json.load(model_config) + with(maybe_use_float16()): + try: + if os.path.exists(vars.custmodpth): + model = GPT2LMHeadModel.from_pretrained(vars.custmodpth, revision=vars.revision, cache_dir="cache") + tokenizer = GPT2Tokenizer.from_pretrained(vars.custmodpth, revision=vars.revision, cache_dir="cache") + elif os.path.exists(os.path.join("models/", vars.custmodpth)): + model = GPT2LMHeadModel.from_pretrained(os.path.join("models/", vars.custmodpth), revision=vars.revision, cache_dir="cache") + tokenizer = GPT2Tokenizer.from_pretrained(os.path.join("models/", vars.custmodpth), revision=vars.revision, cache_dir="cache") + else: + model = GPT2LMHeadModel.from_pretrained(vars.custmodpth, revision=vars.revision, cache_dir="cache") + tokenizer = GPT2Tokenizer.from_pretrained(vars.custmodpth, revision=vars.revision, cache_dir="cache") + except Exception as e: + if("out of memory" in traceback.format_exc().lower()): + raise RuntimeError("One of your GPUs ran out of memory when KoboldAI tried to load your model.") + raise e + tokenizer = GPT2Tokenizer.from_pretrained(vars.custmodpth, revision=vars.revision, cache_dir="cache") + model.save_pretrained("models/{}".format(vars.model.replace('/', '_')), max_shard_size="500MiB") + tokenizer.save_pretrained("models/{}".format(vars.model.replace('/', '_'))) + vars.modeldim = get_hidden_size_from_model(model) + # Is CUDA available? 
If so, use GPU, otherwise fall back to CPU + if(vars.hascuda and vars.usegpu): + model = model.half().to(vars.gpu_device) + generator = model.generate + else: + model = model.to('cpu').float() + generator = model.generate + patch_causallm(model) + # Use the Generic implementation + else: + lowmem = maybe_low_cpu_mem_usage() + # We must disable low_cpu_mem_usage (by setting lowmem to {}) if + # using a GPT-2 model because GPT-2 is not compatible with this + # feature yet + if(vars.model_type == "gpt2"): + lowmem = {} + vars.lazy_load = False # Also, lazy loader doesn't support GPT-2 models + + # If we're using torch_lazy_loader, we need to get breakmodel config + # early so that it knows where to load the individual model tensors + if (utils.HAS_ACCELERATE or vars.lazy_load and vars.hascuda and vars.breakmodel) and not vars.nobreakmodel: + device_config(model_config) + + # Download model from Huggingface if it does not exist, otherwise load locally + + #If we specify a model and it's in the root directory, we need to move it to the models directory (legacy folder structure to new) + if os.path.isdir(vars.model.replace('/', '_')): + import shutil + shutil.move(vars.model.replace('/', '_'), "models/{}".format(vars.model.replace('/', '_'))) + if(vars.lazy_load): # If we're using lazy loader, we need to figure out what the model's hidden layers are called + with torch_lazy_loader.use_lazy_torch_load(dematerialized_modules=True, use_accelerate_init_empty_weights=True): + try: + metamodel = AutoModelForCausalLM.from_config(model_config) + except Exception as e: + metamodel = GPTNeoForCausalLM.from_config(model_config) + utils.layers_module_names = utils.get_layers_module_names(metamodel) + utils.module_names = list(metamodel.state_dict().keys()) + utils.named_buffers = list(metamodel.named_buffers(recurse=True)) + with maybe_use_float16(), torch_lazy_loader.use_lazy_torch_load(enable=vars.lazy_load, callback=get_lazy_load_callback(utils.num_layers(model_config)) if vars.lazy_load else None, dematerialized_modules=True): + if(vars.lazy_load): # torch_lazy_loader.py and low_cpu_mem_usage can't be used at the same time + lowmem = {} + if(os.path.isdir(vars.custmodpth)): + try: + tokenizer = AutoTokenizer.from_pretrained(vars.custmodpth, revision=vars.revision, cache_dir="cache", use_fast=False) + except Exception as e: + try: + tokenizer = AutoTokenizer.from_pretrained(vars.custmodpth, revision=vars.revision, cache_dir="cache") + except Exception as e: + try: + tokenizer = GPT2Tokenizer.from_pretrained(vars.custmodpth, revision=vars.revision, cache_dir="cache") + except Exception as e: + tokenizer = GPT2Tokenizer.from_pretrained("gpt2", revision=vars.revision, cache_dir="cache") + try: + model = AutoModelForCausalLM.from_pretrained(vars.custmodpth, revision=vars.revision, cache_dir="cache", **lowmem) + except Exception as e: + if("out of memory" in traceback.format_exc().lower()): + raise RuntimeError("One of your GPUs ran out of memory when KoboldAI tried to load your model.") + model = GPTNeoForCausalLM.from_pretrained(vars.custmodpth, revision=vars.revision, cache_dir="cache", **lowmem) + elif(os.path.isdir("models/{}".format(vars.model.replace('/', '_')))): + try: + tokenizer = AutoTokenizer.from_pretrained("models/{}".format(vars.model.replace('/', '_')), revision=vars.revision, cache_dir="cache", use_fast=False) + except Exception as e: + try: + tokenizer = AutoTokenizer.from_pretrained("models/{}".format(vars.model.replace('/', '_')), revision=vars.revision, cache_dir="cache") + except 
Exception as e: + try: + tokenizer = GPT2Tokenizer.from_pretrained("models/{}".format(vars.model.replace('/', '_')), revision=vars.revision, cache_dir="cache") + except Exception as e: + tokenizer = GPT2Tokenizer.from_pretrained("gpt2", revision=vars.revision, cache_dir="cache") + try: + model = AutoModelForCausalLM.from_pretrained("models/{}".format(vars.model.replace('/', '_')), revision=vars.revision, cache_dir="cache", **lowmem) + except Exception as e: + if("out of memory" in traceback.format_exc().lower()): + raise RuntimeError("One of your GPUs ran out of memory when KoboldAI tried to load your model.") + model = GPTNeoForCausalLM.from_pretrained("models/{}".format(vars.model.replace('/', '_')), revision=vars.revision, cache_dir="cache", **lowmem) + else: + old_rebuild_tensor = torch._utils._rebuild_tensor + def new_rebuild_tensor(storage: Union[torch_lazy_loader.LazyTensor, torch.Storage], storage_offset, shape, stride): + if(not isinstance(storage, torch_lazy_loader.LazyTensor)): + dtype = storage.dtype + else: + dtype = storage.storage_type.dtype + if(not isinstance(dtype, torch.dtype)): + dtype = storage.storage_type(0).dtype + if(dtype is torch.float32 and len(shape) >= 2): + vars.fp32_model = True + return old_rebuild_tensor(storage, storage_offset, shape, stride) + torch._utils._rebuild_tensor = new_rebuild_tensor + + try: + tokenizer = AutoTokenizer.from_pretrained(vars.model, revision=vars.revision, cache_dir="cache", use_fast=False) + except Exception as e: + try: + tokenizer = AutoTokenizer.from_pretrained(vars.model, revision=vars.revision, cache_dir="cache") + except Exception as e: + try: + tokenizer = GPT2Tokenizer.from_pretrained(vars.model, revision=vars.revision, cache_dir="cache") + except Exception as e: + tokenizer = GPT2Tokenizer.from_pretrained("gpt2", revision=vars.revision, cache_dir="cache") + try: + model = AutoModelForCausalLM.from_pretrained(vars.model, revision=vars.revision, cache_dir="cache", **lowmem) + except Exception as e: + if("out of memory" in traceback.format_exc().lower()): + raise RuntimeError("One of your GPUs ran out of memory when KoboldAI tried to load your model.") + model = GPTNeoForCausalLM.from_pretrained(vars.model, revision=vars.revision, cache_dir="cache", **lowmem) + + torch._utils._rebuild_tensor = old_rebuild_tensor + + if not args.colab or args.savemodel: + import shutil + tokenizer.save_pretrained("models/{}".format(vars.model.replace('/', '_'))) + if(vars.fp32_model): # Use save_pretrained to convert fp32 models to fp16 + model = model.half() + model.save_pretrained("models/{}".format(vars.model.replace('/', '_')), max_shard_size="500MiB") + else: # For fp16 models, we can just copy the model files directly + import transformers.configuration_utils + import transformers.modeling_utils + import transformers.file_utils + import huggingface_hub + legacy = packaging.version.parse(transformers_version) < packaging.version.parse("4.22.0.dev0") + # Save the config.json + shutil.move(os.path.realpath(huggingface_hub.hf_hub_download(vars.model, transformers.configuration_utils.CONFIG_NAME, revision=vars.revision, cache_dir="cache", local_files_only=True, legacy_cache_layout=legacy)), os.path.join("models/{}".format(vars.model.replace('/', '_')), transformers.configuration_utils.CONFIG_NAME)) + if(utils.num_shards is None): + # Save the pytorch_model.bin of an unsharded model + shutil.move(os.path.realpath(huggingface_hub.hf_hub_download(vars.model, transformers.modeling_utils.WEIGHTS_NAME, revision=vars.revision, cache_dir="cache", 
local_files_only=True, legacy_cache_layout=legacy)), os.path.join("models/{}".format(vars.model.replace('/', '_')), transformers.modeling_utils.WEIGHTS_NAME)) + else: + with open(utils.from_pretrained_index_filename) as f: + map_data = json.load(f) + filenames = set(map_data["weight_map"].values()) + # Save the pytorch_model.bin.index.json of a sharded model + shutil.move(os.path.realpath(utils.from_pretrained_index_filename), os.path.join("models/{}".format(vars.model.replace('/', '_')), transformers.modeling_utils.WEIGHTS_INDEX_NAME)) + # Then save the pytorch_model-#####-of-#####.bin files + for filename in filenames: + shutil.move(os.path.realpath(huggingface_hub.hf_hub_download(vars.model, filename, revision=vars.revision, cache_dir="cache", local_files_only=True, legacy_cache_layout=legacy)), os.path.join("models/{}".format(vars.model.replace('/', '_')), filename)) + shutil.rmtree("cache/") + + if(vars.badwordsids is vars.badwordsids_default and vars.model_type not in ("gpt2", "gpt_neo", "gptj")): + vars.badwordsids = [[v] for k, v in tokenizer.get_vocab().items() if any(c in str(k) for c in "<>[]") if vars.newlinemode != "s" or str(k) != ""] + + patch_causallm(model) + + if(vars.hascuda): + if(vars.usegpu): + vars.modeldim = get_hidden_size_from_model(model) + model = model.half().to(vars.gpu_device) + generator = model.generate + elif(vars.breakmodel): # Use both RAM and VRAM (breakmodel) + vars.modeldim = get_hidden_size_from_model(model) + if(not vars.lazy_load): + device_config(model.config) + move_model_to_devices(model) + elif(utils.HAS_ACCELERATE and __import__("breakmodel").disk_blocks > 0): + move_model_to_devices(model) + vars.modeldim = get_hidden_size_from_model(model) + generator = model.generate + else: + model = model.to('cpu').float() + vars.modeldim = get_hidden_size_from_model(model) + generator = model.generate + elif(utils.HAS_ACCELERATE and __import__("breakmodel").disk_blocks > 0): + move_model_to_devices(model) + vars.modeldim = get_hidden_size_from_model(model) + generator = model.generate + else: + model.to('cpu').float() + vars.modeldim = get_hidden_size_from_model(model) + generator = model.generate + + # Suppress Author's Note by flagging square brackets (Old implementation) + #vocab = tokenizer.get_vocab() + #vocab_keys = vocab.keys() + #vars.badwords = gettokenids("[") + #for key in vars.badwords: + # vars.badwordsids.append([vocab[key]]) + + logger.info(f"Pipeline created: {vars.model}") + + else: + from transformers import GPT2Tokenizer + tokenizer = GPT2Tokenizer.from_pretrained("gpt2", revision=vars.revision, cache_dir="cache") + else: from transformers import PreTrainedModel from transformers import modeling_utils old_from_pretrained = PreTrainedModel.from_pretrained.__func__ @@ -1250,691 +2667,148 @@ if(not vars.use_colab_tpu and vars.model not in ["InferKit", "Colab", "OAI", "Go return old_get_checkpoint_shard_files(pretrained_model_name_or_path, index_filename, *args, **kwargs) modeling_utils.get_checkpoint_shard_files = new_get_checkpoint_shard_files - # Lazy loader - import torch_lazy_loader - def get_lazy_load_callback(n_layers, convert_to_float16=True): - if not vars.lazy_load: - return - from tqdm.auto import tqdm + def tpumtjgenerate_warper_callback(scores) -> "np.array": + scores_shape = scores.shape + scores_list = scores.tolist() + vars.lua_koboldbridge.logits = vars.lua_state.table() + for r, row in enumerate(scores_list): + vars.lua_koboldbridge.logits[r+1] = vars.lua_state.table(*row) + vars.lua_koboldbridge.vocab_size = 
scores_shape[-1] - if "breakmodel" in globals(): - gpu_blocks = breakmodel.gpu_blocks - ram_blocks = ram_blocks = n_layers - sum(gpu_blocks) - cumulative_gpu_blocks = tuple(itertools.accumulate(gpu_blocks)) - else: - ram_blocks = gpu_blocks = cumulative_gpu_blocks = None + execute_genmod() - def lazy_load_callback(model_dict, f, **_): - if lazy_load_callback.nested: - return - lazy_load_callback.nested = True - - device_map = {} - - for _key, spec in lazy_load_spec.get("layer_weights", {}).items(): - for layer in range(n_layers): - key = _key.format(layer=layer) - if key not in model_dict: - continue - device = vars.gpu_device if vars.hascuda and vars.usegpu else "cpu" if not vars.hascuda or not vars.breakmodel or layer < ram_blocks else bisect.bisect_right(cumulative_gpu_blocks, layer - ram_blocks) - device_map[key] = device - - for key, value in model_dict.items(): - if isinstance(value, torch_lazy_loader.LazyTensor) and key not in device_map: - device_map[key] = vars.gpu_device if vars.hascuda and vars.usegpu else "cpu" - - if utils.num_shards is None or utils.current_shard == 0: - if utils.num_shards is not None: - num_tensors = len(utils.get_sharded_checkpoint_num_tensors(utils.from_pretrained_model_name, utils.from_pretrained_index_filename, **utils.from_pretrained_kwargs)) - else: - num_tensors = len(device_map) - print(flush=True) - utils.bar = tqdm(total=num_tensors, desc="Loading model tensors") - - with zipfile.ZipFile(f, "r") as z: - try: - last_storage_key = None - f = None - current_offset = 0 - if utils.num_shards is not None: - utils.current_shard += 1 - for key in sorted(device_map.keys(), key=lambda k: (model_dict[k].key, model_dict[k].seek_offset)): - storage_key = model_dict[key].key - if storage_key != last_storage_key or model_dict[key].seek_offset < current_offset: - last_storage_key = storage_key - if isinstance(f, zipfile.ZipExtFile): - f.close() - f = z.open(f"archive/data/{storage_key}") - current_offset = 0 - if current_offset != model_dict[key].seek_offset: - f.read(model_dict[key].seek_offset - current_offset) - current_offset = model_dict[key].seek_offset - device = device_map[key] - size = functools.reduce(lambda x, y: x * y, model_dict[key].shape, 1) - dtype = model_dict[key].dtype - nbytes = size if dtype is torch.bool else size * ((torch.finfo if dtype.is_floating_point else torch.iinfo)(dtype).bits >> 3) - #print(f"Transferring <{key}> to {'(CPU)' if device == 'cpu' else '[device ' + str(device) + ']'} ... 
", end="", flush=True) - model_dict[key] = model_dict[key].materialize(f, map_location="cpu") - if model_dict[key].dtype is torch.float32: - vars.fp32_model = True - if convert_to_float16 and vars.hascuda and (vars.breakmodel or vars.usegpu) and model_dict[key].dtype is torch.float32: - model_dict[key] = model_dict[key].to(torch.float16) - if not vars.usegpu and not vars.breakmodel and model_dict[key].dtype is torch.float16: - model_dict[key] = model_dict[key].to(torch.float32) - model_dict[key] = model_dict[key].to(device) - #print("OK", flush=True) - current_offset += nbytes - utils.bar.update(1) - finally: - if utils.num_shards is None or utils.current_shard >= utils.num_shards: - utils.bar.close() - utils.bar = None - lazy_load_callback.nested = False - if isinstance(f, zipfile.ZipExtFile): - f.close() - - lazy_load_callback.nested = False - return lazy_load_callback - - lazy_load_config_path = os.path.join("maps", vars.model_type + ".json") - if(vars.lazy_load and "model_config" in globals() and os.path.isfile(lazy_load_config_path)): - with open(lazy_load_config_path) as f: - lazy_load_spec = json.load(f) - - else: - vars.lazy_load = False - - # Some versions of transformers 4.17.0.dev0 are affected by - # https://github.com/huggingface/transformers/issues/15736 - # This is a workaround for those versions of transformers. - if(transformers_version == "4.17.0.dev0"): - try: - from transformers.models.xglm.modeling_xglm import XGLMSinusoidalPositionalEmbedding - except ImportError: - pass - else: - @torch.no_grad() - def new_forward(self, input_ids: torch.Tensor = None, inputs_embeds: torch.Tensor = None, past_key_values_length: int = 0): - bsz, seq_len = inputs_embeds.size()[:-1] - input_shape = inputs_embeds.size()[:-1] - sequence_length = input_shape[1] - position_ids = torch.arange( - past_key_values_length + self.padding_idx + 1, past_key_values_length + sequence_length + self.padding_idx + 1, dtype=torch.long, device=inputs_embeds.device - ).unsqueeze(0).expand(input_shape).contiguous() - max_pos = self.padding_idx + 1 + seq_len + past_key_values_length - if max_pos > self.weights.size(0): - self.make_weights(max_pos + self.offset, self.embedding_dim, self.padding_idx) - return self.weights.index_select(0, position_ids.view(-1)).view(bsz, seq_len, -1).detach() - XGLMSinusoidalPositionalEmbedding.forward = new_forward - - # Patch transformers to use our soft prompt - def patch_causallm(cls): - old_forward = cls.forward - def new_causallm_forward(self, *args, **kwargs): - input_ids = kwargs.get('input_ids').to(self.device) - assert input_ids is not None - kwargs['input_ids'] = None - if(vars.sp is not None): - shifted_input_ids = input_ids - self.config.vocab_size - input_ids.clamp_(max=self.config.vocab_size-1) - if(hasattr(self, "transformer")): - inputs_embeds = self.transformer.wte(input_ids) - elif(not hasattr(self.model, "decoder")): - inputs_embeds = self.model.embed_tokens(input_ids) - else: - inputs_embeds = self.model.decoder.embed_tokens(input_ids) - if(vars.sp is not None): - vars.sp = vars.sp.to(inputs_embeds.dtype).to(inputs_embeds.device) - inputs_embeds = torch.where( - (shifted_input_ids >= 0)[..., None], - vars.sp[shifted_input_ids.clamp(min=0)], - inputs_embeds, - ) - if(hasattr(self, "model") and hasattr(self.model, "embed_scale")): - inputs_embeds *= self.model.embed_scale - kwargs['inputs_embeds'] = inputs_embeds - return old_forward(self, *args, **kwargs) - cls.forward = new_causallm_forward - for cls in (GPT2LMHeadModel, GPTNeoForCausalLM): - 
patch_causallm(cls) - for c in ("GPTJForCausalLM", "XGLMForCausalLM", "OPTForCausalLM"): - try: - patch_causallm(getattr(__import__("transformers"), c)) - except: - pass - - - # Fix a bug in OPTForCausalLM where self.lm_head is the wrong size - if(packaging.version.parse("4.19.0.dev0") <= packaging.version.parse(transformers_version) < packaging.version.parse("4.20.0")): - try: - from transformers import OPTForCausalLM, OPTModel - except ImportError: - pass - else: - # This is the same as the original __init__ but with - # config.hidden_size - # replaced with - # config.word_embed_proj_dim - def new_init(self, config): - super(OPTForCausalLM, self).__init__(config) - self.model = OPTModel(config) - self.lm_head = torch.nn.Linear(config.word_embed_proj_dim, config.vocab_size, bias=False) - self.post_init() - OPTForCausalLM.__init__ = new_init - - - # Patch transformers to use our custom logit warpers - from transformers import LogitsProcessorList, LogitsWarper, LogitsProcessor, TopKLogitsWarper, TopPLogitsWarper, TemperatureLogitsWarper, RepetitionPenaltyLogitsProcessor - from warpers import AdvancedRepetitionPenaltyLogitsProcessor, TailFreeLogitsWarper, TypicalLogitsWarper, TopALogitsWarper - - def dynamic_processor_wrap(cls, field_name, var_name, cond=None): - old_call = cls.__call__ - def new_call(self, *args, **kwargs): - if(not isinstance(field_name, str) and isinstance(field_name, Iterable)): - conds = [] - for f, v in zip(field_name, var_name): - conds.append(getattr(vars, v)) - setattr(self, f, conds[-1]) - else: - conds = getattr(vars, var_name) - setattr(self, field_name, conds) - assert len(args) == 2 - if(cond is None or cond(conds)): - return old_call(self, *args, **kwargs) - return args[1] - cls.__call__ = new_call - dynamic_processor_wrap(AdvancedRepetitionPenaltyLogitsProcessor, ("penalty", "penalty_slope", "penalty_range"), ("rep_pen", "rep_pen_slope", "rep_pen_range"), cond=lambda x: x[0] != 1.0) - dynamic_processor_wrap(TopKLogitsWarper, "top_k", "top_k", cond=lambda x: x > 0) - dynamic_processor_wrap(TopALogitsWarper, "top_a", "top_a", cond=lambda x: x > 0.0) - dynamic_processor_wrap(TopPLogitsWarper, "top_p", "top_p", cond=lambda x: x < 1.0) - dynamic_processor_wrap(TailFreeLogitsWarper, "tfs", "tfs", cond=lambda x: x < 1.0) - dynamic_processor_wrap(TypicalLogitsWarper, "typical", "typical", cond=lambda x: x < 1.0) - dynamic_processor_wrap(TemperatureLogitsWarper, "temperature", "temp", cond=lambda x: x != 1.0) - RepetitionPenaltyLogitsProcessor.__init__ = AdvancedRepetitionPenaltyLogitsProcessor.__init__ - RepetitionPenaltyLogitsProcessor.__call__ = AdvancedRepetitionPenaltyLogitsProcessor.__call__ - - class LuaLogitsProcessor(LogitsProcessor): - - def __init__(self): - pass - - def __call__(self, input_ids: torch.LongTensor, scores: torch.FloatTensor) -> torch.FloatTensor: - assert scores.ndim == 2 - assert input_ids.ndim == 2 - self.regeneration_required = False - self.halt = False - - scores_shape = scores.shape - scores_list = scores.tolist() - vars.lua_koboldbridge.logits = vars.lua_state.table() - for r, row in enumerate(scores_list): - vars.lua_koboldbridge.logits[r+1] = vars.lua_state.table(*row) - vars.lua_koboldbridge.vocab_size = scores_shape[-1] - - execute_genmod() - - scores = torch.tensor( - tuple(tuple(row.values()) for row in vars.lua_koboldbridge.logits.values()), - device=scores.device, - dtype=scores.dtype, - ) - assert scores.shape == scores_shape - - return scores - - def new_get_logits_processor(*args, **kwargs) -> LogitsProcessorList: - 
processors = new_get_logits_processor.old_get_logits_processor(*args, **kwargs) - processors.insert(0, LuaLogitsProcessor()) - return processors - new_get_logits_processor.old_get_logits_processor = transformers.generation_utils.GenerationMixin._get_logits_processor - transformers.generation_utils.GenerationMixin._get_logits_processor = new_get_logits_processor - - class KoboldLogitsWarperList(LogitsProcessorList): - def __init__(self, beams: int = 1, **kwargs): - self.__warper_list: List[LogitsWarper] = [] - self.__warper_list.append(TopKLogitsWarper(top_k=1, min_tokens_to_keep=1 + (beams > 1))) - self.__warper_list.append(TopALogitsWarper(top_a=0.5, min_tokens_to_keep=1 + (beams > 1))) - self.__warper_list.append(TopPLogitsWarper(top_p=0.5, min_tokens_to_keep=1 + (beams > 1))) - self.__warper_list.append(TailFreeLogitsWarper(tfs=0.5, min_tokens_to_keep=1 + (beams > 1))) - self.__warper_list.append(TypicalLogitsWarper(typical=0.5, min_tokens_to_keep=1 + (beams > 1))) - self.__warper_list.append(TemperatureLogitsWarper(temperature=0.5)) - - def __call__(self, input_ids: torch.LongTensor, scores: torch.FloatTensor, *args, **kwargs): - for k in vars.sampler_order: - scores = self.__warper_list[k](input_ids, scores, *args, **kwargs) - return scores - - def new_get_logits_warper(beams: int = 1,) -> LogitsProcessorList: - return KoboldLogitsWarperList(beams=beams) - - def new_sample(self, *args, **kwargs): - assert kwargs.pop("logits_warper", None) is not None - kwargs["logits_warper"] = new_get_logits_warper( - beams=1, + scores = np.array( + tuple(tuple(row.values()) for row in vars.lua_koboldbridge.logits.values()), + dtype=scores.dtype, ) - if(vars.newlinemode == "s") or (vars.newlinemode == "ns"): - kwargs["eos_token_id"] = -1 - kwargs.setdefault("pad_token_id", 2) - return new_sample.old_sample(self, *args, **kwargs) - new_sample.old_sample = transformers.generation_utils.GenerationMixin.sample - transformers.generation_utils.GenerationMixin.sample = new_sample + assert scores.shape == scores_shape - - # Allow bad words filter to ban <|endoftext|> token - import transformers.generation_logits_process - def new_init(self, bad_words_ids: List[List[int]], eos_token_id: int): - return new_init.old_init(self, bad_words_ids, -1) - new_init.old_init = transformers.generation_logits_process.NoBadWordsLogitsProcessor.__init__ - transformers.generation_logits_process.NoBadWordsLogitsProcessor.__init__ = new_init - - - # Sets up dynamic world info scanner - class DynamicWorldInfoScanCriteria(StoppingCriteria): - def __init__( - self, - tokenizer, - excluded_world_info: List[Set], - ): - self.regeneration_required = False - self.halt = False - self.tokenizer = tokenizer - self.excluded_world_info = excluded_world_info - def __call__( - self, - input_ids: torch.LongTensor, - scores: torch.FloatTensor, - **kwargs, - ) -> bool: - vars.generated_tkns += 1 - if(vars.lua_koboldbridge.generated_cols and vars.generated_tkns != vars.lua_koboldbridge.generated_cols): - raise RuntimeError(f"Inconsistency detected between KoboldAI Python and Lua backends ({vars.generated_tkns} != {vars.lua_koboldbridge.generated_cols})") - if(vars.abort or vars.generated_tkns >= vars.genamt): - self.regeneration_required = False - self.halt = False - return True - - assert input_ids.ndim == 2 - assert len(self.excluded_world_info) == input_ids.shape[0] - self.regeneration_required = vars.lua_koboldbridge.regeneration_required - self.halt = not vars.lua_koboldbridge.generating - vars.lua_koboldbridge.regeneration_required = False 
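# A minimal sketch of the transformers StoppingCriteria hook that the removed
# DynamicWorldInfoScanCriteria above builds on: __call__ runs once per generated
# token and returning True ends generation early. TokenBudgetCriteria and the
# usage line are illustrative assumptions, not code from this patch; it assumes
# a recent transformers release and an already-loaded causal LM as `model`.
import torch
from transformers import StoppingCriteria, StoppingCriteriaList

class TokenBudgetCriteria(StoppingCriteria):
    def __init__(self, max_new_tokens: int):
        self.max_new_tokens = max_new_tokens
        self.generated = 0

    def __call__(self, input_ids: torch.LongTensor, scores: torch.FloatTensor, **kwargs) -> bool:
        # Count one call per newly generated token across the whole batch.
        self.generated += 1
        return self.generated >= self.max_new_tokens

# Hypothetical usage:
#   model.generate(input_ids, stopping_criteria=StoppingCriteriaList([TokenBudgetCriteria(80)]))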
- - for i in range(vars.numseqs): - vars.lua_koboldbridge.generated[i+1][vars.generated_tkns] = int(input_ids[i, -1].item()) - - if(not vars.dynamicscan): - return self.regeneration_required or self.halt - tail = input_ids[..., -vars.generated_tkns:] - for i, t in enumerate(tail): - decoded = utils.decodenewlines(tokenizer.decode(t)) - _, found = checkworldinfo(decoded, force_use_txt=True, actions=vars._actions) - found -= self.excluded_world_info[i] - if(len(found) != 0): - self.regeneration_required = True - break - return self.regeneration_required or self.halt - old_get_stopping_criteria = transformers.generation_utils.GenerationMixin._get_stopping_criteria - def new_get_stopping_criteria(self, *args, **kwargs): - stopping_criteria = old_get_stopping_criteria(self, *args, **kwargs) - global tokenizer - self.kai_scanner = DynamicWorldInfoScanCriteria( - tokenizer=tokenizer, - excluded_world_info=self.kai_scanner_excluded_world_info, - ) - stopping_criteria.insert(0, self.kai_scanner) - return stopping_criteria - transformers.generation_utils.GenerationMixin._get_stopping_criteria = new_get_stopping_criteria - - def get_hidden_size_from_model(model): - try: - return int(model.model.decoder.project_in.in_features) - except: - try: - return int(model.model.decoder.embed_tokens.out_features) - except: - try: - return int(model.transformer.hidden_size) - except: - try: - return int(model.transformer.embed_dim) - except: - return int(model.lm_head.in_features) + return scores - def maybe_low_cpu_mem_usage() -> Dict[str, Any]: - if(packaging.version.parse(transformers_version) < packaging.version.parse("4.11.0")): - print(f"\nWARNING: Please upgrade to transformers 4.11.0 for lower RAM usage. You have transformers {transformers_version}.", file=sys.stderr) - return {} - return {"low_cpu_mem_usage": True} - - @contextlib.contextmanager - def maybe_use_float16(always_use=False): - if(always_use or (vars.hascuda and args.lowmem and (vars.usegpu or vars.breakmodel))): - original_dtype = torch.get_default_dtype() - torch.set_default_dtype(torch.float16) - yield True - torch.set_default_dtype(original_dtype) - else: - yield False + def tpumtjgenerate_stopping_callback(generated, n_generated, excluded_world_info) -> Tuple[List[set], bool, bool]: + vars.generated_tkns += 1 - # If custom GPT2 model was chosen - if(vars.model == "GPT2Custom"): - vars.lazy_load = False - model_config = open(vars.custmodpth + "/config.json", "r") - js = json.load(model_config) - with(maybe_use_float16()): - model = GPT2LMHeadModel.from_pretrained(vars.custmodpth, revision=vars.revision, cache_dir="cache") - tokenizer = GPT2TokenizerFast.from_pretrained(vars.custmodpth, revision=vars.revision, cache_dir="cache") - vars.modeldim = get_hidden_size_from_model(model) - # Is CUDA available? 
If so, use GPU, otherwise fall back to CPU - if(vars.hascuda and vars.usegpu): - model = model.half().to(vars.gpu_device) - generator = model.generate - else: - model = model.to('cpu').float() - generator = model.generate - # Use the Generic implementation - else: - lowmem = maybe_low_cpu_mem_usage() - # We must disable low_cpu_mem_usage (by setting lowmem to {}) if - # using a GPT-2 model because GPT-2 is not compatible with this - # feature yet - if(vars.model_type == "gpt2"): - lowmem = {} - - # If we're using torch_lazy_loader, we need to get breakmodel config - # early so that it knows where to load the individual model tensors - if(vars.lazy_load and vars.hascuda and vars.breakmodel): - device_config(model_config) + assert len(excluded_world_info) == len(generated) + regeneration_required = vars.lua_koboldbridge.regeneration_required + halt = vars.abort or not vars.lua_koboldbridge.generating or vars.generated_tkns >= vars.genamt + vars.lua_koboldbridge.regeneration_required = False - # Download model from Huggingface if it does not exist, otherwise load locally - - #If we specify a model and it's in the root directory, we need to move it to the models directory (legacy folder structure to new) - if os.path.isdir(vars.model.replace('/', '_')): - import shutil - shutil.move(vars.model.replace('/', '_'), "models/{}".format(vars.model.replace('/', '_'))) - print("\n", flush=True) - with maybe_use_float16(), torch_lazy_loader.use_lazy_torch_load(enable=vars.lazy_load, callback=get_lazy_load_callback(utils.num_layers(model_config)) if vars.lazy_load else None, dematerialized_modules=True): - if(vars.lazy_load): # torch_lazy_loader.py and low_cpu_mem_usage can't be used at the same time - lowmem = {} - if(os.path.isdir(vars.custmodpth)): - try: - tokenizer = AutoTokenizer.from_pretrained(vars.custmodpth, revision=vars.revision, cache_dir="cache") - except Exception as e: - pass - try: - tokenizer = AutoTokenizer.from_pretrained(vars.custmodpth, revision=vars.revision, cache_dir="cache", use_fast=False) - except Exception as e: - try: - tokenizer = GPT2TokenizerFast.from_pretrained(vars.custmodpth, revision=vars.revision, cache_dir="cache") - except Exception as e: - tokenizer = GPT2TokenizerFast.from_pretrained("gpt2", revision=vars.revision, cache_dir="cache") - try: - model = AutoModelForCausalLM.from_pretrained(vars.custmodpth, revision=vars.revision, cache_dir="cache", **lowmem) - except Exception as e: - model = GPTNeoForCausalLM.from_pretrained(vars.custmodpth, revision=vars.revision, cache_dir="cache", **lowmem) - elif(os.path.isdir("models/{}".format(vars.model.replace('/', '_')))): - try: - tokenizer = AutoTokenizer.from_pretrained("models/{}".format(vars.model.replace('/', '_')), revision=vars.revision, cache_dir="cache") - except Exception as e: - pass - try: - tokenizer = AutoTokenizer.from_pretrained("models/{}".format(vars.model.replace('/', '_')), revision=vars.revision, cache_dir="cache", use_fast=False) - except Exception as e: - try: - tokenizer = GPT2TokenizerFast.from_pretrained("models/{}".format(vars.model.replace('/', '_')), revision=vars.revision, cache_dir="cache") - except Exception as e: - tokenizer = GPT2TokenizerFast.from_pretrained("gpt2", revision=vars.revision, cache_dir="cache") - try: - model = AutoModelForCausalLM.from_pretrained("models/{}".format(vars.model.replace('/', '_')), revision=vars.revision, cache_dir="cache", **lowmem) - except Exception as e: - model = GPTNeoForCausalLM.from_pretrained("models/{}".format(vars.model.replace('/', '_')), 
revision=vars.revision, cache_dir="cache", **lowmem) - else: - old_rebuild_tensor = torch._utils._rebuild_tensor - def new_rebuild_tensor(storage: Union[torch_lazy_loader.LazyTensor, torch.Storage], storage_offset, shape, stride): - if(not isinstance(storage, torch_lazy_loader.LazyTensor)): - dtype = storage.dtype - else: - dtype = storage.storage_type.dtype - if(not isinstance(dtype, torch.dtype)): - dtype = storage.storage_type(0).dtype - if(dtype is torch.float32 and len(shape) >= 2): - vars.fp32_model = True - return old_rebuild_tensor(storage, storage_offset, shape, stride) - torch._utils._rebuild_tensor = new_rebuild_tensor + global past - try: - tokenizer = AutoTokenizer.from_pretrained(vars.model, revision=vars.revision, cache_dir="cache") - except Exception as e: - pass - try: - tokenizer = AutoTokenizer.from_pretrained(vars.model, revision=vars.revision, cache_dir="cache", use_fast=False) - except Exception as e: - try: - tokenizer = GPT2TokenizerFast.from_pretrained(vars.model, revision=vars.revision, cache_dir="cache") - except Exception as e: - tokenizer = GPT2TokenizerFast.from_pretrained("gpt2", revision=vars.revision, cache_dir="cache") - try: - model = AutoModelForCausalLM.from_pretrained(vars.model, revision=vars.revision, cache_dir="cache", **lowmem) - except Exception as e: - model = GPTNeoForCausalLM.from_pretrained(vars.model, revision=vars.revision, cache_dir="cache", **lowmem) + for i in range(vars.numseqs): + vars.lua_koboldbridge.generated[i+1][vars.generated_tkns] = int(generated[i, tpu_mtj_backend.params["seq"] + n_generated - 1].item()) - torch._utils._rebuild_tensor = old_rebuild_tensor + if(not vars.dynamicscan or halt): + return excluded_world_info, regeneration_required, halt - if not args.colab or args.savemodel: - import shutil - tokenizer.save_pretrained("models/{}".format(vars.model.replace('/', '_'))) - if(vars.fp32_model): # Use save_pretrained to convert fp32 models to fp16 - model = model.half() - model.save_pretrained("models/{}".format(vars.model.replace('/', '_')), max_shard_size="500MiB") - else: # For fp16 models, we can just copy the model files directly - import transformers.configuration_utils - import transformers.modeling_utils - import transformers.file_utils - import huggingface_hub - legacy = packaging.version.parse(transformers_version) < packaging.version.parse("4.22.0.dev0") - # Save the config.json - shutil.move(os.path.realpath(huggingface_hub.hf_hub_download(vars.model, transformers.configuration_utils.CONFIG_NAME, revision=vars.revision, cache_dir="cache", local_files_only=True, legacy_cache_layout=legacy)), os.path.join("models/{}".format(vars.model.replace('/', '_')), transformers.configuration_utils.CONFIG_NAME)) - if(utils.num_shards is None): - # Save the pytorch_model.bin of an unsharded model - shutil.move(os.path.realpath(huggingface_hub.hf_hub_download(vars.model, transformers.modeling_utils.WEIGHTS_NAME, revision=vars.revision, cache_dir="cache", local_files_only=True, legacy_cache_layout=legacy)), os.path.join("models/{}".format(vars.model.replace('/', '_')), transformers.modeling_utils.WEIGHTS_NAME)) - else: - with open(utils.from_pretrained_index_filename) as f: - map_data = json.load(f) - filenames = set(map_data["weight_map"].values()) - # Save the pytorch_model.bin.index.json of a sharded model - shutil.move(os.path.realpath(utils.from_pretrained_index_filename), os.path.join("models/{}".format(vars.model.replace('/', '_')), transformers.modeling_utils.WEIGHTS_INDEX_NAME)) - # Then save the 
pytorch_model-#####-of-#####.bin files - for filename in filenames: - shutil.move(os.path.realpath(huggingface_hub.hf_hub_download(vars.model, filename, revision=vars.revision, cache_dir="cache", local_files_only=True, legacy_cache_layout=legacy)), os.path.join("models/{}".format(vars.model.replace('/', '_')), filename)) - shutil.rmtree("cache/") - - if(vars.hascuda): - if(vars.usegpu): - vars.modeldim = get_hidden_size_from_model(model) - model = model.half().to(vars.gpu_device) - generator = model.generate - elif(vars.breakmodel): # Use both RAM and VRAM (breakmodel) - vars.modeldim = get_hidden_size_from_model(model) - if(not vars.lazy_load): - device_config(model.config) - move_model_to_devices(model) - else: - model = model.to('cpu').float() - vars.modeldim = get_hidden_size_from_model(model) - generator = model.generate - else: - model.to('cpu').float() - vars.modeldim = get_hidden_size_from_model(model) - generator = model.generate - - # Suppress Author's Note by flagging square brackets (Old implementation) - #vocab = tokenizer.get_vocab() - #vocab_keys = vocab.keys() - #vars.badwords = gettokenids("[") - #for key in vars.badwords: - # vars.badwordsids.append([vocab[key]]) - - print("{0}OK! {1} pipeline created!{2}".format(colors.GREEN, vars.model, colors.END)) - - else: - from transformers import GPT2TokenizerFast - tokenizer = GPT2TokenizerFast.from_pretrained("gpt2", revision=vars.revision, cache_dir="cache") -else: - from transformers import PreTrainedModel - from transformers import modeling_utils - old_from_pretrained = PreTrainedModel.from_pretrained.__func__ - @classmethod - def new_from_pretrained(cls, pretrained_model_name_or_path, *model_args, **kwargs): - vars.fp32_model = False - utils.num_shards = None - utils.current_shard = 0 - utils.from_pretrained_model_name = pretrained_model_name_or_path - utils.from_pretrained_index_filename = None - utils.from_pretrained_kwargs = kwargs - utils.bar = None - if not args.no_aria2: - utils.aria2_hook(pretrained_model_name_or_path, **kwargs) - return old_from_pretrained(cls, pretrained_model_name_or_path, *model_args, **kwargs) - PreTrainedModel.from_pretrained = new_from_pretrained - if(hasattr(modeling_utils, "get_checkpoint_shard_files")): - old_get_checkpoint_shard_files = modeling_utils.get_checkpoint_shard_files - def new_get_checkpoint_shard_files(pretrained_model_name_or_path, index_filename, *args, **kwargs): - utils.num_shards = utils.get_num_shards(index_filename) - utils.from_pretrained_index_filename = index_filename - return old_get_checkpoint_shard_files(pretrained_model_name_or_path, index_filename, *args, **kwargs) - modeling_utils.get_checkpoint_shard_files = new_get_checkpoint_shard_files - - def tpumtjgetsofttokens(): - soft_tokens = None - if(vars.sp is None): - global np - if 'np' not in globals(): - import numpy as np - tensor = np.zeros((1, tpu_mtj_backend.params.get("d_embed", tpu_mtj_backend.params["d_model"])), dtype=np.float32) - rows = tensor.shape[0] - padding_amount = tpu_mtj_backend.params["seq"] - (tpu_mtj_backend.params["seq"] % -tpu_mtj_backend.params["cores_per_replica"]) - rows - tensor = np.pad(tensor, ((0, padding_amount), (0, 0))) - tensor = tensor.reshape( - tpu_mtj_backend.params["cores_per_replica"], - -1, - tpu_mtj_backend.params.get("d_embed", tpu_mtj_backend.params["d_model"]), - ) - vars.sp = tpu_mtj_backend.shard_xmap(tensor) - soft_tokens = np.arange( - tpu_mtj_backend.params["n_vocab"] + tpu_mtj_backend.params["n_vocab_padding"], - tpu_mtj_backend.params["n_vocab"] + 
tpu_mtj_backend.params["n_vocab_padding"] + vars.sp_length, - dtype=np.uint32 - ) - return soft_tokens - - def tpumtjgenerate_warper_callback(scores) -> "np.array": - scores_shape = scores.shape - scores_list = scores.tolist() - vars.lua_koboldbridge.logits = vars.lua_state.table() - for r, row in enumerate(scores_list): - vars.lua_koboldbridge.logits[r+1] = vars.lua_state.table(*row) - vars.lua_koboldbridge.vocab_size = scores_shape[-1] - - execute_genmod() - - scores = np.array( - tuple(tuple(row.values()) for row in vars.lua_koboldbridge.logits.values()), - dtype=scores.dtype, - ) - assert scores.shape == scores_shape - - return scores - - def tpumtjgenerate_stopping_callback(generated, n_generated, excluded_world_info) -> Tuple[List[set], bool, bool]: - vars.generated_tkns += 1 - - assert len(excluded_world_info) == len(generated) - regeneration_required = vars.lua_koboldbridge.regeneration_required - halt = vars.abort or not vars.lua_koboldbridge.generating or vars.generated_tkns >= vars.genamt - vars.lua_koboldbridge.regeneration_required = False - - global past - - for i in range(vars.numseqs): - vars.lua_koboldbridge.generated[i+1][vars.generated_tkns] = int(generated[i, tpu_mtj_backend.params["seq"] + n_generated - 1].item()) - - if(not vars.dynamicscan or halt): + for i, t in enumerate(generated): + decoded = utils.decodenewlines(tokenizer.decode(past[i])) + utils.decodenewlines(tokenizer.decode(t[tpu_mtj_backend.params["seq"] : tpu_mtj_backend.params["seq"] + n_generated])) + _, found = checkworldinfo(decoded, force_use_txt=True, actions=vars._actions) + found -= excluded_world_info[i] + if(len(found) != 0): + regeneration_required = True + break return excluded_world_info, regeneration_required, halt - for i, t in enumerate(generated): - decoded = utils.decodenewlines(tokenizer.decode(past[i])) + utils.decodenewlines(tokenizer.decode(t[tpu_mtj_backend.params["seq"] : tpu_mtj_backend.params["seq"] + n_generated])) - _, found = checkworldinfo(decoded, force_use_txt=True, actions=vars._actions) - found -= excluded_world_info[i] - if(len(found) != 0): - regeneration_required = True - break - return excluded_world_info, regeneration_required, halt + def tpumtjgenerate_compiling_callback() -> None: + print(colors.GREEN + "TPU backend compilation triggered" + colors.END) + vars.compiling = True - def tpumtjgenerate_compiling_callback() -> None: - print(colors.GREEN + "TPU backend compilation triggered" + colors.END) - vars.compiling = True + def tpumtjgenerate_stopped_compiling_callback() -> None: + vars.compiling = False + + def tpumtjgenerate_settings_callback() -> dict: + sampler_order = vars.sampler_order[:] + if len(sampler_order) < 7: # Add repetition penalty at beginning if it's not present + sampler_order = [6] + sampler_order + return { + "sampler_order": sampler_order, + "top_p": float(vars.top_p), + "temp": float(vars.temp), + "top_k": int(vars.top_k), + "tfs": float(vars.tfs), + "typical": float(vars.typical), + "top_a": float(vars.top_a), + "repetition_penalty": float(vars.rep_pen), + "rpslope": float(vars.rep_pen_slope), + "rprange": int(vars.rep_pen_range), + } - def tpumtjgenerate_stopped_compiling_callback() -> None: - vars.compiling = False + # If we're running Colab or OAI, we still need a tokenizer. 
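# The removed tpumtjgetsofttokens() helper above rounds the soft-prompt length up
# to the next multiple of cores_per_replica before sharding it across TPU cores,
# using Python's negative-modulo trick. A tiny standalone illustration of just that
# arithmetic; round_up is a hypothetical name, not a function from this patch.
def round_up(n: int, k: int) -> int:
    # n % -k lies in (-k, 0], so subtracting it rounds n up to a multiple of k.
    return n - (n % -k)

assert round_up(2048, 8) == 2048
assert round_up(2049, 8) == 2056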
+ if(vars.model in ("Colab", "API", "CLUSTER")): + from transformers import GPT2Tokenizer + tokenizer = GPT2Tokenizer.from_pretrained("EleutherAI/gpt-neo-2.7B", revision=vars.revision, cache_dir="cache") + loadsettings() + elif(vars.model == "OAI"): + from transformers import GPT2Tokenizer + tokenizer = GPT2Tokenizer.from_pretrained("gpt2", revision=vars.revision, cache_dir="cache") + loadsettings() + # Load the TPU backend if requested + elif(vars.use_colab_tpu or vars.model in ("TPUMeshTransformerGPTJ", "TPUMeshTransformerGPTNeoX")): + global tpu_mtj_backend + import tpu_mtj_backend + if(vars.model == "TPUMeshTransformerGPTNeoX"): + vars.badwordsids = vars.badwordsids_neox + print("{0}Initializing Mesh Transformer JAX, please wait...{1}".format(colors.PURPLE, colors.END)) + if vars.model in ("TPUMeshTransformerGPTJ", "TPUMeshTransformerGPTNeoX") and (not vars.custmodpth or not os.path.isdir(vars.custmodpth)): + raise FileNotFoundError(f"The specified model path {repr(vars.custmodpth)} is not the path to a valid folder") + import tpu_mtj_backend + if(vars.model == "TPUMeshTransformerGPTNeoX"): + tpu_mtj_backend.pad_token_id = 2 + tpu_mtj_backend.vars = vars + tpu_mtj_backend.warper_callback = tpumtjgenerate_warper_callback + tpu_mtj_backend.stopping_callback = tpumtjgenerate_stopping_callback + tpu_mtj_backend.compiling_callback = tpumtjgenerate_compiling_callback + tpu_mtj_backend.stopped_compiling_callback = tpumtjgenerate_stopped_compiling_callback + tpu_mtj_backend.settings_callback = tpumtjgenerate_settings_callback + vars.allowsp = True + loadmodelsettings() + loadsettings() + tpu_mtj_backend.load_model(vars.custmodpth, hf_checkpoint=vars.model not in ("TPUMeshTransformerGPTJ", "TPUMeshTransformerGPTNeoX") and vars.use_colab_tpu, **vars.modelconfig) + vars.modeldim = int(tpu_mtj_backend.params.get("d_embed", tpu_mtj_backend.params["d_model"])) + tokenizer = tpu_mtj_backend.tokenizer + if(vars.badwordsids is vars.badwordsids_default and vars.model_type not in ("gpt2", "gpt_neo", "gptj")): + vars.badwordsids = [[v] for k, v in tokenizer.get_vocab().items() if any(c in str(k) for c in "<>[]") if vars.newlinemode != "s" or str(k) != ""] + else: + loadsettings() - def tpumtjgenerate_settings_callback() -> dict: - return { - "sampler_order": vars.sampler_order, - "top_p": float(vars.top_p), - "temp": float(vars.temp), - "top_k": int(vars.top_k), - "tfs": float(vars.tfs), - "typical": float(vars.typical), - "top_a": float(vars.top_a), - "repetition_penalty": float(vars.rep_pen), - "rpslope": float(vars.rep_pen_slope), - "rprange": int(vars.rep_pen_range), - } + lua_startup() + # Load scripts + load_lua_scripts() + + final_startup() + if not initial_load: + set_aibusy(False) + emit('from_server', {'cmd': 'hide_model_name'}, broadcast=True) + time.sleep(0.1) + + if not vars.gamestarted: + setStartState() + sendsettings() + refresh_settings() - # If we're running Colab or OAI, we still need a tokenizer. 
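# The badwordsids derivation above bans every vocabulary entry whose text contains
# one of the characters <, >, [ or ], so the model cannot emit markup-style tokens
# (the actual line also special-cases vars.newlinemode). A condensed sketch of the
# same idea; build_badwords is a hypothetical helper, not part of this patch.
def build_badwords(tokenizer, banned_chars: str = "<>[]"):
    return [
        [token_id]
        for token, token_id in tokenizer.get_vocab().items()
        if any(c in str(token) for c in banned_chars)
    ]

# Hypothetical usage with transformers' generate():
#   model.generate(input_ids, bad_words_ids=build_badwords(tokenizer))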
- if(vars.model == "Colab"): - from transformers import GPT2TokenizerFast - tokenizer = GPT2TokenizerFast.from_pretrained("EleutherAI/gpt-neo-2.7B", revision=vars.revision, cache_dir="cache") - loadsettings() - elif(vars.model == "OAI"): - from transformers import GPT2TokenizerFast - tokenizer = GPT2TokenizerFast.from_pretrained("gpt2", revision=vars.revision, cache_dir="cache") - loadsettings() - # Load the TPU backend if requested - elif(vars.use_colab_tpu or vars.model in ("TPUMeshTransformerGPTJ", "TPUMeshTransformerGPTNeoX")): - if(vars.model == "TPUMeshTransformerGPTNeoX"): - vars.badwordsids = vars.badwordsids_neox - print("{0}Initializing Mesh Transformer JAX, please wait...{1}".format(colors.PURPLE, colors.END)) - if vars.model in ("TPUMeshTransformerGPTJ", "TPUMeshTransformerGPTNeoX") and (not vars.custmodpth or not os.path.isdir(vars.custmodpth)): - raise FileNotFoundError(f"The specified model path {repr(vars.custmodpth)} is not the path to a valid folder") - import tpu_mtj_backend - if(vars.model_type == "opt"): - tpu_mtj_backend.pad_token_id = 1 - elif(vars.model == "TPUMeshTransformerGPTNeoX" or vars.model_type == "gpt_neox"): - tpu_mtj_backend.pad_token_id = 2 - tpu_mtj_backend.vars = vars - tpu_mtj_backend.warper_callback = tpumtjgenerate_warper_callback - tpu_mtj_backend.stopping_callback = tpumtjgenerate_stopping_callback - tpu_mtj_backend.compiling_callback = tpumtjgenerate_compiling_callback - tpu_mtj_backend.stopped_compiling_callback = tpumtjgenerate_stopped_compiling_callback - tpu_mtj_backend.settings_callback = tpumtjgenerate_settings_callback - vars.allowsp = True - loadmodelsettings() - loadsettings() - tpu_mtj_backend.load_model(vars.custmodpth, hf_checkpoint=vars.model not in ("TPUMeshTransformerGPTJ", "TPUMeshTransformerGPTNeoX") and vars.use_colab_tpu, **vars.modelconfig) - vars.modeldim = int(tpu_mtj_backend.params.get("d_embed", tpu_mtj_backend.params["d_model"])) - tokenizer = tpu_mtj_backend.tokenizer - else: - loadsettings() # Set up Flask routes @app.route('/') @app.route('/index') def index(): - return render_template('index.html') + if args.no_ui: + return redirect('/api/latest') + else: + return render_template('index.html', hide_ai_menu=args.noaimenu) +@app.route('/api', strict_slashes=False) +def api(): + return redirect('/api/latest') +@app.route('/favicon.ico') +def favicon(): + return send_from_directory(app.root_path, + 'koboldai.ico', mimetype='image/vnd.microsoft.icon') @app.route('/download') def download(): + if args.no_ui: + raise NotFound() + save_format = request.args.get("format", "json").strip().lower() if(save_format == "plaintext"): @@ -1979,29 +2853,67 @@ def download(): #============================ LUA API =============================# +_bridged = {} +F = TypeVar("F", bound=Callable) +def lua_startup(): + global _bridged + global F + global bridged + if(path.exists(get_config_filename())): + file = open(get_config_filename(), "r") + js = json.load(file) + if("userscripts" in js): + vars.userscripts = [] + for userscript in js["userscripts"]: + if type(userscript) is not str: + continue + userscript = userscript.strip() + if len(userscript) != 0 and all(q not in userscript for q in ("..", ":")) and all(userscript[0] not in q for q in ("/", "\\")) and os.path.exists(fileops.uspath(userscript)): + vars.userscripts.append(userscript) + if("corescript" in js and type(js["corescript"]) is str and all(q not in js["corescript"] for q in ("..", ":")) and all(js["corescript"][0] not in q for q in ("/", "\\"))): + vars.corescript = 
js["corescript"] + else: + vars.corescript = "default.lua" + file.close() + + #==================================================================# + # Lua runtime startup + #==================================================================# + + print("", end="", flush=True) + logger.init("LUA bridge", status="Starting") + + # Set up Lua state + vars.lua_state = lupa.LuaRuntime(unpack_returned_tuples=True) + + # Load bridge.lua + bridged = { + "corescript_path": "cores", + "userscript_path": "userscripts", + "config_path": "userscripts", + "lib_paths": vars.lua_state.table("lualibs", os.path.join("extern", "lualibs")), + "vars": vars, + } + for kwarg in _bridged: + bridged[kwarg] = _bridged[kwarg] + try: + vars.lua_kobold, vars.lua_koboldcore, vars.lua_koboldbridge = vars.lua_state.globals().dofile("bridge.lua")( + vars.lua_state.globals().python, + bridged, + ) + except lupa.LuaError as e: + print(colors.RED + "ERROR!" + colors.END) + vars.lua_koboldbridge.obliterate_multiverse() + logger.debug('LUA ERROR: ' + str(e).replace("\033", "")) + logger.warning("Lua engine stopped; please open 'Userscripts' and press Load to reinitialize scripts.") + exit(1) + logger.init_ok("LUA bridge", status="OK") -if(path.exists("settings/" + getmodelname().replace('/', '_') + ".settings")): - file = open("settings/" + getmodelname().replace('/', '_') + ".settings", "r") - js = json.load(file) - if("userscripts" in js): - vars.userscripts = [] - for userscript in js["userscripts"]: - if type(userscript) is not str: - continue - userscript = userscript.strip() - if len(userscript) != 0 and all(q not in userscript for q in ("..", ":")) and all(userscript[0] not in q for q in ("/", "\\")) and os.path.exists(fileops.uspath(userscript)): - vars.userscripts.append(userscript) - if("corescript" in js and type(js["corescript"]) is str and all(q not in js["corescript"] for q in ("..", ":")) and all(js["corescript"][0] not in q for q in ("/", "\\"))): - vars.corescript = js["corescript"] - else: - vars.corescript = "default.lua" - file.close() def lua_log_format_name(name): return f"[{name}]" if type(name) is str else "CORE" -_bridged = {} -F = TypeVar("F", bound=Callable) + def bridged_kwarg(name=None): def _bridged_kwarg(f: F): _bridged[name if name is not None else f.__name__[4:] if f.__name__[:4] == "lua_" else f.__name__] = f @@ -2019,7 +2931,7 @@ def load_callback(filename, modulename): # Load all Lua scripts #==================================================================# def load_lua_scripts(): - print(colors.GREEN + "Loading Core Script" + colors.END) + logger.init("LUA Scripts", status="Starting") filenames = [] modulenames = [] @@ -2051,11 +2963,11 @@ def load_lua_scripts(): if(vars.serverstarted): emit('from_server', {'cmd': 'errmsg', 'data': 'Lua script error; please check console.'}, broadcast=True) sendUSStatItems() - print("{0}{1}{2}".format(colors.RED, "***LUA ERROR***: ", colors.END), end="", file=sys.stderr) - print("{0}{1}{2}".format(colors.RED, str(e).replace("\033", ""), colors.END), file=sys.stderr) - print("{0}{1}{2}".format(colors.YELLOW, "Lua engine stopped; please open 'Userscripts' and press Load to reinitialize scripts.", colors.END), file=sys.stderr) + logger.debug('LUA ERROR: ' + str(e).replace("\033", "")) + logger.warning("Lua engine stopped; please open 'Userscripts' and press Load to reinitialize scripts.") if(vars.serverstarted): set_aibusy(0) + logger.init_ok("LUA Scripts", status="OK") #==================================================================# # Print message that 
originates from the userscript with the given name @@ -2085,9 +2997,9 @@ def lua_decode(tokens): tokens = list(tokens.values()) assert type(tokens) is list if("tokenizer" not in globals()): - from transformers import GPT2TokenizerFast + from transformers import GPT2Tokenizer global tokenizer - tokenizer = GPT2TokenizerFast.from_pretrained("gpt2", revision=vars.revision, cache_dir="cache") + tokenizer = GPT2Tokenizer.from_pretrained("gpt2", revision=vars.revision, cache_dir="cache") return utils.decodenewlines(tokenizer.decode(tokens)) #==================================================================# @@ -2097,9 +3009,9 @@ def lua_decode(tokens): def lua_encode(string): assert type(string) is str if("tokenizer" not in globals()): - from transformers import GPT2TokenizerFast + from transformers import GPT2Tokenizer global tokenizer - tokenizer = GPT2TokenizerFast.from_pretrained("gpt2", revision=vars.revision, cache_dir="cache") + tokenizer = GPT2Tokenizer.from_pretrained("gpt2", revision=vars.revision, cache_dir="cache") return tokenizer.encode(utils.encodenewlines(string), max_length=int(4e9), truncation=True) #==================================================================# @@ -2294,6 +3206,8 @@ def lua_has_setting(setting): "rmspch", "adsnsp", "singleline", + "output_streaming", + "show_probs" ) #==================================================================# @@ -2325,6 +3239,8 @@ def lua_get_setting(setting): if(setting in ("frmtrmspch", "rmspch")): return vars.formatoptns["frmttrmspch"] if(setting in ("frmtadsnsp", "adsnsp")): return vars.formatoptns["frmtadsnsp"] if(setting in ("frmtsingleline", "singleline")): return vars.formatoptns["singleline"] + if(setting == "output_streaming"): return vars.output_streaming + if(setting == "show_probs"): return vars.show_probs #==================================================================# # Set the setting with the given name if it exists @@ -2361,6 +3277,8 @@ def lua_set_setting(setting, v): if(setting in ("frmtrmspch", "rmspch")): vars.formatoptns["frmttrmspch"] = v if(setting in ("frmtadsnsp", "adsnsp")): vars.formatoptns["frmtadsnsp"] = v if(setting in ("frmtsingleline", "singleline")): vars.formatoptns["singleline"] = v + if(setting == "output_streaming"): vars.output_streaming = v + if(setting == "show_probs"): vars.show_probs = v #==================================================================# # Get contents of memory @@ -2462,7 +3380,7 @@ def lua_set_chunk(k, v): def lua_get_modeltype(): if(vars.noai): return "readonly" - if(vars.model in ("Colab", "OAI", "InferKit")): + if(vars.model in ("Colab", "API", "CLUSTER", "OAI", "InferKit")): return "api" if(not vars.use_colab_tpu and vars.model not in ("TPUMeshTransformerGPTJ", "TPUMeshTransformerGPTNeoX") and (vars.model in ("GPT2Custom", "NeoCustom") or vars.model_type in ("gpt2", "gpt_neo", "gptj"))): hidden_size = get_hidden_size_from_model(model) @@ -2491,7 +3409,7 @@ def lua_get_modeltype(): def lua_get_modelbackend(): if(vars.noai): return "readonly" - if(vars.model in ("Colab", "OAI", "InferKit")): + if(vars.model in ("Colab", "API", "CLUSTER", "OAI", "InferKit")): return "api" if(vars.use_colab_tpu or vars.model in ("TPUMeshTransformerGPTJ", "TPUMeshTransformerGPTNeoX")): return "mtj" @@ -2542,9 +3460,8 @@ def execute_inmod(): vars.lua_running = False emit('from_server', {'cmd': 'errmsg', 'data': 'Lua script error; please check console.'}, broadcast=True) sendUSStatItems() - print("{0}{1}{2}".format(colors.RED, "***LUA ERROR***: ", colors.END), end="", 
file=sys.stderr) - print("{0}{1}{2}".format(colors.RED, str(e).replace("\033", ""), colors.END), file=sys.stderr) - print("{0}{1}{2}".format(colors.YELLOW, "Lua engine stopped; please open 'Userscripts' and press Load to reinitialize scripts.", colors.END), file=sys.stderr) + logger.debug('LUA ERROR: ' + str(e).replace("\033", "")) + logger.warning("Lua engine stopped; please open 'Userscripts' and press Load to reinitialize scripts.") set_aibusy(0) def execute_genmod(): @@ -2560,9 +3477,8 @@ def execute_outmod(): vars.lua_running = False emit('from_server', {'cmd': 'errmsg', 'data': 'Lua script error; please check console.'}, broadcast=True) sendUSStatItems() - print("{0}{1}{2}".format(colors.RED, "***LUA ERROR***: ", colors.END), end="", file=sys.stderr) - print("{0}{1}{2}".format(colors.RED, str(e).replace("\033", ""), colors.END), file=sys.stderr) - print("{0}{1}{2}".format(colors.YELLOW, "Lua engine stopped; please open 'Userscripts' and press Load to reinitialize scripts.", colors.END), file=sys.stderr) + logger.debug('LUA ERROR: ' + str(e).replace("\033", "")) + logger.warning("Lua engine stopped; please open 'Userscripts' and press Load to reinitialize scripts.") set_aibusy(0) if(vars.lua_koboldbridge.resend_settings_required): vars.lua_koboldbridge.resend_settings_required = False @@ -2572,41 +3488,7 @@ def execute_outmod(): for k in vars.lua_deleted: inlinedelete(k) -#==================================================================# -# Lua runtime startup -#==================================================================# -print("", end="", flush=True) -print(colors.PURPLE + "Initializing Lua Bridge... " + colors.END, end="", flush=True) - -# Set up Lua state -vars.lua_state = lupa.LuaRuntime(unpack_returned_tuples=True) - -# Load bridge.lua -bridged = { - "corescript_path": "cores", - "userscript_path": "userscripts", - "config_path": "userscripts", - "lib_paths": vars.lua_state.table("lualibs", os.path.join("extern", "lualibs")), - "vars": vars, -} -for kwarg in _bridged: - bridged[kwarg] = _bridged[kwarg] -try: - vars.lua_kobold, vars.lua_koboldcore, vars.lua_koboldbridge = vars.lua_state.globals().dofile("bridge.lua")( - vars.lua_state.globals().python, - bridged, - ) -except lupa.LuaError as e: - print(colors.RED + "ERROR!" + colors.END) - vars.lua_koboldbridge.obliterate_multiverse() - print("{0}{1}{2}".format(colors.RED, "***LUA ERROR***: ", colors.END), end="", file=sys.stderr) - print("{0}{1}{2}".format(colors.RED, str(e).replace("\033", ""), colors.END), file=sys.stderr) - exit(1) -print(colors.GREEN + "OK!" 
+ colors.END) - -# Load scripts -load_lua_scripts() #============================ METHODS =============================# @@ -2616,7 +3498,7 @@ load_lua_scripts() #==================================================================# @socketio.on('connect') def do_connect(): - print("{0}Client connected!{1}".format(colors.GREEN, colors.END)) + logger.info("Client connected!") emit('from_server', {'cmd': 'setchatname', 'data': vars.chatname}) emit('from_server', {'cmd': 'setanotetemplate', 'data': vars.authornotetemplate}) emit('from_server', {'cmd': 'connected', 'smandelete': vars.smandelete, 'smanrename': vars.smanrename, 'modelname': getmodelname()}) @@ -2667,7 +3549,7 @@ def do_connect(): @socketio.on('message') def get_message(msg): if not vars.quiet: - print("{0}Data received:{1}{2}".format(colors.GREEN, msg, colors.END)) + logger.debug(f"Data received: {msg}") # Submit action if(msg['cmd'] == 'submit'): if(vars.mode == "play"): @@ -2942,14 +3824,118 @@ def get_message(msg): sendUSStatItems() elif(msg['cmd'] == 'samplers'): sampler_order = msg["data"] + sampler_order_min_length = 6 + sampler_order_max_length = 7 if(not isinstance(sampler_order, list)): raise ValueError(f"Sampler order must be a list, but got a {type(sampler_order)}") - if(len(sampler_order) != len(vars.sampler_order)): - raise ValueError(f"Sampler order must be a list of length {len(vars.sampler_order)}, but got a list of length {len(sampler_order)}") + if(not (sampler_order_min_length <= len(sampler_order) <= sampler_order_max_length)): + raise ValueError(f"Sampler order must be a list of length greater than or equal to {sampler_order_min_length} and less than or equal to {sampler_order_max_length}, but got a list of length {len(sampler_order)}") if(not all(isinstance(e, int) for e in sampler_order)): raise ValueError(f"Sampler order must be a list of ints, but got a list with at least one non-int element") + if(min(sampler_order) != 0 or max(sampler_order) != len(sampler_order) - 1 or len(set(sampler_order)) != len(sampler_order)): + raise ValueError(f"Sampler order list of length {len(sampler_order)} must be a permutation of the first {len(sampler_order)} nonnegative integers") vars.sampler_order = sampler_order settingschanged() + elif(msg['cmd'] == 'list_model'): + sendModelSelection(menu=msg['data']) + elif(msg['cmd'] == 'load_model'): + logger.debug(f"Selected Model: {vars.model_selected}") + if not os.path.exists("settings/"): + os.mkdir("settings") + changed = True + if not utils.HAS_ACCELERATE: + msg['disk_layers'] = "0" + if os.path.exists("settings/" + vars.model_selected.replace('/', '_') + ".breakmodel"): + with open("settings/" + vars.model_selected.replace('/', '_') + ".breakmodel", "r") as file: + data = file.read().split('\n')[:2] + if len(data) < 2: + data.append("0") + gpu_layers, disk_layers = data + if gpu_layers == msg['gpu_layers'] and disk_layers == msg['disk_layers']: + changed = False + if changed: + if vars.model_selected in ["NeoCustom", "GPT2Custom"]: + filename = "settings/{}.breakmodel".format(os.path.basename(os.path.normpath(vars.custmodpth))) + else: + filename = "settings/{}.breakmodel".format(vars.model_selected.replace('/', '_')) + f = open(filename, "w") + f.write(str(msg['gpu_layers']) + '\n' + str(msg['disk_layers'])) + f.close() + vars.colaburl = msg['url'] + "/request" + vars.model = vars.model_selected + if vars.model == "CLUSTER": + if type(msg['online_model']) is not list: + if msg['online_model'] == '': + vars.cluster_requested_models = [] + else: + 
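# A single requested model arrives from the UI as a bare string, so it is wrapped in a
# one-element list here; downstream code (the Horde payload in sendtocluster and the
# tokenizer selection in actionsubmit) can then treat vars.cluster_requested_models
# uniformly as a list.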
vars.cluster_requested_models = [msg['online_model']] + else: + vars.cluster_requested_models = msg['online_model'] + load_model(use_gpu=msg['use_gpu'], gpu_layers=msg['gpu_layers'], disk_layers=msg['disk_layers'], online_model=msg['online_model']) + elif(msg['cmd'] == 'show_model'): + logger.info(f"Model Name: {getmodelname()}") + emit('from_server', {'cmd': 'show_model_name', 'data': getmodelname()}, broadcast=True) + elif(msg['cmd'] == 'selectmodel'): + # This is run when a model line is selected from the UI (line from the model_menu variable) that is tagged as not a menu + # otherwise we should be running the msg['cmd'] == 'list_model' + + # We have to do a bit of processing though, if we select a custom path, we need to list out the contents of folders + # But if we select something else, we need to potentially show model layers for each GPU + # We might also need to show key input. All of that happens here + + # The data variable will contain the model name. But our Custom lines need a bit more processing + # If we're on a custom line that we have selected a model for, the path variable will be in msg + # so if that's missing we need to run the menu to show the model folders in the models folder + if msg['data'] in ('NeoCustom', 'GPT2Custom') and 'path' not in msg and 'path_modelname' not in msg: + if 'folder' not in msg or vars.host: + folder = "./models" + else: + folder = msg['folder'] + sendModelSelection(menu=msg['data'], folder=folder) + elif msg['data'] in ('NeoCustom', 'GPT2Custom') and 'path_modelname' in msg: + #Here the user entered custom text in the text box. This could be either a model name or a path. + if check_if_dir_is_model(msg['path_modelname']): + vars.model_selected = msg['data'] + vars.custmodpth = msg['path_modelname'] + get_model_info(msg['data'], directory=msg['path']) + else: + vars.model_selected = msg['path_modelname'] + try: + get_model_info(vars.model_selected) + except: + emit('from_server', {'cmd': 'errmsg', 'data': "The model entered doesn't exist."}) + elif msg['data'] in ('NeoCustom', 'GPT2Custom'): + if check_if_dir_is_model(msg['path']): + vars.model_selected = msg['data'] + vars.custmodpth = msg['path'] + get_model_info(msg['data'], directory=msg['path']) + else: + if vars.host: + sendModelSelection(menu=msg['data'], folder="./models") + else: + sendModelSelection(menu=msg['data'], folder=msg['path']) + else: + vars.model_selected = msg['data'] + if 'path' in msg: + vars.custmodpth = msg['path'] + get_model_info(msg['data'], directory=msg['path']) + else: + get_model_info(vars.model_selected) + elif(msg['cmd'] == 'delete_model'): + if "{}/models".format(os.getcwd()) in os.path.abspath(msg['data']) or "{}\\models".format(os.getcwd()) in os.path.abspath(msg['data']): + if check_if_dir_is_model(msg['data']): + logger.warning(f"Someone deleted {msg['data']}") + import shutil + shutil.rmtree(msg['data']) + sendModelSelection(menu=msg['menu']) + else: + logger.error(f"Someone attempted to delete {msg['data']} but this is not a valid model") + else: + logger.critical(f"Someone maliciously attempted to delete {msg['data']}. 
The attempt has been blocked.") + elif(msg['cmd'] == 'OAI_Key_Update'): + get_oai_models(msg['key']) + elif(msg['cmd'] == 'Cluster_Key_Update'): + get_cluster_models(msg) elif(msg['cmd'] == 'loadselect'): vars.loadselect = msg["data"] elif(msg['cmd'] == 'spselect'): @@ -3013,6 +3999,22 @@ def get_message(msg): vars.nogenmod = msg['data'] settingschanged() refresh_settings() + elif(msg['cmd'] == 'setfulldeterminism'): + vars.full_determinism = msg['data'] + settingschanged() + refresh_settings() + elif(msg['cmd'] == 'setoutputstreaming'): + vars.output_streaming = msg['data'] + settingschanged() + refresh_settings() + elif(msg['cmd'] == 'setshowbudget'): + vars.show_budget = msg['data'] + settingschanged() + refresh_settings() + elif(msg['cmd'] == 'setshowprobs'): + vars.show_probs = msg['data'] + settingschanged() + refresh_settings() elif(not vars.host and msg['cmd'] == 'importwi'): wiimportrequest() elif(msg['cmd'] == 'debug'): @@ -3020,6 +4022,39 @@ def get_message(msg): emit('from_server', {'cmd': 'set_debug', 'data': msg['data']}, broadcast=True) if vars.debug: send_debug() + elif(msg['cmd'] == 'getfieldbudget'): + unencoded = msg["data"]["unencoded"] + field = msg["data"]["field"] + + # Tokenizer may be undefined here when a model has not been chosen. + if "tokenizer" not in globals(): + # We don't have a tokenizer, just return nulls. + emit( + 'from_server', + {'cmd': 'showfieldbudget', 'data': {"length": None, "max": None, "field": field}}, + ) + return + + header_length = len(tokenizer._koboldai_header) + max_tokens = vars.max_length - header_length - vars.sp_length - vars.genamt + + if not unencoded: + # Unencoded is empty, just return 0 + emit( + 'from_server', + {'cmd': 'showfieldbudget', 'data': {"length": 0, "max": max_tokens, "field": field}}, + broadcast=True + ) + else: + if field == "anoteinput": + unencoded = buildauthorsnote(unencoded, msg["data"]["anotetemplate"]) + tokens_length = len(tokenizer.encode(unencoded)) + + emit( + 'from_server', + {'cmd': 'showfieldbudget', 'data': {"length": tokens_length, "max": max_tokens, "field": field}}, + broadcast=True + ) #==================================================================# # Send userscripts list to client #==================================================================# @@ -3035,7 +4070,7 @@ def sendUSStatItems(): # KoboldAI Markup Formatting (Mixture of Markdown and sanitized html) #==================================================================# def kml(txt): - txt = txt.replace('\>', '>') + txt = txt.replace('>', '&gt;') txt = bleach.clean(markdown.markdown(txt), tags = ['p', 'em', 'strong', 'code', 'h1', 'h2', 'h3', 'h4', 'h5', 'h6', 'li', 'ul', 'b', 'i', 'a', 'span', 'button'], styles = ['color', 'font-weight'], attributes=['id', 'class', 'style', 'href']) return txt @@ -3059,6 +4094,7 @@ def setStartState(): #==================================================================# def sendsettings(): # Send settings for selected AI type + emit('from_server', {'cmd': 'reset_menus'}) if(vars.model != "InferKit"): for set in gensettings.gensettingstf: emit('from_server', {'cmd': 'addsetting', 'data': set}) @@ -3097,14 +4133,48 @@ def check_for_backend_compilation(): break vars.checking = False -def actionsubmit(data, actionmode=0, force_submit=False, force_prompt_gen=False, disable_recentrng=False): +def actionsubmit(data, actionmode=0, force_submit=False, force_prompt_gen=False, disable_recentrng=False, no_generate=False, ignore_aibusy=False): # Ignore new submissions if the AI is currently busy - if(vars.aibusy): + if(not ignore_aibusy and vars.aibusy): return while(True):
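# The block below (new in this changeset) resolves which tokenizer to use when the backend
# is the remote KoboldAI API or the Horde cluster: it asks the remote API for its model id,
# or takes the first Horde model the player requested, then loads a matching tokenizer.
# A minimal sketch of the fast-then-slow fallback pattern it relies on; the helper name and
# the example model id are illustrative assumptions, not part of this diff:
#
#     from transformers import AutoTokenizer
#
#     def load_tokenizer_with_fallback(tokenizer_id, revision=None, cache_dir="cache"):
#         try:
#             # Prefer the fast (Rust-backed) tokenizer when the model provides one.
#             return AutoTokenizer.from_pretrained(tokenizer_id, revision=revision, cache_dir=cache_dir)
#         except Exception:
#             # Fall back to the slow Python tokenizer otherwise.
#             return AutoTokenizer.from_pretrained(tokenizer_id, revision=revision, cache_dir=cache_dir, use_fast=False)
#
#     tokenizer = load_tokenizer_with_fallback("gpt2")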
set_aibusy(1) + if(vars.model in ["API","CLUSTER"]): + global tokenizer + if vars.model == "API": + tokenizer_id = requests.get( + vars.colaburl[:-8] + "/api/v1/model", + ).json()["result"] + elif len(vars.cluster_requested_models) >= 1: + # If the player has requested one or more models, we use the first one for the tokenizer + tokenizer_id = vars.cluster_requested_models[0] + # The cluster can return any number of possible models for each gen, but this happens after this step + # So at this point, this is unknown + else: + tokenizer_id = "" + if tokenizer_id != vars.api_tokenizer_id: + try: + if(os.path.isdir(tokenizer_id)): + try: + tokenizer = AutoTokenizer.from_pretrained(tokenizer_id, revision=vars.revision, cache_dir="cache") + except: + tokenizer = AutoTokenizer.from_pretrained(tokenizer_id, revision=vars.revision, cache_dir="cache", use_fast=False) + elif(os.path.isdir("models/{}".format(tokenizer_id.replace('/', '_')))): + try: + tokenizer = AutoTokenizer.from_pretrained("models/{}".format(tokenizer_id.replace('/', '_')), revision=vars.revision, cache_dir="cache") + except: + tokenizer = AutoTokenizer.from_pretrained("models/{}".format(tokenizer_id.replace('/', '_')), revision=vars.revision, cache_dir="cache", use_fast=False) + else: + try: + tokenizer = AutoTokenizer.from_pretrained(tokenizer_id, revision=vars.revision, cache_dir="cache") + except: + tokenizer = AutoTokenizer.from_pretrained(tokenizer_id, revision=vars.revision, cache_dir="cache", use_fast=False) + except: + logger.warning(f"Unknown tokenizer {repr(tokenizer_id)}") + vars.api_tokenizer_id = tokenizer_id + if(disable_recentrng): vars.recentrng = vars.recentrngm = None @@ -3131,20 +4201,21 @@ def actionsubmit(data, actionmode=0, force_submit=False, force_prompt_gen=False, if(not vars.gamestarted): vars.submission = data - execute_inmod() + if(not no_generate): + execute_inmod() vars.submission = re.sub(r"[^\S\r\n]*([\r\n]*)$", r"\1", vars.submission) # Remove trailing whitespace, excluding newlines data = vars.submission if(not force_submit and len(data.strip()) == 0): assert False # Start the game vars.gamestarted = True - if(not vars.noai and vars.lua_koboldbridge.generating and (not vars.nopromptgen or force_prompt_gen)): + if(not no_generate and not vars.noai and vars.lua_koboldbridge.generating and (not vars.nopromptgen or force_prompt_gen)): # Save this first action as the prompt vars.prompt = data # Clear the startup text from game screen emit('from_server', {'cmd': 'updatescreen', 'gamestarted': False, 'data': 'Please wait, generating story...'}, broadcast=True) calcsubmit(data) # Run the first action through the generator - if(not vars.abort and vars.lua_koboldbridge.restart_sequence is not None and len(vars.genseqs) == 0): + if(not no_generate and not vars.abort and vars.lua_koboldbridge.restart_sequence is not None and len(vars.genseqs) == 0): data = "" force_submit = True disable_recentrng = True @@ -3156,7 +4227,8 @@ def actionsubmit(data, actionmode=0, force_submit=False, force_prompt_gen=False, vars.prompt = data if len(data) > 0 else '"' for i in range(vars.numseqs): vars.lua_koboldbridge.outputs[i+1] = "" - execute_outmod() + if(not no_generate): + execute_outmod() vars.lua_koboldbridge.regeneration_required = False genout = [] for i in range(vars.numseqs): @@ -3190,7 +4262,8 @@ def actionsubmit(data, actionmode=0, force_submit=False, force_prompt_gen=False, if(vars.actionmode == 0): data = applyinputformatting(data) vars.submission = data - execute_inmod() + if(not no_generate): + execute_inmod() 
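# The new no_generate flag threads through the rest of this function: when it is set, the
# Lua input/output modifiers and the generator calls are skipped, so a submission can be
# pushed through the story pipeline without producing text; ignore_aibusy similarly lets a
# caller bypass the busy check at the top of the function.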
vars.submission = re.sub(r"[^\S\r\n]*([\r\n]*)$", r"\1", vars.submission) # Remove trailing whitespace, excluding newlines data = vars.submission # Dont append submission if it's a blank/continue action @@ -3210,8 +4283,8 @@ def actionsubmit(data, actionmode=0, force_submit=False, force_prompt_gen=False, try: alternatives = [item['Text'] for item in vars.actions_metadata[len(vars.actions)-1]["Alternative Text"]] except: - print(len(vars.actions)) - print(vars.actions_metadata) + logger.debug(len(vars.actions)) + logger.debug(vars.actions_metadata) raise if data in alternatives: alternatives = [item for item in vars.actions_metadata[vars.actions.get_last_key() ]["Alternative Text"] if item['Text'] != data] @@ -3220,7 +4293,7 @@ def actionsubmit(data, actionmode=0, force_submit=False, force_prompt_gen=False, update_story_chunk('last') send_debug() - if(not vars.noai and vars.lua_koboldbridge.generating): + if(not no_generate and not vars.noai and vars.lua_koboldbridge.generating): # Off to the tokenizer! calcsubmit(data) if(not vars.abort and vars.lua_koboldbridge.restart_sequence is not None and len(vars.genseqs) == 0): @@ -3231,23 +4304,24 @@ def actionsubmit(data, actionmode=0, force_submit=False, force_prompt_gen=False, emit('from_server', {'cmd': 'scrolldown', 'data': ''}, broadcast=True) break else: - for i in range(vars.numseqs): - vars.lua_koboldbridge.outputs[i+1] = "" - execute_outmod() - vars.lua_koboldbridge.regeneration_required = False + if(not no_generate): + for i in range(vars.numseqs): + vars.lua_koboldbridge.outputs[i+1] = "" + execute_outmod() + vars.lua_koboldbridge.regeneration_required = False genout = [] for i in range(vars.numseqs): - genout.append({"generated_text": vars.lua_koboldbridge.outputs[i+1]}) + genout.append({"generated_text": vars.lua_koboldbridge.outputs[i+1] if not no_generate else ""}) assert type(genout[-1]["generated_text"]) is str if(len(genout) == 1): genresult(genout[0]["generated_text"]) - if(not vars.abort and vars.lua_koboldbridge.restart_sequence is not None): + if(not no_generate and not vars.abort and vars.lua_koboldbridge.restart_sequence is not None): data = "" force_submit = True disable_recentrng = True continue else: - if(not vars.abort and vars.lua_koboldbridge.restart_sequence is not None and vars.lua_koboldbridge.restart_sequence > 0): + if(not no_generate and not vars.abort and vars.lua_koboldbridge.restart_sequence is not None and vars.lua_koboldbridge.restart_sequence > 0): genresult(genout[vars.lua_koboldbridge.restart_sequence-1]["generated_text"]) data = "" force_submit = True @@ -3258,6 +4332,134 @@ def actionsubmit(data, actionmode=0, force_submit=False, force_prompt_gen=False, emit('from_server', {'cmd': 'scrolldown', 'data': ''}, broadcast=True) break +def apiactionsubmit_generate(txt, minimum, maximum): + vars.generated_tkns = 0 + + if not vars.quiet: + logger.debug(f"Prompt Min:{minimum}, Max:{maximum}") + logger.prompt(utils.decodenewlines(tokenizer.decode(txt)).encode("unicode_escape").decode("utf-8")) + + # Clear CUDA cache if using GPU + if(vars.hascuda and (vars.usegpu or vars.breakmodel)): + gc.collect() + torch.cuda.empty_cache() + + # Submit input text to generator + _genout, already_generated = tpool.execute(_generate, txt, minimum, maximum, set()) + + genout = [applyoutputformatting(utils.decodenewlines(tokenizer.decode(tokens[-already_generated:]))) for tokens in _genout] + + # Clear CUDA cache again if using GPU + if(vars.hascuda and (vars.usegpu or vars.breakmodel)): + del _genout + gc.collect() + 
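# torch.cuda.empty_cache() can only release cached blocks that no live tensor still
# references, which is why the raw generator output is deleted and garbage-collected first.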
torch.cuda.empty_cache() + + return genout + +def apiactionsubmit_tpumtjgenerate(txt, minimum, maximum): + vars.generated_tkns = 0 + + if(vars.full_determinism): + tpu_mtj_backend.set_rng_seed(vars.seed) + + if not vars.quiet: + logger.debug(f"Prompt Min:{minimum}, Max:{maximum}") + logger.prompt(utils.decodenewlines(tokenizer.decode(txt)).encode("unicode_escape").decode("utf-8")) + + vars._actions = vars.actions + vars._prompt = vars.prompt + if(vars.dynamicscan): + vars._actions = vars._actions.copy() + + # Submit input text to generator + soft_tokens = tpumtjgetsofttokens() + genout = tpool.execute( + tpu_mtj_backend.infer_static, + np.uint32(txt), + gen_len = maximum-minimum+1, + temp=vars.temp, + top_p=vars.top_p, + top_k=vars.top_k, + tfs=vars.tfs, + typical=vars.typical, + top_a=vars.top_a, + numseqs=vars.numseqs, + repetition_penalty=vars.rep_pen, + rpslope=vars.rep_pen_slope, + rprange=vars.rep_pen_range, + soft_embeddings=vars.sp, + soft_tokens=soft_tokens, + sampler_order=vars.sampler_order, + ) + genout = [applyoutputformatting(utils.decodenewlines(tokenizer.decode(txt))) for txt in genout] + + return genout + +def apiactionsubmit(data, use_memory=False, use_world_info=False, use_story=False, use_authors_note=False): + if(vars.model == "Colab"): + raise NotImplementedError("API generation is not supported in old Colab API mode.") + elif(vars.model == "API"): + raise NotImplementedError("API generation is not supported in API mode.") + elif(vars.model == "CLUSTER"): + raise NotImplementedError("API generation is not supported in API mode.") + elif(vars.model == "OAI"): + raise NotImplementedError("API generation is not supported in OpenAI/GooseAI mode.") + elif(vars.model == "ReadOnly"): + raise NotImplementedError("API generation is not supported in read-only mode; please load a model and then try again.") + + data = applyinputformatting(data) + + if(vars.memory != "" and vars.memory[-1] != "\n"): + mem = vars.memory + "\n" + else: + mem = vars.memory + if(use_authors_note and vars.authornote != ""): + anotetxt = ("\n" + vars.authornotetemplate + "\n").replace("<|>", vars.authornote) + else: + anotetxt = "" + MIN_STORY_TOKENS = 8 + story_tokens = [] + mem_tokens = [] + wi_tokens = [] + story_budget = lambda: vars.max_length - vars.sp_length - vars.genamt - len(tokenizer._koboldai_header) - len(story_tokens) - len(mem_tokens) - len(wi_tokens) + budget = lambda: story_budget() + MIN_STORY_TOKENS + if budget() < 0: + abort(Response(json.dumps({"detail": { + "msg": f"Your Max Tokens setting is too low for your current soft prompt and tokenizer to handle. 
It needs to be at least {vars.max_length - budget()}.", + "type": "token_overflow", + }}), mimetype="application/json", status=500)) + if use_memory: + mem_tokens = tokenizer.encode(utils.encodenewlines(mem))[-budget():] + if use_world_info: + world_info, _ = checkworldinfo(data, force_use_txt=True, scan_story=use_story) + wi_tokens = tokenizer.encode(utils.encodenewlines(world_info))[-budget():] + if use_story: + if vars.useprompt: + story_tokens = tokenizer.encode(utils.encodenewlines(vars.prompt))[-budget():] + story_tokens = tokenizer.encode(utils.encodenewlines(data))[-story_budget():] + story_tokens + if use_story: + for i, action in enumerate(reversed(vars.actions.values())): + if story_budget() <= 0: + assert story_budget() == 0 + break + story_tokens = tokenizer.encode(utils.encodenewlines(action))[-story_budget():] + story_tokens + if i == vars.andepth - 1: + story_tokens = tokenizer.encode(utils.encodenewlines(anotetxt))[-story_budget():] + story_tokens + if not vars.useprompt: + story_tokens = tokenizer.encode(utils.encodenewlines(vars.prompt))[-budget():] + story_tokens + tokens = tokenizer._koboldai_header + mem_tokens + wi_tokens + story_tokens + assert story_budget() >= 0 + minimum = len(tokens) + 1 + maximum = len(tokens) + vars.genamt + + if(not vars.use_colab_tpu and vars.model not in ["Colab", "API", "CLUSTER", "OAI", "TPUMeshTransformerGPTJ", "TPUMeshTransformerGPTNeoX"]): + genout = apiactionsubmit_generate(tokens, minimum, maximum) + elif(vars.use_colab_tpu or vars.model in ("TPUMeshTransformerGPTJ", "TPUMeshTransformerGPTNeoX")): + genout = apiactionsubmit_tpumtjgenerate(tokens, minimum, maximum) + + return genout + #==================================================================# # #==================================================================# @@ -3349,6 +4551,12 @@ def actionredo(): #==================================================================# # #==================================================================# +def buildauthorsnote(authorsnote, template): + # Build Author's Note if set + if authorsnote == "": + return "" + return ("\n" + template + "\n").replace("<|>", authorsnote) + def calcsubmitbudgetheader(txt, **kwargs): # Scan for WorldInfo matches winfo, found_entries = checkworldinfo(txt, **kwargs) @@ -3359,11 +4567,7 @@ def calcsubmitbudgetheader(txt, **kwargs): else: mem = vars.memory - # Build Author's Note if set - if(vars.authornote != ""): - anotetxt = ("\n" + vars.authornotetemplate + "\n").replace("<|>", vars.authornote) - else: - anotetxt = "" + anotetxt = buildauthorsnote(vars.authornote, vars.authornotetemplate) return winfo, mem, anotetxt, found_entries @@ -3376,9 +4580,9 @@ def calcsubmitbudget(actionlen, winfo, mem, anotetxt, actions, submission=None, lnsp = vars.sp_length if("tokenizer" not in globals()): - from transformers import GPT2TokenizerFast + from transformers import GPT2Tokenizer global tokenizer - tokenizer = GPT2TokenizerFast.from_pretrained("gpt2", revision=vars.revision, cache_dir="cache") + tokenizer = GPT2Tokenizer.from_pretrained("gpt2", revision=vars.revision, cache_dir="cache") lnheader = len(tokenizer._koboldai_header) @@ -3417,7 +4621,7 @@ def calcsubmitbudget(actionlen, winfo, mem, anotetxt, actions, submission=None, if(actionlen == 0): # First/Prompt action - tokens = tokenizer._koboldai_header + memtokens + witokens + anotetkns + prompttkns + tokens = (tokenizer._koboldai_header if vars.model not in ("Colab", "API", "CLUSTER", "OAI") else []) + memtokens + witokens + anotetkns + prompttkns assert 
len(tokens) <= vars.max_length - lnsp - vars.genamt - budget_deduction ln = len(tokens) + lnsp return tokens, ln+1, ln+vars.genamt @@ -3465,12 +4669,12 @@ def calcsubmitbudget(actionlen, winfo, mem, anotetxt, actions, submission=None, # Did we get to add the A.N.? If not, do it here if(anotetxt != ""): if((not anoteadded) or forceanote): - tokens = tokenizer._koboldai_header + memtokens + witokens + anotetkns + prompttkns + tokens + tokens = (tokenizer._koboldai_header if vars.model not in ("Colab", "API", "CLUSTER", "OAI") else []) + memtokens + witokens + anotetkns + prompttkns + tokens else: - tokens = tokenizer._koboldai_header + memtokens + witokens + prompttkns + tokens + tokens = (tokenizer._koboldai_header if vars.model not in ("Colab", "API", "CLUSTER", "OAI") else []) + memtokens + witokens + prompttkns + tokens else: # Prepend Memory, WI, and Prompt before action tokens - tokens = tokenizer._koboldai_header + memtokens + witokens + prompttkns + tokens + tokens = (tokenizer._koboldai_header if vars.model not in ("Colab", "API", "CLUSTER", "OAI") else []) + memtokens + witokens + prompttkns + tokens # Send completed bundle to generator assert len(tokens) <= vars.max_length - lnsp - vars.genamt - budget_deduction @@ -3492,19 +4696,27 @@ def calcsubmit(txt): if(vars.model != "InferKit"): subtxt, min, max = calcsubmitbudget(actionlen, winfo, mem, anotetxt, vars.actions, submission=txt) if(actionlen == 0): - if(not vars.use_colab_tpu and vars.model not in ["Colab", "OAI", "TPUMeshTransformerGPTJ", "TPUMeshTransformerGPTNeoX"]): + if(not vars.use_colab_tpu and vars.model not in ["Colab", "API", "CLUSTER", "OAI", "TPUMeshTransformerGPTJ", "TPUMeshTransformerGPTNeoX"]): generate(subtxt, min, max, found_entries=found_entries) elif(vars.model == "Colab"): sendtocolab(utils.decodenewlines(tokenizer.decode(subtxt)), min, max) + elif(vars.model == "API"): + sendtoapi(utils.decodenewlines(tokenizer.decode(subtxt)), min, max) + elif(vars.model == "CLUSTER"): + sendtocluster(utils.decodenewlines(tokenizer.decode(subtxt)), min, max) elif(vars.model == "OAI"): oairequest(utils.decodenewlines(tokenizer.decode(subtxt)), min, max) elif(vars.use_colab_tpu or vars.model in ("TPUMeshTransformerGPTJ", "TPUMeshTransformerGPTNeoX")): tpumtjgenerate(subtxt, min, max, found_entries=found_entries) else: - if(not vars.use_colab_tpu and vars.model not in ["Colab", "OAI", "TPUMeshTransformerGPTJ", "TPUMeshTransformerGPTNeoX"]): + if(not vars.use_colab_tpu and vars.model not in ["Colab", "API", "CLUSTER", "OAI", "TPUMeshTransformerGPTJ", "TPUMeshTransformerGPTNeoX"]): generate(subtxt, min, max, found_entries=found_entries) elif(vars.model == "Colab"): sendtocolab(utils.decodenewlines(tokenizer.decode(subtxt)), min, max) + elif(vars.model == "API"): + sendtoapi(utils.decodenewlines(tokenizer.decode(subtxt)), min, max) + elif(vars.model == "CLUSTER"): + sendtocluster(utils.decodenewlines(tokenizer.decode(subtxt)), min, max) elif(vars.model == "OAI"): oairequest(utils.decodenewlines(tokenizer.decode(subtxt)), min, max) elif(vars.use_colab_tpu or vars.model in ("TPUMeshTransformerGPTJ", "TPUMeshTransformerGPTNeoX")): @@ -3571,6 +4783,9 @@ def calcsubmit(txt): #==================================================================# def _generate(txt, minimum, maximum, found_entries): + if(vars.full_determinism): + torch.manual_seed(vars.seed) + gen_in = torch.tensor(txt, dtype=torch.long)[None] if(vars.sp is not None): soft_tokens = torch.arange( @@ -3602,7 +4817,7 @@ def _generate(txt, minimum, maximum, found_entries): 
gen_in, do_sample=True, max_length=int(2e9), - repetition_penalty=1.1, + repetition_penalty=1.0, bad_words_ids=vars.badwordsids, use_cache=True, num_return_sequences=numseqs @@ -3662,7 +4877,8 @@ def generate(txt, minimum, maximum, found_entries=None): found_entries = tuple(found_entries.copy() for _ in range(vars.numseqs)) if not vars.quiet: - print("{0}Min:{1}, Max:{2}, Txt:{3}{4}".format(colors.YELLOW, minimum, maximum, utils.decodenewlines(tokenizer.decode(txt)), colors.END)) + logger.debug(f"Prompt Min:{minimum}, Max:{maximum}") + logger.prompt(utils.decodenewlines(tokenizer.decode(txt)).encode("unicode_escape").decode("utf-8")) # Store context in memory to use it for comparison with generated content vars.lastctx = utils.decodenewlines(tokenizer.decode(txt)) @@ -3681,12 +4897,11 @@ def generate(txt, minimum, maximum, found_entries=None): vars.lua_running = False emit('from_server', {'cmd': 'errmsg', 'data': 'Lua script error; please check console.'}, broadcast=True) sendUSStatItems() - print("{0}{1}{2}".format(colors.RED, "***LUA ERROR***: ", colors.END), end="", file=sys.stderr) - print("{0}{1}{2}".format(colors.RED, str(e).replace("\033", ""), colors.END), file=sys.stderr) - print("{0}{1}{2}".format(colors.YELLOW, "Lua engine stopped; please open 'Userscripts' and press Load to reinitialize scripts.", colors.END), file=sys.stderr) + logger.debug('LUA ERROR: ' + str(e).replace("\033", "")) + logger.warning("Lua engine stopped; please open 'Userscripts' and press Load to reinitialize scripts.") else: emit('from_server', {'cmd': 'errmsg', 'data': 'Error occurred during generator call; please check console.'}, broadcast=True) - print("{0}{1}{2}".format(colors.RED, traceback.format_exc().replace("\033", ""), colors.END), file=sys.stderr) + logger.error(traceback.format_exc().replace("\033", "")) set_aibusy(0) return @@ -3725,7 +4940,7 @@ def generate(txt, minimum, maximum, found_entries=None): #==================================================================# def genresult(genout, flash=True, ignore_formatting=False): if not vars.quiet: - print("{0}{1}{2}".format(colors.CYAN, genout, colors.END)) + logger.generation(genout.encode("unicode_escape").decode("utf-8")) # Format output before continuing if not ignore_formatting: @@ -3759,7 +4974,8 @@ def genselect(genout): # Apply output formatting rules to sequences result["generated_text"] = applyoutputformatting(result["generated_text"]) if not vars.quiet: - print("{0}[Result {1}]\n{2}{3}".format(colors.CYAN, i, result["generated_text"], colors.END)) + logger.info(f"Generation Result {i}") + logger.generation(result["generated_text"].encode("unicode_escape").decode("utf-8")) i += 1 # Add the options to the actions metadata @@ -3907,10 +5123,192 @@ def sendtocolab(txt, min, max): emit('from_server', {'cmd': 'errmsg', 'data': errmsg}, broadcast=True) set_aibusy(0) + +#==================================================================# +# Send transformers-style request to KoboldAI API +#==================================================================# +def sendtoapi(txt, min, max): + # Log request to console + if not vars.quiet: + print("{0}Tokens:{1}, Txt:{2}{3}".format(colors.YELLOW, min-1, txt, colors.END)) + + # Store context in memory to use it for comparison with generated content + vars.lastctx = txt + + # Build request JSON data + reqdata = { + 'prompt': txt, + 'max_length': max - min + 1, + 'max_context_length': vars.max_length, + 'rep_pen': vars.rep_pen, + 'rep_pen_slope': vars.rep_pen_slope, + 'rep_pen_range': 
vars.rep_pen_range, + 'temperature': vars.temp, + 'top_p': vars.top_p, + 'top_k': vars.top_k, + 'top_a': vars.top_a, + 'tfs': vars.tfs, + 'typical': vars.typical, + 'n': vars.numseqs, + } + + # Create request + while True: + req = requests.post( + vars.colaburl[:-8] + "/api/v1/generate", + json=reqdata, + ) + if(req.status_code == 503): # Server is currently generating something else so poll until it's our turn + time.sleep(1) + continue + js = req.json() + if(req.status_code != 200): + errmsg = "KoboldAI API Error: Failed to get a reply from the server. Please check the console." + print("{0}{1}{2}".format(colors.RED, json.dumps(js, indent=2), colors.END)) + emit('from_server', {'cmd': 'errmsg', 'data': errmsg}, broadcast=True) + set_aibusy(0) + return + + genout = [obj["text"] for obj in js["results"]] + + for i in range(vars.numseqs): + vars.lua_koboldbridge.outputs[i+1] = genout[i] + + execute_outmod() + if(vars.lua_koboldbridge.regeneration_required): + vars.lua_koboldbridge.regeneration_required = False + genout = [] + for i in range(vars.numseqs): + genout.append(vars.lua_koboldbridge.outputs[i+1]) + assert type(genout[-1]) is str + + if(len(genout) == 1): + genresult(genout[0]) + else: + adjusted_genout = [] + for item in genout: + adjusted_genout.append({"generated_text": item}) + # Convert torch output format to transformers + seqs = [] + for seq in adjusted_genout: + seqs.append({"generated_text": seq}) + if(vars.lua_koboldbridge.restart_sequence is not None and vars.lua_koboldbridge.restart_sequence > 0): + genresult(adjusted_genout[vars.lua_koboldbridge.restart_sequence-1]["generated_text"]) + else: + genselect(adjusted_genout) + + set_aibusy(0) + return + +#==================================================================# +# Send transformers-style request to KoboldAI Cluster +#==================================================================# +def sendtocluster(txt, min, max): + # Log request to console + if not vars.quiet: + logger.debug(f"Tokens Min:{min-1}") + logger.prompt(txt.encode("unicode_escape").decode("utf-8")) + + # Store context in memory to use it for comparison with generated content + vars.lastctx = txt + # Build request JSON data + reqdata = { + 'max_length': max - min + 1, + 'max_context_length': vars.max_length, + 'rep_pen': vars.rep_pen, + 'rep_pen_slope': vars.rep_pen_slope, + 'rep_pen_range': vars.rep_pen_range, + 'temperature': vars.temp, + 'top_p': vars.top_p, + 'top_k': vars.top_k, + 'top_a': vars.top_a, + 'tfs': vars.tfs, + 'typical': vars.typical, + 'n': vars.numseqs, + } + cluster_metadata = { + 'prompt': txt, + 'params': reqdata, + 'api_key': vars.apikey, + 'models': vars.cluster_requested_models, + } + logger.debug(f"Horde Payload: {cluster_metadata}") + try: + # Create request + req = requests.post( + vars.colaburl[:-8] + "/api/v1/generate/sync", + json=cluster_metadata, + ) + except requests.exceptions.ConnectionError: + errmsg = f"Horde unavailable. Please try again later" + logger.error(errmsg) + emit('from_server', {'cmd': 'errmsg', 'data': errmsg}, broadcast=True) + set_aibusy(0) + return + if(req.status_code == 503): + errmsg = f"KoboldAI API Error: No available KoboldAI servers found in Horde to fulfil this request using the selected models or other properties." + logger.error(req.text) + emit('from_server', {'cmd': 'errmsg', 'data': errmsg}, broadcast=True) + set_aibusy(0) + return + if(not req.ok): + errmsg = f"KoboldAI API Error: Failed to get a standard reply from the Horde. Please check the console." 
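# Any remaining non-2xx response (the 503 "no servers available" case was handled above)
# lands here: the raw Horde response body is logged server-side while the client only
# receives the generic error message.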
+ logger.error(req.text) + emit('from_server', {'cmd': 'errmsg', 'data': errmsg}, broadcast=True) + set_aibusy(0) + return + try: + js = req.json() + except requests.exceptions.JSONDecodeError: + errmsg = f"Unexpected message received from the Horde: '{req.text}'" + logger.error(errmsg) + emit('from_server', {'cmd': 'errmsg', 'data': errmsg}, broadcast=True) + set_aibusy(0) + return + gen_servers = [(cgen['server_name'],cgen['server_id']) for cgen in js] + logger.info(f"Generations by: {gen_servers}") + # Just in case we want to announce it to the user + if len(js) == 1: + warnmsg = f"Text generated by {js[0]['server_name']}" + emit('from_server', {'cmd': 'warnmsg', 'data': warnmsg}, broadcast=True) + genout = [cgen['text'] for cgen in js] + + for i in range(vars.numseqs): + vars.lua_koboldbridge.outputs[i+1] = genout[i] + + execute_outmod() + if(vars.lua_koboldbridge.regeneration_required): + vars.lua_koboldbridge.regeneration_required = False + genout = [] + for i in range(vars.numseqs): + genout.append(vars.lua_koboldbridge.outputs[i+1]) + assert type(genout[-1]) is str + + if(len(genout) == 1): + genresult(genout[0]) + else: + adjusted_genout = [] + for item in genout: + adjusted_genout.append({"generated_text": item}) + # Convert torch output format to transformers + seqs = [] + for seq in adjusted_genout: + seqs.append({"generated_text": seq}) + if(vars.lua_koboldbridge.restart_sequence is not None and vars.lua_koboldbridge.restart_sequence > 0): + genresult(adjusted_genout[vars.lua_koboldbridge.restart_sequence-1]["generated_text"]) + else: + genselect(adjusted_genout) + + set_aibusy(0) + return + #==================================================================# # Send text to TPU mesh transformer backend #==================================================================# def tpumtjgenerate(txt, minimum, maximum, found_entries=None): + if(vars.full_determinism): + tpu_mtj_backend.set_rng_seed(vars.seed) + vars.generated_tkns = 0 if(found_entries is None): @@ -3918,7 +5316,8 @@ def tpumtjgenerate(txt, minimum, maximum, found_entries=None): found_entries = tuple(found_entries.copy() for _ in range(vars.numseqs)) if not vars.quiet: - print("{0}Min:{1}, Max:{2}, Txt:{3}{4}".format(colors.YELLOW, minimum, maximum, utils.decodenewlines(tokenizer.decode(txt)), colors.END)) + logger.debug(f"Prompt Min:{minimum}, Max:{maximum}") + logger.prompt(utils.decodenewlines(tokenizer.decode(txt)).encode("unicode_escape").decode("utf-8")) vars._actions = vars.actions vars._prompt = vars.prompt @@ -4006,9 +5405,8 @@ def tpumtjgenerate(txt, minimum, maximum, found_entries=None): vars.lua_running = False emit('from_server', {'cmd': 'errmsg', 'data': 'Lua script error; please check console.'}, broadcast=True) sendUSStatItems() - print("{0}{1}{2}".format(colors.RED, "***LUA ERROR***: ", colors.END), end="", file=sys.stderr) - print("{0}{1}{2}".format(colors.RED, str(e).replace("\033", ""), colors.END), file=sys.stderr) - print("{0}{1}{2}".format(colors.YELLOW, "Lua engine stopped; please open 'Userscripts' and press Load to reinitialize scripts.", colors.END), file=sys.stderr) + logger.debug('LUA ERROR: ' + str(e).replace("\033", "")) + logger.warning("Lua engine stopped; please open 'Userscripts' and press Load to reinitialize scripts.") else: emit('from_server', {'cmd': 'errmsg', 'data': 'Error occurred during generator call; please check console.'}, broadcast=True) print("{0}{1}{2}".format(colors.RED, traceback.format_exc().replace("\033", ""), colors.END), file=sys.stderr) @@ -4196,12 +5594,16 @@ 
def refresh_settings(): emit('from_server', {'cmd': 'updatenopromptgen', 'data': vars.nopromptgen}, broadcast=True) emit('from_server', {'cmd': 'updaterngpersist', 'data': vars.rngpersist}, broadcast=True) emit('from_server', {'cmd': 'updatenogenmod', 'data': vars.nogenmod}, broadcast=True) + emit('from_server', {'cmd': 'updatefulldeterminism', 'data': vars.full_determinism}, broadcast=True) emit('from_server', {'cmd': 'updatefrmttriminc', 'data': vars.formatoptns["frmttriminc"]}, broadcast=True) emit('from_server', {'cmd': 'updatefrmtrmblln', 'data': vars.formatoptns["frmtrmblln"]}, broadcast=True) emit('from_server', {'cmd': 'updatefrmtrmspch', 'data': vars.formatoptns["frmtrmspch"]}, broadcast=True) emit('from_server', {'cmd': 'updatefrmtadsnsp', 'data': vars.formatoptns["frmtadsnsp"]}, broadcast=True) emit('from_server', {'cmd': 'updatesingleline', 'data': vars.formatoptns["singleline"]}, broadcast=True) + emit('from_server', {'cmd': 'updateoutputstreaming', 'data': vars.output_streaming}, broadcast=True) + emit('from_server', {'cmd': 'updateshowbudget', 'data': vars.show_budget}, broadcast=True) + emit('from_server', {'cmd': 'updateshowprobs', 'data': vars.show_probs}, broadcast=True) # Allow toggle events again emit('from_server', {'cmd': 'allowtoggle', 'data': True}, broadcast=True) @@ -4210,6 +5612,8 @@ def refresh_settings(): # Sets the logical and display states for the AI Busy condition #==================================================================# def set_aibusy(state): + if(vars.disable_set_aibusy): + return if(state): vars.aibusy = True emit('from_server', {'cmd': 'setgamestate', 'data': 'wait'}, broadcast=True) @@ -4287,7 +5691,7 @@ def inlineedit(chunk, data): vars.actions_metadata[chunk-1]['Selected Text'] = data vars.actions[chunk-1] = data else: - print(f"WARNING: Attempted to edit non-existent chunk {chunk}") + logger.warning(f"Attempted to edit non-existent chunk {chunk}") setgamesaved(False) update_story_chunk(chunk) @@ -4313,9 +5717,9 @@ def inlinedelete(chunk): "Previous Selection": True, "Edited": False}] + vars.actions_metadata[chunk-1]['Alternative Text'] vars.actions_metadata[chunk-1]['Selected Text'] = '' - vars.actions[chunk-1] = '' + del vars.actions[chunk-1] else: - print(f"WARNING: Attempted to delete non-existent chunk {chunk}") + logger.warning(f"Attempted to delete non-existent chunk {chunk}") setgamesaved(False) remove_story_chunk(chunk) emit('from_server', {'cmd': 'editmode', 'data': 'false'}, broadcast=True) @@ -4607,14 +6011,14 @@ def checkworldinfo(txt, allowed_entries=None, allowed_folders=None, force_use_tx # Remove leading/trailing spaces if the option is enabled if(vars.wirmvwhtsp): ky = k.strip() - if ky in txt: + if ky.lower() in txt.lower(): if wi.get("selective", False) and len(keys_secondary): found = False for ks in keys_secondary: ksy = ks if(vars.wirmvwhtsp): ksy = ks.strip() - if ksy in txt: + if ksy.lower() in txt.lower(): wimem = wimem + wi["content"] + "\n" found_entries.add(id(wi)) found = True @@ -4746,7 +6150,9 @@ def oairequest(txt, min, max): vars.lastctx = txt # Build request JSON data - if 'GooseAI' in args.configname: + # GooseAI is a subtype of OAI. 
So to check if it's this type, we check the configname as a workaround + # as the vars.model will always be OAI + if 'GooseAI' in vars.configname: reqdata = { 'prompt': txt, 'max_tokens': vars.genamt, @@ -5079,6 +6485,7 @@ def loadRequest(loadpath, filename=None): vars.lastact = "" vars.submission = "" vars.lastctx = "" + vars.genseqs = [] del vars.actions vars.actions = structures.KoboldStoryRegister() @@ -5354,7 +6761,7 @@ def importgame(): vars.importjs = {} # Reset current save - vars.savedir = getcwd()+"\stories" + vars.savedir = getcwd()+"\\stories" # Refresh game screen vars.laststory = None @@ -5441,7 +6848,7 @@ def importAidgRequest(id): vars.worldinfo_i = [wi for wi in vars.worldinfo if wi["init"]] # Reset current save - vars.savedir = getcwd()+"\stories" + vars.savedir = getcwd()+"\\stories" # Refresh game screen vars.laststory = None @@ -5534,7 +6941,7 @@ def newGameRequest(): vars.lastctx = "" # Reset current save - vars.savedir = getcwd()+"\stories" + vars.savedir = getcwd()+"\\stories" # Refresh game screen vars.laststory = None @@ -5565,55 +6972,74 @@ def randomGameRequest(topic, memory=""): vars.memory = memory emit('from_server', {'cmd': 'setmemory', 'data': vars.memory}, broadcast=True) -# Prevent tokenizer from taking extra time the first time it's used -def __preempt_tokenizer(): - if("tokenizer" not in globals()): - return - utils.decodenewlines(tokenizer.decode([25678, 559])) - tokenizer.encode(utils.encodenewlines("eunoia")) -threading.Thread(target=__preempt_tokenizer).start() +def final_startup(): + # Prevent tokenizer from taking extra time the first time it's used + def __preempt_tokenizer(): + if("tokenizer" not in globals()): + return + utils.decodenewlines(tokenizer.decode([25678, 559])) + tokenizer.encode(utils.encodenewlines("eunoia")) + threading.Thread(target=__preempt_tokenizer).start() -# Load soft prompt specified by the settings file, if applicable -if(path.exists("settings/" + getmodelname().replace('/', '_') + ".settings")): - file = open("settings/" + getmodelname().replace('/', '_') + ".settings", "r") - js = json.load(file) - if(vars.allowsp and "softprompt" in js and type(js["softprompt"]) is str and all(q not in js["softprompt"] for q in ("..", ":")) and (len(js["softprompt"]) == 0 or all(js["softprompt"][0] not in q for q in ("/", "\\")))): - spRequest(js["softprompt"]) - else: - vars.spfilename = "" - file.close() + # Load soft prompt specified by the settings file, if applicable + if(path.exists(get_config_filename())): + file = open(get_config_filename(), "r") + js = json.load(file) + if(vars.allowsp and "softprompt" in js and type(js["softprompt"]) is str and all(q not in js["softprompt"] for q in ("..", ":")) and (len(js["softprompt"]) == 0 or all(js["softprompt"][0] not in q for q in ("/", "\\")))): + spRequest(js["softprompt"]) + else: + vars.spfilename = "" + file.close() -# Precompile TPU backend if required -if(vars.use_colab_tpu or vars.model in ("TPUMeshTransformerGPTJ", "TPUMeshTransformerGPTNeoX")): - soft_tokens = tpumtjgetsofttokens() - if(vars.dynamicscan or (not vars.nogenmod and vars.has_genmod)): - threading.Thread( - target=tpu_mtj_backend.infer_dynamic, - args=(np.tile(np.uint32((23403, 727, 20185)), (vars.numseqs, 1)),), - kwargs={ - "soft_embeddings": vars.sp, - "soft_tokens": soft_tokens, - "gen_len": 1, - "use_callback": False, - "numseqs": vars.numseqs, - "excluded_world_info": list(set() for _ in range(vars.numseqs)), - }, - ).start() - else: - threading.Thread( - target=tpu_mtj_backend.infer_static, - 
args=(np.uint32((23403, 727, 20185)),), - kwargs={ - "soft_embeddings": vars.sp, - "soft_tokens": soft_tokens, - "gen_len": 1, - "numseqs": vars.numseqs, - }, - ).start() + # Precompile TPU backend if required + if(vars.use_colab_tpu or vars.model in ("TPUMeshTransformerGPTJ", "TPUMeshTransformerGPTNeoX")): + soft_tokens = tpumtjgetsofttokens() + if(vars.dynamicscan or (not vars.nogenmod and vars.has_genmod)): + threading.Thread( + target=tpu_mtj_backend.infer_dynamic, + args=(np.tile(np.uint32((23403, 727, 20185)), (vars.numseqs, 1)),), + kwargs={ + "soft_embeddings": vars.sp, + "soft_tokens": soft_tokens, + "gen_len": 1, + "use_callback": False, + "numseqs": vars.numseqs, + "excluded_world_info": list(set() for _ in range(vars.numseqs)), + }, + ).start() + else: + threading.Thread( + target=tpu_mtj_backend.infer_static, + args=(np.uint32((23403, 727, 20185)),), + kwargs={ + "soft_embeddings": vars.sp, + "soft_tokens": soft_tokens, + "gen_len": 1, + "numseqs": vars.numseqs, + }, + ).start() + + # Set the initial RNG seed + if(vars.seed is not None): + if(vars.use_colab_tpu): + if(vars.seed_specified): + __import__("tpu_mtj_backend").set_rng_seed(vars.seed) + else: + __import__("tpu_mtj_backend").randomize_rng_seed() + else: + if(vars.seed_specified): + __import__("torch").manual_seed(vars.seed) + else: + __import__("torch").seed() + vars.seed = __import__("tpu_mtj_backend").get_rng_seed() if vars.use_colab_tpu else __import__("torch").initial_seed() def send_debug(): if vars.debug: debug_info = "" + try: + debug_info = "{}Seed: {} ({})\n".format(debug_info, repr(__import__("tpu_mtj_backend").get_rng_seed() if vars.use_colab_tpu else __import__("torch").initial_seed()), "specified by user in settings file" if vars.seed_specified else "randomly generated") + except: + pass try: debug_info = "{}Newline Mode: {}\n".format(debug_info, vars.newlinemode) except: @@ -5645,16 +7071,2997 @@ def send_debug(): emit('from_server', {'cmd': 'debug_info', 'data': debug_info}, broadcast=True) +#==================================================================# +# Load file browser for soft prompts +#==================================================================# +@socketio.on('show_folder_soft_prompt') +def show_folder_soft_prompt(data): + file_popup("Load Softprompt", "./softprompts", "", renameable=True, folder_only=False, editable=False, deleteable=True, jailed=True, item_check=None) + +#==================================================================# +# Load file browser for user scripts +#==================================================================# +@socketio.on('show_folder_usersripts') +def show_folder_usersripts(data): + file_popup("Load Softprompt", "./userscripts", "", renameable=True, folder_only=False, editable=True, deleteable=True, jailed=True, item_check=None) + + + +#==================================================================# +# File Popup options +#==================================================================# + +@socketio.on('upload_file') +def upload_file(data): + print("upload_file {}".format(data['filename'])) + print('current_folder' in session) + print('popup_jailed_dir' not in session) + print(session['popup_jailed_dir']) + print(session['current_folder']) + if 'current_folder' in session: + path = os.path.abspath(os.path.join(session['current_folder'], data['filename']).replace("\\", "/")).replace("\\", "/") + print(path) + print(os.path.exists(path)) + if 'popup_jailed_dir' not in session: + print("Someone is trying to upload a file to your server. 
Blocked.") + elif session['popup_jailed_dir'] is None: + if os.path.exists(path): + print("popup error") + emit("error_popup", "The file already exists. Please delete it or rename the file before uploading", room="UI_2"); + else: + with open(path, "wb") as f: + f.write(data['data']) + get_files_folders(session['current_folder']) + print("saved") + elif session['popup_jailed_dir'] in session['current_folder']: + if os.path.exists(path): + print("popup error") + emit("error_popup", "The file already exists. Please delete it or rename the file before uploading", room="UI_2"); + else: + with open(path, "wb") as f: + f.write(data['data']) + get_files_folders(session['current_folder']) + print("saved") + +@socketio.on('popup_change_folder') +def popup_change_folder(data): + print("Doing popup change folder: {}".format(data)) + if 'popup_jailed_dir' not in session: + print("Someone is trying to get at files in your server. Blocked.") + return + if session['popup_jailed_dir'] is None: + get_files_folders(data) + elif session['popup_jailed_dir'] in data: + get_files_folders(data) + else: + print("User is trying to get at files in your server outside the jail. Blocked. Jailed Dir: {} Requested Dir: {}".format(session['popup_jailed_dir'], data)) + +@socketio.on('popup_rename') +def popup_rename(data): + if 'popup_renameable' not in session: + print("Someone is trying to rename a file in your server. Blocked.") + return + if not session['popup_renameable']: + print("Someone is trying to rename a file in your server. Blocked.") + return + + if session['popup_jailed_dir'] is None: + os.rename(data['file'], data['new_name']) + get_files_folders(os.path.dirname(data['file'])) + elif session['popup_jailed_dir'] in data: + os.rename(data['file'], data['new_name']) + get_files_folders(os.path.dirname(data['file'])) + else: + print("User is trying to rename files in your server outside the jail. Blocked. Jailed Dir: {} Requested Dir: {}".format(session['popup_jailed_dir'], data['file'])) + + +@socketio.on('popup_delete') +def popup_delete(data): + if 'popup_deletable' not in session: + print("Someone is trying to delete a file in your server. Blocked.") + return + if not session['popup_deletable']: + print("Someone is trying to delete a file in your server. Blocked.") + return + + if session['popup_jailed_dir'] is None: + import shutil + if os.path.isdir(data): + shutil.rmtree(data) + else: + os.remove(data) + path = os.path.abspath(data).replace("\\", "/") + if path[-1] == "/": + path = path[:-1] + path = "/".join(path.split("/")[:-1]) + get_files_folders(path) + elif session['popup_jailed_dir'] in data: + import shutil + if os.path.isdir(data): + shutil.rmtree(data) + else: + os.remove(data) + path = os.path.abspath(data).replace("\\", "/") + if path[-1] == "/": + path = path[:-1] + path = "/".join(path.split("/")[:-1]) + get_files_folders(path) + else: + print("User is trying to delete files in your server outside the jail. Blocked. Jailed Dir: {} Requested Dir: {}".format(session['popup_jailed_dir'], data)) + +@socketio.on('popup_edit') +def popup_edit(data): + if 'popup_editable' not in session: + print("Someone is trying to edit a file in your server. Blocked.") + return + if not session['popup_editable']: + print("Someone is trying to edit a file in your server. 
Blocked.") + return + + if session['popup_jailed_dir'] is None: + emit("popup_edit_file", {"file": data, "text": open(data, 'r', encoding='utf-8').read()}); + elif session['popup_jailed_dir'] in data: + emit("popup_edit_file", {"file": data, "text": open(data, 'r', encoding='utf-8').read()}); + else: + print("User is trying to delete files in your server outside the jail. Blocked. Jailed Dir: {} Requested Dir: {}".format(session['popup_jailed_dir'], data)) + +@socketio.on('popup_change_file') +def popup_change_file(data): + if 'popup_editable' not in session: + print("Someone is trying to edit a file in your server. Blocked.") + return + if not session['popup_editable']: + print("Someone is trying to edit a file in your server. Blocked.") + return + + if session['popup_jailed_dir'] is None: + with open(data['file'], 'w') as f: + f.write(data['data']) + elif session['popup_jailed_dir'] in data['file']: + with open(data['file'], 'w') as f: + f.write(data['data']) + else: + print("User is trying to delete files in your server outside the jail. Blocked. Jailed Dir: {} Requested Dir: {}".format(session['popup_jailed_dir'], data)) + +def file_popup(popup_title, starting_folder, return_event, upload=True, jailed=True, folder_only=True, renameable=False, deleteable=False, editable=False, show_breadcrumbs=True, item_check=None, show_hidden=False): + #starting_folder = The folder we're going to get folders and/or items from + #return_event = the socketio event that will be emitted when the load button is clicked + #jailed = if set to true will look for the session variable jailed_folder and prevent navigation outside of that folder + #folder_only = will only show folders, no files + #deletable = will show the delete icons/methods. + #editable = will show the edit icons/methods + #show_breadcrumbs = will show the breadcrumbs at the top of the screen + #item_check will call this function to check if the item is valid as a selection if not none. Will pass absolute directory as only argument to function + #show_hidden = ... really, you have to ask? 
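# A typical invocation, as used by the soft prompt browser above:
#
#     file_popup("Load Softprompt", "./softprompts", "", renameable=True, folder_only=False,
#                editable=False, deleteable=True, jailed=True, item_check=None)
#
# With jailed=True the absolute starting folder is stored in the session, and the popup_*
# handlers above refuse to navigate, rename, delete, or edit anything outside of it.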
+ if jailed: + session['popup_jailed_dir'] = os.path.abspath(starting_folder).replace("\\", "/") + else: + session['popup_jailed_dir'] = None + session['popup_deletable'] = deleteable + session['popup_renameable'] = renameable + session['popup_editable'] = editable + session['popup_show_hidden'] = show_hidden + session['popup_item_check'] = item_check + session['popup_folder_only'] = folder_only + session['popup_show_breadcrumbs'] = show_breadcrumbs + session['upload'] = upload + + socketio.emit("load_popup", {"popup_title": popup_title, "call_back": return_event, "renameable": renameable, "deleteable": deleteable, "editable": editable, 'upload': upload}, broadcast=True) + + get_files_folders(starting_folder) + + +def get_files_folders(starting_folder): + import stat + session['current_folder'] = os.path.abspath(starting_folder).replace("\\", "/") + item_check = session['popup_item_check'] + show_breadcrumbs = session['popup_show_breadcrumbs'] + show_hidden = session['popup_show_hidden'] + folder_only = session['popup_folder_only'] + + if starting_folder == 'This PC': + breadcrumbs = [['This PC', 'This PC']] + items = [["{}:/".format(chr(i)), "{}:\\".format(chr(i))] for i in range(65, 91) if os.path.exists("{}:".format(chr(i)))] + else: + path = os.path.abspath(starting_folder).replace("\\", "/") + if path[-1] == "/": + path = path[:-1] + breadcrumbs = [] + for i in range(len(path.split("/"))): + breadcrumbs.append(["/".join(path.split("/")[:i+1]), + path.split("/")[i]]) + if len(breadcrumbs) == 1: + breadcrumbs = [["{}:/".format(chr(i)), "{}:\\".format(chr(i))] for i in range(65, 91) if os.path.exists("{}:".format(chr(i)))] + else: + if len([["{}:/".format(chr(i)), "{}:\\".format(chr(i))] for i in range(65, 91) if os.path.exists("{}:".format(chr(i)))]) > 0: + breadcrumbs.insert(0, ['This PC', 'This PC']) + + #if we're jailed, remove the stuff before the jail from the breadcrumbs + if session['popup_jailed_dir'] is not None: + + breadcrumbs = breadcrumbs[len(session['popup_jailed_dir'].split("/")):] + + folders = [] + files = [] + base_path = os.path.abspath(starting_folder).replace("\\", "/") + for item in os.listdir(base_path): + item_full_path = os.path.join(base_path, item).replace("\\", "/") + if hasattr(os.stat(item_full_path), "st_file_attributes"): + hidden = bool(os.stat(item_full_path).st_file_attributes & stat.FILE_ATTRIBUTE_HIDDEN) + else: + hidden = item[0] == "." 
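# st_file_attributes is only present on Windows, so hidden entries are detected via
# FILE_ATTRIBUTE_HIDDEN there and via the leading-dot convention everywhere else.
# item_check, when supplied, is called with each entry's absolute path and its boolean
# result marks whether that entry is selectable in the popup; a purely illustrative
# callback (not from this changeset) might look like:
#
#     item_check=lambda abs_path: abs_path.endswith(".lua")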
+ if item_check is None: + valid_selection = True + else: + valid_selection = item_check(item_full_path) + + if (show_hidden and hidden) or not hidden: + if os.path.isdir(os.path.join(base_path, item)): + folders.append([True, item_full_path, item, valid_selection]) + else: + files.append([False, item_full_path, item, valid_selection]) + items = folders + if not folder_only: + items += files + + socketio.emit("popup_items", items, broadcast=True, include_self=True) + if show_breadcrumbs: + socketio.emit("popup_breadcrumbs", breadcrumbs, broadcast=True) + + +class EmptySchema(KoboldSchema): + pass + +class BasicTextResultInnerSchema(KoboldSchema): + text: str = fields.String(required=True) + +class BasicTextResultSchema(KoboldSchema): + result: BasicTextResultInnerSchema = fields.Nested(BasicTextResultInnerSchema) + +class BasicResultInnerSchema(KoboldSchema): + result: str = fields.String(required=True) + +class BasicResultSchema(KoboldSchema): + result: BasicResultInnerSchema = fields.Nested(BasicResultInnerSchema, required=True) + +class BasicResultsSchema(KoboldSchema): + results: BasicResultInnerSchema = fields.List(fields.Nested(BasicResultInnerSchema), required=True) + +class BasicStringSchema(KoboldSchema): + value: str = fields.String(required=True) + +class BasicBooleanSchema(KoboldSchema): + value: bool = fields.Boolean(required=True) + +class BasicUIDSchema(KoboldSchema): + uid: str = fields.Integer(required=True, validate=validate.Range(min=-2147483648, max=2147483647), metadata={"description": "32-bit signed integer unique to this world info entry/folder."}) + +class BasicErrorSchema(KoboldSchema): + msg: str = fields.String(required=True) + type: str = fields.String(required=True) + +class StoryEmptyErrorSchema(KoboldSchema): + detail: BasicErrorSchema = fields.Nested(BasicErrorSchema, required=True) + +class StoryTooShortErrorSchema(KoboldSchema): + detail: BasicErrorSchema = fields.Nested(BasicErrorSchema, required=True) + +class OutOfMemoryErrorSchema(KoboldSchema): + detail: BasicErrorSchema = fields.Nested(BasicErrorSchema, required=True) + +class NotFoundErrorSchema(KoboldSchema): + detail: BasicErrorSchema = fields.Nested(BasicErrorSchema, required=True) + +api_out_of_memory_response = """507: + description: Out of memory + content: + application/json: + schema: OutOfMemoryErrorSchema + examples: + gpu.cuda: + value: + detail: + msg: "KoboldAI ran out of memory: CUDA out of memory. Tried to allocate 20.00 MiB (GPU 0; 4.00 GiB total capacity; 2.97 GiB already allocated; 0 bytes free; 2.99 GiB reserved in total by PyTorch)" + type: out_of_memory.gpu.cuda + gpu.hip: + value: + detail: + msg: "KoboldAI ran out of memory: HIP out of memory. Tried to allocate 20.00 MiB (GPU 0; 4.00 GiB total capacity; 2.97 GiB already allocated; 0 bytes free; 2.99 GiB reserved in total by PyTorch)" + type: out_of_memory.gpu.hip + tpu.hbm: + value: + detail: + msg: "KoboldAI ran out of memory: Compilation failed: Compilation failure: Ran out of memory in memory space hbm. Used 8.83G of 8.00G hbm. Exceeded hbm capacity by 848.88M." + type: out_of_memory.tpu.hbm + cpu.default_cpu_allocator: + value: + detail: + msg: "KoboldAI ran out of memory: DefaultCPUAllocator: not enough memory: you tried to allocate 209715200 bytes." + type: out_of_memory.cpu.default_cpu_allocator + unknown.unknown: + value: + detail: + msg: "KoboldAI ran out of memory." 
+ type: out_of_memory.unknown.unknown""" + +class ValidationErrorSchema(KoboldSchema): + detail: Dict[str, List[str]] = fields.Dict(keys=fields.String(), values=fields.List(fields.String(), validate=validate.Length(min=1)), required=True) + +api_validation_error_response = """422: + description: Validation error + content: + application/json: + schema: ValidationErrorSchema""" + +class ServerBusyErrorSchema(KoboldSchema): + detail: BasicErrorSchema = fields.Nested(BasicErrorSchema, required=True) + +api_server_busy_response = """503: + description: Server is busy + content: + application/json: + schema: ServerBusyErrorSchema + example: + detail: + msg: Server is busy; please try again later. + type: service_unavailable""" + +class NotImplementedErrorSchema(KoboldSchema): + detail: BasicErrorSchema = fields.Nested(BasicErrorSchema, required=True) + +api_not_implemented_response = """501: + description: Not implemented + content: + application/json: + schema: NotImplementedErrorSchema + example: + detail: + msg: API generation is not supported in read-only mode; please load a model and then try again. + type: not_implemented""" + +class SamplerSettingsSchema(KoboldSchema): + rep_pen: Optional[float] = fields.Float(validate=validate.Range(min=1), metadata={"description": "Base repetition penalty value."}) + rep_pen_range: Optional[int] = fields.Integer(validate=validate.Range(min=0), metadata={"description": "Repetition penalty range."}) + rep_pen_slope: Optional[float] = fields.Float(validate=validate.Range(min=0), metadata={"description": "Repetition penalty slope."}) + top_k: Optional[int] = fields.Integer(validate=validate.Range(min=0), metadata={"description": "Top-k sampling value."}) + top_a: Optional[float] = fields.Float(validate=validate.Range(min=0), metadata={"description": "Top-a sampling value."}) + top_p: Optional[float] = fields.Float(validate=validate.Range(min=0, max=1), metadata={"description": "Top-p sampling value."}) + tfs: Optional[float] = fields.Float(validate=validate.Range(min=0, max=1), metadata={"description": "Tail free sampling value."}) + typical: Optional[float] = fields.Float(validate=validate.Range(min=0, max=1), metadata={"description": "Typical sampling value."}) + temperature: Optional[float] = fields.Float(validate=validate.Range(min=0, min_inclusive=False), metadata={"description": "Temperature value."}) + +def soft_prompt_validator(soft_prompt: str): + if len(soft_prompt.strip()) == 0: + return + if not vars.allowsp: + raise ValidationError("Cannot use soft prompts with current backend.") + if any(q in soft_prompt for q in ("/", "\\")): + return + z, _, _, _, _ = fileops.checksp(soft_prompt.strip(), vars.modeldim) + if isinstance(z, int): + raise ValidationError("Must be a valid soft prompt name.") + z.close() + return True + +def story_load_validator(name: str): + if any(q in name for q in ("/", "\\")): + return + if len(name.strip()) == 0 or not os.path.isfile(fileops.storypath(name)): + raise ValidationError("Must be a valid story name.") + return True + +class GenerationInputSchema(SamplerSettingsSchema): + prompt: str = fields.String(required=True, metadata={"description": "This is the submission."}) + use_memory: bool = fields.Boolean(load_default=False, metadata={"description": "Whether or not to use the memory from the KoboldAI GUI when generating text."}) + use_story: bool = fields.Boolean(load_default=False, metadata={"description": "Whether or not to use the story from the KoboldAI GUI when generating text."}) + use_authors_note: bool = 
fields.Boolean(load_default=False, metadata={"description": "Whether or not to use the author's note from the KoboldAI GUI when generating text. This has no effect unless `use_story` is also enabled."}) + use_world_info: bool = fields.Boolean(load_default=False, metadata={"description": "Whether or not to use the world info from the KoboldAI GUI when generating text."}) + use_userscripts: bool = fields.Boolean(load_default=False, metadata={"description": "Whether or not to use the userscripts from the KoboldAI GUI when generating text."}) + soft_prompt: Optional[str] = fields.String(metadata={"description": "Soft prompt to use when generating. If set to the empty string or any other string containing no non-whitespace characters, uses no soft prompt."}, validate=[soft_prompt_validator, validate.Regexp(r"^[^/\\]*$")]) + max_length: int = fields.Integer(validate=validate.Range(min=1, max=512), metadata={"description": "Number of tokens to generate."}) + max_context_length: int = fields.Integer(validate=validate.Range(min=512, max=2048), metadata={"description": "Maximum number of tokens to send to the model."}) + n: int = fields.Integer(validate=validate.Range(min=1, max=5), metadata={"description": "Number of outputs to generate."}) + disable_output_formatting: bool = fields.Boolean(load_default=True, metadata={"description": "When enabled, all output formatting options default to `false` instead of the value in the KoboldAI GUI."}) + frmttriminc: Optional[bool] = fields.Boolean(metadata={"description": "Output formatting option. When enabled, removes some characters from the end of the output such that the output doesn't end in the middle of a sentence. If the output is less than one sentence long, does nothing.\n\nIf `disable_output_formatting` is `true`, this defaults to `false` instead of the value in the KoboldAI GUI."}) + frmtrmblln: Optional[bool] = fields.Boolean(metadata={"description": "Output formatting option. When enabled, replaces all occurrences of two or more consecutive newlines in the output with one newline.\n\nIf `disable_output_formatting` is `true`, this defaults to `false` instead of the value in the KoboldAI GUI."}) + frmtrmspch: Optional[bool] = fields.Boolean(metadata={"description": "Output formatting option. When enabled, removes `#/@%{}+=~|\^<>` from the output.\n\nIf `disable_output_formatting` is `true`, this defaults to `false` instead of the value in the KoboldAI GUI."}) + singleline: Optional[bool] = fields.Boolean(metadata={"description": "Output formatting option. When enabled, removes everything after the first line of the output, including the newline.\n\nIf `disable_output_formatting` is `true`, this defaults to `false` instead of the value in the KoboldAI GUI."}) + disable_input_formatting: bool = fields.Boolean(load_default=True, metadata={"description": "When enabled, all input formatting options default to `false` instead of the value in the KoboldAI GUI"}) + frmtadsnsp: Optional[bool] = fields.Boolean(metadata={"description": "Input formatting option. 
When enabled, adds a leading space to your input if there is no trailing whitespace at the end of the previous action.\n\nIf `disable_input_formatting` is `true`, this defaults to `false` instead of the value in the KoboldAI GUI."}) + quiet: Optional[bool] = fields.Boolean(metadata={"description": "When enabled, Generated output will not be displayed in the console."}) + +class GenerationResultSchema(KoboldSchema): + text: str = fields.String(required=True, metadata={"description": "Generated output as plain text."}) + +class GenerationOutputSchema(KoboldSchema): + results: List[GenerationResultSchema] = fields.List(fields.Nested(GenerationResultSchema), required=True, metadata={"description": "Array of generated outputs."}) + +class StoryNumsChunkSchema(KoboldSchema): + num: int = fields.Integer(required=True, metadata={"description": "Guaranteed to not equal the `num` of any other active story chunk. Equals 0 iff this is the first action of the story (the prompt)."}) + +class StoryChunkSchema(StoryNumsChunkSchema, KoboldSchema): + text: str = fields.String(required=True, metadata={"description": "The text inside this story chunk."}) + +class StorySchema(KoboldSchema): + results: List[StoryChunkSchema] = fields.List(fields.Nested(StoryChunkSchema), required=True, metadata={"description": "Array of story actions. The array is sorted such that actions closer to the end of this array are closer to the end of the story."}) + +class BasicBooleanSchema(KoboldSchema): + result: bool = fields.Boolean(required=True) + +class StoryNumsSchema(KoboldSchema): + results: List[int] = fields.List(fields.Integer(), required=True, metadata={"description": "Array of story action nums. The array is sorted such that actions closer to the end of this array are closer to the end of the story."}) + +class StoryChunkResultSchema(KoboldSchema): + result: StoryChunkSchema = fields.Nested(StoryChunkSchema, required=True) + +class StoryChunkNumSchema(KoboldSchema): + value: int = fields.Integer(required=True) + +class StoryChunkTextSchema(KoboldSchema): + value: str = fields.String(required=True) + +class StoryChunkSetTextSchema(KoboldSchema): + value: str = fields.String(required=True, validate=validate.Regexp(r"^(.|\n)*\S$")) + +class StoryLoadSchema(KoboldSchema): + name: str = fields.String(required=True, validate=[story_load_validator, validate.Regexp(r"^[^/\\]*$")]) + +class StorySaveSchema(KoboldSchema): + name: str = fields.String(required=True, validate=validate.Regexp(r"^(?=.*\S)(?!.*[/\\]).*$")) + +class WorldInfoEntrySchema(KoboldSchema): + uid: int = fields.Integer(required=True, validate=validate.Range(min=-2147483648, max=2147483647), metadata={"description": "32-bit signed integer unique to this world info entry."}) + content: str = fields.String(required=True, metadata={"description": "The \"What To Remember\" for this entry."}) + key: str = fields.String(required=True, metadata={"description": "Comma-separated list of keys, or of primary keys if selective mode is enabled."}) + keysecondary: str = fields.String(metadata={"description": "Comma-separated list of secondary keys if selective mode is enabled."}) + selective: bool = fields.Boolean(required=True, metadata={"description": "Whether or not selective mode is enabled for this world info entry."}) + constant: bool = fields.Boolean(required=True, metadata={"description": "Whether or not constant mode is enabled for this world info entry."}) + comment: bool = fields.String(required=True, metadata={"description": "The comment/description/title for 
this world info entry."}) + +class WorldInfoEntryResultSchema(KoboldSchema): + result: WorldInfoEntrySchema = fields.Nested(WorldInfoEntrySchema, required=True) + +class WorldInfoFolderBasicSchema(KoboldSchema): + uid: int = fields.Integer(required=True, validate=validate.Range(min=-2147483648, max=2147483647), metadata={"description": "32-bit signed integer unique to this world info folder."}) + name: str = fields.String(required=True, metadata={"description": "Name of this world info folder."}) + +class WorldInfoFolderSchema(WorldInfoFolderBasicSchema): + entries: List[WorldInfoEntrySchema] = fields.List(fields.Nested(WorldInfoEntrySchema), required=True) + +class WorldInfoFolderUIDsSchema(KoboldSchema): + uid: int = fields.Integer(required=True, validate=validate.Range(min=-2147483648, max=2147483647), metadata={"description": "32-bit signed integer unique to this world info folder."}) + entries: List[int] = fields.List(fields.Integer(required=True, validate=validate.Range(min=-2147483648, max=2147483647), metadata={"description": "32-bit signed integer unique to this world info entry."}), required=True) + +class WorldInfoEntriesSchema(KoboldSchema): + entries: List[WorldInfoEntrySchema] = fields.List(fields.Nested(WorldInfoEntrySchema), required=True) + +class WorldInfoFoldersSchema(KoboldSchema): + folders: List[WorldInfoFolderBasicSchema] = fields.List(fields.Nested(WorldInfoFolderBasicSchema), required=True) + +class WorldInfoSchema(WorldInfoEntriesSchema): + folders: List[WorldInfoFolderSchema] = fields.List(fields.Nested(WorldInfoFolderSchema), required=True) + +class WorldInfoEntriesUIDsSchema(KoboldSchema): + entries: List[int] = fields.List(fields.Integer(required=True, validate=validate.Range(min=-2147483648, max=2147483647), metadata={"description": "32-bit signed integer unique to this world info entry."}), required=True) + +class WorldInfoFoldersUIDsSchema(KoboldSchema): + folders: List[int] = fields.List(fields.Integer(required=True, validate=validate.Range(min=-2147483648, max=2147483647), metadata={"description": "32-bit signed integer unique to this world info folder."}), required=True) + +class WorldInfoUIDsSchema(WorldInfoEntriesUIDsSchema): + folders: List[WorldInfoFolderSchema] = fields.List(fields.Nested(WorldInfoFolderUIDsSchema), required=True) + +class ModelSelectionSchema(KoboldSchema): + model: str = fields.String(required=True, validate=validate.Regexp(r"^(?!\s*NeoCustom)(?!\s*GPT2Custom)(?!\s*TPUMeshTransformerGPTJ)(?!\s*TPUMeshTransformerGPTNeoX)(?!\s*GooseAI)(?!\s*OAI)(?!\s*InferKit)(?!\s*Colab)(?!\s*API).*$"), metadata={"description": 'Hugging Face model ID, the path to a model folder (relative to the "models" folder in the KoboldAI root folder) or "ReadOnly" for no model'}) + +def _generate_text(body: GenerationInputSchema): + if vars.aibusy or vars.genseqs: + abort(Response(json.dumps({"detail": { + "msg": "Server is busy; please try again later.", + "type": "service_unavailable", + }}), mimetype="application/json", status=503)) + # This maps each property of the setting to use when sending the generate idempotently + # To the object which typically contains it's value + # This allows to set the property only for the API generation, and then revert the setting + # To what it was before. 
+ mapping = { + "disable_input_formatting": ("vars", "disable_input_formatting", None), + "disable_output_formatting": ("vars", "disable_output_formatting", None), + "rep_pen": ("vars", "rep_pen", None), + "rep_pen_range": ("vars", "rep_pen_range", None), + "rep_pen_slope": ("vars", "rep_pen_slope", None), + "top_k": ("vars", "top_k", None), + "top_a": ("vars", "top_a", None), + "top_p": ("vars", "top_p", None), + "tfs": ("vars", "tfs", None), + "typical": ("vars", "typical", None), + "temperature": ("vars", "temp", None), + "frmtadsnsp": ("vars.formatoptns", "@frmtadsnsp", "input"), + "frmttriminc": ("vars.formatoptns", "@frmttriminc", "output"), + "frmtrmblln": ("vars.formatoptns", "@frmtrmblln", "output"), + "frmtrmspch": ("vars.formatoptns", "@frmtrmspch", "output"), + "singleline": ("vars.formatoptns", "@singleline", "output"), + "max_length": ("vars", "genamt", None), + "max_context_length": ("vars", "max_length", None), + "n": ("vars", "numseqs", None), + "quiet": ("vars", "quiet", None), + } + saved_settings = {} + set_aibusy(1) + disable_set_aibusy = vars.disable_set_aibusy + vars.disable_set_aibusy = True + _standalone = vars.standalone + vars.standalone = True + show_probs = vars.show_probs + vars.show_probs = False + output_streaming = vars.output_streaming + vars.output_streaming = False + for key, entry in mapping.items(): + obj = {"vars": vars, "vars.formatoptns": vars.formatoptns}[entry[0]] + if entry[2] == "input" and vars.disable_input_formatting and not hasattr(body, key): + setattr(body, key, False) + if entry[2] == "output" and vars.disable_output_formatting and not hasattr(body, key): + setattr(body, key, False) + if getattr(body, key, None) is not None: + if entry[1].startswith("@"): + saved_settings[key] = obj[entry[1][1:]] + obj[entry[1][1:]] = getattr(body, key) + else: + saved_settings[key] = getattr(obj, entry[1]) + setattr(obj, entry[1], getattr(body, key)) + try: + if vars.allowsp and getattr(body, "soft_prompt", None) is not None: + if any(q in body.soft_prompt for q in ("/", "\\")): + raise RuntimeError + old_spfilename = vars.spfilename + spRequest(body.soft_prompt.strip()) + genout = apiactionsubmit(body.prompt, use_memory=body.use_memory, use_story=body.use_story, use_world_info=body.use_world_info, use_authors_note=body.use_authors_note) + output = {"results": [{"text": txt} for txt in genout]} + finally: + for key in saved_settings: + entry = mapping[key] + obj = {"vars": vars, "vars.formatoptns": vars.formatoptns}[entry[0]] + if getattr(body, key, None) is not None: + if entry[1].startswith("@"): + if obj[entry[1][1:]] == getattr(body, key): + obj[entry[1][1:]] = saved_settings[key] + else: + if getattr(obj, entry[1]) == getattr(body, key): + setattr(obj, entry[1], saved_settings[key]) + vars.disable_set_aibusy = disable_set_aibusy + vars.standalone = _standalone + vars.show_probs = show_probs + vars.output_streaming = output_streaming + if vars.allowsp and getattr(body, "soft_prompt", None) is not None: + spRequest(old_spfilename) + set_aibusy(0) + return output + + +@api_v1.get("/info/version") +@api_schema_wrap +def get_version(): + """--- + get: + summary: Current API version + tags: + - info + description: |-2 + Returns the version of the API that you are currently using. 
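The `mapping`/`saved_settings` bookkeeping in `_generate_text` implements per-request overrides of global settings: each API field is mapped to the attribute that normally holds its value, the old value is saved before being overwritten, and the `finally` block restores it even if generation raises. A reduced sketch of that idea with invented names (the real code additionally skips restoring a setting if it was changed again during generation):

```python
class SettingsSketch:
    temp = 0.7
    top_p = 0.9

def with_overrides(settings, overrides, generate):
    # Map request-level option names to the attributes that hold them.
    mapping = {"temperature": "temp", "top_p": "top_p"}
    saved = {}
    for option, attr in mapping.items():
        if option in overrides:
            saved[attr] = getattr(settings, attr)
            setattr(settings, attr, overrides[option])
    try:
        return generate()
    finally:
        # Restore only what was overridden, even if generate() raised.
        for attr, value in saved.items():
            setattr(settings, attr, value)

s = SettingsSketch()
print(with_overrides(s, {"temperature": 0.5}, lambda: f"generated at temp={s.temp}"))
print(s.temp)   # back to 0.7 after the call
```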
+ responses: + 200: + description: Successful request + content: + application/json: + schema: BasicResultSchema + example: + result: 1.0.0 + """ + return {"result": api_version} + + +@api_v1.get("/info/version/latest") +@api_schema_wrap +def get_version_latest(): + """--- + get: + summary: Latest API version + tags: + - info + description: |-2 + Returns the latest API version available. + responses: + 200: + description: Successful request + content: + application/json: + schema: BasicResultSchema + example: + result: 1.0.0 + """ + return {"result": api_versions[-1]} + + +@api_v1.get("/info/version/list") +@api_schema_wrap +def get_version_list(): + """--- + get: + summary: List API versions + tags: + - info + description: |-2 + Returns a list of available API versions sorted in ascending order. + responses: + 200: + description: Successful request + content: + application/json: + schema: BasicResultsSchema + example: + results: + - 1.0.0 + """ + return {"results": api_versions} + + +@api_v1.post("/generate") +@api_schema_wrap +def post_generate(body: GenerationInputSchema): + """--- + post: + summary: Generate text + tags: + - generate + description: |-2 + Generates text given a submission, sampler settings, soft prompt and number of return sequences. + + By default, the story, userscripts, memory, author's note and world info are disabled. + + Unless otherwise specified, optional values default to the values in the KoboldAI GUI. + requestBody: + required: true + content: + application/json: + schema: GenerationInputSchema + example: + prompt: |-2 + Niko the kobold stalked carefully down the alley, his small scaly figure obscured by a dusky cloak that fluttered lightly in the cold winter breeze. + top_p: 0.9 + temperature: 0.5 + responses: + 200: + description: Successful request + content: + application/json: + schema: GenerationOutputSchema + example: + results: + - text: |-2 + Holding up his tail to keep it from dragging in the dirty snow that covered the cobblestone, he waited patiently for the butcher to turn his attention from his stall so that he could pilfer his next meal: a tender-looking chicken. + {api_validation_error_response} + {api_not_implemented_response} + {api_server_busy_response} + {api_out_of_memory_response} + """ + return _generate_text(body) + + +@api_v1.get("/model") +@api_schema_wrap +def get_model(): + """--- + get: + summary: Retrieve the current model string + description: |-2 + Gets the current model string, which is shown in the title of the KoboldAI GUI in parentheses, e.g. "KoboldAI Client (KoboldAI/fairseq-dense-13B-Nerys-v2)". + tags: + - model + responses: + 200: + description: Successful request + content: + application/json: + schema: BasicResultSchema + example: + result: KoboldAI/fairseq-dense-13B-Nerys-v2 + """ + return {"result": vars.model} + + +@api_v1.put("/model") +@api_schema_wrap +def put_model(body: ModelSelectionSchema): + """--- + put: + summary: Load a model + description: |-2 + Loads a model given its Hugging Face model ID, the path to a model folder (relative to the "models" folder in the KoboldAI root folder) or "ReadOnly" for no model. 
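With these routes registered, generation can be driven entirely over HTTP. A hedged usage sketch, assuming a local server on the default port 5000 with the `api_v1` blueprint mounted under `/api/v1`; the prompt and sampler values echo the docstring example above:

```python
import requests

BASE = "http://localhost:5000/api/v1"   # assumed host, port and mount point

print(requests.get(f"{BASE}/info/version").json())   # e.g. {"result": "1.0.0"}

payload = {
    "prompt": "Niko the kobold stalked carefully down the alley,",
    "temperature": 0.5,
    "top_p": 0.9,
    "max_length": 80,             # tokens to generate (1-512)
    "max_context_length": 1024,   # tokens sent to the model (512-2048)
    "n": 1,                       # number of outputs (1-5)
    "frmttriminc": True,          # trim a trailing partial sentence
}
resp = requests.post(f"{BASE}/generate", json=payload)
resp.raise_for_status()
for result in resp.json()["results"]:
    print(result["text"])
```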
+ tags: + - model + requestBody: + required: true + content: + application/json: + schema: ModelSelectionSchema + example: + model: ReadOnly + responses: + 200: + description: Successful request + content: + application/json: + schema: EmptySchema + {api_validation_error_response} + {api_server_busy_response} + """ + if vars.aibusy or vars.genseqs: + abort(Response(json.dumps({"detail": { + "msg": "Server is busy; please try again later.", + "type": "service_unavailable", + }}), mimetype="application/json", status=503)) + set_aibusy(1) + old_model = vars.model + vars.model = body.model.strip() + try: + load_model(use_breakmodel_args=True, breakmodel_args_default_to_cpu=True) + except Exception as e: + vars.model = old_model + raise e + set_aibusy(0) + return {} + + +def prompt_validator(prompt: str): + if len(prompt.strip()) == 0: + raise ValidationError("String does not match expected pattern.") + +class SubmissionInputSchema(KoboldSchema): + prompt: str = fields.String(required=True, validate=prompt_validator, metadata={"pattern": r"^.*\S.*$", "description": "This is the submission."}) + disable_input_formatting: bool = fields.Boolean(load_default=True, metadata={"description": "When enabled, disables all input formatting options, overriding their individual enabled/disabled states."}) + frmtadsnsp: Optional[bool] = fields.Boolean(metadata={"description": "Input formatting option. When enabled, adds a leading space to your input if there is no trailing whitespace at the end of the previous action."}) + +@api_v1.post("/story/end") +@api_schema_wrap +def post_story_end(body: SubmissionInputSchema): + """--- + post: + summary: Add an action to the end of the story + tags: + - story + description: |-2 + Inserts a single action at the end of the story in the KoboldAI GUI without generating text. + requestBody: + required: true + content: + application/json: + schema: SubmissionInputSchema + example: + prompt: |-2 + This is some text to put at the end of the story. + responses: + 200: + description: Successful request + content: + application/json: + schema: EmptySchema + {api_validation_error_response} + {api_server_busy_response} + """ + if vars.aibusy or vars.genseqs: + abort(Response(json.dumps({"detail": { + "msg": "Server is busy; please try again later.", + "type": "service_unavailable", + }}), mimetype="application/json", status=503)) + set_aibusy(1) + disable_set_aibusy = vars.disable_set_aibusy + vars.disable_set_aibusy = True + _standalone = vars.standalone + vars.standalone = True + numseqs = vars.numseqs + vars.numseqs = 1 + try: + actionsubmit(body.prompt, force_submit=True, no_generate=True, ignore_aibusy=True) + finally: + vars.disable_set_aibusy = disable_set_aibusy + vars.standalone = _standalone + vars.numseqs = numseqs + set_aibusy(0) + return {} + + +@api_v1.get("/story/end") +@api_schema_wrap +def get_story_end(): + """--- + get: + summary: Retrieve the last action of the story + tags: + - story + description: |-2 + Returns the last action of the story in the KoboldAI GUI. + responses: + 200: + description: Successful request + content: + application/json: + schema: StoryChunkResultSchema + 510: + description: Story is empty + content: + application/json: + schema: StoryEmptyErrorSchema + example: + detail: + msg: Could not retrieve the last action of the story because the story is empty. 
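Because the busy check replies with HTTP 503 and a machine-readable `detail.type`, clients can retry model loads instead of failing outright. A sketch under the same local-server assumption as above:

```python
import time
import requests

BASE = "http://localhost:5000/api/v1"   # assumed local server

def load_model(model: str, retries: int = 5) -> None:
    # PUT /model answers 503 ("service_unavailable") while the server is
    # busy generating or loading; back off and try again a few times.
    for _ in range(retries):
        resp = requests.put(f"{BASE}/model", json={"model": model})
        if resp.status_code == 503:
            time.sleep(5)
            continue
        resp.raise_for_status()
        return
    raise RuntimeError("server stayed busy")

load_model("ReadOnly")
print(requests.get(f"{BASE}/model").json())   # {"result": "ReadOnly"}
```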
+ type: story_empty + """ + if not vars.gamestarted: + abort(Response(json.dumps({"detail": { + "msg": "Could not retrieve the last action of the story because the story is empty.", + "type": "story_empty", + }}), mimetype="application/json", status=510)) + if len(vars.actions) == 0: + return {"result": {"text": vars.prompt, "num": 0}} + return {"result": {"text": vars.actions[vars.actions.get_last_key()], "num": vars.actions.get_last_key() + 1}} + + +@api_v1.get("/story/end/num") +@api_schema_wrap +def get_story_end_num(): + """--- + get: + summary: Retrieve the num of the last action of the story + tags: + - story + description: |-2 + Returns the `num` of the last action of the story in the KoboldAI GUI. + responses: + 200: + description: Successful request + content: + application/json: + schema: StoryChunkNumSchema + 510: + description: Story is empty + content: + application/json: + schema: StoryEmptyErrorSchema + example: + detail: + msg: Could not retrieve the last action of the story because the story is empty. + type: story_empty + """ + if not vars.gamestarted: + abort(Response(json.dumps({"detail": { + "msg": "Could not retrieve the last action of the story because the story is empty.", + "type": "story_empty", + }}), mimetype="application/json", status=510)) + if len(vars.actions) == 0: + return {"result": {"text": 0}} + return {"result": {"text": vars.actions.get_last_key() + 1}} + + +@api_v1.get("/story/end/text") +@api_schema_wrap +def get_story_end_text(): + """--- + get: + summary: Retrieve the text of the last action of the story + tags: + - story + description: |-2 + Returns the text of the last action of the story in the KoboldAI GUI. + responses: + 200: + description: Successful request + content: + application/json: + schema: StoryChunkTextSchema + 510: + description: Story is empty + content: + application/json: + schema: StoryEmptyErrorSchema + example: + detail: + msg: Could not retrieve the last action of the story because the story is empty. + type: story_empty + """ + if not vars.gamestarted: + abort(Response(json.dumps({"detail": { + "msg": "Could not retrieve the last action of the story because the story is empty.", + "type": "story_empty", + }}), mimetype="application/json", status=510)) + if len(vars.actions) == 0: + return {"result": {"text": vars.prompt}} + return {"result": {"text": vars.actions[vars.actions.get_last_key()]}} + + +@api_v1.put("/story/end/text") +@api_schema_wrap +def put_story_end_text(body: StoryChunkSetTextSchema): + """--- + put: + summary: Set the text of the last action of the story + tags: + - story + description: |-2 + Sets the text of the last action of the story in the KoboldAI GUI to the desired value. + requestBody: + required: true + content: + application/json: + schema: StoryChunkSetTextSchema + example: + value: string + responses: + 200: + description: Successful request + content: + application/json: + schema: EmptySchema + 510: + description: Story is empty + content: + application/json: + schema: StoryEmptyErrorSchema + example: + detail: + msg: Could not retrieve the last action of the story because the story is empty. 
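The three GET variants under /story/end return the whole last chunk, only its `num`, or only its text, and all of them answer 510 with type `story_empty` when nothing has been written yet. A small sketch (local-server assumption as above):

```python
import requests

BASE = "http://localhost:5000/api/v1"   # assumed local server

# Append an action to the story without generating any text.
requests.post(f"{BASE}/story/end",
              json={"prompt": "The door creaked open."}).raise_for_status()

resp = requests.get(f"{BASE}/story/end")
if resp.status_code == 510:              # story_empty
    print("The story is empty.")
else:
    chunk = resp.json()["result"]
    print(chunk["num"], chunk["text"])

# /num and /text return just one facet of the same chunk.
print(requests.get(f"{BASE}/story/end/text").json())
```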
+ type: story_empty + {api_validation_error_response} + """ + if not vars.gamestarted: + abort(Response(json.dumps({"detail": { + "msg": "Could not retrieve the last action of the story because the story is empty.", + "type": "story_empty", + }}), mimetype="application/json", status=510)) + value = body.value.rstrip() + if len(vars.actions) == 0: + inlineedit(0, value) + else: + inlineedit(vars.actions.get_last_key() + 1, value) + return {} + + +@api_v1.post("/story/end/delete") +@api_schema_wrap +def post_story_end_delete(body: EmptySchema): + """--- + post: + summary: Remove the last action of the story + tags: + - story + description: |-2 + Removes the last action of the story in the KoboldAI GUI. + requestBody: + required: true + content: + application/json: + schema: EmptySchema + responses: + 200: + description: Successful request + content: + application/json: + schema: EmptySchema + 510: + description: Story too short + content: + application/json: + schema: StoryTooShortErrorSchema + example: + detail: + msg: Could not delete the last action of the story because the number of actions in the story is less than or equal to 1. + type: story_too_short + {api_validation_error_response} + {api_server_busy_response} + """ + if vars.aibusy or vars.genseqs: + abort(Response(json.dumps({"detail": { + "msg": "Server is busy; please try again later.", + "type": "service_unavailable", + }}), mimetype="application/json", status=503)) + if not vars.gamestarted or not len(vars.actions): + abort(Response(json.dumps({"detail": { + "msg": "Could not delete the last action of the story because the number of actions in the story is less than or equal to 1.", + "type": "story_too_short", + }}), mimetype="application/json", status=510)) + actionback() + return {} + + +@api_v1.get("/story") +@api_schema_wrap +def get_story(): + """--- + get: + summary: Retrieve the entire story + tags: + - story + description: |-2 + Returns the entire story currently shown in the KoboldAI GUI. + responses: + 200: + description: Successful request + content: + application/json: + schema: StorySchema + """ + chunks = [] + if vars.gamestarted: + chunks.append({"num": 0, "text": vars.prompt}) + for num, action in vars.actions.items(): + chunks.append({"num": num + 1, "text": action}) + return {"results": chunks} + + +@api_v1.get("/story/nums") +@api_schema_wrap +def get_story_nums(): + """--- + get: + summary: Retrieve a list of the nums of the chunks in the current story + tags: + - story + description: |-2 + Returns the `num`s of the story chunks currently shown in the KoboldAI GUI. + responses: + 200: + description: Successful request + content: + application/json: + schema: StorySchema + """ + chunks = [] + if vars.gamestarted: + chunks.append(0) + for num in vars.actions.keys(): + chunks.append(num + 1) + return {"results": chunks} + + +@api_v1.get("/story/nums/") +@api_schema_wrap +def get_story_nums_num(num: int): + """--- + get: + summary: Determine whether or not there is a story chunk with the given num + tags: + - story + parameters: + - name: num + in: path + description: |-2 + `num` of the desired story chunk. 
+ schema: + type: integer + responses: + 200: + description: Successful request + content: + application/json: + schema: BasicBooleanSchema + """ + if num == 0: + return {"result": vars.gamestarted} + return {"result": num - 1 in vars.actions} + + +@api_v1.get("/story/") +@api_schema_wrap +def get_story_num(num: int): + """--- + get: + summary: Retrieve a story chunk + tags: + - story + description: |-2 + Returns information about a story chunk given its `num`. + parameters: + - name: num + in: path + description: |-2 + `num` of the desired story chunk. + schema: + type: integer + responses: + 200: + description: Successful request + content: + application/json: + schema: StoryChunkResultSchema + 404: + description: Not found + content: + application/json: + schema: NotFoundErrorSchema + example: + detail: + msg: No chunk with the given num exists. + type: key_error + """ + if num == 0: + if not vars.gamestarted: + abort(Response(json.dumps({"detail": { + "msg": "No chunk with the given num exists.", + "type": "key_error", + }}), mimetype="application/json", status=404)) + return {"result": {"text": vars.prompt, "num": num}} + if num - 1 not in vars.actions: + abort(Response(json.dumps({"detail": { + "msg": "No chunk with the given num exists.", + "type": "key_error", + }}), mimetype="application/json", status=404)) + return {"result": {"text": vars.actions[num - 1], "num": num}} + + +@api_v1.get("/story//text") +@api_schema_wrap +def get_story_num_text(num: int): + """--- + get: + summary: Retrieve the text of a story chunk + tags: + - story + description: |-2 + Returns the text inside a story chunk given its `num`. + parameters: + - name: num + in: path + description: |-2 + `num` of the desired story chunk. + schema: + type: integer + responses: + 200: + description: Successful request + content: + application/json: + schema: StoryChunkTextSchema + 404: + description: Not found + content: + application/json: + schema: NotFoundErrorSchema + example: + detail: + msg: No chunk with the given num exists. + type: key_error + """ + if num == 0: + if not vars.gamestarted: + abort(Response(json.dumps({"detail": { + "msg": "No chunk with the given num exists.", + "type": "key_error", + }}), mimetype="application/json", status=404)) + return {"value": vars.prompt} + if num - 1 not in vars.actions: + abort(Response(json.dumps({"detail": { + "msg": "No chunk with the given num exists.", + "type": "key_error", + }}), mimetype="application/json", status=404)) + return {"value": vars.actions[num - 1]} + + +@api_v1.put("/story//text") +@api_schema_wrap +def put_story_num_text(body: StoryChunkSetTextSchema, num: int): + """--- + put: + summary: Set the text of a story chunk + tags: + - story + description: |-2 + Sets the text inside a story chunk given its `num`. + parameters: + - name: num + in: path + description: |-2 + `num` of the desired story chunk. + schema: + type: integer + requestBody: + required: true + content: + application/json: + schema: StoryChunkSetTextSchema + example: + value: string + responses: + 200: + description: Successful request + content: + application/json: + schema: EmptySchema + 404: + description: Not found + content: + application/json: + schema: NotFoundErrorSchema + example: + detail: + msg: No chunk with the given num exists. 
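Chunks can also be read in bulk or addressed individually by `num`; a missing chunk yields 404 with type `key_error`. Sketch (local-server assumption as above):

```python
import requests

BASE = "http://localhost:5000/api/v1"   # assumed local server

# Full story: num 0 is the prompt, higher nums are later actions.
for chunk in requests.get(f"{BASE}/story").json()["results"]:
    print(f'[{chunk["num"]}] {chunk["text"]}')

# Or list the nums and fetch a single chunk's text on demand.
nums = requests.get(f"{BASE}/story/nums").json()["results"]
if nums:
    resp = requests.get(f"{BASE}/story/{nums[-1]}/text")
    if resp.status_code == 404:          # key_error: no such chunk
        print("chunk no longer exists")
    else:
        print(resp.json()["value"])
```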
+ type: key_error + {api_validation_error_response} + """ + if num == 0: + if not vars.gamestarted: + abort(Response(json.dumps({"detail": { + "msg": "No chunk with the given num exists.", + "type": "key_error", + }}), mimetype="application/json", status=404)) + inlineedit(0, body.value.rstrip()) + return {} + if num - 1 not in vars.actions: + abort(Response(json.dumps({"detail": { + "msg": "No chunk with the given num exists.", + "type": "key_error", + }}), mimetype="application/json", status=404)) + inlineedit(num, body.value.rstrip()) + return {} + + +@api_v1.delete("/story/") +@api_schema_wrap +def post_story_num_delete(num: int): + """--- + delete: + summary: Remove a story chunk + tags: + - story + description: |-2 + Removes a story chunk from the story in the KoboldAI GUI given its `num`. Cannot be used to delete the first action (the prompt). + parameters: + - name: num + in: path + description: |-2 + `num` of the desired story chunk. Must be larger than or equal to 1. + schema: + type: integer + minimum: 1 + responses: + 200: + description: Successful request + content: + application/json: + schema: EmptySchema + 404: + description: Not found + content: + application/json: + schema: NotFoundErrorSchema + example: + detail: + msg: No chunk with the given num exists. + type: key_error + {api_server_busy_response} + """ + if num < 1: + abort(Response(json.dumps({"detail": { + "num": ["Must be greater than or equal to 1."], + }}), mimetype="application/json", status=422)) + if num - 1 not in vars.actions: + abort(Response(json.dumps({"detail": { + "msg": "No chunk with the given num exists.", + "type": "key_error", + }}), mimetype="application/json", status=404)) + if vars.aibusy or vars.genseqs: + abort(Response(json.dumps({"detail": { + "msg": "Server is busy; please try again later.", + "type": "service_unavailable", + }}), mimetype="application/json", status=503)) + inlinedelete(num) + return {} + + +@api_v1.delete("/story") +@api_schema_wrap +def delete_story(): + """--- + delete: + summary: Clear the story + tags: + - story + description: |-2 + Starts a new blank story. + responses: + 200: + description: Successful request + content: + application/json: + schema: EmptySchema + {api_server_busy_response} + """ + if vars.aibusy or vars.genseqs: + abort(Response(json.dumps({"detail": { + "msg": "Server is busy; please try again later.", + "type": "service_unavailable", + }}), mimetype="application/json", status=503)) + newGameRequest() + return {} + + +@api_v1.put("/story/load") +@api_schema_wrap +def put_story_load(body: StoryLoadSchema): + """--- + put: + summary: Load a story + tags: + - story + description: |-2 + Loads a story given its filename (without the .json). + requestBody: + required: true + content: + application/json: + schema: StoryLoadSchema + example: + name: string + responses: + 200: + description: Successful request + content: + application/json: + schema: EmptySchema + {api_validation_error_response} + {api_server_busy_response} + """ + if vars.aibusy or vars.genseqs: + abort(Response(json.dumps({"detail": { + "msg": "Server is busy; please try again later.", + "type": "service_unavailable", + }}), mimetype="application/json", status=503)) + loadRequest(fileops.storypath(body.name.strip())) + return {} + + +@api_v1.put("/story/save") +@api_schema_wrap +def put_story_save(body: StorySaveSchema): + """--- + put: + summary: Save the current story + tags: + - story + description: |-2 + Saves the current story given its destination filename (without the .json). 
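Together, the save, delete and load routes give a client a simple backup/restore round trip. Sketch (local-server assumption as above; the story name is arbitrary):

```python
import requests

BASE = "http://localhost:5000/api/v1"   # assumed local server

# Save the current story (filename without ".json"), start a blank story,
# then load the saved copy back.
requests.put(f"{BASE}/story/save", json={"name": "api-backup"}).raise_for_status()
requests.delete(f"{BASE}/story").raise_for_status()
requests.put(f"{BASE}/story/load", json={"name": "api-backup"}).raise_for_status()
```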
+ requestBody: + required: true + content: + application/json: + schema: StorySaveSchema + example: + name: string + responses: + 200: + description: Successful request + content: + application/json: + schema: EmptySchema + {api_validation_error_response} + """ + saveRequest(fileops.storypath(body.name.strip())) + return {} + + +@api_v1.get("/world_info") +@api_schema_wrap +def get_world_info(): + """--- + get: + summary: Retrieve all world info entries + tags: + - world_info + description: |-2 + Returns all world info entries currently shown in the KoboldAI GUI. + + The `folders` are sorted in the same order as they are in the GUI and the `entries` within the folders and within the parent `result` object are all sorted in the same order as they are in their respective parts of the GUI. + responses: + 200: + description: Successful request + content: + application/json: + schema: WorldInfoSchema + """ + folders = [] + entries = [] + ln = len(vars.worldinfo) + stablesortwi() + vars.worldinfo_i = [wi for wi in vars.worldinfo if wi["init"]] + folder: Optional[list] = None + if ln: + last_folder = ... + for wi in vars.worldinfo_i: + if wi["folder"] != last_folder: + folder = [] + if wi["folder"] is not None: + folders.append({"uid": wi["folder"], "name": vars.wifolders_d[wi["folder"]]["name"], "entries": folder}) + last_folder = wi["folder"] + (folder if wi["folder"] is not None else entries).append({k: v for k, v in wi.items() if k not in ("init", "folder", "num") and (wi["selective"] or k != "keysecondary")}) + return {"folders": folders, "entries": entries} + +@api_v1.get("/world_info/uids") +@api_schema_wrap +def get_world_info_uids(): + """--- + get: + summary: Retrieve the UIDs of all world info entries + tags: + - world_info + description: |-2 + Returns in a similar format as GET /world_info except only the `uid`s are returned. + responses: + 200: + description: Successful request + content: + application/json: + schema: WorldInfoUIDsSchema + """ + folders = [] + entries = [] + ln = len(vars.worldinfo) + stablesortwi() + vars.worldinfo_i = [wi for wi in vars.worldinfo if wi["init"]] + folder: Optional[list] = None + if ln: + last_folder = ... + for wi in vars.worldinfo_i: + if wi["folder"] != last_folder: + folder = [] + if wi["folder"] is not None: + folders.append({"uid": wi["folder"], "entries": folder}) + last_folder = wi["folder"] + (folder if wi["folder"] is not None else entries).append(wi["uid"]) + return {"folders": folders, "entries": entries} + + +@api_v1.get("/world_info/uids/") +@api_schema_wrap +def get_world_info_uids_uid(uid: int): + """--- + get: + summary: Determine whether or not there is a world info entry with the given UID + tags: + - world_info + parameters: + - name: uid + in: path + description: |-2 + `uid` of the desired world info entry. + schema: + type: integer + minimum: -2147483648 + maximum: 2147483647 + responses: + 200: + description: Successful request + content: + application/json: + schema: BasicBooleanSchema + """ + return {"result": uid in vars.worldinfo_u and vars.worldinfo_u[uid]["init"]} + + +@api_v1.get("/world_info/folders") +@api_schema_wrap +def get_world_info_folders(): + """--- + get: + summary: Retrieve all world info folders + tags: + - world_info + description: |-2 + Returns details about all world info folders currently shown in the KoboldAI GUI. + + The `folders` are sorted in the same order as they are in the GUI. 
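GET /world_info returns loose entries plus folders (each folder carrying its own entries) in GUI order, so a client can walk the whole tree in one pass. Sketch (local-server assumption as above):

```python
import requests

BASE = "http://localhost:5000/api/v1"   # assumed local server

wi = requests.get(f"{BASE}/world_info").json()

# Entries that are not inside any folder.
for entry in wi["entries"]:
    print("(no folder)", entry["uid"], entry["comment"], "->", entry["key"])

# Folders, each with its own entries, in the same order as the GUI.
for folder in wi["folders"]:
    for entry in folder["entries"]:
        print(folder["name"], entry["uid"], entry["comment"], "->", entry["key"])
```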
+ responses: + 200: + description: Successful request + content: + application/json: + schema: WorldInfoFoldersSchema + """ + stablesortwi() + vars.worldinfo_i = [wi for wi in vars.worldinfo if wi["init"]] + return {"folders": [{"uid": folder, **{k: v for k, v in vars.wifolders_d[folder].items() if k != "collapsed"}} for folder in vars.wifolders_l]} + + +@api_v1.get("/world_info/folders/uids") +@api_schema_wrap +def get_world_info_folders_uids(): + """--- + get: + summary: Retrieve the UIDs all world info folders + tags: + - world_info + description: |-2 + Returns the `uid`s of all world info folders currently shown in the KoboldAI GUI. + + The `folders` are sorted in the same order as they are in the GUI. + responses: + 200: + description: Successful request + content: + application/json: + schema: WorldInfoFoldersUIDsSchema + """ + stablesortwi() + vars.worldinfo_i = [wi for wi in vars.worldinfo if wi["init"]] + return {"folders": vars.wifolders_l} + + +@api_v1.get("/world_info/folders/none") +@api_schema_wrap +def get_world_info_folders_none(): + """--- + get: + summary: Retrieve all world info entries not in a folder + tags: + - world_info + description: |-2 + Returns all world info entries that are not in a world info folder. + + The `entries` are sorted in the same order as they are in the KoboldAI GUI. + responses: + 200: + description: Successful request + content: + application/json: + schema: WorldInfoEntriesSchema + """ + entries = [] + stablesortwi() + vars.worldinfo_i = [wi for wi in vars.worldinfo if wi["init"]] + for wi in reversed(vars.worldinfo_i): + if wi["folder"] is not None: + break + entries.append({k: v for k, v in wi.items() if k not in ("init", "folder", "num") and (wi["selective"] or k != "keysecondary")}) + return {"entries": list(reversed(entries))} + + +@api_v1.get("/world_info/folders/none/uids") +@api_schema_wrap +def get_world_info_folders_none_uids(): + """--- + get: + summary: Retrieve the UIDs of all world info entries not in a folder + tags: + - world_info + description: |-2 + Returns the `uid`s of all world info entries that are not in a world info folder. + + The `entries` are sorted in the same order as they are in the KoboldAI GUI. + responses: + 200: + description: Successful request + content: + application/json: + schema: WorldInfoEntriesUIDsSchema + """ + entries = [] + stablesortwi() + vars.worldinfo_i = [wi for wi in vars.worldinfo if wi["init"]] + for wi in reversed(vars.worldinfo_i): + if wi["folder"] is not None: + break + entries.append(wi["uid"]) + return {"entries": list(reversed(entries))} + + +@api_v1.get("/world_info/folders/none/uids/") +@api_schema_wrap +def get_world_info_folders_none_uids_uid(uid: int): + """--- + get: + summary: Determine whether or not there is a world info entry with the given UID that is not in a world info folder + tags: + - world_info + parameters: + - name: uid + in: path + description: |-2 + `uid` of the desired world info entry. 
+ schema: + type: integer + minimum: -2147483648 + maximum: 2147483647 + responses: + 200: + description: Successful request + content: + application/json: + schema: BasicBooleanSchema + """ + return {"result": uid in vars.worldinfo_u and vars.worldinfo_u[uid]["folder"] is None and vars.worldinfo_u[uid]["init"]} + + +@api_v1.get("/world_info/folders/") +@api_schema_wrap +def get_world_info_folders_uid(uid: int): + """--- + get: + summary: Retrieve all world info entries in the given folder + tags: + - world_info + parameters: + - name: uid + in: path + description: |-2 + `uid` of the desired world info folder. + schema: + type: integer + minimum: -2147483648 + maximum: 2147483647 + description: |-2 + Returns all world info entries that are in the world info folder with the given `uid`. + + The `entries` are sorted in the same order as they are in the KoboldAI GUI. + responses: + 200: + description: Successful request + content: + application/json: + schema: WorldInfoEntriesSchema + 404: + description: Not found + content: + application/json: + schema: NotFoundErrorSchema + example: + detail: + msg: No world info folder with the given uid exists. + type: key_error + """ + if uid not in vars.wifolders_d: + abort(Response(json.dumps({"detail": { + "msg": "No world info folder with the given uid exists.", + "type": "key_error", + }}), mimetype="application/json", status=404)) + entries = [] + stablesortwi() + vars.worldinfo_i = [wi for wi in vars.worldinfo if wi["init"]] + for wi in vars.wifolders_u[uid]: + if wi["init"]: + entries.append({k: v for k, v in wi.items() if k not in ("init", "folder", "num") and (wi["selective"] or k != "keysecondary")}) + return {"entries": entries} + + +@api_v1.get("/world_info/folders//uids") +@api_schema_wrap +def get_world_info_folders_uid_uids(uid: int): + """--- + get: + summary: Retrieve the UIDs of all world info entries in the given folder + tags: + - world_info + parameters: + - name: uid + in: path + description: |-2 + `uid` of the desired world info folder. + schema: + type: integer + minimum: -2147483648 + maximum: 2147483647 + description: |-2 + Returns the `uid`s of all world info entries that are in the world info folder with the given `uid`. + + The `entries` are sorted in the same order as they are in the KoboldAI GUI. + responses: + 200: + description: Successful request + content: + application/json: + schema: WorldInfoEntriesUIDsSchema + 404: + description: Not found + content: + application/json: + schema: NotFoundErrorSchema + example: + detail: + msg: No world info folder with the given uid exists. + type: key_error + """ + if uid not in vars.wifolders_d: + abort(Response(json.dumps({"detail": { + "msg": "No world info folder with the given uid exists.", + "type": "key_error", + }}), mimetype="application/json", status=404)) + entries = [] + stablesortwi() + vars.worldinfo_i = [wi for wi in vars.worldinfo if wi["init"]] + for wi in vars.wifolders_u[uid]: + if wi["init"]: + entries.append(wi["uid"]) + return {"entries": entries} + + +@api_v1.get("/world_info/folders//uids/") +@api_schema_wrap +def get_world_info_folders_folder_uid_uids_entry_uid(folder_uid: int, entry_uid: int): + """--- + get: + summary: Determine whether or not there is a world info entry with the given UID in the world info folder with the given UID + tags: + - world_info + parameters: + - name: folder_uid + in: path + description: |-2 + `uid` of the desired world info folder. 
+ schema: + type: integer + minimum: -2147483648 + maximum: 2147483647 + - name: entry_uid + in: path + description: |-2 + `uid` of the desired world info entry. + schema: + type: integer + minimum: -2147483648 + maximum: 2147483647 + responses: + 200: + description: Successful request + content: + application/json: + schema: BasicBooleanSchema + """ + return {"result": entry_uid in vars.worldinfo_u and vars.worldinfo_u[entry_uid]["folder"] == folder_uid and vars.worldinfo_u[entry_uid]["init"]} + + +@api_v1.get("/world_info/folders//name") +@api_schema_wrap +def get_world_info_folders_uid_name(uid: int): + """--- + get: + summary: Retrieve the name of the world info folder with the given UID + tags: + - world_info + parameters: + - name: uid + in: path + description: |-2 + `uid` of the desired world info folder. + schema: + type: integer + minimum: -2147483648 + maximum: 2147483647 + responses: + 200: + description: Successful request + content: + application/json: + schema: BasicStringSchema + 404: + description: Not found + content: + application/json: + schema: NotFoundErrorSchema + example: + detail: + msg: No world info folder with the given uid exists. + type: key_error + """ + if uid not in vars.wifolders_d: + abort(Response(json.dumps({"detail": { + "msg": "No world info folder with the given uid exists.", + "type": "key_error", + }}), mimetype="application/json", status=404)) + return {"value": vars.wifolders_d[uid]["name"]} + + +@api_v1.put("/world_info/folders//name") +@api_schema_wrap +def put_world_info_folders_uid_name(body: BasicStringSchema, uid: int): + """--- + put: + summary: Set the name of the world info folder with the given UID to the specified value + tags: + - world_info + parameters: + - name: uid + in: path + description: |-2 + `uid` of the desired world info folder. + schema: + type: integer + minimum: -2147483648 + maximum: 2147483647 + requestBody: + required: true + content: + application/json: + schema: BasicStringSchema + example: + value: string + responses: + 200: + description: Successful request + content: + application/json: + schema: EmptySchema + 404: + description: Not found + content: + application/json: + schema: NotFoundErrorSchema + example: + detail: + msg: No world info folder with the given uid exists. + type: key_error + {api_validation_error_response} + """ + if uid not in vars.wifolders_d: + abort(Response(json.dumps({"detail": { + "msg": "No world info folder with the given uid exists.", + "type": "key_error", + }}), mimetype="application/json", status=404)) + vars.wifolders_d[uid]["name"] = body.value + setgamesaved(False) + return {} + + +@api_v1.get("/world_info/") +@api_schema_wrap +def get_world_info_uid(uid: int): + """--- + get: + summary: Retrieve information about the world info entry with the given UID + tags: + - world_info + parameters: + - name: uid + in: path + description: |-2 + `uid` of the desired world info entry. + schema: + type: integer + minimum: -2147483648 + maximum: 2147483647 + responses: + 200: + description: Successful request + content: + application/json: + schema: WorldInfoEntrySchema + 404: + description: Not found + content: + application/json: + schema: NotFoundErrorSchema + example: + detail: + msg: No world info entry with the given uid exists. 
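Folder metadata follows the same GET/PUT `{"value": ...}` convention as the entry fields below, with 404/`key_error` for unknown UIDs. Sketch (local-server assumption as above; the folder UID is hypothetical):

```python
import requests

BASE = "http://localhost:5000/api/v1"   # assumed local server
folder_uid = 42                          # hypothetical world info folder uid

resp = requests.get(f"{BASE}/world_info/folders/{folder_uid}/name")
if resp.status_code == 404:              # key_error: no such folder
    print("no folder with that uid")
else:
    print("old name:", resp.json()["value"])
    requests.put(f"{BASE}/world_info/folders/{folder_uid}/name",
                 json={"value": "Renamed folder"}).raise_for_status()
```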
+ type: key_error + """ + if uid not in vars.worldinfo_u: + abort(Response(json.dumps({"detail": { + "msg": "No world info entry with the given uid exists.", + "type": "key_error", + }}), mimetype="application/json", status=404)) + wi = vars.worldinfo_u[uid] + return {k: v for k, v in wi.items() if k not in ("init", "folder", "num") and (wi["selective"] or k != "keysecondary")} + + +@api_v1.get("/world_info//comment") +@api_schema_wrap +def get_world_info_uid_comment(uid: int): + """--- + get: + summary: Retrieve the comment of the world info entry with the given UID + tags: + - world_info + parameters: + - name: uid + in: path + description: |-2 + `uid` of the desired world info entry. + schema: + type: integer + minimum: -2147483648 + maximum: 2147483647 + responses: + 200: + description: Successful request + content: + application/json: + schema: BasicStringSchema + 404: + description: Not found + content: + application/json: + schema: NotFoundErrorSchema + example: + detail: + msg: No world info entry with the given uid exists. + type: key_error + """ + if uid not in vars.worldinfo_u: + abort(Response(json.dumps({"detail": { + "msg": "No world info entry with the given uid exists.", + "type": "key_error", + }}), mimetype="application/json", status=404)) + return {"value": vars.worldinfo_u[uid]["comment"]} + + +@api_v1.put("/world_info//comment") +@api_schema_wrap +def put_world_info_uid_comment(body: BasicStringSchema, uid: int): + """--- + put: + summary: Set the comment of the world info entry with the given UID to the specified value + tags: + - world_info + parameters: + - name: uid + in: path + description: |-2 + `uid` of the desired world info entry. + schema: + type: integer + minimum: -2147483648 + maximum: 2147483647 + requestBody: + required: true + content: + application/json: + schema: BasicStringSchema + example: + value: string + responses: + 200: + description: Successful request + content: + application/json: + schema: EmptySchema + 404: + description: Not found + content: + application/json: + schema: NotFoundErrorSchema + example: + detail: + msg: No world info entry with the given uid exists. + type: key_error + {api_validation_error_response} + """ + if uid not in vars.worldinfo_u: + abort(Response(json.dumps({"detail": { + "msg": "No world info entry with the given uid exists.", + "type": "key_error", + }}), mimetype="application/json", status=404)) + vars.worldinfo_u[uid]["comment"] = body.value + setgamesaved(False) + return {} + + +@api_v1.get("/world_info//content") +@api_schema_wrap +def get_world_info_uid_content(uid: int): + """--- + get: + summary: Retrieve the content of the world info entry with the given UID + tags: + - world_info + parameters: + - name: uid + in: path + description: |-2 + `uid` of the desired world info entry. + schema: + type: integer + minimum: -2147483648 + maximum: 2147483647 + responses: + 200: + description: Successful request + content: + application/json: + schema: BasicStringSchema + 404: + description: Not found + content: + application/json: + schema: NotFoundErrorSchema + example: + detail: + msg: No world info entry with the given uid exists. 
+ type: key_error + """ + if uid not in vars.worldinfo_u: + abort(Response(json.dumps({"detail": { + "msg": "No world info entry with the given uid exists.", + "type": "key_error", + }}), mimetype="application/json", status=404)) + return {"value": vars.worldinfo_u[uid]["content"]} + + +@api_v1.put("/world_info//content") +@api_schema_wrap +def put_world_info_uid_content(body: BasicStringSchema, uid: int): + """--- + put: + summary: Set the content of the world info entry with the given UID to the specified value + tags: + - world_info + parameters: + - name: uid + in: path + description: |-2 + `uid` of the desired world info entry. + schema: + type: integer + minimum: -2147483648 + maximum: 2147483647 + requestBody: + required: true + content: + application/json: + schema: BasicStringSchema + example: + value: string + responses: + 200: + description: Successful request + content: + application/json: + schema: EmptySchema + 404: + description: Not found + content: + application/json: + schema: NotFoundErrorSchema + example: + detail: + msg: No world info entry with the given uid exists. + type: key_error + {api_validation_error_response} + """ + if uid not in vars.worldinfo_u: + abort(Response(json.dumps({"detail": { + "msg": "No world info entry with the given uid exists.", + "type": "key_error", + }}), mimetype="application/json", status=404)) + vars.worldinfo_u[uid]["content"] = body.value + setgamesaved(False) + return {} + + +@api_v1.get("/world_info//key") +@api_schema_wrap +def get_world_info_uid_key(uid: int): + """--- + get: + summary: Retrieve the keys or primary keys of the world info entry with the given UID + tags: + - world_info + parameters: + - name: uid + in: path + description: |-2 + `uid` of the desired world info entry. + schema: + type: integer + minimum: -2147483648 + maximum: 2147483647 + responses: + 200: + description: Successful request + content: + application/json: + schema: BasicStringSchema + 404: + description: Not found + content: + application/json: + schema: NotFoundErrorSchema + example: + detail: + msg: No world info entry with the given uid exists. + type: key_error + """ + if uid not in vars.worldinfo_u: + abort(Response(json.dumps({"detail": { + "msg": "No world info entry with the given uid exists.", + "type": "key_error", + }}), mimetype="application/json", status=404)) + return {"value": vars.worldinfo_u[uid]["key"]} + + +@api_v1.put("/world_info//key") +@api_schema_wrap +def put_world_info_uid_key(body: BasicStringSchema, uid: int): + """--- + put: + summary: Set the keys or primary keys of the world info entry with the given UID to the specified value + tags: + - world_info + parameters: + - name: uid + in: path + description: |-2 + `uid` of the desired world info entry. + schema: + type: integer + minimum: -2147483648 + maximum: 2147483647 + requestBody: + required: true + content: + application/json: + schema: BasicStringSchema + example: + value: string + responses: + 200: + description: Successful request + content: + application/json: + schema: EmptySchema + 404: + description: Not found + content: + application/json: + schema: NotFoundErrorSchema + example: + detail: + msg: No world info entry with the given uid exists. 
+ type: key_error + {api_validation_error_response} + """ + if uid not in vars.worldinfo_u: + abort(Response(json.dumps({"detail": { + "msg": "No world info entry with the given uid exists.", + "type": "key_error", + }}), mimetype="application/json", status=404)) + vars.worldinfo_u[uid]["key"] = body.value + setgamesaved(False) + return {} + + +@api_v1.get("/world_info//keysecondary") +@api_schema_wrap +def get_world_info_uid_keysecondary(uid: int): + """--- + get: + summary: Retrieve the secondary keys of the world info entry with the given UID + tags: + - world_info + parameters: + - name: uid + in: path + description: |-2 + `uid` of the desired world info entry. + schema: + type: integer + minimum: -2147483648 + maximum: 2147483647 + responses: + 200: + description: Successful request + content: + application/json: + schema: BasicStringSchema + 404: + description: Not found + content: + application/json: + schema: NotFoundErrorSchema + example: + detail: + msg: No world info entry with the given uid exists. + type: key_error + """ + if uid not in vars.worldinfo_u: + abort(Response(json.dumps({"detail": { + "msg": "No world info entry with the given uid exists.", + "type": "key_error", + }}), mimetype="application/json", status=404)) + return {"value": vars.worldinfo_u[uid]["keysecondary"]} + + +@api_v1.put("/world_info//keysecondary") +@api_schema_wrap +def put_world_info_uid_keysecondary(body: BasicStringSchema, uid: int): + """--- + put: + summary: Set the secondary keys of the world info entry with the given UID to the specified value + tags: + - world_info + parameters: + - name: uid + in: path + description: |-2 + `uid` of the desired world info entry. + schema: + type: integer + minimum: -2147483648 + maximum: 2147483647 + requestBody: + required: true + content: + application/json: + schema: BasicStringSchema + example: + value: string + responses: + 200: + description: Successful request + content: + application/json: + schema: EmptySchema + 404: + description: Not found + content: + application/json: + schema: NotFoundErrorSchema + example: + detail: + msg: No world info entry with the given uid exists. + type: key_error + {api_validation_error_response} + """ + if uid not in vars.worldinfo_u: + abort(Response(json.dumps({"detail": { + "msg": "No world info entry with the given uid exists.", + "type": "key_error", + }}), mimetype="application/json", status=404)) + vars.worldinfo_u[uid]["keysecondary"] = body.value + setgamesaved(False) + return {} + + +@api_v1.get("/world_info//selective") +@api_schema_wrap +def get_world_info_uid_selective(uid: int): + """--- + get: + summary: Retrieve the selective mode state of the world info entry with the given UID + tags: + - world_info + parameters: + - name: uid + in: path + description: |-2 + `uid` of the desired world info entry. + schema: + type: integer + minimum: -2147483648 + maximum: 2147483647 + responses: + 200: + description: Successful request + content: + application/json: + schema: BasicBooleanSchema + 404: + description: Not found + content: + application/json: + schema: NotFoundErrorSchema + example: + detail: + msg: No world info entry with the given uid exists. 
+ type: key_error + """ + if uid not in vars.worldinfo_u: + abort(Response(json.dumps({"detail": { + "msg": "No world info entry with the given uid exists.", + "type": "key_error", + }}), mimetype="application/json", status=404)) + return {"value": vars.worldinfo_u[uid]["selective"]} + + +@api_v1.put("/world_info//selective") +@api_schema_wrap +def put_world_info_uid_selective(body: BasicBooleanSchema, uid: int): + """--- + put: + summary: Set the selective mode state of the world info entry with the given UID to the specified value + tags: + - world_info + parameters: + - name: uid + in: path + description: |-2 + `uid` of the desired world info entry. + schema: + type: integer + minimum: -2147483648 + maximum: 2147483647 + requestBody: + required: true + content: + application/json: + schema: BasicBooleanSchema + example: + value: true + responses: + 200: + description: Successful request + content: + application/json: + schema: EmptySchema + 404: + description: Not found + content: + application/json: + schema: NotFoundErrorSchema + example: + detail: + msg: No world info entry with the given uid exists. + type: key_error + {api_validation_error_response} + """ + if uid not in vars.worldinfo_u: + abort(Response(json.dumps({"detail": { + "msg": "No world info entry with the given uid exists.", + "type": "key_error", + }}), mimetype="application/json", status=404)) + vars.worldinfo_u[uid]["selective"] = body.value + setgamesaved(False) + return {} + + +@api_v1.get("/world_info//constant") +@api_schema_wrap +def get_world_info_uid_constant(uid: int): + """--- + get: + summary: Retrieve the constant mode state of the world info entry with the given UID + tags: + - world_info + parameters: + - name: uid + in: path + description: |-2 + `uid` of the desired world info entry. + schema: + type: integer + minimum: -2147483648 + maximum: 2147483647 + responses: + 200: + description: Successful request + content: + application/json: + schema: BasicBooleanSchema + 404: + description: Not found + content: + application/json: + schema: NotFoundErrorSchema + example: + detail: + msg: No world info entry with the given uid exists. + type: key_error + """ + if uid not in vars.worldinfo_u: + abort(Response(json.dumps({"detail": { + "msg": "No world info entry with the given uid exists.", + "type": "key_error", + }}), mimetype="application/json", status=404)) + return {"value": vars.worldinfo_u[uid]["constant"]} + + +@api_v1.put("/world_info//constant") +@api_schema_wrap +def put_world_info_uid_constant(body: BasicBooleanSchema, uid: int): + """--- + put: + summary: Set the constant mode state of the world info entry with the given UID to the specified value + tags: + - world_info + parameters: + - name: uid + in: path + description: |-2 + `uid` of the desired world info entry. + schema: + type: integer + minimum: -2147483648 + maximum: 2147483647 + requestBody: + required: true + content: + application/json: + schema: BasicBooleanSchema + example: + value: true + responses: + 200: + description: Successful request + content: + application/json: + schema: EmptySchema + 404: + description: Not found + content: + application/json: + schema: NotFoundErrorSchema + example: + detail: + msg: No world info entry with the given uid exists. 
+ type: key_error + {api_validation_error_response} + """ + if uid not in vars.worldinfo_u: + abort(Response(json.dumps({"detail": { + "msg": "No world info entry with the given uid exists.", + "type": "key_error", + }}), mimetype="application/json", status=404)) + vars.worldinfo_u[uid]["constant"] = body.value + setgamesaved(False) + return {} + + +@api_v1.post("/world_info/folders/none") +@api_schema_wrap +def post_world_info_folders_none(body: EmptySchema): + """--- + post: + summary: Create a new world info entry outside of a world info folder, at the end of the world info + tags: + - world_info + requestBody: + required: true + content: + application/json: + schema: EmptySchema + responses: + 200: + description: Successful request + content: + application/json: + schema: BasicUIDSchema + {api_validation_error_response} + """ + stablesortwi() + vars.worldinfo_i = [wi for wi in vars.worldinfo if wi["init"]] + setgamesaved(False) + emit('from_server', {'cmd': 'wiexpand', 'data': vars.worldinfo[-1]["num"]}, broadcast=True) + vars.worldinfo[-1]["init"] = True + addwiitem(folder_uid=None) + return {"uid": vars.worldinfo[-2]["uid"]} + + +@api_v1.post("/world_info/folders/") +@api_schema_wrap +def post_world_info_folders_uid(body: EmptySchema, uid: int): + """--- + post: + summary: Create a new world info entry at the end of the world info folder with the given UID + tags: + - world_info + parameters: + - name: uid + in: path + description: |-2 + `uid` of the desired world info folder. + schema: + type: integer + minimum: -2147483648 + maximum: 2147483647 + requestBody: + required: true + content: + application/json: + schema: EmptySchema + responses: + 200: + description: Successful request + content: + application/json: + schema: BasicUIDSchema + 404: + description: Not found + content: + application/json: + schema: NotFoundErrorSchema + example: + detail: + msg: No world info folder with the given uid exists. + type: key_error + {api_validation_error_response} + """ + if uid not in vars.wifolders_d: + abort(Response(json.dumps({"detail": { + "msg": "No world info folder with the given uid exists.", + "type": "key_error", + }}), mimetype="application/json", status=404)) + stablesortwi() + vars.worldinfo_i = [wi for wi in vars.worldinfo if wi["init"]] + setgamesaved(False) + emit('from_server', {'cmd': 'wiexpand', 'data': vars.wifolders_u[uid][-1]["num"]}, broadcast=True) + vars.wifolders_u[uid][-1]["init"] = True + addwiitem(folder_uid=uid) + return {"uid": vars.wifolders_u[uid][-2]["uid"]} + + +@api_v1.delete("/world_info/") +@api_schema_wrap +def delete_world_info_uid(uid: int): + """--- + delete: + summary: Delete the world info entry with the given UID + tags: + - world_info + parameters: + - name: uid + in: path + description: |-2 + `uid` of the desired world info entry. + schema: + type: integer + minimum: -2147483648 + maximum: 2147483647 + responses: + 200: + description: Successful request + content: + application/json: + schema: EmptySchema + 404: + description: Not found + content: + application/json: + schema: NotFoundErrorSchema + example: + detail: + msg: No world info entry with the given uid exists. 
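A short sketch of creating a new top-level entry through the API and then filling it in, assuming the same local instance and /api/v1 prefix as in the previous sketch; the key and content strings are illustrative.

```python
# Create an empty entry outside of any folder, then populate it field by field.
import requests

BASE = "http://127.0.0.1:5000/api/v1"

# POST /world_info/folders/none takes an empty JSON body and returns the new entry's uid.
new_uid = requests.post(f"{BASE}/world_info/folders/none", json={}).json()["uid"]

# Populate the freshly created entry using the per-field PUT endpoints.
requests.put(f"{BASE}/world_info/{new_uid}/key", json={"value": "dragon"})
requests.put(f"{BASE}/world_info/{new_uid}/content",
             json={"value": "The dragon sleeps beneath the mountain."})
requests.put(f"{BASE}/world_info/{new_uid}/constant", json={"value": False})
```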
+ type: key_error + """ + if uid not in vars.worldinfo_u: + abort(Response(json.dumps({"detail": { + "msg": "No world info entry with the given uid exists.", + "type": "key_error", + }}), mimetype="application/json", status=404)) + deletewi(uid) + return {} + + +@api_v1.post("/world_info/folders") +@api_schema_wrap +def post_world_info_folders(body: EmptySchema): + """--- + post: + summary: Create a new world info folder at the end of the world info + tags: + - world_info + requestBody: + required: true + content: + application/json: + schema: EmptySchema + responses: + 200: + description: Successful request + content: + application/json: + schema: BasicUIDSchema + {api_validation_error_response} + """ + addwifolder() + return {"uid": vars.wifolders_l[-1]} + + +@api_v1.delete("/world_info/folders/") +@api_schema_wrap +def delete_world_info_folders_uid(uid: int): + """--- + delete: + summary: Delete the world info folder with the given UID + tags: + - world_info + parameters: + - name: uid + in: path + description: |-2 + `uid` of the desired world info folder. + schema: + type: integer + minimum: -2147483648 + maximum: 2147483647 + responses: + 200: + description: Successful request + content: + application/json: + schema: EmptySchema + 404: + description: Not found + content: + application/json: + schema: NotFoundErrorSchema + example: + detail: + msg: No world info folders with the given uid exists. + type: key_error + """ + if uid not in vars.wifolders_d: + abort(Response(json.dumps({"detail": { + "msg": "No world info folder with the given uid exists.", + "type": "key_error", + }}), mimetype="application/json", status=404)) + deletewifolder(uid) + return {} + + +def _make_f_get(obj, _var_name, _name, _schema, _example_yaml_value): + def f_get(): + """--- + get: + summary: Retrieve the current {} setting value + tags: + - config + responses: + 200: + description: Successful request + content: + application/json: + schema: {} + example: + value: {} + """ + _obj = {"vars": vars, "vars.formatoptns": vars.formatoptns}[obj] + if _var_name.startswith("@"): + return {"value": _obj[_var_name[1:]]} + else: + return {"value": getattr(_obj, _var_name)} + f_get.__doc__ = f_get.__doc__.format(_name, _schema, _example_yaml_value) + return f_get + +def _make_f_put(schema_class: Type[KoboldSchema], obj, _var_name, _name, _schema, _example_yaml_value): + def f_put(body: schema_class): + """--- + put: + summary: Set {} setting to specified value + tags: + - config + requestBody: + required: true + content: + application/json: + schema: {} + example: + value: {} + responses: + 200: + description: Successful request + content: + application/json: + schema: EmptySchema + {api_validation_error_response} + """ + _obj = {"vars": vars, "vars.formatoptns": vars.formatoptns}[obj] + if _var_name.startswith("@"): + _obj[_var_name[1:]] = body.value + else: + setattr(_obj, _var_name, body.value) + settingschanged() + refresh_settings() + return {} + f_put.__doc__ = f_put.__doc__.format(_name, _schema, _example_yaml_value, api_validation_error_response=api_validation_error_response) + return f_put + +def create_config_endpoint(method="GET", schema="MemorySchema"): + _name = globals()[schema].KoboldMeta.name + _var_name = globals()[schema].KoboldMeta.var_name + _route_name = globals()[schema].KoboldMeta.route_name + _obj = globals()[schema].KoboldMeta.obj + _example_yaml_value = globals()[schema].KoboldMeta.example_yaml_value + _schema = schema + f = _make_f_get(_obj, _var_name, _name, _schema, _example_yaml_value) if 
method == "GET" else _make_f_put(globals()[schema], _obj, _var_name, _name, _schema, _example_yaml_value) + f.__name__ = f"{method.lower()}_config_{_name}" + f = api_schema_wrap(f) + for api in (api_v1,): + f = api.route(f"/config/{_route_name}", methods=[method])(f) + +class SoftPromptSettingSchema(KoboldSchema): + value: str = fields.String(required=True, validate=[soft_prompt_validator, validate.Regexp(r"^[^/\\]*$")], metadata={"description": "Soft prompt name, or a string containing only whitespace for no soft prompt. If using the GET method and no soft prompt is loaded, this will always be the empty string."}) + +@api_v1.get("/config/soft_prompt") +@api_schema_wrap +def get_config_soft_prompt(): + """--- + get: + summary: Retrieve the current soft prompt name + tags: + - config + responses: + 200: + description: Successful request + content: + application/json: + schema: SoftPromptSettingSchema + example: + value: "" + """ + return {"value": vars.spfilename.strip()} + +class SoftPromptsListSchema(KoboldSchema): + values: List[SoftPromptSettingSchema] = fields.List(fields.Nested(SoftPromptSettingSchema), required=True, metadata={"description": "Array of available softprompts."}) + +@api_v1.get("/config/soft_prompts_list") +@api_schema_wrap +def get_config_soft_prompts_list(): + """--- + get: + summary: Retrieve all available softprompt filenames + tags: + - config + responses: + 200: + description: Successful request + content: + application/json: + schema: SoftPromptsListSchema + example: + values: [] + """ + splist = [] + for sp in fileops.getspfiles(vars.modeldim): + + splist.append({"value":sp["filename"]}) + return {"values": splist} + +@api_v1.put("/config/soft_prompt") +@api_schema_wrap +def put_config_soft_prompt(body: SoftPromptSettingSchema): + """--- + put: + summary: Set soft prompt by name + tags: + - config + requestBody: + required: true + content: + application/json: + schema: SoftPromptSettingSchema + example: + value: "" + responses: + 200: + description: Successful request + content: + application/json: + schema: EmptySchema + {api_validation_error_response} + """ + if vars.allowsp: + spRequest(body.value) + settingschanged() + return {} + +config_endpoint_schemas: List[Type[KoboldSchema]] = [] + +def config_endpoint_schema(c: Type[KoboldSchema]): + config_endpoint_schemas.append(c) + return c + + +@config_endpoint_schema +class MemorySettingSchema(KoboldSchema): + value = fields.String(required=True) + class KoboldMeta: + route_name = "memory" + obj = "vars" + var_name = "memory" + name = "memory" + example_yaml_value = "Memory" + +@config_endpoint_schema +class AuthorsNoteSettingSchema(KoboldSchema): + value = fields.String(required=True) + class KoboldMeta: + route_name = "authors_note" + obj = "vars" + var_name = "authornote" + name = "author's note" + example_yaml_value = "''" + +@config_endpoint_schema +class AuthorsNoteTemplateSettingSchema(KoboldSchema): + value = fields.String(required=True) + class KoboldMeta: + route_name = "authors_note_template" + obj = "vars" + var_name = "authornotetemplate" + name = "author's note template" + example_yaml_value = "\"[Author's note: <|>]\"" + +@config_endpoint_schema +class TopKSamplingSettingSchema(KoboldSchema): + value = fields.Integer(validate=validate.Range(min=0), required=True) + class KoboldMeta: + route_name = "top_k" + obj = "vars" + var_name = "top_k" + name = "top-k sampling" + example_yaml_value = "0" + +@config_endpoint_schema +class TopASamplingSettingSchema(KoboldSchema): + value = 
fields.Float(validate=validate.Range(min=0), required=True) + class KoboldMeta: + route_name = "top_a" + obj = "vars" + var_name = "top_a" + name = "top-a sampling" + example_yaml_value = "0.0" + +@config_endpoint_schema +class TopPSamplingSettingSchema(KoboldSchema): + value = fields.Float(validate=validate.Range(min=0, max=1), required=True) + class KoboldMeta: + route_name = "top_p" + obj = "vars" + var_name = "top_p" + name = "top-p sampling" + example_yaml_value = "0.9" + +@config_endpoint_schema +class TailFreeSamplingSettingSchema(KoboldSchema): + value = fields.Float(validate=validate.Range(min=0, max=1), required=True) + class KoboldMeta: + route_name = "tfs" + obj = "vars" + var_name = "tfs" + name = "tail free sampling" + example_yaml_value = "1.0" + +@config_endpoint_schema +class TypicalSamplingSettingSchema(KoboldSchema): + value = fields.Float(validate=validate.Range(min=0, max=1), required=True) + class KoboldMeta: + route_name = "typical" + obj = "vars" + var_name = "typical" + name = "typical sampling" + example_yaml_value = "1.0" + +@config_endpoint_schema +class TemperatureSamplingSettingSchema(KoboldSchema): + value = fields.Float(validate=validate.Range(min=0, min_inclusive=False), required=True) + class KoboldMeta: + route_name = "temperature" + obj = "vars" + var_name = "temp" + name = "temperature" + example_yaml_value = "0.5" + +@config_endpoint_schema +class GensPerActionSettingSchema(KoboldSchema): + value = fields.Integer(validate=validate.Range(min=0, max=5), required=True) + class KoboldMeta: + route_name = "n" + obj = "vars" + var_name = "numseqs" + name = "Gens Per Action" + example_yaml_value = "1" + +@config_endpoint_schema +class MaxLengthSettingSchema(KoboldSchema): + value = fields.Integer(validate=validate.Range(min=1, max=512), required=True) + class KoboldMeta: + route_name = "max_length" + obj = "vars" + var_name = "genamt" + name = "max length" + example_yaml_value = "80" + +@config_endpoint_schema +class WorldInfoDepthSettingSchema(KoboldSchema): + value = fields.Integer(validate=validate.Range(min=1, max=5), required=True) + class KoboldMeta: + route_name = "world_info_depth" + obj = "vars" + var_name = "widepth" + name = "world info depth" + example_yaml_value = "3" + +@config_endpoint_schema +class AuthorsNoteDepthSettingSchema(KoboldSchema): + value = fields.Integer(validate=validate.Range(min=1, max=5), required=True) + class KoboldMeta: + route_name = "authors_note_depth" + obj = "vars" + var_name = "andepth" + name = "author's note depth" + example_yaml_value = "3" + +@config_endpoint_schema +class MaxContextLengthSettingSchema(KoboldSchema): + value = fields.Integer(validate=validate.Range(min=512, max=2048), required=True) + class KoboldMeta: + route_name = "max_context_length" + obj = "vars" + var_name = "max_length" + name = "max context length" + example_yaml_value = "2048" + +@config_endpoint_schema +class TrimIncompleteSentencesSettingsSchema(KoboldSchema): + value = fields.Boolean(required=True) + class KoboldMeta: + route_name = "frmttriminc" + obj = "vars.formatoptns" + var_name = "@frmttriminc" + name = "trim incomplete sentences (output formatting)" + example_yaml_value = "false" + +@config_endpoint_schema +class RemoveBlankLinesSettingsSchema(KoboldSchema): + value = fields.Boolean(required=True) + class KoboldMeta: + route_name = "frmtrmblln" + obj = "vars.formatoptns" + var_name = "@frmtrmblln" + name = "remove blank lines (output formatting)" + example_yaml_value = "false" + +@config_endpoint_schema +class 
RemoveSpecialCharactersSettingsSchema(KoboldSchema): + value = fields.Boolean(required=True) + class KoboldMeta: + route_name = "frmtrmspch" + obj = "vars.formatoptns" + var_name = "@frmtrmspch" + name = "remove special characters (output formatting)" + example_yaml_value = "false" + +@config_endpoint_schema +class SingleLineSettingsSchema(KoboldSchema): + value = fields.Boolean(required=True) + class KoboldMeta: + route_name = "singleline" + obj = "vars.formatoptns" + var_name = "@singleline" + name = "single line (output formatting)" + example_yaml_value = "false" + +@config_endpoint_schema +class AddSentenceSpacingSettingsSchema(KoboldSchema): + value = fields.Boolean(required=True) + class KoboldMeta: + route_name = "frmtadsnsp" + obj = "vars.formatoptns" + var_name = "@frmtadsnsp" + name = "add sentence spacing (input formatting)" + example_yaml_value = "false" + + + +for schema in config_endpoint_schemas: + create_config_endpoint(schema=schema.__name__, method="GET") + create_config_endpoint(schema=schema.__name__, method="PUT") + + #==================================================================# # Final startup commands to launch Flask app #==================================================================# -print("", end="", flush=True) if __name__ == "__main__": - port = args.port if "port" in args and args.port is not None else 5000 - print("{0}\nStarting webserver...{1}".format(colors.GREEN, colors.END), flush=True) + + general_startup() + # Start flask & SocketIO + logger.init("Flask", status="Starting") + Session(app) + logger.init_ok("Flask", status="OK") + logger.init("Webserver", status="Starting") + patch_transformers() + #show_select_model_list() + if vars.model == "" or vars.model is None: + vars.model = "ReadOnly" + load_model(initial_load=True) # Start Flask/SocketIO (Blocking, so this must be last method!) - + port = args.port if "port" in args and args.port is not None else 5000 + #socketio.run(app, host='0.0.0.0', port=port) if(vars.host): if(args.localtunnel): @@ -5683,22 +10090,48 @@ if __name__ == "__main__": if(args.localtunnel or args.ngrok or args.remote): with open('cloudflare.log', 'w') as cloudflarelog: cloudflarelog.write("KoboldAI has finished loading and is available at the following link : " + cloudflare) - print(format(colors.GREEN) + "KoboldAI has finished loading and is available at the following link : " + cloudflare + format(colors.END)) + logger.init_ok("Webserver", status="OK") + logger.message(f"KoboldAI has finished loading and is available at the following link: {cloudflare}") else: - print("{0}Webserver has started, you can now connect to this machine at port {1}{2}" - .format(colors.GREEN, port, colors.END)) + logger.init_ok("Webserver", status="OK") + logger.message(f"Webserver has started, you can now connect to this machine at port: {port}") vars.serverstarted = True socketio.run(app, host='0.0.0.0', port=port) else: - import webbrowser - webbrowser.open_new('http://localhost:{0}'.format(port)) - print("{0}Server started!\nYou may now connect with a browser at http://127.0.0.1:{1}/{2}" - .format(colors.GREEN, port, colors.END)) - vars.serverstarted = True if args.unblock: + if not args.no_ui: + try: + import webbrowser + webbrowser.open_new('http://localhost:{0}'.format(port)) + except: + pass + logger.init_ok("Webserver", status="OK") + logger.message(f"Webserver started! 
You may now connect with a browser at http://127.0.0.1:{port}") + vars.serverstarted = True socketio.run(app, port=port, host='0.0.0.0') else: + if not args.no_ui: + try: + import webbrowser + webbrowser.open_new('http://localhost:{0}'.format(port)) + except: + pass + logger.init_ok("Webserver", status="OK") + logger.message(f"Webserver started! You may now connect with a browser at http://127.0.0.1:{port}") + vars.serverstarted = True socketio.run(app, port=port) + logger.init("Webserver", status="Closed") + else: + general_startup() + # Start flask & SocketIO + logger.init("Flask", status="Starting") + Session(app) + logger.init_ok("Flask", status="OK") + patch_transformers() + #show_select_model_list() + if vars.model == "" or vars.model is None: + vars.model = "ReadOnly" + load_model(initial_load=True) print("{0}\nServer started in WSGI mode!{1}".format(colors.GREEN, colors.END), flush=True) diff --git a/breakmodel.py b/breakmodel.py index eb49e669..52000335 100644 --- a/breakmodel.py +++ b/breakmodel.py @@ -4,7 +4,7 @@ https://github.com/arrmansa/Basic-UI-for-GPT-J-6B-with-low-vram/blob/main/GPT-J- The ORIGINAL version of the patch is released under the Apache License 2.0 Copyright 2021 arrmansa Copyright 2021 finetuneanon -Copyright 2018 The Hugging Face team +Copyright 2018, 2022 The Hugging Face team Apache License @@ -216,11 +216,13 @@ from torch import nn import torch.cuda.comm import copy import gc +import os import sys import itertools import bisect import random -from typing import Optional +import utils +from typing import Dict, List, Optional, Union from transformers.modeling_outputs import BaseModelOutputWithPast, BaseModelOutputWithPastAndCrossAttentions @@ -230,7 +232,100 @@ logger = logging.get_logger(__name__) breakmodel = True gpu_blocks = [] -primary_device = 0 +disk_blocks = 0 +primary_device = 0 if torch.cuda.device_count() > 0 else "cpu" + + +if utils.HAS_ACCELERATE: + from accelerate.hooks import attach_align_device_hook_on_blocks + from accelerate.utils import OffloadedWeightsLoader, check_device_map, extract_submodules_state_dict, offload_state_dict + from accelerate import dispatch_model + +def dispatch_model_ex( + model: nn.Module, + device_map: Dict[str, Union[str, int, torch.device]], + main_device: Optional[torch.device] = None, + state_dict: Optional[Dict[str, torch.Tensor]] = None, + offload_dir: Union[str, os.PathLike] = None, + offload_buffers: bool = False, + **kwargs, +): + """ + This is a modified version of + https://github.com/huggingface/accelerate/blob/eeaba598f455fbd2c48661d7e816d3ff25ab050b/src/accelerate/big_modeling.py#L130 + that still works when the main device is the CPU. + + Dispatches a model according to a given device map. Layers of the model might be spread across GPUs, offloaded on + the CPU or even the disk. + + Args: + model (`torch.nn.Module`): + The model to dispatch. + device_map (`Dict[str, Union[str, int, torch.device]]`): + A dictionary mapping module names in the models `state_dict` to the device they should go to. Note that + `"disk"` is accepted even if it's not a proper value for `torch.device`. + main_device (`str`, `int` or `torch.device`, *optional*): + The main execution device. Will default to the first device in the `device_map` different from `"cpu"` or + `"disk"`. + state_dict (`Dict[str, torch.Tensor]`, *optional*): + The state dict of the part of the model that will be kept on CPU. 
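The device_map accepted by dispatch_model_ex (whose body continues below) maps submodule names from the model's state_dict to a GPU index, "cpu", or "disk". A rough sketch of what such a map looks like; the module names and offload directory here are purely illustrative and not taken from KoboldAI itself.

```python
# Hypothetical device map for a small decoder-only model: most blocks stay on the CPU,
# one block is offloaded to disk and streamed back in through the accelerate hooks.
device_map = {
    "transformer.wte": "cpu",
    "transformer.h.0": "cpu",
    "transformer.h.1": "disk",   # weights for this block live in offload_dir on disk
    "transformer.ln_f": "cpu",
    "lm_head": "cpu",
}

# model = AutoModelForCausalLM.from_pretrained(...)        # assumed to be loaded elsewhere
# model = dispatch_model_ex(model, device_map,
#                           main_device="cpu",
#                           offload_dir="accelerate-disk-cache")  # illustrative path
```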
+ offload_dir (`str` or `os.PathLike`): + The folder in which to offload the model weights (or where the model weights are already offloaded). + offload_buffers (`bool`, *optional*, defaults to `False`): + Whether or not to offload the buffers with the model parameters. + preload_module_classes (`List[str]`, *optional*): + A list of classes whose instances should load all their weights (even in the submodules) at the beginning + of the forward. This should only be used for classes that have submodules which are registered but not + called directly during the forward, for instance if a `dense` linear layer is registered, but at forward, + `dense.weight` and `dense.bias` are used in some operations instead of calling `dense` directly. + """ + if main_device != "cpu": + return dispatch_model(model, device_map, main_device, state_dict, offload_dir=offload_dir, offload_buffers=offload_buffers, **kwargs) + + # Error early if the device map is incomplete. + check_device_map(model, device_map) + + offload_devices = ["cpu", "disk"] if main_device != "cpu" else ["disk"] + + if main_device is None: + main_device = [d for d in device_map.values() if d not in offload_devices][0] + + cpu_modules = [name for name, device in device_map.items() if device == "cpu"] if main_device != "cpu" else [] + if state_dict is None and len(cpu_modules) > 0: + state_dict = extract_submodules_state_dict(model.state_dict(), cpu_modules) + + disk_modules = [name for name, device in device_map.items() if device == "disk"] + if offload_dir is None and len(disk_modules) > 0: + raise ValueError( + "We need an `offload_dir` to dispatch this model according to this `device_map`, the following submodules " + f"need to be offloaded: {', '.join(disk_modules)}." + ) + if len(disk_modules) > 0 and ( + not os.path.isdir(offload_dir) or not os.path.isfile(os.path.join(offload_dir, "index.json")) + ): + disk_state_dict = extract_submodules_state_dict(model.state_dict(), disk_modules) + offload_state_dict(offload_dir, disk_state_dict) + + execution_device = { + name: main_device if device in offload_devices else device for name, device in device_map.items() + } + offload = {name: device in offload_devices for name, device in device_map.items()} + save_folder = offload_dir if len(disk_modules) > 0 else None + if state_dict is not None or save_folder is not None: + weights_map = OffloadedWeightsLoader(state_dict=state_dict, save_folder=save_folder) + else: + weights_map = None + + attach_align_device_hook_on_blocks( + model, + execution_device=execution_device, + offload=offload, + offload_buffers=offload_buffers, + weights_map=weights_map, + **kwargs, + ) + model.hf_device_map = device_map + return model # Copied from transformers.models.bart.modeling_bart._expand_mask diff --git a/customsettings_template.json b/customsettings_template.json new file mode 100644 index 00000000..361ca4e6 --- /dev/null +++ b/customsettings_template.json @@ -0,0 +1 @@ +{"aria2_port":null, "breakmodel":null, "breakmodel_disklayers":null, "breakmodel_gpulayers":null, "breakmodel_layers":null, "colab":null, "configname":null, "cpu":null, "host":null, "localtunnel":null, "lowmem":null, "model":null, "ngrok":null, "no_aria2":null, "noaimenu":null, "nobreakmodel":null, "override_delete":null, "override_rename":null, "path":null, "port":null, "quiet":null, "remote":null, "revision":null, "savemodel":null, "unblock":null} diff --git a/docker-standalone/Dockerfile b/docker-standalone/Dockerfile new file mode 100644 index 00000000..6e13ce0e --- /dev/null +++ 
b/docker-standalone/Dockerfile @@ -0,0 +1,8 @@ +FROM debian +RUN apt update && apt install wget aria2 git bzip2 -y +RUN git clone https://github.com/henk717/koboldai /opt/koboldai +WORKDIR /opt/koboldai +RUN ./install_requirements.sh cuda +COPY docker-helper.sh /opt/koboldai/docker-helper.sh +EXPOSE 5000/tcp +CMD /opt/koboldai/docker-helper.sh diff --git a/docker-standalone/Readme.txt b/docker-standalone/Readme.txt new file mode 100644 index 00000000..9b698f90 --- /dev/null +++ b/docker-standalone/Readme.txt @@ -0,0 +1,17 @@ +These are the source files for the official versions of the standalone docker and are provided for completeness. +Using these files you will not use any of the local modifications you make, instead it will use the latest github version of KoboldAI as the basis. + +If you wish to run KoboldAI containerised with access to the local directory you can do so using docker-cuda.sh or docker-rocm.sh instead. + +We do not support ROCm in the standalone docker as it is intended for cloud deployment on CUDA systems. +If you wish to build a ROCm version instead, you can do so by modifying the Dockerfile and changing the install_requirements.sh from cuda to rocm. + +Similarly you need to modify the Dockerfile to specify which branch of KoboldAI the docker is being built for. + +Usage: +This docker will automatically assume the persistent volume is mounted to /content and will by default not store models there. +The following environment variables exist to adjust the behavior if desired. + +KOBOLDAI_DATADIR=/content , this can be used to specify a different default location for your stories, settings, userscripts, etc in case your provider does not let you change the mounted folder path. +KOBOLDAI_MODELDIR= , This variable can be used to make model storage persistent, it can be the same location as your datadir but this is not required. +KOBOLDAI_ARGS= , This variable is built in KoboldAI and can be used to override the default launch options. Right now the docker by default will launch in remote mode, with output hidden from the logs and file management enabled. \ No newline at end of file diff --git a/docker-standalone/docker-helper.sh b/docker-standalone/docker-helper.sh new file mode 100755 index 00000000..30107e8a --- /dev/null +++ b/docker-standalone/docker-helper.sh @@ -0,0 +1,47 @@ +#!/bin/bash +cd /opt/koboldai +git pull +#./install_requirements.sh cuda + +if [[ ! -v KOBOLDAI_DATADIR ]];then + mkdir /content + KOBOLDAI_DATADIR=/content +fi + +mkdir $KOBOLDAI_DATADIR/stories +if [[ ! -v KOBOLDAI_MODELDIR ]];then + mkdir $KOBOLDAI_MODELDIR/models +fi +mkdir $KOBOLDAI_DATADIR/settings +mkdir $KOBOLDAI_DATADIR/softprompts +mkdir $KOBOLDAI_DATADIR/userscripts +#mkdir $KOBOLDAI_MODELDIR/cache + +cp -rn stories/* $KOBOLDAI_DATADIR/stories/ +cp -rn userscripts/* $KOBOLDAI_DATADIR/userscripts/ +cp -rn softprompts/* $KOBOLDAI_DATADIR/softprompts/ + +rm stories +rm -rf stories/ +rm userscripts +rm -rf userscripts/ +rm softprompts +rm -rf softprompts/ + +if [[ ! -v KOBOLDAI_MODELDIR ]];then + rm models + rm -rf models/ + #rm cache + #rm -rf cache/ +fi + +ln -s $KOBOLDAI_DATADIR/stories/ stories +ln -s $KOBOLDAI_DATADIR/settings/ settings +ln -s $KOBOLDAI_DATADIR/softprompts/ softprompts +ln -s $KOBOLDAI_DATADIR/userscripts/ userscripts +if [[ ! 
-v KOBOLDAI_MODELDIR ]];then + ln -s $KOBOLDAI_MODELDIR/models/ models + #ln -s $KOBOLDAI_MODELDIR/cache/ cache +fi + +PYTHONUNBUFFERED=1 ./play.sh --remote --quiet --override_delete --override_rename diff --git a/environments/finetuneanon.yml b/environments/finetuneanon.yml index b49f0bd7..85d5ea66 100644 --- a/environments/finetuneanon.yml +++ b/environments/finetuneanon.yml @@ -6,6 +6,7 @@ channels: dependencies: - colorama - flask-socketio + - flask-session - pytorch - cudatoolkit=11.1 - tensorflow-gpu @@ -15,6 +16,9 @@ dependencies: - bleach=4.1.0 - pip - git=2.35.1 + - marshmallow>=3.13 + - apispec-webframeworks + - loguru - pip: - git+https://github.com/finetuneanon/transformers@gpt-neo-localattention3-rp-b - flask-cloudflared diff --git a/environments/huggingface.yml b/environments/huggingface.yml index 205d5e31..7abceefa 100644 --- a/environments/huggingface.yml +++ b/environments/huggingface.yml @@ -6,6 +6,7 @@ channels: dependencies: - colorama - flask-socketio + - flask-session - pytorch=1.11.* - python=3.8.* - cudatoolkit=11.1 @@ -16,9 +17,12 @@ dependencies: - git=2.35.1 - sentencepiece - protobuf + - marshmallow>=3.13 + - apispec-webframeworks + - loguru - pip: - flask-cloudflared - flask-ngrok - lupa==1.10 - transformers>=4.20.1 - - accelerate \ No newline at end of file + - accelerate diff --git a/environments/rocm-finetune.yml b/environments/rocm-finetune.yml index 5672ed21..fc56eb4f 100644 --- a/environments/rocm-finetune.yml +++ b/environments/rocm-finetune.yml @@ -5,12 +5,16 @@ channels: dependencies: - colorama - flask-socketio + - flask-session - python=3.8.* - eventlet - markdown - bleach=4.1.0 - pip - git=2.35.1 + - marshmallow>=3.13 + - apispec-webframeworks + - loguru - pip: - --find-links https://download.pytorch.org/whl/rocm4.2/torch_stable.html - torch diff --git a/environments/rocm.yml b/environments/rocm.yml index 8ade341f..e885f4df 100644 --- a/environments/rocm.yml +++ b/environments/rocm.yml @@ -5,6 +5,7 @@ channels: dependencies: - colorama - flask-socketio + - flask-session - python=3.8.* - eventlet - markdown @@ -13,9 +14,12 @@ dependencies: - git=2.35.1 - sentencepiece - protobuf + - marshmallow>=3.13 + - apispec-webframeworks + - loguru - pip: - - --find-links https://download.pytorch.org/whl/rocm4.2/torch_stable.html - - torch==1.10.* + - --extra-index-url https://download.pytorch.org/whl/rocm5.1.1 + - torch - torchvision - flask-cloudflared - flask-ngrok diff --git a/fileops.py b/fileops.py index c303764e..a416f24d 100644 --- a/fileops.py +++ b/fileops.py @@ -3,6 +3,7 @@ from typing import Tuple, Union, Optional import os import json import zipfile +from logger import logger #==================================================================# # Generic Method for prompting for file path @@ -149,16 +150,16 @@ def getspfiles(model_dimension: int): continue z, version, shape, fortran_order, dtype = checksp(file, model_dimension) if z == 1: - print(f"Browser SP loading error: {file} is malformed or not a soft prompt ZIP file.") + logger.warning(f"Softprompt {file} is malformed or not a soft prompt ZIP file.") continue if z == 2: - print(f"Browser SP loading error: {file} tensor.npy has unsupported dtype '{dtype.name}'.") + logger.warning(f"Softprompt {file} tensor.npy has unsupported dtype '{dtype.name}'.") continue if z == 3: - print(f"Browser SP loading error: {file} tensor.npy has model dimension {shape[1]} which does not match your model's model dimension of {model_dimension}. 
This usually means this soft prompt is not compatible with your model.") + logger.debug(f"Softprompt {file} tensor.npy has model dimension {shape[1]} which does not match your model's model dimension of {model_dimension}. This usually means this soft prompt is not compatible with your model.") continue if z == 4: - print(f"Browser SP loading error: {file} tensor.npy has {shape[0]} tokens but it is supposed to have less than 2048 tokens.") + logger.warning(f"Softprompt {file} tensor.npy has {shape[0]} tokens but it is supposed to have less than 2048 tokens.") continue assert isinstance(z, zipfile.ZipFile) try: diff --git a/gensettings.py b/gensettings.py index 50b84995..a823f59b 100644 --- a/gensettings.py +++ b/gensettings.py @@ -230,6 +230,50 @@ gensettingstf = [ "default": 0, "tooltip": "Disables userscript generation modifiers." }, + { + "uitype": "toggle", + "unit": "bool", + "label": "Full Determinism", + "id": "setfulldeterminism", + "min": 0, + "max": 1, + "step": 1, + "default": 0, + "tooltip": "Causes generation to be fully deterministic -- the model will always output the same thing as long as your story, settings and RNG seed are the same. If this is off, only the sequence of outputs that the model makes will be deterministic." + }, + { + "uitype": "toggle", + "unit": "bool", + "label": "Token Streaming", + "id": "setoutputstreaming", + "min": 0, + "max": 1, + "step": 1, + "default": 0, + "tooltip": "Shows outputs to you as they are made. Does not work with more than one gens per action." + }, + { + "uitype": "toggle", + "unit": "bool", + "label": "Probability Viewer", + "id": "setshowprobs", + "min": 0, + "max": 1, + "step": 1, + "default": 0, + "tooltip": "Shows token selection probabilities. Does not work with more than one gens per action." + }, + { + "uitype": "toggle", + "unit": "bool", + "label": "Show Field Budget", + "id": "setshowbudget", + "min": 0, + "max": 1, + "step": 1, + "default": 0, + "tooltip": "Shows token usage when typing in relevant text boxes. May lag slower devices." + }, { "uitype": "toggle", "unit": "bool", @@ -240,7 +284,7 @@ gensettingstf = [ "step": 1, "default": 0, "tooltip": "Show debug info" - } + }, ] gensettingsik =[{ @@ -404,9 +448,9 @@ formatcontrols = [{ "tooltip": "Remove special characters (@,#,%,^, etc)" }, { - "label": "Add sentence spacing", + "label": "Automatic spacing", "id": "frmtadsnsp", - "tooltip": "If the last action ended with punctuation, add a space to the beginning of the next action." + "tooltip": "Add spaces automatically if needed" }, { "label": "Single Line", diff --git a/logger.py b/logger.py new file mode 100644 index 00000000..8da2aa7e --- /dev/null +++ b/logger.py @@ -0,0 +1,99 @@ +import sys +from functools import partialmethod +from loguru import logger + +STDOUT_LEVELS = ["GENERATION", "PROMPT"] +INIT_LEVELS = ["INIT", "INIT_OK", "INIT_WARN", "INIT_ERR"] +MESSAGE_LEVELS = ["MESSAGE"] +# By default we're at error level or higher +verbosity = 20 +quiet = 0 + +def set_logger_verbosity(count): + global verbosity + # The count comes reversed. 
So count = 0 means minimum verbosity + # While count 5 means maximum verbosity + # So the more count we have, the lowe we drop the versbosity maximum + verbosity = 20 - (count * 10) + +def quiesce_logger(count): + global quiet + # The bigger the count, the more silent we want our logger + quiet = count * 10 + +def is_stdout_log(record): + if record["level"].name not in STDOUT_LEVELS: + return(False) + if record["level"].no < verbosity + quiet: + return(False) + return(True) + +def is_init_log(record): + if record["level"].name not in INIT_LEVELS: + return(False) + if record["level"].no < verbosity + quiet: + return(False) + return(True) + +def is_msg_log(record): + if record["level"].name not in MESSAGE_LEVELS: + return(False) + if record["level"].no < verbosity + quiet: + return(False) + return(True) + +def is_stderr_log(record): + if record["level"].name in STDOUT_LEVELS + INIT_LEVELS + MESSAGE_LEVELS: + return(False) + if record["level"].no < verbosity + quiet: + return(False) + return(True) + +def test_logger(): + logger.generation("This is a generation message\nIt is typically multiline\nThee Lines".encode("unicode_escape").decode("utf-8")) + logger.prompt("This is a prompt message") + logger.debug("Debug Message") + logger.info("Info Message") + logger.warning("Info Warning") + logger.error("Error Message") + logger.critical("Critical Message") + logger.init("This is an init message", status="Starting") + logger.init_ok("This is an init message", status="OK") + logger.init_warn("This is an init message", status="Warning") + logger.init_err("This is an init message", status="Error") + logger.message("This is user message") + sys.exit() + + +logfmt = "{level: <10} | {name}:{function}:{line} - {message}" +genfmt = "{level: <10} @ {time:YYYY-MM-DD HH:mm:ss} | {message}" +initfmt = "INIT | {extra[status]: <10} | {message}" +msgfmt = "{level: <10} | {message}" + +logger.level("GENERATION", no=24, color="") +logger.level("PROMPT", no=23, color="") +logger.level("INIT", no=31, color="") +logger.level("INIT_OK", no=31, color="") +logger.level("INIT_WARN", no=31, color="") +logger.level("INIT_ERR", no=31, color="") +# Messages contain important information without which this application might not be able to be used +# As such, they have the highest priority +logger.level("MESSAGE", no=61, color="") + +logger.__class__.generation = partialmethod(logger.__class__.log, "GENERATION") +logger.__class__.prompt = partialmethod(logger.__class__.log, "PROMPT") +logger.__class__.init = partialmethod(logger.__class__.log, "INIT") +logger.__class__.init_ok = partialmethod(logger.__class__.log, "INIT_OK") +logger.__class__.init_warn = partialmethod(logger.__class__.log, "INIT_WARN") +logger.__class__.init_err = partialmethod(logger.__class__.log, "INIT_ERR") +logger.__class__.message = partialmethod(logger.__class__.log, "MESSAGE") + +config = { + "handlers": [ + {"sink": sys.stderr, "format": logfmt, "colorize":True, "filter": is_stderr_log}, + {"sink": sys.stdout, "format": genfmt, "level": "PROMPT", "colorize":True, "filter": is_stdout_log}, + {"sink": sys.stdout, "format": initfmt, "level": "INIT", "colorize":True, "filter": is_init_log}, + {"sink": sys.stdout, "format": msgfmt, "level": "MESSAGE", "colorize":True, "filter": is_msg_log} + ], +} +logger.configure(**config) diff --git a/maps/bloom.json b/maps/bloom.json new file mode 100644 index 00000000..e3f5feb9 --- /dev/null +++ b/maps/bloom.json @@ -0,0 +1,30 @@ +{ + "mtj_compat": "bloom", + "mtj_pe": "alibi", + "mtj_config_map": { + "d_model": 
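The filter functions in logger.py above all apply the same threshold; a small, self-contained illustration of that arithmetic, using the module's default values:

```python
# A record is emitted only when its level number is at least verbosity + quiet.
verbosity, quiet = 20, 0            # defaults from logger.py

def passes(level_no: int) -> bool:
    return level_no >= verbosity + quiet

print(passes(10))   # DEBUG (10)   -> False at the defaults
print(passes(30))   # WARNING (30) -> True

# set_logger_verbosity(2) lowers the threshold to 0 (20 - 2*10), letting DEBUG through,
# while quiesce_logger(1) raises it by 10, silencing INFO as well.
```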
"n_embed", + "n_heads": "num_attention_heads", + "layers": "n_layer" + }, + "static_weights": { + "word_embeddings.weight": {"mtj": {"module": "embedding_shard/~/linear", "param": "w", "transforms": ["no_transpose", "vocab_pad"]}}, + "word_embeddings_layernorm.weight": {"mtj": {"module": "embedding_shard/~/replicated_layer_norm", "param": "scale"}}, + "word_embeddings_layernorm.bias": {"mtj": {"module": "embedding_shard/~/replicated_layer_norm", "param": "offset"}}, + "ln_f.weight": {"mtj": {"module": "projection_shard/~/replicated_layer_norm", "param": "scale"}}, + "ln_f.bias": {"mtj": {"module": "projection_shard/~/replicated_layer_norm", "param": "offset"}} + }, + "layer_weights": { + "h.{layer}.self_attention.query_key_value.weight": {"mtj": {"module": "layer_{layer}/~/combined_qkv", "param": "w"}}, + "h.{layer}.self_attention.query_key_value.bias": {"mtj": {"module": "layer_{layer}/~/combined_qkv", "param": "b"}}, + "h.{layer}.self_attention.dense.weight": {"mtj": {"module": "layer_{layer}/~/linear_3", "param": "w"}}, + "h.{layer}.self_attention.dense.bias": {"mtj": {"module": "layer_{layer}/~/linear_3", "param": "b", "transforms": ["divide_by_shards"]}}, + "h.{layer}.mlp.dense_h_to_4h.weight": {"mtj": {"module": "layer_{layer}/~/linear_4", "param": "w"}}, + "h.{layer}.mlp.dense_h_to_4h.bias": {"mtj": {"module": "layer_{layer}/~/linear_4", "param": "b"}}, + "h.{layer}.mlp.dense_4h_to_h.weight": {"mtj": {"module": "layer_{layer}/~/linear_5", "param": "w"}}, + "h.{layer}.mlp.dense_4h_to_h.bias": {"mtj": {"module": "layer_{layer}/~/linear_5", "param": "b", "transforms": ["divide_by_shards"]}}, + "h.{layer}.input_layernorm.weight": {"mtj": {"module": "layer_{layer}/~/replicated_layer_norm", "param": "scale"}}, + "h.{layer}.input_layernorm.bias": {"mtj": {"module": "layer_{layer}/~/replicated_layer_norm", "param": "offset"}}, + "h.{layer}.post_attention_layernorm.weight": {"mtj": {"module": "layer_{layer}/~/replicated_layer_norm_1", "param": "scale"}}, + "h.{layer}.post_attention_layernorm.bias": {"mtj": {"module": "layer_{layer}/~/replicated_layer_norm_1", "param": "offset"}} + } +} diff --git a/pytest.ini b/pytest.ini new file mode 100644 index 00000000..c930ba37 --- /dev/null +++ b/pytest.ini @@ -0,0 +1,2 @@ +[pytest] +addopts = --ignore=miniconda3 --ignore=runtime --html=unit_test_report.html --self-contained-html -v \ No newline at end of file diff --git a/readme.md b/readme.md index 0d69934b..413242d5 100644 --- a/readme.md +++ b/readme.md @@ -50,30 +50,35 @@ Each edition features different models and requires different hardware to run, t ## [TPU Edition Model Descriptions](https://colab.research.google.com/github/KoboldAI/KoboldAI-Client/blob/main/colab/TPU.ipynb) -| Model | Size | Style | Description | -| --- | --- | --- | --- | -| [Nerys](https://huggingface.co/KoboldAI/fairseq-dense-13B-Nerys) by Mr Seeker | 13B | Novel/Adventure | Nerys is a hybrid model based on Pike (A newer Janeway), on top of the Pike dataset you also get some Light Novels, Adventure mode support and a little bit of Shinen thrown in the mix. The end result is a very diverse model that is heavily biased towards SFW novel writing, but one that can go beyond its novel training and make for an excellent adventure model to. Adventure mode is best played from a second person perspective, but can be played in first or third person as well. Novel writing can be done best from the first or third person. 
| -| [Janeway](https://huggingface.co/KoboldAI/fairseq-dense-13B-Janeway) by Mr Seeker | 13B | Novel | Janeway is a model created from Picard's dataset combined with a brand new collection of ebooks. This model is trained on 20% more content than Picard and has been trained on literature from various genres. Although the model is mainly focussed on SFW, romantic scenes might involve a degree of nudity. | -| [Shinen](https://huggingface.co/KoboldAI/fairseq-dense-13B-Shinen) by Mr Seeker | 13B | NSFW | Shinen is an NSFW model designed to be more explicit. Trained on a variety of stories from the website Sexstories it contains many different kinks. | -| [Skein](https://huggingface.co/KoboldAI/GPT-J-6B-Skein) by VE\_FORBRYDERNE | 6B | Adventure | Skein is best used with Adventure mode enabled, it consists of a 4 times larger adventure dataset than the Adventure model making it excellent for text adventure gaming. On top of that it also consists of light novel training further expanding its knowledge and writing capabilities. It can be used with the You filter bias if you wish to write Novels with it, but dedicated Novel models can perform better for this task. | -| [Adventure](https://huggingface.co/KoboldAI/GPT-J-6B-Adventure) by VE\_FORBRYDERNE | 6B | Adventure | Adventure is a 6B model designed to mimick the behavior of AI Dungeon. It is exclusively for Adventure Mode and can take you on the epic and wackey adventures that AI Dungeon players love. It also features the many tropes of AI Dungeon as it has been trained on very similar data. It must be used in second person (You). | -| [Lit](https://huggingface.co/hakurei/lit-6B) by Haru | 6B | NSFW | Lit is a great NSFW model trained by Haru on both a large set of Literotica stories and high quality novels along with tagging support. Creating a high quality model for your NSFW stories. This model is exclusively a novel model and is best used in third person. | -| Neo(X) by EleutherAI | 20B | Generic | NeoX is the largest EleutherAI model currently available, being a generic model it is not particularly trained towards anything and can do a variety of writing, Q&A and coding tasks. 20B's performance is closely compared to the 13B models and it is worth trying both especially if you have a task that does not involve english writing. Its behavior will be similar to the GPT-J-6B model since they are trained on the same dataset but with more sensitivity towards repetition penalty and with more knowledge. | -| [Fairseq Dense](https://huggingface.co/KoboldAI/fairseq-dense-13B) | 13B | Generic | Trained by Facebook Researchers this model stems from the MOE research project within Fairseq. This particular version has been converted by us for use in KoboldAI. It is known to be on par with the larger 20B model from EleutherAI and considered as better for pop culture and language tasks. Because the model has never seen a new line (enter) it may perform worse on formatting and paragraphing. | -| [GPT-J-6B](https://huggingface.co/EleutherAI/gpt-j-6B) by EleutherAI | 6B | Generic | This model serves as the basis for most other 6B models (Some being based on Fairseq Dense instead). Being trained on the Pile and not biased towards anything in particular it is suitable for a variety of tasks such as writing, Q&A and coding tasks. You will likely get better result with larger generic models or finetuned models. 
| +| Model | Style | Description | +| --- | --- | --- | +| [Nerys](https://huggingface.co/KoboldAI/fairseq-dense-13B-Nerys) by Mr Seeker | Novel/Adventure | Nerys is a hybrid model based on Pike (A newer Janeway), on top of the Pike dataset you also get some Light Novels, Adventure mode support and a little bit of Shinen thrown in the mix. The end result is a very diverse model that is heavily biased towards SFW novel writing, but one that can go beyond its novel training and make for an excellent adventure model to. Adventure mode is best played from a second person perspective, but can be played in first or third person as well. Novel writing can be done best from the first or third person. | +| [Erebus](https://huggingface.co/KoboldAI/OPT-13B-Erebus) by Mr Seeker | NSFW | Erebus is our community's flagship NSFW model, being a combination of multiple large datasets that include Literotica, Shinen and erotic novels from Nerys and featuring thourough tagging support it covers the vast majority of erotic writing styles. This model is capable of replacing both the Lit and Shinen models in terms of content and style and has been well received as (one of) the best NSFW models out there. If you wish to use this model for commercial or non research usage we recommend choosing the 20B version as that one is not subject to the restrictive OPT license. | +| [Janeway](https://huggingface.co/KoboldAI/fairseq-dense-13B-Janeway) by Mr Seeker | Novel | Janeway is a model created from Picard's dataset combined with a brand new collection of ebooks. This model is trained on 20% more content than Picard and has been trained on literature from various genres. Although the model is mainly focussed on SFW, romantic scenes might involve a degree of nudity. | +| [Shinen](https://huggingface.co/KoboldAI/fairseq-dense-13B-Shinen) by Mr Seeker | NSFW | Shinen is an NSFW model trained on a variety of stories from the website Sexstories it contains many different kinks. It has been merged into the larger (and better) Erebus model. | +| [Skein](https://huggingface.co/KoboldAI/GPT-J-6B-Skein) by VE\_FORBRYDERNE | Adventure | Skein is best used with Adventure mode enabled, it consists of a 4 times larger adventure dataset than the Adventure model making it excellent for text adventure gaming. On top of that it also consists of light novel training further expanding its knowledge and writing capabilities. It can be used with the You filter bias if you wish to write Novels with it, but dedicated Novel models can perform better for this task. | +| [Adventure](https://huggingface.co/KoboldAI/GPT-J-6B-Adventure) by VE\_FORBRYDERNE | Adventure | Adventure is a 6B model designed to mimick the behavior of AI Dungeon. It is exclusively for Adventure Mode and can take you on the epic and wackey adventures that AI Dungeon players love. It also features the many tropes of AI Dungeon as it has been trained on very similar data. It must be used in second person (You). | +| [Lit](https://huggingface.co/hakurei/lit-6B) ([V2](https://huggingface.co/hakurei/litv2-6B-rev3)) by Haru | NSFW | Lit is a great NSFW model trained by Haru on both a large set of Literotica stories and high quality novels along with tagging support. Creating a high quality model for your NSFW stories. This model is exclusively a novel model and is best used in third person. 
| +| [OPT](https://huggingface.co/facebook/opt-13b) by Metaseq | Generic | OPT is considered one of the best base models as far as content goes, its behavior has the strengths of both GPT-Neo and Fairseq Dense. Compared to Neo duplicate and unnecessary content has been left out, while additional literature was added in similar to the Fairseq Dense model. The Fairseq Dense model however lacks the broader data that OPT does have. The biggest downfall of OPT is its license, which prohibits any commercial usage, or usage beyond research purposes. | +| [Neo(X)](https://huggingface.co/EleutherAI/gpt-neox-20b) by EleutherAI | Generic | NeoX is the largest EleutherAI model currently available, being a generic model it is not particularly trained towards anything and can do a variety of writing, Q&A and coding tasks. 20B's performance is closely compared to the 13B models and it is worth trying both especially if you have a task that does not involve english writing. Its behavior will be similar to the GPT-J-6B model since they are trained on the same dataset but with more sensitivity towards repetition penalty and with more knowledge. | +| [Fairseq Dense](https://huggingface.co/KoboldAI/fairseq-dense-13B) | Generic | Trained by Facebook Researchers this model stems from the MOE research project within Fairseq. This particular version has been converted by us for use in KoboldAI. It is known to be on par with the larger 20B model from EleutherAI and considered as better for pop culture and language tasks. Because the model has never seen a new line (enter) it may perform worse on formatting and paragraphing. Compared to other models the dataset focuses primarily on literature and contains little else. | +| [GPT-J-6B](https://huggingface.co/EleutherAI/gpt-j-6B) by EleutherAI | Generic | This model serves as the basis for most other 6B models (Some being based on Fairseq Dense instead). Being trained on the Pile and not biased towards anything in particular it is suitable for a variety of tasks such as writing, Q&A and coding tasks. You will likely get better result with larger generic models or finetuned models. | ## [GPU Edition Model Descriptions](https://colab.research.google.com/github/KoboldAI/KoboldAI-Client/blob/main/colab/GPU.ipynb) -| Model | Size | Style | Description | -| --- | --- | --- | --- | -| [Nerys 2.7B](https://huggingface.co/KoboldAI/fairseq-dense-2.7B-Nerys) by Mr Seeker | 2.7B | Novel/Adventure | Nerys is a hybrid model based on Pike (A newer Janeway), on top of the Pike dataset you also get some Light Novels, Adventure mode support and a little bit of Shinen thrown in the mix. The end result is a very diverse model that is heavily biased towards SFW novel writing, but one that can go beyond its novel training and make for an excellent adventure model to. Adventure mode is best played from a second person perspective, but can be played in first or third person as well. Novel writing can be done best from the first or third person. | -| [Janeway 2.7B](https://huggingface.co/KoboldAI/GPT-Neo-2.7B-Janeway) by Mr Seeker | 2.7B | Novel | Janeway is a model created from Picard's dataset combined with a brand new collection of ebooks. This model is trained on 20% more content than Picard and has been trained on literature from various genres. Although the model is mainly focussed on SFW, romantic scenes might involve a degree of nudity. 
| -| [Picard 2.7B](https://huggingface.co/KoboldAI/GPT-Neo-2.7B-Picard) by Mr Seeker | 2.7B | Novel | Picard is a model trained for SFW Novels based on Neo 2.7B. It is focused on Novel style writing without the NSFW bias. While the name suggests a sci-fi model this model is designed for Novels of a variety of genre's. It is meant to be used in KoboldAI's regular mode. | -| [AID 2.7B](https://huggingface.co/KoboldAI/GPT-Neo-2.7B-AID) by melastacho | 2.7B | Adventure | Also know as Adventure 2.7B this is a clone of the AI Dungeon Classic model and is best known for the epic wackey adventures that AI Dungeon Classic players love. | -| [Horni LN 2.7B](https://huggingface.co/KoboldAI/GPT-Neo-2.7B-Horni-LN) by finetune | 2.7B | Novel | This model is based on Horni 2.7B and retains its NSFW knowledge, but was then further biased towards SFW novel stories. If you seek a balance between a SFW Novel model and a NSFW model this model should be a good choice. | -| [Horni 2.7B](https://huggingface.co/KoboldAI/GPT-Neo-2.7B-Horni) by finetune | 2.7B | NSFW | This model is tuned on Literotica to produce a Novel style model biased towards NSFW content. Can still be used for SFW stories but will have a bias towards NSFW content. It is meant to be used in KoboldAI's regular mode. | -| [Shinen 2.7B ](https://huggingface.co/KoboldAI/GPT-Neo-2.7B-Shinen) by Mr Seeker | 2.7B | NSFW | Shinen is an alternative to the Horni model designed to be more explicit. If Horni is to tame for you Shinen might produce better results. While it is a Novel model it is unsuitable for SFW stories due to its heavy NSFW bias. Shinen will not hold back. It is meant to be used in KoboldAI's regular mode. | -| [Neo 2.7B](https://huggingface.co/EleutherAI/gpt-neo-2.7B) by EleutherAI | 2.7B | Generic | This is the base model for all the other 2.7B models, it is best used when you have a use case that we have no other models available for, such as writing blog articles or programming. It can also be a good basis for the experience of some of the softprompts if your softprompt is not about a subject the other models cover. | +| Model | Style | Description | +| --- | --- | --- | +| [Nerys](https://huggingface.co/KoboldAI/fairseq-dense-2.7B-Nerys) by Mr Seeker | Novel/Adventure | Nerys is a hybrid model based on Pike (A newer Janeway), on top of the Pike dataset you also get some Light Novels, Adventure mode support and a little bit of Shinen thrown in the mix. The end result is a very diverse model that is heavily biased towards SFW novel writing, but one that can go beyond its novel training and make for an excellent adventure model to. Adventure mode is best played from a second person perspective, but can be played in first or third person as well. Novel writing can be done best from the first or third person. | +| [Erebus](https://huggingface.co/KoboldAI/OPT-2.7B-Erebus) by Mr Seeker | NSFW | Erebus is our community's flagship NSFW model, being a combination of multiple large datasets that include Literotica, Shinen and erotic novels from Nerys and featuring thourough tagging support it covers the vast majority of erotic writing styles. This model is capable of replacing both the Lit and Shinen models in terms of content and style and has been well received as (one of) the best NSFW models out there. If you wish to use this model for commercial or non research usage we recommend choosing the 20B version as that one is not subject to the restrictive OPT license. 
| +| [Janeway](https://huggingface.co/KoboldAI/GPT-Neo-2.7B-Janeway) by Mr Seeker | Novel | Janeway is a model created from Picard's dataset combined with a brand new collection of ebooks. This model is trained on 20% more content than Picard and has been trained on literature from various genres. Although the model is mainly focussed on SFW, romantic scenes might involve a degree of nudity. | +| [Picard](https://huggingface.co/KoboldAI/GPT-Neo-2.7B-Picard) by Mr Seeker | Novel | Picard is a model trained for SFW Novels based on Neo 2.7B. It is focused on Novel style writing without the NSFW bias. While the name suggests a sci-fi model, this model is designed for Novels of a variety of genres. It is meant to be used in KoboldAI's regular mode. | +| [AID](https://huggingface.co/KoboldAI/GPT-Neo-2.7B-AID) by melastacho | Adventure | Also known as Adventure 2.7B, this is a clone of the AI Dungeon Classic model and is best known for the epic wacky adventures that AI Dungeon Classic players love. | +| [Horni LN](https://huggingface.co/KoboldAI/GPT-Neo-2.7B-Horni-LN) by finetune | Novel | This model is based on Horni 2.7B and retains its NSFW knowledge, but was then further biased towards SFW novel stories. If you seek a balance between an SFW Novel model and an NSFW model, this model should be a good choice. | +| [Horni](https://huggingface.co/KoboldAI/GPT-Neo-2.7B-Horni) by finetune | NSFW | This model is tuned on Literotica to produce a Novel style model biased towards NSFW content. Can still be used for SFW stories but will have a bias towards NSFW content. It is meant to be used in KoboldAI's regular mode. | +| [Shinen](https://huggingface.co/KoboldAI/GPT-Neo-2.7B-Shinen) by Mr Seeker | NSFW | Shinen is an alternative to the Horni model designed to be more explicit. If Horni is too tame for you, Shinen might produce better results. While it is a Novel model, it is unsuitable for SFW stories due to its heavy NSFW bias. Shinen will not hold back. It is meant to be used in KoboldAI's regular mode. | +| [OPT](https://huggingface.co/facebook/opt-2.7b) by Metaseq | Generic | OPT is considered one of the best base models as far as content goes; its behavior has the strengths of both GPT-Neo and Fairseq Dense. Compared to Neo, duplicate and unnecessary content has been left out, while additional literature was added in, similar to the Fairseq Dense model. The Fairseq Dense model, however, lacks the broader data that OPT does have. The biggest downfall of OPT is its license, which prohibits any commercial usage or usage beyond research purposes. | +| [Fairseq Dense](https://huggingface.co/KoboldAI/fairseq-dense-2.7B) | Generic | Trained by Facebook researchers, this model stems from the MOE research project within Fairseq. This particular version has been converted by us for use in KoboldAI. It is known to be on par with the larger models from EleutherAI and considered better for pop culture and language tasks. Because the model has never seen a new line (enter), it may perform worse on formatting and paragraphing. Compared to other models, the dataset focuses primarily on literature and contains little else. | +| [Neo](https://huggingface.co/EleutherAI/gpt-neo-2.7B) by EleutherAI | Generic | This is the base model for all the other 2.7B models; it is best used when you have a use case that we have no other models available for, such as writing blog articles or programming. 
It can also be a good basis for the experience of some of the softprompts if your softprompt is not about a subject the other models cover. | ### Styles @@ -192,14 +197,21 @@ Lastly the all the features of our userscript API are documented inside the API For our TPU versions keep in mind that scripts modifying AI behavior relies on a different way of processing that is slower than if you leave these userscripts disabled even if your script only sporadically uses this modifier. If you want to partially use a script at its full speed than you can enable "No Gen Modifiers" to ensure that the parts that would make the TPU slow are not active. +## API + +KoboldAI has a REST API that can be accessed by adding /api to the URL that Kobold provides you (for example http://127.0.0.1:5000/api). +When you access this link in a browser, you will be taken to the interactive documentation. + ## Contributors This project contains work from the following contributors : * The Gantian - Creator of KoboldAI, has created most features such as the interface, the different AI model / API integrations and in general the largest part of the project. -* VE FORBRYDERNE - Contributed many features such as the Editing overhaul, Adventure Mode, expansions to the world info section, breakmodel integration, scripting support, softpromtps and much more. As well as vastly improving the TPU compatibility and integrating external code into KoboldAI so we could use official versions of Transformers with virtually no downsides. +* VE FORBRYDERNE - Contributed many features such as the Editing overhaul, Adventure Mode, expansions to the world info section, breakmodel integration, scripting support, API, softprompts and much more, as well as vastly improving the TPU compatibility and integrating external code into KoboldAI so we could use official versions of Transformers with virtually no downsides. * Henk717 - Contributed the installation scripts, this readme, random story generator, the docker scripts, the foundation for the commandline interface and other smaller changes as well as integrating multiple parts of the code of different forks to unite it all. He also optimized the model loading so that downloaded models get converted to efficient offline models and that in future models are more likely to work out of the box. Not all code Github attributes to Henk717 is by Henk717 as some of it has been integrations of other people's work. We try to clarify this in the contributors list as much as we can. -* Ebolam - Automatic Saving +* Ebolam - Automatic Saving, back/redo, pinning, web loading of models +* one-some - Logits Viewer and Token Streaming +* db0 - KoboldAI Horde * Frogging101 - top\_k / tfs support (Part of this support was later redone by VE to integrate what was originally inside of finetuneanon's transformers) * UWUplus (Ralf) - Contributed storage systems for community colabs, as well as cleaning up and integrating the website dependencies/code better. He is also the maintainer of flask-cloudflared which we use to generate the cloudflare links. * Javalar - Initial Performance increases on the story\_refresh @@ -216,4 +228,4 @@ Did we miss your contribution? Feel free to issue a commit adding your name to t KoboldAI is licensed with a AGPL license, in short this means that it can be used by anyone for any purpose. 
However, if you decide to make a publicly available instance your users are entitled to a copy of the source code including all modifications that you have made (which needs to be available trough an interface such as a button on your website), you may also not distribute this project in a form that does not contain the source code (Such as compiling / encrypting the code and distributing this version without also distributing the source code that includes the changes that you made. You are allowed to distribute this in a closed form if you also provide a separate archive with the source code.). -umamba.exe is bundled for convenience because we observed that many of our users had trouble with command line download methods, it is not part of our project and does not fall under the AGPL license. It is licensed under the BSD-3-Clause license. Other files with differing licenses will have a reference or embedded version of this license within the file. It has been sourced from https://anaconda.org/conda-forge/micromamba/files and its source code can be found here : https://github.com/mamba-org/mamba/tree/master/micromamba +umamba.exe is bundled for convenience because we observed that many of our users had trouble with command line download methods, it is not part of our project and does not fall under the AGPL license. It is licensed under the BSD-3-Clause license. Other files with differing licenses will have a reference or embedded version of this license within the file. It has been sourced from https://anaconda.org/conda-forge/micromamba/files and its source code can be found here : https://github.com/mamba-org/mamba/tree/master/micromamba \ No newline at end of file diff --git a/requirements.txt b/requirements.txt index efba7e4a..b1e2247c 100644 --- a/requirements.txt +++ b/requirements.txt @@ -2,7 +2,7 @@ transformers>=4.20.1 Flask Flask-SocketIO requests -torch==1.11 +torch >= 1.9, < 1.13 flask-cloudflared flask-ngrok eventlet @@ -12,3 +12,7 @@ bleach==4.1.0 sentencepiece protobuf accelerate +flask-session +marshmallow>=3.13 +apispec-webframeworks +loguru diff --git a/requirements_mtj.txt b/requirements_mtj.txt index 613e9203..743c9c1d 100644 --- a/requirements_mtj.txt +++ b/requirements_mtj.txt @@ -1,4 +1,4 @@ -torch >= 1.9, <= 1.11 +torch >= 1.9, < 1.13 numpy tqdm requests @@ -16,3 +16,7 @@ eventlet lupa==1.10 markdown bleach==4.1.0 +flask-session +marshmallow>=3.13 +apispec-webframeworks +loguru diff --git a/static/application.js b/static/application.js index 8da83f0a..6cdb531b 100644 --- a/static/application.js +++ b/static/application.js @@ -7,6 +7,7 @@ var socket; // UI references for jQuery var connect_status; +var button_loadmodel; var button_newgame; var button_rndgame; var button_save; @@ -56,6 +57,7 @@ var savepins; var topic; var saveas_accept; var saveas_close; +var loadmodelpopup; var loadpopup; var loadcontent; var load_accept; @@ -76,6 +78,8 @@ var rs_accept; var rs_close; var seqselmenu; var seqselcontents; +var stream_preview; +var token_prob_container; var storyname = null; var memorymode = false; @@ -100,6 +104,11 @@ var remote = false; var gamestate = ""; var gamesaved = true; var modelname = null; +var model = ""; +var ignore_stream = false; + +//timer for loading CLUSTER models +var online_model_timmer; // This is true iff [we're in macOS and the browser is Safari] or [we're in iOS] var using_webkit_patch = true; @@ -154,6 +163,12 @@ function getThrottle(ms) { } } +function reset_menus() { + settings_menu.html(""); + format_menu.html(""); + wi_menu.html(""); +} + 
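As a quick illustration of the REST API introduced in the README hunk above: the interactive documentation served at /api is the authoritative reference, but a generation request can be sketched in a few lines of client-side JavaScript. The endpoint path (/api/v1/generate) and the request/response fields below are assumptions for illustration only; consult the /api documentation of your build for the exact schema.

```javascript
// Minimal sketch (assumed endpoint and fields; see the /api docs for the real schema):
// POST a prompt to a locally running KoboldAI instance and return the generated text.
async function generateText(prompt) {
  const response = await fetch("http://127.0.0.1:5000/api/v1/generate", {
    method: "POST",
    headers: { "Content-Type": "application/json" },
    body: JSON.stringify({ prompt: prompt, max_length: 80 }),
  });
  if (!response.ok) throw new Error("API request failed: " + response.status);
  const data = await response.json();
  // Assumed response shape: { "results": [ { "text": "..." } ] }
  return data.results.map((r) => r.text).join("");
}

// Example usage:
// generateText("You are standing in a dark cave.").then(console.log);
```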
function addSetting(ob) { // Add setting block to Settings Menu if(ob.uitype == "slider"){ @@ -229,8 +244,27 @@ function addSetting(ob) { if(ob.id == "setadventure"){ setadventure($(this).prop('checked')); } + }); } + + if (ob.id === "setshowbudget") { + $("#setshowbudget").on("change", function () { + for (const el of document.getElementsByClassName("input-token-usage")) { + if (this.checked) { + el.classList.remove("hidden"); + } else { + el.classList.add("hidden"); + } + } + }); + + if (!$("#setshowbudget")[0].checked) { + for (const el of document.getElementsByClassName("input-token-usage")) { + el.classList.add("hidden"); + } + } + } } function refreshTitle() { @@ -500,6 +534,16 @@ function addWiLine(ob) { $(".wisortable-excluded-dynamic").removeClass("wisortable-excluded-dynamic"); $(this).parent().css("max-height", "").find(".wicomment").find(".form-control").css("max-height", ""); }); + + for (const wientry of document.getElementsByClassName("wientry")) { + // If we are uninitialized, skip. + if ($(wientry).closest(".wilistitem-uninitialized").length) continue; + + // add() will not add if the class is already present + wientry.classList.add("tokens-counted"); + } + + registerTokenCounters(); } function addWiFolder(uid, ob) { @@ -823,6 +867,7 @@ function exitMemoryMode() { button_actmem.html("Memory"); show([button_actback, button_actfwd, button_actretry, button_actwi]); input_text.val(""); + updateInputBudget(input_text[0]); // Hide Author's Note field anote_menu.slideUp("fast"); } @@ -879,6 +924,8 @@ function formatChunkInnerText(chunk) { } function dosubmit(disallow_abort) { + beginStream(); + submit_start = Date.now(); var txt = input_text.val().replace(/\u00a0/g, " "); if((disallow_abort || gamestate !== "wait") && !memorymode && !gamestarted && ((!adventure || !action_mode) && txt.trim().length == 0)) { return; @@ -892,6 +939,7 @@ function dosubmit(disallow_abort) { } function _dosubmit() { + beginStream(); var txt = submit_throttle.txt; var disallow_abort = submit_throttle.disallow_abort; submit_throttle = null; @@ -958,7 +1006,18 @@ function hideSaveAsPopup() { } function sendSaveAsRequest() { - socket.send({'cmd': 'saveasrequest', 'data': {"name": saveasinput.val(), "pins": savepins.val()}}); + socket.send({'cmd': 'saveasrequest', 'data': {"name": saveasinput.val(), "pins": savepins.prop('checked')}}); +} + +function showLoadModelPopup() { + loadmodelpopup.removeClass("hidden"); + loadmodelpopup.addClass("flex"); +} + +function hideLoadModelPopup() { + loadmodelpopup.removeClass("flex"); + loadmodelpopup.addClass("hidden"); + loadmodelcontent.html(""); } function showLoadPopup() { @@ -1004,6 +1063,108 @@ function hideSamplersPopup() { samplerspopup.addClass("hidden"); } + +function buildLoadModelList(ar, menu, breadcrumbs, showdelete) { + disableButtons([load_model_accept]); + loadmodelcontent.html(""); + $("#loadmodellistbreadcrumbs").html(""); + $("#custommodelname").addClass("hidden"); + var i; + for(i=0; i"+breadcrumbs[i][1]+"\\"); + $("#model_breadcrumbs"+i).off("click").on("click", (function () { + return function () { + socket.send({'cmd': 'selectmodel', 'data': $(this).attr("name"), 'folder': $(this).attr("value")}); + disableButtons([load_model_accept]); + } + })(i)); + } + if (breadcrumbs.length > 0) { + $("#loadmodellistbreadcrumbs").append("
") + } + //If we're in the custom load menu (we need to send the path data back in that case) + if(['NeoCustom', 'GPT2Custom'].includes(menu)) { + $("#loadmodel"+i).off("click").on("click", (function () { + return function () { + socket.send({'cmd': 'selectmodel', 'data': $(this).attr("name"), 'path': $(this).attr("pretty_name")}); + highlightLoadLine($(this)); + } + })(i)); + $("#custommodelname").removeClass("hidden"); + $("#custommodelname")[0].setAttribute("menu", menu); + } + + for(i=0; i\ +
" + //if the menu item is a link to another menu + console.log(ar[i]); + if((ar[i][3]) || (['Load a model from its directory', 'Load an old GPT-2 model (eg CloverEdition)'].includes(ar[i][0]))) { + html = html + "" + } else { + //this is a model + html = html + "
   " + } + + //now let's do the delete icon if applicable + if (['NeoCustom', 'GPT2Custom'].includes(menu) && !ar[i][3] && showdelete) { + html = html + "" + } else { + html = html + "
" + } + + html = html + "
\ +
\ +
"+folder+"
\ +
"+ar[i][2]+"
\ +
\ + " + loadmodelcontent.append(html); + //If this is a menu + console.log(ar[i]); + if(ar[i][3]) { + $("#loadmodel"+i).off("click").on("click", (function () { + return function () { + socket.send({'cmd': 'list_model', 'data': $(this).attr("name"), 'pretty_name': $(this).attr("pretty_name")}); + disableButtons([load_model_accept]); + } + })(i)); + //Normal load + } else { + if (['NeoCustom', 'GPT2Custom'].includes(menu)) { + $("#loadmodel"+i).off("click").on("click", (function () { + return function () { + $("#use_gpu_div").addClass("hidden"); + $("#modelkey").addClass("hidden"); + $("#modellayers").addClass("hidden"); + socket.send({'cmd': 'selectmodel', 'data': $(this).attr("name"), 'path': $(this).attr("pretty_name")}); + highlightLoadLine($(this)); + } + })(i)); + } else { + $("#loadmodel"+i).off("click").on("click", (function () { + return function () { + $("#use_gpu_div").addClass("hidden"); + $("#modelkey").addClass("hidden"); + $("#modellayers").addClass("hidden"); + socket.send({'cmd': 'selectmodel', 'data': $(this).attr("name")}); + highlightLoadLine($(this)); + } + })(i)); + } + } + } +} + function buildLoadList(ar) { disableButtons([load_accept]); loadcontent.html(""); @@ -1148,12 +1309,13 @@ function buildSamplerList(samplers) { "Tail-free Sampling", "Typical Sampling", "Temperature", + "Repetition Penalty", ] for(i=0; i\
\
\ -
"+samplers_lookup_table[samplers[i]]+"
\ +
"+(samplers[i] < samplers_lookup_table.length ? samplers_lookup_table[samplers[i]] : "Unknown sampler #" + samplers[i])+"
\
\
\ "); @@ -1162,6 +1324,7 @@ function buildSamplerList(samplers) { function highlightLoadLine(ref) { $("#loadlistcontent > div > div.popuplistselected").removeClass("popuplistselected"); + $("#loadmodellistcontent > div > div.popuplistselected").removeClass("popuplistselected"); ref.addClass("popuplistselected"); } @@ -1977,6 +2140,89 @@ function unbindGametext() { gametext_bound = false; } +function beginStream() { + ignore_stream = false; + token_prob_container[0].innerHTML = ""; +} + +function endStream() { + // Clear stream, the real text is about to be displayed. + ignore_stream = true; + if (stream_preview) { + stream_preview.remove(); + stream_preview = null; + } +} + +function update_gpu_layers() { + var gpu_layers + gpu_layers = 0; + for (let i=0; i < $("#gpu_count")[0].value; i++) { + gpu_layers += parseInt($("#gpu_layers"+i)[0].value); + $("#gpu_layers_box_"+i)[0].value=$("#gpu_layers"+i)[0].value; + } + if ($("#disk_layers").length > 0) { + gpu_layers += parseInt($("#disk_layers")[0].value); + $("#disk_layers_box")[0].value=$("#disk_layers")[0].value; + } + if (gpu_layers > parseInt(document.getElementById("gpu_layers_max").innerHTML)) { + disableButtons([load_model_accept]); + $("#gpu_layers_current").html(""+gpu_layers+"/"+ document.getElementById("gpu_layers_max").innerHTML +""); + } else { + enableButtons([load_model_accept]); + $("#gpu_layers_current").html(gpu_layers+"/"+document.getElementById("gpu_layers_max").innerHTML); + } +} + + +function RemoveAllButFirstOption(selectElement) { + var i, L = selectElement.options.length - 1; + for(i = L; i >= 1; i--) { + selectElement.remove(i); + } +} + +function interpolateRGB(color0, color1, t) { + return [ + color0[0] + ((color1[0] - color0[0]) * t), + color0[1] + ((color1[1] - color0[1]) * t), + color0[2] + ((color1[2] - color0[2]) * t), + ] +} + +function updateInputBudget(inputElement) { + let budgetElement = document.getElementById("setshowbudget"); + if (budgetElement && !budgetElement.checked) return; + + let data = {"unencoded": inputElement.value, "field": inputElement.id}; + + if (inputElement.id === "anoteinput") { + data["anotetemplate"] = $("#anotetemplate").val(); + } + + socket.send({"cmd": "getfieldbudget", "data": data}); +} + +function registerTokenCounters() { + // Add token counters to all input containers with the class of "tokens-counted", + // if a token counter is not already a child of said container. 
+ for (const el of document.getElementsByClassName("tokens-counted")) { + if (el.getElementsByClassName("input-token-usage").length) continue; + + let span = document.createElement("span"); + span.classList.add("input-token-usage"); + el.appendChild(span); + + let inputElement = el.querySelector("input, textarea"); + + inputElement.addEventListener("input", function() { + updateInputBudget(this); + }); + + updateInputBudget(inputElement); + } +} + //=================================================================// // READY/RUNTIME //=================================================================// @@ -1985,6 +2231,8 @@ $(document).ready(function(){ // Bind UI references connect_status = $('#connectstatus'); + button_loadmodel = $('#btn_loadmodel'); + button_showmodel = $('#btn_showmodel'); button_newgame = $('#btn_newgame'); button_rndgame = $('#btn_rndgame'); button_save = $('#btn_save'); @@ -2038,9 +2286,13 @@ $(document).ready(function(){ saveas_accept = $("#btn_saveasaccept"); saveas_close = $("#btn_saveasclose"); loadpopup = $("#loadcontainer"); + loadmodelpopup = $("#loadmodelcontainer"); loadcontent = $("#loadlistcontent"); + loadmodelcontent = $("#loadmodellistcontent"); load_accept = $("#btn_loadaccept"); load_close = $("#btn_loadclose"); + load_model_accept = $("#btn_loadmodelaccept"); + load_model_close = $("#btn_loadmodelclose"); sppopup = $("#spcontainer"); spcontent = $("#splistcontent"); sp_accept = $("#btn_spaccept"); @@ -2062,11 +2314,19 @@ $(document).ready(function(){ rs_close = $("#btn_rsclose"); seqselmenu = $("#seqselmenu"); seqselcontents = $("#seqselcontents"); + token_prob_container = $("#token_prob_container"); + token_prob_menu = $("#token_prob_menu"); // Connect to SocketIO server socket = io.connect(window.document.origin, {transports: ['polling', 'websocket'], closeOnBeforeunload: false}); + socket.on('load_popup', function(data){load_popup(data);}); + socket.on('popup_items', function(data){popup_items(data);}); + socket.on('popup_breadcrumbs', function(data){popup_breadcrumbs(data);}); + socket.on('popup_edit_file', function(data){popup_edit_file(data);}); + socket.on('error_popup', function(data){error_popup(data);}); socket.on('from_server', function(msg) { + //console.log(msg); if(msg.cmd == "connected") { // Connected to Server Actions sman_allow_delete = msg.hasOwnProperty("smandelete") && msg.smandelete; @@ -2080,9 +2340,7 @@ $(document).ready(function(){ connect_status.removeClass("color_orange"); connect_status.addClass("color_green"); // Reset Menus - settings_menu.html(""); - format_menu.html(""); - wi_menu.html(""); + reset_menus(); // Set up "Allow Editing" $('body').on('input', autofocus); $('#allowediting').prop('checked', allowedit).prop('disabled', false).change().off('change').on('change', function () { @@ -2115,6 +2373,75 @@ $(document).ready(function(){ active_element.focus(); })(); $("body").addClass("connected"); + } else if (msg.cmd == "streamtoken") { + // Sometimes the stream_token messages will come in too late, after + // we have recieved the full text. This leads to some stray tokens + // appearing after the output. To combat this, we only allow tokens + // to be displayed after requesting and before recieving text. 
+ if (ignore_stream) return; + + let streamingEnabled = $("#setoutputstreaming")[0].checked; + let probabilitiesEnabled = $("#setshowprobs")[0].checked; + + if (!streamingEnabled && !probabilitiesEnabled) return; + + if (!stream_preview && streamingEnabled) { + stream_preview = document.createElement("span"); + game_text.append(stream_preview); + } + + for (const token of msg.data) { + if (streamingEnabled) stream_preview.innerText += token.decoded; + + if (probabilitiesEnabled) { + // Probability display + let probDiv = document.createElement("div"); + probDiv.classList.add("token-probs"); + + let probTokenSpan = document.createElement("span"); + probTokenSpan.classList.add("token-probs-header"); + probTokenSpan.innerText = token.decoded.replaceAll("\n", "\\n"); + probDiv.appendChild(probTokenSpan); + + let probTable = document.createElement("table"); + let probTBody = document.createElement("tbody"); + probTable.appendChild(probTBody); + + for (const probToken of token.probabilities) { + let tr = document.createElement("tr"); + let rgb = interpolateRGB( + [255, 255, 255], + [0, 255, 0], + probToken.score + ).map(Math.round); + let color = `rgb(${rgb.join(", ")})`; + + if (probToken.decoded === token.decoded) { + tr.classList.add("token-probs-final-token"); + } + + let tds = {}; + + for (const property of ["tokenId", "decoded", "score"]) { + let td = document.createElement("td"); + td.style.color = color; + tds[property] = td; + tr.appendChild(td); + } + + tds.tokenId.innerText = probToken.tokenId; + tds.decoded.innerText = probToken.decoded.toString().replaceAll("\n", "\\n"); + tds.score.innerText = (probToken.score * 100).toFixed(2) + "%"; + + probTBody.appendChild(tr); + } + + probDiv.appendChild(probTable); + token_prob_container.append(probDiv); + } + } + + scrollToBottom(); } else if(msg.cmd == "updatescreen") { var _gamestarted = gamestarted; gamestarted = msg.gamestarted; @@ -2145,6 +2472,7 @@ $(document).ready(function(){ scrollToBottom(); } else if(msg.cmd == "updatechunk") { hideMessage(); + game_text.attr('contenteditable', allowedit); var index = msg.data.index; var html = msg.data.html; var existingChunk = game_text.children('#n' + index); @@ -2160,7 +2488,11 @@ $(document).ready(function(){ } else if (!empty_chunks.has(index.toString())) { // Append at the end unbindGametext(); - var lc = game_text[0].lastChild; + + // game_text can contain things other than chunks (stream + // preview), so we use querySelector to get the last chunk. 
+ var lc = game_text[0].querySelector("chunk:last-of-type"); + if(lc.tagName === "CHUNK" && lc.lastChild !== null && lc.lastChild.tagName === "BR") { lc.removeChild(lc.lastChild); } @@ -2176,7 +2508,11 @@ $(document).ready(function(){ var element = game_text.children('#n' + index); if(element.length) { unbindGametext(); - if((element[0].nextSibling === null || element[0].nextSibling.nodeType !== 1 || element[0].nextSibling.tagName !== "CHUNK") && element[0].previousSibling !== null && element[0].previousSibling.tagName === "CHUNK") { + if( + (element[0].nextSibling === null || element[0].nextSibling.nodeType !== 1 || element[0].nextSibling.tagName !== "CHUNK") + && element[0].previousSibling !== null + && element[0].previousSibling.tagName === "CHUNK" + ) { element[0].previousSibling.appendChild(document.createElement("br")); } element.remove(); // Remove the chunk @@ -2186,18 +2522,22 @@ $(document).ready(function(){ } else if(msg.cmd == "setgamestate") { // Enable or Disable buttons if(msg.data == "ready") { + endStream(); enableSendBtn(); enableButtons([button_actmem, button_actwi, button_actback, button_actfwd, button_actretry]); hideWaitAnimation(); gamestate = "ready"; + favicon.stop_swap(); } else if(msg.data == "wait") { gamestate = "wait"; disableSendBtn(); disableButtons([button_actmem, button_actwi, button_actback, button_actfwd, button_actretry]); showWaitAnimation(); + favicon.start_swap(); } else if(msg.data == "start") { setStartState(); gamestate = "ready"; + favicon.stop_swap(); } } else if(msg.cmd == "allowsp") { allowsp = !!msg.data; @@ -2222,6 +2562,7 @@ $(document).ready(function(){ memorytext = msg.data; input_text.val(msg.data); } + updateInputBudget(input_text[0]); } else if(msg.cmd == "setmemory") { memorytext = msg.data; if(memorymode) { @@ -2343,9 +2684,12 @@ $(document).ready(function(){ } else if(msg.cmd == "setanote") { // Set contents of Author's Note field anote_input.val(msg.data); + updateInputBudget(anote_input[0]); } else if(msg.cmd == "setanotetemplate") { // Set contents of Author's Note Template field $("#anotetemplate").val(msg.data); + } else if(msg.cmd == "reset_menus") { + reset_menus(); } else if(msg.cmd == "addsetting") { // Add setting controls addSetting(msg.data); @@ -2367,6 +2711,20 @@ $(document).ready(function(){ } else if(msg.cmd == "updatesingleline") { // Update toggle state $("#singleline").prop('checked', msg.data).change(); + } else if(msg.cmd == "updateoutputstreaming") { + // Update toggle state + $("#setoutputstreaming").prop('checked', msg.data).change(); + } else if(msg.cmd == "updateshowbudget") { + // Update toggle state + $("#setshowbudget").prop('checked', msg.data).change(); + } else if(msg.cmd == "updateshowprobs") { + $("#setshowprobs").prop('checked', msg.data).change(); + + if(msg.data) { + token_prob_menu.removeClass("hidden"); + } else { + token_prob_menu.addClass("hidden"); + } } else if(msg.cmd == "allowtoggle") { // Allow toggle change states to propagate allowtoggle = msg.data; @@ -2498,17 +2856,17 @@ $(document).ready(function(){ chat_name.val(msg.data); } else if(msg.cmd == "setlabelnumseq") { // Update setting label with value from server - $("#setnumseqcur").html(msg.data); + $("#setnumseqcur").val(msg.data); } else if(msg.cmd == "updatenumseq") { // Send current max tokens value to input - $("#setnumseqcur").html(msg.data); + $("#setnumseqcur").val(msg.data); $("#setnumseq").val(parseInt(msg.data)).trigger("change"); } else if(msg.cmd == "setlabelwidepth") { // Update setting label with value from server - 
$("#setwidepthcur").html(msg.data); + $("#setwidepthcur").val(msg.data); } else if(msg.cmd == "updatewidepth") { // Send current max tokens value to input - $("#setwidepthcur").html(msg.data); + $("#setwidepthcur").val(msg.data); $("#setwidepth").val(parseInt(msg.data)).trigger("change"); } else if(msg.cmd == "updateuseprompt") { // Update toggle state @@ -2541,6 +2899,9 @@ $(document).ready(function(){ } else if(msg.cmd == "updatenogenmod") { // Update toggle state $("#setnogenmod").prop('checked', msg.data).change(); + } else if(msg.cmd == "updatefulldeterminism") { + // Update toggle state + $("#setfulldeterminism").prop('checked', msg.data).change(); } else if(msg.cmd == "runs_remotely") { remote = true; hide([button_savetofile, button_import, button_importwi]); @@ -2552,7 +2913,137 @@ $(document).ready(function(){ } else { debug_area.addClass("hidden"); } + } else if(msg.cmd == 'show_model_menu') { + //console.log(msg) + $("#use_gpu_div").addClass("hidden"); + $("#modelkey").addClass("hidden"); + $("#modellayers").addClass("hidden"); + $("#oaimodel").addClass("hidden") + buildLoadModelList(msg.data, msg.menu, msg.breadcrumbs, msg.showdelete); + } else if(msg.cmd == 'selected_model_info') { + console.log(msg); + enableButtons([load_model_accept]); + $("#oaimodel").addClass("hidden") + $("#oaimodel")[0].options[0].selected = true; + if (msg.key) { + $("#modelkey").removeClass("hidden"); + $("#modelkey")[0].value = msg.key_value; + if (msg.models_on_url) { + $("#modelkey")[0].oninput = function() {clearTimeout(online_model_timmer); + online_model_timmer = setTimeout(function() { + socket.send({'cmd': 'Cluster_Key_Update', 'key': document.getElementById("modelkey").value, + 'url': document.getElementById("modelurl").value}); + }, 1000); + } + $("#modelkey")[0].onblur = function () {socket.send({'cmd': 'Cluster_Key_Update', 'key': this.value, 'url': document.getElementById("modelurl").value});}; + $("#modelurl")[0].onblur = function () {socket.send({'cmd': 'Cluster_Key_Update', 'key': document.getElementById("modelkey").value, 'url': this.value});}; + } else { + $("#modelkey")[0].onblur = function () {socket.send({'cmd': 'OAI_Key_Update', 'key': $('#modelkey')[0].value});}; + $("#modelurl")[0].onblur = null; + } + //if we're in the API list, disable to load button until the model is selected (after the API Key is entered) + disableButtons([load_model_accept]); + } else { + $("#modelkey").addClass("hidden"); + } + + console.log(msg.multi_online_models); + if (msg.multi_online_models) { + $("#oaimodel")[0].setAttribute("multiple", ""); + $("#oaimodel")[0].options[0].textContent = "All" + } else { + $("#oaimodel")[0].removeAttribute("multiple"); + $("#oaimodel")[0].options[0].textContent = "Select Model(s)" + } + + + + if (msg.url) { + $("#modelurl").removeClass("hidden"); + if (msg.default_url != null) { + document.getElementById("modelurl").value = msg.default_url; + } + } else { + $("#modelurl").addClass("hidden"); + } + if (msg.gpu) { + $("#use_gpu_div").removeClass("hidden"); + } else { + $("#use_gpu_div").addClass("hidden"); + } + if (msg.breakmodel) { + var html; + $("#modellayers").removeClass("hidden"); + html = ""; + for (let i = 0; i < msg.gpu_names.length; i++) { + html += "GPU " + i + " " + msg.gpu_names[i] + ": "; + html += ''; + html += ""; + } + html += "Disk cache: "; + html += ''; + html += ""; + $("#model_layer_bars").html(html); + $("#gpu_layers_max").html(msg.layer_count); + $("#gpu_count")[0].value = msg.gpu_count; + update_gpu_layers(); + } else { + 
$("#modellayers").addClass("hidden"); + } + } else if(msg.cmd == 'oai_engines') { + $("#oaimodel").removeClass("hidden") + enableButtons([load_model_accept]); + selected_item = 0; + length = $("#oaimodel")[0].options.length; + for (let i = 0; i < length; i++) { + $("#oaimodel")[0].options.remove(1); + } + msg.data.forEach(function (item, index) { + var option = document.createElement("option"); + option.value = item[0]; + option.text = item[1]; + if(msg.online_model == item[0]) { + selected_item = index+1; + } + $("#oaimodel")[0].appendChild(option); + if(selected_item != "") { + $("#oaimodel")[0].options[selected_item].selected = true; + } + }) + } else if(msg.cmd == 'show_model_name') { + $("#showmodelnamecontent").html("
" + msg.data + "
"); + $("#showmodelnamecontainer").removeClass("hidden"); + } else if(msg.cmd == 'hide_model_name') { + $("#showmodelnamecontainer").addClass("hidden"); + $(window).off('beforeunload'); + location.reload(); + //console.log("Closing window"); + } else if(msg.cmd == 'model_load_status') { + $("#showmodelnamecontent").html("
" + msg.data + "
"); + $("#showmodelnamecontainer").removeClass("hidden"); + //console.log(msg.data); + } else if(msg.cmd == 'oai_engines') { + RemoveAllButFirstOption($("#oaimodel")[0]); + for (const engine of msg.data) { + var opt = document.createElement('option'); + opt.value = engine[0]; + opt.innerHTML = engine[1]; + $("#oaimodel")[0].appendChild(opt); + } + } else if(msg.cmd == 'showfieldbudget') { + let inputElement = document.getElementById(msg.data.field); + let tokenBudgetElement = inputElement.parentNode.getElementsByClassName("input-token-usage")[0]; + if (msg.data.max === null) { + tokenBudgetElement.innerText = ""; + } else { + let tokenLength = msg.data.length ?? "?"; + let tokenMax = msg.data.max ?? "?"; + tokenBudgetElement.innerText = `${tokenLength}/${tokenMax} Tokens`; + } } + enableButtons([load_model_accept]); }); socket.on('disconnect', function() { @@ -2673,6 +3164,7 @@ $(document).ready(function(){ }); button_actretry.on("click", function(ev) { + beginStream(); hideMessage(); socket.send({'cmd': 'retry', 'chatname': chatmode ? chat_name.val() : undefined, 'data': ''}); hidegenseqs(); @@ -2806,12 +3298,40 @@ $(document).ready(function(){ hideLoadPopup(); }); + load_model_close.on("click", function(ev) { + $("#modellayers").addClass("hidden"); + hideLoadModelPopup(); + }); + load_accept.on("click", function(ev) { hideMessage(); newly_loaded = true; socket.send({'cmd': 'loadrequest', 'data': ''}); hideLoadPopup(); }); + + load_model_accept.on("click", function(ev) { + hideMessage(); + var gpu_layers; + var message; + if($("#modellayers")[0].classList.contains('hidden')) { + gpu_layers = "," + } else { + gpu_layers = "" + for (let i=0; i < $("#gpu_count")[0].value; i++) { + gpu_layers += $("#gpu_layers"+i)[0].value + ","; + } + } + var disk_layers = $("#disk_layers").length > 0 ? 
$("#disk_layers")[0].value : 0; + models = getSelectedOptions(document.getElementById('oaimodel')); + if (models.length == 1) { + models = models[0]; + } + message = {'cmd': 'load_model', 'use_gpu': $('#use_gpu')[0].checked, 'key': $('#modelkey')[0].value, 'gpu_layers': gpu_layers.slice(0, -1), 'disk_layers': disk_layers, 'url': $('#modelurl')[0].value, 'online_model': models}; + socket.send(message); + loadmodelcontent.html(""); + hideLoadModelPopup(); + }); sp_close.on("click", function(ev) { hideSPPopup(); @@ -2845,6 +3365,14 @@ $(document).ready(function(){ hideSamplersPopup(); }); + button_loadmodel.on("click", function(ev) { + showLoadModelPopup(); + socket.send({'cmd': 'list_model', 'data': 'mainmenu'}); + }); + button_showmodel.on("click", function(ev) { + socket.send({'cmd': 'show_model', 'data': ''}); + }); + button_newgame.on("click", function(ev) { if(connected) { showNewStoryPopup(); @@ -2887,6 +3415,7 @@ $(document).ready(function(){ }); rs_accept.on("click", function(ev) { + beginStream(); hideMessage(); socket.send({'cmd': 'rndgame', 'memory': $("#rngmemory").val(), 'data': topic.val()}); hideRandomStoryPopup(); @@ -2960,4 +3489,311 @@ $(document).ready(function(){ return true; } }); + + // Shortcuts + $(window).keydown(function (ev) { + // Only ctrl prefixed (for now) + if (!ev.ctrlKey) return; + + let handled = true; + switch (ev.key) { + // Ctrl+Z - Back + case "z": + button_actback.click(); + break; + // Ctrl+Y - Forward + case "y": + button_actfwd.click(); + break; + // Ctrl+E - Retry + case "e": + button_actretry.click(); + break; + default: + handled = false; + } + + if (handled) ev.preventDefault(); + }); + + $("#anotetemplate").on("input", function() { + updateInputBudget(anote_input[0]); + }) + + registerTokenCounters(); + + updateInputBudget(input_text[0]); + }); + + + +var popup_deleteable = false; +var popup_editable = false; +var popup_renameable = false; + +function load_popup(data) { + document.getElementById('spcontainer').classList.add('hidden'); + document.getElementById('uscontainer').classList.add('hidden'); + popup_deleteable = data.deleteable; + popup_editable = data.editable; + popup_renameable = data.renameable; + var popup = document.getElementById("popup"); + var popup_title = document.getElementById("popup_title"); + popup_title.textContent = data.popup_title; + var popup_list = document.getElementById("popup_list"); + //first, let's clear out our existing data + while (popup_list.firstChild) { + popup_list.removeChild(popup_list.firstChild); + } + var breadcrumbs = document.getElementById('popup_breadcrumbs'); + while (breadcrumbs.firstChild) { + breadcrumbs.removeChild(breadcrumbs.firstChild); + } + + if (data.upload) { + const dropArea = document.getElementById('popup_list'); + dropArea.addEventListener('dragover', (event) => { + event.stopPropagation(); + event.preventDefault(); + // Style the drag-and-drop as a "copy file" operation. 
+ event.dataTransfer.dropEffect = 'copy'; + }); + + dropArea.addEventListener('drop', (event) => { + event.stopPropagation(); + event.preventDefault(); + const fileList = event.dataTransfer.files; + for (file of fileList) { + reader = new FileReader(); + reader.onload = function (event) { + socket.emit("upload_file", {'filename': file.name, "data": event.target.result}); + }; + reader.readAsArrayBuffer(file); + } + }); + } else { + + } + + popup.classList.remove("hidden"); + + //adjust accept button + if (data.call_back == "") { + document.getElementById("popup_accept").classList.add("hidden"); + } else { + document.getElementById("popup_accept").classList.remove("hidden"); + var accept = document.getElementById("popup_accept"); + accept.classList.add("disabled"); + accept.setAttribute("emit", data.call_back); + accept.setAttribute("selected_value", ""); + accept.onclick = function () { + socket.emit(this.emit, this.getAttribute("selected_value")); + document.getElementById("popup").classList.add("hidden"); + }; + } + +} + +function popup_items(data) { + var popup_list = document.getElementById('popup_list'); + //first, let's clear out our existing data + while (popup_list.firstChild) { + popup_list.removeChild(popup_list.firstChild); + } + document.getElementById('popup_upload_input').value = ""; + + for (item of data) { + var list_item = document.createElement("span"); + list_item.classList.add("item"); + + //create the folder icon + var folder_icon = document.createElement("span"); + folder_icon.classList.add("folder_icon"); + if (item[0]) { + folder_icon.classList.add("oi"); + folder_icon.setAttribute('data-glyph', "folder"); + } + list_item.append(folder_icon); + + //create the edit icon + var edit_icon = document.createElement("span"); + edit_icon.classList.add("edit_icon"); + if ((popup_editable) && !(item[0])) { + edit_icon.classList.add("oi"); + edit_icon.setAttribute('data-glyph', "spreadsheet"); + edit_icon.title = "Edit" + edit_icon.id = item[1]; + edit_icon.onclick = function () { + socket.emit("popup_edit", this.id); + }; + } + list_item.append(edit_icon); + + //create the rename icon + var rename_icon = document.createElement("span"); + rename_icon.classList.add("rename_icon"); + if ((popup_renameable) && !(item[0])) { + rename_icon.classList.add("oi"); + rename_icon.setAttribute('data-glyph', "pencil"); + rename_icon.title = "Rename" + rename_icon.id = item[1]; + rename_icon.setAttribute("filename", item[2]); + rename_icon.onclick = function () { + var new_name = prompt("Please enter new filename for \n"+ this.getAttribute("filename")); + if (new_name != null) { + socket.emit("popup_rename", {"file": this.id, "new_name": new_name}); + } + }; + } + list_item.append(rename_icon); + + //create the delete icon + var delete_icon = document.createElement("span"); + delete_icon.classList.add("delete_icon"); + if (popup_deleteable) { + delete_icon.classList.add("oi"); + delete_icon.setAttribute('data-glyph', "x"); + delete_icon.title = "Delete" + delete_icon.id = item[1]; + delete_icon.setAttribute("folder", item[0]); + delete_icon.onclick = function () { + if (this.getAttribute("folder") == "true") { + if (window.confirm("Do you really want to delete this folder and ALL files under it?")) { + socket.emit("popup_delete", this.id); + } + } else { + if (window.confirm("Do you really want to delete this file?")) { + socket.emit("popup_delete", this.id); + } + } + }; + } + list_item.append(delete_icon); + + //create the actual item + var popup_item = document.createElement("span"); + 
popup_item.classList.add("file"); + popup_item.id = item[1]; + popup_item.setAttribute("folder", item[0]); + popup_item.setAttribute("valid", item[3]); + popup_item.textContent = item[2]; + popup_item.onclick = function () { + var accept = document.getElementById("popup_accept"); + if (this.getAttribute("valid") == "true") { + accept.classList.remove("disabled"); + accept.setAttribute("selected_value", this.id); + } else { + console.log("not valid"); + accept.setAttribute("selected_value", ""); + accept.classList.add("disabled"); + if (this.getAttribute("folder") == "true") { + console.log("folder"); + socket.emit("popup_change_folder", this.id); + } + } + }; + list_item.append(popup_item); + + + popup_list.append(list_item); + + + } +} + +function popup_breadcrumbs(data) { + var breadcrumbs = document.getElementById('popup_breadcrumbs') + while (breadcrumbs.firstChild) { + breadcrumbs.removeChild(breadcrumbs.firstChild); + } + + for (item of data) { + var button = document.createElement("button"); + button.id = item[0]; + button.textContent = item[1]; + button.classList.add("breadcrumbitem"); + button.onclick = function () { + socket.emit("popup_change_folder", this.id); + }; + breadcrumbs.append(button); + var span = document.createElement("span"); + span.textContent = "\\"; + breadcrumbs.append(span); + } +} + +function popup_edit_file(data) { + var popup_list = document.getElementById('popup_list'); + var accept = document.getElementById("popup_accept"); + accept.classList.add("btn-secondary"); + accept.classList.remove("btn-primary"); + accept.textContent = "Save"; + //first, let's clear out our existing data + while (popup_list.firstChild) { + popup_list.removeChild(popup_list.firstChild); + } + var accept = document.getElementById("popup_accept"); + accept.setAttribute("selected_value", ""); + accept.onclick = function () { + var textarea = document.getElementById("filecontents"); + socket.emit("popup_change_file", {"file": textarea.getAttribute("filename"), "data": textarea.value}); + document.getElementById("popup").classList.add("hidden"); + this.classList.add("hidden"); + }; + + var textarea = document.createElement("textarea"); + textarea.classList.add("fullwidth"); + textarea.rows = 25; + textarea.id = "filecontents" + textarea.setAttribute("filename", data.file); + textarea.value = data.text; + textarea.onblur = function () { + var accept = document.getElementById("popup_accept"); + accept.classList.remove("hidden"); + accept.classList.remove("btn-secondary"); + accept.classList.add("btn-primary"); + }; + popup_list.append(textarea); + +} + +function error_popup(data) { + alert(data); +} + +function upload_file(file_box) { + var fileList = file_box.files; + for (file of fileList) { + reader = new FileReader(); + reader.onload = function (event) { + socket.emit("upload_file", {'filename': file.name, "data": event.target.result}); + }; + reader.readAsArrayBuffer(file); + } +} + +function getSelectedOptions(element) { + // validate element + if(!element || !element.options) + return []; //or null? + + // return HTML5 implementation of selectedOptions instead. 
+ if (element.selectedOptions) { + selectedOptions = element.selectedOptions; + } else { + // you are here because your browser doesn't have the HTML5 selectedOptions + var opts = element.options; + var selectedOptions = []; + for(var i = 0; i < opts.length; i++) { + if(opts[i].selected) { + selectedOptions.push(opts[i]); + } + } + } + output = [] + for (item of selectedOptions) { + output.push(item.value); + } + return output; +} \ No newline at end of file diff --git a/static/custom.css b/static/custom.css index 2b6db54d..d4bfe872 100644 --- a/static/custom.css +++ b/static/custom.css @@ -291,7 +291,7 @@ body.connected #formatmenu, #formatmenu.always-available { align-items: center; } -#popup { +#popup_old { width: 75%; min-width: 500px; max-width: 1000px; @@ -369,14 +369,14 @@ body.connected #popupfooter, #popupfooter.always-available { margin-top: 200px; } -#loadpopup { +.loadpopup { width: 500px; background-color: #262626; margin-top: 100px; } @media (max-width: 768px) { - #loadpopup { + .loadpopup { width: 100%; background-color: #262626; margin-top: 100px; @@ -473,7 +473,7 @@ body.connected #popupfooter, #popupfooter.always-available { } #samplerslist { - height: 300px; + height: 310px; overflow-y: scroll; overflow-wrap: anywhere; } @@ -1056,7 +1056,7 @@ body.connected .statusiconlabel, .statusiconlabel.always-available { } .loadlistitem { - padding: 5px 10px 5px 10px; + padding: 0px 0px 0px 0px; display: flex; flex-grow: 1; color: #ffffff; @@ -1072,6 +1072,28 @@ body.connected .statusiconlabel, .statusiconlabel.always-available { background-color: #688f1f; } +.breadcrumbitem { + padding: 5px 10px 5px 10px; + color: #ffffff; + background-color: transparent; + border: none; + + -moz-transition: background-color 0.25s ease-in; + -o-transition: background-color 0.25s ease-in; + -webkit-transition: background-color 0.25s ease-in; + transition: background-color 0.25s ease-in; +} + +.breadcrumbitem:hover { + cursor: pointer; + background-color: #688f1f; +} + +hr { + padding: 0px; + margin: 0px; +} + .loadlistpadding { padding-right: 10px; } @@ -1463,3 +1485,240 @@ body.connected .popupfooter, .popupfooter.always-available { overflow: hidden; font-size: 12pt; } + +.model_layers { + width: 3ch; + background-color: inherit; + border: none; + outline: none; +} + +.model_layers:focus { + color: #cdf; +} + +.menu_icon { + position: fixed; + top:10px; + left: 5px; + z-index:100; + display: inline-block; + cursor: pointer; +} + +.SideMenu { + height: 100%; + width: 0; + position: fixed; + z-index: 1; + top: 0; + left: 0; + background-color: #111; + overflow-x: hidden; + transition: 0.5s; + padding-top: 60px; +} + +.SideMenu.open { + width: 450px; +} + +@media only screen and (max-width: 768px) { + .SideMenu.open { + width: 100%; + } +} + + +.menubar1, .menubar2, .menubar3 { + width: 21px; + height: 3px; + background-color: #999; + margin: 3px 0; + transition: 0.4s; +} + +.change .menubar1 { + transform: translate(0px, 6px) rotate(-45deg); +} + +.change .menubar2 {opacity: 0;} + +.change .menubar3 { + transform: translate(0px, -6px) rotate(45deg); +} + + +/*---------------------------------- Popup -------------------------------------------------*/ +.new_popup { + position: absolute; + top: 10vh; + left: 10%; + z-index: 999; + width: 80%; + height: 80vh; + background-color: black; + display: flex; + flex-direction: column; + background-color: #474B4F; + color: white; +} + +.new_popup .title { + width: 100%; + background-color: #337AB7; + text-align: center; + font-size: 1.3em; +} + +.new_popup 
.popup_list_area { + height: 70vh; + overflow-x: hidden; +} +.new_popup .item { + width: 100%; + background-color: #262626; + padding: 2px; + display: grid; + grid-template-areas: "folder_icon delete_icon edit_icon rename_icon file"; + grid-template-columns: 20px 20px 20px 20px auto; + +} + +.new_popup .item .folder_icon { + grid-area: folder_icon; +} + +.new_popup .item .edit_icon { + grid-area: edit_icon; +} + +.new_popup .item .rename_icon { + grid-area: rename_icon; +} + +.new_popup .item .delete_icon { + grid-area: delete_icon; +} + +.new_popup .item .file { + grid-area: file; +} + +.new_popup .item .file:hover { + background-color: #688f1f; +} + +.new_popup textarea { + grid-area: textarea; + background-color: #404040; + color: white; + resize: none; + width: 100%; +} + +.new_popup .popup_load_cancel { + text-align: center; + background-color: #285070; +} + +.popup_load_cancel_button { + vertical-align: bottom; + display: inline; +} + +.popup_load_cancel_button.btn-secondary { + color: rgb(51, 51, 51); + background-color: #686c68; +} + +.breadcrumbitem { + padding: 5px 10px 5px 10px; + color: #ffffff; + background-color: transparent; + border: none; + + -moz-transition: background-color 0.25s ease-in; + -o-transition: background-color 0.25s ease-in; + -webkit-transition: background-color 0.25s ease-in; + transition: background-color 0.25s ease-in; +} + +.breadcrumbitem:hover { + cursor: pointer; + background-color: #688f1f; +} + +#token_prob_menu { + color: white; + background-color: #262626; +} + +.token-probs { + display: inline-block; + text-align: center; + margin-right: 5px; +} + +.token-probs > table { + width: 100%; +} + +.token-probs > table > tbody > tr > td { + border: 1px solid #262626; + border-collapse: collapse; + padding: 2px 15px; +} + +.token-probs > table > tbody > tr { + background-color: #3e3e3e; +} + +.token-probs > table > tbody > tr:nth-child(2n) { + background-color: #575757; +} + +.token-probs-final-token { + font-weight: bold; + text-decoration: underline; +} + +.token-probs-final-token > td { + background: #5c8a5a; +} + +.token-probs-header { + display: block; +} + +#token_prob_container { + overflow-x: auto; + white-space: nowrap; +} + +.tokens-counted { + position: relative; +} + +.input-token-usage { + color: white; + position: absolute; + font-size: 10px; + bottom: 2px; + right: 5px; + + -webkit-user-select: none; + -moz-user-select: none; + -ms-user-select: none; + user-select: none; +} + +/* Override needed here due to the 10px right padding on inputrowleft; add 10 px. 
*/ +#inputrowleft > .input-token-usage { + right: 15px; + bottom: 1px; +} + +.wientry > .input-token-usage { + bottom: 8px; +} \ No newline at end of file diff --git a/static/favicon.js b/static/favicon.js new file mode 100644 index 00000000..fb40ac84 --- /dev/null +++ b/static/favicon.js @@ -0,0 +1,70 @@ +// Global Definitions +var fav_icon2 = "data:image/x-icon;base64,iVBORw0KGgoAAAANSUhEUgAAABAAAAAQCAMAAAAoLQ9TAAAABGdBTUEAALGPC/xhBQAAACBjSFJNAAB6JgAAgIQAAPoAAACA6AAAdTAAAOpgAAA6mAAAF3CculE8AAAB+1BMVEUAAAAAAAAAAAAAAAAAAQAAAAAAAQAAAAAAAAASFhBBWD4iUyoFEwgFEwguUTM+VDoMFAwAAAA+elIudz8AAAAAAAA0MigyLyQAAAAbLh1LdElSbUoVMBkAAABAZ0M2fkUAAAABAQFMiGQraDkAAQANFxEGFQkLFg8EEAYAAAAsZDonZjUAAABCgVVAnFYrSjhEjFpFi1sdRScAAAAjOi8VMxx1dGOFgGYAAABOTEabmIdlYlQaGhgaGhddXFauqY5JRjoAAAAAAAABAQFGeExIl1lX0XRW0XRHi1RFe02vv5W31KFd1Hpc1Hpe1HvO1KvDvJlqZ1plYVOmoIVt1IFl1H7AuZp1cV9jX1AmSCw3Nzg7NmA1MTJuz4Bm1H5MST9HPl9BQEMgNiNXgWKiobFgXICDd5dfw3RZVnJiV3zGv9Bqf29Oj2G/v8hTTpGhl8dbxHVd0npiYoxhWJvIxtlcimZFn1lRclg9SkZNblZBeEpDbEZCa0ZBc0hLY1BAS1BdaV87j01Vx3FWynJSrGZOhlVasGtas2xatm1at21WnWJQm15WyXJQvmlavnBZrGlEYEJWe1RBWz9Um2BavXBgxn9XhllGY0RLaklXiFlTwG5OpmVSfFNMbUpGZEVLa0lShldEhVCChHiKiHvWz6/Kw6WWlZGAfmj///8kr0X+AAAARHRSTlMAASFrcAhxIjLb/vWvsPb+20b4+DFFyMkz2vf43CP9/m5y9vZysLGvsQn19mz+/tz4+NxHycr3+Ejb/vaxsPX+3TRtcBrzrrgAAAABYktHRKhQCDaSAAAAB3RJTUUH5gYJFyQy3tftxgAAAQBJREFUGNNjYGBgYGRiZmFlZWNmZ2SAAA5OLm4eXj5+AQ6ogKCQi6ubu4ensCCIxygiKubl7ePr6+cfIC4owcjAJCkVGBQc4usbGhYeIS0jy8AsFxkVHRPr6xsXn5CYJK/AoKiUnJKalg5UkZGZla2swsCqmpObl1/g61tYVFxSqsbKwKpeVl5RWVVdU1tX39CoocnAotXU3NLa1t7R2dXd06utwqCj6+vb1z9h4sRJk6f4+uopMLDrG0z1nTZ94sQZM31nGRrJMjBKGJvMnjN3wrz5CxaaCnKAvSNqtmjxkqXLlptbQP0iYmllbWNrZ+/gCBVgZHdS1GR1VpAFqQcApI0/jqlZOvEAAAAldEVYdGRhdGU6Y3JlYXRlADIwMjItMDYtMDlUMjM6MzY6NTArMDA6MDDi0xr+AAAAJXRFWHRkYXRlOm1vZGlmeQAyMDIyLTA2LTA5VDIzOjM2OjUwKzAwOjAwk46iQgAAAABJRU5ErkJggg=="; +var fav_icon1 = "data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAABAAAAAQCAMAAAAoLQ9TAAAABGdBTUEAALGPC/xhBQAAACBjSFJNAAB6JgAAgIQAAPoAAACA6AAAdTAAAOpgAAA6mAAAF3CculE8AAAB+FBMVEUAAAAAAAAAAAAAAAAAAAEAAAAAAQEAAAAAAAAUFRlLVGYrSWgHEBoHEBk3S19HUGMOExkAAABOcos7apIAAAAAAAA2Ly01KyoAAAAgKzdVaX9bZHIaKzwAAABKYHhDcZgAAAABAQFfgJY2XX0AAQEQFhoIEhwOFRgGDRUAAAAAAQE3W3cyWnwAAABSeJJRjLs1R1FVgaFWgJ4lPlMAAAAsOD4aLj55bm2Md3QAAABPSkmfko9pXlsbGRkbGRlfWlm1oJxMQkAAAAAAAAABAQFTb4tYibFtvPpWgKNScpC6s7nExtNzwPp1wPnZx8jMsKtuZGFoXVutmJODwfJ7wfbHr6p5a2hnW1gtQlI4ODk7N2A2LzWDvet8wPZPRkRHPl9CQUQlMTthe4+ko7RhXYGEeJhzsuJaVXRjWHzIwtNwfYddhqLCwcpTTpGimMhvsuVzv/djYpBgWJvLydxlgptVirdZbX1ASFZUaXtOb4xOZX1OZHxNa4ZRX21DSV5gaG9Je6lqsepstO1knclcfJxtoc5tpNFuptVup9ZnkbdgjrVss+xjpuBvrd9snspOW29jdI5LVmlkj7Vvrd54t+RlfptQXXJWZHtlf51oruNgmMFfdJBYZn1RXnRWZXthfZxSeZiGgYGOhYLdxb/RubWZlpWFd3T////2kwjgAAAARXRSTlMAASFrcAhxIjLb/vWvsPb+20b4+DFFyMkz2vf43CP9/m5y9vZysLGvsQlw9fZs/v7c+PjcR8nK9/hI2/72sbD1/t00bXBAFktiAAAAAWJLR0SnwLcrAwAAAAd0SU1FB+YGCRchHQhxJNoAAAD/SURBVBjTY2BgYGBkYmZhZWVjZmdkgAAOTi5uHl4+fgEOqICgkKubu7uHp7AgiMcoIirm5e3j4+Pr5y8uKMHIwCQpFRAYFOzjExIaFi4tI8vALBcRGRUd4+MTGxefkCivwKColJSckpoGVJGekZmlrMLAqpqdk5uX7+NTUFhUXKLGysCqXlpWXlFZVV1TW1ffoKHJoKXd2NTc0trW3tHZ1d2jo8Kgq+fj09vXP2HCxEmTfXz0FRjYDQyn+EydNmHC9Bk+M42MZRkYJUxMZ82e0z933vwFZoIcYO+Imi9ctHjJ0mUWllC/iFhZ29ja2Ts4OkEFGNmdFTVZXRRkQeoBhkE/Yj5NSZ4AAAAldEVYdGRhdGU6Y3JlYXRlADIwMjItMDYtMDlUMjM6MzM6MjgrMDA6MDA90JbEAAAAJXRFWHRkYXRlOm1vZGlmeQAyMDIyLTA2LTA5VDIzOjMzOjI4KzAwOjAwTI0ueAAAAABJRU5ErkJggg=="; +var fav_icon = 
"data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAABAAAAAQCAMAAAAoLQ9TAAAABGdBTUEAALGPC/xhBQAAACBjSFJNAAB6JgAAgIQAAPoAAACA6AAAdTAAAOpgAAA6mAAAF3CculE8AAAB8lBMVEUAAAAAAAAAAAAAAAABAAAAAAABAAAAAAAAAAAdEBB0Pz5rKCgaBwcZBwdkMzJxPDocDAwAAACLTU6SOzsAAAAAAAA9Mic/LyEAAAA6HByQUUaIVEY+GBgAAACAQkKaQUIAAAABAQGWXl9+NjYBAAAaEBAcCAgZDQ0WBQUAAAB3Nzd9MjIAAACTUVK7UVJRNTWhVVaeVldTJSUAAAA+LC0+GhuGcmCgf2EAAABUTESrl4NzYlEdGhcdGhdiXFbIqIhWRjcAAAAAAAABAQGUSkq1VVX6bW6oUVGXS0vmro7+uJn6c3T6dXX/yqPnu5F3aFhxYVG/oH/7gHv6enjeuJOEcFtzX01VLCs4ODk7NmA5MTH1gHr6e3hWSTxHPl9CQUQ/JCKPYGGko7RhXYGEeJjmcW9cVnFjWH3IwtOHb3CjXV3CwcpTTpGimMjlb3D4c3RmYI1gWJvLydybZWW+T0x+V1hRP0Z7U1WTSEiHRUWGRUSORkZuTlBRQVBwX2CvRkXtaGjvamrNYWKmU1PVZ2fXaGjbaWncaWnAX1+7W1vkYF/ja2zRZWV9QkGeVFN2Pz69XV3ia2zkeHmpWFd/REOJSUirWVjjaGjBYGCeUlKMSkl8QkGBRUSoVlWeUE2QgXeWiHr1zqjmw5+bl5KVe2T///8NZLRGAAAARHRSTlMAASFrcAhxIjLb/vWvsPb+20b4+DFFyMkz2vf43CP9/m5y9vZysLGvsQn19mz+/tz4+NxHycr3+Ejb/vaxsPX+3TRtcBrzrrgAAAABYktHRKUuuUovAAAAB3RJTUUH5gYJFzsfVlK/LQAAAP9JREFUGNNjYGBgYGRiZmFlZWNmZ2SAAA5OLm4eXj5+AQ6ogKCQi6ubm7uHsCCIxygiKubp5e3t7ePrJy4owcjAJCnlHxAY5O0dHBIaJi0jy8AsFx4RGRXt7R0TGxefIK/AoKiUmJSckgpUkZaekamswsCqmpWdk5vn7Z1fUFhUrMbKwKpeUlpWXlFZVV1TW1evocnAotXQ2NTc0trW3tHZ2KWtwqCj6+3d3dPb19c/YaK3t54CA7u+wSTvyVP6+qZO855uaCTLwChhbDJj5qzZc6bOnWcqyAH2jqjZ/AULFy1eYm4B9YuIpZW1ja2dvYMjVICR3UlRk9VZQRakHgAlRz6K4dvoSgAAACV0RVh0ZGF0ZTpjcmVhdGUAMjAyMi0wNi0wOVQyMzo1OTozMSswMDowMJt1iQMAAAAldEVYdGRhdGU6bW9kaWZ5ADIwMjItMDYtMDlUMjM6NTk6MzErMDA6MDDqKDG/AAAAAElFTkSuQmCC" +var submit_start; + +var favicon = { + + // Change the Page Icon and Title. + change: function(iconURL) { + this.addLink(iconURL, "icon"); + this.addLink(iconURL, "shortcut icon"); + }, + + addLink: function(iconURL, relValue) { + var link = document.createElement("link"); + link.type = "image/x-icon"; + link.rel = relValue; + link.href = iconURL; + this.removeLink(relValue); + this.docHead.appendChild(link); + }, + + removeLink: function(relValue) { + var links = this.docHead.getElementsByTagName("link"); + for (var i = 0; i < links.length; i++) { + var link = links[i]; + if (link.type == "image/x-icon" && link.rel == relValue) { + this.docHead.removeChild(link); + return; // Assuming only one match at most. 
+ } + } + }, + + swapLink: function() { + if (this.run == true) { + if (this.icon == 1) { + this.change(fav_icon2); + this.icon = 2; + } else { + this.change(fav_icon1); + this.icon = 1; + } + } + }, + + auto_swap: function() { + if (this.run == true) { + this.swapLink(); + setTimeout(() => { this.auto_swap(); }, 1000); + } + }, + + start_swap: function() { + this.run = true; + this.auto_swap(); + submit_start = Date.now(); + }, + + stop_swap: function() { + this.run = false; + this.change(fav_icon); + if (typeof submit_start !== 'undefined') { + $("#runtime")[0].innerHTML = `Execution time: ${Math.round((Date.now() - submit_start)/1000)} sec`; + delete submit_start; + } + }, + + docHead:document.getElementsByTagName("head")[0] +} \ No newline at end of file diff --git a/static/open-iconic/css/open-iconic-bootstrap.css b/static/open-iconic/css/open-iconic-bootstrap.css new file mode 100644 index 00000000..56c4e5f3 --- /dev/null +++ b/static/open-iconic/css/open-iconic-bootstrap.css @@ -0,0 +1,952 @@ +/* Bootstrap */ + +@font-face { + font-family: 'Icons'; + src: url('../fonts/open-iconic.eot'); + src: url('../fonts/open-iconic.eot?#iconic-sm') format('embedded-opentype'), url('../fonts/open-iconic.woff') format('woff'), url('../fonts/open-iconic.ttf') format('truetype'), url('../fonts/open-iconic.otf') format('opentype'), url('../fonts/open-iconic.svg#iconic-sm') format('svg'); + font-weight: normal; + font-style: normal; +} + +.oi { + position: relative; + top: 1px; + display: inline-block; + speak:none; + font-family: 'Icons'; + font-style: normal; + font-weight: normal; + line-height: 1; + -webkit-font-smoothing: antialiased; + -moz-osx-font-smoothing: grayscale; +} + +.oi:empty:before { + width: 1em; + text-align: center; + box-sizing: content-box; +} + +.oi.oi-align-center:before { + text-align: center; +} + +.oi.oi-align-left:before { + text-align: left; +} + +.oi.oi-align-right:before { + text-align: right; +} + + +.oi.oi-flip-horizontal:before { + -webkit-transform: scale(-1, 1); + -ms-transform: scale(-1, 1); + transform: scale(-1, 1); +} + +.oi.oi-flip-vertical:before { + -webkit-transform: scale(1, -1); + -ms-transform: scale(-1, 1); + transform: scale(1, -1); +} + +.oi.oi-flip-horizontal-vertical:before { + -webkit-transform: scale(-1, -1); + -ms-transform: scale(-1, 1); + transform: scale(-1, -1); +} + + +.oi-account-login:before { + content:'\e000'; +} + +.oi-account-logout:before { + content:'\e001'; +} + +.oi-action-redo:before { + content:'\e002'; +} + +.oi-action-undo:before { + content:'\e003'; +} + +.oi-align-center:before { + content:'\e004'; +} + +.oi-align-left:before { + content:'\e005'; +} + +.oi-align-right:before { + content:'\e006'; +} + +.oi-aperture:before { + content:'\e007'; +} + +.oi-arrow-bottom:before { + content:'\e008'; +} + +.oi-arrow-circle-bottom:before { + content:'\e009'; +} + +.oi-arrow-circle-left:before { + content:'\e00a'; +} + +.oi-arrow-circle-right:before { + content:'\e00b'; +} + +.oi-arrow-circle-top:before { + content:'\e00c'; +} + +.oi-arrow-left:before { + content:'\e00d'; +} + +.oi-arrow-right:before { + content:'\e00e'; +} + +.oi-arrow-thick-bottom:before { + content:'\e00f'; +} + +.oi-arrow-thick-left:before { + content:'\e010'; +} + +.oi-arrow-thick-right:before { + content:'\e011'; +} + +.oi-arrow-thick-top:before { + content:'\e012'; +} + +.oi-arrow-top:before { + content:'\e013'; +} + +.oi-audio-spectrum:before { + content:'\e014'; +} + +.oi-audio:before { + content:'\e015'; +} + +.oi-badge:before { + content:'\e016'; +} + 
+.oi-ban:before { + content:'\e017'; +} + +.oi-bar-chart:before { + content:'\e018'; +} + +.oi-basket:before { + content:'\e019'; +} + +.oi-battery-empty:before { + content:'\e01a'; +} + +.oi-battery-full:before { + content:'\e01b'; +} + +.oi-beaker:before { + content:'\e01c'; +} + +.oi-bell:before { + content:'\e01d'; +} + +.oi-bluetooth:before { + content:'\e01e'; +} + +.oi-bold:before { + content:'\e01f'; +} + +.oi-bolt:before { + content:'\e020'; +} + +.oi-book:before { + content:'\e021'; +} + +.oi-bookmark:before { + content:'\e022'; +} + +.oi-box:before { + content:'\e023'; +} + +.oi-briefcase:before { + content:'\e024'; +} + +.oi-british-pound:before { + content:'\e025'; +} + +.oi-browser:before { + content:'\e026'; +} + +.oi-brush:before { + content:'\e027'; +} + +.oi-bug:before { + content:'\e028'; +} + +.oi-bullhorn:before { + content:'\e029'; +} + +.oi-calculator:before { + content:'\e02a'; +} + +.oi-calendar:before { + content:'\e02b'; +} + +.oi-camera-slr:before { + content:'\e02c'; +} + +.oi-caret-bottom:before { + content:'\e02d'; +} + +.oi-caret-left:before { + content:'\e02e'; +} + +.oi-caret-right:before { + content:'\e02f'; +} + +.oi-caret-top:before { + content:'\e030'; +} + +.oi-cart:before { + content:'\e031'; +} + +.oi-chat:before { + content:'\e032'; +} + +.oi-check:before { + content:'\e033'; +} + +.oi-chevron-bottom:before { + content:'\e034'; +} + +.oi-chevron-left:before { + content:'\e035'; +} + +.oi-chevron-right:before { + content:'\e036'; +} + +.oi-chevron-top:before { + content:'\e037'; +} + +.oi-circle-check:before { + content:'\e038'; +} + +.oi-circle-x:before { + content:'\e039'; +} + +.oi-clipboard:before { + content:'\e03a'; +} + +.oi-clock:before { + content:'\e03b'; +} + +.oi-cloud-download:before { + content:'\e03c'; +} + +.oi-cloud-upload:before { + content:'\e03d'; +} + +.oi-cloud:before { + content:'\e03e'; +} + +.oi-cloudy:before { + content:'\e03f'; +} + +.oi-code:before { + content:'\e040'; +} + +.oi-cog:before { + content:'\e041'; +} + +.oi-collapse-down:before { + content:'\e042'; +} + +.oi-collapse-left:before { + content:'\e043'; +} + +.oi-collapse-right:before { + content:'\e044'; +} + +.oi-collapse-up:before { + content:'\e045'; +} + +.oi-command:before { + content:'\e046'; +} + +.oi-comment-square:before { + content:'\e047'; +} + +.oi-compass:before { + content:'\e048'; +} + +.oi-contrast:before { + content:'\e049'; +} + +.oi-copywriting:before { + content:'\e04a'; +} + +.oi-credit-card:before { + content:'\e04b'; +} + +.oi-crop:before { + content:'\e04c'; +} + +.oi-dashboard:before { + content:'\e04d'; +} + +.oi-data-transfer-download:before { + content:'\e04e'; +} + +.oi-data-transfer-upload:before { + content:'\e04f'; +} + +.oi-delete:before { + content:'\e050'; +} + +.oi-dial:before { + content:'\e051'; +} + +.oi-document:before { + content:'\e052'; +} + +.oi-dollar:before { + content:'\e053'; +} + +.oi-double-quote-sans-left:before { + content:'\e054'; +} + +.oi-double-quote-sans-right:before { + content:'\e055'; +} + +.oi-double-quote-serif-left:before { + content:'\e056'; +} + +.oi-double-quote-serif-right:before { + content:'\e057'; +} + +.oi-droplet:before { + content:'\e058'; +} + +.oi-eject:before { + content:'\e059'; +} + +.oi-elevator:before { + content:'\e05a'; +} + +.oi-ellipses:before { + content:'\e05b'; +} + +.oi-envelope-closed:before { + content:'\e05c'; +} + +.oi-envelope-open:before { + content:'\e05d'; +} + +.oi-euro:before { + content:'\e05e'; +} + +.oi-excerpt:before { + content:'\e05f'; +} + 
+.oi-expand-down:before { + content:'\e060'; +} + +.oi-expand-left:before { + content:'\e061'; +} + +.oi-expand-right:before { + content:'\e062'; +} + +.oi-expand-up:before { + content:'\e063'; +} + +.oi-external-link:before { + content:'\e064'; +} + +.oi-eye:before { + content:'\e065'; +} + +.oi-eyedropper:before { + content:'\e066'; +} + +.oi-file:before { + content:'\e067'; +} + +.oi-fire:before { + content:'\e068'; +} + +.oi-flag:before { + content:'\e069'; +} + +.oi-flash:before { + content:'\e06a'; +} + +.oi-folder:before { + content:'\e06b'; +} + +.oi-fork:before { + content:'\e06c'; +} + +.oi-fullscreen-enter:before { + content:'\e06d'; +} + +.oi-fullscreen-exit:before { + content:'\e06e'; +} + +.oi-globe:before { + content:'\e06f'; +} + +.oi-graph:before { + content:'\e070'; +} + +.oi-grid-four-up:before { + content:'\e071'; +} + +.oi-grid-three-up:before { + content:'\e072'; +} + +.oi-grid-two-up:before { + content:'\e073'; +} + +.oi-hard-drive:before { + content:'\e074'; +} + +.oi-header:before { + content:'\e075'; +} + +.oi-headphones:before { + content:'\e076'; +} + +.oi-heart:before { + content:'\e077'; +} + +.oi-home:before { + content:'\e078'; +} + +.oi-image:before { + content:'\e079'; +} + +.oi-inbox:before { + content:'\e07a'; +} + +.oi-infinity:before { + content:'\e07b'; +} + +.oi-info:before { + content:'\e07c'; +} + +.oi-italic:before { + content:'\e07d'; +} + +.oi-justify-center:before { + content:'\e07e'; +} + +.oi-justify-left:before { + content:'\e07f'; +} + +.oi-justify-right:before { + content:'\e080'; +} + +.oi-key:before { + content:'\e081'; +} + +.oi-laptop:before { + content:'\e082'; +} + +.oi-layers:before { + content:'\e083'; +} + +.oi-lightbulb:before { + content:'\e084'; +} + +.oi-link-broken:before { + content:'\e085'; +} + +.oi-link-intact:before { + content:'\e086'; +} + +.oi-list-rich:before { + content:'\e087'; +} + +.oi-list:before { + content:'\e088'; +} + +.oi-location:before { + content:'\e089'; +} + +.oi-lock-locked:before { + content:'\e08a'; +} + +.oi-lock-unlocked:before { + content:'\e08b'; +} + +.oi-loop-circular:before { + content:'\e08c'; +} + +.oi-loop-square:before { + content:'\e08d'; +} + +.oi-loop:before { + content:'\e08e'; +} + +.oi-magnifying-glass:before { + content:'\e08f'; +} + +.oi-map-marker:before { + content:'\e090'; +} + +.oi-map:before { + content:'\e091'; +} + +.oi-media-pause:before { + content:'\e092'; +} + +.oi-media-play:before { + content:'\e093'; +} + +.oi-media-record:before { + content:'\e094'; +} + +.oi-media-skip-backward:before { + content:'\e095'; +} + +.oi-media-skip-forward:before { + content:'\e096'; +} + +.oi-media-step-backward:before { + content:'\e097'; +} + +.oi-media-step-forward:before { + content:'\e098'; +} + +.oi-media-stop:before { + content:'\e099'; +} + +.oi-medical-cross:before { + content:'\e09a'; +} + +.oi-menu:before { + content:'\e09b'; +} + +.oi-microphone:before { + content:'\e09c'; +} + +.oi-minus:before { + content:'\e09d'; +} + +.oi-monitor:before { + content:'\e09e'; +} + +.oi-moon:before { + content:'\e09f'; +} + +.oi-move:before { + content:'\e0a0'; +} + +.oi-musical-note:before { + content:'\e0a1'; +} + +.oi-paperclip:before { + content:'\e0a2'; +} + +.oi-pencil:before { + content:'\e0a3'; +} + +.oi-people:before { + content:'\e0a4'; +} + +.oi-person:before { + content:'\e0a5'; +} + +.oi-phone:before { + content:'\e0a6'; +} + +.oi-pie-chart:before { + content:'\e0a7'; +} + +.oi-pin:before { + content:'\e0a8'; +} + +.oi-play-circle:before { + content:'\e0a9'; +} + 
+.oi-plus:before { + content:'\e0aa'; +} + +.oi-power-standby:before { + content:'\e0ab'; +} + +.oi-print:before { + content:'\e0ac'; +} + +.oi-project:before { + content:'\e0ad'; +} + +.oi-pulse:before { + content:'\e0ae'; +} + +.oi-puzzle-piece:before { + content:'\e0af'; +} + +.oi-question-mark:before { + content:'\e0b0'; +} + +.oi-rain:before { + content:'\e0b1'; +} + +.oi-random:before { + content:'\e0b2'; +} + +.oi-reload:before { + content:'\e0b3'; +} + +.oi-resize-both:before { + content:'\e0b4'; +} + +.oi-resize-height:before { + content:'\e0b5'; +} + +.oi-resize-width:before { + content:'\e0b6'; +} + +.oi-rss-alt:before { + content:'\e0b7'; +} + +.oi-rss:before { + content:'\e0b8'; +} + +.oi-script:before { + content:'\e0b9'; +} + +.oi-share-boxed:before { + content:'\e0ba'; +} + +.oi-share:before { + content:'\e0bb'; +} + +.oi-shield:before { + content:'\e0bc'; +} + +.oi-signal:before { + content:'\e0bd'; +} + +.oi-signpost:before { + content:'\e0be'; +} + +.oi-sort-ascending:before { + content:'\e0bf'; +} + +.oi-sort-descending:before { + content:'\e0c0'; +} + +.oi-spreadsheet:before { + content:'\e0c1'; +} + +.oi-star:before { + content:'\e0c2'; +} + +.oi-sun:before { + content:'\e0c3'; +} + +.oi-tablet:before { + content:'\e0c4'; +} + +.oi-tag:before { + content:'\e0c5'; +} + +.oi-tags:before { + content:'\e0c6'; +} + +.oi-target:before { + content:'\e0c7'; +} + +.oi-task:before { + content:'\e0c8'; +} + +.oi-terminal:before { + content:'\e0c9'; +} + +.oi-text:before { + content:'\e0ca'; +} + +.oi-thumb-down:before { + content:'\e0cb'; +} + +.oi-thumb-up:before { + content:'\e0cc'; +} + +.oi-timer:before { + content:'\e0cd'; +} + +.oi-transfer:before { + content:'\e0ce'; +} + +.oi-trash:before { + content:'\e0cf'; +} + +.oi-underline:before { + content:'\e0d0'; +} + +.oi-vertical-align-bottom:before { + content:'\e0d1'; +} + +.oi-vertical-align-center:before { + content:'\e0d2'; +} + +.oi-vertical-align-top:before { + content:'\e0d3'; +} + +.oi-video:before { + content:'\e0d4'; +} + +.oi-volume-high:before { + content:'\e0d5'; +} + +.oi-volume-low:before { + content:'\e0d6'; +} + +.oi-volume-off:before { + content:'\e0d7'; +} + +.oi-warning:before { + content:'\e0d8'; +} + +.oi-wifi:before { + content:'\e0d9'; +} + +.oi-wrench:before { + content:'\e0da'; +} + +.oi-x:before { + content:'\e0db'; +} + +.oi-yen:before { + content:'\e0dc'; +} + +.oi-zoom-in:before { + content:'\e0dd'; +} + +.oi-zoom-out:before { + content:'\e0de'; +} diff --git a/static/open-iconic/css/open-iconic-bootstrap.less b/static/open-iconic/css/open-iconic-bootstrap.less new file mode 100644 index 00000000..fc3fe341 --- /dev/null +++ b/static/open-iconic/css/open-iconic-bootstrap.less @@ -0,0 +1,960 @@ +/* Bootstrap */ + +/* Override Bootstrap default variable */ +//@icon-font-path: "../fonts/"; + +@font-face { + font-family: 'Icons'; + src: ~"url('@{icon-font-path}open-iconic.eot')"; + src: ~"url('@{icon-font-path}open-iconic.eot?#iconic-sm') format('embedded-opentype')", + ~"url('@{icon-font-path}open-iconic.woff') format('woff')", + ~"url('@{icon-font-path}open-iconic.ttf') format('truetype')", + ~"url('@{icon-font-path}open-iconic.svg#iconic-sm') format('svg')"; + font-weight: normal; + font-style: normal; +} + +// Catchall baseclass +.oi { + position: relative; + top: 1px; + display: inline-block; + font-family: 'Icons'; + font-style: normal; + font-weight: normal; + line-height: 1; + -webkit-font-smoothing: antialiased; + -moz-osx-font-smoothing: grayscale; + + &:empty:before { + width: 1em; + 
text-align: center; + box-sizing: content-box; + } + + &.oi-align-center:before { + text-align: center; + } + + &.oi-align-left:before { + text-align: left; + } + + &.oi-align-right:before { + text-align: right; + } + + + &.oi-flip-horizontal:before { + -webkit-transform: scale(-1, 1); + -ms-transform: scale(-1, 1); + transform: scale(-1, 1); + } + + &.oi-flip-vertical:before { + -webkit-transform: scale(1, -1); + -ms-transform: scale(-1, 1); + transform: scale(1, -1); + } + + &.oi-flip-horizontal-vertical:before { + -webkit-transform: scale(-1, -1); + -ms-transform: scale(-1, 1); + transform: scale(-1, -1); + } +} + + + +.oi-account-login:before { + content:"\e000"; +} + +.oi-account-logout:before { + content:"\e001"; +} + +.oi-action-redo:before { + content:"\e002"; +} + +.oi-action-undo:before { + content:"\e003"; +} + +.oi-align-center:before { + content:"\e004"; +} + +.oi-align-left:before { + content:"\e005"; +} + +.oi-align-right:before { + content:"\e006"; +} + +.oi-aperture:before { + content:"\e007"; +} + +.oi-arrow-bottom:before { + content:"\e008"; +} + +.oi-arrow-circle-bottom:before { + content:"\e009"; +} + +.oi-arrow-circle-left:before { + content:"\e00a"; +} + +.oi-arrow-circle-right:before { + content:"\e00b"; +} + +.oi-arrow-circle-top:before { + content:"\e00c"; +} + +.oi-arrow-left:before { + content:"\e00d"; +} + +.oi-arrow-right:before { + content:"\e00e"; +} + +.oi-arrow-thick-bottom:before { + content:"\e00f"; +} + +.oi-arrow-thick-left:before { + content:"\e010"; +} + +.oi-arrow-thick-right:before { + content:"\e011"; +} + +.oi-arrow-thick-top:before { + content:"\e012"; +} + +.oi-arrow-top:before { + content:"\e013"; +} + +.oi-audio-spectrum:before { + content:"\e014"; +} + +.oi-audio:before { + content:"\e015"; +} + +.oi-badge:before { + content:"\e016"; +} + +.oi-ban:before { + content:"\e017"; +} + +.oi-bar-chart:before { + content:"\e018"; +} + +.oi-basket:before { + content:"\e019"; +} + +.oi-battery-empty:before { + content:"\e01a"; +} + +.oi-battery-full:before { + content:"\e01b"; +} + +.oi-beaker:before { + content:"\e01c"; +} + +.oi-bell:before { + content:"\e01d"; +} + +.oi-bluetooth:before { + content:"\e01e"; +} + +.oi-bold:before { + content:"\e01f"; +} + +.oi-bolt:before { + content:"\e020"; +} + +.oi-book:before { + content:"\e021"; +} + +.oi-bookmark:before { + content:"\e022"; +} + +.oi-box:before { + content:"\e023"; +} + +.oi-briefcase:before { + content:"\e024"; +} + +.oi-british-pound:before { + content:"\e025"; +} + +.oi-browser:before { + content:"\e026"; +} + +.oi-brush:before { + content:"\e027"; +} + +.oi-bug:before { + content:"\e028"; +} + +.oi-bullhorn:before { + content:"\e029"; +} + +.oi-calculator:before { + content:"\e02a"; +} + +.oi-calendar:before { + content:"\e02b"; +} + +.oi-camera-slr:before { + content:"\e02c"; +} + +.oi-caret-bottom:before { + content:"\e02d"; +} + +.oi-caret-left:before { + content:"\e02e"; +} + +.oi-caret-right:before { + content:"\e02f"; +} + +.oi-caret-top:before { + content:"\e030"; +} + +.oi-cart:before { + content:"\e031"; +} + +.oi-chat:before { + content:"\e032"; +} + +.oi-check:before { + content:"\e033"; +} + +.oi-chevron-bottom:before { + content:"\e034"; +} + +.oi-chevron-left:before { + content:"\e035"; +} + +.oi-chevron-right:before { + content:"\e036"; +} + +.oi-chevron-top:before { + content:"\e037"; +} + +.oi-circle-check:before { + content:"\e038"; +} + +.oi-circle-x:before { + content:"\e039"; +} + +.oi-clipboard:before { + content:"\e03a"; +} + +.oi-clock:before { + content:"\e03b"; 
+} + +.oi-cloud-download:before { + content:"\e03c"; +} + +.oi-cloud-upload:before { + content:"\e03d"; +} + +.oi-cloud:before { + content:"\e03e"; +} + +.oi-cloudy:before { + content:"\e03f"; +} + +.oi-code:before { + content:"\e040"; +} + +.oi-cog:before { + content:"\e041"; +} + +.oi-collapse-down:before { + content:"\e042"; +} + +.oi-collapse-left:before { + content:"\e043"; +} + +.oi-collapse-right:before { + content:"\e044"; +} + +.oi-collapse-up:before { + content:"\e045"; +} + +.oi-command:before { + content:"\e046"; +} + +.oi-comment-square:before { + content:"\e047"; +} + +.oi-compass:before { + content:"\e048"; +} + +.oi-contrast:before { + content:"\e049"; +} + +.oi-copywriting:before { + content:"\e04a"; +} + +.oi-credit-card:before { + content:"\e04b"; +} + +.oi-crop:before { + content:"\e04c"; +} + +.oi-dashboard:before { + content:"\e04d"; +} + +.oi-data-transfer-download:before { + content:"\e04e"; +} + +.oi-data-transfer-upload:before { + content:"\e04f"; +} + +.oi-delete:before { + content:"\e050"; +} + +.oi-dial:before { + content:"\e051"; +} + +.oi-document:before { + content:"\e052"; +} + +.oi-dollar:before { + content:"\e053"; +} + +.oi-double-quote-sans-left:before { + content:"\e054"; +} + +.oi-double-quote-sans-right:before { + content:"\e055"; +} + +.oi-double-quote-serif-left:before { + content:"\e056"; +} + +.oi-double-quote-serif-right:before { + content:"\e057"; +} + +.oi-droplet:before { + content:"\e058"; +} + +.oi-eject:before { + content:"\e059"; +} + +.oi-elevator:before { + content:"\e05a"; +} + +.oi-ellipses:before { + content:"\e05b"; +} + +.oi-envelope-closed:before { + content:"\e05c"; +} + +.oi-envelope-open:before { + content:"\e05d"; +} + +.oi-euro:before { + content:"\e05e"; +} + +.oi-excerpt:before { + content:"\e05f"; +} + +.oi-expand-down:before { + content:"\e060"; +} + +.oi-expand-left:before { + content:"\e061"; +} + +.oi-expand-right:before { + content:"\e062"; +} + +.oi-expand-up:before { + content:"\e063"; +} + +.oi-external-link:before { + content:"\e064"; +} + +.oi-eye:before { + content:"\e065"; +} + +.oi-eyedropper:before { + content:"\e066"; +} + +.oi-file:before { + content:"\e067"; +} + +.oi-fire:before { + content:"\e068"; +} + +.oi-flag:before { + content:"\e069"; +} + +.oi-flash:before { + content:"\e06a"; +} + +.oi-folder:before { + content:"\e06b"; +} + +.oi-fork:before { + content:"\e06c"; +} + +.oi-fullscreen-enter:before { + content:"\e06d"; +} + +.oi-fullscreen-exit:before { + content:"\e06e"; +} + +.oi-globe:before { + content:"\e06f"; +} + +.oi-graph:before { + content:"\e070"; +} + +.oi-grid-four-up:before { + content:"\e071"; +} + +.oi-grid-three-up:before { + content:"\e072"; +} + +.oi-grid-two-up:before { + content:"\e073"; +} + +.oi-hard-drive:before { + content:"\e074"; +} + +.oi-header:before { + content:"\e075"; +} + +.oi-headphones:before { + content:"\e076"; +} + +.oi-heart:before { + content:"\e077"; +} + +.oi-home:before { + content:"\e078"; +} + +.oi-image:before { + content:"\e079"; +} + +.oi-inbox:before { + content:"\e07a"; +} + +.oi-infinity:before { + content:"\e07b"; +} + +.oi-info:before { + content:"\e07c"; +} + +.oi-italic:before { + content:"\e07d"; +} + +.oi-justify-center:before { + content:"\e07e"; +} + +.oi-justify-left:before { + content:"\e07f"; +} + +.oi-justify-right:before { + content:"\e080"; +} + +.oi-key:before { + content:"\e081"; +} + +.oi-laptop:before { + content:"\e082"; +} + +.oi-layers:before { + content:"\e083"; +} + +.oi-lightbulb:before { + content:"\e084"; +} + 
+.oi-link-broken:before { + content:"\e085"; +} + +.oi-link-intact:before { + content:"\e086"; +} + +.oi-list-rich:before { + content:"\e087"; +} + +.oi-list:before { + content:"\e088"; +} + +.oi-location:before { + content:"\e089"; +} + +.oi-lock-locked:before { + content:"\e08a"; +} + +.oi-lock-unlocked:before { + content:"\e08b"; +} + +.oi-loop-circular:before { + content:"\e08c"; +} + +.oi-loop-square:before { + content:"\e08d"; +} + +.oi-loop:before { + content:"\e08e"; +} + +.oi-magnifying-glass:before { + content:"\e08f"; +} + +.oi-map-marker:before { + content:"\e090"; +} + +.oi-map:before { + content:"\e091"; +} + +.oi-media-pause:before { + content:"\e092"; +} + +.oi-media-play:before { + content:"\e093"; +} + +.oi-media-record:before { + content:"\e094"; +} + +.oi-media-skip-backward:before { + content:"\e095"; +} + +.oi-media-skip-forward:before { + content:"\e096"; +} + +.oi-media-step-backward:before { + content:"\e097"; +} + +.oi-media-step-forward:before { + content:"\e098"; +} + +.oi-media-stop:before { + content:"\e099"; +} + +.oi-medical-cross:before { + content:"\e09a"; +} + +.oi-menu:before { + content:"\e09b"; +} + +.oi-microphone:before { + content:"\e09c"; +} + +.oi-minus:before { + content:"\e09d"; +} + +.oi-monitor:before { + content:"\e09e"; +} + +.oi-moon:before { + content:"\e09f"; +} + +.oi-move:before { + content:"\e0a0"; +} + +.oi-musical-note:before { + content:"\e0a1"; +} + +.oi-paperclip:before { + content:"\e0a2"; +} + +.oi-pencil:before { + content:"\e0a3"; +} + +.oi-people:before { + content:"\e0a4"; +} + +.oi-person:before { + content:"\e0a5"; +} + +.oi-phone:before { + content:"\e0a6"; +} + +.oi-pie-chart:before { + content:"\e0a7"; +} + +.oi-pin:before { + content:"\e0a8"; +} + +.oi-play-circle:before { + content:"\e0a9"; +} + +.oi-plus:before { + content:"\e0aa"; +} + +.oi-power-standby:before { + content:"\e0ab"; +} + +.oi-print:before { + content:"\e0ac"; +} + +.oi-project:before { + content:"\e0ad"; +} + +.oi-pulse:before { + content:"\e0ae"; +} + +.oi-puzzle-piece:before { + content:"\e0af"; +} + +.oi-question-mark:before { + content:"\e0b0"; +} + +.oi-rain:before { + content:"\e0b1"; +} + +.oi-random:before { + content:"\e0b2"; +} + +.oi-reload:before { + content:"\e0b3"; +} + +.oi-resize-both:before { + content:"\e0b4"; +} + +.oi-resize-height:before { + content:"\e0b5"; +} + +.oi-resize-width:before { + content:"\e0b6"; +} + +.oi-rss-alt:before { + content:"\e0b7"; +} + +.oi-rss:before { + content:"\e0b8"; +} + +.oi-script:before { + content:"\e0b9"; +} + +.oi-share-boxed:before { + content:"\e0ba"; +} + +.oi-share:before { + content:"\e0bb"; +} + +.oi-shield:before { + content:"\e0bc"; +} + +.oi-signal:before { + content:"\e0bd"; +} + +.oi-signpost:before { + content:"\e0be"; +} + +.oi-sort-ascending:before { + content:"\e0bf"; +} + +.oi-sort-descending:before { + content:"\e0c0"; +} + +.oi-spreadsheet:before { + content:"\e0c1"; +} + +.oi-star:before { + content:"\e0c2"; +} + +.oi-sun:before { + content:"\e0c3"; +} + +.oi-tablet:before { + content:"\e0c4"; +} + +.oi-tag:before { + content:"\e0c5"; +} + +.oi-tags:before { + content:"\e0c6"; +} + +.oi-target:before { + content:"\e0c7"; +} + +.oi-task:before { + content:"\e0c8"; +} + +.oi-terminal:before { + content:"\e0c9"; +} + +.oi-text:before { + content:"\e0ca"; +} + +.oi-thumb-down:before { + content:"\e0cb"; +} + +.oi-thumb-up:before { + content:"\e0cc"; +} + +.oi-timer:before { + content:"\e0cd"; +} + +.oi-transfer:before { + content:"\e0ce"; +} + +.oi-trash:before { + 
content:"\e0cf"; +} + +.oi-underline:before { + content:"\e0d0"; +} + +.oi-vertical-align-bottom:before { + content:"\e0d1"; +} + +.oi-vertical-align-center:before { + content:"\e0d2"; +} + +.oi-vertical-align-top:before { + content:"\e0d3"; +} + +.oi-video:before { + content:"\e0d4"; +} + +.oi-volume-high:before { + content:"\e0d5"; +} + +.oi-volume-low:before { + content:"\e0d6"; +} + +.oi-volume-off:before { + content:"\e0d7"; +} + +.oi-warning:before { + content:"\e0d8"; +} + +.oi-wifi:before { + content:"\e0d9"; +} + +.oi-wrench:before { + content:"\e0da"; +} + +.oi-x:before { + content:"\e0db"; +} + +.oi-yen:before { + content:"\e0dc"; +} + +.oi-zoom-in:before { + content:"\e0dd"; +} + +.oi-zoom-out:before { + content:"\e0de"; +} + diff --git a/static/open-iconic/css/open-iconic-bootstrap.min.css b/static/open-iconic/css/open-iconic-bootstrap.min.css new file mode 100644 index 00000000..4664f2e8 --- /dev/null +++ b/static/open-iconic/css/open-iconic-bootstrap.min.css @@ -0,0 +1 @@ +@font-face{font-family:Icons;src:url(../fonts/open-iconic.eot);src:url(../fonts/open-iconic.eot?#iconic-sm) format('embedded-opentype'),url(../fonts/open-iconic.woff) format('woff'),url(../fonts/open-iconic.ttf) format('truetype'),url(../fonts/open-iconic.otf) format('opentype'),url(../fonts/open-iconic.svg#iconic-sm) format('svg');font-weight:400;font-style:normal}.oi{position:relative;top:1px;display:inline-block;speak:none;font-family:Icons;font-style:normal;font-weight:400;line-height:1;-webkit-font-smoothing:antialiased;-moz-osx-font-smoothing:grayscale}.oi:empty:before{width:1em;text-align:center;box-sizing:content-box}.oi.oi-align-center:before{text-align:center}.oi.oi-align-left:before{text-align:left}.oi.oi-align-right:before{text-align:right}.oi.oi-flip-horizontal:before{-webkit-transform:scale(-1,1);-ms-transform:scale(-1,1);transform:scale(-1,1)}.oi.oi-flip-vertical:before{-webkit-transform:scale(1,-1);-ms-transform:scale(-1,1);transform:scale(1,-1)}.oi.oi-flip-horizontal-vertical:before{-webkit-transform:scale(-1,-1);-ms-transform:scale(-1,1);transform:scale(-1,-1)}.oi-account-login:before{content:'\e000'}.oi-account-logout:before{content:'\e001'}.oi-action-redo:before{content:'\e002'}.oi-action-undo:before{content:'\e003'}.oi-align-center:before{content:'\e004'}.oi-align-left:before{content:'\e005'}.oi-align-right:before{content:'\e006'}.oi-aperture:before{content:'\e007'}.oi-arrow-bottom:before{content:'\e008'}.oi-arrow-circle-bottom:before{content:'\e009'}.oi-arrow-circle-left:before{content:'\e00a'}.oi-arrow-circle-right:before{content:'\e00b'}.oi-arrow-circle-top:before{content:'\e00c'}.oi-arrow-left:before{content:'\e00d'}.oi-arrow-right:before{content:'\e00e'}.oi-arrow-thick-bottom:before{content:'\e00f'}.oi-arrow-thick-left:before{content:'\e010'}.oi-arrow-thick-right:before{content:'\e011'}.oi-arrow-thick-top:before{content:'\e012'}.oi-arrow-top:before{content:'\e013'}.oi-audio-spectrum:before{content:'\e014'}.oi-audio:before{content:'\e015'}.oi-badge:before{content:'\e016'}.oi-ban:before{content:'\e017'}.oi-bar-chart:before{content:'\e018'}.oi-basket:before{content:'\e019'}.oi-battery-empty:before{content:'\e01a'}.oi-battery-full:before{content:'\e01b'}.oi-beaker:before{content:'\e01c'}.oi-bell:before{content:'\e01d'}.oi-bluetooth:before{content:'\e01e'}.oi-bold:before{content:'\e01f'}.oi-bolt:before{content:'\e020'}.oi-book:before{content:'\e021'}.oi-bookmark:before{content:'\e022'}.oi-box:before{content:'\e023'}.oi-briefcase:before{content:'\e024'}.oi-british-pound:before{content:'\
e025'}.oi-browser:before{content:'\e026'}.oi-brush:before{content:'\e027'}.oi-bug:before{content:'\e028'}.oi-bullhorn:before{content:'\e029'}.oi-calculator:before{content:'\e02a'}.oi-calendar:before{content:'\e02b'}.oi-camera-slr:before{content:'\e02c'}.oi-caret-bottom:before{content:'\e02d'}.oi-caret-left:before{content:'\e02e'}.oi-caret-right:before{content:'\e02f'}.oi-caret-top:before{content:'\e030'}.oi-cart:before{content:'\e031'}.oi-chat:before{content:'\e032'}.oi-check:before{content:'\e033'}.oi-chevron-bottom:before{content:'\e034'}.oi-chevron-left:before{content:'\e035'}.oi-chevron-right:before{content:'\e036'}.oi-chevron-top:before{content:'\e037'}.oi-circle-check:before{content:'\e038'}.oi-circle-x:before{content:'\e039'}.oi-clipboard:before{content:'\e03a'}.oi-clock:before{content:'\e03b'}.oi-cloud-download:before{content:'\e03c'}.oi-cloud-upload:before{content:'\e03d'}.oi-cloud:before{content:'\e03e'}.oi-cloudy:before{content:'\e03f'}.oi-code:before{content:'\e040'}.oi-cog:before{content:'\e041'}.oi-collapse-down:before{content:'\e042'}.oi-collapse-left:before{content:'\e043'}.oi-collapse-right:before{content:'\e044'}.oi-collapse-up:before{content:'\e045'}.oi-command:before{content:'\e046'}.oi-comment-square:before{content:'\e047'}.oi-compass:before{content:'\e048'}.oi-contrast:before{content:'\e049'}.oi-copywriting:before{content:'\e04a'}.oi-credit-card:before{content:'\e04b'}.oi-crop:before{content:'\e04c'}.oi-dashboard:before{content:'\e04d'}.oi-data-transfer-download:before{content:'\e04e'}.oi-data-transfer-upload:before{content:'\e04f'}.oi-delete:before{content:'\e050'}.oi-dial:before{content:'\e051'}.oi-document:before{content:'\e052'}.oi-dollar:before{content:'\e053'}.oi-double-quote-sans-left:before{content:'\e054'}.oi-double-quote-sans-right:before{content:'\e055'}.oi-double-quote-serif-left:before{content:'\e056'}.oi-double-quote-serif-right:before{content:'\e057'}.oi-droplet:before{content:'\e058'}.oi-eject:before{content:'\e059'}.oi-elevator:before{content:'\e05a'}.oi-ellipses:before{content:'\e05b'}.oi-envelope-closed:before{content:'\e05c'}.oi-envelope-open:before{content:'\e05d'}.oi-euro:before{content:'\e05e'}.oi-excerpt:before{content:'\e05f'}.oi-expand-down:before{content:'\e060'}.oi-expand-left:before{content:'\e061'}.oi-expand-right:before{content:'\e062'}.oi-expand-up:before{content:'\e063'}.oi-external-link:before{content:'\e064'}.oi-eye:before{content:'\e065'}.oi-eyedropper:before{content:'\e066'}.oi-file:before{content:'\e067'}.oi-fire:before{content:'\e068'}.oi-flag:before{content:'\e069'}.oi-flash:before{content:'\e06a'}.oi-folder:before{content:'\e06b'}.oi-fork:before{content:'\e06c'}.oi-fullscreen-enter:before{content:'\e06d'}.oi-fullscreen-exit:before{content:'\e06e'}.oi-globe:before{content:'\e06f'}.oi-graph:before{content:'\e070'}.oi-grid-four-up:before{content:'\e071'}.oi-grid-three-up:before{content:'\e072'}.oi-grid-two-up:before{content:'\e073'}.oi-hard-drive:before{content:'\e074'}.oi-header:before{content:'\e075'}.oi-headphones:before{content:'\e076'}.oi-heart:before{content:'\e077'}.oi-home:before{content:'\e078'}.oi-image:before{content:'\e079'}.oi-inbox:before{content:'\e07a'}.oi-infinity:before{content:'\e07b'}.oi-info:before{content:'\e07c'}.oi-italic:before{content:'\e07d'}.oi-justify-center:before{content:'\e07e'}.oi-justify-left:before{content:'\e07f'}.oi-justify-right:before{content:'\e080'}.oi-key:before{content:'\e081'}.oi-laptop:before{content:'\e082'}.oi-layers:before{content:'\e083'}.oi-lightbulb:before{content:'\e084'}.oi-link-
broken:before{content:'\e085'}.oi-link-intact:before{content:'\e086'}.oi-list-rich:before{content:'\e087'}.oi-list:before{content:'\e088'}.oi-location:before{content:'\e089'}.oi-lock-locked:before{content:'\e08a'}.oi-lock-unlocked:before{content:'\e08b'}.oi-loop-circular:before{content:'\e08c'}.oi-loop-square:before{content:'\e08d'}.oi-loop:before{content:'\e08e'}.oi-magnifying-glass:before{content:'\e08f'}.oi-map-marker:before{content:'\e090'}.oi-map:before{content:'\e091'}.oi-media-pause:before{content:'\e092'}.oi-media-play:before{content:'\e093'}.oi-media-record:before{content:'\e094'}.oi-media-skip-backward:before{content:'\e095'}.oi-media-skip-forward:before{content:'\e096'}.oi-media-step-backward:before{content:'\e097'}.oi-media-step-forward:before{content:'\e098'}.oi-media-stop:before{content:'\e099'}.oi-medical-cross:before{content:'\e09a'}.oi-menu:before{content:'\e09b'}.oi-microphone:before{content:'\e09c'}.oi-minus:before{content:'\e09d'}.oi-monitor:before{content:'\e09e'}.oi-moon:before{content:'\e09f'}.oi-move:before{content:'\e0a0'}.oi-musical-note:before{content:'\e0a1'}.oi-paperclip:before{content:'\e0a2'}.oi-pencil:before{content:'\e0a3'}.oi-people:before{content:'\e0a4'}.oi-person:before{content:'\e0a5'}.oi-phone:before{content:'\e0a6'}.oi-pie-chart:before{content:'\e0a7'}.oi-pin:before{content:'\e0a8'}.oi-play-circle:before{content:'\e0a9'}.oi-plus:before{content:'\e0aa'}.oi-power-standby:before{content:'\e0ab'}.oi-print:before{content:'\e0ac'}.oi-project:before{content:'\e0ad'}.oi-pulse:before{content:'\e0ae'}.oi-puzzle-piece:before{content:'\e0af'}.oi-question-mark:before{content:'\e0b0'}.oi-rain:before{content:'\e0b1'}.oi-random:before{content:'\e0b2'}.oi-reload:before{content:'\e0b3'}.oi-resize-both:before{content:'\e0b4'}.oi-resize-height:before{content:'\e0b5'}.oi-resize-width:before{content:'\e0b6'}.oi-rss-alt:before{content:'\e0b7'}.oi-rss:before{content:'\e0b8'}.oi-script:before{content:'\e0b9'}.oi-share-boxed:before{content:'\e0ba'}.oi-share:before{content:'\e0bb'}.oi-shield:before{content:'\e0bc'}.oi-signal:before{content:'\e0bd'}.oi-signpost:before{content:'\e0be'}.oi-sort-ascending:before{content:'\e0bf'}.oi-sort-descending:before{content:'\e0c0'}.oi-spreadsheet:before{content:'\e0c1'}.oi-star:before{content:'\e0c2'}.oi-sun:before{content:'\e0c3'}.oi-tablet:before{content:'\e0c4'}.oi-tag:before{content:'\e0c5'}.oi-tags:before{content:'\e0c6'}.oi-target:before{content:'\e0c7'}.oi-task:before{content:'\e0c8'}.oi-terminal:before{content:'\e0c9'}.oi-text:before{content:'\e0ca'}.oi-thumb-down:before{content:'\e0cb'}.oi-thumb-up:before{content:'\e0cc'}.oi-timer:before{content:'\e0cd'}.oi-transfer:before{content:'\e0ce'}.oi-trash:before{content:'\e0cf'}.oi-underline:before{content:'\e0d0'}.oi-vertical-align-bottom:before{content:'\e0d1'}.oi-vertical-align-center:before{content:'\e0d2'}.oi-vertical-align-top:before{content:'\e0d3'}.oi-video:before{content:'\e0d4'}.oi-volume-high:before{content:'\e0d5'}.oi-volume-low:before{content:'\e0d6'}.oi-volume-off:before{content:'\e0d7'}.oi-warning:before{content:'\e0d8'}.oi-wifi:before{content:'\e0d9'}.oi-wrench:before{content:'\e0da'}.oi-x:before{content:'\e0db'}.oi-yen:before{content:'\e0dc'}.oi-zoom-in:before{content:'\e0dd'}.oi-zoom-out:before{content:'\e0de'} \ No newline at end of file diff --git a/static/open-iconic/css/open-iconic-bootstrap.scss b/static/open-iconic/css/open-iconic-bootstrap.scss new file mode 100644 index 00000000..18f01e26 --- /dev/null +++ b/static/open-iconic/css/open-iconic-bootstrap.scss @@ 
-0,0 +1,958 @@ +/* Bootstrap */ + +/* Override Bootstrap default variable */ +$icon-font-path: '../fonts/' !default; + +@font-face { + font-family: 'Icons'; + src: url('#{$icon-font-path}open-iconic.eot'); + src: url('#{$icon-font-path}open-iconic.eot?#iconic-sm') format('embedded-opentype'), url('#{$icon-font-path}open-iconic.woff') format('woff'), url('#{$icon-font-path}open-iconic.ttf') format('truetype'), url('#{$icon-font-path}open-iconic.svg#iconic-sm') format('svg'); + font-weight: normal; + font-style: normal; +} + +// Catchall baseclass +.oi { + position: relative; + top: 1px; + display: inline-block; + font-family: 'Icons'; + font-style: normal; + font-weight: normal; + line-height: 1; + -webkit-font-smoothing: antialiased; + -moz-osx-font-smoothing: grayscale; + + + &:empty:before { + width: 1em; + text-align: center; + box-sizing: content-box; + } + + &.oi-align-center:before { + text-align: center; + } + + &.oi-align-left:before { + text-align: left; + } + + &.oi-align-right:before { + text-align: right; + } + + + &.oi-flip-horizontal:before { + -webkit-transform: scale(-1, 1); + -ms-transform: scale(-1, 1); + transform: scale(-1, 1); + } + + &.oi-flip-vertical:before { + -webkit-transform: scale(1, -1); + -ms-transform: scale(-1, 1); + transform: scale(1, -1); + } + + &.oi-flip-horizontal-vertical:before { + -webkit-transform: scale(-1, -1); + -ms-transform: scale(-1, 1); + transform: scale(-1, -1); + } +} + + + +.oi-account-login:before { + content:'\e000'; +} + +.oi-account-logout:before { + content:'\e001'; +} + +.oi-action-redo:before { + content:'\e002'; +} + +.oi-action-undo:before { + content:'\e003'; +} + +.oi-align-center:before { + content:'\e004'; +} + +.oi-align-left:before { + content:'\e005'; +} + +.oi-align-right:before { + content:'\e006'; +} + +.oi-aperture:before { + content:'\e007'; +} + +.oi-arrow-bottom:before { + content:'\e008'; +} + +.oi-arrow-circle-bottom:before { + content:'\e009'; +} + +.oi-arrow-circle-left:before { + content:'\e00a'; +} + +.oi-arrow-circle-right:before { + content:'\e00b'; +} + +.oi-arrow-circle-top:before { + content:'\e00c'; +} + +.oi-arrow-left:before { + content:'\e00d'; +} + +.oi-arrow-right:before { + content:'\e00e'; +} + +.oi-arrow-thick-bottom:before { + content:'\e00f'; +} + +.oi-arrow-thick-left:before { + content:'\e010'; +} + +.oi-arrow-thick-right:before { + content:'\e011'; +} + +.oi-arrow-thick-top:before { + content:'\e012'; +} + +.oi-arrow-top:before { + content:'\e013'; +} + +.oi-audio-spectrum:before { + content:'\e014'; +} + +.oi-audio:before { + content:'\e015'; +} + +.oi-badge:before { + content:'\e016'; +} + +.oi-ban:before { + content:'\e017'; +} + +.oi-bar-chart:before { + content:'\e018'; +} + +.oi-basket:before { + content:'\e019'; +} + +.oi-battery-empty:before { + content:'\e01a'; +} + +.oi-battery-full:before { + content:'\e01b'; +} + +.oi-beaker:before { + content:'\e01c'; +} + +.oi-bell:before { + content:'\e01d'; +} + +.oi-bluetooth:before { + content:'\e01e'; +} + +.oi-bold:before { + content:'\e01f'; +} + +.oi-bolt:before { + content:'\e020'; +} + +.oi-book:before { + content:'\e021'; +} + +.oi-bookmark:before { + content:'\e022'; +} + +.oi-box:before { + content:'\e023'; +} + +.oi-briefcase:before { + content:'\e024'; +} + +.oi-british-pound:before { + content:'\e025'; +} + +.oi-browser:before { + content:'\e026'; +} + +.oi-brush:before { + content:'\e027'; +} + +.oi-bug:before { + content:'\e028'; +} + +.oi-bullhorn:before { + content:'\e029'; +} + +.oi-calculator:before { + content:'\e02a'; 
+} + +.oi-calendar:before { + content:'\e02b'; +} + +.oi-camera-slr:before { + content:'\e02c'; +} + +.oi-caret-bottom:before { + content:'\e02d'; +} + +.oi-caret-left:before { + content:'\e02e'; +} + +.oi-caret-right:before { + content:'\e02f'; +} + +.oi-caret-top:before { + content:'\e030'; +} + +.oi-cart:before { + content:'\e031'; +} + +.oi-chat:before { + content:'\e032'; +} + +.oi-check:before { + content:'\e033'; +} + +.oi-chevron-bottom:before { + content:'\e034'; +} + +.oi-chevron-left:before { + content:'\e035'; +} + +.oi-chevron-right:before { + content:'\e036'; +} + +.oi-chevron-top:before { + content:'\e037'; +} + +.oi-circle-check:before { + content:'\e038'; +} + +.oi-circle-x:before { + content:'\e039'; +} + +.oi-clipboard:before { + content:'\e03a'; +} + +.oi-clock:before { + content:'\e03b'; +} + +.oi-cloud-download:before { + content:'\e03c'; +} + +.oi-cloud-upload:before { + content:'\e03d'; +} + +.oi-cloud:before { + content:'\e03e'; +} + +.oi-cloudy:before { + content:'\e03f'; +} + +.oi-code:before { + content:'\e040'; +} + +.oi-cog:before { + content:'\e041'; +} + +.oi-collapse-down:before { + content:'\e042'; +} + +.oi-collapse-left:before { + content:'\e043'; +} + +.oi-collapse-right:before { + content:'\e044'; +} + +.oi-collapse-up:before { + content:'\e045'; +} + +.oi-command:before { + content:'\e046'; +} + +.oi-comment-square:before { + content:'\e047'; +} + +.oi-compass:before { + content:'\e048'; +} + +.oi-contrast:before { + content:'\e049'; +} + +.oi-copywriting:before { + content:'\e04a'; +} + +.oi-credit-card:before { + content:'\e04b'; +} + +.oi-crop:before { + content:'\e04c'; +} + +.oi-dashboard:before { + content:'\e04d'; +} + +.oi-data-transfer-download:before { + content:'\e04e'; +} + +.oi-data-transfer-upload:before { + content:'\e04f'; +} + +.oi-delete:before { + content:'\e050'; +} + +.oi-dial:before { + content:'\e051'; +} + +.oi-document:before { + content:'\e052'; +} + +.oi-dollar:before { + content:'\e053'; +} + +.oi-double-quote-sans-left:before { + content:'\e054'; +} + +.oi-double-quote-sans-right:before { + content:'\e055'; +} + +.oi-double-quote-serif-left:before { + content:'\e056'; +} + +.oi-double-quote-serif-right:before { + content:'\e057'; +} + +.oi-droplet:before { + content:'\e058'; +} + +.oi-eject:before { + content:'\e059'; +} + +.oi-elevator:before { + content:'\e05a'; +} + +.oi-ellipses:before { + content:'\e05b'; +} + +.oi-envelope-closed:before { + content:'\e05c'; +} + +.oi-envelope-open:before { + content:'\e05d'; +} + +.oi-euro:before { + content:'\e05e'; +} + +.oi-excerpt:before { + content:'\e05f'; +} + +.oi-expand-down:before { + content:'\e060'; +} + +.oi-expand-left:before { + content:'\e061'; +} + +.oi-expand-right:before { + content:'\e062'; +} + +.oi-expand-up:before { + content:'\e063'; +} + +.oi-external-link:before { + content:'\e064'; +} + +.oi-eye:before { + content:'\e065'; +} + +.oi-eyedropper:before { + content:'\e066'; +} + +.oi-file:before { + content:'\e067'; +} + +.oi-fire:before { + content:'\e068'; +} + +.oi-flag:before { + content:'\e069'; +} + +.oi-flash:before { + content:'\e06a'; +} + +.oi-folder:before { + content:'\e06b'; +} + +.oi-fork:before { + content:'\e06c'; +} + +.oi-fullscreen-enter:before { + content:'\e06d'; +} + +.oi-fullscreen-exit:before { + content:'\e06e'; +} + +.oi-globe:before { + content:'\e06f'; +} + +.oi-graph:before { + content:'\e070'; +} + +.oi-grid-four-up:before { + content:'\e071'; +} + +.oi-grid-three-up:before { + content:'\e072'; +} + +.oi-grid-two-up:before { + 
content:'\e073'; +} + +.oi-hard-drive:before { + content:'\e074'; +} + +.oi-header:before { + content:'\e075'; +} + +.oi-headphones:before { + content:'\e076'; +} + +.oi-heart:before { + content:'\e077'; +} + +.oi-home:before { + content:'\e078'; +} + +.oi-image:before { + content:'\e079'; +} + +.oi-inbox:before { + content:'\e07a'; +} + +.oi-infinity:before { + content:'\e07b'; +} + +.oi-info:before { + content:'\e07c'; +} + +.oi-italic:before { + content:'\e07d'; +} + +.oi-justify-center:before { + content:'\e07e'; +} + +.oi-justify-left:before { + content:'\e07f'; +} + +.oi-justify-right:before { + content:'\e080'; +} + +.oi-key:before { + content:'\e081'; +} + +.oi-laptop:before { + content:'\e082'; +} + +.oi-layers:before { + content:'\e083'; +} + +.oi-lightbulb:before { + content:'\e084'; +} + +.oi-link-broken:before { + content:'\e085'; +} + +.oi-link-intact:before { + content:'\e086'; +} + +.oi-list-rich:before { + content:'\e087'; +} + +.oi-list:before { + content:'\e088'; +} + +.oi-location:before { + content:'\e089'; +} + +.oi-lock-locked:before { + content:'\e08a'; +} + +.oi-lock-unlocked:before { + content:'\e08b'; +} + +.oi-loop-circular:before { + content:'\e08c'; +} + +.oi-loop-square:before { + content:'\e08d'; +} + +.oi-loop:before { + content:'\e08e'; +} + +.oi-magnifying-glass:before { + content:'\e08f'; +} + +.oi-map-marker:before { + content:'\e090'; +} + +.oi-map:before { + content:'\e091'; +} + +.oi-media-pause:before { + content:'\e092'; +} + +.oi-media-play:before { + content:'\e093'; +} + +.oi-media-record:before { + content:'\e094'; +} + +.oi-media-skip-backward:before { + content:'\e095'; +} + +.oi-media-skip-forward:before { + content:'\e096'; +} + +.oi-media-step-backward:before { + content:'\e097'; +} + +.oi-media-step-forward:before { + content:'\e098'; +} + +.oi-media-stop:before { + content:'\e099'; +} + +.oi-medical-cross:before { + content:'\e09a'; +} + +.oi-menu:before { + content:'\e09b'; +} + +.oi-microphone:before { + content:'\e09c'; +} + +.oi-minus:before { + content:'\e09d'; +} + +.oi-monitor:before { + content:'\e09e'; +} + +.oi-moon:before { + content:'\e09f'; +} + +.oi-move:before { + content:'\e0a0'; +} + +.oi-musical-note:before { + content:'\e0a1'; +} + +.oi-paperclip:before { + content:'\e0a2'; +} + +.oi-pencil:before { + content:'\e0a3'; +} + +.oi-people:before { + content:'\e0a4'; +} + +.oi-person:before { + content:'\e0a5'; +} + +.oi-phone:before { + content:'\e0a6'; +} + +.oi-pie-chart:before { + content:'\e0a7'; +} + +.oi-pin:before { + content:'\e0a8'; +} + +.oi-play-circle:before { + content:'\e0a9'; +} + +.oi-plus:before { + content:'\e0aa'; +} + +.oi-power-standby:before { + content:'\e0ab'; +} + +.oi-print:before { + content:'\e0ac'; +} + +.oi-project:before { + content:'\e0ad'; +} + +.oi-pulse:before { + content:'\e0ae'; +} + +.oi-puzzle-piece:before { + content:'\e0af'; +} + +.oi-question-mark:before { + content:'\e0b0'; +} + +.oi-rain:before { + content:'\e0b1'; +} + +.oi-random:before { + content:'\e0b2'; +} + +.oi-reload:before { + content:'\e0b3'; +} + +.oi-resize-both:before { + content:'\e0b4'; +} + +.oi-resize-height:before { + content:'\e0b5'; +} + +.oi-resize-width:before { + content:'\e0b6'; +} + +.oi-rss-alt:before { + content:'\e0b7'; +} + +.oi-rss:before { + content:'\e0b8'; +} + +.oi-script:before { + content:'\e0b9'; +} + +.oi-share-boxed:before { + content:'\e0ba'; +} + +.oi-share:before { + content:'\e0bb'; +} + +.oi-shield:before { + content:'\e0bc'; +} + +.oi-signal:before { + content:'\e0bd'; +} + 
+.oi-signpost:before { + content:'\e0be'; +} + +.oi-sort-ascending:before { + content:'\e0bf'; +} + +.oi-sort-descending:before { + content:'\e0c0'; +} + +.oi-spreadsheet:before { + content:'\e0c1'; +} + +.oi-star:before { + content:'\e0c2'; +} + +.oi-sun:before { + content:'\e0c3'; +} + +.oi-tablet:before { + content:'\e0c4'; +} + +.oi-tag:before { + content:'\e0c5'; +} + +.oi-tags:before { + content:'\e0c6'; +} + +.oi-target:before { + content:'\e0c7'; +} + +.oi-task:before { + content:'\e0c8'; +} + +.oi-terminal:before { + content:'\e0c9'; +} + +.oi-text:before { + content:'\e0ca'; +} + +.oi-thumb-down:before { + content:'\e0cb'; +} + +.oi-thumb-up:before { + content:'\e0cc'; +} + +.oi-timer:before { + content:'\e0cd'; +} + +.oi-transfer:before { + content:'\e0ce'; +} + +.oi-trash:before { + content:'\e0cf'; +} + +.oi-underline:before { + content:'\e0d0'; +} + +.oi-vertical-align-bottom:before { + content:'\e0d1'; +} + +.oi-vertical-align-center:before { + content:'\e0d2'; +} + +.oi-vertical-align-top:before { + content:'\e0d3'; +} + +.oi-video:before { + content:'\e0d4'; +} + +.oi-volume-high:before { + content:'\e0d5'; +} + +.oi-volume-low:before { + content:'\e0d6'; +} + +.oi-volume-off:before { + content:'\e0d7'; +} + +.oi-warning:before { + content:'\e0d8'; +} + +.oi-wifi:before { + content:'\e0d9'; +} + +.oi-wrench:before { + content:'\e0da'; +} + +.oi-x:before { + content:'\e0db'; +} + +.oi-yen:before { + content:'\e0dc'; +} + +.oi-zoom-in:before { + content:'\e0dd'; +} + +.oi-zoom-out:before { + content:'\e0de'; +} + diff --git a/static/open-iconic/css/open-iconic-bootstrap.styl b/static/open-iconic/css/open-iconic-bootstrap.styl new file mode 100644 index 00000000..0afa2548 --- /dev/null +++ b/static/open-iconic/css/open-iconic-bootstrap.styl @@ -0,0 +1,954 @@ +/* Bootstrap */ + +@font-face + font-family 'Icons' + src url('../fonts/open-iconic.eot') + src url('../fonts/open-iconic.eot?#iconic-sm') format('embedded-opentype'), url('../fonts/open-iconic.woff') format('woff'), url('../fonts/open-iconic.ttf') format('truetype'), url('../fonts/open-iconic.svg#iconic-sm') format('svg') + font-weight normal + font-style normal + + +// Catchall baseclass +.oi + position relative + top 1px + display inline-block + font-family 'Icons' + font-style normal + font-weight normal + line-height 1 + -webkit-font-smoothing antialiased + -moz-osx-font-smoothing grayscale + + + &:empty:before + width 1em + text-align center + box-sizing content-box + + &.oi-align-center:before + text-align center + + + &.oi-align-left:before + text-align left + + + &.oi-align-right:before + text-align right + + + + &.oi-flip-horizontal:before + -webkit-transform scale(-1, 1) + -ms-transform scale(-1, 1) + transform scale(-1, 1) + + + &.oi-flip-vertical:before + -webkit-transform scale(1, -1) + -ms-transform scale(-1, 1) + transform scale(1, -1) + + + &.oi-flip-horizontal-vertical:before + -webkit-transform scale(-1, -1) + -ms-transform scale(-1, 1) + transform scale(-1, -1) + + + + + +.oi-account-login:before { + content'\e000' +} + +.oi-account-logout:before { + content'\e001' +} + +.oi-action-redo:before { + content'\e002' +} + +.oi-action-undo:before { + content'\e003' +} + +.oi-align-center:before { + content'\e004' +} + +.oi-align-left:before { + content'\e005' +} + +.oi-align-right:before { + content'\e006' +} + +.oi-aperture:before { + content'\e007' +} + +.oi-arrow-bottom:before { + content'\e008' +} + +.oi-arrow-circle-bottom:before { + content'\e009' +} + +.oi-arrow-circle-left:before { + content'\e00a' 
+} + +.oi-arrow-circle-right:before { + content'\e00b' +} + +.oi-arrow-circle-top:before { + content'\e00c' +} + +.oi-arrow-left:before { + content'\e00d' +} + +.oi-arrow-right:before { + content'\e00e' +} + +.oi-arrow-thick-bottom:before { + content'\e00f' +} + +.oi-arrow-thick-left:before { + content'\e010' +} + +.oi-arrow-thick-right:before { + content'\e011' +} + +.oi-arrow-thick-top:before { + content'\e012' +} + +.oi-arrow-top:before { + content'\e013' +} + +.oi-audio-spectrum:before { + content'\e014' +} + +.oi-audio:before { + content'\e015' +} + +.oi-badge:before { + content'\e016' +} + +.oi-ban:before { + content'\e017' +} + +.oi-bar-chart:before { + content'\e018' +} + +.oi-basket:before { + content'\e019' +} + +.oi-battery-empty:before { + content'\e01a' +} + +.oi-battery-full:before { + content'\e01b' +} + +.oi-beaker:before { + content'\e01c' +} + +.oi-bell:before { + content'\e01d' +} + +.oi-bluetooth:before { + content'\e01e' +} + +.oi-bold:before { + content'\e01f' +} + +.oi-bolt:before { + content'\e020' +} + +.oi-book:before { + content'\e021' +} + +.oi-bookmark:before { + content'\e022' +} + +.oi-box:before { + content'\e023' +} + +.oi-briefcase:before { + content'\e024' +} + +.oi-british-pound:before { + content'\e025' +} + +.oi-browser:before { + content'\e026' +} + +.oi-brush:before { + content'\e027' +} + +.oi-bug:before { + content'\e028' +} + +.oi-bullhorn:before { + content'\e029' +} + +.oi-calculator:before { + content'\e02a' +} + +.oi-calendar:before { + content'\e02b' +} + +.oi-camera-slr:before { + content'\e02c' +} + +.oi-caret-bottom:before { + content'\e02d' +} + +.oi-caret-left:before { + content'\e02e' +} + +.oi-caret-right:before { + content'\e02f' +} + +.oi-caret-top:before { + content'\e030' +} + +.oi-cart:before { + content'\e031' +} + +.oi-chat:before { + content'\e032' +} + +.oi-check:before { + content'\e033' +} + +.oi-chevron-bottom:before { + content'\e034' +} + +.oi-chevron-left:before { + content'\e035' +} + +.oi-chevron-right:before { + content'\e036' +} + +.oi-chevron-top:before { + content'\e037' +} + +.oi-circle-check:before { + content'\e038' +} + +.oi-circle-x:before { + content'\e039' +} + +.oi-clipboard:before { + content'\e03a' +} + +.oi-clock:before { + content'\e03b' +} + +.oi-cloud-download:before { + content'\e03c' +} + +.oi-cloud-upload:before { + content'\e03d' +} + +.oi-cloud:before { + content'\e03e' +} + +.oi-cloudy:before { + content'\e03f' +} + +.oi-code:before { + content'\e040' +} + +.oi-cog:before { + content'\e041' +} + +.oi-collapse-down:before { + content'\e042' +} + +.oi-collapse-left:before { + content'\e043' +} + +.oi-collapse-right:before { + content'\e044' +} + +.oi-collapse-up:before { + content'\e045' +} + +.oi-command:before { + content'\e046' +} + +.oi-comment-square:before { + content'\e047' +} + +.oi-compass:before { + content'\e048' +} + +.oi-contrast:before { + content'\e049' +} + +.oi-copywriting:before { + content'\e04a' +} + +.oi-credit-card:before { + content'\e04b' +} + +.oi-crop:before { + content'\e04c' +} + +.oi-dashboard:before { + content'\e04d' +} + +.oi-data-transfer-download:before { + content'\e04e' +} + +.oi-data-transfer-upload:before { + content'\e04f' +} + +.oi-delete:before { + content'\e050' +} + +.oi-dial:before { + content'\e051' +} + +.oi-document:before { + content'\e052' +} + +.oi-dollar:before { + content'\e053' +} + +.oi-double-quote-sans-left:before { + content'\e054' +} + +.oi-double-quote-sans-right:before { + content'\e055' +} + +.oi-double-quote-serif-left:before { + 
content'\e056' +} + +.oi-double-quote-serif-right:before { + content'\e057' +} + +.oi-droplet:before { + content'\e058' +} + +.oi-eject:before { + content'\e059' +} + +.oi-elevator:before { + content'\e05a' +} + +.oi-ellipses:before { + content'\e05b' +} + +.oi-envelope-closed:before { + content'\e05c' +} + +.oi-envelope-open:before { + content'\e05d' +} + +.oi-euro:before { + content'\e05e' +} + +.oi-excerpt:before { + content'\e05f' +} + +.oi-expand-down:before { + content'\e060' +} + +.oi-expand-left:before { + content'\e061' +} + +.oi-expand-right:before { + content'\e062' +} + +.oi-expand-up:before { + content'\e063' +} + +.oi-external-link:before { + content'\e064' +} + +.oi-eye:before { + content'\e065' +} + +.oi-eyedropper:before { + content'\e066' +} + +.oi-file:before { + content'\e067' +} + +.oi-fire:before { + content'\e068' +} + +.oi-flag:before { + content'\e069' +} + +.oi-flash:before { + content'\e06a' +} + +.oi-folder:before { + content'\e06b' +} + +.oi-fork:before { + content'\e06c' +} + +.oi-fullscreen-enter:before { + content'\e06d' +} + +.oi-fullscreen-exit:before { + content'\e06e' +} + +.oi-globe:before { + content'\e06f' +} + +.oi-graph:before { + content'\e070' +} + +.oi-grid-four-up:before { + content'\e071' +} + +.oi-grid-three-up:before { + content'\e072' +} + +.oi-grid-two-up:before { + content'\e073' +} + +.oi-hard-drive:before { + content'\e074' +} + +.oi-header:before { + content'\e075' +} + +.oi-headphones:before { + content'\e076' +} + +.oi-heart:before { + content'\e077' +} + +.oi-home:before { + content'\e078' +} + +.oi-image:before { + content'\e079' +} + +.oi-inbox:before { + content'\e07a' +} + +.oi-infinity:before { + content'\e07b' +} + +.oi-info:before { + content'\e07c' +} + +.oi-italic:before { + content'\e07d' +} + +.oi-justify-center:before { + content'\e07e' +} + +.oi-justify-left:before { + content'\e07f' +} + +.oi-justify-right:before { + content'\e080' +} + +.oi-key:before { + content'\e081' +} + +.oi-laptop:before { + content'\e082' +} + +.oi-layers:before { + content'\e083' +} + +.oi-lightbulb:before { + content'\e084' +} + +.oi-link-broken:before { + content'\e085' +} + +.oi-link-intact:before { + content'\e086' +} + +.oi-list-rich:before { + content'\e087' +} + +.oi-list:before { + content'\e088' +} + +.oi-location:before { + content'\e089' +} + +.oi-lock-locked:before { + content'\e08a' +} + +.oi-lock-unlocked:before { + content'\e08b' +} + +.oi-loop-circular:before { + content'\e08c' +} + +.oi-loop-square:before { + content'\e08d' +} + +.oi-loop:before { + content'\e08e' +} + +.oi-magnifying-glass:before { + content'\e08f' +} + +.oi-map-marker:before { + content'\e090' +} + +.oi-map:before { + content'\e091' +} + +.oi-media-pause:before { + content'\e092' +} + +.oi-media-play:before { + content'\e093' +} + +.oi-media-record:before { + content'\e094' +} + +.oi-media-skip-backward:before { + content'\e095' +} + +.oi-media-skip-forward:before { + content'\e096' +} + +.oi-media-step-backward:before { + content'\e097' +} + +.oi-media-step-forward:before { + content'\e098' +} + +.oi-media-stop:before { + content'\e099' +} + +.oi-medical-cross:before { + content'\e09a' +} + +.oi-menu:before { + content'\e09b' +} + +.oi-microphone:before { + content'\e09c' +} + +.oi-minus:before { + content'\e09d' +} + +.oi-monitor:before { + content'\e09e' +} + +.oi-moon:before { + content'\e09f' +} + +.oi-move:before { + content'\e0a0' +} + +.oi-musical-note:before { + content'\e0a1' +} + +.oi-paperclip:before { + content'\e0a2' +} + +.oi-pencil:before { + 
content'\e0a3' +} + +.oi-people:before { + content'\e0a4' +} + +.oi-person:before { + content'\e0a5' +} + +.oi-phone:before { + content'\e0a6' +} + +.oi-pie-chart:before { + content'\e0a7' +} + +.oi-pin:before { + content'\e0a8' +} + +.oi-play-circle:before { + content'\e0a9' +} + +.oi-plus:before { + content'\e0aa' +} + +.oi-power-standby:before { + content'\e0ab' +} + +.oi-print:before { + content'\e0ac' +} + +.oi-project:before { + content'\e0ad' +} + +.oi-pulse:before { + content'\e0ae' +} + +.oi-puzzle-piece:before { + content'\e0af' +} + +.oi-question-mark:before { + content'\e0b0' +} + +.oi-rain:before { + content'\e0b1' +} + +.oi-random:before { + content'\e0b2' +} + +.oi-reload:before { + content'\e0b3' +} + +.oi-resize-both:before { + content'\e0b4' +} + +.oi-resize-height:before { + content'\e0b5' +} + +.oi-resize-width:before { + content'\e0b6' +} + +.oi-rss-alt:before { + content'\e0b7' +} + +.oi-rss:before { + content'\e0b8' +} + +.oi-script:before { + content'\e0b9' +} + +.oi-share-boxed:before { + content'\e0ba' +} + +.oi-share:before { + content'\e0bb' +} + +.oi-shield:before { + content'\e0bc' +} + +.oi-signal:before { + content'\e0bd' +} + +.oi-signpost:before { + content'\e0be' +} + +.oi-sort-ascending:before { + content'\e0bf' +} + +.oi-sort-descending:before { + content'\e0c0' +} + +.oi-spreadsheet:before { + content'\e0c1' +} + +.oi-star:before { + content'\e0c2' +} + +.oi-sun:before { + content'\e0c3' +} + +.oi-tablet:before { + content'\e0c4' +} + +.oi-tag:before { + content'\e0c5' +} + +.oi-tags:before { + content'\e0c6' +} + +.oi-target:before { + content'\e0c7' +} + +.oi-task:before { + content'\e0c8' +} + +.oi-terminal:before { + content'\e0c9' +} + +.oi-text:before { + content'\e0ca' +} + +.oi-thumb-down:before { + content'\e0cb' +} + +.oi-thumb-up:before { + content'\e0cc' +} + +.oi-timer:before { + content'\e0cd' +} + +.oi-transfer:before { + content'\e0ce' +} + +.oi-trash:before { + content'\e0cf' +} + +.oi-underline:before { + content'\e0d0' +} + +.oi-vertical-align-bottom:before { + content'\e0d1' +} + +.oi-vertical-align-center:before { + content'\e0d2' +} + +.oi-vertical-align-top:before { + content'\e0d3' +} + +.oi-video:before { + content'\e0d4' +} + +.oi-volume-high:before { + content'\e0d5' +} + +.oi-volume-low:before { + content'\e0d6' +} + +.oi-volume-off:before { + content'\e0d7' +} + +.oi-warning:before { + content'\e0d8' +} + +.oi-wifi:before { + content'\e0d9' +} + +.oi-wrench:before { + content'\e0da' +} + +.oi-x:before { + content'\e0db' +} + +.oi-yen:before { + content'\e0dc' +} + +.oi-zoom-in:before { + content'\e0dd' +} + +.oi-zoom-out:before { + content'\e0de' +} + diff --git a/static/open-iconic/css/open-iconic-foundation.css b/static/open-iconic/css/open-iconic-foundation.css new file mode 100644 index 00000000..905a8212 --- /dev/null +++ b/static/open-iconic/css/open-iconic-foundation.css @@ -0,0 +1,1395 @@ +/* Foundation */ + +@font-face { + font-family: 'Icons'; + src: url('../fonts/open-iconic.eot'); + src: url('../fonts/open-iconic.eot?#iconic-sm') format('embedded-opentype'), url('../fonts/open-iconic.woff') format('woff'), url('../fonts/open-iconic.ttf') format('truetype'), url('../fonts/open-iconic.otf') format('opentype'), url('../fonts/open-iconic.svg#iconic-sm') format('svg'); + font-weight: normal; + font-style: normal; +} + + +.fi-account-login:before, + +.fi-account-logout:before, + +.fi-action-redo:before, + +.fi-action-undo:before, + +.fi-align-center:before, + +.fi-align-left:before, + +.fi-align-right:before, + 
+.fi-aperture:before, + +.fi-arrow-bottom:before, + +.fi-arrow-circle-bottom:before, + +.fi-arrow-circle-left:before, + +.fi-arrow-circle-right:before, + +.fi-arrow-circle-top:before, + +.fi-arrow-left:before, + +.fi-arrow-right:before, + +.fi-arrow-thick-bottom:before, + +.fi-arrow-thick-left:before, + +.fi-arrow-thick-right:before, + +.fi-arrow-thick-top:before, + +.fi-arrow-top:before, + +.fi-audio-spectrum:before, + +.fi-audio:before, + +.fi-badge:before, + +.fi-ban:before, + +.fi-bar-chart:before, + +.fi-basket:before, + +.fi-battery-empty:before, + +.fi-battery-full:before, + +.fi-beaker:before, + +.fi-bell:before, + +.fi-bluetooth:before, + +.fi-bold:before, + +.fi-bolt:before, + +.fi-book:before, + +.fi-bookmark:before, + +.fi-box:before, + +.fi-briefcase:before, + +.fi-british-pound:before, + +.fi-browser:before, + +.fi-brush:before, + +.fi-bug:before, + +.fi-bullhorn:before, + +.fi-calculator:before, + +.fi-calendar:before, + +.fi-camera-slr:before, + +.fi-caret-bottom:before, + +.fi-caret-left:before, + +.fi-caret-right:before, + +.fi-caret-top:before, + +.fi-cart:before, + +.fi-chat:before, + +.fi-check:before, + +.fi-chevron-bottom:before, + +.fi-chevron-left:before, + +.fi-chevron-right:before, + +.fi-chevron-top:before, + +.fi-circle-check:before, + +.fi-circle-x:before, + +.fi-clipboard:before, + +.fi-clock:before, + +.fi-cloud-download:before, + +.fi-cloud-upload:before, + +.fi-cloud:before, + +.fi-cloudy:before, + +.fi-code:before, + +.fi-cog:before, + +.fi-collapse-down:before, + +.fi-collapse-left:before, + +.fi-collapse-right:before, + +.fi-collapse-up:before, + +.fi-command:before, + +.fi-comment-square:before, + +.fi-compass:before, + +.fi-contrast:before, + +.fi-copywriting:before, + +.fi-credit-card:before, + +.fi-crop:before, + +.fi-dashboard:before, + +.fi-data-transfer-download:before, + +.fi-data-transfer-upload:before, + +.fi-delete:before, + +.fi-dial:before, + +.fi-document:before, + +.fi-dollar:before, + +.fi-double-quote-sans-left:before, + +.fi-double-quote-sans-right:before, + +.fi-double-quote-serif-left:before, + +.fi-double-quote-serif-right:before, + +.fi-droplet:before, + +.fi-eject:before, + +.fi-elevator:before, + +.fi-ellipses:before, + +.fi-envelope-closed:before, + +.fi-envelope-open:before, + +.fi-euro:before, + +.fi-excerpt:before, + +.fi-expand-down:before, + +.fi-expand-left:before, + +.fi-expand-right:before, + +.fi-expand-up:before, + +.fi-external-link:before, + +.fi-eye:before, + +.fi-eyedropper:before, + +.fi-file:before, + +.fi-fire:before, + +.fi-flag:before, + +.fi-flash:before, + +.fi-folder:before, + +.fi-fork:before, + +.fi-fullscreen-enter:before, + +.fi-fullscreen-exit:before, + +.fi-globe:before, + +.fi-graph:before, + +.fi-grid-four-up:before, + +.fi-grid-three-up:before, + +.fi-grid-two-up:before, + +.fi-hard-drive:before, + +.fi-header:before, + +.fi-headphones:before, + +.fi-heart:before, + +.fi-home:before, + +.fi-image:before, + +.fi-inbox:before, + +.fi-infinity:before, + +.fi-info:before, + +.fi-italic:before, + +.fi-justify-center:before, + +.fi-justify-left:before, + +.fi-justify-right:before, + +.fi-key:before, + +.fi-laptop:before, + +.fi-layers:before, + +.fi-lightbulb:before, + +.fi-link-broken:before, + +.fi-link-intact:before, + +.fi-list-rich:before, + +.fi-list:before, + +.fi-location:before, + +.fi-lock-locked:before, + +.fi-lock-unlocked:before, + +.fi-loop-circular:before, + +.fi-loop-square:before, + +.fi-loop:before, + +.fi-magnifying-glass:before, + +.fi-map-marker:before, + +.fi-map:before, + 
+.fi-media-pause:before, + +.fi-media-play:before, + +.fi-media-record:before, + +.fi-media-skip-backward:before, + +.fi-media-skip-forward:before, + +.fi-media-step-backward:before, + +.fi-media-step-forward:before, + +.fi-media-stop:before, + +.fi-medical-cross:before, + +.fi-menu:before, + +.fi-microphone:before, + +.fi-minus:before, + +.fi-monitor:before, + +.fi-moon:before, + +.fi-move:before, + +.fi-musical-note:before, + +.fi-paperclip:before, + +.fi-pencil:before, + +.fi-people:before, + +.fi-person:before, + +.fi-phone:before, + +.fi-pie-chart:before, + +.fi-pin:before, + +.fi-play-circle:before, + +.fi-plus:before, + +.fi-power-standby:before, + +.fi-print:before, + +.fi-project:before, + +.fi-pulse:before, + +.fi-puzzle-piece:before, + +.fi-question-mark:before, + +.fi-rain:before, + +.fi-random:before, + +.fi-reload:before, + +.fi-resize-both:before, + +.fi-resize-height:before, + +.fi-resize-width:before, + +.fi-rss-alt:before, + +.fi-rss:before, + +.fi-script:before, + +.fi-share-boxed:before, + +.fi-share:before, + +.fi-shield:before, + +.fi-signal:before, + +.fi-signpost:before, + +.fi-sort-ascending:before, + +.fi-sort-descending:before, + +.fi-spreadsheet:before, + +.fi-star:before, + +.fi-sun:before, + +.fi-tablet:before, + +.fi-tag:before, + +.fi-tags:before, + +.fi-target:before, + +.fi-task:before, + +.fi-terminal:before, + +.fi-text:before, + +.fi-thumb-down:before, + +.fi-thumb-up:before, + +.fi-timer:before, + +.fi-transfer:before, + +.fi-trash:before, + +.fi-underline:before, + +.fi-vertical-align-bottom:before, + +.fi-vertical-align-center:before, + +.fi-vertical-align-top:before, + +.fi-video:before, + +.fi-volume-high:before, + +.fi-volume-low:before, + +.fi-volume-off:before, + +.fi-warning:before, + +.fi-wifi:before, + +.fi-wrench:before, + +.fi-x:before, + +.fi-yen:before, + +.fi-zoom-in:before, + +.fi-zoom-out:before + { + font-family: 'Icons'; + font-style: normal; + font-weight: normal; + font-variant: normal; + text-transform: none; + line-height: 1; + -webkit-font-smoothing: antialiased; + -moz-osx-font-smoothing: grayscale; + display: inline-block; + text-decoration: inherit; +} + + +[class*='fi-'].oi-align-center:before { + text-align: center; +} + +[class*='fi-'].oi-align-left:before { + text-align: left; +} + +[class*='fi-'].oi-align-right:before { + text-align: right; +} + + +[class*='fi-'].oi-flip-horizontal:before { + -webkit-transform: scale(-1, 1); + -ms-transform: scale(-1, 1); + transform: scale(-1, 1); +} + +[class*='fi-'].oi-flip-vertical:before { + -webkit-transform: scale(1, -1); + -ms-transform: scale(-1, 1); + transform: scale(1, -1); +} + +[class*='fi-'].oi-flip-horizontal-vertical:before { + -webkit-transform: scale(-1, -1); + -ms-transform: scale(-1, 1); + transform: scale(-1, -1); +} + + + +.fi-account-login:before { + content:'\e000'; +} + +.fi-account-logout:before { + content:'\e001'; +} + +.fi-action-redo:before { + content:'\e002'; +} + +.fi-action-undo:before { + content:'\e003'; +} + +.fi-align-center:before { + content:'\e004'; +} + +.fi-align-left:before { + content:'\e005'; +} + +.fi-align-right:before { + content:'\e006'; +} + +.fi-aperture:before { + content:'\e007'; +} + +.fi-arrow-bottom:before { + content:'\e008'; +} + +.fi-arrow-circle-bottom:before { + content:'\e009'; +} + +.fi-arrow-circle-left:before { + content:'\e00a'; +} + +.fi-arrow-circle-right:before { + content:'\e00b'; +} + +.fi-arrow-circle-top:before { + content:'\e00c'; +} + +.fi-arrow-left:before { + content:'\e00d'; +} + +.fi-arrow-right:before { + 
content:'\e00e'; +} + +.fi-arrow-thick-bottom:before { + content:'\e00f'; +} + +.fi-arrow-thick-left:before { + content:'\e010'; +} + +.fi-arrow-thick-right:before { + content:'\e011'; +} + +.fi-arrow-thick-top:before { + content:'\e012'; +} + +.fi-arrow-top:before { + content:'\e013'; +} + +.fi-audio-spectrum:before { + content:'\e014'; +} + +.fi-audio:before { + content:'\e015'; +} + +.fi-badge:before { + content:'\e016'; +} + +.fi-ban:before { + content:'\e017'; +} + +.fi-bar-chart:before { + content:'\e018'; +} + +.fi-basket:before { + content:'\e019'; +} + +.fi-battery-empty:before { + content:'\e01a'; +} + +.fi-battery-full:before { + content:'\e01b'; +} + +.fi-beaker:before { + content:'\e01c'; +} + +.fi-bell:before { + content:'\e01d'; +} + +.fi-bluetooth:before { + content:'\e01e'; +} + +.fi-bold:before { + content:'\e01f'; +} + +.fi-bolt:before { + content:'\e020'; +} + +.fi-book:before { + content:'\e021'; +} + +.fi-bookmark:before { + content:'\e022'; +} + +.fi-box:before { + content:'\e023'; +} + +.fi-briefcase:before { + content:'\e024'; +} + +.fi-british-pound:before { + content:'\e025'; +} + +.fi-browser:before { + content:'\e026'; +} + +.fi-brush:before { + content:'\e027'; +} + +.fi-bug:before { + content:'\e028'; +} + +.fi-bullhorn:before { + content:'\e029'; +} + +.fi-calculator:before { + content:'\e02a'; +} + +.fi-calendar:before { + content:'\e02b'; +} + +.fi-camera-slr:before { + content:'\e02c'; +} + +.fi-caret-bottom:before { + content:'\e02d'; +} + +.fi-caret-left:before { + content:'\e02e'; +} + +.fi-caret-right:before { + content:'\e02f'; +} + +.fi-caret-top:before { + content:'\e030'; +} + +.fi-cart:before { + content:'\e031'; +} + +.fi-chat:before { + content:'\e032'; +} + +.fi-check:before { + content:'\e033'; +} + +.fi-chevron-bottom:before { + content:'\e034'; +} + +.fi-chevron-left:before { + content:'\e035'; +} + +.fi-chevron-right:before { + content:'\e036'; +} + +.fi-chevron-top:before { + content:'\e037'; +} + +.fi-circle-check:before { + content:'\e038'; +} + +.fi-circle-x:before { + content:'\e039'; +} + +.fi-clipboard:before { + content:'\e03a'; +} + +.fi-clock:before { + content:'\e03b'; +} + +.fi-cloud-download:before { + content:'\e03c'; +} + +.fi-cloud-upload:before { + content:'\e03d'; +} + +.fi-cloud:before { + content:'\e03e'; +} + +.fi-cloudy:before { + content:'\e03f'; +} + +.fi-code:before { + content:'\e040'; +} + +.fi-cog:before { + content:'\e041'; +} + +.fi-collapse-down:before { + content:'\e042'; +} + +.fi-collapse-left:before { + content:'\e043'; +} + +.fi-collapse-right:before { + content:'\e044'; +} + +.fi-collapse-up:before { + content:'\e045'; +} + +.fi-command:before { + content:'\e046'; +} + +.fi-comment-square:before { + content:'\e047'; +} + +.fi-compass:before { + content:'\e048'; +} + +.fi-contrast:before { + content:'\e049'; +} + +.fi-copywriting:before { + content:'\e04a'; +} + +.fi-credit-card:before { + content:'\e04b'; +} + +.fi-crop:before { + content:'\e04c'; +} + +.fi-dashboard:before { + content:'\e04d'; +} + +.fi-data-transfer-download:before { + content:'\e04e'; +} + +.fi-data-transfer-upload:before { + content:'\e04f'; +} + +.fi-delete:before { + content:'\e050'; +} + +.fi-dial:before { + content:'\e051'; +} + +.fi-document:before { + content:'\e052'; +} + +.fi-dollar:before { + content:'\e053'; +} + +.fi-double-quote-sans-left:before { + content:'\e054'; +} + +.fi-double-quote-sans-right:before { + content:'\e055'; +} + +.fi-double-quote-serif-left:before { + content:'\e056'; +} + 
+.fi-double-quote-serif-right:before { + content:'\e057'; +} + +.fi-droplet:before { + content:'\e058'; +} + +.fi-eject:before { + content:'\e059'; +} + +.fi-elevator:before { + content:'\e05a'; +} + +.fi-ellipses:before { + content:'\e05b'; +} + +.fi-envelope-closed:before { + content:'\e05c'; +} + +.fi-envelope-open:before { + content:'\e05d'; +} + +.fi-euro:before { + content:'\e05e'; +} + +.fi-excerpt:before { + content:'\e05f'; +} + +.fi-expand-down:before { + content:'\e060'; +} + +.fi-expand-left:before { + content:'\e061'; +} + +.fi-expand-right:before { + content:'\e062'; +} + +.fi-expand-up:before { + content:'\e063'; +} + +.fi-external-link:before { + content:'\e064'; +} + +.fi-eye:before { + content:'\e065'; +} + +.fi-eyedropper:before { + content:'\e066'; +} + +.fi-file:before { + content:'\e067'; +} + +.fi-fire:before { + content:'\e068'; +} + +.fi-flag:before { + content:'\e069'; +} + +.fi-flash:before { + content:'\e06a'; +} + +.fi-folder:before { + content:'\e06b'; +} + +.fi-fork:before { + content:'\e06c'; +} + +.fi-fullscreen-enter:before { + content:'\e06d'; +} + +.fi-fullscreen-exit:before { + content:'\e06e'; +} + +.fi-globe:before { + content:'\e06f'; +} + +.fi-graph:before { + content:'\e070'; +} + +.fi-grid-four-up:before { + content:'\e071'; +} + +.fi-grid-three-up:before { + content:'\e072'; +} + +.fi-grid-two-up:before { + content:'\e073'; +} + +.fi-hard-drive:before { + content:'\e074'; +} + +.fi-header:before { + content:'\e075'; +} + +.fi-headphones:before { + content:'\e076'; +} + +.fi-heart:before { + content:'\e077'; +} + +.fi-home:before { + content:'\e078'; +} + +.fi-image:before { + content:'\e079'; +} + +.fi-inbox:before { + content:'\e07a'; +} + +.fi-infinity:before { + content:'\e07b'; +} + +.fi-info:before { + content:'\e07c'; +} + +.fi-italic:before { + content:'\e07d'; +} + +.fi-justify-center:before { + content:'\e07e'; +} + +.fi-justify-left:before { + content:'\e07f'; +} + +.fi-justify-right:before { + content:'\e080'; +} + +.fi-key:before { + content:'\e081'; +} + +.fi-laptop:before { + content:'\e082'; +} + +.fi-layers:before { + content:'\e083'; +} + +.fi-lightbulb:before { + content:'\e084'; +} + +.fi-link-broken:before { + content:'\e085'; +} + +.fi-link-intact:before { + content:'\e086'; +} + +.fi-list-rich:before { + content:'\e087'; +} + +.fi-list:before { + content:'\e088'; +} + +.fi-location:before { + content:'\e089'; +} + +.fi-lock-locked:before { + content:'\e08a'; +} + +.fi-lock-unlocked:before { + content:'\e08b'; +} + +.fi-loop-circular:before { + content:'\e08c'; +} + +.fi-loop-square:before { + content:'\e08d'; +} + +.fi-loop:before { + content:'\e08e'; +} + +.fi-magnifying-glass:before { + content:'\e08f'; +} + +.fi-map-marker:before { + content:'\e090'; +} + +.fi-map:before { + content:'\e091'; +} + +.fi-media-pause:before { + content:'\e092'; +} + +.fi-media-play:before { + content:'\e093'; +} + +.fi-media-record:before { + content:'\e094'; +} + +.fi-media-skip-backward:before { + content:'\e095'; +} + +.fi-media-skip-forward:before { + content:'\e096'; +} + +.fi-media-step-backward:before { + content:'\e097'; +} + +.fi-media-step-forward:before { + content:'\e098'; +} + +.fi-media-stop:before { + content:'\e099'; +} + +.fi-medical-cross:before { + content:'\e09a'; +} + +.fi-menu:before { + content:'\e09b'; +} + +.fi-microphone:before { + content:'\e09c'; +} + +.fi-minus:before { + content:'\e09d'; +} + +.fi-monitor:before { + content:'\e09e'; +} + +.fi-moon:before { + content:'\e09f'; +} + +.fi-move:before { + 
content:'\e0a0'; +} + +.fi-musical-note:before { + content:'\e0a1'; +} + +.fi-paperclip:before { + content:'\e0a2'; +} + +.fi-pencil:before { + content:'\e0a3'; +} + +.fi-people:before { + content:'\e0a4'; +} + +.fi-person:before { + content:'\e0a5'; +} + +.fi-phone:before { + content:'\e0a6'; +} + +.fi-pie-chart:before { + content:'\e0a7'; +} + +.fi-pin:before { + content:'\e0a8'; +} + +.fi-play-circle:before { + content:'\e0a9'; +} + +.fi-plus:before { + content:'\e0aa'; +} + +.fi-power-standby:before { + content:'\e0ab'; +} + +.fi-print:before { + content:'\e0ac'; +} + +.fi-project:before { + content:'\e0ad'; +} + +.fi-pulse:before { + content:'\e0ae'; +} + +.fi-puzzle-piece:before { + content:'\e0af'; +} + +.fi-question-mark:before { + content:'\e0b0'; +} + +.fi-rain:before { + content:'\e0b1'; +} + +.fi-random:before { + content:'\e0b2'; +} + +.fi-reload:before { + content:'\e0b3'; +} + +.fi-resize-both:before { + content:'\e0b4'; +} + +.fi-resize-height:before { + content:'\e0b5'; +} + +.fi-resize-width:before { + content:'\e0b6'; +} + +.fi-rss-alt:before { + content:'\e0b7'; +} + +.fi-rss:before { + content:'\e0b8'; +} + +.fi-script:before { + content:'\e0b9'; +} + +.fi-share-boxed:before { + content:'\e0ba'; +} + +.fi-share:before { + content:'\e0bb'; +} + +.fi-shield:before { + content:'\e0bc'; +} + +.fi-signal:before { + content:'\e0bd'; +} + +.fi-signpost:before { + content:'\e0be'; +} + +.fi-sort-ascending:before { + content:'\e0bf'; +} + +.fi-sort-descending:before { + content:'\e0c0'; +} + +.fi-spreadsheet:before { + content:'\e0c1'; +} + +.fi-star:before { + content:'\e0c2'; +} + +.fi-sun:before { + content:'\e0c3'; +} + +.fi-tablet:before { + content:'\e0c4'; +} + +.fi-tag:before { + content:'\e0c5'; +} + +.fi-tags:before { + content:'\e0c6'; +} + +.fi-target:before { + content:'\e0c7'; +} + +.fi-task:before { + content:'\e0c8'; +} + +.fi-terminal:before { + content:'\e0c9'; +} + +.fi-text:before { + content:'\e0ca'; +} + +.fi-thumb-down:before { + content:'\e0cb'; +} + +.fi-thumb-up:before { + content:'\e0cc'; +} + +.fi-timer:before { + content:'\e0cd'; +} + +.fi-transfer:before { + content:'\e0ce'; +} + +.fi-trash:before { + content:'\e0cf'; +} + +.fi-underline:before { + content:'\e0d0'; +} + +.fi-vertical-align-bottom:before { + content:'\e0d1'; +} + +.fi-vertical-align-center:before { + content:'\e0d2'; +} + +.fi-vertical-align-top:before { + content:'\e0d3'; +} + +.fi-video:before { + content:'\e0d4'; +} + +.fi-volume-high:before { + content:'\e0d5'; +} + +.fi-volume-low:before { + content:'\e0d6'; +} + +.fi-volume-off:before { + content:'\e0d7'; +} + +.fi-warning:before { + content:'\e0d8'; +} + +.fi-wifi:before { + content:'\e0d9'; +} + +.fi-wrench:before { + content:'\e0da'; +} + +.fi-x:before { + content:'\e0db'; +} + +.fi-yen:before { + content:'\e0dc'; +} + +.fi-zoom-in:before { + content:'\e0dd'; +} + +.fi-zoom-out:before { + content:'\e0de'; +} + diff --git a/static/open-iconic/css/open-iconic-foundation.less b/static/open-iconic/css/open-iconic-foundation.less new file mode 100644 index 00000000..deabf26f --- /dev/null +++ b/static/open-iconic/css/open-iconic-foundation.less @@ -0,0 +1,1397 @@ +/* Foundation */ + +/* Font path variable */ +@icon-font-path: '../fonts/'; + +@font-face { + font-family: 'Icons'; + src: url('@{icon-font-path}open-iconic.eot'); + src: url('@{icon-font-path}open-iconic.eot?#iconic-sm') format('embedded-opentype'), url('@{icon-font-path}open-iconic.woff') format('woff'), url('@{icon-font-path}open-iconic.ttf') format('truetype'), 
url('@{icon-font-path}open-iconic.otf') format('opentype'), url('@{icon-font-path}open-iconic.svg#iconic-sm') format('svg'); + font-weight: normal; + font-style: normal; +} + + +.fi-account-login:before, + +.fi-account-logout:before, + +.fi-action-redo:before, + +.fi-action-undo:before, + +.fi-align-center:before, + +.fi-align-left:before, + +.fi-align-right:before, + +.fi-aperture:before, + +.fi-arrow-bottom:before, + +.fi-arrow-circle-bottom:before, + +.fi-arrow-circle-left:before, + +.fi-arrow-circle-right:before, + +.fi-arrow-circle-top:before, + +.fi-arrow-left:before, + +.fi-arrow-right:before, + +.fi-arrow-thick-bottom:before, + +.fi-arrow-thick-left:before, + +.fi-arrow-thick-right:before, + +.fi-arrow-thick-top:before, + +.fi-arrow-top:before, + +.fi-audio-spectrum:before, + +.fi-audio:before, + +.fi-badge:before, + +.fi-ban:before, + +.fi-bar-chart:before, + +.fi-basket:before, + +.fi-battery-empty:before, + +.fi-battery-full:before, + +.fi-beaker:before, + +.fi-bell:before, + +.fi-bluetooth:before, + +.fi-bold:before, + +.fi-bolt:before, + +.fi-book:before, + +.fi-bookmark:before, + +.fi-box:before, + +.fi-briefcase:before, + +.fi-british-pound:before, + +.fi-browser:before, + +.fi-brush:before, + +.fi-bug:before, + +.fi-bullhorn:before, + +.fi-calculator:before, + +.fi-calendar:before, + +.fi-camera-slr:before, + +.fi-caret-bottom:before, + +.fi-caret-left:before, + +.fi-caret-right:before, + +.fi-caret-top:before, + +.fi-cart:before, + +.fi-chat:before, + +.fi-check:before, + +.fi-chevron-bottom:before, + +.fi-chevron-left:before, + +.fi-chevron-right:before, + +.fi-chevron-top:before, + +.fi-circle-check:before, + +.fi-circle-x:before, + +.fi-clipboard:before, + +.fi-clock:before, + +.fi-cloud-download:before, + +.fi-cloud-upload:before, + +.fi-cloud:before, + +.fi-cloudy:before, + +.fi-code:before, + +.fi-cog:before, + +.fi-collapse-down:before, + +.fi-collapse-left:before, + +.fi-collapse-right:before, + +.fi-collapse-up:before, + +.fi-command:before, + +.fi-comment-square:before, + +.fi-compass:before, + +.fi-contrast:before, + +.fi-copywriting:before, + +.fi-credit-card:before, + +.fi-crop:before, + +.fi-dashboard:before, + +.fi-data-transfer-download:before, + +.fi-data-transfer-upload:before, + +.fi-delete:before, + +.fi-dial:before, + +.fi-document:before, + +.fi-dollar:before, + +.fi-double-quote-sans-left:before, + +.fi-double-quote-sans-right:before, + +.fi-double-quote-serif-left:before, + +.fi-double-quote-serif-right:before, + +.fi-droplet:before, + +.fi-eject:before, + +.fi-elevator:before, + +.fi-ellipses:before, + +.fi-envelope-closed:before, + +.fi-envelope-open:before, + +.fi-euro:before, + +.fi-excerpt:before, + +.fi-expand-down:before, + +.fi-expand-left:before, + +.fi-expand-right:before, + +.fi-expand-up:before, + +.fi-external-link:before, + +.fi-eye:before, + +.fi-eyedropper:before, + +.fi-file:before, + +.fi-fire:before, + +.fi-flag:before, + +.fi-flash:before, + +.fi-folder:before, + +.fi-fork:before, + +.fi-fullscreen-enter:before, + +.fi-fullscreen-exit:before, + +.fi-globe:before, + +.fi-graph:before, + +.fi-grid-four-up:before, + +.fi-grid-three-up:before, + +.fi-grid-two-up:before, + +.fi-hard-drive:before, + +.fi-header:before, + +.fi-headphones:before, + +.fi-heart:before, + +.fi-home:before, + +.fi-image:before, + +.fi-inbox:before, + +.fi-infinity:before, + +.fi-info:before, + +.fi-italic:before, + +.fi-justify-center:before, + +.fi-justify-left:before, + +.fi-justify-right:before, + +.fi-key:before, + +.fi-laptop:before, + 
+.fi-layers:before, + +.fi-lightbulb:before, + +.fi-link-broken:before, + +.fi-link-intact:before, + +.fi-list-rich:before, + +.fi-list:before, + +.fi-location:before, + +.fi-lock-locked:before, + +.fi-lock-unlocked:before, + +.fi-loop-circular:before, + +.fi-loop-square:before, + +.fi-loop:before, + +.fi-magnifying-glass:before, + +.fi-map-marker:before, + +.fi-map:before, + +.fi-media-pause:before, + +.fi-media-play:before, + +.fi-media-record:before, + +.fi-media-skip-backward:before, + +.fi-media-skip-forward:before, + +.fi-media-step-backward:before, + +.fi-media-step-forward:before, + +.fi-media-stop:before, + +.fi-medical-cross:before, + +.fi-menu:before, + +.fi-microphone:before, + +.fi-minus:before, + +.fi-monitor:before, + +.fi-moon:before, + +.fi-move:before, + +.fi-musical-note:before, + +.fi-paperclip:before, + +.fi-pencil:before, + +.fi-people:before, + +.fi-person:before, + +.fi-phone:before, + +.fi-pie-chart:before, + +.fi-pin:before, + +.fi-play-circle:before, + +.fi-plus:before, + +.fi-power-standby:before, + +.fi-print:before, + +.fi-project:before, + +.fi-pulse:before, + +.fi-puzzle-piece:before, + +.fi-question-mark:before, + +.fi-rain:before, + +.fi-random:before, + +.fi-reload:before, + +.fi-resize-both:before, + +.fi-resize-height:before, + +.fi-resize-width:before, + +.fi-rss-alt:before, + +.fi-rss:before, + +.fi-script:before, + +.fi-share-boxed:before, + +.fi-share:before, + +.fi-shield:before, + +.fi-signal:before, + +.fi-signpost:before, + +.fi-sort-ascending:before, + +.fi-sort-descending:before, + +.fi-spreadsheet:before, + +.fi-star:before, + +.fi-sun:before, + +.fi-tablet:before, + +.fi-tag:before, + +.fi-tags:before, + +.fi-target:before, + +.fi-task:before, + +.fi-terminal:before, + +.fi-text:before, + +.fi-thumb-down:before, + +.fi-thumb-up:before, + +.fi-timer:before, + +.fi-transfer:before, + +.fi-trash:before, + +.fi-underline:before, + +.fi-vertical-align-bottom:before, + +.fi-vertical-align-center:before, + +.fi-vertical-align-top:before, + +.fi-video:before, + +.fi-volume-high:before, + +.fi-volume-low:before, + +.fi-volume-off:before, + +.fi-warning:before, + +.fi-wifi:before, + +.fi-wrench:before, + +.fi-x:before, + +.fi-yen:before, + +.fi-zoom-in:before, + +.fi-zoom-out:before + { + font-family: 'Icons'; + font-style: normal; + font-weight: normal; + font-variant: normal; + text-transform: none; + line-height: 1; + -webkit-font-smoothing: antialiased; + -moz-osx-font-smoothing: grayscale; + display: inline-block; + text-decoration: inherit; +} + +[class*='fi-'].oi-align-center:before { + text-align: center; +} + +[class*='fi-'].oi-align-left:before { + text-align: left; +} + +[class*='fi-'].oi-align-right:before { + text-align: right; +} + + +[class*='fi-'].oi-flip-horizontal:before { + -webkit-transform: scale(-1, 1); + -ms-transform: scale(-1, 1); + transform: scale(-1, 1); +} + +[class*='fi-'].oi-flip-vertical:before { + -webkit-transform: scale(1, -1); + -ms-transform: scale(-1, 1); + transform: scale(1, -1); +} + +[class*='fi-'].oi-flip-horizontal-vertical:before { + -webkit-transform: scale(-1, -1); + -ms-transform: scale(-1, 1); + transform: scale(-1, -1); +} + + + +.fi-account-login:before { + content:'\e000'; +} + +.fi-account-logout:before { + content:'\e001'; +} + +.fi-action-redo:before { + content:'\e002'; +} + +.fi-action-undo:before { + content:'\e003'; +} + +.fi-align-center:before { + content:'\e004'; +} + +.fi-align-left:before { + content:'\e005'; +} + +.fi-align-right:before { + content:'\e006'; +} + +.fi-aperture:before { + 
content:'\e007'; +} + +.fi-arrow-bottom:before { + content:'\e008'; +} + +.fi-arrow-circle-bottom:before { + content:'\e009'; +} + +.fi-arrow-circle-left:before { + content:'\e00a'; +} + +.fi-arrow-circle-right:before { + content:'\e00b'; +} + +.fi-arrow-circle-top:before { + content:'\e00c'; +} + +.fi-arrow-left:before { + content:'\e00d'; +} + +.fi-arrow-right:before { + content:'\e00e'; +} + +.fi-arrow-thick-bottom:before { + content:'\e00f'; +} + +.fi-arrow-thick-left:before { + content:'\e010'; +} + +.fi-arrow-thick-right:before { + content:'\e011'; +} + +.fi-arrow-thick-top:before { + content:'\e012'; +} + +.fi-arrow-top:before { + content:'\e013'; +} + +.fi-audio-spectrum:before { + content:'\e014'; +} + +.fi-audio:before { + content:'\e015'; +} + +.fi-badge:before { + content:'\e016'; +} + +.fi-ban:before { + content:'\e017'; +} + +.fi-bar-chart:before { + content:'\e018'; +} + +.fi-basket:before { + content:'\e019'; +} + +.fi-battery-empty:before { + content:'\e01a'; +} + +.fi-battery-full:before { + content:'\e01b'; +} + +.fi-beaker:before { + content:'\e01c'; +} + +.fi-bell:before { + content:'\e01d'; +} + +.fi-bluetooth:before { + content:'\e01e'; +} + +.fi-bold:before { + content:'\e01f'; +} + +.fi-bolt:before { + content:'\e020'; +} + +.fi-book:before { + content:'\e021'; +} + +.fi-bookmark:before { + content:'\e022'; +} + +.fi-box:before { + content:'\e023'; +} + +.fi-briefcase:before { + content:'\e024'; +} + +.fi-british-pound:before { + content:'\e025'; +} + +.fi-browser:before { + content:'\e026'; +} + +.fi-brush:before { + content:'\e027'; +} + +.fi-bug:before { + content:'\e028'; +} + +.fi-bullhorn:before { + content:'\e029'; +} + +.fi-calculator:before { + content:'\e02a'; +} + +.fi-calendar:before { + content:'\e02b'; +} + +.fi-camera-slr:before { + content:'\e02c'; +} + +.fi-caret-bottom:before { + content:'\e02d'; +} + +.fi-caret-left:before { + content:'\e02e'; +} + +.fi-caret-right:before { + content:'\e02f'; +} + +.fi-caret-top:before { + content:'\e030'; +} + +.fi-cart:before { + content:'\e031'; +} + +.fi-chat:before { + content:'\e032'; +} + +.fi-check:before { + content:'\e033'; +} + +.fi-chevron-bottom:before { + content:'\e034'; +} + +.fi-chevron-left:before { + content:'\e035'; +} + +.fi-chevron-right:before { + content:'\e036'; +} + +.fi-chevron-top:before { + content:'\e037'; +} + +.fi-circle-check:before { + content:'\e038'; +} + +.fi-circle-x:before { + content:'\e039'; +} + +.fi-clipboard:before { + content:'\e03a'; +} + +.fi-clock:before { + content:'\e03b'; +} + +.fi-cloud-download:before { + content:'\e03c'; +} + +.fi-cloud-upload:before { + content:'\e03d'; +} + +.fi-cloud:before { + content:'\e03e'; +} + +.fi-cloudy:before { + content:'\e03f'; +} + +.fi-code:before { + content:'\e040'; +} + +.fi-cog:before { + content:'\e041'; +} + +.fi-collapse-down:before { + content:'\e042'; +} + +.fi-collapse-left:before { + content:'\e043'; +} + +.fi-collapse-right:before { + content:'\e044'; +} + +.fi-collapse-up:before { + content:'\e045'; +} + +.fi-command:before { + content:'\e046'; +} + +.fi-comment-square:before { + content:'\e047'; +} + +.fi-compass:before { + content:'\e048'; +} + +.fi-contrast:before { + content:'\e049'; +} + +.fi-copywriting:before { + content:'\e04a'; +} + +.fi-credit-card:before { + content:'\e04b'; +} + +.fi-crop:before { + content:'\e04c'; +} + +.fi-dashboard:before { + content:'\e04d'; +} + +.fi-data-transfer-download:before { + content:'\e04e'; +} + +.fi-data-transfer-upload:before { + content:'\e04f'; +} + 
+.fi-delete:before { + content:'\e050'; +} + +.fi-dial:before { + content:'\e051'; +} + +.fi-document:before { + content:'\e052'; +} + +.fi-dollar:before { + content:'\e053'; +} + +.fi-double-quote-sans-left:before { + content:'\e054'; +} + +.fi-double-quote-sans-right:before { + content:'\e055'; +} + +.fi-double-quote-serif-left:before { + content:'\e056'; +} + +.fi-double-quote-serif-right:before { + content:'\e057'; +} + +.fi-droplet:before { + content:'\e058'; +} + +.fi-eject:before { + content:'\e059'; +} + +.fi-elevator:before { + content:'\e05a'; +} + +.fi-ellipses:before { + content:'\e05b'; +} + +.fi-envelope-closed:before { + content:'\e05c'; +} + +.fi-envelope-open:before { + content:'\e05d'; +} + +.fi-euro:before { + content:'\e05e'; +} + +.fi-excerpt:before { + content:'\e05f'; +} + +.fi-expand-down:before { + content:'\e060'; +} + +.fi-expand-left:before { + content:'\e061'; +} + +.fi-expand-right:before { + content:'\e062'; +} + +.fi-expand-up:before { + content:'\e063'; +} + +.fi-external-link:before { + content:'\e064'; +} + +.fi-eye:before { + content:'\e065'; +} + +.fi-eyedropper:before { + content:'\e066'; +} + +.fi-file:before { + content:'\e067'; +} + +.fi-fire:before { + content:'\e068'; +} + +.fi-flag:before { + content:'\e069'; +} + +.fi-flash:before { + content:'\e06a'; +} + +.fi-folder:before { + content:'\e06b'; +} + +.fi-fork:before { + content:'\e06c'; +} + +.fi-fullscreen-enter:before { + content:'\e06d'; +} + +.fi-fullscreen-exit:before { + content:'\e06e'; +} + +.fi-globe:before { + content:'\e06f'; +} + +.fi-graph:before { + content:'\e070'; +} + +.fi-grid-four-up:before { + content:'\e071'; +} + +.fi-grid-three-up:before { + content:'\e072'; +} + +.fi-grid-two-up:before { + content:'\e073'; +} + +.fi-hard-drive:before { + content:'\e074'; +} + +.fi-header:before { + content:'\e075'; +} + +.fi-headphones:before { + content:'\e076'; +} + +.fi-heart:before { + content:'\e077'; +} + +.fi-home:before { + content:'\e078'; +} + +.fi-image:before { + content:'\e079'; +} + +.fi-inbox:before { + content:'\e07a'; +} + +.fi-infinity:before { + content:'\e07b'; +} + +.fi-info:before { + content:'\e07c'; +} + +.fi-italic:before { + content:'\e07d'; +} + +.fi-justify-center:before { + content:'\e07e'; +} + +.fi-justify-left:before { + content:'\e07f'; +} + +.fi-justify-right:before { + content:'\e080'; +} + +.fi-key:before { + content:'\e081'; +} + +.fi-laptop:before { + content:'\e082'; +} + +.fi-layers:before { + content:'\e083'; +} + +.fi-lightbulb:before { + content:'\e084'; +} + +.fi-link-broken:before { + content:'\e085'; +} + +.fi-link-intact:before { + content:'\e086'; +} + +.fi-list-rich:before { + content:'\e087'; +} + +.fi-list:before { + content:'\e088'; +} + +.fi-location:before { + content:'\e089'; +} + +.fi-lock-locked:before { + content:'\e08a'; +} + +.fi-lock-unlocked:before { + content:'\e08b'; +} + +.fi-loop-circular:before { + content:'\e08c'; +} + +.fi-loop-square:before { + content:'\e08d'; +} + +.fi-loop:before { + content:'\e08e'; +} + +.fi-magnifying-glass:before { + content:'\e08f'; +} + +.fi-map-marker:before { + content:'\e090'; +} + +.fi-map:before { + content:'\e091'; +} + +.fi-media-pause:before { + content:'\e092'; +} + +.fi-media-play:before { + content:'\e093'; +} + +.fi-media-record:before { + content:'\e094'; +} + +.fi-media-skip-backward:before { + content:'\e095'; +} + +.fi-media-skip-forward:before { + content:'\e096'; +} + +.fi-media-step-backward:before { + content:'\e097'; +} + +.fi-media-step-forward:before { + content:'\e098'; 
+} + +.fi-media-stop:before { + content:'\e099'; +} + +.fi-medical-cross:before { + content:'\e09a'; +} + +.fi-menu:before { + content:'\e09b'; +} + +.fi-microphone:before { + content:'\e09c'; +} + +.fi-minus:before { + content:'\e09d'; +} + +.fi-monitor:before { + content:'\e09e'; +} + +.fi-moon:before { + content:'\e09f'; +} + +.fi-move:before { + content:'\e0a0'; +} + +.fi-musical-note:before { + content:'\e0a1'; +} + +.fi-paperclip:before { + content:'\e0a2'; +} + +.fi-pencil:before { + content:'\e0a3'; +} + +.fi-people:before { + content:'\e0a4'; +} + +.fi-person:before { + content:'\e0a5'; +} + +.fi-phone:before { + content:'\e0a6'; +} + +.fi-pie-chart:before { + content:'\e0a7'; +} + +.fi-pin:before { + content:'\e0a8'; +} + +.fi-play-circle:before { + content:'\e0a9'; +} + +.fi-plus:before { + content:'\e0aa'; +} + +.fi-power-standby:before { + content:'\e0ab'; +} + +.fi-print:before { + content:'\e0ac'; +} + +.fi-project:before { + content:'\e0ad'; +} + +.fi-pulse:before { + content:'\e0ae'; +} + +.fi-puzzle-piece:before { + content:'\e0af'; +} + +.fi-question-mark:before { + content:'\e0b0'; +} + +.fi-rain:before { + content:'\e0b1'; +} + +.fi-random:before { + content:'\e0b2'; +} + +.fi-reload:before { + content:'\e0b3'; +} + +.fi-resize-both:before { + content:'\e0b4'; +} + +.fi-resize-height:before { + content:'\e0b5'; +} + +.fi-resize-width:before { + content:'\e0b6'; +} + +.fi-rss-alt:before { + content:'\e0b7'; +} + +.fi-rss:before { + content:'\e0b8'; +} + +.fi-script:before { + content:'\e0b9'; +} + +.fi-share-boxed:before { + content:'\e0ba'; +} + +.fi-share:before { + content:'\e0bb'; +} + +.fi-shield:before { + content:'\e0bc'; +} + +.fi-signal:before { + content:'\e0bd'; +} + +.fi-signpost:before { + content:'\e0be'; +} + +.fi-sort-ascending:before { + content:'\e0bf'; +} + +.fi-sort-descending:before { + content:'\e0c0'; +} + +.fi-spreadsheet:before { + content:'\e0c1'; +} + +.fi-star:before { + content:'\e0c2'; +} + +.fi-sun:before { + content:'\e0c3'; +} + +.fi-tablet:before { + content:'\e0c4'; +} + +.fi-tag:before { + content:'\e0c5'; +} + +.fi-tags:before { + content:'\e0c6'; +} + +.fi-target:before { + content:'\e0c7'; +} + +.fi-task:before { + content:'\e0c8'; +} + +.fi-terminal:before { + content:'\e0c9'; +} + +.fi-text:before { + content:'\e0ca'; +} + +.fi-thumb-down:before { + content:'\e0cb'; +} + +.fi-thumb-up:before { + content:'\e0cc'; +} + +.fi-timer:before { + content:'\e0cd'; +} + +.fi-transfer:before { + content:'\e0ce'; +} + +.fi-trash:before { + content:'\e0cf'; +} + +.fi-underline:before { + content:'\e0d0'; +} + +.fi-vertical-align-bottom:before { + content:'\e0d1'; +} + +.fi-vertical-align-center:before { + content:'\e0d2'; +} + +.fi-vertical-align-top:before { + content:'\e0d3'; +} + +.fi-video:before { + content:'\e0d4'; +} + +.fi-volume-high:before { + content:'\e0d5'; +} + +.fi-volume-low:before { + content:'\e0d6'; +} + +.fi-volume-off:before { + content:'\e0d7'; +} + +.fi-warning:before { + content:'\e0d8'; +} + +.fi-wifi:before { + content:'\e0d9'; +} + +.fi-wrench:before { + content:'\e0da'; +} + +.fi-x:before { + content:'\e0db'; +} + +.fi-yen:before { + content:'\e0dc'; +} + +.fi-zoom-in:before { + content:'\e0dd'; +} + +.fi-zoom-out:before { + content:'\e0de'; +} + diff --git a/static/open-iconic/css/open-iconic-foundation.min.css b/static/open-iconic/css/open-iconic-foundation.min.css new file mode 100644 index 00000000..bd124297 --- /dev/null +++ b/static/open-iconic/css/open-iconic-foundation.min.css @@ -0,0 +1 @@ 
+@font-face{font-family:Icons;src:url(../fonts/open-iconic.eot);src:url(../fonts/open-iconic.eot?#iconic-sm) format('embedded-opentype'),url(../fonts/open-iconic.woff) format('woff'),url(../fonts/open-iconic.ttf) format('truetype'),url(../fonts/open-iconic.otf) format('opentype'),url(../fonts/open-iconic.svg#iconic-sm) format('svg');font-weight:400;font-style:normal}.fi-account-login:before,.fi-account-logout:before,.fi-action-redo:before,.fi-action-undo:before,.fi-align-center:before,.fi-align-left:before,.fi-align-right:before,.fi-aperture:before,.fi-arrow-bottom:before,.fi-arrow-circle-bottom:before,.fi-arrow-circle-left:before,.fi-arrow-circle-right:before,.fi-arrow-circle-top:before,.fi-arrow-left:before,.fi-arrow-right:before,.fi-arrow-thick-bottom:before,.fi-arrow-thick-left:before,.fi-arrow-thick-right:before,.fi-arrow-thick-top:before,.fi-arrow-top:before,.fi-audio-spectrum:before,.fi-audio:before,.fi-badge:before,.fi-ban:before,.fi-bar-chart:before,.fi-basket:before,.fi-battery-empty:before,.fi-battery-full:before,.fi-beaker:before,.fi-bell:before,.fi-bluetooth:before,.fi-bold:before,.fi-bolt:before,.fi-book:before,.fi-bookmark:before,.fi-box:before,.fi-briefcase:before,.fi-british-pound:before,.fi-browser:before,.fi-brush:before,.fi-bug:before,.fi-bullhorn:before,.fi-calculator:before,.fi-calendar:before,.fi-camera-slr:before,.fi-caret-bottom:before,.fi-caret-left:before,.fi-caret-right:before,.fi-caret-top:before,.fi-cart:before,.fi-chat:before,.fi-check:before,.fi-chevron-bottom:before,.fi-chevron-left:before,.fi-chevron-right:before,.fi-chevron-top:before,.fi-circle-check:before,.fi-circle-x:before,.fi-clipboard:before,.fi-clock:before,.fi-cloud-download:before,.fi-cloud-upload:before,.fi-cloud:before,.fi-cloudy:before,.fi-code:before,.fi-cog:before,.fi-collapse-down:before,.fi-collapse-left:before,.fi-collapse-right:before,.fi-collapse-up:before,.fi-command:before,.fi-comment-square:before,.fi-compass:before,.fi-contrast:before,.fi-copywriting:before,.fi-credit-card:before,.fi-crop:before,.fi-dashboard:before,.fi-data-transfer-download:before,.fi-data-transfer-upload:before,.fi-delete:before,.fi-dial:before,.fi-document:before,.fi-dollar:before,.fi-double-quote-sans-left:before,.fi-double-quote-sans-right:before,.fi-double-quote-serif-left:before,.fi-double-quote-serif-right:before,.fi-droplet:before,.fi-eject:before,.fi-elevator:before,.fi-ellipses:before,.fi-envelope-closed:before,.fi-envelope-open:before,.fi-euro:before,.fi-excerpt:before,.fi-expand-down:before,.fi-expand-left:before,.fi-expand-right:before,.fi-expand-up:before,.fi-external-link:before,.fi-eye:before,.fi-eyedropper:before,.fi-file:before,.fi-fire:before,.fi-flag:before,.fi-flash:before,.fi-folder:before,.fi-fork:before,.fi-fullscreen-enter:before,.fi-fullscreen-exit:before,.fi-globe:before,.fi-graph:before,.fi-grid-four-up:before,.fi-grid-three-up:before,.fi-grid-two-up:before,.fi-hard-drive:before,.fi-header:before,.fi-headphones:before,.fi-heart:before,.fi-home:before,.fi-image:before,.fi-inbox:before,.fi-infinity:before,.fi-info:before,.fi-italic:before,.fi-justify-center:before,.fi-justify-left:before,.fi-justify-right:before,.fi-key:before,.fi-laptop:before,.fi-layers:before,.fi-lightbulb:before,.fi-link-broken:before,.fi-link-intact:before,.fi-list-rich:before,.fi-list:before,.fi-location:before,.fi-lock-locked:before,.fi-lock-unlocked:before,.fi-loop-circular:before,.fi-loop-square:before,.fi-loop:before,.fi-magnifying-glass:before,.fi-map-marker:before,.fi-map:before,.fi-media-pause:before,.fi-medi
a-play:before,.fi-media-record:before,.fi-media-skip-backward:before,.fi-media-skip-forward:before,.fi-media-step-backward:before,.fi-media-step-forward:before,.fi-media-stop:before,.fi-medical-cross:before,.fi-menu:before,.fi-microphone:before,.fi-minus:before,.fi-monitor:before,.fi-moon:before,.fi-move:before,.fi-musical-note:before,.fi-paperclip:before,.fi-pencil:before,.fi-people:before,.fi-person:before,.fi-phone:before,.fi-pie-chart:before,.fi-pin:before,.fi-play-circle:before,.fi-plus:before,.fi-power-standby:before,.fi-print:before,.fi-project:before,.fi-pulse:before,.fi-puzzle-piece:before,.fi-question-mark:before,.fi-rain:before,.fi-random:before,.fi-reload:before,.fi-resize-both:before,.fi-resize-height:before,.fi-resize-width:before,.fi-rss-alt:before,.fi-rss:before,.fi-script:before,.fi-share-boxed:before,.fi-share:before,.fi-shield:before,.fi-signal:before,.fi-signpost:before,.fi-sort-ascending:before,.fi-sort-descending:before,.fi-spreadsheet:before,.fi-star:before,.fi-sun:before,.fi-tablet:before,.fi-tag:before,.fi-tags:before,.fi-target:before,.fi-task:before,.fi-terminal:before,.fi-text:before,.fi-thumb-down:before,.fi-thumb-up:before,.fi-timer:before,.fi-transfer:before,.fi-trash:before,.fi-underline:before,.fi-vertical-align-bottom:before,.fi-vertical-align-center:before,.fi-vertical-align-top:before,.fi-video:before,.fi-volume-high:before,.fi-volume-low:before,.fi-volume-off:before,.fi-warning:before,.fi-wifi:before,.fi-wrench:before,.fi-x:before,.fi-yen:before,.fi-zoom-in:before,.fi-zoom-out:before{font-family:Icons;font-style:normal;font-weight:400;font-variant:normal;text-transform:none;line-height:1;-webkit-font-smoothing:antialiased;-moz-osx-font-smoothing:grayscale;display:inline-block;text-decoration:inherit}[class*=fi-].oi-align-center:before{text-align:center}[class*=fi-].oi-align-left:before{text-align:left}[class*=fi-].oi-align-right:before{text-align:right}[class*=fi-].oi-flip-horizontal:before{-webkit-transform:scale(-1,1);-ms-transform:scale(-1,1);transform:scale(-1,1)}[class*=fi-].oi-flip-vertical:before{-webkit-transform:scale(1,-1);-ms-transform:scale(-1,1);transform:scale(1,-1)}[class*=fi-].oi-flip-horizontal-vertical:before{-webkit-transform:scale(-1,-1);-ms-transform:scale(-1,1);transform:scale(-1,-1)}.fi-account-login:before{content:'\e000'}.fi-account-logout:before{content:'\e001'}.fi-action-redo:before{content:'\e002'}.fi-action-undo:before{content:'\e003'}.fi-align-center:before{content:'\e004'}.fi-align-left:before{content:'\e005'}.fi-align-right:before{content:'\e006'}.fi-aperture:before{content:'\e007'}.fi-arrow-bottom:before{content:'\e008'}.fi-arrow-circle-bottom:before{content:'\e009'}.fi-arrow-circle-left:before{content:'\e00a'}.fi-arrow-circle-right:before{content:'\e00b'}.fi-arrow-circle-top:before{content:'\e00c'}.fi-arrow-left:before{content:'\e00d'}.fi-arrow-right:before{content:'\e00e'}.fi-arrow-thick-bottom:before{content:'\e00f'}.fi-arrow-thick-left:before{content:'\e010'}.fi-arrow-thick-right:before{content:'\e011'}.fi-arrow-thick-top:before{content:'\e012'}.fi-arrow-top:before{content:'\e013'}.fi-audio-spectrum:before{content:'\e014'}.fi-audio:before{content:'\e015'}.fi-badge:before{content:'\e016'}.fi-ban:before{content:'\e017'}.fi-bar-chart:before{content:'\e018'}.fi-basket:before{content:'\e019'}.fi-battery-empty:before{content:'\e01a'}.fi-battery-full:before{content:'\e01b'}.fi-beaker:before{content:'\e01c'}.fi-bell:before{content:'\e01d'}.fi-bluetooth:before{content:'\e01e'}.fi-bold:before{content:'\e01f'}.fi-bolt:before{con
tent:'\e020'}.fi-book:before{content:'\e021'}.fi-bookmark:before{content:'\e022'}.fi-box:before{content:'\e023'}.fi-briefcase:before{content:'\e024'}.fi-british-pound:before{content:'\e025'}.fi-browser:before{content:'\e026'}.fi-brush:before{content:'\e027'}.fi-bug:before{content:'\e028'}.fi-bullhorn:before{content:'\e029'}.fi-calculator:before{content:'\e02a'}.fi-calendar:before{content:'\e02b'}.fi-camera-slr:before{content:'\e02c'}.fi-caret-bottom:before{content:'\e02d'}.fi-caret-left:before{content:'\e02e'}.fi-caret-right:before{content:'\e02f'}.fi-caret-top:before{content:'\e030'}.fi-cart:before{content:'\e031'}.fi-chat:before{content:'\e032'}.fi-check:before{content:'\e033'}.fi-chevron-bottom:before{content:'\e034'}.fi-chevron-left:before{content:'\e035'}.fi-chevron-right:before{content:'\e036'}.fi-chevron-top:before{content:'\e037'}.fi-circle-check:before{content:'\e038'}.fi-circle-x:before{content:'\e039'}.fi-clipboard:before{content:'\e03a'}.fi-clock:before{content:'\e03b'}.fi-cloud-download:before{content:'\e03c'}.fi-cloud-upload:before{content:'\e03d'}.fi-cloud:before{content:'\e03e'}.fi-cloudy:before{content:'\e03f'}.fi-code:before{content:'\e040'}.fi-cog:before{content:'\e041'}.fi-collapse-down:before{content:'\e042'}.fi-collapse-left:before{content:'\e043'}.fi-collapse-right:before{content:'\e044'}.fi-collapse-up:before{content:'\e045'}.fi-command:before{content:'\e046'}.fi-comment-square:before{content:'\e047'}.fi-compass:before{content:'\e048'}.fi-contrast:before{content:'\e049'}.fi-copywriting:before{content:'\e04a'}.fi-credit-card:before{content:'\e04b'}.fi-crop:before{content:'\e04c'}.fi-dashboard:before{content:'\e04d'}.fi-data-transfer-download:before{content:'\e04e'}.fi-data-transfer-upload:before{content:'\e04f'}.fi-delete:before{content:'\e050'}.fi-dial:before{content:'\e051'}.fi-document:before{content:'\e052'}.fi-dollar:before{content:'\e053'}.fi-double-quote-sans-left:before{content:'\e054'}.fi-double-quote-sans-right:before{content:'\e055'}.fi-double-quote-serif-left:before{content:'\e056'}.fi-double-quote-serif-right:before{content:'\e057'}.fi-droplet:before{content:'\e058'}.fi-eject:before{content:'\e059'}.fi-elevator:before{content:'\e05a'}.fi-ellipses:before{content:'\e05b'}.fi-envelope-closed:before{content:'\e05c'}.fi-envelope-open:before{content:'\e05d'}.fi-euro:before{content:'\e05e'}.fi-excerpt:before{content:'\e05f'}.fi-expand-down:before{content:'\e060'}.fi-expand-left:before{content:'\e061'}.fi-expand-right:before{content:'\e062'}.fi-expand-up:before{content:'\e063'}.fi-external-link:before{content:'\e064'}.fi-eye:before{content:'\e065'}.fi-eyedropper:before{content:'\e066'}.fi-file:before{content:'\e067'}.fi-fire:before{content:'\e068'}.fi-flag:before{content:'\e069'}.fi-flash:before{content:'\e06a'}.fi-folder:before{content:'\e06b'}.fi-fork:before{content:'\e06c'}.fi-fullscreen-enter:before{content:'\e06d'}.fi-fullscreen-exit:before{content:'\e06e'}.fi-globe:before{content:'\e06f'}.fi-graph:before{content:'\e070'}.fi-grid-four-up:before{content:'\e071'}.fi-grid-three-up:before{content:'\e072'}.fi-grid-two-up:before{content:'\e073'}.fi-hard-drive:before{content:'\e074'}.fi-header:before{content:'\e075'}.fi-headphones:before{content:'\e076'}.fi-heart:before{content:'\e077'}.fi-home:before{content:'\e078'}.fi-image:before{content:'\e079'}.fi-inbox:before{content:'\e07a'}.fi-infinity:before{content:'\e07b'}.fi-info:before{content:'\e07c'}.fi-italic:before{content:'\e07d'}.fi-justify-center:before{content:'\e07e'}.fi-justify-left:before{content:'\e07f'}.f
i-justify-right:before{content:'\e080'}.fi-key:before{content:'\e081'}.fi-laptop:before{content:'\e082'}.fi-layers:before{content:'\e083'}.fi-lightbulb:before{content:'\e084'}.fi-link-broken:before{content:'\e085'}.fi-link-intact:before{content:'\e086'}.fi-list-rich:before{content:'\e087'}.fi-list:before{content:'\e088'}.fi-location:before{content:'\e089'}.fi-lock-locked:before{content:'\e08a'}.fi-lock-unlocked:before{content:'\e08b'}.fi-loop-circular:before{content:'\e08c'}.fi-loop-square:before{content:'\e08d'}.fi-loop:before{content:'\e08e'}.fi-magnifying-glass:before{content:'\e08f'}.fi-map-marker:before{content:'\e090'}.fi-map:before{content:'\e091'}.fi-media-pause:before{content:'\e092'}.fi-media-play:before{content:'\e093'}.fi-media-record:before{content:'\e094'}.fi-media-skip-backward:before{content:'\e095'}.fi-media-skip-forward:before{content:'\e096'}.fi-media-step-backward:before{content:'\e097'}.fi-media-step-forward:before{content:'\e098'}.fi-media-stop:before{content:'\e099'}.fi-medical-cross:before{content:'\e09a'}.fi-menu:before{content:'\e09b'}.fi-microphone:before{content:'\e09c'}.fi-minus:before{content:'\e09d'}.fi-monitor:before{content:'\e09e'}.fi-moon:before{content:'\e09f'}.fi-move:before{content:'\e0a0'}.fi-musical-note:before{content:'\e0a1'}.fi-paperclip:before{content:'\e0a2'}.fi-pencil:before{content:'\e0a3'}.fi-people:before{content:'\e0a4'}.fi-person:before{content:'\e0a5'}.fi-phone:before{content:'\e0a6'}.fi-pie-chart:before{content:'\e0a7'}.fi-pin:before{content:'\e0a8'}.fi-play-circle:before{content:'\e0a9'}.fi-plus:before{content:'\e0aa'}.fi-power-standby:before{content:'\e0ab'}.fi-print:before{content:'\e0ac'}.fi-project:before{content:'\e0ad'}.fi-pulse:before{content:'\e0ae'}.fi-puzzle-piece:before{content:'\e0af'}.fi-question-mark:before{content:'\e0b0'}.fi-rain:before{content:'\e0b1'}.fi-random:before{content:'\e0b2'}.fi-reload:before{content:'\e0b3'}.fi-resize-both:before{content:'\e0b4'}.fi-resize-height:before{content:'\e0b5'}.fi-resize-width:before{content:'\e0b6'}.fi-rss-alt:before{content:'\e0b7'}.fi-rss:before{content:'\e0b8'}.fi-script:before{content:'\e0b9'}.fi-share-boxed:before{content:'\e0ba'}.fi-share:before{content:'\e0bb'}.fi-shield:before{content:'\e0bc'}.fi-signal:before{content:'\e0bd'}.fi-signpost:before{content:'\e0be'}.fi-sort-ascending:before{content:'\e0bf'}.fi-sort-descending:before{content:'\e0c0'}.fi-spreadsheet:before{content:'\e0c1'}.fi-star:before{content:'\e0c2'}.fi-sun:before{content:'\e0c3'}.fi-tablet:before{content:'\e0c4'}.fi-tag:before{content:'\e0c5'}.fi-tags:before{content:'\e0c6'}.fi-target:before{content:'\e0c7'}.fi-task:before{content:'\e0c8'}.fi-terminal:before{content:'\e0c9'}.fi-text:before{content:'\e0ca'}.fi-thumb-down:before{content:'\e0cb'}.fi-thumb-up:before{content:'\e0cc'}.fi-timer:before{content:'\e0cd'}.fi-transfer:before{content:'\e0ce'}.fi-trash:before{content:'\e0cf'}.fi-underline:before{content:'\e0d0'}.fi-vertical-align-bottom:before{content:'\e0d1'}.fi-vertical-align-center:before{content:'\e0d2'}.fi-vertical-align-top:before{content:'\e0d3'}.fi-video:before{content:'\e0d4'}.fi-volume-high:before{content:'\e0d5'}.fi-volume-low:before{content:'\e0d6'}.fi-volume-off:before{content:'\e0d7'}.fi-warning:before{content:'\e0d8'}.fi-wifi:before{content:'\e0d9'}.fi-wrench:before{content:'\e0da'}.fi-x:before{content:'\e0db'}.fi-yen:before{content:'\e0dc'}.fi-zoom-in:before{content:'\e0dd'}.fi-zoom-out:before{content:'\e0de'} \ No newline at end of file diff --git 
a/static/open-iconic/css/open-iconic-foundation.scss b/static/open-iconic/css/open-iconic-foundation.scss new file mode 100644 index 00000000..fe471389 --- /dev/null +++ b/static/open-iconic/css/open-iconic-foundation.scss @@ -0,0 +1,1398 @@ +/* Foundation */ + +/* Font path variable */ +$icon-font-path: '../fonts/' !default; + +@font-face { + font-family: 'Icons'; + src: url('#{$icon-font-path}open-iconic.eot'); + src: url('#{$icon-font-path}open-iconic.eot?#iconic-sm') format('embedded-opentype'), url('#{$icon-font-path}open-iconic.woff') format('woff'), url('#{$icon-font-path}open-iconic.ttf') format('truetype'), url('#{$icon-font-path}open-iconic.otf') format('opentype'), url('#{$icon-font-path}open-iconic.svg#iconic-sm') format('svg'); + font-weight: normal; + font-style: normal; +} + + +.fi-account-login:before, + +.fi-account-logout:before, + +.fi-action-redo:before, + +.fi-action-undo:before, + +.fi-align-center:before, + +.fi-align-left:before, + +.fi-align-right:before, + +.fi-aperture:before, + +.fi-arrow-bottom:before, + +.fi-arrow-circle-bottom:before, + +.fi-arrow-circle-left:before, + +.fi-arrow-circle-right:before, + +.fi-arrow-circle-top:before, + +.fi-arrow-left:before, + +.fi-arrow-right:before, + +.fi-arrow-thick-bottom:before, + +.fi-arrow-thick-left:before, + +.fi-arrow-thick-right:before, + +.fi-arrow-thick-top:before, + +.fi-arrow-top:before, + +.fi-audio-spectrum:before, + +.fi-audio:before, + +.fi-badge:before, + +.fi-ban:before, + +.fi-bar-chart:before, + +.fi-basket:before, + +.fi-battery-empty:before, + +.fi-battery-full:before, + +.fi-beaker:before, + +.fi-bell:before, + +.fi-bluetooth:before, + +.fi-bold:before, + +.fi-bolt:before, + +.fi-book:before, + +.fi-bookmark:before, + +.fi-box:before, + +.fi-briefcase:before, + +.fi-british-pound:before, + +.fi-browser:before, + +.fi-brush:before, + +.fi-bug:before, + +.fi-bullhorn:before, + +.fi-calculator:before, + +.fi-calendar:before, + +.fi-camera-slr:before, + +.fi-caret-bottom:before, + +.fi-caret-left:before, + +.fi-caret-right:before, + +.fi-caret-top:before, + +.fi-cart:before, + +.fi-chat:before, + +.fi-check:before, + +.fi-chevron-bottom:before, + +.fi-chevron-left:before, + +.fi-chevron-right:before, + +.fi-chevron-top:before, + +.fi-circle-check:before, + +.fi-circle-x:before, + +.fi-clipboard:before, + +.fi-clock:before, + +.fi-cloud-download:before, + +.fi-cloud-upload:before, + +.fi-cloud:before, + +.fi-cloudy:before, + +.fi-code:before, + +.fi-cog:before, + +.fi-collapse-down:before, + +.fi-collapse-left:before, + +.fi-collapse-right:before, + +.fi-collapse-up:before, + +.fi-command:before, + +.fi-comment-square:before, + +.fi-compass:before, + +.fi-contrast:before, + +.fi-copywriting:before, + +.fi-credit-card:before, + +.fi-crop:before, + +.fi-dashboard:before, + +.fi-data-transfer-download:before, + +.fi-data-transfer-upload:before, + +.fi-delete:before, + +.fi-dial:before, + +.fi-document:before, + +.fi-dollar:before, + +.fi-double-quote-sans-left:before, + +.fi-double-quote-sans-right:before, + +.fi-double-quote-serif-left:before, + +.fi-double-quote-serif-right:before, + +.fi-droplet:before, + +.fi-eject:before, + +.fi-elevator:before, + +.fi-ellipses:before, + +.fi-envelope-closed:before, + +.fi-envelope-open:before, + +.fi-euro:before, + +.fi-excerpt:before, + +.fi-expand-down:before, + +.fi-expand-left:before, + +.fi-expand-right:before, + +.fi-expand-up:before, + +.fi-external-link:before, + +.fi-eye:before, + +.fi-eyedropper:before, + +.fi-file:before, + +.fi-fire:before, + 
+.fi-flag:before, + +.fi-flash:before, + +.fi-folder:before, + +.fi-fork:before, + +.fi-fullscreen-enter:before, + +.fi-fullscreen-exit:before, + +.fi-globe:before, + +.fi-graph:before, + +.fi-grid-four-up:before, + +.fi-grid-three-up:before, + +.fi-grid-two-up:before, + +.fi-hard-drive:before, + +.fi-header:before, + +.fi-headphones:before, + +.fi-heart:before, + +.fi-home:before, + +.fi-image:before, + +.fi-inbox:before, + +.fi-infinity:before, + +.fi-info:before, + +.fi-italic:before, + +.fi-justify-center:before, + +.fi-justify-left:before, + +.fi-justify-right:before, + +.fi-key:before, + +.fi-laptop:before, + +.fi-layers:before, + +.fi-lightbulb:before, + +.fi-link-broken:before, + +.fi-link-intact:before, + +.fi-list-rich:before, + +.fi-list:before, + +.fi-location:before, + +.fi-lock-locked:before, + +.fi-lock-unlocked:before, + +.fi-loop-circular:before, + +.fi-loop-square:before, + +.fi-loop:before, + +.fi-magnifying-glass:before, + +.fi-map-marker:before, + +.fi-map:before, + +.fi-media-pause:before, + +.fi-media-play:before, + +.fi-media-record:before, + +.fi-media-skip-backward:before, + +.fi-media-skip-forward:before, + +.fi-media-step-backward:before, + +.fi-media-step-forward:before, + +.fi-media-stop:before, + +.fi-medical-cross:before, + +.fi-menu:before, + +.fi-microphone:before, + +.fi-minus:before, + +.fi-monitor:before, + +.fi-moon:before, + +.fi-move:before, + +.fi-musical-note:before, + +.fi-paperclip:before, + +.fi-pencil:before, + +.fi-people:before, + +.fi-person:before, + +.fi-phone:before, + +.fi-pie-chart:before, + +.fi-pin:before, + +.fi-play-circle:before, + +.fi-plus:before, + +.fi-power-standby:before, + +.fi-print:before, + +.fi-project:before, + +.fi-pulse:before, + +.fi-puzzle-piece:before, + +.fi-question-mark:before, + +.fi-rain:before, + +.fi-random:before, + +.fi-reload:before, + +.fi-resize-both:before, + +.fi-resize-height:before, + +.fi-resize-width:before, + +.fi-rss-alt:before, + +.fi-rss:before, + +.fi-script:before, + +.fi-share-boxed:before, + +.fi-share:before, + +.fi-shield:before, + +.fi-signal:before, + +.fi-signpost:before, + +.fi-sort-ascending:before, + +.fi-sort-descending:before, + +.fi-spreadsheet:before, + +.fi-star:before, + +.fi-sun:before, + +.fi-tablet:before, + +.fi-tag:before, + +.fi-tags:before, + +.fi-target:before, + +.fi-task:before, + +.fi-terminal:before, + +.fi-text:before, + +.fi-thumb-down:before, + +.fi-thumb-up:before, + +.fi-timer:before, + +.fi-transfer:before, + +.fi-trash:before, + +.fi-underline:before, + +.fi-vertical-align-bottom:before, + +.fi-vertical-align-center:before, + +.fi-vertical-align-top:before, + +.fi-video:before, + +.fi-volume-high:before, + +.fi-volume-low:before, + +.fi-volume-off:before, + +.fi-warning:before, + +.fi-wifi:before, + +.fi-wrench:before, + +.fi-x:before, + +.fi-yen:before, + +.fi-zoom-in:before, + +.fi-zoom-out:before + { + font-family: 'Icons'; + font-style: normal; + font-weight: normal; + font-variant: normal; + text-transform: none; + line-height: 1; + -webkit-font-smoothing: antialiased; + -moz-osx-font-smoothing: grayscale; + display: inline-block; + text-decoration: inherit; +} + + +[class*='fi-'].oi-align-center:before { + text-align: center; +} + +[class*='fi-'].oi-align-left:before { + text-align: left; +} + +[class*='fi-'].oi-align-right:before { + text-align: right; +} + + +[class*='fi-'].oi-flip-horizontal:before { + -webkit-transform: scale(-1, 1); + -ms-transform: scale(-1, 1); + transform: scale(-1, 1); +} + +[class*='fi-'].oi-flip-vertical:before { + 
-webkit-transform: scale(1, -1); + -ms-transform: scale(-1, 1); + transform: scale(1, -1); +} + +[class*='fi-'].oi-flip-horizontal-vertical:before { + -webkit-transform: scale(-1, -1); + -ms-transform: scale(-1, 1); + transform: scale(-1, -1); +} + + + +.fi-account-login:before { + content:'\e000'; +} + +.fi-account-logout:before { + content:'\e001'; +} + +.fi-action-redo:before { + content:'\e002'; +} + +.fi-action-undo:before { + content:'\e003'; +} + +.fi-align-center:before { + content:'\e004'; +} + +.fi-align-left:before { + content:'\e005'; +} + +.fi-align-right:before { + content:'\e006'; +} + +.fi-aperture:before { + content:'\e007'; +} + +.fi-arrow-bottom:before { + content:'\e008'; +} + +.fi-arrow-circle-bottom:before { + content:'\e009'; +} + +.fi-arrow-circle-left:before { + content:'\e00a'; +} + +.fi-arrow-circle-right:before { + content:'\e00b'; +} + +.fi-arrow-circle-top:before { + content:'\e00c'; +} + +.fi-arrow-left:before { + content:'\e00d'; +} + +.fi-arrow-right:before { + content:'\e00e'; +} + +.fi-arrow-thick-bottom:before { + content:'\e00f'; +} + +.fi-arrow-thick-left:before { + content:'\e010'; +} + +.fi-arrow-thick-right:before { + content:'\e011'; +} + +.fi-arrow-thick-top:before { + content:'\e012'; +} + +.fi-arrow-top:before { + content:'\e013'; +} + +.fi-audio-spectrum:before { + content:'\e014'; +} + +.fi-audio:before { + content:'\e015'; +} + +.fi-badge:before { + content:'\e016'; +} + +.fi-ban:before { + content:'\e017'; +} + +.fi-bar-chart:before { + content:'\e018'; +} + +.fi-basket:before { + content:'\e019'; +} + +.fi-battery-empty:before { + content:'\e01a'; +} + +.fi-battery-full:before { + content:'\e01b'; +} + +.fi-beaker:before { + content:'\e01c'; +} + +.fi-bell:before { + content:'\e01d'; +} + +.fi-bluetooth:before { + content:'\e01e'; +} + +.fi-bold:before { + content:'\e01f'; +} + +.fi-bolt:before { + content:'\e020'; +} + +.fi-book:before { + content:'\e021'; +} + +.fi-bookmark:before { + content:'\e022'; +} + +.fi-box:before { + content:'\e023'; +} + +.fi-briefcase:before { + content:'\e024'; +} + +.fi-british-pound:before { + content:'\e025'; +} + +.fi-browser:before { + content:'\e026'; +} + +.fi-brush:before { + content:'\e027'; +} + +.fi-bug:before { + content:'\e028'; +} + +.fi-bullhorn:before { + content:'\e029'; +} + +.fi-calculator:before { + content:'\e02a'; +} + +.fi-calendar:before { + content:'\e02b'; +} + +.fi-camera-slr:before { + content:'\e02c'; +} + +.fi-caret-bottom:before { + content:'\e02d'; +} + +.fi-caret-left:before { + content:'\e02e'; +} + +.fi-caret-right:before { + content:'\e02f'; +} + +.fi-caret-top:before { + content:'\e030'; +} + +.fi-cart:before { + content:'\e031'; +} + +.fi-chat:before { + content:'\e032'; +} + +.fi-check:before { + content:'\e033'; +} + +.fi-chevron-bottom:before { + content:'\e034'; +} + +.fi-chevron-left:before { + content:'\e035'; +} + +.fi-chevron-right:before { + content:'\e036'; +} + +.fi-chevron-top:before { + content:'\e037'; +} + +.fi-circle-check:before { + content:'\e038'; +} + +.fi-circle-x:before { + content:'\e039'; +} + +.fi-clipboard:before { + content:'\e03a'; +} + +.fi-clock:before { + content:'\e03b'; +} + +.fi-cloud-download:before { + content:'\e03c'; +} + +.fi-cloud-upload:before { + content:'\e03d'; +} + +.fi-cloud:before { + content:'\e03e'; +} + +.fi-cloudy:before { + content:'\e03f'; +} + +.fi-code:before { + content:'\e040'; +} + +.fi-cog:before { + content:'\e041'; +} + +.fi-collapse-down:before { + content:'\e042'; +} + +.fi-collapse-left:before { + 
content:'\e043'; +} + +.fi-collapse-right:before { + content:'\e044'; +} + +.fi-collapse-up:before { + content:'\e045'; +} + +.fi-command:before { + content:'\e046'; +} + +.fi-comment-square:before { + content:'\e047'; +} + +.fi-compass:before { + content:'\e048'; +} + +.fi-contrast:before { + content:'\e049'; +} + +.fi-copywriting:before { + content:'\e04a'; +} + +.fi-credit-card:before { + content:'\e04b'; +} + +.fi-crop:before { + content:'\e04c'; +} + +.fi-dashboard:before { + content:'\e04d'; +} + +.fi-data-transfer-download:before { + content:'\e04e'; +} + +.fi-data-transfer-upload:before { + content:'\e04f'; +} + +.fi-delete:before { + content:'\e050'; +} + +.fi-dial:before { + content:'\e051'; +} + +.fi-document:before { + content:'\e052'; +} + +.fi-dollar:before { + content:'\e053'; +} + +.fi-double-quote-sans-left:before { + content:'\e054'; +} + +.fi-double-quote-sans-right:before { + content:'\e055'; +} + +.fi-double-quote-serif-left:before { + content:'\e056'; +} + +.fi-double-quote-serif-right:before { + content:'\e057'; +} + +.fi-droplet:before { + content:'\e058'; +} + +.fi-eject:before { + content:'\e059'; +} + +.fi-elevator:before { + content:'\e05a'; +} + +.fi-ellipses:before { + content:'\e05b'; +} + +.fi-envelope-closed:before { + content:'\e05c'; +} + +.fi-envelope-open:before { + content:'\e05d'; +} + +.fi-euro:before { + content:'\e05e'; +} + +.fi-excerpt:before { + content:'\e05f'; +} + +.fi-expand-down:before { + content:'\e060'; +} + +.fi-expand-left:before { + content:'\e061'; +} + +.fi-expand-right:before { + content:'\e062'; +} + +.fi-expand-up:before { + content:'\e063'; +} + +.fi-external-link:before { + content:'\e064'; +} + +.fi-eye:before { + content:'\e065'; +} + +.fi-eyedropper:before { + content:'\e066'; +} + +.fi-file:before { + content:'\e067'; +} + +.fi-fire:before { + content:'\e068'; +} + +.fi-flag:before { + content:'\e069'; +} + +.fi-flash:before { + content:'\e06a'; +} + +.fi-folder:before { + content:'\e06b'; +} + +.fi-fork:before { + content:'\e06c'; +} + +.fi-fullscreen-enter:before { + content:'\e06d'; +} + +.fi-fullscreen-exit:before { + content:'\e06e'; +} + +.fi-globe:before { + content:'\e06f'; +} + +.fi-graph:before { + content:'\e070'; +} + +.fi-grid-four-up:before { + content:'\e071'; +} + +.fi-grid-three-up:before { + content:'\e072'; +} + +.fi-grid-two-up:before { + content:'\e073'; +} + +.fi-hard-drive:before { + content:'\e074'; +} + +.fi-header:before { + content:'\e075'; +} + +.fi-headphones:before { + content:'\e076'; +} + +.fi-heart:before { + content:'\e077'; +} + +.fi-home:before { + content:'\e078'; +} + +.fi-image:before { + content:'\e079'; +} + +.fi-inbox:before { + content:'\e07a'; +} + +.fi-infinity:before { + content:'\e07b'; +} + +.fi-info:before { + content:'\e07c'; +} + +.fi-italic:before { + content:'\e07d'; +} + +.fi-justify-center:before { + content:'\e07e'; +} + +.fi-justify-left:before { + content:'\e07f'; +} + +.fi-justify-right:before { + content:'\e080'; +} + +.fi-key:before { + content:'\e081'; +} + +.fi-laptop:before { + content:'\e082'; +} + +.fi-layers:before { + content:'\e083'; +} + +.fi-lightbulb:before { + content:'\e084'; +} + +.fi-link-broken:before { + content:'\e085'; +} + +.fi-link-intact:before { + content:'\e086'; +} + +.fi-list-rich:before { + content:'\e087'; +} + +.fi-list:before { + content:'\e088'; +} + +.fi-location:before { + content:'\e089'; +} + +.fi-lock-locked:before { + content:'\e08a'; +} + +.fi-lock-unlocked:before { + content:'\e08b'; +} + +.fi-loop-circular:before { + 
content:'\e08c'; +} + +.fi-loop-square:before { + content:'\e08d'; +} + +.fi-loop:before { + content:'\e08e'; +} + +.fi-magnifying-glass:before { + content:'\e08f'; +} + +.fi-map-marker:before { + content:'\e090'; +} + +.fi-map:before { + content:'\e091'; +} + +.fi-media-pause:before { + content:'\e092'; +} + +.fi-media-play:before { + content:'\e093'; +} + +.fi-media-record:before { + content:'\e094'; +} + +.fi-media-skip-backward:before { + content:'\e095'; +} + +.fi-media-skip-forward:before { + content:'\e096'; +} + +.fi-media-step-backward:before { + content:'\e097'; +} + +.fi-media-step-forward:before { + content:'\e098'; +} + +.fi-media-stop:before { + content:'\e099'; +} + +.fi-medical-cross:before { + content:'\e09a'; +} + +.fi-menu:before { + content:'\e09b'; +} + +.fi-microphone:before { + content:'\e09c'; +} + +.fi-minus:before { + content:'\e09d'; +} + +.fi-monitor:before { + content:'\e09e'; +} + +.fi-moon:before { + content:'\e09f'; +} + +.fi-move:before { + content:'\e0a0'; +} + +.fi-musical-note:before { + content:'\e0a1'; +} + +.fi-paperclip:before { + content:'\e0a2'; +} + +.fi-pencil:before { + content:'\e0a3'; +} + +.fi-people:before { + content:'\e0a4'; +} + +.fi-person:before { + content:'\e0a5'; +} + +.fi-phone:before { + content:'\e0a6'; +} + +.fi-pie-chart:before { + content:'\e0a7'; +} + +.fi-pin:before { + content:'\e0a8'; +} + +.fi-play-circle:before { + content:'\e0a9'; +} + +.fi-plus:before { + content:'\e0aa'; +} + +.fi-power-standby:before { + content:'\e0ab'; +} + +.fi-print:before { + content:'\e0ac'; +} + +.fi-project:before { + content:'\e0ad'; +} + +.fi-pulse:before { + content:'\e0ae'; +} + +.fi-puzzle-piece:before { + content:'\e0af'; +} + +.fi-question-mark:before { + content:'\e0b0'; +} + +.fi-rain:before { + content:'\e0b1'; +} + +.fi-random:before { + content:'\e0b2'; +} + +.fi-reload:before { + content:'\e0b3'; +} + +.fi-resize-both:before { + content:'\e0b4'; +} + +.fi-resize-height:before { + content:'\e0b5'; +} + +.fi-resize-width:before { + content:'\e0b6'; +} + +.fi-rss-alt:before { + content:'\e0b7'; +} + +.fi-rss:before { + content:'\e0b8'; +} + +.fi-script:before { + content:'\e0b9'; +} + +.fi-share-boxed:before { + content:'\e0ba'; +} + +.fi-share:before { + content:'\e0bb'; +} + +.fi-shield:before { + content:'\e0bc'; +} + +.fi-signal:before { + content:'\e0bd'; +} + +.fi-signpost:before { + content:'\e0be'; +} + +.fi-sort-ascending:before { + content:'\e0bf'; +} + +.fi-sort-descending:before { + content:'\e0c0'; +} + +.fi-spreadsheet:before { + content:'\e0c1'; +} + +.fi-star:before { + content:'\e0c2'; +} + +.fi-sun:before { + content:'\e0c3'; +} + +.fi-tablet:before { + content:'\e0c4'; +} + +.fi-tag:before { + content:'\e0c5'; +} + +.fi-tags:before { + content:'\e0c6'; +} + +.fi-target:before { + content:'\e0c7'; +} + +.fi-task:before { + content:'\e0c8'; +} + +.fi-terminal:before { + content:'\e0c9'; +} + +.fi-text:before { + content:'\e0ca'; +} + +.fi-thumb-down:before { + content:'\e0cb'; +} + +.fi-thumb-up:before { + content:'\e0cc'; +} + +.fi-timer:before { + content:'\e0cd'; +} + +.fi-transfer:before { + content:'\e0ce'; +} + +.fi-trash:before { + content:'\e0cf'; +} + +.fi-underline:before { + content:'\e0d0'; +} + +.fi-vertical-align-bottom:before { + content:'\e0d1'; +} + +.fi-vertical-align-center:before { + content:'\e0d2'; +} + +.fi-vertical-align-top:before { + content:'\e0d3'; +} + +.fi-video:before { + content:'\e0d4'; +} + +.fi-volume-high:before { + content:'\e0d5'; +} + +.fi-volume-low:before { + content:'\e0d6'; 
+} + +.fi-volume-off:before { + content:'\e0d7'; +} + +.fi-warning:before { + content:'\e0d8'; +} + +.fi-wifi:before { + content:'\e0d9'; +} + +.fi-wrench:before { + content:'\e0da'; +} + +.fi-x:before { + content:'\e0db'; +} + +.fi-yen:before { + content:'\e0dc'; +} + +.fi-zoom-in:before { + content:'\e0dd'; +} + +.fi-zoom-out:before { + content:'\e0de'; +} + diff --git a/static/open-iconic/css/open-iconic-foundation.styl b/static/open-iconic/css/open-iconic-foundation.styl new file mode 100644 index 00000000..a52637ab --- /dev/null +++ b/static/open-iconic/css/open-iconic-foundation.styl @@ -0,0 +1,1392 @@ +/* Foundation */ + +@font-face + font-family 'Icons' + src url('../fonts/open-iconic.eot') + src url('../fonts/open-iconic.eot?#iconic-sm') format('embedded-opentype'), url('../fonts/open-iconic.woff') format('woff'), url('../fonts/open-iconic.ttf') format('truetype'), url('../fonts/open-iconic.otf') format('opentype'), url('../fonts/open-iconic.svg#iconic-sm') format('svg') + font-weight normal + font-style normal + + + +.fi-account-loginbefore, + +.fi-account-logoutbefore, + +.fi-action-redobefore, + +.fi-action-undobefore, + +.fi-align-centerbefore, + +.fi-align-leftbefore, + +.fi-align-rightbefore, + +.fi-aperturebefore, + +.fi-arrow-bottombefore, + +.fi-arrow-circle-bottombefore, + +.fi-arrow-circle-leftbefore, + +.fi-arrow-circle-rightbefore, + +.fi-arrow-circle-topbefore, + +.fi-arrow-leftbefore, + +.fi-arrow-rightbefore, + +.fi-arrow-thick-bottombefore, + +.fi-arrow-thick-leftbefore, + +.fi-arrow-thick-rightbefore, + +.fi-arrow-thick-topbefore, + +.fi-arrow-topbefore, + +.fi-audio-spectrumbefore, + +.fi-audiobefore, + +.fi-badgebefore, + +.fi-banbefore, + +.fi-bar-chartbefore, + +.fi-basketbefore, + +.fi-battery-emptybefore, + +.fi-battery-fullbefore, + +.fi-beakerbefore, + +.fi-bellbefore, + +.fi-bluetoothbefore, + +.fi-boldbefore, + +.fi-boltbefore, + +.fi-bookbefore, + +.fi-bookmarkbefore, + +.fi-boxbefore, + +.fi-briefcasebefore, + +.fi-british-poundbefore, + +.fi-browserbefore, + +.fi-brushbefore, + +.fi-bugbefore, + +.fi-bullhornbefore, + +.fi-calculatorbefore, + +.fi-calendarbefore, + +.fi-camera-slrbefore, + +.fi-caret-bottombefore, + +.fi-caret-leftbefore, + +.fi-caret-rightbefore, + +.fi-caret-topbefore, + +.fi-cartbefore, + +.fi-chatbefore, + +.fi-checkbefore, + +.fi-chevron-bottombefore, + +.fi-chevron-leftbefore, + +.fi-chevron-rightbefore, + +.fi-chevron-topbefore, + +.fi-circle-checkbefore, + +.fi-circle-xbefore, + +.fi-clipboardbefore, + +.fi-clockbefore, + +.fi-cloud-downloadbefore, + +.fi-cloud-uploadbefore, + +.fi-cloudbefore, + +.fi-cloudybefore, + +.fi-codebefore, + +.fi-cogbefore, + +.fi-collapse-downbefore, + +.fi-collapse-leftbefore, + +.fi-collapse-rightbefore, + +.fi-collapse-upbefore, + +.fi-commandbefore, + +.fi-comment-squarebefore, + +.fi-compassbefore, + +.fi-contrastbefore, + +.fi-copywritingbefore, + +.fi-credit-cardbefore, + +.fi-cropbefore, + +.fi-dashboardbefore, + +.fi-data-transfer-downloadbefore, + +.fi-data-transfer-uploadbefore, + +.fi-deletebefore, + +.fi-dialbefore, + +.fi-documentbefore, + +.fi-dollarbefore, + +.fi-double-quote-sans-leftbefore, + +.fi-double-quote-sans-rightbefore, + +.fi-double-quote-serif-leftbefore, + +.fi-double-quote-serif-rightbefore, + +.fi-dropletbefore, + +.fi-ejectbefore, + +.fi-elevatorbefore, + +.fi-ellipsesbefore, + +.fi-envelope-closedbefore, + +.fi-envelope-openbefore, + +.fi-eurobefore, + +.fi-excerptbefore, + +.fi-expand-downbefore, + +.fi-expand-leftbefore, + +.fi-expand-rightbefore, + 
+.fi-expand-upbefore, + +.fi-external-linkbefore, + +.fi-eyebefore, + +.fi-eyedropperbefore, + +.fi-filebefore, + +.fi-firebefore, + +.fi-flagbefore, + +.fi-flashbefore, + +.fi-folderbefore, + +.fi-forkbefore, + +.fi-fullscreen-enterbefore, + +.fi-fullscreen-exitbefore, + +.fi-globebefore, + +.fi-graphbefore, + +.fi-grid-four-upbefore, + +.fi-grid-three-upbefore, + +.fi-grid-two-upbefore, + +.fi-hard-drivebefore, + +.fi-headerbefore, + +.fi-headphonesbefore, + +.fi-heartbefore, + +.fi-homebefore, + +.fi-imagebefore, + +.fi-inboxbefore, + +.fi-infinitybefore, + +.fi-infobefore, + +.fi-italicbefore, + +.fi-justify-centerbefore, + +.fi-justify-leftbefore, + +.fi-justify-rightbefore, + +.fi-keybefore, + +.fi-laptopbefore, + +.fi-layersbefore, + +.fi-lightbulbbefore, + +.fi-link-brokenbefore, + +.fi-link-intactbefore, + +.fi-list-richbefore, + +.fi-listbefore, + +.fi-locationbefore, + +.fi-lock-lockedbefore, + +.fi-lock-unlockedbefore, + +.fi-loop-circularbefore, + +.fi-loop-squarebefore, + +.fi-loopbefore, + +.fi-magnifying-glassbefore, + +.fi-map-markerbefore, + +.fi-mapbefore, + +.fi-media-pausebefore, + +.fi-media-playbefore, + +.fi-media-recordbefore, + +.fi-media-skip-backwardbefore, + +.fi-media-skip-forwardbefore, + +.fi-media-step-backwardbefore, + +.fi-media-step-forwardbefore, + +.fi-media-stopbefore, + +.fi-medical-crossbefore, + +.fi-menubefore, + +.fi-microphonebefore, + +.fi-minusbefore, + +.fi-monitorbefore, + +.fi-moonbefore, + +.fi-movebefore, + +.fi-musical-notebefore, + +.fi-paperclipbefore, + +.fi-pencilbefore, + +.fi-peoplebefore, + +.fi-personbefore, + +.fi-phonebefore, + +.fi-pie-chartbefore, + +.fi-pinbefore, + +.fi-play-circlebefore, + +.fi-plusbefore, + +.fi-power-standbybefore, + +.fi-printbefore, + +.fi-projectbefore, + +.fi-pulsebefore, + +.fi-puzzle-piecebefore, + +.fi-question-markbefore, + +.fi-rainbefore, + +.fi-randombefore, + +.fi-reloadbefore, + +.fi-resize-bothbefore, + +.fi-resize-heightbefore, + +.fi-resize-widthbefore, + +.fi-rss-altbefore, + +.fi-rssbefore, + +.fi-scriptbefore, + +.fi-share-boxedbefore, + +.fi-sharebefore, + +.fi-shieldbefore, + +.fi-signalbefore, + +.fi-signpostbefore, + +.fi-sort-ascendingbefore, + +.fi-sort-descendingbefore, + +.fi-spreadsheetbefore, + +.fi-starbefore, + +.fi-sunbefore, + +.fi-tabletbefore, + +.fi-tagbefore, + +.fi-tagsbefore, + +.fi-targetbefore, + +.fi-taskbefore, + +.fi-terminalbefore, + +.fi-textbefore, + +.fi-thumb-downbefore, + +.fi-thumb-upbefore, + +.fi-timerbefore, + +.fi-transferbefore, + +.fi-trashbefore, + +.fi-underlinebefore, + +.fi-vertical-align-bottombefore, + +.fi-vertical-align-centerbefore, + +.fi-vertical-align-topbefore, + +.fi-videobefore, + +.fi-volume-highbefore, + +.fi-volume-lowbefore, + +.fi-volume-offbefore, + +.fi-warningbefore, + +.fi-wifibefore, + +.fi-wrenchbefore, + +.fi-xbefore, + +.fi-yenbefore, + +.fi-zoom-inbefore, + +.fi-zoom-outbefore + + font-family 'Icons' + font-style normal + font-weight normal + font-variant normal + text-transform none + line-height 1 + -webkit-font-smoothing antialiased + -moz-osx-font-smoothing grayscale + display inline-block + text-decoration inherit + + +[class*='fi-'].oi-align-center:before + text-align center + + +[class*='fi-'].oi-align-left:before + text-align left + + +[class*='fi-'].oi-align-right:before + text-align right + + + +[class*='fi-'].oi-flip-horizontal:before + -webkit-transform scale(-1, 1) + -ms-transform scale(-1, 1) + transform scale(-1, 1) + + +[class*='fi-'].oi-flip-vertical:before + -webkit-transform scale(1, -1) + 
-ms-transform scale(-1, 1) + transform scale(1, -1) + + +[class*='fi-'].oi-flip-horizontal-vertical:before + -webkit-transform scale(-1, -1) + -ms-transform scale(-1, 1) + transform scale(-1, -1) + + +.fi-account-login:before + content'\e000' + + +.fi-account-logout:before + content'\e001' + + +.fi-action-redo:before + content'\e002' + + +.fi-action-undo:before + content'\e003' + + +.fi-align-center:before + content'\e004' + + +.fi-align-left:before + content'\e005' + + +.fi-align-right:before + content'\e006' + + +.fi-aperture:before + content'\e007' + + +.fi-arrow-bottom:before + content'\e008' + + +.fi-arrow-circle-bottom:before + content'\e009' + + +.fi-arrow-circle-left:before + content'\e00a' + + +.fi-arrow-circle-right:before + content'\e00b' + + +.fi-arrow-circle-top:before + content'\e00c' + + +.fi-arrow-left:before + content'\e00d' + + +.fi-arrow-right:before + content'\e00e' + + +.fi-arrow-thick-bottom:before + content'\e00f' + + +.fi-arrow-thick-left:before + content'\e010' + + +.fi-arrow-thick-right:before + content'\e011' + + +.fi-arrow-thick-top:before + content'\e012' + + +.fi-arrow-top:before + content'\e013' + + +.fi-audio-spectrum:before + content'\e014' + + +.fi-audio:before + content'\e015' + + +.fi-badge:before + content'\e016' + + +.fi-ban:before + content'\e017' + + +.fi-bar-chart:before + content'\e018' + + +.fi-basket:before + content'\e019' + + +.fi-battery-empty:before + content'\e01a' + + +.fi-battery-full:before + content'\e01b' + + +.fi-beaker:before + content'\e01c' + + +.fi-bell:before + content'\e01d' + + +.fi-bluetooth:before + content'\e01e' + + +.fi-bold:before + content'\e01f' + + +.fi-bolt:before + content'\e020' + + +.fi-book:before + content'\e021' + + +.fi-bookmark:before + content'\e022' + + +.fi-box:before + content'\e023' + + +.fi-briefcase:before + content'\e024' + + +.fi-british-pound:before + content'\e025' + + +.fi-browser:before + content'\e026' + + +.fi-brush:before + content'\e027' + + +.fi-bug:before + content'\e028' + + +.fi-bullhorn:before + content'\e029' + + +.fi-calculator:before + content'\e02a' + + +.fi-calendar:before + content'\e02b' + + +.fi-camera-slr:before + content'\e02c' + + +.fi-caret-bottom:before + content'\e02d' + + +.fi-caret-left:before + content'\e02e' + + +.fi-caret-right:before + content'\e02f' + + +.fi-caret-top:before + content'\e030' + + +.fi-cart:before + content'\e031' + + +.fi-chat:before + content'\e032' + + +.fi-check:before + content'\e033' + + +.fi-chevron-bottom:before + content'\e034' + + +.fi-chevron-left:before + content'\e035' + + +.fi-chevron-right:before + content'\e036' + + +.fi-chevron-top:before + content'\e037' + + +.fi-circle-check:before + content'\e038' + + +.fi-circle-x:before + content'\e039' + + +.fi-clipboard:before + content'\e03a' + + +.fi-clock:before + content'\e03b' + + +.fi-cloud-download:before + content'\e03c' + + +.fi-cloud-upload:before + content'\e03d' + + +.fi-cloud:before + content'\e03e' + + +.fi-cloudy:before + content'\e03f' + + +.fi-code:before + content'\e040' + + +.fi-cog:before + content'\e041' + + +.fi-collapse-down:before + content'\e042' + + +.fi-collapse-left:before + content'\e043' + + +.fi-collapse-right:before + content'\e044' + + +.fi-collapse-up:before + content'\e045' + + +.fi-command:before + content'\e046' + + +.fi-comment-square:before + content'\e047' + + +.fi-compass:before + content'\e048' + + +.fi-contrast:before + content'\e049' + + +.fi-copywriting:before + content'\e04a' + + +.fi-credit-card:before + content'\e04b' + + +.fi-crop:before + 
content'\e04c' + + +.fi-dashboard:before + content'\e04d' + + +.fi-data-transfer-download:before + content'\e04e' + + +.fi-data-transfer-upload:before + content'\e04f' + + +.fi-delete:before + content'\e050' + + +.fi-dial:before + content'\e051' + + +.fi-document:before + content'\e052' + + +.fi-dollar:before + content'\e053' + + +.fi-double-quote-sans-left:before + content'\e054' + + +.fi-double-quote-sans-right:before + content'\e055' + + +.fi-double-quote-serif-left:before + content'\e056' + + +.fi-double-quote-serif-right:before + content'\e057' + + +.fi-droplet:before + content'\e058' + + +.fi-eject:before + content'\e059' + + +.fi-elevator:before + content'\e05a' + + +.fi-ellipses:before + content'\e05b' + + +.fi-envelope-closed:before + content'\e05c' + + +.fi-envelope-open:before + content'\e05d' + + +.fi-euro:before + content'\e05e' + + +.fi-excerpt:before + content'\e05f' + + +.fi-expand-down:before + content'\e060' + + +.fi-expand-left:before + content'\e061' + + +.fi-expand-right:before + content'\e062' + + +.fi-expand-up:before + content'\e063' + + +.fi-external-link:before + content'\e064' + + +.fi-eye:before + content'\e065' + + +.fi-eyedropper:before + content'\e066' + + +.fi-file:before + content'\e067' + + +.fi-fire:before + content'\e068' + + +.fi-flag:before + content'\e069' + + +.fi-flash:before + content'\e06a' + + +.fi-folder:before + content'\e06b' + + +.fi-fork:before + content'\e06c' + + +.fi-fullscreen-enter:before + content'\e06d' + + +.fi-fullscreen-exit:before + content'\e06e' + + +.fi-globe:before + content'\e06f' + + +.fi-graph:before + content'\e070' + + +.fi-grid-four-up:before + content'\e071' + + +.fi-grid-three-up:before + content'\e072' + + +.fi-grid-two-up:before + content'\e073' + + +.fi-hard-drive:before + content'\e074' + + +.fi-header:before + content'\e075' + + +.fi-headphones:before + content'\e076' + + +.fi-heart:before + content'\e077' + + +.fi-home:before + content'\e078' + + +.fi-image:before + content'\e079' + + +.fi-inbox:before + content'\e07a' + + +.fi-infinity:before + content'\e07b' + + +.fi-info:before + content'\e07c' + + +.fi-italic:before + content'\e07d' + + +.fi-justify-center:before + content'\e07e' + + +.fi-justify-left:before + content'\e07f' + + +.fi-justify-right:before + content'\e080' + + +.fi-key:before + content'\e081' + + +.fi-laptop:before + content'\e082' + + +.fi-layers:before + content'\e083' + + +.fi-lightbulb:before + content'\e084' + + +.fi-link-broken:before + content'\e085' + + +.fi-link-intact:before + content'\e086' + + +.fi-list-rich:before + content'\e087' + + +.fi-list:before + content'\e088' + + +.fi-location:before + content'\e089' + + +.fi-lock-locked:before + content'\e08a' + + +.fi-lock-unlocked:before + content'\e08b' + + +.fi-loop-circular:before + content'\e08c' + + +.fi-loop-square:before + content'\e08d' + + +.fi-loop:before + content'\e08e' + + +.fi-magnifying-glass:before + content'\e08f' + + +.fi-map-marker:before + content'\e090' + + +.fi-map:before + content'\e091' + + +.fi-media-pause:before + content'\e092' + + +.fi-media-play:before + content'\e093' + + +.fi-media-record:before + content'\e094' + + +.fi-media-skip-backward:before + content'\e095' + + +.fi-media-skip-forward:before + content'\e096' + + +.fi-media-step-backward:before + content'\e097' + + +.fi-media-step-forward:before + content'\e098' + + +.fi-media-stop:before + content'\e099' + + +.fi-medical-cross:before + content'\e09a' + + +.fi-menu:before + content'\e09b' + + +.fi-microphone:before + content'\e09c' + + 
+.fi-minus:before + content'\e09d' + + +.fi-monitor:before + content'\e09e' + + +.fi-moon:before + content'\e09f' + + +.fi-move:before + content'\e0a0' + + +.fi-musical-note:before + content'\e0a1' + + +.fi-paperclip:before + content'\e0a2' + + +.fi-pencil:before + content'\e0a3' + + +.fi-people:before + content'\e0a4' + + +.fi-person:before + content'\e0a5' + + +.fi-phone:before + content'\e0a6' + + +.fi-pie-chart:before + content'\e0a7' + + +.fi-pin:before + content'\e0a8' + + +.fi-play-circle:before + content'\e0a9' + + +.fi-plus:before + content'\e0aa' + + +.fi-power-standby:before + content'\e0ab' + + +.fi-print:before + content'\e0ac' + + +.fi-project:before + content'\e0ad' + + +.fi-pulse:before + content'\e0ae' + + +.fi-puzzle-piece:before + content'\e0af' + + +.fi-question-mark:before + content'\e0b0' + + +.fi-rain:before + content'\e0b1' + + +.fi-random:before + content'\e0b2' + + +.fi-reload:before + content'\e0b3' + + +.fi-resize-both:before + content'\e0b4' + + +.fi-resize-height:before + content'\e0b5' + + +.fi-resize-width:before + content'\e0b6' + + +.fi-rss-alt:before + content'\e0b7' + + +.fi-rss:before + content'\e0b8' + + +.fi-script:before + content'\e0b9' + + +.fi-share-boxed:before + content'\e0ba' + + +.fi-share:before + content'\e0bb' + + +.fi-shield:before + content'\e0bc' + + +.fi-signal:before + content'\e0bd' + + +.fi-signpost:before + content'\e0be' + + +.fi-sort-ascending:before + content'\e0bf' + + +.fi-sort-descending:before + content'\e0c0' + + +.fi-spreadsheet:before + content'\e0c1' + + +.fi-star:before + content'\e0c2' + + +.fi-sun:before + content'\e0c3' + + +.fi-tablet:before + content'\e0c4' + + +.fi-tag:before + content'\e0c5' + + +.fi-tags:before + content'\e0c6' + + +.fi-target:before + content'\e0c7' + + +.fi-task:before + content'\e0c8' + + +.fi-terminal:before + content'\e0c9' + + +.fi-text:before + content'\e0ca' + + +.fi-thumb-down:before + content'\e0cb' + + +.fi-thumb-up:before + content'\e0cc' + + +.fi-timer:before + content'\e0cd' + + +.fi-transfer:before + content'\e0ce' + + +.fi-trash:before + content'\e0cf' + + +.fi-underline:before + content'\e0d0' + + +.fi-vertical-align-bottom:before + content'\e0d1' + + +.fi-vertical-align-center:before + content'\e0d2' + + +.fi-vertical-align-top:before + content'\e0d3' + + +.fi-video:before + content'\e0d4' + + +.fi-volume-high:before + content'\e0d5' + + +.fi-volume-low:before + content'\e0d6' + + +.fi-volume-off:before + content'\e0d7' + + +.fi-warning:before + content'\e0d8' + + +.fi-wifi:before + content'\e0d9' + + +.fi-wrench:before + content'\e0da' + + +.fi-x:before + content'\e0db' + + +.fi-yen:before + content'\e0dc' + + +.fi-zoom-in:before + content'\e0dd' + + +.fi-zoom-out:before + content'\e0de' + + diff --git a/static/open-iconic/css/open-iconic.css b/static/open-iconic/css/open-iconic.css new file mode 100644 index 00000000..301a138c --- /dev/null +++ b/static/open-iconic/css/open-iconic.css @@ -0,0 +1,511 @@ + +@font-face { + font-family: 'Icons'; + src: url('../fonts/open-iconic.eot'); + src: url('../fonts/open-iconic.eot?#iconic-sm') format('embedded-opentype'), url('../fonts/open-iconic.woff') format('woff'), url('../fonts/open-iconic.ttf') format('truetype'), url('../fonts/open-iconic.otf') format('opentype'), url('../fonts/open-iconic.svg#iconic-sm') format('svg'); + font-weight: normal; + font-style: normal; +} + +.oi[data-glyph].oi-text-replace { + font-size: 0; + line-height: 0; +} + +.oi[data-glyph].oi-text-replace:before { + width: 1em; + text-align: center; +} + 
+.oi[data-glyph]:before { + font-family: 'Icons'; + display: inline-block; + speak: none; + line-height: 1; + vertical-align: baseline; + font-weight: normal; + font-style: normal; + -webkit-font-smoothing: antialiased; + -moz-osx-font-smoothing: grayscale; +} + +.oi[data-glyph]:empty:before { + width: 1em; + text-align: center; + box-sizing: content-box; +} + +.oi[data-glyph].oi-align-left:before { + text-align: left; +} + +.oi[data-glyph].oi-align-right:before { + text-align: right; +} + +.oi[data-glyph].oi-align-center:before { + text-align: center; +} + +.oi[data-glyph].oi-flip-horizontal:before { + -webkit-transform: scale(-1, 1); + -ms-transform: scale(-1, 1); + transform: scale(-1, 1); +} +.oi[data-glyph].oi-flip-vertical:before { + -webkit-transform: scale(1, -1); + -ms-transform: scale(-1, 1); + transform: scale(1, -1); +} +.oi[data-glyph].oi-flip-horizontal-vertical:before { + -webkit-transform: scale(-1, -1); + -ms-transform: scale(-1, 1); + transform: scale(-1, -1); +} + + +.oi[data-glyph=account-login]:before { content:'\e000'; } + +.oi[data-glyph=account-logout]:before { content:'\e001'; } + +.oi[data-glyph=action-redo]:before { content:'\e002'; } + +.oi[data-glyph=action-undo]:before { content:'\e003'; } + +.oi[data-glyph=align-center]:before { content:'\e004'; } + +.oi[data-glyph=align-left]:before { content:'\e005'; } + +.oi[data-glyph=align-right]:before { content:'\e006'; } + +.oi[data-glyph=aperture]:before { content:'\e007'; } + +.oi[data-glyph=arrow-bottom]:before { content:'\e008'; } + +.oi[data-glyph=arrow-circle-bottom]:before { content:'\e009'; } + +.oi[data-glyph=arrow-circle-left]:before { content:'\e00a'; } + +.oi[data-glyph=arrow-circle-right]:before { content:'\e00b'; } + +.oi[data-glyph=arrow-circle-top]:before { content:'\e00c'; } + +.oi[data-glyph=arrow-left]:before { content:'\e00d'; } + +.oi[data-glyph=arrow-right]:before { content:'\e00e'; } + +.oi[data-glyph=arrow-thick-bottom]:before { content:'\e00f'; } + +.oi[data-glyph=arrow-thick-left]:before { content:'\e010'; } + +.oi[data-glyph=arrow-thick-right]:before { content:'\e011'; } + +.oi[data-glyph=arrow-thick-top]:before { content:'\e012'; } + +.oi[data-glyph=arrow-top]:before { content:'\e013'; } + +.oi[data-glyph=audio-spectrum]:before { content:'\e014'; } + +.oi[data-glyph=audio]:before { content:'\e015'; } + +.oi[data-glyph=badge]:before { content:'\e016'; } + +.oi[data-glyph=ban]:before { content:'\e017'; } + +.oi[data-glyph=bar-chart]:before { content:'\e018'; } + +.oi[data-glyph=basket]:before { content:'\e019'; } + +.oi[data-glyph=battery-empty]:before { content:'\e01a'; } + +.oi[data-glyph=battery-full]:before { content:'\e01b'; } + +.oi[data-glyph=beaker]:before { content:'\e01c'; } + +.oi[data-glyph=bell]:before { content:'\e01d'; } + +.oi[data-glyph=bluetooth]:before { content:'\e01e'; } + +.oi[data-glyph=bold]:before { content:'\e01f'; } + +.oi[data-glyph=bolt]:before { content:'\e020'; } + +.oi[data-glyph=book]:before { content:'\e021'; } + +.oi[data-glyph=bookmark]:before { content:'\e022'; } + +.oi[data-glyph=box]:before { content:'\e023'; } + +.oi[data-glyph=briefcase]:before { content:'\e024'; } + +.oi[data-glyph=british-pound]:before { content:'\e025'; } + +.oi[data-glyph=browser]:before { content:'\e026'; } + +.oi[data-glyph=brush]:before { content:'\e027'; } + +.oi[data-glyph=bug]:before { content:'\e028'; } + +.oi[data-glyph=bullhorn]:before { content:'\e029'; } + +.oi[data-glyph=calculator]:before { content:'\e02a'; } + +.oi[data-glyph=calendar]:before { content:'\e02b'; } + 
+.oi[data-glyph=camera-slr]:before { content:'\e02c'; } + +.oi[data-glyph=caret-bottom]:before { content:'\e02d'; } + +.oi[data-glyph=caret-left]:before { content:'\e02e'; } + +.oi[data-glyph=caret-right]:before { content:'\e02f'; } + +.oi[data-glyph=caret-top]:before { content:'\e030'; } + +.oi[data-glyph=cart]:before { content:'\e031'; } + +.oi[data-glyph=chat]:before { content:'\e032'; } + +.oi[data-glyph=check]:before { content:'\e033'; } + +.oi[data-glyph=chevron-bottom]:before { content:'\e034'; } + +.oi[data-glyph=chevron-left]:before { content:'\e035'; } + +.oi[data-glyph=chevron-right]:before { content:'\e036'; } + +.oi[data-glyph=chevron-top]:before { content:'\e037'; } + +.oi[data-glyph=circle-check]:before { content:'\e038'; } + +.oi[data-glyph=circle-x]:before { content:'\e039'; } + +.oi[data-glyph=clipboard]:before { content:'\e03a'; } + +.oi[data-glyph=clock]:before { content:'\e03b'; } + +.oi[data-glyph=cloud-download]:before { content:'\e03c'; } + +.oi[data-glyph=cloud-upload]:before { content:'\e03d'; } + +.oi[data-glyph=cloud]:before { content:'\e03e'; } + +.oi[data-glyph=cloudy]:before { content:'\e03f'; } + +.oi[data-glyph=code]:before { content:'\e040'; } + +.oi[data-glyph=cog]:before { content:'\e041'; } + +.oi[data-glyph=collapse-down]:before { content:'\e042'; } + +.oi[data-glyph=collapse-left]:before { content:'\e043'; } + +.oi[data-glyph=collapse-right]:before { content:'\e044'; } + +.oi[data-glyph=collapse-up]:before { content:'\e045'; } + +.oi[data-glyph=command]:before { content:'\e046'; } + +.oi[data-glyph=comment-square]:before { content:'\e047'; } + +.oi[data-glyph=compass]:before { content:'\e048'; } + +.oi[data-glyph=contrast]:before { content:'\e049'; } + +.oi[data-glyph=copywriting]:before { content:'\e04a'; } + +.oi[data-glyph=credit-card]:before { content:'\e04b'; } + +.oi[data-glyph=crop]:before { content:'\e04c'; } + +.oi[data-glyph=dashboard]:before { content:'\e04d'; } + +.oi[data-glyph=data-transfer-download]:before { content:'\e04e'; } + +.oi[data-glyph=data-transfer-upload]:before { content:'\e04f'; } + +.oi[data-glyph=delete]:before { content:'\e050'; } + +.oi[data-glyph=dial]:before { content:'\e051'; } + +.oi[data-glyph=document]:before { content:'\e052'; } + +.oi[data-glyph=dollar]:before { content:'\e053'; } + +.oi[data-glyph=double-quote-sans-left]:before { content:'\e054'; } + +.oi[data-glyph=double-quote-sans-right]:before { content:'\e055'; } + +.oi[data-glyph=double-quote-serif-left]:before { content:'\e056'; } + +.oi[data-glyph=double-quote-serif-right]:before { content:'\e057'; } + +.oi[data-glyph=droplet]:before { content:'\e058'; } + +.oi[data-glyph=eject]:before { content:'\e059'; } + +.oi[data-glyph=elevator]:before { content:'\e05a'; } + +.oi[data-glyph=ellipses]:before { content:'\e05b'; } + +.oi[data-glyph=envelope-closed]:before { content:'\e05c'; } + +.oi[data-glyph=envelope-open]:before { content:'\e05d'; } + +.oi[data-glyph=euro]:before { content:'\e05e'; } + +.oi[data-glyph=excerpt]:before { content:'\e05f'; } + +.oi[data-glyph=expand-down]:before { content:'\e060'; } + +.oi[data-glyph=expand-left]:before { content:'\e061'; } + +.oi[data-glyph=expand-right]:before { content:'\e062'; } + +.oi[data-glyph=expand-up]:before { content:'\e063'; } + +.oi[data-glyph=external-link]:before { content:'\e064'; } + +.oi[data-glyph=eye]:before { content:'\e065'; } + +.oi[data-glyph=eyedropper]:before { content:'\e066'; } + +.oi[data-glyph=file]:before { content:'\e067'; } + +.oi[data-glyph=fire]:before { content:'\e068'; } + 
+.oi[data-glyph=flag]:before { content:'\e069'; } + +.oi[data-glyph=flash]:before { content:'\e06a'; } + +.oi[data-glyph=folder]:before { content:'\e06b'; } + +.oi[data-glyph=fork]:before { content:'\e06c'; } + +.oi[data-glyph=fullscreen-enter]:before { content:'\e06d'; } + +.oi[data-glyph=fullscreen-exit]:before { content:'\e06e'; } + +.oi[data-glyph=globe]:before { content:'\e06f'; } + +.oi[data-glyph=graph]:before { content:'\e070'; } + +.oi[data-glyph=grid-four-up]:before { content:'\e071'; } + +.oi[data-glyph=grid-three-up]:before { content:'\e072'; } + +.oi[data-glyph=grid-two-up]:before { content:'\e073'; } + +.oi[data-glyph=hard-drive]:before { content:'\e074'; } + +.oi[data-glyph=header]:before { content:'\e075'; } + +.oi[data-glyph=headphones]:before { content:'\e076'; } + +.oi[data-glyph=heart]:before { content:'\e077'; } + +.oi[data-glyph=home]:before { content:'\e078'; } + +.oi[data-glyph=image]:before { content:'\e079'; } + +.oi[data-glyph=inbox]:before { content:'\e07a'; } + +.oi[data-glyph=infinity]:before { content:'\e07b'; } + +.oi[data-glyph=info]:before { content:'\e07c'; } + +.oi[data-glyph=italic]:before { content:'\e07d'; } + +.oi[data-glyph=justify-center]:before { content:'\e07e'; } + +.oi[data-glyph=justify-left]:before { content:'\e07f'; } + +.oi[data-glyph=justify-right]:before { content:'\e080'; } + +.oi[data-glyph=key]:before { content:'\e081'; } + +.oi[data-glyph=laptop]:before { content:'\e082'; } + +.oi[data-glyph=layers]:before { content:'\e083'; } + +.oi[data-glyph=lightbulb]:before { content:'\e084'; } + +.oi[data-glyph=link-broken]:before { content:'\e085'; } + +.oi[data-glyph=link-intact]:before { content:'\e086'; } + +.oi[data-glyph=list-rich]:before { content:'\e087'; } + +.oi[data-glyph=list]:before { content:'\e088'; } + +.oi[data-glyph=location]:before { content:'\e089'; } + +.oi[data-glyph=lock-locked]:before { content:'\e08a'; } + +.oi[data-glyph=lock-unlocked]:before { content:'\e08b'; } + +.oi[data-glyph=loop-circular]:before { content:'\e08c'; } + +.oi[data-glyph=loop-square]:before { content:'\e08d'; } + +.oi[data-glyph=loop]:before { content:'\e08e'; } + +.oi[data-glyph=magnifying-glass]:before { content:'\e08f'; } + +.oi[data-glyph=map-marker]:before { content:'\e090'; } + +.oi[data-glyph=map]:before { content:'\e091'; } + +.oi[data-glyph=media-pause]:before { content:'\e092'; } + +.oi[data-glyph=media-play]:before { content:'\e093'; } + +.oi[data-glyph=media-record]:before { content:'\e094'; } + +.oi[data-glyph=media-skip-backward]:before { content:'\e095'; } + +.oi[data-glyph=media-skip-forward]:before { content:'\e096'; } + +.oi[data-glyph=media-step-backward]:before { content:'\e097'; } + +.oi[data-glyph=media-step-forward]:before { content:'\e098'; } + +.oi[data-glyph=media-stop]:before { content:'\e099'; } + +.oi[data-glyph=medical-cross]:before { content:'\e09a'; } + +.oi[data-glyph=menu]:before { content:'\e09b'; } + +.oi[data-glyph=microphone]:before { content:'\e09c'; } + +.oi[data-glyph=minus]:before { content:'\e09d'; } + +.oi[data-glyph=monitor]:before { content:'\e09e'; } + +.oi[data-glyph=moon]:before { content:'\e09f'; } + +.oi[data-glyph=move]:before { content:'\e0a0'; } + +.oi[data-glyph=musical-note]:before { content:'\e0a1'; } + +.oi[data-glyph=paperclip]:before { content:'\e0a2'; } + +.oi[data-glyph=pencil]:before { content:'\e0a3'; } + +.oi[data-glyph=people]:before { content:'\e0a4'; } + +.oi[data-glyph=person]:before { content:'\e0a5'; } + +.oi[data-glyph=phone]:before { content:'\e0a6'; } + 
+.oi[data-glyph=pie-chart]:before { content:'\e0a7'; } + +.oi[data-glyph=pin]:before { content:'\e0a8'; } + +.oi[data-glyph=play-circle]:before { content:'\e0a9'; } + +.oi[data-glyph=plus]:before { content:'\e0aa'; } + +.oi[data-glyph=power-standby]:before { content:'\e0ab'; } + +.oi[data-glyph=print]:before { content:'\e0ac'; } + +.oi[data-glyph=project]:before { content:'\e0ad'; } + +.oi[data-glyph=pulse]:before { content:'\e0ae'; } + +.oi[data-glyph=puzzle-piece]:before { content:'\e0af'; } + +.oi[data-glyph=question-mark]:before { content:'\e0b0'; } + +.oi[data-glyph=rain]:before { content:'\e0b1'; } + +.oi[data-glyph=random]:before { content:'\e0b2'; } + +.oi[data-glyph=reload]:before { content:'\e0b3'; } + +.oi[data-glyph=resize-both]:before { content:'\e0b4'; } + +.oi[data-glyph=resize-height]:before { content:'\e0b5'; } + +.oi[data-glyph=resize-width]:before { content:'\e0b6'; } + +.oi[data-glyph=rss-alt]:before { content:'\e0b7'; } + +.oi[data-glyph=rss]:before { content:'\e0b8'; } + +.oi[data-glyph=script]:before { content:'\e0b9'; } + +.oi[data-glyph=share-boxed]:before { content:'\e0ba'; } + +.oi[data-glyph=share]:before { content:'\e0bb'; } + +.oi[data-glyph=shield]:before { content:'\e0bc'; } + +.oi[data-glyph=signal]:before { content:'\e0bd'; } + +.oi[data-glyph=signpost]:before { content:'\e0be'; } + +.oi[data-glyph=sort-ascending]:before { content:'\e0bf'; } + +.oi[data-glyph=sort-descending]:before { content:'\e0c0'; } + +.oi[data-glyph=spreadsheet]:before { content:'\e0c1'; } + +.oi[data-glyph=star]:before { content:'\e0c2'; } + +.oi[data-glyph=sun]:before { content:'\e0c3'; } + +.oi[data-glyph=tablet]:before { content:'\e0c4'; } + +.oi[data-glyph=tag]:before { content:'\e0c5'; } + +.oi[data-glyph=tags]:before { content:'\e0c6'; } + +.oi[data-glyph=target]:before { content:'\e0c7'; } + +.oi[data-glyph=task]:before { content:'\e0c8'; } + +.oi[data-glyph=terminal]:before { content:'\e0c9'; } + +.oi[data-glyph=text]:before { content:'\e0ca'; } + +.oi[data-glyph=thumb-down]:before { content:'\e0cb'; } + +.oi[data-glyph=thumb-up]:before { content:'\e0cc'; } + +.oi[data-glyph=timer]:before { content:'\e0cd'; } + +.oi[data-glyph=transfer]:before { content:'\e0ce'; } + +.oi[data-glyph=trash]:before { content:'\e0cf'; } + +.oi[data-glyph=underline]:before { content:'\e0d0'; } + +.oi[data-glyph=vertical-align-bottom]:before { content:'\e0d1'; } + +.oi[data-glyph=vertical-align-center]:before { content:'\e0d2'; } + +.oi[data-glyph=vertical-align-top]:before { content:'\e0d3'; } + +.oi[data-glyph=video]:before { content:'\e0d4'; } + +.oi[data-glyph=volume-high]:before { content:'\e0d5'; } + +.oi[data-glyph=volume-low]:before { content:'\e0d6'; } + +.oi[data-glyph=volume-off]:before { content:'\e0d7'; } + +.oi[data-glyph=warning]:before { content:'\e0d8'; } + +.oi[data-glyph=wifi]:before { content:'\e0d9'; } + +.oi[data-glyph=wrench]:before { content:'\e0da'; } + +.oi[data-glyph=x]:before { content:'\e0db'; } + +.oi[data-glyph=yen]:before { content:'\e0dc'; } + +.oi[data-glyph=zoom-in]:before { content:'\e0dd'; } + +.oi[data-glyph=zoom-out]:before { content:'\e0de'; } diff --git a/static/open-iconic/css/open-iconic.less b/static/open-iconic/css/open-iconic.less new file mode 100644 index 00000000..d505e9f2 --- /dev/null +++ b/static/open-iconic/css/open-iconic.less @@ -0,0 +1,962 @@ +@iconic-font-path: '../fonts/'; + +@font-face { + font-family: 'Icons'; + src: url('@{iconic-font-path}open-iconic.eot'); + src: url('@{iconic-font-path}open-iconic.eot?#iconic-sm') 
format('embedded-opentype'), url('@{iconic-font-path}open-iconic.woff') format('woff'), url('@{iconic-font-path}open-iconic.ttf') format('truetype'), url('@{iconic-font-path}open-iconic.otf') format('opentype'), url('@{iconic-font-path}open-iconic.svg#iconic-sm') format('svg'); + font-weight: normal; + font-style: normal; +} + +.oi[data-glyph].oi-text-replace { + font-size: 0; + line-height: 0; +} + +.oi[data-glyph].oi-text-replace:before { + width: 1em; + text-align: center; +} + +.oi[data-glyph] { + &:before { + position: relative; + top: 1px; + font-family: 'Icons'; + display: inline-block; + speak: none; + line-height: 1; + vertical-align: baseline; + font-weight: normal; + font-style: normal; + -webkit-font-smoothing: antialiased; + -moz-osx-font-smoothing: grayscale; + } + + &:empty:before { + width: 1em; + text-align: center; + box-sizing: content-box; + } + + &.oi-align-left:before { + text-align: left; + } + &.oi-align-right:before { + text-align: right; + } + &.oi-align-center:before { + text-align: center; + } + + &.oi-flip-horizontal:before { + -webkit-transform: scale(-1, 1); + -ms-transform: scale(-1, 1); + transform: scale(-1, 1); + } + + &.oi-flip-vertical:before { + -webkit-transform: scale(1, -1); + -ms-transform: scale(-1, 1); + transform: scale(1, -1); + } + + &.oi-flip-horizontal-vertical:before { + -webkit-transform: scale(-1, -1); + -ms-transform: scale(-1, 1); + transform: scale(-1, -1); + } +} + + +.oi[data-glyph=account-login]:before { + content: '\e000'; +} + +.oi[data-glyph=account-logout]:before { + content: '\e001'; +} + +.oi[data-glyph=action-redo]:before { + content: '\e002'; +} + +.oi[data-glyph=action-undo]:before { + content: '\e003'; +} + +.oi[data-glyph=align-center]:before { + content: '\e004'; +} + +.oi[data-glyph=align-left]:before { + content: '\e005'; +} + +.oi[data-glyph=align-right]:before { + content: '\e006'; +} + +.oi[data-glyph=aperture]:before { + content: '\e007'; +} + +.oi[data-glyph=arrow-bottom]:before { + content: '\e008'; +} + +.oi[data-glyph=arrow-circle-bottom]:before { + content: '\e009'; +} + +.oi[data-glyph=arrow-circle-left]:before { + content: '\e00a'; +} + +.oi[data-glyph=arrow-circle-right]:before { + content: '\e00b'; +} + +.oi[data-glyph=arrow-circle-top]:before { + content: '\e00c'; +} + +.oi[data-glyph=arrow-left]:before { + content: '\e00d'; +} + +.oi[data-glyph=arrow-right]:before { + content: '\e00e'; +} + +.oi[data-glyph=arrow-thick-bottom]:before { + content: '\e00f'; +} + +.oi[data-glyph=arrow-thick-left]:before { + content: '\e010'; +} + +.oi[data-glyph=arrow-thick-right]:before { + content: '\e011'; +} + +.oi[data-glyph=arrow-thick-top]:before { + content: '\e012'; +} + +.oi[data-glyph=arrow-top]:before { + content: '\e013'; +} + +.oi[data-glyph=audio-spectrum]:before { + content: '\e014'; +} + +.oi[data-glyph=audio]:before { + content: '\e015'; +} + +.oi[data-glyph=badge]:before { + content: '\e016'; +} + +.oi[data-glyph=ban]:before { + content: '\e017'; +} + +.oi[data-glyph=bar-chart]:before { + content: '\e018'; +} + +.oi[data-glyph=basket]:before { + content: '\e019'; +} + +.oi[data-glyph=battery-empty]:before { + content: '\e01a'; +} + +.oi[data-glyph=battery-full]:before { + content: '\e01b'; +} + +.oi[data-glyph=beaker]:before { + content: '\e01c'; +} + +.oi[data-glyph=bell]:before { + content: '\e01d'; +} + +.oi[data-glyph=bluetooth]:before { + content: '\e01e'; +} + +.oi[data-glyph=bold]:before { + content: '\e01f'; +} + +.oi[data-glyph=bolt]:before { + content: '\e020'; +} + +.oi[data-glyph=book]:before { 
+ content: '\e021'; +} + +.oi[data-glyph=bookmark]:before { + content: '\e022'; +} + +.oi[data-glyph=box]:before { + content: '\e023'; +} + +.oi[data-glyph=briefcase]:before { + content: '\e024'; +} + +.oi[data-glyph=british-pound]:before { + content: '\e025'; +} + +.oi[data-glyph=browser]:before { + content: '\e026'; +} + +.oi[data-glyph=brush]:before { + content: '\e027'; +} + +.oi[data-glyph=bug]:before { + content: '\e028'; +} + +.oi[data-glyph=bullhorn]:before { + content: '\e029'; +} + +.oi[data-glyph=calculator]:before { + content: '\e02a'; +} + +.oi[data-glyph=calendar]:before { + content: '\e02b'; +} + +.oi[data-glyph=camera-slr]:before { + content: '\e02c'; +} + +.oi[data-glyph=caret-bottom]:before { + content: '\e02d'; +} + +.oi[data-glyph=caret-left]:before { + content: '\e02e'; +} + +.oi[data-glyph=caret-right]:before { + content: '\e02f'; +} + +.oi[data-glyph=caret-top]:before { + content: '\e030'; +} + +.oi[data-glyph=cart]:before { + content: '\e031'; +} + +.oi[data-glyph=chat]:before { + content: '\e032'; +} + +.oi[data-glyph=check]:before { + content: '\e033'; +} + +.oi[data-glyph=chevron-bottom]:before { + content: '\e034'; +} + +.oi[data-glyph=chevron-left]:before { + content: '\e035'; +} + +.oi[data-glyph=chevron-right]:before { + content: '\e036'; +} + +.oi[data-glyph=chevron-top]:before { + content: '\e037'; +} + +.oi[data-glyph=circle-check]:before { + content: '\e038'; +} + +.oi[data-glyph=circle-x]:before { + content: '\e039'; +} + +.oi[data-glyph=clipboard]:before { + content: '\e03a'; +} + +.oi[data-glyph=clock]:before { + content: '\e03b'; +} + +.oi[data-glyph=cloud-download]:before { + content: '\e03c'; +} + +.oi[data-glyph=cloud-upload]:before { + content: '\e03d'; +} + +.oi[data-glyph=cloud]:before { + content: '\e03e'; +} + +.oi[data-glyph=cloudy]:before { + content: '\e03f'; +} + +.oi[data-glyph=code]:before { + content: '\e040'; +} + +.oi[data-glyph=cog]:before { + content: '\e041'; +} + +.oi[data-glyph=collapse-down]:before { + content: '\e042'; +} + +.oi[data-glyph=collapse-left]:before { + content: '\e043'; +} + +.oi[data-glyph=collapse-right]:before { + content: '\e044'; +} + +.oi[data-glyph=collapse-up]:before { + content: '\e045'; +} + +.oi[data-glyph=command]:before { + content: '\e046'; +} + +.oi[data-glyph=comment-square]:before { + content: '\e047'; +} + +.oi[data-glyph=compass]:before { + content: '\e048'; +} + +.oi[data-glyph=contrast]:before { + content: '\e049'; +} + +.oi[data-glyph=copywriting]:before { + content: '\e04a'; +} + +.oi[data-glyph=credit-card]:before { + content: '\e04b'; +} + +.oi[data-glyph=crop]:before { + content: '\e04c'; +} + +.oi[data-glyph=dashboard]:before { + content: '\e04d'; +} + +.oi[data-glyph=data-transfer-download]:before { + content: '\e04e'; +} + +.oi[data-glyph=data-transfer-upload]:before { + content: '\e04f'; +} + +.oi[data-glyph=delete]:before { + content: '\e050'; +} + +.oi[data-glyph=dial]:before { + content: '\e051'; +} + +.oi[data-glyph=document]:before { + content: '\e052'; +} + +.oi[data-glyph=dollar]:before { + content: '\e053'; +} + +.oi[data-glyph=double-quote-sans-left]:before { + content: '\e054'; +} + +.oi[data-glyph=double-quote-sans-right]:before { + content: '\e055'; +} + +.oi[data-glyph=double-quote-serif-left]:before { + content: '\e056'; +} + +.oi[data-glyph=double-quote-serif-right]:before { + content: '\e057'; +} + +.oi[data-glyph=droplet]:before { + content: '\e058'; +} + +.oi[data-glyph=eject]:before { + content: '\e059'; +} + +.oi[data-glyph=elevator]:before { + content: '\e05a'; +} 
+ +.oi[data-glyph=ellipses]:before { + content: '\e05b'; +} + +.oi[data-glyph=envelope-closed]:before { + content: '\e05c'; +} + +.oi[data-glyph=envelope-open]:before { + content: '\e05d'; +} + +.oi[data-glyph=euro]:before { + content: '\e05e'; +} + +.oi[data-glyph=excerpt]:before { + content: '\e05f'; +} + +.oi[data-glyph=expand-down]:before { + content: '\e060'; +} + +.oi[data-glyph=expand-left]:before { + content: '\e061'; +} + +.oi[data-glyph=expand-right]:before { + content: '\e062'; +} + +.oi[data-glyph=expand-up]:before { + content: '\e063'; +} + +.oi[data-glyph=external-link]:before { + content: '\e064'; +} + +.oi[data-glyph=eye]:before { + content: '\e065'; +} + +.oi[data-glyph=eyedropper]:before { + content: '\e066'; +} + +.oi[data-glyph=file]:before { + content: '\e067'; +} + +.oi[data-glyph=fire]:before { + content: '\e068'; +} + +.oi[data-glyph=flag]:before { + content: '\e069'; +} + +.oi[data-glyph=flash]:before { + content: '\e06a'; +} + +.oi[data-glyph=folder]:before { + content: '\e06b'; +} + +.oi[data-glyph=fork]:before { + content: '\e06c'; +} + +.oi[data-glyph=fullscreen-enter]:before { + content: '\e06d'; +} + +.oi[data-glyph=fullscreen-exit]:before { + content: '\e06e'; +} + +.oi[data-glyph=globe]:before { + content: '\e06f'; +} + +.oi[data-glyph=graph]:before { + content: '\e070'; +} + +.oi[data-glyph=grid-four-up]:before { + content: '\e071'; +} + +.oi[data-glyph=grid-three-up]:before { + content: '\e072'; +} + +.oi[data-glyph=grid-two-up]:before { + content: '\e073'; +} + +.oi[data-glyph=hard-drive]:before { + content: '\e074'; +} + +.oi[data-glyph=header]:before { + content: '\e075'; +} + +.oi[data-glyph=headphones]:before { + content: '\e076'; +} + +.oi[data-glyph=heart]:before { + content: '\e077'; +} + +.oi[data-glyph=home]:before { + content: '\e078'; +} + +.oi[data-glyph=image]:before { + content: '\e079'; +} + +.oi[data-glyph=inbox]:before { + content: '\e07a'; +} + +.oi[data-glyph=infinity]:before { + content: '\e07b'; +} + +.oi[data-glyph=info]:before { + content: '\e07c'; +} + +.oi[data-glyph=italic]:before { + content: '\e07d'; +} + +.oi[data-glyph=justify-center]:before { + content: '\e07e'; +} + +.oi[data-glyph=justify-left]:before { + content: '\e07f'; +} + +.oi[data-glyph=justify-right]:before { + content: '\e080'; +} + +.oi[data-glyph=key]:before { + content: '\e081'; +} + +.oi[data-glyph=laptop]:before { + content: '\e082'; +} + +.oi[data-glyph=layers]:before { + content: '\e083'; +} + +.oi[data-glyph=lightbulb]:before { + content: '\e084'; +} + +.oi[data-glyph=link-broken]:before { + content: '\e085'; +} + +.oi[data-glyph=link-intact]:before { + content: '\e086'; +} + +.oi[data-glyph=list-rich]:before { + content: '\e087'; +} + +.oi[data-glyph=list]:before { + content: '\e088'; +} + +.oi[data-glyph=location]:before { + content: '\e089'; +} + +.oi[data-glyph=lock-locked]:before { + content: '\e08a'; +} + +.oi[data-glyph=lock-unlocked]:before { + content: '\e08b'; +} + +.oi[data-glyph=loop-circular]:before { + content: '\e08c'; +} + +.oi[data-glyph=loop-square]:before { + content: '\e08d'; +} + +.oi[data-glyph=loop]:before { + content: '\e08e'; +} + +.oi[data-glyph=magnifying-glass]:before { + content: '\e08f'; +} + +.oi[data-glyph=map-marker]:before { + content: '\e090'; +} + +.oi[data-glyph=map]:before { + content: '\e091'; +} + +.oi[data-glyph=media-pause]:before { + content: '\e092'; +} + +.oi[data-glyph=media-play]:before { + content: '\e093'; +} + +.oi[data-glyph=media-record]:before { + content: '\e094'; +} + 
+.oi[data-glyph=media-skip-backward]:before { + content: '\e095'; +} + +.oi[data-glyph=media-skip-forward]:before { + content: '\e096'; +} + +.oi[data-glyph=media-step-backward]:before { + content: '\e097'; +} + +.oi[data-glyph=media-step-forward]:before { + content: '\e098'; +} + +.oi[data-glyph=media-stop]:before { + content: '\e099'; +} + +.oi[data-glyph=medical-cross]:before { + content: '\e09a'; +} + +.oi[data-glyph=menu]:before { + content: '\e09b'; +} + +.oi[data-glyph=microphone]:before { + content: '\e09c'; +} + +.oi[data-glyph=minus]:before { + content: '\e09d'; +} + +.oi[data-glyph=monitor]:before { + content: '\e09e'; +} + +.oi[data-glyph=moon]:before { + content: '\e09f'; +} + +.oi[data-glyph=move]:before { + content: '\e0a0'; +} + +.oi[data-glyph=musical-note]:before { + content: '\e0a1'; +} + +.oi[data-glyph=paperclip]:before { + content: '\e0a2'; +} + +.oi[data-glyph=pencil]:before { + content: '\e0a3'; +} + +.oi[data-glyph=people]:before { + content: '\e0a4'; +} + +.oi[data-glyph=person]:before { + content: '\e0a5'; +} + +.oi[data-glyph=phone]:before { + content: '\e0a6'; +} + +.oi[data-glyph=pie-chart]:before { + content: '\e0a7'; +} + +.oi[data-glyph=pin]:before { + content: '\e0a8'; +} + +.oi[data-glyph=play-circle]:before { + content: '\e0a9'; +} + +.oi[data-glyph=plus]:before { + content: '\e0aa'; +} + +.oi[data-glyph=power-standby]:before { + content: '\e0ab'; +} + +.oi[data-glyph=print]:before { + content: '\e0ac'; +} + +.oi[data-glyph=project]:before { + content: '\e0ad'; +} + +.oi[data-glyph=pulse]:before { + content: '\e0ae'; +} + +.oi[data-glyph=puzzle-piece]:before { + content: '\e0af'; +} + +.oi[data-glyph=question-mark]:before { + content: '\e0b0'; +} + +.oi[data-glyph=rain]:before { + content: '\e0b1'; +} + +.oi[data-glyph=random]:before { + content: '\e0b2'; +} + +.oi[data-glyph=reload]:before { + content: '\e0b3'; +} + +.oi[data-glyph=resize-both]:before { + content: '\e0b4'; +} + +.oi[data-glyph=resize-height]:before { + content: '\e0b5'; +} + +.oi[data-glyph=resize-width]:before { + content: '\e0b6'; +} + +.oi[data-glyph=rss-alt]:before { + content: '\e0b7'; +} + +.oi[data-glyph=rss]:before { + content: '\e0b8'; +} + +.oi[data-glyph=script]:before { + content: '\e0b9'; +} + +.oi[data-glyph=share-boxed]:before { + content: '\e0ba'; +} + +.oi[data-glyph=share]:before { + content: '\e0bb'; +} + +.oi[data-glyph=shield]:before { + content: '\e0bc'; +} + +.oi[data-glyph=signal]:before { + content: '\e0bd'; +} + +.oi[data-glyph=signpost]:before { + content: '\e0be'; +} + +.oi[data-glyph=sort-ascending]:before { + content: '\e0bf'; +} + +.oi[data-glyph=sort-descending]:before { + content: '\e0c0'; +} + +.oi[data-glyph=spreadsheet]:before { + content: '\e0c1'; +} + +.oi[data-glyph=star]:before { + content: '\e0c2'; +} + +.oi[data-glyph=sun]:before { + content: '\e0c3'; +} + +.oi[data-glyph=tablet]:before { + content: '\e0c4'; +} + +.oi[data-glyph=tag]:before { + content: '\e0c5'; +} + +.oi[data-glyph=tags]:before { + content: '\e0c6'; +} + +.oi[data-glyph=target]:before { + content: '\e0c7'; +} + +.oi[data-glyph=task]:before { + content: '\e0c8'; +} + +.oi[data-glyph=terminal]:before { + content: '\e0c9'; +} + +.oi[data-glyph=text]:before { + content: '\e0ca'; +} + +.oi[data-glyph=thumb-down]:before { + content: '\e0cb'; +} + +.oi[data-glyph=thumb-up]:before { + content: '\e0cc'; +} + +.oi[data-glyph=timer]:before { + content: '\e0cd'; +} + +.oi[data-glyph=transfer]:before { + content: '\e0ce'; +} + +.oi[data-glyph=trash]:before { + content: '\e0cf'; +} + 
+.oi[data-glyph=underline]:before { + content: '\e0d0'; +} + +.oi[data-glyph=vertical-align-bottom]:before { + content: '\e0d1'; +} + +.oi[data-glyph=vertical-align-center]:before { + content: '\e0d2'; +} + +.oi[data-glyph=vertical-align-top]:before { + content: '\e0d3'; +} + +.oi[data-glyph=video]:before { + content: '\e0d4'; +} + +.oi[data-glyph=volume-high]:before { + content: '\e0d5'; +} + +.oi[data-glyph=volume-low]:before { + content: '\e0d6'; +} + +.oi[data-glyph=volume-off]:before { + content: '\e0d7'; +} + +.oi[data-glyph=warning]:before { + content: '\e0d8'; +} + +.oi[data-glyph=wifi]:before { + content: '\e0d9'; +} + +.oi[data-glyph=wrench]:before { + content: '\e0da'; +} + +.oi[data-glyph=x]:before { + content: '\e0db'; +} + +.oi[data-glyph=yen]:before { + content: '\e0dc'; +} + +.oi[data-glyph=zoom-in]:before { + content: '\e0dd'; +} + +.oi[data-glyph=zoom-out]:before { + content: '\e0de'; +} diff --git a/static/open-iconic/css/open-iconic.min.css b/static/open-iconic/css/open-iconic.min.css new file mode 100644 index 00000000..1f6afb82 --- /dev/null +++ b/static/open-iconic/css/open-iconic.min.css @@ -0,0 +1 @@ +@font-face{font-family:Icons;src:url(../fonts/open-iconic.eot);src:url(../fonts/open-iconic.eot?#iconic-sm) format('embedded-opentype'),url(../fonts/open-iconic.woff) format('woff'),url(../fonts/open-iconic.ttf) format('truetype'),url(../fonts/open-iconic.otf) format('opentype'),url(../fonts/open-iconic.svg#iconic-sm) format('svg');font-weight:400;font-style:normal}.oi[data-glyph].oi-text-replace{font-size:0;line-height:0}.oi[data-glyph].oi-text-replace:before{width:1em;text-align:center}.oi[data-glyph]:before{font-family:Icons;display:inline-block;speak:none;line-height:1;vertical-align:baseline;font-weight:400;font-style:normal;-webkit-font-smoothing:antialiased;-moz-osx-font-smoothing:grayscale}.oi[data-glyph]:empty:before{width:1em;text-align:center;box-sizing:content-box}.oi[data-glyph].oi-align-left:before{text-align:left}.oi[data-glyph].oi-align-right:before{text-align:right}.oi[data-glyph].oi-align-center:before{text-align:center}.oi[data-glyph].oi-flip-horizontal:before{-webkit-transform:scale(-1,1);-ms-transform:scale(-1,1);transform:scale(-1,1)}.oi[data-glyph].oi-flip-vertical:before{-webkit-transform:scale(1,-1);-ms-transform:scale(-1,1);transform:scale(1,-1)}.oi[data-glyph].oi-flip-horizontal-vertical:before{-webkit-transform:scale(-1,-1);-ms-transform:scale(-1,1);transform:scale(-1,-1)}.oi[data-glyph=account-login]:before{content:'\e000'}.oi[data-glyph=account-logout]:before{content:'\e001'}.oi[data-glyph=action-redo]:before{content:'\e002'}.oi[data-glyph=action-undo]:before{content:'\e003'}.oi[data-glyph=align-center]:before{content:'\e004'}.oi[data-glyph=align-left]:before{content:'\e005'}.oi[data-glyph=align-right]:before{content:'\e006'}.oi[data-glyph=aperture]:before{content:'\e007'}.oi[data-glyph=arrow-bottom]:before{content:'\e008'}.oi[data-glyph=arrow-circle-bottom]:before{content:'\e009'}.oi[data-glyph=arrow-circle-left]:before{content:'\e00a'}.oi[data-glyph=arrow-circle-right]:before{content:'\e00b'}.oi[data-glyph=arrow-circle-top]:before{content:'\e00c'}.oi[data-glyph=arrow-left]:before{content:'\e00d'}.oi[data-glyph=arrow-right]:before{content:'\e00e'}.oi[data-glyph=arrow-thick-bottom]:before{content:'\e00f'}.oi[data-glyph=arrow-thick-left]:before{content:'\e010'}.oi[data-glyph=arrow-thick-right]:before{content:'\e011'}.oi[data-glyph=arrow-thick-top]:before{content:'\e012'}.oi[data-glyph=arrow-top]:before{content:'\e013'}.oi[data-glyph=audio-s
pectrum]:before{content:'\e014'}.oi[data-glyph=audio]:before{content:'\e015'}.oi[data-glyph=badge]:before{content:'\e016'}.oi[data-glyph=ban]:before{content:'\e017'}.oi[data-glyph=bar-chart]:before{content:'\e018'}.oi[data-glyph=basket]:before{content:'\e019'}.oi[data-glyph=battery-empty]:before{content:'\e01a'}.oi[data-glyph=battery-full]:before{content:'\e01b'}.oi[data-glyph=beaker]:before{content:'\e01c'}.oi[data-glyph=bell]:before{content:'\e01d'}.oi[data-glyph=bluetooth]:before{content:'\e01e'}.oi[data-glyph=bold]:before{content:'\e01f'}.oi[data-glyph=bolt]:before{content:'\e020'}.oi[data-glyph=book]:before{content:'\e021'}.oi[data-glyph=bookmark]:before{content:'\e022'}.oi[data-glyph=box]:before{content:'\e023'}.oi[data-glyph=briefcase]:before{content:'\e024'}.oi[data-glyph=british-pound]:before{content:'\e025'}.oi[data-glyph=browser]:before{content:'\e026'}.oi[data-glyph=brush]:before{content:'\e027'}.oi[data-glyph=bug]:before{content:'\e028'}.oi[data-glyph=bullhorn]:before{content:'\e029'}.oi[data-glyph=calculator]:before{content:'\e02a'}.oi[data-glyph=calendar]:before{content:'\e02b'}.oi[data-glyph=camera-slr]:before{content:'\e02c'}.oi[data-glyph=caret-bottom]:before{content:'\e02d'}.oi[data-glyph=caret-left]:before{content:'\e02e'}.oi[data-glyph=caret-right]:before{content:'\e02f'}.oi[data-glyph=caret-top]:before{content:'\e030'}.oi[data-glyph=cart]:before{content:'\e031'}.oi[data-glyph=chat]:before{content:'\e032'}.oi[data-glyph=check]:before{content:'\e033'}.oi[data-glyph=chevron-bottom]:before{content:'\e034'}.oi[data-glyph=chevron-left]:before{content:'\e035'}.oi[data-glyph=chevron-right]:before{content:'\e036'}.oi[data-glyph=chevron-top]:before{content:'\e037'}.oi[data-glyph=circle-check]:before{content:'\e038'}.oi[data-glyph=circle-x]:before{content:'\e039'}.oi[data-glyph=clipboard]:before{content:'\e03a'}.oi[data-glyph=clock]:before{content:'\e03b'}.oi[data-glyph=cloud-download]:before{content:'\e03c'}.oi[data-glyph=cloud-upload]:before{content:'\e03d'}.oi[data-glyph=cloud]:before{content:'\e03e'}.oi[data-glyph=cloudy]:before{content:'\e03f'}.oi[data-glyph=code]:before{content:'\e040'}.oi[data-glyph=cog]:before{content:'\e041'}.oi[data-glyph=collapse-down]:before{content:'\e042'}.oi[data-glyph=collapse-left]:before{content:'\e043'}.oi[data-glyph=collapse-right]:before{content:'\e044'}.oi[data-glyph=collapse-up]:before{content:'\e045'}.oi[data-glyph=command]:before{content:'\e046'}.oi[data-glyph=comment-square]:before{content:'\e047'}.oi[data-glyph=compass]:before{content:'\e048'}.oi[data-glyph=contrast]:before{content:'\e049'}.oi[data-glyph=copywriting]:before{content:'\e04a'}.oi[data-glyph=credit-card]:before{content:'\e04b'}.oi[data-glyph=crop]:before{content:'\e04c'}.oi[data-glyph=dashboard]:before{content:'\e04d'}.oi[data-glyph=data-transfer-download]:before{content:'\e04e'}.oi[data-glyph=data-transfer-upload]:before{content:'\e04f'}.oi[data-glyph=delete]:before{content:'\e050'}.oi[data-glyph=dial]:before{content:'\e051'}.oi[data-glyph=document]:before{content:'\e052'}.oi[data-glyph=dollar]:before{content:'\e053'}.oi[data-glyph=double-quote-sans-left]:before{content:'\e054'}.oi[data-glyph=double-quote-sans-right]:before{content:'\e055'}.oi[data-glyph=double-quote-serif-left]:before{content:'\e056'}.oi[data-glyph=double-quote-serif-right]:before{content:'\e057'}.oi[data-glyph=droplet]:before{content:'\e058'}.oi[data-glyph=eject]:before{content:'\e059'}.oi[data-glyph=elevator]:before{content:'\e05a'}.oi[data-glyph=ellipses]:before{content:'\e05b'}.oi[data-glyph=envelope-
closed]:before{content:'\e05c'}.oi[data-glyph=envelope-open]:before{content:'\e05d'}.oi[data-glyph=euro]:before{content:'\e05e'}.oi[data-glyph=excerpt]:before{content:'\e05f'}.oi[data-glyph=expand-down]:before{content:'\e060'}.oi[data-glyph=expand-left]:before{content:'\e061'}.oi[data-glyph=expand-right]:before{content:'\e062'}.oi[data-glyph=expand-up]:before{content:'\e063'}.oi[data-glyph=external-link]:before{content:'\e064'}.oi[data-glyph=eye]:before{content:'\e065'}.oi[data-glyph=eyedropper]:before{content:'\e066'}.oi[data-glyph=file]:before{content:'\e067'}.oi[data-glyph=fire]:before{content:'\e068'}.oi[data-glyph=flag]:before{content:'\e069'}.oi[data-glyph=flash]:before{content:'\e06a'}.oi[data-glyph=folder]:before{content:'\e06b'}.oi[data-glyph=fork]:before{content:'\e06c'}.oi[data-glyph=fullscreen-enter]:before{content:'\e06d'}.oi[data-glyph=fullscreen-exit]:before{content:'\e06e'}.oi[data-glyph=globe]:before{content:'\e06f'}.oi[data-glyph=graph]:before{content:'\e070'}.oi[data-glyph=grid-four-up]:before{content:'\e071'}.oi[data-glyph=grid-three-up]:before{content:'\e072'}.oi[data-glyph=grid-two-up]:before{content:'\e073'}.oi[data-glyph=hard-drive]:before{content:'\e074'}.oi[data-glyph=header]:before{content:'\e075'}.oi[data-glyph=headphones]:before{content:'\e076'}.oi[data-glyph=heart]:before{content:'\e077'}.oi[data-glyph=home]:before{content:'\e078'}.oi[data-glyph=image]:before{content:'\e079'}.oi[data-glyph=inbox]:before{content:'\e07a'}.oi[data-glyph=infinity]:before{content:'\e07b'}.oi[data-glyph=info]:before{content:'\e07c'}.oi[data-glyph=italic]:before{content:'\e07d'}.oi[data-glyph=justify-center]:before{content:'\e07e'}.oi[data-glyph=justify-left]:before{content:'\e07f'}.oi[data-glyph=justify-right]:before{content:'\e080'}.oi[data-glyph=key]:before{content:'\e081'}.oi[data-glyph=laptop]:before{content:'\e082'}.oi[data-glyph=layers]:before{content:'\e083'}.oi[data-glyph=lightbulb]:before{content:'\e084'}.oi[data-glyph=link-broken]:before{content:'\e085'}.oi[data-glyph=link-intact]:before{content:'\e086'}.oi[data-glyph=list-rich]:before{content:'\e087'}.oi[data-glyph=list]:before{content:'\e088'}.oi[data-glyph=location]:before{content:'\e089'}.oi[data-glyph=lock-locked]:before{content:'\e08a'}.oi[data-glyph=lock-unlocked]:before{content:'\e08b'}.oi[data-glyph=loop-circular]:before{content:'\e08c'}.oi[data-glyph=loop-square]:before{content:'\e08d'}.oi[data-glyph=loop]:before{content:'\e08e'}.oi[data-glyph=magnifying-glass]:before{content:'\e08f'}.oi[data-glyph=map-marker]:before{content:'\e090'}.oi[data-glyph=map]:before{content:'\e091'}.oi[data-glyph=media-pause]:before{content:'\e092'}.oi[data-glyph=media-play]:before{content:'\e093'}.oi[data-glyph=media-record]:before{content:'\e094'}.oi[data-glyph=media-skip-backward]:before{content:'\e095'}.oi[data-glyph=media-skip-forward]:before{content:'\e096'}.oi[data-glyph=media-step-backward]:before{content:'\e097'}.oi[data-glyph=media-step-forward]:before{content:'\e098'}.oi[data-glyph=media-stop]:before{content:'\e099'}.oi[data-glyph=medical-cross]:before{content:'\e09a'}.oi[data-glyph=menu]:before{content:'\e09b'}.oi[data-glyph=microphone]:before{content:'\e09c'}.oi[data-glyph=minus]:before{content:'\e09d'}.oi[data-glyph=monitor]:before{content:'\e09e'}.oi[data-glyph=moon]:before{content:'\e09f'}.oi[data-glyph=move]:before{content:'\e0a0'}.oi[data-glyph=musical-note]:before{content:'\e0a1'}.oi[data-glyph=paperclip]:before{content:'\e0a2'}.oi[data-glyph=pencil]:before{content:'\e0a3'}.oi[data-glyph=people]:before{content:'\e0a4'}
.oi[data-glyph=person]:before{content:'\e0a5'}.oi[data-glyph=phone]:before{content:'\e0a6'}.oi[data-glyph=pie-chart]:before{content:'\e0a7'}.oi[data-glyph=pin]:before{content:'\e0a8'}.oi[data-glyph=play-circle]:before{content:'\e0a9'}.oi[data-glyph=plus]:before{content:'\e0aa'}.oi[data-glyph=power-standby]:before{content:'\e0ab'}.oi[data-glyph=print]:before{content:'\e0ac'}.oi[data-glyph=project]:before{content:'\e0ad'}.oi[data-glyph=pulse]:before{content:'\e0ae'}.oi[data-glyph=puzzle-piece]:before{content:'\e0af'}.oi[data-glyph=question-mark]:before{content:'\e0b0'}.oi[data-glyph=rain]:before{content:'\e0b1'}.oi[data-glyph=random]:before{content:'\e0b2'}.oi[data-glyph=reload]:before{content:'\e0b3'}.oi[data-glyph=resize-both]:before{content:'\e0b4'}.oi[data-glyph=resize-height]:before{content:'\e0b5'}.oi[data-glyph=resize-width]:before{content:'\e0b6'}.oi[data-glyph=rss-alt]:before{content:'\e0b7'}.oi[data-glyph=rss]:before{content:'\e0b8'}.oi[data-glyph=script]:before{content:'\e0b9'}.oi[data-glyph=share-boxed]:before{content:'\e0ba'}.oi[data-glyph=share]:before{content:'\e0bb'}.oi[data-glyph=shield]:before{content:'\e0bc'}.oi[data-glyph=signal]:before{content:'\e0bd'}.oi[data-glyph=signpost]:before{content:'\e0be'}.oi[data-glyph=sort-ascending]:before{content:'\e0bf'}.oi[data-glyph=sort-descending]:before{content:'\e0c0'}.oi[data-glyph=spreadsheet]:before{content:'\e0c1'}.oi[data-glyph=star]:before{content:'\e0c2'}.oi[data-glyph=sun]:before{content:'\e0c3'}.oi[data-glyph=tablet]:before{content:'\e0c4'}.oi[data-glyph=tag]:before{content:'\e0c5'}.oi[data-glyph=tags]:before{content:'\e0c6'}.oi[data-glyph=target]:before{content:'\e0c7'}.oi[data-glyph=task]:before{content:'\e0c8'}.oi[data-glyph=terminal]:before{content:'\e0c9'}.oi[data-glyph=text]:before{content:'\e0ca'}.oi[data-glyph=thumb-down]:before{content:'\e0cb'}.oi[data-glyph=thumb-up]:before{content:'\e0cc'}.oi[data-glyph=timer]:before{content:'\e0cd'}.oi[data-glyph=transfer]:before{content:'\e0ce'}.oi[data-glyph=trash]:before{content:'\e0cf'}.oi[data-glyph=underline]:before{content:'\e0d0'}.oi[data-glyph=vertical-align-bottom]:before{content:'\e0d1'}.oi[data-glyph=vertical-align-center]:before{content:'\e0d2'}.oi[data-glyph=vertical-align-top]:before{content:'\e0d3'}.oi[data-glyph=video]:before{content:'\e0d4'}.oi[data-glyph=volume-high]:before{content:'\e0d5'}.oi[data-glyph=volume-low]:before{content:'\e0d6'}.oi[data-glyph=volume-off]:before{content:'\e0d7'}.oi[data-glyph=warning]:before{content:'\e0d8'}.oi[data-glyph=wifi]:before{content:'\e0d9'}.oi[data-glyph=wrench]:before{content:'\e0da'}.oi[data-glyph=x]:before{content:'\e0db'}.oi[data-glyph=yen]:before{content:'\e0dc'}.oi[data-glyph=zoom-in]:before{content:'\e0dd'}.oi[data-glyph=zoom-out]:before{content:'\e0de'} \ No newline at end of file diff --git a/static/open-iconic/css/open-iconic.scss b/static/open-iconic/css/open-iconic.scss new file mode 100644 index 00000000..e03d979f --- /dev/null +++ b/static/open-iconic/css/open-iconic.scss @@ -0,0 +1,963 @@ +$iconic-font-path: '../fonts/' !default; + +@font-face { + font-family: 'Icons'; + src: url('#{$iconic-font-path}open-iconic.eot'); + src: url('#{$iconic-font-path}open-iconic.eot?#iconic-sm') format('embedded-opentype'), url('#{$iconic-font-path}open-iconic.woff') format('woff'), url('#{$iconic-font-path}open-iconic.ttf') format('truetype'), url('#{$iconic-font-path}open-iconic.otf') format('opentype'), url('#{$iconic-font-path}open-iconic.svg#iconic-sm') format('svg'); + font-weight: normal; + font-style: normal; +} + 
+.oi[data-glyph].oi-text-replace { + font-size: 0; + line-height: 0; +} + +.oi[data-glyph].oi-text-replace:before { + width: 1em; + text-align: center; +} + +.oi[data-glyph] { + &:before { + position: relative; + top: 1px; + font-family: 'Icons'; + display: inline-block; + speak: none; + line-height: 1; + vertical-align: baseline; + font-weight: normal; + font-style: normal; + -webkit-font-smoothing: antialiased; + -moz-osx-font-smoothing: grayscale; + } + + &:empty:before { + width: 1em; + text-align: center; + box-sizing: content-box; + } + + &.oi-align-left:before { + text-align: left; + } + &.oi-align-right:before { + text-align: right; + } + &.oi-align-center:before { + text-align: center; + } + + &.oi-flip-horizontal:before { + -webkit-transform: scale(-1, 1); + -ms-transform: scale(-1, 1); + transform: scale(-1, 1); + } + + &.oi-flip-vertical:before { + -webkit-transform: scale(1, -1); + -ms-transform: scale(-1, 1); + transform: scale(1, -1); + } + + &.oi-flip-horizontal-vertical:before { + -webkit-transform: scale(-1, -1); + -ms-transform: scale(-1, 1); + transform: scale(-1, -1); + } +} + + +.oi[data-glyph=account-login]:before { + content: '\e000'; +} + +.oi[data-glyph=account-logout]:before { + content: '\e001'; +} + +.oi[data-glyph=action-redo]:before { + content: '\e002'; +} + +.oi[data-glyph=action-undo]:before { + content: '\e003'; +} + +.oi[data-glyph=align-center]:before { + content: '\e004'; +} + +.oi[data-glyph=align-left]:before { + content: '\e005'; +} + +.oi[data-glyph=align-right]:before { + content: '\e006'; +} + +.oi[data-glyph=aperture]:before { + content: '\e007'; +} + +.oi[data-glyph=arrow-bottom]:before { + content: '\e008'; +} + +.oi[data-glyph=arrow-circle-bottom]:before { + content: '\e009'; +} + +.oi[data-glyph=arrow-circle-left]:before { + content: '\e00a'; +} + +.oi[data-glyph=arrow-circle-right]:before { + content: '\e00b'; +} + +.oi[data-glyph=arrow-circle-top]:before { + content: '\e00c'; +} + +.oi[data-glyph=arrow-left]:before { + content: '\e00d'; +} + +.oi[data-glyph=arrow-right]:before { + content: '\e00e'; +} + +.oi[data-glyph=arrow-thick-bottom]:before { + content: '\e00f'; +} + +.oi[data-glyph=arrow-thick-left]:before { + content: '\e010'; +} + +.oi[data-glyph=arrow-thick-right]:before { + content: '\e011'; +} + +.oi[data-glyph=arrow-thick-top]:before { + content: '\e012'; +} + +.oi[data-glyph=arrow-top]:before { + content: '\e013'; +} + +.oi[data-glyph=audio-spectrum]:before { + content: '\e014'; +} + +.oi[data-glyph=audio]:before { + content: '\e015'; +} + +.oi[data-glyph=badge]:before { + content: '\e016'; +} + +.oi[data-glyph=ban]:before { + content: '\e017'; +} + +.oi[data-glyph=bar-chart]:before { + content: '\e018'; +} + +.oi[data-glyph=basket]:before { + content: '\e019'; +} + +.oi[data-glyph=battery-empty]:before { + content: '\e01a'; +} + +.oi[data-glyph=battery-full]:before { + content: '\e01b'; +} + +.oi[data-glyph=beaker]:before { + content: '\e01c'; +} + +.oi[data-glyph=bell]:before { + content: '\e01d'; +} + +.oi[data-glyph=bluetooth]:before { + content: '\e01e'; +} + +.oi[data-glyph=bold]:before { + content: '\e01f'; +} + +.oi[data-glyph=bolt]:before { + content: '\e020'; +} + +.oi[data-glyph=book]:before { + content: '\e021'; +} + +.oi[data-glyph=bookmark]:before { + content: '\e022'; +} + +.oi[data-glyph=box]:before { + content: '\e023'; +} + +.oi[data-glyph=briefcase]:before { + content: '\e024'; +} + +.oi[data-glyph=british-pound]:before { + content: '\e025'; +} + +.oi[data-glyph=browser]:before { + content: '\e026'; +} + 
+.oi[data-glyph=brush]:before { + content: '\e027'; +} + +.oi[data-glyph=bug]:before { + content: '\e028'; +} + +.oi[data-glyph=bullhorn]:before { + content: '\e029'; +} + +.oi[data-glyph=calculator]:before { + content: '\e02a'; +} + +.oi[data-glyph=calendar]:before { + content: '\e02b'; +} + +.oi[data-glyph=camera-slr]:before { + content: '\e02c'; +} + +.oi[data-glyph=caret-bottom]:before { + content: '\e02d'; +} + +.oi[data-glyph=caret-left]:before { + content: '\e02e'; +} + +.oi[data-glyph=caret-right]:before { + content: '\e02f'; +} + +.oi[data-glyph=caret-top]:before { + content: '\e030'; +} + +.oi[data-glyph=cart]:before { + content: '\e031'; +} + +.oi[data-glyph=chat]:before { + content: '\e032'; +} + +.oi[data-glyph=check]:before { + content: '\e033'; +} + +.oi[data-glyph=chevron-bottom]:before { + content: '\e034'; +} + +.oi[data-glyph=chevron-left]:before { + content: '\e035'; +} + +.oi[data-glyph=chevron-right]:before { + content: '\e036'; +} + +.oi[data-glyph=chevron-top]:before { + content: '\e037'; +} + +.oi[data-glyph=circle-check]:before { + content: '\e038'; +} + +.oi[data-glyph=circle-x]:before { + content: '\e039'; +} + +.oi[data-glyph=clipboard]:before { + content: '\e03a'; +} + +.oi[data-glyph=clock]:before { + content: '\e03b'; +} + +.oi[data-glyph=cloud-download]:before { + content: '\e03c'; +} + +.oi[data-glyph=cloud-upload]:before { + content: '\e03d'; +} + +.oi[data-glyph=cloud]:before { + content: '\e03e'; +} + +.oi[data-glyph=cloudy]:before { + content: '\e03f'; +} + +.oi[data-glyph=code]:before { + content: '\e040'; +} + +.oi[data-glyph=cog]:before { + content: '\e041'; +} + +.oi[data-glyph=collapse-down]:before { + content: '\e042'; +} + +.oi[data-glyph=collapse-left]:before { + content: '\e043'; +} + +.oi[data-glyph=collapse-right]:before { + content: '\e044'; +} + +.oi[data-glyph=collapse-up]:before { + content: '\e045'; +} + +.oi[data-glyph=command]:before { + content: '\e046'; +} + +.oi[data-glyph=comment-square]:before { + content: '\e047'; +} + +.oi[data-glyph=compass]:before { + content: '\e048'; +} + +.oi[data-glyph=contrast]:before { + content: '\e049'; +} + +.oi[data-glyph=copywriting]:before { + content: '\e04a'; +} + +.oi[data-glyph=credit-card]:before { + content: '\e04b'; +} + +.oi[data-glyph=crop]:before { + content: '\e04c'; +} + +.oi[data-glyph=dashboard]:before { + content: '\e04d'; +} + +.oi[data-glyph=data-transfer-download]:before { + content: '\e04e'; +} + +.oi[data-glyph=data-transfer-upload]:before { + content: '\e04f'; +} + +.oi[data-glyph=delete]:before { + content: '\e050'; +} + +.oi[data-glyph=dial]:before { + content: '\e051'; +} + +.oi[data-glyph=document]:before { + content: '\e052'; +} + +.oi[data-glyph=dollar]:before { + content: '\e053'; +} + +.oi[data-glyph=double-quote-sans-left]:before { + content: '\e054'; +} + +.oi[data-glyph=double-quote-sans-right]:before { + content: '\e055'; +} + +.oi[data-glyph=double-quote-serif-left]:before { + content: '\e056'; +} + +.oi[data-glyph=double-quote-serif-right]:before { + content: '\e057'; +} + +.oi[data-glyph=droplet]:before { + content: '\e058'; +} + +.oi[data-glyph=eject]:before { + content: '\e059'; +} + +.oi[data-glyph=elevator]:before { + content: '\e05a'; +} + +.oi[data-glyph=ellipses]:before { + content: '\e05b'; +} + +.oi[data-glyph=envelope-closed]:before { + content: '\e05c'; +} + +.oi[data-glyph=envelope-open]:before { + content: '\e05d'; +} + +.oi[data-glyph=euro]:before { + content: '\e05e'; +} + +.oi[data-glyph=excerpt]:before { + content: '\e05f'; +} + 
+.oi[data-glyph=expand-down]:before { + content: '\e060'; +} + +.oi[data-glyph=expand-left]:before { + content: '\e061'; +} + +.oi[data-glyph=expand-right]:before { + content: '\e062'; +} + +.oi[data-glyph=expand-up]:before { + content: '\e063'; +} + +.oi[data-glyph=external-link]:before { + content: '\e064'; +} + +.oi[data-glyph=eye]:before { + content: '\e065'; +} + +.oi[data-glyph=eyedropper]:before { + content: '\e066'; +} + +.oi[data-glyph=file]:before { + content: '\e067'; +} + +.oi[data-glyph=fire]:before { + content: '\e068'; +} + +.oi[data-glyph=flag]:before { + content: '\e069'; +} + +.oi[data-glyph=flash]:before { + content: '\e06a'; +} + +.oi[data-glyph=folder]:before { + content: '\e06b'; +} + +.oi[data-glyph=fork]:before { + content: '\e06c'; +} + +.oi[data-glyph=fullscreen-enter]:before { + content: '\e06d'; +} + +.oi[data-glyph=fullscreen-exit]:before { + content: '\e06e'; +} + +.oi[data-glyph=globe]:before { + content: '\e06f'; +} + +.oi[data-glyph=graph]:before { + content: '\e070'; +} + +.oi[data-glyph=grid-four-up]:before { + content: '\e071'; +} + +.oi[data-glyph=grid-three-up]:before { + content: '\e072'; +} + +.oi[data-glyph=grid-two-up]:before { + content: '\e073'; +} + +.oi[data-glyph=hard-drive]:before { + content: '\e074'; +} + +.oi[data-glyph=header]:before { + content: '\e075'; +} + +.oi[data-glyph=headphones]:before { + content: '\e076'; +} + +.oi[data-glyph=heart]:before { + content: '\e077'; +} + +.oi[data-glyph=home]:before { + content: '\e078'; +} + +.oi[data-glyph=image]:before { + content: '\e079'; +} + +.oi[data-glyph=inbox]:before { + content: '\e07a'; +} + +.oi[data-glyph=infinity]:before { + content: '\e07b'; +} + +.oi[data-glyph=info]:before { + content: '\e07c'; +} + +.oi[data-glyph=italic]:before { + content: '\e07d'; +} + +.oi[data-glyph=justify-center]:before { + content: '\e07e'; +} + +.oi[data-glyph=justify-left]:before { + content: '\e07f'; +} + +.oi[data-glyph=justify-right]:before { + content: '\e080'; +} + +.oi[data-glyph=key]:before { + content: '\e081'; +} + +.oi[data-glyph=laptop]:before { + content: '\e082'; +} + +.oi[data-glyph=layers]:before { + content: '\e083'; +} + +.oi[data-glyph=lightbulb]:before { + content: '\e084'; +} + +.oi[data-glyph=link-broken]:before { + content: '\e085'; +} + +.oi[data-glyph=link-intact]:before { + content: '\e086'; +} + +.oi[data-glyph=list-rich]:before { + content: '\e087'; +} + +.oi[data-glyph=list]:before { + content: '\e088'; +} + +.oi[data-glyph=location]:before { + content: '\e089'; +} + +.oi[data-glyph=lock-locked]:before { + content: '\e08a'; +} + +.oi[data-glyph=lock-unlocked]:before { + content: '\e08b'; +} + +.oi[data-glyph=loop-circular]:before { + content: '\e08c'; +} + +.oi[data-glyph=loop-square]:before { + content: '\e08d'; +} + +.oi[data-glyph=loop]:before { + content: '\e08e'; +} + +.oi[data-glyph=magnifying-glass]:before { + content: '\e08f'; +} + +.oi[data-glyph=map-marker]:before { + content: '\e090'; +} + +.oi[data-glyph=map]:before { + content: '\e091'; +} + +.oi[data-glyph=media-pause]:before { + content: '\e092'; +} + +.oi[data-glyph=media-play]:before { + content: '\e093'; +} + +.oi[data-glyph=media-record]:before { + content: '\e094'; +} + +.oi[data-glyph=media-skip-backward]:before { + content: '\e095'; +} + +.oi[data-glyph=media-skip-forward]:before { + content: '\e096'; +} + +.oi[data-glyph=media-step-backward]:before { + content: '\e097'; +} + +.oi[data-glyph=media-step-forward]:before { + content: '\e098'; +} + +.oi[data-glyph=media-stop]:before { + content: '\e099'; +} 
+ +.oi[data-glyph=medical-cross]:before { + content: '\e09a'; +} + +.oi[data-glyph=menu]:before { + content: '\e09b'; +} + +.oi[data-glyph=microphone]:before { + content: '\e09c'; +} + +.oi[data-glyph=minus]:before { + content: '\e09d'; +} + +.oi[data-glyph=monitor]:before { + content: '\e09e'; +} + +.oi[data-glyph=moon]:before { + content: '\e09f'; +} + +.oi[data-glyph=move]:before { + content: '\e0a0'; +} + +.oi[data-glyph=musical-note]:before { + content: '\e0a1'; +} + +.oi[data-glyph=paperclip]:before { + content: '\e0a2'; +} + +.oi[data-glyph=pencil]:before { + content: '\e0a3'; +} + +.oi[data-glyph=people]:before { + content: '\e0a4'; +} + +.oi[data-glyph=person]:before { + content: '\e0a5'; +} + +.oi[data-glyph=phone]:before { + content: '\e0a6'; +} + +.oi[data-glyph=pie-chart]:before { + content: '\e0a7'; +} + +.oi[data-glyph=pin]:before { + content: '\e0a8'; +} + +.oi[data-glyph=play-circle]:before { + content: '\e0a9'; +} + +.oi[data-glyph=plus]:before { + content: '\e0aa'; +} + +.oi[data-glyph=power-standby]:before { + content: '\e0ab'; +} + +.oi[data-glyph=print]:before { + content: '\e0ac'; +} + +.oi[data-glyph=project]:before { + content: '\e0ad'; +} + +.oi[data-glyph=pulse]:before { + content: '\e0ae'; +} + +.oi[data-glyph=puzzle-piece]:before { + content: '\e0af'; +} + +.oi[data-glyph=question-mark]:before { + content: '\e0b0'; +} + +.oi[data-glyph=rain]:before { + content: '\e0b1'; +} + +.oi[data-glyph=random]:before { + content: '\e0b2'; +} + +.oi[data-glyph=reload]:before { + content: '\e0b3'; +} + +.oi[data-glyph=resize-both]:before { + content: '\e0b4'; +} + +.oi[data-glyph=resize-height]:before { + content: '\e0b5'; +} + +.oi[data-glyph=resize-width]:before { + content: '\e0b6'; +} + +.oi[data-glyph=rss-alt]:before { + content: '\e0b7'; +} + +.oi[data-glyph=rss]:before { + content: '\e0b8'; +} + +.oi[data-glyph=script]:before { + content: '\e0b9'; +} + +.oi[data-glyph=share-boxed]:before { + content: '\e0ba'; +} + +.oi[data-glyph=share]:before { + content: '\e0bb'; +} + +.oi[data-glyph=shield]:before { + content: '\e0bc'; +} + +.oi[data-glyph=signal]:before { + content: '\e0bd'; +} + +.oi[data-glyph=signpost]:before { + content: '\e0be'; +} + +.oi[data-glyph=sort-ascending]:before { + content: '\e0bf'; +} + +.oi[data-glyph=sort-descending]:before { + content: '\e0c0'; +} + +.oi[data-glyph=spreadsheet]:before { + content: '\e0c1'; +} + +.oi[data-glyph=star]:before { + content: '\e0c2'; +} + +.oi[data-glyph=sun]:before { + content: '\e0c3'; +} + +.oi[data-glyph=tablet]:before { + content: '\e0c4'; +} + +.oi[data-glyph=tag]:before { + content: '\e0c5'; +} + +.oi[data-glyph=tags]:before { + content: '\e0c6'; +} + +.oi[data-glyph=target]:before { + content: '\e0c7'; +} + +.oi[data-glyph=task]:before { + content: '\e0c8'; +} + +.oi[data-glyph=terminal]:before { + content: '\e0c9'; +} + +.oi[data-glyph=text]:before { + content: '\e0ca'; +} + +.oi[data-glyph=thumb-down]:before { + content: '\e0cb'; +} + +.oi[data-glyph=thumb-up]:before { + content: '\e0cc'; +} + +.oi[data-glyph=timer]:before { + content: '\e0cd'; +} + +.oi[data-glyph=transfer]:before { + content: '\e0ce'; +} + +.oi[data-glyph=trash]:before { + content: '\e0cf'; +} + +.oi[data-glyph=underline]:before { + content: '\e0d0'; +} + +.oi[data-glyph=vertical-align-bottom]:before { + content: '\e0d1'; +} + +.oi[data-glyph=vertical-align-center]:before { + content: '\e0d2'; +} + +.oi[data-glyph=vertical-align-top]:before { + content: '\e0d3'; +} + +.oi[data-glyph=video]:before { + content: '\e0d4'; +} + 
+.oi[data-glyph=volume-high]:before { + content: '\e0d5'; +} + +.oi[data-glyph=volume-low]:before { + content: '\e0d6'; +} + +.oi[data-glyph=volume-off]:before { + content: '\e0d7'; +} + +.oi[data-glyph=warning]:before { + content: '\e0d8'; +} + +.oi[data-glyph=wifi]:before { + content: '\e0d9'; +} + +.oi[data-glyph=wrench]:before { + content: '\e0da'; +} + +.oi[data-glyph=x]:before { + content: '\e0db'; +} + +.oi[data-glyph=yen]:before { + content: '\e0dc'; +} + +.oi[data-glyph=zoom-in]:before { + content: '\e0dd'; +} + +.oi[data-glyph=zoom-out]:before { + content: '\e0de'; +} + diff --git a/static/open-iconic/css/open-iconic.styl b/static/open-iconic/css/open-iconic.styl new file mode 100644 index 00000000..f541bc2d --- /dev/null +++ b/static/open-iconic/css/open-iconic.styl @@ -0,0 +1,733 @@ +@font-face + font-family 'Icons' + src url('../fonts/open-iconic.eot') + src url('../fonts/open-iconic.eot?#iconic-sm') format('embedded-opentype'), url('../fonts/open-iconic.woff') format('woff'), url('../fonts/open-iconic.ttf') format('truetype'), url('../fonts/open-iconic.otf') format('opentype'), url('../fonts/open-iconic.svg#iconic-sm') format('svg') + font-weight normal + font-style normal + + +.oi[data-glyph].oi-text-replace + font-size 0 + line-height 0 + +.oi[data-glyph].oi-text-replace:before + width 1em + text-align center + +.oi[data-glyph] + &:before + position relative + top 1px + font-family 'Icons' + display inline-block + speak none + line-height 1 + vertical-align baseline + font-weight normal + font-style normal + -webkit-font-smoothing antialiased + -moz-osx-font-smoothing grayscale + + &:empty:before + width 1em + text-align center + box-sizing content-box + + &.oi-align-left:before + text-align left + + &.oi-align-right:before + text-align right + + &.oi-align-center:before + text-align center + + + &.oi-flip-horizontal:before + -webkit-transform scale(-1, 1) + -ms-transform scale(-1, 1) + transform scale(-1, 1) + + + &.oi-flip-vertical:before + -webkit-transform scale(1, -1) + -ms-transform scale(-1, 1) + transform scale(1, -1) + + + &.oi-flip-horizontal-vertical:before + -webkit-transform scale(-1, -1) + -ms-transform scale(-1, 1) + transform scale(-1, -1) + + + + +.oi[data-glyph=account-login]:before + content '\e000' + +.oi[data-glyph=account-logout]:before + content '\e001' + +.oi[data-glyph=action-redo]:before + content '\e002' + +.oi[data-glyph=action-undo]:before + content '\e003' + +.oi[data-glyph=align-center]:before + content '\e004' + +.oi[data-glyph=align-left]:before + content '\e005' + +.oi[data-glyph=align-right]:before + content '\e006' + +.oi[data-glyph=aperture]:before + content '\e007' + +.oi[data-glyph=arrow-bottom]:before + content '\e008' + +.oi[data-glyph=arrow-circle-bottom]:before + content '\e009' + +.oi[data-glyph=arrow-circle-left]:before + content '\e00a' + +.oi[data-glyph=arrow-circle-right]:before + content '\e00b' + +.oi[data-glyph=arrow-circle-top]:before + content '\e00c' + +.oi[data-glyph=arrow-left]:before + content '\e00d' + +.oi[data-glyph=arrow-right]:before + content '\e00e' + +.oi[data-glyph=arrow-thick-bottom]:before + content '\e00f' + +.oi[data-glyph=arrow-thick-left]:before + content '\e010' + +.oi[data-glyph=arrow-thick-right]:before + content '\e011' + +.oi[data-glyph=arrow-thick-top]:before + content '\e012' + +.oi[data-glyph=arrow-top]:before + content '\e013' + +.oi[data-glyph=audio-spectrum]:before + content '\e014' + +.oi[data-glyph=audio]:before + content '\e015' + +.oi[data-glyph=badge]:before + content '\e016' + 
+.oi[data-glyph=ban]:before + content '\e017' + +.oi[data-glyph=bar-chart]:before + content '\e018' + +.oi[data-glyph=basket]:before + content '\e019' + +.oi[data-glyph=battery-empty]:before + content '\e01a' + +.oi[data-glyph=battery-full]:before + content '\e01b' + +.oi[data-glyph=beaker]:before + content '\e01c' + +.oi[data-glyph=bell]:before + content '\e01d' + +.oi[data-glyph=bluetooth]:before + content '\e01e' + +.oi[data-glyph=bold]:before + content '\e01f' + +.oi[data-glyph=bolt]:before + content '\e020' + +.oi[data-glyph=book]:before + content '\e021' + +.oi[data-glyph=bookmark]:before + content '\e022' + +.oi[data-glyph=box]:before + content '\e023' + +.oi[data-glyph=briefcase]:before + content '\e024' + +.oi[data-glyph=british-pound]:before + content '\e025' + +.oi[data-glyph=browser]:before + content '\e026' + +.oi[data-glyph=brush]:before + content '\e027' + +.oi[data-glyph=bug]:before + content '\e028' + +.oi[data-glyph=bullhorn]:before + content '\e029' + +.oi[data-glyph=calculator]:before + content '\e02a' + +.oi[data-glyph=calendar]:before + content '\e02b' + +.oi[data-glyph=camera-slr]:before + content '\e02c' + +.oi[data-glyph=caret-bottom]:before + content '\e02d' + +.oi[data-glyph=caret-left]:before + content '\e02e' + +.oi[data-glyph=caret-right]:before + content '\e02f' + +.oi[data-glyph=caret-top]:before + content '\e030' + +.oi[data-glyph=cart]:before + content '\e031' + +.oi[data-glyph=chat]:before + content '\e032' + +.oi[data-glyph=check]:before + content '\e033' + +.oi[data-glyph=chevron-bottom]:before + content '\e034' + +.oi[data-glyph=chevron-left]:before + content '\e035' + +.oi[data-glyph=chevron-right]:before + content '\e036' + +.oi[data-glyph=chevron-top]:before + content '\e037' + +.oi[data-glyph=circle-check]:before + content '\e038' + +.oi[data-glyph=circle-x]:before + content '\e039' + +.oi[data-glyph=clipboard]:before + content '\e03a' + +.oi[data-glyph=clock]:before + content '\e03b' + +.oi[data-glyph=cloud-download]:before + content '\e03c' + +.oi[data-glyph=cloud-upload]:before + content '\e03d' + +.oi[data-glyph=cloud]:before + content '\e03e' + +.oi[data-glyph=cloudy]:before + content '\e03f' + +.oi[data-glyph=code]:before + content '\e040' + +.oi[data-glyph=cog]:before + content '\e041' + +.oi[data-glyph=collapse-down]:before + content '\e042' + +.oi[data-glyph=collapse-left]:before + content '\e043' + +.oi[data-glyph=collapse-right]:before + content '\e044' + +.oi[data-glyph=collapse-up]:before + content '\e045' + +.oi[data-glyph=command]:before + content '\e046' + +.oi[data-glyph=comment-square]:before + content '\e047' + +.oi[data-glyph=compass]:before + content '\e048' + +.oi[data-glyph=contrast]:before + content '\e049' + +.oi[data-glyph=copywriting]:before + content '\e04a' + +.oi[data-glyph=credit-card]:before + content '\e04b' + +.oi[data-glyph=crop]:before + content '\e04c' + +.oi[data-glyph=dashboard]:before + content '\e04d' + +.oi[data-glyph=data-transfer-download]:before + content '\e04e' + +.oi[data-glyph=data-transfer-upload]:before + content '\e04f' + +.oi[data-glyph=delete]:before + content '\e050' + +.oi[data-glyph=dial]:before + content '\e051' + +.oi[data-glyph=document]:before + content '\e052' + +.oi[data-glyph=dollar]:before + content '\e053' + +.oi[data-glyph=double-quote-sans-left]:before + content '\e054' + +.oi[data-glyph=double-quote-sans-right]:before + content '\e055' + +.oi[data-glyph=double-quote-serif-left]:before + content '\e056' + +.oi[data-glyph=double-quote-serif-right]:before + content '\e057' + 
+.oi[data-glyph=droplet]:before + content '\e058' + +.oi[data-glyph=eject]:before + content '\e059' + +.oi[data-glyph=elevator]:before + content '\e05a' + +.oi[data-glyph=ellipses]:before + content '\e05b' + +.oi[data-glyph=envelope-closed]:before + content '\e05c' + +.oi[data-glyph=envelope-open]:before + content '\e05d' + +.oi[data-glyph=euro]:before + content '\e05e' + +.oi[data-glyph=excerpt]:before + content '\e05f' + +.oi[data-glyph=expand-down]:before + content '\e060' + +.oi[data-glyph=expand-left]:before + content '\e061' + +.oi[data-glyph=expand-right]:before + content '\e062' + +.oi[data-glyph=expand-up]:before + content '\e063' + +.oi[data-glyph=external-link]:before + content '\e064' + +.oi[data-glyph=eye]:before + content '\e065' + +.oi[data-glyph=eyedropper]:before + content '\e066' + +.oi[data-glyph=file]:before + content '\e067' + +.oi[data-glyph=fire]:before + content '\e068' + +.oi[data-glyph=flag]:before + content '\e069' + +.oi[data-glyph=flash]:before + content '\e06a' + +.oi[data-glyph=folder]:before + content '\e06b' + +.oi[data-glyph=fork]:before + content '\e06c' + +.oi[data-glyph=fullscreen-enter]:before + content '\e06d' + +.oi[data-glyph=fullscreen-exit]:before + content '\e06e' + +.oi[data-glyph=globe]:before + content '\e06f' + +.oi[data-glyph=graph]:before + content '\e070' + +.oi[data-glyph=grid-four-up]:before + content '\e071' + +.oi[data-glyph=grid-three-up]:before + content '\e072' + +.oi[data-glyph=grid-two-up]:before + content '\e073' + +.oi[data-glyph=hard-drive]:before + content '\e074' + +.oi[data-glyph=header]:before + content '\e075' + +.oi[data-glyph=headphones]:before + content '\e076' + +.oi[data-glyph=heart]:before + content '\e077' + +.oi[data-glyph=home]:before + content '\e078' + +.oi[data-glyph=image]:before + content '\e079' + +.oi[data-glyph=inbox]:before + content '\e07a' + +.oi[data-glyph=infinity]:before + content '\e07b' + +.oi[data-glyph=info]:before + content '\e07c' + +.oi[data-glyph=italic]:before + content '\e07d' + +.oi[data-glyph=justify-center]:before + content '\e07e' + +.oi[data-glyph=justify-left]:before + content '\e07f' + +.oi[data-glyph=justify-right]:before + content '\e080' + +.oi[data-glyph=key]:before + content '\e081' + +.oi[data-glyph=laptop]:before + content '\e082' + +.oi[data-glyph=layers]:before + content '\e083' + +.oi[data-glyph=lightbulb]:before + content '\e084' + +.oi[data-glyph=link-broken]:before + content '\e085' + +.oi[data-glyph=link-intact]:before + content '\e086' + +.oi[data-glyph=list-rich]:before + content '\e087' + +.oi[data-glyph=list]:before + content '\e088' + +.oi[data-glyph=location]:before + content '\e089' + +.oi[data-glyph=lock-locked]:before + content '\e08a' + +.oi[data-glyph=lock-unlocked]:before + content '\e08b' + +.oi[data-glyph=loop-circular]:before + content '\e08c' + +.oi[data-glyph=loop-square]:before + content '\e08d' + +.oi[data-glyph=loop]:before + content '\e08e' + +.oi[data-glyph=magnifying-glass]:before + content '\e08f' + +.oi[data-glyph=map-marker]:before + content '\e090' + +.oi[data-glyph=map]:before + content '\e091' + +.oi[data-glyph=media-pause]:before + content '\e092' + +.oi[data-glyph=media-play]:before + content '\e093' + +.oi[data-glyph=media-record]:before + content '\e094' + +.oi[data-glyph=media-skip-backward]:before + content '\e095' + +.oi[data-glyph=media-skip-forward]:before + content '\e096' + +.oi[data-glyph=media-step-backward]:before + content '\e097' + +.oi[data-glyph=media-step-forward]:before + content '\e098' + 
+.oi[data-glyph=media-stop]:before + content '\e099' + +.oi[data-glyph=medical-cross]:before + content '\e09a' + +.oi[data-glyph=menu]:before + content '\e09b' + +.oi[data-glyph=microphone]:before + content '\e09c' + +.oi[data-glyph=minus]:before + content '\e09d' + +.oi[data-glyph=monitor]:before + content '\e09e' + +.oi[data-glyph=moon]:before + content '\e09f' + +.oi[data-glyph=move]:before + content '\e0a0' + +.oi[data-glyph=musical-note]:before + content '\e0a1' + +.oi[data-glyph=paperclip]:before + content '\e0a2' + +.oi[data-glyph=pencil]:before + content '\e0a3' + +.oi[data-glyph=people]:before + content '\e0a4' + +.oi[data-glyph=person]:before + content '\e0a5' + +.oi[data-glyph=phone]:before + content '\e0a6' + +.oi[data-glyph=pie-chart]:before + content '\e0a7' + +.oi[data-glyph=pin]:before + content '\e0a8' + +.oi[data-glyph=play-circle]:before + content '\e0a9' + +.oi[data-glyph=plus]:before + content '\e0aa' + +.oi[data-glyph=power-standby]:before + content '\e0ab' + +.oi[data-glyph=print]:before + content '\e0ac' + +.oi[data-glyph=project]:before + content '\e0ad' + +.oi[data-glyph=pulse]:before + content '\e0ae' + +.oi[data-glyph=puzzle-piece]:before + content '\e0af' + +.oi[data-glyph=question-mark]:before + content '\e0b0' + +.oi[data-glyph=rain]:before + content '\e0b1' + +.oi[data-glyph=random]:before + content '\e0b2' + +.oi[data-glyph=reload]:before + content '\e0b3' + +.oi[data-glyph=resize-both]:before + content '\e0b4' + +.oi[data-glyph=resize-height]:before + content '\e0b5' + +.oi[data-glyph=resize-width]:before + content '\e0b6' + +.oi[data-glyph=rss-alt]:before + content '\e0b7' + +.oi[data-glyph=rss]:before + content '\e0b8' + +.oi[data-glyph=script]:before + content '\e0b9' + +.oi[data-glyph=share-boxed]:before + content '\e0ba' + +.oi[data-glyph=share]:before + content '\e0bb' + +.oi[data-glyph=shield]:before + content '\e0bc' + +.oi[data-glyph=signal]:before + content '\e0bd' + +.oi[data-glyph=signpost]:before + content '\e0be' + +.oi[data-glyph=sort-ascending]:before + content '\e0bf' + +.oi[data-glyph=sort-descending]:before + content '\e0c0' + +.oi[data-glyph=spreadsheet]:before + content '\e0c1' + +.oi[data-glyph=star]:before + content '\e0c2' + +.oi[data-glyph=sun]:before + content '\e0c3' + +.oi[data-glyph=tablet]:before + content '\e0c4' + +.oi[data-glyph=tag]:before + content '\e0c5' + +.oi[data-glyph=tags]:before + content '\e0c6' + +.oi[data-glyph=target]:before + content '\e0c7' + +.oi[data-glyph=task]:before + content '\e0c8' + +.oi[data-glyph=terminal]:before + content '\e0c9' + +.oi[data-glyph=text]:before + content '\e0ca' + +.oi[data-glyph=thumb-down]:before + content '\e0cb' + +.oi[data-glyph=thumb-up]:before + content '\e0cc' + +.oi[data-glyph=timer]:before + content '\e0cd' + +.oi[data-glyph=transfer]:before + content '\e0ce' + +.oi[data-glyph=trash]:before + content '\e0cf' + +.oi[data-glyph=underline]:before + content '\e0d0' + +.oi[data-glyph=vertical-align-bottom]:before + content '\e0d1' + +.oi[data-glyph=vertical-align-center]:before + content '\e0d2' + +.oi[data-glyph=vertical-align-top]:before + content '\e0d3' + +.oi[data-glyph=video]:before + content '\e0d4' + +.oi[data-glyph=volume-high]:before + content '\e0d5' + +.oi[data-glyph=volume-low]:before + content '\e0d6' + +.oi[data-glyph=volume-off]:before + content '\e0d7' + +.oi[data-glyph=warning]:before + content '\e0d8' + +.oi[data-glyph=wifi]:before + content '\e0d9' + +.oi[data-glyph=wrench]:before + content '\e0da' + +.oi[data-glyph=x]:before + content '\e0db' + 
+.oi[data-glyph=yen]:before + content '\e0dc' + +.oi[data-glyph=zoom-in]:before + content '\e0dd' + +.oi[data-glyph=zoom-out]:before + content '\e0de' diff --git a/static/open-iconic/fonts/open-iconic.eot b/static/open-iconic/fonts/open-iconic.eot new file mode 100644 index 00000000..f98177db Binary files /dev/null and b/static/open-iconic/fonts/open-iconic.eot differ diff --git a/static/open-iconic/fonts/open-iconic.otf b/static/open-iconic/fonts/open-iconic.otf new file mode 100644 index 00000000..f6bd6846 Binary files /dev/null and b/static/open-iconic/fonts/open-iconic.otf differ diff --git a/static/open-iconic/fonts/open-iconic.svg b/static/open-iconic/fonts/open-iconic.svg new file mode 100644 index 00000000..32b2c4e9 --- /dev/null +++ b/static/open-iconic/fonts/open-iconic.svg @@ -0,0 +1,543 @@ +Created by FontForge 20120731 at Tue Jul 1 20:39:22 2014 + By P.J. Onori +Created by P.J. Onori with FontForge 2.0 (http://fontforge.sf.net) + [SVG glyph markup not preserved in this extract] diff --git a/static/open-iconic/fonts/open-iconic.ttf b/static/open-iconic/fonts/open-iconic.ttf new file mode 100644 index 00000000..fab60486 Binary files /dev/null and b/static/open-iconic/fonts/open-iconic.ttf differ diff --git a/static/open-iconic/fonts/open-iconic.woff b/static/open-iconic/fonts/open-iconic.woff new file mode 100644 index 00000000..f9309988 Binary files /dev/null and b/static/open-iconic/fonts/open-iconic.woff differ diff --git a/static/swagger-ui/LICENSE b/static/swagger-ui/LICENSE new file mode 100644 index 00000000..d6456956 --- /dev/null +++ b/static/swagger-ui/LICENSE @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types.
+ + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/static/swagger-ui/SwaggerDark.css b/static/swagger-ui/SwaggerDark.css new file mode 100644 index 00000000..677e68cd --- /dev/null +++ b/static/swagger-ui/SwaggerDark.css @@ -0,0 +1,853 @@ +/*! + * MIT License + * + * Copyright (c) 2020 Romans Pokrovskis + * + * Permission is hereby granted, free of charge, to any person obtaining a copy + * of this software and associated documentation files (the "Software"), to deal + * in the Software without restriction, including without limitation the rights + * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + * copies of the Software, and to permit persons to whom the Software is + * furnished to do so, subject to the following conditions: + * + * The above copyright notice and this permission notice shall be included in all + * copies or substantial portions of the Software. 
+ * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE + * SOFTWARE. + */ + +a { color: #8c8cfa; } + +::-webkit-scrollbar-track-piece { background-color: rgba(255, 255, 255, .2) !important; } + +::-webkit-scrollbar-track { background-color: rgba(255, 255, 255, .3) !important; } + +::-webkit-scrollbar-thumb { background-color: rgba(255, 255, 255, .5) !important; } + +embed[type="application/pdf"] { filter: invert(90%); } + +html { + background: #1f1f1f !important; + box-sizing: border-box; + filter: contrast(100%) brightness(100%) saturate(100%); + overflow-y: scroll; +} + +body { + background: #1f1f1f; + background-color: #1f1f1f; + background-image: none !important; +} + +button, input, select, textarea { + background-color: #1f1f1f; + color: #bfbfbf; +} + +font, html { color: #bfbfbf; } + +.swagger-ui, .swagger-ui section h3 { color: #b5bac9; } + +.swagger-ui a { background-color: transparent; } + +.swagger-ui mark { + background-color: #664b00; + color: #bfbfbf; +} + +.swagger-ui legend { color: inherit; } + +.swagger-ui .debug * { outline: #e6da99 solid 1px; } + +.swagger-ui .debug-white * { outline: #fff solid 1px; } + +.swagger-ui .debug-black * { outline: #bfbfbf solid 1px; } + +.swagger-ui .debug-grid { background: url(data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAAgAAAAICAYAAADED76LAAAAGXRFWHRTb2Z0d2FyZQBBZG9iZSBJbWFnZVJlYWR5ccllPAAAAyhpVFh0WE1MOmNvbS5hZG9iZS54bXAAAAAAADw/eHBhY2tldCBiZWdpbj0i77u/IiBpZD0iVzVNME1wQ2VoaUh6cmVTek5UY3prYzlkIj8+IDx4OnhtcG1ldGEgeG1sbnM6eD0iYWRvYmU6bnM6bWV0YS8iIHg6eG1wdGs9IkFkb2JlIFhNUCBDb3JlIDUuNi1jMTExIDc5LjE1ODMyNSwgMjAxNS8wOS8xMC0wMToxMDoyMCAgICAgICAgIj4gPHJkZjpSREYgeG1sbnM6cmRmPSJodHRwOi8vd3d3LnczLm9yZy8xOTk5LzAyLzIyLXJkZi1zeW50YXgtbnMjIj4gPHJkZjpEZXNjcmlwdGlvbiByZGY6YWJvdXQ9IiIgeG1sbnM6eG1wTU09Imh0dHA6Ly9ucy5hZG9iZS5jb20veGFwLzEuMC9tbS8iIHhtbG5zOnN0UmVmPSJodHRwOi8vbnMuYWRvYmUuY29tL3hhcC8xLjAvc1R5cGUvUmVzb3VyY2VSZWYjIiB4bWxuczp4bXA9Imh0dHA6Ly9ucy5hZG9iZS5jb20veGFwLzEuMC8iIHhtcE1NOkRvY3VtZW50SUQ9InhtcC5kaWQ6MTRDOTY4N0U2N0VFMTFFNjg2MzZDQjkwNkQ4MjgwMEIiIHhtcE1NOkluc3RhbmNlSUQ9InhtcC5paWQ6MTRDOTY4N0Q2N0VFMTFFNjg2MzZDQjkwNkQ4MjgwMEIiIHhtcDpDcmVhdG9yVG9vbD0iQWRvYmUgUGhvdG9zaG9wIENDIDIwMTUgKE1hY2ludG9zaCkiPiA8eG1wTU06RGVyaXZlZEZyb20gc3RSZWY6aW5zdGFuY2VJRD0ieG1wLmlpZDo3NjcyQkQ3NjY3QzUxMUU2QjJCQ0UyNDA4MTAwMjE3MSIgc3RSZWY6ZG9jdW1lbnRJRD0ieG1wLmRpZDo3NjcyQkQ3NzY3QzUxMUU2QjJCQ0UyNDA4MTAwMjE3MSIvPiA8L3JkZjpEZXNjcmlwdGlvbj4gPC9yZGY6UkRGPiA8L3g6eG1wbWV0YT4gPD94cGFja2V0IGVuZD0iciI/PsBS+GMAAAAjSURBVHjaYvz//z8DLsD4gcGXiYEAGBIKGBne//fFpwAgwAB98AaF2pjlUQAAAABJRU5ErkJggg==) 0 0; } + +.swagger-ui .debug-grid-16 { background: 
url(data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAABAAAAAQCAYAAAAf8/9hAAAAGXRFWHRTb2Z0d2FyZQBBZG9iZSBJbWFnZVJlYWR5ccllPAAAAyhpVFh0WE1MOmNvbS5hZG9iZS54bXAAAAAAADw/eHBhY2tldCBiZWdpbj0i77u/IiBpZD0iVzVNME1wQ2VoaUh6cmVTek5UY3prYzlkIj8+IDx4OnhtcG1ldGEgeG1sbnM6eD0iYWRvYmU6bnM6bWV0YS8iIHg6eG1wdGs9IkFkb2JlIFhNUCBDb3JlIDUuNi1jMTExIDc5LjE1ODMyNSwgMjAxNS8wOS8xMC0wMToxMDoyMCAgICAgICAgIj4gPHJkZjpSREYgeG1sbnM6cmRmPSJodHRwOi8vd3d3LnczLm9yZy8xOTk5LzAyLzIyLXJkZi1zeW50YXgtbnMjIj4gPHJkZjpEZXNjcmlwdGlvbiByZGY6YWJvdXQ9IiIgeG1sbnM6eG1wTU09Imh0dHA6Ly9ucy5hZG9iZS5jb20veGFwLzEuMC9tbS8iIHhtbG5zOnN0UmVmPSJodHRwOi8vbnMuYWRvYmUuY29tL3hhcC8xLjAvc1R5cGUvUmVzb3VyY2VSZWYjIiB4bWxuczp4bXA9Imh0dHA6Ly9ucy5hZG9iZS5jb20veGFwLzEuMC8iIHhtcE1NOkRvY3VtZW50SUQ9InhtcC5kaWQ6ODYyRjhERDU2N0YyMTFFNjg2MzZDQjkwNkQ4MjgwMEIiIHhtcE1NOkluc3RhbmNlSUQ9InhtcC5paWQ6ODYyRjhERDQ2N0YyMTFFNjg2MzZDQjkwNkQ4MjgwMEIiIHhtcDpDcmVhdG9yVG9vbD0iQWRvYmUgUGhvdG9zaG9wIENDIDIwMTUgKE1hY2ludG9zaCkiPiA8eG1wTU06RGVyaXZlZEZyb20gc3RSZWY6aW5zdGFuY2VJRD0ieG1wLmlpZDo3NjcyQkQ3QTY3QzUxMUU2QjJCQ0UyNDA4MTAwMjE3MSIgc3RSZWY6ZG9jdW1lbnRJRD0ieG1wLmRpZDo3NjcyQkQ3QjY3QzUxMUU2QjJCQ0UyNDA4MTAwMjE3MSIvPiA8L3JkZjpEZXNjcmlwdGlvbj4gPC9yZGY6UkRGPiA8L3g6eG1wbWV0YT4gPD94cGFja2V0IGVuZD0iciI/PvCS01IAAABMSURBVHjaYmR4/5+BFPBfAMFm/MBgx8RAGWCn1AAmSg34Q6kBDKMGMDCwICeMIemF/5QawEipAWwUhwEjMDvbAWlWkvVBwu8vQIABAEwBCph8U6c0AAAAAElFTkSuQmCC) 0 0; } + +.swagger-ui .debug-grid-8-solid { background: url(data:image/jpeg;base64,/9j/4QAYRXhpZgAASUkqAAgAAAAAAAAAAAAAAP/sABFEdWNreQABAAQAAAAAAAD/4QMxaHR0cDovL25zLmFkb2JlLmNvbS94YXAvMS4wLwA8P3hwYWNrZXQgYmVnaW49Iu+7vyIgaWQ9Ilc1TTBNcENlaGlIenJlU3pOVGN6a2M5ZCI/PiA8eDp4bXBtZXRhIHhtbG5zOng9ImFkb2JlOm5zOm1ldGEvIiB4OnhtcHRrPSJBZG9iZSBYTVAgQ29yZSA1LjYtYzExMSA3OS4xNTgzMjUsIDIwMTUvMDkvMTAtMDE6MTA6MjAgICAgICAgICI+IDxyZGY6UkRGIHhtbG5zOnJkZj0iaHR0cDovL3d3dy53My5vcmcvMTk5OS8wMi8yMi1yZGYtc3ludGF4LW5zIyI+IDxyZGY6RGVzY3JpcHRpb24gcmRmOmFib3V0PSIiIHhtbG5zOnhtcD0iaHR0cDovL25zLmFkb2JlLmNvbS94YXAvMS4wLyIgeG1sbnM6eG1wTU09Imh0dHA6Ly9ucy5hZG9iZS5jb20veGFwLzEuMC9tbS8iIHhtbG5zOnN0UmVmPSJodHRwOi8vbnMuYWRvYmUuY29tL3hhcC8xLjAvc1R5cGUvUmVzb3VyY2VSZWYjIiB4bXA6Q3JlYXRvclRvb2w9IkFkb2JlIFBob3Rvc2hvcCBDQyAyMDE1IChNYWNpbnRvc2gpIiB4bXBNTTpJbnN0YW5jZUlEPSJ4bXAuaWlkOkIxMjI0OTczNjdCMzExRTZCMkJDRTI0MDgxMDAyMTcxIiB4bXBNTTpEb2N1bWVudElEPSJ4bXAuZGlkOkIxMjI0OTc0NjdCMzExRTZCMkJDRTI0MDgxMDAyMTcxIj4gPHhtcE1NOkRlcml2ZWRGcm9tIHN0UmVmOmluc3RhbmNlSUQ9InhtcC5paWQ6QjEyMjQ5NzE2N0IzMTFFNkIyQkNFMjQwODEwMDIxNzEiIHN0UmVmOmRvY3VtZW50SUQ9InhtcC5kaWQ6QjEyMjQ5NzI2N0IzMTFFNkIyQkNFMjQwODEwMDIxNzEiLz4gPC9yZGY6RGVzY3JpcHRpb24+IDwvcmRmOlJERj4gPC94OnhtcG1ldGE+IDw/eHBhY2tldCBlbmQ9InIiPz7/7gAOQWRvYmUAZMAAAAAB/9sAhAAbGhopHSlBJiZBQi8vL0JHPz4+P0dHR0dHR0dHR0dHR0dHR0dHR0dHR0dHR0dHR0dHR0dHR0dHR0dHR0dHR0dHAR0pKTQmND8oKD9HPzU/R0dHR0dHR0dHR0dHR0dHR0dHR0dHR0dHR0dHR0dHR0dHR0dHR0dHR0dHR0dHR0dHR0f/wAARCAAIAAgDASIAAhEBAxEB/8QAWQABAQAAAAAAAAAAAAAAAAAAAAYBAQEAAAAAAAAAAAAAAAAAAAIEEAEBAAMBAAAAAAAAAAAAAAABADECA0ERAAEDBQAAAAAAAAAAAAAAAAARITFBUWESIv/aAAwDAQACEQMRAD8AoOnTV1QTD7JJshP3vSM3P//Z) 0 0 #1c1c21; } + +.swagger-ui .debug-grid-16-solid { background: 
url(data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAABAAAAAQCAIAAACQkWg2AAAAGXRFWHRTb2Z0d2FyZQBBZG9iZSBJbWFnZVJlYWR5ccllPAAAAyhpVFh0WE1MOmNvbS5hZG9iZS54bXAAAAAAADw/eHBhY2tldCBiZWdpbj0i77u/IiBpZD0iVzVNME1wQ2VoaUh6cmVTek5UY3prYzlkIj8+IDx4OnhtcG1ldGEgeG1sbnM6eD0iYWRvYmU6bnM6bWV0YS8iIHg6eG1wdGs9IkFkb2JlIFhNUCBDb3JlIDUuNi1jMTExIDc5LjE1ODMyNSwgMjAxNS8wOS8xMC0wMToxMDoyMCAgICAgICAgIj4gPHJkZjpSREYgeG1sbnM6cmRmPSJodHRwOi8vd3d3LnczLm9yZy8xOTk5LzAyLzIyLXJkZi1zeW50YXgtbnMjIj4gPHJkZjpEZXNjcmlwdGlvbiByZGY6YWJvdXQ9IiIgeG1sbnM6eG1wPSJodHRwOi8vbnMuYWRvYmUuY29tL3hhcC8xLjAvIiB4bWxuczp4bXBNTT0iaHR0cDovL25zLmFkb2JlLmNvbS94YXAvMS4wL21tLyIgeG1sbnM6c3RSZWY9Imh0dHA6Ly9ucy5hZG9iZS5jb20veGFwLzEuMC9zVHlwZS9SZXNvdXJjZVJlZiMiIHhtcDpDcmVhdG9yVG9vbD0iQWRvYmUgUGhvdG9zaG9wIENDIDIwMTUgKE1hY2ludG9zaCkiIHhtcE1NOkluc3RhbmNlSUQ9InhtcC5paWQ6NzY3MkJEN0U2N0M1MTFFNkIyQkNFMjQwODEwMDIxNzEiIHhtcE1NOkRvY3VtZW50SUQ9InhtcC5kaWQ6NzY3MkJEN0Y2N0M1MTFFNkIyQkNFMjQwODEwMDIxNzEiPiA8eG1wTU06RGVyaXZlZEZyb20gc3RSZWY6aW5zdGFuY2VJRD0ieG1wLmlpZDo3NjcyQkQ3QzY3QzUxMUU2QjJCQ0UyNDA4MTAwMjE3MSIgc3RSZWY6ZG9jdW1lbnRJRD0ieG1wLmRpZDo3NjcyQkQ3RDY3QzUxMUU2QjJCQ0UyNDA4MTAwMjE3MSIvPiA8L3JkZjpEZXNjcmlwdGlvbj4gPC9yZGY6UkRGPiA8L3g6eG1wbWV0YT4gPD94cGFja2V0IGVuZD0iciI/Pve6J3kAAAAzSURBVHjaYvz//z8D0UDsMwMjSRoYP5Gq4SPNbRjVMEQ1fCRDg+in/6+J1AJUxsgAEGAA31BAJMS0GYEAAAAASUVORK5CYII=) 0 0 #1c1c21; } + +.swagger-ui .b--black { border-color: #000; } + +.swagger-ui .b--near-black { border-color: #121212; } + +.swagger-ui .b--dark-gray { border-color: #333; } + +.swagger-ui .b--mid-gray { border-color: #545454; } + +.swagger-ui .b--gray { border-color: #787878; } + +.swagger-ui .b--silver { border-color: #999; } + +.swagger-ui .b--light-silver { border-color: #6e6e6e; } + +.swagger-ui .b--moon-gray { border-color: #4d4d4d; } + +.swagger-ui .b--light-gray { border-color: #2b2b2b; } + +.swagger-ui .b--near-white { border-color: #242424; } + +.swagger-ui .b--white { border-color: #1c1c21; } + +.swagger-ui .b--white-90 { border-color: rgba(28, 28, 33, .9); } + +.swagger-ui .b--white-80 { border-color: rgba(28, 28, 33, .8); } + +.swagger-ui .b--white-70 { border-color: rgba(28, 28, 33, .7); } + +.swagger-ui .b--white-60 { border-color: rgba(28, 28, 33, .6); } + +.swagger-ui .b--white-50 { border-color: rgba(28, 28, 33, .5); } + +.swagger-ui .b--white-40 { border-color: rgba(28, 28, 33, .4); } + +.swagger-ui .b--white-30 { border-color: rgba(28, 28, 33, .3); } + +.swagger-ui .b--white-20 { border-color: rgba(28, 28, 33, .2); } + +.swagger-ui .b--white-10 { border-color: rgba(28, 28, 33, .1); } + +.swagger-ui .b--white-05 { border-color: rgba(28, 28, 33, .05); } + +.swagger-ui .b--white-025 { border-color: rgba(28, 28, 33, .024); } + +.swagger-ui .b--white-0125 { border-color: rgba(28, 28, 33, .01); } + +.swagger-ui .b--black-90 { border-color: rgba(0, 0, 0, .9); } + +.swagger-ui .b--black-80 { border-color: rgba(0, 0, 0, .8); } + +.swagger-ui .b--black-70 { border-color: rgba(0, 0, 0, .7); } + +.swagger-ui .b--black-60 { border-color: rgba(0, 0, 0, .6); } + +.swagger-ui .b--black-50 { border-color: rgba(0, 0, 0, .5); } + +.swagger-ui .b--black-40 { border-color: rgba(0, 0, 0, .4); } + +.swagger-ui .b--black-30 { border-color: rgba(0, 0, 0, .3); } + +.swagger-ui .b--black-20 { border-color: rgba(0, 0, 0, .2); } + +.swagger-ui .b--black-10 { border-color: rgba(0, 0, 0, .1); } + +.swagger-ui .b--black-05 { border-color: rgba(0, 0, 0, .05); } + +.swagger-ui .b--black-025 { border-color: rgba(0, 0, 0, .024); } + +.swagger-ui .b--black-0125 { border-color: rgba(0, 0, 0, .01); } + +.swagger-ui 
.b--dark-red { border-color: #bc2f36; } + +.swagger-ui .b--red { border-color: #c83932; } + +.swagger-ui .b--light-red { border-color: #ab3c2b; } + +.swagger-ui .b--orange { border-color: #cc6e33; } + +.swagger-ui .b--purple { border-color: #5e2ca5; } + +.swagger-ui .b--light-purple { border-color: #672caf; } + +.swagger-ui .b--dark-pink { border-color: #ab2b81; } + +.swagger-ui .b--hot-pink { border-color: #c03086; } + +.swagger-ui .b--pink { border-color: #8f2464; } + +.swagger-ui .b--light-pink { border-color: #721d4d; } + +.swagger-ui .b--dark-green { border-color: #1c6e50; } + +.swagger-ui .b--green { border-color: #279b70; } + +.swagger-ui .b--light-green { border-color: #228762; } + +.swagger-ui .b--navy { border-color: #0d1d35; } + +.swagger-ui .b--dark-blue { border-color: #20497e; } + +.swagger-ui .b--blue { border-color: #4380d0; } + +.swagger-ui .b--light-blue { border-color: #20517e; } + +.swagger-ui .b--lightest-blue { border-color: #143a52; } + +.swagger-ui .b--washed-blue { border-color: #0c312d; } + +.swagger-ui .b--washed-green { border-color: #0f3d2c; } + +.swagger-ui .b--washed-red { border-color: #411010; } + +.swagger-ui .b--transparent { border-color: transparent; } + +.swagger-ui .b--gold, .swagger-ui .b--light-yellow, .swagger-ui .b--washed-yellow, .swagger-ui .b--yellow { border-color: #664b00; } + +.swagger-ui .shadow-1 { box-shadow: rgba(0, 0, 0, .2) 0 0 4px 2px; } + +.swagger-ui .shadow-2 { box-shadow: rgba(0, 0, 0, .2) 0 0 8px 2px; } + +.swagger-ui .shadow-3 { box-shadow: rgba(0, 0, 0, .2) 2px 2px 4px 2px; } + +.swagger-ui .shadow-4 { box-shadow: rgba(0, 0, 0, .2) 2px 2px 8px 0; } + +.swagger-ui .shadow-5 { box-shadow: rgba(0, 0, 0, .2) 4px 4px 8px 0; } + +@media screen and (min-width: 30em) { + .swagger-ui .shadow-1-ns { box-shadow: rgba(0, 0, 0, .2) 0 0 4px 2px; } + + .swagger-ui .shadow-2-ns { box-shadow: rgba(0, 0, 0, .2) 0 0 8px 2px; } + + .swagger-ui .shadow-3-ns { box-shadow: rgba(0, 0, 0, .2) 2px 2px 4px 2px; } + + .swagger-ui .shadow-4-ns { box-shadow: rgba(0, 0, 0, .2) 2px 2px 8px 0; } + + .swagger-ui .shadow-5-ns { box-shadow: rgba(0, 0, 0, .2) 4px 4px 8px 0; } +} + +@media screen and (max-width: 60em) and (min-width: 30em) { + .swagger-ui .shadow-1-m { box-shadow: rgba(0, 0, 0, .2) 0 0 4px 2px; } + + .swagger-ui .shadow-2-m { box-shadow: rgba(0, 0, 0, .2) 0 0 8px 2px; } + + .swagger-ui .shadow-3-m { box-shadow: rgba(0, 0, 0, .2) 2px 2px 4px 2px; } + + .swagger-ui .shadow-4-m { box-shadow: rgba(0, 0, 0, .2) 2px 2px 8px 0; } + + .swagger-ui .shadow-5-m { box-shadow: rgba(0, 0, 0, .2) 4px 4px 8px 0; } +} + +@media screen and (min-width: 60em) { + .swagger-ui .shadow-1-l { box-shadow: rgba(0, 0, 0, .2) 0 0 4px 2px; } + + .swagger-ui .shadow-2-l { box-shadow: rgba(0, 0, 0, .2) 0 0 8px 2px; } + + .swagger-ui .shadow-3-l { box-shadow: rgba(0, 0, 0, .2) 2px 2px 4px 2px; } + + .swagger-ui .shadow-4-l { box-shadow: rgba(0, 0, 0, .2) 2px 2px 8px 0; } + + .swagger-ui .shadow-5-l { box-shadow: rgba(0, 0, 0, .2) 4px 4px 8px 0; } +} + +.swagger-ui .black-05 { color: rgba(191, 191, 191, .05); } + +.swagger-ui .bg-black-05 { background-color: rgba(0, 0, 0, .05); } + +.swagger-ui .black-90, .swagger-ui .hover-black-90:focus, .swagger-ui .hover-black-90:hover { color: rgba(191, 191, 191, .9); } + +.swagger-ui .black-80, .swagger-ui .hover-black-80:focus, .swagger-ui .hover-black-80:hover { color: rgba(191, 191, 191, .8); } + +.swagger-ui .black-70, .swagger-ui .hover-black-70:focus, .swagger-ui .hover-black-70:hover { color: rgba(191, 191, 191, .7); } + +.swagger-ui 
.black-60, .swagger-ui .hover-black-60:focus, .swagger-ui .hover-black-60:hover { color: rgba(191, 191, 191, .6); } + +.swagger-ui .black-50, .swagger-ui .hover-black-50:focus, .swagger-ui .hover-black-50:hover { color: rgba(191, 191, 191, .5); } + +.swagger-ui .black-40, .swagger-ui .hover-black-40:focus, .swagger-ui .hover-black-40:hover { color: rgba(191, 191, 191, .4); } + +.swagger-ui .black-30, .swagger-ui .hover-black-30:focus, .swagger-ui .hover-black-30:hover { color: rgba(191, 191, 191, .3); } + +.swagger-ui .black-20, .swagger-ui .hover-black-20:focus, .swagger-ui .hover-black-20:hover { color: rgba(191, 191, 191, .2); } + +.swagger-ui .black-10, .swagger-ui .hover-black-10:focus, .swagger-ui .hover-black-10:hover { color: rgba(191, 191, 191, .1); } + +.swagger-ui .hover-white-90:focus, .swagger-ui .hover-white-90:hover, .swagger-ui .white-90 { color: rgba(255, 255, 255, .9); } + +.swagger-ui .hover-white-80:focus, .swagger-ui .hover-white-80:hover, .swagger-ui .white-80 { color: rgba(255, 255, 255, .8); } + +.swagger-ui .hover-white-70:focus, .swagger-ui .hover-white-70:hover, .swagger-ui .white-70 { color: rgba(255, 255, 255, .7); } + +.swagger-ui .hover-white-60:focus, .swagger-ui .hover-white-60:hover, .swagger-ui .white-60 { color: rgba(255, 255, 255, .6); } + +.swagger-ui .hover-white-50:focus, .swagger-ui .hover-white-50:hover, .swagger-ui .white-50 { color: rgba(255, 255, 255, .5); } + +.swagger-ui .hover-white-40:focus, .swagger-ui .hover-white-40:hover, .swagger-ui .white-40 { color: rgba(255, 255, 255, .4); } + +.swagger-ui .hover-white-30:focus, .swagger-ui .hover-white-30:hover, .swagger-ui .white-30 { color: rgba(255, 255, 255, .3); } + +.swagger-ui .hover-white-20:focus, .swagger-ui .hover-white-20:hover, .swagger-ui .white-20 { color: rgba(255, 255, 255, .2); } + +.swagger-ui .hover-white-10:focus, .swagger-ui .hover-white-10:hover, .swagger-ui .white-10 { color: rgba(255, 255, 255, .1); } + +.swagger-ui .hover-moon-gray:focus, .swagger-ui .hover-moon-gray:hover, .swagger-ui .moon-gray { color: #ccc; } + +.swagger-ui .hover-light-gray:focus, .swagger-ui .hover-light-gray:hover, .swagger-ui .light-gray { color: #ededed; } + +.swagger-ui .hover-near-white:focus, .swagger-ui .hover-near-white:hover, .swagger-ui .near-white { color: #f5f5f5; } + +.swagger-ui .dark-red, .swagger-ui .hover-dark-red:focus, .swagger-ui .hover-dark-red:hover { color: #e6999d; } + +.swagger-ui .hover-red:focus, .swagger-ui .hover-red:hover, .swagger-ui .red { color: #e69d99; } + +.swagger-ui .hover-light-red:focus, .swagger-ui .hover-light-red:hover, .swagger-ui .light-red { color: #e6a399; } + +.swagger-ui .hover-orange:focus, .swagger-ui .hover-orange:hover, .swagger-ui .orange { color: #e6b699; } + +.swagger-ui .gold, .swagger-ui .hover-gold:focus, .swagger-ui .hover-gold:hover { color: #e6d099; } + +.swagger-ui .hover-yellow:focus, .swagger-ui .hover-yellow:hover, .swagger-ui .yellow { color: #e6da99; } + +.swagger-ui .hover-light-yellow:focus, .swagger-ui .hover-light-yellow:hover, .swagger-ui .light-yellow { color: #ede6b6; } + +.swagger-ui .hover-purple:focus, .swagger-ui .hover-purple:hover, .swagger-ui .purple { color: #b99ae4; } + +.swagger-ui .hover-light-purple:focus, .swagger-ui .hover-light-purple:hover, .swagger-ui .light-purple { color: #bb99e6; } + +.swagger-ui .dark-pink, .swagger-ui .hover-dark-pink:focus, .swagger-ui .hover-dark-pink:hover { color: #e699cc; } + +.swagger-ui .hot-pink, .swagger-ui .hover-hot-pink:focus, .swagger-ui .hover-hot-pink:hover, .swagger-ui 
.hover-pink:focus, .swagger-ui .hover-pink:hover, .swagger-ui .pink { color: #e699c7; } + +.swagger-ui .hover-light-pink:focus, .swagger-ui .hover-light-pink:hover, .swagger-ui .light-pink { color: #edb6d5; } + +.swagger-ui .dark-green, .swagger-ui .green, .swagger-ui .hover-dark-green:focus, .swagger-ui .hover-dark-green:hover, .swagger-ui .hover-green:focus, .swagger-ui .hover-green:hover { color: #99e6c9; } + +.swagger-ui .hover-light-green:focus, .swagger-ui .hover-light-green:hover, .swagger-ui .light-green { color: #a1e8ce; } + +.swagger-ui .hover-navy:focus, .swagger-ui .hover-navy:hover, .swagger-ui .navy { color: #99b8e6; } + +.swagger-ui .blue, .swagger-ui .dark-blue, .swagger-ui .hover-blue:focus, .swagger-ui .hover-blue:hover, .swagger-ui .hover-dark-blue:focus, .swagger-ui .hover-dark-blue:hover { color: #99bae6; } + +.swagger-ui .hover-light-blue:focus, .swagger-ui .hover-light-blue:hover, .swagger-ui .light-blue { color: #a9cbea; } + +.swagger-ui .hover-lightest-blue:focus, .swagger-ui .hover-lightest-blue:hover, .swagger-ui .lightest-blue { color: #d6e9f5; } + +.swagger-ui .hover-washed-blue:focus, .swagger-ui .hover-washed-blue:hover, .swagger-ui .washed-blue { color: #f7fdfc; } + +.swagger-ui .hover-washed-green:focus, .swagger-ui .hover-washed-green:hover, .swagger-ui .washed-green { color: #ebfaf4; } + +.swagger-ui .hover-washed-yellow:focus, .swagger-ui .hover-washed-yellow:hover, .swagger-ui .washed-yellow { color: #fbf9ef; } + +.swagger-ui .hover-washed-red:focus, .swagger-ui .hover-washed-red:hover, .swagger-ui .washed-red { color: #f9e7e7; } + +.swagger-ui .color-inherit, .swagger-ui .hover-inherit:focus, .swagger-ui .hover-inherit:hover { color: inherit; } + +.swagger-ui .bg-black-90, .swagger-ui .hover-bg-black-90:focus, .swagger-ui .hover-bg-black-90:hover { background-color: rgba(0, 0, 0, .9); } + +.swagger-ui .bg-black-80, .swagger-ui .hover-bg-black-80:focus, .swagger-ui .hover-bg-black-80:hover { background-color: rgba(0, 0, 0, .8); } + +.swagger-ui .bg-black-70, .swagger-ui .hover-bg-black-70:focus, .swagger-ui .hover-bg-black-70:hover { background-color: rgba(0, 0, 0, .7); } + +.swagger-ui .bg-black-60, .swagger-ui .hover-bg-black-60:focus, .swagger-ui .hover-bg-black-60:hover { background-color: rgba(0, 0, 0, .6); } + +.swagger-ui .bg-black-50, .swagger-ui .hover-bg-black-50:focus, .swagger-ui .hover-bg-black-50:hover { background-color: rgba(0, 0, 0, .5); } + +.swagger-ui .bg-black-40, .swagger-ui .hover-bg-black-40:focus, .swagger-ui .hover-bg-black-40:hover { background-color: rgba(0, 0, 0, .4); } + +.swagger-ui .bg-black-30, .swagger-ui .hover-bg-black-30:focus, .swagger-ui .hover-bg-black-30:hover { background-color: rgba(0, 0, 0, .3); } + +.swagger-ui .bg-black-20, .swagger-ui .hover-bg-black-20:focus, .swagger-ui .hover-bg-black-20:hover { background-color: rgba(0, 0, 0, .2); } + +.swagger-ui .bg-white-90, .swagger-ui .hover-bg-white-90:focus, .swagger-ui .hover-bg-white-90:hover { background-color: rgba(28, 28, 33, .9); } + +.swagger-ui .bg-white-80, .swagger-ui .hover-bg-white-80:focus, .swagger-ui .hover-bg-white-80:hover { background-color: rgba(28, 28, 33, .8); } + +.swagger-ui .bg-white-70, .swagger-ui .hover-bg-white-70:focus, .swagger-ui .hover-bg-white-70:hover { background-color: rgba(28, 28, 33, .7); } + +.swagger-ui .bg-white-60, .swagger-ui .hover-bg-white-60:focus, .swagger-ui .hover-bg-white-60:hover { background-color: rgba(28, 28, 33, .6); } + +.swagger-ui .bg-white-50, .swagger-ui .hover-bg-white-50:focus, .swagger-ui 
.hover-bg-white-50:hover { background-color: rgba(28, 28, 33, .5); } + +.swagger-ui .bg-white-40, .swagger-ui .hover-bg-white-40:focus, .swagger-ui .hover-bg-white-40:hover { background-color: rgba(28, 28, 33, .4); } + +.swagger-ui .bg-white-30, .swagger-ui .hover-bg-white-30:focus, .swagger-ui .hover-bg-white-30:hover { background-color: rgba(28, 28, 33, .3); } + +.swagger-ui .bg-white-20, .swagger-ui .hover-bg-white-20:focus, .swagger-ui .hover-bg-white-20:hover { background-color: rgba(28, 28, 33, .2); } + +.swagger-ui .bg-black, .swagger-ui .hover-bg-black:focus, .swagger-ui .hover-bg-black:hover { background-color: #000; } + +.swagger-ui .bg-near-black, .swagger-ui .hover-bg-near-black:focus, .swagger-ui .hover-bg-near-black:hover { background-color: #121212; } + +.swagger-ui .bg-dark-gray, .swagger-ui .hover-bg-dark-gray:focus, .swagger-ui .hover-bg-dark-gray:hover { background-color: #333; } + +.swagger-ui .bg-mid-gray, .swagger-ui .hover-bg-mid-gray:focus, .swagger-ui .hover-bg-mid-gray:hover { background-color: #545454; } + +.swagger-ui .bg-gray, .swagger-ui .hover-bg-gray:focus, .swagger-ui .hover-bg-gray:hover { background-color: #787878; } + +.swagger-ui .bg-silver, .swagger-ui .hover-bg-silver:focus, .swagger-ui .hover-bg-silver:hover { background-color: #999; } + +.swagger-ui .bg-white, .swagger-ui .hover-bg-white:focus, .swagger-ui .hover-bg-white:hover { background-color: #1c1c21; } + +.swagger-ui .bg-transparent, .swagger-ui .hover-bg-transparent:focus, .swagger-ui .hover-bg-transparent:hover { background-color: transparent; } + +.swagger-ui .bg-dark-red, .swagger-ui .hover-bg-dark-red:focus, .swagger-ui .hover-bg-dark-red:hover { background-color: #bc2f36; } + +.swagger-ui .bg-red, .swagger-ui .hover-bg-red:focus, .swagger-ui .hover-bg-red:hover { background-color: #c83932; } + +.swagger-ui .bg-light-red, .swagger-ui .hover-bg-light-red:focus, .swagger-ui .hover-bg-light-red:hover { background-color: #ab3c2b; } + +.swagger-ui .bg-orange, .swagger-ui .hover-bg-orange:focus, .swagger-ui .hover-bg-orange:hover { background-color: #cc6e33; } + +.swagger-ui .bg-gold, .swagger-ui .bg-light-yellow, .swagger-ui .bg-washed-yellow, .swagger-ui .bg-yellow, .swagger-ui .hover-bg-gold:focus, .swagger-ui .hover-bg-gold:hover, .swagger-ui .hover-bg-light-yellow:focus, .swagger-ui .hover-bg-light-yellow:hover, .swagger-ui .hover-bg-washed-yellow:focus, .swagger-ui .hover-bg-washed-yellow:hover, .swagger-ui .hover-bg-yellow:focus, .swagger-ui .hover-bg-yellow:hover { background-color: #664b00; } + +.swagger-ui .bg-purple, .swagger-ui .hover-bg-purple:focus, .swagger-ui .hover-bg-purple:hover { background-color: #5e2ca5; } + +.swagger-ui .bg-light-purple, .swagger-ui .hover-bg-light-purple:focus, .swagger-ui .hover-bg-light-purple:hover { background-color: #672caf; } + +.swagger-ui .bg-dark-pink, .swagger-ui .hover-bg-dark-pink:focus, .swagger-ui .hover-bg-dark-pink:hover { background-color: #ab2b81; } + +.swagger-ui .bg-hot-pink, .swagger-ui .hover-bg-hot-pink:focus, .swagger-ui .hover-bg-hot-pink:hover { background-color: #c03086; } + +.swagger-ui .bg-pink, .swagger-ui .hover-bg-pink:focus, .swagger-ui .hover-bg-pink:hover { background-color: #8f2464; } + +.swagger-ui .bg-light-pink, .swagger-ui .hover-bg-light-pink:focus, .swagger-ui .hover-bg-light-pink:hover { background-color: #721d4d; } + +.swagger-ui .bg-dark-green, .swagger-ui .hover-bg-dark-green:focus, .swagger-ui .hover-bg-dark-green:hover { background-color: #1c6e50; } + +.swagger-ui .bg-green, .swagger-ui 
.hover-bg-green:focus, .swagger-ui .hover-bg-green:hover { background-color: #279b70; } + +.swagger-ui .bg-light-green, .swagger-ui .hover-bg-light-green:focus, .swagger-ui .hover-bg-light-green:hover { background-color: #228762; } + +.swagger-ui .bg-navy, .swagger-ui .hover-bg-navy:focus, .swagger-ui .hover-bg-navy:hover { background-color: #0d1d35; } + +.swagger-ui .bg-dark-blue, .swagger-ui .hover-bg-dark-blue:focus, .swagger-ui .hover-bg-dark-blue:hover { background-color: #20497e; } + +.swagger-ui .bg-blue, .swagger-ui .hover-bg-blue:focus, .swagger-ui .hover-bg-blue:hover { background-color: #4380d0; } + +.swagger-ui .bg-light-blue, .swagger-ui .hover-bg-light-blue:focus, .swagger-ui .hover-bg-light-blue:hover { background-color: #20517e; } + +.swagger-ui .bg-lightest-blue, .swagger-ui .hover-bg-lightest-blue:focus, .swagger-ui .hover-bg-lightest-blue:hover { background-color: #143a52; } + +.swagger-ui .bg-washed-blue, .swagger-ui .hover-bg-washed-blue:focus, .swagger-ui .hover-bg-washed-blue:hover { background-color: #0c312d; } + +.swagger-ui .bg-washed-green, .swagger-ui .hover-bg-washed-green:focus, .swagger-ui .hover-bg-washed-green:hover { background-color: #0f3d2c; } + +.swagger-ui .bg-washed-red, .swagger-ui .hover-bg-washed-red:focus, .swagger-ui .hover-bg-washed-red:hover { background-color: #411010; } + +.swagger-ui .bg-inherit, .swagger-ui .hover-bg-inherit:focus, .swagger-ui .hover-bg-inherit:hover { background-color: inherit; } + +.swagger-ui .shadow-hover { transition: all .5s cubic-bezier(.165, .84, .44, 1) 0s; } + +.swagger-ui .shadow-hover::after { + border-radius: inherit; + box-shadow: rgba(0, 0, 0, .2) 0 0 16px 2px; + content: ""; + height: 100%; + left: 0; + opacity: 0; + position: absolute; + top: 0; + transition: opacity .5s cubic-bezier(.165, .84, .44, 1) 0s; + width: 100%; + z-index: -1; +} + +.swagger-ui .bg-animate, .swagger-ui .bg-animate:focus, .swagger-ui .bg-animate:hover { transition: background-color .15s ease-in-out 0s; } + +.swagger-ui .nested-links a { + color: #99bae6; + transition: color .15s ease-in 0s; +} + +.swagger-ui .nested-links a:focus, .swagger-ui .nested-links a:hover { + color: #a9cbea; + transition: color .15s ease-in 0s; +} + +.swagger-ui .opblock-tag { + border-bottom: 1px solid rgba(58, 64, 80, .3); + color: #b5bac9; + transition: all .2s ease 0s; +} + +.swagger-ui .opblock-tag svg, .swagger-ui section.models h4 svg { transition: all .4s ease 0s; } + +.swagger-ui .opblock { + border: 1px solid #000; + border-radius: 4px; + box-shadow: rgba(0, 0, 0, .19) 0 0 3px; + margin: 0 0 15px; +} + +.swagger-ui .opblock .tab-header .tab-item.active h4 span::after { background: gray; } + +.swagger-ui .opblock.is-open .opblock-summary { border-bottom: 1px solid #000; } + +.swagger-ui .opblock .opblock-section-header { + background: rgba(28, 28, 33, .8); + box-shadow: rgba(0, 0, 0, .1) 0 1px 2px; +} + +.swagger-ui .opblock .opblock-section-header > label > span { padding: 0 10px 0 0; } + +.swagger-ui .opblock .opblock-summary-method { + background: #000; + color: #fff; + text-shadow: rgba(0, 0, 0, .1) 0 1px 0; +} + +.swagger-ui .opblock.opblock-post { + background: rgba(72, 203, 144, .1); + border-color: #48cb90; +} + +.swagger-ui .opblock.opblock-post .opblock-summary-method, .swagger-ui .opblock.opblock-post .tab-header .tab-item.active h4 span::after { background: #48cb90; } + +.swagger-ui .opblock.opblock-post .opblock-summary { border-color: #48cb90; } + +.swagger-ui .opblock.opblock-put { + background: rgba(213, 157, 88, .1); + 
border-color: #d59d58; +} + +.swagger-ui .opblock.opblock-put .opblock-summary-method, .swagger-ui .opblock.opblock-put .tab-header .tab-item.active h4 span::after { background: #d59d58; } + +.swagger-ui .opblock.opblock-put .opblock-summary { border-color: #d59d58; } + +.swagger-ui .opblock.opblock-delete { + background: rgba(200, 50, 50, .1); + border-color: #c83232; +} + +.swagger-ui .opblock.opblock-delete .opblock-summary-method, .swagger-ui .opblock.opblock-delete .tab-header .tab-item.active h4 span::after { background: #c83232; } + +.swagger-ui .opblock.opblock-delete .opblock-summary { border-color: #c83232; } + +.swagger-ui .opblock.opblock-get { + background: rgba(42, 105, 167, .1); + border-color: #2a69a7; +} + +.swagger-ui .opblock.opblock-get .opblock-summary-method, .swagger-ui .opblock.opblock-get .tab-header .tab-item.active h4 span::after { background: #2a69a7; } + +.swagger-ui .opblock.opblock-get .opblock-summary { border-color: #2a69a7; } + +.swagger-ui .opblock.opblock-patch { + background: rgba(92, 214, 188, .1); + border-color: #5cd6bc; +} + +.swagger-ui .opblock.opblock-patch .opblock-summary-method, .swagger-ui .opblock.opblock-patch .tab-header .tab-item.active h4 span::after { background: #5cd6bc; } + +.swagger-ui .opblock.opblock-patch .opblock-summary { border-color: #5cd6bc; } + +.swagger-ui .opblock.opblock-head { + background: rgba(140, 63, 207, .1); + border-color: #8c3fcf; +} + +.swagger-ui .opblock.opblock-head .opblock-summary-method, .swagger-ui .opblock.opblock-head .tab-header .tab-item.active h4 span::after { background: #8c3fcf; } + +.swagger-ui .opblock.opblock-head .opblock-summary { border-color: #8c3fcf; } + +.swagger-ui .opblock.opblock-options { + background: rgba(36, 89, 143, .1); + border-color: #24598f; +} + +.swagger-ui .opblock.opblock-options .opblock-summary-method, .swagger-ui .opblock.opblock-options .tab-header .tab-item.active h4 span::after { background: #24598f; } + +.swagger-ui .opblock.opblock-options .opblock-summary { border-color: #24598f; } + +.swagger-ui .opblock.opblock-deprecated { + background: rgba(46, 46, 46, .1); + border-color: #2e2e2e; + opacity: .6; +} + +.swagger-ui .opblock.opblock-deprecated .opblock-summary-method, .swagger-ui .opblock.opblock-deprecated .tab-header .tab-item.active h4 span::after { background: #2e2e2e; } + +.swagger-ui .opblock.opblock-deprecated .opblock-summary { border-color: #2e2e2e; } + +.swagger-ui .filter .operation-filter-input { border: 2px solid #2b3446; } + +.swagger-ui .tab li:first-of-type::after { background: rgba(0, 0, 0, .2); } + +.swagger-ui .download-contents { + background: #7c8192; + color: #fff; +} + +.swagger-ui .scheme-container { + background: #1c1c21; + box-shadow: rgba(0, 0, 0, .15) 0 1px 2px 0; +} + +.swagger-ui .loading-container .loading::before { + animation: 1s linear 0s infinite normal none running rotation, .5s ease 0s 1 normal none running opacity; + border-color: rgba(0, 0, 0, .6) rgba(84, 84, 84, .1) rgba(84, 84, 84, .1); +} + +.swagger-ui .response-control-media-type--accept-controller select { border-color: #196619; } + +.swagger-ui .response-control-media-type__accept-message { color: #99e699; } + +.swagger-ui .version-pragma__message code { background-color: #3b3b3b; } + +.swagger-ui .btn { + background: 0 0; + border: 2px solid gray; + box-shadow: rgba(0, 0, 0, .1) 0 1px 2px; + color: #b5bac9; +} + +.swagger-ui .btn:hover { box-shadow: rgba(0, 0, 0, .3) 0 0 5px; } + +.swagger-ui .btn.authorize, .swagger-ui .btn.cancel { + background-color: transparent; 
+ border-color: #a72a2a; + color: #e69999; +} + +.swagger-ui .btn.authorize { + border-color: #48cb90; + color: #9ce3c3; +} + +.swagger-ui .btn.authorize svg { fill: #9ce3c3; } + +.swagger-ui .btn.execute { + background-color: #5892d5; + border-color: #5892d5; + color: #fff; +} + +.swagger-ui .copy-to-clipboard { background: #7c8192; } + +.swagger-ui .copy-to-clipboard button { background: url("data:image/svg+xml;charset=utf-8,") 50% center no-repeat; } + +.swagger-ui select { + background: url("data:image/svg+xml;charset=utf-8,") right 10px center/20px no-repeat #212121; + background: url(data:image/svg+xml;base64,PD94bWwgdmVyc2lvbj0iMS4wIiBlbmNvZGluZz0iVVRGLTgiIHN0YW5kYWxvbmU9Im5vIj8+CjxzdmcKICAgeG1sbnM6ZGM9Imh0dHA6Ly9wdXJsLm9yZy9kYy9lbGVtZW50cy8xLjEvIgogICB4bWxuczpjYz0iaHR0cDovL2NyZWF0aXZlY29tbW9ucy5vcmcvbnMjIgogICB4bWxuczpyZGY9Imh0dHA6Ly93d3cudzMub3JnLzE5OTkvMDIvMjItcmRmLXN5bnRheC1ucyMiCiAgIHhtbG5zOnN2Zz0iaHR0cDovL3d3dy53My5vcmcvMjAwMC9zdmciCiAgIHhtbG5zPSJodHRwOi8vd3d3LnczLm9yZy8yMDAwL3N2ZyIKICAgeG1sbnM6c29kaXBvZGk9Imh0dHA6Ly9zb2RpcG9kaS5zb3VyY2Vmb3JnZS5uZXQvRFREL3NvZGlwb2RpLTAuZHRkIgogICB4bWxuczppbmtzY2FwZT0iaHR0cDovL3d3dy5pbmtzY2FwZS5vcmcvbmFtZXNwYWNlcy9pbmtzY2FwZSIKICAgaW5rc2NhcGU6dmVyc2lvbj0iMS4wICg0MDM1YTRmYjQ5LCAyMDIwLTA1LTAxKSIKICAgc29kaXBvZGk6ZG9jbmFtZT0iZG93bmxvYWQuc3ZnIgogICBpZD0ic3ZnNCIKICAgdmVyc2lvbj0iMS4xIgogICB2aWV3Qm94PSIwIDAgMjAgMjAiPgogIDxtZXRhZGF0YQogICAgIGlkPSJtZXRhZGF0YTEwIj4KICAgIDxyZGY6UkRGPgogICAgICA8Y2M6V29yawogICAgICAgICByZGY6YWJvdXQ9IiI+CiAgICAgICAgPGRjOmZvcm1hdD5pbWFnZS9zdmcreG1sPC9kYzpmb3JtYXQ+CiAgICAgICAgPGRjOnR5cGUKICAgICAgICAgICByZGY6cmVzb3VyY2U9Imh0dHA6Ly9wdXJsLm9yZy9kYy9kY21pdHlwZS9TdGlsbEltYWdlIiAvPgogICAgICA8L2NjOldvcms+CiAgICA8L3JkZjpSREY+CiAgPC9tZXRhZGF0YT4KICA8ZGVmcwogICAgIGlkPSJkZWZzOCIgLz4KICA8c29kaXBvZGk6bmFtZWR2aWV3CiAgICAgaW5rc2NhcGU6Y3VycmVudC1sYXllcj0ic3ZnNCIKICAgICBpbmtzY2FwZTp3aW5kb3ctbWF4aW1pemVkPSIxIgogICAgIGlua3NjYXBlOndpbmRvdy15PSItOSIKICAgICBpbmtzY2FwZTp3aW5kb3cteD0iLTkiCiAgICAgaW5rc2NhcGU6Y3k9IjEwIgogICAgIGlua3NjYXBlOmN4PSIxMCIKICAgICBpbmtzY2FwZTp6b29tPSI0MS41IgogICAgIHNob3dncmlkPSJmYWxzZSIKICAgICBpZD0ibmFtZWR2aWV3NiIKICAgICBpbmtzY2FwZTp3aW5kb3ctaGVpZ2h0PSIxMDAxIgogICAgIGlua3NjYXBlOndpbmRvdy13aWR0aD0iMTkyMCIKICAgICBpbmtzY2FwZTpwYWdlc2hhZG93PSIyIgogICAgIGlua3NjYXBlOnBhZ2VvcGFjaXR5PSIwIgogICAgIGd1aWRldG9sZXJhbmNlPSIxMCIKICAgICBncmlkdG9sZXJhbmNlPSIxMCIKICAgICBvYmplY3R0b2xlcmFuY2U9IjEwIgogICAgIGJvcmRlcm9wYWNpdHk9IjEiCiAgICAgYm9yZGVyY29sb3I9IiM2NjY2NjYiCiAgICAgcGFnZWNvbG9yPSIjZmZmZmZmIiAvPgogIDxwYXRoCiAgICAgc3R5bGU9ImZpbGw6I2ZmZmZmZiIKICAgICBpZD0icGF0aDIiCiAgICAgZD0iTTEzLjQxOCA3Ljg1OWEuNjk1LjY5NSAwIDAxLjk3OCAwIC42OC42OCAwIDAxMCAuOTY5bC0zLjkwOCAzLjgzYS42OTcuNjk3IDAgMDEtLjk3OSAwbC0zLjkwOC0zLjgzYS42OC42OCAwIDAxMC0uOTY5LjY5NS42OTUgMCAwMS45NzggMEwxMCAxMWwzLjQxOC0zLjE0MXoiIC8+Cjwvc3ZnPgo=) right 10px center/20px no-repeat #1c1c21; + border: 2px solid #41444e; +} + +.swagger-ui select[multiple] { background: #212121; } + +.swagger-ui button.invalid, .swagger-ui input[type=email].invalid, .swagger-ui input[type=file].invalid, .swagger-ui input[type=password].invalid, .swagger-ui input[type=search].invalid, .swagger-ui input[type=text].invalid, .swagger-ui select.invalid, .swagger-ui textarea.invalid { + background: #390e0e; + border-color: #c83232; +} + +.swagger-ui input[type=email], .swagger-ui input[type=file], .swagger-ui input[type=password], .swagger-ui input[type=search], .swagger-ui input[type=text], .swagger-ui textarea { + background: #1c1c21; + border: 1px solid #404040; +} + +.swagger-ui textarea { + background: rgba(28, 28, 33, .8); 
+ color: #b5bac9; +} + +.swagger-ui input[disabled], .swagger-ui select[disabled] { + background-color: #1f1f1f; + color: #bfbfbf; +} + +.swagger-ui textarea[disabled] { + background-color: #41444e; + color: #fff; +} + +.swagger-ui select[disabled] { border-color: #878787; } + +.swagger-ui textarea:focus { border: 2px solid #2a69a7; } + +.swagger-ui .checkbox input[type=checkbox] + label > .item { + background: #303030; + box-shadow: #303030 0 0 0 2px; +} + +.swagger-ui .checkbox input[type=checkbox]:checked + label > .item { background: url("data:image/svg+xml;charset=utf-8,") 50% center no-repeat #303030; } + +.swagger-ui .dialog-ux .backdrop-ux { background: rgba(0, 0, 0, .8); } + +.swagger-ui .dialog-ux .modal-ux { + background: #1c1c21; + border: 1px solid #2e2e2e; + box-shadow: rgba(0, 0, 0, .2) 0 10px 30px 0; +} + +.swagger-ui .dialog-ux .modal-ux-header .close-modal { background: 0 0; } + +.swagger-ui .model .deprecated span, .swagger-ui .model .deprecated td { color: #bfbfbf !important; } + +.swagger-ui .model-toggle::after { background: url("data:image/svg+xml;charset=utf-8,") 50% center/100% no-repeat; } + +.swagger-ui .model-hint { + background: rgba(0, 0, 0, .7); + color: #ebebeb; +} + +.swagger-ui section.models { border: 1px solid rgba(58, 64, 80, .3); } + +.swagger-ui section.models.is-open h4 { border-bottom: 1px solid rgba(58, 64, 80, .3); } + +.swagger-ui section.models .model-container { background: rgba(0, 0, 0, .05); } + +.swagger-ui section.models .model-container:hover { background: rgba(0, 0, 0, .07); } + +.swagger-ui .model-box { background: rgba(0, 0, 0, .1); } + +.swagger-ui .prop-type { color: #aaaad4; } + +.swagger-ui table thead tr td, .swagger-ui table thead tr th { + border-bottom: 1px solid rgba(58, 64, 80, .2); + color: #b5bac9; +} + +.swagger-ui .parameter__name.required::after { color: rgba(230, 153, 153, .6); } + +.swagger-ui .topbar .download-url-wrapper .select-label { color: #f0f0f0; } + +.swagger-ui .topbar .download-url-wrapper .download-url-button { + background: #63a040; + color: #fff; +} + +.swagger-ui .info .title small { background: #7c8492; } + +.swagger-ui .info .title small.version-stamp { background-color: #7a9b27; } + +.swagger-ui .auth-container .errors { + background-color: #350d0d; + color: #b5bac9; +} + +.swagger-ui .errors-wrapper { + background: rgba(200, 50, 50, .1); + border: 2px solid #c83232; +} + +.swagger-ui .markdown code, .swagger-ui .renderedmarkdown code { + background: rgba(0, 0, 0, .05); + color: #c299e6; +} + +.swagger-ui .model-toggle:after { background: 
url(data:image/svg+xml;base64,PD94bWwgdmVyc2lvbj0iMS4wIiBlbmNvZGluZz0iVVRGLTgiIHN0YW5kYWxvbmU9Im5vIj8+CjxzdmcKICAgeG1sbnM6ZGM9Imh0dHA6Ly9wdXJsLm9yZy9kYy9lbGVtZW50cy8xLjEvIgogICB4bWxuczpjYz0iaHR0cDovL2NyZWF0aXZlY29tbW9ucy5vcmcvbnMjIgogICB4bWxuczpyZGY9Imh0dHA6Ly93d3cudzMub3JnLzE5OTkvMDIvMjItcmRmLXN5bnRheC1ucyMiCiAgIHhtbG5zOnN2Zz0iaHR0cDovL3d3dy53My5vcmcvMjAwMC9zdmciCiAgIHhtbG5zPSJodHRwOi8vd3d3LnczLm9yZy8yMDAwL3N2ZyIKICAgeG1sbnM6c29kaXBvZGk9Imh0dHA6Ly9zb2RpcG9kaS5zb3VyY2Vmb3JnZS5uZXQvRFREL3NvZGlwb2RpLTAuZHRkIgogICB4bWxuczppbmtzY2FwZT0iaHR0cDovL3d3dy5pbmtzY2FwZS5vcmcvbmFtZXNwYWNlcy9pbmtzY2FwZSIKICAgaW5rc2NhcGU6dmVyc2lvbj0iMS4wICg0MDM1YTRmYjQ5LCAyMDIwLTA1LTAxKSIKICAgc29kaXBvZGk6ZG9jbmFtZT0iZG93bmxvYWQyLnN2ZyIKICAgaWQ9InN2ZzQiCiAgIHZlcnNpb249IjEuMSIKICAgaGVpZ2h0PSIyNCIKICAgd2lkdGg9IjI0Ij4KICA8bWV0YWRhdGEKICAgICBpZD0ibWV0YWRhdGExMCI+CiAgICA8cmRmOlJERj4KICAgICAgPGNjOldvcmsKICAgICAgICAgcmRmOmFib3V0PSIiPgogICAgICAgIDxkYzpmb3JtYXQ+aW1hZ2Uvc3ZnK3htbDwvZGM6Zm9ybWF0PgogICAgICAgIDxkYzp0eXBlCiAgICAgICAgICAgcmRmOnJlc291cmNlPSJodHRwOi8vcHVybC5vcmcvZGMvZGNtaXR5cGUvU3RpbGxJbWFnZSIgLz4KICAgICAgPC9jYzpXb3JrPgogICAgPC9yZGY6UkRGPgogIDwvbWV0YWRhdGE+CiAgPGRlZnMKICAgICBpZD0iZGVmczgiIC8+CiAgPHNvZGlwb2RpOm5hbWVkdmlldwogICAgIGlua3NjYXBlOmN1cnJlbnQtbGF5ZXI9InN2ZzQiCiAgICAgaW5rc2NhcGU6d2luZG93LW1heGltaXplZD0iMSIKICAgICBpbmtzY2FwZTp3aW5kb3cteT0iLTkiCiAgICAgaW5rc2NhcGU6d2luZG93LXg9Ii05IgogICAgIGlua3NjYXBlOmN5PSIxMiIKICAgICBpbmtzY2FwZTpjeD0iMTIiCiAgICAgaW5rc2NhcGU6em9vbT0iMzQuNTgzMzMzIgogICAgIHNob3dncmlkPSJmYWxzZSIKICAgICBpZD0ibmFtZWR2aWV3NiIKICAgICBpbmtzY2FwZTp3aW5kb3ctaGVpZ2h0PSIxMDAxIgogICAgIGlua3NjYXBlOndpbmRvdy13aWR0aD0iMTkyMCIKICAgICBpbmtzY2FwZTpwYWdlc2hhZG93PSIyIgogICAgIGlua3NjYXBlOnBhZ2VvcGFjaXR5PSIwIgogICAgIGd1aWRldG9sZXJhbmNlPSIxMCIKICAgICBncmlkdG9sZXJhbmNlPSIxMCIKICAgICBvYmplY3R0b2xlcmFuY2U9IjEwIgogICAgIGJvcmRlcm9wYWNpdHk9IjEiCiAgICAgYm9yZGVyY29sb3I9IiM2NjY2NjYiCiAgICAgcGFnZWNvbG9yPSIjZmZmZmZmIiAvPgogIDxwYXRoCiAgICAgc3R5bGU9ImZpbGw6I2ZmZmZmZiIKICAgICBpZD0icGF0aDIiCiAgICAgZD0iTTEwIDZMOC41OSA3LjQxIDEzLjE3IDEybC00LjU4IDQuNTlMMTAgMThsNi02eiIgLz4KPC9zdmc+Cg==) 50% no-repeat; } + +.swagger-ui .expand-operation svg, .swagger-ui section.models h4 svg { fill: #fff; } + +::-webkit-scrollbar-track { background-color: #646464 !important; } + +::-webkit-scrollbar-thumb { + background-color: #242424 !important; + border: 2px solid #3e4346 !important; +} + +::-webkit-scrollbar-button:vertical:start:decrement { + background: linear-gradient(130deg, #696969 40%, rgba(255, 0, 0, 0) 41%), linear-gradient(230deg, #696969 40%, transparent 41%), linear-gradient(0deg, #696969 40%, transparent 31%); + background-color: #b6b6b6; +} + +::-webkit-scrollbar-button:vertical:end:increment { + background: linear-gradient(310deg, #696969 40%, transparent 41%), linear-gradient(50deg, #696969 40%, transparent 41%), linear-gradient(180deg, #696969 40%, transparent 31%); + background-color: #b6b6b6; +} + +::-webkit-scrollbar-button:horizontal:end:increment { + background: linear-gradient(210deg, #696969 40%, transparent 41%), linear-gradient(330deg, #696969 40%, transparent 41%), linear-gradient(90deg, #696969 30%, transparent 31%); + background-color: #b6b6b6; +} + +::-webkit-scrollbar-button:horizontal:start:decrement { + background: linear-gradient(30deg, #696969 40%, transparent 41%), linear-gradient(150deg, #696969 40%, transparent 41%), linear-gradient(270deg, #696969 30%, transparent 31%); + background-color: #b6b6b6; +} + +::-webkit-scrollbar-button, ::-webkit-scrollbar-track-piece { background-color: #3e4346 !important; } + +.swagger-ui 
.black, .swagger-ui .checkbox, .swagger-ui .dark-gray, .swagger-ui .download-url-wrapper .loading, .swagger-ui .errors-wrapper .errors small, .swagger-ui .fallback, .swagger-ui .filter .loading, .swagger-ui .gray, .swagger-ui .hover-black:focus, .swagger-ui .hover-black:hover, .swagger-ui .hover-dark-gray:focus, .swagger-ui .hover-dark-gray:hover, .swagger-ui .hover-gray:focus, .swagger-ui .hover-gray:hover, .swagger-ui .hover-light-silver:focus, .swagger-ui .hover-light-silver:hover, .swagger-ui .hover-mid-gray:focus, .swagger-ui .hover-mid-gray:hover, .swagger-ui .hover-near-black:focus, .swagger-ui .hover-near-black:hover, .swagger-ui .hover-silver:focus, .swagger-ui .hover-silver:hover, .swagger-ui .light-silver, .swagger-ui .markdown pre, .swagger-ui .mid-gray, .swagger-ui .model .property, .swagger-ui .model .property.primitive, .swagger-ui .model-title, .swagger-ui .near-black, .swagger-ui .parameter__extension, .swagger-ui .parameter__in, .swagger-ui .prop-format, .swagger-ui .renderedmarkdown pre, .swagger-ui .response-col_links .response-undocumented, .swagger-ui .response-col_status .response-undocumented, .swagger-ui .silver, .swagger-ui section.models h4, .swagger-ui section.models h5, .swagger-ui span.token-not-formatted, .swagger-ui span.token-string, .swagger-ui table.headers .header-example, .swagger-ui table.model tr.description, .swagger-ui table.model tr.extension { color: #bfbfbf; } + +.swagger-ui .hover-white:focus, .swagger-ui .hover-white:hover, .swagger-ui .info .title small pre, .swagger-ui .topbar a, .swagger-ui .white { color: #fff; } + +.swagger-ui .bg-black-10, .swagger-ui .hover-bg-black-10:focus, .swagger-ui .hover-bg-black-10:hover, .swagger-ui .stripe-dark:nth-child(2n + 1) { background-color: rgba(0, 0, 0, .1); } + +.swagger-ui .bg-white-10, .swagger-ui .hover-bg-white-10:focus, .swagger-ui .hover-bg-white-10:hover, .swagger-ui .stripe-light:nth-child(2n + 1) { background-color: rgba(28, 28, 33, .1); } + +.swagger-ui .bg-light-silver, .swagger-ui .hover-bg-light-silver:focus, .swagger-ui .hover-bg-light-silver:hover, .swagger-ui .striped--light-silver:nth-child(2n + 1) { background-color: #6e6e6e; } + +.swagger-ui .bg-moon-gray, .swagger-ui .hover-bg-moon-gray:focus, .swagger-ui .hover-bg-moon-gray:hover, .swagger-ui .striped--moon-gray:nth-child(2n + 1) { background-color: #4d4d4d; } + +.swagger-ui .bg-light-gray, .swagger-ui .hover-bg-light-gray:focus, .swagger-ui .hover-bg-light-gray:hover, .swagger-ui .striped--light-gray:nth-child(2n + 1) { background-color: #2b2b2b; } + +.swagger-ui .bg-near-white, .swagger-ui .hover-bg-near-white:focus, .swagger-ui .hover-bg-near-white:hover, .swagger-ui .striped--near-white:nth-child(2n + 1) { background-color: #242424; } + +.swagger-ui .opblock-tag:hover, .swagger-ui section.models h4:hover { background: rgba(0, 0, 0, .02); } + +.swagger-ui .checkbox p, .swagger-ui .dialog-ux .modal-ux-content h4, .swagger-ui .dialog-ux .modal-ux-content p, .swagger-ui .dialog-ux .modal-ux-header h3, .swagger-ui .errors-wrapper .errors h4, .swagger-ui .errors-wrapper hgroup h4, .swagger-ui .info .base-url, .swagger-ui .info .title, .swagger-ui .info h1, .swagger-ui .info h2, .swagger-ui .info h3, .swagger-ui .info h4, .swagger-ui .info h5, .swagger-ui .info li, .swagger-ui .info p, .swagger-ui .info table, .swagger-ui .loading-container .loading::after, .swagger-ui .model, .swagger-ui .opblock .opblock-section-header h4, .swagger-ui .opblock .opblock-section-header > label, .swagger-ui .opblock .opblock-summary-description, 
.swagger-ui .opblock .opblock-summary-operation-id, .swagger-ui .opblock .opblock-summary-path, .swagger-ui .opblock .opblock-summary-path__deprecated, .swagger-ui .opblock-description-wrapper, .swagger-ui .opblock-description-wrapper h4, .swagger-ui .opblock-description-wrapper p, .swagger-ui .opblock-external-docs-wrapper, .swagger-ui .opblock-external-docs-wrapper h4, .swagger-ui .opblock-external-docs-wrapper p, .swagger-ui .opblock-tag small, .swagger-ui .opblock-title_normal, .swagger-ui .opblock-title_normal h4, .swagger-ui .opblock-title_normal p, .swagger-ui .parameter__name, .swagger-ui .parameter__type, .swagger-ui .response-col_links, .swagger-ui .response-col_status, .swagger-ui .responses-inner h4, .swagger-ui .responses-inner h5, .swagger-ui .scheme-container .schemes > label, .swagger-ui .scopes h2, .swagger-ui .servers > label, .swagger-ui .tab li, .swagger-ui label, .swagger-ui select, .swagger-ui table.headers td { color: #b5bac9; } + +.swagger-ui .download-url-wrapper .failed, .swagger-ui .filter .failed, .swagger-ui .model-deprecated-warning, .swagger-ui .parameter__deprecated, .swagger-ui .parameter__name.required span, .swagger-ui table.model tr.property-row .star { color: #e69999; } + +.swagger-ui .opblock-body pre.microlight, .swagger-ui textarea.curl { + background: #41444e; + border-radius: 4px; + color: #fff; +} + +.swagger-ui .expand-methods svg, .swagger-ui .expand-methods:hover svg { fill: #bfbfbf; } + +.swagger-ui .auth-container, .swagger-ui .dialog-ux .modal-ux-header { border-bottom: 1px solid #2e2e2e; } + +.swagger-ui .topbar .download-url-wrapper .select-label select, .swagger-ui .topbar .download-url-wrapper input[type=text] { border: 2px solid #63a040; } + +.swagger-ui .info a, .swagger-ui .info a:hover, .swagger-ui .scopes h2 a { color: #99bde6; } + +/* Dark Scrollbar */ +::-webkit-scrollbar { + width: 14px; + height: 14px; +} + +::-webkit-scrollbar-button { + background-color: #3e4346 !important; +} + +::-webkit-scrollbar-track { + background-color: #646464 !important; +} + +::-webkit-scrollbar-track-piece { + background-color: #3e4346 !important; +} + +::-webkit-scrollbar-thumb { + height: 50px; + background-color: #242424 !important; + border: 2px solid #3e4346 !important; +} + +::-webkit-scrollbar-corner {} + +::-webkit-resizer {} + +::-webkit-scrollbar-button:vertical:start:decrement { + background: + linear-gradient(130deg, #696969 40%, rgba(255, 0, 0, 0) 41%), + linear-gradient(230deg, #696969 40%, rgba(0, 0, 0, 0) 41%), + linear-gradient(0deg, #696969 40%, rgba(0, 0, 0, 0) 31%); + background-color: #b6b6b6; +} + +::-webkit-scrollbar-button:vertical:end:increment { + background: + linear-gradient(310deg, #696969 40%, rgba(0, 0, 0, 0) 41%), + linear-gradient(50deg, #696969 40%, rgba(0, 0, 0, 0) 41%), + linear-gradient(180deg, #696969 40%, rgba(0, 0, 0, 0) 31%); + background-color: #b6b6b6; +} + +::-webkit-scrollbar-button:horizontal:end:increment { + background: + linear-gradient(210deg, #696969 40%, rgba(0, 0, 0, 0) 41%), + linear-gradient(330deg, #696969 40%, rgba(0, 0, 0, 0) 41%), + linear-gradient(90deg, #696969 30%, rgba(0, 0, 0, 0) 31%); + background-color: #b6b6b6; +} + +::-webkit-scrollbar-button:horizontal:start:decrement { + background: + linear-gradient(30deg, #696969 40%, rgba(0, 0, 0, 0) 41%), + linear-gradient(150deg, #696969 40%, rgba(0, 0, 0, 0) 41%), + linear-gradient(270deg, #696969 30%, rgba(0, 0, 0, 0) 31%); + background-color: #b6b6b6; +} diff --git a/static/swagger-ui/index.css b/static/swagger-ui/index.css new file mode 
100644 index 00000000..0aa7e9be --- /dev/null +++ b/static/swagger-ui/index.css @@ -0,0 +1,17 @@ +/*! Swagger UI 4.13.2 | https://swagger.io/tools/swagger-ui/ | Apache License 2.0 (license file can be found at ./LICENSE) */ +html { + box-sizing: border-box; + overflow: -moz-scrollbars-vertical; + overflow-y: scroll; +} + +*, +*:before, +*:after { + box-sizing: inherit; +} + +body { + margin: 0; + background: #fafafa; +} diff --git a/static/swagger-ui/oauth2-redirect.html b/static/swagger-ui/oauth2-redirect.html new file mode 100644 index 00000000..9a25c9e8 --- /dev/null +++ b/static/swagger-ui/oauth2-redirect.html @@ -0,0 +1,79 @@ + + + + Swagger UI: OAuth2 Redirect + + + + + diff --git a/static/swagger-ui/swagger-ui-bundle.js b/static/swagger-ui/swagger-ui-bundle.js new file mode 100644 index 00000000..0f5ced83 --- /dev/null +++ b/static/swagger-ui/swagger-ui-bundle.js @@ -0,0 +1,2 @@ +/*! Swagger UI 4.13.2 | https://swagger.io/tools/swagger-ui/ | Apache License 2.0 (license file can be found at ./LICENSE) */ +!function(e,t){"object"==typeof exports&&"object"==typeof module?module.exports=t():"function"==typeof define&&define.amd?define([],t):"object"==typeof exports?exports.SwaggerUIBundle=t():e.SwaggerUIBundle=t()}(this,(function(){return(()=>{var e={17967:(e,t)=>{"use strict";t.N=void 0;var r=/^([^\w]*)(javascript|data|vbscript)/im,n=/&#(\w+)(^\w|;)?/g,o=/[\u0000-\u001F\u007F-\u009F\u2000-\u200D\uFEFF]/gim,a=/^([^:]+):/gm,i=[".","/"];t.N=function(e){var t,s=(t=e||"",t.replace(n,(function(e,t){return String.fromCharCode(t)}))).replace(o,"").trim();if(!s)return"about:blank";if(function(e){return i.indexOf(e[0])>-1}(s))return s;var l=s.match(a);if(!l)return s;var u=l[0];return r.test(u)?"about:blank":s}},53795:(e,t,r)=>{"use strict";r.d(t,{Z:()=>P});var n=r(23101),o=r.n(n),a=r(61125),i=r.n(a),s=r(11882),l=r.n(s),u=r(97606),c=r.n(u),p=r(67294),f=r(43393);function h(e){return h="function"==typeof Symbol&&"symbol"==typeof Symbol.iterator?function(e){return typeof e}:function(e){return e&&"function"==typeof Symbol&&e.constructor===Symbol&&e!==Symbol.prototype?"symbol":typeof e},h(e)}function d(e,t){if(!(e instanceof t))throw new TypeError("Cannot call a class as a function")}function m(e,t){for(var r=0;r1&&void 0!==arguments[1]?arguments[1]:{},r=arguments.length>2&&void 0!==arguments[2]?arguments[2]:{},n=k(t,r),o=e||Object.keys(y({},r,{},t));return o.every(n)}function k(e,t){return function(r){if("string"==typeof r)return(0,f.is)(t[r],e[r]);if(Array.isArray(r))return(0,f.is)(S(t,r),S(e,r));throw new TypeError("Invalid key: expected Array or string: "+r)}}var C=function(e){function t(){return d(this,t),E(this,b(t).apply(this,arguments))}var r,n,o;return function(e,t){if("function"!=typeof t&&null!==t)throw new TypeError("Super expression must either be null or a function");e.prototype=Object.create(t&&t.prototype,{constructor:{value:e,writable:!0,configurable:!0}}),t&&w(e,t)}(t,e),r=t,n=[{key:"shouldComponentUpdate",value:function(e){var t=arguments.length>1&&void 0!==arguments[1]?arguments[1]:{};return!A(this.updateOnProps,this.props,e,"updateOnProps")||!A(this.updateOnStates,this.state,t,"updateOnStates")}}],n&&m(r.prototype,n),o&&m(r,o),t}(p.Component);const O=C;var j=r(23930),I=r.n(j),N=r(45697),T=r.n(N);class P extends O{constructor(){super(...arguments),i()(this,"getModelName",(e=>-1!==l()(e).call(e,"#/definitions/")?e.replace(/^.*#\/definitions\//,""):-1!==l()(e).call(e,"#/components/schemas/")?e.replace(/^.*#\/components\/schemas\//,""):void 
0)),i()(this,"getRefSchema",(e=>{let{specSelectors:t}=this.props;return t.findDefinition(e)}))}render(){let{getComponent:e,getConfigs:t,specSelectors:n,schema:a,required:i,name:s,isRef:l,specPath:u,displayName:c,includeReadOnly:f,includeWriteOnly:h}=this.props;const d=e("ObjectModel"),m=e("ArrayModel"),g=e("PrimitiveModel");let v="object",y=a&&a.get("$$ref");if(!s&&y&&(s=this.getModelName(y)),!a&&y&&(a=this.getRefSchema(s)),!a)return p.createElement("span",{className:"model model-title"},p.createElement("span",{className:"model-title__text"},c||s),p.createElement("img",{src:r(2517),height:"20px",width:"20px"}));const b=n.isOAS3()&&a.get("deprecated");switch(l=void 0!==l?l:!!y,v=a&&a.get("type")||v,v){case"object":return p.createElement(d,o()({className:"object"},this.props,{specPath:u,getConfigs:t,schema:a,name:s,deprecated:b,isRef:l,includeReadOnly:f,includeWriteOnly:h}));case"array":return p.createElement(m,o()({className:"array"},this.props,{getConfigs:t,schema:a,name:s,deprecated:b,required:i,includeReadOnly:f,includeWriteOnly:h}));default:return p.createElement(g,o()({},this.props,{getComponent:e,getConfigs:t,schema:a,name:s,deprecated:b,required:i}))}}}i()(P,"propTypes",{schema:c()(I()).isRequired,getComponent:T().func.isRequired,getConfigs:T().func.isRequired,specSelectors:T().object.isRequired,name:T().string,displayName:T().string,isRef:T().bool,required:T().bool,expandDepth:T().number,depth:T().number,specPath:I().list.isRequired,includeReadOnly:T().bool,includeWriteOnly:T().bool})},5623:(e,t,r)=>{"use strict";r.d(t,{Z:()=>f});var n=r(61125),o=r.n(n),a=r(28222),i=r.n(a),s=r(67294),l=r(84564),u=r.n(l),c=r(90242),p=r(27504);class f extends s.Component{constructor(e,t){super(e,t),o()(this,"getDefinitionUrl",(()=>{let{specSelectors:e}=this.props;return new(u())(e.url(),p.Z.location).toString()}));let{getConfigs:r}=e,{validatorUrl:n}=r();this.state={url:this.getDefinitionUrl(),validatorUrl:void 0===n?"https://validator.swagger.io/validator":n}}UNSAFE_componentWillReceiveProps(e){let{getConfigs:t}=e,{validatorUrl:r}=t();this.setState({url:this.getDefinitionUrl(),validatorUrl:void 0===r?"https://validator.swagger.io/validator":r})}render(){let{getConfigs:e}=this.props,{spec:t}=e(),r=(0,c.Nm)(this.state.validatorUrl);return"object"==typeof t&&i()(t).length?null:this.state.url&&(0,c.hW)(this.state.validatorUrl)&&(0,c.hW)(this.state.url)?s.createElement("span",{className:"float-right"},s.createElement("a",{target:"_blank",rel:"noopener noreferrer",href:`${r}/debug?url=${encodeURIComponent(this.state.url)}`},s.createElement(h,{src:`${r}?url=${encodeURIComponent(this.state.url)}`,alt:"Online validator badge"}))):null}}class h extends s.Component{constructor(e){super(e),this.state={loaded:!1,error:!1}}componentDidMount(){const e=new Image;e.onload=()=>{this.setState({loaded:!0})},e.onerror=()=>{this.setState({error:!0})},e.src=this.props.src}UNSAFE_componentWillReceiveProps(e){if(e.src!==this.props.src){const t=new Image;t.onload=()=>{this.setState({loaded:!0})},t.onerror=()=>{this.setState({error:!0})},t.src=e.src}}render(){return this.state.error?s.createElement("img",{alt:"Error"}):this.state.loaded?s.createElement("img",{src:this.props.src,alt:this.props.alt}):null}}},86019:(e,t,r)=>{"use strict";r.d(t,{Z:()=>me,s:()=>ge});var n=r(67294),o=r(89927);function a(e,t){if(Array.prototype.indexOf)return e.indexOf(t);for(var r=0,n=e.length;r=0;r--)!0===t(e[r])&&e.splice(r,1)}function s(e){throw new Error("Unhandled case for value: '"+e+"'")}var l=function(){function e(e){void 
0===e&&(e={}),this.tagName="",this.attrs={},this.innerHTML="",this.whitespaceRegex=/\s+/,this.tagName=e.tagName||"",this.attrs=e.attrs||{},this.innerHTML=e.innerHtml||e.innerHTML||""}return e.prototype.setTagName=function(e){return this.tagName=e,this},e.prototype.getTagName=function(){return this.tagName||""},e.prototype.setAttr=function(e,t){return this.getAttrs()[e]=t,this},e.prototype.getAttr=function(e){return this.getAttrs()[e]},e.prototype.setAttrs=function(e){return Object.assign(this.getAttrs(),e),this},e.prototype.getAttrs=function(){return this.attrs||(this.attrs={})},e.prototype.setClass=function(e){return this.setAttr("class",e)},e.prototype.addClass=function(e){for(var t,r=this.getClass(),n=this.whitespaceRegex,o=r?r.split(n):[],i=e.split(n);t=i.shift();)-1===a(o,t)&&o.push(t);return this.getAttrs().class=o.join(" "),this},e.prototype.removeClass=function(e){for(var t,r=this.getClass(),n=this.whitespaceRegex,o=r?r.split(n):[],i=e.split(n);o.length&&(t=i.shift());){var s=a(o,t);-1!==s&&o.splice(s,1)}return this.getAttrs().class=o.join(" "),this},e.prototype.getClass=function(){return this.getAttrs().class||""},e.prototype.hasClass=function(e){return-1!==(" "+this.getClass()+" ").indexOf(" "+e+" ")},e.prototype.setInnerHTML=function(e){return this.innerHTML=e,this},e.prototype.setInnerHtml=function(e){return this.setInnerHTML(e)},e.prototype.getInnerHTML=function(){return this.innerHTML||""},e.prototype.getInnerHtml=function(){return this.getInnerHTML()},e.prototype.toAnchorString=function(){var e=this.getTagName(),t=this.buildAttrsStr();return["<",e,t=t?" "+t:"",">",this.getInnerHtml(),""].join("")},e.prototype.buildAttrsStr=function(){if(!this.attrs)return"";var e=this.getAttrs(),t=[];for(var r in e)e.hasOwnProperty(r)&&t.push(r+'="'+e[r]+'"');return t.join(" ")},e}();var u=function(){function e(e){void 0===e&&(e={}),this.newWindow=!1,this.truncate={},this.className="",this.newWindow=e.newWindow||!1,this.truncate=e.truncate||{},this.className=e.className||""}return e.prototype.build=function(e){return new l({tagName:"a",attrs:this.createAttrs(e),innerHtml:this.processAnchorText(e.getAnchorText())})},e.prototype.createAttrs=function(e){var t={href:e.getAnchorHref()},r=this.createCssClass(e);return r&&(t.class=r),this.newWindow&&(t.target="_blank",t.rel="noopener noreferrer"),this.truncate&&this.truncate.length&&this.truncate.length=s)return l.host.length==t?(l.host.substr(0,t-o)+r).substr(0,s+n):i(c,s).substr(0,s+n);var p="";if(l.path&&(p+="/"+l.path),l.query&&(p+="?"+l.query),p){if((c+p).length>=s)return(c+p).length==t?(c+p).substr(0,t):(c+i(p,s-c.length)).substr(0,s+n);c+=p}if(l.fragment){var f="#"+l.fragment;if((c+f).length>=s)return(c+f).length==t?(c+f).substr(0,t):(c+i(f,s-c.length)).substr(0,s+n);c+=f}if(l.scheme&&l.host){var h=l.scheme+"://";if((c+h).length0&&(d=c.substr(-1*Math.floor(s/2))),(c.substr(0,Math.ceil(s/2))+r+d).substr(0,s+n)}(e,r):"middle"===n?function(e,t,r){if(e.length<=t)return e;var n,o;null==r?(r="…",n=8,o=3):(n=r.length,o=r.length);var a=t-o,i="";return a>0&&(i=e.substr(-1*Math.floor(a/2))),(e.substr(0,Math.ceil(a/2))+r+i).substr(0,a+n)}(e,r):function(e,t,r){return function(e,t,r){var n;return e.length>t&&(null==r?(r="…",n=3):n=r.length,e=e.substring(0,t-n)+r),e}(e,t,r)}(e,r)},e}(),c=function(){function e(e){this.__jsduckDummyDocProp=null,this.matchedText="",this.offset=0,this.tagBuilder=e.tagBuilder,this.matchedText=e.matchedText,this.offset=e.offset}return e.prototype.getMatchedText=function(){return 
this.matchedText},e.prototype.setOffset=function(e){this.offset=e},e.prototype.getOffset=function(){return this.offset},e.prototype.getCssClassSuffixes=function(){return[this.getType()]},e.prototype.buildTag=function(){return this.tagBuilder.build(this)},e}(),p=function(e,t){return p=Object.setPrototypeOf||{__proto__:[]}instanceof Array&&function(e,t){e.__proto__=t}||function(e,t){for(var r in t)t.hasOwnProperty(r)&&(e[r]=t[r])},p(e,t)};function f(e,t){function r(){this.constructor=e}p(e,t),e.prototype=null===t?Object.create(t):(r.prototype=t.prototype,new r)}var h=function(){return h=Object.assign||function(e){for(var t,r=1,n=arguments.length;r-1},e.isValidUriScheme=function(e){var t=e.match(this.uriSchemeRegex),r=t&&t[0].toLowerCase();return"javascript:"!==r&&"vbscript:"!==r},e.urlMatchDoesNotHaveProtocolOrDot=function(e,t){return!(!e||t&&this.hasFullProtocolRegex.test(t)||-1!==e.indexOf("."))},e.urlMatchDoesNotHaveAtLeastOneWordChar=function(e,t){return!(!e||!t)&&(!this.hasFullProtocolRegex.test(t)&&!this.hasWordCharAfterProtocolRegex.test(e))},e.hasFullProtocolRegex=/^[A-Za-z][-.+A-Za-z0-9]*:\/\//,e.uriSchemeRegex=/^[A-Za-z][-.+A-Za-z0-9]*:/,e.hasWordCharAfterProtocolRegex=new RegExp(":[^\\s]*?["+C+"]"),e.ipRegex=/[0-9][0-9]?[0-9]?\.[0-9][0-9]?[0-9]?\.[0-9][0-9]?[0-9]?\.[0-9][0-9]?[0-9]?(:[0-9]*)?\/?$/,e}(),V=(d=new RegExp("[/?#](?:["+N+"\\-+&@#/%=~_()|'$*\\[\\]{}?!:,.;^✓]*["+N+"\\-+&@#/%=~_()|'$*\\[\\]{}✓])?"),new RegExp(["(?:","(",/(?:[A-Za-z][-.+A-Za-z0-9]{0,63}:(?![A-Za-z][-.+A-Za-z0-9]{0,63}:\/\/)(?!\d+\/?)(?:\/\/)?)/.source,M(2),")","|","(","(//)?",/(?:www\.)/.source,M(6),")","|","(","(//)?",M(10)+"\\.",L.source,"(?![-"+I+"])",")",")","(?::[0-9]+)?","(?:"+d.source+")?"].join(""),"gi")),$=new RegExp("["+N+"]"),W=function(e){function t(t){var r=e.call(this,t)||this;return r.stripPrefix={scheme:!0,www:!0},r.stripTrailingSlash=!0,r.decodePercentEncoding=!0,r.matcherRegex=V,r.wordCharRegExp=$,r.stripPrefix=t.stripPrefix,r.stripTrailingSlash=t.stripTrailingSlash,r.decodePercentEncoding=t.decodePercentEncoding,r}return f(t,e),t.prototype.parseMatches=function(e){for(var t,r=this.matcherRegex,n=this.stripPrefix,o=this.stripTrailingSlash,a=this.decodePercentEncoding,i=this.tagBuilder,s=[],l=function(){var r=t[0],l=t[1],c=t[4],p=t[5],f=t[9],h=t.index,d=p||f,m=e.charAt(h-1);if(!q.isValid(r,l))return"continue";if(h>0&&"@"===m)return"continue";if(h>0&&d&&u.wordCharRegExp.test(m))return"continue";if(/\?$/.test(r)&&(r=r.substr(0,r.length-1)),u.matchHasUnbalancedClosingParen(r))r=r.substr(0,r.length-1);else{var g=u.matchHasInvalidCharAfterTld(r,l);g>-1&&(r=r.substr(0,g))}var v=["http://","https://"].find((function(e){return!!l&&-1!==l.indexOf(e)}));if(v){var y=r.indexOf(v);r=r.substr(y),l=l.substr(y),h+=y}var w=l?"scheme":c?"www":"tld",E=!!l;s.push(new b({tagBuilder:i,matchedText:r,offset:h,urlMatchType:w,url:r,protocolUrlMatch:E,protocolRelativeMatch:!!d,stripPrefix:n,stripTrailingSlash:o,decodePercentEncoding:a}))},u=this;null!==(t=r.exec(e));)l();return s},t.prototype.matchHasUnbalancedClosingParen=function(e){var t,r=e.charAt(e.length-1);if(")"===r)t="(";else if("]"===r)t="[";else{if("}"!==r)return!1;t="{"}for(var n=0,o=0,a=e.length-1;o"===e?(m=new ne(h(h({},m),{name:H()})),W()):E.test(e)||x.test(e)||":"===e||V()}function w(e){">"===e?V():E.test(e)?f=3:V()}function _(e){S.test(e)||("/"===e?f=12:">"===e?W():"<"===e?$():"="===e||A.test(e)||k.test(e)?V():f=5)}function C(e){S.test(e)?f=6:"/"===e?f=12:"="===e?f=7:">"===e?W():"<"===e?$():A.test(e)&&V()}function 
O(e){S.test(e)||("/"===e?f=12:"="===e?f=7:">"===e?W():"<"===e?$():A.test(e)?V():f=5)}function j(e){S.test(e)||('"'===e?f=8:"'"===e?f=9:/[>=`]/.test(e)?V():"<"===e?$():f=10)}function I(e){'"'===e&&(f=11)}function N(e){"'"===e&&(f=11)}function T(e){S.test(e)?f=4:">"===e?W():"<"===e&&$()}function P(e){S.test(e)?f=4:"/"===e?f=12:">"===e?W():"<"===e?$():(f=4,c--)}function R(e){">"===e?(m=new ne(h(h({},m),{isClosing:!0})),W()):f=4}function M(t){"--"===e.substr(c,2)?(c+=2,m=new ne(h(h({},m),{type:"comment"})),f=14):"DOCTYPE"===e.substr(c,7).toUpperCase()?(c+=7,m=new ne(h(h({},m),{type:"doctype"})),f=20):V()}function D(e){"-"===e?f=15:">"===e?V():f=16}function L(e){"-"===e?f=18:">"===e?V():f=16}function B(e){"-"===e&&(f=17)}function F(e){f="-"===e?18:16}function z(e){">"===e?W():"!"===e?f=19:"-"===e||(f=16)}function U(e){"-"===e?f=17:">"===e?W():f=16}function q(e){">"===e?W():"<"===e&&$()}function V(){f=0,m=u}function $(){f=1,m=new ne({idx:c})}function W(){var t=e.slice(d,m.idx);t&&a(t,d),"comment"===m.type?i(m.idx):"doctype"===m.type?l(m.idx):(m.isOpening&&n(m.name,m.idx),m.isClosing&&o(m.name,m.idx)),V(),d=c+1}function H(){var t=m.idx+(m.isClosing?2:1);return e.slice(t,c).toLowerCase()}d=0&&n++},onText:function(e,r){if(0===n){var a=function(e,t){if(!t.global)throw new Error("`splitRegex` must have the 'g' flag set");for(var r,n=[],o=0;r=t.exec(e);)n.push(e.substring(o,r.index)),n.push(r[0]),o=r.index+r[0].length;return n.push(e.substring(o)),n}(e,/( | |<|<|>|>|"|"|')/gi),i=r;a.forEach((function(e,r){if(r%2==0){var n=t.parseText(e,i);o.push.apply(o,n)}i+=e.length}))}},onCloseTag:function(e){r.indexOf(e)>=0&&(n=Math.max(n-1,0))},onComment:function(e){},onDoctype:function(e){}}),o=this.compactMatches(o),o=this.removeUnwantedMatches(o)},e.prototype.compactMatches=function(e){e.sort((function(e,t){return e.getOffset()-t.getOffset()}));for(var t=0;to?t:t+1;e.splice(i,1);continue}e[t+1].getOffset()/g,">"));for(var t=this.parse(e),r=[],n=0,o=0,a=t.length;o/i.test(e)}function se(){var e=[],t=new oe({stripPrefix:!1,url:!0,email:!0,replaceFn:function(t){switch(t.getType()){case"url":e.push({text:t.matchedText,url:t.getUrl()});break;case"email":e.push({text:t.matchedText,url:"mailto:"+t.getEmail().replace(/^mailto:/i,"")})}return!1}});return{links:e,autolinker:t}}function le(e){var t,r,n,o,a,i,s,l,u,c,p,f,h,d,m=e.tokens,g=null;for(r=0,n=m.length;r=0;t--)if("link_close"!==(a=o[t]).type){if("htmltag"===a.type&&(d=a.content,/^\s]/i.test(d)&&p>0&&p--,ie(a.content)&&p++),!(p>0)&&"text"===a.type&&ae.test(a.content)){if(g||(f=(g=se()).links,h=g.autolinker),i=a.content,f.length=0,h.link(i),!f.length)continue;for(s=[],c=a.level,l=0;l({useUnsafeMarkdown:!1})};const me=de;function ge(e){let{useUnsafeMarkdown:t=!1}=arguments.length>1&&void 0!==arguments[1]?arguments[1]:{};const r=t,n=t?[]:["style","class"];return t&&!ge.hasWarnedAboutDeprecation&&(console.warn("useUnsafeMarkdown display configuration parameter is deprecated since >3.26.0 and will be removed in v4.0.0."),ge.hasWarnedAboutDeprecation=!0),pe().sanitize(e,{ADD_ATTR:["target"],FORBID_TAGS:["style","form"],ALLOW_DATA_ATTR:r,FORBID_ATTR:n})}ge.hasWarnedAboutDeprecation=!1},45308:(e,t,r)=>{"use strict";r.r(t),r.d(t,{default:()=>f});var n,o=r(86),a=r.n(o),i=r(8712),s=r.n(i),l=r(90242),u=r(27621);const c=r(95102),p={},f=p;a()(n=s()(c).call(c)).call(n,(function(e){if("./index.js"===e)return;let t=c(e);p[(0,l.Zl)(e)]=t.default?t.default:t})),p.SafeRender=u.default},55812:(e,t,r)=>{"use 
strict";r.r(t),r.d(t,{SHOW_AUTH_POPUP:()=>p,AUTHORIZE:()=>f,LOGOUT:()=>h,PRE_AUTHORIZE_OAUTH2:()=>d,AUTHORIZE_OAUTH2:()=>m,VALIDATE:()=>g,CONFIGURE_AUTH:()=>v,RESTORE_AUTHORIZATION:()=>y,showDefinitions:()=>b,authorize:()=>w,authorizeWithPersistOption:()=>E,logout:()=>x,logoutWithPersistOption:()=>_,preAuthorizeImplicit:()=>S,authorizeOauth2:()=>A,authorizeOauth2WithPersistOption:()=>k,authorizePassword:()=>C,authorizeApplication:()=>O,authorizeAccessCodeWithFormParams:()=>j,authorizeAccessCodeWithBasicAuthentication:()=>I,authorizeRequest:()=>N,configureAuth:()=>T,restoreAuthorization:()=>P,persistAuthorizationIfNeeded:()=>R,authPopup:()=>M});var n=r(35627),o=r.n(n),a=r(76986),i=r.n(a),s=r(84564),l=r.n(s),u=r(27504),c=r(90242);const p="show_popup",f="authorize",h="logout",d="pre_authorize_oauth2",m="authorize_oauth2",g="validate",v="configure_auth",y="restore_authorization";function b(e){return{type:p,payload:e}}function w(e){return{type:f,payload:e}}const E=e=>t=>{let{authActions:r}=t;r.authorize(e),r.persistAuthorizationIfNeeded()};function x(e){return{type:h,payload:e}}const _=e=>t=>{let{authActions:r}=t;r.logout(e),r.persistAuthorizationIfNeeded()},S=e=>t=>{let{authActions:r,errActions:n}=t,{auth:a,token:i,isValid:s}=e,{schema:l,name:c}=a,p=l.get("flow");delete u.Z.swaggerUIRedirectOauth2,"accessCode"===p||s||n.newAuthErr({authId:c,source:"auth",level:"warning",message:"Authorization may be unsafe, passed state was changed in server Passed state wasn't returned from auth server"}),i.error?n.newAuthErr({authId:c,source:"auth",level:"error",message:o()(i)}):r.authorizeOauth2WithPersistOption({auth:a,token:i})};function A(e){return{type:m,payload:e}}const k=e=>t=>{let{authActions:r}=t;r.authorizeOauth2(e),r.persistAuthorizationIfNeeded()},C=e=>t=>{let{authActions:r}=t,{schema:n,name:o,username:a,password:s,passwordType:l,clientId:u,clientSecret:p}=e,f={grant_type:"password",scope:e.scopes.join(" "),username:a,password:s},h={};switch(l){case"request-body":!function(e,t,r){t&&i()(e,{client_id:t});r&&i()(e,{client_secret:r})}(f,u,p);break;case"basic":h.Authorization="Basic "+(0,c.r3)(u+":"+p);break;default:console.warn(`Warning: invalid passwordType ${l} was passed, not including client id and secret`)}return r.authorizeRequest({body:(0,c.GZ)(f),url:n.get("tokenUrl"),name:o,headers:h,query:{},auth:e})};const O=e=>t=>{let{authActions:r}=t,{schema:n,scopes:o,name:a,clientId:i,clientSecret:s}=e,l={Authorization:"Basic "+(0,c.r3)(i+":"+s)},u={grant_type:"client_credentials",scope:o.join(" ")};return r.authorizeRequest({body:(0,c.GZ)(u),name:a,url:n.get("tokenUrl"),auth:e,headers:l})},j=e=>{let{auth:t,redirectUrl:r}=e;return e=>{let{authActions:n}=e,{schema:o,name:a,clientId:i,clientSecret:s,codeVerifier:l}=t,u={grant_type:"authorization_code",code:t.code,client_id:i,client_secret:s,redirect_uri:r,code_verifier:l};return n.authorizeRequest({body:(0,c.GZ)(u),name:a,url:o.get("tokenUrl"),auth:t})}},I=e=>{let{auth:t,redirectUrl:r}=e;return e=>{let{authActions:n}=e,{schema:o,name:a,clientId:i,clientSecret:s,codeVerifier:l}=t,u={Authorization:"Basic "+(0,c.r3)(i+":"+s)},p={grant_type:"authorization_code",code:t.code,client_id:i,redirect_uri:r,code_verifier:l};return n.authorizeRequest({body:(0,c.GZ)(p),name:a,url:o.get("tokenUrl"),auth:t,headers:u})}},N=e=>t=>{let r,{fn:n,getConfigs:a,authActions:s,errActions:u,oas3Selectors:c,specSelectors:p,authSelectors:f}=t,{body:h,query:d={},headers:m={},name:g,url:v,auth:y}=e,{additionalQueryStringParams:b}=f.getConfigs()||{};if(p.isOAS3()){let 
e=c.serverEffectiveValue(c.selectedServer());r=l()(v,e,!0)}else r=l()(v,p.url(),!0);"object"==typeof b&&(r.query=i()({},r.query,b));const w=r.toString();let E=i()({Accept:"application/json, text/plain, */*","Content-Type":"application/x-www-form-urlencoded","X-Requested-With":"XMLHttpRequest"},m);n.fetch({url:w,method:"post",headers:E,query:d,body:h,requestInterceptor:a().requestInterceptor,responseInterceptor:a().responseInterceptor}).then((function(e){let t=JSON.parse(e.data),r=t&&(t.error||""),n=t&&(t.parseError||"");e.ok?r||n?u.newAuthErr({authId:g,level:"error",source:"auth",message:o()(t)}):s.authorizeOauth2WithPersistOption({auth:y,token:t}):u.newAuthErr({authId:g,level:"error",source:"auth",message:e.statusText})})).catch((e=>{let t=new Error(e).message;if(e.response&&e.response.data){const r=e.response.data;try{const e="string"==typeof r?JSON.parse(r):r;e.error&&(t+=`, error: ${e.error}`),e.error_description&&(t+=`, description: ${e.error_description}`)}catch(e){}}u.newAuthErr({authId:g,level:"error",source:"auth",message:t})}))};function T(e){return{type:v,payload:e}}function P(e){return{type:y,payload:e}}const R=()=>e=>{let{authSelectors:t,getConfigs:r}=e;if(r().persistAuthorization){const e=t.authorized();localStorage.setItem("authorized",o()(e.toJS()))}},M=(e,t)=>()=>{u.Z.swaggerUIRedirectOauth2=t,u.Z.open(e)}},93705:(e,t,r)=>{"use strict";r.r(t),r.d(t,{default:()=>u,preauthorizeBasic:()=>c,preauthorizeApiKey:()=>p});var n=r(11189),o=r.n(n),a=r(43962),i=r(55812),s=r(60035),l=r(48302);function u(){return{afterLoad(e){this.rootInjects=this.rootInjects||{},this.rootInjects.initOAuth=e.authActions.configureAuth,this.rootInjects.preauthorizeApiKey=o()(p).call(p,null,e),this.rootInjects.preauthorizeBasic=o()(c).call(c,null,e)},statePlugins:{auth:{reducers:a.default,actions:i,selectors:s},spec:{wrapActions:l}}}}function c(e,t,r,n){const{authActions:{authorize:o},specSelectors:{specJson:a,isOAS3:i}}=e,s=i()?["components","securitySchemes"]:["securityDefinitions"],l=a().getIn([...s,t]);return l?o({[t]:{value:{username:r,password:n},schema:l.toJS()}}):null}function p(e,t,r){const{authActions:{authorize:n},specSelectors:{specJson:o,isOAS3:a}}=e,i=a()?["components","securitySchemes"]:["securityDefinitions"],s=o().getIn([...i,t]);return s?n({[t]:{value:r,schema:s.toJS()}}):null}},43962:(e,t,r)=>{"use strict";r.r(t),r.d(t,{default:()=>c});var n=r(86),o=r.n(n),a=r(76986),i=r.n(a),s=r(43393),l=r(90242),u=r(55812);const c={[u.SHOW_AUTH_POPUP]:(e,t)=>{let{payload:r}=t;return e.set("showDefinitions",r)},[u.AUTHORIZE]:(e,t)=>{var r;let{payload:n}=t,a=(0,s.fromJS)(n),i=e.get("authorized")||(0,s.Map)();return o()(r=a.entrySeq()).call(r,(t=>{let[r,n]=t;if(!(0,l.Wl)(n.getIn))return e.set("authorized",i);let o=n.getIn(["schema","type"]);if("apiKey"===o||"http"===o)i=i.set(r,n);else if("basic"===o){let e=n.getIn(["value","username"]),t=n.getIn(["value","password"]);i=i.setIn([r,"value"],{username:e,header:"Basic "+(0,l.r3)(e+":"+t)}),i=i.setIn([r,"schema"],n.get("schema"))}})),e.set("authorized",i)},[u.AUTHORIZE_OAUTH2]:(e,t)=>{let r,{payload:n}=t,{auth:o,token:a}=n;o.token=i()({},a),r=(0,s.fromJS)(o);let l=e.get("authorized")||(0,s.Map)();return l=l.set(r.get("name"),r),e.set("authorized",l)},[u.LOGOUT]:(e,t)=>{let{payload:r}=t,n=e.get("authorized").withMutations((e=>{o()(r).call(r,(t=>{e.delete(t)}))}));return e.set("authorized",n)},[u.CONFIGURE_AUTH]:(e,t)=>{let{payload:r}=t;return e.set("configs",r)},[u.RESTORE_AUTHORIZATION]:(e,t)=>{let{payload:r}=t;return 
e.set("authorized",(0,s.fromJS)(r.authorized))}}},60035:(e,t,r)=>{"use strict";r.r(t),r.d(t,{shownDefinitions:()=>y,definitionsToAuthorize:()=>b,getDefinitionsByNames:()=>w,definitionsForRequirements:()=>E,authorized:()=>x,isAuthorized:()=>_,getConfigs:()=>S});var n=r(86),o=r.n(n),a=r(14418),i=r.n(a),s=r(92039),l=r.n(s),u=r(11882),c=r.n(u),p=r(97606),f=r.n(p),h=r(28222),d=r.n(h),m=r(20573),g=r(43393);const v=e=>e,y=(0,m.P1)(v,(e=>e.get("showDefinitions"))),b=(0,m.P1)(v,(()=>e=>{var t;let{specSelectors:r}=e,n=r.securityDefinitions()||(0,g.Map)({}),a=(0,g.List)();return o()(t=n.entrySeq()).call(t,(e=>{let[t,r]=e,n=(0,g.Map)();n=n.set(t,r),a=a.push(n)})),a})),w=(e,t)=>e=>{var r;let{specSelectors:n}=e;console.warn("WARNING: getDefinitionsByNames is deprecated and will be removed in the next major version.");let a=n.securityDefinitions(),i=(0,g.List)();return o()(r=t.valueSeq()).call(r,(e=>{var t;let r=(0,g.Map)();o()(t=e.entrySeq()).call(t,(e=>{let t,[n,i]=e,s=a.get(n);var l;"oauth2"===s.get("type")&&i.size&&(t=s.get("scopes"),o()(l=t.keySeq()).call(l,(e=>{i.contains(e)||(t=t.delete(e))})),s=s.set("allowedScopes",t));r=r.set(n,s)})),i=i.push(r)})),i},E=function(e){let t=arguments.length>1&&void 0!==arguments[1]?arguments[1]:(0,g.List)();return e=>{let{authSelectors:r}=e;const n=r.definitionsToAuthorize()||(0,g.List)();return i()(n).call(n,(e=>l()(t).call(t,(t=>t.get(e.keySeq().first())))))}},x=(0,m.P1)(v,(e=>e.get("authorized")||(0,g.Map)())),_=(e,t)=>e=>{var r;let{authSelectors:n}=e,o=n.authorized();return g.List.isList(t)?!!i()(r=t.toJS()).call(r,(e=>{var t,r;return-1===c()(t=f()(r=d()(e)).call(r,(e=>!!o.get(e)))).call(t,!1)})).length:null},S=(0,m.P1)(v,(e=>e.get("configs")))},48302:(e,t,r)=>{"use strict";r.r(t),r.d(t,{execute:()=>n});const n=(e,t)=>{let{authSelectors:r,specSelectors:n}=t;return t=>{let{path:o,method:a,operation:i,extras:s}=t,l={authorized:r.authorized()&&r.authorized().toJS(),definitions:n.securityDefinitions()&&n.securityDefinitions().toJS(),specSecurity:n.security()&&n.security().toJS()};return e({path:o,method:a,operation:i,securities:l,...s})}}},70714:(e,t,r)=>{"use strict";r.r(t),r.d(t,{UPDATE_CONFIGS:()=>n,TOGGLE_CONFIGS:()=>o,update:()=>a,toggle:()=>i,loaded:()=>s});const n="configs_update",o="configs_toggle";function a(e,t){return{type:n,payload:{[e]:t}}}function i(e){return{type:o,payload:e}}const s=()=>e=>{let{getConfigs:t,authActions:r}=e;if(t().persistAuthorization){const e=localStorage.getItem("authorized");e&&r.restoreAuthorization({authorized:JSON.parse(e)})}}},92256:(e,t,r)=>{"use strict";r.r(t),r.d(t,{parseYamlConfig:()=>o});var n=r(1272);const o=(e,t)=>{try{return n.ZP.load(e)}catch(e){return t&&t.errActions.newThrownErr(new Error(e)),{}}}},1661:(e,t,r)=>{"use strict";r.r(t),r.d(t,{default:()=>c});var n=r(15163),o=r(92256),a=r(70714),i=r(22698),s=r(69018),l=r(37743);const u={getLocalConfig:()=>(0,o.parseYamlConfig)(n)};function c(){return{statePlugins:{spec:{actions:i,selectors:u},configs:{reducers:l.default,actions:a,selectors:s}}}}},37743:(e,t,r)=>{"use strict";r.r(t),r.d(t,{default:()=>a});var n=r(43393),o=r(70714);const a={[o.UPDATE_CONFIGS]:(e,t)=>e.merge((0,n.fromJS)(t.payload)),[o.TOGGLE_CONFIGS]:(e,t)=>{const r=t.payload,n=e.get(r);return e.set(r,!n)}}},69018:(e,t,r)=>{"use strict";r.r(t),r.d(t,{get:()=>a});var n=r(58309),o=r.n(n);const a=(e,t)=>e.getIn(o()(t)?t:[t])},22698:(e,t,r)=>{"use strict";r.r(t),r.d(t,{downloadConfig:()=>o,getConfigByUrl:()=>a});var n=r(92256);const o=e=>t=>{const{fn:{fetch:r}}=t;return 
r(e)},a=(e,t)=>r=>{let{specActions:o}=r;if(e)return o.downloadConfig(e).then(a,a);function a(r){r instanceof Error||r.status>=400?(o.updateLoadingStatus("failedConfig"),o.updateLoadingStatus("failedConfig"),o.updateUrl(""),console.error(r.statusText+" "+e.url),t(null)):t((0,n.parseYamlConfig)(r.text))}}},31970:(e,t,r)=>{"use strict";r.r(t),r.d(t,{setHash:()=>n});const n=e=>e?history.pushState(null,null,`#${e}`):window.location.hash=""},34980:(e,t,r)=>{"use strict";r.r(t),r.d(t,{default:()=>i});var n=r(41599),o=r(60877),a=r(34584);function i(){return[n.default,{statePlugins:{configs:{wrapActions:{loaded:(e,t)=>function(){e(...arguments);const r=decodeURIComponent(window.location.hash);t.layoutActions.parseDeepLinkHash(r)}}}},wrapComponents:{operation:o.default,OperationTag:a.default}}]}},41599:(e,t,r)=>{"use strict";r.r(t),r.d(t,{show:()=>b,scrollTo:()=>w,parseDeepLinkHash:()=>E,readyToScroll:()=>x,scrollToElement:()=>_,clearScrollTo:()=>S,default:()=>A});var n=r(58309),o=r.n(n),a=r(24278),i=r.n(a),s=r(97606),l=r.n(s),u=r(11882),c=r.n(u),p=r(31970),f=r(45172),h=r.n(f),d=r(90242),m=r(43393),g=r.n(m);const v="layout_scroll_to",y="layout_clear_scroll",b=(e,t)=>{let{getConfigs:r,layoutSelectors:n}=t;return function(){for(var t=arguments.length,a=new Array(t),i=0;i({type:v,payload:o()(e)?e:[e]}),E=e=>t=>{let{layoutActions:r,layoutSelectors:n,getConfigs:o}=t;if(o().deepLinking&&e){var a;let t=i()(e).call(e,1);"!"===t[0]&&(t=i()(t).call(t,1)),"/"===t[0]&&(t=i()(t).call(t,1));const o=l()(a=t.split("/")).call(a,(e=>e||"")),s=n.isShownKeyFromUrlHashArray(o),[u,p="",f=""]=s;if("operations"===u){const e=n.isShownKeyFromUrlHashArray([p]);c()(p).call(p,"_")>-1&&(console.warn("Warning: escaping deep link whitespace with `_` will be unsupported in v4.0, use `%20` instead."),r.show(l()(e).call(e,(e=>e.replace(/_/g," "))),!0)),r.show(e,!0)}(c()(p).call(p,"_")>-1||c()(f).call(f,"_")>-1)&&(console.warn("Warning: escaping deep link whitespace with `_` will be unsupported in v4.0, use `%20` instead."),r.show(l()(s).call(s,(e=>e.replace(/_/g," "))),!0)),r.show(s,!0),r.scrollTo(s)}},x=(e,t)=>r=>{const n=r.layoutSelectors.getScrollToKey();g().is(n,(0,m.fromJS)(e))&&(r.layoutActions.scrollToElement(t),r.layoutActions.clearScrollTo())},_=(e,t)=>r=>{try{t=t||r.fn.getScrollParent(e),h().createScroller(t).to(e)}catch(e){console.error(e)}},S=()=>({type:y});const A={fn:{getScrollParent:function(e,t){const r=document.documentElement;let n=getComputedStyle(e);const o="absolute"===n.position,a=t?/(auto|scroll|hidden)/:/(auto|scroll)/;if("fixed"===n.position)return r;for(let t=e;t=t.parentElement;)if(n=getComputedStyle(t),(!o||"static"!==n.position)&&a.test(n.overflow+n.overflowY+n.overflowX))return t;return r}},statePlugins:{layout:{actions:{scrollToElement:_,scrollTo:w,clearScrollTo:S,readyToScroll:x,parseDeepLinkHash:E},selectors:{getScrollToKey:e=>e.get("scrollToKey"),isShownKeyFromUrlHashArray(e,t){const[r,n]=t;return n?["operations",r,n]:r?["operations-tag",r]:[]},urlHashArrayFromIsShownKey(e,t){let[r,n,o]=t;return"operations"==r?[n,o]:"operations-tag"==r?[n]:[]}},reducers:{[v]:(e,t)=>e.set("scrollToKey",g().fromJS(t.payload)),[y]:e=>e.delete("scrollToKey")},wrapActions:{show:b}}}}},34584:(e,t,r)=>{"use strict";r.r(t),r.d(t,{default:()=>i});var n=r(61125),o=r.n(n),a=r(67294);const i=(e,t)=>class extends a.Component{constructor(){super(...arguments),o()(this,"onLoad",(e=>{const{tag:r}=this.props,n=["operations-tag",r];t.layoutActions.readyToScroll(n,e)}))}render(){return 
a.createElement("span",{ref:this.onLoad},a.createElement(e,this.props))}}},60877:(e,t,r)=>{"use strict";r.r(t),r.d(t,{default:()=>i});var n=r(61125),o=r.n(n),a=r(67294);r(23930);const i=(e,t)=>class extends a.Component{constructor(){super(...arguments),o()(this,"onLoad",(e=>{const{operation:r}=this.props,{tag:n,operationId:o}=r.toObject();let{isShownKey:a}=r.toObject();a=a||["operations",n,o],t.layoutActions.readyToScroll(a,e)}))}render(){return a.createElement("span",{ref:this.onLoad},a.createElement(e,this.props))}}},48011:(e,t,r)=>{"use strict";r.r(t),r.d(t,{default:()=>d});var n=r(76986),o=r.n(n),a=r(63460),i=r.n(a),s=r(11882),l=r.n(s),u=r(35627),c=r.n(u),p=r(20573),f=r(43393),h=r(27504);function d(e){let{fn:t}=e;return{statePlugins:{spec:{actions:{download:e=>r=>{let{errActions:n,specSelectors:a,specActions:s,getConfigs:l}=r,{fetch:u}=t;const c=l();function p(t){if(t instanceof Error||t.status>=400)return s.updateLoadingStatus("failed"),n.newThrownErr(o()(new Error((t.message||t.statusText)+" "+e),{source:"fetch"})),void(!t.status&&t instanceof Error&&function(){try{let t;if("URL"in h.Z?t=new(i())(e):(t=document.createElement("a"),t.href=e),"https:"!==t.protocol&&"https:"===h.Z.location.protocol){const e=o()(new Error(`Possible mixed-content issue? The page was loaded over https:// but a ${t.protocol}// URL was specified. Check that you are not attempting to load mixed content.`),{source:"fetch"});return void n.newThrownErr(e)}if(t.origin!==h.Z.location.origin){const e=o()(new Error(`Possible cross-origin (CORS) issue? The URL origin (${t.origin}) does not match the page (${h.Z.location.origin}). Check the server returns the correct 'Access-Control-Allow-*' headers.`),{source:"fetch"});n.newThrownErr(e)}}catch(e){return}}());s.updateLoadingStatus("success"),s.updateSpec(t.text),a.url()!==e&&s.updateUrl(e)}e=e||a.url(),s.updateLoadingStatus("loading"),n.clear({source:"fetch"}),u({url:e,loadSpec:!0,requestInterceptor:c.requestInterceptor||(e=>e),responseInterceptor:c.responseInterceptor||(e=>e),credentials:"same-origin",headers:{Accept:"application/json,*/*"}}).then(p,p)},updateLoadingStatus:e=>{let t=[null,"loading","failed","success","failedConfig"];return-1===l()(t).call(t,e)&&console.error(`Error: ${e} is not one of ${c()(t)}`),{type:"spec_update_loading_status",payload:e}}},reducers:{spec_update_loading_status:(e,t)=>"string"==typeof t.payload?e.set("loadingStatus",t.payload):e},selectors:{loadingStatus:(0,p.P1)((e=>e||(0,f.Map)()),(e=>e.get("loadingStatus")||null))}}}}}},34966:(e,t,r)=>{"use strict";r.r(t),r.d(t,{NEW_THROWN_ERR:()=>o,NEW_THROWN_ERR_BATCH:()=>a,NEW_SPEC_ERR:()=>i,NEW_SPEC_ERR_BATCH:()=>s,NEW_AUTH_ERR:()=>l,CLEAR:()=>u,CLEAR_BY:()=>c,newThrownErr:()=>p,newThrownErrBatch:()=>f,newSpecErr:()=>h,newSpecErrBatch:()=>d,newAuthErr:()=>m,clear:()=>g,clearBy:()=>v});var n=r(7710);const o="err_new_thrown_err",a="err_new_thrown_err_batch",i="err_new_spec_err",s="err_new_spec_err_batch",l="err_new_auth_err",u="err_clear",c="err_clear_by";function p(e){return{type:o,payload:(0,n.serializeError)(e)}}function f(e){return{type:a,payload:e}}function h(e){return{type:i,payload:e}}function d(e){return{type:s,payload:e}}function m(e){return{type:l,payload:e}}function g(){let e=arguments.length>0&&void 0!==arguments[0]?arguments[0]:{};return{type:u,payload:e}}function v(){let e=arguments.length>0&&void 0!==arguments[0]?arguments[0]:()=>!0;return{type:c,payload:e}}},56982:(e,t,r)=>{"use strict";r.r(t),r.d(t,{default:()=>c});var 
n=r(14418),o=r.n(n),a=r(97606),i=r.n(a),s=r(54061),l=r.n(s);const u=[r(2392),r(21835)];function c(e){var t;let r={jsSpec:{}},n=l()(u,((e,t)=>{try{let n=t.transform(e,r);return o()(n).call(n,(e=>!!e))}catch(t){return console.error("Transformer error:",t),e}}),e);return i()(t=o()(n).call(n,(e=>!!e))).call(t,(e=>(!e.get("line")&&e.get("path"),e)))}},2392:(e,t,r)=>{"use strict";r.r(t),r.d(t,{transform:()=>p});var n=r(97606),o=r.n(n),a=r(11882),i=r.n(a),s=r(24278),l=r.n(s),u=r(24282),c=r.n(u);function p(e){return o()(e).call(e,(e=>{var t;let r="is not of a type(s)",n=i()(t=e.get("message")).call(t,r);if(n>-1){var o,a;let t=l()(o=e.get("message")).call(o,n+r.length).split(",");return e.set("message",l()(a=e.get("message")).call(a,0,n)+function(e){return c()(e).call(e,((e,t,r,n)=>r===n.length-1&&n.length>1?e+"or "+t:n[r+1]&&n.length>2?e+t+", ":n[r+1]?e+t+" ":e+t),"should be a")}(t))}return e}))}},21835:(e,t,r)=>{"use strict";r.r(t),r.d(t,{transform:()=>n});r(97606),r(11882),r(27361),r(43393);function n(e,t){let{jsSpec:r}=t;return e}},77793:(e,t,r)=>{"use strict";r.r(t),r.d(t,{default:()=>i});var n=r(93527),o=r(34966),a=r(87667);function i(e){return{statePlugins:{err:{reducers:(0,n.default)(e),actions:o,selectors:a}}}}},93527:(e,t,r)=>{"use strict";r.r(t),r.d(t,{default:()=>v});var n=r(76986),o=r.n(n),a=r(97606),i=r.n(a),s=r(39022),l=r.n(s),u=r(14418),c=r.n(u),p=r(2250),f=r.n(p),h=r(34966),d=r(43393),m=r(56982);let g={line:0,level:"error",message:"Unknown error"};function v(){return{[h.NEW_THROWN_ERR]:(e,t)=>{let{payload:r}=t,n=o()(g,r,{type:"thrown"});return e.update("errors",(e=>(e||(0,d.List)()).push((0,d.fromJS)(n)))).update("errors",(e=>(0,m.default)(e)))},[h.NEW_THROWN_ERR_BATCH]:(e,t)=>{let{payload:r}=t;return r=i()(r).call(r,(e=>(0,d.fromJS)(o()(g,e,{type:"thrown"})))),e.update("errors",(e=>{var t;return l()(t=e||(0,d.List)()).call(t,(0,d.fromJS)(r))})).update("errors",(e=>(0,m.default)(e)))},[h.NEW_SPEC_ERR]:(e,t)=>{let{payload:r}=t,n=(0,d.fromJS)(r);return n=n.set("type","spec"),e.update("errors",(e=>(e||(0,d.List)()).push((0,d.fromJS)(n)).sortBy((e=>e.get("line"))))).update("errors",(e=>(0,m.default)(e)))},[h.NEW_SPEC_ERR_BATCH]:(e,t)=>{let{payload:r}=t;return r=i()(r).call(r,(e=>(0,d.fromJS)(o()(g,e,{type:"spec"})))),e.update("errors",(e=>{var t;return l()(t=e||(0,d.List)()).call(t,(0,d.fromJS)(r))})).update("errors",(e=>(0,m.default)(e)))},[h.NEW_AUTH_ERR]:(e,t)=>{let{payload:r}=t,n=(0,d.fromJS)(o()({},r));return n=n.set("type","auth"),e.update("errors",(e=>(e||(0,d.List)()).push((0,d.fromJS)(n)))).update("errors",(e=>(0,m.default)(e)))},[h.CLEAR]:(e,t)=>{var r;let{payload:n}=t;if(!n||!e.get("errors"))return e;let o=c()(r=e.get("errors")).call(r,(e=>{var t;return f()(t=e.keySeq()).call(t,(t=>{const r=e.get(t),o=n[t];return!o||r!==o}))}));return e.merge({errors:o})},[h.CLEAR_BY]:(e,t)=>{var r;let{payload:n}=t;if(!n||"function"!=typeof n)return e;let o=c()(r=e.get("errors")).call(r,(e=>n(e)));return e.merge({errors:o})}}}},87667:(e,t,r)=>{"use strict";r.r(t),r.d(t,{allErrors:()=>a,lastError:()=>i});var n=r(43393),o=r(20573);const a=(0,o.P1)((e=>e),(e=>e.get("errors",(0,n.List)()))),i=(0,o.P1)(a,(e=>e.last()))},49978:(e,t,r)=>{"use strict";r.r(t),r.d(t,{default:()=>o});var n=r(4309);function o(){return{fn:{opsFilter:n.default}}}},4309:(e,t,r)=>{"use strict";r.r(t),r.d(t,{default:()=>s});var n=r(14418),o=r.n(n),a=r(11882),i=r.n(a);function s(e,t){return o()(e).call(e,((e,r)=>-1!==i()(r).call(r,t)))}},25474:(e,t,r)=>{"use 
strict";r.r(t),r.d(t,{UPDATE_LAYOUT:()=>o,UPDATE_FILTER:()=>a,UPDATE_MODE:()=>i,SHOW:()=>s,updateLayout:()=>l,updateFilter:()=>u,show:()=>c,changeMode:()=>p});var n=r(90242);const o="layout_update_layout",a="layout_update_filter",i="layout_update_mode",s="layout_show";function l(e){return{type:o,payload:e}}function u(e){return{type:a,payload:e}}function c(e){let t=!(arguments.length>1&&void 0!==arguments[1])||arguments[1];return e=(0,n.AF)(e),{type:s,payload:{thing:e,shown:t}}}function p(e){let t=arguments.length>1&&void 0!==arguments[1]?arguments[1]:"";return e=(0,n.AF)(e),{type:i,payload:{thing:e,mode:t}}}},26821:(e,t,r)=>{"use strict";r.r(t),r.d(t,{default:()=>s});var n=r(5672),o=r(25474),a=r(4400),i=r(28989);function s(){return{statePlugins:{layout:{reducers:n.default,actions:o,selectors:a},spec:{wrapSelectors:i}}}}},5672:(e,t,r)=>{"use strict";r.r(t),r.d(t,{default:()=>s});var n=r(39022),o=r.n(n),a=r(43393),i=r(25474);const s={[i.UPDATE_LAYOUT]:(e,t)=>e.set("layout",t.payload),[i.UPDATE_FILTER]:(e,t)=>e.set("filter",t.payload),[i.SHOW]:(e,t)=>{const r=t.payload.shown,n=(0,a.fromJS)(t.payload.thing);return e.update("shown",(0,a.fromJS)({}),(e=>e.set(n,r)))},[i.UPDATE_MODE]:(e,t)=>{var r;let n=t.payload.thing,a=t.payload.mode;return e.setIn(o()(r=["modes"]).call(r,n),(a||"")+"")}}},4400:(e,t,r)=>{"use strict";r.r(t),r.d(t,{current:()=>i,currentFilter:()=>s,isShown:()=>l,whatMode:()=>u,showSummary:()=>c});var n=r(20573),o=r(90242),a=r(43393);const i=e=>e.get("layout"),s=e=>e.get("filter"),l=(e,t,r)=>(t=(0,o.AF)(t),e.get("shown",(0,a.fromJS)({})).get((0,a.fromJS)(t),r)),u=function(e,t){let r=arguments.length>2&&void 0!==arguments[2]?arguments[2]:"";return t=(0,o.AF)(t),e.getIn(["modes",...t],r)},c=(0,n.P1)((e=>e),(e=>!l(e,"editor")))},28989:(e,t,r)=>{"use strict";r.r(t),r.d(t,{taggedOperations:()=>a});var n=r(24278),o=r.n(n);const a=(e,t)=>function(r){for(var n=arguments.length,a=new Array(n>1?n-1:0),i=1;i=0&&(s=o()(s).call(s,0,f)),s}},9150:(e,t,r)=>{"use strict";r.r(t),r.d(t,{default:()=>a});var n=r(11189),o=r.n(n);function a(e){let{configs:t}=e;const r={debug:0,info:1,log:2,warn:3,error:4},n=e=>r[e]||-1;let{logLevel:a}=t,i=n(a);function s(e){for(var t=arguments.length,r=new Array(t>1?t-1:0),o=1;o=i&&console[e](...r)}return s.warn=o()(s).call(s,null,"warn"),s.error=o()(s).call(s,null,"error"),s.info=o()(s).call(s,null,"info"),s.debug=o()(s).call(s,null,"debug"),{rootInjects:{log:s}}}},67002:(e,t,r)=>{"use strict";r.r(t),r.d(t,{UPDATE_SELECTED_SERVER:()=>n,UPDATE_REQUEST_BODY_VALUE:()=>o,UPDATE_REQUEST_BODY_VALUE_RETAIN_FLAG:()=>a,UPDATE_REQUEST_BODY_INCLUSION:()=>i,UPDATE_ACTIVE_EXAMPLES_MEMBER:()=>s,UPDATE_REQUEST_CONTENT_TYPE:()=>l,UPDATE_RESPONSE_CONTENT_TYPE:()=>u,UPDATE_SERVER_VARIABLE_VALUE:()=>c,SET_REQUEST_BODY_VALIDATE_ERROR:()=>p,CLEAR_REQUEST_BODY_VALIDATE_ERROR:()=>f,CLEAR_REQUEST_BODY_VALUE:()=>h,setSelectedServer:()=>d,setRequestBodyValue:()=>m,setRetainRequestBodyValueFlag:()=>g,setRequestBodyInclusion:()=>v,setActiveExamplesMember:()=>y,setRequestContentType:()=>b,setResponseContentType:()=>w,setServerVariableValue:()=>E,setRequestBodyValidateError:()=>x,clearRequestBodyValidateError:()=>_,initRequestBodyValidateError:()=>S,clearRequestBodyValue:()=>A});const 
n="oas3_set_servers",o="oas3_set_request_body_value",a="oas3_set_request_body_retain_flag",i="oas3_set_request_body_inclusion",s="oas3_set_active_examples_member",l="oas3_set_request_content_type",u="oas3_set_response_content_type",c="oas3_set_server_variable_value",p="oas3_set_request_body_validate_error",f="oas3_clear_request_body_validate_error",h="oas3_clear_request_body_value";function d(e,t){return{type:n,payload:{selectedServerUrl:e,namespace:t}}}function m(e){let{value:t,pathMethod:r}=e;return{type:o,payload:{value:t,pathMethod:r}}}const g=e=>{let{value:t,pathMethod:r}=e;return{type:a,payload:{value:t,pathMethod:r}}};function v(e){let{value:t,pathMethod:r,name:n}=e;return{type:i,payload:{value:t,pathMethod:r,name:n}}}function y(e){let{name:t,pathMethod:r,contextType:n,contextName:o}=e;return{type:s,payload:{name:t,pathMethod:r,contextType:n,contextName:o}}}function b(e){let{value:t,pathMethod:r}=e;return{type:l,payload:{value:t,pathMethod:r}}}function w(e){let{value:t,path:r,method:n}=e;return{type:u,payload:{value:t,path:r,method:n}}}function E(e){let{server:t,namespace:r,key:n,val:o}=e;return{type:c,payload:{server:t,namespace:r,key:n,val:o}}}const x=e=>{let{path:t,method:r,validationErrors:n}=e;return{type:p,payload:{path:t,method:r,validationErrors:n}}},_=e=>{let{path:t,method:r}=e;return{type:f,payload:{path:t,method:r}}},S=e=>{let{pathMethod:t}=e;return{type:f,payload:{path:t[0],method:t[1]}}},A=e=>{let{pathMethod:t}=e;return{type:h,payload:{pathMethod:t}}}},73723:(e,t,r)=>{"use strict";r.r(t),r.d(t,{definitionsToAuthorize:()=>f});var n=r(86),o=r.n(n),a=r(14418),i=r.n(a),s=r(24282),l=r.n(s),u=r(20573),c=r(43393),p=r(7779);const f=(h=(0,u.P1)((e=>e),(e=>{let{specSelectors:t}=e;return t.securityDefinitions()}),((e,t)=>{var r;let n=(0,c.List)();return t?(o()(r=t.entrySeq()).call(r,(e=>{let[t,r]=e;const a=r.get("type");var s;if("oauth2"===a&&o()(s=r.get("flows").entrySeq()).call(s,(e=>{let[o,a]=e,s=(0,c.fromJS)({flow:o,authorizationUrl:a.get("authorizationUrl"),tokenUrl:a.get("tokenUrl"),scopes:a.get("scopes"),type:r.get("type"),description:r.get("description")});n=n.push(new c.Map({[t]:i()(s).call(s,(e=>void 0!==e))}))})),"http"!==a&&"apiKey"!==a||(n=n.push(new c.Map({[t]:r}))),"openIdConnect"===a&&r.get("openIdConnectData")){let e=r.get("openIdConnectData"),a=e.get("grant_types_supported")||["authorization_code","implicit"];o()(a).call(a,(o=>{var a;let s=e.get("scopes_supported")&&l()(a=e.get("scopes_supported")).call(a,((e,t)=>e.set(t,"")),new c.Map),u=(0,c.fromJS)({flow:o,authorizationUrl:e.get("authorization_endpoint"),tokenUrl:e.get("token_endpoint"),scopes:s,type:"oauth2",openIdConnectUrl:r.get("openIdConnectUrl")});n=n.push(new c.Map({[t]:i()(u).call(u,(e=>void 0!==e))}))}))}})),n):n})),(e,t)=>function(){const r=t.getSystem().specSelectors.specJson();for(var n=arguments.length,o=new Array(n),a=0;a{"use strict";r.r(t),r.d(t,{default:()=>u});var n=r(23101),o=r.n(n),a=r(97606),i=r.n(a),s=r(67294),l=(r(23930),r(43393));const u=e=>{var t;let{callbacks:r,getComponent:n,specPath:a}=e;const u=n("OperationContainer",!0);if(!r)return s.createElement("span",null,"No callbacks");let c=i()(t=r.entrySeq()).call(t,(t=>{var r;let[n,c]=t;return s.createElement("div",{key:n},s.createElement("h2",null,n),i()(r=c.entrySeq()).call(r,(t=>{var r;let[c,p]=t;return"$$ref"===c?null:s.createElement("div",{key:c},i()(r=p.entrySeq()).call(r,(t=>{let[r,i]=t;if("$$ref"===r)return null;let p=(0,l.fromJS)({operation:i});return 
s.createElement(u,o()({},e,{op:p,key:r,tag:"",method:r,path:c,specPath:a.push(n,c,r),allowTryItOut:!1}))})))})))}));return s.createElement("div",null,c)}},86775:(e,t,r)=>{"use strict";r.r(t),r.d(t,{default:()=>f});var n=r(61125),o=r.n(n),a=r(76986),i=r.n(a),s=r(14418),l=r.n(s),u=r(97606),c=r.n(u),p=r(67294);class f extends p.Component{constructor(e,t){super(e,t),o()(this,"onChange",(e=>{let{onChange:t}=this.props,{value:r,name:n}=e.target,o=i()({},this.state.value);n?o[n]=r:o=r,this.setState({value:o},(()=>t(this.state)))}));let{name:r,schema:n}=this.props,a=this.getValue();this.state={name:r,schema:n,value:a}}getValue(){let{name:e,authorized:t}=this.props;return t&&t.getIn([e,"value"])}render(){var e;let{schema:t,getComponent:r,errSelectors:n,name:o}=this.props;const a=r("Input"),i=r("Row"),s=r("Col"),u=r("authError"),f=r("Markdown",!0),h=r("JumpToPath",!0),d=(t.get("scheme")||"").toLowerCase();let m=this.getValue(),g=l()(e=n.allErrors()).call(e,(e=>e.get("authId")===o));if("basic"===d){var v;let e=m?m.get("username"):null;return p.createElement("div",null,p.createElement("h4",null,p.createElement("code",null,o||t.get("name")),"  (http, Basic)",p.createElement(h,{path:["securityDefinitions",o]})),e&&p.createElement("h6",null,"Authorized"),p.createElement(i,null,p.createElement(f,{source:t.get("description")})),p.createElement(i,null,p.createElement("label",null,"Username:"),e?p.createElement("code",null," ",e," "):p.createElement(s,null,p.createElement(a,{type:"text",required:"required",name:"username","aria-label":"auth-basic-username",onChange:this.onChange,autoFocus:!0}))),p.createElement(i,null,p.createElement("label",null,"Password:"),e?p.createElement("code",null," ****** "):p.createElement(s,null,p.createElement(a,{autoComplete:"new-password",name:"password",type:"password","aria-label":"auth-basic-password",onChange:this.onChange}))),c()(v=g.valueSeq()).call(v,((e,t)=>p.createElement(u,{error:e,key:t}))))}var y;return"bearer"===d?p.createElement("div",null,p.createElement("h4",null,p.createElement("code",null,o||t.get("name")),"  (http, Bearer)",p.createElement(h,{path:["securityDefinitions",o]})),m&&p.createElement("h6",null,"Authorized"),p.createElement(i,null,p.createElement(f,{source:t.get("description")})),p.createElement(i,null,p.createElement("label",null,"Value:"),m?p.createElement("code",null," ****** "):p.createElement(s,null,p.createElement(a,{type:"text","aria-label":"auth-bearer-value",onChange:this.onChange,autoFocus:!0}))),c()(y=g.valueSeq()).call(y,((e,t)=>p.createElement(u,{error:e,key:t})))):p.createElement("div",null,p.createElement("em",null,p.createElement("b",null,o)," HTTP authentication: unsupported scheme ",`'${d}'`))}}},76467:(e,t,r)=>{"use strict";r.r(t),r.d(t,{default:()=>p});var n=r(33427),o=r(42458),a=r(15757),i=r(56617),s=r(9928),l=r(45327),u=r(86775),c=r(96796);const p={Callbacks:n.default,HttpAuth:u.default,RequestBody:o.default,Servers:i.default,ServersContainer:s.default,RequestBodyEditor:l.default,OperationServers:c.default,operationLink:a.default}},15757:(e,t,r)=>{"use strict";r.r(t),r.d(t,{default:()=>u});var n=r(35627),o=r.n(n),a=r(97606),i=r.n(a),s=r(67294);r(23930);class l extends s.Component{render(){const{link:e,name:t,getComponent:r}=this.props,n=r("Markdown",!0);let a=e.get("operationId")||e.get("operationRef"),l=e.get("parameters")&&e.get("parameters").toJS(),u=e.get("description");return 
s.createElement("div",{className:"operation-link"},s.createElement("div",{className:"description"},s.createElement("b",null,s.createElement("code",null,t)),u?s.createElement(n,{source:u}):null),s.createElement("pre",null,"Operation `",a,"`",s.createElement("br",null),s.createElement("br",null),"Parameters ",function(e,t){var r;if("string"!=typeof t)return"";return i()(r=t.split("\n")).call(r,((t,r)=>r>0?Array(e+1).join(" ")+t:t)).join("\n")}(0,o()(l,null,2))||"{}",s.createElement("br",null)))}}const u=l},96796:(e,t,r)=>{"use strict";r.r(t),r.d(t,{default:()=>i});var n=r(61125),o=r.n(n),a=r(67294);r(23930);class i extends a.Component{constructor(){super(...arguments),o()(this,"setSelectedServer",(e=>{const{path:t,method:r}=this.props;return this.forceUpdate(),this.props.setSelectedServer(e,`${t}:${r}`)})),o()(this,"setServerVariableValue",(e=>{const{path:t,method:r}=this.props;return this.forceUpdate(),this.props.setServerVariableValue({...e,namespace:`${t}:${r}`})})),o()(this,"getSelectedServer",(()=>{const{path:e,method:t}=this.props;return this.props.getSelectedServer(`${e}:${t}`)})),o()(this,"getServerVariable",((e,t)=>{const{path:r,method:n}=this.props;return this.props.getServerVariable({namespace:`${r}:${n}`,server:e},t)})),o()(this,"getEffectiveServerValue",(e=>{const{path:t,method:r}=this.props;return this.props.getEffectiveServerValue({server:e,namespace:`${t}:${r}`})}))}render(){const{operationServers:e,pathServers:t,getComponent:r}=this.props;if(!e&&!t)return null;const n=r("Servers"),o=e||t,i=e?"operation":"path";return a.createElement("div",{className:"opblock-section operation-servers"},a.createElement("div",{className:"opblock-section-header"},a.createElement("div",{className:"tab-header"},a.createElement("h4",{className:"opblock-title"},"Servers"))),a.createElement("div",{className:"opblock-description-wrapper"},a.createElement("h4",{className:"message"},"These ",i,"-level options override the global server options."),a.createElement(n,{servers:o,currentServer:this.getSelectedServer(),setSelectedServer:this.setSelectedServer,setServerVariableValue:this.setServerVariableValue,getServerVariable:this.getServerVariable,getEffectiveServerValue:this.getEffectiveServerValue})))}}},45327:(e,t,r)=>{"use strict";r.r(t),r.d(t,{default:()=>c});var n=r(61125),o=r.n(n),a=r(67294),i=r(94184),s=r.n(i),l=r(90242);const u=Function.prototype;class c extends a.PureComponent{constructor(e,t){super(e,t),o()(this,"applyDefaultValue",(e=>{const{onChange:t,defaultValue:r}=e||this.props;return this.setState({value:r}),t(r)})),o()(this,"onChange",(e=>{this.props.onChange((0,l.Pz)(e))})),o()(this,"onDomChange",(e=>{const t=e.target.value;this.setState({value:t},(()=>this.onChange(t)))})),this.state={value:(0,l.Pz)(e.value)||e.defaultValue},e.onChange(e.value)}UNSAFE_componentWillReceiveProps(e){this.props.value!==e.value&&e.value!==this.state.value&&this.setState({value:(0,l.Pz)(e.value)}),!e.value&&e.defaultValue&&this.state.value&&this.applyDefaultValue(e)}render(){let{getComponent:e,errors:t}=this.props,{value:r}=this.state,n=t.size>0;const o=e("TextArea");return a.createElement("div",{className:"body-param"},a.createElement(o,{className:s()("body-param__text",{invalid:n}),title:t.size?t.join(", "):"",value:r,onChange:this.onDomChange}))}}o()(c,"defaultProps",{onChange:u,userHasEditedBody:!1})},42458:(e,t,r)=>{"use strict";r.r(t),r.d(t,{getDefaultRequestBodyValue:()=>m,default:()=>g});var 
n=r(97606),o=r.n(n),a=r(11882),i=r.n(a),s=r(58118),l=r.n(s),u=r(58309),c=r.n(u),p=r(67294),f=(r(23930),r(43393)),h=r(90242),d=r(2518);const m=(e,t,r)=>{const n=e.getIn(["content",t]),o=n.get("schema").toJS(),a=void 0!==n.get("examples"),i=n.get("example"),s=a?n.getIn(["examples",r,"value"]):i,l=(0,h.xi)(o,t,{includeWriteOnly:!0},s);return(0,h.Pz)(l)},g=e=>{let{userHasEditedBody:t,requestBody:r,requestBodyValue:n,requestBodyInclusionSetting:a,requestBodyErrors:s,getComponent:u,getConfigs:g,specSelectors:v,fn:y,contentType:b,isExecute:w,specPath:E,onChange:x,onChangeIncludeEmpty:_,activeExamplesKey:S,updateActiveExamplesKey:A,setRetainRequestBodyValueFlag:k}=e;const C=e=>{x(e.target.files[0])},O=e=>{let t={key:e,shouldDispatchInit:!1,defaultValue:!0};return"no value"===a.get(e,"no value")&&(t.shouldDispatchInit=!0),t},j=u("Markdown",!0),I=u("modelExample"),N=u("RequestBodyEditor"),T=u("highlightCode"),P=u("ExamplesSelectValueRetainer"),R=u("Example"),M=u("ParameterIncludeEmpty"),{showCommonExtensions:D}=g(),L=r&&r.get("description")||null,B=r&&r.get("content")||new f.OrderedMap;b=b||B.keySeq().first()||"";const F=B.get(b,(0,f.OrderedMap)()),z=F.get("schema",(0,f.OrderedMap)()),U=F.get("examples",null),q=null==U?void 0:o()(U).call(U,((e,t)=>{var n;const o=null===(n=e)||void 0===n?void 0:n.get("value",null);return o&&(e=e.set("value",m(r,b,t),o)),e}));if(s=f.List.isList(s)?s:(0,f.List)(),!F.size)return null;const V="object"===F.getIn(["schema","type"]),$="binary"===F.getIn(["schema","format"]),W="base64"===F.getIn(["schema","format"]);if("application/octet-stream"===b||0===i()(b).call(b,"image/")||0===i()(b).call(b,"audio/")||0===i()(b).call(b,"video/")||$||W){const e=u("Input");return w?p.createElement(e,{type:"file",onChange:C}):p.createElement("i",null,"Example values are not available for ",p.createElement("code",null,b)," media types.")}if(V&&("application/x-www-form-urlencoded"===b||0===i()(b).call(b,"multipart/"))&&z.get("properties",(0,f.OrderedMap)()).size>0){var H;const e=u("JsonSchemaForm"),t=u("ParameterExt"),r=z.get("properties",(0,f.OrderedMap)());return n=f.Map.isMap(n)?n:(0,f.OrderedMap)(),p.createElement("div",{className:"table-container"},L&&p.createElement(j,{source:L}),p.createElement("table",null,p.createElement("tbody",null,f.Map.isMap(r)&&o()(H=r.entrySeq()).call(H,(r=>{var i,d;let[m,g]=r;if(g.get("readOnly"))return;let v=D?(0,h.po)(g):null;const b=l()(i=z.get("required",(0,f.List)())).call(i,m),E=g.get("type"),S=g.get("format"),A=g.get("description"),k=n.getIn([m,"value"]),C=n.getIn([m,"errors"])||s,I=a.get(m)||!1,N=g.has("default")||g.has("example")||g.hasIn(["items","example"])||g.hasIn(["items","default"]),T=g.has("enum")&&(1===g.get("enum").size||b),P=N||T;let R="";"array"!==E||P||(R=[]),("object"===E||P)&&(R=(0,h.xi)(g,!1,{includeWriteOnly:!0})),"string"!=typeof R&&"object"===E&&(R=(0,h.Pz)(R)),"string"==typeof R&&"array"===E&&(R=JSON.parse(R));const L="string"===E&&("binary"===S||"base64"===S);return p.createElement("tr",{key:m,className:"parameters","data-property-name":m},p.createElement("td",{className:"parameters-col_name"},p.createElement("div",{className:b?"parameter__name required":"parameter__name"},m,b?p.createElement("span",null," *"):null),p.createElement("div",{className:"parameter__type"},E,S&&p.createElement("span",{className:"prop-format"},"($",S,")"),D&&v.size?o()(d=v.entrySeq()).call(d,(e=>{let[r,n]=e;return 
p.createElement(t,{key:`${r}-${n}`,xKey:r,xVal:n})})):null),p.createElement("div",{className:"parameter__deprecated"},g.get("deprecated")?"deprecated":null)),p.createElement("td",{className:"parameters-col_description"},p.createElement(j,{source:A}),w?p.createElement("div",null,p.createElement(e,{fn:y,dispatchInitialValue:!L,schema:g,description:m,getComponent:u,value:void 0===k?R:k,required:b,errors:C,onChange:e=>{x(e,[m])}}),b?null:p.createElement(M,{onChange:e=>_(m,e),isIncluded:I,isIncludedOptions:O(m),isDisabled:c()(k)?0!==k.length:!(0,h.O2)(k)})):null))})))))}const J=m(r,b,S);let K=null;return(0,d.O)(J)&&(K="json"),p.createElement("div",null,L&&p.createElement(j,{source:L}),q?p.createElement(P,{userHasEditedBody:t,examples:q,currentKey:S,currentUserInputValue:n,onSelect:e=>{A(e)},updateValue:x,defaultToFirstExample:!0,getComponent:u,setRetainRequestBodyValueFlag:k}):null,w?p.createElement("div",null,p.createElement(N,{value:n,errors:s,defaultValue:J,onChange:x,getComponent:u})):p.createElement(I,{getComponent:u,getConfigs:g,specSelectors:v,expandDepth:1,isExecute:w,schema:F.get("schema"),specPath:E.push("content",b),example:p.createElement(T,{className:"body-param__example",getConfigs:g,language:K,value:(0,h.Pz)(n)||J}),includeWriteOnly:!0}),q?p.createElement(R,{example:q.get(S),getComponent:u,getConfigs:g}):null)}},9928:(e,t,r)=>{"use strict";r.r(t),r.d(t,{default:()=>o});var n=r(67294);class o extends n.Component{render(){const{specSelectors:e,oas3Selectors:t,oas3Actions:r,getComponent:o}=this.props,a=e.servers(),i=o("Servers");return a&&a.size?n.createElement("div",null,n.createElement("span",{className:"servers-title"},"Servers"),n.createElement(i,{servers:a,currentServer:t.selectedServer(),setSelectedServer:r.setSelectedServer,setServerVariableValue:r.setServerVariableValue,getServerVariable:t.serverVariableValue,getEffectiveServerValue:t.serverEffectiveValue})):null}}},56617:(e,t,r)=>{"use strict";r.r(t),r.d(t,{default:()=>p});var n=r(61125),o=r.n(n),a=r(51679),i=r.n(a),s=r(97606),l=r.n(s),u=r(67294),c=r(43393);r(23930);class p extends u.Component{constructor(){super(...arguments),o()(this,"onServerChange",(e=>{this.setServer(e.target.value)})),o()(this,"onServerVariableValueChange",(e=>{let{setServerVariableValue:t,currentServer:r}=this.props,n=e.target.getAttribute("data-variable"),o=e.target.value;"function"==typeof t&&t({server:r,key:n,val:o})})),o()(this,"setServer",(e=>{let{setSelectedServer:t}=this.props;t(e)}))}componentDidMount(){var e;let{servers:t,currentServer:r}=this.props;r||this.setServer(null===(e=t.first())||void 0===e?void 0:e.get("url"))}UNSAFE_componentWillReceiveProps(e){let{servers:t,setServerVariableValue:r,getServerVariable:n}=e;if(this.props.currentServer!==e.currentServer||this.props.servers!==e.servers){var o;let a=i()(t).call(t,(t=>t.get("url")===e.currentServer)),s=i()(o=this.props.servers).call(o,(e=>e.get("url")===this.props.currentServer))||(0,c.OrderedMap)();if(!a)return this.setServer(t.first().get("url"));let u=s.get("variables")||(0,c.OrderedMap)(),p=(i()(u).call(u,(e=>e.get("default")))||(0,c.OrderedMap)()).get("default"),f=a.get("variables")||(0,c.OrderedMap)(),h=(i()(f).call(f,(e=>e.get("default")))||(0,c.OrderedMap)()).get("default");l()(f).call(f,((t,o)=>{n(e.currentServer,o)&&p===h||r({server:e.currentServer,key:o,val:t.get("default")||""})}))}}render(){var 
e,t;let{servers:r,currentServer:n,getServerVariable:o,getEffectiveServerValue:a}=this.props,s=(i()(r).call(r,(e=>e.get("url")===n))||(0,c.OrderedMap)()).get("variables")||(0,c.OrderedMap)(),p=0!==s.size;return u.createElement("div",{className:"servers"},u.createElement("label",{htmlFor:"servers"},u.createElement("select",{onChange:this.onServerChange,value:n},l()(e=r.valueSeq()).call(e,(e=>u.createElement("option",{value:e.get("url"),key:e.get("url")},e.get("url"),e.get("description")&&` - ${e.get("description")}`))).toArray())),p?u.createElement("div",null,u.createElement("div",{className:"computed-url"},"Computed URL:",u.createElement("code",null,a(n))),u.createElement("h4",null,"Server variables"),u.createElement("table",null,u.createElement("tbody",null,l()(t=s.entrySeq()).call(t,(e=>{var t;let[r,a]=e;return u.createElement("tr",{key:r},u.createElement("td",null,r),u.createElement("td",null,a.get("enum")?u.createElement("select",{"data-variable":r,onChange:this.onServerVariableValueChange},l()(t=a.get("enum")).call(t,(e=>u.createElement("option",{selected:e===o(n,r),key:e,value:e},e)))):u.createElement("input",{type:"text",value:o(n,r)||"",onChange:this.onServerVariableValueChange,"data-variable":r})))}))))):null)}}},7779:(e,t,r)=>{"use strict";r.r(t),r.d(t,{isOAS3:()=>l,isSwagger2:()=>u,OAS3ComponentWrapFactory:()=>c});var n=r(23101),o=r.n(n),a=r(27043),i=r.n(a),s=r(67294);function l(e){const t=e.get("openapi");return"string"==typeof t&&(i()(t).call(t,"3.0.")&&t.length>4)}function u(e){const t=e.get("swagger");return"string"==typeof t&&i()(t).call(t,"2.0")}function c(e){return(t,r)=>n=>{if(r&&r.specSelectors&&r.specSelectors.specJson){return l(r.specSelectors.specJson())?s.createElement(e,o()({},n,r,{Ori:t})):s.createElement(t,n)}return console.warn("OAS3 wrapper: couldn't get spec"),null}}},97451:(e,t,r)=>{"use strict";r.r(t),r.d(t,{default:()=>p});var n=r(92044),o=r(73723),a=r(91741),i=r(76467),s=r(37761),l=r(67002),u=r(5065),c=r(62109);function p(){return{components:i.default,wrapComponents:s.default,statePlugins:{spec:{wrapSelectors:n,selectors:a},auth:{wrapSelectors:o},oas3:{actions:l,reducers:c.default,selectors:u}}}}},62109:(e,t,r)=>{"use strict";r.r(t),r.d(t,{default:()=>p});var n=r(8712),o=r.n(n),a=r(86),i=r.n(a),s=r(24282),l=r.n(s),u=r(43393),c=r(67002);const p={[c.UPDATE_SELECTED_SERVER]:(e,t)=>{let{payload:{selectedServerUrl:r,namespace:n}}=t;const o=n?[n,"selectedServer"]:["selectedServer"];return e.setIn(o,r)},[c.UPDATE_REQUEST_BODY_VALUE]:(e,t)=>{let{payload:{value:r,pathMethod:n}}=t,[a,s]=n;if(!u.Map.isMap(r))return e.setIn(["requestData",a,s,"bodyValue"],r);let l,c=e.getIn(["requestData",a,s,"bodyValue"])||(0,u.Map)();u.Map.isMap(c)||(c=(0,u.Map)());const[...p]=o()(r).call(r);return i()(p).call(p,(e=>{let t=r.getIn([e]);c.has(e)&&u.Map.isMap(t)||(l=c.setIn([e,"value"],t))})),e.setIn(["requestData",a,s,"bodyValue"],l)},[c.UPDATE_REQUEST_BODY_VALUE_RETAIN_FLAG]:(e,t)=>{let{payload:{value:r,pathMethod:n}}=t,[o,a]=n;return e.setIn(["requestData",o,a,"retainBodyValue"],r)},[c.UPDATE_REQUEST_BODY_INCLUSION]:(e,t)=>{let{payload:{value:r,pathMethod:n,name:o}}=t,[a,i]=n;return e.setIn(["requestData",a,i,"bodyInclusion",o],r)},[c.UPDATE_ACTIVE_EXAMPLES_MEMBER]:(e,t)=>{let{payload:{name:r,pathMethod:n,contextType:o,contextName:a}}=t,[i,s]=n;return e.setIn(["examples",i,s,o,a,"activeExample"],r)},[c.UPDATE_REQUEST_CONTENT_TYPE]:(e,t)=>{let{payload:{value:r,pathMethod:n}}=t,[o,a]=n;return 
e.setIn(["requestData",o,a,"requestContentType"],r)},[c.UPDATE_RESPONSE_CONTENT_TYPE]:(e,t)=>{let{payload:{value:r,path:n,method:o}}=t;return e.setIn(["requestData",n,o,"responseContentType"],r)},[c.UPDATE_SERVER_VARIABLE_VALUE]:(e,t)=>{let{payload:{server:r,namespace:n,key:o,val:a}}=t;const i=n?[n,"serverVariableValues",r,o]:["serverVariableValues",r,o];return e.setIn(i,a)},[c.SET_REQUEST_BODY_VALIDATE_ERROR]:(e,t)=>{let{payload:{path:r,method:n,validationErrors:o}}=t,a=[];if(a.push("Required field is not provided"),o.missingBodyValue)return e.setIn(["requestData",r,n,"errors"],(0,u.fromJS)(a));if(o.missingRequiredKeys&&o.missingRequiredKeys.length>0){const{missingRequiredKeys:t}=o;return e.updateIn(["requestData",r,n,"bodyValue"],(0,u.fromJS)({}),(e=>l()(t).call(t,((e,t)=>e.setIn([t,"errors"],(0,u.fromJS)(a))),e)))}return console.warn("unexpected result: SET_REQUEST_BODY_VALIDATE_ERROR"),e},[c.CLEAR_REQUEST_BODY_VALIDATE_ERROR]:(e,t)=>{let{payload:{path:r,method:n}}=t;const a=e.getIn(["requestData",r,n,"bodyValue"]);if(!u.Map.isMap(a))return e.setIn(["requestData",r,n,"errors"],(0,u.fromJS)([]));const[...i]=o()(a).call(a);return i?e.updateIn(["requestData",r,n,"bodyValue"],(0,u.fromJS)({}),(e=>l()(i).call(i,((e,t)=>e.setIn([t,"errors"],(0,u.fromJS)([]))),e))):e},[c.CLEAR_REQUEST_BODY_VALUE]:(e,t)=>{let{payload:{pathMethod:r}}=t,[n,o]=r;const a=e.getIn(["requestData",n,o,"bodyValue"]);return a?u.Map.isMap(a)?e.setIn(["requestData",n,o,"bodyValue"],(0,u.Map)()):e.setIn(["requestData",n,o,"bodyValue"],""):e}}},5065:(e,t,r)=>{"use strict";r.r(t),r.d(t,{selectedServer:()=>g,requestBodyValue:()=>v,shouldRetainRequestBodyValue:()=>y,hasUserEditedBody:()=>b,requestBodyInclusionSetting:()=>w,requestBodyErrors:()=>E,activeExamplesMember:()=>x,requestContentType:()=>_,responseContentType:()=>S,serverVariableValue:()=>A,serverVariables:()=>k,serverEffectiveValue:()=>C,validateBeforeExecute:()=>O,validateShallowRequired:()=>I});var n=r(97606),o=r.n(n),a=r(86),i=r.n(a),s=r(28222),l=r.n(s),u=r(11882),c=r.n(u),p=r(43393),f=r(7779),h=r(42458),d=r(90242);function m(e){return function(){for(var t=arguments.length,r=new Array(t),n=0;n{const n=t.getSystem().specSelectors.specJson();return(0,f.isOAS3)(n)?e(...r):null}}}const g=m(((e,t)=>{const r=t?[t,"selectedServer"]:["selectedServer"];return e.getIn(r)||""})),v=m(((e,t,r)=>e.getIn(["requestData",t,r,"bodyValue"])||null)),y=m(((e,t,r)=>e.getIn(["requestData",t,r,"retainBodyValue"])||!1)),b=(e,t,r)=>e=>{const{oas3Selectors:n,specSelectors:o}=e.getSystem(),a=o.specJson();if((0,f.isOAS3)(a)){let e=!1;const a=n.requestContentType(t,r);let i=n.requestBodyValue(t,r);if(p.Map.isMap(i)&&(i=(0,d.Pz)(i.mapEntries((e=>p.Map.isMap(e[1])?[e[0],e[1].get("value")]:e)).toJS())),p.List.isList(i)&&(i=(0,d.Pz)(i)),a){const s=(0,h.getDefaultRequestBodyValue)(o.specResolvedSubtree(["paths",t,r,"requestBody"]),a,n.activeExamplesMember(t,r,"requestBody","requestBody"));e=!!i&&i!==s}return e}return null},w=m(((e,t,r)=>e.getIn(["requestData",t,r,"bodyInclusion"])||(0,p.Map)())),E=m(((e,t,r)=>e.getIn(["requestData",t,r,"errors"])||null)),x=m(((e,t,r,n,o)=>e.getIn(["examples",t,r,n,o,"activeExample"])||null)),_=m(((e,t,r)=>e.getIn(["requestData",t,r,"requestContentType"])||null)),S=m(((e,t,r)=>e.getIn(["requestData",t,r,"responseContentType"])||null)),A=m(((e,t,r)=>{let n;if("string"!=typeof t){const{server:e,namespace:o}=t;n=o?[o,"serverVariableValues",e,r]:["serverVariableValues",e,r]}else{n=["serverVariableValues",t,r]}return e.getIn(n)||null})),k=m(((e,t)=>{let 
r;if("string"!=typeof t){const{server:e,namespace:n}=t;r=n?[n,"serverVariableValues",e]:["serverVariableValues",e]}else{r=["serverVariableValues",t]}return e.getIn(r)||(0,p.OrderedMap)()})),C=m(((e,t)=>{var r,n;if("string"!=typeof t){const{server:o,namespace:a}=t;n=o,r=a?e.getIn([a,"serverVariableValues",n]):e.getIn(["serverVariableValues",n])}else n=t,r=e.getIn(["serverVariableValues",n]);r=r||(0,p.OrderedMap)();let a=n;return o()(r).call(r,((e,t)=>{a=a.replace(new RegExp(`{${t}}`,"g"),e)})),a})),O=(j=(e,t)=>((e,t)=>(t=t||[],!!e.getIn(["requestData",...t,"bodyValue"])))(e,t),function(){for(var e=arguments.length,t=new Array(e),r=0;r{const r=e.getSystem().specSelectors.specJson();let n=[...t][1]||[];return!r.getIn(["paths",...n,"requestBody","required"])||j(...t)}});var j;const I=(e,t)=>{var r;let{oas3RequiredRequestBodyContentType:n,oas3RequestContentType:o,oas3RequestBodyValue:a}=t,s=[];if(!p.Map.isMap(a))return s;let u=[];return i()(r=l()(n.requestContentType)).call(r,(e=>{if(e===o){let t=n.requestContentType[e];i()(t).call(t,(e=>{c()(u).call(u,e)<0&&u.push(e)}))}})),i()(u).call(u,(e=>{a.getIn([e,"value"])||s.push(e)})),s}},91741:(e,t,r)=>{"use strict";r.r(t),r.d(t,{servers:()=>u,isSwagger2:()=>p});var n=r(20573),o=r(43393),a=r(7779);const i=e=>e||(0,o.Map)(),s=(0,n.P1)(i,(e=>e.get("json",(0,o.Map)()))),l=(0,n.P1)(i,(e=>e.get("resolved",(0,o.Map)()))),u=(c=(0,n.P1)((e=>{let t=l(e);return t.count()<1&&(t=s(e)),t}),(e=>e.getIn(["servers"])||(0,o.Map)())),()=>function(e){const t=e.getSystem().specSelectors.specJson();if((0,a.isOAS3)(t)){for(var r=arguments.length,n=new Array(r>1?r-1:0),o=1;o()=>{const e=t.getSystem().specSelectors.specJson();return(0,a.isSwagger2)(e)}},92044:(e,t,r)=>{"use strict";r.r(t),r.d(t,{definitions:()=>h,hasHost:()=>d,securityDefinitions:()=>m,host:()=>g,basePath:()=>v,consumes:()=>y,produces:()=>b,schemes:()=>w,servers:()=>E,isOAS3:()=>x,isSwagger2:()=>_});var n=r(20573),o=r(33881),a=r(43393),i=r(7779);function s(e){return(t,r)=>function(){const n=r.getSystem().specSelectors.specJson();return(0,i.isOAS3)(n)?e(...arguments):t(...arguments)}}const l=e=>e||(0,a.Map)(),u=s((0,n.P1)((()=>null))),c=(0,n.P1)(l,(e=>e.get("json",(0,a.Map)()))),p=(0,n.P1)(l,(e=>e.get("resolved",(0,a.Map)()))),f=e=>{let t=p(e);return t.count()<1&&(t=c(e)),t},h=s((0,n.P1)(f,(e=>{const t=e.getIn(["components","schemas"]);return a.Map.isMap(t)?t:(0,a.Map)()}))),d=s((e=>f(e).hasIn(["servers",0]))),m=s((0,n.P1)(o.specJsonWithResolvedSubtrees,(e=>e.getIn(["components","securitySchemes"])||null))),g=u,v=u,y=u,b=u,w=u,E=s((0,n.P1)(f,(e=>e.getIn(["servers"])||(0,a.Map)()))),x=(e,t)=>()=>{const e=t.getSystem().specSelectors.specJson();return(0,i.isOAS3)(a.Map.isMap(e)?e:(0,a.Map)())},_=(e,t)=>()=>{const e=t.getSystem().specSelectors.specJson();return(0,i.isSwagger2)(a.Map.isMap(e)?e:(0,a.Map)())}},70356:(e,t,r)=>{"use strict";r.r(t),r.d(t,{default:()=>o});var n=r(67294);const o=(0,r(7779).OAS3ComponentWrapFactory)((e=>{let{Ori:t,...r}=e;const{schema:o,getComponent:a,errSelectors:i,authorized:s,onAuthChange:l,name:u}=r,c=a("HttpAuth");return"http"===o.get("type")?n.createElement(c,{key:u,schema:o,name:u,errSelectors:i,authorized:s,getComponent:a,onChange:l}):n.createElement(t,r)}))},37761:(e,t,r)=>{"use strict";r.r(t),r.d(t,{default:()=>u});var n=r(22460),o=r(70356),a=r(69487),i=r(50058),s=r(53499),l=r(90287);const u={Markdown:n.default,AuthItem:o.default,JsonSchema_string:l.default,VersionStamp:a.default,model:s.default,onlineValidatorBadge:i.default}},90287:(e,t,r)=>{"use 
strict";r.r(t),r.d(t,{default:()=>o});var n=r(67294);const o=(0,r(7779).OAS3ComponentWrapFactory)((e=>{let{Ori:t,...r}=e;const{schema:o,getComponent:a,errors:i,onChange:s}=r,l=o&&o.get?o.get("format"):null,u=o&&o.get?o.get("type"):null,c=a("Input");return u&&"string"===u&&l&&("binary"===l||"base64"===l)?n.createElement(c,{type:"file",className:i.length?"invalid":"",title:i.length?i:"",onChange:e=>{s(e.target.files[0])},disabled:t.isDisabled}):n.createElement(t,r)}))},22460:(e,t,r)=>{"use strict";r.r(t),r.d(t,{Markdown:()=>f,default:()=>h});var n=r(81607),o=r.n(n),a=r(67294),i=r(94184),s=r.n(i),l=r(89927),u=r(7779),c=r(86019);const p=new l._("commonmark");p.block.ruler.enable(["table"]),p.set({linkTarget:"_blank"});const f=e=>{let{source:t,className:r="",getConfigs:n}=e;if("string"!=typeof t)return null;if(t){const{useUnsafeMarkdown:e}=n(),i=p.render(t),l=(0,c.s)(i,{useUnsafeMarkdown:e});let u;return"string"==typeof l&&(u=o()(l).call(l)),a.createElement("div",{dangerouslySetInnerHTML:{__html:u},className:s()(r,"renderedMarkdown")})}return null};f.defaultProps={getConfigs:()=>({useUnsafeMarkdown:!1})};const h=(0,u.OAS3ComponentWrapFactory)(f)},53499:(e,t,r)=>{"use strict";r.r(t),r.d(t,{default:()=>u});var n=r(23101),o=r.n(n),a=r(67294),i=r(7779),s=r(53795);class l extends a.Component{render(){let{getConfigs:e,schema:t}=this.props,r=["model-box"],n=null;return!0===t.get("deprecated")&&(r.push("deprecated"),n=a.createElement("span",{className:"model-deprecated-warning"},"Deprecated:")),a.createElement("div",{className:r.join(" ")},n,a.createElement(s.Z,o()({},this.props,{getConfigs:e,depth:1,expandDepth:this.props.expandDepth||0})))}}const u=(0,i.OAS3ComponentWrapFactory)(l)},50058:(e,t,r)=>{"use strict";r.r(t),r.d(t,{default:()=>a});var n=r(7779),o=r(5623);const a=(0,n.OAS3ComponentWrapFactory)(o.Z)},69487:(e,t,r)=>{"use strict";r.r(t),r.d(t,{default:()=>o});var n=r(67294);const o=(0,r(7779).OAS3ComponentWrapFactory)((e=>{const{Ori:t}=e;return n.createElement("span",null,n.createElement(t,e),n.createElement("small",{className:"version-stamp"},n.createElement("pre",{className:"version"},"OAS3")))}))},28560:(e,t,r)=>{"use strict";r.r(t),r.d(t,{default:()=>i});var n=r(87198),o=r.n(n);let a=!1;function i(){return{statePlugins:{spec:{wrapActions:{updateSpec:e=>function(){return a=!0,e(...arguments)},updateJsonSpec:(e,t)=>function(){const r=t.getConfigs().onComplete;return a&&"function"==typeof r&&(o()(r,0),a=!1),e(...arguments)}}}}}}},92135:(e,t,r)=>{"use strict";r.r(t),r.d(t,{requestSnippetGenerator_curl_powershell:()=>A,requestSnippetGenerator_curl_bash:()=>k,requestSnippetGenerator_curl_cmd:()=>C});var n=r(11882),o=r.n(n),a=r(81607),i=r.n(a),s=r(35627),l=r.n(s),u=r(97606),c=r.n(u),p=r(12196),f=r.n(p),h=r(74386),d=r.n(h),m=r(58118),g=r.n(m),v=r(27504),y=r(43393);const b=e=>{var t;const r="_**[]";return o()(e).call(e,r)<0?e:i()(t=e.split(r)[0]).call(t)},w=e=>"-d "===e||/^[_\/-]/g.test(e)?e:"'"+e.replace(/'/g,"'\\''")+"'",E=e=>"-d "===(e=e.replace(/\^/g,"^^").replace(/\\"/g,'\\\\"').replace(/"/g,'""').replace(/\n/g,"^\n"))?e.replace(/-d /g,"-d ^\n"):/^[_\/-]/g.test(e)?e:'"'+e+'"',x=e=>"-d "===e?e:/\n/.test(e)?'@"\n'+e.replace(/"/g,'\\"').replace(/`/g,"``").replace(/\$/,"`$")+'\n"@':/^[_\/-]/g.test(e)?e:"'"+e.replace(/"/g,'""').replace(/'/g,"''")+"'";function _(e){let t=[];for(let[r,n]of e.get("body").entrySeq()){let e=b(r);n instanceof v.Z.File?t.push(` "${e}": {\n "name": "${n.name}"${n.type?`,\n "type": "${n.type}"`:""}\n }`):t.push(` "${e}": ${l()(n,null,2).replace(/(\r\n|\r|\n)/g,"\n 
")}`)}return`{\n${t.join(",\n")}\n}`}const S=function(e,t,r){let n=arguments.length>3&&void 0!==arguments[3]?arguments[3]:"",o=!1,a="";const i=function(){for(var e=arguments.length,r=new Array(e),n=0;na+=` ${r}`,p=function(){var e;let t=arguments.length>0&&void 0!==arguments[0]?arguments[0]:1;return a+=f()(e=" ").call(e,t)};let h=e.get("headers");if(a+="curl"+n,e.has("curlOptions")&&i(...e.get("curlOptions")),i("-X",e.get("method")),u(),p(),s(`${e.get("url")}`),h&&h.size)for(let t of d()(m=e.get("headers")).call(m)){var m;u(),p();let[e,r]=t;s("-H",`${e}: ${r}`),o=o||/^content-type$/i.test(e)&&/^multipart\/form-data$/i.test(r)}const w=e.get("body");var E;if(w)if(o&&g()(E=["POST","PUT","PATCH"]).call(E,e.get("method")))for(let[e,t]of w.entrySeq()){let r=b(e);u(),p(),s("-F"),t instanceof v.Z.File?i(`${r}=@${t.name}${t.type?`;type=${t.type}`:""}`):i(`${r}=${t}`)}else if(w instanceof v.Z.File)u(),p(),s(`--data-binary '@${w.name}'`);else{u(),p(),s("-d ");let t=w;y.Map.isMap(t)?s(_(e)):("string"!=typeof t&&(t=l()(t)),s(t))}else w||"POST"!==e.get("method")||(u(),p(),s("-d ''"));return a},A=e=>S(e,x,"`\n",".exe"),k=e=>S(e,w,"\\\n"),C=e=>S(e,E,"^\n")},86575:(e,t,r)=>{"use strict";r.r(t),r.d(t,{default:()=>i});var n=r(92135),o=r(4669),a=r(84206);const i=()=>({components:{RequestSnippets:a.default},fn:n,statePlugins:{requestSnippets:{selectors:o}}})},84206:(e,t,r)=>{"use strict";r.r(t),r.d(t,{default:()=>w});var n=r(14418),o=r.n(n),a=r(25110),i=r.n(a),s=r(86),l=r.n(s),u=r(97606),c=r.n(u),p=r(67294),f=r(27361),h=r.n(f),d=r(23560),m=r.n(d),g=r(74855),v=r(36581);const y={cursor:"pointer",lineHeight:1,display:"inline-flex",backgroundColor:"rgb(250, 250, 250)",paddingBottom:"0",paddingTop:"0",border:"1px solid rgb(51, 51, 51)",borderRadius:"4px 4px 0 0",boxShadow:"none",borderBottom:"none"},b={cursor:"pointer",lineHeight:1,display:"inline-flex",backgroundColor:"rgb(51, 51, 51)",boxShadow:"none",border:"1px solid rgb(51, 51, 51)",paddingBottom:"0",paddingTop:"0",borderRadius:"4px 4px 0 0",marginTop:"-5px",marginRight:"-5px",marginLeft:"-5px",zIndex:"9999",borderBottom:"none"},w=e=>{var t,r;let{request:n,requestSnippetsSelectors:a,getConfigs:s}=e;const u=m()(s)?s():null,f=!1!==h()(u,"syntaxHighlight")&&h()(u,"syntaxHighlight.activated",!0),d=(0,p.useRef)(null),[w,E]=(0,p.useState)(null===(t=a.getSnippetGenerators())||void 0===t?void 0:t.keySeq().first()),[x,_]=(0,p.useState)(null==a?void 0:a.getDefaultExpanded());(0,p.useEffect)((()=>{}),[]),(0,p.useEffect)((()=>{var e;const t=o()(e=i()(d.current.childNodes)).call(e,(e=>{var t;return!!e.nodeType&&(null===(t=e.classList)||void 0===t?void 0:t.contains("curl-command"))}));return l()(t).call(t,(e=>e.addEventListener("mousewheel",j,{passive:!1}))),()=>{l()(t).call(t,(e=>e.removeEventListener("mousewheel",j)))}}),[n]);const S=a.getSnippetGenerators(),A=S.get(w),k=A.get("fn")(n),C=()=>{_(!x)},O=e=>e===w?b:y,j=e=>{const{target:t,deltaY:r}=e,{scrollHeight:n,offsetHeight:o,scrollTop:a}=t;n>o&&(0===a&&r<0||o+a>=n&&r>0)&&e.preventDefault()},I=f?p.createElement(v.d3,{language:A.get("syntax"),className:"curl microlight",style:(0,v.C2)(h()(u,"syntaxHighlight.theme"))},k):p.createElement("textarea",{readOnly:!0,className:"curl",value:k});return 
p.createElement("div",{className:"request-snippets",ref:d},p.createElement("div",{style:{width:"100%",display:"flex",justifyContent:"flex-start",alignItems:"center",marginBottom:"15px"}},p.createElement("h4",{onClick:()=>C(),style:{cursor:"pointer"}},"Snippets"),p.createElement("button",{onClick:()=>C(),style:{border:"none",background:"none"},title:x?"Collapse operation":"Expand operation"},p.createElement("svg",{className:"arrow",width:"10",height:"10"},p.createElement("use",{href:x?"#large-arrow-down":"#large-arrow",xlinkHref:x?"#large-arrow-down":"#large-arrow"})))),x&&p.createElement("div",{className:"curl-command"},p.createElement("div",{style:{paddingLeft:"15px",paddingRight:"10px",width:"100%",display:"flex"}},c()(r=S.entrySeq()).call(r,(e=>{let[t,r]=e;return p.createElement("div",{style:O(t),className:"btn",key:t,onClick:()=>(e=>{w!==e&&E(e)})(t)},p.createElement("h4",{style:t===w?{color:"white"}:{}},r.get("title")))}))),p.createElement("div",{className:"copy-to-clipboard"},p.createElement(g.CopyToClipboard,{text:k},p.createElement("button",null))),p.createElement("div",null,I)))}},4669:(e,t,r)=>{"use strict";r.r(t),r.d(t,{getGenerators:()=>f,getSnippetGenerators:()=>h,getActiveLanguage:()=>d,getDefaultExpanded:()=>m});var n=r(14418),o=r.n(n),a=r(58118),i=r.n(a),s=r(97606),l=r.n(s),u=r(20573),c=r(43393);const p=e=>e||(0,c.Map)(),f=(0,u.P1)(p,(e=>{const t=e.get("languages"),r=e.get("generators",(0,c.Map)());return!t||t.isEmpty()?r:o()(r).call(r,((e,r)=>i()(t).call(t,r)))})),h=e=>t=>{var r,n;let{fn:a}=t;return o()(r=l()(n=f(e)).call(n,((e,t)=>{const r=(e=>a[`requestSnippetGenerator_${e}`])(t);return"function"!=typeof r?null:e.set("fn",r)}))).call(r,(e=>e))},d=(0,u.P1)(p,(e=>e.get("activeLanguage"))),m=(0,u.P1)(p,(e=>e.get("defaultExpanded")))},36195:(e,t,r)=>{"use strict";r.r(t),r.d(t,{ErrorBoundary:()=>i,default:()=>s});var n=r(67294),o=r(56189),a=r(29403);class i extends n.Component{static getDerivedStateFromError(e){return{hasError:!0,error:e}}constructor(){super(...arguments),this.state={hasError:!1,error:null}}componentDidCatch(e,t){this.props.fn.componentDidCatch(e,t)}render(){const{getComponent:e,targetName:t,children:r}=this.props;if(this.state.hasError){const r=e("Fallback");return n.createElement(r,{name:t})}return r}}i.defaultProps={targetName:"this component",getComponent:()=>a.default,fn:{componentDidCatch:o.componentDidCatch},children:null};const s=i},29403:(e,t,r)=>{"use strict";r.r(t),r.d(t,{default:()=>o});var n=r(67294);const o=e=>{let{name:t}=e;return n.createElement("div",{className:"fallback"},"😱 ",n.createElement("i",null,"Could not render ","t"===t?"this component":t,", see the console."))}},56189:(e,t,r)=>{"use strict";r.r(t),r.d(t,{componentDidCatch:()=>i,withErrorBoundary:()=>s});var n=r(23101),o=r.n(n),a=r(67294);const i=console.error,s=e=>t=>{const{getComponent:r,fn:n}=e(),i=r("ErrorBoundary"),s=n.getDisplayName(t);class l extends a.Component{render(){return a.createElement(i,{targetName:s,getComponent:r,fn:n},a.createElement(t,o()({},this.props,this.context)))}}var u;return l.displayName=`WithErrorBoundary(${s})`,(u=t).prototype&&u.prototype.isReactComponent&&(l.prototype.mapStateToProps=t.prototype.mapStateToProps),l}},27621:(e,t,r)=>{"use strict";r.r(t),r.d(t,{default:()=>c});var n=r(47475),o=r.n(n),a=r(7287),i=r.n(a),s=r(36195),l=r(29403),u=r(56189);const c=function(){let{componentList:e=[],fullOverride:t=!1}=arguments.length>0&&void 0!==arguments[0]?arguments[0]:{};return r=>{var n;let{getSystem:a}=r;const 
c=t?e:["App","BaseLayout","VersionPragmaFilter","InfoContainer","ServersContainer","SchemesContainer","AuthorizeBtnContainer","FilterContainer","Operations","OperationContainer","parameters","responses","OperationServers","Models","ModelWrapper",...e],p=i()(c,o()(n=Array(c.length)).call(n,((e,t)=>{let{fn:r}=t;return r.withErrorBoundary(e)})));return{fn:{componentDidCatch:u.componentDidCatch,withErrorBoundary:(0,u.withErrorBoundary)(a)},components:{ErrorBoundary:s.default,Fallback:l.default},wrapComponents:p}}}},57050:(e,t,r)=>{"use strict";r.r(t),r.d(t,{sampleFromSchemaGeneric:()=>F,inferSchema:()=>z,createXMLExample:()=>U,sampleFromSchema:()=>q,memoizedCreateXMLExample:()=>$,memoizedSampleFromSchema:()=>W});var n=r(11882),o=r.n(n),a=r(86),i=r.n(a),s=r(58309),l=r.n(s),u=r(58118),c=r.n(u),p=r(92039),f=r.n(p),h=r(24278),d=r.n(h),m=r(51679),g=r.n(m),v=r(39022),y=r.n(v),b=r(97606),w=r.n(b),E=r(35627),x=r.n(E),_=r(53479),S=r.n(_),A=r(14419),k=r.n(A),C=r(41609),O=r.n(C),j=r(90242),I=r(60314);const N={string:e=>e.pattern?(e=>{try{return new(k())(e).gen()}catch(e){return"string"}})(e.pattern):"string",string_email:()=>"user@example.com","string_date-time":()=>(new Date).toISOString(),string_date:()=>(new Date).toISOString().substring(0,10),string_uuid:()=>"3fa85f64-5717-4562-b3fc-2c963f66afa6",string_hostname:()=>"example.com",string_ipv4:()=>"198.51.100.42",string_ipv6:()=>"2001:0db8:5b96:0000:0000:426f:8e17:642a",number:()=>0,number_float:()=>0,integer:()=>0,boolean:e=>"boolean"!=typeof e.default||e.default},T=e=>{e=(0,j.mz)(e);let{type:t,format:r}=e,n=N[`${t}_${r}`]||N[t];return(0,j.Wl)(n)?n(e):"Unknown Type: "+e.type},P=e=>(0,j.XV)(e,"$$ref",(e=>"string"==typeof e&&o()(e).call(e,"#")>-1)),R=["maxProperties","minProperties"],M=["minItems","maxItems"],D=["minimum","maximum","exclusiveMinimum","exclusiveMaximum"],L=["minLength","maxLength"],B=function(e,t){var r;let n=arguments.length>2&&void 0!==arguments[2]?arguments[2]:{};const a=r=>{void 0===t[r]&&void 0!==e[r]&&(t[r]=e[r])};var s;(i()(r=["example","default","enum","xml","type",...R,...M,...D,...L]).call(r,(e=>a(e))),void 0!==e.required&&l()(e.required))&&(void 0!==t.required&&t.required.length||(t.required=[]),i()(s=e.required).call(s,(e=>{var r;c()(r=t.required).call(r,e)||t.required.push(e)})));if(e.properties){t.properties||(t.properties={});let r=(0,j.mz)(e.properties);for(let a in r){var u;if(Object.prototype.hasOwnProperty.call(r,a))if(!r[a]||!r[a].deprecated)if(!r[a]||!r[a].readOnly||n.includeReadOnly)if(!r[a]||!r[a].writeOnly||n.includeWriteOnly)if(!t.properties[a])t.properties[a]=r[a],!e.required&&l()(e.required)&&-1!==o()(u=e.required).call(u,a)&&(t.required?t.required.push(a):t.required=[a])}}return e.items&&(t.items||(t.items={}),t.items=B(e.items,t.items,n)),t},F=function(e){let t=arguments.length>1&&void 0!==arguments[1]?arguments[1]:{},r=arguments.length>2&&void 0!==arguments[2]?arguments[2]:void 0,n=arguments.length>3&&void 0!==arguments[3]&&arguments[3];e&&(0,j.Wl)(e.toJS)&&(e=e.toJS());let a=void 0!==r||e&&void 0!==e.example||e&&void 0!==e.default;const s=!a&&e&&e.oneOf&&e.oneOf.length>0,u=!a&&e&&e.anyOf&&e.anyOf.length>0;if(!a&&(s||u)){const r=(0,j.mz)(s?e.oneOf[0]:e.anyOf[0]);if(B(r,e,t),!e.xml&&r.xml&&(e.xml=r.xml),void 0!==e.example&&void 0!==r.example)a=!0;else if(r.properties){e.properties||(e.properties={});let n=(0,j.mz)(r.properties);for(let a in n){var 
p;if(Object.prototype.hasOwnProperty.call(n,a))if(!n[a]||!n[a].deprecated)if(!n[a]||!n[a].readOnly||t.includeReadOnly)if(!n[a]||!n[a].writeOnly||t.includeWriteOnly)if(!e.properties[a])e.properties[a]=n[a],!r.required&&l()(r.required)&&-1!==o()(p=r.required).call(p,a)&&(e.required?e.required.push(a):e.required=[a])}}}const h={};let{xml:m,type:v,example:b,properties:E,additionalProperties:x,items:_}=e||{},{includeReadOnly:S,includeWriteOnly:A}=t;m=m||{};let k,{name:C,prefix:I,namespace:N}=m,L={};if(n&&(C=C||"notagname",k=(I?I+":":"")+C,N)){h[I?"xmlns:"+I:"xmlns"]=N}n&&(L[k]=[]);const z=t=>f()(t).call(t,(t=>Object.prototype.hasOwnProperty.call(e,t)));e&&!v&&(E||x||z(R)?v="object":_||z(M)?v="array":z(D)?(v="number",e.type="number"):a||e.enum||(v="string",e.type="string"));const U=t=>{var r,n,o,a,i;null!==(null===(r=e)||void 0===r?void 0:r.maxItems)&&void 0!==(null===(n=e)||void 0===n?void 0:n.maxItems)&&(t=d()(t).call(t,0,null===(i=e)||void 0===i?void 0:i.maxItems));if(null!==(null===(o=e)||void 0===o?void 0:o.minItems)&&void 0!==(null===(a=e)||void 0===a?void 0:a.minItems)){let r=0;for(;t.length<(null===(s=e)||void 0===s?void 0:s.minItems);){var s;t.push(t[r++%t.length])}}return t},q=(0,j.mz)(E);let V,$=0;const W=()=>e&&null!==e.maxProperties&&void 0!==e.maxProperties&&$>=e.maxProperties,H=()=>{if(!e||!e.required)return 0;let t=0;var r,o;n?i()(r=e.required).call(r,(e=>t+=void 0===L[e]?0:1)):i()(o=e.required).call(o,(e=>{var r;return t+=void 0===(null===(r=L[k])||void 0===r?void 0:g()(r).call(r,(t=>void 0!==t[e])))?0:1}));return e.required.length-t},J=t=>{var r;return!(e&&e.required&&e.required.length)||!c()(r=e.required).call(r,t)},K=t=>!e||null===e.maxProperties||void 0===e.maxProperties||!W()&&(!J(t)||e.maxProperties-$-H()>0);if(V=n?function(r){let o=arguments.length>1&&void 0!==arguments[1]?arguments[1]:void 0;if(e&&q[r]){if(q[r].xml=q[r].xml||{},q[r].xml.attribute){const e=l()(q[r].enum)?q[r].enum[0]:void 0,t=q[r].example,n=q[r].default;return void(h[q[r].xml.name||r]=void 0!==t?t:void 0!==n?n:void 0!==e?e:T(q[r]))}q[r].xml.name=q[r].xml.name||r}else q[r]||!1===x||(q[r]={xml:{name:r}});let a=F(e&&q[r]||void 0,t,o,n);var i;K(r)&&($++,l()(a)?L[k]=y()(i=L[k]).call(i,a):L[k].push(a))}:(e,r)=>{K(e)&&(L[e]=F(q[e],t,r,n),$++)},a){let o;if(o=P(void 0!==r?r:void 0!==b?b:e.default),!n){if("number"==typeof o&&"string"===v)return`${o}`;if("string"!=typeof o||"string"===v)return o;try{return JSON.parse(o)}catch(e){return o}}if(e||(v=l()(o)?"array":typeof o),"array"===v){if(!l()(o)){if("string"==typeof o)return o;o=[o]}const r=e?e.items:void 0;r&&(r.xml=r.xml||m||{},r.xml.name=r.xml.name||m.name);let a=w()(o).call(o,(e=>F(r,t,e,n)));return a=U(a),m.wrapped?(L[k]=a,O()(h)||L[k].push({_attr:h})):L=a,L}if("object"===v){if("string"==typeof o)return o;for(let t in o)Object.prototype.hasOwnProperty.call(o,t)&&(e&&q[t]&&q[t].readOnly&&!S||e&&q[t]&&q[t].writeOnly&&!A||(e&&q[t]&&q[t].xml&&q[t].xml.attribute?h[q[t].xml.name||t]=o[t]:V(t,o[t])));return O()(h)||L[k].push({_attr:h}),L}return L[k]=O()(h)?o:[{_attr:h},o],L}if("object"===v){for(let e in q)Object.prototype.hasOwnProperty.call(q,e)&&(q[e]&&q[e].deprecated||q[e]&&q[e].readOnly&&!S||q[e]&&q[e].writeOnly&&!A||V(e));if(n&&h&&L[k].push({_attr:h}),W())return L;if(!0===x)n?L[k].push({additionalProp:"Anything can be here"}):L.additionalProp1={},$++;else if(x){const r=(0,j.mz)(x),o=F(r,t,void 0,n);if(n&&r.xml&&r.xml.name&&"notagname"!==r.xml.name)L[k].push(o);else{const t=null!==e.minProperties&&void 0!==e.minProperties&&$F(B(_,e,t),t,void 0,n)));else 
if(l()(_.oneOf)){var Y;r=w()(Y=_.oneOf).call(Y,(e=>F(B(_,e,t),t,void 0,n)))}else{if(!(!n||n&&m.wrapped))return F(_,t,void 0,n);r=[F(_,t,void 0,n)]}return r=U(r),n&&m.wrapped?(L[k]=r,O()(h)||L[k].push({_attr:h}),L):r}let Q;if(e&&l()(e.enum))Q=(0,j.AF)(e.enum)[0];else{if(!e)return;if(Q=T(e),"number"==typeof Q){let t=e.minimum;null!=t&&(e.exclusiveMinimum&&t++,Q=t);let r=e.maximum;null!=r&&(e.exclusiveMaximum&&r--,Q=r)}if("string"==typeof Q&&(null!==e.maxLength&&void 0!==e.maxLength&&(Q=d()(Q).call(Q,0,e.maxLength)),null!==e.minLength&&void 0!==e.minLength)){let t=0;for(;Q.length(e.schema&&(e=e.schema),e.properties&&(e.type="object"),e),U=(e,t,r)=>{const n=F(e,t,r,!0);if(n)return"string"==typeof n?n:S()(n,{declaration:!0,indent:"\t"})},q=(e,t,r)=>F(e,t,r,!1),V=(e,t,r)=>[e,x()(t),x()(r)],$=(0,I.Z)(U,V),W=(0,I.Z)(q,V)},8883:(e,t,r)=>{"use strict";r.r(t),r.d(t,{default:()=>o});var n=r(57050);function o(){return{fn:n}}},51228:(e,t,r)=>{"use strict";r.r(t),r.d(t,{UPDATE_SPEC:()=>U,UPDATE_URL:()=>q,UPDATE_JSON:()=>V,UPDATE_PARAM:()=>$,UPDATE_EMPTY_PARAM_INCLUSION:()=>W,VALIDATE_PARAMS:()=>H,SET_RESPONSE:()=>J,SET_REQUEST:()=>K,SET_MUTATED_REQUEST:()=>G,LOG_REQUEST:()=>Z,CLEAR_RESPONSE:()=>Y,CLEAR_REQUEST:()=>Q,CLEAR_VALIDATE_PARAMS:()=>X,UPDATE_OPERATION_META_VALUE:()=>ee,UPDATE_RESOLVED:()=>te,UPDATE_RESOLVED_SUBTREE:()=>re,SET_SCHEME:()=>ne,updateSpec:()=>oe,updateResolved:()=>ae,updateUrl:()=>ie,updateJsonSpec:()=>se,parseToJson:()=>le,resolveSpec:()=>ce,requestResolvedSubtree:()=>he,changeParam:()=>de,changeParamByIdentity:()=>me,updateResolvedSubtree:()=>ge,invalidateResolvedSubtreeCache:()=>ve,validateParams:()=>ye,updateEmptyParamInclusion:()=>be,clearValidateParams:()=>we,changeConsumesValue:()=>Ee,changeProducesValue:()=>xe,setResponse:()=>_e,setRequest:()=>Se,setMutatedRequest:()=>Ae,logRequest:()=>ke,executeRequest:()=>Ce,execute:()=>Oe,clearResponse:()=>je,clearRequest:()=>Ie,setScheme:()=>Ne});var n=r(58309),o=r.n(n),a=r(97606),i=r.n(a),s=r(96718),l=r.n(s),u=r(24282),c=r.n(u),p=r(2250),f=r.n(p),h=r(6226),d=r.n(h),m=r(14418),g=r.n(m),v=r(3665),y=r.n(v),b=r(11882),w=r.n(b),E=r(86),x=r.n(E),_=r(28222),S=r.n(_),A=r(76986),k=r.n(A),C=r(70586),O=r.n(C),j=r(1272),I=r(43393),N=r(84564),T=r.n(N),P=r(7710),R=r(47037),M=r.n(R),D=r(23279),L=r.n(D),B=r(36968),F=r.n(B),z=r(90242);const U="spec_update_spec",q="spec_update_url",V="spec_update_json",$="spec_update_param",W="spec_update_empty_param_inclusion",H="spec_validate_param",J="spec_set_response",K="spec_set_request",G="spec_set_mutated_request",Z="spec_log_request",Y="spec_clear_response",Q="spec_clear_request",X="spec_clear_validate_param",ee="spec_update_operation_meta_value",te="spec_update_resolved",re="spec_update_resolved_subtree",ne="set_scheme";function oe(e){const t=(r=e,M()(r)?r:"").replace(/\t/g," ");var r;if("string"==typeof e)return{type:U,payload:t}}function ae(e){return{type:te,payload:e}}function ie(e){return{type:q,payload:e}}function se(e){return{type:V,payload:e}}const le=e=>t=>{let{specActions:r,specSelectors:n,errActions:o}=t,{specStr:a}=n,i=null;try{e=e||a(),o.clear({source:"parser"}),i=j.ZP.load(e,{schema:j.A8})}catch(e){return console.error(e),o.newSpecErr({source:"parser",level:"error",message:e.reason,line:e.mark&&e.mark.line?e.mark.line+1:void 0})}return i&&"object"==typeof i?r.updateJsonSpec(i):{}};let ue=!1;const ce=(e,t)=>r=>{let{specActions:n,specSelectors:a,errActions:s,fn:{fetch:u,resolve:c,AST:p={}},getConfigs:f}=r;ue||(console.warn("specActions.resolveSpec is deprecated since v3.10.0 and will be removed in 
v4.0.0; use requestResolvedSubtree instead!"),ue=!0);const{modelPropertyMacro:h,parameterMacro:d,requestInterceptor:m,responseInterceptor:g}=f();void 0===e&&(e=a.specJson()),void 0===t&&(t=a.url());let v=p.getLineNumberForPath?p.getLineNumberForPath:()=>{},y=a.specStr();return c({fetch:u,spec:e,baseDoc:t,modelPropertyMacro:h,parameterMacro:d,requestInterceptor:m,responseInterceptor:g}).then((e=>{let{spec:t,errors:r}=e;if(s.clear({type:"thrown"}),o()(r)&&r.length>0){let e=i()(r).call(r,(e=>(console.error(e),e.line=e.fullPath?v(y,e.fullPath):null,e.path=e.fullPath?e.fullPath.join("."):null,e.level="error",e.type="thrown",e.source="resolver",l()(e,"message",{enumerable:!0,value:e.message}),e)));s.newThrownErrBatch(e)}return n.updateResolved(t)}))};let pe=[];const fe=L()((async()=>{const e=pe.system;if(!e)return void console.error("debResolveSubtrees: don't have a system to operate on, aborting.");const{errActions:t,errSelectors:r,fn:{resolveSubtree:n,fetch:a,AST:s={}},specSelectors:u,specActions:p}=e;if(!n)return void console.error("Error: Swagger-Client did not provide a `resolveSubtree` method, doing nothing.");let h=s.getLineNumberForPath?s.getLineNumberForPath:()=>{};const m=u.specStr(),{modelPropertyMacro:v,parameterMacro:b,requestInterceptor:w,responseInterceptor:E}=e.getConfigs();try{var x=await c()(pe).call(pe,(async(e,s)=>{const{resultMap:c,specWithCurrentSubtrees:p}=await e,{errors:x,spec:_}=await n(p,s,{baseDoc:u.url(),modelPropertyMacro:v,parameterMacro:b,requestInterceptor:w,responseInterceptor:E});if(r.allErrors().size&&t.clearBy((e=>{var t;return"thrown"!==e.get("type")||"resolver"!==e.get("source")||!f()(t=e.get("fullPath")).call(t,((e,t)=>e===s[t]||void 0===s[t]))})),o()(x)&&x.length>0){let e=i()(x).call(x,(e=>(e.line=e.fullPath?h(m,e.fullPath):null,e.path=e.fullPath?e.fullPath.join("."):null,e.level="error",e.type="thrown",e.source="resolver",l()(e,"message",{enumerable:!0,value:e.message}),e)));t.newThrownErrBatch(e)}var S,A;_&&u.isOAS3()&&"components"===s[0]&&"securitySchemes"===s[1]&&await d().all(i()(S=g()(A=y()(_)).call(A,(e=>"openIdConnect"===e.type))).call(S,(async e=>{const t={url:e.openIdConnectUrl,requestInterceptor:w,responseInterceptor:E};try{const r=await a(t);r instanceof Error||r.status>=400?console.error(r.statusText+" "+t.url):e.openIdConnectData=JSON.parse(r.text)}catch(e){console.error(e)}})));return F()(c,s,_),F()(p,s,_),{resultMap:c,specWithCurrentSubtrees:p}}),d().resolve({resultMap:(u.specResolvedSubtree([])||(0,I.Map)()).toJS(),specWithCurrentSubtrees:u.specJson().toJS()}));delete pe.system,pe=[]}catch(e){console.error(e)}p.updateResolvedSubtree([],x.resultMap)}),35),he=e=>t=>{var r;w()(r=i()(pe).call(pe,(e=>e.join("@@")))).call(r,e.join("@@"))>-1||(pe.push(e),pe.system=t,fe())};function de(e,t,r,n,o){return{type:$,payload:{path:e,value:n,paramName:t,paramIn:r,isXml:o}}}function me(e,t,r,n){return{type:$,payload:{path:e,param:t,value:r,isXml:n}}}const ge=(e,t)=>({type:re,payload:{path:e,value:t}}),ve=()=>({type:re,payload:{path:[],value:(0,I.Map)()}}),ye=(e,t)=>({type:H,payload:{pathMethod:e,isOAS3:t}}),be=(e,t,r,n)=>({type:W,payload:{pathMethod:e,paramName:t,paramIn:r,includeEmptyValue:n}});function we(e){return{type:X,payload:{pathMethod:e}}}function Ee(e,t){return{type:ee,payload:{path:e,value:t,key:"consumes_value"}}}function xe(e,t){return{type:ee,payload:{path:e,value:t,key:"produces_value"}}}const 
_e=(e,t,r)=>({payload:{path:e,method:t,res:r},type:J}),Se=(e,t,r)=>({payload:{path:e,method:t,req:r},type:K}),Ae=(e,t,r)=>({payload:{path:e,method:t,req:r},type:G}),ke=e=>({payload:e,type:Z}),Ce=e=>t=>{let{fn:r,specActions:n,specSelectors:a,getConfigs:s,oas3Selectors:l}=t,{pathName:u,method:c,operation:p}=e,{requestInterceptor:f,responseInterceptor:h}=s(),d=p.toJS();var m,v;p&&p.get("parameters")&&x()(m=g()(v=p.get("parameters")).call(v,(e=>e&&!0===e.get("allowEmptyValue")))).call(m,(t=>{if(a.parameterInclusionSettingFor([u,c],t.get("name"),t.get("in"))){e.parameters=e.parameters||{};const r=(0,z.cz)(t,e.parameters);(!r||r&&0===r.size)&&(e.parameters[t.get("name")]="")}}));if(e.contextUrl=T()(a.url()).toString(),d&&d.operationId?e.operationId=d.operationId:d&&u&&c&&(e.operationId=r.opId(d,u,c)),a.isOAS3()){const t=`${u}:${c}`;e.server=l.selectedServer(t)||l.selectedServer();const r=l.serverVariables({server:e.server,namespace:t}).toJS(),n=l.serverVariables({server:e.server}).toJS();e.serverVariables=S()(r).length?r:n,e.requestContentType=l.requestContentType(u,c),e.responseContentType=l.responseContentType(u,c)||"*/*";const a=l.requestBodyValue(u,c),s=l.requestBodyInclusionSetting(u,c);var y;if(a&&a.toJS)e.requestBody=g()(y=i()(a).call(a,(e=>I.Map.isMap(e)?e.get("value"):e))).call(y,((e,t)=>(o()(e)?0!==e.length:!(0,z.O2)(e))||s.get(t))).toJS();else e.requestBody=a}let b=k()({},e);b=r.buildRequest(b),n.setRequest(e.pathName,e.method,b);e.requestInterceptor=async t=>{let r=await f.apply(void 0,[t]),o=k()({},r);return n.setMutatedRequest(e.pathName,e.method,o),r},e.responseInterceptor=h;const w=O()();return r.execute(e).then((t=>{t.duration=O()()-w,n.setResponse(e.pathName,e.method,t)})).catch((t=>{"Failed to fetch"===t.message&&(t.name="",t.message='**Failed to fetch.** \n**Possible Reasons:** \n - CORS \n - Network Failure \n - URL scheme must be "http" or "https" for CORS request.'),n.setResponse(e.pathName,e.method,{error:!0,err:(0,P.serializeError)(t)})}))},Oe=function(){let{path:e,method:t,...r}=arguments.length>0&&void 0!==arguments[0]?arguments[0]:{};return n=>{let{fn:{fetch:o},specSelectors:a,specActions:i}=n,s=a.specJsonWithResolvedSubtrees().toJS(),l=a.operationScheme(e,t),{requestContentType:u,responseContentType:c}=a.contentTypeValues([e,t]).toJS(),p=/xml/i.test(u),f=a.parameterValues([e,t],p).toJS();return i.executeRequest({...r,fetch:o,spec:s,pathName:e,method:t,parameters:f,requestContentType:u,scheme:l,responseContentType:c})}};function je(e,t){return{type:Y,payload:{path:e,method:t}}}function Ie(e,t){return{type:Q,payload:{path:e,method:t}}}function Ne(e,t,r){return{type:ne,payload:{scheme:e,path:t,method:r}}}},37038:(e,t,r)=>{"use strict";r.r(t),r.d(t,{default:()=>s});var n=r(20032),o=r(51228),a=r(33881),i=r(77508);function s(){return{statePlugins:{spec:{wrapActions:i,reducers:n.default,actions:o,selectors:a}}}}},20032:(e,t,r)=>{"use strict";r.r(t),r.d(t,{default:()=>d});var n=r(24282),o=r.n(n),a=r(97606),i=r.n(a),s=r(76986),l=r.n(s),u=r(43393),c=r(90242),p=r(27504),f=r(33881),h=r(51228);const d={[h.UPDATE_SPEC]:(e,t)=>"string"==typeof t.payload?e.set("spec",t.payload):e,[h.UPDATE_URL]:(e,t)=>e.set("url",t.payload+""),[h.UPDATE_JSON]:(e,t)=>e.set("json",(0,c.oG)(t.payload)),[h.UPDATE_RESOLVED]:(e,t)=>e.setIn(["resolved"],(0,c.oG)(t.payload)),[h.UPDATE_RESOLVED_SUBTREE]:(e,t)=>{const{value:r,path:n}=t.payload;return 
e.setIn(["resolvedSubtrees",...n],(0,c.oG)(r))},[h.UPDATE_PARAM]:(e,t)=>{let{payload:r}=t,{path:n,paramName:o,paramIn:a,param:i,value:s,isXml:l}=r,u=i?(0,c.V9)(i):`${a}.${o}`;const p=l?"value_xml":"value";return e.setIn(["meta","paths",...n,"parameters",u,p],s)},[h.UPDATE_EMPTY_PARAM_INCLUSION]:(e,t)=>{let{payload:r}=t,{pathMethod:n,paramName:o,paramIn:a,includeEmptyValue:i}=r;if(!o||!a)return console.warn("Warning: UPDATE_EMPTY_PARAM_INCLUSION could not generate a paramKey."),e;const s=`${a}.${o}`;return e.setIn(["meta","paths",...n,"parameter_inclusions",s],i)},[h.VALIDATE_PARAMS]:(e,t)=>{let{payload:{pathMethod:r,isOAS3:n}}=t;const a=(0,f.specJsonWithResolvedSubtrees)(e).getIn(["paths",...r]),i=(0,f.parameterValues)(e,r).toJS();return e.updateIn(["meta","paths",...r,"parameters"],(0,u.fromJS)({}),(t=>{var s;return o()(s=a.get("parameters",(0,u.List)())).call(s,((t,o)=>{const a=(0,c.cz)(o,i),s=(0,f.parameterInclusionSettingFor)(e,r,o.get("name"),o.get("in")),l=(0,c.Ik)(o,a,{bypassRequiredCheck:s,isOAS3:n});return t.setIn([(0,c.V9)(o),"errors"],(0,u.fromJS)(l))}),t)}))},[h.CLEAR_VALIDATE_PARAMS]:(e,t)=>{let{payload:{pathMethod:r}}=t;return e.updateIn(["meta","paths",...r,"parameters"],(0,u.fromJS)([]),(e=>i()(e).call(e,(e=>e.set("errors",(0,u.fromJS)([]))))))},[h.SET_RESPONSE]:(e,t)=>{let r,{payload:{res:n,path:o,method:a}}=t;r=n.error?l()({error:!0,name:n.err.name,message:n.err.message,statusCode:n.err.statusCode},n.err.response):n,r.headers=r.headers||{};let i=e.setIn(["responses",o,a],(0,c.oG)(r));return p.Z.Blob&&n.data instanceof p.Z.Blob&&(i=i.setIn(["responses",o,a,"text"],n.data)),i},[h.SET_REQUEST]:(e,t)=>{let{payload:{req:r,path:n,method:o}}=t;return e.setIn(["requests",n,o],(0,c.oG)(r))},[h.SET_MUTATED_REQUEST]:(e,t)=>{let{payload:{req:r,path:n,method:o}}=t;return e.setIn(["mutatedRequests",n,o],(0,c.oG)(r))},[h.UPDATE_OPERATION_META_VALUE]:(e,t)=>{let{payload:{path:r,value:n,key:o}}=t,a=["paths",...r],i=["meta","paths",...r];return e.getIn(["json",...a])||e.getIn(["resolved",...a])||e.getIn(["resolvedSubtrees",...a])?e.setIn([...i,o],(0,u.fromJS)(n)):e},[h.CLEAR_RESPONSE]:(e,t)=>{let{payload:{path:r,method:n}}=t;return e.deleteIn(["responses",r,n])},[h.CLEAR_REQUEST]:(e,t)=>{let{payload:{path:r,method:n}}=t;return e.deleteIn(["requests",r,n])},[h.SET_SCHEME]:(e,t)=>{let{payload:{scheme:r,path:n,method:o}}=t;return n&&o?e.setIn(["scheme",n,o],r):n||o?void 0:e.setIn(["scheme","_defaultScheme"],r)}}},33881:(e,t,r)=>{"use 
strict";r.r(t),r.d(t,{lastError:()=>O,url:()=>j,specStr:()=>I,specSource:()=>N,specJson:()=>T,specResolved:()=>P,specResolvedSubtree:()=>R,specJsonWithResolvedSubtrees:()=>D,spec:()=>L,isOAS3:()=>B,info:()=>F,externalDocs:()=>z,version:()=>U,semver:()=>q,paths:()=>V,operations:()=>$,consumes:()=>W,produces:()=>H,security:()=>J,securityDefinitions:()=>K,findDefinition:()=>G,definitions:()=>Z,basePath:()=>Y,host:()=>Q,schemes:()=>X,operationsWithRootInherited:()=>ee,tags:()=>te,tagDetails:()=>re,operationsWithTags:()=>ne,taggedOperations:()=>oe,responses:()=>ae,requests:()=>ie,mutatedRequests:()=>se,responseFor:()=>le,requestFor:()=>ue,mutatedRequestFor:()=>ce,allowTryItOutFor:()=>pe,parameterWithMetaByIdentity:()=>fe,parameterInclusionSettingFor:()=>he,parameterWithMeta:()=>de,operationWithMeta:()=>me,getParameter:()=>ge,hasHost:()=>ve,parameterValues:()=>ye,parametersIncludeIn:()=>be,parametersIncludeType:()=>we,contentTypeValues:()=>Ee,currentProducesFor:()=>xe,producesOptionsFor:()=>_e,consumesOptionsFor:()=>Se,operationScheme:()=>Ae,canExecuteScheme:()=>ke,validateBeforeExecute:()=>Ce,getOAS3RequiredRequestBodyContentType:()=>Oe,isMediaTypeSchemaPropertiesEqual:()=>je});var n=r(24278),o=r.n(n),a=r(86),i=r.n(a),s=r(11882),l=r.n(s),u=r(97606),c=r.n(u),p=r(14418),f=r.n(p),h=r(51679),d=r.n(h),m=r(24282),g=r.n(m),v=r(2578),y=r.n(v),b=r(92039),w=r.n(b),E=r(58309),x=r.n(E),_=r(20573),S=r(90242),A=r(43393);const k=["get","put","post","delete","options","head","patch","trace"],C=e=>e||(0,A.Map)(),O=(0,_.P1)(C,(e=>e.get("lastError"))),j=(0,_.P1)(C,(e=>e.get("url"))),I=(0,_.P1)(C,(e=>e.get("spec")||"")),N=(0,_.P1)(C,(e=>e.get("specSource")||"not-editor")),T=(0,_.P1)(C,(e=>e.get("json",(0,A.Map)()))),P=(0,_.P1)(C,(e=>e.get("resolved",(0,A.Map)()))),R=(e,t)=>e.getIn(["resolvedSubtrees",...t],void 0),M=(e,t)=>A.Map.isMap(e)&&A.Map.isMap(t)?t.get("$$ref")?t:(0,A.OrderedMap)().mergeWith(M,e,t):t,D=(0,_.P1)(C,(e=>(0,A.OrderedMap)().mergeWith(M,e.get("json"),e.get("resolvedSubtrees")))),L=e=>T(e),B=(0,_.P1)(L,(()=>!1)),F=(0,_.P1)(L,(e=>Ie(e&&e.get("info")))),z=(0,_.P1)(L,(e=>Ie(e&&e.get("externalDocs")))),U=(0,_.P1)(F,(e=>e&&e.get("version"))),q=(0,_.P1)(U,(e=>{var t;return o()(t=/v?([0-9]*)\.([0-9]*)\.([0-9]*)/i.exec(e)).call(t,1)})),V=(0,_.P1)(D,(e=>e.get("paths"))),$=(0,_.P1)(V,(e=>{if(!e||e.size<1)return(0,A.List)();let t=(0,A.List)();return e&&i()(e)?(i()(e).call(e,((e,r)=>{if(!e||!i()(e))return{};i()(e).call(e,((e,n)=>{l()(k).call(k,n)<0||(t=t.push((0,A.fromJS)({path:r,method:n,operation:e,id:`${n}-${r}`})))}))})),t):(0,A.List)()})),W=(0,_.P1)(L,(e=>(0,A.Set)(e.get("consumes")))),H=(0,_.P1)(L,(e=>(0,A.Set)(e.get("produces")))),J=(0,_.P1)(L,(e=>e.get("security",(0,A.List)()))),K=(0,_.P1)(L,(e=>e.get("securityDefinitions"))),G=(e,t)=>{const r=e.getIn(["resolvedSubtrees","definitions",t],null),n=e.getIn(["json","definitions",t],null);return r||n||null},Z=(0,_.P1)(L,(e=>{const t=e.get("definitions");return A.Map.isMap(t)?t:(0,A.Map)()})),Y=(0,_.P1)(L,(e=>e.get("basePath"))),Q=(0,_.P1)(L,(e=>e.get("host"))),X=(0,_.P1)(L,(e=>e.get("schemes",(0,A.Map)()))),ee=(0,_.P1)($,W,H,((e,t,r)=>c()(e).call(e,(e=>e.update("operation",(e=>{if(e){if(!A.Map.isMap(e))return;return e.withMutations((e=>(e.get("consumes")||e.update("consumes",(e=>(0,A.Set)(e).merge(t))),e.get("produces")||e.update("produces",(e=>(0,A.Set)(e).merge(r))),e)))}return(0,A.Map)()})))))),te=(0,_.P1)(L,(e=>{const t=e.get("tags",(0,A.List)());return A.List.isList(t)?f()(t).call(t,(e=>A.Map.isMap(e))):(0,A.List)()})),re=(e,t)=>{var r;let 
n=te(e)||(0,A.List)();return d()(r=f()(n).call(n,A.Map.isMap)).call(r,(e=>e.get("name")===t),(0,A.Map)())},ne=(0,_.P1)(ee,te,((e,t)=>g()(e).call(e,((e,t)=>{let r=(0,A.Set)(t.getIn(["operation","tags"]));return r.count()<1?e.update("default",(0,A.List)(),(e=>e.push(t))):g()(r).call(r,((e,r)=>e.update(r,(0,A.List)(),(e=>e.push(t)))),e)}),g()(t).call(t,((e,t)=>e.set(t.get("name"),(0,A.List)())),(0,A.OrderedMap)())))),oe=e=>t=>{var r;let{getConfigs:n}=t,{tagsSorter:o,operationsSorter:a}=n();return c()(r=ne(e).sortBy(((e,t)=>t),((e,t)=>{let r="function"==typeof o?o:S.wh.tagsSorter[o];return r?r(e,t):null}))).call(r,((t,r)=>{let n="function"==typeof a?a:S.wh.operationsSorter[a],o=n?y()(t).call(t,n):t;return(0,A.Map)({tagDetails:re(e,r),operations:o})}))},ae=(0,_.P1)(C,(e=>e.get("responses",(0,A.Map)()))),ie=(0,_.P1)(C,(e=>e.get("requests",(0,A.Map)()))),se=(0,_.P1)(C,(e=>e.get("mutatedRequests",(0,A.Map)()))),le=(e,t,r)=>ae(e).getIn([t,r],null),ue=(e,t,r)=>ie(e).getIn([t,r],null),ce=(e,t,r)=>se(e).getIn([t,r],null),pe=()=>!0,fe=(e,t,r)=>{const n=D(e).getIn(["paths",...t,"parameters"],(0,A.OrderedMap)()),o=e.getIn(["meta","paths",...t,"parameters"],(0,A.OrderedMap)()),a=c()(n).call(n,(e=>{const t=o.get(`${r.get("in")}.${r.get("name")}`),n=o.get(`${r.get("in")}.${r.get("name")}.hash-${r.hashCode()}`);return(0,A.OrderedMap)().merge(e,t,n)}));return d()(a).call(a,(e=>e.get("in")===r.get("in")&&e.get("name")===r.get("name")),(0,A.OrderedMap)())},he=(e,t,r,n)=>{const o=`${n}.${r}`;return e.getIn(["meta","paths",...t,"parameter_inclusions",o],!1)},de=(e,t,r,n)=>{const o=D(e).getIn(["paths",...t,"parameters"],(0,A.OrderedMap)()),a=d()(o).call(o,(e=>e.get("in")===n&&e.get("name")===r),(0,A.OrderedMap)());return fe(e,t,a)},me=(e,t,r)=>{var n;const o=D(e).getIn(["paths",t,r],(0,A.OrderedMap)()),a=e.getIn(["meta","paths",t,r],(0,A.OrderedMap)()),i=c()(n=o.get("parameters",(0,A.List)())).call(n,(n=>fe(e,[t,r],n)));return(0,A.OrderedMap)().merge(o,a).set("parameters",i)};function ge(e,t,r,n){t=t||[];let o=e.getIn(["meta","paths",...t,"parameters"],(0,A.fromJS)([]));return d()(o).call(o,(e=>A.Map.isMap(e)&&e.get("name")===r&&e.get("in")===n))||(0,A.Map)()}const ve=(0,_.P1)(L,(e=>{const t=e.get("host");return"string"==typeof t&&t.length>0&&"/"!==t[0]}));function ye(e,t,r){t=t||[];let n=me(e,...t).get("parameters",(0,A.List)());return g()(n).call(n,((e,t)=>{let n=r&&"body"===t.get("in")?t.get("value_xml"):t.get("value");return e.set((0,S.V9)(t,{allowHashes:!1}),n)}),(0,A.fromJS)({}))}function be(e){let t=arguments.length>1&&void 0!==arguments[1]?arguments[1]:"";if(A.List.isList(e))return w()(e).call(e,(e=>A.Map.isMap(e)&&e.get("in")===t))}function we(e){let t=arguments.length>1&&void 0!==arguments[1]?arguments[1]:"";if(A.List.isList(e))return w()(e).call(e,(e=>A.Map.isMap(e)&&e.get("type")===t))}function Ee(e,t){t=t||[];let r=D(e).getIn(["paths",...t],(0,A.fromJS)({})),n=e.getIn(["meta","paths",...t],(0,A.fromJS)({})),o=xe(e,t);const a=r.get("parameters")||new A.List,i=n.get("consumes_value")?n.get("consumes_value"):we(a,"file")?"multipart/form-data":we(a,"formData")?"application/x-www-form-urlencoded":void 0;return(0,A.fromJS)({requestContentType:i,responseContentType:o})}function xe(e,t){t=t||[];const r=D(e).getIn(["paths",...t],null);if(null===r)return;const n=e.getIn(["meta","paths",...t,"produces_value"],null),o=r.getIn(["produces",0],null);return n||o||"application/json"}function _e(e,t){t=t||[];const 
r=D(e),n=r.getIn(["paths",...t],null);if(null===n)return;const[o]=t,a=n.get("produces",null),i=r.getIn(["paths",o,"produces"],null),s=r.getIn(["produces"],null);return a||i||s}function Se(e,t){t=t||[];const r=D(e),n=r.getIn(["paths",...t],null);if(null===n)return;const[o]=t,a=n.get("consumes",null),i=r.getIn(["paths",o,"consumes"],null),s=r.getIn(["consumes"],null);return a||i||s}const Ae=(e,t,r)=>{let n=e.get("url").match(/^([a-z][a-z0-9+\-.]*):/),o=x()(n)?n[1]:null;return e.getIn(["scheme",t,r])||e.getIn(["scheme","_defaultScheme"])||o||""},ke=(e,t,r)=>{var n;return l()(n=["http","https"]).call(n,Ae(e,t,r))>-1},Ce=(e,t)=>{t=t||[];let r=e.getIn(["meta","paths",...t,"parameters"],(0,A.fromJS)([])),n=!0;return i()(r).call(r,(e=>{let t=e.get("errors");t&&t.count()&&(n=!1)})),n},Oe=(e,t)=>{var r;let n={requestBody:!1,requestContentType:{}},o=e.getIn(["resolvedSubtrees","paths",...t,"requestBody"],(0,A.fromJS)([]));return o.size<1||(o.getIn(["required"])&&(n.requestBody=o.getIn(["required"])),i()(r=o.getIn(["content"]).entrySeq()).call(r,(e=>{const t=e[0];if(e[1].getIn(["schema","required"])){const r=e[1].getIn(["schema","required"]).toJS();n.requestContentType[t]=r}}))),n},je=(e,t,r,n)=>{if((r||n)&&r===n)return!0;let o=e.getIn(["resolvedSubtrees","paths",...t,"requestBody","content"],(0,A.fromJS)([]));if(o.size<2||!r||!n)return!1;let a=o.getIn([r,"schema","properties"],(0,A.fromJS)([])),i=o.getIn([n,"schema","properties"],(0,A.fromJS)([]));return!!a.equals(i)};function Ie(e){return A.Map.isMap(e)?e:new A.Map}},77508:(e,t,r)=>{"use strict";r.r(t),r.d(t,{updateSpec:()=>u,updateJsonSpec:()=>c,executeRequest:()=>p,validateParams:()=>f});var n=r(28222),o=r.n(n),a=r(86),i=r.n(a),s=r(27361),l=r.n(s);const u=(e,t)=>{let{specActions:r}=t;return function(){e(...arguments),r.parseToJson(...arguments)}},c=(e,t)=>{let{specActions:r}=t;return function(){for(var t=arguments.length,n=new Array(t),a=0;a{l()(u,[e]).$ref&&r.requestResolvedSubtree(["paths",e])})),r.requestResolvedSubtree(["components","securitySchemes"])}},p=(e,t)=>{let{specActions:r}=t;return t=>(r.logRequest(t),e(t))},f=(e,t)=>{let{specSelectors:r}=t;return t=>e(t,r.isOAS3())}},34852:(e,t,r)=>{"use strict";r.r(t),r.d(t,{loaded:()=>n});const n=(e,t)=>function(){e(...arguments);const r=t.getConfigs().withCredentials;void 0!==r&&(t.fn.fetch.withCredentials="string"==typeof r?"true"===r:!!r)}},48792:(e,t,r)=>{"use strict";r.r(t),r.d(t,{default:()=>qr});var n={};r.r(n),r.d(n,{JsonPatchError:()=>Fe,_areEquals:()=>Ge,applyOperation:()=>$e,applyPatch:()=>We,applyReducer:()=>He,deepClone:()=>ze,getValueByPointer:()=>Ve,validate:()=>Ke,validator:()=>Je});var o={};r.r(o),r.d(o,{compare:()=>nt,generate:()=>tt,observe:()=>et,unobserve:()=>Xe});var a={};r.r(a),r.d(a,{cookie:()=>kr,header:()=>Ar,path:()=>xr,query:()=>_r});var i=r(80093),s=r.n(i),l=r(30222),u=r.n(l),c=r(36594),p=r.n(c),f=r(20474),h=r.n(f),d=r(67375),m=r.n(d),g=r(58118),v=r.n(g),y=r(74386),b=r.n(y),w=r(25110),E=r.n(w),x=r(35627),_=r.n(x),S=r(97606),A=r.n(S),k=r(28222),C=r.n(k),O=r(39022),j=r.n(O),I=r(2018),N=r.n(I),T=r(14418),P=r.n(T),R=(r(31905),r(92495)),M=r.n(R),D=r(1272);const L="undefined"!=typeof globalThis?globalThis:"undefined"!=typeof self?self:window,{FormData:B,Blob:F,File:z}=L;var U=r(15687),q=r.n(U),V=r(24278),$=r.n(V),W=function(e){return":/?#[]@!$&'()*+,;=".indexOf(e)>-1},H=function(e){return/^[a-z0-9\-._~]+$/i.test(e)};function J(e){var t,r=arguments.length>1&&void 0!==arguments[1]?arguments[1]:{},n=r.escape,o=arguments.length>2?arguments[2]:void 0;return"number"==typeof 
e&&(e=e.toString()),"string"==typeof e&&e.length&&n?o?JSON.parse(e):A()(t=q()(e)).call(t,(function(e){var t,r;if(H(e))return e;if(W(e)&&"unsafe"===n)return e;var o=new TextEncoder;return A()(t=A()(r=E()(o.encode(e))).call(r,(function(e){var t;return $()(t="0".concat(e.toString(16).toUpperCase())).call(t,-2)}))).call(t,(function(e){return"%".concat(e)})).join("")})).join(""):e}function K(e){var t=e.value;return Array.isArray(t)?function(e){var t=e.key,r=e.value,n=e.style,o=e.explode,a=e.escape,i=function(e){return J(e,{escape:a})};if("simple"===n)return A()(r).call(r,(function(e){return i(e)})).join(",");if("label"===n)return".".concat(A()(r).call(r,(function(e){return i(e)})).join("."));if("matrix"===n)return A()(r).call(r,(function(e){return i(e)})).reduce((function(e,r){var n,a,i;return!e||o?j()(a=j()(i="".concat(e||"",";")).call(i,t,"=")).call(a,r):j()(n="".concat(e,",")).call(n,r)}),"");if("form"===n){var s=o?"&".concat(t,"="):",";return A()(r).call(r,(function(e){return i(e)})).join(s)}if("spaceDelimited"===n){var l=o?"".concat(t,"="):"";return A()(r).call(r,(function(e){return i(e)})).join(" ".concat(l))}if("pipeDelimited"===n){var u=o?"".concat(t,"="):"";return A()(r).call(r,(function(e){return i(e)})).join("|".concat(u))}return}(e):"object"===h()(t)?function(e){var t=e.key,r=e.value,n=e.style,o=e.explode,a=e.escape,i=function(e){return J(e,{escape:a})},s=C()(r);if("simple"===n)return s.reduce((function(e,t){var n,a,s,l=i(r[t]),u=o?"=":",",c=e?"".concat(e,","):"";return j()(n=j()(a=j()(s="".concat(c)).call(s,t)).call(a,u)).call(n,l)}),"");if("label"===n)return s.reduce((function(e,t){var n,a,s,l=i(r[t]),u=o?"=":".",c=e?"".concat(e,"."):".";return j()(n=j()(a=j()(s="".concat(c)).call(s,t)).call(a,u)).call(n,l)}),"");if("matrix"===n&&o)return s.reduce((function(e,t){var n,o,a=i(r[t]),s=e?"".concat(e,";"):";";return j()(n=j()(o="".concat(s)).call(o,t,"=")).call(n,a)}),"");if("matrix"===n)return s.reduce((function(e,n){var o,a,s=i(r[n]),l=e?"".concat(e,","):";".concat(t,"=");return j()(o=j()(a="".concat(l)).call(a,n,",")).call(o,s)}),"");if("form"===n)return s.reduce((function(e,t){var n,a,s,l,u=i(r[t]),c=e?j()(n="".concat(e)).call(n,o?"&":","):"",p=o?"=":",";return j()(a=j()(s=j()(l="".concat(c)).call(l,t)).call(s,p)).call(a,u)}),"");return}(e):function(e){var t,r=e.key,n=e.value,o=e.style,a=e.escape,i=function(e){return J(e,{escape:a})};if("simple"===o)return i(n);if("label"===o)return".".concat(i(n));if("matrix"===o)return j()(t=";".concat(r,"=")).call(t,i(n));if("form"===o)return i(n);if("deepObject"===o)return i(n,{},!0);return}(e)}const G=function(e,t){t.body=e};var Z={serializeRes:te,mergeInQueryOrForm:fe};function Y(e){return Q.apply(this,arguments)}function Q(){return Q=s()(u().mark((function e(t){var r,n,o,a,i,s=arguments;return u().wrap((function(e){for(;;)switch(e.prev=e.next){case 0:if(r=s.length>1&&void 0!==s[1]?s[1]:{},"object"===h()(t)&&(t=(r=t).url),r.headers=r.headers||{},Z.mergeInQueryOrForm(r),r.headers&&C()(r.headers).forEach((function(e){var t=r.headers[e];"string"==typeof t&&(r.headers[e]=t.replace(/\n+/g," "))})),!r.requestInterceptor){e.next=12;break}return e.next=8,r.requestInterceptor(r);case 8:if(e.t0=e.sent,e.t0){e.next=11;break}e.t0=r;case 11:r=e.t0;case 12:return n=r.headers["content-type"]||r.headers["Content-Type"],/multipart\/form-data/i.test(n)&&r.body instanceof B&&(delete r.headers["content-type"],delete r.headers["Content-Type"]),e.prev=14,e.next=17,(r.userFetch||fetch)(r.url,r);case 17:return o=e.sent,e.next=20,Z.serializeRes(o,t,r);case 
20:if(o=e.sent,!r.responseInterceptor){e.next=28;break}return e.next=24,r.responseInterceptor(o);case 24:if(e.t1=e.sent,e.t1){e.next=27;break}e.t1=o;case 27:o=e.t1;case 28:e.next=39;break;case 30:if(e.prev=30,e.t2=e.catch(14),o){e.next=34;break}throw e.t2;case 34:throw(a=new Error(o.statusText||"response status is ".concat(o.status))).status=o.status,a.statusCode=o.status,a.responseError=e.t2,a;case 39:if(o.ok){e.next=45;break}throw(i=new Error(o.statusText||"response status is ".concat(o.status))).status=o.status,i.statusCode=o.status,i.response=o,i;case 45:return e.abrupt("return",o);case 46:case"end":return e.stop()}}),e,null,[[14,30]])}))),Q.apply(this,arguments)}var X=function(){var e=arguments.length>0&&void 0!==arguments[0]?arguments[0]:"";return/(json|xml|yaml|text)\b/.test(e)};function ee(e,t){return t&&(0===t.indexOf("application/json")||t.indexOf("+json")>0)?JSON.parse(e):D.ZP.load(e)}function te(e,t){var r=arguments.length>2&&void 0!==arguments[2]?arguments[2]:{},n=r.loadSpec,o=void 0!==n&&n,a={ok:e.ok,url:e.url||t,status:e.status,statusText:e.statusText,headers:ne(e.headers)},i=a.headers["content-type"],s=o||X(i),l=s?e.text:e.blob||e.buffer;return l.call(e).then((function(e){if(a.text=e,a.data=e,s)try{var t=ee(e,i);a.body=t,a.obj=t}catch(e){a.parseError=e}return a}))}function re(e){return v()(e).call(e,", ")?e.split(", "):e}function ne(){var e=arguments.length>0&&void 0!==arguments[0]?arguments[0]:{};return"function"!=typeof b()(e)?{}:E()(b()(e).call(e)).reduce((function(e,t){var r=m()(t,2),n=r[0],o=r[1];return e[n]=re(o),e}),{})}function oe(e,t){return t||"undefined"==typeof navigator||(t=navigator),t&&"ReactNative"===t.product?!(!e||"object"!==h()(e)||"string"!=typeof e.uri):void 0!==z&&e instanceof z||(void 0!==F&&e instanceof F||(!!ArrayBuffer.isView(e)||null!==e&&"object"===h()(e)&&"function"==typeof e.pipe))}function ae(e,t){return Array.isArray(e)&&e.some((function(e){return oe(e,t)}))}var ie={form:",",spaceDelimited:"%20",pipeDelimited:"|"},se={csv:",",ssv:"%20",tsv:"%09",pipes:"|"};function le(e,t){var r=arguments.length>2&&void 0!==arguments[2]&&arguments[2],n=t.collectionFormat,o=t.allowEmptyValue,a=t.serializationOption,i=t.encoding,s="object"!==h()(t)||Array.isArray(t)?t:t.value,l=r?function(e){return e.toString()}:function(e){return encodeURIComponent(e)},u=l(e);if(void 0===s&&o)return[[u,""]];if(oe(s)||ae(s))return[[u,s]];if(a)return ue(e,s,r,a);if(i){if([h()(i.style),h()(i.explode),h()(i.allowReserved)].some((function(e){return"undefined"!==e}))){var c=i.style,p=i.explode,f=i.allowReserved;return ue(e,s,r,{style:c,explode:p,allowReserved:f})}if(i.contentType){if("application/json"===i.contentType){var d="string"==typeof s?s:_()(s);return[[u,l(d)]]}return[[u,l(s.toString())]]}return"object"!==h()(s)?[[u,l(s)]]:Array.isArray(s)&&s.every((function(e){return"object"!==h()(e)}))?[[u,A()(s).call(s,l).join(",")]]:[[u,l(_()(s))]]}return"object"!==h()(s)?[[u,l(s)]]:Array.isArray(s)?"multi"===n?[[u,A()(s).call(s,l)]]:[[u,A()(s).call(s,l).join(se[n||"csv"])]]:[[u,""]]}function ue(e,t,r,n){var o,a,i,s=n.style||"form",l=void 0===n.explode?"form"===s:n.explode,u=!r&&(n&&n.allowReserved?"unsafe":"reserved"),c=function(e){return J(e,{escape:u})},p=r?function(e){return e}:function(e){return J(e,{escape:u})};return"object"!==h()(t)?[[p(e),c(t)]]:Array.isArray(t)?l?[[p(e),A()(t).call(t,c)]]:[[p(e),A()(t).call(t,c).join(ie[s])]]:"deepObject"===s?A()(a=C()(t)).call(a,(function(r){var 
n;return[p(j()(n="".concat(e,"[")).call(n,r,"]")),c(t[r])]})):l?A()(i=C()(t)).call(i,(function(e){return[p(e),c(t[e])]})):[[p(e),A()(o=C()(t)).call(o,(function(e){var r;return[j()(r="".concat(p(e),",")).call(r,c(t[e]))]})).join(",")]]}function ce(e){return N()(e).reduce((function(e,t){var r,n=m()(t,2),o=n[0],a=n[1],i=p()(le(o,a,!0));try{for(i.s();!(r=i.n()).done;){var s=m()(r.value,2),l=s[0],u=s[1];if(Array.isArray(u)){var c,f=p()(u);try{for(f.s();!(c=f.n()).done;){var h=c.value;if(ArrayBuffer.isView(h)){var d=new F([h]);e.append(l,d)}else e.append(l,h)}}catch(e){f.e(e)}finally{f.f()}}else if(ArrayBuffer.isView(u)){var g=new F([u]);e.append(l,g)}else e.append(l,u)}}catch(e){i.e(e)}finally{i.f()}return e}),new B)}function pe(e){var t=C()(e).reduce((function(t,r){var n,o=p()(le(r,e[r]));try{for(o.s();!(n=o.n()).done;){var a=m()(n.value,2),i=a[0],s=a[1];t[i]=s}}catch(e){o.e(e)}finally{o.f()}return t}),{});return M().stringify(t,{encode:!1,indices:!1})||""}function fe(){var e=arguments.length>0&&void 0!==arguments[0]?arguments[0]:{},t=e.url,r=void 0===t?"":t,n=e.query,o=e.form,a=function(){for(var e=arguments.length,t=new Array(e),r=0;r=48&&t<=57))return!1;r++}return!0}function Re(e){return-1===e.indexOf("/")&&-1===e.indexOf("~")?e:e.replace(/~/g,"~0").replace(/\//g,"~1")}function Me(e){return e.replace(/~1/g,"/").replace(/~0/g,"~")}function De(e){if(void 0===e)return!0;if(e)if(Array.isArray(e)){for(var t=0,r=e.length;t0&&"constructor"==s[u-1]))throw new TypeError("JSON-Patch: modifying `__proto__` or `constructor/prototype` prop is banned for security reasons, if this was on purpose, please set `banPrototypeModifications` flag false and pass it to this function. More info in fast-json-patch README");if(r&&void 0===p&&(void 0===l[f]?p=s.slice(0,u).join("/"):u==c-1&&(p=t.path),void 0!==p&&h(t,0,e,p)),u++,Array.isArray(l)){if("-"===f)f=l.length;else{if(r&&!Pe(f))throw new Fe("Expected an unsigned base-10 integer value, making the new referenced value the array element with the zero-based index","OPERATION_PATH_ILLEGAL_ARRAY_INDEX",a,t,e);Pe(f)&&(f=~~f)}if(u>=c){if(r&&"add"===t.op&&f>l.length)throw new Fe("The specified index MUST NOT be greater than the number of elements in the array","OPERATION_VALUE_OUT_OF_BOUNDS",a,t,e);if(!1===(i=qe[t.op].call(t,l,f,e)).test)throw new Fe("Test operation failed","TEST_OPERATION_FAILED",a,t,e);return i}}else if(u>=c){if(!1===(i=Ue[t.op].call(t,l,f,e)).test)throw new Fe("Test operation failed","TEST_OPERATION_FAILED",a,t,e);return i}if(l=l[f],r&&u0)throw new Fe('Operation `path` property must start with "/"',"OPERATION_PATH_INVALID",t,e,r);if(("move"===e.op||"copy"===e.op)&&"string"!=typeof e.from)throw new Fe("Operation `from` property is not present (applicable in `move` and `copy` operations)","OPERATION_FROM_REQUIRED",t,e,r);if(("add"===e.op||"replace"===e.op||"test"===e.op)&&void 0===e.value)throw new Fe("Operation `value` property is not present (applicable in `add`, `replace` and `test` operations)","OPERATION_VALUE_REQUIRED",t,e,r);if(("add"===e.op||"replace"===e.op||"test"===e.op)&&De(e.value))throw new Fe("Operation `value` property is not present (applicable in `add`, `replace` and `test` operations)","OPERATION_VALUE_CANNOT_CONTAIN_UNDEFINED",t,e,r);if(r)if("add"==e.op){var o=e.path.split("/").length,a=n.split("/").length;if(o!==a+1&&o!==a)throw new Fe("Cannot perform an `add` operation at the desired path","OPERATION_PATH_CANNOT_ADD",t,e,r)}else if("replace"===e.op||"remove"===e.op||"_get"===e.op){if(e.path!==n)throw new Fe("Cannot perform the 
operation at a path that does not exist","OPERATION_PATH_UNRESOLVABLE",t,e,r)}else if("move"===e.op||"copy"===e.op){var i=Ke([{op:"_get",path:e.from,value:void 0}],r);if(i&&"OPERATION_PATH_UNRESOLVABLE"===i.name)throw new Fe("Cannot perform the operation from a path that does not exist","OPERATION_FROM_UNRESOLVABLE",t,e,r)}}function Ke(e,t,r){try{if(!Array.isArray(e))throw new Fe("Patch sequence must be an array","SEQUENCE_NOT_AN_ARRAY");if(t)We(Te(t),Te(e),r||!0);else{r=r||Je;for(var n=0;n0&&(e.patches=[],e.callback&&e.callback(n)),n}function rt(e,t,r,n,o){if(t!==e){"function"==typeof t.toJSON&&(t=t.toJSON());for(var a=Ne(t),i=Ne(e),s=!1,l=i.length-1;l>=0;l--){var u=e[p=i[l]];if(!Ie(t,p)||void 0===t[p]&&void 0!==u&&!1===Array.isArray(t))Array.isArray(e)===Array.isArray(t)?(o&&r.push({op:"test",path:n+"/"+Re(p),value:Te(u)}),r.push({op:"remove",path:n+"/"+Re(p)}),s=!0):(o&&r.push({op:"test",path:n,value:e}),r.push({op:"replace",path:n,value:t}),!0);else{var c=t[p];"object"==typeof u&&null!=u&&"object"==typeof c&&null!=c&&Array.isArray(u)===Array.isArray(c)?rt(u,c,r,n+"/"+Re(p),o):u!==c&&(!0,o&&r.push({op:"test",path:n+"/"+Re(p),value:Te(u)}),r.push({op:"replace",path:n+"/"+Re(p),value:Te(c)}))}}if(s||a.length!=i.length)for(l=0;l0){var o=t(e,r[r.length-1],r);o&&(n=j()(n).call(n,o))}if(Array.isArray(e)){var a=A()(e).call(e,(function(e,n){return pt(e,t,j()(r).call(r,n))}));a&&(n=j()(n).call(n,a))}else if(mt(e)){var i,s=A()(i=C()(e)).call(i,(function(n){return pt(e[n],t,j()(r).call(r,n))}));s&&(n=j()(n).call(n,s))}return n=ht(n)}function ft(e){return Array.isArray(e)?e:[e]}function ht(e){var t;return j()(t=[]).apply(t,q()(A()(e).call(e,(function(e){return Array.isArray(e)?ht(e):e}))))}function dt(e){return P()(e).call(e,(function(e){return void 0!==e}))}function mt(e){return e&&"object"===h()(e)}function gt(e){return e&&"function"==typeof e}function vt(e){if(wt(e)){var t=e.op;return"add"===t||"remove"===t||"replace"===t}return!1}function yt(e){return vt(e)||wt(e)&&"mutation"===e.type}function bt(e){return yt(e)&&("add"===e.op||"replace"===e.op||"merge"===e.op||"mergeDeep"===e.op)}function wt(e){return e&&"object"===h()(e)}function Et(e,t){try{return Ve(e,t)}catch(e){return console.error(e),{}}}var xt=r(28886),_t=r.n(xt),St=r(37659),At=r.n(St),kt=r(8575);function Ct(e,t){function r(){Error.captureStackTrace?Error.captureStackTrace(this,this.constructor):this.stack=(new Error).stack;for(var e=arguments.length,r=new Array(e),n=0;n-1&&-1===Nt.indexOf(r)||Tt.indexOf(n)>-1||Pt.some((function(e){return n.indexOf(e)>-1}))}function Mt(e,t){var r,n=e.split("#"),o=m()(n,2),a=o[0],i=o[1],s=kt.resolve(a||"",t||"");return i?j()(r="".concat(s,"#")).call(r,i):s}var Dt="application/json, 
application/yaml",Lt=/^([a-z]+:\/\/|\/\/)/i,Bt=Ct("JSONRefError",(function(e,t,r){this.originalError=r,Ee()(this,t||{})})),Ft={},zt=new(_t()),Ut=[function(e){return"paths"===e[0]&&"responses"===e[3]&&"examples"===e[5]},function(e){return"paths"===e[0]&&"responses"===e[3]&&"content"===e[5]&&"example"===e[7]},function(e){return"paths"===e[0]&&"responses"===e[3]&&"content"===e[5]&&"examples"===e[7]&&"value"===e[9]},function(e){return"paths"===e[0]&&"requestBody"===e[3]&&"content"===e[4]&&"example"===e[6]},function(e){return"paths"===e[0]&&"requestBody"===e[3]&&"content"===e[4]&&"examples"===e[6]&&"value"===e[8]},function(e){return"paths"===e[0]&&"parameters"===e[2]&&"example"===e[4]},function(e){return"paths"===e[0]&&"parameters"===e[3]&&"example"===e[5]},function(e){return"paths"===e[0]&&"parameters"===e[2]&&"examples"===e[4]&&"value"===e[6]},function(e){return"paths"===e[0]&&"parameters"===e[3]&&"examples"===e[5]&&"value"===e[7]},function(e){return"paths"===e[0]&&"parameters"===e[2]&&"content"===e[4]&&"example"===e[6]},function(e){return"paths"===e[0]&&"parameters"===e[2]&&"content"===e[4]&&"examples"===e[6]&&"value"===e[8]},function(e){return"paths"===e[0]&&"parameters"===e[3]&&"content"===e[4]&&"example"===e[7]},function(e){return"paths"===e[0]&&"parameters"===e[3]&&"content"===e[5]&&"examples"===e[7]&&"value"===e[9]}],qt={key:"$ref",plugin:function(e,t,r,n){var o=n.getInstance(),a=$()(r).call(r,0,-1);if(!Rt(a)&&!function(e){return Ut.some((function(t){return t(e)}))}(a)){var i=n.getContext(r).baseDoc;if("string"!=typeof e)return new Bt("$ref: must be a string (JSON-Ref)",{$ref:e,baseDoc:i,fullPath:r});var s,l,u,c=Jt(e),p=c[0],f=c[1]||"";try{s=i||p?Wt(p,i):null}catch(t){return Ht(t,{pointer:f,$ref:e,basePath:s,fullPath:r})}if(function(e,t,r,n){var o,a,i=zt.get(n);i||(i={},zt.set(n,i));var s=function(e){if(0===e.length)return"";return"/".concat(A()(e).call(e,Xt).join("/"))}(r),l=j()(o="".concat(t||"","#")).call(o,e),u=s.replace(/allOf\/\d+\/?/g,""),c=n.contextTree.get([]).baseDoc;if(t===c&&er(u,e))return!0;var p="",f=r.some((function(e){var t;return p=j()(t="".concat(p,"/")).call(t,Xt(e)),i[p]&&i[p].some((function(e){return er(e,l)||er(l,e)}))}));if(f)return!0;return void(i[u]=j()(a=i[u]||[]).call(a,l))}(f,s,a,n)&&!o.useCircularStructures){var h=Mt(e,s);return e===h?null:it.replace(r,h)}if(null==s?(u=Yt(f),void 0===(l=n.get(u))&&(l=new Bt("Could not resolve reference: ".concat(e),{pointer:f,$ref:e,baseDoc:i,fullPath:r}))):l=null!=(l=Kt(s,f)).__value?l.__value:l.catch((function(t){throw Ht(t,{pointer:f,$ref:e,baseDoc:i,fullPath:r})})),l instanceof Error)return[it.remove(r),l];var d=Mt(e,s),m=it.replace(a,l,{$$ref:d});if(s&&s!==i)return[m,it.context(a,{baseDoc:s})];try{if(!function(e,t){var r=[e];return t.path.reduce((function(e,t){return r.push(e[t]),e[t]}),e),n(t.value);function n(e){return it.isObject(e)&&(r.indexOf(e)>=0||C()(e).some((function(t){return n(e[t])})))}}(n.state,m)||o.useCircularStructures)return m}catch(e){return null}}}},Vt=Ee()(qt,{docCache:Ft,absoluteify:Wt,clearCache:function(e){void 0!==e?delete Ft[e]:C()(Ft).forEach((function(e){delete Ft[e]}))},JSONRefError:Bt,wrapError:Ht,getDoc:Gt,split:Jt,extractFromDoc:Kt,fetchJSON:function(e){return fetch(e,{headers:{Accept:Dt},loadSpec:!0}).then((function(e){return e.text()})).then((function(e){return D.ZP.load(e)}))},extract:Zt,jsonPointerToArray:Yt,unescapeJsonPointerToken:Qt});const $t=Vt;function Wt(e,t){if(!Lt.test(e)){var r;if(!t)throw new Bt(j()(r="Tried to resolve a relative URL, without having a basePath. 
path: '".concat(e,"' basePath: '")).call(r,t,"'"));return kt.resolve(t,e)}return e}function Ht(e,t){var r,n;e&&e.response&&e.response.body?r=j()(n="".concat(e.response.body.code," ")).call(n,e.response.body.message):r=e.message;return new Bt("Could not resolve reference: ".concat(r),t,e)}function Jt(e){return(e+"").split("#")}function Kt(e,t){var r=Ft[e];if(r&&!it.isPromise(r))try{var n=Zt(t,r);return Ee()(Ae().resolve(n),{__value:n})}catch(e){return Ae().reject(e)}return Gt(e).then((function(e){return Zt(t,e)}))}function Gt(e){var t=Ft[e];return t?it.isPromise(t)?t:Ae().resolve(t):(Ft[e]=Vt.fetchJSON(e).then((function(t){return Ft[e]=t,t})),Ft[e])}function Zt(e,t){var r=Yt(e);if(r.length<1)return t;var n=it.getIn(t,r);if(void 0===n)throw new Bt("Could not resolve pointer: ".concat(e," does not exist in document"),{pointer:e});return n}function Yt(e){var t;if("string"!=typeof e)throw new TypeError("Expected a string, got a ".concat(h()(e)));return"/"===e[0]&&(e=e.substr(1)),""===e?[]:A()(t=e.split("/")).call(t,Qt)}function Qt(e){return"string"!=typeof e?e:new(At())("=".concat(e.replace(/~1/g,"/").replace(/~0/g,"~"))).get("")}function Xt(e){var t,r=new(At())([["",e.replace(/~/g,"~0").replace(/\//g,"~1")]]);return $()(t=r.toString()).call(t,1)}function er(e,t){if(!(r=t)||"/"===r||"#"===r)return!0;var r,n=e.charAt(t.length),o=$()(t).call(t,-1);return 0===e.indexOf(t)&&(!n||"/"===n||"#"===n)&&"#"!==o}const tr={key:"allOf",plugin:function(e,t,r,n,o){if(!o.meta||!o.meta.$$ref){var a=$()(r).call(r,0,-1);if(!Rt(a)){if(!Array.isArray(e)){var i=new TypeError("allOf must be an array");return i.fullPath=r,i}var s=!1,l=o.value;if(a.forEach((function(e){l&&(l=l[e])})),l=me()({},l),0!==C()(l).length){delete l.allOf;var u,c,p=[];if(p.push(n.replace(a,{})),e.forEach((function(e,t){if(!n.isObject(e)){if(s)return null;s=!0;var o=new TypeError("Elements in allOf must be objects");return o.fullPath=r,p.push(o)}p.push(n.mergeDeep(a,e));var i=function(e,t){var r=arguments.length>2&&void 0!==arguments[2]?arguments[2]:{},n=r.specmap,o=r.getBaseUrlForNodePath,a=void 0===o?function(e){var r;return n.getContext(j()(r=[]).call(r,q()(t),q()(e))).baseDoc}:o,i=r.targetKeys,s=void 0===i?["$ref","$$ref"]:i,l=[];return jt()(e).forEach((function(){if(v()(s).call(s,this.key)&&"string"==typeof this.node){var e=this.path,r=j()(t).call(t,this.path),o=Mt(this.node,a(e));l.push(n.replace(r,o))}})),l}(e,$()(r).call(r,0,-1),{getBaseUrlForNodePath:function(e){var o;return n.getContext(j()(o=[]).call(o,q()(r),[t],q()(e))).baseDoc},specmap:n});p.push.apply(p,q()(i))})),l.example)p.push(n.remove(j()(u=[]).call(u,a,"example")));if(p.push(n.mergeDeep(a,l)),!l.$$ref)p.push(n.remove(j()(c=[]).call(c,a,"$$ref")));return p}}}}},rr={key:"parameters",plugin:function(e,t,r,n){if(Array.isArray(e)&&e.length){var o=Ee()([],e),a=$()(r).call(r,0,-1),i=me()({},it.getIn(n.spec,a));return e.forEach((function(e,t){try{o[t].default=n.parameterMacro(i,e)}catch(e){var a=new Error(e);return a.fullPath=r,a}})),it.replace(r,o)}return it.replace(r,e)}},nr={key:"properties",plugin:function(e,t,r,n){var o=me()({},e);for(var a in e)try{o[a].default=n.modelPropertyMacro(o[a])}catch(e){var i=new Error(e);return i.fullPath=r,i}return it.replace(r,o)}};var or=function(){function e(t){ve()(this,e),this.root=ar(t||{})}return be()(e,[{key:"set",value:function(e,t){var r=this.getParent(e,!0);if(r){var n=e[e.length-1],o=r.children;o[n]?ir(o[n],t,r):o[n]=ar(t,r)}else ir(this.root,t,null)}},{key:"get",value:function(e){if((e=e||[]).length<1)return this.root.value;for(var 
t,r,n=this.root,o=0;o1?r-1:0),o=1;o1?n-1:0),a=1;a0}))}},{key:"nextPromisedPatch",value:function(){var e;if(this.promisedPatches.length>0)return Ae().race(A()(e=this.promisedPatches).call(e,(function(e){return e.value})))}},{key:"getPluginHistory",value:function(e){var t=this.constructor.getPluginName(e);return this.pluginHistory[t]||[]}},{key:"getPluginRunCount",value:function(e){return this.getPluginHistory(e).length}},{key:"getPluginHistoryTip",value:function(e){var t=this.getPluginHistory(e);return t&&t[t.length-1]||{}}},{key:"getPluginMutationIndex",value:function(e){var t=this.getPluginHistoryTip(e).mutationIndex;return"number"!=typeof t?-1:t}},{key:"updatePluginHistory",value:function(e,t){var r=this.constructor.getPluginName(e);this.pluginHistory[r]=this.pluginHistory[r]||[],this.pluginHistory[r].push(t)}},{key:"updatePatches",value:function(e){var t=this;it.normalizeArray(e).forEach((function(e){if(e instanceof Error)t.errors.push(e);else try{if(!it.isObject(e))return void t.debug("updatePatches","Got a non-object patch",e);if(t.showDebug&&t.allPatches.push(e),it.isPromise(e.value))return t.promisedPatches.push(e),void t.promisedPatchThen(e);if(it.isContextPatch(e))return void t.setContext(e.path,e.value);if(it.isMutation(e))return void t.updateMutations(e)}catch(e){console.error(e),t.errors.push(e)}}))}},{key:"updateMutations",value:function(e){"object"===h()(e.value)&&!Array.isArray(e.value)&&this.allowMetaPatches&&(e.value=me()({},e.value));var t=it.applyPatch(this.state,e,{allowMetaPatches:this.allowMetaPatches});t&&(this.mutations.push(e),this.state=t)}},{key:"removePromisedPatch",value:function(e){var t,r=this.promisedPatches.indexOf(e);r<0?this.debug("Tried to remove a promisedPatch that isn't there!"):Ce()(t=this.promisedPatches).call(t,r,1)}},{key:"promisedPatchThen",value:function(e){var t=this;return e.value=e.value.then((function(r){var n=me()(me()({},e),{},{value:r});t.removePromisedPatch(e),t.updatePatches(n)})).catch((function(r){t.removePromisedPatch(e),t.updatePatches(r)})),e.value}},{key:"getMutations",value:function(e,t){var r;return e=e||0,"number"!=typeof t&&(t=this.mutations.length),$()(r=this.mutations).call(r,e,t)}},{key:"getCurrentMutations",value:function(){return this.getMutationsForPlugin(this.getCurrentPlugin())}},{key:"getMutationsForPlugin",value:function(e){var t=this.getPluginMutationIndex(e);return this.getMutations(t+1)}},{key:"getCurrentPlugin",value:function(){return this.currentPlugin}},{key:"getLib",value:function(){return this.libMethods}},{key:"_get",value:function(e){return it.getIn(this.state,e)}},{key:"_getContext",value:function(e){return this.contextTree.get(e)}},{key:"setContext",value:function(e,t){return this.contextTree.set(e,t)}},{key:"_hasRun",value:function(e){return this.getPluginRunCount(this.getCurrentPlugin())>(e||0)}},{key:"dispatch",value:function(){var e,t=this,r=this,n=this.nextPlugin();if(!n){var o=this.nextPromisedPatch();if(o)return o.then((function(){return t.dispatch()})).catch((function(){return t.dispatch()}));var a={spec:this.state,errors:this.errors};return this.showDebug&&(a.patches=this.allPatches),Ae().resolve(a)}if(r.pluginCount=r.pluginCount||{},r.pluginCount[n]=(r.pluginCount[n]||0)+1,r.pluginCount[n]>100)return Ae().resolve({spec:r.state,errors:j()(e=r.errors).call(e,new Error("We've reached a hard limit of ".concat(100," plugin runs")))});if(n!==this.currentPlugin&&this.promisedPatches.length){var i,s=A()(i=this.promisedPatches).call(i,(function(e){return e.value}));return 
Ae().all(A()(s).call(s,(function(e){return e.then(sr,sr)}))).then((function(){return t.dispatch()}))}return function(){r.currentPlugin=n;var e=r.getCurrentMutations(),t=r.mutations.length-1;try{if(n.isGenerator){var o,a=p()(n(e,r.getLib()));try{for(a.s();!(o=a.n()).done;){l(o.value)}}catch(e){a.e(e)}finally{a.f()}}else{l(n(e,r.getLib()))}}catch(e){console.error(e),l([Ee()(Object.create(e),{plugin:n})])}finally{r.updatePluginHistory(n,{mutationIndex:t})}return r.dispatch()}();function l(e){e&&(e=it.fullyNormalizeArray(e),r.updatePatches(e,n))}}}],[{key:"getPluginName",value:function(e){return e.pluginName}},{key:"getPatchesOfType",value:function(e,t){return P()(e).call(e,t)}}]),e}();var ur={refs:$t,allOf:tr,parameters:rr,properties:nr},cr=r(23159);function pr(e){var t=arguments.length>1&&void 0!==arguments[1]?arguments[1]:{},r=t.requestInterceptor,n=t.responseInterceptor,o=e.withCredentials?"include":"same-origin";return function(t){return e({url:t,loadSpec:!0,requestInterceptor:r,responseInterceptor:n,headers:{Accept:Dt},credentials:o}).then((function(e){return e.body}))}}function fr(e){var t=e.fetch,r=e.spec,n=e.url,o=e.mode,a=e.allowMetaPatches,i=void 0===a||a,l=e.pathDiscriminator,c=e.modelPropertyMacro,p=e.parameterMacro,f=e.requestInterceptor,h=e.responseInterceptor,d=e.skipNormalization,m=e.useCircularStructures,g=e.http,v=e.baseDoc;return v=v||n,g=t||g||Y,r?y(r):pr(g,{requestInterceptor:f,responseInterceptor:h})(v).then(y);function y(e){v&&(ur.refs.docCache[v]=e),ur.refs.fetchJSON=pr(g,{requestInterceptor:f,responseInterceptor:h});var t,r=[ur.refs];return"function"==typeof p&&r.push(ur.parameters),"function"==typeof c&&r.push(ur.properties),"strict"!==o&&r.push(ur.allOf),(t={spec:e,context:{baseDoc:v},plugins:r,allowMetaPatches:i,pathDiscriminator:l,parameterMacro:p,modelPropertyMacro:c,useCircularStructures:m},new lr(t).dispatch()).then(d?function(){var e=s()(u().mark((function e(t){return u().wrap((function(e){for(;;)switch(e.prev=e.next){case 0:return e.abrupt("return",t);case 1:case"end":return e.stop()}}),e)})));return function(t){return e.apply(this,arguments)}}():cr.K1)}}var hr=r(88436),dr=r.n(hr),mr=r(27361),gr=r.n(mr),vr=r(30006);function yr(e){return"[object Object]"===Object.prototype.toString.call(e)}function br(e){var t,r;return!1!==yr(e)&&(void 0===(t=e.constructor)||!1!==yr(r=t.prototype)&&!1!==r.hasOwnProperty("isPrototypeOf"))}const wr={body:function(e){var t=e.req,r=e.value;t.body=r},header:function(e){var t=e.req,r=e.parameter,n=e.value;t.headers=t.headers||{},void 0!==n&&(t.headers[r.name]=n)},query:function(e){var t=e.req,r=e.value,n=e.parameter;t.query=t.query||{},!1===r&&"boolean"===n.type&&(r="false");0===r&&["number","integer"].indexOf(n.type)>-1&&(r="0");if(r)t.query[n.name]={collectionFormat:n.collectionFormat,value:r};else if(n.allowEmptyValue&&void 0!==r){var o=n.name;t.query[o]=t.query[o]||{},t.query[o].allowEmptyValue=!0}},path:function(e){var t=e.req,r=e.value,n=e.parameter;t.url=t.url.split("{".concat(n.name,"}")).join(encodeURIComponent(r))},formData:function(e){var t=e.req,r=e.value,n=e.parameter;(r||n.allowEmptyValue)&&(t.form=t.form||{},t.form[n.name]={value:r,allowEmptyValue:n.allowEmptyValue,collectionFormat:n.collectionFormat})}};function Er(e,t){return v()(t).call(t,"application/json")?"string"==typeof e?e:_()(e):e.toString()}function xr(e){var t=e.req,r=e.value,n=e.parameter,o=n.name,a=n.style,i=n.explode,s=n.content;if(s){var l=C()(s)[0];t.url=t.url.split("{".concat(o,"}")).join(J(Er(r,l),{escape:!0}))}else{var 
u=K({key:n.name,value:r,style:a||"simple",explode:i||!1,escape:!0});t.url=t.url.split("{".concat(o,"}")).join(u)}}function _r(e){var t=e.req,r=e.value,n=e.parameter;if(t.query=t.query||{},n.content){var o=C()(n.content)[0];t.query[n.name]=Er(r,o)}else if(!1===r&&(r="false"),0===r&&(r="0"),r){var a=n.style,i=n.explode,s=n.allowReserved;t.query[n.name]={value:r,serializationOption:{style:a,explode:i,allowReserved:s}}}else if(n.allowEmptyValue&&void 0!==r){var l=n.name;t.query[l]=t.query[l]||{},t.query[l].allowEmptyValue=!0}}var Sr=["accept","authorization","content-type"];function Ar(e){var t=e.req,r=e.parameter,n=e.value;if(t.headers=t.headers||{},!(Sr.indexOf(r.name.toLowerCase())>-1))if(r.content){var o=C()(r.content)[0];t.headers[r.name]=Er(n,o)}else void 0!==n&&(t.headers[r.name]=K({key:r.name,value:n,style:r.style||"simple",explode:void 0!==r.explode&&r.explode,escape:!1}))}function kr(e){var t=e.req,r=e.parameter,n=e.value;t.headers=t.headers||{};var o=h()(n);if(r.content){var a,i=C()(r.content)[0];t.headers.Cookie=j()(a="".concat(r.name,"=")).call(a,Er(n,i))}else if("undefined"!==o){var s="object"===o&&!Array.isArray(n)&&r.explode?"":"".concat(r.name,"=");t.headers.Cookie=s+K({key:r.name,value:n,escape:!1,style:r.style||"form",explode:void 0!==r.explode&&r.explode})}}var Cr=r(92381),Or=r.n(Cr);const jr=(void 0!==Or()?Or():"undefined"!=typeof self?self:window).btoa;function Ir(e,t){var r=e.operation,n=e.requestBody,o=e.securities,a=e.spec,i=e.attachContentTypeForEmptyPayload,s=e.requestContentType;t=function(e){var t=e.request,r=e.securities,n=void 0===r?{}:r,o=e.operation,a=void 0===o?{}:o,i=e.spec,s=me()({},t),l=n.authorized,u=void 0===l?{}:l,c=a.security||i.security||[],p=u&&!!C()(u).length,f=gr()(i,["components","securitySchemes"])||{};if(s.headers=s.headers||{},s.query=s.query||{},!C()(n).length||!p||!c||Array.isArray(a.security)&&!a.security.length)return t;return c.forEach((function(e){C()(e).forEach((function(e){var t=u[e],r=f[e];if(t){var n=t.value||t,o=r.type;if(t)if("apiKey"===o)"query"===r.in&&(s.query[r.name]=n),"header"===r.in&&(s.headers[r.name]=n),"cookie"===r.in&&(s.cookies[r.name]=n);else if("http"===o){if(/^basic$/i.test(r.scheme)){var a,i=n.username||"",l=n.password||"",c=jr(j()(a="".concat(i,":")).call(a,l));s.headers.Authorization="Basic ".concat(c)}/^bearer$/i.test(r.scheme)&&(s.headers.Authorization="Bearer ".concat(n))}else if("oauth2"===o||"openIdConnect"===o){var p,h=t.token||{},d=h[r["x-tokenName"]||"access_token"],m=h.token_type;m&&"bearer"!==m.toLowerCase()||(m="Bearer"),s.headers.Authorization=j()(p="".concat(m," ")).call(p,d)}}}))})),s}({request:t,securities:o,operation:r,spec:a});var l=r.requestBody||{},u=C()(l.content||{}),c=s&&u.indexOf(s)>-1;if(n||i){if(s&&c)t.headers["Content-Type"]=s;else if(!s){var p=u[0];p&&(t.headers["Content-Type"]=p,s=p)}}else s&&c&&(t.headers["Content-Type"]=s);if(!e.responseContentType&&r.responses){var f,d=P()(f=N()(r.responses)).call(f,(function(e){var t=m()(e,2),r=t[0],n=t[1],o=parseInt(r,10);return o>=200&&o<300&&br(n.content)})).reduce((function(e,t){var r=m()(t,2)[1];return j()(e).call(e,C()(r.content))}),[]);d.length>0&&(t.headers.accept=d.join(", "))}if(n)if(s){if(u.indexOf(s)>-1)if("application/x-www-form-urlencoded"===s||"multipart/form-data"===s)if("object"===h()(n)){var g=(l.content[s]||{}).encoding||{};t.form={},C()(n).forEach((function(e){t.form[e]={value:n[e],encoding:g[e]||{}}}))}else t.form=n;else t.body=n}else t.body=n;return t}function Nr(e,t){var 
r,n,o=e.spec,a=e.operation,i=e.securities,s=e.requestContentType,l=e.responseContentType,u=e.attachContentTypeForEmptyPayload;if(t=function(e){var t=e.request,r=e.securities,n=void 0===r?{}:r,o=e.operation,a=void 0===o?{}:o,i=e.spec,s=me()({},t),l=n.authorized,u=void 0===l?{}:l,c=n.specSecurity,p=void 0===c?[]:c,f=a.security||p,h=u&&!!C()(u).length,d=i.securityDefinitions;if(s.headers=s.headers||{},s.query=s.query||{},!C()(n).length||!h||!f||Array.isArray(a.security)&&!a.security.length)return t;return f.forEach((function(e){C()(e).forEach((function(e){var t=u[e];if(t){var r=t.token,n=t.value||t,o=d[e],a=o.type,i=o["x-tokenName"]||"access_token",l=r&&r[i],c=r&&r.token_type;if(t)if("apiKey"===a){var p="query"===o.in?"query":"headers";s[p]=s[p]||{},s[p][o.name]=n}else if("basic"===a)if(n.header)s.headers.authorization=n.header;else{var f,h=n.username||"",m=n.password||"";n.base64=jr(j()(f="".concat(h,":")).call(f,m)),s.headers.authorization="Basic ".concat(n.base64)}else if("oauth2"===a&&l){var g;c=c&&"bearer"!==c.toLowerCase()?c:"Bearer",s.headers.authorization=j()(g="".concat(c," ")).call(g,l)}}}))})),s}({request:t,securities:i,operation:a,spec:o}),t.body||t.form||u)if(s)t.headers["Content-Type"]=s;else if(Array.isArray(a.consumes)){var c=m()(a.consumes,1);t.headers["Content-Type"]=c[0]}else if(Array.isArray(o.consumes)){var p=m()(o.consumes,1);t.headers["Content-Type"]=p[0]}else a.parameters&&P()(r=a.parameters).call(r,(function(e){return"file"===e.type})).length?t.headers["Content-Type"]="multipart/form-data":a.parameters&&P()(n=a.parameters).call(n,(function(e){return"formData"===e.in})).length&&(t.headers["Content-Type"]="application/x-www-form-urlencoded");else if(s){var f,h,d=a.parameters&&P()(f=a.parameters).call(f,(function(e){return"body"===e.in})).length>0,g=a.parameters&&P()(h=a.parameters).call(h,(function(e){return"formData"===e.in})).length>0;(d||g)&&(t.headers["Content-Type"]=s)}return!l&&Array.isArray(a.produces)&&a.produces.length>0&&(t.headers.accept=a.produces.join(", ")),t}var Tr=["http","fetch","spec","operationId","pathName","method","parameters","securities"],Pr=function(e){return Array.isArray(e)?e:[]},Rr=Ct("OperationNotFoundError",(function(e,t,r){this.originalError=r,Ee()(this,t||{})})),Mr={buildRequest:Lr};function Dr(e){var t=e.http,r=e.fetch,n=e.spec,o=e.operationId,a=e.pathName,i=e.method,s=e.parameters,l=e.securities,u=dr()(e,Tr),c=t||r||Y;a&&i&&!o&&(o=(0,cr.nc)(a,i));var p=Mr.buildRequest(me()({spec:n,operationId:o,parameters:s,securities:l,http:c},u));return p.body&&(br(p.body)||Array.isArray(p.body))&&(p.body=_()(p.body)),c(p)}function Lr(e){var t,r,n=e.spec,o=e.operationId,i=e.responseContentType,s=e.scheme,l=e.requestInterceptor,u=e.responseInterceptor,c=e.contextUrl,p=e.userFetch,f=e.server,h=e.serverVariables,d=e.http,g=e.signal,v=e.parameters,y=e.parameterBuilders,b=(0,cr.z6)(n);y||(y=b?a:wr);var w={url:"",credentials:d&&d.withCredentials?"include":"same-origin",headers:{},cookies:{}};g&&(w.signal=g),l&&(w.requestInterceptor=l),u&&(w.responseInterceptor=u),p&&(w.userFetch=p);var E=(0,cr.$r)(n,o);if(!E)throw new Rr("Operation ".concat(o," not found"));var x,_=E.operation,S=void 0===_?{}:_,k=E.method,O=E.pathName;if(w.url+=(x={spec:n,scheme:s,contextUrl:c,server:f,serverVariables:h,pathName:O,method:k},(0,cr.z6)(x.spec)?function(e){var t=e.spec,r=e.pathName,n=e.method,o=e.server,a=e.contextUrl,i=e.serverVariables,s=void 
0===i?{}:i,l=gr()(t,["paths",r,(n||"").toLowerCase(),"servers"])||gr()(t,["paths",r,"servers"])||gr()(t,["servers"]),u="",c=null;if(o&&l&&l.length){var p=A()(l).call(l,(function(e){return e.url}));p.indexOf(o)>-1&&(u=o,c=l[p.indexOf(o)])}if(!u&&l&&l.length){u=l[0].url;var f=m()(l,1);c=f[0]}return u.indexOf("{")>-1&&function(e){for(var t,r=[],n=/{([^}]+)}/g;t=n.exec(e);)r.push(t[1]);return r}(u).forEach((function(e){if(c.variables&&c.variables[e]){var t=c.variables[e],r=s[e]||t.default,n=new RegExp("{".concat(e,"}"),"g");u=u.replace(n,r)}})),function(){var e,t,r=arguments.length>0&&void 0!==arguments[0]?arguments[0]:"",n=arguments.length>1&&void 0!==arguments[1]?arguments[1]:"",o=r&&n?kt.parse(kt.resolve(n,r)):kt.parse(r),a=kt.parse(n),i=Br(o.protocol)||Br(a.protocol)||"",s=o.host||a.host,l=o.pathname||"";return"/"===(e=i&&s?j()(t="".concat(i,"://")).call(t,s+l):l)[e.length-1]?$()(e).call(e,0,-1):e}(u,a)}(x):function(e){var t,r,n=e.spec,o=e.scheme,a=e.contextUrl,i=void 0===a?"":a,s=kt.parse(i),l=Array.isArray(n.schemes)?n.schemes[0]:null,u=o||l||Br(s.protocol)||"http",c=n.host||s.host||"",p=n.basePath||"";return"/"===(t=u&&c?j()(r="".concat(u,"://")).call(r,c+p):p)[t.length-1]?$()(t).call(t,0,-1):t}(x)),!o)return delete w.cookies,w;w.url+=O,w.method="".concat(k).toUpperCase(),v=v||{};var I=n.paths[O]||{};i&&(w.headers.accept=i);var N=function(e){var t={};e.forEach((function(e){t[e.in]||(t[e.in]={}),t[e.in][e.name]=e}));var r=[];return C()(t).forEach((function(e){C()(t[e]).forEach((function(n){r.push(t[e][n])}))})),r}(j()(t=j()(r=[]).call(r,Pr(S.parameters))).call(t,Pr(I.parameters)));N.forEach((function(e){var t,r,o=y[e.in];if("body"===e.in&&e.schema&&e.schema.properties&&(t=v),void 0===(t=e&&e.name&&v[e.name]))t=e&&e.name&&v[j()(r="".concat(e.in,".")).call(r,e.name)];else if(function(e,t){return P()(t).call(t,(function(t){return t.name===e}))}(e.name,N).length>1){var a;console.warn(j()(a="Parameter '".concat(e.name,"' is ambiguous because the defined spec has more than one parameter with the name: '")).call(a,e.name,"' and the passed-in parameter values did not define an 'in' value."))}if(null!==t){if(void 0!==e.default&&void 0===t&&(t=e.default),void 0===t&&e.required&&!e.allowEmptyValue)throw new Error("Required parameter ".concat(e.name," is not provided"));if(b&&e.schema&&"object"===e.schema.type&&"string"==typeof t)try{t=JSON.parse(t)}catch(e){throw new Error("Could not parse object parameter value string as JSON")}o&&o({req:w,parameter:e,value:t,operation:S,spec:n})}}));var T=me()(me()({},e),{},{operation:S});if((w=b?Ir(T,w):Nr(T,w)).cookies&&C()(w.cookies).length){var R=C()(w.cookies).reduce((function(e,t){var r=w.cookies[t];return e+(e?"&":"")+vr.serialize(t,r)}),"");w.headers.Cookie=R}return w.cookies&&delete w.cookies,fe(w),w}var Br=function(e){return e?e.replace(/\W/g,""):null};function Fr(e,t){return zr.apply(this,arguments)}function zr(){return zr=s()(u().mark((function e(t,r){var n,o,a,i,s,l,c,p,f,h,d,m,g=arguments;return u().wrap((function(e){for(;;)switch(e.prev=e.next){case 0:return n=g.length>2&&void 0!==g[2]?g[2]:{},o=n.returnEntireTree,a=n.baseDoc,i=n.requestInterceptor,s=n.responseInterceptor,l=n.parameterMacro,c=n.modelPropertyMacro,p=n.useCircularStructures,f={pathDiscriminator:r,baseDoc:a,requestInterceptor:i,responseInterceptor:s,parameterMacro:l,modelPropertyMacro:c,useCircularStructures:p},h=(0,cr.K1)({spec:t}),d=h.spec,e.next=6,fr(me()(me()({},f),{},{spec:d,allowMetaPatches:!0,skipNormalization:!0}));case 6:return 
m=e.sent,!o&&Array.isArray(r)&&r.length&&(m.spec=gr()(m.spec,r)||null),e.abrupt("return",m);case 9:case"end":return e.stop()}}),e)}))),zr.apply(this,arguments)}var Ur=r(34852);function qr(e){let{configs:t,getConfigs:r}=e;return{fn:{fetch:(n=Y,o=t.preFetch,a=t.postFetch,a=a||function(e){return e},o=o||function(e){return e},function(e){return"string"==typeof e&&(e={url:e}),Z.mergeInQueryOrForm(e),e=o(e),a(n(e))}),buildRequest:Lr,execute:Dr,resolve:fr,resolveSubtree:function(e,t,n){if(void 0===n){const e=r();n={modelPropertyMacro:e.modelPropertyMacro,parameterMacro:e.parameterMacro,requestInterceptor:e.requestInterceptor,responseInterceptor:e.responseInterceptor}}for(var o=arguments.length,a=new Array(o>3?o-3:0),i=3;i{"use strict";r.r(t),r.d(t,{default:()=>o});var n=r(90242);function o(){return{fn:{shallowEqualKeys:n.be}}}},48347:(e,t,r)=>{"use strict";r.r(t),r.d(t,{getDisplayName:()=>n});const n=e=>e.displayName||e.name||"Component"},73420:(e,t,r)=>{"use strict";r.r(t),r.d(t,{default:()=>u});var n=r(35627),o=r.n(n),a=r(90242),i=r(55776),s=r(48347),l=r(60314);const u=e=>{let{getComponents:t,getStore:r,getSystem:n}=e;const u=(c=(0,i.getComponent)(n,r,t),(0,a.HP)(c,(function(){for(var e=arguments.length,t=new Array(e),r=0;r(0,l.Z)(e,(function(){for(var e=arguments.length,t=new Array(e),r=0;r{"use strict";r.r(t),r.d(t,{getComponent:()=>ne,render:()=>re,withMappedContainer:()=>te});var n=r(23101),o=r.n(n),a=r(28222),i=r.n(a),s=r(67294),l=r(73935),u=r(97779),c=s.createContext(null);var p=function(e){e()},f=function(){return p},h={notify:function(){}};var d=function(){function e(e,t){this.store=e,this.parentSub=t,this.unsubscribe=null,this.listeners=h,this.handleChangeWrapper=this.handleChangeWrapper.bind(this)}var t=e.prototype;return t.addNestedSub=function(e){return this.trySubscribe(),this.listeners.subscribe(e)},t.notifyNestedSubs=function(){this.listeners.notify()},t.handleChangeWrapper=function(){this.onStateChange&&this.onStateChange()},t.isSubscribed=function(){return Boolean(this.unsubscribe)},t.trySubscribe=function(){this.unsubscribe||(this.unsubscribe=this.parentSub?this.parentSub.addNestedSub(this.handleChangeWrapper):this.store.subscribe(this.handleChangeWrapper),this.listeners=function(){var e=f(),t=null,r=null;return{clear:function(){t=null,r=null},notify:function(){e((function(){for(var e=t;e;)e.callback(),e=e.next}))},get:function(){for(var e=[],r=t;r;)e.push(r),r=r.next;return e},subscribe:function(e){var n=!0,o=r={callback:e,next:null,prev:r};return o.prev?o.prev.next=o:t=o,function(){n&&null!==t&&(n=!1,o.next?o.next.prev=o.prev:r=o.prev,o.prev?o.prev.next=o.next:t=o.next)}}}}())},t.tryUnsubscribe=function(){this.unsubscribe&&(this.unsubscribe(),this.unsubscribe=null,this.listeners.clear(),this.listeners=h)},e}(),m="undefined"!=typeof window&&void 0!==window.document&&void 0!==window.document.createElement?s.useLayoutEffect:s.useEffect;const g=function(e){var t=e.store,r=e.context,n=e.children,o=(0,s.useMemo)((function(){var e=new d(t);return e.onStateChange=e.notifyNestedSubs,{store:t,subscription:e}}),[t]),a=(0,s.useMemo)((function(){return t.getState()}),[t]);m((function(){var e=o.subscription;return e.trySubscribe(),a!==t.getState()&&e.notifyNestedSubs(),function(){e.tryUnsubscribe(),e.onStateChange=null}}),[o,a]);var i=r||c;return s.createElement(i.Provider,{value:o},n)};var v=r(87462),y=r(63366),b=r(8679),w=r.n(b),E=r(72973),x=[],_=[null,null];function S(e,t){var r=e[1];return[t.payload,r+1]}function A(e,t,r){m((function(){return e.apply(void 0,t)}),r)}function 
k(e,t,r,n,o,a,i){e.current=n,t.current=o,r.current=!1,a.current&&(a.current=null,i())}function C(e,t,r,n,o,a,i,s,l,u){if(e){var c=!1,p=null,f=function(){if(!c){var e,r,f=t.getState();try{e=n(f,o.current)}catch(e){r=e,p=e}r||(p=null),e===a.current?i.current||l():(a.current=e,s.current=e,i.current=!0,u({type:"STORE_UPDATED",payload:{error:r}}))}};r.onStateChange=f,r.trySubscribe(),f();return function(){if(c=!0,r.tryUnsubscribe(),r.onStateChange=null,p)throw p}}}var O=function(){return[null,0]};function j(e,t){void 0===t&&(t={});var r=t,n=r.getDisplayName,o=void 0===n?function(e){return"ConnectAdvanced("+e+")"}:n,a=r.methodName,i=void 0===a?"connectAdvanced":a,l=r.renderCountProp,u=void 0===l?void 0:l,p=r.shouldHandleStateChanges,f=void 0===p||p,h=r.storeKey,m=void 0===h?"store":h,g=(r.withRef,r.forwardRef),b=void 0!==g&&g,j=r.context,I=void 0===j?c:j,N=(0,y.Z)(r,["getDisplayName","methodName","renderCountProp","shouldHandleStateChanges","storeKey","withRef","forwardRef","context"]),T=I;return function(t){var r=t.displayName||t.name||"Component",n=o(r),a=(0,v.Z)({},N,{getDisplayName:o,methodName:i,renderCountProp:u,shouldHandleStateChanges:f,storeKey:m,displayName:n,wrappedComponentName:r,WrappedComponent:t}),l=N.pure;var c=l?s.useMemo:function(e){return e()};function p(r){var n=(0,s.useMemo)((function(){var e=r.reactReduxForwardedRef,t=(0,y.Z)(r,["reactReduxForwardedRef"]);return[r.context,e,t]}),[r]),o=n[0],i=n[1],l=n[2],u=(0,s.useMemo)((function(){return o&&o.Consumer&&(0,E.isContextConsumer)(s.createElement(o.Consumer,null))?o:T}),[o,T]),p=(0,s.useContext)(u),h=Boolean(r.store)&&Boolean(r.store.getState)&&Boolean(r.store.dispatch);Boolean(p)&&Boolean(p.store);var m=h?r.store:p.store,g=(0,s.useMemo)((function(){return function(t){return e(t.dispatch,a)}(m)}),[m]),b=(0,s.useMemo)((function(){if(!f)return _;var e=new d(m,h?null:p.subscription),t=e.notifyNestedSubs.bind(e);return[e,t]}),[m,h,p]),w=b[0],j=b[1],I=(0,s.useMemo)((function(){return h?p:(0,v.Z)({},p,{subscription:w})}),[h,p,w]),N=(0,s.useReducer)(S,x,O),P=N[0][0],R=N[1];if(P&&P.error)throw P.error;var M=(0,s.useRef)(),D=(0,s.useRef)(l),L=(0,s.useRef)(),B=(0,s.useRef)(!1),F=c((function(){return L.current&&l===D.current?L.current:g(m.getState(),l)}),[m,P,l]);A(k,[D,M,B,l,F,L,j]),A(C,[f,m,w,g,D,M,B,L,j,R],[m,w,g]);var z=(0,s.useMemo)((function(){return s.createElement(t,(0,v.Z)({},F,{ref:i}))}),[i,t,F]);return(0,s.useMemo)((function(){return f?s.createElement(u.Provider,{value:I},z):z}),[u,z,I])}var h=l?s.memo(p):p;if(h.WrappedComponent=t,h.displayName=p.displayName=n,b){var g=s.forwardRef((function(e,t){return s.createElement(h,(0,v.Z)({},e,{reactReduxForwardedRef:t}))}));return g.displayName=n,g.WrappedComponent=t,w()(g,t)}return w()(h,t)}}function I(e,t){return e===t?0!==e||0!==t||1/e==1/t:e!=e&&t!=t}function N(e,t){if(I(e,t))return!0;if("object"!=typeof e||null===e||"object"!=typeof t||null===t)return!1;var r=Object.keys(e),n=Object.keys(t);if(r.length!==n.length)return!1;for(var o=0;o=0;n--){var o=t[n](e);if(o)return o}return function(t,n){throw new Error("Invalid value of type "+typeof e+" for "+r+" argument when connecting component "+n.wrappedComponentName+".")}}function V(e,t){return e===t}function $(e){var t=void 0===e?{}:e,r=t.connectHOC,n=void 0===r?j:r,o=t.mapStateToPropsFactories,a=void 0===o?D:o,i=t.mapDispatchToPropsFactories,s=void 0===i?M:i,l=t.mergePropsFactories,u=void 0===l?B:l,c=t.selectorFactory,p=void 0===c?U:c;return function(e,t,r,o){void 0===o&&(o={});var i=o,l=i.pure,c=void 
0===l||l,f=i.areStatesEqual,h=void 0===f?V:f,d=i.areOwnPropsEqual,m=void 0===d?N:d,g=i.areStatePropsEqual,b=void 0===g?N:g,w=i.areMergedPropsEqual,E=void 0===w?N:w,x=(0,y.Z)(i,["pure","areStatesEqual","areOwnPropsEqual","areStatePropsEqual","areMergedPropsEqual"]),_=q(e,a,"mapStateToProps"),S=q(t,s,"mapDispatchToProps"),A=q(r,u,"mergeProps");return n(p,(0,v.Z)({methodName:"connect",getDisplayName:function(e){return"Connect("+e+")"},shouldHandleStateChanges:Boolean(e),initMapStateToProps:_,initMapDispatchToProps:S,initMergeProps:A,pure:c,areStatesEqual:h,areOwnPropsEqual:m,areStatePropsEqual:b,areMergedPropsEqual:E},x))}}const W=$();var H;H=l.unstable_batchedUpdates,p=H;var J=r(57557),K=r.n(J),G=r(6557),Z=r.n(G);const Y=e=>t=>{const{fn:r}=e();class n extends s.Component{render(){return s.createElement(t,o()({},e(),this.props,this.context))}}return n.displayName=`WithSystem(${r.getDisplayName(t)})`,n},Q=(e,t)=>r=>{const{fn:n}=e();class a extends s.Component{render(){return s.createElement(g,{store:t},s.createElement(r,o()({},this.props,this.context)))}}return a.displayName=`WithRoot(${n.getDisplayName(r)})`,a},X=(e,t,r)=>(0,u.qC)(r?Q(e,r):Z(),W(((r,n)=>{var o;const a={...n,...e()},i=(null===(o=t.prototype)||void 0===o?void 0:o.mapStateToProps)||(e=>({state:e}));return i(r,a)})),Y(e))(t),ee=(e,t,r,n)=>{for(const o in t){const a=t[o];"function"==typeof a&&a(r[o],n[o],e())}},te=(e,t,r)=>(t,n)=>{const{fn:o}=e(),a=r(t,"root");class l extends s.Component{constructor(t,r){super(t,r),ee(e,n,t,{})}UNSAFE_componentWillReceiveProps(t){ee(e,n,t,this.props)}render(){const e=K()(this.props,n?i()(n):[]);return s.createElement(a,e)}}return l.displayName=`WithMappedContainer(${o.getDisplayName(a)})`,l},re=(e,t,r,n)=>o=>{const a=r(e,t,n)("App","root");l.render(s.createElement(a,null),o)},ne=(e,t,r)=>function(n,o){let a=arguments.length>2&&void 0!==arguments[2]?arguments[2]:{};if("string"!=typeof n)throw new TypeError("Need a string, to fetch a component. 
Was given a "+typeof n);const i=r(n);return i?o?"root"===o?X(e,i,t()):X(e,i):i:(a.failSilently||e().log.warn("Could not find component:",n),null)}},36581:(e,t,r)=>{"use strict";r.d(t,{d3:()=>T,C2:()=>Z});var n=r(28222),o=r.n(n),a=r(58118),i=r.n(a),s=r(63366);function l(e,t){(null==t||t>e.length)&&(t=e.length);for(var r=0,n=new Array(t);r=4?[t[0],t[1],t[2],t[3],"".concat(t[0],".").concat(t[1]),"".concat(t[0],".").concat(t[2]),"".concat(t[0],".").concat(t[3]),"".concat(t[1],".").concat(t[0]),"".concat(t[1],".").concat(t[2]),"".concat(t[1],".").concat(t[3]),"".concat(t[2],".").concat(t[0]),"".concat(t[2],".").concat(t[1]),"".concat(t[2],".").concat(t[3]),"".concat(t[3],".").concat(t[0]),"".concat(t[3],".").concat(t[1]),"".concat(t[3],".").concat(t[2]),"".concat(t[0],".").concat(t[1],".").concat(t[2]),"".concat(t[0],".").concat(t[1],".").concat(t[3]),"".concat(t[0],".").concat(t[2],".").concat(t[1]),"".concat(t[0],".").concat(t[2],".").concat(t[3]),"".concat(t[0],".").concat(t[3],".").concat(t[1]),"".concat(t[0],".").concat(t[3],".").concat(t[2]),"".concat(t[1],".").concat(t[0],".").concat(t[2]),"".concat(t[1],".").concat(t[0],".").concat(t[3]),"".concat(t[1],".").concat(t[2],".").concat(t[0]),"".concat(t[1],".").concat(t[2],".").concat(t[3]),"".concat(t[1],".").concat(t[3],".").concat(t[0]),"".concat(t[1],".").concat(t[3],".").concat(t[2]),"".concat(t[2],".").concat(t[0],".").concat(t[1]),"".concat(t[2],".").concat(t[0],".").concat(t[3]),"".concat(t[2],".").concat(t[1],".").concat(t[0]),"".concat(t[2],".").concat(t[1],".").concat(t[3]),"".concat(t[2],".").concat(t[3],".").concat(t[0]),"".concat(t[2],".").concat(t[3],".").concat(t[1]),"".concat(t[3],".").concat(t[0],".").concat(t[1]),"".concat(t[3],".").concat(t[0],".").concat(t[2]),"".concat(t[3],".").concat(t[1],".").concat(t[0]),"".concat(t[3],".").concat(t[1],".").concat(t[2]),"".concat(t[3],".").concat(t[2],".").concat(t[0]),"".concat(t[3],".").concat(t[2],".").concat(t[1]),"".concat(t[0],".").concat(t[1],".").concat(t[2],".").concat(t[3]),"".concat(t[0],".").concat(t[1],".").concat(t[3],".").concat(t[2]),"".concat(t[0],".").concat(t[2],".").concat(t[1],".").concat(t[3]),"".concat(t[0],".").concat(t[2],".").concat(t[3],".").concat(t[1]),"".concat(t[0],".").concat(t[3],".").concat(t[1],".").concat(t[2]),"".concat(t[0],".").concat(t[3],".").concat(t[2],".").concat(t[1]),"".concat(t[1],".").concat(t[0],".").concat(t[2],".").concat(t[3]),"".concat(t[1],".").concat(t[0],".").concat(t[3],".").concat(t[2]),"".concat(t[1],".").concat(t[2],".").concat(t[0],".").concat(t[3]),"".concat(t[1],".").concat(t[2],".").concat(t[3],".").concat(t[0]),"".concat(t[1],".").concat(t[3],".").concat(t[0],".").concat(t[2]),"".concat(t[1],".").concat(t[3],".").concat(t[2],".").concat(t[0]),"".concat(t[2],".").concat(t[0],".").concat(t[1],".").concat(t[3]),"".concat(t[2],".").concat(t[0],".").concat(t[3],".").concat(t[1]),"".concat(t[2],".").concat(t[1],".").concat(t[0],".").concat(t[3]),"".concat(t[2],".").concat(t[1],".").concat(t[3],".").concat(t[0]),"".concat(t[2],".").concat(t[3],".").concat(t[0],".").concat(t[1]),"".concat(t[2],".").concat(t[3],".").concat(t[1],".").concat(t[0]),"".concat(t[3],".").concat(t[0],".").concat(t[1],".").concat(t[2]),"".concat(t[3],".").concat(t[0],".").concat(t[2],".").concat(t[1]),"".concat(t[3],".").concat(t[1],".").concat(t[0],".").concat(t[2]),"".concat(t[3],".").concat(t[1],".").concat(t[2],".").concat(t[0]),"".concat(t[3],".").concat(t[2],".").concat(t[0],".").concat(t[1]),"".concat(t[3],".").concat(t[2],".").concat(t[1],".").c
oncat(t[0])]:void 0),d[n]}function g(e){var t=arguments.length>1&&void 0!==arguments[1]?arguments[1]:{},r=arguments.length>2?arguments[2]:void 0,n=e.filter((function(e){return"token"!==e})),o=m(n);return o.reduce((function(e,t){return p({},e,r[t])}),t)}function v(e){return e.join(" ")}function y(e){var t=e.node,r=e.stylesheet,n=e.style,o=void 0===n?{}:n,a=e.useInlineStyles,i=e.key,s=t.properties,l=t.type,u=t.tagName,c=t.value;if("text"===l)return c;if(u){var d,m=function(e,t){var r=0;return function(n){return r+=1,n.map((function(n,o){return y({node:n,stylesheet:e,useInlineStyles:t,key:"code-segment-".concat(r,"-").concat(o)})}))}}(r,a);if(a){var b=Object.keys(r).reduce((function(e,t){return t.split(".").forEach((function(t){e.includes(t)||e.push(t)})),e}),[]),w=s.className&&s.className.includes("token")?["token"]:[],E=s.className&&w.concat(s.className.filter((function(e){return!b.includes(e)})));d=p({},s,{className:v(E)||void 0,style:g(s.className,Object.assign({},s.style,o),r)})}else d=p({},s,{className:v(s.className)});var x=m(t.children);return f.createElement(u,(0,h.Z)({key:i},d),x)}}var b=/\n/g;function w(e){var t=e.codeString,r=e.codeStyle,n=e.containerStyle,o=void 0===n?{float:"left",paddingRight:"10px"}:n,a=e.numberStyle,i=void 0===a?{}:a,s=e.startingLineNumber;return f.createElement("code",{style:Object.assign({},r,o)},function(e){var t=e.lines,r=e.startingLineNumber,n=e.style;return t.map((function(e,t){var o=t+r;return f.createElement("span",{key:"line-".concat(t),className:"react-syntax-highlighter-line-number",style:"function"==typeof n?n(o):n},"".concat(o,"\n"))}))}({lines:t.replace(/\n$/,"").split("\n"),style:i,startingLineNumber:s}))}function E(e,t){return{type:"element",tagName:"span",properties:{key:"line-number--".concat(e),className:["comment","linenumber","react-syntax-highlighter-line-number"],style:t},children:[{type:"text",value:e}]}}function x(e,t,r){var n;return p({},{display:"inline-block",minWidth:(n=r,"".concat(n.toString().length,".25em")),paddingRight:"1em",textAlign:"right",userSelect:"none"},"function"==typeof e?e(t):e)}function _(e){var t=e.children,r=e.lineNumber,n=e.lineNumberStyle,o=e.largestLineNumber,a=e.showInlineLineNumbers,i=e.lineProps,s=void 0===i?{}:i,l=e.className,u=void 0===l?[]:l,c=e.showLineNumbers,f=e.wrapLongLines,h="function"==typeof s?s(r):s;if(h.className=u,r&&a){var d=x(n,r,o);t.unshift(E(r,d))}return f&c&&(h.style=p({},h.style,{display:"flex"})),{type:"element",tagName:"span",properties:h,children:t}}function S(e){for(var t=arguments.length>1&&void 0!==arguments[1]?arguments[1]:[],r=arguments.length>2&&void 0!==arguments[2]?arguments[2]:[],n=0;n2&&void 0!==arguments[2]?arguments[2]:[];return _({children:e,lineNumber:t,lineNumberStyle:s,largestLineNumber:i,showInlineLineNumbers:o,lineProps:r,className:a,showLineNumbers:n,wrapLongLines:l})}function m(e,t){if(n&&t&&o){var r=x(s,t,i);e.unshift(E(t,r))}return e}function g(e,r){var n=arguments.length>2&&void 0!==arguments[2]?arguments[2]:[];return t||n.length>0?d(e,r,n):m(e,r)}for(var v=function(){var e=c[h],t=e.children[0].value;if(t.match(b)){var r=t.split("\n");r.forEach((function(t,o){var i=n&&p.length+a,s={type:"text",value:"".concat(t,"\n")};if(0===o){var l=g(c.slice(f+1,h).concat(_({children:[s],className:e.properties.className})),i);p.push(l)}else if(o===r.length-1){if(c[h+1]&&c[h+1].children&&c[h+1].children[0]){var u=_({children:[{type:"text",value:"".concat(t)}],className:e.properties.className});c.splice(h+1,0,u)}else{var d=g([s],i,e.properties.className);p.push(d)}}else{var 
m=g([s],i,e.properties.className);p.push(m)}})),f=h}h++};h=0||Object.prototype.propertyIsEnumerable.call(e,r)&&(o[r]=e[r])}return o}(e,["language","children","style","customStyle","codeTagProps","useInlineStyles","showLineNumbers","showInlineLineNumbers","startingLineNumber","lineNumberContainerStyle","lineNumberStyle","wrapLines","wrapLongLines","lineProps","renderer","PreTag","CodeTag","code","astGenerator"]);U=U||O;var V=m?f.createElement(w,{containerStyle:E,codeStyle:u.style||{},numberStyle:_,startingLineNumber:b,codeString:z}):null,$=o.hljs||o['pre[class*="language-"]']||{backgroundColor:"#fff"},W=C(U)?"hljs":"prismjs",H=h?Object.assign({},q,{style:Object.assign({},$,i)}):Object.assign({},q,{className:q.className?"".concat(W," ").concat(q.className):W,style:Object.assign({},i)});if(!U)return f.createElement(D,H,V,f.createElement(B,u,z));(void 0===S&&R||N)&&(S=!0),R=R||k;var J=[{type:"text",value:z}],K=function(e){var t=e.astGenerator,r=e.language,n=e.code,o=e.defaultCodeValue;if(C(t)){var a=function(e,t){return-1!==e.listLanguages().indexOf(t)}(t,r);return"text"===r?{value:o,language:"text"}:a?t.highlight(r,n):t.highlightAuto(n)}try{return r&&"text"!==r?{value:t.highlight(n,r)}:{value:o}}catch(e){return{value:o}}}({astGenerator:U,language:t,code:z,defaultCodeValue:J});null===K.language&&(K.value=J);var G=A(K,S,P,m,v,b,K.value.length+b,_,N);return u.style=p({},u.style,N?{whiteSpace:"pre-wrap"}:{whiteSpace:"pre"}),f.createElement(D,H,f.createElement(B,u,!v&&V,R({rows:G,stylesheet:o,useInlineStyles:h})))});N.registerLanguage=I.registerLanguage;const T=N;var P=r(96344);const R=r.n(P)();var M=r(82026);const D=r.n(M)();var L=r(42157);const B=r.n(L)();var F=r(61519);const z=r.n(F)();var U=r(54587);const q=r.n(U)();var V=r(30786);const $=r.n(V)();var W=r(66336);const H=r.n(W)(),J={hljs:{display:"block",overflowX:"auto",padding:"0.5em",background:"#333",color:"white"},"hljs-name":{fontWeight:"bold"},"hljs-strong":{fontWeight:"bold"},"hljs-code":{fontStyle:"italic",color:"#888"},"hljs-emphasis":{fontStyle:"italic"},"hljs-tag":{color:"#62c8f3"},"hljs-variable":{color:"#ade5fc"},"hljs-template-variable":{color:"#ade5fc"},"hljs-selector-id":{color:"#ade5fc"},"hljs-selector-class":{color:"#ade5fc"},"hljs-string":{color:"#a2fca2"},"hljs-bullet":{color:"#d36363"},"hljs-type":{color:"#ffa"},"hljs-title":{color:"#ffa"},"hljs-section":{color:"#ffa"},"hljs-attribute":{color:"#ffa"},"hljs-quote":{color:"#ffa"},"hljs-built_in":{color:"#ffa"},"hljs-builtin-name":{color:"#ffa"},"hljs-number":{color:"#d36363"},"hljs-symbol":{color:"#d36363"},"hljs-keyword":{color:"#fcc28c"},"hljs-selector-tag":{color:"#fcc28c"},"hljs-literal":{color:"#fcc28c"},"hljs-comment":{color:"#888"},"hljs-deletion":{color:"#333",backgroundColor:"#fc9b9b"},"hljs-regexp":{color:"#c6b4f0"},"hljs-link":{color:"#c6b4f0"},"hljs-meta":{color:"#fc9b9b"},"hljs-addition":{backgroundColor:"#a2fca2",color:"#333"}};T.registerLanguage("json",D),T.registerLanguage("js",R),T.registerLanguage("xml",B),T.registerLanguage("yaml",q),T.registerLanguage("http",$),T.registerLanguage("bash",z),T.registerLanguage("powershell",H),T.registerLanguage("javascript",R);const 
K={agate:J,arta:{hljs:{display:"block",overflowX:"auto",padding:"0.5em",background:"#222",color:"#aaa"},"hljs-subst":{color:"#aaa"},"hljs-section":{color:"#fff",fontWeight:"bold"},"hljs-comment":{color:"#444"},"hljs-quote":{color:"#444"},"hljs-meta":{color:"#444"},"hljs-string":{color:"#ffcc33"},"hljs-symbol":{color:"#ffcc33"},"hljs-bullet":{color:"#ffcc33"},"hljs-regexp":{color:"#ffcc33"},"hljs-number":{color:"#00cc66"},"hljs-addition":{color:"#00cc66"},"hljs-built_in":{color:"#32aaee"},"hljs-builtin-name":{color:"#32aaee"},"hljs-literal":{color:"#32aaee"},"hljs-type":{color:"#32aaee"},"hljs-template-variable":{color:"#32aaee"},"hljs-attribute":{color:"#32aaee"},"hljs-link":{color:"#32aaee"},"hljs-keyword":{color:"#6644aa"},"hljs-selector-tag":{color:"#6644aa"},"hljs-name":{color:"#6644aa"},"hljs-selector-id":{color:"#6644aa"},"hljs-selector-class":{color:"#6644aa"},"hljs-title":{color:"#bb1166"},"hljs-variable":{color:"#bb1166"},"hljs-deletion":{color:"#bb1166"},"hljs-template-tag":{color:"#bb1166"},"hljs-doctag":{fontWeight:"bold"},"hljs-strong":{fontWeight:"bold"},"hljs-emphasis":{fontStyle:"italic"}},monokai:{hljs:{display:"block",overflowX:"auto",padding:"0.5em",background:"#272822",color:"#ddd"},"hljs-tag":{color:"#f92672"},"hljs-keyword":{color:"#f92672",fontWeight:"bold"},"hljs-selector-tag":{color:"#f92672",fontWeight:"bold"},"hljs-literal":{color:"#f92672",fontWeight:"bold"},"hljs-strong":{color:"#f92672"},"hljs-name":{color:"#f92672"},"hljs-code":{color:"#66d9ef"},"hljs-class .hljs-title":{color:"white"},"hljs-attribute":{color:"#bf79db"},"hljs-symbol":{color:"#bf79db"},"hljs-regexp":{color:"#bf79db"},"hljs-link":{color:"#bf79db"},"hljs-string":{color:"#a6e22e"},"hljs-bullet":{color:"#a6e22e"},"hljs-subst":{color:"#a6e22e"},"hljs-title":{color:"#a6e22e",fontWeight:"bold"},"hljs-section":{color:"#a6e22e",fontWeight:"bold"},"hljs-emphasis":{color:"#a6e22e"},"hljs-type":{color:"#a6e22e",fontWeight:"bold"},"hljs-built_in":{color:"#a6e22e"},"hljs-builtin-name":{color:"#a6e22e"},"hljs-selector-attr":{color:"#a6e22e"},"hljs-selector-pseudo":{color:"#a6e22e"},"hljs-addition":{color:"#a6e22e"},"hljs-variable":{color:"#a6e22e"},"hljs-template-tag":{color:"#a6e22e"},"hljs-template-variable":{color:"#a6e22e"},"hljs-comment":{color:"#75715e"},"hljs-quote":{color:"#75715e"},"hljs-deletion":{color:"#75715e"},"hljs-meta":{color:"#75715e"},"hljs-doctag":{fontWeight:"bold"},"hljs-selector-id":{fontWeight:"bold"}},nord:{hljs:{display:"block",overflowX:"auto",padding:"0.5em",background:"#2E3440",color:"#D8DEE9"},"hljs-subst":{color:"#D8DEE9"},"hljs-selector-tag":{color:"#81A1C1"},"hljs-selector-id":{color:"#8FBCBB",fontWeight:"bold"},"hljs-selector-class":{color:"#8FBCBB"},"hljs-selector-attr":{color:"#8FBCBB"},"hljs-selector-pseudo":{color:"#88C0D0"},"hljs-addition":{backgroundColor:"rgba(163, 190, 140, 0.5)"},"hljs-deletion":{backgroundColor:"rgba(191, 97, 106, 0.5)"},"hljs-built_in":{color:"#8FBCBB"},"hljs-type":{color:"#8FBCBB"},"hljs-class":{color:"#8FBCBB"},"hljs-function":{color:"#88C0D0"},"hljs-function > 
.hljs-title":{color:"#88C0D0"},"hljs-keyword":{color:"#81A1C1"},"hljs-literal":{color:"#81A1C1"},"hljs-symbol":{color:"#81A1C1"},"hljs-number":{color:"#B48EAD"},"hljs-regexp":{color:"#EBCB8B"},"hljs-string":{color:"#A3BE8C"},"hljs-title":{color:"#8FBCBB"},"hljs-params":{color:"#D8DEE9"},"hljs-bullet":{color:"#81A1C1"},"hljs-code":{color:"#8FBCBB"},"hljs-emphasis":{fontStyle:"italic"},"hljs-formula":{color:"#8FBCBB"},"hljs-strong":{fontWeight:"bold"},"hljs-link:hover":{textDecoration:"underline"},"hljs-quote":{color:"#4C566A"},"hljs-comment":{color:"#4C566A"},"hljs-doctag":{color:"#8FBCBB"},"hljs-meta":{color:"#5E81AC"},"hljs-meta-keyword":{color:"#5E81AC"},"hljs-meta-string":{color:"#A3BE8C"},"hljs-attr":{color:"#8FBCBB"},"hljs-attribute":{color:"#D8DEE9"},"hljs-builtin-name":{color:"#81A1C1"},"hljs-name":{color:"#81A1C1"},"hljs-section":{color:"#88C0D0"},"hljs-tag":{color:"#81A1C1"},"hljs-variable":{color:"#D8DEE9"},"hljs-template-variable":{color:"#D8DEE9"},"hljs-template-tag":{color:"#5E81AC"},"abnf .hljs-attribute":{color:"#88C0D0"},"abnf .hljs-symbol":{color:"#EBCB8B"},"apache .hljs-attribute":{color:"#88C0D0"},"apache .hljs-section":{color:"#81A1C1"},"arduino .hljs-built_in":{color:"#88C0D0"},"aspectj .hljs-meta":{color:"#D08770"},"aspectj > .hljs-title":{color:"#88C0D0"},"bnf .hljs-attribute":{color:"#8FBCBB"},"clojure .hljs-name":{color:"#88C0D0"},"clojure .hljs-symbol":{color:"#EBCB8B"},"coq .hljs-built_in":{color:"#88C0D0"},"cpp .hljs-meta-string":{color:"#8FBCBB"},"css .hljs-built_in":{color:"#88C0D0"},"css .hljs-keyword":{color:"#D08770"},"diff .hljs-meta":{color:"#8FBCBB"},"ebnf .hljs-attribute":{color:"#8FBCBB"},"glsl .hljs-built_in":{color:"#88C0D0"},"groovy .hljs-meta:not(:first-child)":{color:"#D08770"},"haxe .hljs-meta":{color:"#D08770"},"java .hljs-meta":{color:"#D08770"},"ldif .hljs-attribute":{color:"#8FBCBB"},"lisp .hljs-name":{color:"#88C0D0"},"lua .hljs-built_in":{color:"#88C0D0"},"moonscript .hljs-built_in":{color:"#88C0D0"},"nginx .hljs-attribute":{color:"#88C0D0"},"nginx .hljs-section":{color:"#5E81AC"},"pf .hljs-built_in":{color:"#88C0D0"},"processing .hljs-built_in":{color:"#88C0D0"},"scss .hljs-keyword":{color:"#81A1C1"},"stylus .hljs-keyword":{color:"#81A1C1"},"swift .hljs-meta":{color:"#D08770"},"vim .hljs-built_in":{color:"#88C0D0",fontStyle:"italic"},"yaml .hljs-meta":{color:"#D08770"}},obsidian:{hljs:{display:"block",overflowX:"auto",padding:"0.5em",background:"#282b2e",color:"#e0e2e4"},"hljs-keyword":{color:"#93c763",fontWeight:"bold"},"hljs-selector-tag":{color:"#93c763",fontWeight:"bold"},"hljs-literal":{color:"#93c763",fontWeight:"bold"},"hljs-selector-id":{color:"#93c763"},"hljs-number":{color:"#ffcd22"},"hljs-attribute":{color:"#668bb0"},"hljs-code":{color:"white"},"hljs-class 
.hljs-title":{color:"white"},"hljs-section":{color:"white",fontWeight:"bold"},"hljs-regexp":{color:"#d39745"},"hljs-link":{color:"#d39745"},"hljs-meta":{color:"#557182"},"hljs-tag":{color:"#8cbbad"},"hljs-name":{color:"#8cbbad",fontWeight:"bold"},"hljs-bullet":{color:"#8cbbad"},"hljs-subst":{color:"#8cbbad"},"hljs-emphasis":{color:"#8cbbad"},"hljs-type":{color:"#8cbbad",fontWeight:"bold"},"hljs-built_in":{color:"#8cbbad"},"hljs-selector-attr":{color:"#8cbbad"},"hljs-selector-pseudo":{color:"#8cbbad"},"hljs-addition":{color:"#8cbbad"},"hljs-variable":{color:"#8cbbad"},"hljs-template-tag":{color:"#8cbbad"},"hljs-template-variable":{color:"#8cbbad"},"hljs-string":{color:"#ec7600"},"hljs-symbol":{color:"#ec7600"},"hljs-comment":{color:"#818e96"},"hljs-quote":{color:"#818e96"},"hljs-deletion":{color:"#818e96"},"hljs-selector-class":{color:"#A082BD"},"hljs-doctag":{fontWeight:"bold"},"hljs-title":{fontWeight:"bold"},"hljs-strong":{fontWeight:"bold"}},"tomorrow-night":{"hljs-comment":{color:"#969896"},"hljs-quote":{color:"#969896"},"hljs-variable":{color:"#cc6666"},"hljs-template-variable":{color:"#cc6666"},"hljs-tag":{color:"#cc6666"},"hljs-name":{color:"#cc6666"},"hljs-selector-id":{color:"#cc6666"},"hljs-selector-class":{color:"#cc6666"},"hljs-regexp":{color:"#cc6666"},"hljs-deletion":{color:"#cc6666"},"hljs-number":{color:"#de935f"},"hljs-built_in":{color:"#de935f"},"hljs-builtin-name":{color:"#de935f"},"hljs-literal":{color:"#de935f"},"hljs-type":{color:"#de935f"},"hljs-params":{color:"#de935f"},"hljs-meta":{color:"#de935f"},"hljs-link":{color:"#de935f"},"hljs-attribute":{color:"#f0c674"},"hljs-string":{color:"#b5bd68"},"hljs-symbol":{color:"#b5bd68"},"hljs-bullet":{color:"#b5bd68"},"hljs-addition":{color:"#b5bd68"},"hljs-title":{color:"#81a2be"},"hljs-section":{color:"#81a2be"},"hljs-keyword":{color:"#b294bb"},"hljs-selector-tag":{color:"#b294bb"},hljs:{display:"block",overflowX:"auto",background:"#1d1f21",color:"#c5c8c6",padding:"0.5em"},"hljs-emphasis":{fontStyle:"italic"},"hljs-strong":{fontWeight:"bold"}}},G=o()(K),Z=e=>i()(G).call(G,e)?K[e]:(console.warn(`Request style '${e}' is not available, returning default instead`),J)},90242:(e,t,r)=>{"use strict";r.d(t,{mz:()=>pe,oG:()=>fe,AF:()=>he,LQ:()=>de,Kn:()=>me,Wl:()=>ge,kJ:()=>ve,HP:()=>ye,Ay:()=>be,Q2:()=>we,_5:()=>Ee,iQ:()=>xe,gp:()=>_e,DR:()=>Se,Zl:()=>Ae,Ik:()=>Ce,xi:()=>Pe,UG:()=>Re,r3:()=>Me,wh:()=>De,GZ:()=>Le,be:()=>Be,Nm:()=>Fe,hW:()=>ze,QG:()=>Ue,oJ:()=>qe,J6:()=>Ve,nX:()=>$e,po:()=>We,XV:()=>He,Pz:()=>Je,D$:()=>Ke,V9:()=>Ge,cz:()=>Ze,Uj:()=>Ye,Xb:()=>Qe,O2:()=>et});var n=r(58309),o=r.n(n),a=r(97606),i=r.n(a),s=r(74386),l=r.n(s),u=r(86),c=r.n(u),p=r(14418),f=r.n(p),h=r(28222),d=r.n(h),m=(r(11189),r(24282)),g=r.n(m),v=r(76986),y=r.n(v),b=r(2578),w=r.n(b),E=r(24278),x=r.n(E),_=(r(39022),r(92039)),S=r.n(_),A=(r(58118),r(35627)),k=r.n(A),C=r(11882),O=r.n(C),j=r(51679),I=r.n(j),N=r(27043),T=r.n(N),P=r(81607),R=r.n(P),M=r(43393),D=r.n(M),L=r(17967),B=r(68929),F=r.n(B),z=r(11700),U=r.n(z),q=r(88306),V=r.n(q),$=r(13311),W=r.n($),H=r(59704),J=r.n(H),K=r(77813),G=r.n(K),Z=r(23560),Y=r.n(Z),Q=r(57050),X=r(27504),ee=r(8269),te=r.n(ee),re=r(19069),ne=r(92282),oe=r.n(ne),ae=r(89072),ie=r.n(ae),se=r(1272),le=r(48764).Buffer;const ue="default",ce=e=>D().Iterable.isIterable(e);function pe(e){return me(e)?ce(e)?e.toJS():e:{}}function fe(e){var t,r;if(ce(e))return e;if(e instanceof X.Z.File)return e;if(!me(e))return e;if(o()(e))return i()(r=D().Seq(e)).call(r,fe).toList();if(Y()(l()(e))){var n;const t=function(e){if(!Y()(l()(e)))return e;const 
t={},r="_**[]",n={};for(let o of l()(e).call(e))if(t[o[0]]||n[o[0]]&&n[o[0]].containsMultiple){if(!n[o[0]]){n[o[0]]={containsMultiple:!0,length:1},t[`${o[0]}${r}${n[o[0]].length}`]=t[o[0]],delete t[o[0]]}n[o[0]].length+=1,t[`${o[0]}${r}${n[o[0]].length}`]=o[1]}else t[o[0]]=o[1];return t}(e);return i()(n=D().OrderedMap(t)).call(n,fe)}return i()(t=D().OrderedMap(e)).call(t,fe)}function he(e){return o()(e)?e:[e]}function de(e){return"function"==typeof e}function me(e){return!!e&&"object"==typeof e}function ge(e){return"function"==typeof e}function ve(e){return o()(e)}const ye=V();function be(e,t){var r;return g()(r=d()(e)).call(r,((r,n)=>(r[n]=t(e[n],n),r)),{})}function we(e,t){var r;return g()(r=d()(e)).call(r,((r,n)=>{let o=t(e[n],n);return o&&"object"==typeof o&&y()(r,o),r}),{})}function Ee(e){return t=>{let{dispatch:r,getState:n}=t;return t=>r=>"function"==typeof r?r(e()):t(r)}}function xe(e){var t;let r=e.keySeq();return r.contains(ue)?ue:w()(t=f()(r).call(r,(e=>"2"===(e+"")[0]))).call(t).first()}function _e(e,t){if(!D().Iterable.isIterable(e))return D().List();let r=e.getIn(o()(t)?t:[t]);return D().List.isList(r)?r:D().List()}function Se(e){let t,r=[/filename\*=[^']+'\w*'"([^"]+)";?/i,/filename\*=[^']+'\w*'([^;]+);?/i,/filename="([^;]*);?"/i,/filename=([^;]*);?/i];if(S()(r).call(r,(r=>(t=r.exec(e),null!==t))),null!==t&&t.length>1)try{return decodeURIComponent(t[1])}catch(e){console.error(e)}return null}function Ae(e){return t=e.replace(/\.[^./]*$/,""),U()(F()(t));var t}function ke(e,t,r,n,a){if(!t)return[];let s=[],l=t.get("nullable"),u=t.get("required"),p=t.get("maximum"),h=t.get("minimum"),d=t.get("type"),m=t.get("format"),g=t.get("maxLength"),v=t.get("minLength"),y=t.get("uniqueItems"),b=t.get("maxItems"),w=t.get("minItems"),E=t.get("pattern");const x=r||!0===u,_=null!=e;if(l&&null===e||!d||!(x||_&&"array"===d||!(!x&&!_)))return[];let A="string"===d&&e,k="array"===d&&o()(e)&&e.length,C="array"===d&&D().List.isList(e)&&e.count();const O=[A,k,C,"array"===d&&"string"==typeof e&&e,"file"===d&&e instanceof X.Z.File,"boolean"===d&&(e||!1===e),"number"===d&&(e||0===e),"integer"===d&&(e||0===e),"object"===d&&"object"==typeof e&&null!==e,"object"===d&&"string"==typeof e&&e],j=S()(O).call(O,(e=>!!e));if(x&&!j&&!n)return s.push("Required field is not provided"),s;if("object"===d&&(null===a||"application/json"===a)){let r=e;if("string"==typeof e)try{r=JSON.parse(e)}catch(e){return s.push("Parameter string value must be valid JSON"),s}var I;if(t&&t.has("required")&&ge(u.isList)&&u.isList()&&c()(u).call(u,(e=>{void 0===r[e]&&s.push({propKey:e,error:"Required property not found"})})),t&&t.has("properties"))c()(I=t.get("properties")).call(I,((e,t)=>{const o=ke(r[t],e,!1,n,a);s.push(...i()(o).call(o,(e=>({propKey:t,error:e}))))}))}if(E){let t=((e,t)=>{if(!new RegExp(t).test(e))return"Value must follow pattern "+t})(e,E);t&&s.push(t)}if(w&&"array"===d){let t=((e,t)=>{if(!e&&t>=1||e&&e.length{if(e&&e.length>t)return`Array must not contain more then ${t} item${1===t?"":"s"}`})(e,b);t&&s.push({needRemove:!0,error:t})}if(y&&"array"===d){let t=((e,t)=>{if(e&&("true"===t||!0===t)){const t=(0,M.fromJS)(e),r=t.toSet();if(e.length>r.size){let e=(0,M.Set)();if(c()(t).call(t,((r,n)=>{f()(t).call(t,(e=>ge(e.equals)?e.equals(r):e===r)).size>1&&(e=e.add(n))})),0!==e.size)return i()(e).call(e,(e=>({index:e,error:"No duplicates allowed."}))).toArray()}}})(e,y);t&&s.push(...t)}if(g||0===g){let t=((e,t)=>{if(e.length>t)return`Value must be no longer than ${t} character${1!==t?"s":""}`})(e,g);t&&s.push(t)}if(v){let 
t=((e,t)=>{if(e.length{if(e>t)return`Value must be less than ${t}`})(e,p);t&&s.push(t)}if(h||0===h){let t=((e,t)=>{if(e{if(isNaN(Date.parse(e)))return"Value must be a DateTime"})(e):"uuid"===m?(e=>{if(e=e.toString().toLowerCase(),!/^[{(]?[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}[)}]?$/.test(e))return"Value must be a Guid"})(e):(e=>{if(e&&"string"!=typeof e)return"Value must be a string"})(e),!t)return s;s.push(t)}else if("boolean"===d){let t=(e=>{if("true"!==e&&"false"!==e&&!0!==e&&!1!==e)return"Value must be a boolean"})(e);if(!t)return s;s.push(t)}else if("number"===d){let t=(e=>{if(!/^-?\d+(\.?\d+)?$/.test(e))return"Value must be a number"})(e);if(!t)return s;s.push(t)}else if("integer"===d){let t=(e=>{if(!/^-?\d+$/.test(e))return"Value must be an integer"})(e);if(!t)return s;s.push(t)}else if("array"===d){if(!k&&!C)return s;e&&c()(e).call(e,((e,r)=>{const o=ke(e,t.get("items"),!1,n,a);s.push(...i()(o).call(o,(e=>({index:r,error:e}))))}))}else if("file"===d){let t=(e=>{if(e&&!(e instanceof X.Z.File))return"Value must be a file"})(e);if(!t)return s;s.push(t)}return s}const Ce=function(e,t){let{isOAS3:r=!1,bypassRequiredCheck:n=!1}=arguments.length>2&&void 0!==arguments[2]?arguments[2]:{},o=e.get("required"),{schema:a,parameterContentMediaType:i}=(0,re.Z)(e,{isOAS3:r});return ke(t,a,o,n,i)},Oe=(e,t,r)=>{if(e&&(!e.xml||!e.xml.name)){if(e.xml=e.xml||{},!e.$$ref)return e.type||e.items||e.properties||e.additionalProperties?'\n\x3c!-- XML example cannot be generated; root element name is undefined --\x3e':null;{let t=e.$$ref.match(/\S*\/(\S+)$/);e.xml.name=t[1]}}return(0,Q.memoizedCreateXMLExample)(e,t,r)},je=[{when:/json/,shouldStringifyTypes:["string"]}],Ie=["object"],Ne=(e,t,r,n)=>{const o=(0,Q.memoizedSampleFromSchema)(e,t,n),a=typeof o,i=g()(je).call(je,((e,t)=>t.when.test(r)?[...e,...t.shouldStringifyTypes]:e),Ie);return J()(i,(e=>e===a))?k()(o,null,2):o},Te=(e,t,r,n)=>{const o=Ne(e,t,r,n);let a;try{a=se.ZP.dump(se.ZP.load(o),{lineWidth:-1},{schema:se.A8}),"\n"===a[a.length-1]&&(a=x()(a).call(a,0,a.length-1))}catch(e){return console.error(e),"error: could not generate yaml example"}return a.replace(/\t/g," ")},Pe=function(e){let t=arguments.length>1&&void 0!==arguments[1]?arguments[1]:"",r=arguments.length>2&&void 0!==arguments[2]?arguments[2]:{},n=arguments.length>3&&void 0!==arguments[3]?arguments[3]:void 0;return e&&ge(e.toJS)&&(e=e.toJS()),n&&ge(n.toJS)&&(n=n.toJS()),/xml/.test(t)?Oe(e,r,n):/(yaml|yml)/.test(t)?Te(e,r,t,n):Ne(e,r,t,n)},Re=()=>{let e={},t=X.Z.location.search;if(!t)return{};if(""!=t){let r=t.substr(1).split("&");for(let t in r)Object.prototype.hasOwnProperty.call(r,t)&&(t=r[t].split("="),e[decodeURIComponent(t[0])]=t[1]&&decodeURIComponent(t[1])||"")}return e},Me=e=>{let t;return t=e instanceof le?e:le.from(e.toString(),"utf-8"),t.toString("base64")},De={operationsSorter:{alpha:(e,t)=>e.get("path").localeCompare(t.get("path")),method:(e,t)=>e.get("method").localeCompare(t.get("method"))},tagsSorter:{alpha:(e,t)=>e.localeCompare(t)}},Le=e=>{let t=[];for(let r in e){let n=e[r];void 0!==n&&""!==n&&t.push([r,"=",encodeURIComponent(n).replace(/%20/g,"+")].join(""))}return t.join("&")},Be=(e,t,r)=>!!W()(r,(r=>G()(e[r],t[r])));function Fe(e){return"string"!=typeof e||""===e?"":(0,L.N)(e)}function ze(e){return!(!e||O()(e).call(e,"localhost")>=0||O()(e).call(e,"127.0.0.1")>=0||"none"===e)}function Ue(e){if(!D().OrderedMap.isOrderedMap(e))return null;if(!e.size)return null;const 
t=I()(e).call(e,((e,t)=>T()(t).call(t,"2")&&d()(e.get("content")||{}).length>0)),r=e.get("default")||D().OrderedMap(),n=(r.get("content")||D().OrderedMap()).keySeq().toJS().length?r:null;return t||n}const qe=e=>"string"==typeof e||e instanceof String?R()(e).call(e).replace(/\s/g,"%20"):"",Ve=e=>te()(qe(e).replace(/%20/g,"_")),$e=e=>f()(e).call(e,((e,t)=>/^x-/.test(t))),We=e=>f()(e).call(e,((e,t)=>/^pattern|maxLength|minLength|maximum|minimum/.test(t)));function He(e,t){var r;let n=arguments.length>2&&void 0!==arguments[2]?arguments[2]:()=>!0;if("object"!=typeof e||o()(e)||null===e||!t)return e;const a=y()({},e);return c()(r=d()(a)).call(r,(e=>{e===t&&n(a[e],e)?delete a[e]:a[e]=He(a[e],t,n)})),a}function Je(e){if("string"==typeof e)return e;if(e&&e.toJS&&(e=e.toJS()),"object"==typeof e&&null!==e)try{return k()(e,null,2)}catch(t){return String(e)}return null==e?"":e.toString()}function Ke(e){return"number"==typeof e?e.toString():e}function Ge(e){let{returnAll:t=!1,allowHashes:r=!0}=arguments.length>1&&void 0!==arguments[1]?arguments[1]:{};if(!D().Map.isMap(e))throw new Error("paramToIdentifier: received a non-Im.Map parameter as input");const n=e.get("name"),o=e.get("in");let a=[];return e&&e.hashCode&&o&&n&&r&&a.push(`${o}.${n}.hash-${e.hashCode()}`),o&&n&&a.push(`${o}.${n}`),a.push(n),t?a:a[0]||""}function Ze(e,t){var r;const n=Ge(e,{returnAll:!0});return f()(r=i()(n).call(n,(e=>t[e]))).call(r,(e=>void 0!==e))[0]}function Ye(){return Xe(oe()(32).toString("base64"))}function Qe(e){return Xe(ie()("sha256").update(e).digest("base64"))}function Xe(e){return e.replace(/\+/g,"-").replace(/\//g,"_").replace(/=/g,"")}const et=e=>!e||!(!ce(e)||!e.isEmpty())},2518:(e,t,r)=>{"use strict";function n(e){return function(e){try{return!!JSON.parse(e)}catch(e){return null}}(e)?"json":null}r.d(t,{O:()=>n})},27504:(e,t,r)=>{"use strict";r.d(t,{Z:()=>n});const n=function(){var e={location:{},history:{},open:()=>{},close:()=>{},File:function(){}};if("undefined"==typeof window)return e;try{e=window;for(var t of["File","Blob","FormData"])t in window&&(e[t]=window[t])}catch(e){console.error(e)}return e}()},19069:(e,t,r)=>{"use strict";r.d(t,{Z:()=>c});var n=r(14418),o=r.n(n),a=r(58118),i=r.n(a),s=r(43393),l=r.n(s);const u=l().Set.of("type","format","items","default","maximum","exclusiveMaximum","minimum","exclusiveMinimum","maxLength","minLength","pattern","maxItems","minItems","uniqueItems","enum","multipleOf");function c(e){let{isOAS3:t}=arguments.length>1&&void 0!==arguments[1]?arguments[1]:{};if(!l().Map.isMap(e))return{schema:l().Map(),parameterContentMediaType:null};if(!t)return"body"===e.get("in")?{schema:e.get("schema",l().Map()),parameterContentMediaType:null}:{schema:o()(e).call(e,((e,t)=>i()(u).call(u,t))),parameterContentMediaType:null};if(e.get("content")){const t=e.get("content",l().Map({})).keySeq().first();return{schema:e.getIn(["content",t,"schema"],l().Map()),parameterContentMediaType:t}}return{schema:e.get("schema",l().Map()),parameterContentMediaType:null}}},60314:(e,t,r)=>{"use strict";r.d(t,{Z:()=>x});var n=r(58309),o=r.n(n),a=r(2250),i=r.n(a),s=r(25110),l=r.n(s),u=r(8712),c=r.n(u),p=r(51679),f=r.n(p),h=r(12373),d=r.n(h),m=r(18492),g=r.n(m),v=r(88306),y=r.n(v);const b=e=>t=>o()(e)&&o()(t)&&e.length===t.length&&i()(e).call(e,((e,r)=>e===t[r])),w=function(){for(var e=arguments.length,t=new Array(e),r=0;r1&&void 0!==arguments[1]?arguments[1]:w;const{Cache:r}=y();y().Cache=E;const n=y()(e,t);return y().Cache=r,n}},79742:(e,t)=>{"use strict";t.byteLength=function(e){var 
t=l(e),r=t[0],n=t[1];return 3*(r+n)/4-n},t.toByteArray=function(e){var t,r,a=l(e),i=a[0],s=a[1],u=new o(function(e,t,r){return 3*(t+r)/4-r}(0,i,s)),c=0,p=s>0?i-4:i;for(r=0;r>16&255,u[c++]=t>>8&255,u[c++]=255&t;2===s&&(t=n[e.charCodeAt(r)]<<2|n[e.charCodeAt(r+1)]>>4,u[c++]=255&t);1===s&&(t=n[e.charCodeAt(r)]<<10|n[e.charCodeAt(r+1)]<<4|n[e.charCodeAt(r+2)]>>2,u[c++]=t>>8&255,u[c++]=255&t);return u},t.fromByteArray=function(e){for(var t,n=e.length,o=n%3,a=[],i=16383,s=0,l=n-o;sl?l:s+i));1===o?(t=e[n-1],a.push(r[t>>2]+r[t<<4&63]+"==")):2===o&&(t=(e[n-2]<<8)+e[n-1],a.push(r[t>>10]+r[t>>4&63]+r[t<<2&63]+"="));return a.join("")};for(var r=[],n=[],o="undefined"!=typeof Uint8Array?Uint8Array:Array,a="ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/",i=0,s=a.length;i0)throw new Error("Invalid string. Length must be a multiple of 4");var r=e.indexOf("=");return-1===r&&(r=t),[r,r===t?0:4-r%4]}function u(e,t,n){for(var o,a,i=[],s=t;s>18&63]+r[a>>12&63]+r[a>>6&63]+r[63&a]);return i.join("")}n["-".charCodeAt(0)]=62,n["_".charCodeAt(0)]=63},48764:(e,t,r)=>{"use strict";const n=r(79742),o=r(80645),a="function"==typeof Symbol&&"function"==typeof Symbol.for?Symbol.for("nodejs.util.inspect.custom"):null;t.Buffer=l,t.SlowBuffer=function(e){+e!=e&&(e=0);return l.alloc(+e)},t.INSPECT_MAX_BYTES=50;const i=2147483647;function s(e){if(e>i)throw new RangeError('The value "'+e+'" is invalid for option "size"');const t=new Uint8Array(e);return Object.setPrototypeOf(t,l.prototype),t}function l(e,t,r){if("number"==typeof e){if("string"==typeof t)throw new TypeError('The "string" argument must be of type string. Received type number');return p(e)}return u(e,t,r)}function u(e,t,r){if("string"==typeof e)return function(e,t){"string"==typeof t&&""!==t||(t="utf8");if(!l.isEncoding(t))throw new TypeError("Unknown encoding: "+t);const r=0|m(e,t);let n=s(r);const o=n.write(e,t);o!==r&&(n=n.slice(0,o));return n}(e,t);if(ArrayBuffer.isView(e))return function(e){if(G(e,Uint8Array)){const t=new Uint8Array(e);return h(t.buffer,t.byteOffset,t.byteLength)}return f(e)}(e);if(null==e)throw new TypeError("The first argument must be one of type string, Buffer, ArrayBuffer, Array, or Array-like Object. Received type "+typeof e);if(G(e,ArrayBuffer)||e&&G(e.buffer,ArrayBuffer))return h(e,t,r);if("undefined"!=typeof SharedArrayBuffer&&(G(e,SharedArrayBuffer)||e&&G(e.buffer,SharedArrayBuffer)))return h(e,t,r);if("number"==typeof e)throw new TypeError('The "value" argument must not be of type number. Received type number');const n=e.valueOf&&e.valueOf();if(null!=n&&n!==e)return l.from(n,t,r);const o=function(e){if(l.isBuffer(e)){const t=0|d(e.length),r=s(t);return 0===r.length||e.copy(r,0,0,t),r}if(void 0!==e.length)return"number"!=typeof e.length||Z(e.length)?s(0):f(e);if("Buffer"===e.type&&Array.isArray(e.data))return f(e.data)}(e);if(o)return o;if("undefined"!=typeof Symbol&&null!=Symbol.toPrimitive&&"function"==typeof e[Symbol.toPrimitive])return l.from(e[Symbol.toPrimitive]("string"),t,r);throw new TypeError("The first argument must be one of type string, Buffer, ArrayBuffer, Array, or Array-like Object. 
Received type "+typeof e)}function c(e){if("number"!=typeof e)throw new TypeError('"size" argument must be of type number');if(e<0)throw new RangeError('The value "'+e+'" is invalid for option "size"')}function p(e){return c(e),s(e<0?0:0|d(e))}function f(e){const t=e.length<0?0:0|d(e.length),r=s(t);for(let n=0;n=i)throw new RangeError("Attempt to allocate Buffer larger than maximum size: 0x"+i.toString(16)+" bytes");return 0|e}function m(e,t){if(l.isBuffer(e))return e.length;if(ArrayBuffer.isView(e)||G(e,ArrayBuffer))return e.byteLength;if("string"!=typeof e)throw new TypeError('The "string" argument must be one of type string, Buffer, or ArrayBuffer. Received type '+typeof e);const r=e.length,n=arguments.length>2&&!0===arguments[2];if(!n&&0===r)return 0;let o=!1;for(;;)switch(t){case"ascii":case"latin1":case"binary":return r;case"utf8":case"utf-8":return H(e).length;case"ucs2":case"ucs-2":case"utf16le":case"utf-16le":return 2*r;case"hex":return r>>>1;case"base64":return J(e).length;default:if(o)return n?-1:H(e).length;t=(""+t).toLowerCase(),o=!0}}function g(e,t,r){let n=!1;if((void 0===t||t<0)&&(t=0),t>this.length)return"";if((void 0===r||r>this.length)&&(r=this.length),r<=0)return"";if((r>>>=0)<=(t>>>=0))return"";for(e||(e="utf8");;)switch(e){case"hex":return I(this,t,r);case"utf8":case"utf-8":return k(this,t,r);case"ascii":return O(this,t,r);case"latin1":case"binary":return j(this,t,r);case"base64":return A(this,t,r);case"ucs2":case"ucs-2":case"utf16le":case"utf-16le":return N(this,t,r);default:if(n)throw new TypeError("Unknown encoding: "+e);e=(e+"").toLowerCase(),n=!0}}function v(e,t,r){const n=e[t];e[t]=e[r],e[r]=n}function y(e,t,r,n,o){if(0===e.length)return-1;if("string"==typeof r?(n=r,r=0):r>2147483647?r=2147483647:r<-2147483648&&(r=-2147483648),Z(r=+r)&&(r=o?0:e.length-1),r<0&&(r=e.length+r),r>=e.length){if(o)return-1;r=e.length-1}else if(r<0){if(!o)return-1;r=0}if("string"==typeof t&&(t=l.from(t,n)),l.isBuffer(t))return 0===t.length?-1:b(e,t,r,n,o);if("number"==typeof t)return t&=255,"function"==typeof Uint8Array.prototype.indexOf?o?Uint8Array.prototype.indexOf.call(e,t,r):Uint8Array.prototype.lastIndexOf.call(e,t,r):b(e,[t],r,n,o);throw new TypeError("val must be string, number or Buffer")}function b(e,t,r,n,o){let a,i=1,s=e.length,l=t.length;if(void 0!==n&&("ucs2"===(n=String(n).toLowerCase())||"ucs-2"===n||"utf16le"===n||"utf-16le"===n)){if(e.length<2||t.length<2)return-1;i=2,s/=2,l/=2,r/=2}function u(e,t){return 1===i?e[t]:e.readUInt16BE(t*i)}if(o){let n=-1;for(a=r;as&&(r=s-l),a=r;a>=0;a--){let r=!0;for(let n=0;no&&(n=o):n=o;const a=t.length;let i;for(n>a/2&&(n=a/2),i=0;i>8,o=r%256,a.push(o),a.push(n);return a}(t,e.length-r),e,r,n)}function A(e,t,r){return 0===t&&r===e.length?n.fromByteArray(e):n.fromByteArray(e.slice(t,r))}function k(e,t,r){r=Math.min(e.length,r);const n=[];let o=t;for(;o239?4:t>223?3:t>191?2:1;if(o+i<=r){let r,n,s,l;switch(i){case 1:t<128&&(a=t);break;case 2:r=e[o+1],128==(192&r)&&(l=(31&t)<<6|63&r,l>127&&(a=l));break;case 3:r=e[o+1],n=e[o+2],128==(192&r)&&128==(192&n)&&(l=(15&t)<<12|(63&r)<<6|63&n,l>2047&&(l<55296||l>57343)&&(a=l));break;case 4:r=e[o+1],n=e[o+2],s=e[o+3],128==(192&r)&&128==(192&n)&&128==(192&s)&&(l=(15&t)<<18|(63&r)<<12|(63&n)<<6|63&s,l>65535&&l<1114112&&(a=l))}}null===a?(a=65533,i=1):a>65535&&(a-=65536,n.push(a>>>10&1023|55296),a=56320|1023&a),n.push(a),o+=i}return function(e){const t=e.length;if(t<=C)return String.fromCharCode.apply(String,e);let 
[vendored swagger-ui assets: minified third-party JavaScript bundle (buffer polyfill, core-js, copy-to-clipboard and related webpack modules) added under static/swagger-ui/; bundle contents and the following whitespace-only hunks omitted]