Merge commit 'refs/pull/378/head' of https://github.com/ebolam/KoboldAI into united

ebolam committed 2023-04-21 08:27:36 -04:00
5 changed files with 67 additions and 27 deletions

View File

@@ -1494,7 +1494,7 @@ def general_startup(override_args=None):
parser.add_argument("--noaimenu", action='store_true', help="Disables the ability to select the AI") parser.add_argument("--noaimenu", action='store_true', help="Disables the ability to select the AI")
parser.add_argument("--ngrok", action='store_true', help="Optimizes KoboldAI for Remote Play using Ngrok") parser.add_argument("--ngrok", action='store_true', help="Optimizes KoboldAI for Remote Play using Ngrok")
parser.add_argument("--localtunnel", action='store_true', help="Optimizes KoboldAI for Remote Play using Localtunnel") parser.add_argument("--localtunnel", action='store_true', help="Optimizes KoboldAI for Remote Play using Localtunnel")
parser.add_argument("--host", type=str, default="", nargs="?", const="", help="Optimizes KoboldAI for LAN Remote Play without using a proxy service. --host opens to all LAN. Enable IP whitelisting by using a comma separated IP list. Supports individual IPs, ranges, and subnets --host 127.0.0.1,127.0.0.2,127.0.0.3,192.168.1.0-192.168.1.255,10.0.0.0/24,etc") parser.add_argument("--host", type=str, default="Disabled", nargs="?", const="", help="Optimizes KoboldAI for LAN Remote Play without using a proxy service. --host opens to all LAN. Enable IP whitelisting by using a comma separated IP list. Supports individual IPs, ranges, and subnets --host 127.0.0.1,127.0.0.2,127.0.0.3,192.168.1.0-192.168.1.255,10.0.0.0/24,etc")
parser.add_argument("--port", type=int, help="Specify the port on which the application will be joinable") parser.add_argument("--port", type=int, help="Specify the port on which the application will be joinable")
parser.add_argument("--aria2_port", type=int, help="Specify the port on which aria2's RPC interface will be open if aria2 is installed (defaults to 6799)") parser.add_argument("--aria2_port", type=int, help="Specify the port on which aria2's RPC interface will be open if aria2 is installed (defaults to 6799)")
parser.add_argument("--model", help="Specify the Model Type to skip the Menu") parser.add_argument("--model", help="Specify the Model Type to skip the Menu")
@@ -1625,17 +1625,14 @@ def general_startup(override_args=None):
 if args.localtunnel:
 koboldai_vars.host = True;
-if args.host == "":
+if args.host != "Disabled":
+koboldai_vars.host = True
+args.unblock = True
+if args.host:
 # This means --host option was submitted without an argument
 # Enable all LAN IPs (0.0.0.0/0)
-koboldai_vars.host = True
-args.unblock = True
 if args.host != "":
 # Check if --host option was submitted with an argument
 # Parse the supplied IP(s) and add them to the allowed IPs list
-koboldai_vars.host = True
-args.unblock = True
 enable_whitelist = True
 for ip_str in args.host.split(","):
 if "/" in ip_str:
@@ -1652,6 +1649,7 @@ def general_startup(override_args=None):
print(f"Allowed IPs: {allowed_ips}") print(f"Allowed IPs: {allowed_ips}")
if args.cpu: if args.cpu:
koboldai_vars.use_colab_tpu = False koboldai_vars.use_colab_tpu = False
@@ -3504,6 +3502,7 @@ def is_allowed_ip():
 client_ip = request.remote_addr
 if request.path != '/genre_data.json':
 print("Connection Attempt: " + request.remote_addr)
+if allowed_ips:
 print("Allowed?: ", request.remote_addr in allowed_ips)
 return client_ip in allowed_ips
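Note: the added "if allowed_ips:" guard only prints the whitelist check when a whitelist is actually configured. For context, a hedged sketch (illustrative names, not the actual wiring in aiserver.py) of how a per-request IP filter like is_allowed_ip() is typically attached in Flask:

    from flask import Flask, abort, request

    app = Flask(__name__)
    allowed_ips = {"127.0.0.1", "192.168.1.10"}  # example allow-list; an empty set would disable filtering

    @app.before_request
    def enforce_whitelist():
        # Mirror the pattern above: only consult the list when whitelisting is active.
        if allowed_ips and request.remote_addr not in allowed_ips:
            abort(403)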
@@ -4203,6 +4202,7 @@ def execute_outmod():
 @socketio.on('connect')
 def do_connect():
 print("Connection Attempt: " + request.remote_addr)
+if allowed_ips:
 print("Allowed?: ", request.remote_addr in allowed_ips)
 if request.args.get("rely") == "true":
 return
@@ -7475,11 +7475,11 @@ def loadRequest(loadpath, filename=None):
 start_time = time.time()
 if(isinstance(loadpath, str)):
 #Original UI only sends the story name and assumes it's always a .json file... here we check to see if it's a directory to load that way
-if not os.path.exists(loadpath):
+if not isinstance(loadpath, dict) and not os.path.exists(loadpath):
 if os.path.exists(loadpath.replace(".json", "")):
 loadpath = loadpath.replace(".json", "")
-if os.path.isdir(loadpath):
+if not isinstance(loadpath, dict) and os.path.isdir(loadpath):
 if not valid_v3_story(loadpath):
 raise RuntimeError(f"Tried to load {loadpath}, a non-save directory.")
 koboldai_vars.update_story_path_structure(loadpath)
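Note: the extra isinstance checks above let loadRequest() accept either a path string or an already-parsed story dict. A hedged, simplified sketch of that dispatch (function name and structure invented for illustration):

    import os

    def resolve_load_target(loadpath):
        if isinstance(loadpath, dict):
            return loadpath  # already-parsed story data; filesystem checks do not apply
        if not os.path.exists(loadpath) and os.path.exists(loadpath.replace(".json", "")):
            # The original UI assumes a .json story; fall back to the bare directory name
            loadpath = loadpath.replace(".json", "")
        return loadpath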

View File

@@ -32,7 +32,7 @@ dependencies:
 - transformers==4.28.0
 - huggingface_hub==0.12.1
 - safetensors
-- accelerate
+- accelerate==0.18.0
 - git+https://github.com/VE-FORBRYDERNE/mkultra
 - ansi2html
 - flask_compress

View File

@@ -881,6 +881,38 @@ gensettingstf = [
"classname": "system", "classname": "system",
"name": "seed", "name": "seed",
"extra_classes": "var_sync_alt_system_seed_specified", "extra_classes": "var_sync_alt_system_seed_specified",
"ui_level": 2
},
{
"uitype": "text",
"unit": "text",
"label": "comregex_ai_string",
"id": "comregex_ai_string",
"min": 0,
"max": 1,
"step": 1,
"default": 1,
"tooltip": "Pattern for matching comments to remove them before sending them to the AI.",
"menu_path": "Settings",
"sub_path": "Other",
"classname": "system",
"name": "comregex_ai_string",
"ui_level": 2
},
{
"uitype": "text",
"unit": "text",
"label": "comregex_ui_string",
"id": "comregex_ui_string",
"min": 0,
"max": 1,
"step": 1,
"default": 1,
"tooltip": "Pattern for matching comments in the editor.",
"menu_path": "Settings",
"sub_path": "Other",
"classname": "system",
"name": "comregex_ui_string",
"ui_level": 2 "ui_level": 2
}, },
{ {

View File

@@ -1203,14 +1203,14 @@ class undefined_settings(settings):
 class system_settings(settings):
 local_only_variables = ['lua_state', 'lua_logname', 'lua_koboldbridge', 'lua_kobold',
-'lua_koboldcore', 'regex_sl', 'acregex_ai', 'acregex_ui', 'comregex_ai',
-'comregex_ui', 'sp', '_horde_pid', 'inference_config', 'image_pipeline',
+'lua_koboldcore', 'regex_sl', 'acregex_ai', 'acregex_ui', 'comregex_ai', 'comregex_ui',
+'sp', '_horde_pid', 'inference_config', 'image_pipeline',
 'summarizer', 'summary_tokenizer', 'tts_model', 'rng_states']
 no_save_variables = ['lua_state', 'lua_logname', 'lua_koboldbridge', 'lua_kobold',
 'lua_koboldcore', 'sp', 'sp_length', '_horde_pid', 'horde_share', 'aibusy',
 'serverstarted', 'inference_config', 'image_pipeline', 'summarizer',
 'summary_tokenizer', 'use_colab_tpu', 'noai', 'disable_set_aibusy', 'cloudflare_link', 'tts_model',
-'generating_image', 'bit_8_available', 'host', 'hascuda', 'usegpu', 'rng_states', 'git_repository', 'git_branch']
+'generating_image', 'bit_8_available', 'host', 'hascuda', 'usegpu', 'rng_states', 'comregex_ai', 'comregex_ui', 'git_repository', 'git_branch']
 settings_name = "system"
 def __init__(self, socketio, koboldai_var):
 self._socketio = socketio
@@ -1252,8 +1252,10 @@ class system_settings(settings):
 self.regex_sl = re.compile(r'\n*(?<=.) *\n(.|\n)*') # Pattern for limiting the output to a single line
 self.acregex_ai = re.compile(r'\n* *>(.|\n)*') # Pattern for matching adventure actions from the AI so we can remove them
 self.acregex_ui = re.compile(r'^ *(&gt;.*)$', re.MULTILINE) # Pattern for matching actions in the HTML-escaped story so we can apply colouring, etc (make sure to encase part to format in parentheses)
-self.comregex_ai = re.compile(r'(?:\n<\|(?:.|\n)*?\|>(?=\n|$))|(?:<\|(?:.|\n)*?\|>\n?)') # Pattern for matching comments to remove them before sending them to the AI
-self.comregex_ui = re.compile(r'(&lt;\|(?:.|\n)*?\|&gt;)') # Pattern for matching comments in the editor
+self.comregex_ai_string = '(?:\n\[<\|(?:.|\n)*?\|>\](?=\n|$))|(?:\[<\|(?:.|\n)*?\|>\]\n?)' # Pattern for matching comments to remove them before sending them to the AI
+self.comregex_ui_string = '(\[&lt;\|(?:.|\n)*?\|&gt;\])' # Pattern for matching comments in the editor
+self.comregex_ai = re.compile(self.comregex_ai_string) # Pattern for matching comments to remove them before sending them to the AI
+self.comregex_ui = re.compile(self.comregex_ui_string) # Pattern for matching comments in the editor
 self.host = False
 self.flaskwebgui = False
 self.quiet = False # If set will suppress any story text from being printed to the console (will only be seen on the client web page)
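Note: a hedged demo of the new patterns above. Comments are now delimited by [<| ... |>] in story text, and comregex_ai strips them (together with a neighbouring newline where possible) before the text is sent to the model; the sample text is made up for illustration:

    import re

    # Same pattern as comregex_ai_string above, written here as a raw string
    comregex_ai = re.compile(r'(?:\n\[<\|(?:.|\n)*?\|>\](?=\n|$))|(?:\[<\|(?:.|\n)*?\|>\]\n?)')

    story = "The knight advanced.\n[<|remember: the dragon is asleep|>]\nThe gate creaked open."
    print(comregex_ai.sub("", story))
    # The knight advanced.
    # The gate creaked open.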
@@ -1331,6 +1333,12 @@ class system_settings(settings):
 self._socketio.emit('from_server', {'cmd': 'spstatitems', 'data': {self.spfilename: self.spmeta} if self.allowsp and len(self.spfilename) else {}}, namespace=None, broadcast=True, room="UI_1")
 super().__setattr__("sp_changed", False)
+if name == 'comregex_ai_string':
+self.comregex_ai = re.compile(self.comregex_ai_string)
+if name == 'comregex_ui_string':
+self.comregex_ui = re.compile(self.comregex_ui_string)
 if name == 'keep_img_gen_in_memory' and value == False:
 self.image_pipeline = None
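Note: the two new branches above recompile the pattern whenever the editable *_string setting is assigned. A minimal, hedged sketch of that recompile-on-assignment idea in isolation (class and attribute names chosen for the example):

    import re

    class RegexHolder:
        def __setattr__(self, name, value):
            super().__setattr__(name, value)
            if name == "comregex_ai_string":
                # Refresh the compiled form so later .sub()/.search() calls use the new pattern
                super().__setattr__("comregex_ai", re.compile(value))

    holder = RegexHolder()
    holder.comregex_ai_string = r"\[<\|(?:.|\n)*?\|>\]"
    print(holder.comregex_ai.sub("", "keep [<|drop|>] keep"))  # -> "keep  keep"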

View File

@@ -15,7 +15,7 @@ markdown
 bleach==4.1.0
 sentencepiece
 protobuf
-accelerate
+accelerate==0.18.0
 flask-session==0.4.0
 marshmallow>=3.13
 apispec-webframeworks