Mirror of https://github.com/KoboldAI/KoboldAI-Client.git
Initial Horde Mode Integration (Auto-Disabled on Colab)
@@ -1345,14 +1345,14 @@ def get_model_info(model, directory=""):
         filename = "settings/{}.breakmodel".format(model.replace("/", "_"))
         if path.exists(filename):
             with open(filename, "r") as file:
-                data = file.read().split("\n")[:2]
+                data = [x for x in file.read().split("\n")[:2] if x != '']
                 if len(data) < 2:
                     data.append("0")
                 break_values, disk_blocks = data
                 break_values = break_values.split(",")
         else:
             break_values = [layer_count]
-        break_values = [int(x) for x in break_values]
+        break_values = [int(x) for x in break_values if x != '']
         break_values += [0] * (gpu_count - len(break_values))
         emit('from_server', {'cmd': 'selected_model_info', 'key_value': key_value, 'key':key,
                              'gpu':gpu, 'layer_count':layer_count, 'breakmodel':breakmodel,
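Note on the breakmodel parsing change above: a settings/*.breakmodel file normally ends with a newline, so a naive split("\n") yields trailing empty strings that later break int() conversion. A self-contained sketch of the hardened parsing, with made-up sample values:

raw = "12,10,2\n0\n"                                  # hypothetical file contents: per-GPU layers, then disk layers
data = [x for x in raw.split("\n")[:2] if x != '']    # drop empty strings left by trailing newlines
if len(data) < 2:
    data.append("0")                                  # default: no layers on disk
break_values, disk_blocks = data
break_values = [int(x) for x in break_values.split(",") if x != '']
print(break_values, int(disk_blocks))                 # -> [12, 10, 2] 0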
@@ -6716,7 +6716,7 @@ def new_ui_index():
     if 'story' in session:
         if session['story'] not in koboldai_vars.story_list():
             session['story'] = 'default'
-    return render_template('index_new.html', settings=gensettings.gensettingstf if koboldai_vars.model != "InferKit" else gensettings.gensettingsik )
+    return render_template('index_new.html', settings=gensettings.gensettingstf, on_colab=koboldai_vars.on_colab )
 
 def ui2_connect():
     #Send all variables to client
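For context on the on_colab keyword added to render_template above: any keyword argument passed to Flask's render_template becomes a Jinja variable, which is what the {% if not on_colab %} block added to index_new.html (last hunk below) tests. A minimal, hypothetical route showing the mechanism, using render_template_string so the sketch runs standalone:

from flask import Flask, render_template_string

app = Flask(__name__)

TEMPLATE = """
{% if not on_colab %}
<input type="checkbox" class="var_sync_system_horde_share"> Share with Horde
{% endif %}
"""

@app.route("/")
def index():
    on_colab = False  # in KoboldAI this comes from koboldai_vars.on_colab
    return render_template_string(TEMPLATE, on_colab=on_colab)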
@@ -10336,7 +10336,7 @@ if __name__ == "__main__":
     patch_transformers()
     # Start Flask/SocketIO (Blocking, so this must be last method!)
     port = args.port if "port" in args and args.port is not None else 5000
-    koboldai_settings.port = port
+    koboldai_vars.port = port
 
     if(koboldai_vars.host):
         if(args.localtunnel):
@@ -1,8 +1,9 @@
-import os, re, time, threading, json, pickle, base64, copy, tqdm, datetime
+import os, re, time, threading, json, pickle, base64, copy, tqdm, datetime, sys
 from io import BytesIO
 from flask import has_request_context
 from flask_socketio import SocketIO
 from collections import OrderedDict
+import multiprocessing
 
 serverstarted = False
 queue = None
@@ -669,8 +670,8 @@ class user_settings(settings):
             process_variable_changes(self.socketio, self.__class__.__name__.replace("_settings", ""), name, value, old_value)
 
 class system_settings(settings):
-    local_only_variables = ['socketio', 'lua_state', 'lua_logname', 'lua_koboldbridge', 'lua_kobold', 'lua_koboldcore', 'regex_sl', 'acregex_ai', 'acregex_ui', 'comregex_ai', 'comregex_ui', 'sp']
-    no_save_variables = ['socketio', 'lua_state', 'lua_logname', 'lua_koboldbridge', 'lua_kobold', 'lua_koboldcore', 'sp']
+    local_only_variables = ['socketio', 'lua_state', 'lua_logname', 'lua_koboldbridge', 'lua_kobold', 'lua_koboldcore', 'regex_sl', 'acregex_ai', 'acregex_ui', 'comregex_ai', 'comregex_ui', 'sp', '_horde_pid']
+    no_save_variables = ['socketio', 'lua_state', 'lua_logname', 'lua_koboldbridge', 'lua_kobold', 'lua_koboldcore', 'sp', '_horde_pid']
     settings_name = "system"
     def __init__(self, socketio):
         self.socketio = socketio
@@ -730,8 +731,17 @@ class system_settings(settings):
         self.seed = None # The current RNG seed (as an int), or None if unknown
         self.alt_gen = False # Use the calc_ai_text method for generating text to go to the AI
         self.theme_list = [".".join(f.split(".")[:-1]) for f in os.listdir("./themes") if os.path.isfile(os.path.join("./themes", f))]
+        self.port = 5000
+        self.on_colab = 'google.colab' in sys.modules
+        self.horde_share = False
+        self._horde_pid = None
 
 
+    def start_horde_bridge(port):
+        while True:
+            print("Running horde")
+            time.sleep(10)
+
     def __setattr__(self, name, value):
         new_variable = name not in self.__dict__
         old_value = getattr(self, name, None)
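A note on the on_colab check introduced above: the google.colab package is available (and typically already imported by the notebook runtime) only inside Google Colab, so testing sys.modules detects that environment without importing anything extra; the commit uses it to auto-disable Horde sharing there. A tiny sketch of the check:

import sys

on_colab = 'google.colab' in sys.modules
horde_allowed = not on_colab
print("on_colab={}, horde sharing allowed={}".format(on_colab, horde_allowed))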
@@ -747,6 +757,23 @@ class system_settings(settings):
         if name == "aibusy" and value == False:
             koboldai_vars.abort = False
 
+        if name == 'horde_share':
+            if self.on_colab == False:
+                if os.path.exists("./KoboldAI-Horde"):
+                    if value == True:
+                        import subprocess
+                        if os.path.exists('./KoboldAI-Horde/venv/scripts/python.exe'):
+                            self._horde_pid = subprocess.Popen(['./KoboldAI-Horde/venv/scripts/python.exe', './KoboldAI-Horde/bridge.py',
+                                                                '--username', 'new_ui_user', '--password', '3589yhusd*YT$^', '--kai_name', 'Test New UI',
+                                                                '--kai_url', 'http://127.0.0.1:{}'.format(self.port), '--cluster_url', "http://koboldai.net"])
+                        else:
+                            self._horde_pid = subprocess.Popen(['./KoboldAI-Horde/venv/bin/python', './KoboldAI-Horde/bridge.py',
+                                                                '--username', 'new_ui_user', '--password', '3589yhusd*YT$^', '--kai_name', 'Test New UI',
+                                                                '--kai_url', 'http://127.0.0.1:{}'.format(self.port), '--cluster_url', "http://koboldai.net"])
+                    else:
+                        print("kill bridge")
+                        self._horde_pid.terminate()
+
 
 class KoboldStoryRegister(object):
     def __init__(self, socketio, story_settings, koboldai_vars, tokenizer=None, sequence=[]):
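A trimmed sketch of the bridge lifecycle implemented in __setattr__ above: pick the KoboldAI-Horde venv interpreter (Windows scripts/ layout vs. POSIX bin/ layout), launch bridge.py as a child process, and terminate() it when the toggle is switched off. Paths and flags mirror the diff; the helper names are illustrative and the --password argument is omitted here:

import os
import subprocess

def start_bridge(port):
    # Prefer the Windows venv layout, fall back to the POSIX one.
    python = './KoboldAI-Horde/venv/scripts/python.exe'
    if not os.path.exists(python):
        python = './KoboldAI-Horde/venv/bin/python'
    return subprocess.Popen([python, './KoboldAI-Horde/bridge.py',
                             '--username', 'new_ui_user', '--kai_name', 'Test New UI',
                             '--kai_url', 'http://127.0.0.1:{}'.format(port),
                             '--cluster_url', 'http://koboldai.net'])

def stop_bridge(proc):
    # Mirrors self._horde_pid.terminate() in the diff.
    if proc is not None:
        proc.terminate()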
@@ -53,6 +53,10 @@
                         <span> Load Model</span>
                     </button>
                     <select class="var_sync_model_selected_preset settings_select presets" onchange='sync_to_server(this)'><option>Preset</option></select>
+                    {% if not on_colab %}
+                    <br/><input type=checkbox data-size="mini" data-onstyle="success" data-toggle="toggle" class='var_sync_system_horde_share' onchange='sync_to_server(this)'> Share with Horde
+                    <span class="helpicon material-icons-outlined" title="Shares your GPU with other KoboldAI users. Does not share data/stories.">help_icon</span>
+                    {% endif %}
                 </div>
             </div>
             <div id="Story_Info">