diff --git a/aiserver.py b/aiserver.py
index 49a9ae16..00075ccb 100644
--- a/aiserver.py
+++ b/aiserver.py
@@ -563,6 +563,7 @@ from flask_socketio import SocketIO, emit, join_room, leave_room
from flask_socketio import emit as _emit
from flask_session import Session
from flask_compress import Compress
+from flask_cors import CORS
from werkzeug.exceptions import HTTPException, NotFound, InternalServerError
import secrets
app = Flask(__name__, root_path=os.getcwd())
@@ -6491,10 +6492,14 @@ def applyoutputformatting(txt, no_sentence_trimming=False, no_single_line=False)
if len(txt) == 0:
return txt
- # Workaround for endoftext appearing in models that need it, you can supposedly do this directly with the tokenizer but it keeps showing up
- # So for now since we only have two known end of text tokens and only one model that wishes to have its generation stopped this is easier
- # If you see this and you wish to do a universal implementation for this, feel free just make sure to test it on all platforms - Henk
- txt = txt.replace("<|endoftext|>", "")
+ # Handle <|endoftext|> for models that want this
+ # In the future it would be nice if we could extend this to all EOS models.
+ # However, since EOS detection may have unforeseen consequences, for now we hardcode <|endoftext|> until more can be tested
+ # - Henk
+ eotregex = re.compile(r'<\|endoftext\|>[.|\n|\W|\w]*')
+ txt = eotregex.sub('', txt)
+
+ # Cleanup stray </s>
txt = txt.replace("</s>", "")
# Use standard quotes and apostrophes
@@ -8696,9 +8701,9 @@ def UI_2_redo(data):
@logger.catch
def UI_2_retry(data):
- koboldai_vars.actions.clear_unused_options()
if len(koboldai_vars.actions.get_current_options_no_edits()) == 0:
- ignore = koboldai_vars.actions.pop(keep=False)
+ ignore = koboldai_vars.actions.pop(keep=True)
+ koboldai_vars.actions.clear_unused_options()
koboldai_vars.lua_koboldbridge.feedback = None
koboldai_vars.recentrng = koboldai_vars.recentrngm = None
actionsubmit("", actionmode=koboldai_vars.actionmode)
@@ -10326,7 +10331,9 @@ def UI_2_action_image():
filename,
mimetype="image/jpeg")
else:
- return None
+ return send_file(
+ "static/blank.png",
+ mimetype="image/png")
#==================================================================#
# display messages if they have never been sent before on this install
@@ -13253,6 +13260,8 @@ def run():
general_startup()
# Start flask & SocketIO
logger.init("Flask", status="Starting")
+ if koboldai_vars.host:
+ CORS(app)
Session(app)
logger.init_ok("Flask", status="OK")
logger.init("Webserver", status="Starting")
diff --git a/environments/huggingface.yml b/environments/huggingface.yml
index 28c6c916..e8010f88 100644
--- a/environments/huggingface.yml
+++ b/environments/huggingface.yml
@@ -27,6 +27,7 @@ dependencies:
- pip:
- flask-cloudflared
- flask-ngrok
+ - flask-cors
- lupa==1.10
- transformers==4.25.1
- huggingface_hub>=0.10.1
diff --git a/environments/rocm.yml b/environments/rocm.yml
index de71a87c..55129cbd 100644
--- a/environments/rocm.yml
+++ b/environments/rocm.yml
@@ -26,6 +26,7 @@ dependencies:
- torch==1.12.1+rocm5.1.1
- flask-cloudflared
- flask-ngrok
+ - flask-cors
- lupa==1.10
- transformers==4.25.1
- huggingface_hub>=0.10.1
diff --git a/koboldai_settings.py b/koboldai_settings.py
index ae3f2f2b..071c7670 100644
--- a/koboldai_settings.py
+++ b/koboldai_settings.py
@@ -1332,11 +1332,29 @@ class system_settings(settings):
logger.info("Starting Horde bridge")
bridge = importlib.import_module("KoboldAI-Horde-Bridge.bridge")
self._horde_pid = bridge.kai_bridge()
- threading.Thread(target=self._horde_pid.bridge, args=(1, "0000000000", f"Automated Instance #{random.randint(-100000000, 100000000)}", 'http://127.0.0.1:{}'.format(self.port), "http://koboldai.net", [])).run()
+ try:
+ bridge_cd = importlib.import_module("KoboldAI-Horde-Bridge.clientData")
+ cluster_url = bridge_cd.cluster_url
+ kai_name = bridge_cd.kai_name
+ if kai_name == "My Awesome Instance":
+ kai_name = f"Automated Instance #{random.randint(-100000000, 100000000)}"
+ api_key = bridge_cd.api_key
+ priority_usernames = bridge_cd.priority_usernames
+ except:
+ cluster_url = "http://koboldai.net"
+ kai_name = f"Automated Instance #{random.randint(-100000000, 100000000)}"
+ api_key = "0000000000"
+ priority_usernames = []
+ # Always use the local URL & port
+ kai_url = f'http://127.0.0.1:{self.port}'
+
+ logger.info(f"Name: {kai_name} on {kai_url}")
+ threading.Thread(target=self._horde_pid.bridge, args=(1, api_key, kai_name, kai_url, cluster_url, priority_usernames)).run()
else:
if self._horde_pid is not None:
logger.info("Killing Horde bridge")
self._horde_pid.stop()
+ self._horde_pid = None
class KoboldStoryRegister(object):
def __init__(self, socketio, story_settings, koboldai_vars, tokenizer=None, sequence=[]):
@@ -2085,8 +2103,8 @@ class KoboldStoryRegister(object):
if self.story_settings.gen_audio and self.koboldai_vars.experimental_features:
for i in reversed([-1]+list(self.actions.keys())):
self.gen_audio(i, overwrite=False)
- else:
- print("{} and {}".format(self.story_settings.gen_audio, self.koboldai_vars.experimental_features))
+ #else:
+ # print("{} and {}".format(self.story_settings.gen_audio, self.koboldai_vars.experimental_features))
def set_picture(self, action_id, filename, prompt):
if action_id == -1:
@@ -2106,7 +2124,22 @@ class KoboldStoryRegister(object):
filename = os.path.join(self.koboldai_vars.save_paths.generated_images, self.actions[action_id]['picture_filename'])
prompt = self.actions[action_id]['picture_prompt']
else:
- return None, None
+ #Let's find the last picture if there is one
+ found = False
+ for i in reversed(range(-1, action_id)):
+ if i in self.actions and 'picture_filename' in self.actions[i]:
+ filename = os.path.join(self.koboldai_vars.save_paths.generated_images, self.actions[i]['picture_filename'])
+ prompt = self.actions[i]['picture_prompt']
+ found = True
+ break
+ elif i == -1:
+ if self.story_settings.prompt_picture_filename == "":
+ return None, None
+ filename = os.path.join(self.koboldai_vars.save_paths.generated_images, self.story_settings.prompt_picture_filename)
+ prompt = self.story_settings.prompt_picture_prompt
+ found = True
+ if not found:
+ return None, None
if os.path.exists(filename):
return filename, prompt
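
For context (illustrative only, not part of the patch), the picture lookup added above can be reduced to the walk-back sketched here; the real code additionally falls back to the prompt picture stored in story_settings when it reaches action -1.

    def find_last_picture(actions, action_id):
        """Return the filename/prompt of the nearest earlier action that has a picture."""
        for i in reversed(range(-1, action_id)):
            if i in actions and 'picture_filename' in actions[i]:
                return actions[i]['picture_filename'], actions[i]['picture_prompt']
        return None, None

    actions = {0: {'picture_filename': 'a.jpg', 'picture_prompt': 'a castle'}}
    print(find_last_picture(actions, 5))  # -> ('a.jpg', 'a castle')
    print(find_last_picture({}, 5))       # -> (None, None)
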
diff --git a/requirements.txt b/requirements.txt
index 175c3b21..f12faf0d 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -6,6 +6,7 @@ requests
torch >= 1.9, < 1.13
flask-cloudflared
flask-ngrok
+flask-cors
eventlet
dnspython==2.2.1
lupa==1.10
diff --git a/requirements_mtj.txt b/requirements_mtj.txt
index a24bb423..998895ad 100644
--- a/requirements_mtj.txt
+++ b/requirements_mtj.txt
@@ -13,6 +13,7 @@ flask
Flask-SocketIO
flask-cloudflared >= 0.0.5
flask-ngrok
+flask-cors
eventlet
dnspython==2.2.1
lupa==1.10
diff --git a/static/blank.png b/static/blank.png
new file mode 100644
index 00000000..994c3ba3
Binary files /dev/null and b/static/blank.png differ
diff --git a/templates/settings flyout.html b/templates/settings flyout.html
index b5307461..fbc0f955 100644
--- a/templates/settings flyout.html
+++ b/templates/settings flyout.html
@@ -103,7 +103,7 @@
-
+