Merge pull request #17 from henk717/united

Merge united
This commit is contained in:
Llama
2023-02-02 09:42:53 -08:00
committed by GitHub
8 changed files with 58 additions and 12 deletions

View File

@@ -563,6 +563,7 @@ from flask_socketio import SocketIO, emit, join_room, leave_room
from flask_socketio import emit as _emit
from flask_session import Session
from flask_compress import Compress
from flask_cors import CORS
from werkzeug.exceptions import HTTPException, NotFound, InternalServerError
import secrets
app = Flask(__name__, root_path=os.getcwd())
@@ -6491,10 +6492,14 @@ def applyoutputformatting(txt, no_sentence_trimming=False, no_single_line=False)
if len(txt) == 0:
return txt
# Workaround for endoftext appearing in models that need it, you can supposedly do this directly with the tokenizer but it keeps showing up
# So for now since we only have two known end of text tokens and only one model that wishes to have its generation stopped this is easier
# If you see this and you wish to do a universal implementation for this, feel free just make sure to test it on all platforms - Henk
txt = txt.replace("<|endoftext|>", "")
# Handle <|endoftext|> for models that want this
# In the future it would be nice if we could extend this to all EOS models.
# However, since EOS detection may have unforeseen consequences for now we hardcode <|endoftext|> until more can be tested
# - Henk
eotregex = re.compile(r'<\|endoftext\|>[.|\n|\W|\w]*')
txt = eotregex.sub('', txt)
# Cleanup stray </s>
txt = txt.replace("</s>", "")
# Use standard quotes and apostrophes
@@ -8696,9 +8701,9 @@ def UI_2_redo(data):
@logger.catch
def UI_2_retry(data):
koboldai_vars.actions.clear_unused_options()
if len(koboldai_vars.actions.get_current_options_no_edits()) == 0:
ignore = koboldai_vars.actions.pop(keep=False)
ignore = koboldai_vars.actions.pop(keep=True)
koboldai_vars.actions.clear_unused_options()
koboldai_vars.lua_koboldbridge.feedback = None
koboldai_vars.recentrng = koboldai_vars.recentrngm = None
actionsubmit("", actionmode=koboldai_vars.actionmode)
@@ -10326,7 +10331,9 @@ def UI_2_action_image():
filename,
mimetype="image/jpeg")
else:
return None
return send_file(
"static/blank.png",
mimetype="image/png")
#==================================================================#
# display messages if they have never been sent before on this install
@@ -13253,6 +13260,8 @@ def run():
general_startup()
# Start flask & SocketIO
logger.init("Flask", status="Starting")
if koboldai_vars.host:
CORS(app)
Session(app)
logger.init_ok("Flask", status="OK")
logger.init("Webserver", status="Starting")

View File

@@ -27,6 +27,7 @@ dependencies:
- pip:
- flask-cloudflared
- flask-ngrok
- flask-cors
- lupa==1.10
- transformers==4.25.1
- huggingface_hub>=0.10.1

View File

@@ -26,6 +26,7 @@ dependencies:
- torch==1.12.1+rocm5.1.1
- flask-cloudflared
- flask-ngrok
- flask-cors
- lupa==1.10
- transformers==4.25.1
- huggingface_hub>=0.10.1

View File

@@ -1332,11 +1332,29 @@ class system_settings(settings):
logger.info("Starting Horde bridge")
bridge = importlib.import_module("KoboldAI-Horde-Bridge.bridge")
self._horde_pid = bridge.kai_bridge()
threading.Thread(target=self._horde_pid.bridge, args=(1, "0000000000", f"Automated Instance #{random.randint(-100000000, 100000000)}", 'http://127.0.0.1:{}'.format(self.port), "http://koboldai.net", [])).run()
try:
bridge_cd = importlib.import_module("KoboldAI-Horde-Bridge.clientData")
cluster_url = bridge_cd.cluster_url
kai_name = bridge_cd.kai_name
if kai_name == "My Awesome Instance":
kai_name = f"Automated Instance #{random.randint(-100000000, 100000000)}"
api_key = bridge_cd.api_key
priority_usernames = bridge_cd.priority_usernames
except:
cluster_url = "http://koboldai.net"
kai_name = f"Automated Instance #{random.randint(-100000000, 100000000)}"
api_key = "0000000000"
priority_usernames = []
# Always use the local URL & port
kai_url = f'http://127.0.0.1:{self.port}'
logger.info(f"Name: {kai_name} on {kai_url}")
threading.Thread(target=self._horde_pid.bridge, args=(1, api_key, kai_name, kai_url, cluster_url, priority_usernames)).run()
else:
if self._horde_pid is not None:
logger.info("Killing Horde bridge")
self._horde_pid.stop()
self._horde_pid = None
class KoboldStoryRegister(object):
def __init__(self, socketio, story_settings, koboldai_vars, tokenizer=None, sequence=[]):
@@ -2085,8 +2103,8 @@ class KoboldStoryRegister(object):
if self.story_settings.gen_audio and self.koboldai_vars.experimental_features:
for i in reversed([-1]+list(self.actions.keys())):
self.gen_audio(i, overwrite=False)
else:
print("{} and {}".format(self.story_settings.gen_audio, self.koboldai_vars.experimental_features))
#else:
# print("{} and {}".format(self.story_settings.gen_audio, self.koboldai_vars.experimental_features))
def set_picture(self, action_id, filename, prompt):
if action_id == -1:
@@ -2106,7 +2124,22 @@ class KoboldStoryRegister(object):
filename = os.path.join(self.koboldai_vars.save_paths.generated_images, self.actions[action_id]['picture_filename'])
prompt = self.actions[action_id]['picture_prompt']
else:
return None, None
#Let's find the last picture if there is one
found = False
for i in reversed(range(-1, action_id)):
if i in self.actions and 'picture_filename' in self.actions[i]:
filename = os.path.join(self.koboldai_vars.save_paths.generated_images, self.actions[i]['picture_filename'])
prompt = self.actions[i]['picture_prompt']
found = True
break
elif i == -1:
if self.story_settings.prompt_picture_filename == "":
return None, None
filename = os.path.join(self.koboldai_vars.save_paths.generated_images, self.story_settings.prompt_picture_filename)
prompt = self.story_settings.prompt_picture_prompt
found = True
if not found:
return None, None
if os.path.exists(filename):
return filename, prompt

View File

@@ -6,6 +6,7 @@ requests
torch >= 1.9, < 1.13
flask-cloudflared
flask-ngrok
flask-cors
eventlet
dnspython==2.2.1
lupa==1.10

View File

@@ -13,6 +13,7 @@ flask
Flask-SocketIO
flask-cloudflared >= 0.0.5
flask-ngrok
flask-cors
eventlet
dnspython==2.2.1
lupa==1.10

BIN
static/blank.png Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 148 B

View File

@@ -103,7 +103,7 @@
</span>
<!---Bottom Row---->
<span class="setting_item" style="height: 25px;">
<input autocomplete="off" id="var_sync_story_chatname" class="var_sync_story_chatname settings_select" onclick="sync_to_server(this);">
<input autocomplete="off" id="var_sync_story_chatname" class="var_sync_story_chatname settings_select" onchange="sync_to_server(this);">
</span>
<!---Slider Labels--->
<span class="setting_minlabel"><span style="top: -4px; position: relative;"></span></span>