Resolve merge conflict

Gnome Ann 2022-03-05 14:13:56 -05:00
commit 579e85820c
3 changed files with 38 additions and 32 deletions

File 1 of 3

@@ -1097,9 +1097,10 @@ if(not vars.use_colab_tpu and vars.model not in ["InferKit", "Colab", "OAI", "Re
     else:
         vars.lazy_load = False

-    # Temporary fix for XGLM positional embedding issues until
-    # https://github.com/huggingface/transformers/issues/15736
-    # is resolved
-    try:
-        from transformers.models.xglm.modeling_xglm import XGLMSinusoidalPositionalEmbedding
-    except ImportError:
+    # Some versions of transformers 4.17.0.dev0 are affected by
+    # https://github.com/huggingface/transformers/issues/15736
+    # This is a workaround for those versions of transformers.
+    if(transformers_version == "4.17.0.dev0"):
+        try:
+            from transformers.models.xglm.modeling_xglm import XGLMSinusoidalPositionalEmbedding
+        except ImportError:
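
Note: the new code gates the workaround on one exact transformers build instead of applying it unconditionally. A minimal standalone sketch of the same version-gating pattern (here the version string is read from transformers.__version__; the import is the one named in the hunk):

    import transformers

    # Apply the workaround only on the affected dev build; see
    # https://github.com/huggingface/transformers/issues/15736
    transformers_version = transformers.__version__
    if transformers_version == "4.17.0.dev0":
        try:
            from transformers.models.xglm.modeling_xglm import XGLMSinusoidalPositionalEmbedding
            # ...patch XGLMSinusoidalPositionalEmbedding here...
        except ImportError:
            pass  # class not present in this build; nothing to patch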
@@ -2877,7 +2878,7 @@ def actionback():
         vars.recentback = True
         remove_story_chunk(last_key + 1)
         #for the redo to not get out of whack, need to reset the max # in the actions sequence
-        vars.actions.set_next_id(vars.actions.get_last_key())
+        vars.actions.set_next_id(last_key)
     elif(len(vars.genseqs) == 0):
         emit('from_server', {'cmd': 'errmsg', 'data': "Cannot delete the prompt."})
     else:
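
Note: set_next_id(last_key) makes the next generated action reuse the id of the one just removed, so the redo metadata stored under last_key + 1 still lines up. A toy illustration (hypothetical ToyRegister, not the real KoboldStoryRegister):

    import collections

    class ToyRegister(collections.OrderedDict):
        # Minimal id counter mimicking set_next_id()/append semantics.
        def __init__(self):
            super().__init__()
            self.next_id = 0
        def set_next_id(self, x):
            self.next_id = x
        def append(self, text):
            self[self.next_id] = text
            self.next_id += 1

    r = ToyRegister()
    r.append("action 0")          # gets id 0
    r.append("action 1")          # gets id 1
    last_key = 1
    del r[last_key]               # the "back" action removes the last chunk
    r.set_next_id(last_key)       # without this, the next action would get id 2
    r.append("action 1, redone")  # reuses id 1, so metadata keyed by id still matches
    assert list(r) == [0, 1]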
@@ -2890,10 +2891,9 @@ def actionredo():
        genout = [{"generated_text": item['Text']} for item in vars.actions_metadata[vars.actions.get_last_key()+1]['Alternative Text'] if (item["Previous Selection"]==True)]
        if len(genout) > 0:
            genout = genout + [{"generated_text": item['Text']} for item in vars.actions_metadata[vars.actions.get_last_key()+1]['Alternative Text'] if (item["Pinned"]==True) and (item["Previous Selection"]==False)]
            if len(genout) == 1:
                vars.actions_metadata[vars.actions.get_last_key()+1]['Alternative Text'] = [item for item in vars.actions_metadata[vars.actions.get_last_key()+1]['Alternative Text'] if (item["Previous Selection"]!=True)]
-                genresult(genout[0]['generated_text'], flash=True)
+                genresult(genout[0]['generated_text'], flash=True, ignore_formatting=True)
            else:
                # Store sequences in memory until selection is made
                vars.genseqs = genout
@@ -2901,6 +2901,7 @@ def actionredo():
                # Send sequences to UI for selection
                genout = [[item['Text'], "redo"] for item in vars.actions_metadata[vars.actions.get_last_key()+1]['Alternative Text'] if (item["Previous Selection"]==True)]
                emit('from_server', {'cmd': 'genseqs', 'data': genout}, broadcast=True)
    else:
        emit('from_server', {'cmd': 'popuperror', 'data': "There's nothing to undo"}, broadcast=True)
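
Note: the redo path rebuilds its candidate list from the stored alternatives, preferring the previously selected text and then any pinned entries. The same filter on made-up sample data (the "Text"/"Pinned"/"Previous Selection" keys are the ones used in the hunks above; the values are invented):

    # Sample entry in the shape of vars.actions_metadata[...]['Alternative Text'].
    alternatives = [
        {"Text": "The knight fled.",   "Pinned": False, "Previous Selection": True},
        {"Text": "The knight fought.", "Pinned": True,  "Previous Selection": False},
        {"Text": "The knight slept.",  "Pinned": False, "Previous Selection": False},
    ]

    genout = [{"generated_text": item["Text"]}
              for item in alternatives if item["Previous Selection"] == True]
    if len(genout) > 0:
        genout += [{"generated_text": item["Text"]}
                   for item in alternatives
                   if item["Pinned"] == True and item["Previous Selection"] == False]

    # One candidate is redone directly; several are sent to the UI chooser.
    print([g["generated_text"] for g in genout])
    # ['The knight fled.', 'The knight fought.']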
@@ -3281,11 +3282,12 @@ def generate(txt, minimum, maximum, found_entries=None):
 #==================================================================#
 # Deal with a single return sequence from generate()
 #==================================================================#
-def genresult(genout, flash=True):
+def genresult(genout, flash=True, ignore_formatting=False):
     if not vars.quiet:
         print("{0}{1}{2}".format(colors.CYAN, genout, colors.END))

     # Format output before continuing
-    genout = applyoutputformatting(genout)
+    if not ignore_formatting:
+        genout = applyoutputformatting(genout)

     vars.lua_koboldbridge.feedback = genout
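
Note: ignore_formatting=True lets actionredo() replay text that already went through output formatting when it was first selected, instead of formatting it twice. A minimal sketch of the opt-out pattern (the strip() below is a stand-in; the real rules live in applyoutputformatting()):

    def apply_output_formatting_stub(text):
        # Stand-in for applyoutputformatting(); imagine trimming,
        # sentence clipping, etc.
        return text.strip()

    def genresult_sketch(genout, flash=True, ignore_formatting=False):
        if not ignore_formatting:
            genout = apply_output_formatting_stub(genout)
        return genout

    assert genresult_sketch("  fresh output  ") == "fresh output"
    assert genresult_sketch("  already formatted  ", ignore_formatting=True) == "  already formatted  "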
@@ -4708,8 +4710,6 @@ def loadRequest(loadpath, filename=None):
     emit('from_server', {'cmd': 'hidegenseqs', 'data': ''}, broadcast=True)
     print("{0}Story loaded from {1}!{2}".format(colors.GREEN, filename, colors.END))
-    print([k for k in vars.actions])
-    print([k for k in vars.actions_metadata])
     send_debug()

 #==================================================================#
@@ -5120,7 +5120,7 @@ def send_debug():
        except:
            pass
        try:
-            debug_info = "{}Actions: {}\n".format(debug_info, vars.actions.get_last_key())
+            debug_info = "{}Actions: {}\n".format(debug_info, [k for k in vars.actions])
        except:
            pass
        try:
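
Note: listing every key instead of only the last one makes holes left by deleted or undone actions visible in the debug output. For example (toy OrderedDict standing in for vars.actions):

    import collections

    # After action 2 was deleted, the last key alone hides the gap.
    actions = collections.OrderedDict([(0, "a"), (1, "b"), (3, "d")])

    print("Actions: {}".format(next(reversed(actions))))  # Actions: 3
    print("Actions: {}".format([k for k in actions]))     # Actions: [0, 1, 3]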

File 2 of 3

@@ -48,7 +48,7 @@ function launch
    exit 0
  else
    cd /content/KoboldAI-Client
-    echo "Launching KoboldAI with the following options : python3 aiserver.py$model$kmpath$configname$ngrok --remote --override_delete --override_rename"
+    echo "Launching KoboldAI with the following options : python3 aiserver.py$model$kmpath$configname$ngrok --colab"
    python3 aiserver.py$model$kmpath$configname$ngrok --colab
    exit
  fi
@@ -151,7 +151,7 @@ if [ "$init" != "skip" ]; then
    ln -s /content/drive/MyDrive/KoboldAI/userscripts/ userscripts
    ln -s /content/drive/MyDrive/KoboldAI/models/ models
-    if [ "$model" == " --model TPUMeshTransformerGPTJ" ]; then
+    if [ -n "${COLAB_TPU_ADDR+set}" ]; then
      pip install -r requirements_mtj.txt
    else
      pip install -r requirements.txt
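
Note: keying the install on the TPU runtime rather than on one hard-coded --model string means any TPU-backed model gets the Mesh Transformer JAX requirements; ${COLAB_TPU_ADDR+set} expands to "set" only when Colab has provisioned a TPU. The equivalent probe in Python (assumption: this mirrors how the server's vars.use_colab_tpu flag is derived):

    import os

    # On a Colab TPU instance, COLAB_TPU_ADDR holds the TPU's gRPC address.
    use_colab_tpu = os.environ.get("COLAB_TPU_ADDR", "") != ""
    requirements = "requirements_mtj.txt" if use_colab_tpu else "requirements.txt"
    print(requirements)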

File 3 of 3

@@ -19,9 +19,15 @@ class KoboldStoryRegister(collections.OrderedDict):
         return self.popitem()[1]

     def get_first_key(self) -> int:
-        return next(iter(self))
+        if len(self) == 0:
+            return -1
+        else:
+            return next(iter(self))

     def get_last_key(self) -> int:
-        return next(reversed(self))
+        if len(self) == 0:
+            return -1
+        else:
+            return next(reversed(self))

     def __getitem__(self, k: int) -> str:
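
Note: previously, calling either getter on an empty register raised StopIteration from next(); returning -1 gives callers a sentinel, and it composes with the rest of the commit, since on an empty story get_last_key() + 1 == 0 points at the first action's metadata. A condensed, runnable restatement of the patched methods:

    import collections

    class KoboldStoryRegisterSketch(collections.OrderedDict):
        # Same logic as the hunk above, collapsed to conditional expressions.
        def get_first_key(self) -> int:
            return -1 if len(self) == 0 else next(iter(self))
        def get_last_key(self) -> int:
            return -1 if len(self) == 0 else next(reversed(self))

    s = KoboldStoryRegisterSketch()
    assert s.get_last_key() == -1      # empty register: sentinel, not StopIteration
    assert s.get_last_key() + 1 == 0   # so metadata lookups start at key 0
    s[0] = "first action"
    assert s.get_first_key() == 0 and s.get_last_key() == 0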