Mirror of https://github.com/KoboldAI/KoboldAI-Client.git (synced 2025-06-05 21:59:24 +02:00)
renamed vars to koboldai_vars
aiserver.py (3586 changed lines; file diff suppressed because it is too large)
bridge.lua (46 changed lines)
@@ -380,7 +380,7 @@ return function(_python, _bridged)
 
 ---@return boolean
 function KoboldWorldInfoEntry:is_valid()
-    return _python.as_attrgetter(bridged.vars.worldinfo_u).get(rawget(self, "_uid")) ~= nil
+    return _python.as_attrgetter(bridged.koboldai_vars.worldinfo_u).get(rawget(self, "_uid")) ~= nil
 end
 
 ---@param submission? string
@@ -475,7 +475,7 @@ return function(_python, _bridged)
     if not check_validity(self) or type(u) ~= "number" then
         return
     end
-    local query = _python.as_attrgetter(bridged.vars.worldinfo_u).get(u)
+    local query = _python.as_attrgetter(bridged.koboldai_vars.worldinfo_u).get(u)
     if query == nil or (rawget(self, "_name") == "KoboldWorldInfoFolder" and self.uid ~= _python.as_attrgetter(query).get("folder")) then
         return
     end
@@ -522,7 +522,7 @@ return function(_python, _bridged)
 
 ---@return boolean
 function KoboldWorldInfoFolder:is_valid()
-    return _python.as_attrgetter(bridged.vars.wifolders_d).get(rawget(self, "_uid")) ~= nil
+    return _python.as_attrgetter(bridged.koboldai_vars.wifolders_d).get(rawget(self, "_uid")) ~= nil
 end
 
 ---@param t KoboldWorldInfoFolder
@@ -531,7 +531,7 @@ return function(_python, _bridged)
     if not check_validity(t) then
         return 0
     end
-    return math.tointeger(_python.builtins.len(_python.as_attrgetter(bridged.vars.wifolders_u).get(t.uid))) - 1
+    return math.tointeger(_python.builtins.len(_python.as_attrgetter(bridged.koboldai_vars.wifolders_u).get(t.uid))) - 1
 end
 
 KoboldWorldInfoFolder_mt._kobold_next = KoboldWorldInfoEntry_mt._kobold_next
@@ -548,7 +548,7 @@ return function(_python, _bridged)
     elseif rawget(t, "_name") == "KoboldWorldInfoFolder" and k == "name" then
         return bridged.folder_get_attr(t.uid, k)
     elseif type(k) == "number" then
-        local query = rawget(t, "_name") == "KoboldWorldInfoFolder" and _python.as_attrgetter(bridged.vars.wifolders_u).get(t.uid) or bridged.vars.worldinfo_i
+        local query = rawget(t, "_name") == "KoboldWorldInfoFolder" and _python.as_attrgetter(bridged.koboldai_vars.wifolders_u).get(t.uid) or bridged.koboldai_vars.worldinfo_i
         k = math.tointeger(k)
         if k == nil or k < 1 or k > #t then
             return
@@ -599,7 +599,7 @@ return function(_python, _bridged)
     if not check_validity(self) or type(u) ~= "number" then
         return
     end
-    local query = _python.as_attrgetter(bridged.vars.wifolders_d).get(u)
+    local query = _python.as_attrgetter(bridged.koboldai_vars.wifolders_d).get(u)
     if query == nil then
         return
     end
@@ -619,7 +619,7 @@ return function(_python, _bridged)
     if not check_validity(t) then
         return 0
     end
-    return _python.builtins.len(bridged.vars.wifolders_l)
+    return _python.builtins.len(bridged.koboldai_vars.wifolders_l)
 end
 
 KoboldWorldInfoFolderSelector_mt._kobold_next = KoboldWorldInfoEntry_mt._kobold_next
@@ -633,7 +633,7 @@ return function(_python, _bridged)
         return
     end
     local folder = deepcopy(KoboldWorldInfoFolder)
-    rawset(folder, "_uid", math.tointeger(bridged.vars.wifolders_l[k-1]))
+    rawset(folder, "_uid", math.tointeger(bridged.koboldai_vars.wifolders_l[k-1]))
     return folder
 end
 
@@ -672,7 +672,7 @@ return function(_python, _bridged)
     if not check_validity(t) then
         return 0
     end
-    return math.tointeger(_python.builtins.len(bridged.vars.worldinfo)) - math.tointeger(_python.builtins.len(bridged.vars.wifolders_l)) - 1
+    return math.tointeger(_python.builtins.len(bridged.koboldai_vars.worldinfo)) - math.tointeger(_python.builtins.len(bridged.koboldai_vars.wifolders_l)) - 1
 end
 
 KoboldWorldInfo_mt._kobold_next = KoboldWorldInfoEntry_mt._kobold_next
@@ -725,12 +725,12 @@ return function(_python, _bridged)
     end
     if k == "content" then
         if rawget(t, "_num") == 0 then
-            if bridged.vars.gamestarted then
-                local prompt = koboldbridge.userstate == "genmod" and bridged.vars._prompt or bridged.vars.prompt
+            if bridged.koboldai_vars.gamestarted then
+                local prompt = koboldbridge.userstate == "genmod" and bridged.koboldai_vars._prompt or bridged.koboldai_vars.prompt
                 return prompt
             end
         end
-        local actions = koboldbridge.userstate == "genmod" and bridged.vars._actions or bridged.vars.actions
+        local actions = koboldbridge.userstate == "genmod" and bridged.koboldai_vars._actions or bridged.koboldai_vars.actions
         return _python.as_attrgetter(actions).get(math.tointeger(rawget(t, "_num")) - 1)
     end
 end
@@ -752,7 +752,7 @@ return function(_python, _bridged)
         error("Attempted to set the prompt chunk's content to the empty string; this is not allowed")
         return
     end
-    local actions = koboldbridge.userstate == "genmod" and bridged.vars._actions or bridged.vars.actions
+    local actions = koboldbridge.userstate == "genmod" and bridged.koboldai_vars._actions or bridged.koboldai_vars.actions
     if _k ~= 0 and _python.as_attrgetter(actions).get(_k-1) == nil then
         return
     end
@@ -777,11 +777,11 @@ return function(_python, _bridged)
 
 ---@return fun(): KoboldStoryChunk, table, nil
 function KoboldStory:forward_iter()
-    local actions = koboldbridge.userstate == "genmod" and bridged.vars._actions or bridged.vars.actions
+    local actions = koboldbridge.userstate == "genmod" and bridged.koboldai_vars._actions or bridged.koboldai_vars.actions
     local nxt, iterator = _python.iter(actions)
     local run_once = false
     local function f()
-        if not bridged.vars.gamestarted then
+        if not bridged.koboldai_vars.gamestarted then
             return
         end
         local chunk = deepcopy(KoboldStoryChunk)
@@ -805,11 +805,11 @@ return function(_python, _bridged)
 
 ---@return fun(): KoboldStoryChunk, table, nil
 function KoboldStory:reverse_iter()
-    local actions = koboldbridge.userstate == "genmod" and bridged.vars._actions or bridged.vars.actions
+    local actions = koboldbridge.userstate == "genmod" and bridged.koboldai_vars._actions or bridged.koboldai_vars.actions
     local nxt, iterator = _python.iter(_python.builtins.reversed(actions))
     local last_run = false
     local function f()
-        if not bridged.vars.gamestarted or last_run then
+        if not bridged.koboldai_vars.gamestarted or last_run then
             return
         end
         local chunk = deepcopy(KoboldStoryChunk)
@@ -1039,7 +1039,7 @@ return function(_python, _bridged)
 ---@param t KoboldLib
 ---@return string
 function KoboldLib_getters.submission(t)
-    return bridged.vars.submission
+    return bridged.koboldai_vars.submission
 end
 
 ---@param t KoboldLib
@@ -1051,11 +1051,11 @@ return function(_python, _bridged)
     elseif type(v) ~= "string" then
         error("`KoboldLib.submission` must be a string; you attempted to set it to a " .. type(v))
         return
-    elseif not bridged.vars.gamestarted and v == "" then
+    elseif not bridged.koboldai_vars.gamestarted and v == "" then
         error("`KoboldLib.submission` must not be set to the empty string when the story is empty")
         return
     end
-    bridged.vars.submission = v
+    bridged.koboldai_vars.submission = v
 end
 
 
@@ -1100,7 +1100,7 @@ return function(_python, _bridged)
 ---@param t KoboldLib
 ---@return string
 function KoboldLib_getters.model(t)
-    return bridged.vars.model
+    return bridged.koboldai_vars.model
 end
 
 ---@param t KoboldLib
@@ -1136,7 +1136,7 @@ return function(_python, _bridged)
 ---@param t KoboldLib
 ---@return string
 function KoboldLib_getters.custmodpth(t)
-    return bridged.vars.custmodpth
+    return bridged.koboldai_vars.custmodpth
 end
 
 ---@param t KoboldLib
@@ -2013,7 +2013,7 @@ return function(_python, _bridged)
     koboldbridge.userstate = "genmod"
     if koboldbridge.genmod ~= nil then
         local _generated = deepcopy(koboldbridge.generated)
-        if not bridged.vars.nogenmod then
+        if not bridged.koboldai_vars.nogenmod then
             r = koboldbridge.genmod()
         end
         setmetatable(koboldbridge.logits, nil)
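Every bridge.lua hunk above makes the same change: the Python state object that Lua scripts read through the bridge is now published as bridged.koboldai_vars instead of bridged.vars. For context, a minimal sketch (not the project's actual wiring) of how a Python object can be exposed to Lua via lupa and read back with the same as_attrgetter access pattern; StoryVars and its fields are illustrative stand-ins:

import lupa
from lupa import LuaRuntime

class StoryVars:  # hypothetical stand-in for the real koboldai_vars object
    def __init__(self):
        self.gamestarted = True
        self.worldinfo_u = {1: {"content": "example entry"}}

lua = LuaRuntime(unpack_returned_tuples=True)
bridged = lua.table(koboldai_vars=StoryVars())

# Same access pattern as KoboldWorldInfoEntry:is_valid() above: wrap the dict
# so Lua's "." reaches dict.get as an attribute rather than an item lookup.
is_valid = lua.eval('''
function(_python, bridged)
    return _python.as_attrgetter(bridged.koboldai_vars.worldinfo_u).get(1) ~= nil
end
''')
print(is_valid(lupa, bridged))  # True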
tpu_mtj_backend.py
@@ -563,7 +563,7 @@ class PenalizingCausalTransformer(CausalTransformer):
         compiling_callback()
         numseqs = numseqs_aux.shape[0]
         # These are the tokens that we don't want the AI to ever write
-        badwords = jnp.array(vars.badwordsids).squeeze()
+        badwords = jnp.array(koboldai_vars.badwordsids).squeeze()
         @hk.transform
         def generate_sample(context, ctx_length):
             # Give the initial context to the transformer
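The badwords array built above holds token ids the sampler must never emit. A hedged illustration of how such an array can be applied in JAX by masking those logits to -inf before sampling; the shapes and values are toy stand-ins, not the backend's actual sampling code:

import jax.numpy as jnp

badwordsids = [[3], [7], [11]]               # per-entry token ids, as in koboldai_vars.badwordsids
badwords = jnp.array(badwordsids).squeeze()  # -> shape (3,)

logits = jnp.zeros(16)                       # toy vocabulary of 16 tokens
logits = logits.at[badwords].set(-jnp.inf)   # those tokens can no longer win the sample
print(bool(jnp.isinf(logits[badwords]).all()))  # True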
@@ -1041,8 +1041,8 @@ def load_model(path: str, driver_version="tpu_driver0.1_dev20210607", hf_checkpo
     elif "eos_token_id" in kwargs:
         pad_token_id = kwargs["eos_token_id"]
 
-    if not hasattr(vars, "sampler_order") or not vars.sampler_order:
-        vars.sampler_order = utils.default_sampler_order.copy()
+    if not hasattr(koboldai_vars, "sampler_order") or not koboldai_vars.sampler_order:
+        koboldai_vars.sampler_order = utils.default_sampler_order.copy()
 
     default_params = {
         "compat": "j",
@@ -1061,7 +1061,7 @@ def load_model(path: str, driver_version="tpu_driver0.1_dev20210607", hf_checkpo
     }
     params = kwargs
 
-    if vars.model == "TPUMeshTransformerGPTNeoX":
+    if koboldai_vars.model == "TPUMeshTransformerGPTNeoX":
         default_params = {
             "compat": "neox",
             "layers": 44,
@@ -1080,9 +1080,9 @@ def load_model(path: str, driver_version="tpu_driver0.1_dev20210607", hf_checkpo
 
     # Try to convert HF config.json to MTJ config
     if hf_checkpoint:
-        spec_path = os.path.join("maps", vars.model_type + ".json")
+        spec_path = os.path.join("maps", koboldai_vars.model_type + ".json")
         if not os.path.isfile(spec_path):
-            raise NotImplementedError(f"Unsupported model type {repr(vars.model_type)}")
+            raise NotImplementedError(f"Unsupported model type {repr(koboldai_vars.model_type)}")
         with open(spec_path) as f:
             lazy_load_spec = json.load(f)
 
@@ -1133,7 +1133,7 @@ def load_model(path: str, driver_version="tpu_driver0.1_dev20210607", hf_checkpo
             params[param] = default_params[param]
 
     # Load tokenizer
-    if vars.model == "TPUMeshTransformerGPTNeoX":
+    if koboldai_vars.model == "TPUMeshTransformerGPTNeoX":
         tokenizer = Tokenizer.from_file(os.path.join(path, "20B_tokenizer.json"))
         def new_encode(old_encode):
             def encode(s, *args, **kwargs):
@@ -1181,19 +1181,19 @@ def load_model(path: str, driver_version="tpu_driver0.1_dev20210607", hf_checkpo
 
     global badwords
     # These are the tokens that we don't want the AI to ever write
-    badwords = jnp.array(vars.badwordsids).squeeze()
+    badwords = jnp.array(koboldai_vars.badwordsids).squeeze()
 
     if not path.endswith("/"):
         path += "/"
 
     network = PenalizingCausalTransformer(params, dematerialized=True)
 
-    if not hf_checkpoint and vars.model != "TPUMeshTransformerGPTNeoX":
+    if not hf_checkpoint and koboldai_vars.model != "TPUMeshTransformerGPTNeoX":
         network.state = read_ckpt_lowmem(network.state, path, devices.shape[1])
         #network.state = network.move_xmap(network.state, np.zeros(cores_per_replica))
         return
 
-    if vars.model == "TPUMeshTransformerGPTNeoX":
+    if koboldai_vars.model == "TPUMeshTransformerGPTNeoX":
         print("\n\n\nThis model has ", f"{hk.data_structures.tree_size(network.state['params']):,d}".replace(",", " "), " parameters.\n")
         read_neox_checkpoint(network.state, path, params)
         return
@@ -1339,58 +1339,58 @@ def load_model(path: str, driver_version="tpu_driver0.1_dev20210607", hf_checkpo
                 f.close()
             callback.nested = False
 
-    if os.path.isdir(vars.model.replace('/', '_')):
+    if os.path.isdir(koboldai_vars.model.replace('/', '_')):
         import shutil
-        shutil.move(vars.model.replace('/', '_'), "models/{}".format(vars.model.replace('/', '_')))
+        shutil.move(koboldai_vars.model.replace('/', '_'), "models/{}".format(koboldai_vars.model.replace('/', '_')))
     print("\n", flush=True)
     with torch_lazy_loader.use_lazy_torch_load(callback=callback, dematerialized_modules=True):
-        if(os.path.isdir(vars.custmodpth)):
+        if(os.path.isdir(koboldai_vars.custmodpth)):
             try:
-                tokenizer = AutoTokenizer.from_pretrained(vars.custmodpth, revision=vars.revision, cache_dir="cache")
+                tokenizer = AutoTokenizer.from_pretrained(koboldai_vars.custmodpth, revision=koboldai_vars.revision, cache_dir="cache")
             except Exception as e:
                 pass
             try:
-                tokenizer = AutoTokenizer.from_pretrained(vars.custmodpth, revision=vars.revision, cache_dir="cache", use_fast=False)
+                tokenizer = AutoTokenizer.from_pretrained(koboldai_vars.custmodpth, revision=koboldai_vars.revision, cache_dir="cache", use_fast=False)
             except Exception as e:
                 try:
-                    tokenizer = GPT2TokenizerFast.from_pretrained(vars.custmodpth, revision=vars.revision, cache_dir="cache")
+                    tokenizer = GPT2TokenizerFast.from_pretrained(koboldai_vars.custmodpth, revision=koboldai_vars.revision, cache_dir="cache")
                 except Exception as e:
-                    tokenizer = GPT2TokenizerFast.from_pretrained("gpt2", revision=vars.revision, cache_dir="cache")
+                    tokenizer = GPT2TokenizerFast.from_pretrained("gpt2", revision=koboldai_vars.revision, cache_dir="cache")
             try:
-                model = AutoModelForCausalLM.from_pretrained(vars.custmodpth, revision=vars.revision, cache_dir="cache")
+                model = AutoModelForCausalLM.from_pretrained(koboldai_vars.custmodpth, revision=koboldai_vars.revision, cache_dir="cache")
             except Exception as e:
-                model = GPTNeoForCausalLM.from_pretrained(vars.custmodpth, revision=vars.revision, cache_dir="cache")
-        elif(os.path.isdir("models/{}".format(vars.model.replace('/', '_')))):
+                model = GPTNeoForCausalLM.from_pretrained(koboldai_vars.custmodpth, revision=koboldai_vars.revision, cache_dir="cache")
+        elif(os.path.isdir("models/{}".format(koboldai_vars.model.replace('/', '_')))):
             try:
-                tokenizer = AutoTokenizer.from_pretrained("models/{}".format(vars.model.replace('/', '_')), revision=vars.revision, cache_dir="cache")
+                tokenizer = AutoTokenizer.from_pretrained("models/{}".format(koboldai_vars.model.replace('/', '_')), revision=koboldai_vars.revision, cache_dir="cache")
             except Exception as e:
                 pass
             try:
-                tokenizer = AutoTokenizer.from_pretrained("models/{}".format(vars.model.replace('/', '_')), revision=vars.revision, cache_dir="cache", use_fast=False)
+                tokenizer = AutoTokenizer.from_pretrained("models/{}".format(koboldai_vars.model.replace('/', '_')), revision=koboldai_vars.revision, cache_dir="cache", use_fast=False)
             except Exception as e:
                 try:
-                    tokenizer = GPT2TokenizerFast.from_pretrained("models/{}".format(vars.model.replace('/', '_')), revision=vars.revision, cache_dir="cache")
+                    tokenizer = GPT2TokenizerFast.from_pretrained("models/{}".format(koboldai_vars.model.replace('/', '_')), revision=koboldai_vars.revision, cache_dir="cache")
                 except Exception as e:
-                    tokenizer = GPT2TokenizerFast.from_pretrained("gpt2", revision=vars.revision, cache_dir="cache")
+                    tokenizer = GPT2TokenizerFast.from_pretrained("gpt2", revision=koboldai_vars.revision, cache_dir="cache")
             try:
-                model = AutoModelForCausalLM.from_pretrained("models/{}".format(vars.model.replace('/', '_')), revision=vars.revision, cache_dir="cache")
+                model = AutoModelForCausalLM.from_pretrained("models/{}".format(koboldai_vars.model.replace('/', '_')), revision=koboldai_vars.revision, cache_dir="cache")
             except Exception as e:
-                model = GPTNeoForCausalLM.from_pretrained("models/{}".format(vars.model.replace('/', '_')), revision=vars.revision, cache_dir="cache")
+                model = GPTNeoForCausalLM.from_pretrained("models/{}".format(koboldai_vars.model.replace('/', '_')), revision=koboldai_vars.revision, cache_dir="cache")
         else:
             try:
-                tokenizer = AutoTokenizer.from_pretrained(vars.model, revision=vars.revision, cache_dir="cache")
+                tokenizer = AutoTokenizer.from_pretrained(koboldai_vars.model, revision=koboldai_vars.revision, cache_dir="cache")
             except Exception as e:
                 pass
             try:
-                tokenizer = AutoTokenizer.from_pretrained(vars.model, revision=vars.revision, cache_dir="cache", use_fast=False)
+                tokenizer = AutoTokenizer.from_pretrained(koboldai_vars.model, revision=koboldai_vars.revision, cache_dir="cache", use_fast=False)
             except Exception as e:
                 try:
-                    tokenizer = GPT2TokenizerFast.from_pretrained(vars.model, revision=vars.revision, cache_dir="cache")
+                    tokenizer = GPT2TokenizerFast.from_pretrained(koboldai_vars.model, revision=koboldai_vars.revision, cache_dir="cache")
                 except Exception as e:
-                    tokenizer = GPT2TokenizerFast.from_pretrained("gpt2", revision=vars.revision, cache_dir="cache")
+                    tokenizer = GPT2TokenizerFast.from_pretrained("gpt2", revision=koboldai_vars.revision, cache_dir="cache")
             try:
-                model = AutoModelForCausalLM.from_pretrained(vars.model, revision=vars.revision, cache_dir="cache")
+                model = AutoModelForCausalLM.from_pretrained(koboldai_vars.model, revision=koboldai_vars.revision, cache_dir="cache")
             except Exception as e:
-                model = GPTNeoForCausalLM.from_pretrained(vars.model, revision=vars.revision, cache_dir="cache")
+                model = GPTNeoForCausalLM.from_pretrained(koboldai_vars.model, revision=koboldai_vars.revision, cache_dir="cache")
 
     #network.state = network.move_xmap(network.state, np.zeros(cores_per_replica))
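The hunk above renames vars in every branch of a four-step tokenizer fallback (fast AutoTokenizer, slow AutoTokenizer, model-specific GPT2TokenizerFast, stock gpt2) that is repeated verbatim for three model locations. A simplified sketch of the same fallback as a loop, assuming it is acceptable to return the first tokenizer that loads (the original code instead always re-attempts with use_fast=False even after a fast success):

from transformers import AutoTokenizer, GPT2TokenizerFast

def load_tokenizer_with_fallback(location, revision):
    attempts = [
        lambda: AutoTokenizer.from_pretrained(location, revision=revision, cache_dir="cache"),
        lambda: AutoTokenizer.from_pretrained(location, revision=revision, cache_dir="cache", use_fast=False),
        lambda: GPT2TokenizerFast.from_pretrained(location, revision=revision, cache_dir="cache"),
        lambda: GPT2TokenizerFast.from_pretrained("gpt2", revision=revision, cache_dir="cache"),
    ]
    for attempt in attempts:
        try:
            return attempt()
        except Exception:
            continue  # fall through to the next, more permissive loader
    raise RuntimeError(f"No tokenizer could be loaded for {location}")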
utils.py (38 changed lines)
@@ -21,7 +21,7 @@ try:
 except ImportError:
     HAS_ACCELERATE = False
 
-vars = None
+koboldai_vars = None
 num_shards: Optional[int] = None
 current_shard = 0
 from_pretrained_model_name = ""
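utils.py now keeps a module-level koboldai_vars placeholder that starts as None, so some other module must assign the real object before functions such as encodenewlines below can run. A hedged sketch of that injection (the actual call site in aiserver.py is not shown in this diff, and KoboldAIVars is a hypothetical stand-in):

import utils

class KoboldAIVars:  # stand-in for the real shared state object
    newlinemode = "n"
    actionmode = 0

utils.koboldai_vars = KoboldAIVars()  # done once at startup, before utils helpers are called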
@@ -93,8 +93,8 @@ def replaceblanklines(txt):
 #==================================================================#
 # 
 #==================================================================#
-def removespecialchars(txt, vars=None):
-    if vars is None or vars.actionmode == 0:
+def removespecialchars(txt, koboldai_vars=None):
+    if koboldai_vars is None or koboldai_vars.actionmode == 0:
         txt = re.sub(r"[#/@%<>{}+=~|\^]", "", txt)
     else:
         txt = re.sub(r"[#/@%{}+=~|\^]", "", txt)
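Note that renaming the parameter from vars to koboldai_vars is compatible with positional callers but breaks any caller that passed vars=... by keyword. A small runnable check of the two regex paths (the stub class is illustrative):

import re

def removespecialchars(txt, koboldai_vars=None):
    if koboldai_vars is None or koboldai_vars.actionmode == 0:
        txt = re.sub(r"[#/@%<>{}+=~|\^]", "", txt)  # the actionmode == 0 path also strips < and >
    else:
        txt = re.sub(r"[#/@%{}+=~|\^]", "", txt)
    return txt

print(removespecialchars("a <b> #c"))  # 'a b c'

class Adventure: actionmode = 1
print(removespecialchars("a <b> #c", Adventure()))  # 'a <b> c'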
@@ -103,38 +103,38 @@ def removespecialchars(txt, vars=None):
 #==================================================================#
 # If the next action follows a sentence closure, add a space
 #==================================================================#
-def addsentencespacing(txt, vars):
+def addsentencespacing(txt, koboldai_vars):
     # Don't add sentence spacing if submission is empty or starts with whitespace
     if(len(txt) == 0 or len(txt) != len(txt.lstrip())):
         return txt
     # Get last character of last action
-    if(len(vars.actions) > 0):
-        if(len(vars.actions[vars.actions.get_last_key()]) > 0):
-            action = vars.actions[vars.actions.get_last_key()]
+    if(len(koboldai_vars.actions) > 0):
+        if(len(koboldai_vars.actions[koboldai_vars.actions.get_last_key()]) > 0):
+            action = koboldai_vars.actions[koboldai_vars.actions.get_last_key()]
             lastchar = action[-1] if len(action) else ""
         else:
             # Last action is blank, this should never happen, but
             # since it did let's bail out.
             return txt
     else:
-        action = vars.prompt
+        action = koboldai_vars.prompt
         lastchar = action[-1] if len(action) else ""
     if(lastchar != " "):
         txt = " " + txt
     return txt
 
-def singlelineprocessing(txt, vars):
-    txt = vars.regex_sl.sub('', txt)
-    if(len(vars.actions) > 0):
-        if(len(vars.actions[vars.actions.get_last_key()]) > 0):
-            action = vars.actions[vars.actions.get_last_key()]
+def singlelineprocessing(txt, koboldai_vars):
+    txt = koboldai_vars.regex_sl.sub('', txt)
+    if(len(koboldai_vars.actions) > 0):
+        if(len(koboldai_vars.actions[koboldai_vars.actions.get_last_key()]) > 0):
+            action = koboldai_vars.actions[koboldai_vars.actions.get_last_key()]
             lastchar = action[-1] if len(action) else ""
         else:
             # Last action is blank, this should never happen, but
             # since it did let's bail out.
             return txt
     else:
-        action = vars.prompt
+        action = koboldai_vars.prompt
         lastchar = action[-1] if len(action) else ""
     if(lastchar != "\n"):
         txt = txt + "\n"
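A condensed, runnable illustration of the addsentencespacing contract with a stub state object (assumption: only .actions and .prompt are consulted, and with no actions the prompt supplies the last character):

class _Vars:
    actions = {}                          # empty story: fall back to the prompt
    prompt = "The door creaked open."

def addsentencespacing(txt, koboldai_vars):
    # condensed from the diff above
    if len(txt) == 0 or len(txt) != len(txt.lstrip()):
        return txt
    if len(koboldai_vars.actions) > 0:
        action = koboldai_vars.actions[koboldai_vars.actions.get_last_key()]
    else:
        action = koboldai_vars.prompt
    lastchar = action[-1] if len(action) else ""
    return (" " + txt) if lastchar != " " else txt

print(repr(addsentencespacing("She stepped inside.", _Vars())))
# ' She stepped inside.'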
@@ -152,14 +152,14 @@ def cleanfilename(filename):
 # Newline substitution for fairseq models
 #==================================================================#
 def encodenewlines(txt):
-    if(vars.newlinemode == "s"):
+    if(koboldai_vars.newlinemode == "s"):
         return txt.replace('\n', "</s>")
     return txt
 
 def decodenewlines(txt):
-    if(vars.newlinemode == "s"):
+    if(koboldai_vars.newlinemode == "s"):
         return txt.replace("</s>", '\n')
-    if(vars.newlinemode == "ns"):
+    if(koboldai_vars.newlinemode == "ns"):
         return txt.replace("</s>", '')
     return txt
 
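These helpers round-trip newlines through the </s> sentinel for fairseq-style models. A quick self-contained check with a stub standing in for the module-level state (assumption: only newlinemode is read):

class _Vars:
    newlinemode = "s"

koboldai_vars = _Vars()  # stands in for the module-level object assigned at startup

def encodenewlines(txt):
    if koboldai_vars.newlinemode == "s":
        return txt.replace('\n', "</s>")
    return txt

def decodenewlines(txt):
    if koboldai_vars.newlinemode == "s":
        return txt.replace("</s>", '\n')
    if koboldai_vars.newlinemode == "ns":
        return txt.replace("</s>", '')
    return txt

assert decodenewlines(encodenewlines("line one\nline two")) == "line one\nline two"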
@@ -265,9 +265,9 @@ def aria2_hook(pretrained_model_name_or_path: str, force_download=False, cache_d
     with tempfile.NamedTemporaryFile("w+b", delete=False) as f:
         f.write(aria2_config)
         f.flush()
-        p = subprocess.Popen(["aria2c", "-x", "10", "-s", "10", "-j", "10", "--enable-rpc=true", f"--rpc-secret={secret}", "--rpc-listen-port", str(vars.aria2_port), "--disable-ipv6", "--file-allocation=trunc", "--allow-overwrite", "--auto-file-renaming=false", "-d", _cache_dir, "-i", f.name, "-U", transformers.file_utils.http_user_agent(user_agent)] + (["-c"] if not force_download else []) + ([f"--header='Authorization: Bearer {token}'"] if use_auth_token else []), stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL)
+        p = subprocess.Popen(["aria2c", "-x", "10", "-s", "10", "-j", "10", "--enable-rpc=true", f"--rpc-secret={secret}", "--rpc-listen-port", str(koboldai_vars.aria2_port), "--disable-ipv6", "--file-allocation=trunc", "--allow-overwrite", "--auto-file-renaming=false", "-d", _cache_dir, "-i", f.name, "-U", transformers.file_utils.http_user_agent(user_agent)] + (["-c"] if not force_download else []) + ([f"--header='Authorization: Bearer {token}'"] if use_auth_token else []), stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL)
         while p.poll() is None:
-            r = s.post(f"http://localhost:{vars.aria2_port}/jsonrpc", json={"jsonrpc": "2.0", "id": "kai", "method": "aria2.tellActive", "params": [f"token:{secret}"]}).json()["result"]
+            r = s.post(f"http://localhost:{koboldai_vars.aria2_port}/jsonrpc", json={"jsonrpc": "2.0", "id": "kai", "method": "aria2.tellActive", "params": [f"token:{secret}"]}).json()["result"]
             if not r:
                 s.close()
                 if bar is not None:
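The download hook above drives a headless aria2c over JSON-RPC on koboldai_vars.aria2_port. A hedged sketch of the polling call in isolation, with a made-up port and secret; it only succeeds against a running aria2c started with --enable-rpc:

import requests

session = requests.Session()
payload = {
    "jsonrpc": "2.0",
    "id": "kai",
    "method": "aria2.tellActive",
    "params": ["token:example-secret"],   # illustrative secret, not a real token
}
active = session.post("http://localhost:6799/jsonrpc", json=payload).json()["result"]
# An empty result list means every queued download has finished, so the loop can stop.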