Mirror of https://github.com/KoboldAI/KoboldAI-Client.git, synced 2025-06-05 21:59:24 +02:00

Commit: renamed vars to koboldai_vars

Changed files:
aiserver.py (3586 lines changed): file diff suppressed because it is too large
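
Note: the rename is mechanical but crosses module boundaries. utils.py (diffed below) keeps a module-level koboldai_vars global that the server populates at startup; the assignment itself lives in aiserver.py, whose diff is suppressed above, so the wiring sketched here is an assumption rather than a quote from this commit:

    # Hypothetical sketch of the server-side wiring the rename implies; the real
    # state object and the real assignment are in the suppressed aiserver.py diff.
    import types
    import utils  # the utils.py changed below

    koboldai_vars = types.SimpleNamespace(newlinemode="s", actionmode=0)  # stand-in state
    utils.koboldai_vars = koboldai_vars  # helpers such as utils.encodenewlines() read this global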
bridge.lua (46 lines changed):
@@ -380,7 +380,7 @@ return function(_python, _bridged)
 
     ---@return boolean
     function KoboldWorldInfoEntry:is_valid()
-        return _python.as_attrgetter(bridged.vars.worldinfo_u).get(rawget(self, "_uid")) ~= nil
+        return _python.as_attrgetter(bridged.koboldai_vars.worldinfo_u).get(rawget(self, "_uid")) ~= nil
     end
 
     ---@param submission? string
@@ -475,7 +475,7 @@ return function(_python, _bridged)
         if not check_validity(self) or type(u) ~= "number" then
             return
         end
-        local query = _python.as_attrgetter(bridged.vars.worldinfo_u).get(u)
+        local query = _python.as_attrgetter(bridged.koboldai_vars.worldinfo_u).get(u)
         if query == nil or (rawget(self, "_name") == "KoboldWorldInfoFolder" and self.uid ~= _python.as_attrgetter(query).get("folder")) then
             return
         end
@@ -522,7 +522,7 @@ return function(_python, _bridged)
 
     ---@return boolean
     function KoboldWorldInfoFolder:is_valid()
-        return _python.as_attrgetter(bridged.vars.wifolders_d).get(rawget(self, "_uid")) ~= nil
+        return _python.as_attrgetter(bridged.koboldai_vars.wifolders_d).get(rawget(self, "_uid")) ~= nil
     end
 
     ---@param t KoboldWorldInfoFolder
@@ -531,7 +531,7 @@ return function(_python, _bridged)
         if not check_validity(t) then
             return 0
         end
-        return math.tointeger(_python.builtins.len(_python.as_attrgetter(bridged.vars.wifolders_u).get(t.uid))) - 1
+        return math.tointeger(_python.builtins.len(_python.as_attrgetter(bridged.koboldai_vars.wifolders_u).get(t.uid))) - 1
     end
 
     KoboldWorldInfoFolder_mt._kobold_next = KoboldWorldInfoEntry_mt._kobold_next
@@ -548,7 +548,7 @@ return function(_python, _bridged)
         elseif rawget(t, "_name") == "KoboldWorldInfoFolder" and k == "name" then
             return bridged.folder_get_attr(t.uid, k)
         elseif type(k) == "number" then
-            local query = rawget(t, "_name") == "KoboldWorldInfoFolder" and _python.as_attrgetter(bridged.vars.wifolders_u).get(t.uid) or bridged.vars.worldinfo_i
+            local query = rawget(t, "_name") == "KoboldWorldInfoFolder" and _python.as_attrgetter(bridged.koboldai_vars.wifolders_u).get(t.uid) or bridged.koboldai_vars.worldinfo_i
             k = math.tointeger(k)
             if k == nil or k < 1 or k > #t then
                 return
@@ -599,7 +599,7 @@ return function(_python, _bridged)
         if not check_validity(self) or type(u) ~= "number" then
             return
         end
-        local query = _python.as_attrgetter(bridged.vars.wifolders_d).get(u)
+        local query = _python.as_attrgetter(bridged.koboldai_vars.wifolders_d).get(u)
         if query == nil then
             return
         end
@@ -619,7 +619,7 @@ return function(_python, _bridged)
         if not check_validity(t) then
             return 0
         end
-        return _python.builtins.len(bridged.vars.wifolders_l)
+        return _python.builtins.len(bridged.koboldai_vars.wifolders_l)
     end
 
     KoboldWorldInfoFolderSelector_mt._kobold_next = KoboldWorldInfoEntry_mt._kobold_next
@@ -633,7 +633,7 @@ return function(_python, _bridged)
             return
         end
         local folder = deepcopy(KoboldWorldInfoFolder)
-        rawset(folder, "_uid", math.tointeger(bridged.vars.wifolders_l[k-1]))
+        rawset(folder, "_uid", math.tointeger(bridged.koboldai_vars.wifolders_l[k-1]))
         return folder
     end
 
@@ -672,7 +672,7 @@ return function(_python, _bridged)
         if not check_validity(t) then
             return 0
         end
-        return math.tointeger(_python.builtins.len(bridged.vars.worldinfo)) - math.tointeger(_python.builtins.len(bridged.vars.wifolders_l)) - 1
+        return math.tointeger(_python.builtins.len(bridged.koboldai_vars.worldinfo)) - math.tointeger(_python.builtins.len(bridged.koboldai_vars.wifolders_l)) - 1
     end
 
     KoboldWorldInfo_mt._kobold_next = KoboldWorldInfoEntry_mt._kobold_next
@@ -725,12 +725,12 @@ return function(_python, _bridged)
         end
         if k == "content" then
            if rawget(t, "_num") == 0 then
-                if bridged.vars.gamestarted then
-                    local prompt = koboldbridge.userstate == "genmod" and bridged.vars._prompt or bridged.vars.prompt
+                if bridged.koboldai_vars.gamestarted then
+                    local prompt = koboldbridge.userstate == "genmod" and bridged.koboldai_vars._prompt or bridged.koboldai_vars.prompt
                     return prompt
                 end
            end
-            local actions = koboldbridge.userstate == "genmod" and bridged.vars._actions or bridged.vars.actions
+            local actions = koboldbridge.userstate == "genmod" and bridged.koboldai_vars._actions or bridged.koboldai_vars.actions
            return _python.as_attrgetter(actions).get(math.tointeger(rawget(t, "_num")) - 1)
        end
     end
@@ -752,7 +752,7 @@ return function(_python, _bridged)
            error("Attempted to set the prompt chunk's content to the empty string; this is not allowed")
            return
        end
-        local actions = koboldbridge.userstate == "genmod" and bridged.vars._actions or bridged.vars.actions
+        local actions = koboldbridge.userstate == "genmod" and bridged.koboldai_vars._actions or bridged.koboldai_vars.actions
        if _k ~= 0 and _python.as_attrgetter(actions).get(_k-1) == nil then
            return
        end
@@ -777,11 +777,11 @@ return function(_python, _bridged)
 
     ---@return fun(): KoboldStoryChunk, table, nil
     function KoboldStory:forward_iter()
-        local actions = koboldbridge.userstate == "genmod" and bridged.vars._actions or bridged.vars.actions
+        local actions = koboldbridge.userstate == "genmod" and bridged.koboldai_vars._actions or bridged.koboldai_vars.actions
        local nxt, iterator = _python.iter(actions)
        local run_once = false
        local function f()
-            if not bridged.vars.gamestarted then
+            if not bridged.koboldai_vars.gamestarted then
                return
            end
            local chunk = deepcopy(KoboldStoryChunk)
@@ -805,11 +805,11 @@ return function(_python, _bridged)
 
     ---@return fun(): KoboldStoryChunk, table, nil
     function KoboldStory:reverse_iter()
-        local actions = koboldbridge.userstate == "genmod" and bridged.vars._actions or bridged.vars.actions
+        local actions = koboldbridge.userstate == "genmod" and bridged.koboldai_vars._actions or bridged.koboldai_vars.actions
        local nxt, iterator = _python.iter(_python.builtins.reversed(actions))
        local last_run = false
        local function f()
-            if not bridged.vars.gamestarted or last_run then
+            if not bridged.koboldai_vars.gamestarted or last_run then
                return
            end
            local chunk = deepcopy(KoboldStoryChunk)
@@ -1039,7 +1039,7 @@ return function(_python, _bridged)
     ---@param t KoboldLib
     ---@return string
     function KoboldLib_getters.submission(t)
-        return bridged.vars.submission
+        return bridged.koboldai_vars.submission
     end
 
     ---@param t KoboldLib
@@ -1051,11 +1051,11 @@ return function(_python, _bridged)
        elseif type(v) ~= "string" then
            error("`KoboldLib.submission` must be a string; you attempted to set it to a " .. type(v))
            return
-        elseif not bridged.vars.gamestarted and v == "" then
+        elseif not bridged.koboldai_vars.gamestarted and v == "" then
            error("`KoboldLib.submission` must not be set to the empty string when the story is empty")
            return
        end
-        bridged.vars.submission = v
+        bridged.koboldai_vars.submission = v
     end
 
 
@@ -1100,7 +1100,7 @@ return function(_python, _bridged)
     ---@param t KoboldLib
     ---@return string
     function KoboldLib_getters.model(t)
-        return bridged.vars.model
+        return bridged.koboldai_vars.model
     end
 
     ---@param t KoboldLib
@@ -1136,7 +1136,7 @@ return function(_python, _bridged)
     ---@param t KoboldLib
     ---@return string
     function KoboldLib_getters.custmodpth(t)
-        return bridged.vars.custmodpth
+        return bridged.koboldai_vars.custmodpth
     end
 
     ---@param t KoboldLib
@@ -2013,7 +2013,7 @@ return function(_python, _bridged)
        koboldbridge.userstate = "genmod"
        if koboldbridge.genmod ~= nil then
            local _generated = deepcopy(koboldbridge.generated)
-            if not bridged.vars.nogenmod then
+            if not bridged.koboldai_vars.nogenmod then
                r = koboldbridge.genmod()
            end
            setmetatable(koboldbridge.logits, nil)
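
Every bridge.lua change above is the same one-token rename inside expressions such as _python.as_attrgetter(bridged.koboldai_vars.worldinfo_u).get(uid). Because Lua's dot operator collapses Python's attribute/item distinction, the lupa-style as_attrgetter wrapper forces attribute access, so .get resolves to dict.get rather than an item lookup for the key "get". A minimal Python sketch of the check KoboldWorldInfoEntry:is_valid() performs (names and data are hypothetical stand-ins, not repository code):

    # An entry is valid while its uid is still a key of the worldinfo_u mapping.
    worldinfo_u = {1: {"content": "example world info entry"}}  # stand-in state

    def is_valid(uid):
        # Lua's _python.as_attrgetter(worldinfo_u).get(uid) resolves to dict.get here
        return worldinfo_u.get(uid) is not None

    print(is_valid(1), is_valid(2))  # True False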
tpu_mtj_backend.py:
@@ -563,7 +563,7 @@ class PenalizingCausalTransformer(CausalTransformer):
        compiling_callback()
        numseqs = numseqs_aux.shape[0]
        # These are the tokens that we don't want the AI to ever write
-        badwords = jnp.array(vars.badwordsids).squeeze()
+        badwords = jnp.array(koboldai_vars.badwordsids).squeeze()
        @hk.transform
        def generate_sample(context, ctx_length):
            # Give the initial context to the transformer
@@ -1041,8 +1041,8 @@ def load_model(path: str, driver_version="tpu_driver0.1_dev20210607", hf_checkpo
    elif "eos_token_id" in kwargs:
        pad_token_id = kwargs["eos_token_id"]
 
-    if not hasattr(vars, "sampler_order") or not vars.sampler_order:
-        vars.sampler_order = utils.default_sampler_order.copy()
+    if not hasattr(koboldai_vars, "sampler_order") or not koboldai_vars.sampler_order:
+        koboldai_vars.sampler_order = utils.default_sampler_order.copy()
 
    default_params = {
        "compat": "j",
@@ -1061,7 +1061,7 @@ def load_model(path: str, driver_version="tpu_driver0.1_dev20210607", hf_checkpo
    }
    params = kwargs
 
-    if vars.model == "TPUMeshTransformerGPTNeoX":
+    if koboldai_vars.model == "TPUMeshTransformerGPTNeoX":
        default_params = {
            "compat": "neox",
            "layers": 44,
@@ -1080,9 +1080,9 @@ def load_model(path: str, driver_version="tpu_driver0.1_dev20210607", hf_checkpo
 
    # Try to convert HF config.json to MTJ config
    if hf_checkpoint:
-        spec_path = os.path.join("maps", vars.model_type + ".json")
+        spec_path = os.path.join("maps", koboldai_vars.model_type + ".json")
        if not os.path.isfile(spec_path):
-            raise NotImplementedError(f"Unsupported model type {repr(vars.model_type)}")
+            raise NotImplementedError(f"Unsupported model type {repr(koboldai_vars.model_type)}")
        with open(spec_path) as f:
            lazy_load_spec = json.load(f)
 
@@ -1133,7 +1133,7 @@ def load_model(path: str, driver_version="tpu_driver0.1_dev20210607", hf_checkpo
            params[param] = default_params[param]
 
    # Load tokenizer
-    if vars.model == "TPUMeshTransformerGPTNeoX":
+    if koboldai_vars.model == "TPUMeshTransformerGPTNeoX":
        tokenizer = Tokenizer.from_file(os.path.join(path, "20B_tokenizer.json"))
        def new_encode(old_encode):
            def encode(s, *args, **kwargs):
@@ -1181,19 +1181,19 @@ def load_model(path: str, driver_version="tpu_driver0.1_dev20210607", hf_checkpo
 
    global badwords
    # These are the tokens that we don't want the AI to ever write
-    badwords = jnp.array(vars.badwordsids).squeeze()
+    badwords = jnp.array(koboldai_vars.badwordsids).squeeze()
 
    if not path.endswith("/"):
        path += "/"
 
    network = PenalizingCausalTransformer(params, dematerialized=True)
 
-    if not hf_checkpoint and vars.model != "TPUMeshTransformerGPTNeoX":
+    if not hf_checkpoint and koboldai_vars.model != "TPUMeshTransformerGPTNeoX":
        network.state = read_ckpt_lowmem(network.state, path, devices.shape[1])
        #network.state = network.move_xmap(network.state, np.zeros(cores_per_replica))
        return
 
-    if vars.model == "TPUMeshTransformerGPTNeoX":
+    if koboldai_vars.model == "TPUMeshTransformerGPTNeoX":
        print("\n\n\nThis model has ", f"{hk.data_structures.tree_size(network.state['params']):,d}".replace(",", " "), " parameters.\n")
        read_neox_checkpoint(network.state, path, params)
        return
@@ -1339,58 +1339,58 @@ def load_model(path: str, driver_version="tpu_driver0.1_dev20210607", hf_checkpo
                    f.close()
        callback.nested = False
 
-    if os.path.isdir(vars.model.replace('/', '_')):
+    if os.path.isdir(koboldai_vars.model.replace('/', '_')):
        import shutil
-        shutil.move(vars.model.replace('/', '_'), "models/{}".format(vars.model.replace('/', '_')))
+        shutil.move(koboldai_vars.model.replace('/', '_'), "models/{}".format(koboldai_vars.model.replace('/', '_')))
    print("\n", flush=True)
    with torch_lazy_loader.use_lazy_torch_load(callback=callback, dematerialized_modules=True):
-        if(os.path.isdir(vars.custmodpth)):
+        if(os.path.isdir(koboldai_vars.custmodpth)):
            try:
-                tokenizer = AutoTokenizer.from_pretrained(vars.custmodpth, revision=vars.revision, cache_dir="cache")
+                tokenizer = AutoTokenizer.from_pretrained(koboldai_vars.custmodpth, revision=koboldai_vars.revision, cache_dir="cache")
            except Exception as e:
                pass
            try:
-                tokenizer = AutoTokenizer.from_pretrained(vars.custmodpth, revision=vars.revision, cache_dir="cache", use_fast=False)
+                tokenizer = AutoTokenizer.from_pretrained(koboldai_vars.custmodpth, revision=koboldai_vars.revision, cache_dir="cache", use_fast=False)
            except Exception as e:
                try:
-                    tokenizer = GPT2TokenizerFast.from_pretrained(vars.custmodpth, revision=vars.revision, cache_dir="cache")
+                    tokenizer = GPT2TokenizerFast.from_pretrained(koboldai_vars.custmodpth, revision=koboldai_vars.revision, cache_dir="cache")
                except Exception as e:
-                    tokenizer = GPT2TokenizerFast.from_pretrained("gpt2", revision=vars.revision, cache_dir="cache")
+                    tokenizer = GPT2TokenizerFast.from_pretrained("gpt2", revision=koboldai_vars.revision, cache_dir="cache")
            try:
-                model = AutoModelForCausalLM.from_pretrained(vars.custmodpth, revision=vars.revision, cache_dir="cache")
+                model = AutoModelForCausalLM.from_pretrained(koboldai_vars.custmodpth, revision=koboldai_vars.revision, cache_dir="cache")
            except Exception as e:
-                model = GPTNeoForCausalLM.from_pretrained(vars.custmodpth, revision=vars.revision, cache_dir="cache")
-        elif(os.path.isdir("models/{}".format(vars.model.replace('/', '_')))):
+                model = GPTNeoForCausalLM.from_pretrained(koboldai_vars.custmodpth, revision=koboldai_vars.revision, cache_dir="cache")
+        elif(os.path.isdir("models/{}".format(koboldai_vars.model.replace('/', '_')))):
            try:
-                tokenizer = AutoTokenizer.from_pretrained("models/{}".format(vars.model.replace('/', '_')), revision=vars.revision, cache_dir="cache")
+                tokenizer = AutoTokenizer.from_pretrained("models/{}".format(koboldai_vars.model.replace('/', '_')), revision=koboldai_vars.revision, cache_dir="cache")
            except Exception as e:
                pass
            try:
-                tokenizer = AutoTokenizer.from_pretrained("models/{}".format(vars.model.replace('/', '_')), revision=vars.revision, cache_dir="cache", use_fast=False)
+                tokenizer = AutoTokenizer.from_pretrained("models/{}".format(koboldai_vars.model.replace('/', '_')), revision=koboldai_vars.revision, cache_dir="cache", use_fast=False)
            except Exception as e:
                try:
-                    tokenizer = GPT2TokenizerFast.from_pretrained("models/{}".format(vars.model.replace('/', '_')), revision=vars.revision, cache_dir="cache")
+                    tokenizer = GPT2TokenizerFast.from_pretrained("models/{}".format(koboldai_vars.model.replace('/', '_')), revision=koboldai_vars.revision, cache_dir="cache")
                except Exception as e:
-                    tokenizer = GPT2TokenizerFast.from_pretrained("gpt2", revision=vars.revision, cache_dir="cache")
+                    tokenizer = GPT2TokenizerFast.from_pretrained("gpt2", revision=koboldai_vars.revision, cache_dir="cache")
            try:
-                model = AutoModelForCausalLM.from_pretrained("models/{}".format(vars.model.replace('/', '_')), revision=vars.revision, cache_dir="cache")
+                model = AutoModelForCausalLM.from_pretrained("models/{}".format(koboldai_vars.model.replace('/', '_')), revision=koboldai_vars.revision, cache_dir="cache")
            except Exception as e:
-                model = GPTNeoForCausalLM.from_pretrained("models/{}".format(vars.model.replace('/', '_')), revision=vars.revision, cache_dir="cache")
+                model = GPTNeoForCausalLM.from_pretrained("models/{}".format(koboldai_vars.model.replace('/', '_')), revision=koboldai_vars.revision, cache_dir="cache")
        else:
            try:
-                tokenizer = AutoTokenizer.from_pretrained(vars.model, revision=vars.revision, cache_dir="cache")
+                tokenizer = AutoTokenizer.from_pretrained(koboldai_vars.model, revision=koboldai_vars.revision, cache_dir="cache")
            except Exception as e:
                pass
            try:
-                tokenizer = AutoTokenizer.from_pretrained(vars.model, revision=vars.revision, cache_dir="cache", use_fast=False)
+                tokenizer = AutoTokenizer.from_pretrained(koboldai_vars.model, revision=koboldai_vars.revision, cache_dir="cache", use_fast=False)
            except Exception as e:
                try:
-                    tokenizer = GPT2TokenizerFast.from_pretrained(vars.model, revision=vars.revision, cache_dir="cache")
+                    tokenizer = GPT2TokenizerFast.from_pretrained(koboldai_vars.model, revision=koboldai_vars.revision, cache_dir="cache")
                except Exception as e:
-                    tokenizer = GPT2TokenizerFast.from_pretrained("gpt2", revision=vars.revision, cache_dir="cache")
+                    tokenizer = GPT2TokenizerFast.from_pretrained("gpt2", revision=koboldai_vars.revision, cache_dir="cache")
            try:
-                model = AutoModelForCausalLM.from_pretrained(vars.model, revision=vars.revision, cache_dir="cache")
+                model = AutoModelForCausalLM.from_pretrained(koboldai_vars.model, revision=koboldai_vars.revision, cache_dir="cache")
            except Exception as e:
-                model = GPTNeoForCausalLM.from_pretrained(vars.model, revision=vars.revision, cache_dir="cache")
+                model = GPTNeoForCausalLM.from_pretrained(koboldai_vars.model, revision=koboldai_vars.revision, cache_dir="cache")
 
    #network.state = network.move_xmap(network.state, np.zeros(cores_per_replica))
utils.py (38 lines changed):
@@ -21,7 +21,7 @@ try:
 except ImportError:
     HAS_ACCELERATE = False
 
-vars = None
+koboldai_vars = None
 num_shards: Optional[int] = None
 current_shard = 0
 from_pretrained_model_name = ""
@@ -93,8 +93,8 @@ def replaceblanklines(txt):
 #==================================================================#
 # 
 #==================================================================#
-def removespecialchars(txt, vars=None):
-    if vars is None or vars.actionmode == 0:
+def removespecialchars(txt, koboldai_vars=None):
+    if koboldai_vars is None or koboldai_vars.actionmode == 0:
        txt = re.sub(r"[#/@%<>{}+=~|\^]", "", txt)
    else:
        txt = re.sub(r"[#/@%{}+=~|\^]", "", txt)
@@ -103,38 +103,38 @@ def removespecialchars(txt, vars=None):
 #==================================================================#
 # If the next action follows a sentence closure, add a space
 #==================================================================#
-def addsentencespacing(txt, vars):
+def addsentencespacing(txt, koboldai_vars):
    # Don't add sentence spacing if submission is empty or starts with whitespace
    if(len(txt) == 0 or len(txt) != len(txt.lstrip())):
        return txt
    # Get last character of last action
-    if(len(vars.actions) > 0):
-        if(len(vars.actions[vars.actions.get_last_key()]) > 0):
-            action = vars.actions[vars.actions.get_last_key()]
+    if(len(koboldai_vars.actions) > 0):
+        if(len(koboldai_vars.actions[koboldai_vars.actions.get_last_key()]) > 0):
+            action = koboldai_vars.actions[koboldai_vars.actions.get_last_key()]
            lastchar = action[-1] if len(action) else ""
        else:
            # Last action is blank, this should never happen, but
            # since it did let's bail out.
            return txt
    else:
-        action = vars.prompt
+        action = koboldai_vars.prompt
        lastchar = action[-1] if len(action) else ""
    if(lastchar != " "):
        txt = " " + txt
    return txt
 
-def singlelineprocessing(txt, vars):
-    txt = vars.regex_sl.sub('', txt)
-    if(len(vars.actions) > 0):
-        if(len(vars.actions[vars.actions.get_last_key()]) > 0):
-            action = vars.actions[vars.actions.get_last_key()]
+def singlelineprocessing(txt, koboldai_vars):
+    txt = koboldai_vars.regex_sl.sub('', txt)
+    if(len(koboldai_vars.actions) > 0):
+        if(len(koboldai_vars.actions[koboldai_vars.actions.get_last_key()]) > 0):
+            action = koboldai_vars.actions[koboldai_vars.actions.get_last_key()]
            lastchar = action[-1] if len(action) else ""
        else:
            # Last action is blank, this should never happen, but
            # since it did let's bail out.
            return txt
    else:
-        action = vars.prompt
+        action = koboldai_vars.prompt
        lastchar = action[-1] if len(action) else ""
    if(lastchar != "\n"):
        txt = txt + "\n"
@@ -152,14 +152,14 @@ def cleanfilename(filename):
 # Newline substitution for fairseq models
 #==================================================================#
 def encodenewlines(txt):
-    if(vars.newlinemode == "s"):
+    if(koboldai_vars.newlinemode == "s"):
        return txt.replace('\n', "</s>")
    return txt
 
 def decodenewlines(txt):
-    if(vars.newlinemode == "s"):
+    if(koboldai_vars.newlinemode == "s"):
        return txt.replace("</s>", '\n')
-    if(vars.newlinemode == "ns"):
+    if(koboldai_vars.newlinemode == "ns"):
        return txt.replace("</s>", '')
    return txt
 
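
A usage sketch for the two newline helpers above, assuming utils.py is importable and using a stand-in state object (the real one is assigned from aiserver.py): with newlinemode == "s", newlines round-trip through the fairseq-style </s> substitution.

    import types
    import utils

    utils.koboldai_vars = types.SimpleNamespace(newlinemode="s")  # stand-in state
    assert utils.encodenewlines("line one\nline two") == "line one</s>line two"
    assert utils.decodenewlines("line one</s>line two") == "line one\nline two"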
@@ -265,9 +265,9 @@ def aria2_hook(pretrained_model_name_or_path: str, force_download=False, cache_d
    with tempfile.NamedTemporaryFile("w+b", delete=False) as f:
        f.write(aria2_config)
        f.flush()
-        p = subprocess.Popen(["aria2c", "-x", "10", "-s", "10", "-j", "10", "--enable-rpc=true", f"--rpc-secret={secret}", "--rpc-listen-port", str(vars.aria2_port), "--disable-ipv6", "--file-allocation=trunc", "--allow-overwrite", "--auto-file-renaming=false", "-d", _cache_dir, "-i", f.name, "-U", transformers.file_utils.http_user_agent(user_agent)] + (["-c"] if not force_download else []) + ([f"--header='Authorization: Bearer {token}'"] if use_auth_token else []), stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL)
+        p = subprocess.Popen(["aria2c", "-x", "10", "-s", "10", "-j", "10", "--enable-rpc=true", f"--rpc-secret={secret}", "--rpc-listen-port", str(koboldai_vars.aria2_port), "--disable-ipv6", "--file-allocation=trunc", "--allow-overwrite", "--auto-file-renaming=false", "-d", _cache_dir, "-i", f.name, "-U", transformers.file_utils.http_user_agent(user_agent)] + (["-c"] if not force_download else []) + ([f"--header='Authorization: Bearer {token}'"] if use_auth_token else []), stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL)
        while p.poll() is None:
-            r = s.post(f"http://localhost:{vars.aria2_port}/jsonrpc", json={"jsonrpc": "2.0", "id": "kai", "method": "aria2.tellActive", "params": [f"token:{secret}"]}).json()["result"]
+            r = s.post(f"http://localhost:{koboldai_vars.aria2_port}/jsonrpc", json={"jsonrpc": "2.0", "id": "kai", "method": "aria2.tellActive", "params": [f"token:{secret}"]}).json()["result"]
            if not r:
                s.close()
                if bar is not None:
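
For context, the loop above polls the aria2c RPC server that the hook just spawned. A standalone sketch of the same JSON-RPC call (the port and secret values here are hypothetical; the real port comes from koboldai_vars.aria2_port):

    import requests

    secret, aria2_port = "sekret", 6800  # assumed values for illustration
    r = requests.post(
        f"http://localhost:{aria2_port}/jsonrpc",
        json={"jsonrpc": "2.0", "id": "kai", "method": "aria2.tellActive",
              "params": [f"token:{secret}"]},
    ).json()["result"]  # empty once no downloads remain active, which ends the poll loop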