Pull upstream changes, fix conflict

0cc4m
2023-06-04 09:06:31 +02:00
15 changed files with 85 additions and 69 deletions

View File

@@ -6,6 +6,13 @@ IF %M%==1 GOTO drivemap
IF %M%==2 GOTO subfolder
IF %M%==3 GOTO drivemap_B
:Isolation
call conda deactivate 2>NUL
set Path=%windir%\system32;%windir%;C:\Windows\System32\Wbem;%windir%\System32\WindowsPowerShell\v1.0\;%windir%\System32\OpenSSH\
SET CONDA_SHLVL=
SET PYTHONNOUSERSITE=1
SET PYTHONPATH=
:subfolder
umamba.exe install --no-shortcuts -r miniconda3 -n base -c conda-forge jupyterlab jupyterlab-git
call miniconda3\condabin\activate
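
The :Isolation block added to each of these batch files deactivates any active conda environment and resets Path, CONDA_SHLVL, PYTHONNOUSERSITE, and PYTHONPATH, so the bundled miniconda3 runs without interference from the user's global Python or conda setup. A minimal Python sketch of the same idea, launching a child process with a sanitized environment (the helper and its paths are illustrative assumptions, not the project's own code):

import os
import subprocess

def run_isolated(cmd):
    """Run cmd with a minimal Windows environment, mirroring the
    :Isolation block (sketch; names and paths are assumptions)."""
    windir = os.environ.get("windir", r"C:\Windows")
    env = {
        # like "set Path=...": keep only system locations
        "Path": os.pathsep.join([
            os.path.join(windir, "system32"),
            windir,
            os.path.join(windir, "System32", "Wbem"),
            os.path.join(windir, "System32", "WindowsPowerShell", "v1.0"),
            os.path.join(windir, "System32", "OpenSSH"),
        ]),
        "CONDA_SHLVL": "",        # like SET CONDA_SHLVL=
        "PYTHONNOUSERSITE": "1",  # like SET PYTHONNOUSERSITE=1: skip user site-packages
        "PYTHONPATH": "",         # like SET PYTHONPATH=
    }
    return subprocess.run(cmd, env=env, check=True)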

View File

@@ -1390,9 +1390,7 @@ def general_startup(override_args=None):
parser.add_argument("--summarizer_model", action='store', default="philschmid/bart-large-cnn-samsum", help="Huggingface model to use for summarization. Defaults to sshleifer/distilbart-cnn-12-6")
parser.add_argument("--max_summary_length", action='store', default=75, help="Maximum size for summary to send to image generation")
parser.add_argument("--multi_story", action='store_true', default=False, help="Allow multi-story mode (experimental)")
parser.add_argument("--peft", type=str, help="Specify the path or HuggingFace ID of a Peft to load it. Not supported on TPU. (Experimental)")
parser.add_argument("--trust_remote_code", action='store_true', default=False, help="Allow Huggingface Models to Execute Code (Insecure!)")
parser.add_argument("--peft", type=str, help="Specify the path or HuggingFace ID of a Peft to load it. Not supported on TPU. (Experimental)")
parser.add_argument('-f', action='store', help="option for compatibility with colab memory profiles")
parser.add_argument('-v', '--verbosity', action='count', default=0, help="The default logging level is ERROR or higher. This value increases the amount of logging seen in your screen")
parser.add_argument('-q', '--quiesce', action='count', default=0, help="The default logging level is ERROR or higher. This value decreases the amount of logging seen in your screen")
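
Since -v and -q both use action='count', repeated flags accumulate, and the two counters can be folded into a single logging threshold. A hedged sketch of one plausible mapping (the exact formula aiserver.py uses is not visible in this diff):

import argparse
import logging

parser = argparse.ArgumentParser()
parser.add_argument('-v', '--verbosity', action='count', default=0)
parser.add_argument('-q', '--quiesce', action='count', default=0)
args = parser.parse_args(['-vv'])  # as if passed on the command line

# start at ERROR; each -v lowers the threshold, each -q raises it
level = logging.ERROR - 10 * args.verbosity + 10 * args.quiesce
logging.basicConfig(level=min(max(level, logging.DEBUG), logging.CRITICAL))
logging.getLogger(__name__).info("visible because -vv lowered the level to INFO")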
@@ -1474,7 +1472,6 @@ def general_startup(override_args=None):
args.remote = True;
args.override_rename = True;
args.override_delete = True;
args.nobreakmodel = True;
args.quiet = True;
args.lowmem = True;
args.noaimenu = True;
@@ -1521,13 +1518,6 @@ def general_startup(override_args=None):
allowed_ips = sorted(allowed_ips, key=lambda ip: int(''.join([i.zfill(3) for i in ip.split('.')])))
print(f"Allowed IPs: {allowed_ips}")
if args.trust_remote_code:
logger.warning("EXECUTION OF UNSAFE REMOTE CODE IS ENABLED!!!")
logger.warning("You are not protected from Model Viruses in this mode!")
logger.warning("Exit the program now to abort execution!")
logger.warning("Only use this mode with models that you trust and verified!")
time.sleep(25)
koboldai_vars.trust_remote_code = True
if args.cpu:
koboldai_vars.use_colab_tpu = False
koboldai_vars.hascuda = False
@@ -1682,7 +1672,6 @@ class RestrictedUnpickler(pickle.Unpickler):
)
def load(self, *args, **kwargs):
logger.info("Using safe unpickle")
self.original_persistent_load = getattr(
self, "persistent_load", pickle.Unpickler.persistent_load
)
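
The load() override shown here logs that safe unpickling is active and saves any existing persistent_load hook before delegating. The heart of the technique lives elsewhere in the class: find_class restricts which globals a pickle may resolve. A minimal standalone sketch of that pattern (the allow-list below is illustrative, not KoboldAI's actual list):

import io
import pickle

class SafeUnpickler(pickle.Unpickler):
    # only these (module, name) pairs may be resolved while unpickling
    ALLOWED = {("collections", "OrderedDict")}

    def find_class(self, module, name):
        if (module, name) in self.ALLOWED:
            return super().find_class(module, name)
        raise pickle.UnpicklingError(f"blocked global: {module}.{name}")

def safe_loads(data: bytes):
    return SafeUnpickler(io.BytesIO(data)).load()

# plain containers round-trip; pickles referencing other globals are refused
print(safe_loads(pickle.dumps({"weights": [1, 2, 3]})))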

View File

@@ -2,6 +2,8 @@
cd /D %~dp0
:Isolation
call conda deactivate 2>NUL
set Path=%windir%\system32;%windir%;C:\Windows\System32\Wbem;%windir%\System32\WindowsPowerShell\v1.0\;%windir%\System32\OpenSSH\
SET CONDA_SHLVL=
SET PYTHONNOUSERSITE=1
SET PYTHONPATH=

View File

@@ -2,6 +2,8 @@
cd /D %~dp0
:Isolation
call conda deactivate 2>NUL
set Path=%windir%\system32;%windir%;C:\Windows\System32\Wbem;%windir%\System32\WindowsPowerShell\v1.0\;%windir%\System32\OpenSSH\
SET CONDA_SHLVL=
SET PYTHONNOUSERSITE=1
SET PYTHONPATH=

View File

@@ -10,6 +10,8 @@ Reg add "HKLM\SYSTEM\CurrentControlSet\Control\FileSystem" /v "LongPathsEnabled"
cd /D %~dp0
:Isolation
call conda deactivate 2>NUL
set Path=%windir%\system32;%windir%;C:\Windows\System32\Wbem;%windir%\System32\WindowsPowerShell\v1.0\;%windir%\System32\OpenSSH\
SET CONDA_SHLVL=
SET PYTHONNOUSERSITE=1
SET PYTHONPATH=

View File

@@ -1206,12 +1206,12 @@ class system_settings(settings):
local_only_variables = ['lua_state', 'lua_logname', 'lua_koboldbridge', 'lua_kobold',
'lua_koboldcore', 'regex_sl', 'acregex_ai', 'acregex_ui', 'comregex_ai', 'comregex_ui',
'sp', '_horde_pid', 'inference_config', 'image_pipeline',
'summarizer', 'summary_tokenizer', 'tts_model', 'rng_states', 'comregex_ai', 'comregex_ui', 'trust_remote_code']
'summarizer', 'summary_tokenizer', 'tts_model', 'rng_states', 'comregex_ai', 'comregex_ui']
no_save_variables = ['lua_state', 'lua_logname', 'lua_koboldbridge', 'lua_kobold',
'lua_koboldcore', 'sp', 'sp_length', '_horde_pid', 'horde_share', 'aibusy',
'serverstarted', 'inference_config', 'image_pipeline', 'summarizer', 'on_colab',
'summary_tokenizer', 'use_colab_tpu', 'noai', 'disable_set_aibusy', 'cloudflare_link', 'tts_model',
'generating_image', 'bit_8_available', 'bit_4_available', 'host', 'hascuda', 'usegpu', 'rng_states', 'comregex_ai', 'comregex_ui', 'git_repository', 'git_branch', 'trust_remote_code']
'generating_image', 'bit_8_available', 'bit_4_available', 'host', 'hascuda', 'usegpu', 'rng_states', 'comregex_ai', 'comregex_ui', 'git_repository', 'git_branch']
settings_name = "system"
def __init__(self, socketio, koboldai_var):
self._socketio = socketio
@@ -1304,7 +1304,6 @@ class system_settings(settings):
self.seen_messages = []
self.git_repository = ""
self.git_branch = ""
self.trust_remote_code = False
@dataclass

View File

@@ -251,7 +251,7 @@ class model_backend(HFTorchInferenceModel):
if utils.koboldai_vars.hascuda:
if self.usegpu:
if self.usegpu or self.nobreakmodel:
# Use just VRAM
self.model = self.model.half().to(utils.koboldai_vars.gpu_device)
elif self.breakmodel:
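
The widened condition loads the model fully into VRAM not only when usegpu is set but also when breakmodel layer-splitting is disabled via nobreakmodel. The underlying PyTorch idiom, sketched with a stand-in module (the model and flags here are placeholders):

import torch
import torch.nn as nn

model = nn.Linear(16, 16)   # stand-in for the loaded language model
usegpu, nobreakmodel = True, False

if torch.cuda.is_available() and (usegpu or nobreakmodel):
    # cast weights to fp16 and place them all on one GPU ("use just VRAM")
    model = model.half().to(torch.device("cuda", 0))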

View File

@@ -41,7 +41,7 @@ class model_backend(InferenceModel):
return model_name == "CLUSTER" or model_name in [x['value'] for x in self.models]
def get_requested_parameters(self, model_name, model_path, menu_path, parameters = {}):
if os.path.exists("settings/api.model_backend.settings") and 'base_url' not in vars(self):
if os.path.exists("settings/horde.model_backend.settings") and 'base_url' not in vars(self):
with open("settings/horde.model_backend.settings", "r") as f:
temp = json.load(f)
self.base_url = temp['url']
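
The one-line fix points the Horde backend at its own saved settings (settings/horde.model_backend.settings) rather than the API backend's file. The surrounding pattern, read the file only if it exists and base_url was never set, looks roughly like this (the class is a stub for illustration):

import json
import os

class HordeBackend:
    SETTINGS_PATH = "settings/horde.model_backend.settings"

    def load_saved_settings(self):
        # fall back to the saved file only when base_url is still unset
        if os.path.exists(self.SETTINGS_PATH) and "base_url" not in vars(self):
            with open(self.SETTINGS_PATH, "r") as f:
                self.base_url = json.load(f)["url"]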

View File

@@ -42,10 +42,17 @@ import utils
import torch
import numpy as np
if utils.koboldai_vars.use_colab_tpu:
import jax
import jax.numpy as jnp
import tpu_mtj_backend
try:
ignore = utils.koboldai_vars.use_colab_tpu
ok = True
except:
ok = False
if ok:
if utils.koboldai_vars.use_colab_tpu:
import jax
import jax.numpy as jnp
import tpu_mtj_backend
def update_settings():
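
The try/except wrapper exists because utils.koboldai_vars may raise if accessed before it is initialized; the TPU-only imports now run only when the flag is both readable and truthy. The same guard, factored into a helper (the module name comes from the diff, everything else is illustrative):

import importlib

def maybe_import_tpu_backend(koboldai_vars):
    """Import the TPU backend only when use_colab_tpu is readable and set."""
    try:
        enabled = bool(koboldai_vars.use_colab_tpu)
    except Exception:   # vars object not initialized yet
        enabled = False
    if not enabled:
        return None
    return importlib.import_module("tpu_mtj_backend")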

View File

@@ -2,11 +2,13 @@
cd /D %~dp0
:Isolation
call conda deactivate 2>NUL
set Path=%windir%\system32;%windir%;C:\Windows\System32\Wbem;%windir%\System32\WindowsPowerShell\v1.0\;%windir%\System32\OpenSSH\
SET CONDA_SHLVL=
SET PYTHONNOUSERSITE=1
SET PYTHONPATH=
rmdir /S /Q flask_session
rmdir /S /Q flask_session 2>NUL
TITLE KoboldAI - Server
SET /P M=<loader.settings

View File

@@ -17,13 +17,13 @@
"rep_pen_range": 1024,
"rep_pen_slope": 0.7,
"sampler_order": [
6,
5,
4,
3,
2,
1,
0,
6
0
]
},
{
@@ -44,13 +44,13 @@
"rep_pen_range": 1024,
"rep_pen_slope": 0.7,
"sampler_order": [
6,
5,
4,
3,
1,
2,
0,
6
0
]
},
{
@@ -71,13 +71,13 @@
"rep_pen_range": 1024,
"rep_pen_slope": 0.7,
"sampler_order": [
6,
0,
1,
2,
3,
4,
5,
6
5
]
},
{
@@ -98,13 +98,13 @@
"rep_pen_range": 1024,
"rep_pen_slope": 0.7,
"sampler_order": [
6,
0,
1,
2,
3,
4,
5,
6
5
]
},
{
@@ -125,13 +125,13 @@
"rep_pen_range": 1024,
"rep_pen_slope": 0.7,
"sampler_order": [
6,
0,
1,
2,
3,
4,
5,
6
5
]
},
{
@@ -152,13 +152,13 @@
"rep_pen_range": 1024,
"rep_pen_slope": 0.7,
"sampler_order": [
6,
4,
5,
1,
0,
2,
3,
6
3
]
}
]
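
All six presets in this file get the same edit: sampler id 6 moves from the end of sampler_order to the front. Assuming KoboldAI's usual numbering, where 6 is repetition penalty and 5 is temperature (an assumption; the mapping is not stated in this diff), the order is simply the sequence in which per-sampler transforms are applied to the logits:

import numpy as np

def temperature(logits, temp=0.7):
    return logits / temp

def rep_pen(logits, generated=(0,), penalty=1.1):
    out = logits.copy()
    for tok in generated:
        out[tok] /= penalty  # simplified; real implementations treat negative logits differently
    return out

# hypothetical id -> transform table; only two samplers stubbed here
SAMPLERS = {5: temperature, 6: rep_pen}

def apply_sampler_order(logits, order):
    for sampler_id in order:
        if sampler_id in SAMPLERS:
            logits = SAMPLERS[sampler_id](logits)
    return logits

# with 6 first, repetition penalty now runs before temperature scaling
print(apply_sampler_order(np.array([2.0, 1.0, 0.5]), order=[6, 5, 4, 3, 2, 1, 0]))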

View File

@@ -17,13 +17,13 @@
"rep_pen_range": 2048,
"rep_pen_slope": 0.1,
"sampler_order": [
6,
2,
0,
3,
5,
1,
4,
6
4
]
},
{
@@ -44,13 +44,13 @@
"rep_pen_range": 2048,
"rep_pen_slope": 0.3,
"sampler_order": [
6,
5,
0,
2,
3,
1,
4,
6
4
]
},
{
@@ -71,13 +71,13 @@
"rep_pen_range": 404,
"rep_pen_slope": 0.8,
"sampler_order": [
6,
0,
5,
3,
2,
1,
4,
6
4
]
},
{
@@ -98,13 +98,13 @@
"rep_pen_range": 2048,
"rep_pen_slope": 7,
"sampler_order": [
6,
3,
2,
0,
5,
1,
4,
6
4
]
},
{
@@ -125,13 +125,13 @@
"rep_pen_range": 0,
"rep_pen_slope": 0.0,
"sampler_order": [
6,
0,
4,
1,
3,
5,
2,
6
2
]
},
{
@@ -152,13 +152,13 @@
"rep_pen_range": 2048,
"rep_pen_slope": 0.1,
"sampler_order": [
6,
5,
0,
2,
3,
1,
4,
6
4
]
},
{
@@ -179,13 +179,13 @@
"rep_pen_range": 400,
"rep_pen_slope": 0.3,
"sampler_order": [
6,
5,
0,
3,
2,
1,
4,
6
4
]
},
{
@@ -206,13 +206,13 @@
"rep_pen_range": 2048,
"rep_pen_slope": 0.2,
"sampler_order": [
6,
5,
0,
2,
3,
1,
4,
6
4
]
},
{
@@ -233,13 +233,13 @@
"rep_pen_range": 2048,
"rep_pen_slope": 0.0,
"sampler_order": [
6,
5,
0,
2,
3,
1,
4,
6
4
]
},
{
@@ -260,13 +260,13 @@
"rep_pen_range": 2048,
"rep_pen_slope": 0.1,
"sampler_order": [
6,
3,
2,
5,
0,
1,
4,
6
4
]
}
]

View File

@@ -8,13 +8,13 @@
"rep_pen_range": 2048,
"rep_pen_slope": 0.2,
"sampler_order": [
6,
5,
0,
2,
3,
1,
4,
6
4
],
"temp": 0.72,
"tfs": 1.0,
@@ -35,13 +35,13 @@
"rep_pen_range": 2048,
"rep_pen_slope": 0.0,
"sampler_order": [
6,
5,
0,
2,
3,
1,
4,
6
4
],
"temp": 0.51,
"tfs": 0.99,
@@ -71,13 +71,13 @@
"rep_pen_range": 2048,
"rep_pen_slope": 0.0,
"sampler_order": [
6,
5,
0,
2,
3,
1,
4,
6
4
]
},
{
@@ -98,13 +98,13 @@
"rep_pen_range": 2048,
"rep_pen_slope": 0.0,
"sampler_order": [
6,
5,
0,
2,
3,
1,
4,
6
4
]
},
{
@@ -125,13 +125,13 @@
"rep_pen_range": 2048,
"rep_pen_slope": 0.0,
"sampler_order": [
6,
5,
0,
2,
3,
1,
4,
6
4
]
},
{
@@ -152,13 +152,13 @@
"rep_pen_range": 2048,
"rep_pen_slope": 3.4,
"sampler_order": [
6,
5,
0,
2,
3,
1,
4,
6
4
]
},
{
@@ -179,13 +179,13 @@
"rep_pen_range": 2048,
"rep_pen_slope": 6.8,
"sampler_order": [
6,
5,
0,
2,
3,
1,
4,
6
4
]
}
]

View File

@@ -1255,7 +1255,11 @@ def load_model(path: str, model_type: str, badwordsids=koboldai_settings.badword
params["cores_per_replica"],
network.state["params"][spec["module"]][spec["param"]].shape,
)
tensor = jnp.array(tensor.detach())
tensor = tensor.detach()
# numpy does not support bfloat16
if tensor.dtype is torch.bfloat16:
tensor = tensor.to(torch.float32)
tensor = jnp.array(tensor)
if tensor.dtype is torch.float16 or tensor.dtype is torch.float32:
tensor = tensor.bfloat16()
network.state["params"][spec["module"]][spec["param"]] = move_xmap(

View File

@@ -2,6 +2,8 @@
cd /d %~dp0
:Isolation
call conda deactivate 2>NUL
set Path=%windir%\system32;%windir%;C:\Windows\System32\Wbem;%windir%\System32\WindowsPowerShell\v1.0\;%windir%\System32\OpenSSH\
SET CONDA_SHLVL=
SET PYTHONNOUSERSITE=1
SET PYTHONPATH=