Mirror of https://github.com/KoboldAI/KoboldAI-Client.git (synced 2025-06-05 21:59:24 +02:00)
Merge branch 'Model_Plugins' of https://github.com/ebolam/KoboldAI into Model_Plugins
@@ -32,7 +32,7 @@ dependencies:
     - flask-ngrok
     - flask-cors
     - lupa==1.10
-    - transformers==4.29.*
+    - transformers==4.28.*
     - huggingface_hub==0.12.1
     - safetensors==0.3.1
     - accelerate==0.18.0
@@ -30,7 +30,7 @@ dependencies:
     - flask-ngrok
     - flask-cors
     - lupa==1.10
-    - transformers==4.29.*
+    - transformers==4.28.*
     - huggingface_hub==0.12.1
     - safetensors==0.3.1
     - accelerate==0.18.0
@@ -536,10 +536,9 @@ class HFTorchInferenceModel(HFInferenceModel):
                 last_storage_key = storage_key
                 if isinstance(f, zipfile.ZipExtFile):
                     f.close()
-                try:
-                    f = z.open(f"archive/data/{storage_key}")
-                except:
-                    f = z.open(f"{zipfolder}/data/{storage_key}")
+                ziproot = z.namelist()[0].split("/")[0]
+                f = z.open(f"{ziproot}/data/{storage_key}")
                 current_offset = 0
             if current_offset != model_dict[key].seek_offset:
                 f.read(model_dict[key].seek_offset - current_offset)
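The hunk above stops assuming the checkpoint zip's top-level folder is named "archive" (with a zipfolder fallback) and instead derives the folder name from the archive's own file listing. A minimal sketch of that idea using only Python's zipfile module; the checkpoint path and storage key below are illustrative and not taken from the repository:

import zipfile

checkpoint_path = "pytorch_model.bin"  # illustrative path to a zip-format torch checkpoint

with zipfile.ZipFile(checkpoint_path) as z:
    # The loader assumes every member shares one top-level folder
    # (commonly "archive", but not guaranteed), so the first entry
    # is enough to recover its name.
    ziproot = z.namelist()[0].split("/")[0]
    storage_key = "0"  # illustrative; real keys come from the pickled model_dict
    # Tensor payloads live under "<root>/data/<storage_key>" inside the zip.
    with z.open(f"{ziproot}/data/{storage_key}") as f:
        payload = f.read()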
@@ -196,6 +196,8 @@ class RestrictedUnpickler(pickle.Unpickler):
             return collections.OrderedDict
         elif module == "torch._utils" and name == "_rebuild_tensor_v2":
             return torch._utils._rebuild_tensor_v2
+        elif module == "torch._tensor" and name == "_rebuild_from_type_v2":
+            return torch._tensor._rebuild_from_type_v2
         elif module == "torch" and name in (
             "DoubleStorage",
             "FloatStorage",
@@ -207,6 +209,7 @@ class RestrictedUnpickler(pickle.Unpickler):
             "ByteStorage",
             "BoolStorage",
             "BFloat16Storage",
+            "Tensor",
         ):
             return getattr(torch, name)
         elif module == "numpy.core.multiarray" and name == "scalar":
@@ -219,7 +222,7 @@ class RestrictedUnpickler(pickle.Unpickler):
         # Forbid everything else.
         qualified_name = name if module == "__builtin__" else f"{module}.{name}"
         raise pickle.UnpicklingError(
-            f"`{qualified_name}` is forbidden; the model you are loading probably contains malicious code"
+            f"`{qualified_name}` is forbidden; the model you are loading probably contains malicious code. If you think this is incorrect ask the developer to unban the ability for {module} to execute {name}"
         )

     def load(self, *args, **kwargs):
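Taken together, the three RestrictedUnpickler hunks above add torch._tensor._rebuild_from_type_v2 and torch.Tensor to the unpickling allow-list and make the rejection message name the module and attribute that were blocked. A condensed sketch of the resulting find_class pattern, trimmed to the entries visible in this diff (the full class allows more storage types):

import collections
import pickle

import torch


class RestrictedUnpickler(pickle.Unpickler):
    def find_class(self, module, name):
        # Resolve only globals known to be safe tensor-rebuilding helpers
        # or torch storage/tensor types; reject everything else.
        if module == "collections" and name == "OrderedDict":
            return collections.OrderedDict
        elif module == "torch._utils" and name == "_rebuild_tensor_v2":
            return torch._utils._rebuild_tensor_v2
        elif module == "torch._tensor" and name == "_rebuild_from_type_v2":
            return torch._tensor._rebuild_from_type_v2
        elif module == "torch" and name in ("FloatStorage", "Tensor"):
            return getattr(torch, name)
        # Forbid everything else, and say exactly what was rejected.
        qualified_name = name if module == "__builtin__" else f"{module}.{name}"
        raise pickle.UnpicklingError(
            f"`{qualified_name}` is forbidden; the model you are loading "
            f"probably contains malicious code. If you think this is "
            f"incorrect ask the developer to unban the ability for "
            f"{module} to execute {name}"
        )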
@@ -1,4 +1,4 @@
-transformers==4.29.*
+transformers==4.28.*
 huggingface_hub==0.12.1
 Flask==2.2.3
 Flask-SocketIO==5.3.2
@@ -5,7 +5,7 @@ requests
 dm-haiku==0.0.9
 jax==0.3.25
 jaxlib==0.3.25
-transformers==4.29.*
+transformers==4.28.*
 chex == 0.1.5
 huggingface_hub==0.12.1
 progressbar2
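All four dependency hunks in this commit move the same pin, transformers, from 4.29.* back to 4.28.*. A small check like the one below can confirm an installed environment satisfies the new pin; it assumes the packaging library is available and is not part of the repository itself:

from importlib.metadata import version

from packaging.specifiers import SpecifierSet

pin = SpecifierSet("==4.28.*")  # the pin introduced by this diff
installed = version("transformers")

if installed in pin:
    print(f"transformers {installed} satisfies {pin}")
else:
    print(f"transformers {installed} does NOT satisfy {pin}")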