Transformers 4.28 support
@@ -2951,7 +2951,7 @@ def load_model(use_gpu=True, gpu_layers=None, disk_layers=None, initial_load=Fal
 koboldai_vars.status_message = "Loading model"
 koboldai_vars.total_layers = num_tensors
 koboldai_vars.loaded_layers = 0
-utils.bar = tqdm(total=num_tensors, desc="Loading model tensors", file=Send_to_socketio())
+utils.bar = tqdm(total=num_tensors, desc="Loading model tensors", file=Send_to_socketio(), position=1)
 
 with zipfile.ZipFile(f, "r") as z:
     try:
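The only source change in this hunk is the extra position=1 argument to tqdm, which pins the loading bar to its own row so it does not clobber whatever bar or output already occupies row 0. Below is a minimal, hedged sketch of that pattern; SocketWriter is a hypothetical stand-in for the repo's Send_to_socketio object, which this diff does not show but which must behave like a file (write/flush) to be usable as tqdm's file= argument.

from tqdm import tqdm
import time

class SocketWriter:
    """Hypothetical stand-in for Send_to_socketio: a file-like sink
    that forwards tqdm's rendered progress text to another channel."""
    def write(self, text):
        text = text.strip()
        if text:
            print(f"[to socket] {text}")  # the real class would emit over Socket.IO
    def flush(self):
        pass  # tqdm calls flush(); nothing is buffered in this sketch

num_tensors = 5
# position=1 reserves the second row for this bar instead of row 0.
bar = tqdm(total=num_tensors, desc="Loading model tensors",
           file=SocketWriter(), position=1)
for _ in range(num_tensors):
    time.sleep(0.1)  # stand-in for loading one tensor
    bar.update(1)
bar.close()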
@@ -30,7 +30,7 @@ dependencies:
 - flask-ngrok
 - flask-cors
 - lupa==1.10
-- transformers==4.25.1
+- transformers==4.28.0
 - huggingface_hub==0.12.1
 - safetensors
 - accelerate==0.18.0
@@ -29,7 +29,7 @@ dependencies:
 - flask-ngrok
 - flask-cors
 - lupa==1.10
-- transformers==4.25.1
+- transformers==4.28.0
 - huggingface_hub==0.12.1
 - safetensors
 - accelerate
@@ -1,4 +1,4 @@
-transformers==4.25.1
+transformers==4.28.0
 huggingface_hub==0.12.1
 Flask==2.2.3
 Flask-SocketIO==5.3.2
@@ -5,7 +5,7 @@ requests
 dm-haiku == 0.0.5
 jax == 0.2.21
 jaxlib >= 0.1.69, <= 0.3.7
-transformers == 4.25.1
+transformers == 4.28.0
 chex == 0.1.5
 huggingface_hub==0.12.1
 progressbar2
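Every remaining hunk is the same dependency bump, moving the transformers pin from 4.25.1 to 4.28.0 in what appear to be the conda environment files and the two pip requirements files. A small hedged check for an installed environment, using only the version attributes both packages expose:

import transformers
import huggingface_hub

# The pins above expect transformers 4.28.0 alongside huggingface_hub 0.12.1.
print("transformers:", transformers.__version__)
print("huggingface_hub:", huggingface_hub.__version__)
assert transformers.__version__.startswith("4.28"), "expected the 4.28.x pin from this commit"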