mirror of
https://github.com/KoboldAI/KoboldAI-Client.git
synced 2025-06-05 21:59:24 +02:00
Colab transformers fix
This commit is contained in:
43
aiserver.py
43
aiserver.py
@@ -1438,49 +1438,6 @@ def patch_transformers_download():
|
||||
def flush(self):
    """No-op flush.

    Required to satisfy the file-like stream interface (tqdm calls
    ``flush()`` on the object given as its ``file=`` target); output is
    forwarded as it is written, so there is nothing to flush here.
    """
    pass
|
||||
|
||||
def http_get(
    url: str,
    temp_file: BinaryIO,
    *,
    proxies=None,
    resume_size=0,
    headers: Optional[Dict[str, str]] = None,
    timeout=10.0,
    max_retries=0,
):
    """
    Download a remote file. Do not gobble up errors, and will return errors tailored to the Hugging Face Hub.

    Patched copy of huggingface_hub's downloader that routes tqdm progress
    output through ``Send_to_socketio`` so the UI can display it.

    Args:
        url: Remote URL to fetch.
        temp_file: Open binary file object the response body is written to.
        proxies: Optional proxies mapping forwarded to the request wrapper.
        resume_size: Number of bytes already downloaded; when > 0 a
            ``Range`` header is sent and the progress bar starts there.
        headers: Optional extra request headers (deep-copied before use so
            the caller's dict is never mutated).
        timeout: Request timeout in seconds.
        max_retries: Retry count forwarded to the request wrapper.

    Raises:
        Whatever ``hf_raise_for_status`` raises for non-success responses.
    """
    # Deep-copy so we never mutate the caller's dict; fall back to a fresh
    # dict when no headers were given (the original crashed with TypeError
    # on resume_size > 0 with headers=None, since deepcopy(None) is None).
    headers = copy.deepcopy(headers) if headers is not None else {}
    if resume_size > 0:
        headers["Range"] = "bytes=%d-" % (resume_size,)
    r = _request_wrapper(
        method="GET",
        url=url,
        stream=True,
        proxies=proxies,
        headers=headers,
        timeout=timeout,
        max_retries=max_retries,
    )
    hf_raise_for_status(r)
    content_length = r.headers.get("Content-Length")
    # Total includes what was already on disk; unknown when the server
    # sends no Content-Length (tqdm then shows an indeterminate bar).
    total = resume_size + int(content_length) if content_length is not None else None
    progress = tqdm(
        unit="B",
        unit_scale=True,
        total=total,
        initial=resume_size,
        desc="Downloading",
        file=Send_to_socketio(),  # stream progress to the web UI instead of stderr
        disable=bool(logger.getEffectiveLevel() == logging.NOTSET),
    )
    for chunk in r.iter_content(chunk_size=1024):
        if chunk:  # filter out keep-alive new chunks
            progress.update(len(chunk))
            temp_file.write(chunk)
    progress.close()
|
||||
|
||||
def http_get(
|
||||
url: str,
|
||||
temp_file: transformers.utils.hub.BinaryIO,
|
||||
|
Reference in New Issue
Block a user