Colab transformers fix

ebolam
2022-09-14 18:27:22 -04:00
parent 22fc1b69ee
commit 04621ccbbc
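
This commit refreshes KoboldAI's monkey-patched copy of Hugging Face's http_get so model downloads keep working on Colab against newer transformers/huggingface_hub releases. The point of the override is to route tqdm's download progress into the web UI over socketio instead of stderr. A minimal sketch of that redirection pattern follows; the class name, event name, and emit call are illustrative assumptions, not the project's exact code:

    import tqdm

    class SendToSocketIO:
        """File-like sink forwarding tqdm's progress text to the browser.

        Stand-in for KoboldAI's Send_to_socketio; the emit target named
        in the comment below is hypothetical, not from the repository.
        """
        def write(self, text: str):
            text = text.replace("\r", "").strip()
            if text:
                # In the real patch this would emit over the websocket,
                # e.g. socketio.emit("from_server", {"data": text}).
                print(text)

        def flush(self):
            # tqdm calls flush(); nothing to do for this sink.
            pass

    # tqdm accepts any object exposing write()/flush() as its file= argument.
    bar = tqdm.tqdm(total=1 << 20, unit="B", unit_scale=True,
                    desc="Downloading", file=SendToSocketIO())
    bar.update(512 << 10)
    bar.close()

Because tqdm accepts any such object as file=, the patched http_get below only needs to pass the sink when constructing the progress bar.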


@@ -1438,49 +1438,6 @@ def patch_transformers_download():
         def flush(self):
             pass
-    def http_get(
-        url: str,
-        temp_file: BinaryIO,
-        *,
-        proxies=None,
-        resume_size=0,
-        headers: Optional[Dict[str, str]] = None,
-        timeout=10.0,
-        max_retries=0,
-    ):
-        """
-        Download a remote file. Does not gobble up errors; raises errors tailored to the Hugging Face Hub.
-        """
-        headers = copy.deepcopy(headers)
-        if resume_size > 0:
-            headers["Range"] = "bytes=%d-" % (resume_size,)
-        r = _request_wrapper(
-            method="GET",
-            url=url,
-            stream=True,
-            proxies=proxies,
-            headers=headers,
-            timeout=timeout,
-            max_retries=max_retries,
-        )
-        hf_raise_for_status(r)
-        content_length = r.headers.get("Content-Length")
-        total = resume_size + int(content_length) if content_length is not None else None
-        progress = tqdm(
-            unit="B",
-            unit_scale=True,
-            total=total,
-            initial=resume_size,
-            desc="Downloading",
-            file=Send_to_socketio(),
-            disable=bool(logger.getEffectiveLevel() == logging.NOTSET),
-        )
-        for chunk in r.iter_content(chunk_size=1024):
-            if chunk:  # filter out keep-alive new chunks
-                progress.update(len(chunk))
-                temp_file.write(chunk)
-        progress.close()
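
An override like the deleted function above only takes effect once it is assigned back into the library's namespace, which patch_transformers_download() presumably does after defining it. A minimal sketch of that installation step, assuming the attribute path huggingface_hub.file_download.http_get (the exact attribute patched by this file is not visible in this hunk):

    import huggingface_hub.file_download

    # Keep a handle to the stock implementation so it can be restored.
    _original_http_get = huggingface_hub.file_download.http_get

    def http_get(*args, **kwargs):
        # The real patch re-implements the download loop (see the diff
        # above); this sketch just delegates so it stays runnable.
        return _original_http_get(*args, **kwargs)

    # Install the override: subsequent downloads go through our function.
    huggingface_hub.file_download.http_get = http_get
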
+    def http_get(
+        url: str,
+        temp_file: transformers.utils.hub.BinaryIO,