From 04621ccbbccf1dc44b2860d596cf212d0a4a94de Mon Sep 17 00:00:00 2001
From: ebolam
Date: Wed, 14 Sep 2022 18:27:22 -0400
Subject: [PATCH] Colab transformers fix

---
 aiserver.py | 43 -------------------------------------------
 1 file changed, 43 deletions(-)

diff --git a/aiserver.py b/aiserver.py
index a7c24a85..e8322f4a 100644
--- a/aiserver.py
+++ b/aiserver.py
@@ -1438,49 +1438,6 @@ def patch_transformers_download():
         def flush(self):
             pass
 
-    def http_get(
-        url: str,
-        temp_file: BinaryIO,
-        *,
-        proxies=None,
-        resume_size=0,
-        headers: Optional[Dict[str, str]] = None,
-        timeout=10.0,
-        max_retries=0,
-    ):
-        """
-        Donwload a remote file. Do not gobble up errors, and will return errors tailored to the Hugging Face Hub.
-        """
-        headers = copy.deepcopy(headers)
-        if resume_size > 0:
-            headers["Range"] = "bytes=%d-" % (resume_size,)
-        r = _request_wrapper(
-            method="GET",
-            url=url,
-            stream=True,
-            proxies=proxies,
-            headers=headers,
-            timeout=timeout,
-            max_retries=max_retries,
-        )
-        hf_raise_for_status(r)
-        content_length = r.headers.get("Content-Length")
-        total = resume_size + int(content_length) if content_length is not None else None
-        progress = tqdm(
-            unit="B",
-            unit_scale=True,
-            total=total,
-            initial=resume_size,
-            desc="Downloading",
-            file=Send_to_socketio(),
-            disable=bool(logger.getEffectiveLevel() == logging.NOTSET),
-        )
-        for chunk in r.iter_content(chunk_size=1024):
-            if chunk:  # filter out keep-alive new chunks
-                progress.update(len(chunk))
-                temp_file.write(chunk)
-        progress.close()
-
     def http_get(
         url: str,
         temp_file: transformers.utils.hub.BinaryIO,
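
Editor's note on the change above: patch_transformers_download() in aiserver.py had
accumulated two nested http_get definitions, and the first (deleted here) was written
against an older huggingface_hub-style interface (BinaryIO, _request_wrapper,
hf_raise_for_status) that no longer matched the transformers version shipped on Colab;
the second definition, typed against transformers.utils.hub.BinaryIO, is the one that
stays. Below is a minimal, self-contained sketch of the underlying technique:
monkey-patching transformers' download helper so its tqdm progress bar writes to a
custom file-like object instead of stderr. It is a sketch, not the patch's exact
implementation: emit_progress is a hypothetical stand-in for the socketio broadcast
aiserver.py performs, and it assumes a 2022-era transformers that exposes http_get in
transformers.utils.hub, as the patched code does.

    import requests
    import transformers
    from tqdm.auto import tqdm

    def emit_progress(text: str) -> None:
        # Hypothetical stand-in for the socketio broadcast aiserver.py performs;
        # printing keeps the sketch runnable on its own.
        print(text, end="", flush=True)

    class SendToSocketIO:
        """File-like sink handed to tqdm so progress lines reach the web UI."""
        def write(self, bar: str) -> None:
            if bar.strip():
                emit_progress(bar)
        def flush(self) -> None:
            pass

    def http_get(url, temp_file, *, proxies=None, resume_size=0,
                 headers=None, timeout=10.0, max_retries=0):
        # max_retries is accepted only for signature compatibility; this
        # sketch does not retry. Stream the file, resuming from resume_size
        # if a partial download already exists on disk.
        headers = dict(headers or {})
        if resume_size > 0:
            headers["Range"] = "bytes=%d-" % resume_size
        r = requests.get(url, stream=True, proxies=proxies,
                         headers=headers, timeout=timeout)
        r.raise_for_status()
        content_length = r.headers.get("Content-Length")
        total = resume_size + int(content_length) if content_length is not None else None
        progress = tqdm(unit="B", unit_scale=True, total=total,
                        initial=resume_size, desc="Downloading",
                        file=SendToSocketIO())
        for chunk in r.iter_content(chunk_size=1024):
            if chunk:  # filter out keep-alive chunks
                progress.update(len(chunk))
                temp_file.write(chunk)
        progress.close()

    # Install the override; subsequent from_pretrained() downloads pick it up.
    transformers.utils.hub.http_get = http_get

Keeping the override's signature identical to the library's own http_get is the design
point that makes this safe: callers inside transformers never notice the swap, only the
destination of the progress output changes.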