diff --git a/aiserver.py b/aiserver.py
index 5c93f87f..7cd88393 100644
--- a/aiserver.py
+++ b/aiserver.py
@@ -1389,9 +1389,7 @@ def general_startup(override_args=None):
     parser.add_argument("--summarizer_model", action='store', default="philschmid/bart-large-cnn-samsum", help="Huggingface model to use for summarization. Defaults to sshleifer/distilbart-cnn-12-6")
     parser.add_argument("--max_summary_length", action='store', default=75, help="Maximum size for summary to send to image generation")
     parser.add_argument("--multi_story", action='store_true', default=False, help="Allow multi-story mode (experimental)")
-    parser.add_argument("--peft", type=str, help="Specify the path or HuggingFace ID of a Peft to load it. Not supported on TPU. (Experimental)")
-    parser.add_argument("--trust_remote_code", action='store_true', default=False, help="Allow Huggingface Models to Execute Code (Insecure!)")
-
+    parser.add_argument("--peft", type=str, help="Specify the path or HuggingFace ID of a Peft to load it. Not supported on TPU. (Experimental)")
     parser.add_argument('-f', action='store', help="option for compatability with colab memory profiles")
     parser.add_argument('-v', '--verbosity', action='count', default=0, help="The default logging level is ERROR or higher. This value increases the amount of logging seen in your screen")
     parser.add_argument('-q', '--quiesce', action='count', default=0, help="The default logging level is ERROR or higher. This value decreases the amount of logging seen in your screen")
@@ -1520,13 +1518,6 @@ def general_startup(override_args=None):
         allowed_ips = sorted(allowed_ips, key=lambda ip: int(''.join([i.zfill(3) for i in ip.split('.')])))
         print(f"Allowed IPs: {allowed_ips}")

-    if args.trust_remote_code:
-        logger.warning("EXECUTION OF UNSAFE REMOTE CODE IS ENABLED!!!")
-        logger.warning("You are not protected from Model Viruses in this mode!")
-        logger.warning("Exit the program now to abort execution!")
-        logger.warning("Only use this mode with models that you trust and verified!")
-        time.sleep(25)
-        koboldai_vars.trust_remote_code = True
     if args.cpu:
         koboldai_vars.use_colab_tpu = False
         koboldai_vars.hascuda = False
diff --git a/koboldai_settings.py b/koboldai_settings.py
index 29a82406..e9cc16d0 100644
--- a/koboldai_settings.py
+++ b/koboldai_settings.py
@@ -1206,12 +1206,12 @@ class system_settings(settings):
     local_only_variables = ['lua_state', 'lua_logname', 'lua_koboldbridge', 'lua_kobold', 'lua_koboldcore',
                             'regex_sl', 'acregex_ai', 'acregex_ui', 'comregex_ai', 'comregex_ui',
                             'sp', '_horde_pid', 'inference_config', 'image_pipeline',
-                            'summarizer', 'summary_tokenizer', 'tts_model', 'rng_states', 'comregex_ai', 'comregex_ui', 'trust_remote_code']
+                            'summarizer', 'summary_tokenizer', 'tts_model', 'rng_states', 'comregex_ai', 'comregex_ui']
     no_save_variables = ['lua_state', 'lua_logname', 'lua_koboldbridge', 'lua_kobold', 'lua_koboldcore',
                          'sp', 'sp_length', '_horde_pid', 'horde_share', 'aibusy', 'serverstarted',
                          'inference_config', 'image_pipeline', 'summarizer', 'on_colab'
                          'summary_tokenizer', 'use_colab_tpu', 'noai', 'disable_set_aibusy', 'cloudflare_link', 'tts_model',
-                         'generating_image', 'bit_8_available', 'host', 'hascuda', 'usegpu', 'rng_states', 'comregex_ai', 'comregex_ui', 'git_repository', 'git_branch', 'trust_remote_code']
+                         'generating_image', 'bit_8_available', 'host', 'hascuda', 'usegpu', 'rng_states', 'comregex_ai', 'comregex_ui', 'git_repository', 'git_branch']
     settings_name = "system"
     def __init__(self, socketio, koboldai_var):
         self._socketio = socketio
@@ -1297,7 +1297,6 @@ class system_settings(settings):
         self.seen_messages = []
         self.git_repository = ""
         self.git_branch = ""
-        self.trust_remote_code = False


 @dataclass