mirror of
				https://github.com/KoboldAI/KoboldAI-Client.git
				synced 2025-06-05 21:59:24 +02:00 
			
		
		
		
	Command line support
Added command-line options for model selection; this makes KoboldAI usable inside Google Colab or on other unattended servers that people might want to use or provide.
This commit is contained in:
		
							
								
								
									
										5
									
								
								.gitignore
									
									
									
									
										vendored
									
									
								
							
							
						
						
									
										5
									
								
								.gitignore
									
									
									
									
										vendored
									
									
								
							| @@ -5,4 +5,7 @@ client.settings | |||||||
| stories/* | stories/* | ||||||
| !stories/sample_story.json | !stories/sample_story.json | ||||||
| /.project | /.project | ||||||
| *.bak | *.bak | ||||||
|  | miniconda3/* | ||||||
|  | *.settings | ||||||
|  | __pycache__ | ||||||
							
								
								
									
										17
									
								
								aiserver.py
									
									
									
									
									
								
							
							
						
						
									
										17
									
								
								aiserver.py
									
									
									
									
									
								
							| @@ -155,16 +155,25 @@ def gettokenids(char): | |||||||
| #==================================================================# | #==================================================================# | ||||||
|  |  | ||||||
| # Parsing Parameters | # Parsing Parameters | ||||||
| parser = argparse.ArgumentParser(description="My Script") | parser = argparse.ArgumentParser(description="KoboldAI Server") | ||||||
| parser.add_argument("--remote", action='store_true') | parser.add_argument("--remote", action='store_true', help="Optimizes KoboldAI for Remote Play") | ||||||
|  | parser.add_argument("--model", help="Specify the Model Type to skip the Menu") | ||||||
|  | parser.add_argument("--path", help="Specify the Path for local models (For model NeoCustom or GPT2Custom)") | ||||||
| args = parser.parse_args() | args = parser.parse_args() | ||||||
|  | vars.model = args.model; | ||||||
|  |  | ||||||
| if args.remote: | if args.remote: | ||||||
|     vars.remote = True; |     vars.remote = True; | ||||||
|  |  | ||||||
| # Select a model to run | # Select a model to run | ||||||
| print("{0}Welcome to the KoboldAI Client!\nSelect an AI model to continue:{1}\n".format(colors.CYAN, colors.END)) | if args.model: | ||||||
| getModelSelection() |     print("Welcome to KoboldAI!\nYou have selected the following Model:", vars.model) | ||||||
|  |     if args.path: | ||||||
|  |         print("You have selected the following path for your Model :", args.path) | ||||||
|  |         vars.custmodpth = args.path; | ||||||
|  | else: | ||||||
|  |     print("{0}Welcome to the KoboldAI Client!\nSelect an AI model to continue:{1}\n".format(colors.CYAN, colors.END)) | ||||||
|  |     getModelSelection() | ||||||
|  |  | ||||||
| # If transformers model was selected & GPU available, ask to use CPU or GPU | # If transformers model was selected & GPU available, ask to use CPU or GPU | ||||||
| if(not vars.model in ["InferKit", "Colab", "OAI", "ReadOnly"]): | if(not vars.model in ["InferKit", "Colab", "OAI", "ReadOnly"]): | ||||||
|   | |||||||
		Reference in New Issue
	
	Block a user