	Command line support
Added command line options for model selection. This makes KoboldAI usable inside Google Colab or on other unattended servers that people might want to use or provide.
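With these flags the server can be launched unattended instead of through the interactive menu. The invocation below is only an illustration: the --model, --path, and --remote flags come from this commit, while the model type and path values are placeholders.

    python aiserver.py --model NeoCustom --path /path/to/local/model --remote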
.gitignore (vendored) | 3 +++
@@ -6,3 +6,6 @@ stories/*
 !stories/sample_story.json
 /.project
 *.bak
+miniconda3/*
+*.settings
+__pycache__

aiserver.py | 17 +++++++++++++----
@@ -155,16 +155,25 @@ def gettokenids(char):
 #==================================================================#
 
 # Parsing Parameters
-parser = argparse.ArgumentParser(description="My Script")
-parser.add_argument("--remote", action='store_true')
+parser = argparse.ArgumentParser(description="KoboldAI Server")
+parser.add_argument("--remote", action='store_true', help="Optimizes KoboldAI for Remote Play")
+parser.add_argument("--model", help="Specify the Model Type to skip the Menu")
+parser.add_argument("--path", help="Specify the Path for local models (For model NeoCustom or GPT2Custom)")
 args = parser.parse_args()
+vars.model = args.model;
 
 if args.remote:
     vars.remote = True;
 
 # Select a model to run
-print("{0}Welcome to the KoboldAI Client!\nSelect an AI model to continue:{1}\n".format(colors.CYAN, colors.END))
-getModelSelection()
+if args.model:
+    print("Welcome to KoboldAI!\nYou have selected the following Model:", vars.model)
+    if args.path:
+        print("You have selected the following path for your Model :", args.path)
+        vars.custmodpth = args.path;
+else:
+    print("{0}Welcome to the KoboldAI Client!\nSelect an AI model to continue:{1}\n".format(colors.CYAN, colors.END))
+    getModelSelection()
 
 # If transformers model was selected & GPU available, ask to use CPU or GPU
 if(not vars.model in ["InferKit", "Colab", "OAI", "ReadOnly"]):
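For reference, a minimal standalone sketch of the argument-parsing pattern this commit introduces. The flag names and help texts mirror the diff above, but KoboldAI's global vars object and getModelSelection() menu are replaced with plain prints, so this is an illustration rather than the project's actual code.

    import argparse

    # Flag names and help texts mirror the commit; the rest is simplified.
    parser = argparse.ArgumentParser(description="KoboldAI Server")
    parser.add_argument("--remote", action="store_true",
                        help="Optimizes KoboldAI for Remote Play")
    parser.add_argument("--model",
                        help="Specify the Model Type to skip the Menu")
    parser.add_argument("--path",
                        help="Specify the Path for local models (For model NeoCustom or GPT2Custom)")
    args = parser.parse_args()

    if args.model:
        # A model given on the command line skips the interactive menu.
        print("You have selected the following Model:", args.model)
        if args.path:
            print("You have selected the following path for your Model:", args.path)
    else:
        # Without --model, the real server would call getModelSelection() here.
        print("Select an AI model to continue.")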