mirror of https://github.com/KoboldAI/KoboldAI-Client.git (synced 2025-06-05 21:59:24 +02:00)
Etc
@@ -2069,8 +2069,6 @@ def patch_transformers():
             tokenizer,
             excluded_world_info: List[Set],
         ):
-            # self.regeneration_required = False
-            # self.halt = False
            self.tokenizer = tokenizer
            self.excluded_world_info = excluded_world_info
 
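For context, the __init__ touched by this hunk belongs to a stopping-criteria object that is handed the tokenizer and the world-info exclusion sets during generation. Below is a minimal sketch of that shape, assuming the transformers StoppingCriteria interface; the class name and the __call__ body are illustrative assumptions, not copied from the repository.

# Sketch only: illustrates the kind of class whose __init__ the hunk edits.
from typing import List, Set

import torch
from transformers import StoppingCriteria


class WorldInfoScanCriteria(StoppingCriteria):  # name is illustrative
    def __init__(
        self,
        tokenizer,
        excluded_world_info: List[Set],
    ):
        self.tokenizer = tokenizer
        self.excluded_world_info = excluded_world_info

    def __call__(self, input_ids: torch.LongTensor, scores: torch.FloatTensor, **kwargs) -> bool:
        # Inspect the tokens generated so far and decide whether generation
        # should stop (e.g. because triggered world info requires another pass).
        _text = self.tokenizer.decode(input_ids[0])
        return False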
@@ -4968,8 +4966,10 @@ def raw_generate(
 ) -> List:
 
     if isinstance(prompt, str):
+        prompt_decoded = prompt
         prompt_tokens = tokenizer.encode(prompt)
     else:
+        prompt_decoded = tokenizer.decode(prompt)
         prompt_tokens = prompt
 
     # Some gen methods such as OAI don't return tokens.
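The added lines keep a decoded copy of the prompt alongside its token ids, so downstream code can use whichever form a backend needs; as the trailing comment notes, some generation backends such as OAI accept text and do not return tokens. A minimal sketch of that normalization, assuming a tokenizer with encode/decode; the helper name normalize_prompt is illustrative, not from the codebase.

# Sketch only: mirrors the normalization the added lines perform in raw_generate.
from typing import List, Tuple, Union


def normalize_prompt(prompt: Union[str, List[int]], tokenizer) -> Tuple[str, List[int]]:
    if isinstance(prompt, str):
        prompt_decoded = prompt
        prompt_tokens = tokenizer.encode(prompt)
    else:
        prompt_decoded = tokenizer.decode(prompt)
        prompt_tokens = prompt
    return prompt_decoded, prompt_tokens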