Mirror of https://github.com/KoboldAI/KoboldAI-Client.git
Model: Respect model lazyload over kaivars
kaivars dictates the model config unless the value is set from outside aiserver.
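To make the intended precedence concrete, here is a minimal sketch, not the actual KoboldAI code: select_config_value and the plain dicts below are hypothetical stand-ins for the model's lazy-loaded config and for koboldai_vars.

def select_config_value(model_config: dict, koboldai_vars: dict, key: str):
    """Prefer the value the model itself was loaded with; fall back to kaivars."""
    if key in model_config:
        return model_config[key]
    return koboldai_vars.get(key)

# The model's own setting wins over the kaivars one; kaivars only fills gaps.
print(select_config_value({"lazy_load": True}, {"lazy_load": False}, "lazy_load"))  # True
print(select_config_value({}, {"lazy_load": False}, "lazy_load"))                   # False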
@@ -10,7 +10,7 @@ class PostTokenHooks:
         model: InferenceModel,
         input_ids: torch.LongTensor,
     ) -> None:
-        if not model.gen_state["do_streaming"]:
+        if not model.gen_state.get("do_streaming"):
             return
 
         if not utils.koboldai_vars.output_streaming:
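The changed line swaps a plain subscript for dict.get(): if "do_streaming" has never been put into gen_state, subscripting raises KeyError, while .get() returns None, which is falsy, so the early return still fires. A standalone illustration of that difference follows; the dict here is a stand-in for model.gen_state, not the real object.

gen_state = {}  # stand-in for model.gen_state before streaming has been configured

# Old form: a missing key raises KeyError.
try:
    gen_state["do_streaming"]
except KeyError:
    print("KeyError: 'do_streaming'")

# New form: a missing key yields None (falsy), so
# `if not gen_state.get("do_streaming"): return` simply skips streaming.
print(gen_state.get("do_streaming"))  # None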