Modeling: Add seed parameter to raw_generate

Yahooo, decoupling from koboldai_vars. This makes the generation test in
`test_generation.py` pass, and enables full determinism outside of
core_generate.
Author: somebody
Date: 2023-03-12 21:49:10 -05:00
parent 38c4edac40
commit cd8ccf0a5e
8 changed files with 51 additions and 20 deletions

@@ -10,7 +10,7 @@ import itertools
 import traceback
 import contextlib
 from tqdm.auto import tqdm
-from typing import Dict, List, Union
+from typing import Dict, List, Optional, Union
 import torch
 from torch.nn import Embedding
@@ -457,6 +457,7 @@ class HFTorchInferenceModel(HFInferenceModel):
         gen_settings: GenerationSettings,
         single_line: bool = False,
         batch_count: int = 1,
+        seed: Optional[int] = None,
         **kwargs,
     ) -> GenerationResult:
         if not isinstance(prompt_tokens, torch.Tensor):
@@ -469,6 +470,10 @@ class HFTorchInferenceModel(HFInferenceModel):
         additional_bad_words_ids = [self.tokenizer.encode("\n")] if single_line else []
+        if seed is not None:
+            print("seeding", seed)
+            torch.manual_seed(seed)
         with torch.no_grad():
             start_time = time.time()
             genout = self.model.generate(
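
For context, here is a minimal standalone sketch of the determinism this change enables, written against a generic Hugging Face model rather than KoboldAI's own wrappers; the model choice, prompt, and sampling settings are illustrative assumptions, not taken from the commit or from `test_generation.py`:

import torch
from transformers import AutoModelForCausalLM, AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained("gpt2")
model = AutoModelForCausalLM.from_pretrained("gpt2")
prompt = tokenizer("Once upon a time", return_tensors="pt").input_ids

def sample(seed: int) -> torch.Tensor:
    # Seeding right before generation pins the sampler's RNG state,
    # so do_sample=True yields the same tokens on every call.
    torch.manual_seed(seed)
    with torch.no_grad():
        return model.generate(prompt, do_sample=True, max_new_tokens=20)

# Same seed, same tokens -- the determinism the commit message describes.
assert torch.equal(sample(1337), sample(1337))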