Wolf369 committed on
Commit
9151e96
·
1 Parent(s): 81f3106
Files changed (1) hide show
  1. main.py +2 -2
main.py CHANGED
@@ -17,10 +17,10 @@ def read_root(
17
  model: str = "meta-llama/Llama-2-7b-hf",
18
  temperature: float = 0.,
19
  max_tokens: int = 1024) -> List:
20
- sampling_params = SamplingParams(temperature=temperature, max_tokens=max_tokens, token=token)
21
 
22
  llm = LLM(model=model)
23
 
24
- response = llm.generate([prompt], sampling_params)
25
 
26
  return response
 
17
  model: str = "meta-llama/Llama-2-7b-hf",
18
  temperature: float = 0.,
19
  max_tokens: int = 1024) -> List:
20
+ sampling_params = SamplingParams(temperature=temperature, max_tokens=max_tokens)
21
 
22
  llm = LLM(model=model)
23
 
24
+ response = llm.generate([prompt], sampling_params, token=token)
25
 
26
  return response