Spaces:
Running on Zero
Update app.py
Browse files
app.py
CHANGED
@@ -92,7 +92,7 @@ if llama_tokenizer.pad_token is None:
|
|
92 |
def llama_generate(
|
93 |
message: str,
|
94 |
max_new_tokens: int = LLAMA_DEFAULT_MAX_NEW_TOKENS,
|
95 |
-
temperature: float = 0.6,
|
96 |
top_p: float = 0.9,
|
97 |
top_k: int = 50,
|
98 |
repetition_penalty: float = 1.2,
|
@@ -135,7 +135,7 @@ Given the following issue description:
|
|
135 |
---
|
136 |
{issue_text}
|
137 |
---
|
138 |
-
Explain why this issue might be classified as a **{quality_name}** issue. Provide a concise explanation, relating it back to the issue description.
|
139 |
"""
|
140 |
try:
|
141 |
explanation = llama_generate(prompt)
|
|
|
92 |
def llama_generate(
|
93 |
message: str,
|
94 |
max_new_tokens: int = LLAMA_DEFAULT_MAX_NEW_TOKENS,
|
95 |
+
temperature: float = 0.3,
|
96 |
top_p: float = 0.9,
|
97 |
top_k: int = 50,
|
98 |
repetition_penalty: float = 1.2,
|
|
|
135 |
---
|
136 |
{issue_text}
|
137 |
---
|
138 |
+
Explain why this issue might be classified as a **{quality_name}** issue. Provide a concise explanation, relating it back to the issue description. Keep the explanation short and concise.
|
139 |
"""
|
140 |
try:
|
141 |
explanation = llama_generate(prompt)
|