Spaces:
Running
on
Zero
Update app.py
Browse files
app.py
CHANGED
@@ -9,7 +9,8 @@ from transformers import AutoModelForCausalLM, GemmaTokenizerFast, TextIteratorS
|
|
9 |
|
10 |
DESCRIPTION = """\
|
11 |
# Prompt Generator with Gemma 2 9B IT
|
12 |
-
|
|
|
13 |
"""
|
14 |
|
15 |
MAX_MAX_NEW_TOKENS = 2048
|
@@ -150,7 +151,11 @@ chat_interface = gr.ChatInterface(
|
|
150 |
],
|
151 |
stop_btn=None,
|
152 |
examples=[
|
153 |
-
["
|
|
|
|
|
|
|
|
|
154 |
],
|
155 |
cache_examples=False,
|
156 |
type="messages",
|
@@ -158,7 +163,7 @@ chat_interface = gr.ChatInterface(
|
|
158 |
|
159 |
with gr.Blocks(css="style.css", fill_height=True) as demo:
|
160 |
gr.Markdown(DESCRIPTION)
|
161 |
-
gr.DuplicateButton(value="
|
162 |
chat_interface.render()
|
163 |
|
164 |
if __name__ == "__main__":
|
|
|
9 |
|
10 |
DESCRIPTION = """\
|
11 |
# Prompt Generator with Gemma 2 9B IT
|
12 |
+
## Uses OpenAI's leaked meta prompt for optimizing GPT prompts!
|
13 |
+
### The Prompt Generator can turn a task description into a high quality prompt. For best results, be sure to describe your task in as much detail as possible, including what input data the prompt should expect as well as how the output should be formatted.
|
14 |
"""
|
15 |
|
16 |
MAX_MAX_NEW_TOKENS = 2048
|
|
|
151 |
],
|
152 |
stop_btn=None,
|
153 |
examples=[
|
154 |
+
["Draft an email responding to a customer complaint email and offer a resolution"],
|
155 |
+
["Classify chat transcripts into categories using our content moderation policy"],
|
156 |
+
["Translate code to Python"],
|
157 |
+
["Recommend a product based on a customer's previous transactions"],
|
158 |
+
["Summarize documents into 10 bullet points max"]
|
159 |
],
|
160 |
cache_examples=False,
|
161 |
type="messages",
|
|
|
163 |
|
164 |
with gr.Blocks(css="style.css", fill_height=True) as demo:
|
165 |
gr.Markdown(DESCRIPTION)
|
166 |
+
gr.DuplicateButton(value="Space by Rishiraj Acharya. Duplicate it?", elem_id="duplicate-button")
|
167 |
chat_interface.render()
|
168 |
|
169 |
if __name__ == "__main__":
|