New model: Falcon 40B
Falcon 180B was too heavy.
app.py CHANGED
@@ -3,7 +3,7 @@ from transformers import AutoTokenizer, AutoModelForCausalLM
 import torch
 import os
 
-MODEL_NAME = "tiiuae/falcon-180B"
+MODEL_NAME = "tiiuae/falcon-40b"
 
 access_token = os.getenv("HF_ACCESS_TOKEN")
 tokenizer = AutoTokenizer.from_pretrained(
@@ -43,10 +43,10 @@ def improve_code(code: str) -> str:
 app = gr.Blocks()
 
 with app:
-    gr.Markdown("## …
-    code_input = gr.Textbox(label="…
-    improve_btn = gr.Button("…
-    code_output = gr.Textbox(label="…
+    gr.Markdown("## MCP Server Code Improver with Falcon 40B")
+    code_input = gr.Textbox(label="Original code", lines=15)
+    improve_btn = gr.Button("Improve Code")
+    code_output = gr.Textbox(label="Improved code", lines=15)
     improve_btn.click(improve_code, inputs=code_input, outputs=code_output)
 
 app.launch()
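
For context, a minimal sketch of what the full app.py around these two hunks could look like, assuming a standard transformers text-generation setup. Only MODEL_NAME, the HF_ACCESS_TOKEN lookup, improve_code's signature, and the Gradio Blocks wiring appear in the diff; the from_pretrained arguments, the prompt, and the generation call below are assumptions, not the Space's actual code.

# Hypothetical reconstruction of app.py around the changed lines.
# Everything not shown in the diff (model load arguments, improve_code body)
# is an assumption, kept to standard transformers + Gradio usage.
import os

import gradio as gr
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer

MODEL_NAME = "tiiuae/falcon-40b"

access_token = os.getenv("HF_ACCESS_TOKEN")
tokenizer = AutoTokenizer.from_pretrained(MODEL_NAME, token=access_token)
model = AutoModelForCausalLM.from_pretrained(
    MODEL_NAME,
    token=access_token,
    torch_dtype=torch.bfloat16,  # assumption: half precision to fit the 40B weights
    device_map="auto",           # assumption: spread layers across available GPUs
)


def improve_code(code: str) -> str:
    # Assumed prompt and generation logic; the real body sits outside the diff hunks.
    prompt = f"Improve the following Python code:\n\n{code}\n\nImproved code:\n"
    inputs = tokenizer(prompt, return_tensors="pt").to(model.device)
    outputs = model.generate(**inputs, max_new_tokens=512, do_sample=False)
    # Return only the newly generated tokens, not the echoed prompt.
    return tokenizer.decode(
        outputs[0][inputs["input_ids"].shape[-1]:], skip_special_tokens=True
    )


app = gr.Blocks()

with app:
    gr.Markdown("## MCP Server Code Improver with Falcon 40B")
    code_input = gr.Textbox(label="Original code", lines=15)
    improve_btn = gr.Button("Improve Code")
    code_output = gr.Textbox(label="Improved code", lines=15)
    improve_btn.click(improve_code, inputs=code_input, outputs=code_output)

app.launch()

Even after the switch from falcon-180B to falcon-40b, the weights need roughly 80 GB in bfloat16, so a Space like this would likely depend on device_map="auto" or a quantized load to start at all.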