Update app.py
app.py CHANGED

@@ -13,7 +13,7 @@ tokenizer = AutoTokenizer.from_pretrained("EleutherAI/gpt-neox-20b")
 model = MambaLMHeadModel.from_pretrained("state-spaces/mamba-2.8b", device=device, dtype=torch.float16)
 genlen = 200
 
-def pred(text_in):
+def pred(text_in,):
     tokens = tokenizer(text_in, return_tensors="pt")
     input_ids = tokens.input_ids.to(device=device)
     attn_mask = tokens.attention_mask.to(device=device)
@@ -26,7 +26,7 @@ def pred(text_in):
         output_scores=True,
         enable_timing=False,
         temperature=0.5,
-        top_k=
+        top_k=10,
         top_p=0.9,
     )
     out = fn()
@@ -35,7 +35,7 @@ def pred(text_in):
 
 demo = gr.Interface(
     title="Mamba: Selective State Space Model",
-    description="A demo for [Mamba](https://github.com/state-spaces/mamba)",
+    description="A demo for [Mamba](https://github.com/state-spaces/mamba) by Albert & Tri.",
     fn=pred, inputs="text", outputs="text")
 
 if __name__ == "__main__":
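For context: the removed `top_k=` has no value, which is a Python syntax error, so importing app.py fails and the Space cannot start; this commit fixes it by supplying `top_k=10` (and extends the description). Below is a minimal, self-contained sketch of how the fixed pieces fit together. Only the lines that appear in the hunks come from the Space's app.py; the imports, device choice, `max_length` arithmetic, the final decode, and the `__main__` body are assumptions added so the sketch runs on its own, and it calls `model.generate` directly rather than through the `fn` helper visible as `out = fn()` in the diff.

```python
import torch
import gradio as gr
from transformers import AutoTokenizer
from mamba_ssm.models.mixer_seq_simple import MambaLMHeadModel

device = "cuda"  # assumption: the Space runs on a GPU (float16 weights + mamba_ssm kernels)

tokenizer = AutoTokenizer.from_pretrained("EleutherAI/gpt-neox-20b")
model = MambaLMHeadModel.from_pretrained(
    "state-spaces/mamba-2.8b", device=device, dtype=torch.float16
)
genlen = 200


def pred(text_in):
    tokens = tokenizer(text_in, return_tensors="pt")
    input_ids = tokens.input_ids.to(device=device)
    attn_mask = tokens.attention_mask.to(device=device)  # in the diff context; unused below
    # Sampling arguments mirror the fixed hunk; top_k now has a value,
    # so the module imports cleanly instead of failing with a SyntaxError.
    out = model.generate(
        input_ids=input_ids,
        max_length=input_ids.shape[1] + genlen,  # assumption: prompt + genlen new tokens
        return_dict_in_generate=True,
        output_scores=True,
        enable_timing=False,
        temperature=0.5,
        top_k=10,
        top_p=0.9,
    )
    return tokenizer.batch_decode(out.sequences.tolist())[0]


demo = gr.Interface(
    fn=pred,
    inputs="text",
    outputs="text",
    title="Mamba: Selective State Space Model",
    description="A demo for [Mamba](https://github.com/state-spaces/mamba) by Albert & Tri.",
)

if __name__ == "__main__":
    demo.launch()  # assumption: the original __main__ block launches the Gradio app
```

Note that mamba_ssm expects an NVIDIA GPU for its fused kernels, so loading the 2.8B checkpoint in float16 on `device="cuda"` as above is the intended setup; this sketch is not meant to run on CPU.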