Commit 70a8dcb
Parent(s): 1a43738

only use nebius

Files changed:
- src/components/play-tab.tsx +21 -43
- src/lib/inference.tsx +1 -8
src/components/play-tab.tsx
CHANGED
@@ -41,7 +41,9 @@ export default function PlayTab({
   );
   const [maxHops, setMaxHops] = useState<number>(20);
   const [isGameStarted, setIsGameStarted] = useState<boolean>(false);
-  const [startPage, setStartPage] = useState<string>(
+  const [startPage, setStartPage] = useState<string>(
+    startArticle || "Capybara"
+  );
   const [targetPage, setTargetPage] = useState<string>(
     destinationArticle || "Pokémon"
   );
@@ -49,20 +51,17 @@ export default function PlayTab({
   const [maxLinks, setMaxLinks] = useState<number>(200);
   const [isServerConnected, setIsServerConnected] = useState<boolean>(false);
   const [isAuthenticated, setIsAuthenticated] = useState<boolean>(false);
-  const [modelList, setModelList] = useState<
-    {
-      id: string;
-      likes: number;
-      trendingScore: number;
-      author: string;
-      name: string;
-    }[]
-  >([]);
+  const [modelList, setModelList] = useState<string[]>([
+    "deepseek-ai/DeepSeek-V3-0324",
+    "Qwen/Qwen3-235B-A22B",
+    "Qwen/Qwen3-30B-A3B",
+    "Qwen/Qwen3-14B",
+    "google/gemma-3-27b-it",
+  ]);
   const [allArticles, setAllArticles] = useState<string[]>([]);
 
   // Server connection check
   useEffect(() => {
-    fetchAvailableModels();
     const checkServerConnection = async () => {
       try {
         const response = await fetch(API_BASE + "/health");
@@ -83,7 +82,9 @@ export default function PlayTab({
   useEffect(() => {
     const checkAuthentication = () => {
       const idToken = window.localStorage.getItem("huggingface_id_token");
-      const accessToken = window.localStorage.getItem(
+      const accessToken = window.localStorage.getItem(
+        "huggingface_access_token"
+      );
 
       if (idToken && accessToken) {
         try {
@@ -101,7 +102,7 @@ export default function PlayTab({
 
     checkAuthentication();
     window.addEventListener("storage", checkAuthentication);
-
+
     return () => {
       window.removeEventListener("storage", checkAuthentication);
     };
@@ -128,26 +129,6 @@ export default function PlayTab({
     setPlayer(value as "me" | "model");
   };
 
-  const fetchAvailableModels = async () => {
-    const response = await fetch(
-      "https://huggingface.co/api/models?inference_provider=hyperbolic&pipeline_tag=text-generation"
-    );
-    const models = await response.json();
-    const filteredModels = models.filter((m: { tags: string[] }) =>
-      m.tags.includes("text-generation")
-    );
-    const modelList = filteredModels.map(
-      (m: { id: string; likes: number; trendingScore: number }) => ({
-        id: m.id,
-        likes: m.likes,
-        trendingScore: m.trendingScore,
-        author: m.id.split("/")[0],
-        name: m.id.split("/")[1],
-      })
-    );
-    setModelList(modelList);
-  };
-
   const selectRandomArticle = (setter: (article: string) => void) => {
     if (popularNodes.length > 0) {
       const randomIndex = Math.floor(Math.random() * popularNodes.length);
@@ -251,14 +232,11 @@ export default function PlayTab({
                 />
               </SelectTrigger>
               <SelectContent>
-
-                <
-
-
-
-                  </SelectItem>
-                ))}
-              </SelectGroup>
+                {modelList.map((model) => (
+                  <SelectItem key={model} value={model}>
+                    {model}
+                  </SelectItem>
+                ))}
               </SelectContent>
             </Select>
           </div>
@@ -309,8 +287,8 @@ export default function PlayTab({
                   <TooltipContent>
                     <p className="max-w-xs">
                       Maximum number of links the model can consider
-                      per page. Small models tend to get stuck if
-                      is too high.
+                      per page. Small models tend to get stuck if
+                      this is too high.
                     </p>
                   </TooltipContent>
                 </Tooltip>
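The removed fetchAvailableModels helper pulled Hyperbolic-served text-generation models from the Hub API at runtime; with the provider pinned to Nebius, the commit swaps that for the hard-coded modelList above. If a dynamic list were ever wanted again, a minimal sketch along the same lines could look like the following, assuming the Hub's inference_provider filter accepts "nebius" just as the removed code used "hyperbolic", and that the returned objects expose the same id, likes, trendingScore, and tags fields; the helper name here is only for illustration.

```ts
// Sketch only: a dynamic alternative to the hard-coded modelList state.
// Assumes ?inference_provider=nebius behaves like the removed
// ?inference_provider=hyperbolic query.
type HubModel = {
  id: string;
  likes: number;
  trendingScore: number;
  tags: string[];
};

async function fetchNebiusTextGenModels(): Promise<string[]> {
  const response = await fetch(
    "https://huggingface.co/api/models?inference_provider=nebius&pipeline_tag=text-generation"
  );
  if (!response.ok) {
    throw new Error(`Hub API request failed: ${response.status}`);
  }
  const models: HubModel[] = await response.json();
  return models
    .filter((m) => m.tags.includes("text-generation"))
    .sort((a, b) => b.trendingScore - a.trendingScore)
    .map((m) => m.id);
}
```

The play tab could then call setModelList(await fetchNebiusTextGenModels()) from the existing useEffect instead of relying on the static list.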
src/lib/inference.tsx
CHANGED
@@ -98,26 +98,19 @@ export function useInference({ apiKey }) {
     prompt,
     model,
     maxTokens,
-    provider = "fireworks-ai",
   }: {
     prompt: string;
     model: string;
     maxTokens: number;
-    provider: string;
   }) => {
     setIsLoading(true);
     setPartialText("");
 
     const client = new InferenceClient(apiKey);
 
-    // fireworks doesnt support max tokens
-    if (provider === "fireworks-ai") {
-      maxTokens = undefined;
-    }
-
    try {
      const stream = client.chatCompletionStream({
-
+        provider: "nebius",
        model,
        maxTokens,
        messages: [
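
With the provider removed from the hook's arguments, every request now goes through Nebius. A minimal usage sketch of the resulting call is below; it mirrors the hook's parameters, the model id, prompt, and token budget are placeholder values, and the streaming loop assumes the OpenAI-style chunk shape that chatCompletionStream from @huggingface/inference yields.

```ts
import { InferenceClient } from "@huggingface/inference";

// Sketch only: mirrors the updated hook's call, with the provider pinned
// to "nebius" instead of being passed in by the caller.
async function streamNebiusCompletion(apiKey: string, prompt: string) {
  const client = new InferenceClient(apiKey);
  const stream = client.chatCompletionStream({
    provider: "nebius",
    model: "Qwen/Qwen3-14B", // any entry from the hard-coded modelList
    maxTokens: 512, // forwarded exactly as the hook does
    messages: [{ role: "user", content: prompt }],
  });

  let text = "";
  for await (const chunk of stream) {
    // Assumes OpenAI-compatible streaming deltas.
    text += chunk.choices[0]?.delta?.content ?? "";
  }
  return text;
}
```

Pinning the provider is also what makes the Fireworks-specific maxTokens workaround removed by this commit unnecessary.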