mishig (HF staff) committed
Commit 75c1d95
2 Parent(s): eec5743 b5f82fa

Improve models selector (#44)
src/lib/components/InferencePlayground/InferencePlayground.svelte CHANGED
@@ -8,6 +8,7 @@
   handleStreamingResponse,
   handleNonStreamingResponse,
   isSystemPromptSupported,
+  FEATUED_MODELS_IDS,
 } from "./inferencePlaygroundUtils";
 
 import { onDestroy } from "svelte";
@@ -24,7 +25,7 @@
 const startMessageSystem: ChatCompletionInputMessage = { role: "system", content: "" };
 
 let conversation: Conversation = {
-  model: models[0],
+  model: models.find(m => FEATUED_MODELS_IDS.includes(m.id)) ?? models[0],
   config: defaultGenerationConfig,
   messages: [{ ...startMessageUser }],
   systemMessage: startMessageSystem,
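
Note: the default conversation model now prefers a featured model and only falls back to models[0] when none of the featured ids is available. A minimal TypeScript sketch of that selection, with a simplified Model shape standing in for ModelEntryWithTokenizer:

// Minimal sketch of the default-model selection; the Model shape and the
// FEATUED_MODELS_IDS value here are simplified stand-ins for illustration.
interface Model {
  id: string;
}

const FEATUED_MODELS_IDS = ["meta-llama/Meta-Llama-3-70B-Instruct"];

function pickDefaultModel(models: Model[]): Model | undefined {
  // Prefer the first model whose id is featured; otherwise fall back to the first entry.
  return models.find(m => FEATUED_MODELS_IDS.includes(m.id)) ?? models[0];
}

// A featured model wins even when it is not first in the list.
const picked = pickDefaultModel([{ id: "foo/bar" }, { id: "meta-llama/Meta-Llama-3-70B-Instruct" }]);
console.log(picked?.id); // -> "meta-llama/Meta-Llama-3-70B-Instruct"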
src/lib/components/InferencePlayground/InferencePlaygroundModelSelectorModal.svelte CHANGED
@@ -3,12 +3,14 @@
 
   import { createEventDispatcher } from "svelte";
 
+  import { FEATUED_MODELS_IDS } from "./inferencePlaygroundUtils";
   import IconSearch from "../Icons/IconSearch.svelte";
   import IconStar from "../Icons/IconStar.svelte";
 
   export let models: ModelEntryWithTokenizer[];
 
   let backdropEl: HTMLDivElement;
+  let query = "";
 
   const dispatch = createEventDispatcher<{ modelSelected: string; close: void }>();
 
@@ -28,6 +30,17 @@
       dispatch("close");
     }
   }
+
+  $: featuredModels = models.filter(m =>
+    query
+      ? FEATUED_MODELS_IDS.includes(m.id) && m.id.toLocaleLowerCase().includes(query.toLocaleLowerCase().trim())
+      : FEATUED_MODELS_IDS.includes(m.id)
+  );
+  $: otherModels = models.filter(m =>
+    query
+      ? !FEATUED_MODELS_IDS.includes(m.id) && m.id.toLocaleLowerCase().includes(query.toLocaleLowerCase().trim())
+      : !FEATUED_MODELS_IDS.includes(m.id)
+  );
 </script>
 
 <svelte:window on:keydown={handleKeydown} />
@@ -45,36 +58,37 @@
       autofocus
       class="flex h-10 w-full rounded-md bg-transparent py-3 text-sm placeholder-gray-400 outline-none"
       placeholder="Search models ..."
-      value=""
+      bind:value={query}
     />
   </div>
   <div class="max-h-[300px] overflow-y-auto overflow-x-hidden">
     <div class="p-1">
       <div class="px-2 py-1.5 text-xs font-medium text-gray-500">Trending</div>
       <div>
-        <div class="flex cursor-pointer items-center px-2 py-1.5 text-sm hover:bg-gray-100">
-          <IconStar classNames="lucide lucide-star mr-2 h-4 w-4 text-yellow-400" />
-          <span class="inline-flex items-center"
-            ><span class="text-gray-500">meta-llama</span><span class="mx-1 text-black">/</span><span
-              class="text-black">Meta-Llama-3-70B-Instruct</span
-            ></span
-          >
-        </div>
-        <div class="flex cursor-pointer items-center px-2 py-1.5 text-sm hover:bg-gray-100">
-          <IconStar classNames="lucide lucide-star mr-2 h-4 w-4 text-yellow-400" />
-          <span class="inline-flex items-center"
-            ><span class="text-gray-500">mistralai</span><span class="mx-1 text-black">/</span><span
-              class="text-black">Mixtral-8x7B-Instruct-v0.1</span
-            ></span
+        {#each featuredModels as model}
+          {@const [nameSpace, modelName] = model.id.split("/")}
+          <button
+            class="flex cursor-pointer items-center px-2 py-1.5 text-sm hover:bg-gray-100"
+            on:click={() => {
+              dispatch("modelSelected", model.id);
+              dispatch("close");
+            }}
          >
-        </div>
+            <IconStar classNames="lucide lucide-star mr-2 h-4 w-4 text-yellow-400" />
+            <span class="inline-flex items-center"
+              ><span class="text-gray-500">{nameSpace}</span><span class="mx-1 text-black">/</span><span
+                class="text-black">{modelName}</span
+              ></span
+            >
+          </button>
+        {/each}
       </div>
     </div>
     <div class="mx-1 h-px bg-gray-200"></div>
     <div class="p-1">
      <div class="px-2 py-1.5 text-xs font-medium text-gray-500">Other Models</div>
       <div>
-        {#each models as model}
+        {#each otherModels as model}
          {@const [nameSpace, modelName] = model.id.split("/")}
          <button
            class="flex cursor-pointer items-center px-2 py-1.5 text-sm hover:bg-gray-100"
src/lib/components/InferencePlayground/inferencePlaygroundUtils.ts CHANGED
@@ -61,3 +61,9 @@ export async function handleNonStreamingResponse(
 export function isSystemPromptSupported(model: ModelEntryWithTokenizer) {
   return model.tokenizerConfig?.chat_template?.includes("system");
 }
+
+export const FEATUED_MODELS_IDS = [
+  "meta-llama/Meta-Llama-3-70B-Instruct",
+  "google/gemma-1.1-7b-it",
+  "mistralai/Mixtral-8x7B-Instruct-v0.1",
+];
src/routes/+page.server.ts CHANGED
@@ -17,8 +17,13 @@ export const load: PageServerLoad = async ({ fetch }) => {
 
   const promises = compatibleModels.map(async model => {
     const configUrl = `https://huggingface.co/${model.id}/raw/main/tokenizer_config.json`;
-    const res = await fetch(configUrl);
+    const res = await fetch(configUrl, {
+      headers: {
+        Authorization: `Bearer ${HF_TOKEN}`,
+      },
+    });
     if (!res.ok) {
+      console.error("Error fetching tokenizer file", res.status, res.statusText);
       return null; // Ignore failed requests by returning null
     }
     const tokenizerConfig = await res.json();
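
Note: the server load now authenticates the tokenizer_config.json request with HF_TOKEN and logs the HTTP status on failure instead of silently returning null. A hedged sketch of the same fetch pattern outside SvelteKit; reading the token from process.env is an assumption for this standalone example (the route itself gets HF_TOKEN from its server environment):

// Sketch of an authorized tokenizer_config.json fetch (Node 18+ global fetch assumed).
async function fetchTokenizerConfig(modelId: string): Promise<Record<string, unknown> | null> {
  const configUrl = `https://huggingface.co/${modelId}/raw/main/tokenizer_config.json`;
  const res = await fetch(configUrl, {
    headers: {
      // Assumption: token read from the environment for this illustration.
      Authorization: `Bearer ${process.env.HF_TOKEN}`,
    },
  });
  if (!res.ok) {
    console.error("Error fetching tokenizer file", res.status, res.statusText);
    return null; // Ignore failed requests by returning null
  }
  return res.json();
}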