mishig HF staff committed on
Commit
38434c2
1 Parent(s): 0a4dc48

Use type `ChatCompletionInputMessage` from hfjs/tasks

Browse files
package.json CHANGED
@@ -32,6 +32,7 @@
32
  "dependencies": {
33
  "@huggingface/hub": "^0.15.1",
34
  "@huggingface/inference": "^2.7.0",
 
35
  "@tailwindcss/container-queries": "^0.1.1"
36
  }
37
  }
 
32
  "dependencies": {
33
  "@huggingface/hub": "^0.15.1",
34
  "@huggingface/inference": "^2.7.0",
35
+ "@huggingface/tasks": "^0.10.22",
36
  "@tailwindcss/container-queries": "^0.1.1"
37
  }
38
  }
pnpm-lock.yaml CHANGED
@@ -11,6 +11,9 @@ dependencies:
11
  '@huggingface/inference':
12
  specifier: ^2.7.0
13
  version: 2.7.0
 
 
 
14
  '@tailwindcss/container-queries':
15
  specifier: ^0.1.1
16
  version: 0.1.1(tailwindcss@3.4.4)
@@ -287,7 +290,7 @@ packages:
287
  resolution: {integrity: sha512-uHb4aFkJDoGfLeRHfFTjkI36Z8IV6Z1c+KzhMDqUSC56opyr7Mn1Nsx7Rri/C7KDwROhQfBp/fOOqqjTzn6Cgg==}
288
  engines: {node: '>=18'}
289
  dependencies:
290
- '@huggingface/tasks': 0.10.19
291
  hash-wasm: 4.11.0
292
  dev: false
293
 
@@ -295,11 +298,11 @@ packages:
295
  resolution: {integrity: sha512-u7Fn637Q3f7nUB1tajM4CgzhvoFQkOQr5W5Fm+2wT9ETgGoLBh25BLlYPTJRjAd2WY01s71v0lqAwNvHHCc3mg==}
296
  engines: {node: '>=18'}
297
  dependencies:
298
- '@huggingface/tasks': 0.10.19
299
  dev: false
300
 
301
- /@huggingface/tasks@0.10.19:
302
- resolution: {integrity: sha512-JnfdySzAXNvuL8q0QUt1952cebwcpTjUKt8Hq80OSksY5l8hTpN2OcBpjrJ3Zk91mQnqVNJ9LS3B7RfCQ7kW/A==}
303
  dev: false
304
 
305
  /@isaacs/cliui@8.0.2:
 
11
  '@huggingface/inference':
12
  specifier: ^2.7.0
13
  version: 2.7.0
14
+ '@huggingface/tasks':
15
+ specifier: ^0.10.22
16
+ version: 0.10.22
17
  '@tailwindcss/container-queries':
18
  specifier: ^0.1.1
19
  version: 0.1.1(tailwindcss@3.4.4)
 
290
  resolution: {integrity: sha512-uHb4aFkJDoGfLeRHfFTjkI36Z8IV6Z1c+KzhMDqUSC56opyr7Mn1Nsx7Rri/C7KDwROhQfBp/fOOqqjTzn6Cgg==}
291
  engines: {node: '>=18'}
292
  dependencies:
293
+ '@huggingface/tasks': 0.10.22
294
  hash-wasm: 4.11.0
295
  dev: false
296
 
 
298
  resolution: {integrity: sha512-u7Fn637Q3f7nUB1tajM4CgzhvoFQkOQr5W5Fm+2wT9ETgGoLBh25BLlYPTJRjAd2WY01s71v0lqAwNvHHCc3mg==}
299
  engines: {node: '>=18'}
300
  dependencies:
301
+ '@huggingface/tasks': 0.10.22
302
  dev: false
303
 
304
+ /@huggingface/tasks@0.10.22:
305
+ resolution: {integrity: sha512-sCtp+A6sq6NXoUU7NXuXWoVNNjKddk1GTQIh3cJ6illF8S4zmFoerCVRvFf19BdgICGvF+RVZiv9sGGK9KRDTg==}
306
  dev: false
307
 
308
  /@isaacs/cliui@8.0.2:
src/lib/components/Playground/Playground.svelte CHANGED
@@ -12,10 +12,12 @@
12
  import PlaygroundModelSelector from './PlaygroundModelSelector.svelte';
13
  import { onDestroy, onMount } from 'svelte';
14
  import { type ModelEntry } from "@huggingface/hub";
 
 
15
 
16
  let compatibleModels: ModelEntry[] = [];
17
 
18
- const startMessages: Message[] = [{ role: 'user', content: '' }];
19
 
20
  let conversations: Conversation[] = [
21
  {
@@ -31,14 +33,14 @@
31
  }
32
 
33
  let currentConversation = conversations[0];
34
- let systemMessage: Message = { role: 'system', content: '' };
35
  $: messages = currentConversation.messages;
36
 
37
  let hfToken: string | null = import.meta.env.VITE_HF_TOKEN;
38
  let viewCode = false;
39
  let showTokenModal = false;
40
  let loading = false;
41
- let streamingMessage: Message | null = null;
42
  let tokens = 0;
43
  let latency = 0;
44
  let messageContainer: HTMLDivElement | null = null;
 
12
  import PlaygroundModelSelector from './PlaygroundModelSelector.svelte';
13
  import { onDestroy, onMount } from 'svelte';
14
  import { type ModelEntry } from "@huggingface/hub";
15
+ import { type ChatCompletionInputMessage } from "@huggingface/tasks";
16
+
17
 
18
  let compatibleModels: ModelEntry[] = [];
19
 
20
+ const startMessages: ChatCompletionInputMessage[] = [{ role: 'user', content: '' }];
21
 
22
  let conversations: Conversation[] = [
23
  {
 
33
  }
34
 
35
  let currentConversation = conversations[0];
36
+ let systemMessage: ChatCompletionInputMessage = { role: 'system', content: '' };
37
  $: messages = currentConversation.messages;
38
 
39
  let hfToken: string | null = import.meta.env.VITE_HF_TOKEN;
40
  let viewCode = false;
41
  let showTokenModal = false;
42
  let loading = false;
43
+ let streamingMessage: ChatCompletionInputMessage | null = null;
44
  let tokens = 0;
45
  let latency = 0;
46
  let messageContainer: HTMLDivElement | null = null;
src/lib/components/Playground/PlaygroundCode.svelte CHANGED
@@ -1,9 +1,11 @@
1
  <script lang="ts">
 
 
2
  export let model: string;
3
  export let streaming: Boolean;
4
  export let temperature: number;
5
  export let maxTokens: number;
6
- export let messages: Message[];
7
 
8
  $: console.log(messages);
9
 
 
1
  <script lang="ts">
2
+ import { type ChatCompletionInputMessage } from "@huggingface/tasks";
3
+
4
  export let model: string;
5
  export let streaming: Boolean;
6
  export let temperature: number;
7
  export let maxTokens: number;
8
+ export let messages: ChatCompletionInputMessage[];
9
 
10
  $: console.log(messages);
11
 
src/lib/components/Playground/PlaygroundMessage.svelte CHANGED
@@ -1,12 +1,8 @@
1
  <script lang="ts">
2
  import { createEventDispatcher } from 'svelte';
 
3
 
4
- type Message = {
5
- role: 'user' | 'assistant';
6
- content: string;
7
- };
8
-
9
- export let message: Message;
10
  export let autofocus: boolean = false;
11
 
12
  const dispatch = createEventDispatcher();
 
1
  <script lang="ts">
2
  import { createEventDispatcher } from 'svelte';
3
+ import { type ChatCompletionInputMessage } from "@huggingface/tasks";
4
 
5
+ export let message: ChatCompletionInputMessage;
 
 
 
 
 
6
  export let autofocus: boolean = false;
7
 
8
  const dispatch = createEventDispatcher();
src/lib/components/Playground/playgroundUtils.ts CHANGED
@@ -1,22 +1,18 @@
 
1
  import { HfInference } from '@huggingface/inference';
2
 
3
- export interface Message {
4
- role: string;
5
- content: string;
6
- }
7
-
8
  export function createHfInference(token: string): HfInference {
9
  return new HfInference(token);
10
  }
11
 
12
- export function prepareRequestMessages(systemMessage: Message, messages: Message[]): Message[] {
13
  return [...(systemMessage.content.length ? [systemMessage] : []), ...messages];
14
  }
15
 
16
  export async function handleStreamingResponse(
17
  hf: HfInference,
18
  model: string,
19
- messages: Message[],
20
  temperature: number,
21
  maxTokens: number,
22
  jsonMode: boolean,
@@ -49,11 +45,11 @@ export async function handleStreamingResponse(
49
  export async function handleNonStreamingResponse(
50
  hf: HfInference,
51
  model: string,
52
- messages: Message[],
53
  temperature: number,
54
  maxTokens: number,
55
  jsonMode: boolean
56
- ): Promise<Message> {
57
  const response = await hf.chatCompletion({
58
  model: model,
59
  messages: messages,
 
1
+ import { type ChatCompletionInputMessage } from "@huggingface/tasks";
2
  import { HfInference } from '@huggingface/inference';
3
 
 
 
 
 
 
4
  export function createHfInference(token: string): HfInference {
5
  return new HfInference(token);
6
  }
7
 
8
+ export function prepareRequestMessages(systemMessage: ChatCompletionInputMessage, messages: ChatCompletionInputMessage[]): ChatCompletionInputMessage[] {
9
  return [...(systemMessage.content.length ? [systemMessage] : []), ...messages];
10
  }
11
 
12
  export async function handleStreamingResponse(
13
  hf: HfInference,
14
  model: string,
15
+ messages: ChatCompletionInputMessage[],
16
  temperature: number,
17
  maxTokens: number,
18
  jsonMode: boolean,
 
45
  export async function handleNonStreamingResponse(
46
  hf: HfInference,
47
  model: string,
48
+ messages: ChatCompletionInputMessage[],
49
  temperature: number,
50
  maxTokens: number,
51
  jsonMode: boolean
52
+ ): Promise<ChatCompletionInputMessage> {
53
  const response = await hf.chatCompletion({
54
  model: model,
55
  messages: messages,
src/lib/types/index.d.ts CHANGED
@@ -1,9 +1,5 @@
1
  import type { ModelEntry } from "@huggingface/hub";
2
-
3
- type Message = {
4
- role: string;
5
- content: string;
6
- };
7
 
8
  type Model = string;
9
 
@@ -18,5 +14,5 @@ type Conversation = {
18
  id: string;
19
  model: ModelEntry;
20
  config: ModelConfig;
21
- messages: Message[];
22
  };
 
1
  import type { ModelEntry } from "@huggingface/hub";
2
+ import type { ChatCompletionInputMessage } from "@huggingface/tasks";
 
 
 
 
3
 
4
  type Model = string;
5
 
 
14
  id: string;
15
  model: ModelEntry;
16
  config: ModelConfig;
17
+ messages: ChatCompletionInputMessage[];
18
  };