smgc committed
Commit adbd68b
1 Parent(s): a00b4bd

Update app.js

Files changed (1)
  1. app.js +64 -28
app.js CHANGED
@@ -11,31 +11,59 @@ const NOTDIAMOND_HEADERS = {
   'next-action': '8189eb37107121e024940f588629a394a594e6a4'
 };
 
-const DEFAULT_MODEL = 'gpt-4o';
+const DEFAULT_MODEL = 'gpt-4-turbo';
 
 const MODEL_MAPPING = {
-  'gpt-4o-mini': 'gpt-4o-mini',
-  'gpt-4o': 'gpt-4o',
-  'gpt-4-turbo': 'gpt-4-turbo-2024-04-09',
-  'claude-3-haiku-20240307': 'anthropic.claude-3-haiku-20240307-v1:0',
-  'claude-3-5-sonnet-20240620': 'anthropic.claude-3-5-sonnet-20240620-v1:0',
-  'gemini-1.5-pro-latest': 'models/gemini-1.5-pro-latest',
-  'gemini-1.5-pro-exp-0801': 'models/gemini-1.5-pro-exp-0801',
-  'llama-3.1-405b-instruct': 'meta.llama3-1-405b-instruct-v1:0',
-  'llama-3.1-70b-instruct': 'meta.llama3-1-70b-instruct-v1:0',
-  'perplexity': 'llama-3.1-sonar-large-128k-online'
+  "gpt-4-turbo": {
+    "provider": "openai",
+    "mapping": "gpt-4-turbo-2024-04-09"
+  },
+  "gemini-1.5-pro-exp-0801": {
+    "provider": "google",
+    "mapping": "models/gemini-1.5-pro-exp-0801"
+  },
+  "Meta-Llama-3.1-70B-Instruct-Turbo": {
+    "provider": "togetherai",
+    "mapping": "meta.llama3-1-70b-instruct-v1:0"
+  },
+  "Meta-Llama-3.1-405B-Instruct-Turbo": {
+    "provider": "togetherai",
+    "mapping": "meta.llama3-1-405b-instruct-v1:0"
+  },
+  "llama-3.1-sonar-large-128k-online": {
+    "provider": "perplexity",
+    "mapping": "llama-3.1-sonar-large-128k-online"
+  },
+  "gemini-1.5-pro-latest": {
+    "provider": "google",
+    "mapping": "models/gemini-1.5-pro-latest"
+  },
+  "claude-3-5-sonnet-20240620": {
+    "provider": "anthropic",
+    "mapping": "anthropic.claude-3-5-sonnet-20240620-v1:0"
+  },
+  "claude-3-haiku-20240307": {
+    "provider": "anthropic",
+    "mapping": "anthropic.claude-3-haiku-20240307-v1:0"
+  },
+  "gpt-4o-mini": {
+    "provider": "openai",
+    "mapping": "gpt-4o-mini"
+  },
+  "gpt-4o": {
+    "provider": "openai",
+    "mapping": "gpt-4o"
+  },
+  "mistral-large-2407": {
+    "provider": "mistral",
+    "mapping": "mistral.mistral-large-2407-v1:0"
+  }
 };
 
 function getAuthKey(req) {
-  const cookies = req.headers.cookie;
-  if (cookies) {
-    const cookieArray = cookies.split(';');
-    for (let cookie of cookieArray) {
-      cookie = cookie.trim();
-      if (cookie.startsWith('sb-spuckhogycrxcbomznwo-auth-token=')) {
-        return cookie.split('=')[1];
-      }
-    }
+  const authHeader = req.headers['authorization'];
+  if (authHeader && authHeader.startsWith('Bearer ')) {
+    return authHeader.slice(7);
   }
   return null;
 }
@@ -143,7 +171,7 @@ app.get('/', (req, res) => {
       method: "POST",
       headers: {
         "Content-Type": "application/json",
-        "Cookie": "sb-spuckhogycrxcbomznwo-auth-token=YOUR_AUTH_TOKEN"
+        "Authorization": "Bearer YOUR_AUTH_KEY"
       },
       body: {
         model: "One of: " + Object.keys(MODEL_MAPPING).join(", "),
@@ -156,7 +184,7 @@ app.get('/', (req, res) => {
       }
     },
     availableModels: Object.keys(MODEL_MAPPING),
-    note: "Replace YOUR_AUTH_TOKEN with your actual authentication token."
+    note: "Replace YOUR_AUTH_KEY with your actual authentication key."
   });
 });
 
@@ -168,11 +196,14 @@ app.get('/ai/v1/models', (req, res) => {
 
   res.json({
     object: "list",
-    data: Object.keys(MODEL_MAPPING).map(model => ({
-      id: model,
+    data: Object.entries(MODEL_MAPPING).map(([id, info]) => ({
+      id: id,
       object: "model",
       created: Math.floor(Date.now() / 1000),
-      owned_by: "notdiamond"
+      owned_by: info.provider,
+      permission: [],
+      root: id,
+      parent: null,
     }))
   });
 
@@ -194,7 +225,12 @@ app.post('/ai/v1/chat/completions', async (req, res) => {
 
   let requestData = req.body;
   let messages = requestData.messages;
-  let model = MODEL_MAPPING[userModel] || userModel;
+  let modelInfo = MODEL_MAPPING[userModel];
+  if (!modelInfo) {
+    status = 400;
+    throw new Error('Invalid model specified');
+  }
+  let model = modelInfo.mapping;
   const stream = requestData.stream || false;
 
   if (!messages || !Array.isArray(messages)) {
@@ -215,7 +251,7 @@ app.post('/ai/v1/chat/completions', async (req, res) => {
   const headers = {
     'Content-Type': 'application/json',
     'next-action': '4e63dabc37fef18cae74cbfd41d1bace49acf47e',
-    'Cookie': `sb-spuckhogycrxcbomznwo-auth-token=${authKey}`
+    'Authorization': `Bearer ${authKey}`
   };
 
   const response = await fetch(NOTDIAMOND_URL, {
@@ -224,7 +260,7 @@ app.post('/ai/v1/chat/completions', async (req, res) => {
     body: JSON.stringify([payload])
   });
 
-  const generator = streamNotdiamondResponse(response, model);
+  const generator = streamNotdiamondResponse(response, userModel);
 
   if (stream) {
     res.writeHead(200, {
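
For reference, a minimal sketch of a client request against the updated /ai/v1/chat/completions route after this commit. The base URL and port (http://localhost:3000), the chosen model, and the message content are illustrative assumptions, not taken from the diff; only the header and body shape follow the route documentation above. Requires an environment with global fetch (e.g. Node 18+).

// Sketch only: base URL, port, and message content are placeholders.
(async () => {
  const response = await fetch('http://localhost:3000/ai/v1/chat/completions', {
    method: 'POST',
    headers: {
      'Content-Type': 'application/json',
      // Auth now travels as a Bearer token instead of the Supabase auth cookie.
      'Authorization': 'Bearer YOUR_AUTH_KEY'
    },
    body: JSON.stringify({
      model: 'gpt-4-turbo',                            // any key of MODEL_MAPPING
      messages: [{ role: 'user', content: 'Hello' }],  // OpenAI-style message list
      stream: false                                    // true for streamed chunks
    })
  });
  console.log(await response.json());
})();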