smgc committed
Commit a5e4f36
Parent: 6a0e6bf

Create app.js

Files changed (1)
app.js +132 -0
app.js ADDED
@@ -0,0 +1,132 @@
+ const express = require('express');
+ const fetch = require('node-fetch');
+ const crypto = require('crypto');
+
+ const app = express();
+ const port = 8000;
+
+ // Paste all of the constants and function definitions you provided here
+ const NOTDIAMOND_URL = 'https://chat.notdiamond.ai/mini-chat';
+ const NOTDIAMOND_HEADERS = {
+   'Content-Type': 'application/json',
+   'next-action': '8189eb37107121e024940f588629a394a594e6a4'
+ };
+ const AUTH_KEY = process.env.AUTH_KEY;
+
+ const MODEL_LIST = [
+   {"provider":"openai","model":"gpt-4-turbo-2024-04-09"},{"provider":"google","model":"gemini-1.5-pro-exp-0801"},{"provider":"togetherai","model":"Meta-Llama-3.1-70B-Instruct-Turbo"},{"provider":"togetherai","model":"Meta-Llama-3.1-405B-Instruct-Turbo"},{"provider":"perplexity","model":"llama-3.1-sonar-large-128k-online"},{"provider":"google","model":"gemini-1.5-pro-latest"},{"provider":"anthropic","model":"claude-3-5-sonnet-20240620"},{"provider":"anthropic","model":"claude-3-haiku-20240307"},{"provider":"openai","model":"gpt-4o-mini"},{"provider":"openai","model":"gpt-4o"},{"provider":"mistral","model":"mistral-large-2407"}
+ ];
+
+ // Paste all of the function definitions here (getNotdiamondModel, createOpenAIChunk, streamNotdiamondResponse)
+
+ app.use(express.json());
+
+ app.post('/ai/v1/chat/completions', async (req, res) => {
+   const authHeader = req.headers['authorization'];
+   if (!authHeader || !authHeader.startsWith('Bearer ') || authHeader.slice(7) !== AUTH_KEY) {
+     return res.status(401).json({ error: 'Unauthorized' });
+   }
+
+   try {
+     let requestData = req.body;
+
+     let messages = requestData.messages;
+     let model = requestData.model || '';
+     const stream = requestData.stream || false;
+
+     if (!messages || !Array.isArray(messages)) {
+       return res.status(400).json({
+         error: 'Invalid request body',
+         details: 'messages should be an array of message objects',
+         receivedBody: req.body
+       });
+     }
+
+     try {
+       const { model: resolvedModel } = await getNotdiamondModel(messages, model);
+       if (resolvedModel) {
+         model = resolvedModel;
+       } else {
+         throw new Error("Failed to get model from NotDiamond");
+       }
+     } catch (error) {
+       return res.status(400).json({ error: error.message });
+     }
+
+     const payload = {
+       messages: messages,
+       model: model,
+       stream: stream,
+       frequency_penalty: requestData.frequency_penalty || 0,
+       presence_penalty: requestData.presence_penalty || 0,
+       temperature: requestData.temperature || 0.6,
+       top_p: requestData.top_p || 1
+     };
+
+     const headers = {
+       'Content-Type': 'application/json',
+       'next-action': '4e63dabc37fef18cae74cbfd41d1bace49acf47e'
+     };
+
+     const response = await fetch(NOTDIAMOND_URL, {
+       method: 'POST',
+       headers: headers,
+       body: JSON.stringify([payload])
+     });
+
+     const generator = streamNotdiamondResponse(response, model);
+
+     if (stream) {
+       res.writeHead(200, {
+         'Content-Type': 'text/event-stream',
+         'Cache-Control': 'no-cache',
+         'Connection': 'keep-alive'
+       });
+
+       for await (const chunk of generator) {
+         res.write(`data: ${JSON.stringify(chunk)}\n\n`);
+       }
+       res.write("data: [DONE]\n\n");
+       res.end();
+     } else {
+       let fullContent = "";
+       for await (const chunk of generator) {
+         if (chunk.choices[0].delta.content) {
+           fullContent += chunk.choices[0].delta.content;
+         }
+       }
+
+       res.json({
+         id: `chatcmpl-${crypto.randomUUID()}`,
+         object: "chat.completion",
+         created: Math.floor(Date.now() / 1000),
+         model: model,
+         system_fingerprint: "fp_4e2b2da518",
+         choices: [
+           {
+             index: 0,
+             message: {
+               role: "assistant",
+               content: fullContent
+             },
+             finish_reason: "stop"
+           }
+         ],
+         usage: {
+           prompt_tokens: Math.floor(fullContent.length / 4),
+           completion_tokens: Math.floor(fullContent.length / 4),
+           total_tokens: Math.floor(fullContent.length / 2)
+         }
+       });
+     }
+   } catch (error) {
+     res.status(500).json({
+       error: 'Internal Server Error',
+       details: error.message
+     });
+   }
+ });
+
+ app.listen(port, () => {
+   console.log(`Server running on port ${port}`);
+ });
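
Note: the committed file leaves getNotdiamondModel, createOpenAIChunk, and streamNotdiamondResponse as paste-here placeholders, yet the route handler depends on all three. Below is a minimal, unverified sketch of what they might look like, reusing the constants already defined in app.js. The createOpenAIChunk envelope follows the standard OpenAI chat.completion.chunk shape that the non-streaming branch mirrors; the NotDiamond request/response details (the routing call in getNotdiamondModel and the parsing in streamNotdiamondResponse) are assumptions, since the commit does not show NotDiamond's actual wire format.

// Sketch only; the real definitions are meant to be pasted into app.js above.
function createOpenAIChunk(content, model) {
  // Wrap one text delta in OpenAI's chat.completion.chunk envelope.
  return {
    id: `chatcmpl-${crypto.randomUUID()}`,
    object: 'chat.completion.chunk',
    created: Math.floor(Date.now() / 1000),
    model: model,
    system_fingerprint: 'fp_4e2b2da518',
    choices: [
      {
        index: 0,
        delta: content ? { content: content } : {},
        finish_reason: null
      }
    ]
  };
}

async function getNotdiamondModel(messages, requestedModel) {
  // If the caller already named a model from MODEL_LIST, keep it.
  const known = MODEL_LIST.find(entry => entry.model === requestedModel);
  if (known) {
    return { model: known.model };
  }
  // Otherwise ask the NotDiamond router which model to use.
  // NOTE: the response format of the mini-chat action is assumed here
  // (a JSON body containing a `model` field); the commit does not show it.
  const response = await fetch(NOTDIAMOND_URL, {
    method: 'POST',
    headers: NOTDIAMOND_HEADERS,
    body: JSON.stringify([{ messages: messages }])
  });
  const data = await response.json();
  return { model: data.model };
}

async function* streamNotdiamondResponse(response, model) {
  // NOTE: assumed parsing. This treats the upstream body as a plain text
  // stream and re-emits each piece as an OpenAI-style chunk; the real
  // implementation must parse whatever framing NotDiamond actually uses.
  for await (const piece of response.body) {
    const text = piece.toString('utf8');
    if (text) {
      yield createOpenAIChunk(text, model);
    }
  }
}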
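
For reference, with those helpers in place and AUTH_KEY exported, the proxy behaves like an OpenAI-compatible chat completions endpoint on the port configured above. A small client sketch (the prompt and model choice are placeholders):

// Example client call against the proxy started by app.js.
const fetch = require('node-fetch');

async function main() {
  const response = await fetch('http://localhost:8000/ai/v1/chat/completions', {
    method: 'POST',
    headers: {
      'Content-Type': 'application/json',
      'Authorization': `Bearer ${process.env.AUTH_KEY}` // must match the server's AUTH_KEY
    },
    body: JSON.stringify({
      model: 'gpt-4o-mini', // e.g. an entry from MODEL_LIST; final routing happens server-side via getNotdiamondModel
      messages: [{ role: 'user', content: 'Hello!' }],
      stream: false
    })
  });
  const data = await response.json();
  console.log(data.choices[0].message.content);
}

main().catch(console.error);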