smgc committed
Commit
2068509
1 Parent(s): f589e7f

Update app.js

Files changed (1)
  1. app.js +42 -5
app.js CHANGED

@@ -10,9 +10,19 @@ const NOTDIAMOND_HEADERS = {
   'Content-Type': 'application/json',
   'next-action': '8189eb37107121e024940f588629a394a594e6a4'
 };
-const AUTH_KEY = process.env.AUTH_KEY;
-
-const DEFAULT_MODEL = 'gpt-4-turbo-2024-04-09';
+const AUTH_KEY = process.env.AUTH_KEY;
+
+const DEFAULT_MODEL = 'gpt-4o';
+
+const MODEL_MAPPING = {
+  'gpt-4-turbo': 'gpt-4-turbo-2024-04-09',
+  'claude-3-5-sonnet-20240620': 'anthropic.claude-3-5-sonnet-20240620-v1:0',
+  'gemini-1.5-pro-latest': 'models/gemini-1.5-pro-latest',
+  'gemini-1.5-pro-exp-0801': 'models/gemini-1.5-pro-exp-0801',
+  'llama-3.1-405b-instruct': 'meta.llama3-1-405b-instruct-v1:0',
+  'llama-3.1-70b-instruct': 'meta.llama3-1-70b-instruct-v1:0',
+  'perplexity': 'llama-3.1-sonar-large-128k-online'
+};
 
 function createOpenAIChunk(content, model, finishReason = null) {
   return {
@@ -109,6 +119,32 @@ async function* streamNotdiamondResponse(response, model) {
 
 app.use(express.json());
 
+// Add a root route that returns usage instructions for the service
+app.get('/', (req, res) => {
+  res.json({
+    service: "AI Chat Completion Proxy",
+    usage: {
+      endpoint: "/ai/v1/chat/completions",
+      method: "POST",
+      headers: {
+        "Content-Type": "application/json",
+        "Authorization": "Bearer YOUR_AUTH_KEY"
+      },
+      body: {
+        model: "One of: " + Object.keys(MODEL_MAPPING).join(", "),
+        messages: [
+          { role: "system", content: "You are a helpful assistant." },
+          { role: "user", content: "Hello, who are you?" }
+        ],
+        stream: false,
+        temperature: 0.7
+      }
+    },
+    availableModels: Object.keys(MODEL_MAPPING),
+    note: "Replace YOUR_AUTH_KEY with the actual authentication key."
+  });
+});
+
 app.post('/ai/v1/chat/completions', async (req, res) => {
   const authHeader = req.headers['authorization'];
   if (!authHeader || !authHeader.startsWith('Bearer ') || authHeader.slice(7) !== AUTH_KEY) {
@@ -119,7 +155,8 @@ app.post('/ai/v1/chat/completions', async (req, res) => {
   let requestData = req.body;
 
   let messages = requestData.messages;
-  let model = requestData.model || DEFAULT_MODEL;
+  let userModel = requestData.model || DEFAULT_MODEL;
+  let model = MODEL_MAPPING[userModel] || userModel;
   const stream = requestData.stream || false;
 
   if (!messages || !Array.isArray(messages)) {
@@ -177,7 +214,7 @@ app.post('/ai/v1/chat/completions', async (req, res) => {
     id: `chatcmpl-${crypto.randomUUID()}`,
     object: "chat.completion",
     created: Math.floor(Date.now() / 1000),
-    model: model,
+    model: userModel,
     system_fingerprint: "fp_4e2b2da518",
     choices: [
       {
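
For reference, a minimal client call against the updated proxy could look like the sketch below. It is not part of the commit: the host and port (http://localhost:3000), the use of Node 18+'s built-in fetch, and AUTH_KEY being exported in the shell are all assumptions. The model name claude-3-5-sonnet-20240620 is one of the new MODEL_MAPPING keys; the proxy forwards the mapped provider ID upstream and, after this change, reports the client-facing name back in the response's model field.

// Minimal sketch of a client request against the updated proxy.
// Assumptions (not part of the diff): Node 18+ (global fetch), the proxy
// listening on http://localhost:3000, and AUTH_KEY exported in the shell.
async function main() {
  const response = await fetch('http://localhost:3000/ai/v1/chat/completions', {
    method: 'POST',
    headers: {
      'Content-Type': 'application/json',
      'Authorization': `Bearer ${process.env.AUTH_KEY}`
    },
    body: JSON.stringify({
      // One of the MODEL_MAPPING keys; the proxy sends the mapped provider ID
      // upstream and echoes this client-facing name in the response.
      model: 'claude-3-5-sonnet-20240620',
      messages: [
        { role: 'system', content: 'You are a helpful assistant.' },
        { role: 'user', content: 'Hello, who are you?' }
      ],
      stream: false
    })
  });
  console.log(await response.json());
}

main().catch(console.error);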