File size: 4,310 Bytes
a5e4f36 |
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 87 88 89 90 91 92 93 94 95 96 97 98 99 100 101 102 103 104 105 106 107 108 109 110 111 112 113 114 115 116 117 118 119 120 121 122 123 124 125 126 127 128 129 130 131 132 133 |
const express = require('express');
const fetch = require('node-fetch');
const crypto = require('crypto');
const app = express();
const port = 8000;
// Paste all of the provided constant and function definitions here. (translated)
// FIXME: the route handler below calls getNotdiamondModel() and
// streamNotdiamondResponse(), which are never defined in this file — the
// server will throw ReferenceError at request time until they are pasted in.
const NOTDIAMOND_URL = 'https://chat.notdiamond.ai/mini-chat';
// NOTE(review): unused in this file — presumably consumed by the pasted-in
// helper functions. The route handler builds its own headers with a
// different 'next-action' value; confirm which one is current.
const NOTDIAMOND_HEADERS = {
'Content-Type': 'application/json',
'next-action': '8189eb37107121e024940f588629a394a594e6a4'
};
// Bearer token expected on incoming requests; if the env var is unset every
// request will be rejected with 401.
const AUTH_KEY = process.env.AUTH_KEY;
// NOTE(review): unused in this file — presumably referenced by the pasted-in
// getNotdiamondModel() helper.
const MODEL_LIST = [
{"provider":"openai","model":"gpt-4-turbo-2024-04-09"},{"provider":"google","model":"gemini-1.5-pro-exp-0801"},{"provider":"togetherai","model":"Meta-Llama-3.1-70B-Instruct-Turbo"},{"provider":"togetherai","model":"Meta-Llama-3.1-405B-Instruct-Turbo"},{"provider":"perplexity","model":"llama-3.1-sonar-large-128k-online"},{"provider":"google","model":"gemini-1.5-pro-latest"},{"provider":"anthropic","model":"claude-3-5-sonnet-20240620"},{"provider":"anthropic","model":"claude-3-haiku-20240307"},{"provider":"openai","model":"gpt-4o-mini"},{"provider":"openai","model":"gpt-4o"},{"provider":"mistral","model":"mistral-large-2407"}
];
// Paste all of the function definitions here
// (getNotdiamondModel, createOpenAIChunk, streamNotdiamondResponse). (translated)
app.use(express.json());
/**
 * OpenAI-compatible chat-completions endpoint backed by NotDiamond.
 * Expects `Authorization: Bearer <AUTH_KEY>`, an OpenAI-style JSON body,
 * and supports both streaming (SSE) and non-streaming responses.
 */
app.post('/ai/v1/chat/completions', async (req, res) => {
  // Reject early when AUTH_KEY is unconfigured: the original comparison
  // against `undefined` also 401'd everything, but silently.
  const authHeader = req.headers['authorization'];
  if (!AUTH_KEY || !authHeader || !authHeader.startsWith('Bearer ') || authHeader.slice(7) !== AUTH_KEY) {
    return res.status(401).json({ error: 'Unauthorized' });
  }
  try {
    const requestData = req.body;
    const messages = requestData.messages;
    let model = requestData.model || '';
    const stream = Boolean(requestData.stream);
    if (!messages || !Array.isArray(messages)) {
      return res.status(400).json({
        error: 'Invalid request body',
        details: 'messages should be an array of message objects',
        receivedBody: req.body
      });
    }
    // Ask NotDiamond to pick/confirm the concrete model for this request.
    try {
      const { model: resolvedModel } = await getNotdiamondModel(messages, model);
      if (resolvedModel) {
        model = resolvedModel;
      } else {
        throw new Error("Failed to get model from NotDiamond");
      }
    } catch (error) {
      return res.status(400).json({ error: error.message });
    }
    // Use ?? (not ||) so explicit 0 temperature / 0 top_p survive instead of
    // being silently replaced by the defaults.
    const payload = {
      messages: messages,
      model: model,
      stream: stream,
      frequency_penalty: requestData.frequency_penalty ?? 0,
      presence_penalty: requestData.presence_penalty ?? 0,
      temperature: requestData.temperature ?? 0.6,
      top_p: requestData.top_p ?? 1
    };
    // NOTE(review): this 'next-action' differs from NOTDIAMOND_HEADERS at the
    // top of the file — confirm which value is current.
    const headers = {
      'Content-Type': 'application/json',
      'next-action': '4e63dabc37fef18cae74cbfd41d1bace49acf47e'
    };
    const response = await fetch(NOTDIAMOND_URL, {
      method: 'POST',
      headers: headers,
      body: JSON.stringify([payload])
    });
    // Surface upstream failures instead of streaming an error page as chat.
    if (!response.ok) {
      throw new Error(`NotDiamond upstream returned HTTP ${response.status}`);
    }
    const generator = streamNotdiamondResponse(response, model);
    if (stream) {
      // Server-sent events: one `data:` line per chunk, then [DONE].
      res.writeHead(200, {
        'Content-Type': 'text/event-stream',
        'Cache-Control': 'no-cache',
        'Connection': 'keep-alive'
      });
      for await (const chunk of generator) {
        res.write(`data: ${JSON.stringify(chunk)}\n\n`);
      }
      res.write("data: [DONE]\n\n");
      res.end();
    } else {
      // Non-streaming: drain the generator and assemble one completion body.
      let fullContent = "";
      for await (const chunk of generator) {
        // Optional chaining: a malformed chunk should not crash the request.
        const delta = chunk.choices?.[0]?.delta?.content;
        if (delta) {
          fullContent += delta;
        }
      }
      // Rough 4-chars-per-token estimate; total is kept consistent with the
      // two parts (the original floor(len/2) could disagree with their sum).
      const estimatedTokens = Math.floor(fullContent.length / 4);
      res.json({
        id: `chatcmpl-${crypto.randomUUID()}`,
        object: "chat.completion",
        created: Math.floor(Date.now() / 1000),
        model: model,
        system_fingerprint: "fp_4e2b2da518",
        choices: [
          {
            index: 0,
            message: {
              role: "assistant",
              content: fullContent
            },
            finish_reason: "stop"
          }
        ],
        usage: {
          prompt_tokens: estimatedTokens,
          completion_tokens: estimatedTokens,
          total_tokens: estimatedTokens * 2
        }
      });
    }
  } catch (error) {
    res.status(500).json({
      error: 'Internal Server Error',
      details: error.message
    });
  }
});
// Start the HTTP server and announce the bound port.
app.listen(port, function onListening() {
  console.log(`Server running on port ${port}`);
});
|