// noapi / app.js
// Origin: smgc — "Create app.js" (commit a5e4f36, verified), 4.31 kB.
// (Hosting-page residue converted to a comment so the file parses as JavaScript.)
// OpenAI-compatible HTTP proxy in front of NotDiamond's mini-chat endpoint.
const express = require('express');
const fetch = require('node-fetch');
const crypto = require('crypto');
const app = express();
const port = 8000;
// Paste all of the provided constant and function definitions here.
const NOTDIAMOND_URL = 'https://chat.notdiamond.ai/mini-chat';
const NOTDIAMOND_HEADERS = {
'Content-Type': 'application/json',
// NOTE(review): this 'next-action' id differs from the one hard-coded in the
// route handler below; presumably these headers are consumed by the pasted
// helper functions (e.g. getNotdiamondModel) — confirm before removing.
'next-action': '8189eb37107121e024940f588629a394a594e6a4'
};
// Bearer token clients must present. If the env var is unset, every request
// is rejected (the auth check compares against undefined).
const AUTH_KEY = process.env.AUTH_KEY;
// Candidate provider/model pairs; presumably consulted by the pasted
// getNotdiamondModel helper — not referenced by the visible code.
const MODEL_LIST = [
{"provider":"openai","model":"gpt-4-turbo-2024-04-09"},{"provider":"google","model":"gemini-1.5-pro-exp-0801"},{"provider":"togetherai","model":"Meta-Llama-3.1-70B-Instruct-Turbo"},{"provider":"togetherai","model":"Meta-Llama-3.1-405B-Instruct-Turbo"},{"provider":"perplexity","model":"llama-3.1-sonar-large-128k-online"},{"provider":"google","model":"gemini-1.5-pro-latest"},{"provider":"anthropic","model":"claude-3-5-sonnet-20240620"},{"provider":"anthropic","model":"claude-3-haiku-20240307"},{"provider":"openai","model":"gpt-4o-mini"},{"provider":"openai","model":"gpt-4o"},{"provider":"mistral","model":"mistral-large-2407"}
];
// Paste all function definitions here (getNotdiamondModel, createOpenAIChunk, streamNotdiamondResponse).
app.use(express.json());
/**
 * Constant-time bearer-token check.
 * Hashes both sides with SHA-256 before crypto.timingSafeEqual so that
 * unequal lengths neither throw nor leak length/timing information.
 * Rejects everything when AUTH_KEY is unset rather than comparing
 * against undefined.
 * @param {string|undefined} header - raw Authorization header value
 * @returns {boolean} true when the supplied bearer token matches AUTH_KEY
 */
function isAuthorized(header) {
  if (!AUTH_KEY || !header || !header.startsWith('Bearer ')) return false;
  const supplied = crypto.createHash('sha256').update(header.slice(7)).digest();
  const expected = crypto.createHash('sha256').update(AUTH_KEY).digest();
  return crypto.timingSafeEqual(supplied, expected);
}

/**
 * OpenAI-compatible chat-completions endpoint.
 * Resolves a model via getNotdiamondModel, forwards the request to
 * NotDiamond, then either relays the response as SSE (stream: true) or
 * accumulates it into a single OpenAI-shaped JSON completion.
 * Responds 401 (bad token), 400 (bad body / model resolution failure),
 * 502 (upstream HTTP error), or 500 (unexpected failure).
 */
app.post('/ai/v1/chat/completions', async (req, res) => {
  if (!isAuthorized(req.headers['authorization'])) {
    return res.status(401).json({ error: 'Unauthorized' });
  }
  try {
    const requestData = req.body;
    const messages = requestData.messages;
    let model = requestData.model || '';
    const stream = requestData.stream || false;
    if (!messages || !Array.isArray(messages)) {
      return res.status(400).json({
        error: 'Invalid request body',
        details: 'messages should be an array of message objects',
        receivedBody: req.body
      });
    }
    // Let NotDiamond pick/confirm the concrete model for this conversation.
    try {
      const { model: resolvedModel } = await getNotdiamondModel(messages, model);
      if (resolvedModel) {
        model = resolvedModel;
      } else {
        throw new Error("Failed to get model from NotDiamond");
      }
    } catch (error) {
      return res.status(400).json({ error: error.message });
    }
    const payload = {
      messages: messages,
      model: model,
      stream: stream,
      // ?? instead of ||: an explicit 0 for temperature/top_p/penalties must
      // be forwarded as 0, not silently replaced by the default.
      frequency_penalty: requestData.frequency_penalty ?? 0,
      presence_penalty: requestData.presence_penalty ?? 0,
      temperature: requestData.temperature ?? 0.6,
      top_p: requestData.top_p ?? 1
    };
    const headers = {
      'Content-Type': 'application/json',
      'next-action': '4e63dabc37fef18cae74cbfd41d1bace49acf47e'
    };
    const response = await fetch(NOTDIAMOND_URL, {
      method: 'POST',
      headers: headers,
      body: JSON.stringify([payload])
    });
    // Fail fast on upstream HTTP errors instead of streaming an error page
    // through the chunk parser.
    if (!response.ok) {
      return res.status(502).json({
        error: 'Upstream error',
        details: `NotDiamond responded with status ${response.status}`
      });
    }
    const generator = streamNotdiamondResponse(response, model);
    if (stream) {
      // Relay each parsed chunk as a Server-Sent Event.
      res.writeHead(200, {
        'Content-Type': 'text/event-stream',
        'Cache-Control': 'no-cache',
        'Connection': 'keep-alive'
      });
      for await (const chunk of generator) {
        res.write(`data: ${JSON.stringify(chunk)}\n\n`);
      }
      res.write("data: [DONE]\n\n");
      res.end();
    } else {
      // Drain the generator and concatenate the deltas into one completion.
      let fullContent = "";
      for await (const chunk of generator) {
        // Optional chaining: a malformed chunk (no choices/delta) must not
        // crash the whole request.
        const delta = chunk.choices?.[0]?.delta?.content;
        if (delta) {
          fullContent += delta;
        }
      }
      // Crude ~4-chars-per-token estimate; total is now the exact sum of the
      // parts (the old floor(len/2) over-counted when len % 4 >= 2).
      const promptTokens = Math.floor(fullContent.length / 4);
      const completionTokens = Math.floor(fullContent.length / 4);
      res.json({
        id: `chatcmpl-${crypto.randomUUID()}`,
        object: "chat.completion",
        created: Math.floor(Date.now() / 1000),
        model: model,
        system_fingerprint: "fp_4e2b2da518",
        choices: [
          {
            index: 0,
            message: {
              role: "assistant",
              content: fullContent
            },
            finish_reason: "stop"
          }
        ],
        usage: {
          prompt_tokens: promptTokens,
          completion_tokens: completionTokens,
          total_tokens: promptTokens + completionTokens
        }
      });
    }
  } catch (error) {
    res.status(500).json({
      error: 'Internal Server Error',
      details: error.message
    });
  }
});
// Bind the HTTP server and log the port once it is listening.
app.listen(port, function onReady() {
  console.log(`Server running on port ${port}`);
});