Mirror of https://github.com/SillyTavern/SillyTavern.git, synced 2025-06-05 21:59:27 +02:00
(WIP) PaLM API
server.js: 73 changed lines
@@ -182,7 +182,7 @@ const AVATAR_WIDTH = 400;
 const AVATAR_HEIGHT = 600;
 const jsonParser = express.json({ limit: '100mb' });
 const urlencodedParser = express.urlencoded({ extended: true, limit: '100mb' });
-const { DIRECTORIES, UPLOADS_PATH } = require('./src/constants');
+const { DIRECTORIES, UPLOADS_PATH, PALM_SAFETY } = require('./src/constants');

 // CSRF Protection //
 if (cliArguments.disableCsrf === false) {
@@ -1299,11 +1299,11 @@ app.post("/getstats", jsonParser, function (request, response) {

 /**
  * Endpoint: POST /recreatestats
- *
+ *
  * Triggers the recreation of statistics from chat files.
  * - If successful: returns a 200 OK status.
  * - On failure: returns a 500 Internal Server Error status.
- *
+ *
  * @param {Object} request - Express request object.
  * @param {Object} response - Express response object.
  */
@@ -2926,6 +2926,69 @@ async function sendClaudeRequest(request, response) {
     }
 }

+/**
+ * @param {express.Request} request
+ * @param {express.Response} response
+ */
+async function sendPalmRequest(request, response) {
+    const api_key_palm = readSecret(SECRET_KEYS.PALM);
+
+    if (!api_key_palm) {
+        return response.status(401).send({ error: true });
+    }
+
+    const body = {
+        prompt: {
+            text: request.body.messages,
+        },
+        stopSequences: request.body.stop,
+        safetySettings: PALM_SAFETY,
+        temperature: request.body.temperature,
+        topP: request.body.top_p,
+        topK: request.body.top_k || undefined,
+        maxOutputTokens: request.body.max_tokens,
+        candidate_count: 1,
+    };
+
+    console.log('Palm request:', body);
+
+    try {
+        const controller = new AbortController();
+        request.socket.removeAllListeners('close');
+        request.socket.on('close', function () {
+            controller.abort();
+        });
+
+        const generateResponse = await fetch(`https://generativelanguage.googleapis.com/v1beta2/models/text-bison-001:generateText?key=${api_key_palm}`, {
+            body: JSON.stringify(body),
+            method: "POST",
+            headers: {
+                "Content-Type": "application/json"
+            },
+            signal: controller.signal,
+            timeout: 0,
+        });
+
+        if (!generateResponse.ok) {
+            console.log(`Palm API returned error: ${generateResponse.status} ${generateResponse.statusText} ${await generateResponse.text()}`);
+            return response.status(generateResponse.status).send({ error: true });
+        }
+
+        const generateResponseJson = await generateResponse.json();
+        const responseText = generateResponseJson.candidates[0]?.output;
+        console.log('Palm response:', responseText);
+
+        // Wrap it back to OAI format
+        const reply = { choices: [{ "message": { "content": responseText, } }] };
+        return response.send(reply);
+    } catch (error) {
+        console.log('Error communicating with Palm API: ', error);
+        if (!response.headersSent) {
+            return response.status(500).send({ error: true });
+        }
+    }
+}
+
 app.post("/generate_openai", jsonParser, function (request, response_generate_openai) {
     if (!request.body) return response_generate_openai.status(400).send({ error: true });

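For readers skimming the hunk above, the new handler boils down to a field-for-field translation between SillyTavern's OpenAI-style request body and the payload PaLM's text-bison generateText endpoint expects, plus re-wrapping the reply into OpenAI chat-completion shape. The sketch below is not part of the commit: `clientBody` stands in for `request.body`, the values are invented, and `PALM_SAFETY` is swapped for an empty array purely so the snippet runs on its own under Node 18+.

// Illustrative sketch only (not part of the commit): the mapping performed by sendPalmRequest.
const clientBody = {
    messages: 'Write a short greeting.',
    stop: ['\nUser:'],
    temperature: 0.7,
    top_p: 0.95,
    top_k: 40,
    max_tokens: 256,
};

// Outgoing payload for POST .../models/text-bison-001:generateText
const palmBody = {
    prompt: { text: clientBody.messages },  // messages is forwarded verbatim
    stopSequences: clientBody.stop,
    safetySettings: [],                     // PALM_SAFETY in the real handler
    temperature: clientBody.temperature,
    topP: clientBody.top_p,
    topK: clientBody.top_k || undefined,
    maxOutputTokens: clientBody.max_tokens,
    candidate_count: 1,
};

// PaLM returns the generated text in candidates[0].output; the handler re-wraps it
// so callers keep reading choices[0].message.content, exactly as with OpenAI.
const palmJson = { candidates: [{ output: 'Hello! How can I help today?' }] };
const openAiShapedReply = { choices: [{ message: { content: palmJson.candidates[0]?.output } }] };

console.log(palmBody, openAiShapedReply);

Note that text-bison's prompt.text is a plain string while the handler forwards request.body.messages as-is; how a chat history gets flattened into that string is presumably part of the remaining WIP.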
@@ -2941,6 +3004,10 @@ app.post("/generate_openai", jsonParser, function (request, response_generate_op
         return sendAI21Request(request, response_generate_openai);
     }

+    if (request.body.use_palm) {
+        return sendPalmRequest(request, response_generate_openai);
+    }
+
     let api_url;
     let api_key_openai;
     let headers;
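With the route wired up, a client reaches the new backend through the existing /generate_openai endpoint by setting use_palm, provided a PaLM API key has already been saved via the secrets mechanism (readSecret(SECRET_KEYS.PALM) in the handler). Below is a hypothetical call for illustration only; the port, the disabled CSRF protection, and the payload values are assumptions, not something this commit defines.

// Hypothetical client call exercising the new code path (not part of the commit).
// Assumes the server listens on localhost:8000 and CSRF protection is disabled;
// adjust both to your setup. Requires Node 18+ for the built-in fetch.
async function callPalmViaSillyTavern() {
    const response = await fetch('http://localhost:8000/generate_openai', {
        method: 'POST',
        headers: { 'Content-Type': 'application/json' },
        body: JSON.stringify({
            use_palm: true,                      // routes the request to sendPalmRequest
            messages: 'Write a short greeting.', // forwarded as prompt.text
            temperature: 0.7,
            top_p: 0.95,
            top_k: 40,
            max_tokens: 256,
            stop: ['\nUser:'],
        }),
    });

    if (!response.ok) {
        throw new Error(`Request failed: ${response.status}`);
    }

    const data = await response.json();
    // The server wraps PaLM's output back into OpenAI chat-completion shape.
    console.log(data.choices[0].message.content);
}

callPalmViaSillyTavern().catch(console.error);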