mirror of
https://github.com/SillyTavern/SillyTavern.git
synced 2025-06-05 21:59:27 +02:00
Add Vertex AI express mode support (#3977)
* Add Vertex AI express mode support Split Google AI Studio and Vertex AI * Add support for Vertex AI, including updating default models and related settings, modifying frontend HTML to include Vertex AI options, and adjusting request processing logic in the backend API. * Log API name in the console * Merge sysprompt toggles back * Use Gemma tokenizers for Vertex and LearnLM * AI Studio parity updates * Add link to express mode doc. Also technically it's not a form * Split title * Use array includes * Add support for Google Vertex AI in image captioning feature * Specify caption API name, add to compression list --------- Co-authored-by: Cohee <18619528+Cohee1207@users.noreply.github.com>
This commit is contained in:
@@ -7,6 +7,7 @@ import { readSecret, SECRET_KEYS } from './secrets.js';
|
||||
import { GEMINI_SAFETY } from '../constants.js';
|
||||
|
||||
const API_MAKERSUITE = 'https://generativelanguage.googleapis.com'; // Google AI Studio (MakerSuite) base URL; key is passed via ?key= query param
|
||||
const API_VERTEX_AI = 'https://us-central1-aiplatform.googleapis.com'; // Vertex AI express-mode endpoint; NOTE(review): region is hard-coded to us-central1 — confirm this is intended
|
||||
|
||||
export const router = express.Router(); // Express sub-router for the Google multimodal endpoints (mounted by the parent app)
|
||||
|
||||
@@ -14,12 +15,27 @@ router.post('/caption-image', async (request, response) => {
|
||||
try {
|
||||
const mimeType = request.body.image.split(';')[0].split(':')[1];
|
||||
const base64Data = request.body.image.split(',')[1];
|
||||
const apiKey = request.body.reverse_proxy ? request.body.proxy_password : readSecret(request.user.directories, SECRET_KEYS.MAKERSUITE);
|
||||
const apiUrl = new URL(request.body.reverse_proxy || API_MAKERSUITE);
|
||||
const useVertexAi = request.body.api === 'vertexai';
|
||||
const apiName = useVertexAi ? 'Google Vertex AI' : 'Google AI Studio';
|
||||
let apiKey;
|
||||
let apiUrl;
|
||||
if (useVertexAi) {
|
||||
apiKey = request.body.reverse_proxy ? request.body.proxy_password : readSecret(request.user.directories, SECRET_KEYS.VERTEXAI);
|
||||
apiUrl = new URL(request.body.reverse_proxy || API_VERTEX_AI);
|
||||
} else {
|
||||
apiKey = request.body.reverse_proxy ? request.body.proxy_password : readSecret(request.user.directories, SECRET_KEYS.MAKERSUITE);
|
||||
apiUrl = new URL(request.body.reverse_proxy || API_MAKERSUITE);
|
||||
}
|
||||
const model = request.body.model || 'gemini-2.0-flash';
|
||||
const url = `${apiUrl.origin}/v1beta/models/${model}:generateContent?key=${apiKey}`;
|
||||
let url;
|
||||
if (useVertexAi) {
|
||||
url = `${apiUrl.origin}/v1/publishers/google/models/${model}:generateContent?key=${apiKey}`;
|
||||
} else {
|
||||
url = `${apiUrl.origin}/v1beta/models/${model}:generateContent?key=${apiKey}`;
|
||||
}
|
||||
const body = {
|
||||
contents: [{
|
||||
role: 'user',
|
||||
parts: [
|
||||
{ text: request.body.prompt },
|
||||
{
|
||||
@@ -32,7 +48,7 @@ router.post('/caption-image', async (request, response) => {
|
||||
safetySettings: GEMINI_SAFETY,
|
||||
};
|
||||
|
||||
console.debug('Multimodal captioning request', model, body);
|
||||
console.debug(`${apiName} captioning request`, model, body);
|
||||
|
||||
const result = await fetch(url, {
|
||||
body: JSON.stringify(body),
|
||||
@@ -44,13 +60,13 @@ router.post('/caption-image', async (request, response) => {
|
||||
|
||||
if (!result.ok) {
|
||||
const error = await result.json();
|
||||
console.error(`Google AI Studio API returned error: ${result.status} ${result.statusText}`, error);
|
||||
console.error(`${apiName} API returned error: ${result.status} ${result.statusText}`, error);
|
||||
return response.status(500).send({ error: true });
|
||||
}
|
||||
|
||||
/** @type {any} */
|
||||
const data = await result.json();
|
||||
console.info('Multimodal captioning response', data);
|
||||
console.info(`${apiName} captioning response`, data);
|
||||
|
||||
const candidates = data?.candidates;
|
||||
if (!candidates) {
|
||||
|
Reference in New Issue
Block a user