llamacpp: use generic CC endpoint for captioning

This commit is contained in:
Cohee
2025-05-09 22:33:25 +03:00
parent c6a64d8526
commit 8a4da487dd
3 changed files with 7 additions and 49 deletions

View File

@ -97,8 +97,6 @@ export async function getMultimodalCaption(base64Img, prompt) {
return '/api/google/caption-image';
case 'anthropic':
return '/api/anthropic/caption-image';
case 'llamacpp':
return '/api/backends/text-completions/llamacpp/caption-image';
case 'ollama':
return '/api/backends/text-completions/ollama/caption-image';
default:

View File

@ -502,51 +502,6 @@ ollama.post('/caption-image', async function (request, response) {
const llamacpp = express.Router();
llamacpp.post('/caption-image', async function (request, response) {
try {
if (!request.body.server_url) {
return response.sendStatus(400);
}
console.debug('LlamaCpp caption request:', request.body);
const baseUrl = trimV1(request.body.server_url);
const fetchResponse = await fetch(`${baseUrl}/completion`, {
method: 'POST',
headers: { 'Content-Type': 'application/json' },
body: JSON.stringify({
prompt: `USER:[img-1]${String(request.body.prompt).trim()}\nASSISTANT:`,
image_data: [{ data: request.body.image, id: 1 }],
temperature: 0.1,
stream: false,
stop: ['USER:', '</s>'],
}),
});
if (!fetchResponse.ok) {
console.error('LlamaCpp caption error:', fetchResponse.status, fetchResponse.statusText);
return response.status(500).send({ error: true });
}
/** @type {any} */
const data = await fetchResponse.json();
console.debug('LlamaCpp caption response:', data);
const caption = data?.content || '';
if (!caption) {
console.error('LlamaCpp caption is empty.');
return response.status(500).send({ error: true });
}
return response.send({ caption });
} catch (error) {
console.error(error);
return response.sendStatus(500);
}
});
llamacpp.post('/props', async function (request, response) {
try {
if (!request.body.server_url) {

View File

@ -45,6 +45,10 @@ router.post('/caption-image', async (request, response) => {
key = readSecret(request.user.directories, SECRET_KEYS.KOBOLDCPP);
}
if (request.body.api === 'llamacpp') {
key = readSecret(request.user.directories, SECRET_KEYS.LLAMACPP);
}
if (request.body.api === 'vllm') {
key = readSecret(request.user.directories, SECRET_KEYS.VLLM);
}
@ -69,7 +73,8 @@ router.post('/caption-image', async (request, response) => {
key = readSecret(request.user.directories, SECRET_KEYS.XAI);
}
if (!key && !request.body.reverse_proxy && ['custom', 'ooba', 'koboldcpp', 'vllm'].includes(request.body.api) === false) {
const noKeyTypes = ['custom', 'ooba', 'koboldcpp', 'vllm', 'llamacpp'];
if (!key && !request.body.reverse_proxy && !noKeyTypes.includes(request.body.api)) {
console.warn('No key found for API', request.body.api);
return response.sendStatus(400);
}
@ -156,7 +161,7 @@ router.post('/caption-image', async (request, response) => {
});
}
if (request.body.api === 'koboldcpp' || request.body.api === 'vllm') {
if (['koboldcpp', 'vllm', 'llamacpp'].includes(request.body.api)) {
apiUrl = `${trimV1(request.body.server_url)}/v1/chat/completions`;
}