Add ollama and llamacpp as vector sources

Author: Cohee
Date: 2024-05-28 22:54:50 +03:00
parent c858fccc5f
commit 2b3dfc5ae2
6 changed files with 286 additions and 20 deletions

src/additional-headers.js

@@ -164,6 +164,17 @@ function getOverrideHeaders(urlHost) {
 * @param {string|null} server API server for new request
 */
function setAdditionalHeaders(request, args, server) {
+    setAdditionalHeadersByType(args.headers, request.body.api_type, server, request.user.directories);
+}
+
+/**
+ *
+ * @param {object} requestHeaders Request headers
+ * @param {string} type API type
+ * @param {string|null} server API server for new request
+ * @param {import('./users').UserDirectoryList} directories User directories
+ */
+function setAdditionalHeadersByType(requestHeaders, type, server, directories) {
    const headerGetters = {
        [TEXTGEN_TYPES.MANCER]: getMancerHeaders,
        [TEXTGEN_TYPES.VLLM]: getVllmHeaders,
@@ -178,13 +189,13 @@ function setAdditionalHeaders(request, args, server) {
        [TEXTGEN_TYPES.LLAMACPP]: getLlamaCppHeaders,
    };

-    const getHeaders = headerGetters[request.body.api_type];
-    const headers = getHeaders ? getHeaders(request.user.directories) : {};
+    const getHeaders = headerGetters[type];
+    const headers = getHeaders ? getHeaders(directories) : {};

    if (typeof server === 'string' && server.length > 0) {
        try {
            const url = new URL(server);
            const overrideHeaders = getOverrideHeaders(url.host);

            if (overrideHeaders && Object.keys(overrideHeaders).length > 0) {
                Object.assign(headers, overrideHeaders);
@@ -194,10 +205,11 @@ function setAdditionalHeaders(request, args, server) {
        }
    }

-    Object.assign(args.headers, headers);
+    Object.assign(requestHeaders, headers);
}

module.exports = {
    getOverrideHeaders,
    setAdditionalHeaders,
+    setAdditionalHeadersByType,
};
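The net effect of this refactor: header injection no longer needs a full Express request object, so any code that holds a plain headers object and knows the API type can reuse it, which is exactly what the new vector providers below do. A minimal sketch of the new call shape (the server URL is an assumed placeholder, not from the commit):

const { setAdditionalHeadersByType } = require('./additional-headers');
const { TEXTGEN_TYPES } = require('./constants');

// Collect auth/override headers for a llama.cpp server into a bare object.
// `directories` is the user's directory list, normally request.user.directories.
function buildLlamaCppHeaders(directories) {
    const headers = {};
    setAdditionalHeadersByType(headers, TEXTGEN_TYPES.LLAMACPP, 'http://127.0.0.1:8080', directories);
    return headers;
}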

src/endpoints/vectors.js

@@ -5,7 +5,18 @@ const sanitize = require('sanitize-filename');
const { jsonParser } = require('../express-common');

// Don't forget to add new sources to the SOURCES array
-const SOURCES = ['transformers', 'mistral', 'openai', 'extras', 'palm', 'togetherai', 'nomicai', 'cohere'];
+const SOURCES = [
+    'transformers',
+    'mistral',
+    'openai',
+    'extras',
+    'palm',
+    'togetherai',
+    'nomicai',
+    'cohere',
+    'ollama',
+    'llamacpp',
+];
/**
* Gets the vector for the given text from the given source.
@@ -32,6 +43,10 @@ async function getVector(source, sourceSettings, text, isQuery, directories) {
            return require('../vectors/makersuite-vectors').getMakerSuiteVector(text, directories);
        case 'cohere':
            return require('../vectors/cohere-vectors').getCohereVector(text, isQuery, directories, sourceSettings.model);
+        case 'llamacpp':
+            return require('../vectors/llamacpp-vectors').getLlamaCppVector(text, sourceSettings.apiUrl, directories);
+        case 'ollama':
+            return require('../vectors/ollama-vectors').getOllamaVector(text, sourceSettings.apiUrl, sourceSettings.model, sourceSettings.keep, directories);
    }

    throw new Error(`Unknown vector source ${source}`);
@@ -73,6 +88,12 @@ async function getBatchVector(source, sourceSettings, texts, isQuery, directories) {
            case 'cohere':
                results.push(...await require('../vectors/cohere-vectors').getCohereBatchVector(batch, isQuery, directories, sourceSettings.model));
                break;
+            case 'llamacpp':
+                results.push(...await require('../vectors/llamacpp-vectors').getLlamaCppBatchVector(batch, sourceSettings.apiUrl, directories));
+                break;
+            case 'ollama':
+                results.push(...await require('../vectors/ollama-vectors').getOllamaBatchVector(batch, sourceSettings.apiUrl, sourceSettings.model, sourceSettings.keep, directories));
+                break;
            default:
                throw new Error(`Unknown vector source ${source}`);
        }
@@ -251,7 +272,23 @@ function getSourceSettings(source, request) {
        return {
            model: model,
        };
-    } else {
+    } else if (source === 'llamacpp') {
+        const apiUrl = String(request.headers['x-llamacpp-url']);
+        return {
+            apiUrl: apiUrl,
+        };
+    } else if (source === 'ollama') {
+        const apiUrl = String(request.headers['x-ollama-url']);
+        const model = String(request.headers['x-ollama-model']);
+        const keep = Boolean(request.headers['x-ollama-keep']);
+        return {
+            apiUrl: apiUrl,
+            model: model,
+            keep: keep,
+        };
+    } else {
        // Extras API settings to connect to the Extras embeddings provider
        let extrasUrl = '';
        let extrasKey = '';
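For the two new sources, getSourceSettings is driven entirely by request headers: x-llamacpp-url for llama.cpp, and x-ollama-url, x-ollama-model, and x-ollama-keep for Ollama. A hedged client-side sketch of how those headers might be sent (the endpoint path and payload shape are assumptions for illustration; only the header names come from the code above):

// Hypothetical browser-side query against the vectors endpoint.
async function queryOllamaVectors(searchText) {
    const response = await fetch('/api/vector/query', {
        method: 'POST',
        headers: {
            'Content-Type': 'application/json',
            'x-ollama-url': 'http://127.0.0.1:11434',
            'x-ollama-model': 'nomic-embed-text',
            'x-ollama-keep': '1',
        },
        body: JSON.stringify({ searchText: searchText, source: 'ollama' }),
    });
    return response.json();
}

One caveat: Boolean(request.headers['x-ollama-keep']) is truthy for any non-empty string, 'false' included, so a client disables keep-alive by omitting the header rather than sending a falsy-looking value.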

src/vectors/llamacpp-vectors.js

@@ -0,0 +1,61 @@
const fetch = require('node-fetch').default;
const { setAdditionalHeadersByType } = require('../additional-headers');
const { TEXTGEN_TYPES } = require('../constants');

/**
 * Gets the vectors for the given texts from LlamaCpp
 * @param {string[]} texts - The array of texts to get the vectors for
 * @param {string} apiUrl - The API URL
 * @param {import('../users').UserDirectoryList} directories - The directories object for the user
 * @returns {Promise<number[][]>} - The array of vectors for the texts
 */
async function getLlamaCppBatchVector(texts, apiUrl, directories) {
    const url = new URL(apiUrl);
    url.pathname = '/v1/embeddings';

    const headers = {};
    setAdditionalHeadersByType(headers, TEXTGEN_TYPES.LLAMACPP, apiUrl, directories);

    const response = await fetch(url, {
        method: 'POST',
        headers: {
            'Content-Type': 'application/json',
            ...headers,
        },
        body: JSON.stringify({ input: texts }),
    });

    if (!response.ok) {
        const responseText = await response.text();
        throw new Error(`LlamaCpp: Failed to get vector for text: ${response.statusText} ${responseText}`);
    }

    const data = await response.json();

    if (!Array.isArray(data?.data)) {
        throw new Error('API response was not an array');
    }

    // Sort data by x.index to ensure the order is correct
    data.data.sort((a, b) => a.index - b.index);

    const vectors = data.data.map(x => x.embedding);
    return vectors;
}

/**
 * Gets the vector for the given text from LlamaCpp
 * @param {string} text - The text to get the vector for
 * @param {string} apiUrl - The API URL
 * @param {import('../users').UserDirectoryList} directories - The directories object for the user
 * @returns {Promise<number[]>} - The vector for the text
 */
async function getLlamaCppVector(text, apiUrl, directories) {
    const vectors = await getLlamaCppBatchVector([text], apiUrl, directories);
    return vectors[0];
}

module.exports = {
    getLlamaCppBatchVector,
    getLlamaCppVector,
};
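Because the provider posts the entire batch as a single input array to the OpenAI-compatible /v1/embeddings route and re-sorts the results by index, one round trip covers any number of texts. A usage sketch, assuming a local llama.cpp server started with embeddings enabled:

const { getLlamaCppBatchVector } = require('./llamacpp-vectors');

// `directories` comes from the authenticated request in real usage.
async function embedBatch(directories) {
    const vectors = await getLlamaCppBatchVector(
        ['first text', 'second text'],
        'http://127.0.0.1:8080', // assumed llama.cpp server address
        directories,
    );
    console.log(vectors.length); // 2 embeddings, in input order
}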

src/vectors/ollama-vectors.js

@@ -0,0 +1,69 @@
const fetch = require('node-fetch').default;
const { setAdditionalHeadersByType } = require('../additional-headers');
const { TEXTGEN_TYPES } = require('../constants');

/**
 * Gets the vectors for the given texts from Ollama
 * @param {string[]} texts - The array of texts to get the vectors for
 * @param {string} apiUrl - The API URL
 * @param {string} model - The model to use
 * @param {boolean} keep - Keep the model loaded in memory
 * @param {import('../users').UserDirectoryList} directories - The directories object for the user
 * @returns {Promise<number[][]>} - The array of vectors for the texts
 */
async function getOllamaBatchVector(texts, apiUrl, model, keep, directories) {
    const result = [];
    for (const text of texts) {
        const vector = await getOllamaVector(text, apiUrl, model, keep, directories);
        result.push(vector);
    }
    return result;
}

/**
 * Gets the vector for the given text from Ollama
 * @param {string} text - The text to get the vector for
 * @param {string} apiUrl - The API URL
 * @param {string} model - The model to use
 * @param {boolean} keep - Keep the model loaded in memory
 * @param {import('../users').UserDirectoryList} directories - The directories object for the user
 * @returns {Promise<number[]>} - The vector for the text
 */
async function getOllamaVector(text, apiUrl, model, keep, directories) {
    const url = new URL(apiUrl);
    url.pathname = '/api/embeddings';

    const headers = {};
    setAdditionalHeadersByType(headers, TEXTGEN_TYPES.OLLAMA, apiUrl, directories);

    const response = await fetch(url, {
        method: 'POST',
        headers: {
            'Content-Type': 'application/json',
            ...headers,
        },
        body: JSON.stringify({
            prompt: text,
            model: model,
            keep_alive: keep ? -1 : undefined,
        }),
    });

    if (!response.ok) {
        const responseText = await response.text();
        throw new Error(`Ollama: Failed to get vector for text: ${response.statusText} ${responseText}`);
    }

    const data = await response.json();

    if (!Array.isArray(data?.embedding)) {
        throw new Error('API response was not an array');
    }

    return data.embedding;
}

module.exports = {
    getOllamaBatchVector,
    getOllamaVector,
};
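Unlike the llama.cpp provider, the batch helper here has to loop, because Ollama's /api/embeddings endpoint embeds one prompt per request. Passing keep = true sends keep_alive: -1, pinning the model in memory; false omits the field, so Ollama's default unload timeout applies. A usage sketch with an assumed URL and model name:

const { getOllamaVector } = require('./ollama-vectors');

// `directories` comes from the authenticated request in real usage.
async function embedOne(directories) {
    const vector = await getOllamaVector(
        'Hello world',
        'http://127.0.0.1:11434', // assumed Ollama server address
        'nomic-embed-text',       // assumed model; must be pulled in Ollama first
        true,                     // keep the model loaded (keep_alive: -1)
        directories,
    );
    console.log(vector.length); // embedding dimensionality, model-dependent
}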