Mirror of https://github.com/SillyTavern/SillyTavern.git
Add ollama and llamacpp as vector sources
@@ -5,7 +5,18 @@ const sanitize = require('sanitize-filename');
 const { jsonParser } = require('../express-common');
 
 // Don't forget to add new sources to the SOURCES array
-const SOURCES = ['transformers', 'mistral', 'openai', 'extras', 'palm', 'togetherai', 'nomicai', 'cohere'];
+const SOURCES = [
+    'transformers',
+    'mistral',
+    'openai',
+    'extras',
+    'palm',
+    'togetherai',
+    'nomicai',
+    'cohere',
+    'ollama',
+    'llamacpp',
+];
 
 /**
  * Gets the vector for the given text from the given source.
@@ -32,6 +43,10 @@ async function getVector(source, sourceSettings, text, isQuery, directories) {
             return require('../vectors/makersuite-vectors').getMakerSuiteVector(text, directories);
         case 'cohere':
             return require('../vectors/cohere-vectors').getCohereVector(text, isQuery, directories, sourceSettings.model);
+        case 'llamacpp':
+            return require('../vectors/llamacpp-vectors').getLlamaCppVector(text, sourceSettings.apiUrl, directories);
+        case 'ollama':
+            return require('../vectors/ollama-vectors').getOllamaVector(text, sourceSettings.apiUrl, sourceSettings.model, sourceSettings.keep, directories);
     }
 
     throw new Error(`Unknown vector source ${source}`);
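The two new require targets, ../vectors/llamacpp-vectors and ../vectors/ollama-vectors, are not shown in this commit view. Below is a minimal sketch of what the llama.cpp module could look like, assuming a llama.cpp server started with --embedding that exposes POST /embedding taking { content } and returning { embedding }; the directories argument is accepted only to mirror the call sites above.

// Hypothetical sketch of the llamacpp-vectors module (not the actual file from this commit).
// Assumes a llama.cpp server launched with --embedding, exposing POST /embedding
// that accepts { content } and returns { embedding: [...] }. Node 18+ global fetch.

/**
 * Gets the embedding vector for a single text from a llama.cpp server.
 * @param {string} text Text to embed
 * @param {string} apiUrl Base URL of the llama.cpp server, e.g. http://127.0.0.1:8080
 * @param {object} _directories Unused here; kept to match the call site
 * @returns {Promise<number[]>} Embedding vector
 */
async function getLlamaCppVector(text, apiUrl, _directories) {
    const response = await fetch(new URL('/embedding', apiUrl), {
        method: 'POST',
        headers: { 'Content-Type': 'application/json' },
        body: JSON.stringify({ content: text }),
    });

    if (!response.ok) {
        throw new Error(`llama.cpp embeddings request failed: ${response.status}`);
    }

    const data = await response.json();
    return data.embedding;
}

/**
 * Gets embedding vectors for a batch of texts, one request per text.
 */
async function getLlamaCppBatchVector(texts, apiUrl, directories) {
    return Promise.all(texts.map(text => getLlamaCppVector(text, apiUrl, directories)));
}

module.exports = { getLlamaCppVector, getLlamaCppBatchVector };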
@@ -73,6 +88,12 @@ async function getBatchVector(source, sourceSettings, texts, isQuery, directorie
             case 'cohere':
                 results.push(...await require('../vectors/cohere-vectors').getCohereBatchVector(batch, isQuery, directories, sourceSettings.model));
                 break;
+            case 'llamacpp':
+                results.push(...await require('../vectors/llamacpp-vectors').getLlamaCppBatchVector(batch, sourceSettings.apiUrl, directories));
+                break;
+            case 'ollama':
+                results.push(...await require('../vectors/ollama-vectors').getOllamaBatchVector(batch, sourceSettings.apiUrl, sourceSettings.model, sourceSettings.keep, directories));
+                break;
             default:
                 throw new Error(`Unknown vector source ${source}`);
         }
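Likewise, a sketch of what the ollama-vectors module might contain, assuming Ollama's POST /api/embeddings endpoint ({ model, prompt, keep_alive } in, { embedding } out). Mapping the keep flag to keep_alive: -1 (keep the model loaded between requests) is an assumption, not something shown in this commit.

// Hypothetical sketch of the ollama-vectors module (not the actual file from this commit).
// Assumes Ollama's POST /api/embeddings endpoint, which takes { model, prompt, keep_alive }
// and returns { embedding: [...] }. Node 18+ global fetch.

async function getOllamaVector(text, apiUrl, model, keep, _directories) {
    const response = await fetch(new URL('/api/embeddings', apiUrl), {
        method: 'POST',
        headers: { 'Content-Type': 'application/json' },
        body: JSON.stringify({
            model: model,
            prompt: text,
            // Assumed mapping: keep === true pins the model in memory between requests.
            keep_alive: keep ? -1 : undefined,
        }),
    });

    if (!response.ok) {
        throw new Error(`Ollama embeddings request failed: ${response.status}`);
    }

    const data = await response.json();
    return data.embedding;
}

// For simplicity this sketch embeds batch texts individually.
async function getOllamaBatchVector(texts, apiUrl, model, keep, directories) {
    return Promise.all(texts.map(text => getOllamaVector(text, apiUrl, model, keep, directories)));
}

module.exports = { getOllamaVector, getOllamaBatchVector };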
@@ -251,7 +272,23 @@ function getSourceSettings(source, request) {
         return {
             model: model,
         };
-    }else {
+    } else if (source === 'llamacpp') {
+        const apiUrl = String(request.headers['x-llamacpp-url']);
+
+        return {
+            apiUrl: apiUrl,
+        };
+    } else if (source === 'ollama') {
+        const apiUrl = String(request.headers['x-ollama-url']);
+        const model = String(request.headers['x-ollama-model']);
+        const keep = Boolean(request.headers['x-ollama-keep']);
+
+        return {
+            apiUrl: apiUrl,
+            model: model,
+            keep: keep,
+        };
+    } else {
         // Extras API settings to connect to the Extras embeddings provider
         let extrasUrl = '';
         let extrasKey = '';
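For reference, the new headers read by getSourceSettings() would be supplied by the client with each vectors request. A hedged example follows; only the header names come from this commit, while the route name, body fields, and model name are assumptions for illustration.

// Hypothetical client-side call; route and body fields are assumed, not taken from this commit.
async function queryWithOllama() {
    return fetch('/api/vector/query', {
        method: 'POST',
        headers: {
            'Content-Type': 'application/json',
            'x-ollama-url': 'http://127.0.0.1:11434', // example Ollama endpoint
            'x-ollama-model': 'nomic-embed-text',     // example embedding model
            'x-ollama-keep': '1',                     // any non-empty value becomes keep = true
            // A llama.cpp source would instead send:
            // 'x-llamacpp-url': 'http://127.0.0.1:8080',
        },
        body: JSON.stringify({ source: 'ollama', searchText: 'Hello', collectionId: 'example' }),
    });
}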