Textgen: Add model downloading for TabbyAPI

Tabby has a HuggingFace downloader API endpoint. Add direct support
in SillyTavern using the same approach as the Ollama downloader, but with
the correct parameters exposed.

Signed-off-by: kingbri <bdashore3@proton.me>
kingbri
2024-07-07 14:02:51 -04:00
parent ed0e522c6d
commit 69077f6a6e
4 changed files with 169 additions and 1 deletion


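For context, the new route below simply forwards the client's JSON body to TabbyAPI's downloader, so the payload mirrors whatever parameters Tabby's /v1/download accepts. A minimal sketch of such a payload follows; apart from api_server (which the proxy itself reads), the field names are assumptions about TabbyAPI's HuggingFace downloader and may differ between versions.

    // Hypothetical payload for the proxy route added in this commit.
    // Only api_server is consumed by the SillyTavern handler; the remaining
    // fields are assumed TabbyAPI downloader parameters, not taken from this diff.
    const downloadRequest = {
        api_server: 'http://127.0.0.1:5000', // TabbyAPI base URL
        repo_id: 'author/model-repo',        // HuggingFace repository to pull (assumed field)
        revision: 'main',                    // optional branch/revision (assumed field)
        folder_name: 'model-repo',           // destination folder on the Tabby host (assumed field)
    };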
@@ -588,7 +588,36 @@ llamacpp.post('/slots', jsonParser, async function (request, response) {
}
});
const tabby = express.Router();

tabby.post('/download', jsonParser, async function (request, response) {
    try {
        // Strip a trailing slash from the Tabby server URL
        const baseUrl = String(request.body.api_server).replace(/\/$/, '');

        const args = {
            method: 'POST',
            headers: { 'Content-Type': 'application/json' },
            body: JSON.stringify(request.body),
            timeout: 0,
        };

        setAdditionalHeaders(request, args, baseUrl);

        // Forward the request body to TabbyAPI's HuggingFace downloader endpoint
        const fetchResponse = await fetch(`${baseUrl}/v1/download`, args);

        if (!fetchResponse.ok) {
            console.log('Download error:', fetchResponse.status, fetchResponse.statusText);
            return response.status(fetchResponse.status).send({ error: true });
        }

        return response.send({ ok: true });
    } catch (error) {
        console.error(error);
        return response.status(500).send({ error: true });
    }
});
router.use('/ollama', ollama);
router.use('/llamacpp', llamacpp);
router.use('/tabby', tabby);
module.exports = { router };
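As a usage note, the frontend would call this proxy rather than TabbyAPI directly. A rough sketch of such a call, assuming the router is mounted under /api/backends/text-completions (the mount point is not shown in this diff) and using a payload like the one sketched above:

    // Hypothetical client-side call; the route prefix and body fields are
    // assumptions, since neither the router mount point nor TabbyAPI's
    // downloader parameters appear in this diff.
    const result = await fetch('/api/backends/text-completions/tabby/download', {
        method: 'POST',
        headers: { 'Content-Type': 'application/json' },
        body: JSON.stringify({
            api_server: 'http://127.0.0.1:5000',
            repo_id: 'author/model-repo',
        }),
    });

    if (result.ok) {
        console.log('Download started on the TabbyAPI host');
    }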