Fix arch in models list, remove log
parent c1e126985d
commit 21e0a42060
@@ -1226,7 +1226,7 @@ function calculateOpenRouterCost() {
 }
 
 function saveModelList(data) {
-    model_list = data.map((model) => ({ id: model.id, context_length: model.context_length, pricing: model.pricing }));
+    model_list = data.map((model) => ({ id: model.id, context_length: model.context_length, pricing: model.pricing, architecture: model.architecture }));
     model_list.sort((a, b) => a?.id && b?.id && a.id.localeCompare(b.id));
 
     if (oai_settings.chat_completion_source == chat_completion_sources.OPENROUTER) {
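Note: saveModelList now keeps the architecture field from the OpenRouter /models payload alongside id, context_length, and pricing. A minimal sketch of how a consumer could use it; the getMultimodalModels helper and the architecture.modality shape are assumptions for illustration, not part of this commit:

// Hypothetical helper: pick saved OpenRouter entries whose architecture reports image input.
// Assumes each saved entry now looks like { id, context_length, pricing, architecture }.
function getMultimodalModels(modelList) {
    return modelList.filter((model) => model?.architecture?.modality?.includes('image'));
}

// Example usage: getMultimodalModels(model_list).map((model) => model.id);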
@@ -308,7 +308,7 @@ function registerEndpoints(app, jsonParser) {
         if (model == 'llama') {
             const jsonBody = req.body.flatMap(x => Object.values(x)).join('\n\n');
             const llamaResult = await countSentencepieceTokens(spp_llama, jsonBody);
-            console.log('jsonBody', jsonBody, 'llamaResult', llamaResult);
+            // console.log('jsonBody', jsonBody, 'llamaResult', llamaResult);
             num_tokens = llamaResult.count;
             return res.send({ "token_count": num_tokens });
         }
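For reference, the handler above flattens the posted array of message objects into one newline-separated string, counts tokens with the llama SentencePiece model, and replies with { token_count }. A rough client-side sketch; the '/tokenize_llama' route name is a placeholder and not taken from this commit:

// Hypothetical client call against the endpoint registered in registerEndpoints.
// The body is an array of objects whose string values get joined server-side with '\n\n'.
async function countLlamaTokens(messages) {
    const response = await fetch('/tokenize_llama', {
        method: 'POST',
        headers: { 'Content-Type': 'application/json' },
        body: JSON.stringify(messages), // e.g. [{ role: 'user', content: 'Hello' }]
    });
    const data = await response.json();
    return data.token_count;
}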