Mirror of https://github.com/SillyTavern/SillyTavern.git (synced 2025-02-20 14:10:39 +01:00)
Update hidden reasoning model function
- Now more flexibly supports different API providers, and uses direct model names
- Fixes hidden reasoning time for OpenRouter and custom endpoints
This commit is contained in:
parent 33fa5aaab8
commit d48db9aded
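The change described in this commit boils down to a single flat list of hidden-reasoning model names plus a matching predicate, applied to whichever model name the active chat completion source reports. Below is a minimal stand-alone sketch of that idea, not the commit's actual code (the real isModelSupported and the per-provider switch appear in the diff that follows); isHiddenReasoning is a hypothetical, condensed wrapper used only for illustration:

    // Simplified sketch; entry names mirror the diff, the body is condensed.
    const FUNCS = {
        startsWith: (currentModel, supportedModel) => currentModel.startsWith(supportedModel),
    };

    const hiddenReasoningModels = [
        { name: 'o1', func: FUNCS.startsWith },
        { name: 'o3', func: FUNCS.startsWith },
        { name: 'gemini-2.0-flash-thinking-exp', func: FUNCS.startsWith },
        { name: 'gemini-2.0-pro-exp', func: FUNCS.startsWith },
    ];

    // True if the model name matches any entry, either by exact string or by
    // the entry's matching function (a prefix check here).
    function isHiddenReasoning(model) {
        return hiddenReasoningModels.some(entry =>
            typeof entry === 'string' ? entry === model : entry.func(model, entry.name));
    }

    console.log(isHiddenReasoning('o3-mini'));           // true  (prefix match)
    console.log(isHiddenReasoning('claude-3-5-sonnet')); // false

Because the list stores direct model names, any provider that exposes its model string (OpenRouter, custom endpoints, and so on) can reuse the same check instead of requiring its own per-source entry.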
@@ -79,41 +79,43 @@ export function isHiddenReasoningModel() {
         return false;
     }
 
-    /** @typedef {Object.<chat_completion_sources, { currentModel: string; models: ({ name: string; startsWith: boolean?; matchingFunc: (model: string) => boolean?; }|string)[]; }>} */
-    const hiddenReasoningModels = {
-        [chat_completion_sources.OPENAI]: {
-            currentModel: oai_settings.openai_model,
-            models: [
-                { name: 'o1', startsWith: true },
-                { name: 'o3', startsWith: true },
-            ],
-        },
-        [chat_completion_sources.MAKERSUITE]: {
-            currentModel: oai_settings.google_model,
-            models: [
-                { name: 'gemini-2.0-flash-thinking-exp', startsWith: true },
-                { name: 'gemini-2.0-pro-exp', startsWith: true },
-            ],
-        },
-    };
-
-    const sourceConfig = hiddenReasoningModels[oai_settings.chat_completion_source];
-    if (!sourceConfig) {
-        return false;
-    }
-
-    return sourceConfig.models.some(model => {
-        if (typeof model === 'string') {
-            return sourceConfig.currentModel === model;
-        }
-        if (model.startsWith) {
-            return (sourceConfig.currentModel).startsWith(model.name);
-        }
-        if (model.matchingFunc) {
-            return model.matchingFunc(sourceConfig.currentModel);
-        }
-        return false;
-    });
+    /** @typedef {{ (currentModel: string, supportedModel: string): boolean }} MatchingFunc */
+    /** @type {Record.<string, MatchingFunc>} */
+    const FUNCS = {
+        startsWith: (currentModel, supportedModel) => currentModel.startsWith(supportedModel),
+    };
+
+    /** @type {({ name: string; func?: MatchingFunc; }|string)[]} */
+    const hiddenReasoningModels = [
+        { name: 'o1', func: FUNCS.startsWith },
+        { name: 'o3', func: FUNCS.startsWith },
+        { name: 'gemini-2.0-flash-thinking-exp', func: FUNCS.startsWith },
+        { name: 'gemini-2.0-pro-exp', func: FUNCS.startsWith },
+    ];
+
+    function isModelSupported(model) {
+        for (const hiddenReasoningModel of hiddenReasoningModels) {
+            if (typeof model === 'string') {
+                return hiddenReasoningModel === model;
+            }
+            if (model.matchingFunc) {
+                return model.matchingFunc(model, hiddenReasoningModel);
+            }
+        }
+        return false;
+    }
+
+    switch (oai_settings.chat_completion_source) {
+        case chat_completion_sources.OPENAI: return isModelSupported(oai_settings.openai_model);
+        case chat_completion_sources.MAKERSUITE: return isModelSupported(oai_settings.google_model);
+        case chat_completion_sources.CLAUDE: return isModelSupported(oai_settings.claude_model);
+        case chat_completion_sources.OPENROUTER: return isModelSupported(oai_settings.openrouter_model);
+        case chat_completion_sources.ZEROONEAI: return isModelSupported(oai_settings.zerooneai_model);
+        case chat_completion_sources.MISTRALAI: return isModelSupported(oai_settings.mistralai_model);
+        case chat_completion_sources.CUSTOM: return isModelSupported(oai_settings.custom_model);
+        default: return false;
+    }
 }
 
 /**