mirror of https://github.com/SillyTavern/SillyTavern.git (synced 2025-06-05 21:59:27 +02:00)
Add o3 and o4-mini
@@ -2180,25 +2180,27 @@ async function sendOpenAIRequest(type, messages, signal) {
         generate_data['seed'] = oai_settings.seed;
     }
 
-    if (isOAI && (oai_settings.openai_model.startsWith('o1') || oai_settings.openai_model.startsWith('o3'))) {
-        generate_data.messages.forEach((msg) => {
-            if (msg.role === 'system') {
-                msg.role = 'user';
-            }
-        });
+    if (isOAI && /^(o1|o3|o4)/.test(oai_settings.openai_model)) {
         generate_data.max_completion_tokens = generate_data.max_tokens;
         delete generate_data.max_tokens;
         delete generate_data.logprobs;
         delete generate_data.top_logprobs;
-        delete generate_data.n;
-        delete generate_data.stop;
-        delete generate_data.logit_bias;
         delete generate_data.temperature;
         delete generate_data.top_p;
         delete generate_data.frequency_penalty;
         delete generate_data.presence_penalty;
-        delete generate_data.tools;
-        delete generate_data.tool_choice;
+        delete generate_data.stop;
+        delete generate_data.logit_bias;
+        if (oai_settings.openai_model.startsWith('o1')) {
+            generate_data.messages.forEach((msg) => {
+                if (msg.role === 'system') {
+                    msg.role = 'user';
+                }
+            });
+            delete generate_data.n;
+            delete generate_data.tools;
+            delete generate_data.tool_choice;
+        }
     }
 
     await eventSource.emit(event_types.CHAT_COMPLETION_SETTINGS_READY, generate_data);
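
The net effect of the hunk above: every OpenAI o-series model (o1, o3, o4) now takes the reasoning-model path, where the request sends max_completion_tokens instead of max_tokens and drops the sampling parameters those models reject, while only o1-prefixed models still get the system-to-user role downgrade and lose n, tools and tool_choice. A minimal standalone sketch of that behaviour, assuming a plain chat-completion payload object (the function name sanitizeForReasoningModel is illustrative, not part of the codebase):

// Illustrative sketch mirroring the hunk above; names are hypothetical,
// not SillyTavern's actual API surface.
function sanitizeForReasoningModel(payload, model) {
    if (!/^(o1|o3|o4)/.test(model)) {
        return payload; // non-reasoning models keep their payload as-is
    }
    // o-series models expect max_completion_tokens instead of max_tokens
    payload.max_completion_tokens = payload.max_tokens;
    delete payload.max_tokens;
    // sampling / penalty / logprob parameters these models do not accept
    for (const key of ['logprobs', 'top_logprobs', 'temperature', 'top_p',
        'frequency_penalty', 'presence_penalty', 'stop', 'logit_bias']) {
        delete payload[key];
    }
    if (model.startsWith('o1')) {
        // o1 models additionally take no system role and no tool calling
        for (const msg of payload.messages) {
            if (msg.role === 'system') {
                msg.role = 'user';
            }
        }
        delete payload.n;
        delete payload.tools;
        delete payload.tool_choice;
    }
    return payload;
}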
@@ -4129,9 +4131,12 @@ function getMaxContextOpenAI(value) {
     else if (value.includes('gpt-4.1')) {
         return max_1mil;
     }
-    else if (value.startsWith('o1') || value.startsWith('o3')) {
+    else if (value.startsWith('o1')) {
        return max_128k;
     }
+    else if (value.startsWith('o4') || value.startsWith('o3')) {
+        return max_200k;
+    }
     else if (value.includes('chatgpt-4o-latest') || value.includes('gpt-4-turbo') || value.includes('gpt-4o') || value.includes('gpt-4-1106') || value.includes('gpt-4-0125') || value.includes('gpt-4-vision')) {
         return max_128k;
     }
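
This hunk splits the context-size mapping: o1 models stay on the 128k limit, while o3 and o4 models now report the larger 200k window (with gpt-4.1 on the 1M path above them). A small sketch of the same prefix-based lookup; the constant values are written out here as assumptions for illustration, the real max_128k / max_200k / max_1mil identifiers are defined elsewhere in the project:

// Sketch of the lookup in getMaxContextOpenAI; constant values are assumed
// token counts for illustration only.
const max_128k = 128 * 1000;
const max_200k = 200 * 1000;
const max_1mil = 1000 * 1000;

function getMaxContext(model) {
    if (model.includes('gpt-4.1')) return max_1mil;
    if (model.startsWith('o1')) return max_128k;
    if (model.startsWith('o4') || model.startsWith('o3')) return max_200k;
    return max_128k; // stands in for the remaining branches of the real function
}

console.log(getMaxContext('o4-mini'));    // 200000
console.log(getMaxContext('o1-preview')); // 128000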
@@ -5150,11 +5155,15 @@ export function isImageInliningSupported() {
         'grok-2-vision',
         'grok-vision',
         'gpt-4.1',
+        'o3',
+        'o3-2025-04-16',
+        'o4-mini',
+        'o4-mini-2025-04-16',
     ];
 
     switch (oai_settings.chat_completion_source) {
         case chat_completion_sources.OPENAI:
-            return visionSupportedModels.some(model => oai_settings.openai_model.includes(model) && !oai_settings.openai_model.includes('gpt-4-turbo-preview'));
+            return visionSupportedModels.some(model => oai_settings.openai_model.includes(model) && !oai_settings.openai_model.includes('gpt-4-turbo-preview') && !oai_settings.openai_model.includes('o3-mini'));
         case chat_completion_sources.MAKERSUITE:
             return visionSupportedModels.some(model => oai_settings.google_model.includes(model));
         case chat_completion_sources.CLAUDE:
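
Finally, the o3 and o4-mini snapshots join the image-inlining allow-list, and the OPENAI branch gains an explicit o3-mini exclusion: the check matches by substring, so without it the new 'o3' entry would also flag the text-only o3-mini as vision-capable. A trimmed-down sketch of that substring check (the model list is shortened here to the entries relevant to this commit):

// Sketch of the OPENAI branch of isImageInliningSupported; list shortened for illustration.
const visionSupportedModels = ['gpt-4.1', 'o3', 'o3-2025-04-16', 'o4-mini', 'o4-mini-2025-04-16'];

function supportsImageInlining(model) {
    return visionSupportedModels.some(entry =>
        model.includes(entry)
        && !model.includes('gpt-4-turbo-preview')
        && !model.includes('o3-mini'));
}

console.log(supportsImageInlining('o4-mini-2025-04-16')); // true
console.log(supportsImageInlining('o3-mini'));            // false, excluded explicitly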