Mirror of https://github.com/SillyTavern/SillyTavern.git (synced 2025-06-05 21:59:27 +02:00).
Commit: "Another crack at o3" — extends the existing o1-specific handling to cover o3 models.
This commit is contained in:
@@ -3075,7 +3075,7 @@ export function isStreamingEnabled() {
         (main_api == 'openai' &&
             oai_settings.stream_openai &&
             !noStreamSources.includes(oai_settings.chat_completion_source) &&
-            !(oai_settings.chat_completion_source == chat_completion_sources.OPENAI && oai_settings.openai_model.startsWith('o1-')) &&
+            !(oai_settings.chat_completion_source == chat_completion_sources.OPENAI && (oai_settings.openai_model.startsWith('o1') || oai_settings.openai_model.startsWith('o3'))) &&
             !(oai_settings.chat_completion_source == chat_completion_sources.MAKERSUITE && oai_settings.google_model.includes('bison')))
         || (main_api == 'kobold' && kai_settings.streaming_kobold && kai_flags.can_use_streaming)
         || (main_api == 'novel' && nai_settings.streaming_novel)
@@ -1869,7 +1869,7 @@ async function sendOpenAIRequest(type, messages, signal) {
     const isQuiet = type === 'quiet';
     const isImpersonate = type === 'impersonate';
     const isContinue = type === 'continue';
-    const stream = oai_settings.stream_openai && !isQuiet && !isScale && !(isGoogle && oai_settings.google_model.includes('bison')) && !(isOAI && oai_settings.openai_model.startsWith('o1-'));
+    const stream = oai_settings.stream_openai && !isQuiet && !isScale && !(isGoogle && oai_settings.google_model.includes('bison')) && !(isOAI && (oai_settings.openai_model.startsWith('o1') || oai_settings.openai_model.startsWith('o3')));
     const useLogprobs = !!power_user.request_token_probabilities;
     const canMultiSwipe = oai_settings.n > 1 && !isContinue && !isImpersonate && !isQuiet && (isOAI || isCustom);
@@ -2050,7 +2050,7 @@ async function sendOpenAIRequest(type, messages, signal) {
         await ToolManager.registerFunctionToolsOpenAI(generate_data);
     }

-    if (isOAI && oai_settings.openai_model.startsWith('o1-')) {
+    if (isOAI && (oai_settings.openai_model.startsWith('o1') || oai_settings.openai_model.startsWith('o3'))) {
         generate_data.messages.forEach((msg) => {
             if (msg.role === 'system') {
                 msg.role = 'user';
@@ -4027,7 +4027,7 @@ function getMaxContextOpenAI(value) {
     if (oai_settings.max_context_unlocked) {
         return unlocked_max;
     }
-    else if (value.startsWith('o1-')) {
+    else if (value.startsWith('o1') || value.startsWith('o3')) {
         return max_128k;
     }
     else if (value.includes('chatgpt-4o-latest') || value.includes('gpt-4-turbo') || value.includes('gpt-4o') || value.includes('gpt-4-1106') || value.includes('gpt-4-0125') || value.includes('gpt-4-vision')) {
@@ -1107,10 +1107,6 @@ router.post('/generate', jsonParser, function (request, response) {
         ...bodyParams,
     };

-    if (request.body.chat_completion_source === CHAT_COMPLETION_SOURCES.OPENAI && String(requestBody.model).startsWith('o3')) {
-        delete requestBody.max_tokens;
-    }
-
     if (request.body.chat_completion_source === CHAT_COMPLETION_SOURCES.CUSTOM) {
         excludeKeysByYaml(requestBody, request.body.custom_exclude_body);
     }
@@ -398,7 +398,7 @@ function getWebTokenizersChunks(tokenizer, ids) {
  * @returns {string} Tokenizer model to use
  */
 export function getTokenizerModel(requestModel) {
-    if (requestModel.includes('o1-preview') || requestModel.includes('o1-mini')) {
+    if (requestModel.includes('o1-preview') || requestModel.includes('o1-mini') || requestModel.includes('o3-mini')) {
         return 'gpt-4o';
     }
|
Reference in New Issue
Block a user