Merge branch 'staging' into woo-yeah

Cohee
2025-01-24 00:13:06 +02:00
7 changed files with 131 additions and 23 deletions

@@ -2692,7 +2692,7 @@
         <option value="windowai">Window AI</option>
     </optgroup>
 </select>
-<div class="inline-drawer wide100p" data-source="openai,claude,mistralai,makersuite">
+<div class="inline-drawer wide100p" data-source="openai,claude,mistralai,makersuite,deepseek">
     <div class="inline-drawer-toggle inline-drawer-header">
         <b data-i18n="Reverse Proxy">Reverse Proxy</b>
         <div class="fa-solid fa-circle-chevron-down inline-drawer-icon down"></div>
@@ -2755,7 +2755,7 @@
         </div>
     </div>
 </div>
-<div id="ReverseProxyWarningMessage" data-source="openai,claude,mistralai,makersuite">
+<div id="ReverseProxyWarningMessage" data-source="openai,claude,mistralai,makersuite,deepseek">
     <div class="reverse_proxy_warning">
         <b>
             <div data-i18n="Using a proxy that you're not running yourself is a risk to your data privacy.">

@@ -724,6 +724,7 @@ async function getSystemMessages() {
         is_user: false,
         is_system: true,
         mes: await renderTemplateAsync('assistantNote'),
+        uses_system_ui: true,
         extra: {
             isSmallSys: true,
         },

@@ -11,6 +11,7 @@ import {
     getCurrentChatId,
     getRequestHeaders,
     hideSwipeButtons,
+    name1,
     name2,
     reloadCurrentChat,
     saveChatDebounced,
@@ -21,6 +22,7 @@ import {
     chat_metadata,
     neutralCharacterName,
     updateChatMetadata,
+    system_message_types,
 } from '../script.js';
 import { selected_group } from './group-chats.js';
 import { power_user } from './power-user.js';
@@ -34,6 +36,7 @@ import {
     humanFileSize,
     saveBase64AsFile,
     extractTextFromOffice,
+    download,
 } from './utils.js';
 import { extension_settings, renderExtensionTemplateAsync, saveMetadataDebounced } from './extensions.js';
 import { POPUP_RESULT, POPUP_TYPE, Popup, callGenericPopup } from './popup.js';
@@ -41,6 +44,7 @@ import { ScraperManager } from './scrapers.js';
 import { DragAndDropHandler } from './dragdrop.js';
 import { renderTemplateAsync } from './templates.js';
 import { t } from './i18n.js';
+import { humanizedDateTime } from './RossAscends-mods.js';

 /**
  * @typedef {Object} FileAttachment
@@ -1437,6 +1441,19 @@ jQuery(function () {
         await viewMessageFile(messageId);
     });

+    $(document).on('click', '.assistant_note_export', async function () {
+        const chatToSave = [
+            {
+                user_name: name1,
+                character_name: name2,
+                chat_metadata: chat_metadata,
+            },
+            ...chat.filter(x => x?.extra?.type !== system_message_types.ASSISTANT_NOTE),
+        ];
+        download(JSON.stringify(chatToSave, null, 4), `Assistant - ${humanizedDateTime()}.json`, 'application/json');
+    });
+
     // Do not change. #attachFile is added by extension.
     $(document).on('click', '#attachFile', function () {
        $('#file_form_input').trigger('click');
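
For reference, the .assistant_note_export handler added above writes a single pretty-printed JSON file: the first array element is a header object (user name, character name, chat metadata) and the remaining elements are the chat messages with the assistant note itself filtered out. A minimal sketch of the file contents, with illustrative values; only the header field names come from the diff, the message fields shown are assumptions:

// Sketch of the "Assistant - <date>.json" payload written by download(...) above.
// Values are illustrative; the message fields are assumed, not taken from this diff.
const exportedChat = [
    {
        user_name: 'User',            // name1
        character_name: 'Assistant',  // name2
        chat_metadata: {},            // current chat_metadata object
    },
    // ...followed by every chat message except the assistant note, e.g.:
    {
        name: 'Assistant',
        is_user: false,
        mes: 'Hello! How can I help?',
    },
];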

@@ -1922,7 +1922,7 @@ async function sendOpenAIRequest(type, messages, signal) {
     }

     // Proxy is only supported for Claude, OpenAI, Mistral, and Google MakerSuite
-    if (oai_settings.reverse_proxy && [chat_completion_sources.CLAUDE, chat_completion_sources.OPENAI, chat_completion_sources.MISTRALAI, chat_completion_sources.MAKERSUITE].includes(oai_settings.chat_completion_source)) {
+    if (oai_settings.reverse_proxy && [chat_completion_sources.CLAUDE, chat_completion_sources.OPENAI, chat_completion_sources.MISTRALAI, chat_completion_sources.MAKERSUITE, chat_completion_sources.DEEPSEEK].includes(oai_settings.chat_completion_source)) {
         await validateReverseProxy();
         generate_data['reverse_proxy'] = oai_settings.reverse_proxy;
         generate_data['proxy_password'] = oai_settings.proxy_password;
@@ -3371,7 +3371,7 @@ async function getStatusOpen() {
         chat_completion_source: oai_settings.chat_completion_source,
     };

-    if (oai_settings.reverse_proxy && [chat_completion_sources.CLAUDE, chat_completion_sources.OPENAI, chat_completion_sources.MISTRALAI, chat_completion_sources.MAKERSUITE].includes(oai_settings.chat_completion_source)) {
+    if (oai_settings.reverse_proxy && [chat_completion_sources.CLAUDE, chat_completion_sources.OPENAI, chat_completion_sources.MISTRALAI, chat_completion_sources.MAKERSUITE, chat_completion_sources.DEEPSEEK].includes(oai_settings.chat_completion_source)) {
         await validateReverseProxy();
     }
@@ -4750,7 +4750,7 @@ async function onConnectButtonClick(e) {
         await writeSecret(SECRET_KEYS.DEEPSEEK, api_key_deepseek);
     }

-    if (!secret_state[SECRET_KEYS.DEEPSEEK]) {
+    if (!secret_state[SECRET_KEYS.DEEPSEEK] && !oai_settings.reverse_proxy) {
         console.log('No secret key saved for DeepSeek');
         return;
     }

@@ -1,3 +1,9 @@
-<div>
-    <b data-i18n="Note:">Note:</b> <span data-i18n="this chat is temporary and will be deleted as soon as you leave it.">this chat is temporary and will be deleted as soon as you leave it.</span>
+<div data-type="assistant_note">
+    <div>
+        <b data-i18n="Note:">Note:</b> <span data-i18n="this chat is temporary and will be deleted as soon as you leave it.">this chat is temporary and will be deleted as soon as you leave it.</span>
+        <span>Click the button to save it as a file.</span>
+    </div>
+    <div class="assistant_note_export menu_button menu_button_icon" title="Export as JSONL">
+        <i class="fa-solid fa-file-export"></i>
+    </div>
 </div>

@@ -5795,3 +5795,17 @@ body:not(.movingUI) .drawer-content.maximized {
 .alternate_greetings_list {
     overflow-y: scroll;
 }
+
+.mes_text div[data-type="assistant_note"]:has(.assistant_note_export) {
+    display: flex;
+    flex-direction: row;
+    flex-wrap: nowrap;
+    justify-content: space-between;
+    align-items: center;
+    gap: 10px;
+    padding: 0 2px;
+}
+
+.mes_text div[data-type="assistant_note"]:has(.assistant_note_export)>div:not(.assistant_note_export) {
+    flex: 1;
+}

@@ -646,6 +646,89 @@ async function sendCohereRequest(request, response) {
     }
 }

+/**
+ * Sends a request to DeepSeek API.
+ * @param {express.Request} request Express request
+ * @param {express.Response} response Express response
+ */
+async function sendDeepSeekRequest(request, response) {
+    const apiUrl = new URL(request.body.reverse_proxy || API_DEEPSEEK).toString();
+    const apiKey = request.body.reverse_proxy ? request.body.proxy_password : readSecret(request.user.directories, SECRET_KEYS.DEEPSEEK);
+
+    if (!apiKey && !request.body.reverse_proxy) {
+        console.log('DeepSeek API key is missing.');
+        return response.status(400).send({ error: true });
+    }
+
+    const controller = new AbortController();
+    request.socket.removeAllListeners('close');
+    request.socket.on('close', function () {
+        controller.abort();
+    });
+
+    try {
+        let bodyParams = {};
+
+        if (request.body.logprobs > 0) {
+            bodyParams['top_logprobs'] = request.body.logprobs;
+            bodyParams['logprobs'] = true;
+        }
+
+        const postProcessType = String(request.body.model).endsWith('-reasoner') ? 'deepseek-reasoner' : 'deepseek';
+        const processedMessages = postProcessPrompt(request.body.messages, postProcessType, getPromptNames(request));
+
+        const requestBody = {
+            'messages': processedMessages,
+            'model': request.body.model,
+            'temperature': request.body.temperature,
+            'max_tokens': request.body.max_tokens,
+            'stream': request.body.stream,
+            'presence_penalty': request.body.presence_penalty,
+            'frequency_penalty': request.body.frequency_penalty,
+            'top_p': request.body.top_p,
+            'stop': request.body.stop,
+            'seed': request.body.seed,
+            ...bodyParams,
+        };
+
+        const config = {
+            method: 'POST',
+            headers: {
+                'Content-Type': 'application/json',
+                'Authorization': 'Bearer ' + apiKey,
+            },
+            body: JSON.stringify(requestBody),
+            signal: controller.signal,
+        };
+
+        console.log('DeepSeek request:', requestBody);
+
+        const generateResponse = await fetch(apiUrl + '/chat/completions', config);
+
+        if (request.body.stream) {
+            forwardFetchResponse(generateResponse, response);
+        } else {
+            if (!generateResponse.ok) {
+                const errorText = await generateResponse.text();
+                console.log(`DeepSeek API returned error: ${generateResponse.status} ${generateResponse.statusText} ${errorText}`);
+                const errorJson = tryParse(errorText) ?? { error: true };
+                return response.status(500).send(errorJson);
+            }
+            const generateResponseJson = await generateResponse.json();
+            console.log('DeepSeek response:', generateResponseJson);
+            return response.send(generateResponseJson);
+        }
+    } catch (error) {
+        console.log('Error communicating with DeepSeek API: ', error);
+        if (!response.headersSent) {
+            response.send({ error: true });
+        } else {
+            response.end();
+        }
+    }
+}
+
 export const router = express.Router();

 router.post('/status', jsonParser, async function (request, response_getstatus_openai) {
@@ -690,8 +773,8 @@ router.post('/status', jsonParser, async function (request, response_getstatus_openai) {
         api_key_openai = readSecret(request.user.directories, SECRET_KEYS.NANOGPT);
         headers = {};
     } else if (request.body.chat_completion_source === CHAT_COMPLETION_SOURCES.DEEPSEEK) {
-        api_url = API_DEEPSEEK.replace('/beta', '');
-        api_key_openai = readSecret(request.user.directories, SECRET_KEYS.DEEPSEEK);
+        api_url = new URL(request.body.reverse_proxy || API_DEEPSEEK.replace('/beta', ''));
+        api_key_openai = request.body.reverse_proxy ? request.body.proxy_password : readSecret(request.user.directories, SECRET_KEYS.DEEPSEEK);
         headers = {};
     } else {
         console.log('This chat completion source is not supported yet.');
@@ -851,6 +934,7 @@ router.post('/generate', jsonParser, function (request, response) {
         case CHAT_COMPLETION_SOURCES.MAKERSUITE: return sendMakerSuiteRequest(request, response);
         case CHAT_COMPLETION_SOURCES.MISTRALAI: return sendMistralAIRequest(request, response);
         case CHAT_COMPLETION_SOURCES.COHERE: return sendCohereRequest(request, response);
+        case CHAT_COMPLETION_SOURCES.DEEPSEEK: return sendDeepSeekRequest(request, response);
     }

     let apiUrl;
@@ -964,19 +1048,6 @@ router.post('/generate', jsonParser, function (request, response) {
         apiKey = readSecret(request.user.directories, SECRET_KEYS.BLOCKENTROPY);
         headers = {};
         bodyParams = {};
-    } else if (request.body.chat_completion_source === CHAT_COMPLETION_SOURCES.DEEPSEEK) {
-        apiUrl = API_DEEPSEEK;
-        apiKey = readSecret(request.user.directories, SECRET_KEYS.DEEPSEEK);
-        headers = {};
-        bodyParams = {};
-
-        if (request.body.logprobs > 0) {
-            bodyParams['top_logprobs'] = request.body.logprobs;
-            bodyParams['logprobs'] = true;
-        }
-
-        const postProcessType = String(request.body.model).endsWith('-reasoner') ? 'deepseek-reasoner' : 'deepseek';
-        request.body.messages = postProcessPrompt(request.body.messages, postProcessType, getPromptNames(request));
     } else {
         console.log('This chat completion source is not supported yet.');
         return response.status(400).send({ error: true });
@@ -1114,4 +1185,3 @@ router.post('/generate', jsonParser, function (request, response) {
         }
     }
 });