added a web-search toggle for Cohere API

Ondřej Sakala 2024-04-14 02:25:11 +02:00
parent 8ae9212cc5
commit 5ce625fb6b
3 changed files with 75 additions and 21 deletions

@@ -457,6 +457,20 @@
            When this is off, responses will be displayed all at once when they are complete.
        </span>
    </div>
</div>
<div class="range-block">
    <label for="websearch_toggle" title="Enable Cohere web-search connector" data-i18n="[title]Enable Cohere web-search connector" class="checkbox_label widthFreeExpand">
        <input id="websearch_toggle" type="checkbox" /><span data-i18n="Web-search">
            Web-search</span>
    </label>
    <div class="toggle-description justifyLeft">
        <span data-i18n="Allow the model to use the web-search connector.">
            Allow the model to use the web-search connector.
        </span><br>
        <span data-i18n="This is only available for the Cohere API.">
            This is only available for the Cohere API.
        </span>
    </div>
</div>
<div class="range-block" data-source="openai,claude,windowai,openrouter,ai21,scale,makersuite,mistralai,custom,cohere">
    <div class="range-block-title" data-i18n="Temperature">

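The toggle's description notes that the connector only applies to the Cohere API, and this template already scopes other blocks to specific backends with a data-source attribute (see the Temperature block above, which lists cohere among its sources). A minimal sketch of how the new block could be limited to Cohere in the same way, assuming that convention applies here; the commit itself does not do this:

<div class="range-block" data-source="cohere">
    <!-- websearch_toggle label and description as above -->
</div>
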
@@ -209,6 +209,7 @@ const default_settings = {
top_a_openai: 1,
repetition_penalty_openai: 1,
stream_openai: false,
websearch_cohere: false,
openai_max_context: max_4k,
openai_max_tokens: 300,
wrap_in_quotes: false,
@@ -278,6 +279,7 @@ const oai_settings = {
top_a_openai: 1,
repetition_penalty_openai: 1,
stream_openai: false,
websearch_cohere: false,
openai_max_context: max_4k,
openai_max_tokens: 300,
wrap_in_quotes: false,
@@ -1616,6 +1618,7 @@ async function sendOpenAIRequest(type, messages, signal) {
const isImpersonate = type === 'impersonate';
const isContinue = type === 'continue';
const stream = oai_settings.stream_openai && !isQuiet && !isScale && !isAI21 && !(isGoogle && oai_settings.google_model.includes('bison'));
const websearch = oai_settings.websearch_cohere && isCohere;
const useLogprobs = !!power_user.request_token_probabilities;
const canMultiSwipe = oai_settings.n > 1 && !isContinue && !isImpersonate && !isQuiet && (isOAI || isCustom);
@@ -1662,6 +1665,7 @@ async function sendOpenAIRequest(type, messages, signal) {
'top_p': Number(oai_settings.top_p_openai),
'max_tokens': oai_settings.openai_max_tokens,
'stream': stream,
'websearch': websearch,
'logit_bias': logit_bias,
'stop': getCustomStoppingStrings(openai_max_stop_strings),
'chat_completion_source': oai_settings.chat_completion_source,
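
On the client, websearch is true only when the active source is Cohere and the toggle is checked, and it travels in the same generate_data object as stream and chat_completion_source, so the backend reads it as request.body.websearch. A rough sketch of that payload, limited to fields visible in this diff; the name and values below are purely illustrative:

// Illustrative payload shape only: values are made up, and fields not shown
// in this diff are omitted.
const exampleGenerateData = {
    'top_p': 1,
    'max_tokens': 300,
    'stream': false,
    'websearch': true, // Cohere source with the toggle enabled
    'logit_bias': {},
    'stop': [],
    'chat_completion_source': 'cohere',
};
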
@@ -2600,6 +2604,7 @@ function loadOpenAISettings(data, settings) {
oai_settings.min_p_openai = settings.min_p_openai ?? default_settings.min_p_openai;
oai_settings.repetition_penalty_openai = settings.repetition_penalty_openai ?? default_settings.repetition_penalty_openai;
oai_settings.stream_openai = settings.stream_openai ?? default_settings.stream_openai;
oai_settings.websearch_cohere = settings.websearch_cohere ?? default_settings.websearch_cohere;
oai_settings.openai_max_context = settings.openai_max_context ?? default_settings.openai_max_context;
oai_settings.openai_max_tokens = settings.openai_max_tokens ?? default_settings.openai_max_tokens;
oai_settings.bias_preset_selected = settings.bias_preset_selected ?? default_settings.bias_preset_selected;
@@ -2661,6 +2666,7 @@ function loadOpenAISettings(data, settings) {
if (settings.claude_use_sysprompt !== undefined) oai_settings.claude_use_sysprompt = !!settings.claude_use_sysprompt;
if (settings.use_alt_scale !== undefined) { oai_settings.use_alt_scale = !!settings.use_alt_scale; updateScaleForm(); }
$('#stream_toggle').prop('checked', oai_settings.stream_openai);
$('#websearch_toggle').prop('checked', oai_settings.websearch_cohere);
$('#api_url_scale').val(oai_settings.api_url_scale);
$('#openai_proxy_password').val(oai_settings.proxy_password);
$('#claude_assistant_prefill').val(oai_settings.assistant_prefill);
@@ -2955,6 +2961,7 @@ async function saveOpenAIPreset(name, settings, triggerUi = true) {
personality_format: settings.personality_format,
group_nudge_prompt: settings.group_nudge_prompt,
stream_openai: settings.stream_openai,
websearch_cohere: settings.websearch_cohere,
prompts: settings.prompts,
prompt_order: settings.prompt_order,
api_url_scale: settings.api_url_scale,
@@ -3331,6 +3338,7 @@ function onSettingsPresetChange() {
personality_format: ['#personality_format_textarea', 'personality_format', false],
group_nudge_prompt: ['#group_nudge_prompt_textarea', 'group_nudge_prompt', false],
stream_openai: ['#stream_toggle', 'stream_openai', true],
websearch_cohere: ['#websearch_toggle', 'websearch_cohere', true],
prompts: ['', 'prompts', false],
prompt_order: ['', 'prompt_order', false],
api_url_scale: ['#api_url_scale', 'api_url_scale', false],
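
Each entry here pairs a jQuery selector with a settings key, and the trailing boolean appears to mark whether the control is a checkbox (stream_openai's #stream_toggle uses true, as does the new websearch toggle). The updater that consumes these triples is outside this diff, so the sketch below is only a plausible shape of that logic, not the project's actual code; the function name and the preset parameter are assumptions.

// Hypothetical helper: applies [selector, settingKey, isCheckbox] triples from
// a selected preset to the UI. Assumes jQuery is loaded as $.
function applyPresetSettings(preset, settingsToUpdate) {
    for (const [selector, settingKey, isCheckbox] of Object.values(settingsToUpdate)) {
        if (!selector || preset[settingKey] === undefined) continue;
        if (isCheckbox) {
            $(selector).prop('checked', Boolean(preset[settingKey])).trigger('input');
        } else {
            $(selector).val(preset[settingKey]).trigger('input');
        }
    }
}
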
@@ -4250,6 +4258,11 @@ $(document).ready(async function () {
    oai_settings.stream_openai = !!$('#stream_toggle').prop('checked');
    saveSettingsDebounced();
});
$('#websearch_toggle').on('change', function () {
    oai_settings.websearch_cohere = !!$('#websearch_toggle').prop('checked');
    saveSettingsDebounced();
});
$('#wrap_in_quotes').on('change', function () {
    oai_settings.wrap_in_quotes = !!$('#wrap_in_quotes').prop('checked');

@@ -539,27 +539,54 @@ async function sendCohereRequest(request, response) {
const convertedHistory = convertCohereMessages(request.body.messages);
// https://docs.cohere.com/reference/chat
-const requestBody = {
-    stream: Boolean(request.body.stream),
-    model: request.body.model,
-    message: convertedHistory.userPrompt,
-    preamble: convertedHistory.systemPrompt,
-    chat_history: convertedHistory.chatHistory,
-    temperature: request.body.temperature,
-    max_tokens: request.body.max_tokens,
-    k: request.body.top_k,
-    p: request.body.top_p,
-    seed: request.body.seed,
-    stop_sequences: request.body.stop,
-    frequency_penalty: request.body.frequency_penalty,
-    presence_penalty: request.body.presence_penalty,
-    prompt_truncation: 'AUTO_PRESERVE_ORDER',
-    connectors: [], // TODO
-    documents: [],
-    tools: [],
-    tool_results: [],
-    search_queries_only: false,
-};
+let requestBody = {};
+if (request.body.websearch) {
+    requestBody = {
+        stream: Boolean(request.body.stream),
+        model: request.body.model,
+        message: convertedHistory.userPrompt,
+        preamble: convertedHistory.systemPrompt,
+        chat_history: convertedHistory.chatHistory,
+        temperature: request.body.temperature,
+        max_tokens: request.body.max_tokens,
+        k: request.body.top_k,
+        p: request.body.top_p,
+        seed: request.body.seed,
+        stop_sequences: request.body.stop,
+        frequency_penalty: request.body.frequency_penalty,
+        presence_penalty: request.body.presence_penalty,
+        prompt_truncation: 'AUTO_PRESERVE_ORDER',
+        connectors: [{"id": "web-search"}], // TODO
+        documents: [],
+        tools: [],
+        tool_results: [],
+        search_queries_only: false,
+    };
+}
+else {
+    requestBody = {
+        stream: Boolean(request.body.stream),
+        model: request.body.model,
+        message: convertedHistory.userPrompt,
+        preamble: convertedHistory.systemPrompt,
+        chat_history: convertedHistory.chatHistory,
+        temperature: request.body.temperature,
+        max_tokens: request.body.max_tokens,
+        k: request.body.top_k,
+        p: request.body.top_p,
+        seed: request.body.seed,
+        stop_sequences: request.body.stop,
+        frequency_penalty: request.body.frequency_penalty,
+        presence_penalty: request.body.presence_penalty,
+        prompt_truncation: 'AUTO_PRESERVE_ORDER',
+        connectors: [], // TODO
+        documents: [],
+        tools: [],
+        tool_results: [],
+        search_queries_only: false,
+    };
+}
console.log('Cohere request:', requestBody);
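
The two branches above differ only in the connectors value, so a more compact equivalent, a sketch of an alternative construction inside sendCohereRequest rather than what this commit ships, would build the body once and pick the connector inline:

// Sketch: behaviorally equivalent to the if/else above, with connectors
// selected in a single expression.
const requestBody = {
    stream: Boolean(request.body.stream),
    model: request.body.model,
    message: convertedHistory.userPrompt,
    preamble: convertedHistory.systemPrompt,
    chat_history: convertedHistory.chatHistory,
    temperature: request.body.temperature,
    max_tokens: request.body.max_tokens,
    k: request.body.top_k,
    p: request.body.top_p,
    seed: request.body.seed,
    stop_sequences: request.body.stop,
    frequency_penalty: request.body.frequency_penalty,
    presence_penalty: request.body.presence_penalty,
    prompt_truncation: 'AUTO_PRESERVE_ORDER',
    connectors: request.body.websearch ? [{ id: 'web-search' }] : [],
    documents: [],
    tools: [],
    tool_results: [],
    search_queries_only: false,
};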