+
+
+
diff --git a/public/script.js b/public/script.js
index 6b2dab25e..a296f309f 100644
--- a/public/script.js
+++ b/public/script.js
@@ -286,7 +286,7 @@ let currentCroppedAvatar = '';
const durationSaveEdit = 1000;
const saveSettingsDebounced = debounce(() => saveSettings(), durationSaveEdit);
const saveCharacterDebounced = debounce(() => $("#create_button").trigger('click'), durationSaveEdit);
-const getStatusDebounced = debounce(() => getStatus(), 90000);
+const getStatusDebounced = debounce(() => getStatus(), 300_000);
const saveChatDebounced = debounce(() => saveChatConditional(), durationSaveEdit);
const system_message_types = {
diff --git a/public/scripts/extensions/tts/edge.js b/public/scripts/extensions/tts/edge.js
index e3f8a231e..e0537a6c5 100644
--- a/public/scripts/extensions/tts/edge.js
+++ b/public/scripts/extensions/tts/edge.js
@@ -16,15 +16,20 @@ class EdgeTtsProvider {
audioElement = document.createElement('audio')
defaultSettings = {
- voiceMap: {}
+ voiceMap: {},
+ rate: 0,
}
get settingsHtml() {
-        let html = `Microsoft Edge TTS Provider<br>`
+        let html = `Microsoft Edge TTS Provider<br>
+        <label for="edge_tts_rate">Rate: <span id="edge_tts_rate_output">0</span></label>
+        <input id="edge_tts_rate" type="range" value="${this.defaultSettings.rate}" min="-100" max="100" step="1" />
+        `
return html
}
onSettingsChange() {
+ this.settings.rate = Number($('#edge_tts_rate').val());
+ $('#edge_tts_rate_output').text(this.settings.rate);
}
loadSettings(settings) {
@@ -44,6 +49,9 @@ class EdgeTtsProvider {
}
}
+ $('#edge_tts_rate').val(this.settings.rate || 0);
+ $('#edge_tts_rate_output').text(this.settings.rate || 0);
+
console.info("Settings loaded")
}
@@ -123,6 +131,7 @@ class EdgeTtsProvider {
body: JSON.stringify({
"text": inputText,
"voice": voiceId,
+ "rate": Number(this.settings.rate),
})
}
)
diff --git a/public/scripts/openai.js b/public/scripts/openai.js
index 5ee9bd2b2..3b9b5e8af 100644
--- a/public/scripts/openai.js
+++ b/public/scripts/openai.js
@@ -19,6 +19,7 @@ import {
getRequestHeaders,
system_message_types,
replaceBiasMarkup,
+ is_send_press,
} from "../script.js";
import { groups, selected_group } from "./group-chats.js";
@@ -102,6 +103,7 @@ const default_settings = {
freq_pen_openai: 0.7,
pres_pen_openai: 0.7,
top_p_openai: 1.0,
+ top_k_openai: 0,
stream_openai: false,
openai_max_context: gpt3_max,
openai_max_tokens: 300,
@@ -132,6 +134,7 @@ const oai_settings = {
freq_pen_openai: 0,
pres_pen_openai: 0,
top_p_openai: 1.0,
+ top_k_openai: 0,
stream_openai: false,
openai_max_context: gpt3_max,
openai_max_tokens: 300,
@@ -673,6 +676,7 @@ async function sendOpenAIRequest(type, openai_msgs_tosend, signal) {
"frequency_penalty": parseFloat(oai_settings.freq_pen_openai),
"presence_penalty": parseFloat(oai_settings.pres_pen_openai),
"top_p": parseFloat(oai_settings.top_p_openai),
+ "top_k": parseFloat(oai_settings.top_k_openai),
"max_tokens": oai_settings.openai_max_tokens,
"stream": stream,
"reverse_proxy": oai_settings.reverse_proxy,
@@ -926,6 +930,7 @@ function loadOpenAISettings(data, settings) {
oai_settings.freq_pen_openai = settings.freq_pen_openai ?? default_settings.freq_pen_openai;
oai_settings.pres_pen_openai = settings.pres_pen_openai ?? default_settings.pres_pen_openai;
oai_settings.top_p_openai = settings.top_p_openai ?? default_settings.top_p_openai;
+ oai_settings.top_k_openai = settings.top_k_openai ?? default_settings.top_k_openai;
oai_settings.stream_openai = settings.stream_openai ?? default_settings.stream_openai;
oai_settings.openai_max_context = settings.openai_max_context ?? default_settings.openai_max_context;
oai_settings.openai_max_tokens = settings.openai_max_tokens ?? default_settings.openai_max_tokens;
@@ -986,6 +991,9 @@ function loadOpenAISettings(data, settings) {
$('#top_p_openai').val(oai_settings.top_p_openai);
$('#top_p_counter_openai').text(Number(oai_settings.top_p_openai).toFixed(2));
+ $('#top_k_openai').val(oai_settings.top_k_openai);
+ $('#top_k_counter_openai').text(Number(oai_settings.top_k_openai).toFixed(0));
+
if (settings.reverse_proxy !== undefined) oai_settings.reverse_proxy = settings.reverse_proxy;
$('#openai_reverse_proxy').val(oai_settings.reverse_proxy);
@@ -1025,7 +1033,7 @@ async function getStatusOpen() {
}
if (oai_settings.chat_completion_source == chat_completion_sources.CLAUDE) {
- let status = 'Unable to verify key; assuming it\'s valid.';
+ let status = 'Unable to verify key; press "Test Message" to validate.';
setOnlineStatus(status);
return resultCheckStatusOpen();
}
@@ -1103,6 +1111,7 @@ async function saveOpenAIPreset(name, settings) {
frequency_penalty: settings.freq_pen_openai,
presence_penalty: settings.pres_pen_openai,
top_p: settings.top_p_openai,
+ top_k: settings.top_k_openai,
openai_max_context: settings.openai_max_context,
openai_max_tokens: settings.openai_max_tokens,
nsfw_toggle: settings.nsfw_toggle,
@@ -1374,6 +1383,7 @@ function onSettingsPresetChange() {
frequency_penalty: ['#freq_pen_openai', 'freq_pen_openai', false],
presence_penalty: ['#pres_pen_openai', 'pres_pen_openai', false],
top_p: ['#top_p_openai', 'top_p_openai', false],
+ top_k: ['#top_k_openai', 'top_k_openai', false],
max_context_unlocked: ['#oai_max_context_unlocked', 'max_context_unlocked', true],
openai_model: ['#model_openai_select', 'openai_model', false],
claude_model: ['#model_claude_select', 'claude_model', false],
@@ -1545,7 +1555,26 @@ function toggleChatCompletionForms() {
});
}
+async function testApiConnection() {
+ // Check if the previous request is still in progress
+ if (is_send_press) {
+ toastr.info('Please wait for the previous request to complete.');
+ return;
+ }
+
+ try {
+        const reply = await sendOpenAIRequest('quiet', [{ 'role': 'user', 'content': 'Hi' }], new AbortController().signal);
+ console.log(reply);
+ toastr.success('API connection successful!');
+ }
+ catch (err) {
+ toastr.error('Could not get a reply from API. Check your connection settings / API key and try again.');
+ }
+}
+
$(document).ready(function () {
+ $('#test_api_button').on('click', testApiConnection);
+
$(document).on('input', '#temp_openai', function () {
oai_settings.temp_openai = $(this).val();
$('#temp_counter_openai').text(Number($(this).val()).toFixed(2));
@@ -1569,7 +1598,12 @@ $(document).ready(function () {
oai_settings.top_p_openai = $(this).val();
$('#top_p_counter_openai').text(Number($(this).val()).toFixed(2));
saveSettingsDebounced();
+ });
+ $(document).on('input', '#top_k_openai', function () {
+ oai_settings.top_k_openai = $(this).val();
+ $('#top_k_counter_openai').text(Number($(this).val()).toFixed(0));
+ saveSettingsDebounced();
});
$(document).on('input', '#openai_max_context', function () {
diff --git a/server.js b/server.js
index 08b1f1df1..f3a00497f 100644
--- a/server.js
+++ b/server.js
@@ -2770,6 +2770,8 @@ async function sendClaudeRequest(request, response) {
max_tokens_to_sample: request.body.max_tokens,
stop_sequences: ["\n\nHuman:", "\n\nSystem:", "\n\nAssistant:"],
temperature: request.body.temperature,
+ top_p: request.body.top_p,
+ top_k: request.body.top_k,
stream: request.body.stream,
}),
headers: {