This commit is contained in:
RossAscends
2023-06-10 23:21:29 +09:00
5 changed files with 70 additions and 7 deletions

View File

@ -573,9 +573,24 @@
</div>
</div>
</div>
<div class="range-block" data-source="openai">
<div class="range-block" data-source="claude">
<div class="range-block-title" data-i18n="Top K">
Top K
</div>
<div class="range-block-range-and-counter">
<div class="range-block-range">
<input type="range" id="top_k_openai" name="volume" min="0" max="200" step="1">
</div>
<div class="range-block-counter">
<div contenteditable="true" data-for="top_k_openai" id="top_k_counter_openai">
select
</div>
</div>
</div>
</div>
<div class="range-block" data-source="openai,claude">
<div class="range-block-title" data-i18n="Top-p">
Top-p
Top P
</div>
<div class="range-block-range-and-counter">
<div class="range-block-range">
@ -1435,7 +1450,10 @@
</div>
</form>
<div class="flex-container flex">
<input id="api_button_openai" class="menu_button" type="submit" value="Connect">
<input id="test_api_button" class="menu_button" type="button" value="Test Message" title="Verifies your API connection by sending a short test message. Be aware that you'll be credited for it!">
</div>
<div id="api_loading_openai" class=" api-load-icon fa-solid fa-hourglass fa-spin"></div>
<div class="online_status4">
<div class="online_status_indicator4"></div>

View File

@ -286,7 +286,7 @@ let currentCroppedAvatar = '';
const durationSaveEdit = 1000;
const saveSettingsDebounced = debounce(() => saveSettings(), durationSaveEdit);
const saveCharacterDebounced = debounce(() => $("#create_button").trigger('click'), durationSaveEdit);
const getStatusDebounced = debounce(() => getStatus(), 90000);
const getStatusDebounced = debounce(() => getStatus(), 300_000);
const saveChatDebounced = debounce(() => saveChatConditional(), durationSaveEdit);
const system_message_types = {

View File

@ -16,15 +16,20 @@ class EdgeTtsProvider {
audioElement = document.createElement('audio')
defaultSettings = {
voiceMap: {}
voiceMap: {},
rate: 0,
}
get settingsHtml() {
let html = `Microsoft Edge TTS Provider<br>`
let html = `Microsoft Edge TTS Provider<br>
<label for="edge_tts_rate">Rate: <span id="edge_tts_rate_output"></span></label>
<input id="edge_tts_rate" type="range" value="${this.defaultSettings.rate}" min="-100" max="100" step="1" />`
return html
}
onSettingsChange() {
this.settings.rate = Number($('#edge_tts_rate').val());
$('#edge_tts_rate_output').text(this.settings.rate);
}
loadSettings(settings) {
@ -44,6 +49,9 @@ class EdgeTtsProvider {
}
}
$('#edge_tts_rate').val(this.settings.rate || 0);
$('#edge_tts_rate_output').text(this.settings.rate || 0);
console.info("Settings loaded")
}
@ -123,6 +131,7 @@ class EdgeTtsProvider {
body: JSON.stringify({
"text": inputText,
"voice": voiceId,
"rate": Number(this.settings.rate),
})
}
)

View File

@ -19,6 +19,7 @@ import {
getRequestHeaders,
system_message_types,
replaceBiasMarkup,
is_send_press,
} from "../script.js";
import { groups, selected_group } from "./group-chats.js";
@ -102,6 +103,7 @@ const default_settings = {
freq_pen_openai: 0.7,
pres_pen_openai: 0.7,
top_p_openai: 1.0,
top_k_openai: 0,
stream_openai: false,
openai_max_context: gpt3_max,
openai_max_tokens: 300,
@ -132,6 +134,7 @@ const oai_settings = {
freq_pen_openai: 0,
pres_pen_openai: 0,
top_p_openai: 1.0,
top_k_openai: 0,
stream_openai: false,
openai_max_context: gpt3_max,
openai_max_tokens: 300,
@ -673,6 +676,7 @@ async function sendOpenAIRequest(type, openai_msgs_tosend, signal) {
"frequency_penalty": parseFloat(oai_settings.freq_pen_openai),
"presence_penalty": parseFloat(oai_settings.pres_pen_openai),
"top_p": parseFloat(oai_settings.top_p_openai),
"top_k": parseFloat(oai_settings.top_k_openai),
"max_tokens": oai_settings.openai_max_tokens,
"stream": stream,
"reverse_proxy": oai_settings.reverse_proxy,
@ -926,6 +930,7 @@ function loadOpenAISettings(data, settings) {
oai_settings.freq_pen_openai = settings.freq_pen_openai ?? default_settings.freq_pen_openai;
oai_settings.pres_pen_openai = settings.pres_pen_openai ?? default_settings.pres_pen_openai;
oai_settings.top_p_openai = settings.top_p_openai ?? default_settings.top_p_openai;
oai_settings.top_k_openai = settings.top_k_openai ?? default_settings.top_k_openai;
oai_settings.stream_openai = settings.stream_openai ?? default_settings.stream_openai;
oai_settings.openai_max_context = settings.openai_max_context ?? default_settings.openai_max_context;
oai_settings.openai_max_tokens = settings.openai_max_tokens ?? default_settings.openai_max_tokens;
@ -986,6 +991,9 @@ function loadOpenAISettings(data, settings) {
$('#top_p_openai').val(oai_settings.top_p_openai);
$('#top_p_counter_openai').text(Number(oai_settings.top_p_openai).toFixed(2));
$('#top_k_openai').val(oai_settings.top_k_openai);
$('#top_k_counter_openai').text(Number(oai_settings.top_k_openai).toFixed(0));
if (settings.reverse_proxy !== undefined) oai_settings.reverse_proxy = settings.reverse_proxy;
$('#openai_reverse_proxy').val(oai_settings.reverse_proxy);
@ -1025,7 +1033,7 @@ async function getStatusOpen() {
}
if (oai_settings.chat_completion_source == chat_completion_sources.CLAUDE) {
let status = 'Unable to verify key; assuming it\'s valid.';
let status = 'Unable to verify key; press "Test Message" to validate.';
setOnlineStatus(status);
return resultCheckStatusOpen();
}
@ -1103,6 +1111,7 @@ async function saveOpenAIPreset(name, settings) {
frequency_penalty: settings.freq_pen_openai,
presence_penalty: settings.pres_pen_openai,
top_p: settings.top_p_openai,
top_k: settings.top_k_openai,
openai_max_context: settings.openai_max_context,
openai_max_tokens: settings.openai_max_tokens,
nsfw_toggle: settings.nsfw_toggle,
@ -1374,6 +1383,7 @@ function onSettingsPresetChange() {
frequency_penalty: ['#freq_pen_openai', 'freq_pen_openai', false],
presence_penalty: ['#pres_pen_openai', 'pres_pen_openai', false],
top_p: ['#top_p_openai', 'top_p_openai', false],
top_k: ['#top_k_openai', 'top_k_openai', false],
max_context_unlocked: ['#oai_max_context_unlocked', 'max_context_unlocked', true],
openai_model: ['#model_openai_select', 'openai_model', false],
claude_model: ['#model_claude_select', 'claude_model', false],
@ -1545,7 +1555,26 @@ function toggleChatCompletionForms() {
});
}
/**
 * Verifies the configured API connection by sending a short "quiet" test
 * prompt ('Hi') through sendOpenAIRequest and reporting the outcome via
 * toastr notifications. Skips the check while a generation is in flight.
 * @returns {Promise<void>}
 */
async function testApiConnection() {
    // Check if the previous request is still in progress
    if (is_send_press) {
        toastr.info('Please wait for the previous request to complete.');
        return;
    }
    try {
        const reply = await sendOpenAIRequest('quiet', [{ 'role': 'user', 'content': 'Hi' }]);
        console.log(reply);
        toastr.success('API connection successful!');
    }
    catch (err) {
        // Log the actual failure instead of silently swallowing it, so the
        // root cause (bad key, network error, proxy issue) can be diagnosed.
        console.error(err);
        toastr.error('Could not get a reply from API. Check your connection settings / API key and try again.');
    }
}
$(document).ready(function () {
$('#test_api_button').on('click', testApiConnection);
$(document).on('input', '#temp_openai', function () {
oai_settings.temp_openai = $(this).val();
$('#temp_counter_openai').text(Number($(this).val()).toFixed(2));
@ -1569,7 +1598,12 @@ $(document).ready(function () {
oai_settings.top_p_openai = $(this).val();
$('#top_p_counter_openai').text(Number($(this).val()).toFixed(2));
saveSettingsDebounced();
});
$(document).on('input', '#top_k_openai', function () {
oai_settings.top_k_openai = $(this).val();
$('#top_k_counter_openai').text(Number($(this).val()).toFixed(0));
saveSettingsDebounced();
});
$(document).on('input', '#openai_max_context', function () {

View File

@ -2770,6 +2770,8 @@ async function sendClaudeRequest(request, response) {
max_tokens_to_sample: request.body.max_tokens,
stop_sequences: ["\n\nHuman:", "\n\nSystem:", "\n\nAssistant:"],
temperature: request.body.temperature,
top_p: request.body.top_p,
top_k: request.body.top_k,
stream: request.body.stream,
}),
headers: {