From 4aa31fcba9d12077b87580e086e316338cc4a3e6 Mon Sep 17 00:00:00 2001
From: Cohee <18619528+Cohee1207@users.noreply.github.com>
Date: Thu, 24 Aug 2023 03:21:17 +0300
Subject: [PATCH 1/6] Add fallback option for OpenRouter
---
public/index.html | 11 +++++++++++
public/scripts/openai.js | 12 ++++++++++++
server.js | 4 ++++
3 files changed, 27 insertions(+)
diff --git a/public/index.html b/public/index.html
index e0744182f..a3495686a 100644
--- a/public/index.html
+++ b/public/index.html
@@ -1972,6 +1972,17 @@
+
+
+
+
+ Automatically chooses an alternative model if the chosen model can't serve your request.
+
+
+
OpenRouter API Key
diff --git a/public/scripts/openai.js b/public/scripts/openai.js
index aea4cd7c7..c9302b0b0 100644
--- a/public/scripts/openai.js
+++ b/public/scripts/openai.js
@@ -174,6 +174,7 @@ const default_settings = {
ai21_model: 'j2-ultra',
windowai_model: '',
openrouter_model: openrouter_website_model,
+ openrouter_use_fallback: true,
jailbreak_system: false,
reverse_proxy: '',
legacy_streaming: false,
@@ -217,6 +218,7 @@ const oai_settings = {
ai21_model: 'j2-ultra',
windowai_model: '',
openrouter_model: openrouter_website_model,
+ openrouter_use_fallback: true,
jailbreak_system: false,
reverse_proxy: '',
legacy_streaming: false,
@@ -1158,6 +1160,7 @@ async function sendOpenAIRequest(type, openai_msgs_tosend, signal) {
if (isOpenRouter) {
generate_data['use_openrouter'] = true;
generate_data['top_k'] = Number(oai_settings.top_k_openai);
+ generate_data['use_fallback'] = oai_settings.openrouter_use_fallback;
}
if (isScale) {
@@ -1842,6 +1845,7 @@ function loadOpenAISettings(data, settings) {
oai_settings.claude_model = settings.claude_model ?? default_settings.claude_model;
oai_settings.windowai_model = settings.windowai_model ?? default_settings.windowai_model;
oai_settings.openrouter_model = settings.openrouter_model ?? default_settings.openrouter_model;
+ oai_settings.openrouter_use_fallback = settings.openrouter_use_fallback ?? default_settings.openrouter_use_fallback;
oai_settings.ai21_model = settings.ai21_model ?? default_settings.ai21_model;
oai_settings.chat_completion_source = settings.chat_completion_source ?? default_settings.chat_completion_source;
oai_settings.api_url_scale = settings.api_url_scale ?? default_settings.api_url_scale;
@@ -1891,6 +1895,7 @@ function loadOpenAISettings(data, settings) {
$('#use_ai21_tokenizer').prop('checked', oai_settings.use_ai21_tokenizer);
$('#exclude_assistant').prop('checked', oai_settings.exclude_assistant);
$('#scale-alt').prop('checked', oai_settings.use_alt_scale);
+ $('#openrouter_use_fallback').prop('checked', oai_settings.openrouter_use_fallback);
if (settings.impersonation_prompt !== undefined) oai_settings.impersonation_prompt = settings.impersonation_prompt;
$('#impersonation_prompt_textarea').val(oai_settings.impersonation_prompt);
@@ -2055,6 +2060,7 @@ async function saveOpenAIPreset(name, settings, triggerUi = true) {
claude_model: settings.claude_model,
windowai_model: settings.windowai_model,
openrouter_model: settings.openrouter_model,
+ openrouter_use_fallback: settings.openrouter_use_fallback,
ai21_model: settings.ai21_model,
temperature: settings.temp_openai,
frequency_penalty: settings.freq_pen_openai,
@@ -2413,6 +2419,7 @@ function onSettingsPresetChange() {
claude_model: ['#model_claude_select', 'claude_model', false],
windowai_model: ['#model_windowai_select', 'windowai_model', false],
openrouter_model: ['#model_openrouter_select', 'openrouter_model', false],
+ openrouter_use_fallback: ['#openrouter_use_fallback', 'openrouter_use_fallback', true],
ai21_model: ['#model_ai21_select', 'ai21_model', false],
openai_max_context: ['#openai_max_context', 'openai_max_context', false],
openai_max_tokens: ['#openai_max_tokens', 'openai_max_tokens', false],
@@ -3125,6 +3132,11 @@ $(document).ready(async function () {
saveSettingsDebounced();
});
+ $('#openrouter_use_fallback').on('input', function () {
+ oai_settings.openrouter_use_fallback = !!$(this).prop('checked');
+ saveSettingsDebounced();
+ });
+
$("#api_button_openai").on("click", onConnectButtonClick);
$("#openai_reverse_proxy").on("input", onReverseProxyInput);
$("#model_openai_select").on("change", onModelChange);
diff --git a/server.js b/server.js
index 91ff3063f..60fdae7ce 100644
--- a/server.js
+++ b/server.js
@@ -3478,6 +3478,10 @@ app.post("/generate_openai", jsonParser, function (request, response_generate_op
// OpenRouter needs to pass the referer: https://openrouter.ai/docs
headers = { 'HTTP-Referer': request.headers.referer };
bodyParams = { 'transforms': ["middle-out"] };
+
+ if (request.body.use_fallback) {
+ bodyParams['route'] = 'fallback';
+ }
}
if (!api_key_openai && !request.body.reverse_proxy) {
From 1d1109e43ba75050a995956ec947116a265d1fe7 Mon Sep 17 00:00:00 2001
From: Mike Weldon
Date: Wed, 23 Aug 2023 18:04:56 -0700
Subject: [PATCH 2/6] Set generate_until_sentence for NovelAI
* Set generate_until_sentence true for NovelAI
* Add a Story String file for NovelAI with the persona before the
  character, which works better
* Remove hardcoded dinkus for chat_start since it is in the Story String
---
public/context/NovelAI.json | 6 ++++++
public/script.js | 8 ++++----
server.js | 2 +-
3 files changed, 11 insertions(+), 5 deletions(-)
create mode 100644 public/context/NovelAI.json
diff --git a/public/context/NovelAI.json b/public/context/NovelAI.json
new file mode 100644
index 000000000..b22590ab0
--- /dev/null
+++ b/public/context/NovelAI.json
@@ -0,0 +1,6 @@
+{
+ "name": "NovelAI",
+ "story_string": "{{#if system}}{{system}}{{/if}}\n{{#if wiBefore}}{{wiBefore}}{{/if}}\n{{#if persona}}{{persona}}{{/if}}\n{{#if description}}{{description}}{{/if}}\n{{#if personality}}Personality: {{personality}}{{/if}}\n{{#if scenario}}Scenario: {{scenario}}{{/if}}\n{{#if wiAfter}}{{wiAfter}}{{/if}}",
+ "chat_start": "***",
+ "example_separator": "***"
+}
\ No newline at end of file
diff --git a/public/script.js b/public/script.js
index dbfb1a3dd..2deb37bb0 100644
--- a/public/script.js
+++ b/public/script.js
@@ -3332,11 +3332,11 @@ function addChatsPreamble(mesSendString) {
}
function addChatsSeparator(mesSendString) {
- if (main_api === 'novel') {
- return '***\n' + mesSendString;
- }
+ // if (main_api === 'novel') {
+ // return '***\n' + mesSendString;
+ // }
- else if (power_user.context.chat_start) {
+ if (power_user.context.chat_start) {
return power_user.context.chat_start + '\n' + mesSendString;
}
diff --git a/server.js b/server.js
index 60fdae7ce..b46880ccb 100644
--- a/server.js
+++ b/server.js
@@ -1920,7 +1920,7 @@ app.post("/generate_novelai", jsonParser, async function (request, response_gene
"stop_sequences": request.body.stop_sequences,
"bad_words_ids": badWordsList,
"logit_bias_exp": logit_bias_exp,
- //generate_until_sentence = true;
+ "generate_until_sentence": request.body.generate_until_sentence,
"use_cache": request.body.use_cache,
"use_string": true,
"return_full_text": request.body.return_full_text,
From 8202fab3760f9dc53ef10bc35a2ee5a5c3ec593d Mon Sep 17 00:00:00 2001
From: Mike Weldon
Date: Wed, 23 Aug 2023 18:08:55 -0700
Subject: [PATCH 3/6] Remove commented lines I added by mistake
---
public/script.js | 4 ----
1 file changed, 4 deletions(-)
diff --git a/public/script.js b/public/script.js
index 2deb37bb0..2b12c7c76 100644
--- a/public/script.js
+++ b/public/script.js
@@ -3332,10 +3332,6 @@ function addChatsPreamble(mesSendString) {
}
function addChatsSeparator(mesSendString) {
- // if (main_api === 'novel') {
- // return '***\n' + mesSendString;
- // }
-
if (power_user.context.chat_start) {
return power_user.context.chat_start + '\n' + mesSendString;
}
From d147bc40dcbaf6cdb8cfe5a41643aab125f6cb83 Mon Sep 17 00:00:00 2001
From: Cohee <18619528+Cohee1207@users.noreply.github.com>
Date: Thu, 24 Aug 2023 11:04:46 +0300
Subject: [PATCH 4/6] Fix alternate greetings
---
public/script.js | 7 ++-----
1 file changed, 2 insertions(+), 5 deletions(-)
diff --git a/public/script.js b/public/script.js
index dbfb1a3dd..8a9bcccaa 100644
--- a/public/script.js
+++ b/public/script.js
@@ -4366,12 +4366,9 @@ function getFirstMessage() {
};
if (Array.isArray(alternateGreetings) && alternateGreetings.length > 0) {
+ const swipes = [message.mes, ...(alternateGreetings.map(greeting => substituteParams(getRegexedString(greeting, regex_placement.AI_OUTPUT))))];
message['swipe_id'] = 0;
- message['swipes'] = message['mes'].concat(
- alternateGreetings.map(
- (greeting) => substituteParams(getRegexedString(greeting, regex_placement.AI_OUTPUT))
- )
- );
+ message['swipes'] = swipes;
message['swipe_info'] = [];
}
return message;
From f5fd15ffd207c42c510365afb78007137092b83d Mon Sep 17 00:00:00 2001
From: Cohee <18619528+Cohee1207@users.noreply.github.com>
Date: Thu, 24 Aug 2023 15:13:04 +0300
Subject: [PATCH 5/6] #976 Return "Continue on send". Allow continuing the
first chat message. Add debug function for i18n.
---
default/settings.json | 1 +
public/index.html | 6 +++++
public/jsconfig.json | 2 +-
public/script.js | 15 ++++++++++-
public/scripts/i18n.js | 49 ++++++++++++++++++++++++++++++++++++
public/scripts/power-user.js | 8 ++++++
6 files changed, 79 insertions(+), 2 deletions(-)
diff --git a/default/settings.json b/default/settings.json
index eabf64e95..f2718f18c 100644
--- a/default/settings.json
+++ b/default/settings.json
@@ -129,6 +129,7 @@
"prefer_character_prompt": true,
"prefer_character_jailbreak": true,
"quick_continue": false,
+ "continue_on_send": false,
"trim_spaces": true,
"relaxed_api_urls": false,
"default_instruct": "",
diff --git a/public/index.html b/public/index.html
index a3495686a..b4726d0d8 100644
--- a/public/index.html
+++ b/public/index.html
@@ -2827,6 +2827,12 @@
Prefer Char. JB
+