Merge branch 'staging' into group-join-examples

This commit is contained in:
Cohee
2024-12-21 18:39:14 +02:00
6 changed files with 118 additions and 50 deletions

View File

@ -887,7 +887,40 @@ export function initRossMods() {
saveSettingsDebounced();
});
const cssAutofit = CSS.supports('field-sizing', 'content');
if (cssAutofit) {
let lastHeight = chatBlock.offsetHeight;
const chatBlockResizeObserver = new ResizeObserver((entries) => {
for (const entry of entries) {
if (entry.target !== chatBlock) {
continue;
}
const threshold = 1;
const newHeight = chatBlock.offsetHeight;
const deltaHeight = newHeight - lastHeight;
const isScrollAtBottom = Math.abs(chatBlock.scrollHeight - chatBlock.scrollTop - newHeight) <= threshold;
if (!isScrollAtBottom && Math.abs(deltaHeight) > threshold) {
chatBlock.scrollTop -= deltaHeight;
}
lastHeight = newHeight;
}
});
chatBlockResizeObserver.observe(chatBlock);
}
sendTextArea.addEventListener('input', () => {
saveUserInputDebounced();
if (cssAutofit) {
// Unset modifications made with a manual resize
sendTextArea.style.height = 'auto';
return;
}
const hasContent = sendTextArea.value !== '';
const fitsCurrentSize = sendTextArea.scrollHeight <= sendTextArea.offsetHeight;
const isScrollbarShown = sendTextArea.clientWidth < sendTextArea.offsetWidth;
@ -895,7 +928,6 @@ export function initRossMods() {
const needsDebounce = hasContent && (fitsCurrentSize || (isScrollbarShown && isHalfScreenHeight));
if (needsDebounce) autoFitSendTextAreaDebounced();
else autoFitSendTextArea();
saveUserInputDebounced();
});
restoreUserInput();

View File

@ -15,6 +15,7 @@ import { SlashCommandEnumValue, enumTypes } from '../../slash-commands/SlashComm
import { commonEnumProviders } from '../../slash-commands/SlashCommandCommonEnumsProvider.js';
import { slashCommandReturnHelper } from '../../slash-commands/SlashCommandReturnHelper.js';
import { SlashCommandClosure } from '../../slash-commands/SlashCommandClosure.js';
import { generateWebLlmChatPrompt, isWebLlmSupported } from '../shared.js';
export { MODULE_NAME };
const MODULE_NAME = 'expressions';
@ -59,6 +60,7 @@ const EXPRESSION_API = {
local: 0,
extras: 1,
llm: 2,
webllm: 3,
};
let expressionsList = null;
@ -698,8 +700,8 @@ async function moduleWorker() {
}
// If using LLM api then check if streamingProcessor is finished to avoid sending multiple requests to the API
if (extension_settings.expressions.api === EXPRESSION_API.llm && context.streamingProcessor && !context.streamingProcessor.isFinished) {
return;
if (extension_settings.expressions.api === EXPRESSION_API.llm && context.streamingProcessor && !context.streamingProcessor.isFinished) {
return;
}
// API is busy
@ -852,7 +854,7 @@ function setTalkingHeadState(newState) {
extension_settings.expressions.talkinghead = newState; // Store setting
saveSettingsDebounced();
if (extension_settings.expressions.api == EXPRESSION_API.local || extension_settings.expressions.api == EXPRESSION_API.llm) {
if ([EXPRESSION_API.local, EXPRESSION_API.llm, EXPRESSION_API.webllm].includes(extension_settings.expressions.api)) {
return;
}
@ -1057,11 +1059,39 @@ function parseLlmResponse(emotionResponse, labels) {
console.debug(`fuzzy search found: ${result[0].item} as closest for the LLM response:`, emotionResponse);
return result[0].item;
}
const lowerCaseResponse = String(emotionResponse || '').toLowerCase();
for (const label of labels) {
if (lowerCaseResponse.includes(label.toLowerCase())) {
console.debug(`Found label ${label} in the LLM response:`, emotionResponse);
return label;
}
}
}
throw new Error('Could not parse emotion response ' + emotionResponse);
}
/**
 * Builds the JSON schema used to constrain the LLM API output to one emotion.
 * @param {string[]} emotions A list of emotions to search for.
 * @returns {object} The JSON schema for the LLM API.
 */
function getJsonSchema(emotions) {
    // The schema allows exactly one required "emotion" key, restricted to the given labels.
    // NOTE: `enum` intentionally references the caller's array rather than a copy.
    const emotionProperty = {
        type: 'string',
        enum: emotions,
    };
    return {
        $schema: 'http://json-schema.org/draft-04/schema#',
        type: 'object',
        properties: { emotion: emotionProperty },
        required: ['emotion'],
    };
}
function onTextGenSettingsReady(args) {
// Only call if inside an API call
if (inApiCall && extension_settings.expressions.api === EXPRESSION_API.llm && isJsonSchemaSupported()) {
@ -1071,19 +1101,7 @@ function onTextGenSettingsReady(args) {
stop: [],
stopping_strings: [],
custom_token_bans: [],
json_schema: {
$schema: 'http://json-schema.org/draft-04/schema#',
type: 'object',
properties: {
emotion: {
type: 'string',
enum: emotions,
},
},
required: [
'emotion',
],
},
json_schema: getJsonSchema(emotions),
});
}
}
@ -1139,6 +1157,22 @@ export async function getExpressionLabel(text, expressionsApi = extension_settin
const emotionResponse = await generateRaw(text, main_api, false, false, prompt);
return parseLlmResponse(emotionResponse, expressionsList);
}
// Using WebLLM
case EXPRESSION_API.webllm: {
if (!isWebLlmSupported()) {
console.warn('WebLLM is not supported. Using fallback expression');
return getFallbackExpression();
}
const expressionsList = await getExpressionsList();
const prompt = substituteParamsExtended(customPrompt, { labels: expressionsList }) || await getLlmPrompt(expressionsList);
const messages = [
{ role: 'user', content: text + '\n\n' + prompt },
];
const emotionResponse = await generateWebLlmChatPrompt(messages);
return parseLlmResponse(emotionResponse, expressionsList);
}
// Extras
default: {
const url = new URL(getApiUrl());
@ -1603,7 +1637,7 @@ function onExpressionApiChanged() {
const tempApi = this.value;
if (tempApi) {
extension_settings.expressions.api = Number(tempApi);
$('.expression_llm_prompt_block').toggle(extension_settings.expressions.api === EXPRESSION_API.llm);
$('.expression_llm_prompt_block').toggle([EXPRESSION_API.llm, EXPRESSION_API.webllm].includes(extension_settings.expressions.api));
expressionsList = null;
spriteCache = {};
moduleWorker();
@ -1940,7 +1974,7 @@ function migrateSettings() {
await renderAdditionalExpressionSettings();
$('#expression_api').val(extension_settings.expressions.api ?? EXPRESSION_API.extras);
$('.expression_llm_prompt_block').toggle(extension_settings.expressions.api === EXPRESSION_API.llm);
$('.expression_llm_prompt_block').toggle([EXPRESSION_API.llm, EXPRESSION_API.webllm].includes(extension_settings.expressions.api));
$('#expression_llm_prompt').val(extension_settings.expressions.llmPrompt ?? '');
$('#expression_llm_prompt').on('input', function () {
extension_settings.expressions.llmPrompt = $(this).val();

View File

@ -24,7 +24,8 @@
<select id="expression_api" class="flex1 margin0">
<option value="0" data-i18n="Local">Local</option>
<option value="1" data-i18n="Extras">Extras</option>
<option value="2" data-i18n="LLM">LLM</option>
<option value="2" data-i18n="Main API">Main API</option>
<option value="3" data-i18n="WebLLM Extension">WebLLM Extension</option>
</select>
</div>
<div class="expression_llm_prompt_block m-b-1 m-t-1">

View File

@ -25,6 +25,7 @@ const OPENROUTER_PROVIDERS = [
'Anthropic',
'Google',
'Google AI Studio',
'Amazon Bedrock',
'Groq',
'SambaNova',
'Cohere',
@ -50,6 +51,8 @@ const OPENROUTER_PROVIDERS = [
'Featherless',
'Inflection',
'xAI',
'Cloudflare',
'SF Compute',
'01.AI',
'HuggingFace',
'Mancer',