fix: LLM Classifier Defaulting to Last Spoken Character's Expression For All Characters in Group Chats (#2605)
* fix: fix llm expression classifier on group chats
* chore: adjust some things
* fix: existing chats defaulting to last character's sent message
* chore: use dual awaits instead of a promise
* chore: restore allSettled and separate await call for llm only
* chore: move classification to generateRaw
* chore: remove characterId
* chore: revert some placements back to how they were
* chore: remove unneeded imports
This commit is contained in:
parent 16172b410c
commit 5d0581603b
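The heart of the change is in the second hunk below: the expression classifier stops calling generateQuietPrompt, which builds its prompt from the surrounding chat context (so in a group chat the classification tends to reflect whoever spoke last), and instead calls generateRaw with the specific message text. A minimal sketch of the resulting call shape, assuming the argument order shown in the diff (text to classify, active API, two flags, then the classification instruction as the final prompt argument); the helper name classifyMessageEmotion is illustrative, not the extension's actual code:

// Sketch only, not the extension's exact code: classify one message's emotion
// by sending its text straight to the LLM instead of a chat-derived quiet prompt.
async function classifyMessageEmotion(text, prompt, expressionsList) {
    // `prompt` holds the classification instruction; `text` is the message to label.
    const emotionResponse = await generateRaw(text, main_api, false, false, prompt);
    // parseLlmResponse maps the raw completion onto one of the known expression labels.
    return parseLlmResponse(emotionResponse, expressionsList);
}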
@@ -1,4 +1,4 @@
-import { callPopup, eventSource, event_types, generateQuietPrompt, getRequestHeaders, online_status, saveSettingsDebounced, substituteParams, substituteParamsExtended, system_message_types } from '../../../script.js';
+import { callPopup, eventSource, event_types, generateRaw, getRequestHeaders, main_api, online_status, saveSettingsDebounced, substituteParams, substituteParamsExtended, system_message_types } from '../../../script.js';
 import { dragElement, isMobile } from '../../RossAscends-mods.js';
 import { getContext, getApiUrl, modules, extension_settings, ModuleWorkerWrapper, doExtrasFetch, renderExtensionTemplateAsync } from '../../extensions.js';
 import { loadMovingUIState, power_user } from '../../power-user.js';
@@ -1156,7 +1156,7 @@ async function getExpressionLabel(text) {
                 functionResult = args?.arguments;
             });
-            const emotionResponse = await generateQuietPrompt(prompt, false, false);
+            const emotionResponse = await generateRaw(text, main_api, false, false, prompt);
             return parseLlmResponse(functionResult || emotionResponse, expressionsList);
         }
         // Extras
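One of the commit notes mentions restoring Promise.allSettled while giving the LLM path its own await. A hedged sketch of how such a split could look at the call site; the wrapper name resolveExpression and the fallbackClassify parameter are assumptions for illustration, not the file's actual code, while getExpressionLabel is the function patched in the hunk above:

// Hypothetical call-site shape: non-LLM classifiers may still be dispatched
// together via Promise.allSettled, while the LLM classifier is awaited on its
// own so each message gets its own label instead of the last resolved one.
async function resolveExpression(text, useLlmClassifier, fallbackClassify) {
    if (useLlmClassifier) {
        // getExpressionLabel is the function patched in the hunk above.
        return await getExpressionLabel(text);
    }
    // fallbackClassify stands in for the existing non-LLM classification path.
    const [settled] = await Promise.allSettled([fallbackClassify(text)]);
    return settled.status === 'fulfilled' ? settled.value : null;
}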