Merge branch 'staging' into qr-rewrite

Cohee committed 2024-01-05 17:35:35 +02:00
4 changed files with 98 additions and 20 deletions


@@ -1109,24 +1109,38 @@ async function setExpression(character, expression, force) {
         document.getElementById('expression-holder').style.display = '';
     } else {
-        talkingHeadCheck().then(result => {
+        // Set the talkinghead emotion to the specified expression
+        // TODO: For now, talkinghead emote only supported when VN mode is off; see also updateVisualNovelMode.
+        try {
+            let result = await talkingHeadCheck();
             if (result) {
-                // Find the <img> element with id="expression-image" and class="expression"
-                const imgElement = document.querySelector('img#expression-image.expression');
-                //console.log("searching");
-                if (imgElement && imgElement instanceof HTMLImageElement) {
-                    //console.log("setting value");
-                    imgElement.src = getApiUrl() + '/api/talkinghead/result_feed';
-                }
-            } else {
-                //console.log("The fetch failed!");
+                const url = new URL(getApiUrl());
+                url.pathname = '/api/talkinghead/set_emotion';
+                await doExtrasFetch(url, {
+                    method: 'POST',
+                    headers: {
+                        'Content-Type': 'application/json',
+                    },
+                    body: JSON.stringify({ emotion_name: expression }),
+                });
             }
-        });
+        }
+        catch (error) {
+            // `set_emotion` is not present in old versions, so let it 404.
+        }
+        try {
+            // Find the <img> element with id="expression-image" and class="expression"
+            const imgElement = document.querySelector('img#expression-image.expression');
+            //console.log("searching");
+            if (imgElement && imgElement instanceof HTMLImageElement) {
+                //console.log("setting value");
+                imgElement.src = getApiUrl() + '/api/talkinghead/result_feed';
+            }
+        }
+        catch (error) {
+            //console.log("The fetch failed!");
+        }
     }
 }
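
For readers skimming the diff, the rewritten block above boils down to: try to POST the desired emotion to the Extras server, tolerate a failure (older Extras builds have no `set_emotion` route, per the comment in the diff), then point the sprite `<img>` at the talkinghead result feed either way. Below is a rough standalone sketch of that flow; the `extrasBase` constant and the plain `fetch` call are stand-ins for the extension's real `getApiUrl()` and `doExtrasFetch()` helpers, so treat it as an illustration rather than the shipped code.

// Sketch only: the same sequence as the diff above, with placeholder helpers.
const extrasBase = 'http://localhost:5100'; // stand-in for getApiUrl()

async function setTalkingHeadEmotion(expression, imgElement) {
    try {
        // Newer Extras builds accept an explicit emotion; older ones answer 404,
        // and the response is deliberately ignored, mirroring the catch above.
        await fetch(new URL('/api/talkinghead/set_emotion', extrasBase), {
            method: 'POST',
            headers: { 'Content-Type': 'application/json' },
            body: JSON.stringify({ emotion_name: expression }),
        });
    } catch {
        // Network error or missing endpoint: fall through, keep the feed running.
    }

    // Regardless of the POST outcome, show the talkinghead result feed.
    if (imgElement instanceof HTMLImageElement) {
        imgElement.src = extrasBase + '/api/talkinghead/result_feed';
    }
}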


@@ -15,7 +15,7 @@ import {
     registerDebugFunction,
 } from './power-user.js';
 import EventSourceStream from './sse-stream.js';
-import { SENTENCEPIECE_TOKENIZERS, getTextTokens, tokenizers } from './tokenizers.js';
+import { SENTENCEPIECE_TOKENIZERS, TEXTGEN_TOKENIZERS, getTextTokens, tokenizers } from './tokenizers.js';
 import { getSortableDelay, onlyUnique } from './utils.js';
 
 export {
@@ -47,7 +47,7 @@ let MANCER_SERVER = localStorage.getItem(MANCER_SERVER_KEY) ?? MANCER_SERVER_DEF
 let TOGETHERAI_SERVER = 'https://api.together.xyz';
 
 const SERVER_INPUTS = {
     [textgen_types.OOBA]: '#textgenerationwebui_api_url_text',
     [textgen_types.APHRODITE]: '#aphrodite_api_url_text',
     [textgen_types.TABBY]: '#tabby_api_url_text',
     [textgen_types.KOBOLDCPP]: '#koboldcpp_api_url_text',
@@ -241,6 +241,18 @@ function convertPresets(presets) {
     return Array.isArray(presets) ? presets.map((p) => JSON.parse(p)) : [];
 }
 
+function getTokenizerForTokenIds() {
+    if (power_user.tokenizer === tokenizers.API_CURRENT && TEXTGEN_TOKENIZERS.includes(settings.type)) {
+        return tokenizers.API_CURRENT;
+    }
+
+    if (SENTENCEPIECE_TOKENIZERS.includes(power_user.tokenizer)) {
+        return power_user.tokenizer;
+    }
+
+    return tokenizers.LLAMA;
+}
+
 /**
  * @returns {string} String with comma-separated banned token IDs
  */
@@ -249,7 +261,7 @@ function getCustomTokenBans() {
         return '';
     }
 
-    const tokenizer = SENTENCEPIECE_TOKENIZERS.includes(power_user.tokenizer) ? power_user.tokenizer : tokenizers.LLAMA;
+    const tokenizer = getTokenizerForTokenIds();
     const result = [];
     const sequences = settings.banned_tokens
         .split('\n')
@@ -301,7 +313,7 @@ function calculateLogitBias() {
         return {};
     }
 
-    const tokenizer = SENTENCEPIECE_TOKENIZERS.includes(power_user.tokenizer) ? power_user.tokenizer : tokenizers.LLAMA;
+    const tokenizer = getTokenizerForTokenIds();
     const result = {};
 
     /**
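
Taken together, the hunks above replace two copies of the inline ternary with a single helper, so banned-token IDs and logit-bias IDs are resolved the same way. The precedence is: use the connected backend's own tokenizer when `power_user.tokenizer` is `tokenizers.API_CURRENT` and the backend type is listed in `TEXTGEN_TOKENIZERS`; otherwise keep an explicitly chosen SentencePiece tokenizer; otherwise fall back to LLaMA. The snippet below restates that logic with stand-ins for `tokenizers`, `power_user`, and `settings` (the real values live in tokenizers.js and power-user.js), purely to make the precedence visible.

// Stand-in values for illustration; the real constants live in tokenizers.js.
const tokenizers = { LLAMA: 'llama', API_CURRENT: 'api_current', MISTRAL: 'mistral' };
const SENTENCEPIECE_TOKENIZERS = [tokenizers.LLAMA, tokenizers.MISTRAL];
const TEXTGEN_TOKENIZERS = ['ooba', 'tabby', 'koboldcpp', 'llamacpp'];

const power_user = { tokenizer: tokenizers.API_CURRENT }; // user's tokenizer choice
const settings = { type: 'tabby' };                       // connected text-gen backend

// Same precedence as the new helper in the diff above.
function getTokenizerForTokenIds() {
    if (power_user.tokenizer === tokenizers.API_CURRENT && TEXTGEN_TOKENIZERS.includes(settings.type)) {
        return tokenizers.API_CURRENT; // let the live backend resolve the IDs
    }
    if (SENTENCEPIECE_TOKENIZERS.includes(power_user.tokenizer)) {
        return power_user.tokenizer;   // respect an explicit SentencePiece choice
    }
    return tokenizers.LLAMA;           // safe default for everything else
}

console.log(getTokenizerForTokenIds()); // 'api_current' with the stand-ins above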


@@ -35,6 +35,8 @@ export const SENTENCEPIECE_TOKENIZERS = [
     //tokenizers.NERD2,
 ];
 
+export const TEXTGEN_TOKENIZERS = [OOBA, TABBY, KOBOLDCPP, LLAMACPP];
+
 const TOKENIZER_URLS = {
     [tokenizers.GPT2]: {
         encode: '/api/tokenizers/gpt2/encode',
@@ -190,7 +192,7 @@ export function getTokenizerBestMatch(forApi) {
     // - Tokenizer haven't reported an error previously
     const hasTokenizerError = sessionStorage.getItem(TOKENIZER_WARNING_KEY);
     const isConnected = online_status !== 'no_connection';
-    const isTokenizerSupported = [OOBA, TABBY, KOBOLDCPP, LLAMACPP].includes(textgen_settings.type);
+    const isTokenizerSupported = TEXTGEN_TOKENIZERS.includes(textgen_settings.type);
 
     if (!hasTokenizerError && isConnected) {
         if (forApi === 'kobold' && kai_flags.can_use_tokenization) {
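
The last two hunks export the backend list as `TEXTGEN_TOKENIZERS` and reuse it in `getTokenizerBestMatch()`, so the set of text-generation backends whose tokenizer the UI can query lives in one place instead of being repeated as an inline array literal. A small usage sketch follows; the helper name and its parameter are made up for illustration, and only the import of `TEXTGEN_TOKENIZERS` from tokenizers.js is taken from the diff.

import { TEXTGEN_TOKENIZERS } from './tokenizers.js';

// Hypothetical helper: true when the connected text-generation backend is one
// the UI can ask to tokenize for it (OOBA, TABBY, KOBOLDCPP, LLAMACPP).
function backendHasRemoteTokenizer(backendType) {
    return TEXTGEN_TOKENIZERS.includes(backendType);
}

// e.g. backendHasRemoteTokenizer(textgen_settings.type) inside textgen-settings.js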