Merge branch 'staging' into feature/comfyGgufModels

Cohee 2024-11-18 20:10:51 +02:00
commit 87f2a9e630
10 changed files with 88 additions and 31 deletions

package-lock.json (generated, 6 changed lines)
View File

@@ -3019,9 +3019,9 @@
}
},
"node_modules/cross-spawn": {
"version": "7.0.3",
"resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.3.tgz",
"integrity": "sha512-iRDPJKUPVEND7dHPO8rkbOnPpyDygcDFtWjpeWNCgy8WP2rXcxXL8TskReQl6OrB2G7+UJrags1q15Fudc7G6w==",
"version": "7.0.5",
"resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.5.tgz",
"integrity": "sha512-ZVJrKKYunU38/76t0RMOulHOnUcbU9GbpWKAOZ0mhjr7CX6FVrH+4FrAapSOekrgFQ3f/8gwMEuIft0aKq6Hug==",
"license": "MIT",
"dependencies": {
"path-key": "^3.1.0",

View File

@@ -0,0 +1,19 @@
.scrollable-buttons-container {
max-height: 50vh; /* Use viewport height instead of fixed pixels */
overflow-y: auto;
-webkit-overflow-scrolling: touch; /* Momentum scrolling on iOS */
margin-top: 1rem; /* m-t-1 is equivalent to margin-top: 1rem; */
flex-shrink: 1;
min-height: 0;
scrollbar-width: thin;
scrollbar-color: rgba(255, 255, 255, 0.3) transparent;
}
.scrollable-buttons-container::-webkit-scrollbar {
width: 6px;
}
.scrollable-buttons-container::-webkit-scrollbar-thumb {
background-color: rgba(255, 255, 255, 0.3);
border-radius: 3px;
}

View File

@@ -3026,6 +3026,7 @@
<option value="codestral-latest">codestral-latest</option>
<option value="codestral-mamba-latest">codestral-mamba-latest</option>
<option value="pixtral-12b-latest">pixtral-12b-latest</option>
<option value="pixtral-large-latest">pixtral-large-latest</option>
</optgroup>
<optgroup label="Sub-versions">
<option value="open-mistral-nemo-2407">open-mistral-nemo-2407</option>
@@ -3040,10 +3041,12 @@
<option value="mistral-medium-2312">mistral-medium-2312</option>
<option value="mistral-large-2402">mistral-large-2402</option>
<option value="mistral-large-2407">mistral-large-2407</option>
<option value="mistral-large-2411">mistral-large-2411</option>
<option value="codestral-2405">codestral-2405</option>
<option value="codestral-2405-blue">codestral-2405-blue</option>
<option value="codestral-mamba-2407">codestral-mamba-2407</option>
<option value="pixtral-12b-2409">pixtral-12b-2409</option>
<option value="pixtral-large-2411">pixtral-large-2411</option>
</optgroup>
<optgroup id="mistralai_other_models" label="Other"></optgroup>
</select>
@@ -6631,9 +6634,9 @@
</div>
</div>
<div class="logprobs_panel_content inline-drawer-content flex-container flexFlowColumn">
<small class="flex-container alignItemsCenter justifySpaceBetween">
<small class="flex-container alignItemsCenter justifySpaceBetween flexNoWrap">
<b data-i18n="Select a token to see alternatives considered by the AI.">Select a token to see alternatives considered by the AI.</b>
<button id="logprobsReroll" class="menu_button" title="Reroll with the entire prefix" data-i18n="[title]Reroll with the entire prefix">
<button id="logprobsReroll" class="menu_button margin0" title="Reroll with the entire prefix" data-i18n="[title]Reroll with the entire prefix">
<span class="fa-solid fa-redo logprobs_reroll"></span>
</button>
</small>

View File

@@ -703,16 +703,13 @@ const isFirefox = navigator.userAgent.toLowerCase().indexOf('firefox') > -1;
*/
function autoFitSendTextArea() {
const originalScrollBottom = chatBlock.scrollHeight - (chatBlock.scrollTop + chatBlock.offsetHeight);
if (Math.ceil(sendTextArea.scrollHeight + 3) >= Math.floor(sendTextArea.offsetHeight)) {
const sendTextAreaMinHeight = '0px';
sendTextArea.style.height = sendTextAreaMinHeight;
}
const newHeight = sendTextArea.scrollHeight + 3;
sendTextArea.style.height = '1px'; // Reset height to 1px to force recalculation of scrollHeight
const newHeight = sendTextArea.scrollHeight;
sendTextArea.style.height = `${newHeight}px`;
if (!isFirefox) {
const newScrollTop = Math.round(chatBlock.scrollHeight - (chatBlock.offsetHeight + originalScrollBottom));
chatBlock.scrollTop = newScrollTop;
chatBlock.scrollTop = chatBlock.scrollHeight - (chatBlock.offsetHeight + originalScrollBottom);
}
}
export const autoFitSendTextAreaDebounced = debounce(autoFitSendTextArea, debounce_timeout.short);

View File

@@ -37,6 +37,8 @@
<select id="caption_multimodal_model" class="flex1 text_pole">
<option data-type="mistral" value="pixtral-12b-latest">pixtral-12b-latest</option>
<option data-type="mistral" value="pixtral-12b-2409">pixtral-12b-2409</option>
<option data-type="mistral" value="pixtral-large-latest">pixtral-large-latest</option>
<option data-type="mistral" value="pixtral-large-2411">pixtral-large-2411</option>
<option data-type="zerooneai" value="yi-vision">yi-vision</option>
<option data-type="openai" value="gpt-4-vision-preview">gpt-4-vision-preview</option>
<option data-type="openai" value="gpt-4-turbo">gpt-4-turbo</option>

View File

@@ -23,7 +23,7 @@ import {
import { collapseNewlines, registerDebugFunction } from '../../power-user.js';
import { SECRET_KEYS, secret_state, writeSecret } from '../../secrets.js';
import { getDataBankAttachments, getDataBankAttachmentsForSource, getFileAttachment } from '../../chats.js';
import { debounce, getStringHash as calculateHash, waitUntilCondition, onlyUnique, splitRecursive, trimToStartSentence, trimToEndSentence } from '../../utils.js';
import { debounce, getStringHash as calculateHash, waitUntilCondition, onlyUnique, splitRecursive, trimToStartSentence, trimToEndSentence, escapeHtml } from '../../utils.js';
import { debounce_timeout } from '../../constants.js';
import { getSortedEntries } from '../../world-info.js';
import { textgen_types, textgenerationwebui_settings } from '../../textgen-settings.js';
@@ -44,6 +44,9 @@ const MODULE_NAME = 'vectors';
export const EXTENSION_PROMPT_TAG = '3_vectors';
export const EXTENSION_PROMPT_TAG_DB = '4_vectors_data_bank';
// Force solo chunks for sources that don't support batching.
const getBatchSize = () => ['transformers', 'palm', 'ollama'].includes(settings.source) ? 1 : 5;
const settings = {
// For both
source: 'transformers',
@@ -125,7 +128,7 @@ async function onVectorizeAllClick() {
// upon request of a full vectorise
cachedSummaries.clear();
const batchSize = 5;
const batchSize = getBatchSize();
const elapsedLog = [];
let finished = false;
$('#vectorize_progress').show();
@@ -560,7 +563,9 @@ async function vectorizeFile(fileText, fileName, collectionId, chunkSize, overla
fileText = translatedText;
}
const toast = toastr.info('Vectorization may take some time, please wait...', `Ingesting file ${fileName}`);
const batchSize = getBatchSize();
const toastBody = $('<span>').text('This may take a while. Please wait...');
const toast = toastr.info(toastBody, `Ingesting file ${escapeHtml(fileName)}`, { closeButton: false, escapeHtml: false, timeOut: 0, extendedTimeOut: 0 });
const overlapSize = Math.round(chunkSize * overlapPercent / 100);
const delimiters = getChunkDelimiters();
// Overlap should not be included in chunk size. It will be later compensated by overlapChunks
@@ -569,7 +574,12 @@ async function vectorizeFile(fileText, fileName, collectionId, chunkSize, overla
console.debug(`Vectors: Split file ${fileName} into ${chunks.length} chunks with ${overlapPercent}% overlap`, chunks);
const items = chunks.map((chunk, index) => ({ hash: getStringHash(chunk), text: chunk, index: index }));
await insertVectorItems(collectionId, items);
for (let i = 0; i < items.length; i += batchSize) {
toastBody.text(`${i}/${items.length} (${Math.round((i / items.length) * 100)}%) chunks processed`);
const chunkedBatch = items.slice(i, i + batchSize);
await insertVectorItems(collectionId, chunkedBatch);
}
toastr.clear(toast);
console.log(`Vectors: Inserted ${chunks.length} vector items for file ${fileName} into ${collectionId}`);
@@ -1050,7 +1060,7 @@ async function onViewStatsClick() {
toastr.info(`Total hashes: <b>${totalHashes}</b><br>
Unique hashes: <b>${uniqueHashes}</b><br><br>
I'll mark collected messages with a green circle.`,
`Stats for chat ${chatId}`,
`Stats for chat ${escapeHtml(chatId)}`,
{ timeOut: 10000, escapeHtml: false },
);

View File

@@ -4165,7 +4165,7 @@ async function onModelChange() {
$('#openai_max_context').attr('max', unlocked_max);
} else if (oai_settings.mistralai_model.includes('codestral-mamba')) {
$('#openai_max_context').attr('max', max_256k);
} else if (['mistral-large-2407', 'mistral-large-latest'].includes(oai_settings.mistralai_model)) {
} else if (['mistral-large-2407', 'mistral-large-2411', 'mistral-large-latest'].includes(oai_settings.mistralai_model)) {
$('#openai_max_context').attr('max', max_128k);
} else if (oai_settings.mistralai_model.includes('mistral-nemo')) {
$('#openai_max_context').attr('max', max_128k);
@@ -4764,6 +4764,8 @@ export function isImageInliningSupported() {
'pixtral-12b-latest',
'pixtral-12b',
'pixtral-12b-2409',
'pixtral-large-latest',
'pixtral-large-2411',
];
switch (oai_settings.chat_completion_source) {

View File

@@ -2083,7 +2083,10 @@ async function buttonsCallback(args, text) {
let popup;
const buttonContainer = document.createElement('div');
buttonContainer.classList.add('flex-container', 'flexFlowColumn', 'wide100p', 'm-t-1');
buttonContainer.classList.add('flex-container', 'flexFlowColumn', 'wide100p');
const scrollableContainer = document.createElement('div');
scrollableContainer.classList.add('scrollable-buttons-container');
for (const [result, button] of resultToButtonMap) {
const buttonElement = document.createElement('div');
@@ -2096,9 +2099,16 @@ async function buttonsCallback(args, text) {
buttonContainer.appendChild(buttonElement);
}
scrollableContainer.appendChild(buttonContainer);
const popupContainer = document.createElement('div');
popupContainer.innerHTML = safeValue;
popupContainer.appendChild(buttonContainer);
popupContainer.appendChild(scrollableContainer);
// Ensure the popup uses flex layout
popupContainer.style.display = 'flex';
popupContainer.style.flexDirection = 'column';
popupContainer.style.maxHeight = '80vh'; // Limit the overall height of the popup
popup = new Popup(popupContainer, POPUP_TYPE.TEXT, '', { okButton: 'Cancel', allowVerticalScrolling: true });
popup.show()

View File

@@ -23,29 +23,42 @@ export let openRouterModels = [];
const OPENROUTER_PROVIDERS = [
'OpenAI',
'Anthropic',
'HuggingFace',
'Google',
'Mancer',
'Mancer 2',
'Google AI Studio',
'Groq',
'SambaNova',
'Cohere',
'Mistral',
'Together',
'Together 2',
'Fireworks',
'DeepInfra',
'Lepton',
'Novita',
'Avian',
'Lambda',
'Azure',
'Modal',
'AnyScale',
'Replicate',
'Perplexity',
'Recursal',
'Fireworks',
'Mistral',
'Groq',
'Cohere',
'Lepton',
'OctoAI',
'Novita',
'Lynn',
'Lynn 2',
'DeepSeek',
'Infermatic',
'AI21',
'Featherless',
'Inflection',
'xAI',
'01.AI',
'HuggingFace',
'Mancer',
'Mancer 2',
'Hyperbolic',
'Hyperbolic 2',
'Lynn 2',
'Lynn',
'Reflection',
];
export async function loadOllamaModels(data) {

View File

@@ -9,6 +9,7 @@
@import url(css/logprobs.css);
@import url(css/accounts.css);
@import url(css/tags.css);
@import url(css/scrollable-button.css);
:root {
--doc-height: 100%;