import {
    eventSource,
    event_types,
    getRequestHeaders,
    getStoppingStrings,
    main_api,
    max_context,
    saveSettingsDebounced,
    setGenerationParamsFromPreset,
    setOnlineStatus,
    substituteParams,
} from '../script.js';
import { BIAS_CACHE, createNewLogitBiasEntry, displayLogitBias, getLogitBiasListResult } from './logit-bias.js';

import { power_user, registerDebugFunction } from './power-user.js';
import { getEventSourceStream } from './sse-stream.js';
import { getCurrentDreamGenModelTokenizer, getCurrentOpenRouterModelTokenizer } from './textgen-models.js';
import { ENCODE_TOKENIZERS, TEXTGEN_TOKENIZERS, getTextTokens, tokenizers } from './tokenizers.js';
import { getSortableDelay, onlyUnique } from './utils.js';

export {
    settings as textgenerationwebui_settings,
    loadTextGenSettings,
    generateTextGenWithStreaming,
    formatTextGenURL,
};

export const textgen_types = {
    OOBA: 'ooba',
    MANCER: 'mancer',
    VLLM: 'vllm',
    APHRODITE: 'aphrodite',
    TABBY: 'tabby',
    KOBOLDCPP: 'koboldcpp',
    TOGETHERAI: 'togetherai',
    LLAMACPP: 'llamacpp',
    OLLAMA: 'ollama',
    INFERMATICAI: 'infermaticai',
    DREAMGEN: 'dreamgen',
    OPENROUTER: 'openrouter',
    FEATHERLESS: 'featherless',
    HUGGINGFACE: 'huggingface',
};

const {
    MANCER,
    VLLM,
    APHRODITE,
    TABBY,
    TOGETHERAI,
    OOBA,
    OLLAMA,
    LLAMACPP,
    INFERMATICAI,
    DREAMGEN,
    OPENROUTER,
    KOBOLDCPP,
    HUGGINGFACE,
    FEATHERLESS,
} = textgen_types;
2024-02-24 05:01:04 +01:00
const LLAMACPP _DEFAULT _ORDER = [
'top_k' ,
'tfs_z' ,
'typical_p' ,
'top_p' ,
'min_p' ,
2024-10-16 18:30:46 +02:00
'xtc' ,
2024-02-24 05:01:04 +01:00
'temperature' ,
] ;
2024-02-20 23:53:54 +01:00
const OOBA _DEFAULT _ORDER = [
2024-09-28 12:52:55 +02:00
'repetition_penalty' ,
'presence_penalty' ,
'frequency_penalty' ,
'dry' ,
2024-02-20 23:53:54 +01:00
'temperature' ,
'dynamic_temperature' ,
'quadratic_sampling' ,
'top_k' ,
'top_p' ,
'typical_p' ,
'epsilon_cutoff' ,
'eta_cutoff' ,
'tfs' ,
'top_a' ,
'min_p' ,
'mirostat' ,
2024-08-31 19:31:34 +02:00
'xtc' ,
2024-09-28 12:52:55 +02:00
'encoder_repetition_penalty' ,
'no_repeat_ngram' ,
2024-02-20 23:53:54 +01:00
] ;

const BIAS_KEY = '#textgenerationwebui_api-settings';

// Maybe let it be configurable in the future?
// (7 days later) The future has come.
const MANCER_SERVER_KEY = 'mancer_server';
const MANCER_SERVER_DEFAULT = 'https://neuro.mancer.tech';
let MANCER_SERVER = localStorage.getItem(MANCER_SERVER_KEY) ?? MANCER_SERVER_DEFAULT;
let TOGETHERAI_SERVER = 'https://api.together.xyz';
let INFERMATICAI_SERVER = 'https://api.totalgpt.ai';
let DREAMGEN_SERVER = 'https://dreamgen.com';
let OPENROUTER_SERVER = 'https://openrouter.ai/api';
let FEATHERLESS_SERVER = 'https://api.featherless.ai/v1';

export const SERVER_INPUTS = {
    [textgen_types.OOBA]: '#textgenerationwebui_api_url_text',
    [textgen_types.VLLM]: '#vllm_api_url_text',
    [textgen_types.APHRODITE]: '#aphrodite_api_url_text',
    [textgen_types.TABBY]: '#tabby_api_url_text',
    [textgen_types.KOBOLDCPP]: '#koboldcpp_api_url_text',
    [textgen_types.LLAMACPP]: '#llamacpp_api_url_text',
    [textgen_types.OLLAMA]: '#ollama_api_url_text',
    [textgen_types.HUGGINGFACE]: '#huggingface_api_url_text',
};

const KOBOLDCPP_ORDER = [6, 0, 1, 3, 4, 2, 5];

const settings = {
    temp: 0.7,
    temperature_last: true,
    top_p: 0.5,
    top_k: 40,
    top_a: 0,
    tfs: 1,
    epsilon_cutoff: 0,
    eta_cutoff: 0,
    typical_p: 1,
    min_p: 0,
    rep_pen: 1.2,
    rep_pen_range: 0,
    rep_pen_decay: 0,
    rep_pen_slope: 1,
    no_repeat_ngram_size: 0,
    penalty_alpha: 0,
    num_beams: 1,
    length_penalty: 1,
    min_length: 0,
    encoder_rep_pen: 1,
    freq_pen: 0,
    presence_pen: 0,
    skew: 0,
    do_sample: true,
    early_stopping: false,
    dynatemp: false,
    min_temp: 0,
    max_temp: 2.0,
    dynatemp_exponent: 1.0,
    smoothing_factor: 0.0,
    smoothing_curve: 1.0,
    dry_allowed_length: 2,
    dry_multiplier: 0.0,
    dry_base: 1.75,
    dry_sequence_breakers: '["\\n", ":", "\\"", "*"]',
    dry_penalty_last_n: 0,
    max_tokens_second: 0,
    seed: -1,
    preset: 'Default',
    add_bos_token: true,
    stopping_strings: [],
    //truncation_length: 2048,
    ban_eos_token: false,
    skip_special_tokens: true,
    streaming: false,
    mirostat_mode: 0,
    mirostat_tau: 5,
    mirostat_eta: 0.1,
    guidance_scale: 1,
    negative_prompt: '',
    grammar_string: '',
    json_schema: {},
    banned_tokens: '',
    sampler_priority: OOBA_DEFAULT_ORDER,
    samplers: LLAMACPP_DEFAULT_ORDER,
    ignore_eos_token: false,
    spaces_between_special_tokens: true,
    speculative_ngram: false,
    type: textgen_types.OOBA,
    mancer_model: 'mytholite',
    togetherai_model: 'Gryphe/MythoMax-L2-13b',
    infermaticai_model: '',
    ollama_model: '',
    openrouter_model: 'openrouter/auto',
    openrouter_providers: [],
    vllm_model: '',
    aphrodite_model: '',
    dreamgen_model: 'opus-v1-xl/text',
    tabby_model: '',
    sampler_order: KOBOLDCPP_ORDER,
    logit_bias: [],
    n: 1,
    server_urls: {},
    custom_model: '',
    bypass_status_check: false,
    openrouter_allow_fallbacks: true,
    xtc_threshold: 0.1,
    xtc_probability: 0,
    featherless_model: '',
};

export let textgenerationwebui_banned_in_macros = [];

export let textgenerationwebui_presets = [];
export let textgenerationwebui_preset_names = [];

export const setting_names = [
    'temp',
    'temperature_last',
    'rep_pen',
    'rep_pen_range',
    'rep_pen_decay',
    'rep_pen_slope',
    'no_repeat_ngram_size',
    'top_k',
    'top_p',
    'top_a',
    'tfs',
    'epsilon_cutoff',
    'eta_cutoff',
    'typical_p',
    'min_p',
    'penalty_alpha',
    'num_beams',
    'length_penalty',
    'min_length',
    'dynatemp',
    'min_temp',
    'max_temp',
    'dynatemp_exponent',
    'smoothing_factor',
    'smoothing_curve',
    'dry_allowed_length',
    'dry_multiplier',
    'dry_base',
    'dry_sequence_breakers',
    'dry_penalty_last_n',
    'max_tokens_second',
    'encoder_rep_pen',
    'freq_pen',
    'presence_pen',
    'skew',
    'do_sample',
    'early_stopping',
    'seed',
    'add_bos_token',
    'ban_eos_token',
    'skip_special_tokens',
    'streaming',
    'mirostat_mode',
    'mirostat_tau',
    'mirostat_eta',
    'guidance_scale',
    'negative_prompt',
    'grammar_string',
    'json_schema',
    'banned_tokens',
    'ignore_eos_token',
    'spaces_between_special_tokens',
    'speculative_ngram',
    'sampler_order',
    'sampler_priority',
    'samplers',
    'n',
    'logit_bias',
    'custom_model',
    'bypass_status_check',
    'openrouter_allow_fallbacks',
    'xtc_threshold',
    'xtc_probability',
];

const DYNATEMP_BLOCK = document.getElementById('dynatemp_block_ooba');

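/**
 * Validates the server URL entered for the currently selected Text Completion
 * backend and normalizes it in the input field. Shows an error toast when the
 * URL cannot be parsed.
 */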
export function validateTextGenUrl() {
    const selector = SERVER_INPUTS[settings.type];
    if (!selector) {
        return;
    }
    const control = $(selector);
    const url = String(control.val()).trim();
    const formattedUrl = formatTextGenURL(url);
    if (!formattedUrl) {
        toastr.error('Enter a valid API URL', 'Text Completion API');
        return;
    }
    control.val(formattedUrl);
}

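/**
 * Gets the API server URL for the currently selected Text Completion type.
 * Cloud backends return their fixed base URLs; everything else uses the URL
 * stored in settings.server_urls.
 * @returns {string} API server URL (empty string if none is configured)
 */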
export function getTextGenServer() {
    switch (settings.type) {
        case FEATHERLESS:
            return FEATHERLESS_SERVER;
        case MANCER:
            return MANCER_SERVER;
        case TOGETHERAI:
            return TOGETHERAI_SERVER;
        case INFERMATICAI:
            return INFERMATICAI_SERVER;
        case DREAMGEN:
            return DREAMGEN_SERVER;
        case OPENROUTER:
            return OPENROUTER_SERVER;
        default:
            return settings.server_urls[settings.type] ?? '';
    }
}

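/**
 * Applies a named Text Completion preset: copies its values into the current
 * settings, updates the UI controls, and rebuilds the logit bias list.
 * @param {string} name Name of the preset to select
 */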
async function selectPreset(name) {
    const preset = textgenerationwebui_presets[textgenerationwebui_preset_names.indexOf(name)];
    if (!preset) {
        return;
    }
    settings.preset = name;
    for (const name of setting_names) {
        const value = preset[name];
        setSettingByName(name, value, true);
    }
    setGenerationParamsFromPreset(preset);
    BIAS_CACHE.delete(BIAS_KEY);
    displayLogitBias(preset.logit_bias, BIAS_KEY);
    saveSettingsDebounced();
}

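/**
 * Validates and normalizes a server URL. Returns the value unchanged for API
 * types with fixed remote endpoints, otherwise round-trips it through the URL
 * constructor (e.g. 'http://127.0.0.1:5000' becomes 'http://127.0.0.1:5000/').
 * Returns null for unparseable input.
 * @param {string} value URL to format
 * @returns {string|null} Formatted URL, or null if invalid
 */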
function formatTextGenURL(value) {
    try {
        const noFormatTypes = [MANCER, TOGETHERAI, INFERMATICAI, DREAMGEN, OPENROUTER];
        if (noFormatTypes.includes(settings.type)) {
            return value;
        }
        const url = new URL(value);
        return url.toString();
    } catch {
        // Just using URL as a validation check
    }
    return null;
}

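/**
 * Parses an array of JSON-serialized preset strings into preset objects.
 * @param {string[]} presets Array of JSON strings
 * @returns {object[]} Array of parsed presets (empty if the input is not an array)
 */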
function convertPresets(presets) {
    return Array.isArray(presets) ? presets.map((p) => JSON.parse(p)) : [];
}

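/**
 * Picks the tokenizer used to turn banned strings and logit bias entries into
 * token IDs: the server-side tokenizer when the backend supports it, the
 * user-selected tokenizer when it can encode text, a model-specific tokenizer
 * for OpenRouter/DreamGen, and the LLaMA tokenizer as a fallback.
 * @returns {number} Tokenizer ID from the tokenizers enum
 */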
function getTokenizerForTokenIds() {
    if (power_user.tokenizer === tokenizers.API_CURRENT && TEXTGEN_TOKENIZERS.includes(settings.type)) {
        return tokenizers.API_CURRENT;
    }
    if (ENCODE_TOKENIZERS.includes(power_user.tokenizer)) {
        return power_user.tokenizer;
    }
    if (settings.type === OPENROUTER) {
        return getCurrentOpenRouterModelTokenizer();
    }
    if (settings.type === DREAMGEN) {
        return getCurrentDreamGenModelTokenizer();
    }
    return tokenizers.LLAMA;
}

/**
 * Collects banned tokens and strings from the UI settings and temporary macro bans.
 * @typedef {{banned_tokens: string, banned_strings: string[]}} TokenBanResult
 * @returns {TokenBanResult} Comma-separated string of banned token IDs and an array of banned strings
 */
function getCustomTokenBans() {
    if (!settings.banned_tokens && !textgenerationwebui_banned_in_macros.length) {
        return {
            banned_tokens: '',
            banned_strings: [],
        };
    }
    const tokenizer = getTokenizerForTokenIds();
    const banned_tokens = [];
    const banned_strings = [];
    const sequences = settings.banned_tokens
        .split('\n')
        .concat(textgenerationwebui_banned_in_macros)
        .filter(x => x.length > 0)
        .filter(onlyUnique);
    // debug
    if (textgenerationwebui_banned_in_macros.length) {
        console.log('=== Found banned word sequences in the macros:', textgenerationwebui_banned_in_macros, 'Resulting array of banned sequences (will be used this generation turn):', sequences);
    }
    // Clean old temporary bans found in macros, ready for the next generation turn.
    textgenerationwebui_banned_in_macros = [];
    for (const line of sequences) {
        // Raw token ids, JSON serialized
        if (line.startsWith('[') && line.endsWith(']')) {
            try {
                const tokens = JSON.parse(line);
                if (Array.isArray(tokens) && tokens.every(t => Number.isInteger(t))) {
                    banned_tokens.push(...tokens);
                } else {
                    throw new Error('Not an array of integers');
                }
            } catch (err) {
                console.log(`Failed to parse bad word token list: ${line}`, err);
            }
        } else if (line.startsWith('"') && line.endsWith('"')) {
            // Remove the enclosing quotes
            banned_strings.push(line.slice(1, -1));
        } else {
            try {
                const tokens = getTextTokens(tokenizer, line);
                banned_tokens.push(...tokens);
            } catch {
                console.log(`Could not tokenize raw text: ${line}`);
            }
        }
    }
    return {
        banned_tokens: banned_tokens.filter(onlyUnique).map(x => String(x)).join(','),
        banned_strings: banned_strings,
    };
}

/**
 * Calculates logit bias object from the logit bias list.
 * @returns {object} Logit bias object
 */
function calculateLogitBias() {
    if (!Array.isArray(settings.logit_bias) || settings.logit_bias.length === 0) {
        return {};
    }
    const tokenizer = getTokenizerForTokenIds();
    const result = {};
    /**
     * Adds bias to the logit bias object.
     * @param {number} bias
     * @param {number[]} sequence
     * @returns {object} Accumulated logit bias object
     */
    function addBias(bias, sequence) {
        if (sequence.length === 0) {
            return;
        }
        for (const logit of sequence) {
            const key = String(logit);
            result[key] = bias;
        }
        return result;
    }
    getLogitBiasListResult(settings.logit_bias, tokenizer, addBias);
    return result;
}

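/**
 * Loads Text Completion presets and saved settings into the module state and UI.
 * @param {object} data Server data (presets and preset names)
 * @param {object} loadedSettings Previously saved user settings
 */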
function loadTextGenSettings(data, loadedSettings) {
    textgenerationwebui_presets = convertPresets(data.textgenerationwebui_presets);
    textgenerationwebui_preset_names = data.textgenerationwebui_preset_names ?? [];
    Object.assign(settings, loadedSettings.textgenerationwebui_settings ?? {});

    if (loadedSettings.api_server_textgenerationwebui) {
        for (const type of Object.keys(SERVER_INPUTS)) {
            settings.server_urls[type] = loadedSettings.api_server_textgenerationwebui;
        }
        delete loadedSettings.api_server_textgenerationwebui;
    }

    for (const [type, selector] of Object.entries(SERVER_INPUTS)) {
        const control = $(selector);
        control.val(settings.server_urls[type] ?? '').on('input', function () {
            settings.server_urls[type] = String($(this).val());
            saveSettingsDebounced();
        });
    }

    if (loadedSettings.api_use_mancer_webui) {
        settings.type = MANCER;
    }

    for (const name of textgenerationwebui_preset_names) {
        const option = document.createElement('option');
        option.value = name;
        option.innerText = name;
        $('#settings_preset_textgenerationwebui').append(option);
    }

    if (settings.preset) {
        $('#settings_preset_textgenerationwebui').val(settings.preset);
    }

    for (const i of setting_names) {
        const value = settings[i];
        setSettingByName(i, value);
    }

    $('#textgen_type').val(settings.type);
    $('#openrouter_providers_text').val(settings.openrouter_providers).trigger('change');
    showTypeSpecificControls(settings.type);
    BIAS_CACHE.delete(BIAS_KEY);
    displayLogitBias(settings.logit_bias, BIAS_KEY);

    registerDebugFunction('change-mancer-url', 'Change Mancer base URL', 'Change Mancer API server base URL', () => {
        const result = prompt(`Enter Mancer base URL\nDefault: ${MANCER_SERVER_DEFAULT}`, MANCER_SERVER);
        if (result) {
            localStorage.setItem(MANCER_SERVER_KEY, result);
            MANCER_SERVER = result;
        }
    });
}

/**
 * Sorts the sampler items by the given order.
 * @param {any[]} orderArray Sampler order array.
 */
function sortKoboldItemsByOrder(orderArray) {
    console.debug('Preset samplers order: ' + orderArray);
    const $draggableItems = $('#koboldcpp_order');
    for (let i = 0; i < orderArray.length; i++) {
        const index = orderArray[i];
        const $item = $draggableItems.find(`[data-id="${index}"]`).detach();
        $draggableItems.append($item);
    }
}

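/**
 * Sorts the llama.cpp sampler items in the UI by the given order.
 * @param {string[]} orderArray Sampler names in the desired order.
 */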
function sortLlamacppItemsByOrder(orderArray) {
    console.debug('Preset samplers order: ', orderArray);
    const $container = $('#llamacpp_samplers_sortable');
    orderArray.forEach((name) => {
        const $item = $container.find(`[data-name="${name}"]`).detach();
        $container.append($item);
    });
}

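/**
 * Sorts the text-generation-webui sampler priority items in the UI by the given order.
 * @param {string[]} orderArray Sampler names in the desired order.
 */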
function sortOobaItemsByOrder(orderArray) {
    console.debug('Preset samplers order: ', orderArray);
    const $container = $('#sampler_priority_container');
    orderArray.forEach((name) => {
        const $item = $container.find(`[data-name="${name}"]`).detach();
        $container.append($item);
    });
}

jQuery(function () {
    $('#koboldcpp_order').sortable({
        delay: getSortableDelay(),
        stop: function () {
            const order = [];
            $('#koboldcpp_order').children().each(function () {
                order.push($(this).data('id'));
            });
            settings.sampler_order = order;
            console.log('Samplers reordered:', settings.sampler_order);
            saveSettingsDebounced();
        },
    });

    $('#koboldcpp_default_order').on('click', function () {
        settings.sampler_order = KOBOLDCPP_ORDER;
        sortKoboldItemsByOrder(settings.sampler_order);
        saveSettingsDebounced();
    });

    $('#llamacpp_samplers_sortable').sortable({
        delay: getSortableDelay(),
        stop: function () {
            const order = [];
            $('#llamacpp_samplers_sortable').children().each(function () {
                order.push($(this).data('name'));
            });
            settings.samplers = order;
            console.log('Samplers reordered:', settings.samplers);
            saveSettingsDebounced();
        },
    });

    $('#llamacpp_samplers_default_order').on('click', function () {
        sortLlamacppItemsByOrder(LLAMACPP_DEFAULT_ORDER);
        settings.samplers = LLAMACPP_DEFAULT_ORDER;
        console.log('Default samplers order loaded:', settings.samplers);
        saveSettingsDebounced();
    });

    $('#sampler_priority_container').sortable({
        delay: getSortableDelay(),
        stop: function () {
            const order = [];
            $('#sampler_priority_container').children().each(function () {
                order.push($(this).data('name'));
            });
            settings.sampler_priority = order;
            console.log('Samplers reordered:', settings.sampler_priority);
            saveSettingsDebounced();
        },
    });

    $('#tabby_json_schema').on('input', function () {
        const json_schema_string = String($(this).val());
        try {
            settings.json_schema = JSON.parse(json_schema_string || '{}');
        } catch {
            // Ignore errors from here
        }
        saveSettingsDebounced();
    });

    $('#textgenerationwebui_default_order').on('click', function () {
        sortOobaItemsByOrder(OOBA_DEFAULT_ORDER);
        settings.sampler_priority = OOBA_DEFAULT_ORDER;
        console.log('Default samplers order loaded:', settings.sampler_priority);
        saveSettingsDebounced();
    });

    $('#textgen_type').on('change', function () {
        const type = String($(this).val());
        settings.type = type;

        if ([VLLM, APHRODITE, INFERMATICAI].includes(settings.type)) {
            $('#mirostat_mode_textgenerationwebui').attr('step', 2); // Aphrodite disallows mode 1
            $('#do_sample_textgenerationwebui').prop('checked', true); // Aphrodite should always do sample; otherwise set temp to 0 to mimic no sample
            $('#ban_eos_token_textgenerationwebui').prop('checked', false); // Aphrodite should not ban EOS, just ignore it; add token '2' to the ban list to do this
            // Special handling for vLLM/Aphrodite top_k -1 disable state
            $('#top_k_textgenerationwebui').attr('min', -1);
            if ($('#top_k_textgenerationwebui').val() === '0' || settings['top_k'] === 0) {
                settings['top_k'] = -1;
                $('#top_k_textgenerationwebui').val('-1').trigger('input');
            }
        } else {
            $('#mirostat_mode_textgenerationwebui').attr('step', 1);
            // Undo the special vLLM/Aphrodite setup for top_k
            $('#top_k_textgenerationwebui').attr('min', 0);
            if ($('#top_k_textgenerationwebui').val() === '-1' || settings['top_k'] === -1) {
                settings['top_k'] = 0;
                $('#top_k_textgenerationwebui').val('0').trigger('input');
            }
        }

        showTypeSpecificControls(type);
        setOnlineStatus('no_connection');
        BIAS_CACHE.delete(BIAS_KEY);

        $('#main_api').trigger('change');

        if (!SERVER_INPUTS[type] || settings.server_urls[type]) {
            $('#api_button_textgenerationwebui').trigger('click');
        }
        saveSettingsDebounced();
    });

    $('#settings_preset_textgenerationwebui').on('change', function () {
        const presetName = $(this).val();
        selectPreset(presetName);
    });

    $('#samplerResetButton').off('click').on('click', function () {
        const inputs = {
            'temp_textgenerationwebui': 1,
            'top_k_textgenerationwebui': [INFERMATICAI, APHRODITE, VLLM].includes(settings.type) ? -1 : 0,
            'top_p_textgenerationwebui': 1,
            'min_p_textgenerationwebui': 0,
            'rep_pen_textgenerationwebui': 1,
            'rep_pen_range_textgenerationwebui': 0,
            'rep_pen_decay_textgenerationwebui': 0,
            'dynatemp_textgenerationwebui': false,
            'seed_textgenerationwebui': -1,
            'ban_eos_token_textgenerationwebui': false,
            'do_sample_textgenerationwebui': true,
            'add_bos_token_textgenerationwebui': true,
            'temperature_last_textgenerationwebui': true,
            'skip_special_tokens_textgenerationwebui': true,
            'top_a_textgenerationwebui': 0,
            'top_a_counter_textgenerationwebui': 0,
            'mirostat_mode_textgenerationwebui': 0,
            'mirostat_tau_textgenerationwebui': 5,
            'mirostat_eta_textgenerationwebui': 0.1,
            'tfs_textgenerationwebui': 1,
            'epsilon_cutoff_textgenerationwebui': 0,
            'eta_cutoff_textgenerationwebui': 0,
            'encoder_rep_pen_textgenerationwebui': 1,
            'freq_pen_textgenerationwebui': 0,
            'presence_pen_textgenerationwebui': 0,
            'skew_textgenerationwebui': 0,
            'no_repeat_ngram_size_textgenerationwebui': 0,
            'speculative_ngram_textgenerationwebui': false,
            'min_length_textgenerationwebui': 0,
            'num_beams_textgenerationwebui': 1,
            'length_penalty_textgenerationwebui': 1,
            'penalty_alpha_textgenerationwebui': 0,
            'typical_p_textgenerationwebui': 1, // Added entry
            'guidance_scale_textgenerationwebui': 1,
            'smoothing_factor_textgenerationwebui': 0,
            'smoothing_curve_textgenerationwebui': 1,
            'dry_allowed_length_textgenerationwebui': 2,
            'dry_multiplier_textgenerationwebui': 0,
            'dry_base_textgenerationwebui': 1.75,
            'dry_penalty_last_n_textgenerationwebui': 0,
            'xtc_threshold_textgenerationwebui': 0.1,
            'xtc_probability_textgenerationwebui': 0,
        };
        for (const [id, value] of Object.entries(inputs)) {
            const inputElement = $(`#${id}`);
            if (inputElement.prop('type') === 'checkbox') {
                inputElement.prop('checked', value).trigger('input');
            } else if (inputElement.prop('type') === 'number') {
                inputElement.val(value).trigger('input');
            } else {
                inputElement.val(value).trigger('input');
                if (power_user.enableZenSliders) {
                    let masterElementID = inputElement.prop('id');
                    console.log(masterElementID);
                    let zenSlider = $(`#${masterElementID}_zenslider`).slider();
                    zenSlider.slider('option', 'value', value);
                    zenSlider.slider('option', 'slide')
                        .call(zenSlider, null, {
                            handle: $('.ui-slider-handle', zenSlider), value: value,
                        });
                }
            }
        }
    });

    for (const i of setting_names) {
        $(`#${i}_textgenerationwebui`).attr('x-setting-id', i);
        $(document).on('input', `#${i}_textgenerationwebui`, function () {
            const isCheckbox = $(this).attr('type') == 'checkbox';
            const isText = $(this).attr('type') == 'text' || $(this).is('textarea');
            const id = $(this).attr('x-setting-id');
            if (isCheckbox) {
                const value = $(this).prop('checked');
                settings[id] = value;
            }
            else if (isText) {
                const value = $(this).val();
                settings[id] = value;
            }
            else {
                const value = Number($(this).val());
                $(`#${id}_counter_textgenerationwebui`).val(value);
                settings[id] = value;
                // Special handling for vLLM/Aphrodite using -1 as disabled instead of 0
                if ($(this).attr('id') === 'top_k_textgenerationwebui' && [INFERMATICAI, APHRODITE, VLLM].includes(settings.type) && value === 0) {
                    settings[id] = -1;
                    $(this).val(-1);
                }
            }
            saveSettingsDebounced();
        });
    }

    $('#textgen_logit_bias_new_entry').on('click', () => createNewLogitBiasEntry(settings.logit_bias, BIAS_KEY));

    $('#openrouter_providers_text').on('change', function () {
        const selectedProviders = $(this).val();
        // Not a multiple select?
        if (!Array.isArray(selectedProviders)) {
            return;
        }
        settings.openrouter_providers = selectedProviders;
        saveSettingsDebounced();
    });
});

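/**
 * Shows or hides UI blocks marked with a data-tg-type attribute depending on
 * the selected API type. Elements tagged with 'all' are always shown.
 * @param {string} type Selected Text Completion API type
 */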
function showTypeSpecificControls(type) {
    $('[data-tg-type]').each(function () {
        const tgTypes = $(this).attr('data-tg-type').split(',').map(x => x.trim());
        for (const tgType of tgTypes) {
            if (tgType === type || tgType == 'all') {
                $(this).show();
                return;
            } else {
                $(this).hide();
            }
        }
    });
}

/**
 * Inserts missing items from the source array into the target array.
 * @param {any[]} source - Source array
 * @param {any[]} target - Target array
 * @returns {void}
 */
function insertMissingArrayItems(source, target) {
    if (source === target || !Array.isArray(source) || !Array.isArray(target)) {
        return;
    }
    for (const item of source) {
        if (!target.includes(item)) {
            const index = source.indexOf(item);
            target.splice(index, 0, item);
        }
    }
}

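/**
 * Writes a single setting value into the settings object and syncs the
 * matching UI control. Sampler orders, logit bias, and the JSON schema get
 * special handling.
 * @param {string} setting Setting name
 * @param {any} value Setting value
 * @param {boolean} [trigger] Whether to fire the control's input event
 */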
function setSettingByName(setting, value, trigger) {
    if (value === null || value === undefined) {
        return;
    }

    if ('sampler_order' === setting) {
        value = Array.isArray(value) ? value : KOBOLDCPP_ORDER;
        sortKoboldItemsByOrder(value);
        settings.sampler_order = value;
        return;
    }

    if ('sampler_priority' === setting) {
        value = Array.isArray(value) ? value : OOBA_DEFAULT_ORDER;
        insertMissingArrayItems(OOBA_DEFAULT_ORDER, value);
        sortOobaItemsByOrder(value);
        settings.sampler_priority = value;
        return;
    }

    if ('samplers' === setting) {
        value = Array.isArray(value) ? value : LLAMACPP_DEFAULT_ORDER;
        insertMissingArrayItems(LLAMACPP_DEFAULT_ORDER, value);
        sortLlamacppItemsByOrder(value);
        settings.samplers = value;
        return;
    }

    if ('logit_bias' === setting) {
        settings.logit_bias = Array.isArray(value) ? value : [];
        return;
    }

    if ('json_schema' === setting) {
        settings.json_schema = value ?? {};
        $('#tabby_json_schema').val(JSON.stringify(settings.json_schema, null, 2));
        return;
    }

    const isCheckbox = $(`#${setting}_textgenerationwebui`).attr('type') == 'checkbox';
    const isText = $(`#${setting}_textgenerationwebui`).attr('type') == 'text' || $(`#${setting}_textgenerationwebui`).is('textarea');
    if (isCheckbox) {
        const val = Boolean(value);
        $(`#${setting}_textgenerationwebui`).prop('checked', val);
    }
    else if (isText) {
        $(`#${setting}_textgenerationwebui`).val(value);
    }
    else {
        const val = parseFloat(value);
        $(`#${setting}_textgenerationwebui`).val(val);
        $(`#${setting}_counter_textgenerationwebui`).val(val);
        if (power_user.enableZenSliders) {
            let zenSlider = $(`#${setting}_textgenerationwebui_zenslider`).slider();
            zenSlider.slider('option', 'value', val);
            zenSlider.slider('option', 'slide')
                .call(zenSlider, null, {
                    handle: $('.ui-slider-handle', zenSlider), value: val,
                });
        }
    }
    if (trigger) {
        $(`#${setting}_textgenerationwebui`).trigger('input');
    }
}

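/**
 * Sends a streaming generation request to the Text Completion backend and
 * returns an async generator that yields the accumulated text, swipes,
 * logprobs, and tool calls as SSE chunks arrive.
 * @param {object} generate_data Generation parameters
 * @param {AbortSignal} signal Signal used to abort the request
 * @returns {Promise<Function>} Async generator function producing streamed results
 */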
async function generateTextGenWithStreaming(generate_data, signal) {
    generate_data.stream = true;

    const response = await fetch('/api/backends/text-completions/generate', {
        headers: {
            ...getRequestHeaders(),
        },
        body: JSON.stringify(generate_data),
        method: 'POST',
        signal: signal,
    });

    if (!response.ok) {
        tryParseStreamingError(response, await response.text());
        throw new Error(`Got response status ${response.status}`);
    }

    const eventStream = getEventSourceStream();
    response.body.pipeThrough(eventStream);
    const reader = eventStream.readable.getReader();

    return async function* streamData() {
        let text = '';
        /** @type {import('./logprobs.js').TokenLogprobs | null} */
        let logprobs = null;
        const swipes = [];
        const toolCalls = [];
        while (true) {
            const { done, value } = await reader.read();
            if (done) return;
            if (value.data === '[DONE]') return;

            tryParseStreamingError(response, value.data);

            let data = JSON.parse(value.data);

            if (data?.choices?.[0]?.index > 0) {
                const swipeIndex = data.choices[0].index - 1;
                swipes[swipeIndex] = (swipes[swipeIndex] || '') + data.choices[0].text;
            } else {
                const newText = data?.choices?.[0]?.text || data?.content || '';
                text += newText;
                logprobs = parseTextgenLogprobs(newText, data.choices?.[0]?.logprobs || data?.completion_probabilities);
            }

            yield { text, swipes, logprobs, toolCalls };
        }
    };
}

/ * *
* parseTextgenLogprobs converts a logprobs object returned from a textgen API
* for a single token into a TokenLogprobs object used by the Token
* Probabilities feature .
* @ param { string } token - the text of the token that the logprobs are for
* @ param { Object } logprobs - logprobs object returned from the API
2024-06-24 02:48:34 +02:00
* @ returns { import ( './logprobs.js' ) . TokenLogprobs | null } - converted logprobs
2024-01-23 06:00:31 +01:00
* /
2024-02-24 13:50:06 +01:00
export function parseTextgenLogprobs ( token , logprobs ) {
2024-01-23 06:00:31 +01:00
if ( ! logprobs ) {
return null ;
}
switch ( settings . type ) {
2024-02-09 02:59:54 +01:00
case TABBY :
2024-05-03 00:40:40 +02:00
case VLLM :
2024-01-26 08:15:39 +01:00
case APHRODITE :
2024-03-24 19:45:37 +01:00
case MANCER :
2024-06-24 18:16:20 +02:00
case INFERMATICAI :
2024-01-23 06:00:31 +01:00
case OOBA : {
2024-02-02 19:06:46 +01:00
/** @type {Record<string, number>[]} */
2024-01-23 06:00:31 +01:00
const topLogprobs = logprobs . top _logprobs ;
if ( ! topLogprobs ? . length ) {
return null ;
}
const candidates = Object . entries ( topLogprobs [ 0 ] ) ;
return { token , topLogprobs : candidates } ;
}
2024-02-23 20:01:46 +01:00
case LLAMACPP : {
/** @type {Record<string, number>[]} */
if ( ! logprobs ? . length ) {
return null ;
}
2024-03-19 00:38:55 +01:00
const candidates = logprobs [ 0 ] . probs . map ( x => [ x . tok _str , x . prob ] ) ;
2024-02-23 20:01:46 +01:00
return { token , topLogprobs : candidates } ;
}
2024-01-23 06:00:31 +01:00
default :
return null ;
}
}
2024-02-24 19:10:53 +01:00
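/**
 * Converts a completion response carrying text offsets and top logprobs (as
 * returned by TabbyAPI) into an array of TokenLogprobs objects, one per token.
 * @param {object} data Completion response data
 * @returns {import('./logprobs.js').TokenLogprobs[] | null} Parsed logprobs, or null if unavailable
 */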
export function parseTabbyLogprobs(data) {
    const text = data?.choices?.[0]?.text;
    const offsets = data?.choices?.[0]?.logprobs?.text_offset;
    if (!text || !offsets) {
        return null;
    }
    // Convert string offsets list to tokens
    const tokens = offsets?.map((offset, index) => {
        const nextOffset = offsets[index + 1] || text.length;
        return text.substring(offset, nextOffset);
    });
    const topLogprobs = data?.choices?.[0]?.logprobs?.top_logprobs?.map(x => ({ top_logprobs: [x] }));
    return tokens?.map((token, index) => parseTextgenLogprobs(token, topLogprobs[index])) || null;
}

/**
 * Parses errors in streaming responses and displays them in toastr.
 * @param {Response} response - Response from the server.
 * @param {string} decoded - Decoded response body.
 * @returns {void} Nothing.
 */
function tryParseStreamingError(response, decoded) {
    let data = {};
    try {
        data = JSON.parse(decoded);
    } catch {
        // No JSON. Do nothing.
    }
    const message = data?.error?.message || data?.message || data?.detail;
    if (message) {
        toastr.error(message, 'Text Completion API');
        throw new Error(message);
    }
}

/**
 * Converts a string of comma-separated integers to an array of integers.
 * @param {string} string Input string
 * @returns {number[]} Array of integers
 */
function toIntArray(string) {
    if (!string) {
        return [];
    }
    return string.split(',').map(x => parseInt(x)).filter(x => !isNaN(x));
}

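/**
 * Gets the model name to send to the backend for the currently selected API
 * type, or undefined when the backend does not need one. Throws if Ollama is
 * selected without a model.
 * @returns {string|undefined} Model name
 */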
export function getTextGenModel() {
    switch (settings.type) {
        case OOBA:
            if (settings.custom_model) {
                return settings.custom_model;
            }
            break;
        case MANCER:
            return settings.mancer_model;
        case TOGETHERAI:
            return settings.togetherai_model;
        case INFERMATICAI:
            return settings.infermaticai_model;
        case DREAMGEN:
            return settings.dreamgen_model;
        case OPENROUTER:
            return settings.openrouter_model;
        case VLLM:
            return settings.vllm_model;
        case APHRODITE:
            return settings.aphrodite_model;
        case OLLAMA:
            if (!settings.ollama_model) {
                toastr.error('No Ollama model selected.', 'Text Completion API');
                throw new Error('No Ollama model selected');
            }
            return settings.ollama_model;
        case FEATHERLESS:
            return settings.featherless_model;
        case HUGGINGFACE:
            return 'tgi';
        case TABBY:
            if (settings.tabby_model) {
                return settings.tabby_model;
            }
            break;
        default:
            return undefined;
    }

    return undefined;
}

export function isJsonSchemaSupported() {
    return [TABBY, LLAMACPP].includes(settings.type) && main_api === 'textgenerationwebui';
}

function isDynamicTemperatureSupported() {
    return settings.dynatemp && DYNATEMP_BLOCK?.dataset?.tgType?.includes(settings.type);
}

function getLogprobsNumber() {
    if (settings.type === VLLM || settings.type === INFERMATICAI) {
        return 5;
    }
    return 10;
}

/**
 * Replaces {{macro}} in a comma-separated or serialized JSON array string.
 * @param {string} str Input string
 * @returns {string} Output string
 */
function replaceMacrosInList(str) {
    if (!str || typeof str !== 'string') {
        return str;
    }
    try {
        const array = JSON.parse(str);
        if (!Array.isArray(array)) {
            throw new Error('Not an array');
        }
        for (let i = 0; i < array.length; i++) {
            array[i] = substituteParams(array[i]);
        }
        return JSON.stringify(array);
    } catch {
        const array = str.split(',');
        for (let i = 0; i < array.length; i++) {
            array[i] = substituteParams(array[i]);
        }
        return array.join(',');
    }
}

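/**
 * Builds the request body for a Text Completion generation, starting from the
 * common parameter set and then layering on backend-specific parameters
 * (vLLM, Aphrodite, llama.cpp, Mancer, etc.).
 * @param {string} finalPrompt Prompt to send
 * @param {number} maxTokens Maximum number of tokens to generate
 * @param {boolean} isImpersonate Whether this is an impersonation request
 * @param {boolean} isContinue Whether this is a continue request
 * @param {object} cfgValues CFG guidance scale and negative prompt overrides
 * @param {string} type Generation type (e.g. 'quiet')
 * @returns {object} Request parameters
 */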
export function getTextGenGenerationData(finalPrompt, maxTokens, isImpersonate, isContinue, cfgValues, type) {
    const canMultiSwipe = !isContinue && !isImpersonate && type !== 'quiet';
    const dynatemp = isDynamicTemperatureSupported();
    const { banned_tokens, banned_strings } = getCustomTokenBans();

    let params = {
        'prompt': finalPrompt,
        'model': getTextGenModel(),
        'max_new_tokens': maxTokens,
        'max_tokens': maxTokens,
        'logprobs': power_user.request_token_probabilities ? getLogprobsNumber() : undefined,
        'temperature': dynatemp ? (settings.min_temp + settings.max_temp) / 2 : settings.temp,
        'top_p': settings.top_p,
        'typical_p': settings.typical_p,
        'typical': settings.typical_p,
        'sampler_seed': settings.seed,
        'min_p': settings.min_p,
        'repetition_penalty': settings.rep_pen,
        'frequency_penalty': settings.freq_pen,
        'presence_penalty': settings.presence_pen,
        'top_k': settings.top_k,
        'skew': settings.skew,
        'min_length': settings.type === OOBA ? settings.min_length : undefined,
        'minimum_message_content_tokens': settings.type === DREAMGEN ? settings.min_length : undefined,
        'min_tokens': settings.min_length,
        'num_beams': settings.type === OOBA ? settings.num_beams : undefined,
        'length_penalty': settings.type === OOBA ? settings.length_penalty : undefined,
        'early_stopping': settings.type === OOBA ? settings.early_stopping : undefined,
        'add_bos_token': settings.add_bos_token,
        'dynamic_temperature': dynatemp ? true : undefined,
        'dynatemp_low': dynatemp ? settings.min_temp : undefined,
        'dynatemp_high': dynatemp ? settings.max_temp : undefined,
        'dynatemp_range': dynatemp ? (settings.max_temp - settings.min_temp) / 2 : undefined,
        'dynatemp_exponent': dynatemp ? settings.dynatemp_exponent : undefined,
        'smoothing_factor': settings.smoothing_factor,
        'smoothing_curve': settings.smoothing_curve,
        'dry_allowed_length': settings.dry_allowed_length,
        'dry_multiplier': settings.dry_multiplier,
        'dry_base': settings.dry_base,
        'dry_sequence_breakers': replaceMacrosInList(settings.dry_sequence_breakers),
        'dry_penalty_last_n': settings.dry_penalty_last_n,
        'max_tokens_second': settings.max_tokens_second,
        'sampler_priority': settings.type === OOBA ? settings.sampler_priority : undefined,
        'samplers': settings.type === LLAMACPP ? settings.samplers : undefined,
        'stopping_strings': getStoppingStrings(isImpersonate, isContinue),
        'stop': getStoppingStrings(isImpersonate, isContinue),
        'truncation_length': max_context,
        'ban_eos_token': settings.ban_eos_token,
        'skip_special_tokens': settings.skip_special_tokens,
        'top_a': settings.top_a,
        'tfs': settings.tfs,
        'epsilon_cutoff': [OOBA, MANCER].includes(settings.type) ? settings.epsilon_cutoff : undefined,
        'eta_cutoff': [OOBA, MANCER].includes(settings.type) ? settings.eta_cutoff : undefined,
        'mirostat_mode': settings.mirostat_mode,
        'mirostat_tau': settings.mirostat_tau,
        'mirostat_eta': settings.mirostat_eta,
        'custom_token_bans': [APHRODITE, MANCER].includes(settings.type) ?
            toIntArray(banned_tokens) :
            banned_tokens,
        'banned_strings': banned_strings,
        'api_type': settings.type,
        'api_server': getTextGenServer(),
        'sampler_order': settings.type === textgen_types.KOBOLDCPP ? settings.sampler_order : undefined,
        'xtc_threshold': settings.xtc_threshold,
        'xtc_probability': settings.xtc_probability,
    };

    const nonAphroditeParams = {
        'rep_pen': settings.rep_pen,
        'rep_pen_range': settings.rep_pen_range,
        'repetition_decay': settings.type === TABBY ? settings.rep_pen_decay : undefined,
        'repetition_penalty_range': settings.rep_pen_range,
        'encoder_repetition_penalty': settings.type === OOBA ? settings.encoder_rep_pen : undefined,
        'no_repeat_ngram_size': settings.type === OOBA ? settings.no_repeat_ngram_size : undefined,
        'penalty_alpha': settings.type === OOBA ? settings.penalty_alpha : undefined,
        'temperature_last': (settings.type === OOBA || settings.type === APHRODITE || settings.type == TABBY) ? settings.temperature_last : undefined,
        'speculative_ngram': settings.type === TABBY ? settings.speculative_ngram : undefined,
        'do_sample': settings.type === OOBA ? settings.do_sample : undefined,
        'seed': settings.seed,
        'guidance_scale': cfgValues?.guidanceScale?.value ?? settings.guidance_scale ?? 1,
        'negative_prompt': cfgValues?.negativePrompt ?? substituteParams(settings.negative_prompt) ?? '',
        'grammar_string': settings.grammar_string,
        'json_schema': [TABBY, LLAMACPP].includes(settings.type) ? settings.json_schema : undefined,
        // llama.cpp aliases. In case someone wants to use LM Studio as Text Completion API
        'repeat_penalty': settings.rep_pen,
        'tfs_z': settings.tfs,
        'repeat_last_n': settings.rep_pen_range,
        'n_predict': maxTokens,
        'num_predict': maxTokens,
        'num_ctx': max_context,
        'mirostat': settings.mirostat_mode,
        'ignore_eos': settings.ban_eos_token,
        'n_probs': power_user.request_token_probabilities ? 10 : undefined,
        'rep_pen_slope': settings.rep_pen_slope,
    };

    const vllmParams = {
        'n': canMultiSwipe ? settings.n : 1,
        'best_of': canMultiSwipe ? settings.n : 1,
        'ignore_eos': settings.ignore_eos_token,
        'spaces_between_special_tokens': settings.spaces_between_special_tokens,
        'seed': settings.seed >= 0 ? settings.seed : undefined,
    };

    const aphroditeParams = {
        'n': canMultiSwipe ? settings.n : 1,
        'frequency_penalty': settings.freq_pen,
        'presence_penalty': settings.presence_pen,
        'repetition_penalty': settings.rep_pen,
        'seed': settings.seed >= 0 ? settings.seed : undefined,
        'stop': getStoppingStrings(isImpersonate, isContinue),
        'temperature': dynatemp ? (settings.min_temp + settings.max_temp) / 2 : settings.temp,
        'temperature_last': settings.temperature_last,
        'top_p': settings.top_p,
        'top_k': settings.top_k,
        'top_a': settings.top_a,
        'min_p': settings.min_p,
        'tfs': settings.tfs,
        'eta_cutoff': settings.eta_cutoff,
        'epsilon_cutoff': settings.epsilon_cutoff,
        'typical_p': settings.typical_p,
        'smoothing_factor': settings.smoothing_factor,
        'smoothing_curve': settings.smoothing_curve,
        'ignore_eos': settings.ignore_eos_token,
        'min_tokens': settings.min_length,
        'skip_special_tokens': settings.skip_special_tokens,
        'spaces_between_special_tokens': settings.spaces_between_special_tokens,
        'guided_grammar': settings.grammar_string,
        'guided_json': settings.json_schema,
        'early_stopping': false, // hacks
        'include_stop_str_in_output': false,
        'dynatemp_min': dynatemp ? settings.min_temp : undefined,
        'dynatemp_max': dynatemp ? settings.max_temp : undefined,
        'dynatemp_exponent': dynatemp ? settings.dynatemp_exponent : undefined,
        'xtc_threshold': settings.xtc_threshold,
        'xtc_probability': settings.xtc_probability,
        'custom_token_bans': toIntArray(banned_tokens),
    };

    if (settings.type === OPENROUTER) {
        params.provider = settings.openrouter_providers;
        params.allow_fallbacks = settings.openrouter_allow_fallbacks;
    }

    if (settings.type === KOBOLDCPP) {
        params.grammar = settings.grammar_string;
    }

    if (settings.type === HUGGINGFACE) {
        params.top_p = Math.min(Math.max(Number(params.top_p), 0.0), 0.999);
        params.stop = Array.isArray(params.stop) ? params.stop.slice(0, 4) : [];
        nonAphroditeParams.seed = settings.seed >= 0 ? settings.seed : Math.floor(Math.random() * Math.pow(2, 32));
    }

    if (settings.type === MANCER) {
        params.n = canMultiSwipe ? settings.n : 1;
        params.epsilon_cutoff /= 1000;
        params.eta_cutoff /= 1000;
        params.dynatemp_mode = params.dynamic_temperature ? 1 : 0;
        params.dynatemp_min = params.dynatemp_low;
        params.dynatemp_max = params.dynatemp_high;
        delete params.dynatemp_low;
        delete params.dynatemp_high;
    }

    if (settings.type === TABBY) {
        params.n = canMultiSwipe ? settings.n : 1;
    }

    switch (settings.type) {
        case VLLM:
        case INFERMATICAI:
            params = Object.assign(params, vllmParams);
            break;
        case APHRODITE:
            // set params to aphroditeParams
            params = Object.assign(params, aphroditeParams);
            break;
        default:
            params = Object.assign(params, nonAphroditeParams);
            break;
    }

    if (Array.isArray(settings.logit_bias) && settings.logit_bias.length) {
        const logitBias = BIAS_CACHE.get(BIAS_KEY) || calculateLogitBias();
        BIAS_CACHE.set(BIAS_KEY, logitBias);
        params.logit_bias = logitBias;
    }

    if (settings.type === LLAMACPP || settings.type === OLLAMA) {
        // Convert bias and token bans to array of arrays
        const logitBiasArray = (params.logit_bias && typeof params.logit_bias === 'object' && Object.keys(params.logit_bias).length > 0)
            ? Object.entries(params.logit_bias).map(([key, value]) => [Number(key), value])
            : [];
        const tokenBans = toIntArray(banned_tokens);
        logitBiasArray.push(...tokenBans.map(x => [Number(x), false]));
        const llamaCppParams = {
            'logit_bias': logitBiasArray,
            // Conflicts with ooba's grammar_string
            'grammar': settings.grammar_string,
            'cache_prompt': true,
        };
        params = Object.assign(params, llamaCppParams);
    }

    eventSource.emitAndWait(event_types.TEXT_COMPLETION_SETTINGS_READY, params);

    // Grammar conflicts with json_schema
    if (settings.type === LLAMACPP) {
        if (params.json_schema && Object.keys(params.json_schema).length > 0) {
            delete params.grammar_string;
            delete params.grammar;
        } else {
            delete params.json_schema;
        }
    }

    return params;
}