Fix context and response size not being passed to Ollama
parent b89afe6d13
commit a3dbcf3c2a

@@ -861,7 +861,7 @@ async function generateTextGenWithStreaming(generate_data, signal) {
     return async function* streamData() {
         let text = '';
-        /** @type {import('logprobs.js').TokenLogprobs | null} */
+        /** @type {import('./logprobs.js').TokenLogprobs | null} */
         let logprobs = null;
         const swipes = [];
         while (true) {

@@ -893,7 +893,7 @@ async function generateTextGenWithStreaming(generate_data, signal) {
  * Probabilities feature.
  * @param {string} token - the text of the token that the logprobs are for
  * @param {Object} logprobs - logprobs object returned from the API
- * @returns {import('logprobs.js').TokenLogprobs | null} - converted logprobs
+ * @returns {import('./logprobs.js').TokenLogprobs | null} - converted logprobs
  */
 export function parseTextgenLogprobs(token, logprobs) {
     if (!logprobs) {
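
Note: both hunks above are the same one-character fix to a JSDoc import() type. A bare specifier like 'logprobs.js' is resolved the way an ES import would be, i.e. as a package name, so tooling cannot find the TokenLogprobs type; the relative './logprobs.js' resolves against the importing file.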

@@ -1112,6 +1112,8 @@ export function getTextGenGenerationData(finalPrompt, maxTokens, isImpersonate,
         'tfs_z': settings.tfs,
         'repeat_last_n': settings.rep_pen_range,
         'n_predict': maxTokens,
+        'num_predict': maxTokens,
+        'num_ctx': max_context,
         'mirostat': settings.mirostat_mode,
         'ignore_eos': settings.ban_eos_token,
         'n_probs': power_user.request_token_probabilities ? 10 : undefined,
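
Ollama reads the response length and the context window from the num_predict and num_ctx options of its /api/generate request, so the payload now carries those names alongside the llama.cpp-style n_predict. A minimal sketch of the request these keys are meant to end up in, assuming the server forwards them verbatim (model name and values are illustrative):

// Sketch of an Ollama /api/generate request body.
const ollamaRequest = {
    model: 'mistral',     // illustrative model name
    prompt: 'Hello',      // finalPrompt in the function above
    options: {
        num_predict: 512, // response length, taken from maxTokens
        num_ctx: 8192,    // context window size, taken from max_context
    },
};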

@@ -259,6 +259,7 @@ const TOGETHERAI_KEYS = [
 // https://github.com/jmorganca/ollama/blob/main/docs/api.md#request-with-options
 const OLLAMA_KEYS = [
     'num_predict',
+    'num_ctx',
     'stop',
     'temperature',
     'repeat_penalty',
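
OLLAMA_KEYS acts as a whitelist: only parameters named in it survive into the options the server sends to Ollama, which is why a client-supplied num_ctx was previously dropped. A minimal sketch of that filtering step (the pickKeys helper is hypothetical, not the project's actual code):

// Hypothetical helper illustrating the whitelist: copy only allowed keys.
function pickKeys(params, allowedKeys) {
    const picked = {};
    for (const key of allowedKeys) {
        if (params[key] !== undefined) {
            picked[key] = params[key];
        }
    }
    return picked;
}

// Before this commit, 'num_ctx' was absent from OLLAMA_KEYS,
// so the context size set by the client never reached Ollama.
const options = pickKeys({ num_predict: 512, num_ctx: 8192, unrelated: 1 }, OLLAMA_KEYS);
// -> { num_predict: 512, num_ctx: 8192 }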