Merge branch 'staging' into gork-ai
@@ -1,7 +1,7 @@
 import { CONNECT_API_MAP, getRequestHeaders } from '../../script.js';
 import { extension_settings, openThirdPartyExtensionMenu } from '../extensions.js';
 import { t } from '../i18n.js';
-import { oai_settings } from '../openai.js';
+import { oai_settings, proxies } from '../openai.js';
 import { SECRET_KEYS, secret_state } from '../secrets.js';
 import { textgen_types, textgenerationwebui_settings } from '../textgen-settings.js';
 import { getTokenCountAsync } from '../tokenizers.js';
@@ -310,9 +310,10 @@ export class ConnectionManagerRequestService {
      * @param {boolean?} [custom.includePreset=true]
      * @param {boolean?} [custom.includeInstruct=true]
      * @param {Partial<InstructSettings>?} [custom.instructSettings] Override instruct settings
+     * @param {Record<string, any>} [overridePayload] - Override payload for the request
      * @returns {Promise<import('../custom-request.js').ExtractedData | (() => AsyncGenerator<import('../custom-request.js').StreamResponse>)>} If not streaming, returns extracted data; if streaming, returns a function that creates an AsyncGenerator
      */
-    static async sendRequest(profileId, prompt, maxTokens, custom = this.defaultSendRequestParams) {
+    static async sendRequest(profileId, prompt, maxTokens, custom = this.defaultSendRequestParams, overridePayload = {}) {
         const { stream, signal, extractData, includePreset, includeInstruct, instructSettings } = { ...this.defaultSendRequestParams, ...custom };

         const context = SillyTavern.getContext();
@@ -330,6 +331,8 @@ export class ConnectionManagerRequestService {
             throw new Error(`API type ${selectedApiMap.selected} does not support chat completions`);
         }

+        const proxyPreset = proxies.find((p) => p.name === profile.proxy);
+
         const messages = Array.isArray(prompt) ? prompt : [{ role: 'user', content: prompt }];
         return await context.ChatCompletionService.processRequest({
             stream,
@@ -338,6 +341,9 @@ export class ConnectionManagerRequestService {
             model: profile.model,
             chat_completion_source: selectedApiMap.source,
             custom_url: profile['api-url'],
+            reverse_proxy: proxyPreset?.url,
+            proxy_password: proxyPreset?.password,
+            ...overridePayload,
         }, {
             presetName: includePreset ? profile.preset : undefined,
         }, extractData, signal);
@@ -354,6 +360,7 @@ export class ConnectionManagerRequestService {
             model: profile.model,
             api_type: selectedApiMap.type,
             api_server: profile['api-url'],
+            ...overridePayload,
         }, {
             instructName: includeInstruct ? profile.instruct : undefined,
             presetName: includePreset ? profile.preset : undefined,
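For context, a minimal usage sketch of the extended signature (not part of the commit). The profile ID, prompt, and override fields below are placeholders, and it assumes ConnectionManagerRequestService is imported from this module:

// Hypothetical caller: send a prompt through a connection profile and override
// individual payload fields via the new overridePayload argument.
const profileId = 'my-profile-id'; // placeholder: ID of an existing connection profile
const response = await ConnectionManagerRequestService.sendRequest(
    profileId,
    'Write a short greeting.',
    150, // maxTokens
    { stream: false, extractData: true },
    { temperature: 0.5 }, // overridePayload is spread last, so it overrides the generated payload fields
);
console.log(response); // extracted data when not streaming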