Mirror of https://github.com/SillyTavern/SillyTavern.git, synced 2025-06-05 21:59:27 +02:00
Added proxy support to ChatCompletionService
Changes to the ChatCompletionService module:

@@ -43,10 +43,12 @@ import EventSourceStream from './sse-stream.js';
  * @property {boolean?} [stream=false] - Whether to stream the response
  * @property {ChatCompletionMessage[]} messages - Array of chat messages
  * @property {string} [model] - Optional model name to use for completion
  * @property {string} chat_completion_source - Source provider for chat completion
  * @property {number} max_tokens - Maximum number of tokens to generate
  * @property {number} [temperature] - Optional temperature parameter for response randomness
  * @property {string} [custom_url] - Optional custom URL for chat completion
+ * @property {string} [reverse_proxy] - Optional reverse proxy URL
+ * @property {string} [proxy_password] - Optional proxy password
  */
 
 /** @typedef {Record<string, any> & ChatCompletionPayloadBase} ChatCompletionPayload */
@@ -387,7 +389,7 @@ export class ChatCompletionService {
      * @param {ChatCompletionPayload} custom
      * @returns {ChatCompletionPayload}
      */
-    static createRequestData({ stream = false, messages, model, chat_completion_source, max_tokens, temperature, custom_url, ...props }) {
+    static createRequestData({ stream = false, messages, model, chat_completion_source, max_tokens, temperature, custom_url, reverse_proxy, proxy_password, ...props }) {
         const payload = {
             ...props,
             stream,
@@ -397,6 +399,8 @@ export class ChatCompletionService {
             max_tokens,
             temperature,
             custom_url,
+            reverse_proxy,
+            proxy_password,
         };
 
         // Remove undefined values to avoid API errors
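As a quick illustration of the new parameters, here is a hedged sketch of a call to the extended createRequestData. Only the parameter names come from the diff above; every value is a placeholder, not taken from the commit.

// Sketch only: argument values are made up, field names are from the commit.
const payload = ChatCompletionService.createRequestData({
    messages: [{ role: 'user', content: 'Hello' }],
    model: 'gpt-4o-mini',                          // placeholder model name
    chat_completion_source: 'openai',              // placeholder source
    max_tokens: 512,
    temperature: 0.7,
    reverse_proxy: 'https://proxy.example.com/v1', // new: included in the request payload
    proxy_password: 'example-password',            // new: included in the request payload
});
// The returned payload now carries reverse_proxy and proxy_password alongside the
// existing fields; anything left undefined is stripped before the request is sent,
// per the "Remove undefined values" step above.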
Changes to the ConnectionManagerRequestService module:

@@ -1,7 +1,7 @@
 import { CONNECT_API_MAP, getRequestHeaders } from '../../script.js';
 import { extension_settings, openThirdPartyExtensionMenu } from '../extensions.js';
 import { t } from '../i18n.js';
-import { oai_settings } from '../openai.js';
+import { oai_settings, proxies } from '../openai.js';
 import { SECRET_KEYS, secret_state } from '../secrets.js';
 import { textgen_types, textgenerationwebui_settings } from '../textgen-settings.js';
 import { getTokenCountAsync } from '../tokenizers.js';
@@ -326,6 +326,8 @@ export class ConnectionManagerRequestService {
             throw new Error(`API type ${selectedApiMap.selected} does not support chat completions`);
         }
 
+        const proxyPreset = proxies.find((p) => p.name === profile.proxy);
+
         const messages = Array.isArray(prompt) ? prompt : [{ role: 'user', content: prompt }];
         return await context.ChatCompletionService.processRequest({
             stream,
@@ -334,6 +336,8 @@ export class ConnectionManagerRequestService {
             model: profile.model,
             chat_completion_source: selectedApiMap.source,
             custom_url: profile['api-url'],
+            reverse_proxy: proxyPreset?.url,
+            proxy_password: proxyPreset?.password,
         }, {
             presetName: includePreset ? profile.preset : undefined,
         }, extractData, signal);
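The new lookup resolves the profile's proxy name against the proxy presets exported from openai.js. Below is a hedged sketch of the preset shape this code assumes; the field names (name, url, password) are inferred from the usage above, and the values are placeholders.

// Inferred shape only: name is matched against profile.proxy, while url and
// password are read via optional chaining in the diff above.
const examplePreset = {
    name: 'my-proxy',
    url: 'https://proxy.example.com/v1',
    password: 'example-password',
};

// If profile.proxy is unset or no preset matches, proxyPreset is undefined, so
// reverse_proxy and proxy_password remain undefined and are dropped by
// createRequestData before the request is sent.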