Merge branch 'staging' into fix/connRefusedErrMsg

This commit is contained in:
Cohee
2024-11-24 01:41:15 +02:00
21 changed files with 957 additions and 422 deletions

View File

@@ -280,7 +280,7 @@ async function sendMakerSuiteRequest(request, response) {
delete generationConfig.stopSequences;
}
const should_use_system_prompt = (model.includes('gemini-1.5-flash') || model.includes('gemini-1.5-pro') || model.includes('gemini-exp-1114')) && request.body.use_makersuite_sysprompt;
const should_use_system_prompt = (model.includes('gemini-1.5-flash') || model.includes('gemini-1.5-pro') || model.includes('gemini-exp-1114') || model.includes('gemini-exp-1121')) && request.body.use_makersuite_sysprompt;
const prompt = convertGooglePrompt(request.body.messages, model, should_use_system_prompt, request.body.char_name, request.body.user_name);
let body = {
contents: prompt.contents,

View File

@@ -16,6 +16,7 @@ import {
} from '../../constants.js';
import { forwardFetchResponse, trimV1, getConfigValue } from '../../util.js';
import { setAdditionalHeaders } from '../../additional-headers.js';
import { createHash } from 'node:crypto';
export const router = express.Router();
@@ -227,6 +228,40 @@ router.post('/status', jsonParser, async function (request, response) {
}
});
/**
 * Proxies the text-generation backend's `/props` endpoint and augments the
 * result with a SHA-256 hash of the chat template.
 * Expects `api_server` (base URL) and optionally `api_type` in the body.
 * Responds 400 on missing/failed upstream, 500 on unexpected errors.
 */
router.post('/props', jsonParser, async function (request, response) {
    if (!request.body.api_server) return response.sendStatus(400);

    try {
        const baseUrl = trimV1(request.body.api_server);
        const args = {
            headers: {},
        };

        setAdditionalHeaders(request, args, baseUrl);

        const apiType = request.body.api_type;
        const propsUrl = baseUrl + '/props';
        const propsReply = await fetch(propsUrl, args);

        if (!propsReply.ok) {
            // BUG FIX: response.status(400) only sets the code and never ends the
            // response, leaving the client hanging; sendStatus() actually replies.
            return response.sendStatus(400);
        }

        /** @type {any} */
        const props = await propsReply.json();

        // Backends are not guaranteed to return a chat template; without this
        // guard, .endsWith()/createHash().update() on undefined would throw and
        // turn an otherwise usable reply into a 500.
        if (typeof props['chat_template'] === 'string') {
            // TEMPORARY: llama.cpp's /props endpoint has a bug which replaces the last newline with a \0
            if (apiType === TEXTGEN_TYPES.LLAMACPP && props['chat_template'].endsWith('\u0000')) {
                props['chat_template'] = props['chat_template'].slice(0, -1) + '\n';
            }

            // Hash lets the client cheaply detect template changes between requests.
            props['chat_template_hash'] = createHash('sha256').update(props['chat_template']).digest('hex');
        }

        console.log(`Model properties: ${JSON.stringify(props)}`);
        return response.send(props);
    } catch (error) {
        console.error(error);
        // BUG FIX: same as above — status(500) alone does not send a response.
        return response.sendStatus(500);
    }
});
router.post('/generate', jsonParser, async function (request, response) {
if (!request.body) return response.sendStatus(400);