Merge pull request #208 from bf62963/dev

This commit is contained in:
Cohee
2023-05-01 12:25:40 +03:00
committed by GitHub
3 changed files with 4 additions and 3 deletions

View File

@@ -126,6 +126,7 @@
<option value="gpt-3.5-turbo">gpt-3.5-turbo</option> <option value="gpt-3.5-turbo">gpt-3.5-turbo</option>
<option value="gpt-3.5-turbo-0301">gpt-3.5-turbo-0301</option> <option value="gpt-3.5-turbo-0301">gpt-3.5-turbo-0301</option>
<option value="gpt-4">gpt-4</option> <option value="gpt-4">gpt-4</option>
<option value="gpt-4-0314">gpt-4-0314</option>
<option value="gpt-4-32k">gpt-4-32k</option> <option value="gpt-4-32k">gpt-4-32k</option>
</select> </select>
</div> </div>

View File

@@ -1039,7 +1039,7 @@ $(document).ready(function () {
const value = $(this).val(); const value = $(this).val();
oai_settings.openai_model = value; oai_settings.openai_model = value;
if (value == 'gpt-4') { if (value == 'gpt-4' || value == 'gpt-4-0314') {
$('#openai_max_context').attr('max', gpt4_max); $('#openai_max_context').attr('max', gpt4_max);
} }
else if (value == 'gpt-4-32k') { else if (value == 'gpt-4-32k') {

View File

@@ -2159,7 +2159,7 @@ app.post("/openai_bias", jsonParser, async function (request, response) {
let result = {}; let result = {};
const tokenizer = tiktoken.encoding_for_model(request.query.model); const tokenizer = tiktoken.encoding_for_model(request.query.model === 'gpt-4-0314' ? 'gpt-4' : request.query.model);
for (const entry of request.body) { for (const entry of request.body) {
if (!entry || !entry.text) { if (!entry || !entry.text) {
@@ -2305,7 +2305,7 @@ app.post("/tokenize_openai", jsonParser, function (request, response_tokenize_op
const tokensPerMessage = request.query.model.includes('gpt-4') ? 3 : 4; const tokensPerMessage = request.query.model.includes('gpt-4') ? 3 : 4;
const tokensPadding = 3; const tokensPadding = 3;
const tokenizer = tiktoken.encoding_for_model(request.query.model); const tokenizer = tiktoken.encoding_for_model(request.query.model === 'gpt-4-0314' ? 'gpt-4' : request.query.model);
let num_tokens = 0; let num_tokens = 0;
for (const msg of request.body) { for (const msg of request.body) {