Move prompt-converters.js 1 level up

Cohee committed 2024-03-20 01:18:51 +02:00
parent abb8bdbc1e
commit 30c52b5b27
3 changed files with 4 additions and 5 deletions

View File

@@ -1,11 +1,10 @@
 const express = require('express');
 const fetch = require('node-fetch').default;
-const { Readable } = require('stream');
 const { jsonParser } = require('../../express-common');
 const { CHAT_COMPLETION_SOURCES, GEMINI_SAFETY, BISON_SAFETY, OPENROUTER_HEADERS } = require('../../constants');
 const { forwardFetchResponse, getConfigValue, tryParse, uuidv4, mergeObjectWithYaml, excludeKeysByYaml, color } = require('../../util');
-const { convertClaudeMessages, convertGooglePrompt, convertTextCompletionPrompt } = require('../prompt-converters');
+const { convertClaudeMessages, convertGooglePrompt, convertTextCompletionPrompt } = require('../../prompt-converters');
 const { readSecret, SECRET_KEYS } = require('../secrets');
 const { getTokenizerModel, getSentencepiceTokenizer, getTiktokenTokenizer, sentencepieceTokenizers, TEXT_COMPLETION_MODELS } = require('../tokenizers');
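
The extra ../ in the import above is the whole point of the move: prompt-converters.js now sits one directory higher, so a module two levels below the source root needs one more step up. A minimal sketch of how the two paths resolve, assuming the importer lives in src/endpoints/backends/ and the moved file now lives directly under src/ (directory names are inferred from the relative requires, not stated in the diff):

// Sketch only; 'src/endpoints/backends' is an assumed location for the importing module.
const path = require('path');

const importerDir = 'src/endpoints/backends';
const oldTarget = path.join(importerDir, '../prompt-converters.js');    // resolves to src/endpoints/prompt-converters.js
const newTarget = path.join(importerDir, '../../prompt-converters.js'); // resolves to src/prompt-converters.js

console.log(`${oldTarget} -> ${newTarget}`);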

View File

@@ -4,7 +4,7 @@ const express = require('express');
 const { SentencePieceProcessor } = require('@agnai/sentencepiece-js');
 const tiktoken = require('@dqbd/tiktoken');
 const { Tokenizer } = require('@agnai/web-tokenizers');
-const { convertClaudePrompt, convertGooglePrompt } = require('./prompt-converters');
+const { convertClaudePrompt, convertGooglePrompt } = require('../prompt-converters');
 const { readSecret, SECRET_KEYS } = require('./secrets');
 const { TEXTGEN_TYPES } = require('../constants');
 const { jsonParser } = require('../express-common');
@@ -250,7 +250,7 @@ async function loadClaudeTokenizer(modelPath) {

 function countClaudeTokens(tokenizer, messages) {
     // Should be fine if we use the old conversion method instead of the messages API one i think?
-    const convertedPrompt = convertClaudePrompt(messages, false, false, false);
+    const convertedPrompt = convertClaudePrompt(messages, false, '', false, false, '', false);

     // Fallback to strlen estimation
     if (!tokenizer) {
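
The call site above now passes seven arguments to convertClaudePrompt instead of four, supplying empty strings and false for the additional options. For reference, a hypothetical call into countClaudeTokens might look like the following (the variable names and message contents are illustrative; as the diff's comment notes, a missing tokenizer falls back to a string-length estimate):

// Hypothetical usage; messages follow the { role, content } shape consumed by
// convertClaudePrompt. A null tokenizer exercises the string-length fallback.
const claudeTokenizer = null;
const messages = [
    { role: 'system', content: 'You are a helpful assistant.' },
    { role: 'user', content: 'Hello, Claude!' },
];
const tokenCount = countClaudeTokens(claudeTokenizer, messages);
console.log(`Estimated prompt tokens: ${tokenCount}`);
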
@@ -398,7 +398,7 @@ router.post('/google/count', jsonParser, async function (req, res) {
             accept: 'application/json',
             'content-type': 'application/json',
         },
-        body: JSON.stringify({ contents: convertGooglePrompt(req.body) }),
+        body: JSON.stringify({ contents: convertGooglePrompt(req.body, String(req.query.model)) }),
     };
     try {
         const response = await fetch(`https://generativelanguage.googleapis.com/v1beta/models/${req.query.model}:countTokens?key=${readSecret(SECRET_KEYS.MAKERSUITE)}`, options);
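
The route above posts the converted prompt to the Generative Language countTokens endpoint. A stripped-down sketch of the same request, keeping only what the hunk shows (the helper name and the apiKey parameter are hypothetical; the real code reads the key via readSecret):

// Illustrative standalone version of the request built above; the endpoint URL
// and body shape come from the diff, everything else is an assumption.
const fetch = require('node-fetch').default;

async function countGoogleTokens(model, contents, apiKey) {
    const response = await fetch(
        `https://generativelanguage.googleapis.com/v1beta/models/${model}:countTokens?key=${apiKey}`,
        {
            method: 'POST',
            headers: { accept: 'application/json', 'content-type': 'application/json' },
            body: JSON.stringify({ contents }),
        },
    );
    const data = await response.json();
    return data.totalTokens; // the countTokens response reports a totalTokens count
}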