| | import { getSystemPrompt, getModelReasoning, getUserAgent } from '../../configs/config.js'
|
| |
|
// Sentinel identity sentence emitted by Claude Code clients. Kept in one
// place so the two sanitization sites below cannot drift apart.
const CLAUDE_PROMPT_SENTINEL = "You are Claude Code, Anthropic's official CLI for Claude.";
const CLAUDE_PROMPT_REPLACEMENT = 'You are bot.';

// Replace the Claude Code identity sentence (first occurrence) in a text
// fragment. Passes `undefined`/`null` through unchanged, matching how the
// original forwarded missing `part.text` values.
function sanitizeText(text) {
  return text?.replace(CLAUDE_PROMPT_SENTINEL, CLAUDE_PROMPT_REPLACEMENT);
}

// Convert one Chat Completions message into a Responses-API input item.
// Assistant messages use output_* content types; all other roles use input_*.
function transformMessage(msg) {
  const isAssistant = msg.role === 'assistant';
  const textType = isAssistant ? 'output_text' : 'input_text';
  const imageType = isAssistant ? 'output_image' : 'input_image';
  const content = [];

  if (typeof msg.content === 'string') {
    content.push({ type: textType, text: sanitizeText(msg.content) });
  } else if (Array.isArray(msg.content)) {
    for (const part of msg.content) {
      if (part.type === 'text') {
        content.push({ type: textType, text: sanitizeText(part.text) });
      } else if (part.type === 'image_url') {
        content.push({ type: imageType, image_url: part.image_url });
      } else {
        // Unknown part types are forwarded untouched.
        content.push(part);
      }
    }
  }

  return { role: msg.role, content };
}

/**
 * Transform an OpenAI Chat Completions request body into a Responses-API
 * request body.
 *
 * - `messages` become `input` items (with text/image content re-typed).
 * - `max_tokens` / `max_completion_tokens` collapse into `max_output_tokens`.
 * - The first `system` message (plus the configured system prompt) becomes
 *   `instructions`, and system messages are removed from `input`.
 * - Reasoning effort is taken from per-model config; `'auto'` defers to the
 *   caller's own `reasoning` field.
 *
 * @param {object} openaiRequest - Chat Completions request body.
 * @returns {object} Responses-API request body (`store` forced to false).
 */
export function transformToOpenAI(openaiRequest) {
  const targetRequest = {
    model: openaiRequest.model,
    input: [],
    store: false
  };

  if (openaiRequest.stream !== undefined) {
    targetRequest.stream = openaiRequest.stream;
  }

  // Chat Completions accepts either the legacy max_tokens or the newer
  // max_completion_tokens; the Responses API has a single field. The truthy
  // check deliberately skips 0 (an invalid limit), as the original did.
  if (openaiRequest.max_tokens) {
    targetRequest.max_output_tokens = openaiRequest.max_tokens;
  } else if (openaiRequest.max_completion_tokens) {
    targetRequest.max_output_tokens = openaiRequest.max_completion_tokens;
  }

  if (Array.isArray(openaiRequest.messages)) {
    targetRequest.input = openaiRequest.messages.map(transformMessage);
  }

  // Responses API tools: disable strict schema validation on every tool.
  if (Array.isArray(openaiRequest.tools)) {
    targetRequest.tools = openaiRequest.tools.map((tool) => ({
      ...tool,
      strict: false
    }));
  }

  // Merge the configured system prompt with the caller's system message (only
  // the FIRST system message is used) into `instructions`, then strip system
  // items from `input` so they are not sent twice.
  const systemPrompt = getSystemPrompt();
  const systemMessage = openaiRequest.messages?.find((m) => m.role === 'system');

  if (systemMessage) {
    let userInstructions = '';
    if (typeof systemMessage.content === 'string') {
      userInstructions = systemMessage.content;
    } else if (Array.isArray(systemMessage.content)) {
      userInstructions = systemMessage.content
        .filter((p) => p.type === 'text')
        .map((p) => p.text)
        .join('\n');
    }
    targetRequest.instructions = systemPrompt + userInstructions;
    targetRequest.input = targetRequest.input.filter((m) => m.role !== 'system');
  } else if (systemPrompt) {
    targetRequest.instructions = systemPrompt;
  }

  // Per-model reasoning config: 'auto' passes the caller's reasoning field
  // through verbatim; an explicit effort level overrides it; anything else
  // leaves reasoning unset. (The original's `delete targetRequest.reasoning`
  // was dead code — reasoning is never assigned before this point.)
  const reasoningLevel = getModelReasoning(openaiRequest.model);
  if (reasoningLevel === 'auto') {
    if (openaiRequest.reasoning !== undefined) {
      targetRequest.reasoning = openaiRequest.reasoning;
    }
  } else if (['low', 'medium', 'high'].includes(reasoningLevel)) {
    targetRequest.reasoning = {
      effort: reasoningLevel,
      summary: 'auto'
    };
  }

  // Sampling / tool-call parameters pass through unchanged when present.
  for (const key of [
    'temperature',
    'top_p',
    'presence_penalty',
    'frequency_penalty',
    'parallel_tool_calls'
  ]) {
    if (openaiRequest[key] !== undefined) {
      targetRequest[key] = openaiRequest[key];
    }
  }

  return targetRequest;
}
|
| |
|
/**
 * Build the outbound HTTP header set for an upstream OpenAI-compatible call.
 *
 * Correlation IDs (`x-session-id`, `x-assistant-message-id`) and the
 * `x-stainless-*` telemetry headers are taken from the incoming client
 * headers when present, with generated/static fallbacks otherwise.
 *
 * @param {string} authHeader - Value forwarded as `authorization` ('' if absent).
 * @param {object} [clientHeaders={}] - Lower-cased incoming request headers.
 * @returns {object} Header map ready to send upstream.
 */
export function getOpenAIHeaders(authHeader, clientHeaders = {}) {
  const headers = {
    'content-type': 'application/json',
    'authorization': authHeader || '',
    'x-api-provider': 'azure_openai',
    'x-factory-client': 'cli',
    'x-session-id': clientHeaders['x-session-id'] || generateUUID(),
    'x-assistant-message-id': clientHeaders['x-assistant-message-id'] || generateUUID(),
    'user-agent': getUserAgent(),
    'connection': 'keep-alive'
  };

  // Stainless SDK fingerprint headers: echo the client's values, falling back
  // to a plausible default fingerprint per header.
  const stainlessDefaults = {
    'x-stainless-arch': 'x64',
    'x-stainless-lang': 'js',
    'x-stainless-os': 'MacOS',
    'x-stainless-runtime': 'node',
    'x-stainless-retry-count': '0',
    'x-stainless-package-version': '5.23.2',
    'x-stainless-runtime-version': 'v24.3.0'
  };

  for (const [name, fallback] of Object.entries(stainlessDefaults)) {
    headers[name] = clientHeaders[name] || fallback;
  }

  return headers;
}
|
| |
|
/**
 * Generate an RFC 4122 version-4 UUID string.
 *
 * Prefers the platform's cryptographically secure generator
 * (`crypto.randomUUID`, available in Node >= 16.17 / 19 and all modern
 * browsers) and falls back to the original Math.random()-based
 * implementation on older runtimes.
 *
 * @returns {string} UUID in 'xxxxxxxx-xxxx-4xxx-yxxx-xxxxxxxxxxxx' form.
 */
function generateUUID() {
  // Secure path: delegate to the Web Crypto API when available.
  if (typeof globalThis.crypto?.randomUUID === 'function') {
    return globalThis.crypto.randomUUID();
  }
  // Fallback: Math.random() is NOT cryptographically secure; acceptable here
  // only because these IDs are request-correlation headers, not secrets.
  return 'xxxxxxxx-xxxx-4xxx-yxxx-xxxxxxxxxxxx'.replace(/[xy]/g, (c) => {
    const r = (Math.random() * 16) | 0;
    const v = c === 'x' ? r : (r & 0x3) | 0x8; // 'y' nibble carries the RFC 4122 variant bits
    return v.toString(16);
  });
}
|
| |
|