OpenAI proxy implementation - Stack Overflow

admin2025-04-21  1

I tried to implement a proxy for my requests to OpenAI, but I couldn't find an answer. OpenAI support didn't give me a clue either. You are my last hope.

import OpenAI from "openai";
import express from "express";
import bodyParser from "body-parser";
import cors from 'cors';
import { HttpsProxyAgent } from 'https-proxy-agent';
import dotenv from 'dotenv';
dotenv.config();

// Route all OpenAI traffic through the HTTPS proxy configured in PROXY_URL.
const proxyAgent = new HttpsProxyAgent(process.env.PROXY_URL);

// OpenAI SDK client; `httpAgent` makes every API request tunnel through the proxy.
const openai = new OpenAI({
    apiKey: process.env.OPENAI_API_KEY,
    httpAgent: proxyAgent,
});

// Assistant to run against — created beforehand in the OpenAI dashboard.
const assistantId = process.env.ASSISTANT_ID;

const PORT = 3344;
const app = express();

// Allow only the local Live Server dev origin, with cookies/credentials.
const corsOptions = {
    origin: ['http://127.0.0.1:5500'],
    methods: ['GET', 'POST'],
    credentials: true,
};
app.use(cors(corsOptions));

app.use(bodyParser.json());

// Dedicated preflight handler for the message endpoint (default cors() settings).
app.options('/message', cors());

// Fallback CORS middleware (runs after the `cors` package middleware above).
// BUG FIX: the original fell back to `Access-Control-Allow-Origin: *` while
// also sending `Access-Control-Allow-Credentials: true` — browsers reject
// that combination outright. Reflect a concrete request origin instead, and
// only claim credentials support when one is present.
app.use((req, res, next) => {
    const origin = req.headers.origin;
    if (origin) {
        res.setHeader('Access-Control-Allow-Origin', origin);
        res.setHeader('Access-Control-Allow-Credentials', 'true');
        // The response now varies by origin, so shared caches must key on it.
        res.setHeader('Vary', 'Origin');
    }
    res.setHeader('Access-Control-Allow-Methods', 'GET, POST, OPTIONS');
    res.setHeader('Access-Control-Allow-Headers', 'Content-Type, Authorization');
    if (req.method === 'OPTIONS') {
        // Short-circuit preflight requests with the headers set above.
        return res.sendStatus(200);
    }
    next();
});

// POST /message — forwards the user's message to an OpenAI Assistant and
// streams the reply back to the browser as Server-Sent Events (SSE).
app.post('/message', async (req, res) => {
    const message = req.body?.message;
    // Validate before committing to the SSE response, while a JSON 400 is
    // still possible.
    if (typeof message !== 'string' || message.length === 0) {
        return res.status(400).json({ error: "An error in request to OpenAI" });
    }

    res.setHeader('Content-Type', 'text/event-stream');
    res.setHeader('Cache-Control', 'no-cache');
    res.setHeader('Connection', 'keep-alive');

    // Emit one SSE event. BUG FIX: the SSE wire format requires every event
    // to be terminated by a blank line ("\n\n"); the original wrote a single
    // "\n" (or none), so EventSource clients buffered the data indefinitely.
    const sendEvent = (payload) => {
        res.write(`data: ${payload}\n\n`);
    };

    try {
        console.log('Received message:', message);

        const assistant = await openai.beta.assistants.retrieve(assistantId);
        console.log('Retrieved assistant:', assistant);

        // A fresh thread per request — this endpoint keeps no conversation state.
        const thread = await openai.beta.threads.create();
        console.log('Created thread:', thread);

        await openai.beta.threads.messages.create(thread.id, {
            role: "user",
            content: message
        });
        console.log('Message sent to thread:', message);

        openai.beta.threads.runs.stream(thread.id, {
            assistant_id: assistant.id
        })
        .on('textCreated', () => {
            sendEvent('\nassistant > \n');
        })
        .on('textDelta', (textDelta) => {
            // Encode for the HTML-rendering client: preserve spaces and newlines.
            const formattedText = textDelta.value.replace(/ /g, '&nbsp;').replace(/\n/g, '<br>');
            sendEvent(formattedText);
            console.log('Received text delta:', textDelta);
        })
        .on('toolCallCreated', (toolCall) => {
            sendEvent(`\nassistant > ${toolCall.type}`);
            console.log('Tool call created:', toolCall);
        })
        .on('toolCallDelta', (toolCallDelta) => {
            if (toolCallDelta.type === 'code_interpreter') {
                if (toolCallDelta.code_interpreter.input) {
                    sendEvent(toolCallDelta.code_interpreter.input);
                    console.log('Tool call input:', toolCallDelta.code_interpreter.input);
                }
                if (toolCallDelta.code_interpreter.outputs) {
                    sendEvent('\noutput >');
                    toolCallDelta.code_interpreter.outputs.forEach(output => {
                        if (output.type === "logs") {
                            sendEvent(`\n${output.logs}`);
                            console.log('Tool call logs:', output.logs);
                        }
                    });
                }
            }
        })
        // BUG FIX: the original attached no 'error' listener, so a failed run
        // (e.g. the proxy CONNECT failure in the question) became an unhandled
        // rejection and left the SSE connection hanging open.
        .on('error', (error) => {
            console.error('Stream error:', error);
            sendEvent('[stream error]');
            res.end();
        })
        .on('end', () => {
            res.end();
            console.log('Run ended.');
        });

    } catch (error) {
        console.error('Error:', error);
        // BUG FIX: once the SSE headers/body have been flushed a 500 status can
        // no longer be sent — close the stream instead of throwing ERR_HTTP_HEADERS_SENT.
        if (res.headersSent) {
            res.end();
        } else {
            res.status(500).json({ error: "An error in request to OpenAI" });
        }
    }
});

// Start accepting connections.
app.listen(PORT, () =>
    console.log(`The server is running on port ${PORT}`)
);

When I comment out the line with `httpAgent`, the server works fine — but then requests don't go through the proxy, which is a big problem. If I run the server in this state, I get the following in the console:

Received message: 2 + 2
Error: APIConnectionError: Connection error.
    at OpenAI.makeRequest (file:///c:/Users/tayc1/Desktop/Projects/ai_js_sample/node_modules/openai/core.mjs:316:19)
    at process.processTicksAndRejections (node:internal/process/task_queues:105:5)
    at async file:///c:/Users/tayc1/Desktop/Projects/ai_js_sample/api/chat.js:50:27 {
  status: undefined,
  headers: undefined,
  request_id: undefined,
  error: undefined,
  code: undefined,
  param: undefined,
  type: undefined,
  cause: FetchError: request to  failed, reason: Proxy connection ended before receiving CONNECT response
      at ClientRequest.<anonymous> (c:\Users\tayc1\Desktop\Projects\ai_js_sample\node_modules\node-fetch\lib\index.js:1501:11)
      at ClientRequest.emit (node:events:524:28)
      at emitErrorEvent (node:_http_client:104:11)
      at _destroy (node:_http_client:898:9)
      at onSocketNT (node:_http_client:918:5)
      at process.processTicksAndRejections (node:internal/process/task_queues:91:21) {
    type: 'system',
    errno: undefined,
    code: undefined
  }
}
转载请注明原文地址:http://anycun.com/QandA/1745229201a90508.html