I’m trying to use the OpenAI chat API from my code, but I keep getting a 400 error. This is my server.js (with my API key removed; the key itself is definitely valid, I’ve triple-checked).
const express = require('express');
const dotenv = require('dotenv');
const bodyParser = require('body-parser');
const { Configuration, OpenAIApi } = require('openai');

dotenv.config();

const app = express();
const port = process.env.PORT || 3001;

app.use(bodyParser.json());

const configuration = new Configuration({
  apiKey: 'removed',
});
const openai = new OpenAIApi(configuration);

app.listen(port, () => {
  console.log(`Server is running on port ${port}`);
});

app.post('/api/chat', async (req, res) => {
  try {
    const { input } = req.body;
    const response = await openai.createChatCompletion({
      model: 'gpt-3.5-turbo',
      messages: [{ role: 'user', content: input }],
    });
    const aiMessage = response.data.choices[0].message.content.trim();
    res.json({ aiMessage });
  } catch (error) {
    console.error('Error in API call:', error);
    res.status(500).json({ error: 'An error occurred during the API call' });
  }
});
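Based on the route above, the endpoint expects a JSON body with an input field. The equivalent of my Postman request in plain Node would look roughly like this (illustrative only, not my exact Postman payload; the message text is made up):

// Rough equivalent of the Postman request (Node 18+, global fetch).
// The input text here is just an example, not the real payload.
async function callLocalEndpoint() {
  const response = await fetch('http://localhost:3001/api/chat', {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({ input: 'Hello there' }),
  });
  console.log(await response.json());
}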
When I call the endpoint from Postman, I get this extremely long error in my server log:
[1] Error in API call: Error: Request failed with status code 400
[1] at createError (C:\Users\TheNomadicAspie\Desktop\menu-app\node_modules\axios\lib\core\createError.js:16:15)
[1] at settle (C:\Users\TheNomadicAspie\Desktop\menu-app\node_modules\axios\lib\core\settle.js:17:12)
[1] at IncomingMessage.handleStreamEnd (C:\Users\TheNomadicAspie\Desktop\menu-app\node_modules\axios\lib\adapters\http.js:322:11)
[1] at IncomingMessage.emit (node:events:525:35)
[1] at endReadableNT (node:internal/streams/readable:1359:12)
[1] at process.processTicksAndRejections (node:internal/process/task_queues:82:21) {
[1] config: {
[1] transitional: {
[1] silentJSONParsing: true,
[1] forcedJSONParsing: true,
[1] clarifyTimeoutError: false
[1] },
[1] adapter: [Function: httpAdapter],
[1] transformRequest: [ [Function: transformRequest] ],
[1] transformResponse: [ [Function: transformResponse] ],
[1] timeout: 0,
[1] xsrfCookieName: 'XSRF-TOKEN',
[1] xsrfHeaderName: 'X-XSRF-TOKEN',
[1] maxContentLength: -1,
[1] maxBodyLength: -1,
[1] validateStatus: [Function: validateStatus],
[1] headers: {
[1] Accept: 'application/json, text/plain, */*',
[1] 'Content-Type': 'application/json',
[1] 'User-Agent': 'OpenAI/NodeJS/3.2.1',
[1] Authorization: 'Bearer removed',
[1] 'Content-Length': 54
[1] },
[1] method: 'post',
[1] data: '{"model":"gpt-3.5-turbo","messages":[{"role":"user"}]}',
[1] url: 'https://api.openai.com/v1/chat/completions'
[1] },
[1] request: <ref *1> ClientRequest {
[1] _events: [Object: null prototype] {
[1] abort: [Function (anonymous)],
[1] aborted: [Function (anonymous)],
[1] connect: [Function (anonymous)],
[1] error: [Function (anonymous)],
[1] socket: [Function (anonymous)],
[1] timeout: [Function (anonymous)],
[1] finish: [Function: requestOnFinish]
[1] },
[1] _eventsCount: 7,
[1] _maxListeners: undefined,
[1] outputData: [],
[1] outputSize: 0,
[1] writable: true,
[1] destroyed: false,
[1] _last: true,
[1] chunkedEncoding: false,
[1] shouldKeepAlive: false,
[1] maxRequestsOnConnectionReached: false,
[1] _defaultKeepAlive: true,
[1] useChunkedEncodingByDefault: true,
[1] sendDate: false,
[1] _removedConnection: false,
[1] _removedContLen: false,
[1] _removedTE: false,
[1] strictContentLength: false,
[1] _contentLength: 54,
[1] _hasBody: true,
[1] _trailer: '',
[1] finished: true,
[1] _headerSent: true,
[1] _closed: false,
[1] socket: TLSSocket {
[1] _tlsOptions: [Object],
[1] _secureEstablished: true,
[1] _securePending: false,
[1] _newSessionPending: false,
[1] _controlReleased: true,
[1] secureConnecting: false,
[1] _SNICallback: null,
[1] servername: 'api.openai.com',
[1] alpnProtocol: false,
[1] authorized: true,
[1] authorizationError: null,
[1] encrypted: true,
[1] _events: [Object: null prototype],
[1] _eventsCount: 10,
[1] connecting: false,
[1] _hadError: false,
[1] _parent: null,
[1] _host: 'api.openai.com',
[1] _closeAfterHandlingError: false,
[1] _readableState: [ReadableState],
[1] _maxListeners: undefined,
[1] _writableState: [WritableState],
[1] allowHalfOpen: false,
[1] _sockname: null,
[1] _pendingData: null,
[1] _pendingEncoding: '',
[1] server: undefined,
[1] _server: null,
[1] ssl: [TLSWrap],
[1] _requestCert: true,
[1] _rejectUnauthorized: true,
[1] parser: null,
[1] _httpMessage: [Circular *1],
[1] [Symbol(res)]: [TLSWrap],
[1] [Symbol(verified)]: true,
[1] [Symbol(pendingSession)]: null,
[1] [Symbol(async_id_symbol)]: 25,
[1] [Symbol(kHandle)]: [TLSWrap],
[1] [Symbol(lastWriteQueueSize)]: 0,
[1] [Symbol(timeout)]: null,
[1] [Symbol(kBuffer)]: null,
[1] [Symbol(kBufferCb)]: null,
[1] [Symbol(kBufferGen)]: null,
[1] [Symbol(kCapture)]: false,
[1] [Symbol(kSetNoDelay)]: false,
[1] [Symbol(kSetKeepAlive)]: true,
[1] [Symbol(kSetKeepAliveInitialDelay)]: 60,
[1] [Symbol(kBytesRead)]: 0,
[1] [Symbol(kBytesWritten)]: 0,
[1] [Symbol(connect-options)]: [Object]
[1] },
[1] _header: 'POST /v1/chat/completions HTTP/1.1\r\n' +
[1] 'Accept: application/json, text/plain, */*\r\n' +
[1] 'Content-Type: application/json\r\n' +
[1] 'User-Agent: OpenAI/NodeJS/3.2.1\r\n' +
[1] 'Authorization: Bearer removed\r\n' +
[1] 'Content-Length: 54\r\n' +
[1] 'Host: api.openai.com\r\n' +
[1] 'Connection: close\r\n' +
[1] '\r\n',
[1] _keepAliveTimeout: 0,
[1] _onPendingData: [Function: nop],
[1] agent: Agent {
[1] _events: [Object: null prototype],
[1] _eventsCount: 2,
[1] _maxListeners: undefined,
[1] defaultPort: 443,
[1] protocol: 'https:',
[1] options: [Object: null prototype],
[1] requests: [Object: null prototype] {},
[1] sockets: [Object: null prototype],
[1] freeSockets: [Object: null prototype] {},
[1] keepAliveMsecs: 1000,
[1] keepAlive: false,
[1] maxSockets: Infinity,
[1] maxFreeSockets: 256,
[1] scheduling: 'lifo',
[1] maxTotalSockets: Infinity,
[1] totalSocketCount: 1,
[1] maxCachedSessions: 100,
[1] _sessionCache: [Object],
[1] [Symbol(kCapture)]: false
[1] },
[1] socketPath: undefined,
[1] method: 'POST',
[1] maxHeaderSize: undefined,
[1] insecureHTTPParser: undefined,
[1] joinDuplicateHeaders: undefined,
[1] path: '/v1/chat/completions',
[1] _ended: true,
[1] res: IncomingMessage {
[1] _readableState: [ReadableState],
[1] _events: [Object: null prototype],
[1] _eventsCount: 4,
[1] _maxListeners: undefined,
[1] socket: [TLSSocket],
[1] httpVersionMajor: 1,
[1] httpVersionMinor: 1,
[1] httpVersion: '1.1',
[1] complete: true,
[1] rawHeaders: [Array],
[1] rawTrailers: [],
[1] joinDuplicateHeaders: undefined,
[1] aborted: false,
[1] upgrade: false,
[1] url: '',
[1] method: null,
[1] statusCode: 400,
[1] statusMessage: 'Bad Request',
[1] client: [TLSSocket],
[1] _consuming: false,
[1] _dumped: false,
[1] req: [Circular *1],
[1] responseUrl: 'https://api.openai.com/v1/chat/completions',
[1] redirects: [],
[1] [Symbol(kCapture)]: false,
[1] [Symbol(kHeaders)]: [Object],
[1] [Symbol(kHeadersCount)]: 40,
[1] [Symbol(kTrailers)]: null,
[1] [Symbol(kTrailersCount)]: 0
[1] },
[1] aborted: false,
[1] timeoutCb: null,
[1] upgradeOrConnect: false,
[1] parser: null,
[1] maxHeadersCount: null,
[1] reusedSocket: false,
[1] host: 'api.openai.com',
[1] protocol: 'https:',
[1] _redirectable: Writable {
[1] _writableState: [WritableState],
[1] _events: [Object: null prototype],
[1] _eventsCount: 3,
[1] _maxListeners: undefined,
[1] _options: [Object],
[1] _ended: true,
[1] _ending: true,
[1] _redirectCount: 0,
[1] _redirects: [],
[1] _requestBodyLength: 54,
[1] _requestBodyBuffers: [],
[1] _onNativeResponse: [Function (anonymous)],
[1] _currentRequest: [Circular *1],
[1] _currentUrl: 'https://api.openai.com/v1/chat/completions',
[1] [Symbol(kCapture)]: false
[1] },
[1] [Symbol(kCapture)]: false,
[1] [Symbol(kBytesWritten)]: 0,
[1] [Symbol(kEndCalled)]: true,
[1] [Symbol(kNeedDrain)]: false,
[1] [Symbol(corked)]: 0,
[1] [Symbol(kOutHeaders)]: [Object: null prototype] {
[1] accept: [Array],
[1] 'content-type': [Array],
[1] 'user-agent': [Array],
[1] authorization: [Array],
[1] 'content-length': [Array],
[1] host: [Array]
[1] },
[1] [Symbol(errored)]: null,
[1] [Symbol(kUniqueHeaders)]: null
[1] },
[1] response: {
[1] status: 400,
[1] statusText: 'Bad Request',
[1] headers: {
[1] date: 'Tue, 16 May 2023 02:22:58 GMT',
[1] 'content-type': 'application/json',
[1] 'content-length': '160',
[1] connection: 'close',
[1] 'access-control-allow-origin': '*',
[1] 'openai-organization': 'user-tblz934t92ja8twwukcaheq7',
[1] 'openai-processing-ms': '208',
[1] 'openai-version': '2020-10-01',
[1] 'strict-transport-security': 'max-age=15724800; includeSubDomains',
[1] 'x-ratelimit-limit-requests': '3500',
[1] 'x-ratelimit-limit-tokens': '90000',
[1] 'x-ratelimit-remaining-requests': '3499',
[1] 'x-ratelimit-remaining-tokens': '89983',
[1] 'x-ratelimit-reset-requests': '17ms',
[1] 'x-ratelimit-reset-tokens': '11ms',
[1] 'x-request-id': '9291039cf2af6385af913b1be36d2803',
[1] 'cf-cache-status': 'DYNAMIC',
[1] server: 'cloudflare',
[1] 'cf-ray': '7c8027ec5fca09f1-LAS',
[1] 'alt-svc': 'h3=":443"; ma=86400, h3-29=":443"; ma=86400'
[1] },
[1] config: {
[1] transitional: [Object],
[1] adapter: [Function: httpAdapter],
[1] transformRequest: [Array],
[1] transformResponse: [Array],
[1] timeout: 0,
[1] xsrfCookieName: 'XSRF-TOKEN',
[1] xsrfHeaderName: 'X-XSRF-TOKEN',
[1] maxContentLength: -1,
[1] maxBodyLength: -1,
[1] validateStatus: [Function: validateStatus],
[1] headers: [Object],
[1] method: 'post',
[1] data: '{"model":"gpt-3.5-turbo","messages":[{"role":"user"}]}',
[1] url: 'https://api.openai.com/v1/chat/completions'
[1] },
[1] request: <ref *1> ClientRequest {
[1] _events: [Object: null prototype],
[1] _eventsCount: 7,
[1] _maxListeners: undefined,
[1] outputData: [],
[1] outputSize: 0,
[1] writable: true,
[1] destroyed: false,
[1] _last: true,
[1] chunkedEncoding: false,
[1] shouldKeepAlive: false,
[1] maxRequestsOnConnectionReached: false,
[1] _defaultKeepAlive: true,
[1] useChunkedEncodingByDefault: true,
[1] sendDate: false,
[1] _removedConnection: false,
[1] _removedContLen: false,
[1] _removedTE: false,
[1] strictContentLength: false,
[1] _contentLength: 54,
[1] _hasBody: true,
[1] _trailer: '',
[1] finished: true,
[1] _headerSent: true,
[1] _closed: false,
[1] socket: [TLSSocket],
[1] _header: 'POST /v1/chat/completions HTTP/1.1\r\n' +
[1] 'Accept: application/json, text/plain, */*\r\n' +
[1] 'Content-Type: application/json\r\n' +
[1] 'User-Agent: OpenAI/NodeJS/3.2.1\r\n' +
[1] 'Authorization: Bearer removed\r\n' +
[1] 'Content-Length: 54\r\n' +
[1] 'Host: api.openai.com\r\n' +
[1] 'Connection: close\r\n' +
[1] '\r\n',
[1] _keepAliveTimeout: 0,
[1] _onPendingData: [Function: nop],
[1] agent: [Agent],
[1] socketPath: undefined,
[1] method: 'POST',
[1] maxHeaderSize: undefined,
[1] insecureHTTPParser: undefined,
[1] joinDuplicateHeaders: undefined,
[1] path: '/v1/chat/completions',
[1] _ended: true,
[1] res: [IncomingMessage],
[1] aborted: false,
[1] timeoutCb: null,
[1] upgradeOrConnect: false,
[1] parser: null,
[1] maxHeadersCount: null,
[1] reusedSocket: false,
[1] host: 'api.openai.com',
[1] protocol: 'https:',
[1] _redirectable: [Writable],
[1] [Symbol(kCapture)]: false,
[1] [Symbol(kBytesWritten)]: 0,
[1] [Symbol(kEndCalled)]: true,
[1] [Symbol(kNeedDrain)]: false,
[1] [Symbol(corked)]: 0,
[1] [Symbol(kOutHeaders)]: [Object: null prototype],
[1] [Symbol(errored)]: null,
[1] [Symbol(kUniqueHeaders)]: null
[1] },
[1] data: { error: [Object] }
[1] },
[1] isAxiosError: true,
[1] toJSON: [Function: toJSON]
[1] }
I’ve looked at answers from people with similar problems and tried several different models, as well as other ways of calling OpenAI, including calling the URL directly instead of going through the openai package (roughly like the sketch below). I’m not sure what I’m doing wrong. How can I troubleshoot this?
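For reference, by "calling the URL directly" I mean hitting the chat completions endpoint myself rather than using the SDK, roughly along these lines (not my exact code; the key is assumed to be in OPENAI_API_KEY and the message text is just an example):

// Rough sketch of the direct call I tried (Node 18+, global fetch), not my exact code.
// Assumes the API key is available as OPENAI_API_KEY; 'Hello' stands in for the real input.
async function callChatDirectly() {
  const response = await fetch('https://api.openai.com/v1/chat/completions', {
    method: 'POST',
    headers: {
      'Content-Type': 'application/json',
      Authorization: `Bearer ${process.env.OPENAI_API_KEY}`,
    },
    body: JSON.stringify({
      model: 'gpt-3.5-turbo',
      messages: [{ role: 'user', content: 'Hello' }],
    }),
  });
  console.log(response.status, await response.json());
}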