HTTP/1.1 400 Bad Request
Date: Wed, 03 May 2023 18:07:18 GMT
Content-Type: application/json
Content-Length: 281
Connection: keep-alive
access-control-allow-origin: *
openai-organization: user-3gnce3zpj2anzef6p8wba7mj
openai-processing-ms: 10
openai-version: 2020-10-01
strict-transport-security: max-age=15724800; includeSubDomains
x-ratelimit-limit-requests: 3500
x-ratelimit-limit-tokens: 90000
x-ratelimit-remaining-requests: 3499
x-ratelimit-remaining-tokens: 87432
x-ratelimit-reset-requests: 17ms
x-ratelimit-reset-tokens: 1.712s
x-request-id: 460a8d364b9840fe37af7a1d168ef024
CF-Cache-Status: DYNAMIC
Server: cloudflare
CF-RAY: 7c1a715a1bd10968-HKG
alt-svc: h3=":443"; ma=86400, h3-29=":443"; ma=86400
{ "error": { "message": "This model's maximum context length is 4097 tokens. However, your messages resulted in 4146 tokens. Please reduce the length of the messages.", "type": "invalid_request_error", "param": "messages", "code": "context_length_exceeded" }}