diff --git a/cmd/herolauncher/main.go b/cmd/herolauncher/main.go index c0a9aea..0941b65 100644 --- a/cmd/herolauncher/main.go +++ b/cmd/herolauncher/main.go @@ -25,9 +25,6 @@ import ( "fmt" "log" "os" - - "github.com/freeflowuniverse/herolauncher/pkg/herolauncher" - _ "github.com/freeflowuniverse/herolauncher/pkg/herolauncher/docs" // Import generated swagger docs ) func main() { diff --git a/go.mod b/go.mod index 5c1a633..4d01573 100644 --- a/go.mod +++ b/go.mod @@ -71,7 +71,7 @@ require ( github.com/metoro-io/mcp-golang v0.8.0 // indirect github.com/mholt/archiver/v3 v3.5.1 // indirect github.com/nwaples/rardecode v1.1.0 // indirect - github.com/openai/openai-go v0.1.0-beta.9 // indirect + github.com/openai/openai-go v0.1.0-beta.9 // indirect github.com/pb33f/libopenapi v0.21.8 // indirect github.com/pierrec/lz4/v4 v4.1.2 // indirect github.com/pkg/errors v0.9.1 // indirect diff --git a/go.sum b/go.sum index 9938788..4de7078 100644 --- a/go.sum +++ b/go.sum @@ -160,8 +160,8 @@ github.com/mholt/archiver/v3 v3.5.1/go.mod h1:e3dqJ7H78uzsRSEACH1joayhuSyhnonssn github.com/niemeyer/pretty v0.0.0-20200227124842-a10e7caefd8e/go.mod h1:zD1mROLANZcx1PVRCS0qkT7pwLkGfwJo4zjcN/Tysno= github.com/nwaples/rardecode v1.1.0 h1:vSxaY8vQhOcVr4mm5e8XllHWTiM4JF507A0Katqw7MQ= github.com/nwaples/rardecode v1.1.0/go.mod h1:5DzqNKiOdpKKBH87u8VlvAnPZMXcGRhxWkRpHbbfGS0= -github.com/openai/openai-go v0.1.0-beta.9 h1:ABpubc5yU/3ejee2GgRrbFta81SG/d7bQbB8mIdP0Xo= -github.com/openai/openai-go v0.1.0-beta.9/go.mod h1:g461MYGXEXBVdV5SaR/5tNzNbSfwTBBefwc+LlDCK0Y= +github.com/openai/openai-go v0.1.0-beta.9 h1:ABpubc5yU/3ejee2GgRrbFta81SG/d7bQbB8mIdP0Xo= +github.com/openai/openai-go v0.1.0-beta.9/go.mod h1:g461MYGXEXBVdV5SaR/5tNzNbSfwTBBefwc+LlDCK0Y= github.com/pb33f/libopenapi v0.21.8 h1:Fi2dAogMwC6av/5n3YIo7aMOGBZH/fBMO4OnzFB3dQA= github.com/pb33f/libopenapi v0.21.8/go.mod h1:Gc8oQkjr2InxwumK0zOBtKN9gIlv9L2VmSVIUk2YxcU= github.com/pierrec/lz4/v4 v4.1.2 
h1:qvY3YFXRQE/XB8MlLzJH7mSzBs74eA2gg52YTk6jUPM= diff --git a/pkg/jobsmanager/README.md b/pkg/herojobs/README.md similarity index 93% rename from pkg/jobsmanager/README.md rename to pkg/herojobs/README.md index cb58923..84b2d77 100644 --- a/pkg/jobsmanager/README.md +++ b/pkg/herojobs/README.md @@ -34,7 +34,7 @@ Jobs are stored in both Redis and OurDB: - Handles all queue operations (adding/removing jobs) - Stores all running jobs for fast access - Used for real-time operations and status updates -- **Job Storage**: `herojobs:::` or legacy `jobsmanager:` +- **Job Storage**: `herojobs:::` or legacy `jobsmanager:` - **Queue**: `heroqueue::` #### OurDB @@ -85,14 +85,14 @@ The watchdog uses Go's concurrency primitives to safely manage multiple jobs: ```go // Initialize Redis client -redisClient, err := jobsmanager.NewRedisClient("localhost:6379", false) +redisClient, err := herojobs.NewRedisClient("localhost:6379", false) if err != nil { log.Fatalf("Failed to connect to Redis: %v", err) } defer redisClient.Close() // Create and start watchdog -watchdog := jobsmanager.NewWatchDog(redisClient) +watchdog := herojobs.NewWatchDog(redisClient) watchdog.Start() // Handle shutdown @@ -103,14 +103,14 @@ defer watchdog.Stop() ```go // Create a new job -job := jobsmanager.NewJob() +job := herojobs.NewJob() job.CircleID = "myCircle" job.Topic = "myTopic" job.Params = ` !!fake.return_success message: "This is a test job" ` -job.ParamsType = jobsmanager.ParamsTypeHeroScript +job.ParamsType = herojobs.ParamsTypeHeroScript job.Timeout = 30 // 30 seconds timeout // Save the job to OurDB to get an ID @@ -140,7 +140,7 @@ jobID := job.JobID job, err := redisClient.GetJob(jobID) if err != nil { // If not found in Redis, try OurDB for historical jobs - job = &jobsmanager.Job{JobID: jobID} + job = &herojobs.Job{JobID: jobID} if err := job.Load(); err != nil { log.Printf("Failed to load job: %v", err) return @@ -149,13 +149,13 @@ if err != nil { // Check job status switch job.Status { -case 
jobsmanager.JobStatusNew: +case herojobs.JobStatusNew: fmt.Println("Job is waiting to be processed") -case jobsmanager.JobStatusActive: +case herojobs.JobStatusActive: fmt.Println("Job is currently being processed") -case jobsmanager.JobStatusDone: +case herojobs.JobStatusDone: fmt.Printf("Job completed successfully: %s\n", job.Result) -case jobsmanager.JobStatusError: +case herojobs.JobStatusError: fmt.Printf("Job failed: %s\n", job.Error) } ``` diff --git a/pkg/jobsmanager/job.go b/pkg/herojobs/job.go similarity index 99% rename from pkg/jobsmanager/job.go rename to pkg/herojobs/job.go index 5501d31..7a85fca 100644 --- a/pkg/jobsmanager/job.go +++ b/pkg/herojobs/job.go @@ -1,4 +1,4 @@ -package jobsmanager +package herojobs import ( "encoding/json" diff --git a/pkg/jobsmanager/processjob.go b/pkg/herojobs/processjob.go similarity index 99% rename from pkg/jobsmanager/processjob.go rename to pkg/herojobs/processjob.go index a6ac4c6..1b0b7b4 100644 --- a/pkg/jobsmanager/processjob.go +++ b/pkg/herojobs/processjob.go @@ -1,4 +1,4 @@ -package jobsmanager +package herojobs import ( "context" diff --git a/pkg/jobsmanager/redis.go b/pkg/herojobs/redis.go similarity index 96% rename from pkg/jobsmanager/redis.go rename to pkg/herojobs/redis.go index 5843bf3..c48bbd2 100644 --- a/pkg/jobsmanager/redis.go +++ b/pkg/herojobs/redis.go @@ -1,4 +1,4 @@ -package jobsmanager +package herojobs import ( "context" @@ -82,7 +82,7 @@ func (r *RedisClient) GetJob(jobID interface{}) (*Job, error) { switch id := jobID.(type) { case uint32: // Legacy format for backward compatibility - storageKey = fmt.Sprintf("jobsmanager:%d", id) + storageKey = fmt.Sprintf("jobsmanager:%d", id) case string: // Check if this is a composite key (circleID:topic:jobID) parts := strings.Split(id, ":") @@ -103,10 +103,10 @@ func (r *RedisClient) GetJob(jobID interface{}) (*Job, error) { // Try to convert string to uint32 (legacy format) var numericID uint32 if _, err := fmt.Sscanf(id, "%d", &numericID); err == 
nil { - storageKey = fmt.Sprintf("jobsmanager:%d", numericID) + storageKey = fmt.Sprintf("jobsmanager:%d", numericID) } else { // Legacy string ID format - storageKey = fmt.Sprintf("jobsmanager:%s", id) + storageKey = fmt.Sprintf("jobsmanager:%s", id) } } default: @@ -139,7 +139,7 @@ func (r *RedisClient) DeleteJob(jobID interface{}) error { switch id := jobID.(type) { case uint32: // Legacy format for backward compatibility - storageKey = fmt.Sprintf("jobsmanager:%d", id) + storageKey = fmt.Sprintf("jobsmanager:%d", id) case string: // Check if this is a composite key (circleID:topic:jobID) parts := strings.Split(id, ":") @@ -160,10 +160,10 @@ func (r *RedisClient) DeleteJob(jobID interface{}) error { // Try to convert string to uint32 (legacy format) var numericID uint32 if _, err := fmt.Sscanf(id, "%d", &numericID); err == nil { - storageKey = fmt.Sprintf("jobsmanager:%d", numericID) + storageKey = fmt.Sprintf("jobsmanager:%d", numericID) } else { // Legacy string ID format - storageKey = fmt.Sprintf("jobsmanager:%s", id) + storageKey = fmt.Sprintf("jobsmanager:%s", id) } } default: @@ -238,7 +238,7 @@ func (r *RedisClient) QueueEmpty(circleID, topic string) error { } } else { // Handle legacy string IDs - storageKey := fmt.Sprintf("jobsmanager:%s", jobIDStr) + storageKey := fmt.Sprintf("jobsmanager:%s", jobIDStr) err := r.client.Del(r.ctx, storageKey).Err() if err != nil { return fmt.Errorf("failed to delete job %s: %w", jobIDStr, err) diff --git a/pkg/jobsmanager/watchdog.go b/pkg/herojobs/watchdog.go similarity index 99% rename from pkg/jobsmanager/watchdog.go rename to pkg/herojobs/watchdog.go index ebf88de..0290860 100644 --- a/pkg/jobsmanager/watchdog.go +++ b/pkg/herojobs/watchdog.go @@ -1,4 +1,4 @@ -package jobsmanager +package herojobs import ( "context" diff --git a/pkg/heroscript/handlerfactory/herohandler/cmd/example/main.go b/pkg/heroscript/handlerfactory/herohandler/cmd/example/main.go index b463258..fa5236b 100644 --- 
a/pkg/heroscript/handlerfactory/herohandler/cmd/example/main.go +++ b/pkg/heroscript/handlerfactory/herohandler/cmd/example/main.go @@ -4,7 +4,7 @@ import ( "log" "sync" - "github.com/freeflowuniverse/heroagent/pkg/handlerfactory/herohandler" + "github.com/freeflowuniverse/heroagent/pkg/heroscript/handlerfactory/herohandler" ) func main() { diff --git a/pkg/heroscript/handlerfactory/processmanagerhandler/examples/example_usage.go b/pkg/heroscript/handlerfactory/processmanagerhandler/examples/example_usage.go index ae36e94..06c3b5e 100644 --- a/pkg/heroscript/handlerfactory/processmanagerhandler/examples/example_usage.go +++ b/pkg/heroscript/handlerfactory/processmanagerhandler/examples/example_usage.go @@ -2,8 +2,6 @@ package main import ( "fmt" - "log" - "os" "github.com/freeflowuniverse/heroagent/pkg/heroscript/playbook" ) @@ -15,26 +13,26 @@ func main() { pb := playbook.New() // Start a simple process - startAction := pb.NewAction(1, "start", "process", 0, playbook.ActionTypeUnknown) + startAction := pb.NewAction("1", "start", "process", 0, playbook.ActionTypeUnknown) startAction.Params.Set("name", "example_process") startAction.Params.Set("command", "ping -c 60 localhost") startAction.Params.Set("log", "true") // List all processes - listAction := pb.NewAction(2, "list", "process", 0, playbook.ActionTypeUnknown) + listAction := pb.NewAction("2", "list", "process", 0, playbook.ActionTypeUnknown) listAction.Params.Set("format", "table") // Get status of a specific process - statusAction := pb.NewAction(3, "status", "process", 0, playbook.ActionTypeUnknown) + statusAction := pb.NewAction("3", "status", "process", 0, playbook.ActionTypeUnknown) statusAction.Params.Set("name", "example_process") // Get logs of a specific process - logsAction := pb.NewAction(4, "logs", "process", 0, playbook.ActionTypeUnknown) + logsAction := pb.NewAction("4", "logs", "process", 0, playbook.ActionTypeUnknown) logsAction.Params.Set("name", "example_process") 
logsAction.Params.Set("lines", "10") // Stop a process - stopAction := pb.NewAction(5, "stop", "process", 0, playbook.ActionTypeUnknown) + stopAction := pb.NewAction("5", "stop", "process", 0, playbook.ActionTypeUnknown) stopAction.Params.Set("name", "example_process") // Generate the heroscript diff --git a/pkg/heroservices/openai/cmd/main.go b/pkg/heroservices/openaiproxy/cmd/main.go similarity index 82% rename from pkg/heroservices/openai/cmd/main.go rename to pkg/heroservices/openaiproxy/cmd/main.go index 453ebca..3feb12d 100644 --- a/pkg/heroservices/openai/cmd/main.go +++ b/pkg/heroservices/openaiproxy/cmd/main.go @@ -9,7 +9,7 @@ import ( "syscall" "time" - proxy "github.com/freeflowuniverse/heroagent/pkg/proxies/openai" + proxy "github.com/freeflowuniverse/heroagent/pkg/heroservices/openaiproxy" "github.com/openai/openai-go" "github.com/openai/openai-go/option" ) @@ -37,15 +37,15 @@ func testProxyWithClient() { // Create a client that points to our proxy // Note: The server is using "/ai" as the prefix for all routes - client := openai.NewClient( + client := openai.NewClient( option.WithAPIKey("test-key"), // This is our test key, not a real OpenAI key option.WithBaseURL("http://localhost:8080/ai"), // Use the /ai prefix to match the server routes ) // Create a completion request - chatCompletion, err := client.Chat.Completions.New(context.Background(), openai.ChatCompletionNewParams{ - Messages: []openai.ChatCompletionMessageParamUnion{ - openai.UserMessage("Say this is a test"), + chatCompletion, err := client.Chat.Completions.New(context.Background(), openai.ChatCompletionNewParams{ + Messages: []openai.ChatCompletionMessageParamUnion{ + openai.UserMessage("Say this is a test"), }, Model: "gpt-3.5-turbo", // Use a model that our proxy supports }) @@ -70,9 +70,9 @@ func runServerMode() { // Create a proxy configuration config := proxy.ProxyConfig{ - Port: 8080, // Use a non-privileged port for testing - OpenAIBaseURL: 
"https://api.openai.com", // Default OpenAI API URL - DefaultOpenAIKey: openaiKey, // Fallback API key if user doesn't have one + Port: 8080, // Use a non-privileged port for testing + OpenAIBaseURL: "https://api.openaiproxy.com", // Default OpenAI API URL + DefaultOpenAIKey: openaiKey, // Fallback API key if user doesn't have one } // Create a new factory with the configuration diff --git a/pkg/heroservices/openai/factory.go b/pkg/heroservices/openaiproxy/factory.go similarity index 100% rename from pkg/heroservices/openai/factory.go rename to pkg/heroservices/openaiproxy/factory.go diff --git a/pkg/heroservices/openai/model.go b/pkg/heroservices/openaiproxy/model.go similarity index 100% rename from pkg/heroservices/openai/model.go rename to pkg/heroservices/openaiproxy/model.go diff --git a/pkg/heroservices/openai/openapi.yaml b/pkg/heroservices/openaiproxy/openapi.yaml similarity index 97% rename from pkg/heroservices/openai/openapi.yaml rename to pkg/heroservices/openaiproxy/openapi.yaml index 01ec62a..66c33a7 100644 --- a/pkg/heroservices/openai/openapi.yaml +++ b/pkg/heroservices/openaiproxy/openapi.yaml @@ -2,17 +2,17 @@ openapi: 3.0.0 info: title: OpenAI API description: The OpenAI REST API. Please see - https://platform.openai.com/docs/api-reference for more details. + https://platform.openaiproxy.com/docs/api-reference for more details. version: 2.3.0 - termsOfService: https://openai.com/policies/terms-of-use + termsOfService: https://openaiproxy.com/policies/terms-of-use contact: name: OpenAI Support - url: https://help.openai.com/ + url: https://help.openaiproxy.com/ license: name: MIT - url: https://github.com/openai/openai-openapi/blob/master/LICENSE + url: https://github.com/openaiproxy/openaiproxy-openapi/blob/master/LICENSE servers: - - url: https://api.openai.com/v1 + - url: https://api.openaiproxy.com/v1 tags: - name: Assistants description: Build Assistants that can call models and use tools. 
@@ -110,12 +110,12 @@ paths: examples: request: curl: | - curl "https://api.openai.com/v1/assistants?order=desc&limit=20" \ + curl "https://api.openaiproxy.com/v1/assistants?order=desc&limit=20" \ -H "Content-Type: application/json" \ -H "Authorization: Bearer $OPENAI_API_KEY" \ -H "OpenAI-Beta: assistants=v2" python: | - from openai import OpenAI + from openaiproxy import OpenAI client = OpenAI() my_assistants = client.beta.assistants.list( @@ -124,12 +124,12 @@ paths: ) print(my_assistants.data) node.js: |- - import OpenAI from "openai"; + import OpenAI from "openaiproxy"; - const openai = new OpenAI(); + const openaiproxy = new OpenAI(); async function main() { - const myAssistants = await openai.beta.assistants.list({ + const myAssistants = await openaiproxy.beta.assistants.list({ order: "desc", limit: "20", }); @@ -219,7 +219,7 @@ paths: - title: Code Interpreter request: curl: > - curl "https://api.openai.com/v1/assistants" \ + curl "https://api.openaiproxy.com/v1/assistants" \ -H "Content-Type: application/json" \ -H "Authorization: Bearer $OPENAI_API_KEY" \ -H "OpenAI-Beta: assistants=v2" \ @@ -230,7 +230,7 @@ paths: "model": "gpt-4o" }' python: > - from openai import OpenAI + from openaiproxy import OpenAI client = OpenAI() @@ -244,14 +244,14 @@ paths: print(my_assistant) node.js: >- - import OpenAI from "openai"; + import OpenAI from "openaiproxy"; - const openai = new OpenAI(); + const openaiproxy = new OpenAI(); async function main() { - const myAssistant = await openai.beta.assistants.create({ + const myAssistant = await openaiproxy.beta.assistants.create({ instructions: "You are a personal math tutor. 
When asked a question, write and run Python code to answer the question.", name: "Math Tutor", @@ -286,7 +286,7 @@ paths: - title: Files request: curl: > - curl https://api.openai.com/v1/assistants \ + curl https://api.openaiproxy.com/v1/assistants \ -H "Content-Type: application/json" \ -H "Authorization: Bearer $OPENAI_API_KEY" \ -H "OpenAI-Beta: assistants=v2" \ @@ -297,7 +297,7 @@ paths: "model": "gpt-4o" }' python: > - from openai import OpenAI + from openaiproxy import OpenAI client = OpenAI() @@ -312,14 +312,14 @@ paths: print(my_assistant) node.js: >- - import OpenAI from "openai"; + import OpenAI from "openaiproxy"; - const openai = new OpenAI(); + const openaiproxy = new OpenAI(); async function main() { - const myAssistant = await openai.beta.assistants.create({ + const myAssistant = await openaiproxy.beta.assistants.create({ instructions: "You are an HR bot, and you have access to files to answer employee questions about company policies.", name: "HR Helper", @@ -391,23 +391,23 @@ paths: examples: request: curl: | - curl https://api.openai.com/v1/assistants/asst_abc123 \ + curl https://api.openaiproxy.com/v1/assistants/asst_abc123 \ -H "Content-Type: application/json" \ -H "Authorization: Bearer $OPENAI_API_KEY" \ -H "OpenAI-Beta: assistants=v2" python: | - from openai import OpenAI + from openaiproxy import OpenAI client = OpenAI() my_assistant = client.beta.assistants.retrieve("asst_abc123") print(my_assistant) node.js: |- - import OpenAI from "openai"; + import OpenAI from "openaiproxy"; - const openai = new OpenAI(); + const openaiproxy = new OpenAI(); async function main() { - const myAssistant = await openai.beta.assistants.retrieve( + const myAssistant = await openaiproxy.beta.assistants.retrieve( "asst_abc123" ); @@ -467,7 +467,7 @@ paths: examples: request: curl: > - curl https://api.openai.com/v1/assistants/asst_abc123 \ + curl https://api.openaiproxy.com/v1/assistants/asst_abc123 \ -H "Content-Type: application/json" \ -H "Authorization: 
Bearer $OPENAI_API_KEY" \ -H "OpenAI-Beta: assistants=v2" \ @@ -477,7 +477,7 @@ paths: "model": "gpt-4o" }' python: > - from openai import OpenAI + from openaiproxy import OpenAI client = OpenAI() @@ -493,14 +493,14 @@ paths: print(my_updated_assistant) node.js: >- - import OpenAI from "openai"; + import OpenAI from "openaiproxy"; - const openai = new OpenAI(); + const openaiproxy = new OpenAI(); async function main() { - const myUpdatedAssistant = await openai.beta.assistants.update( + const myUpdatedAssistant = await openaiproxy.beta.assistants.update( "asst_abc123", { instructions: @@ -567,26 +567,26 @@ paths: examples: request: curl: | - curl https://api.openai.com/v1/assistants/asst_abc123 \ + curl https://api.openaiproxy.com/v1/assistants/asst_abc123 \ -H "Content-Type: application/json" \ -H "Authorization: Bearer $OPENAI_API_KEY" \ -H "OpenAI-Beta: assistants=v2" \ -X DELETE python: | - from openai import OpenAI + from openaiproxy import OpenAI client = OpenAI() response = client.beta.assistants.delete("asst_abc123") print(response) node.js: >- - import OpenAI from "openai"; + import OpenAI from "openaiproxy"; - const openai = new OpenAI(); + const openaiproxy = new OpenAI(); async function main() { - const response = await openai.beta.assistants.del("asst_abc123"); + const response = await openaiproxy.beta.assistants.del("asst_abc123"); console.log(response); } @@ -630,7 +630,7 @@ paths: examples: request: curl: | - curl https://api.openai.com/v1/audio/speech \ + curl https://api.openaiproxy.com/v1/audio/speech \ -H "Authorization: Bearer $OPENAI_API_KEY" \ -H "Content-Type: application/json" \ -d '{ @@ -641,10 +641,10 @@ paths: --output speech.mp3 python: | from pathlib import Path - import openai + import openaiproxy speech_file_path = Path(__file__).parent / "speech.mp3" - response = openai.audio.speech.create( + response = openaiproxy.audio.speech.create( model="gpt-4o-mini-tts", voice="alloy", input="The quick brown fox jumped over the lazy dog." 
@@ -655,17 +655,17 @@ paths: import path from "path"; - import OpenAI from "openai"; + import OpenAI from "openaiproxy"; - const openai = new OpenAI(); + const openaiproxy = new OpenAI(); const speechFile = path.resolve("./speech.mp3"); async function main() { - const mp3 = await openai.audio.speech.create({ + const mp3 = await openaiproxy.audio.speech.create({ model: "gpt-4o-mini-tts", voice: "alloy", input: "Today is a wonderful day to build something people love!", @@ -731,13 +731,13 @@ paths: - title: Default request: curl: | - curl https://api.openai.com/v1/audio/transcriptions \ + curl https://api.openaiproxy.com/v1/audio/transcriptions \ -H "Authorization: Bearer $OPENAI_API_KEY" \ -H "Content-Type: multipart/form-data" \ -F file="@/path/to/file/audio.mp3" \ -F model="gpt-4o-transcribe" python: | - from openai import OpenAI + from openaiproxy import OpenAI client = OpenAI() audio_file = open("speech.mp3", "rb") @@ -748,14 +748,14 @@ paths: javascript: > import fs from "fs"; - import OpenAI from "openai"; + import OpenAI from "openaiproxy"; - const openai = new OpenAI(); + const openaiproxy = new OpenAI(); async function main() { - const transcription = await openai.audio.transcriptions.create({ + const transcription = await openaiproxy.audio.transcriptions.create({ file: fs.createReadStream("audio.mp3"), model: "gpt-4o-transcribe", }); @@ -791,14 +791,14 @@ paths: - title: Streaming request: curl: | - curl https://api.openai.com/v1/audio/transcriptions \ + curl https://api.openaiproxy.com/v1/audio/transcriptions \ -H "Authorization: Bearer $OPENAI_API_KEY" \ -H "Content-Type: multipart/form-data" \ -F file="@/path/to/file/audio.mp3" \ -F model="gpt-4o-mini-transcribe" \ -F stream=true python: | - from openai import OpenAI + from openaiproxy import OpenAI client = OpenAI() audio_file = open("speech.mp3", "rb") @@ -812,11 +812,11 @@ paths: print(event) javascript: | import fs from "fs"; - import OpenAI from "openai"; + import OpenAI from "openaiproxy"; - const 
openai = new OpenAI(); + const openaiproxy = new OpenAI(); - const stream = await openai.audio.transcriptions.create({ + const stream = await openaiproxy.audio.transcriptions.create({ file: fs.createReadStream("audio.mp3"), model: "gpt-4o-mini-transcribe", stream: true, @@ -1007,7 +1007,7 @@ paths: - title: Logprobs request: curl: | - curl https://api.openai.com/v1/audio/transcriptions \ + curl https://api.openaiproxy.com/v1/audio/transcriptions \ -H "Authorization: Bearer $OPENAI_API_KEY" \ -H "Content-Type: multipart/form-data" \ -F file="@/path/to/file/audio.mp3" \ @@ -1015,7 +1015,7 @@ paths: -F model="gpt-4o-transcribe" \ -F response_format="json" python: | - from openai import OpenAI + from openaiproxy import OpenAI client = OpenAI() audio_file = open("speech.mp3", "rb") @@ -1030,14 +1030,14 @@ paths: javascript: > import fs from "fs"; - import OpenAI from "openai"; + import OpenAI from "openaiproxy"; - const openai = new OpenAI(); + const openaiproxy = new OpenAI(); async function main() { - const transcription = await openai.audio.transcriptions.create({ + const transcription = await openaiproxy.audio.transcriptions.create({ file: fs.createReadStream("audio.mp3"), model: "gpt-4o-transcribe", response_format: "json", @@ -1097,7 +1097,7 @@ paths: - title: Word timestamps request: curl: | - curl https://api.openai.com/v1/audio/transcriptions \ + curl https://api.openaiproxy.com/v1/audio/transcriptions \ -H "Authorization: Bearer $OPENAI_API_KEY" \ -H "Content-Type: multipart/form-data" \ -F file="@/path/to/file/audio.mp3" \ @@ -1105,7 +1105,7 @@ paths: -F model="whisper-1" \ -F response_format="verbose_json" python: | - from openai import OpenAI + from openaiproxy import OpenAI client = OpenAI() audio_file = open("speech.mp3", "rb") @@ -1120,14 +1120,14 @@ paths: javascript: > import fs from "fs"; - import OpenAI from "openai"; + import OpenAI from "openaiproxy"; - const openai = new OpenAI(); + const openaiproxy = new OpenAI(); async function main() { - const 
transcription = await openai.audio.transcriptions.create({ + const transcription = await openaiproxy.audio.transcriptions.create({ file: fs.createReadStream("audio.mp3"), model: "whisper-1", response_format: "verbose_json", @@ -1190,7 +1190,7 @@ paths: - title: Segment timestamps request: curl: | - curl https://api.openai.com/v1/audio/transcriptions \ + curl https://api.openaiproxy.com/v1/audio/transcriptions \ -H "Authorization: Bearer $OPENAI_API_KEY" \ -H "Content-Type: multipart/form-data" \ -F file="@/path/to/file/audio.mp3" \ @@ -1198,7 +1198,7 @@ paths: -F model="whisper-1" \ -F response_format="verbose_json" python: | - from openai import OpenAI + from openaiproxy import OpenAI client = OpenAI() audio_file = open("speech.mp3", "rb") @@ -1213,14 +1213,14 @@ paths: javascript: > import fs from "fs"; - import OpenAI from "openai"; + import OpenAI from "openaiproxy"; - const openai = new OpenAI(); + const openaiproxy = new OpenAI(); async function main() { - const transcription = await openai.audio.transcriptions.create({ + const transcription = await openaiproxy.audio.transcriptions.create({ file: fs.createReadStream("audio.mp3"), model: "whisper-1", response_format: "verbose_json", @@ -1312,13 +1312,13 @@ paths: examples: request: curl: | - curl https://api.openai.com/v1/audio/translations \ + curl https://api.openaiproxy.com/v1/audio/translations \ -H "Authorization: Bearer $OPENAI_API_KEY" \ -H "Content-Type: multipart/form-data" \ -F file="@/path/to/file/german.m4a" \ -F model="whisper-1" python: | - from openai import OpenAI + from openaiproxy import OpenAI client = OpenAI() audio_file = open("speech.mp3", "rb") @@ -1328,12 +1328,12 @@ paths: ) javascript: | import fs from "fs"; - import OpenAI from "openai"; + import OpenAI from "openaiproxy"; - const openai = new OpenAI(); + const openaiproxy = new OpenAI(); async function main() { - const translation = await openai.audio.translations.create({ + const translation = await 
openaiproxy.audio.translations.create({ file: fs.createReadStream("speech.mp3"), model: "whisper-1", }); @@ -1434,7 +1434,7 @@ paths: examples: request: curl: | - curl https://api.openai.com/v1/batches \ + curl https://api.openaiproxy.com/v1/batches \ -H "Authorization: Bearer $OPENAI_API_KEY" \ -H "Content-Type: application/json" \ -d '{ @@ -1443,7 +1443,7 @@ paths: "completion_window": "24h" }' python: | - from openai import OpenAI + from openaiproxy import OpenAI client = OpenAI() client.batches.create( @@ -1452,12 +1452,12 @@ paths: completion_window="24h" ) node: | - import OpenAI from "openai"; + import OpenAI from "openaiproxy"; - const openai = new OpenAI(); + const openaiproxy = new OpenAI(); async function main() { - const batch = await openai.batches.create({ + const batch = await openaiproxy.batches.create({ input_file_id: "file-abc123", endpoint: "/v1/chat/completions", completion_window: "24h" @@ -1536,21 +1536,21 @@ paths: examples: request: curl: | - curl https://api.openai.com/v1/batches?limit=2 \ + curl https://api.openaiproxy.com/v1/batches?limit=2 \ -H "Authorization: Bearer $OPENAI_API_KEY" \ -H "Content-Type: application/json" python: | - from openai import OpenAI + from openaiproxy import OpenAI client = OpenAI() client.batches.list() node: | - import OpenAI from "openai"; + import OpenAI from "openaiproxy"; - const openai = new OpenAI(); + const openaiproxy = new OpenAI(); async function main() { - const list = await openai.batches.list(); + const list = await openaiproxy.batches.list(); for await (const batch of list) { console.log(batch); @@ -1626,21 +1626,21 @@ paths: examples: request: curl: | - curl https://api.openai.com/v1/batches/batch_abc123 \ + curl https://api.openaiproxy.com/v1/batches/batch_abc123 \ -H "Authorization: Bearer $OPENAI_API_KEY" \ -H "Content-Type: application/json" \ python: | - from openai import OpenAI + from openaiproxy import OpenAI client = OpenAI() client.batches.retrieve("batch_abc123") node: | - import 
OpenAI from "openai"; + import OpenAI from "openaiproxy"; - const openai = new OpenAI(); + const openaiproxy = new OpenAI(); async function main() { - const batch = await openai.batches.retrieve("batch_abc123"); + const batch = await openaiproxy.batches.retrieve("batch_abc123"); console.log(batch); } @@ -1708,22 +1708,22 @@ paths: examples: request: curl: | - curl https://api.openai.com/v1/batches/batch_abc123/cancel \ + curl https://api.openaiproxy.com/v1/batches/batch_abc123/cancel \ -H "Authorization: Bearer $OPENAI_API_KEY" \ -H "Content-Type: application/json" \ -X POST python: | - from openai import OpenAI + from openaiproxy import OpenAI client = OpenAI() client.batches.cancel("batch_abc123") node: | - import OpenAI from "openai"; + import OpenAI from "openaiproxy"; - const openai = new OpenAI(); + const openaiproxy = new OpenAI(); async function main() { - const batch = await openai.batches.cancel("batch_abc123"); + const batch = await openaiproxy.batches.cancel("batch_abc123"); console.log(batch); } @@ -1828,11 +1828,11 @@ paths: examples: request: curl: | - curl https://api.openai.com/v1/chat/completions \ + curl https://api.openaiproxy.com/v1/chat/completions \ -H "Authorization: Bearer $OPENAI_API_KEY" \ -H "Content-Type: application/json" python: | - from openai import OpenAI + from openaiproxy import OpenAI client = OpenAI() completions = client.chat.completions.list() @@ -1949,7 +1949,7 @@ paths: - title: Default request: curl: | - curl https://api.openai.com/v1/chat/completions \ + curl https://api.openaiproxy.com/v1/chat/completions \ -H "Content-Type: application/json" \ -H "Authorization: Bearer $OPENAI_API_KEY" \ -d '{ @@ -1966,7 +1966,7 @@ paths: ] }' python: > - from openai import OpenAI + from openaiproxy import OpenAI client = OpenAI() @@ -1982,14 +1982,14 @@ paths: print(completion.choices[0].message) node.js: > - import OpenAI from "openai"; + import OpenAI from "openaiproxy"; - const openai = new OpenAI(); + const openaiproxy = new 
OpenAI(); async function main() { - const completion = await openai.chat.completions.create({ + const completion = await openaiproxy.chat.completions.create({ messages: [{ role: "developer", content: "You are a helpful assistant." }], model: "VAR_chat_model_id", store: true, @@ -2059,7 +2059,7 @@ paths: - title: Image input request: curl: > - curl https://api.openai.com/v1/chat/completions \ + curl https://api.openaiproxy.com/v1/chat/completions \ -H "Content-Type: application/json" \ -H "Authorization: Bearer $OPENAI_API_KEY" \ -d '{ @@ -2084,7 +2084,7 @@ paths: "max_tokens": 300 }' python: > - from openai import OpenAI + from openaiproxy import OpenAI client = OpenAI() @@ -2112,14 +2112,14 @@ paths: print(response.choices[0]) node.js: > - import OpenAI from "openai"; + import OpenAI from "openaiproxy"; - const openai = new OpenAI(); + const openaiproxy = new OpenAI(); async function main() { - const response = await openai.chat.completions.create({ + const response = await openaiproxy.chat.completions.create({ model: "gpt-4o", messages: [ { @@ -2209,7 +2209,7 @@ paths: - title: Streaming request: curl: | - curl https://api.openai.com/v1/chat/completions \ + curl https://api.openaiproxy.com/v1/chat/completions \ -H "Content-Type: application/json" \ -H "Authorization: Bearer $OPENAI_API_KEY" \ -d '{ @@ -2227,7 +2227,7 @@ paths: "stream": true }' python: > - from openai import OpenAI + from openaiproxy import OpenAI client = OpenAI() @@ -2245,14 +2245,14 @@ paths: for chunk in completion: print(chunk.choices[0].delta) node.js: > - import OpenAI from "openai"; + import OpenAI from "openaiproxy"; - const openai = new OpenAI(); + const openaiproxy = new OpenAI(); async function main() { - const completion = await openai.chat.completions.create({ + const completion = await openaiproxy.chat.completions.create({ model: "VAR_chat_model_id", messages: [ {"role": "developer", "content": "You are a helpful assistant."}, @@ -2328,7 +2328,7 @@ paths: - title: Functions 
request: curl: > - curl https://api.openai.com/v1/chat/completions \ + curl https://api.openaiproxy.com/v1/chat/completions \ -H "Content-Type: application/json" \ @@ -2368,7 +2368,7 @@ paths: "tool_choice": "auto" }' python: > - from openai import OpenAI + from openaiproxy import OpenAI client = OpenAI() @@ -2407,10 +2407,10 @@ paths: print(completion) node.js: > - import OpenAI from "openai"; + import OpenAI from "openaiproxy"; - const openai = new OpenAI(); + const openaiproxy = new OpenAI(); async function main() { @@ -2436,7 +2436,7 @@ paths: } ]; - const response = await openai.chat.completions.create({ + const response = await openaiproxy.chat.completions.create({ model: "gpt-4o", messages: messages, tools: tools, @@ -2546,7 +2546,7 @@ paths: - title: Logprobs request: curl: | - curl https://api.openai.com/v1/chat/completions \ + curl https://api.openaiproxy.com/v1/chat/completions \ -H "Content-Type: application/json" \ -H "Authorization: Bearer $OPENAI_API_KEY" \ -d '{ @@ -2561,7 +2561,7 @@ paths: "top_logprobs": 2 }' python: | - from openai import OpenAI + from openaiproxy import OpenAI client = OpenAI() completion = client.chat.completions.create( @@ -2576,12 +2576,12 @@ paths: print(completion.choices[0].message) print(completion.choices[0].logprobs) node.js: | - import OpenAI from "openai"; + import OpenAI from "openaiproxy"; - const openai = new OpenAI(); + const openaiproxy = new OpenAI(); async function main() { - const completion = await openai.chat.completions.create({ + const completion = await openaiproxy.chat.completions.create({ messages: [{ role: "user", content: "Hello!" 
}], model: "VAR_chat_model_id", logprobs: true, @@ -2850,11 +2850,11 @@ paths: examples: request: curl: | - curl https://api.openai.com/v1/chat/completions/chatcmpl-abc123 \ + curl https://api.openaiproxy.com/v1/chat/completions/chatcmpl-abc123 \ -H "Authorization: Bearer $OPENAI_API_KEY" \ -H "Content-Type: application/json" python: > - from openai import OpenAI + from openaiproxy import OpenAI client = OpenAI() @@ -2951,12 +2951,12 @@ paths: request: curl: > curl -X POST - https://api.openai.com/v1/chat/completions/chat_abc123 \ + https://api.openaiproxy.com/v1/chat/completions/chat_abc123 \ -H "Authorization: Bearer $OPENAI_API_KEY" \ -H "Content-Type: application/json" \ -d '{"metadata": {"foo": "bar"}}' python: > - from openai import OpenAI + from openaiproxy import OpenAI client = OpenAI() @@ -3039,11 +3039,11 @@ paths: request: curl: > curl -X DELETE - https://api.openai.com/v1/chat/completions/chat_abc123 \ + https://api.openaiproxy.com/v1/chat/completions/chat_abc123 \ -H "Authorization: Bearer $OPENAI_API_KEY" \ -H "Content-Type: application/json" python: > - from openai import OpenAI + from openaiproxy import OpenAI client = OpenAI() @@ -3120,11 +3120,11 @@ paths: request: curl: > curl - https://api.openai.com/v1/chat/completions/chat_abc123/messages \ + https://api.openaiproxy.com/v1/chat/completions/chat_abc123/messages \ -H "Authorization: Bearer $OPENAI_API_KEY" \ -H "Content-Type: application/json" python: > - from openai import OpenAI + from openaiproxy import OpenAI client = OpenAI() @@ -3186,7 +3186,7 @@ paths: - title: No streaming request: curl: | - curl https://api.openai.com/v1/completions \ + curl https://api.openaiproxy.com/v1/completions \ -H "Content-Type: application/json" \ -H "Authorization: Bearer $OPENAI_API_KEY" \ -d '{ @@ -3196,7 +3196,7 @@ paths: "temperature": 0 }' python: | - from openai import OpenAI + from openaiproxy import OpenAI client = OpenAI() client.completions.create( @@ -3206,12 +3206,12 @@ paths: temperature=0 ) 
node.js: |- - import OpenAI from "openai"; + import OpenAI from "openaiproxy"; - const openai = new OpenAI(); + const openaiproxy = new OpenAI(); async function main() { - const completion = await openai.completions.create({ + const completion = await openaiproxy.completions.create({ model: "VAR_completion_model_id", prompt: "Say this is a test.", max_tokens: 7, @@ -3245,7 +3245,7 @@ paths: - title: Streaming request: curl: | - curl https://api.openai.com/v1/completions \ + curl https://api.openaiproxy.com/v1/completions \ -H "Content-Type: application/json" \ -H "Authorization: Bearer $OPENAI_API_KEY" \ -d '{ @@ -3256,7 +3256,7 @@ paths: "stream": true }' python: | - from openai import OpenAI + from openaiproxy import OpenAI client = OpenAI() for chunk in client.completions.create( @@ -3268,12 +3268,12 @@ paths: ): print(chunk.choices[0].text) node.js: |- - import OpenAI from "openai"; + import OpenAI from "openaiproxy"; - const openai = new OpenAI(); + const openaiproxy = new OpenAI(); async function main() { - const stream = await openai.completions.create({ + const stream = await openaiproxy.completions.create({ model: "VAR_completion_model_id", prompt: "Say this is a test.", stream: true, @@ -3326,7 +3326,7 @@ paths: examples: request: curl: | - curl https://api.openai.com/v1/embeddings \ + curl https://api.openaiproxy.com/v1/embeddings \ -H "Authorization: Bearer $OPENAI_API_KEY" \ -H "Content-Type: application/json" \ -d '{ @@ -3335,7 +3335,7 @@ paths: "encoding_format": "float" }' python: | - from openai import OpenAI + from openaiproxy import OpenAI client = OpenAI() client.embeddings.create( @@ -3344,12 +3344,12 @@ paths: encoding_format="float" ) node.js: | - import OpenAI from "openai"; + import OpenAI from "openaiproxy"; - const openai = new OpenAI(); + const openaiproxy = new OpenAI(); async function main() { - const embedding = await openai.embeddings.create({ + const embedding = await openaiproxy.embeddings.create({ model: "text-embedding-ada-002", 
input: "The quick brown fox jumped over the lazy dog", encoding_format: "float", @@ -3460,20 +3460,20 @@ paths: examples: request: curl: | - curl https://api.openai.com/v1/files \ + curl https://api.openaiproxy.com/v1/files \ -H "Authorization: Bearer $OPENAI_API_KEY" python: | - from openai import OpenAI + from openaiproxy import OpenAI client = OpenAI() client.files.list() node.js: |- - import OpenAI from "openai"; + import OpenAI from "openaiproxy"; - const openai = new OpenAI(); + const openaiproxy = new OpenAI(); async function main() { - const list = await openai.files.list(); + const list = await openaiproxy.files.list(); for await (const file of list) { console.log(file); @@ -3529,7 +3529,7 @@ paths: [format](/docs/api-reference/batch/request-input). - Please [contact us](https://help.openai.com/) if you need to increase + Please [contact us](https://help.openaiproxy.com/) if you need to increase these storage limits. requestBody: required: true @@ -3551,12 +3551,12 @@ paths: examples: request: curl: | - curl https://api.openai.com/v1/files \ + curl https://api.openaiproxy.com/v1/files \ -H "Authorization: Bearer $OPENAI_API_KEY" \ -F purpose="fine-tune" \ -F file="@mydata.jsonl" python: | - from openai import OpenAI + from openaiproxy import OpenAI client = OpenAI() client.files.create( @@ -3565,12 +3565,12 @@ paths: ) node.js: |- import fs from "fs"; - import OpenAI from "openai"; + import OpenAI from "openaiproxy"; - const openai = new OpenAI(); + const openaiproxy = new OpenAI(); async function main() { - const file = await openai.files.create({ + const file = await openaiproxy.files.create({ file: fs.createReadStream("mydata.jsonl"), purpose: "fine-tune", }); @@ -3615,21 +3615,21 @@ paths: examples: request: curl: | - curl https://api.openai.com/v1/files/file-abc123 \ + curl https://api.openaiproxy.com/v1/files/file-abc123 \ -X DELETE \ -H "Authorization: Bearer $OPENAI_API_KEY" python: | - from openai import OpenAI + from openaiproxy import OpenAI 
client = OpenAI() client.files.delete("file-abc123") node.js: |- - import OpenAI from "openai"; + import OpenAI from "openaiproxy"; - const openai = new OpenAI(); + const openaiproxy = new OpenAI(); async function main() { - const file = await openai.files.del("file-abc123"); + const file = await openaiproxy.files.del("file-abc123"); console.log(file); } @@ -3669,20 +3669,20 @@ paths: examples: request: curl: | - curl https://api.openai.com/v1/files/file-abc123 \ + curl https://api.openaiproxy.com/v1/files/file-abc123 \ -H "Authorization: Bearer $OPENAI_API_KEY" python: | - from openai import OpenAI + from openaiproxy import OpenAI client = OpenAI() client.files.retrieve("file-abc123") node.js: |- - import OpenAI from "openai"; + import OpenAI from "openaiproxy"; - const openai = new OpenAI(); + const openaiproxy = new OpenAI(); async function main() { - const file = await openai.files.retrieve("file-abc123"); + const file = await openaiproxy.files.retrieve("file-abc123"); console.log(file); } @@ -3724,20 +3724,20 @@ paths: examples: request: curl: | - curl https://api.openai.com/v1/files/file-abc123/content \ + curl https://api.openaiproxy.com/v1/files/file-abc123/content \ -H "Authorization: Bearer $OPENAI_API_KEY" > file.jsonl python: | - from openai import OpenAI + from openaiproxy import OpenAI client = OpenAI() content = client.files.content("file-abc123") node.js: | - import OpenAI from "openai"; + import OpenAI from "openaiproxy"; - const openai = new OpenAI(); + const openaiproxy = new OpenAI(); async function main() { - const file = await openai.files.content("file-abc123"); + const file = await openaiproxy.files.content("file-abc123"); console.log(file); } @@ -3811,7 +3811,7 @@ paths: request: curl: > curl - https://api.openai.com/v1/fine_tuning/checkpoints/ft:gpt-4o-mini-2024-07-18:org:weather:B7R9VjQd/permissions + https://api.openaiproxy.com/v1/fine_tuning/checkpoints/ft:gpt-4o-mini-2024-07-18:org:weather:B7R9VjQd/permissions \ -H "Authorization: 
Bearer $OPENAI_API_KEY" response: | @@ -3878,7 +3878,7 @@ paths: request: curl: > curl - https://api.openai.com/v1/fine_tuning/checkpoints/ft:gpt-4o-mini-2024-07-18:org:weather:B7R9VjQd/permissions + https://api.openaiproxy.com/v1/fine_tuning/checkpoints/ft:gpt-4o-mini-2024-07-18:org:weather:B7R9VjQd/permissions \ -H "Authorization: Bearer $OPENAI_API_KEY" -d '{"project_ids": ["proj_abGMw1llN8IrBb6SvvY5A1iH"]}' @@ -3933,7 +3933,7 @@ paths: request: curl: > curl - https://api.openai.com/v1/fine_tuning/checkpoints/ft:gpt-4o-mini-2024-07-18:org:weather:B7R9VjQd/permissions/cp_zc4Q7MP6XxulcVzj4MZdwsAB + https://api.openaiproxy.com/v1/fine_tuning/checkpoints/ft:gpt-4o-mini-2024-07-18:org:weather:B7R9VjQd/permissions/cp_zc4Q7MP6XxulcVzj4MZdwsAB \ -H "Authorization: Bearer $OPENAI_API_KEY" response: | @@ -3978,7 +3978,7 @@ paths: - title: Default request: curl: | - curl https://api.openai.com/v1/fine_tuning/jobs \ + curl https://api.openaiproxy.com/v1/fine_tuning/jobs \ -H "Content-Type: application/json" \ -H "Authorization: Bearer $OPENAI_API_KEY" \ -d '{ @@ -3986,7 +3986,7 @@ paths: "model": "gpt-4o-mini" }' python: | - from openai import OpenAI + from openaiproxy import OpenAI client = OpenAI() client.fine_tuning.jobs.create( @@ -3994,12 +3994,12 @@ paths: model="gpt-4o-mini" ) node.js: | - import OpenAI from "openai"; + import OpenAI from "openaiproxy"; - const openai = new OpenAI(); + const openaiproxy = new OpenAI(); async function main() { - const fineTune = await openai.fineTuning.jobs.create({ + const fineTune = await openaiproxy.fineTuning.jobs.create({ training_file: "file-abc123" }); @@ -4034,7 +4034,7 @@ paths: - title: Epochs request: curl: | - curl https://api.openai.com/v1/fine_tuning/jobs \ + curl https://api.openaiproxy.com/v1/fine_tuning/jobs \ -H "Content-Type: application/json" \ -H "Authorization: Bearer $OPENAI_API_KEY" \ -d '{ @@ -4050,7 +4050,7 @@ paths: } }' python: | - from openai import OpenAI + from openaiproxy import OpenAI client = OpenAI() 
client.fine_tuning.jobs.create( @@ -4066,12 +4066,12 @@ paths: } ) node.js: | - import OpenAI from "openai"; + import OpenAI from "openaiproxy"; - const openai = new OpenAI(); + const openaiproxy = new OpenAI(); async function main() { - const fineTune = await openai.fineTuning.jobs.create({ + const fineTune = await openaiproxy.fineTuning.jobs.create({ training_file: "file-abc123", model: "gpt-4o-mini", method: { @@ -4116,7 +4116,7 @@ paths: - title: Validation file request: curl: | - curl https://api.openai.com/v1/fine_tuning/jobs \ + curl https://api.openaiproxy.com/v1/fine_tuning/jobs \ -H "Content-Type: application/json" \ -H "Authorization: Bearer $OPENAI_API_KEY" \ -d '{ @@ -4125,7 +4125,7 @@ paths: "model": "gpt-4o-mini" }' python: | - from openai import OpenAI + from openaiproxy import OpenAI client = OpenAI() client.fine_tuning.jobs.create( @@ -4134,12 +4134,12 @@ paths: model="gpt-4o-mini" ) node.js: | - import OpenAI from "openai"; + import OpenAI from "openaiproxy"; - const openai = new OpenAI(); + const openaiproxy = new OpenAI(); async function main() { - const fineTune = await openai.fineTuning.jobs.create({ + const fineTune = await openaiproxy.fineTuning.jobs.create({ training_file: "file-abc123", validation_file: "file-abc123" }); @@ -4175,7 +4175,7 @@ paths: - title: DPO request: curl: | - curl https://api.openai.com/v1/fine_tuning/jobs \ + curl https://api.openaiproxy.com/v1/fine_tuning/jobs \ -H "Content-Type: application/json" \ -H "Authorization: Bearer $OPENAI_API_KEY" \ -d '{ @@ -4219,7 +4219,7 @@ paths: - title: W&B Integration request: curl: | - curl https://api.openai.com/v1/fine_tuning/jobs \ + curl https://api.openaiproxy.com/v1/fine_tuning/jobs \ -H "Content-Type: application/json" \ -H "Authorization: Bearer $OPENAI_API_KEY" \ -d '{ @@ -4322,21 +4322,21 @@ paths: request: curl: > curl - https://api.openai.com/v1/fine_tuning/jobs?limit=2&metadata[key]=value + https://api.openaiproxy.com/v1/fine_tuning/jobs?limit=2&metadata[key]=value \ 
-H "Authorization: Bearer $OPENAI_API_KEY" python: | - from openai import OpenAI + from openaiproxy import OpenAI client = OpenAI() client.fine_tuning.jobs.list() node.js: |- - import OpenAI from "openai"; + import OpenAI from "openaiproxy"; - const openai = new OpenAI(); + const openaiproxy = new OpenAI(); async function main() { - const list = await openai.fineTuning.jobs.list(); + const list = await openaiproxy.fineTuning.jobs.list(); for await (const fineTune of list) { console.log(fineTune); @@ -4402,23 +4402,23 @@ paths: request: curl: > curl - https://api.openai.com/v1/fine_tuning/jobs/ft-AF1WoRqd3aJAHsqc9NY7iL8F + https://api.openaiproxy.com/v1/fine_tuning/jobs/ft-AF1WoRqd3aJAHsqc9NY7iL8F \ -H "Authorization: Bearer $OPENAI_API_KEY" python: | - from openai import OpenAI + from openaiproxy import OpenAI client = OpenAI() client.fine_tuning.jobs.retrieve("ftjob-abc123") node.js: > - import OpenAI from "openai"; + import OpenAI from "openaiproxy"; - const openai = new OpenAI(); + const openaiproxy = new OpenAI(); async function main() { - const fineTune = await openai.fineTuning.jobs.retrieve("ftjob-abc123"); + const fineTune = await openaiproxy.fineTuning.jobs.retrieve("ftjob-abc123"); console.log(fineTune); } @@ -4493,22 +4493,22 @@ paths: request: curl: > curl -X POST - https://api.openai.com/v1/fine_tuning/jobs/ftjob-abc123/cancel \ + https://api.openaiproxy.com/v1/fine_tuning/jobs/ftjob-abc123/cancel \ -H "Authorization: Bearer $OPENAI_API_KEY" python: | - from openai import OpenAI + from openaiproxy import OpenAI client = OpenAI() client.fine_tuning.jobs.cancel("ftjob-abc123") node.js: >- - import OpenAI from "openai"; + import OpenAI from "openaiproxy"; - const openai = new OpenAI(); + const openaiproxy = new OpenAI(); async function main() { - const fineTune = await openai.fineTuning.jobs.cancel("ftjob-abc123"); + const fineTune = await openaiproxy.fineTuning.jobs.cancel("ftjob-abc123"); console.log(fineTune); } @@ -4575,7 +4575,7 @@ paths: request: 
curl: > curl - https://api.openai.com/v1/fine_tuning/jobs/ftjob-abc123/checkpoints + https://api.openaiproxy.com/v1/fine_tuning/jobs/ftjob-abc123/checkpoints \ -H "Authorization: Bearer $OPENAI_API_KEY" response: > @@ -4655,10 +4655,10 @@ paths: request: curl: > curl - https://api.openai.com/v1/fine_tuning/jobs/ftjob-abc123/events \ + https://api.openaiproxy.com/v1/fine_tuning/jobs/ftjob-abc123/events \ -H "Authorization: Bearer $OPENAI_API_KEY" python: | - from openai import OpenAI + from openaiproxy import OpenAI client = OpenAI() client.fine_tuning.jobs.list_events( @@ -4666,14 +4666,14 @@ paths: limit=2 ) node.js: >- - import OpenAI from "openai"; + import OpenAI from "openaiproxy"; - const openai = new OpenAI(); + const openaiproxy = new OpenAI(); async function main() { - const list = await openai.fineTuning.list_events(id="ftjob-abc123", limit=2); + const list = await openaiproxy.fineTuning.list_events(id="ftjob-abc123", limit=2); for await (const fineTune of list) { console.log(fineTune); @@ -4700,7 +4700,7 @@ paths: "id": "ft-event-tyiGuB72evQncpH87xe505Sv", "created_at": 1721764800, "level": "info", - "message": "New fine-tuned model created: ft:gpt-4o-mini:openai::7p4lURel", + "message": "New fine-tuned model created: ft:gpt-4o-mini:openaiproxy::7p4lURel", "data": null, "type": "message" } @@ -4733,7 +4733,7 @@ paths: examples: request: curl: | - curl https://api.openai.com/v1/images/edits \ + curl https://api.openaiproxy.com/v1/images/edits \ -H "Authorization: Bearer $OPENAI_API_KEY" \ -F image="@otter.png" \ -F mask="@mask.png" \ @@ -4741,7 +4741,7 @@ paths: -F n=2 \ -F size="1024x1024" python: | - from openai import OpenAI + from openaiproxy import OpenAI client = OpenAI() client.images.edit( @@ -4753,12 +4753,12 @@ paths: ) node.js: |- import fs from "fs"; - import OpenAI from "openai"; + import OpenAI from "openaiproxy"; - const openai = new OpenAI(); + const openaiproxy = new OpenAI(); async function main() { - const image = await 
openai.images.edit({ + const image = await openaiproxy.images.edit({ image: fs.createReadStream("otter.png"), mask: fs.createReadStream("mask.png"), prompt: "A cute baby sea otter wearing a beret", @@ -4822,7 +4822,7 @@ paths: examples: request: curl: | - curl https://api.openai.com/v1/images/generations \ + curl https://api.openaiproxy.com/v1/images/generations \ -H "Content-Type: application/json" \ -H "Authorization: Bearer $OPENAI_API_KEY" \ -d '{ @@ -4832,7 +4832,7 @@ paths: "size": "1024x1024" }' python: | - from openai import OpenAI + from openaiproxy import OpenAI client = OpenAI() client.images.generate( @@ -4842,14 +4842,14 @@ paths: size="1024x1024" ) node.js: >- - import OpenAI from "openai"; + import OpenAI from "openaiproxy"; - const openai = new OpenAI(); + const openaiproxy = new OpenAI(); async function main() { - const image = await openai.images.generate({ model: "dall-e-3", prompt: "A cute baby sea otter" }); + const image = await openaiproxy.images.generate({ model: "dall-e-3", prompt: "A cute baby sea otter" }); console.log(image.data); } @@ -4911,13 +4911,13 @@ paths: examples: request: curl: | - curl https://api.openai.com/v1/images/variations \ + curl https://api.openaiproxy.com/v1/images/variations \ -H "Authorization: Bearer $OPENAI_API_KEY" \ -F image="@otter.png" \ -F n=2 \ -F size="1024x1024" python: | - from openai import OpenAI + from openaiproxy import OpenAI client = OpenAI() response = client.images.create_variation( @@ -4927,12 +4927,12 @@ paths: ) node.js: |- import fs from "fs"; - import OpenAI from "openai"; + import OpenAI from "openaiproxy"; - const openai = new OpenAI(); + const openaiproxy = new OpenAI(); async function main() { - const image = await openai.images.createVariation({ + const image = await openaiproxy.images.createVariation({ image: fs.createReadStream("otter.png"), }); @@ -4991,20 +4991,20 @@ paths: examples: request: curl: | - curl https://api.openai.com/v1/models \ + curl 
https://api.openaiproxy.com/v1/models \ -H "Authorization: Bearer $OPENAI_API_KEY" python: | - from openai import OpenAI + from openaiproxy import OpenAI client = OpenAI() client.models.list() node.js: |- - import OpenAI from "openai"; + import OpenAI from "openaiproxy"; - const openai = new OpenAI(); + const openaiproxy = new OpenAI(); async function main() { - const list = await openai.models.list(); + const list = await openaiproxy.models.list(); for await (const model of list) { console.log(model); @@ -5044,7 +5044,7 @@ paths: "id": "model-id-2", "object": "model", "created": 1686935002, - "owned_by": "openai" + "owned_by": "openaiproxy" }, ], "object": "list" @@ -5081,20 +5081,20 @@ paths: examples: request: curl: | - curl https://api.openai.com/v1/models/VAR_chat_model_id \ + curl https://api.openaiproxy.com/v1/models/VAR_chat_model_id \ -H "Authorization: Bearer $OPENAI_API_KEY" python: | - from openai import OpenAI + from openaiproxy import OpenAI client = OpenAI() client.models.retrieve("VAR_chat_model_id") node.js: |- - import OpenAI from "openai"; + import OpenAI from "openaiproxy"; - const openai = new OpenAI(); + const openaiproxy = new OpenAI(); async function main() { - const model = await openai.models.retrieve("VAR_chat_model_id"); + const model = await openaiproxy.models.retrieve("VAR_chat_model_id"); console.log(model); } @@ -5117,7 +5117,7 @@ paths: "id": "VAR_chat_model_id", "object": "model", "created": 1686935002, - "owned_by": "openai" + "owned_by": "openaiproxy" } delete: operationId: deleteModel @@ -5148,24 +5148,24 @@ paths: request: curl: > curl - https://api.openai.com/v1/models/ft:gpt-4o-mini:acemeco:suffix:abc123 + https://api.openaiproxy.com/v1/models/ft:gpt-4o-mini:acemeco:suffix:abc123 \ -X DELETE \ -H "Authorization: Bearer $OPENAI_API_KEY" python: | - from openai import OpenAI + from openaiproxy import OpenAI client = OpenAI() client.models.delete("ft:gpt-4o-mini:acemeco:suffix:abc123") node.js: >- - import OpenAI from "openai"; 
+ import OpenAI from "openaiproxy"; - const openai = new OpenAI(); + const openaiproxy = new OpenAI(); async function main() { - const model = await openai.models.del("ft:gpt-4o-mini:acemeco:suffix:abc123"); + const model = await openaiproxy.models.del("ft:gpt-4o-mini:acemeco:suffix:abc123"); console.log(model); } @@ -5224,14 +5224,14 @@ paths: - title: Single string request: curl: | - curl https://api.openai.com/v1/moderations \ + curl https://api.openaiproxy.com/v1/moderations \ -H "Content-Type: application/json" \ -H "Authorization: Bearer $OPENAI_API_KEY" \ -d '{ "input": "I want to kill them." }' python: > - from openai import OpenAI + from openaiproxy import OpenAI client = OpenAI() @@ -5241,14 +5241,14 @@ paths: print(moderation) node.js: > - import OpenAI from "openai"; + import OpenAI from "openaiproxy"; - const openai = new OpenAI(); + const openaiproxy = new OpenAI(); async function main() { - const moderation = await openai.moderations.create({ input: "I want to kill them." }); + const moderation = await openaiproxy.moderations.create({ input: "I want to kill them." }); console.log(moderation); } @@ -5310,7 +5310,7 @@ paths: - title: Image and text request: curl: > - curl https://api.openai.com/v1/moderations \ + curl https://api.openaiproxy.com/v1/moderations \ -X POST \ -H "Content-Type: application/json" \ -H "Authorization: Bearer $OPENAI_API_KEY" \ @@ -5327,7 +5327,7 @@ paths: ] }' python: > - from openai import OpenAI + from openaiproxy import OpenAI client = OpenAI() @@ -5350,12 +5350,12 @@ paths: print(response) node.js: > - import OpenAI from "openai"; + import OpenAI from "openaiproxy"; - const openai = new OpenAI(); + const openaiproxy = new OpenAI(); - const moderation = await openai.moderations.create({ + const moderation = await openaiproxy.moderations.create({ model: "omni-moderation-latest", input: [ { type: "text", text: "...text to classify goes here..." 
}, @@ -5506,7 +5506,7 @@ paths: request: curl: > curl - https://api.openai.com/v1/organization/admin_api_keys?after=key_abc&limit=20 + https://api.openaiproxy.com/v1/organization/admin_api_keys?after=key_abc&limit=20 \ -H "Authorization: Bearer $OPENAI_ADMIN_KEY" \ -H "Content-Type: application/json" @@ -5564,7 +5564,7 @@ paths: examples: request: curl: > - curl -X POST https://api.openai.com/v1/organization/admin_api_keys + curl -X POST https://api.openaiproxy.com/v1/organization/admin_api_keys \ -H "Authorization: Bearer $OPENAI_ADMIN_KEY" \ -H "Content-Type: application/json" \ @@ -5614,7 +5614,7 @@ paths: examples: request: curl: > - curl https://api.openai.com/v1/organization/admin_api_keys/key_abc + curl https://api.openaiproxy.com/v1/organization/admin_api_keys/key_abc \ -H "Authorization: Bearer $OPENAI_ADMIN_KEY" \ -H "Content-Type: application/json" @@ -5670,7 +5670,7 @@ paths: request: curl: > curl -X DELETE - https://api.openai.com/v1/organization/admin_api_keys/key_abc \ + https://api.openaiproxy.com/v1/organization/admin_api_keys/key_abc \ -H "Authorization: Bearer $OPENAI_ADMIN_KEY" \ -H "Content-Type: application/json" response: | @@ -5806,7 +5806,7 @@ paths: examples: request: curl: | - curl https://api.openai.com/v1/organization/audit_logs \ + curl https://api.openaiproxy.com/v1/organization/audit_logs \ -H "Authorization: Bearer $OPENAI_ADMIN_KEY" \ -H "Content-Type: application/json" response: > @@ -5952,7 +5952,7 @@ paths: request: curl: > curl - "https://api.openai.com/v1/organization/costs?start_time=1730419200&limit=1" + "https://api.openaiproxy.com/v1/organization/costs?start_time=1730419200&limit=1" \ -H "Authorization: Bearer $OPENAI_ADMIN_KEY" \ @@ -6023,7 +6023,7 @@ paths: request: curl: > curl - https://api.openai.com/v1/organization/invites?after=invite-abc&limit=20 + https://api.openaiproxy.com/v1/organization/invites?after=invite-abc&limit=20 \ -H "Authorization: Bearer $OPENAI_ADMIN_KEY" \ -H "Content-Type: application/json" @@ 
-6074,7 +6074,7 @@ paths: examples: request: curl: | - curl -X POST https://api.openai.com/v1/organization/invites \ + curl -X POST https://api.openaiproxy.com/v1/organization/invites \ -H "Authorization: Bearer $OPENAI_ADMIN_KEY" \ -H "Content-Type: application/json" \ -d '{ @@ -6141,7 +6141,7 @@ paths: examples: request: curl: | - curl https://api.openai.com/v1/organization/invites/invite-abc \ + curl https://api.openaiproxy.com/v1/organization/invites/invite-abc \ -H "Authorization: Bearer $OPENAI_ADMIN_KEY" \ -H "Content-Type: application/json" response: | @@ -6184,7 +6184,7 @@ paths: request: curl: > curl -X DELETE - https://api.openai.com/v1/organization/invites/invite-abc \ + https://api.openaiproxy.com/v1/organization/invites/invite-abc \ -H "Authorization: Bearer $OPENAI_ADMIN_KEY" \ -H "Content-Type: application/json" response: | @@ -6242,7 +6242,7 @@ paths: request: curl: > curl - https://api.openai.com/v1/organization/projects?after=proj_abc&limit=20&include_archived=false + https://api.openaiproxy.com/v1/organization/projects?after=proj_abc&limit=20&include_archived=false \ -H "Authorization: Bearer $OPENAI_ADMIN_KEY" \ -H "Content-Type: application/json" @@ -6291,7 +6291,7 @@ paths: examples: request: curl: | - curl -X POST https://api.openai.com/v1/organization/projects \ + curl -X POST https://api.openaiproxy.com/v1/organization/projects \ -H "Authorization: Bearer $OPENAI_ADMIN_KEY" \ -H "Content-Type: application/json" \ -d '{ @@ -6336,7 +6336,7 @@ paths: examples: request: curl: | - curl https://api.openai.com/v1/organization/projects/proj_abc \ + curl https://api.openaiproxy.com/v1/organization/projects/proj_abc \ -H "Authorization: Bearer $OPENAI_ADMIN_KEY" \ -H "Content-Type: application/json" response: | @@ -6388,7 +6388,7 @@ paths: request: curl: > curl -X POST - https://api.openai.com/v1/organization/projects/proj_abc \ + https://api.openaiproxy.com/v1/organization/projects/proj_abc \ -H "Authorization: Bearer $OPENAI_ADMIN_KEY" \ -H 
"Content-Type: application/json" \ -d '{ @@ -6443,7 +6443,7 @@ paths: request: curl: > curl - https://api.openai.com/v1/organization/projects/proj_abc/api_keys?after=key_abc&limit=20 + https://api.openaiproxy.com/v1/organization/projects/proj_abc/api_keys?after=key_abc&limit=20 \ -H "Authorization: Bearer $OPENAI_ADMIN_KEY" \ -H "Content-Type: application/json" @@ -6510,7 +6510,7 @@ paths: request: curl: > curl - https://api.openai.com/v1/organization/projects/proj_abc/api_keys/key_abc + https://api.openaiproxy.com/v1/organization/projects/proj_abc/api_keys/key_abc \ -H "Authorization: Bearer $OPENAI_ADMIN_KEY" \ -H "Content-Type: application/json" @@ -6574,7 +6574,7 @@ paths: request: curl: > curl -X DELETE - https://api.openai.com/v1/organization/projects/proj_abc/api_keys/key_abc + https://api.openaiproxy.com/v1/organization/projects/proj_abc/api_keys/key_abc \ -H "Authorization: Bearer $OPENAI_ADMIN_KEY" \ -H "Content-Type: application/json" @@ -6614,7 +6614,7 @@ paths: request: curl: > curl -X POST - https://api.openai.com/v1/organization/projects/proj_abc/archive \ + https://api.openaiproxy.com/v1/organization/projects/proj_abc/archive \ -H "Authorization: Bearer $OPENAI_ADMIN_KEY" \ -H "Content-Type: application/json" response: | @@ -6685,7 +6685,7 @@ paths: request: curl: > curl - https://api.openai.com/v1/organization/projects/proj_abc/rate_limits?after=rl_xxx&limit=20 + https://api.openaiproxy.com/v1/organization/projects/proj_abc/rate_limits?after=rl_xxx&limit=20 \ -H "Authorization: Bearer $OPENAI_ADMIN_KEY" \ -H "Content-Type: application/json" @@ -6760,7 +6760,7 @@ paths: request: curl: > curl -X POST - https://api.openai.com/v1/organization/projects/proj_abc/rate_limits/rl_xxx + https://api.openaiproxy.com/v1/organization/projects/proj_abc/rate_limits/rl_xxx \ -H "Authorization: Bearer $OPENAI_ADMIN_KEY" \ -H "Content-Type: application/json" \ @@ -6836,7 +6836,7 @@ paths: request: curl: > curl - 
https://api.openai.com/v1/organization/projects/proj_abc/service_accounts?after=custom_id&limit=20 + https://api.openaiproxy.com/v1/organization/projects/proj_abc/service_accounts?after=custom_id&limit=20 \ -H "Authorization: Bearer $OPENAI_ADMIN_KEY" \ -H "Content-Type: application/json" @@ -6900,7 +6900,7 @@ paths: request: curl: > curl -X POST - https://api.openai.com/v1/organization/projects/proj_abc/service_accounts + https://api.openaiproxy.com/v1/organization/projects/proj_abc/service_accounts \ -H "Authorization: Bearer $OPENAI_ADMIN_KEY" \ -H "Content-Type: application/json" \ @@ -6958,7 +6958,7 @@ paths: request: curl: > curl - https://api.openai.com/v1/organization/projects/proj_abc/service_accounts/svc_acct_abc + https://api.openaiproxy.com/v1/organization/projects/proj_abc/service_accounts/svc_acct_abc \ -H "Authorization: Bearer $OPENAI_ADMIN_KEY" \ -H "Content-Type: application/json" @@ -7005,7 +7005,7 @@ paths: request: curl: > curl -X DELETE - https://api.openai.com/v1/organization/projects/proj_abc/service_accounts/svc_acct_abc + https://api.openaiproxy.com/v1/organization/projects/proj_abc/service_accounts/svc_acct_abc \ -H "Authorization: Bearer $OPENAI_ADMIN_KEY" \ -H "Content-Type: application/json" @@ -7070,7 +7070,7 @@ paths: request: curl: > curl - https://api.openai.com/v1/organization/projects/proj_abc/users?after=user_abc&limit=20 + https://api.openaiproxy.com/v1/organization/projects/proj_abc/users?after=user_abc&limit=20 \ -H "Authorization: Bearer $OPENAI_ADMIN_KEY" \ -H "Content-Type: application/json" @@ -7134,7 +7134,7 @@ paths: request: curl: > curl -X POST - https://api.openai.com/v1/organization/projects/proj_abc/users \ + https://api.openaiproxy.com/v1/organization/projects/proj_abc/users \ -H "Authorization: Bearer $OPENAI_ADMIN_KEY" \ -H "Content-Type: application/json" \ -d '{ @@ -7185,7 +7185,7 @@ paths: request: curl: > curl - https://api.openai.com/v1/organization/projects/proj_abc/users/user_abc + 
https://api.openaiproxy.com/v1/organization/projects/proj_abc/users/user_abc \ -H "Authorization: Bearer $OPENAI_ADMIN_KEY" \ -H "Content-Type: application/json" @@ -7246,7 +7246,7 @@ paths: request: curl: > curl -X POST - https://api.openai.com/v1/organization/projects/proj_abc/users/user_abc + https://api.openaiproxy.com/v1/organization/projects/proj_abc/users/user_abc \ -H "Authorization: Bearer $OPENAI_ADMIN_KEY" \ -H "Content-Type: application/json" \ @@ -7303,7 +7303,7 @@ paths: request: curl: > curl -X DELETE - https://api.openai.com/v1/organization/projects/proj_abc/users/user_abc + https://api.openaiproxy.com/v1/organization/projects/proj_abc/users/user_abc \ -H "Authorization: Bearer $OPENAI_ADMIN_KEY" \ -H "Content-Type: application/json" @@ -7426,7 +7426,7 @@ paths: request: curl: > curl - "https://api.openai.com/v1/organization/usage/audio_speeches?start_time=1730419200&limit=1" + "https://api.openaiproxy.com/v1/organization/usage/audio_speeches?start_time=1730419200&limit=1" \ -H "Authorization: Bearer $OPENAI_ADMIN_KEY" \ @@ -7569,7 +7569,7 @@ paths: request: curl: > curl - "https://api.openai.com/v1/organization/usage/audio_transcriptions?start_time=1730419200&limit=1" + "https://api.openaiproxy.com/v1/organization/usage/audio_transcriptions?start_time=1730419200&limit=1" \ -H "Authorization: Bearer $OPENAI_ADMIN_KEY" \ @@ -7685,7 +7685,7 @@ paths: request: curl: > curl - "https://api.openai.com/v1/organization/usage/code_interpreter_sessions?start_time=1730419200&limit=1" + "https://api.openaiproxy.com/v1/organization/usage/code_interpreter_sessions?start_time=1730419200&limit=1" \ -H "Authorization: Bearer $OPENAI_ADMIN_KEY" \ @@ -7833,7 +7833,7 @@ paths: request: curl: > curl - "https://api.openai.com/v1/organization/usage/completions?start_time=1730419200&limit=1" + "https://api.openaiproxy.com/v1/organization/usage/completions?start_time=1730419200&limit=1" \ -H "Authorization: Bearer $OPENAI_ADMIN_KEY" \ @@ -7981,7 +7981,7 @@ paths: request: 
curl: > curl - "https://api.openai.com/v1/organization/usage/embeddings?start_time=1730419200&limit=1" + "https://api.openaiproxy.com/v1/organization/usage/embeddings?start_time=1730419200&limit=1" \ -H "Authorization: Bearer $OPENAI_ADMIN_KEY" \ @@ -8157,7 +8157,7 @@ paths: request: curl: > curl - "https://api.openai.com/v1/organization/usage/images?start_time=1730419200&limit=1" + "https://api.openaiproxy.com/v1/organization/usage/images?start_time=1730419200&limit=1" \ -H "Authorization: Bearer $OPENAI_ADMIN_KEY" \ @@ -8302,7 +8302,7 @@ paths: request: curl: > curl - "https://api.openai.com/v1/organization/usage/moderations?start_time=1730419200&limit=1" + "https://api.openaiproxy.com/v1/organization/usage/moderations?start_time=1730419200&limit=1" \ -H "Authorization: Bearer $OPENAI_ADMIN_KEY" \ @@ -8417,7 +8417,7 @@ paths: request: curl: > curl - "https://api.openai.com/v1/organization/usage/vector_stores?start_time=1730419200&limit=1" + "https://api.openaiproxy.com/v1/organization/usage/vector_stores?start_time=1730419200&limit=1" \ -H "Authorization: Bearer $OPENAI_ADMIN_KEY" \ @@ -8492,7 +8492,7 @@ paths: request: curl: > curl - https://api.openai.com/v1/organization/users?after=user_abc&limit=20 + https://api.openaiproxy.com/v1/organization/users?after=user_abc&limit=20 \ -H "Authorization: Bearer $OPENAI_ADMIN_KEY" \ -H "Content-Type: application/json" @@ -8542,7 +8542,7 @@ paths: examples: request: curl: | - curl https://api.openai.com/v1/organization/users/user_abc \ + curl https://api.openaiproxy.com/v1/organization/users/user_abc \ -H "Authorization: Bearer $OPENAI_ADMIN_KEY" \ -H "Content-Type: application/json" response: | @@ -8587,7 +8587,7 @@ paths: examples: request: curl: > - curl -X POST https://api.openai.com/v1/organization/users/user_abc + curl -X POST https://api.openaiproxy.com/v1/organization/users/user_abc \ -H "Authorization: Bearer $OPENAI_ADMIN_KEY" \ -H "Content-Type: application/json" \ @@ -8630,7 +8630,7 @@ paths: request: curl: > 
curl -X DELETE - https://api.openai.com/v1/organization/users/user_abc \ + https://api.openaiproxy.com/v1/organization/users/user_abc \ -H "Authorization: Bearer $OPENAI_ADMIN_KEY" \ -H "Content-Type: application/json" response: | @@ -8681,7 +8681,7 @@ paths: examples: request: curl: | - curl -X POST https://api.openai.com/v1/realtime/sessions \ + curl -X POST https://api.openaiproxy.com/v1/realtime/sessions \ -H "Authorization: Bearer $OPENAI_API_KEY" \ -H "Content-Type: application/json" \ -d '{ @@ -8758,7 +8758,7 @@ paths: request: curl: > curl -X POST - https://api.openai.com/v1/realtime/transcription_sessions \ + https://api.openaiproxy.com/v1/realtime/transcription_sessions \ -H "Authorization: Bearer $OPENAI_API_KEY" \ -H "Content-Type: application/json" \ -d '{}' @@ -8828,7 +8828,7 @@ paths: - title: Text input request: curl: > - curl https://api.openai.com/v1/responses \ + curl https://api.openaiproxy.com/v1/responses \ -H "Content-Type: application/json" \ -H "Authorization: Bearer $OPENAI_API_KEY" \ -d '{ @@ -8836,13 +8836,13 @@ paths: "input": "Tell me a three sentence bedtime story about a unicorn." }' javascript: > - import OpenAI from "openai"; + import OpenAI from "openaiproxy"; - const openai = new OpenAI(); + const openaiproxy = new OpenAI(); - const response = await openai.responses.create({ + const response = await openaiproxy.responses.create({ model: "gpt-4o", input: "Tell me a three sentence bedtime story about a unicorn." 
}); @@ -8850,7 +8850,7 @@ paths: console.log(response); python: > - from openai import OpenAI + from openaiproxy import OpenAI client = OpenAI() @@ -8923,7 +8923,7 @@ paths: - title: Image input request: curl: > - curl https://api.openai.com/v1/responses \ + curl https://api.openaiproxy.com/v1/responses \ -H "Content-Type: application/json" \ -H "Authorization: Bearer $OPENAI_API_KEY" \ -d '{ @@ -8942,13 +8942,13 @@ paths: ] }' javascript: > - import OpenAI from "openai"; + import OpenAI from "openaiproxy"; - const openai = new OpenAI(); + const openaiproxy = new OpenAI(); - const response = await openai.responses.create({ + const response = await openaiproxy.responses.create({ model: "gpt-4o", input: [ { @@ -8968,7 +8968,7 @@ paths: console.log(response); python: > - from openai import OpenAI + from openaiproxy import OpenAI client = OpenAI() @@ -9052,7 +9052,7 @@ paths: - title: Web search request: curl: | - curl https://api.openai.com/v1/responses \ + curl https://api.openaiproxy.com/v1/responses \ -H "Content-Type: application/json" \ -H "Authorization: Bearer $OPENAI_API_KEY" \ -d '{ @@ -9061,11 +9061,11 @@ paths: "input": "What was a positive news story from today?" 
}' javascript: | - import OpenAI from "openai"; + import OpenAI from "openaiproxy"; - const openai = new OpenAI(); + const openaiproxy = new OpenAI(); - const response = await openai.responses.create({ + const response = await openaiproxy.responses.create({ model: "gpt-4o", tools: [{ type: "web_search_preview" }], input: "What was a positive news story from today?", @@ -9073,7 +9073,7 @@ paths: console.log(response); python: | - from openai import OpenAI + from openaiproxy import OpenAI client = OpenAI() @@ -9184,7 +9184,7 @@ paths: - title: File search request: curl: > - curl https://api.openai.com/v1/responses \ + curl https://api.openaiproxy.com/v1/responses \ -H "Content-Type: application/json" \ -H "Authorization: Bearer $OPENAI_API_KEY" \ -d '{ @@ -9197,13 +9197,13 @@ paths: "input": "What are the attributes of an ancient brown dragon?" }' javascript: > - import OpenAI from "openai"; + import OpenAI from "openaiproxy"; - const openai = new OpenAI(); + const openaiproxy = new OpenAI(); - const response = await openai.responses.create({ + const response = await openaiproxy.responses.create({ model: "gpt-4o", tools: [{ type: "file_search", @@ -9216,7 +9216,7 @@ paths: console.log(response); python: | - from openai import OpenAI + from openaiproxy import OpenAI client = OpenAI() @@ -9362,7 +9362,7 @@ paths: - title: Streaming request: curl: | - curl https://api.openai.com/v1/responses \ + curl https://api.openaiproxy.com/v1/responses \ -H "Content-Type: application/json" \ -H "Authorization: Bearer $OPENAI_API_KEY" \ -d '{ @@ -9372,7 +9372,7 @@ paths: "stream": true }' python: | - from openai import OpenAI + from openaiproxy import OpenAI client = OpenAI() @@ -9386,11 +9386,11 @@ paths: for event in response: print(event) javascript: | - import OpenAI from "openai"; + import OpenAI from "openaiproxy"; - const openai = new OpenAI(); + const openaiproxy = new OpenAI(); - const response = await openai.responses.create({ + const response = await 
openaiproxy.responses.create({ model: "gpt-4o", instructions: "You are a helpful assistant.", input: "Hello!", @@ -9470,7 +9470,7 @@ paths: - title: Functions request: curl: > - curl https://api.openai.com/v1/responses \ + curl https://api.openaiproxy.com/v1/responses \ -H "Content-Type: application/json" \ -H "Authorization: Bearer $OPENAI_API_KEY" \ -d '{ @@ -9500,7 +9500,7 @@ paths: "tool_choice": "auto" }' python: > - from openai import OpenAI + from openaiproxy import OpenAI client = OpenAI() @@ -9536,10 +9536,10 @@ paths: print(response) javascript: > - import OpenAI from "openai"; + import OpenAI from "openaiproxy"; - const openai = new OpenAI(); + const openaiproxy = new OpenAI(); const tools = [ @@ -9562,7 +9562,7 @@ paths: ]; - const response = await openai.responses.create({ + const response = await openaiproxy.responses.create({ model: "gpt-4o", tools: tools, input: "What is the weather like in Boston today?", @@ -9650,7 +9650,7 @@ paths: - title: Reasoning request: curl: | - curl https://api.openai.com/v1/responses \ + curl https://api.openaiproxy.com/v1/responses \ -H "Content-Type: application/json" \ -H "Authorization: Bearer $OPENAI_API_KEY" \ -d '{ @@ -9661,10 +9661,10 @@ paths: } }' javascript: | - import OpenAI from "openai"; - const openai = new OpenAI(); + import OpenAI from "openaiproxy"; + const openaiproxy = new OpenAI(); - const response = await openai.responses.create({ + const response = await openaiproxy.responses.create({ model: "o3-mini", input: "How much wood would a woodchuck chuck?", reasoning: { @@ -9674,7 +9674,7 @@ paths: console.log(response); python: | - from openai import OpenAI + from openaiproxy import OpenAI client = OpenAI() response = client.responses.create( @@ -9794,17 +9794,17 @@ paths: examples: request: curl: | - curl https://api.openai.com/v1/responses/resp_123 \ + curl https://api.openaiproxy.com/v1/responses/resp_123 \ -H "Content-Type: application/json" \ -H "Authorization: Bearer $OPENAI_API_KEY" javascript: | 
- import OpenAI from "openai"; + import OpenAI from "openaiproxy"; const client = new OpenAI(); const response = await client.responses.retrieve("resp_123"); console.log(response); python: | - from openai import OpenAI + from openaiproxy import OpenAI client = OpenAI() response = client.responses.retrieve("resp_123") @@ -9897,17 +9897,17 @@ paths: examples: request: curl: | - curl -X DELETE https://api.openai.com/v1/responses/resp_123 \ + curl -X DELETE https://api.openaiproxy.com/v1/responses/resp_123 \ -H "Content-Type: application/json" \ -H "Authorization: Bearer $OPENAI_API_KEY" javascript: | - import OpenAI from "openai"; + import OpenAI from "openaiproxy"; const client = new OpenAI(); const response = await client.responses.del("resp_123"); console.log(response); python: | - from openai import OpenAI + from openaiproxy import OpenAI client = OpenAI() response = client.responses.del("resp_123") @@ -9979,11 +9979,11 @@ paths: examples: request: curl: | - curl https://api.openai.com/v1/responses/resp_abc123/input_items \ + curl https://api.openaiproxy.com/v1/responses/resp_abc123/input_items \ -H "Content-Type: application/json" \ -H "Authorization: Bearer $OPENAI_API_KEY" javascript: > - import OpenAI from "openai"; + import OpenAI from "openaiproxy"; const client = new OpenAI(); @@ -9993,7 +9993,7 @@ paths: console.log(response.data); python: | - from openai import OpenAI + from openaiproxy import OpenAI client = OpenAI() response = client.responses.input_items.list("resp_123") @@ -10045,24 +10045,24 @@ paths: - title: Empty request: curl: | - curl https://api.openai.com/v1/threads \ + curl https://api.openaiproxy.com/v1/threads \ -H "Content-Type: application/json" \ -H "Authorization: Bearer $OPENAI_API_KEY" \ -H "OpenAI-Beta: assistants=v2" \ -d '' python: | - from openai import OpenAI + from openaiproxy import OpenAI client = OpenAI() empty_thread = client.beta.threads.create() print(empty_thread) node.js: |- - import OpenAI from "openai"; + import OpenAI 
from "openaiproxy"; - const openai = new OpenAI(); + const openaiproxy = new OpenAI(); async function main() { - const emptyThread = await openai.beta.threads.create(); + const emptyThread = await openaiproxy.beta.threads.create(); console.log(emptyThread); } @@ -10079,7 +10079,7 @@ paths: - title: Messages request: curl: | - curl https://api.openai.com/v1/threads \ + curl https://api.openaiproxy.com/v1/threads \ -H "Content-Type: application/json" \ -H "Authorization: Bearer $OPENAI_API_KEY" \ -H "OpenAI-Beta: assistants=v2" \ @@ -10093,7 +10093,7 @@ paths: }] }' python: | - from openai import OpenAI + from openaiproxy import OpenAI client = OpenAI() message_thread = client.beta.threads.create( @@ -10111,14 +10111,14 @@ paths: print(message_thread) node.js: >- - import OpenAI from "openai"; + import OpenAI from "openaiproxy"; - const openai = new OpenAI(); + const openaiproxy = new OpenAI(); async function main() { - const messageThread = await openai.beta.threads.create({ + const messageThread = await openaiproxy.beta.threads.create({ messages: [ { role: "user", @@ -10172,7 +10172,7 @@ paths: - title: Default request: curl: > - curl https://api.openai.com/v1/threads/runs \ + curl https://api.openaiproxy.com/v1/threads/runs \ -H "Authorization: Bearer $OPENAI_API_KEY" \ -H "Content-Type: application/json" \ -H "OpenAI-Beta: assistants=v2" \ @@ -10185,7 +10185,7 @@ paths: } }' python: > - from openai import OpenAI + from openaiproxy import OpenAI client = OpenAI() @@ -10202,14 +10202,14 @@ paths: print(run) node.js: > - import OpenAI from "openai"; + import OpenAI from "openaiproxy"; - const openai = new OpenAI(); + const openaiproxy = new OpenAI(); async function main() { - const run = await openai.beta.threads.createAndRun({ + const run = await openaiproxy.beta.threads.createAndRun({ assistant_id: "asst_abc123", thread: { messages: [ @@ -10260,7 +10260,7 @@ paths: - title: Streaming request: curl: | - curl https://api.openai.com/v1/threads/runs \ + curl 
https://api.openaiproxy.com/v1/threads/runs \ -H "Authorization: Bearer $OPENAI_API_KEY" \ -H "Content-Type: application/json" \ -H "OpenAI-Beta: assistants=v2" \ @@ -10274,7 +10274,7 @@ paths: "stream": true }' python: | - from openai import OpenAI + from openaiproxy import OpenAI client = OpenAI() stream = client.beta.threads.create_and_run( @@ -10290,12 +10290,12 @@ paths: for event in stream: print(event) node.js: | - import OpenAI from "openai"; + import OpenAI from "openaiproxy"; - const openai = new OpenAI(); + const openaiproxy = new OpenAI(); async function main() { - const stream = await openai.beta.threads.createAndRun({ + const stream = await openaiproxy.beta.threads.createAndRun({ assistant_id: "asst_123", thread: { messages: [ @@ -10408,7 +10408,7 @@ paths: - title: Streaming with Functions request: curl: > - curl https://api.openai.com/v1/threads/runs \ + curl https://api.openaiproxy.com/v1/threads/runs \ -H "Authorization: Bearer $OPENAI_API_KEY" \ -H "Content-Type: application/json" \ -H "OpenAI-Beta: assistants=v2" \ @@ -10445,7 +10445,7 @@ paths: "stream": true }' python: > - from openai import OpenAI + from openaiproxy import OpenAI client = OpenAI() @@ -10487,10 +10487,10 @@ paths: for event in stream: print(event) node.js: > - import OpenAI from "openai"; + import OpenAI from "openaiproxy"; - const openai = new OpenAI(); + const openaiproxy = new OpenAI(); const tools = [ @@ -10516,7 +10516,7 @@ paths: async function main() { - const stream = await openai.beta.threads.createAndRun({ + const stream = await openaiproxy.beta.threads.createAndRun({ assistant_id: "asst_123", thread: { messages: [ @@ -10661,23 +10661,23 @@ paths: examples: request: curl: | - curl https://api.openai.com/v1/threads/thread_abc123 \ + curl https://api.openaiproxy.com/v1/threads/thread_abc123 \ -H "Content-Type: application/json" \ -H "Authorization: Bearer $OPENAI_API_KEY" \ -H "OpenAI-Beta: assistants=v2" python: | - from openai import OpenAI + from openaiproxy import 
OpenAI client = OpenAI() my_thread = client.beta.threads.retrieve("thread_abc123") print(my_thread) node.js: |- - import OpenAI from "openai"; + import OpenAI from "openaiproxy"; - const openai = new OpenAI(); + const openaiproxy = new OpenAI(); async function main() { - const myThread = await openai.beta.threads.retrieve( + const myThread = await openaiproxy.beta.threads.retrieve( "thread_abc123" ); @@ -10732,7 +10732,7 @@ paths: examples: request: curl: | - curl https://api.openai.com/v1/threads/thread_abc123 \ + curl https://api.openaiproxy.com/v1/threads/thread_abc123 \ -H "Content-Type: application/json" \ -H "Authorization: Bearer $OPENAI_API_KEY" \ -H "OpenAI-Beta: assistants=v2" \ @@ -10743,7 +10743,7 @@ paths: } }' python: | - from openai import OpenAI + from openaiproxy import OpenAI client = OpenAI() my_updated_thread = client.beta.threads.update( @@ -10755,12 +10755,12 @@ paths: ) print(my_updated_thread) node.js: |- - import OpenAI from "openai"; + import OpenAI from "openaiproxy"; - const openai = new OpenAI(); + const openaiproxy = new OpenAI(); async function main() { - const updatedThread = await openai.beta.threads.update( + const updatedThread = await openaiproxy.beta.threads.update( "thread_abc123", { metadata: { modified: "true", user: "abc123" }, @@ -10809,24 +10809,24 @@ paths: examples: request: curl: | - curl https://api.openai.com/v1/threads/thread_abc123 \ + curl https://api.openaiproxy.com/v1/threads/thread_abc123 \ -H "Content-Type: application/json" \ -H "Authorization: Bearer $OPENAI_API_KEY" \ -H "OpenAI-Beta: assistants=v2" \ -X DELETE python: | - from openai import OpenAI + from openaiproxy import OpenAI client = OpenAI() response = client.beta.threads.delete("thread_abc123") print(response) node.js: |- - import OpenAI from "openai"; + import OpenAI from "openaiproxy"; - const openai = new OpenAI(); + const openaiproxy = new OpenAI(); async function main() { - const response = await openai.beta.threads.del("thread_abc123"); + const 
response = await openaiproxy.beta.threads.del("thread_abc123"); console.log(response); } @@ -10912,12 +10912,12 @@ paths: examples: request: curl: | - curl https://api.openai.com/v1/threads/thread_abc123/messages \ + curl https://api.openaiproxy.com/v1/threads/thread_abc123/messages \ -H "Content-Type: application/json" \ -H "Authorization: Bearer $OPENAI_API_KEY" \ -H "OpenAI-Beta: assistants=v2" python: > - from openai import OpenAI + from openaiproxy import OpenAI client = OpenAI() @@ -10927,12 +10927,12 @@ paths: print(thread_messages.data) node.js: |- - import OpenAI from "openai"; + import OpenAI from "openaiproxy"; - const openai = new OpenAI(); + const openaiproxy = new OpenAI(); async function main() { - const threadMessages = await openai.beta.threads.messages.list( + const threadMessages = await openaiproxy.beta.threads.messages.list( "thread_abc123" ); @@ -11024,7 +11024,7 @@ paths: examples: request: curl: | - curl https://api.openai.com/v1/threads/thread_abc123/messages \ + curl https://api.openaiproxy.com/v1/threads/thread_abc123/messages \ -H "Content-Type: application/json" \ -H "Authorization: Bearer $OPENAI_API_KEY" \ -H "OpenAI-Beta: assistants=v2" \ @@ -11033,7 +11033,7 @@ paths: "content": "How does AI work? Explain it in simple terms." }' python: | - from openai import OpenAI + from openaiproxy import OpenAI client = OpenAI() thread_message = client.beta.threads.messages.create( @@ -11043,14 +11043,14 @@ paths: ) print(thread_message) node.js: >- - import OpenAI from "openai"; + import OpenAI from "openaiproxy"; - const openai = new OpenAI(); + const openaiproxy = new OpenAI(); async function main() { - const threadMessages = await openai.beta.threads.messages.create( + const threadMessages = await openaiproxy.beta.threads.messages.create( "thread_abc123", { role: "user", content: "How does AI work? Explain it in simple terms." 
} ); @@ -11120,13 +11120,13 @@ paths: request: curl: > curl - https://api.openai.com/v1/threads/thread_abc123/messages/msg_abc123 + https://api.openaiproxy.com/v1/threads/thread_abc123/messages/msg_abc123 \ -H "Content-Type: application/json" \ -H "Authorization: Bearer $OPENAI_API_KEY" \ -H "OpenAI-Beta: assistants=v2" python: | - from openai import OpenAI + from openaiproxy import OpenAI client = OpenAI() message = client.beta.threads.messages.retrieve( @@ -11135,12 +11135,12 @@ paths: ) print(message) node.js: |- - import OpenAI from "openai"; + import OpenAI from "openaiproxy"; - const openai = new OpenAI(); + const openaiproxy = new OpenAI(); async function main() { - const message = await openai.beta.threads.messages.retrieve( + const message = await openaiproxy.beta.threads.messages.retrieve( "thread_abc123", "msg_abc123" ); @@ -11210,7 +11210,7 @@ paths: request: curl: > curl - https://api.openai.com/v1/threads/thread_abc123/messages/msg_abc123 + https://api.openaiproxy.com/v1/threads/thread_abc123/messages/msg_abc123 \ -H "Content-Type: application/json" \ -H "Authorization: Bearer $OPENAI_API_KEY" \ @@ -11222,7 +11222,7 @@ paths: } }' python: | - from openai import OpenAI + from openaiproxy import OpenAI client = OpenAI() message = client.beta.threads.messages.update( @@ -11235,12 +11235,12 @@ paths: ) print(message) node.js: |- - import OpenAI from "openai"; + import OpenAI from "openaiproxy"; - const openai = new OpenAI(); + const openaiproxy = new OpenAI(); async function main() { - const message = await openai.beta.threads.messages.update( + const message = await openaiproxy.beta.threads.messages.update( "thread_abc123", "msg_abc123", { @@ -11308,13 +11308,13 @@ paths: request: curl: > curl -X DELETE - https://api.openai.com/v1/threads/thread_abc123/messages/msg_abc123 + https://api.openaiproxy.com/v1/threads/thread_abc123/messages/msg_abc123 \ -H "Content-Type: application/json" \ -H "Authorization: Bearer $OPENAI_API_KEY" \ -H "OpenAI-Beta: 
assistants=v2" python: | - from openai import OpenAI + from openaiproxy import OpenAI client = OpenAI() deleted_message = client.beta.threads.messages.delete( @@ -11323,12 +11323,12 @@ paths: ) print(deleted_message) node.js: |- - import OpenAI from "openai"; + import OpenAI from "openaiproxy"; - const openai = new OpenAI(); + const openaiproxy = new OpenAI(); async function main() { - const deletedMessage = await openai.beta.threads.messages.del( + const deletedMessage = await openaiproxy.beta.threads.messages.del( "thread_abc123", "msg_abc123" ); @@ -11408,12 +11408,12 @@ paths: examples: request: curl: | - curl https://api.openai.com/v1/threads/thread_abc123/runs \ + curl https://api.openaiproxy.com/v1/threads/thread_abc123/runs \ -H "Authorization: Bearer $OPENAI_API_KEY" \ -H "Content-Type: application/json" \ -H "OpenAI-Beta: assistants=v2" python: | - from openai import OpenAI + from openaiproxy import OpenAI client = OpenAI() runs = client.beta.threads.runs.list( @@ -11422,12 +11422,12 @@ paths: print(runs) node.js: | - import OpenAI from "openai"; + import OpenAI from "openaiproxy"; - const openai = new OpenAI(); + const openaiproxy = new OpenAI(); async function main() { - const runs = await openai.beta.threads.runs.list( + const runs = await openaiproxy.beta.threads.runs.list( "thread_abc123" ); @@ -11590,7 +11590,7 @@ paths: - title: Default request: curl: | - curl https://api.openai.com/v1/threads/thread_abc123/runs \ + curl https://api.openaiproxy.com/v1/threads/thread_abc123/runs \ -H "Authorization: Bearer $OPENAI_API_KEY" \ -H "Content-Type: application/json" \ -H "OpenAI-Beta: assistants=v2" \ @@ -11598,7 +11598,7 @@ paths: "assistant_id": "asst_abc123" }' python: | - from openai import OpenAI + from openaiproxy import OpenAI client = OpenAI() run = client.beta.threads.runs.create( @@ -11608,12 +11608,12 @@ paths: print(run) node.js: | - import OpenAI from "openai"; + import OpenAI from "openaiproxy"; - const openai = new OpenAI(); + const 
openaiproxy = new OpenAI(); async function main() { - const run = await openai.beta.threads.runs.create( + const run = await openaiproxy.beta.threads.runs.create( "thread_abc123", { assistant_id: "asst_abc123" } ); @@ -11661,7 +11661,7 @@ paths: - title: Streaming request: curl: | - curl https://api.openai.com/v1/threads/thread_123/runs \ + curl https://api.openaiproxy.com/v1/threads/thread_123/runs \ -H "Authorization: Bearer $OPENAI_API_KEY" \ -H "Content-Type: application/json" \ -H "OpenAI-Beta: assistants=v2" \ @@ -11670,7 +11670,7 @@ paths: "stream": true }' python: | - from openai import OpenAI + from openaiproxy import OpenAI client = OpenAI() stream = client.beta.threads.runs.create( @@ -11682,12 +11682,12 @@ paths: for event in stream: print(event) node.js: | - import OpenAI from "openai"; + import OpenAI from "openaiproxy"; - const openai = new OpenAI(); + const openaiproxy = new OpenAI(); async function main() { - const stream = await openai.beta.threads.runs.create( + const stream = await openaiproxy.beta.threads.runs.create( "thread_123", { assistant_id: "asst_123", stream: true } ); @@ -11788,7 +11788,7 @@ paths: - title: Streaming with Functions request: curl: > - curl https://api.openai.com/v1/threads/thread_abc123/runs \ + curl https://api.openaiproxy.com/v1/threads/thread_abc123/runs \ -H "Authorization: Bearer $OPENAI_API_KEY" \ -H "Content-Type: application/json" \ -H "OpenAI-Beta: assistants=v2" \ @@ -11820,7 +11820,7 @@ paths: "stream": true }' python: > - from openai import OpenAI + from openaiproxy import OpenAI client = OpenAI() @@ -11858,10 +11858,10 @@ paths: for event in stream: print(event) node.js: > - import OpenAI from "openai"; + import OpenAI from "openaiproxy"; - const openai = new OpenAI(); + const openaiproxy = new OpenAI(); const tools = [ @@ -11887,7 +11887,7 @@ paths: async function main() { - const stream = await openai.beta.threads.runs.create( + const stream = await openaiproxy.beta.threads.runs.create( "thread_abc123", { 
assistant_id: "asst_abc123", @@ -12026,11 +12026,11 @@ paths: request: curl: > curl - https://api.openai.com/v1/threads/thread_abc123/runs/run_abc123 \ + https://api.openaiproxy.com/v1/threads/thread_abc123/runs/run_abc123 \ -H "Authorization: Bearer $OPENAI_API_KEY" \ -H "OpenAI-Beta: assistants=v2" python: | - from openai import OpenAI + from openaiproxy import OpenAI client = OpenAI() run = client.beta.threads.runs.retrieve( @@ -12040,12 +12040,12 @@ paths: print(run) node.js: | - import OpenAI from "openai"; + import OpenAI from "openaiproxy"; - const openai = new OpenAI(); + const openaiproxy = new OpenAI(); async function main() { - const run = await openai.beta.threads.runs.retrieve( + const run = await openaiproxy.beta.threads.runs.retrieve( "thread_abc123", "run_abc123" ); @@ -12136,7 +12136,7 @@ paths: request: curl: > curl - https://api.openai.com/v1/threads/thread_abc123/runs/run_abc123 \ + https://api.openaiproxy.com/v1/threads/thread_abc123/runs/run_abc123 \ -H "Authorization: Bearer $OPENAI_API_KEY" \ -H "Content-Type: application/json" \ -H "OpenAI-Beta: assistants=v2" \ @@ -12146,7 +12146,7 @@ paths: } }' python: | - from openai import OpenAI + from openaiproxy import OpenAI client = OpenAI() run = client.beta.threads.runs.update( @@ -12157,12 +12157,12 @@ paths: print(run) node.js: | - import OpenAI from "openai"; + import OpenAI from "openaiproxy"; - const openai = new OpenAI(); + const openaiproxy = new OpenAI(); async function main() { - const run = await openai.beta.threads.runs.update( + const run = await openaiproxy.beta.threads.runs.update( "thread_abc123", "run_abc123", { @@ -12263,13 +12263,13 @@ paths: request: curl: > curl - https://api.openai.com/v1/threads/thread_abc123/runs/run_abc123/cancel + https://api.openaiproxy.com/v1/threads/thread_abc123/runs/run_abc123/cancel \ -H "Authorization: Bearer $OPENAI_API_KEY" \ -H "OpenAI-Beta: assistants=v2" \ -X POST python: | - from openai import OpenAI + from openaiproxy import OpenAI client = 
OpenAI() run = client.beta.threads.runs.cancel( @@ -12279,12 +12279,12 @@ paths: print(run) node.js: | - import OpenAI from "openai"; + import OpenAI from "openaiproxy"; - const openai = new OpenAI(); + const openaiproxy = new OpenAI(); async function main() { - const run = await openai.beta.threads.runs.cancel( + const run = await openaiproxy.beta.threads.runs.cancel( "thread_abc123", "run_abc123" ); @@ -12420,13 +12420,13 @@ paths: request: curl: > curl - https://api.openai.com/v1/threads/thread_abc123/runs/run_abc123/steps + https://api.openaiproxy.com/v1/threads/thread_abc123/runs/run_abc123/steps \ -H "Authorization: Bearer $OPENAI_API_KEY" \ -H "Content-Type: application/json" \ -H "OpenAI-Beta: assistants=v2" python: | - from openai import OpenAI + from openaiproxy import OpenAI client = OpenAI() run_steps = client.beta.threads.runs.steps.list( @@ -12436,11 +12436,11 @@ paths: print(run_steps) node.js: | - import OpenAI from "openai"; - const openai = new OpenAI(); + import OpenAI from "openaiproxy"; + const openaiproxy = new OpenAI(); async function main() { - const runStep = await openai.beta.threads.runs.steps.list( + const runStep = await openaiproxy.beta.threads.runs.steps.list( "thread_abc123", "run_abc123" ); @@ -12544,13 +12544,13 @@ paths: request: curl: > curl - https://api.openai.com/v1/threads/thread_abc123/runs/run_abc123/steps/step_abc123 + https://api.openaiproxy.com/v1/threads/thread_abc123/runs/run_abc123/steps/step_abc123 \ -H "Authorization: Bearer $OPENAI_API_KEY" \ -H "Content-Type: application/json" \ -H "OpenAI-Beta: assistants=v2" python: | - from openai import OpenAI + from openaiproxy import OpenAI client = OpenAI() run_step = client.beta.threads.runs.steps.retrieve( @@ -12561,11 +12561,11 @@ paths: print(run_step) node.js: | - import OpenAI from "openai"; - const openai = new OpenAI(); + import OpenAI from "openaiproxy"; + const openaiproxy = new OpenAI(); async function main() { - const runStep = await 
openai.beta.threads.runs.steps.retrieve( + const runStep = await openaiproxy.beta.threads.runs.steps.retrieve( "thread_abc123", "run_abc123", "step_abc123" @@ -12651,7 +12651,7 @@ paths: request: curl: > curl - https://api.openai.com/v1/threads/thread_123/runs/run_123/submit_tool_outputs + https://api.openaiproxy.com/v1/threads/thread_123/runs/run_123/submit_tool_outputs \ -H "Authorization: Bearer $OPENAI_API_KEY" \ -H "Content-Type: application/json" \ @@ -12665,7 +12665,7 @@ paths: ] }' python: | - from openai import OpenAI + from openaiproxy import OpenAI client = OpenAI() run = client.beta.threads.runs.submit_tool_outputs( @@ -12681,12 +12681,12 @@ paths: print(run) node.js: | - import OpenAI from "openai"; + import OpenAI from "openaiproxy"; - const openai = new OpenAI(); + const openaiproxy = new OpenAI(); async function main() { - const run = await openai.beta.threads.runs.submitToolOutputs( + const run = await openaiproxy.beta.threads.runs.submitToolOutputs( "thread_123", "run_123", { @@ -12760,7 +12760,7 @@ paths: request: curl: > curl - https://api.openai.com/v1/threads/thread_123/runs/run_123/submit_tool_outputs + https://api.openaiproxy.com/v1/threads/thread_123/runs/run_123/submit_tool_outputs \ -H "Authorization: Bearer $OPENAI_API_KEY" \ -H "Content-Type: application/json" \ @@ -12775,7 +12775,7 @@ paths: "stream": true }' python: | - from openai import OpenAI + from openaiproxy import OpenAI client = OpenAI() stream = client.beta.threads.runs.submit_tool_outputs( @@ -12793,14 +12793,14 @@ paths: for event in stream: print(event) node.js: > - import OpenAI from "openai"; + import OpenAI from "openaiproxy"; - const openai = new OpenAI(); + const openaiproxy = new OpenAI(); async function main() { - const stream = await openai.beta.threads.runs.submitToolOutputs( + const stream = await openaiproxy.beta.threads.runs.submitToolOutputs( "thread_123", "run_123", { @@ -13000,7 +13000,7 @@ paths: examples: request: curl: | - curl 
https://api.openai.com/v1/uploads \ + curl https://api.openaiproxy.com/v1/uploads \ -H "Authorization: Bearer $OPENAI_API_KEY" \ -d '{ "purpose": "fine-tune", @@ -13051,7 +13051,7 @@ paths: examples: request: curl: | - curl https://api.openai.com/v1/uploads/upload_abc123/cancel + curl https://api.openaiproxy.com/v1/uploads/upload_abc123/cancel response: | { "id": "upload_abc123", @@ -13116,7 +13116,7 @@ paths: examples: request: curl: | - curl https://api.openai.com/v1/uploads/upload_abc123/complete + curl https://api.openaiproxy.com/v1/uploads/upload_abc123/complete -d '{ "part_ids": ["part_def456", "part_ghi789"] }' @@ -13186,7 +13186,7 @@ paths: examples: request: curl: | - curl https://api.openai.com/v1/uploads/upload_abc123/parts + curl https://api.openaiproxy.com/v1/uploads/upload_abc123/parts -F data="aHR0cHM6Ly9hcGkub3BlbmFpLmNvbS92MS91cGxvYWRz..." response: | { @@ -13257,22 +13257,22 @@ paths: examples: request: curl: | - curl https://api.openai.com/v1/vector_stores \ + curl https://api.openaiproxy.com/v1/vector_stores \ -H "Authorization: Bearer $OPENAI_API_KEY" \ -H "Content-Type: application/json" \ -H "OpenAI-Beta: assistants=v2" python: | - from openai import OpenAI + from openaiproxy import OpenAI client = OpenAI() vector_stores = client.vector_stores.list() print(vector_stores) node.js: | - import OpenAI from "openai"; - const openai = new OpenAI(); + import OpenAI from "openaiproxy"; + const openaiproxy = new OpenAI(); async function main() { - const vectorStores = await openai.vectorStores.list(); + const vectorStores = await openaiproxy.vectorStores.list(); console.log(vectorStores); } @@ -13339,7 +13339,7 @@ paths: examples: request: curl: | - curl https://api.openai.com/v1/vector_stores \ + curl https://api.openaiproxy.com/v1/vector_stores \ -H "Authorization: Bearer $OPENAI_API_KEY" \ -H "Content-Type: application/json" \ -H "OpenAI-Beta: assistants=v2" \ @@ -13347,7 +13347,7 @@ paths: "name": "Support FAQ" }' python: | - from openai import 
OpenAI + from openaiproxy import OpenAI client = OpenAI() vector_store = client.vector_stores.create( @@ -13355,11 +13355,11 @@ paths: ) print(vector_store) node.js: | - import OpenAI from "openai"; - const openai = new OpenAI(); + import OpenAI from "openaiproxy"; + const openaiproxy = new OpenAI(); async function main() { - const vectorStore = await openai.vectorStores.create({ + const vectorStore = await openaiproxy.vectorStores.create({ name: "Support FAQ" }); console.log(vectorStore); @@ -13410,12 +13410,12 @@ paths: examples: request: curl: | - curl https://api.openai.com/v1/vector_stores/vs_abc123 \ + curl https://api.openaiproxy.com/v1/vector_stores/vs_abc123 \ -H "Authorization: Bearer $OPENAI_API_KEY" \ -H "Content-Type: application/json" \ -H "OpenAI-Beta: assistants=v2" python: | - from openai import OpenAI + from openaiproxy import OpenAI client = OpenAI() vector_store = client.vector_stores.retrieve( @@ -13423,11 +13423,11 @@ paths: ) print(vector_store) node.js: | - import OpenAI from "openai"; - const openai = new OpenAI(); + import OpenAI from "openaiproxy"; + const openaiproxy = new OpenAI(); async function main() { - const vectorStore = await openai.vectorStores.retrieve( + const vectorStore = await openaiproxy.vectorStores.retrieve( "vs_abc123" ); console.log(vectorStore); @@ -13474,7 +13474,7 @@ paths: examples: request: curl: | - curl https://api.openai.com/v1/vector_stores/vs_abc123 \ + curl https://api.openaiproxy.com/v1/vector_stores/vs_abc123 \ -H "Authorization: Bearer $OPENAI_API_KEY" \ -H "Content-Type: application/json" \ -H "OpenAI-Beta: assistants=v2" @@ -13482,7 +13482,7 @@ paths: "name": "Support FAQ" }' python: | - from openai import OpenAI + from openaiproxy import OpenAI client = OpenAI() vector_store = client.vector_stores.update( @@ -13491,11 +13491,11 @@ paths: ) print(vector_store) node.js: | - import OpenAI from "openai"; - const openai = new OpenAI(); + import OpenAI from "openaiproxy"; + const openaiproxy = new OpenAI(); 
async function main() { - const vectorStore = await openai.vectorStores.update( + const vectorStore = await openaiproxy.vectorStores.update( "vs_abc123", { name: "Support FAQ" @@ -13546,13 +13546,13 @@ paths: examples: request: curl: | - curl https://api.openai.com/v1/vector_stores/vs_abc123 \ + curl https://api.openaiproxy.com/v1/vector_stores/vs_abc123 \ -H "Authorization: Bearer $OPENAI_API_KEY" \ -H "Content-Type: application/json" \ -H "OpenAI-Beta: assistants=v2" \ -X DELETE python: | - from openai import OpenAI + from openaiproxy import OpenAI client = OpenAI() deleted_vector_store = client.vector_stores.delete( @@ -13560,11 +13560,11 @@ paths: ) print(deleted_vector_store) node.js: | - import OpenAI from "openai"; - const openai = new OpenAI(); + import OpenAI from "openaiproxy"; + const openaiproxy = new OpenAI(); async function main() { - const deletedVectorStore = await openai.vectorStores.del( + const deletedVectorStore = await openaiproxy.vectorStores.del( "vs_abc123" ); console.log(deletedVectorStore); @@ -13615,7 +13615,7 @@ paths: request: curl: > curl - https://api.openai.com/v1/vector_stores/vs_abc123/file_batches \ + https://api.openaiproxy.com/v1/vector_stores/vs_abc123/file_batches \ -H "Authorization: Bearer $OPENAI_API_KEY" \ -H "Content-Type: application/json \ -H "OpenAI-Beta: assistants=v2" \ @@ -13623,7 +13623,7 @@ paths: "file_ids": ["file-abc123", "file-abc456"] }' python: > - from openai import OpenAI + from openaiproxy import OpenAI client = OpenAI() @@ -13636,13 +13636,13 @@ paths: print(vector_store_file_batch) node.js: > - import OpenAI from "openai"; + import OpenAI from "openaiproxy"; - const openai = new OpenAI(); + const openaiproxy = new OpenAI(); async function main() { - const myVectorStoreFileBatch = await openai.vectorStores.fileBatches.create( + const myVectorStoreFileBatch = await openaiproxy.vectorStores.fileBatches.create( "vs_abc123", { file_ids: ["file-abc123", "file-abc456"] @@ -13706,13 +13706,13 @@ paths: request: 
curl: > curl - https://api.openai.com/v1/vector_stores/vs_abc123/files_batches/vsfb_abc123 + https://api.openaiproxy.com/v1/vector_stores/vs_abc123/files_batches/vsfb_abc123 \ -H "Authorization: Bearer $OPENAI_API_KEY" \ -H "Content-Type: application/json" \ -H "OpenAI-Beta: assistants=v2" python: > - from openai import OpenAI + from openaiproxy import OpenAI client = OpenAI() @@ -13725,13 +13725,13 @@ paths: print(vector_store_file_batch) node.js: > - import OpenAI from "openai"; + import OpenAI from "openaiproxy"; - const openai = new OpenAI(); + const openaiproxy = new OpenAI(); async function main() { - const vectorStoreFileBatch = await openai.vectorStores.fileBatches.retrieve( + const vectorStoreFileBatch = await openaiproxy.vectorStores.fileBatches.retrieve( "vs_abc123", "vsfb_abc123" ); @@ -13790,14 +13790,14 @@ paths: request: curl: > curl - https://api.openai.com/v1/vector_stores/vs_abc123/files_batches/vsfb_abc123/cancel + https://api.openaiproxy.com/v1/vector_stores/vs_abc123/files_batches/vsfb_abc123/cancel \ -H "Authorization: Bearer $OPENAI_API_KEY" \ -H "Content-Type: application/json" \ -H "OpenAI-Beta: assistants=v2" \ -X POST python: > - from openai import OpenAI + from openaiproxy import OpenAI client = OpenAI() @@ -13810,13 +13810,13 @@ paths: print(deleted_vector_store_file_batch) node.js: > - import OpenAI from "openai"; + import OpenAI from "openaiproxy"; - const openai = new OpenAI(); + const openaiproxy = new OpenAI(); async function main() { - const deletedVectorStoreFileBatch = await openai.vectorStores.fileBatches.cancel( + const deletedVectorStoreFileBatch = await openaiproxy.vectorStores.fileBatches.cancel( "vs_abc123", "vsfb_abc123" ); @@ -13926,13 +13926,13 @@ paths: request: curl: > curl - https://api.openai.com/v1/vector_stores/vs_abc123/files_batches/vsfb_abc123/files + https://api.openaiproxy.com/v1/vector_stores/vs_abc123/files_batches/vsfb_abc123/files \ -H "Authorization: Bearer $OPENAI_API_KEY" \ -H "Content-Type: 
application/json" \ -H "OpenAI-Beta: assistants=v2" python: | - from openai import OpenAI + from openaiproxy import OpenAI client = OpenAI() vector_store_files = client.vector_stores.file_batches.list_files( @@ -13941,13 +13941,13 @@ paths: ) print(vector_store_files) node.js: > - import OpenAI from "openai"; + import OpenAI from "openaiproxy"; - const openai = new OpenAI(); + const openaiproxy = new OpenAI(); async function main() { - const vectorStoreFiles = await openai.vectorStores.fileBatches.listFiles( + const vectorStoreFiles = await openaiproxy.vectorStores.fileBatches.listFiles( "vs_abc123", "vsfb_abc123" ); @@ -14056,12 +14056,12 @@ paths: examples: request: curl: | - curl https://api.openai.com/v1/vector_stores/vs_abc123/files \ + curl https://api.openaiproxy.com/v1/vector_stores/vs_abc123/files \ -H "Authorization: Bearer $OPENAI_API_KEY" \ -H "Content-Type: application/json" \ -H "OpenAI-Beta: assistants=v2" python: | - from openai import OpenAI + from openaiproxy import OpenAI client = OpenAI() vector_store_files = client.vector_stores.files.list( @@ -14069,11 +14069,11 @@ paths: ) print(vector_store_files) node.js: | - import OpenAI from "openai"; - const openai = new OpenAI(); + import OpenAI from "openaiproxy"; + const openaiproxy = new OpenAI(); async function main() { - const vectorStoreFiles = await openai.vectorStores.files.list( + const vectorStoreFiles = await openaiproxy.vectorStores.files.list( "vs_abc123" ); console.log(vectorStoreFiles); @@ -14138,7 +14138,7 @@ paths: examples: request: curl: | - curl https://api.openai.com/v1/vector_stores/vs_abc123/files \ + curl https://api.openaiproxy.com/v1/vector_stores/vs_abc123/files \ -H "Authorization: Bearer $OPENAI_API_KEY" \ -H "Content-Type: application/json" \ -H "OpenAI-Beta: assistants=v2" \ @@ -14146,7 +14146,7 @@ paths: "file_id": "file-abc123" }' python: | - from openai import OpenAI + from openaiproxy import OpenAI client = OpenAI() vector_store_file = 
client.vector_stores.files.create( @@ -14155,13 +14155,13 @@ paths: ) print(vector_store_file) node.js: > - import OpenAI from "openai"; + import OpenAI from "openaiproxy"; - const openai = new OpenAI(); + const openaiproxy = new OpenAI(); async function main() { - const myVectorStoreFile = await openai.vectorStores.files.create( + const myVectorStoreFile = await openaiproxy.vectorStores.files.create( "vs_abc123", { file_id: "file-abc123" @@ -14219,13 +14219,13 @@ paths: request: curl: > curl - https://api.openai.com/v1/vector_stores/vs_abc123/files/file-abc123 + https://api.openaiproxy.com/v1/vector_stores/vs_abc123/files/file-abc123 \ -H "Authorization: Bearer $OPENAI_API_KEY" \ -H "Content-Type: application/json" \ -H "OpenAI-Beta: assistants=v2" python: | - from openai import OpenAI + from openaiproxy import OpenAI client = OpenAI() vector_store_file = client.vector_stores.files.retrieve( @@ -14234,13 +14234,13 @@ paths: ) print(vector_store_file) node.js: > - import OpenAI from "openai"; + import OpenAI from "openaiproxy"; - const openai = new OpenAI(); + const openaiproxy = new OpenAI(); async function main() { - const vectorStoreFile = await openai.vectorStores.files.retrieve( + const vectorStoreFile = await openaiproxy.vectorStores.files.retrieve( "vs_abc123", "file-abc123" ); @@ -14294,14 +14294,14 @@ paths: request: curl: > curl - https://api.openai.com/v1/vector_stores/vs_abc123/files/file-abc123 + https://api.openaiproxy.com/v1/vector_stores/vs_abc123/files/file-abc123 \ -H "Authorization: Bearer $OPENAI_API_KEY" \ -H "Content-Type: application/json" \ -H "OpenAI-Beta: assistants=v2" \ -X DELETE python: | - from openai import OpenAI + from openaiproxy import OpenAI client = OpenAI() deleted_vector_store_file = client.vector_stores.files.delete( @@ -14310,13 +14310,13 @@ paths: ) print(deleted_vector_store_file) node.js: > - import OpenAI from "openai"; + import OpenAI from "openaiproxy"; - const openai = new OpenAI(); + const openaiproxy = new OpenAI(); 
async function main() { - const deletedVectorStoreFile = await openai.vectorStores.files.del( + const deletedVectorStoreFile = await openaiproxy.vectorStores.files.del( "vs_abc123", "file-abc123" ); @@ -14373,7 +14373,7 @@ paths: request: curl: > curl - https://api.openai.com/v1/vector_stores/{vector_store_id}/files/{file_id} + https://api.openaiproxy.com/v1/vector_stores/{vector_store_id}/files/{file_id} \ -H "Authorization: Bearer $OPENAI_API_KEY" \ -H "Content-Type: application/json" \ @@ -14427,7 +14427,7 @@ paths: curl: > curl \ - https://api.openai.com/v1/vector_stores/vs_abc123/files/file-abc123/content + https://api.openaiproxy.com/v1/vector_stores/vs_abc123/files/file-abc123/content \ -H "Authorization: Bearer $OPENAI_API_KEY" @@ -14478,7 +14478,7 @@ paths: request: curl: | curl -X POST \ - https://api.openai.com/v1/vector_stores/vs_abc123/search \ + https://api.openaiproxy.com/v1/vector_stores/vs_abc123/search \ -H "Authorization: Bearer $OPENAI_API_KEY" \ -H "Content-Type: application/json" \ -d '{"query": "What is the return policy?", "filters": {...}}' @@ -17932,7 +17932,7 @@ components: chat completion. This value can be used to control - [costs](https://openai.com/api/pricing/) for text generated via + [costs](https://openaiproxy.com/api/pricing/) for text generated via API. @@ -18500,7 +18500,7 @@ components: The token count of your prompt plus `max_tokens` cannot exceed the model's context length. [Example Python - code](https://cookbook.openai.com/examples/how_to_count_tokens_with_tiktoken) + code](https://cookbook.openaiproxy.com/examples/how_to_count_tokens_with_tiktoken) for counting tokens. n: type: integer @@ -18552,7 +18552,7 @@ components: events](https://developer.mozilla.org/en-US/docs/Web/API/Server-sent_events/Using_server-sent_events#Event_stream_format) as they become available, with the stream terminated by a `data: [DONE]` message. [Example Python - code](https://cookbook.openai.com/examples/how_to_stream_completions). 
+ code](https://cookbook.openaiproxy.com/examples/how_to_stream_completions). type: boolean nullable: true default: false @@ -18736,7 +18736,7 @@ components: tokens for the model (8192 tokens for `text-embedding-ada-002`), cannot be an empty string, and any array must be 2048 dimensions or less. [Example Python - code](https://cookbook.openai.com/examples/how_to_count_tokens_with_tiktoken) + code](https://cookbook.openaiproxy.com/examples/how_to_count_tokens_with_tiktoken) for counting tokens. Some models may also impose a limit on total number of tokens summed across inputs. example: The quick brown fox jumped over the lazy dog @@ -18996,7 +18996,7 @@ components: For example, a `suffix` of "custom-model-name" would produce a model - name like `ft:gpt-4o-mini:openai:custom-model-name:7p4lURel`. + name like `ft:gpt-4o-mini:openaiproxy:custom-model-name:7p4lURel`. type: string minLength: 1 maxLength: 64 @@ -19086,8 +19086,8 @@ components: A list of tags to be attached to the newly created run. These tags are passed through directly to WandB. Some - default tags are generated by OpenAI: "openai/finetune", - "openai/{base-model}", "openai/{ftjob-abcdef}". + default tags are generated by OpenAI: "openaiproxy/finetune", + "openaiproxy/{base-model}", "openaiproxy/{ftjob-abcdef}". type: array items: type: string @@ -21833,8 +21833,8 @@ components: A list of tags to be attached to the newly created run. These tags are passed through directly to WandB. Some - default tags are generated by OpenAI: "openai/finetune", - "openai/{base-model}", "openai/{ftjob-abcdef}". + default tags are generated by OpenAI: "openaiproxy/finetune", + "openaiproxy/{base-model}", "openaiproxy/{ftjob-abcdef}". 
type: array items: type: string @@ -24088,7 +24088,7 @@ components: "id": "VAR_chat_model_id", "object": "model", "created": 1686935002, - "owned_by": "openai" + "owned_by": "openaiproxy" } ModelIds: anyOf: @@ -28245,7 +28245,7 @@ components: not native to the model, since the model consumes audio directly. Transcription runs asynchronously through [the /audio/transcriptions - endpoint](https://platform.openai.com/docs/api-reference/audio/createTranscription) + endpoint](https://platform.openaiproxy.com/docs/api-reference/audio/createTranscription) and should be treated as guidance of input audio content rather than precisely what the model heard. The client can optionally set the language and prompt for transcription, these offer additional @@ -28526,7 +28526,7 @@ components: not native to the model, since the model consumes audio directly. Transcription runs asynchronously through [the /audio/transcriptions - endpoint](https://platform.openai.com/docs/api-reference/audio/createTranscription) + endpoint](https://platform.openaiproxy.com/docs/api-reference/audio/createTranscription) and should be treated as guidance of input audio content rather than precisely what the model heard. The client can optionally set the language and prompt for transcription, these offer additional @@ -29296,7 +29296,7 @@ components: **o-series models only** Configuration options for - [reasoning models](https://platform.openai.com/docs/guides/reasoning). + [reasoning models](https://platform.openaiproxy.com/docs/guides/reasoning). title: Reasoning x-oaiExpandable: true properties: @@ -29330,7 +29330,7 @@ components: **o-series models only** Constrains effort on reasoning for - [reasoning models](https://platform.openai.com/docs/guides/reasoning). + [reasoning models](https://platform.openaiproxy.com/docs/guides/reasoning). Currently supported values are `low`, `medium`, and `high`. Reducing reasoning effort can result in faster responses and fewer tokens used on reasoning in a response. 
@@ -35355,7 +35355,7 @@ x-oaiMeta: can use the following link: - [https://platform.openai.com/settings/organization/admin-keys](https://platform.openai.com/settings/organization/admin-keys) + [https://platform.openaiproxy.com/settings/organization/admin-keys](https://platform.openaiproxy.com/settings/organization/admin-keys) It's crucial to handle Admin API keys with care due to their elevated diff --git a/pkg/heroservices/openai/server.go b/pkg/heroservices/openaiproxy/server.go similarity index 98% rename from pkg/heroservices/openai/server.go rename to pkg/heroservices/openaiproxy/server.go index 35fd216..754f556 100644 --- a/pkg/heroservices/openai/server.go +++ b/pkg/heroservices/openaiproxy/server.go @@ -119,7 +119,7 @@ func createJob(c *fiber.Ctx, apiKey string, endpoint string, requestBody interfa // Create a new job job := jobsmanager.NewJob() job.ParamsType = jobsmanager.ParamsTypeAI - job.Topic = "openai-proxy" + job.Topic = "openaiproxy-proxy" job.CircleID = "ai" // Serialize request body to JSON @@ -183,7 +183,7 @@ func (s *Server) handleModels(c *fiber.Ctx) error { // Forward request to OpenAI url := s.Factory.Config.OpenAIBaseURL + "/v1/models" if url == "/v1/models" { - url = "https://api.openai.com/v1/models" + url = "https://api.openaiproxy.com/v1/models" } req, err := http.NewRequest("GET", url, nil) @@ -250,7 +250,7 @@ func (s *Server) handleGetModel(c *fiber.Ctx) error { // Forward request to OpenAI url := s.Factory.Config.OpenAIBaseURL + "/v1/models/" + modelID if strings.HasPrefix(url, "/v1/models/") { - url = "https://api.openai.com/v1/models/" + modelID + url = "https://api.openaiproxy.com/v1/models/" + modelID } req, err := http.NewRequest("GET", url, nil) @@ -337,7 +337,7 @@ func (s *Server) handleChatCompletions(c *fiber.Ctx) error { // Forward request to OpenAI url := s.Factory.Config.OpenAIBaseURL + "/v1/chat/completions" if url == "/v1/chat/completions" { - url = "https://api.openai.com/v1/chat/completions" + url = 
"https://api.openaiproxy.com/v1/chat/completions" } // Convert the request body back to JSON @@ -461,7 +461,7 @@ func (s *Server) handleCompletions(c *fiber.Ctx) error { // Forward request to OpenAI url := s.Factory.Config.OpenAIBaseURL + "/v1/completions" if url == "/v1/completions" { - url = "https://api.openai.com/v1/completions" + url = "https://api.openaiproxy.com/v1/completions" } // Convert the request body back to JSON @@ -585,7 +585,7 @@ func (s *Server) handleEmbeddings(c *fiber.Ctx) error { // Forward request to OpenAI url := s.Factory.Config.OpenAIBaseURL + "/v1/embeddings" if url == "/v1/embeddings" { - url = "https://api.openai.com/v1/embeddings" + url = "https://api.openaiproxy.com/v1/embeddings" } // Convert the request body back to JSON @@ -724,7 +724,7 @@ func (s *Server) handleImagesGenerations(c *fiber.Ctx) error { // Forward request to OpenAI url := s.Factory.Config.OpenAIBaseURL + "/v1/images/generations" if url == "/v1/images/generations" { - url = "https://api.openai.com/v1/images/generations" + url = "https://api.openaiproxy.com/v1/images/generations" } // Convert the request body back to JSON @@ -919,7 +919,7 @@ func (s *Server) handleListFiles(c *fiber.Ctx) error { // Forward request to OpenAI url := s.Factory.Config.OpenAIBaseURL + "/v1/files" if url == "/v1/files" { - url = "https://api.openai.com/v1/files" + url = "https://api.openaiproxy.com/v1/files" } req, err := http.NewRequest("GET", url, nil) @@ -1017,7 +1017,7 @@ func (s *Server) handleGetFile(c *fiber.Ctx) error { // Forward request to OpenAI url := s.Factory.Config.OpenAIBaseURL + "/v1/files/" + fileID if strings.HasPrefix(url, "/v1/files/") { - url = "https://api.openai.com/v1/files/" + fileID + url = "https://api.openaiproxy.com/v1/files/" + fileID } req, err := http.NewRequest("GET", url, nil) @@ -1084,7 +1084,7 @@ func (s *Server) handleDeleteFile(c *fiber.Ctx) error { // Forward request to OpenAI url := s.Factory.Config.OpenAIBaseURL + "/v1/files/" + fileID if 
strings.HasPrefix(url, "/v1/files/") { - url = "https://api.openai.com/v1/files/" + fileID + url = "https://api.openaiproxy.com/v1/files/" + fileID } req, err := http.NewRequest("DELETE", url, nil) diff --git a/pkg/openrpcmanager/client/client.go b/pkg/openrpcmanager/client/client.go index 131d048..8f239d8 100644 --- a/pkg/openrpcmanager/client/client.go +++ b/pkg/openrpcmanager/client/client.go @@ -6,7 +6,7 @@ import ( "fmt" "net" - "github.com/freeflowuniverse/herolauncher/pkg/openrpcmanager" + "github.com/freeflowuniverse/heroagent/pkg/openrpcmanager" ) // Common errors diff --git a/pkg/openrpcmanager/client/client_test.go b/pkg/openrpcmanager/client/client_test.go index 8923169..600f71c 100644 --- a/pkg/openrpcmanager/client/client_test.go +++ b/pkg/openrpcmanager/client/client_test.go @@ -7,7 +7,7 @@ import ( "testing" "time" - "github.com/freeflowuniverse/herolauncher/pkg/openrpcmanager" + "github.com/freeflowuniverse/heroagent/pkg/openrpcmanager" ) // MockClient implements the Client interface for testing diff --git a/pkg/openrpcmanager/cmd/server/main.go b/pkg/openrpcmanager/cmd/server/main.go index 473ad59..76b397a 100644 --- a/pkg/openrpcmanager/cmd/server/main.go +++ b/pkg/openrpcmanager/cmd/server/main.go @@ -9,7 +9,7 @@ import ( "os/signal" "syscall" - "github.com/freeflowuniverse/herolauncher/pkg/openrpcmanager" + "github.com/freeflowuniverse/heroagent/pkg/openrpcmanager" ) func main() { diff --git a/pkg/builders/hetznerinstall/builder.go b/pkg/system/builders/hetznerinstall/builder.go similarity index 100% rename from pkg/builders/hetznerinstall/builder.go rename to pkg/system/builders/hetznerinstall/builder.go diff --git a/pkg/builders/hetznerinstall/cmd/build.sh b/pkg/system/builders/hetznerinstall/cmd/build.sh similarity index 100% rename from pkg/builders/hetznerinstall/cmd/build.sh rename to pkg/system/builders/hetznerinstall/cmd/build.sh diff --git a/pkg/builders/hetznerinstall/cmd/main.go b/pkg/system/builders/hetznerinstall/cmd/main.go 
similarity index 100% rename from pkg/builders/hetznerinstall/cmd/main.go rename to pkg/system/builders/hetznerinstall/cmd/main.go diff --git a/pkg/builders/hetznerinstall/cmd/run.sh b/pkg/system/builders/hetznerinstall/cmd/run.sh similarity index 100% rename from pkg/builders/hetznerinstall/cmd/run.sh rename to pkg/system/builders/hetznerinstall/cmd/run.sh diff --git a/pkg/builders/postgresql/builder.go b/pkg/system/builders/postgresql/builder.go similarity index 99% rename from pkg/builders/postgresql/builder.go rename to pkg/system/builders/postgresql/builder.go index 22cd5a9..da6ac7c 100644 --- a/pkg/builders/postgresql/builder.go +++ b/pkg/system/builders/postgresql/builder.go @@ -91,20 +91,20 @@ func (b *Builder) Build() error { return fmt.Errorf("failed to ensure Go is installed: %w", err) } fmt.Printf("Using Go executable from: %s\n", goPath) - + // Pass the Go path explicitly to the GoSPBuilder b.GoSPBuilder.WithGoPath(goPath) - + // For the Go stored procedure, we'll create and execute a shell script directly // to ensure all environment variables are properly set fmt.Println("Building Go stored procedure via shell script...") - + tempDir, err := os.MkdirTemp("", "gosp-build-") if err != nil { return fmt.Errorf("failed to create temp directory: %w", err) } defer os.RemoveAll(tempDir) - + // Create the Go source file in the temp directory libPath := filepath.Join(tempDir, "gosp.go") libSrc := ` @@ -122,7 +122,7 @@ func main() {} if err := os.WriteFile(libPath, []byte(libSrc), 0644); err != nil { return fmt.Errorf("failed to write Go source file: %w", err) } - + // Create a shell script to build the Go stored procedure buildScript := filepath.Join(tempDir, "build.sh") buildScriptContent := fmt.Sprintf(`#!/bin/sh @@ -147,11 +147,11 @@ go build -buildmode=c-shared -o %s/lib/libgosp.so %s echo "Go stored procedure built successfully!" 
`, libPath, b.InstallPrefix, b.InstallPrefix, b.InstallPrefix, libPath, b.InstallPrefix, libPath) - + if err := os.WriteFile(buildScript, []byte(buildScriptContent), 0755); err != nil { return fmt.Errorf("failed to write build script: %w", err) } - + // Execute the build script cmd := exec.Command("/bin/sh", buildScript) cmd.Stdout = os.Stdout diff --git a/pkg/builders/postgresql/cmd/build.sh b/pkg/system/builders/postgresql/cmd/build.sh similarity index 100% rename from pkg/builders/postgresql/cmd/build.sh rename to pkg/system/builders/postgresql/cmd/build.sh diff --git a/pkg/builders/postgresql/cmd/main.go b/pkg/system/builders/postgresql/cmd/main.go similarity index 100% rename from pkg/builders/postgresql/cmd/main.go rename to pkg/system/builders/postgresql/cmd/main.go diff --git a/pkg/builders/postgresql/cmd/run.sh b/pkg/system/builders/postgresql/cmd/run.sh similarity index 100% rename from pkg/builders/postgresql/cmd/run.sh rename to pkg/system/builders/postgresql/cmd/run.sh diff --git a/pkg/builders/postgresql/dependencies/dependencies.go b/pkg/system/builders/postgresql/dependencies/dependencies.go similarity index 100% rename from pkg/builders/postgresql/dependencies/dependencies.go rename to pkg/system/builders/postgresql/dependencies/dependencies.go diff --git a/pkg/builders/postgresql/gosp/gosp.go b/pkg/system/builders/postgresql/gosp/gosp.go similarity index 100% rename from pkg/builders/postgresql/gosp/gosp.go rename to pkg/system/builders/postgresql/gosp/gosp.go diff --git a/pkg/builders/postgresql/postgres/download.go b/pkg/system/builders/postgresql/postgres/download.go similarity index 100% rename from pkg/builders/postgresql/postgres/download.go rename to pkg/system/builders/postgresql/postgres/download.go diff --git a/pkg/builders/postgresql/postgres/fs.go b/pkg/system/builders/postgresql/postgres/fs.go similarity index 100% rename from pkg/builders/postgresql/postgres/fs.go rename to pkg/system/builders/postgresql/postgres/fs.go diff --git 
a/pkg/builders/postgresql/postgres/goinstall.go b/pkg/system/builders/postgresql/postgres/goinstall.go similarity index 100% rename from pkg/builders/postgresql/postgres/goinstall.go rename to pkg/system/builders/postgresql/postgres/goinstall.go diff --git a/pkg/builders/postgresql/postgres/postgres.go b/pkg/system/builders/postgresql/postgres/postgres.go similarity index 100% rename from pkg/builders/postgresql/postgres/postgres.go rename to pkg/system/builders/postgresql/postgres/postgres.go diff --git a/pkg/builders/postgresql/postgres/tar.go b/pkg/system/builders/postgresql/postgres/tar.go similarity index 100% rename from pkg/builders/postgresql/postgres/tar.go rename to pkg/system/builders/postgresql/postgres/tar.go diff --git a/pkg/builders/postgresql/verification/verification.go b/pkg/system/builders/postgresql/verification/verification.go similarity index 100% rename from pkg/builders/postgresql/verification/verification.go rename to pkg/system/builders/postgresql/verification/verification.go