_pkg2_dont_use/heroservices/openaiproxy/cmd/main.go (98 lines, new file)
@@ -0,0 +1,98 @@
package main

import (
	"context"
	"fmt"
	"log"
	"os"
	"os/signal"
	"syscall"
	"time"

	proxy "git.ourworld.tf/herocode/heroagent/pkg/heroservices/openaiproxy"
	"github.com/openai/openai-go"
	"github.com/openai/openai-go/option"
)

func main() {
	// Start the server in a goroutine
	go runServerMode()

	// Wait a moment for the server to start
	time.Sleep(2 * time.Second)

	// Test the proxy with a client
	testProxyWithClient()

	// Keep the main function running until interrupted
	quit := make(chan os.Signal, 1)
	signal.Notify(quit, os.Interrupt, syscall.SIGTERM)
	<-quit
	log.Println("Shutting down...")
}

// testProxyWithClient tests the proxy using the OpenAI Go client
func testProxyWithClient() {
	log.Println("Testing proxy with OpenAI Go client...")

	// Create a client that points at our proxy.
	// Note: the server uses "/ai" as the prefix for all routes.
	client := openai.NewClient(
		option.WithAPIKey("test-key"),                  // Proxy API key registered in runServerMode, not a real OpenAI key
		option.WithBaseURL("http://localhost:8080/ai"), // Use the /ai prefix to match the server routes
	)

	// Create a chat completion request
	chatCompletion, err := client.Chat.Completions.New(context.Background(), openai.ChatCompletionNewParams{
		Messages: []openai.ChatCompletionMessageParamUnion{
			openai.UserMessage("Say this is a test"),
		},
		Model: "gpt-3.5-turbo", // Use a model that our proxy supports
	})
	if err != nil {
		log.Fatalf("Error creating completion: %v", err)
	}

	// Print the response
	log.Printf("Completion response: %s", chatCompletion.Choices[0].Message.Content)
	log.Println("Proxy test completed successfully!")
}

// runServerMode starts the proxy server with example configurations
func runServerMode() {
	// Get the OpenAI API key from the environment
	openaiKey := os.Getenv("OPENAIKEY")
	if openaiKey == "" {
		log.Println("ERROR: OPENAIKEY environment variable is not set")
		os.Exit(1)
	}

	// Create a proxy configuration
	config := proxy.ProxyConfig{
		Port:             8080,                      // Use a non-privileged port for testing
		OpenAIBaseURL:    "https://api.openai.com",  // Upstream OpenAI API URL
		DefaultOpenAIKey: openaiKey,                 // Fallback API key if a user doesn't have one
	}

	// Create a new factory with the configuration
	factory := proxy.NewFactory(config)

	// Add an example user configuration for the test key
	factory.AddUserConfig("test-key", proxy.UserConfig{
		Budget:      10000,           // 10,000 tokens
		ModelGroups: []string{"all"}, // Allow access to all models
		OpenAIKey:   "",              // Empty means use the default key
	})

	// Print debug info
	log.Printf("Added user config for 'test-key'")

	// Create a new server with the factory
	server := proxy.NewServer(factory)

	// Start the server
	fmt.Printf("OpenAI Proxy Server listening on port %d\n", config.Port)
	if err := server.Start(); err != nil {
		log.Printf("Error starting server: %v", err)
	}
}
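
For reference, the client call above boils down to a single OpenAI-style HTTP request against the proxy. The sketch below is a hypothetical raw-HTTP equivalent using only the Go standard library; it assumes the server (whose diff is suppressed further down) mirrors the OpenAI path layout under the /ai prefix, i.e. serves chat completions at /ai/chat/completions, and reads the proxy API key ("test-key") from a Bearer Authorization header, which is what the openai-go client sends by default.

package main

import (
	"bytes"
	"fmt"
	"io"
	"log"
	"net/http"
)

func main() {
	// Same request body the openai-go client builds in testProxyWithClient.
	body := []byte(`{
		"model": "gpt-3.5-turbo",
		"messages": [{"role": "user", "content": "Say this is a test"}]
	}`)

	// Assumed route: the OpenAI chat completions path under the proxy's /ai prefix.
	req, err := http.NewRequest(http.MethodPost,
		"http://localhost:8080/ai/chat/completions", bytes.NewReader(body))
	if err != nil {
		log.Fatalf("building request: %v", err)
	}
	req.Header.Set("Content-Type", "application/json")
	// Assumed auth: the proxy API key as a Bearer token (not a real OpenAI key).
	req.Header.Set("Authorization", "Bearer test-key")

	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		log.Fatalf("calling proxy: %v", err)
	}
	defer resp.Body.Close()

	out, _ := io.ReadAll(resp.Body)
	fmt.Printf("status=%d body=%s\n", resp.StatusCode, out)
}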
							
								
								
									
_pkg2_dont_use/heroservices/openaiproxy/factory.go (134 lines, new file)
@@ -0,0 +1,134 @@
package proxy

import (
	"errors"
	"os"
	"sync"
)

// Factory manages the proxy server and user configurations
type Factory struct {
	// Config is the proxy server configuration
	Config ProxyConfig

	// userConfigs is a map of API keys to user configurations
	userConfigs map[string]UserConfig

	// mu guards concurrent access to userConfigs
	mu sync.RWMutex
}

// NewFactory creates a new proxy factory with the given configuration
func NewFactory(config ProxyConfig) *Factory {
	// Check for the OPENAIKEY environment variable and use it if available
	if envKey := os.Getenv("OPENAIKEY"); envKey != "" {
		config.DefaultOpenAIKey = envKey
	}

	return &Factory{
		Config:      config,
		userConfigs: make(map[string]UserConfig),
	}
}

// AddUserConfig adds or updates a user configuration for the given API key
func (f *Factory) AddUserConfig(apiKey string, config UserConfig) {
	f.mu.Lock()
	defer f.mu.Unlock()
	f.userConfigs[apiKey] = config
}

// GetUserConfig retrieves a user configuration by API key
func (f *Factory) GetUserConfig(apiKey string) (UserConfig, error) {
	f.mu.RLock()
	defer f.mu.RUnlock()

	config, exists := f.userConfigs[apiKey]
	if !exists {
		return UserConfig{}, errors.New("invalid API key")
	}

	return config, nil
}

// RemoveUserConfig removes a user configuration by API key
func (f *Factory) RemoveUserConfig(apiKey string) {
	f.mu.Lock()
	defer f.mu.Unlock()
	delete(f.userConfigs, apiKey)
}

// GetOpenAIKey returns the OpenAI API key to use for a given proxy API key.
// It always returns the default OpenAI key taken from the environment variable,
// so every request to OpenAI uses our key rather than the user's key.
func (f *Factory) GetOpenAIKey(proxyAPIKey string) string {
	return f.Config.DefaultOpenAIKey
}

// DecreaseBudget decreases a user's budget by the specified amount.
// It returns an error if the user doesn't have enough budget.
func (f *Factory) DecreaseBudget(apiKey string, amount uint32) error {
	f.mu.Lock()
	defer f.mu.Unlock()

	config, exists := f.userConfigs[apiKey]
	if !exists {
		return errors.New("invalid API key")
	}

	if config.Budget < amount {
		return errors.New("insufficient budget")
	}

	config.Budget -= amount
	f.userConfigs[apiKey] = config
	return nil
}

// IncreaseBudget increases a user's budget by the specified amount
func (f *Factory) IncreaseBudget(apiKey string, amount uint32) error {
	f.mu.Lock()
	defer f.mu.Unlock()

	config, exists := f.userConfigs[apiKey]
	if !exists {
		return errors.New("invalid API key")
	}

	config.Budget += amount
	f.userConfigs[apiKey] = config
	return nil
}

// CanAccessModel checks whether a user can access a specific model
func (f *Factory) CanAccessModel(apiKey string, model string) bool {
	f.mu.RLock()
	defer f.mu.RUnlock()

	config, exists := f.userConfigs[apiKey]
	if !exists {
		return false
	}

	// If no model groups are specified, allow access to all models
	if len(config.ModelGroups) == 0 {
		return true
	}

	// Check whether the model is allowed by any of the user's model groups.
	// This is a placeholder: the real implementation depends on how model
	// groups are defined and mapped to specific models. For now, "all"
	// grants access to every model, and otherwise a group name must match
	// the model name exactly.
	for _, group := range config.ModelGroups {
		if group == "all" {
			return true
		}
		if group == model {
			return true
		}
	}

	return false
}
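
Taken on its own, the Factory above is a small thread-safe registry keyed by proxy API key. The following is a minimal sketch of how it might be exercised in isolation, assuming the import path used in cmd/main.go; the keys and amounts are illustrative only.

package main

import (
	"fmt"
	"log"

	proxy "git.ourworld.tf/herocode/heroagent/pkg/heroservices/openaiproxy"
)

func main() {
	// Build a factory with a default upstream key. Note that NewFactory
	// overrides DefaultOpenAIKey with OPENAIKEY from the environment if set.
	factory := proxy.NewFactory(proxy.ProxyConfig{
		Port:             8080,
		OpenAIBaseURL:    "https://api.openai.com",
		DefaultOpenAIKey: "sk-default", // illustrative placeholder
	})

	// Register a user keyed by their proxy API key.
	factory.AddUserConfig("alice-key", proxy.UserConfig{
		Budget:      500,
		ModelGroups: []string{"gpt-3.5-turbo"},
	})

	// Look the user up and check model access.
	cfg, err := factory.GetUserConfig("alice-key")
	if err != nil {
		log.Fatalf("lookup failed: %v", err)
	}
	fmt.Println("budget:", cfg.Budget)
	fmt.Println("can use gpt-3.5-turbo:", factory.CanAccessModel("alice-key", "gpt-3.5-turbo")) // true
	fmt.Println("can use gpt-4:", factory.CanAccessModel("alice-key", "gpt-4"))                 // false

	// Charge and refund budget; DecreaseBudget fails once the budget runs out.
	if err := factory.DecreaseBudget("alice-key", 200); err != nil {
		log.Fatalf("charge failed: %v", err)
	}
	if err := factory.DecreaseBudget("alice-key", 400); err != nil {
		fmt.Println("expected error:", err) // insufficient budget
	}
	_ = factory.IncreaseBudget("alice-key", 100)

	// Remove the user when done.
	factory.RemoveUserConfig("alice-key")
}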
							
								
								
									
_pkg2_dont_use/heroservices/openaiproxy/model.go (26 lines, new file)
@@ -0,0 +1,26 @@
package proxy

// UserConfig represents the configuration for a user.
// It contains information about the user's budget and allowed model groups.
type UserConfig struct {
	// Budget represents the virtual money the user has available
	Budget uint32 `json:"budget"`

	// ModelGroups is a list of model groups the user has access to
	ModelGroups []string `json:"model_groups"`

	// OpenAIKey is the OpenAI API key to use for this user's requests
	OpenAIKey string `json:"openai_key"`
}

// ProxyConfig represents the configuration for the AI proxy server
type ProxyConfig struct {
	// Port is the port to listen on
	Port int `json:"port"`

	// OpenAIBaseURL is the base URL for the OpenAI API
	OpenAIBaseURL string `json:"openai_base_url"`

	// DefaultOpenAIKey is the default OpenAI API key to use if not specified in UserConfig
	DefaultOpenAIKey string `json:"default_openai_key"`
}
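
Because both structs carry snake_case json tags, they serialize directly to a wire/config format. A minimal sketch of the resulting JSON shape for a UserConfig, again assuming the import path from cmd/main.go:

package main

import (
	"encoding/json"
	"fmt"

	proxy "git.ourworld.tf/herocode/heroagent/pkg/heroservices/openaiproxy"
)

func main() {
	user := proxy.UserConfig{
		Budget:      10000,
		ModelGroups: []string{"all"},
		OpenAIKey:   "", // empty: the proxy falls back to ProxyConfig.DefaultOpenAIKey
	}

	out, err := json.MarshalIndent(user, "", "  ")
	if err != nil {
		panic(err)
	}
	fmt.Println(string(out))
	// Prints:
	// {
	//   "budget": 10000,
	//   "model_groups": [
	//     "all"
	//   ],
	//   "openai_key": ""
	// }
}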
							
								
								
									
_pkg2_dont_use/heroservices/openaiproxy/openapi.yaml (35805 lines, new file)
File diff suppressed because it is too large.
							
								
								
									
_pkg2_dont_use/heroservices/openaiproxy/server.go (1125 lines, new file)
File diff suppressed because it is too large.