refactor!: move LLM configuration from in-app settings to CLI/env vars
- Remove `api_endpoint` from the Settings model and the settings UI
- Add `--llm-endpoint` / `AETHERA_LLM_ENDPOINT` and `--llm-key` / `AETHERA_LLM_KEY` CLI flags (the endpoint is required)
- Update the client constructor to accept an API key parameter
- Update tests and documentation to reflect the new configuration approach

BREAKING CHANGE: The LLM endpoint and key must now be provided via the `AETHERA_LLM_ENDPOINT` and `AETHERA_LLM_KEY` environment variables or the corresponding CLI flags instead of the Settings page.
This commit is contained in:
@@ -288,8 +288,12 @@ func populateUsageTimings(msgStats *types.MessageStats, usage openai.CompletionU
|
||||
return didChange
|
||||
}
|
||||
|
||||
func NewClient(baseURL *url.URL) *Client {
|
||||
oaiClient := openai.NewClient(option.WithBaseURL(baseURL.String()))
|
||||
func NewClient(baseURL *url.URL, apiKey string) *Client {
|
||||
opts := []option.RequestOption{option.WithBaseURL(baseURL.String())}
|
||||
if apiKey != "" {
|
||||
opts = append(opts, option.WithAPIKey(apiKey))
|
||||
}
|
||||
oaiClient := openai.NewClient(opts...)
|
||||
return &Client{oaiClient: &oaiClient}
|
||||
}
|
||||
|
||||
|
||||
Reference in New Issue
Block a user