refactor!: move LLM configuration from in-app settings to CLI/env vars
- Remove `api_endpoint` from Settings model and settings UI
- Add `--llm-endpoint` / `AETHERA_LLM_ENDPOINT` and `--llm-key` / `AETHERA_LLM_KEY` CLI flags (endpoint is required)
- Update client constructor to accept API key parameter
- Update tests and documentation to reflect new configuration approach

BREAKING CHANGE: LLM endpoint and key must now be provided via `AETHERA_LLM_ENDPOINT` and `AETHERA_LLM_KEY` environment variables or CLI flags instead of the Settings page.
This commit is contained in:
@@ -27,4 +27,7 @@ EXPOSE 8080
|
||||
ENV AETHERA_LISTEN=0.0.0.0
|
||||
ENV AETHERA_PORT=8080
|
||||
ENV AETHERA_DATA_DIR=/app/data
|
||||
# LLM Configuration (required)
|
||||
# ENV AETHERA_LLM_ENDPOINT=https://api.example.com/v1
|
||||
# ENV AETHERA_LLM_KEY=your-api-key-here
|
||||
ENTRYPOINT ["./aethera"]
|
||||
|
||||
16
README.md
16
README.md
@@ -32,7 +32,10 @@ make all # Build frontend + backend
|
||||
|
||||
```bash
|
||||
make docker
|
||||
docker run -p 8080:8080 -v aethera-data:/app/data aethera
|
||||
docker run -p 8080:8080 \
|
||||
-e AETHERA_LLM_ENDPOINT=https://api.example.com/v1 \
|
||||
-e AETHERA_LLM_KEY=your-key \
|
||||
-v aethera-data:/app/data aethera
|
||||
```
|
||||
|
||||
### Manual Build
|
||||
@@ -58,6 +61,8 @@ Configuration is available via CLI flags and environment variables (prefixed `AE
|
||||
|
||||
| Flag | Env Var | Default | Description |
|
||||
|----------------|---------------------|-------------|--------------------------------------------|
|
||||
| `--llm-endpoint` | `AETHERA_LLM_ENDPOINT` | *(required)* | OpenAI-compatible API endpoint URL |
|
||||
| `--llm-key` | `AETHERA_LLM_KEY` | | API key for authentication |
|
||||
| `--data-dir` | `AETHERA_DATA_DIR` | `./data` | Directory for chats, settings, and images |
|
||||
| `--static-dir` | `AETHERA_STATIC_DIR`| *(embedded)*| Serve frontend from disk (for development) |
|
||||
| `--listen` | `AETHERA_LISTEN` | `localhost` | Listen address |
|
||||
@@ -66,7 +71,7 @@ Configuration is available via CLI flags and environment variables (prefixed `AE
|
||||
Example:
|
||||
|
||||
```bash
|
||||
./backend/dist/aethera --port 3000 --listen 0.0.0.0
|
||||
AETHERA_LLM_ENDPOINT=https://api.example.com/v1 AETHERA_LLM_KEY=your-key ./backend/dist/aethera
|
||||
```
|
||||
|
||||
## Development
|
||||
@@ -89,10 +94,9 @@ This starts the Go backend (serving frontend from disk) and the frontend in watc
|
||||
|
||||
## Getting Started
|
||||
|
||||
1. **Configure Your API** — navigate to Settings and enter your OpenAI-compatible API endpoint URL
|
||||
2. **Start Chatting** — use the Chat interface to begin conversations
|
||||
3. **Generate Images** — visit the Images page to create or edit images
|
||||
4. **Manage Content** — view, delete, and organize conversations and images
|
||||
1. **Configure Your API** — set `AETHERA_LLM_ENDPOINT` and optionally `AETHERA_LLM_KEY` environment variables
|
||||
2. **Start the Server** — run the binary and navigate to `http://localhost:8080`
|
||||
3. **Configure Model Selectors** — navigate to Settings to configure model selectors for chat and image generation
|
||||
|
||||
## Supported AI Services
|
||||
|
||||
|
||||
@@ -15,6 +15,8 @@ type cliParams struct {
|
||||
DataDir string
|
||||
StaticDir string
|
||||
SettingsFile string
|
||||
LLMEndpoint string
|
||||
LLMKey string
|
||||
}
|
||||
|
||||
// getEnvOrDefault returns the value of an environment variable or a default value
|
||||
@@ -36,6 +38,11 @@ func getEnvIntOrDefault(key string, defaultValue int) int {
|
||||
}
|
||||
|
||||
func (p *cliParams) Validate() error {
|
||||
// Require LLM Configuration
|
||||
if p.LLMEndpoint == "" {
|
||||
return fmt.Errorf("LLM endpoint is required (set AETHERA_LLM_ENDPOINT)")
|
||||
}
|
||||
|
||||
// Ensure Generated Directories
|
||||
imgDir := path.Join(p.DataDir, "generated/images")
|
||||
if err := os.MkdirAll(imgDir, 0755); err != nil {
|
||||
|
||||
@@ -17,6 +17,8 @@ var (
|
||||
ListenPort: getEnvIntOrDefault("PORT", 8080),
|
||||
DataDir: getEnvOrDefault("DATA_DIR", "./data"),
|
||||
StaticDir: getEnvOrDefault("STATIC_DIR", ""),
|
||||
LLMEndpoint: getEnvOrDefault("LLM_ENDPOINT", ""),
|
||||
LLMKey: getEnvOrDefault("LLM_KEY", ""),
|
||||
}
|
||||
rootCmd = &cobra.Command{Use: "aethera"}
|
||||
)
|
||||
@@ -26,6 +28,8 @@ func init() {
|
||||
rootCmd.PersistentFlags().StringVar(¶ms.StaticDir, "static-dir", params.StaticDir, "Directory to serve static frontend files from instead of embedded assets (env: AETHERA_STATIC_DIR)")
|
||||
rootCmd.PersistentFlags().StringVar(¶ms.ListenAddr, "listen", params.ListenAddr, "Address to listen on (env: AETHERA_LISTEN)")
|
||||
rootCmd.PersistentFlags().IntVar(¶ms.ListenPort, "port", params.ListenPort, "Port to listen on (env: AETHERA_PORT)")
|
||||
rootCmd.PersistentFlags().StringVar(¶ms.LLMEndpoint, "llm-endpoint", params.LLMEndpoint, "LLM API endpoint URL (env: AETHERA_LLM_ENDPOINT)")
|
||||
rootCmd.PersistentFlags().StringVar(¶ms.LLMKey, "llm-key", params.LLMKey, "LLM API key (env: AETHERA_LLM_KEY)")
|
||||
}
|
||||
|
||||
func main() {
|
||||
@@ -42,7 +46,7 @@ func main() {
|
||||
|
||||
// Start Server
|
||||
rootCmd.Run = func(cmd *cobra.Command, args []string) {
|
||||
server.StartServer(fileStore, params.DataDir, params.StaticDir, params.ListenAddr, params.ListenPort)
|
||||
server.StartServer(fileStore, params.DataDir, params.StaticDir, params.ListenAddr, params.ListenPort, params.LLMEndpoint, params.LLMKey)
|
||||
}
|
||||
|
||||
if err := rootCmd.Execute(); err != nil {
|
||||
|
||||
@@ -28,14 +28,18 @@ type API struct {
|
||||
store store.Store
|
||||
client *client.Client
|
||||
dataDir string
|
||||
llmEndpoint string
|
||||
llmKey string
|
||||
generationManager *generationManager
|
||||
}
|
||||
|
||||
func New(s store.Store, dataDir string, logger *logrus.Logger) *API {
|
||||
func New(s store.Store, dataDir string, logger *logrus.Logger, llmEndpoint, llmKey string) *API {
|
||||
return &API{
|
||||
store: s,
|
||||
dataDir: dataDir,
|
||||
logger: logger.WithField("service", "api"),
|
||||
llmEndpoint: llmEndpoint,
|
||||
llmKey: llmKey,
|
||||
generationManager: newGenerationManager(),
|
||||
}
|
||||
}
|
||||
@@ -68,24 +72,6 @@ func (a *API) PostSettings(w http.ResponseWriter, r *http.Request) {
|
||||
return
|
||||
}
|
||||
|
||||
if apiEndpoint := newSettings.APIEndpoint; apiEndpoint != "" {
|
||||
baseURL, err := url.Parse(apiEndpoint)
|
||||
if err != nil {
|
||||
errMsg := fmt.Sprintf("Invalid API Endpoint URL: %q", baseURL)
|
||||
log.WithError(err).Error(errMsg)
|
||||
http.Error(w, errMsg, http.StatusBadRequest)
|
||||
return
|
||||
}
|
||||
|
||||
testClient := client.NewClient(baseURL)
|
||||
if _, err := testClient.GetModels(r.Context()); err != nil {
|
||||
log.WithError(err).Error("failed to access configured API endpoint")
|
||||
http.Error(w, "API endpoint inaccessible", http.StatusBadRequest)
|
||||
return
|
||||
}
|
||||
a.client = nil
|
||||
}
|
||||
|
||||
if err := a.store.SaveSettings(&newSettings); err != nil {
|
||||
log.WithError(err).Error("failed to save settings")
|
||||
http.Error(w, "Failed to save application settings", http.StatusInternalServerError)
|
||||
@@ -516,20 +502,13 @@ func (a *API) getClient() (*client.Client, error) {
|
||||
return a.client, nil
|
||||
}
|
||||
|
||||
// Get Settings & Validate Endpoint
|
||||
settings, err := a.store.GetSettings()
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to retrieve application settings: %w", err)
|
||||
} else if settings.APIEndpoint == "" {
|
||||
return nil, errors.New("no API endpoint configured in settings")
|
||||
}
|
||||
|
||||
baseURL, err := url.Parse(settings.APIEndpoint)
|
||||
// Parse LLM Endpoint from Config
|
||||
baseURL, err := url.Parse(a.llmEndpoint)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("invalid API endpoint URL: %w", err)
|
||||
}
|
||||
|
||||
a.client = client.NewClient(baseURL)
|
||||
a.client = client.NewClient(baseURL, a.llmKey)
|
||||
return a.client, nil
|
||||
}
|
||||
|
||||
|
||||
@@ -288,8 +288,12 @@ func populateUsageTimings(msgStats *types.MessageStats, usage openai.CompletionU
|
||||
return didChange
|
||||
}
|
||||
|
||||
func NewClient(baseURL *url.URL) *Client {
|
||||
oaiClient := openai.NewClient(option.WithBaseURL(baseURL.String()))
|
||||
func NewClient(baseURL *url.URL, apiKey string) *Client {
|
||||
opts := []option.RequestOption{option.WithBaseURL(baseURL.String())}
|
||||
if apiKey != "" {
|
||||
opts = append(opts, option.WithAPIKey(apiKey))
|
||||
}
|
||||
oaiClient := openai.NewClient(opts...)
|
||||
return &Client{oaiClient: &oaiClient}
|
||||
}
|
||||
|
||||
|
||||
@@ -20,7 +20,7 @@ func TestSendMessage(t *testing.T) {
|
||||
if err != nil {
|
||||
t.Fatalf("Failed to parse base URL: %v", err)
|
||||
}
|
||||
client := NewClient(baseURL)
|
||||
client := NewClient(baseURL, os.Getenv("AETHERA_LLM_KEY"))
|
||||
|
||||
// Create Context
|
||||
ctx, cancel := context.WithTimeout(context.Background(), 120*time.Second)
|
||||
@@ -69,7 +69,7 @@ func TestSummarizeChat(t *testing.T) {
|
||||
if err != nil {
|
||||
t.Fatalf("Failed to parse base URL: %v", err)
|
||||
}
|
||||
client := NewClient(baseURL)
|
||||
client := NewClient(baseURL, os.Getenv("AETHERA_LLM_KEY"))
|
||||
|
||||
// Create Context
|
||||
ctx, cancel := context.WithTimeout(context.Background(), 30*time.Second)
|
||||
@@ -96,7 +96,7 @@ func TestSendMessageWithImage(t *testing.T) {
|
||||
if err != nil {
|
||||
t.Fatalf("Failed to parse base URL: %v", err)
|
||||
}
|
||||
client := NewClient(baseURL)
|
||||
client := NewClient(baseURL, os.Getenv("AETHERA_LLM_KEY"))
|
||||
|
||||
// Create Context
|
||||
ctx, cancel := context.WithTimeout(context.Background(), 120*time.Second)
|
||||
|
||||
@@ -13,12 +13,12 @@ import (
|
||||
"reichard.io/aethera/web"
|
||||
)
|
||||
|
||||
func StartServer(settingsStore store.Store, dataDir, staticDir, listenAddress string, listenPort int) {
|
||||
func StartServer(settingsStore store.Store, dataDir, staticDir, listenAddress string, listenPort int, llmEndpoint, llmKey string) {
|
||||
mux := http.NewServeMux()
|
||||
|
||||
// Create API Instance - use settingsStore as the unified store for both settings and chat
|
||||
logger := logrus.New()
|
||||
api := api.New(settingsStore, dataDir, logger)
|
||||
api := api.New(settingsStore, dataDir, logger, llmEndpoint, llmKey)
|
||||
|
||||
// Serve Static Assets
|
||||
if staticDir != "" {
|
||||
|
||||
@@ -141,7 +141,6 @@ func TestInMemoryStore_SaveSettings(t *testing.T) {
|
||||
store := NewInMemoryStore()
|
||||
|
||||
settings := &Settings{
|
||||
APIEndpoint: "http://example.com",
|
||||
ImageEditSelector: ".image-edit",
|
||||
ImageGenerationSelector: ".image-gen",
|
||||
TextGenerationSelector: ".text-gen",
|
||||
@@ -161,7 +160,6 @@ func TestInMemoryStore_GetSettings(t *testing.T) {
|
||||
|
||||
// Set some settings
|
||||
settings = &Settings{
|
||||
APIEndpoint: "http://example.com",
|
||||
ImageEditSelector: ".image-edit",
|
||||
ImageGenerationSelector: ".image-gen",
|
||||
TextGenerationSelector: ".text-gen",
|
||||
@@ -172,5 +170,5 @@ func TestInMemoryStore_GetSettings(t *testing.T) {
|
||||
// Get the settings
|
||||
settings, err = store.GetSettings()
|
||||
require.NoError(t, err)
|
||||
assert.Equal(t, "http://example.com", settings.APIEndpoint)
|
||||
assert.Equal(t, ".image-edit", settings.ImageEditSelector)
|
||||
}
|
||||
|
||||
@@ -15,7 +15,6 @@ var _ Store = (*FileStore)(nil)
|
||||
|
||||
// Settings represents the application settings
|
||||
type Settings struct {
|
||||
APIEndpoint string `json:"api_endpoint,omitempty"`
|
||||
ImageEditSelector string `json:"image_edit_selector,omitempty"`
|
||||
ImageGenerationSelector string `json:"image_generation_selector,omitempty"`
|
||||
TextGenerationSelector string `json:"text_generation_selector,omitempty"`
|
||||
|
||||
@@ -209,7 +209,6 @@ func TestFileStore_SaveSettings(t *testing.T) {
|
||||
require.NoError(t, err)
|
||||
|
||||
settings := &Settings{
|
||||
APIEndpoint: "http://example.com",
|
||||
ImageEditSelector: ".image-edit",
|
||||
ImageGenerationSelector: ".image-gen",
|
||||
TextGenerationSelector: ".text-gen",
|
||||
@@ -237,7 +236,6 @@ func TestFileStore_GetSettings(t *testing.T) {
|
||||
|
||||
// Set some settings
|
||||
settings = &Settings{
|
||||
APIEndpoint: "http://example.com",
|
||||
ImageEditSelector: ".image-edit",
|
||||
ImageGenerationSelector: ".image-gen",
|
||||
TextGenerationSelector: ".text-gen",
|
||||
@@ -248,5 +246,5 @@ func TestFileStore_GetSettings(t *testing.T) {
|
||||
// Get the settings
|
||||
settings, err = store.GetSettings()
|
||||
require.NoError(t, err)
|
||||
assert.Equal(t, "http://example.com", settings.APIEndpoint)
|
||||
assert.Equal(t, ".image-edit", settings.ImageEditSelector)
|
||||
}
|
||||
|
||||
@@ -3,25 +3,6 @@
|
||||
@submit.prevent="saveSettings"
|
||||
class="p-0.5 w-full flex flex-col gap-4 pt-16 mx-auto px-4 md:px-6 max-w-6xl"
|
||||
>
|
||||
<div>
|
||||
<label
|
||||
for="apiEndpoint"
|
||||
class="block text-sm font-semibold text-primary-700"
|
||||
>API Endpoint URL</label
|
||||
>
|
||||
<div class="ml-1">
|
||||
<input
|
||||
type="url"
|
||||
id="apiEndpoint"
|
||||
name="apiEndpoint"
|
||||
x-model="settings.api_endpoint"
|
||||
class="mt-1 p-1 block w-full rounded-md border-primary-400 shadow focus:border-secondary-500 focus:ring-secondary-500 sm:text-sm text-primary-900"
|
||||
placeholder="https://api.example.com/v1"
|
||||
required
|
||||
/>
|
||||
<p class="mt-2 text-xs text-primary-500">URL of your API endpoint</p>
|
||||
</div>
|
||||
</div>
|
||||
<div>
|
||||
<span class="text-sm font-medium font-semibold text-primary-700"
|
||||
>Selectors</span
|
||||
|
||||
@@ -38,7 +38,6 @@ export interface Model {
|
||||
}
|
||||
|
||||
export interface Settings {
|
||||
api_endpoint?: string;
|
||||
image_edit_selector?: string;
|
||||
image_generation_selector?: string;
|
||||
text_generation_selector?: string;
|
||||
|
||||
Reference in New Issue
Block a user