diff --git a/.gitignore b/.gitignore
index 04de303..ddc419e 100644
--- a/.gitignore
+++ b/.gitignore
@@ -16,3 +16,6 @@ vite.generated.ts
# Log files
logs/
*.log
+
+# Environment variables
+.env
diff --git a/pom.xml b/pom.xml
index 869cb80..3e9ed96 100644
--- a/pom.xml
+++ b/pom.xml
@@ -108,6 +108,13 @@
spring-boot-starter-mail
+
+
+ me.paulschwarz
+ spring-dotenv
+ 4.0.0
+
+
org.springframework.boot
diff --git a/src/main/java/de/assecutor/votianlt/ai/config/LlmConfig.java b/src/main/java/de/assecutor/votianlt/ai/config/LlmConfig.java
index f193ee3..d74233a 100644
--- a/src/main/java/de/assecutor/votianlt/ai/config/LlmConfig.java
+++ b/src/main/java/de/assecutor/votianlt/ai/config/LlmConfig.java
@@ -8,10 +8,12 @@ import org.springframework.context.annotation.Configuration;
import java.net.HttpURLConnection;
import java.net.URI;
import java.net.URL;
+import java.util.Base64;
/**
- * Configuration for LLM integration. Supports LM Studio and Moonshot AI.
- * Switch provider via {@code app.ai.provider=lmstudio|moonshot} in application.properties.
+ * Configuration for LLM integration. Supports LM Studio and Moonshot AI. Switch
+ * provider via {@code app.ai.provider=lmstudio|moonshot} in
+ * application.properties.
*/
@Configuration
@Slf4j
@@ -20,12 +22,18 @@ public class LlmConfig {
@Value("${app.ai.provider:lmstudio}")
private String provider;
- @Value("${app.ai.lmstudio.base-url:https://lmstudio.appcreation.de}")
+ @Value("${app.ai.lmstudio.base-url}")
private String lmstudioBaseUrl;
- @Value("${app.ai.lmstudio.model:local-model}")
+ @Value("${app.ai.lmstudio.model:local-model}")
private String lmstudioModel;
+ @Value("${app.ai.lmstudio.htaccess-username:}")
+ private String lmstudioHtaccessUsername;
+
+ @Value("${app.ai.lmstudio.htaccess-password:}")
+ private String lmstudioHtaccessPassword;
+
@Value("${app.ai.moonshot.base-url:https://api.moonshot.ai}")
private String moonshotBaseUrl;
@@ -47,6 +55,7 @@ public class LlmConfig {
} else {
log.info("Base URL: {}", lmstudioBaseUrl);
log.info("Model: {}", lmstudioModel);
+ log.info("HTACCESS auth: {}", hasHtaccessCredentials() ? "configured" : "not configured");
testConnection(lmstudioBaseUrl, lmstudioModel, null);
}
}
@@ -72,7 +81,11 @@ public class LlmConfig {
connection.setConnectTimeout(5000);
connection.setReadTimeout(10000);
- if (apiKey != null && !apiKey.isBlank()) {
+ if (hasHtaccessCredentials()) {
+ String credentials = lmstudioHtaccessUsername + ":" + lmstudioHtaccessPassword;
+ String encoded = Base64.getEncoder().encodeToString(credentials.getBytes(java.nio.charset.StandardCharsets.UTF_8));
+ connection.setRequestProperty("Authorization", "Basic " + encoded);
+ } else if (apiKey != null && !apiKey.isBlank()) {
connection.setRequestProperty("Authorization", "Bearer " + apiKey);
}
@@ -121,4 +134,17 @@ public class LlmConfig {
public String getModel() {
return "moonshot".equalsIgnoreCase(provider) ? moonshotModel : lmstudioModel;
}
+
+ public boolean hasHtaccessCredentials() {
+ return lmstudioHtaccessUsername != null && !lmstudioHtaccessUsername.isBlank()
+ && lmstudioHtaccessPassword != null && !lmstudioHtaccessPassword.isBlank();
+ }
+
+ public String getLmstudioHtaccessUsername() {
+ return lmstudioHtaccessUsername;
+ }
+
+ public String getLmstudioHtaccessPassword() {
+ return lmstudioHtaccessPassword;
+ }
}
diff --git a/src/main/java/de/assecutor/votianlt/ai/service/LlmRestClient.java b/src/main/java/de/assecutor/votianlt/ai/service/LlmRestClient.java
index f22bcf1..e5a5a83 100644
--- a/src/main/java/de/assecutor/votianlt/ai/service/LlmRestClient.java
+++ b/src/main/java/de/assecutor/votianlt/ai/service/LlmRestClient.java
@@ -9,14 +9,16 @@ import org.springframework.http.MediaType;
import org.springframework.stereotype.Component;
import org.springframework.web.reactive.function.client.WebClient;
+import java.nio.charset.StandardCharsets;
import java.time.Duration;
+import java.util.Base64;
import java.util.List;
import java.util.Map;
/**
- * Direct REST client for LLM APIs (LM Studio or Moonshot AI).
- * Provider is selected via {@code app.ai.provider} in application.properties.
- * Both providers expose an OpenAI-compatible /v1/chat/completions endpoint.
+ * Direct REST client for LLM APIs (LM Studio or Moonshot AI). Provider is
+ * selected via {@code app.ai.provider} in application.properties. Both
+ * providers expose an OpenAI-compatible /v1/chat/completions endpoint.
*/
@Component
@Slf4j
@@ -27,14 +29,14 @@ public class LlmRestClient {
private final String model;
private final String provider;
- public LlmRestClient(
- @Value("${app.ai.provider:lmstudio}") String provider,
- @Value("${app.ai.lmstudio.base-url:https://lmstudio.appcreation.de}") String lmstudioBaseUrl,
- @Value("${app.ai.lmstudio.model:local-model}") String lmstudioModel,
- @Value("${app.ai.moonshot.base-url:https://api.moonshot.ai}") String moonshotBaseUrl,
- @Value("${app.ai.moonshot.api-key:}") String moonshotApiKey,
- @Value("${app.ai.moonshot.model:moonshot-v1-8k}") String moonshotModel,
- ObjectMapper objectMapper) {
+ public LlmRestClient(@Value("${app.ai.provider:lmstudio}") String provider,
+ @Value("${app.ai.lmstudio.base-url}") String lmstudioBaseUrl,
+ @Value("${app.ai.lmstudio.model:local-model}") String lmstudioModel,
+ @Value("${app.ai.lmstudio.htaccess-username:}") String lmstudioHtaccessUsername,
+ @Value("${app.ai.lmstudio.htaccess-password:}") String lmstudioHtaccessPassword,
+ @Value("${app.ai.moonshot.base-url:https://api.moonshot.ai}") String moonshotBaseUrl,
+ @Value("${app.ai.moonshot.api-key:}") String moonshotApiKey,
+ @Value("${app.ai.moonshot.model:moonshot-v1-8k}") String moonshotModel, ObjectMapper objectMapper) {
this.provider = provider.trim().toLowerCase();
this.objectMapper = objectMapper;
@@ -52,8 +54,18 @@ public class LlmRestClient {
} else {
this.model = lmstudioModel;
builder.baseUrl(lmstudioBaseUrl + "/v1/chat/completions");
- log.info("LlmRestClient initialized - Provider: lmstudio, URL: {}/v1/chat/completions, Model: {}",
- lmstudioBaseUrl, lmstudioModel);
+ if (lmstudioHtaccessUsername != null && !lmstudioHtaccessUsername.isBlank()
+ && lmstudioHtaccessPassword != null && !lmstudioHtaccessPassword.isBlank()) {
+ String credentials = lmstudioHtaccessUsername + ":" + lmstudioHtaccessPassword;
+ String encoded = Base64.getEncoder()
+ .encodeToString(credentials.getBytes(StandardCharsets.UTF_8));
+ builder.defaultHeader(HttpHeaders.AUTHORIZATION, "Basic " + encoded);
+ log.info("LlmRestClient initialized - Provider: lmstudio (with HTACCESS auth), "
+ + "URL: {}/v1/chat/completions, Model: {}", lmstudioBaseUrl, lmstudioModel);
+ } else {
+ log.info("LlmRestClient initialized - Provider: lmstudio, URL: {}/v1/chat/completions, Model: {}",
+ lmstudioBaseUrl, lmstudioModel);
+ }
}
this.webClient = builder.build();
@@ -62,8 +74,10 @@ public class LlmRestClient {
/**
* Send a chat completion request.
*
- * @param systemPrompt System prompt for context
- * @param userMessage User message/question
+ * @param systemPrompt
+ * System prompt for context
+ * @param userMessage
+ * User message/question
* @return LLM response text, or null on error
*/
public String chat(String systemPrompt, String userMessage) {
@@ -73,34 +87,29 @@ public class LlmRestClient {
/**
* Send a chat completion request with custom parameters.
*
- * @param systemPrompt System prompt for context
- * @param userMessage User message/question
- * @param temperature Temperature for response randomness (0.0-1.0)
- * @param maxTokens Maximum tokens in response
+ * @param systemPrompt
+ * System prompt for context
+ * @param userMessage
+ * User message/question
+ * @param temperature
+ * Temperature for response randomness (0.0-1.0)
+ * @param maxTokens
+ * Maximum tokens in response
* @return LLM response text, or null on error
*/
public String chat(String systemPrompt, String userMessage, double temperature, int maxTokens) {
try {
- Map request = Map.of(
- "model", model,
- "messages", List.of(
- Map.of("role", "system", "content", systemPrompt != null ? systemPrompt : ""),
+ Map request = Map.of("model", model, "messages",
+ List.of(Map.of("role", "system", "content", systemPrompt != null ? systemPrompt : ""),
Map.of("role", "user", "content", userMessage)),
- "temperature", temperature,
- "max_tokens", maxTokens,
- "stream", false);
+ "temperature", temperature, "max_tokens", maxTokens, "stream", false);
- log.info("Sending request to LLM [{}] (model: {}, prompt length: {} chars)...",
- provider, model, userMessage.length());
+ log.info("Sending request to LLM [{}] (model: {}, prompt length: {} chars)...", provider, model,
+ userMessage.length());
long startTime = System.currentTimeMillis();
- String response = webClient.post()
- .contentType(MediaType.APPLICATION_JSON)
- .bodyValue(request)
- .retrieve()
- .bodyToMono(String.class)
- .timeout(Duration.ofSeconds(120))
- .block();
+ String response = webClient.post().contentType(MediaType.APPLICATION_JSON).bodyValue(request).retrieve()
+ .bodyToMono(String.class).timeout(Duration.ofSeconds(120)).block();
long duration = System.currentTimeMillis() - startTime;
log.info("LLM response received in {}ms", duration);
diff --git a/src/main/resources/application-dev.properties b/src/main/resources/application-dev.properties
index 12c25a3..b38f4d0 100644
--- a/src/main/resources/application-dev.properties
+++ b/src/main/resources/application-dev.properties
@@ -1,7 +1,7 @@
# Development-specific configuration
# MongoDB - Development database
-spring.data.mongodb.uri=mongodb://192.168.180.25:27017/votianlt_dev
+spring.data.mongodb.uri=${MONGODB_URI}
# Enable browser launch in development
vaadin.launch-browser=true
diff --git a/src/main/resources/application-production.properties b/src/main/resources/application-production.properties
index 0c674c3..2a3495a 100644
--- a/src/main/resources/application-production.properties
+++ b/src/main/resources/application-production.properties
@@ -1,7 +1,7 @@
# Production-specific configuration
# MongoDB - Production database
-spring.data.mongodb.uri=mongodb://192.168.180.25:27017/votianlt
+spring.data.mongodb.uri=${MONGODB_URI}
# Disable browser launch in production
vaadin.launch-browser=false
diff --git a/src/main/resources/application.properties b/src/main/resources/application.properties
index 918a45e..4b05ebf 100644
--- a/src/main/resources/application.properties
+++ b/src/main/resources/application.properties
@@ -31,7 +31,7 @@ vaadin.allowed-packages=com.vaadin,org.vaadin,de.assecutor.votianlt
spring.jpa.open-in-view=false
# MongoDB - Default configuration (override in profile-specific files)
-spring.data.mongodb.uri=mongodb://192.168.180.25:27017/votianlt_dev
+spring.data.mongodb.uri=${MONGODB_URI}
spring.data.mongodb.auto-index-creation=true
spring.data.mongodb.socket-timeout=30000
spring.data.mongodb.connect-timeout=10000
@@ -79,8 +79,10 @@ app.google.maps.api-key=AIzaSyDnbitL06iLp3elmj-WtPudCykX9xvXcVE
app.ai.provider=moonshot
# --- LM Studio ---
-app.ai.lmstudio.base-url=https://lmstudio.appcreation.de
+app.ai.lmstudio.base-url=${LMSTUDIO_URL:http://localhost:1234}
app.ai.lmstudio.model=local-model
+app.ai.lmstudio.htaccess-username=${LMSTUDIO_HTACCESS_USERNAME:}
+app.ai.lmstudio.htaccess-password=${LMSTUDIO_HTACCESS_PASSWORD:}
# --- Moonshot AI (kimi) ---
app.ai.moonshot.base-url=https://api.moonshot.ai
@@ -88,7 +90,7 @@ app.ai.moonshot.api-key=sk-EfHJfwCsxiZbOoBJ21OLWb9RUJQXSXAFIFGKnOedKke5JYZp
app.ai.moonshot.model=moonshot-v1-8k
# Spring AI OpenAI properties (Pflicht für Auto-Configuration, werden vom LlmRestClient überschrieben)
-spring.ai.openai.base-url=https://lmstudio.appcreation.de
+spring.ai.openai.base-url=${LMSTUDIO_URL:http://localhost:1234}
spring.ai.openai.api-key=not-used
spring.ai.openai.chat.options.model=local-model
spring.ai.openai.chat.options.temperature=0.7