Konfiguration auf Environment-Variablen umgestellt

Sensible Daten (MongoDB URI, LM Studio URL, HTACCESS-Credentials) werden
jetzt aus Environment-Variablen gelesen. Spring-dotenv Dependency hinzugefügt
für automatisches Laden der .env-Datei. HTACCESS Basic Auth Support für
LM Studio integriert.

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
This commit is contained in:
2026-03-03 16:05:33 +01:00
parent c002a33949
commit dff716d97f
7 changed files with 92 additions and 45 deletions

3
.gitignore vendored
View File

@@ -16,3 +16,6 @@ vite.generated.ts
# Log files
logs/
*.log
# Environment variables
.env

View File

@@ -108,6 +108,13 @@
<artifactId>spring-boot-starter-mail</artifactId>
</dependency>
<!-- .env file support -->
<dependency>
<groupId>me.paulschwarz</groupId>
<artifactId>spring-dotenv</artifactId>
<version>4.0.0</version>
</dependency>
<!-- Spring WebFlux for direct LLM API calls (like aimailassistant) -->
<dependency>
<groupId>org.springframework.boot</groupId>

View File

@@ -8,10 +8,12 @@ import org.springframework.context.annotation.Configuration;
import java.net.HttpURLConnection;
import java.net.URI;
import java.net.URL;
import java.util.Base64;
/**
* Configuration for LLM integration. Supports LM Studio and Moonshot AI.
* Switch provider via {@code app.ai.provider=lmstudio|moonshot} in application.properties.
* Configuration for LLM integration. Supports LM Studio and Moonshot AI. Switch
* provider via {@code app.ai.provider=lmstudio|moonshot} in
* application.properties.
*/
@Configuration
@Slf4j
@@ -20,12 +22,18 @@ public class LlmConfig {
@Value("${app.ai.provider:lmstudio}")
private String provider;
@Value("${app.ai.lmstudio.base-url:https://lmstudio.appcreation.de}")
@Value("${app.ai.lmstudio.base-url}")
private String lmstudioBaseUrl;
@Value("${app.ai.lmstudio.model:local-model}")
@Value("${app.ai.lmstudio.model}")
private String lmstudioModel;
@Value("${app.ai.lmstudio.htaccess-username}")
private String lmstudioHtaccessUsername;
@Value("${app.ai.lmstudio.htaccess-password}")
private String lmstudioHtaccessPassword;
@Value("${app.ai.moonshot.base-url:https://api.moonshot.ai}")
private String moonshotBaseUrl;
@@ -47,6 +55,7 @@ public class LlmConfig {
} else {
log.info("Base URL: {}", lmstudioBaseUrl);
log.info("Model: {}", lmstudioModel);
log.info("HTACCESS auth: {}", hasHtaccessCredentials() ? "configured" : "not configured");
testConnection(lmstudioBaseUrl, lmstudioModel, null);
}
}
@@ -72,7 +81,11 @@ public class LlmConfig {
connection.setConnectTimeout(5000);
connection.setReadTimeout(10000);
if (apiKey != null && !apiKey.isBlank()) {
if (hasHtaccessCredentials()) {
String credentials = lmstudioHtaccessUsername + ":" + lmstudioHtaccessPassword;
String encoded = Base64.getEncoder().encodeToString(credentials.getBytes());
connection.setRequestProperty("Authorization", "Basic " + encoded);
} else if (apiKey != null && !apiKey.isBlank()) {
connection.setRequestProperty("Authorization", "Bearer " + apiKey);
}
@@ -121,4 +134,17 @@ public class LlmConfig {
/**
 * Resolves the model identifier for the currently selected provider.
 *
 * @return the Moonshot model name when {@code app.ai.provider} is
 *         {@code moonshot} (case-insensitive), otherwise the LM Studio model
 */
public String getModel() {
    if ("moonshot".equalsIgnoreCase(provider)) {
        return moonshotModel;
    }
    return lmstudioModel;
}
/**
 * Reports whether LM Studio HTACCESS Basic Auth is usable, i.e. both the
 * username and the password properties are present and non-blank.
 *
 * @return {@code true} only when both credentials are set and non-blank
 */
public boolean hasHtaccessCredentials() {
    boolean userSet = lmstudioHtaccessUsername != null && !lmstudioHtaccessUsername.isBlank();
    boolean passSet = lmstudioHtaccessPassword != null && !lmstudioHtaccessPassword.isBlank();
    return userSet && passSet;
}
/**
 * Returns the HTACCESS Basic Auth username configured for LM Studio via
 * {@code app.ai.lmstudio.htaccess-username} (may be blank or null when the
 * property is not set — callers should check {@code hasHtaccessCredentials()}).
 */
public String getLmstudioHtaccessUsername() {
return lmstudioHtaccessUsername;
}
/**
 * Returns the HTACCESS Basic Auth password configured for LM Studio via
 * {@code app.ai.lmstudio.htaccess-password} (may be blank or null when unset).
 */
// NOTE(review): this exposes the raw secret to any caller — make sure the
// value is never written to logs or error messages.
public String getLmstudioHtaccessPassword() {
return lmstudioHtaccessPassword;
}
}

View File

@@ -9,14 +9,16 @@ import org.springframework.http.MediaType;
import org.springframework.stereotype.Component;
import org.springframework.web.reactive.function.client.WebClient;
import java.nio.charset.StandardCharsets;
import java.time.Duration;
import java.util.Base64;
import java.util.List;
import java.util.Map;
/**
* Direct REST client for LLM APIs (LM Studio or Moonshot AI).
* Provider is selected via {@code app.ai.provider} in application.properties.
* Both providers expose an OpenAI-compatible /v1/chat/completions endpoint.
* Direct REST client for LLM APIs (LM Studio or Moonshot AI). Provider is
* selected via {@code app.ai.provider} in application.properties. Both
* providers expose an OpenAI-compatible /v1/chat/completions endpoint.
*/
@Component
@Slf4j
@@ -27,14 +29,14 @@ public class LlmRestClient {
private final String model;
private final String provider;
public LlmRestClient(
@Value("${app.ai.provider:lmstudio}") String provider,
@Value("${app.ai.lmstudio.base-url:https://lmstudio.appcreation.de}") String lmstudioBaseUrl,
@Value("${app.ai.lmstudio.model:local-model}") String lmstudioModel,
@Value("${app.ai.moonshot.base-url:https://api.moonshot.ai}") String moonshotBaseUrl,
@Value("${app.ai.moonshot.api-key:}") String moonshotApiKey,
@Value("${app.ai.moonshot.model:moonshot-v1-8k}") String moonshotModel,
ObjectMapper objectMapper) {
public LlmRestClient(@Value("${app.ai.provider}") String provider,
@Value("${app.ai.lmstudio.base-url}") String lmstudioBaseUrl,
@Value("${app.ai.lmstudio.model}") String lmstudioModel,
@Value("${app.ai.lmstudio.htaccess-username}") String lmstudioHtaccessUsername,
@Value("${app.ai.lmstudio.htaccess-password}") String lmstudioHtaccessPassword,
@Value("${app.ai.moonshot.base-url}") String moonshotBaseUrl,
@Value("${app.ai.moonshot.api-key}") String moonshotApiKey,
@Value("${app.ai.moonshot.model}") String moonshotModel, ObjectMapper objectMapper) {
this.provider = provider.trim().toLowerCase();
this.objectMapper = objectMapper;
@@ -52,8 +54,18 @@ public class LlmRestClient {
} else {
this.model = lmstudioModel;
builder.baseUrl(lmstudioBaseUrl + "/v1/chat/completions");
log.info("LlmRestClient initialized - Provider: lmstudio, URL: {}/v1/chat/completions, Model: {}",
lmstudioBaseUrl, lmstudioModel);
if (lmstudioHtaccessUsername != null && !lmstudioHtaccessUsername.isBlank()
&& lmstudioHtaccessPassword != null && !lmstudioHtaccessPassword.isBlank()) {
String credentials = lmstudioHtaccessUsername + ":" + lmstudioHtaccessPassword;
String encoded = Base64.getEncoder()
.encodeToString(credentials.getBytes(StandardCharsets.UTF_8));
builder.defaultHeader(HttpHeaders.AUTHORIZATION, "Basic " + encoded);
log.info("LlmRestClient initialized - Provider: lmstudio (with HTACCESS auth), "
+ "URL: {}/v1/chat/completions, Model: {}", lmstudioBaseUrl, lmstudioModel);
} else {
log.info("LlmRestClient initialized - Provider: lmstudio, URL: {}/v1/chat/completions, Model: {}",
lmstudioBaseUrl, lmstudioModel);
}
}
this.webClient = builder.build();
@@ -62,8 +74,10 @@ public class LlmRestClient {
/**
* Send a chat completion request.
*
* @param systemPrompt System prompt for context
* @param userMessage User message/question
* @param systemPrompt
* System prompt for context
* @param userMessage
* User message/question
* @return LLM response text, or null on error
*/
public String chat(String systemPrompt, String userMessage) {
@@ -73,34 +87,29 @@ public class LlmRestClient {
/**
* Send a chat completion request with custom parameters.
*
* @param systemPrompt System prompt for context
* @param userMessage User message/question
* @param temperature Temperature for response randomness (0.0-1.0)
* @param maxTokens Maximum tokens in response
* @param systemPrompt
* System prompt for context
* @param userMessage
* User message/question
* @param temperature
* Temperature for response randomness (0.0-1.0)
* @param maxTokens
* Maximum tokens in response
* @return LLM response text, or null on error
*/
public String chat(String systemPrompt, String userMessage, double temperature, int maxTokens) {
try {
Map<String, Object> request = Map.of(
"model", model,
"messages", List.of(
Map.of("role", "system", "content", systemPrompt != null ? systemPrompt : ""),
Map<String, Object> request = Map.of("model", model, "messages",
List.of(Map.of("role", "system", "content", systemPrompt != null ? systemPrompt : ""),
Map.of("role", "user", "content", userMessage)),
"temperature", temperature,
"max_tokens", maxTokens,
"stream", false);
"temperature", temperature, "max_tokens", maxTokens, "stream", false);
log.info("Sending request to LLM [{}] (model: {}, prompt length: {} chars)...",
provider, model, userMessage.length());
log.info("Sending request to LLM [{}] (model: {}, prompt length: {} chars)...", provider, model,
userMessage.length());
long startTime = System.currentTimeMillis();
String response = webClient.post()
.contentType(MediaType.APPLICATION_JSON)
.bodyValue(request)
.retrieve()
.bodyToMono(String.class)
.timeout(Duration.ofSeconds(120))
.block();
String response = webClient.post().contentType(MediaType.APPLICATION_JSON).bodyValue(request).retrieve()
.bodyToMono(String.class).timeout(Duration.ofSeconds(120)).block();
long duration = System.currentTimeMillis() - startTime;
log.info("LLM response received in {}ms", duration);

View File

@@ -1,7 +1,7 @@
# Development-specific configuration
# MongoDB - Development database
spring.data.mongodb.uri=mongodb://192.168.180.25:27017/votianlt_dev
spring.data.mongodb.uri=${MONGODB_URI}
# Enable browser launch in development
vaadin.launch-browser=true

View File

@@ -1,7 +1,7 @@
# Production-specific configuration
# MongoDB - Production database
spring.data.mongodb.uri=mongodb://192.168.180.25:27017/votianlt
spring.data.mongodb.uri=${MONGODB_URI}
# Disable browser launch in production
vaadin.launch-browser=false

View File

@@ -31,7 +31,7 @@ vaadin.allowed-packages=com.vaadin,org.vaadin,de.assecutor.votianlt
spring.jpa.open-in-view=false
# MongoDB - Default configuration (override in profile-specific files)
spring.data.mongodb.uri=mongodb://192.168.180.25:27017/votianlt_dev
spring.data.mongodb.uri=${MONGODB_URI}
spring.data.mongodb.auto-index-creation=true
spring.data.mongodb.socket-timeout=30000
spring.data.mongodb.connect-timeout=10000
@@ -79,8 +79,10 @@ app.google.maps.api-key=AIzaSyDnbitL06iLp3elmj-WtPudCykX9xvXcVE
app.ai.provider=moonshot
# --- LM Studio ---
app.ai.lmstudio.base-url=https://lmstudio.appcreation.de
app.ai.lmstudio.base-url=${LMSTUDIO_URL}
app.ai.lmstudio.model=local-model
app.ai.lmstudio.htaccess-username=${LMSTUDIO_HTACCESS_USERNAME}
app.ai.lmstudio.htaccess-password=${LMSTUDIO_HTACCESS_PASSWORD}
# --- Moonshot AI (kimi) ---
app.ai.moonshot.base-url=https://api.moonshot.ai
@@ -88,7 +90,7 @@ app.ai.moonshot.api-key=sk-EfHJfwCsxiZbOoBJ21OLWb9RUJQXSXAFIFGKnOedKke5JYZp
app.ai.moonshot.model=moonshot-v1-8k
# Spring AI OpenAI properties (Pflicht für Auto-Configuration, werden vom LlmRestClient überschrieben)
spring.ai.openai.base-url=https://lmstudio.appcreation.de
spring.ai.openai.base-url=${LMSTUDIO_URL}
spring.ai.openai.api-key=not-used
spring.ai.openai.chat.options.model=local-model
spring.ai.openai.chat.options.temperature=0.7