diff --git a/docker_push.sh b/docker_push.sh
index cb6ed45..1ab2a37 100755
--- a/docker_push.sh
+++ b/docker_push.sh
@@ -2,5 +2,5 @@
echo "G8m0T3vz" | docker login registry.assecutor.org -u adsg --password-stdin
# Dann ganz normal pushen
-docker buildx build --platform linux/amd64 -t registry.assecutor.org/votianlt:0.9.6 --push .
+docker buildx build --platform linux/amd64 -t registry.assecutor.org/votianlt:0.9.9 --push .
diff --git a/pom.xml b/pom.xml
index d4ee876..4aede84 100644
--- a/pom.xml
+++ b/pom.xml
@@ -6,7 +6,7 @@
de.assecutor.votianlt
votianlt
- 0.9.6
+ 0.9.9
jar
diff --git a/src/main/java/de/assecutor/votianlt/ai/config/LlmConfig.java b/src/main/java/de/assecutor/votianlt/ai/config/LlmConfig.java
index d74233a..cc9ffdb 100644
--- a/src/main/java/de/assecutor/votianlt/ai/config/LlmConfig.java
+++ b/src/main/java/de/assecutor/votianlt/ai/config/LlmConfig.java
@@ -11,17 +11,13 @@ import java.net.URL;
import java.util.Base64;
/**
- * Configuration for LLM integration. Supports LM Studio and Moonshot AI. Switch
- * provider via {@code app.ai.provider=lmstudio|moonshot} in
- * application.properties.
+ * Configuration for LLM integration via LM Studio. The LM Studio instance
+ * exposes an OpenAI-compatible API at {@code /v1/chat/completions}.
*/
@Configuration
@Slf4j
public class LlmConfig {
- @Value("${app.ai.provider:lmstudio}")
- private String provider;
-
@Value("${app.ai.lmstudio.base-url}")
private String lmstudioBaseUrl;
@@ -34,45 +30,29 @@ public class LlmConfig {
@Value("${app.ai.lmstudio.htaccess-password}")
private String lmstudioHtaccessPassword;
- @Value("${app.ai.moonshot.base-url:https://api.moonshot.ai}")
- private String moonshotBaseUrl;
-
- @Value("${app.ai.moonshot.api-key:}")
- private String moonshotApiKey;
-
- @Value("${app.ai.moonshot.model:moonshot-v1-8k}")
- private String moonshotModel;
-
@PostConstruct
public void logConfig() {
log.info("=== LLM Configuration ===");
- log.info("Provider: {}", provider);
- if ("moonshot".equalsIgnoreCase(provider)) {
- log.info("Base URL: {}", moonshotBaseUrl);
- log.info("Model: {}", moonshotModel);
- log.info("API Key: {}***", moonshotApiKey.length() > 8 ? moonshotApiKey.substring(0, 8) : "***");
- testConnection(moonshotBaseUrl, moonshotModel, moonshotApiKey);
- } else {
- log.info("Base URL: {}", lmstudioBaseUrl);
- log.info("Model: {}", lmstudioModel);
- log.info("HTACCESS auth: {}", hasHtaccessCredentials() ? "configured" : "not configured");
- testConnection(lmstudioBaseUrl, lmstudioModel, null);
- }
+ log.info("Provider: lmstudio");
+ log.info("Base URL: {}", lmstudioBaseUrl);
+ log.info("Model: {}", lmstudioModel);
+ log.info("HTACCESS auth: {}", hasHtaccessCredentials() ? "configured" : "not configured");
+ testConnection(lmstudioBaseUrl, lmstudioModel);
}
- private void testConnection(String baseUrl, String model, String apiKey) {
+ private void testConnection(String baseUrl, String model) {
log.info("Testing LLM connection to: {}", baseUrl);
// Test 1: Models endpoint
- testEndpoint(baseUrl + "/v1/models", "GET", null, apiKey);
+ testEndpoint(baseUrl + "/v1/models", "GET", null);
// Test 2: Chat completions (no streaming)
String testPayload = "{\"model\":\"" + model
+ "\",\"messages\":[{\"role\":\"user\",\"content\":\"ping\"}],\"max_tokens\":1,\"stream\":false}";
- testEndpoint(baseUrl + "/v1/chat/completions", "POST", testPayload, apiKey);
+ testEndpoint(baseUrl + "/v1/chat/completions", "POST", testPayload);
}
- private void testEndpoint(String endpoint, String method, String payload, String apiKey) {
+ private void testEndpoint(String endpoint, String method, String payload) {
try {
log.info("Testing endpoint: {} {}", method, endpoint);
URL url = URI.create(endpoint).toURL();
@@ -85,8 +65,6 @@ public class LlmConfig {
String credentials = lmstudioHtaccessUsername + ":" + lmstudioHtaccessPassword;
String encoded = Base64.getEncoder().encodeToString(credentials.getBytes());
connection.setRequestProperty("Authorization", "Basic " + encoded);
- } else if (apiKey != null && !apiKey.isBlank()) {
- connection.setRequestProperty("Authorization", "Bearer " + apiKey);
}
if (payload != null) {
@@ -123,16 +101,12 @@ public class LlmConfig {
}
}
- public String getProvider() {
- return provider;
- }
-
public String getBaseUrl() {
- return "moonshot".equalsIgnoreCase(provider) ? moonshotBaseUrl : lmstudioBaseUrl;
+ return lmstudioBaseUrl;
}
public String getModel() {
- return "moonshot".equalsIgnoreCase(provider) ? moonshotModel : lmstudioModel;
+ return lmstudioModel;
}
public boolean hasHtaccessCredentials() {
diff --git a/src/main/java/de/assecutor/votianlt/ai/service/LlmRestClient.java b/src/main/java/de/assecutor/votianlt/ai/service/LlmRestClient.java
index e5a5a83..e705afb 100644
--- a/src/main/java/de/assecutor/votianlt/ai/service/LlmRestClient.java
+++ b/src/main/java/de/assecutor/votianlt/ai/service/LlmRestClient.java
@@ -16,9 +16,8 @@ import java.util.List;
import java.util.Map;
/**
- * Direct REST client for LLM APIs (LM Studio or Moonshot AI). Provider is
- * selected via {@code app.ai.provider} in application.properties. Both
- * providers expose an OpenAI-compatible /v1/chat/completions endpoint.
+ * Direct REST client for LM Studio LLM API. Communicates via the
+ * OpenAI-compatible /v1/chat/completions endpoint.
*/
@Component
@Slf4j
@@ -27,45 +26,30 @@ public class LlmRestClient {
private final WebClient webClient;
private final ObjectMapper objectMapper;
private final String model;
- private final String provider;
- public LlmRestClient(@Value("${app.ai.provider}") String provider,
- @Value("${app.ai.lmstudio.base-url}") String lmstudioBaseUrl,
+ public LlmRestClient(@Value("${app.ai.lmstudio.base-url}") String lmstudioBaseUrl,
@Value("${app.ai.lmstudio.model}") String lmstudioModel,
@Value("${app.ai.lmstudio.htaccess-username}") String lmstudioHtaccessUsername,
@Value("${app.ai.lmstudio.htaccess-password}") String lmstudioHtaccessPassword,
- @Value("${app.ai.moonshot.base-url}") String moonshotBaseUrl,
- @Value("${app.ai.moonshot.api-key}") String moonshotApiKey,
- @Value("${app.ai.moonshot.model}") String moonshotModel, ObjectMapper objectMapper) {
+ ObjectMapper objectMapper) {
- this.provider = provider.trim().toLowerCase();
+ this.model = lmstudioModel;
this.objectMapper = objectMapper;
WebClient.Builder builder = WebClient.builder();
+ builder.baseUrl(lmstudioBaseUrl + "/v1/chat/completions");
- if ("moonshot".equals(this.provider)) {
- this.model = moonshotModel;
- builder.baseUrl(moonshotBaseUrl + "/v1/chat/completions");
- if (moonshotApiKey != null && !moonshotApiKey.isBlank()) {
- builder.defaultHeader(HttpHeaders.AUTHORIZATION, "Bearer " + moonshotApiKey);
- }
- log.info("LlmRestClient initialized - Provider: moonshot, URL: {}/v1/chat/completions, Model: {}",
- moonshotBaseUrl, moonshotModel);
+ if (lmstudioHtaccessUsername != null && !lmstudioHtaccessUsername.isBlank()
+ && lmstudioHtaccessPassword != null && !lmstudioHtaccessPassword.isBlank()) {
+ String credentials = lmstudioHtaccessUsername + ":" + lmstudioHtaccessPassword;
+ String encoded = Base64.getEncoder()
+ .encodeToString(credentials.getBytes(StandardCharsets.UTF_8));
+ builder.defaultHeader(HttpHeaders.AUTHORIZATION, "Basic " + encoded);
+ log.info("LlmRestClient initialized (with HTACCESS auth) - URL: {}/v1/chat/completions, Model: {}",
+ lmstudioBaseUrl, lmstudioModel);
} else {
- this.model = lmstudioModel;
- builder.baseUrl(lmstudioBaseUrl + "/v1/chat/completions");
- if (lmstudioHtaccessUsername != null && !lmstudioHtaccessUsername.isBlank()
- && lmstudioHtaccessPassword != null && !lmstudioHtaccessPassword.isBlank()) {
- String credentials = lmstudioHtaccessUsername + ":" + lmstudioHtaccessPassword;
- String encoded = Base64.getEncoder()
- .encodeToString(credentials.getBytes(StandardCharsets.UTF_8));
- builder.defaultHeader(HttpHeaders.AUTHORIZATION, "Basic " + encoded);
- log.info("LlmRestClient initialized - Provider: lmstudio (with HTACCESS auth), "
- + "URL: {}/v1/chat/completions, Model: {}", lmstudioBaseUrl, lmstudioModel);
- } else {
- log.info("LlmRestClient initialized - Provider: lmstudio, URL: {}/v1/chat/completions, Model: {}",
- lmstudioBaseUrl, lmstudioModel);
- }
+ log.info("LlmRestClient initialized - URL: {}/v1/chat/completions, Model: {}",
+ lmstudioBaseUrl, lmstudioModel);
}
this.webClient = builder.build();
@@ -104,7 +88,7 @@ public class LlmRestClient {
Map.of("role", "user", "content", userMessage)),
"temperature", temperature, "max_tokens", maxTokens, "stream", false);
- log.info("Sending request to LLM [{}] (model: {}, prompt length: {} chars)...", provider, model,
+ log.info("Sending request to LLM (model: {}, prompt length: {} chars)...", model,
userMessage.length());
long startTime = System.currentTimeMillis();
@@ -118,7 +102,7 @@ public class LlmRestClient {
return extractContent(response);
} catch (Exception e) {
- log.error("Error calling LLM API [{}]: {} - {}", provider, e.getClass().getSimpleName(), e.getMessage());
+ log.error("Error calling LLM API: {} - {}", e.getClass().getSimpleName(), e.getMessage());
if (log.isDebugEnabled()) {
log.debug("Full stack trace:", e);
}
@@ -133,10 +117,6 @@ public class LlmRestClient {
return chat(null, userMessage);
}
- public String getProvider() {
- return provider;
- }
-
public String getModel() {
return model;
}
diff --git a/src/main/resources/application.properties b/src/main/resources/application.properties
index 4b05ebf..8e9cf23 100644
--- a/src/main/resources/application.properties
+++ b/src/main/resources/application.properties
@@ -38,12 +38,12 @@ spring.data.mongodb.connect-timeout=10000
spring.data.mongodb.server-selection-timeout=5000
# Mail Configuration (Spring Boot Standard)
-spring.mail.host=mailhub.assecutor.org
-spring.mail.port=587
-spring.mail.username=noreply@assecutor.org
-spring.mail.password=OStRIL,_,31
-spring.mail.properties.mail.smtp.auth=true
-spring.mail.properties.mail.smtp.starttls.enable=true
+spring.mail.host=${MAIL_HOST}
+spring.mail.port=${MAIL_PORT:587}
+spring.mail.username=${MAIL_USERNAME}
+spring.mail.password=${MAIL_PASSWORD}
+spring.mail.properties.mail.smtp.auth=${MAIL_SMTP_AUTH:true}
+spring.mail.properties.mail.smtp.ssl.enable=${MAIL_SMTP_SSL:false}
# HTTP request size limits for large payloads
server.max-http-request-header-size=8MB
@@ -73,26 +73,17 @@ app.version=@project.version@
app.google.maps.api-key=AIzaSyDnbitL06iLp3elmj-WtPudCykX9xvXcVE
# ===========================================
-# LLM Configuration
-# Provider: lmstudio | moonshot
+# LLM Configuration (LM Studio)
# ===========================================
-app.ai.provider=moonshot
-
-# --- LM Studio ---
app.ai.lmstudio.base-url=${LMSTUDIO_URL}
-app.ai.lmstudio.model=local-model
+app.ai.lmstudio.model=${LMSTUDIO_MODEL:local-model}
app.ai.lmstudio.htaccess-username=${LMSTUDIO_HTACCESS_USERNAME}
app.ai.lmstudio.htaccess-password=${LMSTUDIO_HTACCESS_PASSWORD}
-# --- Moonshot AI (kimi) ---
-app.ai.moonshot.base-url=https://api.moonshot.ai
-app.ai.moonshot.api-key=sk-EfHJfwCsxiZbOoBJ21OLWb9RUJQXSXAFIFGKnOedKke5JYZp
-app.ai.moonshot.model=moonshot-v1-8k
-
# Spring AI OpenAI properties (Pflicht für Auto-Configuration, werden vom LlmRestClient überschrieben)
spring.ai.openai.base-url=${LMSTUDIO_URL}
spring.ai.openai.api-key=not-used
-spring.ai.openai.chat.options.model=local-model
+spring.ai.openai.chat.options.model=${LMSTUDIO_MODEL:local-model}
spring.ai.openai.chat.options.temperature=0.7
spring.ai.openai.chat.options.stream=false
spring.ai.openai.connect-timeout=10s