Erweiterungen

This commit is contained in:
2026-01-26 16:36:13 +01:00
parent 64fb022c38
commit 234e75c66a
7 changed files with 250 additions and 39 deletions

View File

@@ -14,4 +14,18 @@ logging.level.de.assecutor.votianlt=INFO
logging.level.root=WARN
logging.file.name=logs/votianlt-production.log
logging.file.max-size=50MB
logging.file.max-history=90
# Debug logging for AI/LLM troubleshooting (can be disabled after debugging)
logging.level.org.springframework.ai=DEBUG
logging.level.org.springframework.web.client.RestTemplate=DEBUG
logging.level.org.springframework.web.client.RestClient=DEBUG
logging.level.org.apache.http=DEBUG
logging.level.org.apache.http.wire=DEBUG
logging.level.org.apache.http.headers=DEBUG
# Java HTTP Client logging
logging.level.jdk.httpclient=DEBUG
logging.level.java.net.http=DEBUG
# Spring HTTP logging
logging.level.org.springframework.http.client=DEBUG
logging.level.de.assecutor.votianlt.ai=DEBUG

View File

@@ -113,6 +113,13 @@ spring.ai.openai.api-key=not-used
spring.ai.openai.chat.options.model=local-model
spring.ai.openai.chat.options.temperature=0.7
# WICHTIG: Streaming deaktivieren - LM Studio/Docker können Streaming-Responses nicht korrekt handlen
spring.ai.openai.chat.options.stream=false
# Timeouts für OpenAI Client
spring.ai.openai.connect-timeout=10s
spring.ai.openai.read-timeout=120s
# ===========================================
# MCP Server Configuration
# ===========================================