Kaynağa Gözat

Add Groq proxy

atsachlaris 17 saat önce
ebeveyn
işleme
4dfb5a3451

+ 19 - 0
requests/llm.http

@@ -35,3 +35,22 @@ Content-Type: application/json
 }
 
 
+###
+
+### Chat completion test (Groq OpenAI-compatible API)
+POST https://api.groq.com/openai/v1/chat/completions
+Authorization: Bearer {{GROQ_API_KEY}}
+Content-Type: application/json
+
+{
+  "model": "llama-3.3-70b-versatile",
+  "messages": [
+    {
+      "role": "user",
+      "content": "What's 1+1"
+    }
+  ],
+  "max_tokens": 100,
+  "temperature": 0.7
+}
+

+ 44 - 0
src/main/java/es/uv/saic/llm/GroqProxy.java

@@ -0,0 +1,44 @@
+package es.uv.saic.llm;
+
+import com.fasterxml.jackson.databind.ObjectMapper;
+import lombok.SneakyThrows;
+import org.springframework.ai.chat.client.ChatClient;
+import org.springframework.ai.openai.OpenAiChatOptions;
+import org.springframework.beans.factory.annotation.Qualifier;
+import org.springframework.util.StringUtils;
+import org.springframework.stereotype.Component;
+
+import static es.uv.saic.service.SystemPrompt.SCORE_AND_COMMENT_ANALYSIS_PROMPT;
+
+/**
+ * Proxy that forwards "score and comment" analysis requests to the Groq
+ * OpenAI-compatible chat API through the {@code groqChatClient} bean.
+ */
+@Component
+public class GroqProxy {
+
+    private final ChatClient chatClient;
+    // NOTE(review): one mapper per component is fine, but a shared static final
+    // ObjectMapper is the usual idiom since construction is relatively costly.
+    private final ObjectMapper objectMapper = new ObjectMapper();
+
+    public GroqProxy(
+            @Qualifier("groqChatClient") ChatClient groqChatClient
+    ) {
+        this.chatClient = groqChatClient;
+    }
+
+    /**
+     * Analyses the given CSV using the client's default Groq model.
+     *
+     * @param asCsv table data serialised as CSV
+     * @return the raw text content returned by the model
+     */
+    @SneakyThrows
+    public String calculateScoreAndProduceComments(String asCsv) {
+        return calculateScoreAndProduceComments(asCsv, null);
+    }
+
+    /**
+     * Analyses the given CSV, optionally overriding the Groq model per call.
+     *
+     * @param asCsv table data serialised as CSV
+     * @param model Groq model id for this request; null or blank keeps the
+     *              default model configured on the injected client
+     * @return the raw text content returned by the model
+     */
+    @SneakyThrows
+    public String calculateScoreAndProduceComments(String asCsv, String model) {
+        ChatClient.ChatClientRequestSpec prompt = chatClient.prompt();
+
+        // Per-request model override; blank/null falls through to the bean's default.
+        if (StringUtils.hasText(model)) {
+            prompt = prompt.options(OpenAiChatOptions.builder().model(model).build());
+        }
+
+        // writeValueAsString wraps the CSV in a JSON string literal (quoted,
+        // escaped) before embedding it in the user message — same technique as
+        // LocalLlmProxy, keeping the two proxies' prompts identical.
+        return prompt
+                .system(SCORE_AND_COMMENT_ANALYSIS_PROMPT)
+                .user("Aquí tienes las tablas: " + objectMapper.writeValueAsString(asCsv))
+                .call()
+                .content();
+    }
+}

+ 38 - 0
src/main/java/es/uv/saic/llm/LlmConfig.java

@@ -0,0 +1,38 @@
+package es.uv.saic.llm;
+
+import org.springframework.ai.chat.client.ChatClient;
+import org.springframework.ai.openai.OpenAiChatModel;
+import org.springframework.ai.openai.OpenAiChatOptions;
+import org.springframework.ai.openai.api.OpenAiApi;
+import org.springframework.beans.factory.annotation.Value;
+import org.springframework.context.annotation.Bean;
+import org.springframework.context.annotation.Configuration;
+
+/**
+ * Chat-client wiring: one client backed by the auto-configured
+ * {@link OpenAiChatModel} (spring.ai.openai.* properties) and one pointed at
+ * Groq's OpenAI-compatible endpoint (spring.ai.groq.* properties).
+ */
+@Configuration
+public class LlmConfig {
+
+    /** Client over the auto-configured OpenAI-style model. */
+    @Bean
+    public ChatClient localChatClient(OpenAiChatModel model) {
+        return ChatClient.create(model);
+    }
+
+    /**
+     * Client for Groq. Injected by name/qualifier ("groqChatClient") so it does
+     * not clash with {@code localChatClient} when resolving by type.
+     *
+     * @param baseUrl   Groq API root, e.g. https://api.groq.com/openai/v1
+     * @param apiKey    Groq API key (keep out of version control)
+     * @param modelName default model id used when a call supplies none
+     */
+    @Bean
+    public ChatClient groqChatClient(
+            @Value("${spring.ai.groq.base-url}") String baseUrl,
+            @Value("${spring.ai.groq.api-key}") String apiKey,
+            @Value("${spring.ai.groq.chat.options.model}") String modelName
+    ) {
+        // Groq exposes an OpenAI-compatible API, so the stock OpenAI client is
+        // reused with only the base URL and key swapped out.
+        OpenAiApi api = OpenAiApi.builder()
+                .baseUrl(baseUrl)
+                .apiKey(apiKey)
+                .build();
+        OpenAiChatModel groqModel = OpenAiChatModel.builder()
+                .openAiApi(api)
+                .defaultOptions(OpenAiChatOptions.builder()
+                        .model(modelName)
+                        .build())
+                .build();
+
+        return ChatClient.create(groqModel);
+    }
+}

+ 6 - 6
src/main/java/es/uv/saic/service/LlmProxy.java → src/main/java/es/uv/saic/llm/LocalLlmProxy.java

@@ -1,4 +1,4 @@
-package es.uv.saic.service;
+package es.uv.saic.llm;
 
 import com.fasterxml.jackson.databind.ObjectMapper;
 import lombok.SneakyThrows;
@@ -8,21 +8,21 @@ import org.springframework.stereotype.Component;
 import static es.uv.saic.service.SystemPrompt.SCORE_AND_COMMENT_ANALYSIS_PROMPT;
 
 @Component
-public class LlmProxy {
+public class LocalLlmProxy {
 
-    private final ChatClient chatClient;
+    private final ChatClient localChatClient;
     private final ObjectMapper objectMapper = new ObjectMapper();
 
-    public LlmProxy(
+    public LocalLlmProxy(
             ChatClient.Builder chatClientBuilder
     ) {
-        this.chatClient = chatClientBuilder.build();
+        this.localChatClient = chatClientBuilder.build();
     }
 
     @SneakyThrows
     public String calculateScoreAndProduceComments(String asCsv) {
 
-        return chatClient.prompt()
+        return localChatClient.prompt()
                 .system(SCORE_AND_COMMENT_ANALYSIS_PROMPT)
                 .user("Aquí tienes las tablas: " + objectMapper.writeValueAsString(asCsv))
                 .call()

+ 3 - 2
src/main/java/es/uv/saic/service/EnhancementService.java

@@ -3,6 +3,7 @@ package es.uv.saic.service;
 import es.uv.saic.extractor.ExtractionRequest;
 import es.uv.saic.extractor.docling.DoclingTableExtractor;
 import es.uv.saic.extractor.HtmlToCsvExtractor;
+import es.uv.saic.llm.LocalLlmProxy;
 import lombok.RequiredArgsConstructor;
 import org.apache.commons.lang3.StringUtils;
 import org.springframework.stereotype.Service;
@@ -14,12 +15,12 @@ import java.nio.charset.StandardCharsets;
 public class EnhancementService {
     private final HtmlToCsvExtractor htmlToCsvExtractor;
     private final DoclingTableExtractor doclingTableExtractor;
-    private final LlmProxy llmProxy;
+    private final LocalLlmProxy localLlmProxy;
 
     public String calculateScoreAndProduceCommentsWithSingleCall(ExtractionRequest extractionRequest) {
         String asCsv = extractCsv(extractionRequest);
 
-        return llmProxy.calculateScoreAndProduceComments(asCsv);
+        return localLlmProxy.calculateScoreAndProduceComments(asCsv);
     }
 
     private String extractCsv(ExtractionRequest request) {

+ 4 - 0
src/main/resources/application-local.properties

@@ -7,6 +7,10 @@ spring.ai.openai.base-url=http://tyrion.uv.es:8090
 spring.ai.openai.api-key=hhOQ6QBqHKtOO9MKAUhIyU9auBkgIF40QJKa24jWJzdtxvdXMLi10xUAWMsdpFP0
 spring.ai.openai.chat.options.model=/media/nas/peerobs_sync/shared/2025-ReviewSim/models/Qwen2.5-7B-Instruct-AWQ
 
+spring.ai.groq.base-url=https://api.groq.com/openai/v1
+spring.ai.groq.api-key=YOUR_GROQ_API_KEY
+spring.ai.groq.chat.options.model=llama-3.3-70b-versatile
+
 #Docling extractor
 extractor.docling.enabled=false
 extractor.docling.python-command=C:/Users/arist/AppData/Local/Python/bin/python.exe