feat: add question rewriter function object
work on #6
bsorrentino committed Jun 18, 2024
1 parent 36674fe commit ba4664a
Showing 1 changed file with 59 additions and 0 deletions.
@@ -0,0 +1,59 @@
package dev.langchain4j.adaptiverag;

import dev.langchain4j.model.chat.ChatLanguageModel;
import dev.langchain4j.model.input.Prompt;
import dev.langchain4j.model.input.PromptTemplate;
import dev.langchain4j.model.openai.OpenAiChatModel;
import dev.langchain4j.service.AiServices;
import dev.langchain4j.service.SystemMessage;
import lombok.Value;

import java.time.Duration;
import java.util.function.Function;

import static org.bsc.langgraph4j.utils.CollectionsUtils.mapOf;

@Value(staticConstructor="of")
public class QuestionRewriter implements Function<String, String> {

private final String openApiKey;

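/**
 * Internal langchain4j AiServices contract; the system message instructs the model
 * to act as a question re-writer and reason about the underlying semantic intent.
 */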
interface LLMService {

@SystemMessage(
"You a question re-writer that converts an input question to a better version that is optimized \n" +
"for vectorstore retrieval. Look at the input and try to reason about the underlying semantic intent / meaning.")
String invoke(String question);
}

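/**
 * Rewrites the given question into a version better suited for vector store retrieval,
 * using an OpenAI chat model behind a langchain4j AiServices proxy.
 */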
@Override
public String apply(String question) {

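// Configure the OpenAI chat model used for rewriting (deterministic output, bounded timeout and retries).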
ChatLanguageModel chatLanguageModel = OpenAiChatModel.builder()
.apiKey( openApiKey )
.modelName( "gpt-3.5-turbo-0125" )
.timeout(Duration.ofMinutes(2))
.logRequests(true)
.logResponses(true)
.maxRetries(2)
.temperature(0.0)
.maxTokens(2000)
.build();

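// Build an AiServices proxy that pairs the @SystemMessage above with the user prompt.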
LLMService service = AiServices.create(LLMService.class, chatLanguageModel);

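// Wrap the original question in a prompt asking the model to formulate an improved question.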
PromptTemplate template = PromptTemplate.from("Here is the initial question: \n\n {{question}} \n Formulate an improved question.");

Prompt prompt = template.apply( mapOf( "question", question ) );

return service.invoke( prompt.text() );
}

}

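For context, a minimal usage sketch is shown below. The static factory of(...) comes from Lombok's @Value(staticConstructor = "of") on the class; the OPENAI_API_KEY environment variable and the sample question are illustrative assumptions, since this commit does not show how the rewriter is wired into the adaptive RAG flow.

// Usage sketch (assumptions noted in the comments):
String apiKey = System.getenv("OPENAI_API_KEY");         // hypothetical way to obtain the key
QuestionRewriter rewriter = QuestionRewriter.of(apiKey); // static factory generated by @Value(staticConstructor = "of")
String improved = rewriter.apply("agent memory");        // sample question; returns a rewrite optimized for retrieval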