Merge pull request #15048 from kcacademic/BAEL-6970

Adding sourcecode for reference in the article tracked under BAEL-6970.
Liam Williams 2023-10-30 16:05:16 +00:00 committed by GitHub
commit 4b29870418
9 changed files with 364 additions and 0 deletions

libraries-llms/pom.xml

@@ -0,0 +1,43 @@
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
    <modelVersion>4.0.0</modelVersion>
    <artifactId>libraries-llms</artifactId>
    <name>libraries-llms</name>

    <parent>
        <groupId>com.baeldung</groupId>
        <artifactId>parent-modules</artifactId>
        <version>1.0.0-SNAPSHOT</version>
    </parent>

    <dependencies>
        <!-- dependencies for langchain4j -->
        <dependency>
            <groupId>dev.langchain4j</groupId>
            <artifactId>langchain4j</artifactId>
            <version>${langchain4j.version}</version>
        </dependency>
        <dependency>
            <groupId>dev.langchain4j</groupId>
            <artifactId>langchain4j-embeddings</artifactId>
            <version>${langchain4j.version}</version>
        </dependency>
        <dependency>
            <groupId>dev.langchain4j</groupId>
            <artifactId>langchain4j-open-ai</artifactId>
            <version>${langchain4j.version}</version>
        </dependency>
        <dependency>
            <groupId>dev.langchain4j</groupId>
            <artifactId>langchain4j-embeddings-all-minilm-l6-v2</artifactId>
            <version>${langchain4j.version}</version>
        </dependency>
    </dependencies>

    <properties>
        <langchain4j.version>0.23.0</langchain4j.version>
    </properties>
</project>

libraries-llms/src/test/java/com/baeldung/langchain/ChainWithDocumentLiveTest.java

@@ -0,0 +1,65 @@
package com.baeldung.langchain;

import static dev.langchain4j.data.document.FileSystemDocumentLoader.loadDocument;
import static java.time.Duration.ofSeconds;
import static org.junit.Assert.assertNotNull;

import java.nio.file.Paths;

import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import dev.langchain4j.chain.ConversationalRetrievalChain;
import dev.langchain4j.data.document.Document;
import dev.langchain4j.data.document.splitter.DocumentSplitters;
import dev.langchain4j.data.segment.TextSegment;
import dev.langchain4j.memory.chat.MessageWindowChatMemory;
import dev.langchain4j.model.chat.ChatLanguageModel;
import dev.langchain4j.model.embedding.AllMiniLmL6V2EmbeddingModel;
import dev.langchain4j.model.embedding.EmbeddingModel;
import dev.langchain4j.model.input.PromptTemplate;
import dev.langchain4j.model.openai.OpenAiChatModel;
import dev.langchain4j.retriever.EmbeddingStoreRetriever;
import dev.langchain4j.store.embedding.EmbeddingStore;
import dev.langchain4j.store.embedding.EmbeddingStoreIngestor;
import dev.langchain4j.store.embedding.inmemory.InMemoryEmbeddingStore;

public class ChainWithDocumentLiveTest {

    private static final Logger logger = LoggerFactory.getLogger(ChainWithDocumentLiveTest.class);

    @Test
    public void givenChainWithDocument_whenPrompted_thenValidResponse() {
        // Split the document and embed the segments into an in-memory store using a local embedding model
        EmbeddingModel embeddingModel = new AllMiniLmL6V2EmbeddingModel();
        EmbeddingStore<TextSegment> embeddingStore = new InMemoryEmbeddingStore<>();
        EmbeddingStoreIngestor ingestor = EmbeddingStoreIngestor.builder()
            .documentSplitter(DocumentSplitters.recursive(500, 0))
            .embeddingModel(embeddingModel)
            .embeddingStore(embeddingStore)
            .build();
        Document document = loadDocument(Paths.get("src/test/resources/example-files/simpson's_adventures.txt"));
        ingestor.ingest(document);

        // Build a retrieval chain that augments the chat model with the most relevant segments
        ChatLanguageModel chatModel = OpenAiChatModel.builder()
            .apiKey(Constants.OPENAI_API_KEY)
            .timeout(ofSeconds(60))
            .build();
        ConversationalRetrievalChain chain = ConversationalRetrievalChain.builder()
            .chatLanguageModel(chatModel)
            .retriever(EmbeddingStoreRetriever.from(embeddingStore, embeddingModel))
            .chatMemory(MessageWindowChatMemory.withMaxMessages(10))
            .promptTemplate(PromptTemplate.from(
                "Answer the following question to the best of your ability: {{question}}\n\nBase your answer on the following information:\n{{information}}"))
            .build();

        String answer = chain.execute("Who is Simpson?");
        logger.info(answer);
        assertNotNull(answer);
    }
}

libraries-llms/src/test/java/com/baeldung/langchain/ChatWithDocumentLiveTest.java

@@ -0,0 +1,81 @@
package com.baeldung.langchain;

import static dev.langchain4j.data.document.FileSystemDocumentLoader.loadDocument;
import static dev.langchain4j.model.openai.OpenAiModelName.GPT_3_5_TURBO;
import static java.time.Duration.ofSeconds;
import static java.util.stream.Collectors.joining;
import static org.junit.Assert.assertNotNull;

import java.nio.file.Paths;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import dev.langchain4j.data.document.Document;
import dev.langchain4j.data.document.DocumentSplitter;
import dev.langchain4j.data.document.splitter.DocumentSplitters;
import dev.langchain4j.data.embedding.Embedding;
import dev.langchain4j.data.message.AiMessage;
import dev.langchain4j.data.segment.TextSegment;
import dev.langchain4j.model.chat.ChatLanguageModel;
import dev.langchain4j.model.embedding.AllMiniLmL6V2EmbeddingModel;
import dev.langchain4j.model.embedding.EmbeddingModel;
import dev.langchain4j.model.input.Prompt;
import dev.langchain4j.model.input.PromptTemplate;
import dev.langchain4j.model.openai.OpenAiChatModel;
import dev.langchain4j.model.openai.OpenAiTokenizer;
import dev.langchain4j.store.embedding.EmbeddingMatch;
import dev.langchain4j.store.embedding.EmbeddingStore;
import dev.langchain4j.store.embedding.inmemory.InMemoryEmbeddingStore;

public class ChatWithDocumentLiveTest {

    private static final Logger logger = LoggerFactory.getLogger(ChatWithDocumentLiveTest.class);

    @Test
    public void givenDocument_whenPrompted_thenValidResponse() {
        // Split the document into segments and embed them into an in-memory store
        Document document = loadDocument(Paths.get("src/test/resources/example-files/simpson's_adventures.txt"));
        DocumentSplitter splitter = DocumentSplitters.recursive(100, 0, new OpenAiTokenizer(GPT_3_5_TURBO));
        List<TextSegment> segments = splitter.split(document);

        EmbeddingModel embeddingModel = new AllMiniLmL6V2EmbeddingModel();
        List<Embedding> embeddings = embeddingModel.embedAll(segments)
            .content();
        EmbeddingStore<TextSegment> embeddingStore = new InMemoryEmbeddingStore<>();
        embeddingStore.addAll(embeddings, segments);

        // Embed the question and retrieve the most relevant segments
        String question = "Who is Simpson?";
        Embedding questionEmbedding = embeddingModel.embed(question)
            .content();
        int maxResults = 3;
        double minScore = 0.7;
        List<EmbeddingMatch<TextSegment>> relevantEmbeddings = embeddingStore.findRelevant(questionEmbedding, maxResults, minScore);

        // Combine the question with the retrieved information in a single prompt
        PromptTemplate promptTemplate = PromptTemplate.from(
            "Answer the following question to the best of your ability:\n"
                + "\n"
                + "Question:\n"
                + "{{question}}\n"
                + "\n"
                + "Base your answer on the following information:\n"
                + "{{information}}");
        String information = relevantEmbeddings.stream()
            .map(match -> match.embedded()
                .text())
            .collect(joining("\n\n"));

        Map<String, Object> variables = new HashMap<>();
        variables.put("question", question);
        variables.put("information", information);
        Prompt prompt = promptTemplate.apply(variables);

        ChatLanguageModel chatModel = OpenAiChatModel.builder()
            .apiKey(Constants.OPENAI_API_KEY)
            .timeout(ofSeconds(60))
            .build();

        AiMessage aiMessage = chatModel.generate(prompt.toUserMessage())
            .content();
        logger.info(aiMessage.text());
        assertNotNull(aiMessage.text());
    }
}

libraries-llms/src/test/java/com/baeldung/langchain/ChatWithMemoryLiveTest.java

@@ -0,0 +1,43 @@
package com.baeldung.langchain;

import static dev.langchain4j.data.message.UserMessage.userMessage;
import static dev.langchain4j.model.openai.OpenAiModelName.GPT_3_5_TURBO;
import static org.assertj.core.api.Assertions.assertThat;
import static org.junit.Assert.assertNotNull;

import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import dev.langchain4j.data.message.AiMessage;
import dev.langchain4j.memory.ChatMemory;
import dev.langchain4j.memory.chat.TokenWindowChatMemory;
import dev.langchain4j.model.chat.ChatLanguageModel;
import dev.langchain4j.model.openai.OpenAiChatModel;
import dev.langchain4j.model.openai.OpenAiTokenizer;

public class ChatWithMemoryLiveTest {

    private static final Logger logger = LoggerFactory.getLogger(ChatWithMemoryLiveTest.class);

    @Test
    public void givenMemory_whenPrompted_thenValidResponse() {
        ChatLanguageModel model = OpenAiChatModel.withApiKey(Constants.OPENAI_API_KEY);
        // Keep at most 300 tokens of conversation history
        ChatMemory chatMemory = TokenWindowChatMemory.withMaxTokens(300, new OpenAiTokenizer(GPT_3_5_TURBO));

        chatMemory.add(userMessage("Hello, my name is Kumar"));
        AiMessage answer = model.generate(chatMemory.messages())
            .content();
        logger.info(answer.text());
        chatMemory.add(answer);
        assertNotNull(answer.text());

        // The model should recall the name from the messages stored in memory
        chatMemory.add(userMessage("What is my name?"));
        AiMessage answerWithName = model.generate(chatMemory.messages())
            .content();
        logger.info(answerWithName.text());
        chatMemory.add(answerWithName);
        assertThat(answerWithName.text()).contains("Kumar");
    }
}

libraries-llms/src/test/java/com/baeldung/langchain/Constants.java

@@ -0,0 +1,12 @@
package com.baeldung.langchain;

public class Constants {

    /**
     * A limited-access key for the OpenAI language models can be generated by first
     * registering for free at (https://platform.openai.com/signup) and then navigating
     * to the "Create new secret key" page at (https://platform.openai.com/account/api-keys).
     */
    public static String OPENAI_API_KEY = "<OPENAI_API_KEY>";
}
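
Note: the live tests above read the key from this hard-coded placeholder. As an alternative sketch (not part of this commit; the OPENAI_API_KEY environment variable name and the EnvironmentConstants class are illustrative assumptions), the key could be resolved from the environment instead of being committed in source:

package com.baeldung.langchain;

// Hypothetical alternative to the hard-coded constant above: read the key from the
// OPENAI_API_KEY environment variable and fall back to the placeholder if it is unset.
public class EnvironmentConstants {

    public static final String OPENAI_API_KEY =
        System.getenv().getOrDefault("OPENAI_API_KEY", "<OPENAI_API_KEY>");
}

Resolving the key at runtime keeps the secret out of version control and lets a CI job supply it per run.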

libraries-llms/src/test/java/com/baeldung/langchain/PromptTemplatesLiveTest.java

@@ -0,0 +1,41 @@
package com.baeldung.langchain;

import static dev.langchain4j.model.openai.OpenAiModelName.GPT_3_5_TURBO;
import static org.junit.Assert.assertNotNull;

import java.util.HashMap;
import java.util.Map;

import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import dev.langchain4j.model.chat.ChatLanguageModel;
import dev.langchain4j.model.input.Prompt;
import dev.langchain4j.model.input.PromptTemplate;
import dev.langchain4j.model.openai.OpenAiChatModel;

public class PromptTemplatesLiveTest {

    private static final Logger logger = LoggerFactory.getLogger(PromptTemplatesLiveTest.class);

    @Test
    public void givenPromptTemplate_whenSuppliedInput_thenValidResponse() {
        // Fill the {{adjective}} and {{content}} placeholders of the template
        PromptTemplate promptTemplate = PromptTemplate.from("Tell me a {{adjective}} joke about {{content}}..");
        Map<String, Object> variables = new HashMap<>();
        variables.put("adjective", "funny");
        variables.put("content", "humans");
        Prompt prompt = promptTemplate.apply(variables);

        ChatLanguageModel model = OpenAiChatModel.builder()
            .apiKey(Constants.OPENAI_API_KEY)
            .modelName(GPT_3_5_TURBO)
            .temperature(0.3)
            .build();

        String response = model.generate(prompt.text());
        logger.info(response);
        assertNotNull(response);
    }
}

libraries-llms/src/test/java/com/baeldung/langchain/ServiceWithToolsLiveTest.java

@@ -0,0 +1,49 @@
package com.baeldung.langchain;

import static org.assertj.core.api.Assertions.assertThat;

import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import dev.langchain4j.agent.tool.Tool;
import dev.langchain4j.memory.chat.MessageWindowChatMemory;
import dev.langchain4j.model.openai.OpenAiChatModel;
import dev.langchain4j.service.AiServices;

public class ServiceWithToolsLiveTest {

    private static final Logger logger = LoggerFactory.getLogger(ServiceWithToolsLiveTest.class);

    // Tools the model can invoke while answering
    static class Calculator {

        @Tool("Calculates the length of a string")
        int stringLength(String s) {
            return s.length();
        }

        @Tool("Calculates the sum of two numbers")
        int add(int a, int b) {
            return a + b;
        }
    }

    interface Assistant {
        String chat(String userMessage);
    }

    @Test
    public void givenServiceWithTools_whenPrompted_thenValidResponse() {
        Assistant assistant = AiServices.builder(Assistant.class)
            .chatLanguageModel(OpenAiChatModel.withApiKey(Constants.OPENAI_API_KEY))
            .tools(new Calculator())
            .chatMemory(MessageWindowChatMemory.withMaxMessages(10))
            .build();

        String question = "What is the sum of the numbers of letters in the words \"language\" and \"model\"?";
        String answer = assistant.chat(question);
        logger.info(answer);
        assertThat(answer).contains("13");
    }
}

libraries-llms/src/test/resources/example-files/simpson's_adventures.txt

@@ -0,0 +1,28 @@
Once upon a time in the town of VeggieVille, there lived a cheerful carrot named Simpson.
Simpson was a radiant carrot, always beaming with joy and positivity.
His vibrant orange skin and lush green top were a sight to behold, but it was his infectious laughter and warm personality that really set him apart.
Simpson had a diverse group of friends, each a vegetable with their own unique characteristics.
There was Bella the blushing beetroot, always ready with a riddle or two; Timmy the timid tomato, a gentle soul with a heart of gold; and Percy the prankster potato, whose jokes always brought a smile to everyone's face.
Despite their differences, they shared a close bond, their friendship as robust as their natural goodness.
Their lives were filled with delightful adventures, from playing hide-and-seek amidst the leafy lettuce to swimming in the dewy droplets that pooled on the cabbage leaves.
Their favorite place, though, was the sunlit corner of the vegetable patch, where they would bask in the warmth of the sun, share stories, and have hearty laughs.
One day, a bunch of pesky caterpillars invaded VeggieVille.
The vegetables were terrified, fearing they would be nibbled to nothingness.
But Simpson, with his usual sunny disposition, had an idea.
He proposed they host a grand feast for the caterpillars, with the juiciest leaves from the outskirts of the town.
Simpson's optimism was contagious, and his friends eagerly joined in to prepare the feast.
When the caterpillars arrived, they were pleasantly surprised.
They enjoyed the feast and were so impressed with the vegetables' hospitality that they promised not to trouble VeggieVille again.
In return, they agreed to help pollinate the flowers, contributing to a more lush and vibrant VeggieVille.
Simpson's idea had saved the day, but he humbly attributed the success to their teamwork and friendship.
They celebrated their victory with a grand party, filled with laughter, dance, and merry games.
That night, under the twinkling stars, they made a pact to always stand by each other, come what may.
From then on, the story of the happy carrot and his friends spread far and wide, a tale of friendship, unity, and positivity.
Simpson, Bella, Timmy, and Percy continued to live their joyful lives, their laughter echoing through VeggieVille.
And so, the tale of the happy carrot and his friends serves as a reminder that no matter the challenge, with optimism, teamwork, and a bit of creativity, anything is possible.

pom.xml

@@ -930,6 +930,7 @@
<module>spring-di-4</module>
<module>spring-kafka-2</module>
<!--<module>java-panama</module> Java-19 module-->
<module>libraries-llms</module>
</modules>
<properties>
@@ -1212,6 +1213,7 @@
<module>spring-di-4</module>
<module>spring-kafka-2</module>
<!--<module>java-panama</module> Java-19 module-->
<module>libraries-llms</module>
</modules>
<properties>