Add answer support (#12)

Theo Kanning 2022-04-28 16:20:15 -05:00 committed by GitHub
parent 103c34da94
commit 9f5b64b151
7 changed files with 266 additions and 15 deletions


@ -0,0 +1,140 @@
package com.theokanning.openai.answer;
import lombok.*;
import java.util.List;
import java.util.Map;
/**
* Given a question, a set of documents, and some examples, the API generates an answer to the question based
* on the information in the set of documents. This is useful for question-answering applications on sources of truth,
* like company documentation or a knowledge base.
*
* Documentation taken from
* https://beta.openai.com/docs/api-reference/answers/create
*/
@Builder
@NoArgsConstructor
@AllArgsConstructor
@Data
public class AnswerRequest {
/**
* ID of the engine to use for completion.
*/
@NonNull
String model;
/**
* Question to get answered.
*/
@NonNull
String question;
/**
* List of (question, answer) pairs that will help steer the model towards the tone and answer format you'd like.
* We recommend adding 2 to 3 examples.
*/
@NonNull
List<List<String>> examples;
/**
* A text snippet containing the contextual information used to generate the answers for the examples you provide.
*/
@NonNull
String examplesContext;
/**
* List of documents from which the answer for the input question should be derived.
* If this is an empty list, the question will be answered based on the question-answer examples.
*
* You should specify either documents or a file, but not both.
*/
List<String> documents;
/**
* The ID of an uploaded file that contains documents to search over.
* See upload file for how to upload a file of the desired format and purpose.
*
* You should specify either documents or file, but not both.
*/
String file;
/**
* ID of the engine to use for Search. You can select one of ada, babbage, curie, or davinci.
*/
String searchModel;
/**
* The maximum number of documents to be ranked by Search when using file.
* Setting it to a higher value leads to improved accuracy but with increased latency and cost.
*/
Integer maxRerank;
/**
* What sampling temperature to use. Higher values mean the model will take more risks.
* Try 0.9 for more creative applications, and 0 (argmax sampling) for ones with a well-defined answer.
*
* We generally recommend using this or {@code top_p} but not both.
*/
Double temperature;
/**
* Include the log probabilities on the logprobs most likely tokens, as well as the chosen tokens.
* For example, if logprobs is 10, the API will return a list of the 10 most likely tokens.
* The API will always return the logprob of the sampled token,
* so there may be up to logprobs+1 elements in the response.
*/
Integer logprobs;
/**
* The maximum number of tokens allowed for the generated answer.
*/
Integer maxTokens;
/**
* Up to 4 sequences where the API will stop generating further tokens.
* The returned text will not contain the stop sequence.
*/
List<String> stop;
/**
* How many answers to generate for each question.
*/
Integer n;
/**
* Modify the likelihood of specified tokens appearing in the completion.
*
* Accepts a json object that maps tokens (specified by their token ID in the GPT tokenizer) to an
* associated bias value from -100 to 100.
*/
Map<String, Double> logitBias;
/**
* A special boolean flag for showing metadata.
* If set to true, each document entry in the returned JSON will contain a "metadata" field.
*
* This flag only takes effect when file is set.
*/
Boolean returnMetadata;
/**
* If set to true, the returned JSON will include a "prompt" field containing the final prompt that was
* used to request a completion. This is mainly useful for debugging purposes.
*/
Boolean returnPrompt;
/**
* If an object name is in the list, we provide the full information of the object;
* otherwise, we only provide the object ID.
*
* Currently we support completion and file objects for expansion.
*/
List<String> expand;
/**
* A unique identifier representing your end-user, which will help OpenAI to monitor and detect abuse.
*/
String user;
}
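
A minimal construction sketch for the class above, assuming the Lombok-generated builder and imports of java.util.Arrays and java.util.Collections; all values are illustrative, and only one of documents/file should be set, as the field docs note.

AnswerRequest request = AnswerRequest.builder()
        .model("curie")                                                    // engine for the completion step
        .question("Which puppy is happy?")
        .examplesContext("Puppy A is happy. Puppy B is sad.")
        .examples(Collections.singletonList(
                Arrays.asList("Which puppy is sad?", "Puppy B.")))         // (question, answer) pairs
        .documents(Arrays.asList("Puppy A is happy.", "Puppy B is sad."))  // or .file(...), not both
        .build();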


@ -0,0 +1,43 @@
package com.theokanning.openai.answer;
import lombok.Data;
import java.util.List;
/**
* An object containing a response from the answer api
* <p>
* https://beta.openai.com/docs/api-reference/answers/create
*/
@Data
public class AnswerResult {
/**
* A list of generated answers to the provided question.
*/
List<String> answers;
/**
* A unique id assigned to this completion
*/
String completion;
/**
* The GPT-3 model used for completion
*/
String model;
/**
* The type of object returned, should be "answer"
*/
String object;
/**
* The GPT-3 model used for search
*/
String searchModel;
/**
* A list of the most relevant documents for the question.
*/
List<Document> selectedDocuments;
}
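
A short sketch of reading the result above through the Lombok-generated getters; the result value is assumed to come from the answers endpoint (see OpenAiService below), and Document is defined in the next file.

String firstAnswer = result.getAnswers().get(0);              // generated answers to the question
for (Document doc : result.getSelectedDocuments()) {          // documents the answer was drawn from
    System.out.println(doc.getDocument() + ": " + doc.getText());
}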


@ -0,0 +1,21 @@
package com.theokanning.openai.answer;
import lombok.Data;
/**
* Represents a document selected by the answer api
*
* https://beta.openai.com/docs/api-reference/answers/create
*/
@Data
public class Document {
/**
* The position of this document in the documents list
*/
Integer document;
/**
* The text of the document
*/
String text;
}


@ -1,13 +1,12 @@
package com.theokanning.openai.classification;
import com.theokanning.openai.completion.CompletionChoice;
import lombok.Data;
import java.util.List;
/**
* An object containing a response from the classification api
* <p>
* https://beta.openai.com/docs/api-reference/classifications/create
*/
@Data


@ -1,17 +1,19 @@
package com.theokanning.openai;
import com.theokanning.openai.answer.AnswerRequest;
import com.theokanning.openai.answer.AnswerResult;
import com.theokanning.openai.classification.ClassificationRequest;
import com.theokanning.openai.classification.ClassificationResult;
import com.theokanning.openai.engine.Engine;
import com.theokanning.openai.file.File;
import com.theokanning.openai.finetune.FineTuneRequest;
import com.theokanning.openai.finetune.FineTuneEvent;
import com.theokanning.openai.finetune.FineTuneResult;
import com.theokanning.openai.search.SearchRequest;
import io.reactivex.Single;
import com.theokanning.openai.completion.CompletionRequest;
import com.theokanning.openai.completion.CompletionResult;
import com.theokanning.openai.engine.Engine;
import com.theokanning.openai.file.File;
import com.theokanning.openai.finetune.FineTuneEvent;
import com.theokanning.openai.finetune.FineTuneRequest;
import com.theokanning.openai.finetune.FineTuneResult;
import com.theokanning.openai.search.SearchRequest;
import com.theokanning.openai.search.SearchResult;
import io.reactivex.Single;
import okhttp3.MultipartBody;
import okhttp3.RequestBody;
import retrofit2.http.*;
@ -33,6 +35,9 @@ public interface OpenAiApi {
@POST("v1/classifications")
Single<ClassificationResult> createClassification(@Body ClassificationRequest request);
@POST("v1/answers")
Single<AnswerResult> createAnswer(@Body AnswerRequest request);
@GET("/v1/files")
Single<OpenAiResponse<File>> listFiles();


@ -4,18 +4,20 @@ import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.databind.DeserializationFeature;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.PropertyNamingStrategy;
import com.theokanning.openai.answer.AnswerRequest;
import com.theokanning.openai.answer.AnswerResult;
import com.theokanning.openai.classification.ClassificationRequest;
import com.theokanning.openai.classification.ClassificationResult;
import com.theokanning.openai.file.File;
import com.theokanning.openai.finetune.FineTuneRequest;
import com.theokanning.openai.finetune.FineTuneEvent;
import com.theokanning.openai.finetune.FineTuneResult;
import com.theokanning.openai.search.SearchRequest;
import okhttp3.*;
import com.theokanning.openai.completion.CompletionRequest;
import com.theokanning.openai.completion.CompletionResult;
import com.theokanning.openai.engine.Engine;
import com.theokanning.openai.file.File;
import com.theokanning.openai.finetune.FineTuneEvent;
import com.theokanning.openai.finetune.FineTuneRequest;
import com.theokanning.openai.finetune.FineTuneResult;
import com.theokanning.openai.search.SearchRequest;
import com.theokanning.openai.search.SearchResult;
import okhttp3.*;
import retrofit2.Retrofit;
import retrofit2.adapter.rxjava2.RxJava2CallAdapterFactory;
import retrofit2.converter.jackson.JacksonConverterFactory;
@ -68,6 +70,10 @@ public class OpenAiService {
return api.createClassification(request).blockingGet();
}
public AnswerResult createAnswer(AnswerRequest request) {
return api.createAnswer(request).blockingGet();
}
public List<File> listFiles() {
return api.listFiles().blockingGet().data;
}
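
A minimal synchronous usage sketch for the new helper, mirroring the other blocking methods in this class; the token lookup and request are illustrative, with the request built as in the AnswerRequest sketch above.

OpenAiService service = new OpenAiService(System.getenv("OPENAI_TOKEN"));
AnswerResult result = service.createAnswer(request);          // blocks until the API responds
result.getAnswers().forEach(System.out::println);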


@ -0,0 +1,37 @@
package com.theokanning.openai;
import com.theokanning.openai.answer.AnswerRequest;
import com.theokanning.openai.answer.AnswerResult;
import org.junit.jupiter.api.Test;
import java.util.Arrays;
import java.util.Collections;
import static org.junit.jupiter.api.Assertions.assertNotNull;
public class AnswerTest {
String token = System.getenv("OPENAI_TOKEN");
OpenAiService service = new OpenAiService(token);
@Test
void createAnswer() {
AnswerRequest answerRequest = AnswerRequest.builder()
.documents(Arrays.asList("Puppy A is happy.", "Puppy B is sad."))
.question("which puppy is happy?")
.searchModel("ada")
.model("curie")
.examplesContext("In 2017, U.S. life expectancy was 78.6 years.")
.examples(Collections.singletonList(
Arrays.asList("What is human life expectancy in the United States?", "78 years.")
))
.maxTokens(5)
.stop(Arrays.asList("\n", "<|endoftext|>"))
.build();
AnswerResult result = service.createAnswer(answerRequest);
assertNotNull(result.getAnswers().get(0));
}
}