code
stringlengths 619
138k
| apis
listlengths 1
8
| extract_api
stringlengths 79
7.3k
|
|---|---|---|
package oracleai;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.theokanning.openai.completion.chat.ChatCompletionChoice;
import com.theokanning.openai.completion.chat.ChatCompletionRequest;
import com.theokanning.openai.completion.chat.ChatMessage;
import com.theokanning.openai.completion.chat.ChatMessageRole;
import com.theokanning.openai.service.OpenAiService;
import lombok.Data;
import oracle.jdbc.OracleTypes;
import oracle.sql.json.OracleJsonObject;
import oracle.ucp.jdbc.PoolDataSource;
import oracle.ucp.jdbc.PoolDataSourceFactory;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.web.bind.annotation.*;
import java.net.URLEncoder;
import java.nio.charset.StandardCharsets;
import java.sql.*;
import java.time.Duration;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
@RestController
@RequestMapping("/databasejs")
public class CallAIFromOracleDatabaseUseJSONAndSQL {

    private static final Logger log = LoggerFactory.getLogger(CallAIFromOracleDatabaseUseJSONAndSQL.class);

    // Shared UCP pool, created lazily; the original built a brand-new pool on every request.
    private static PoolDataSource pool;

    // Last reply produced by /getreply; volatile because Spring controllers are singletons
    // shared across request threads. NOTE(review): still racy if two conversations overlap —
    // consider keying replies by conversation name.
    volatile String lastReply;

    /** Renders a minimal HTML form that posts to /databasejs/conversation. */
    @GetMapping("/form")
    public String form(){
        return " <html>" +
                "<form method=\"post\" action=\"/databasejs/conversation\">" +
                " <br> Provide a unique conversation name and dialogue/question ..\n" +
                " <br><label for=\"conversationname\">conversation name:</label><br>" +
                " <input type=\"text\" id=\"conversationname\" name=\"conversationname\" value=\"conversationname\"><br>" +
                " <label for=\"dialogue\">dialogue:</label><br>" +
                " <input type=\"text\" id=\"dialogue\" name=\"dialogue\" value=\"dialogue\" size=\"60\"><br><br>" +
                " <input type=\"submit\" value=\"Submit\">" +
                "</form> " +
                "</html>";
    }

    /**
     * Inserts a new conversation (the user's question plus an empty AI slot) as JSON into the
     * conversation_dv duality view, then runs the openai_call stored procedure which drives the
     * AI reply flow.
     *
     * @param conversationName unique name for the conversation
     * @param dialogue         the user's question (URL-encoded before storage)
     * @return the last reply captured by /getreply
     * @throws Exception on JSON serialization or database failure
     */
    @PostMapping("/conversation")
    public String conversation( @RequestParam("conversationname") String conversationName,
                                @RequestParam("dialogue") String dialogue)
            throws Exception {
        log.info("conversationname:" + conversationName + "dialogue:" + dialogue + " ");
        // URL-encode so the dialogue text survives the round trip through the database call
        dialogue = URLEncoder.encode(dialogue, StandardCharsets.UTF_8.toString());
        Conversation conversation = new Conversation();
        conversation.setName(conversationName);
        // interlocutorId 1 = the user asking the question
        Interlocutor interlocutorUser = new Interlocutor();
        interlocutorUser.setInterlocutorId(1);
        interlocutorUser.setName("Paul");
        interlocutorUser.setDialogue(dialogue);
        // interlocutorId 0 = the as-yet-unanswered AI reply
        Interlocutor interlocutorOpenAI = new Interlocutor();
        interlocutorOpenAI.setInterlocutorId(0);
        interlocutorOpenAI.setName("OpenAI");
        conversation.setInterlocutor(List.of(interlocutorOpenAI, interlocutorUser));
        String json = new ObjectMapper().writeValueAsString(conversation);
        log.info(json);
        // try-with-resources: the original leaked the Connection and CallableStatement
        try (Connection conn = getConnection();
             PreparedStatement stmt = conn.prepareStatement("INSERT INTO conversation_dv VALUES (?)")) {
            stmt.setObject(1, json, OracleTypes.JSON);
            stmt.execute();
            log.info("CallAIFromOracleDatabaseUseJSONAndSQL. insert done");
            try (CallableStatement cstmt = conn.prepareCall("{call openai_call()}")) {
                cstmt.execute();
            }
            log.info("CallAIFromOracleDatabaseUseJSONAndSQL. sproc done");
        }
        return lastReply;
    }

    /**
     * Borrows a connection from a lazily-initialized, shared UCP pool.
     * Synchronized so concurrent first requests create only one pool.
     */
    private static synchronized Connection getConnection() throws SQLException {
        if (pool == null) {
            pool = PoolDataSourceFactory.getPoolDataSource();
            pool.setConnectionFactoryClassName("oracle.jdbc.pool.OracleDataSource");
            // TODO(review): move credentials out of source (env vars / wallet); hardcoded for demo only
            pool.setURL("jdbc:oracle:thin:@localhost:1521/FREEPDB1");
            pool.setUser("aijs");
            pool.setPassword("Welcome12345");
        }
        return pool.getConnection();
    }

    /** JSON shape stored in conversation_dv: a named conversation with its interlocutors. */
    @Data
    public static class Conversation {
        private String name;
        private List<Interlocutor> interlocutor;
    }

    /** One participant in a conversation (the user or the AI) and their dialogue line. */
    @Data
    public static class Interlocutor {
        private int interlocutorId;
        private String name;
        private String dialogue;
    }

    /**
     * Obtains a chat completion for the given text and caches it in {@link #lastReply}
     * for /conversation to return. Invoked (indirectly) by the database stored procedure.
     *
     * @param textcontent the prompt to send as a system message
     * @return the concatenated completion text
     */
    @GetMapping("/getreply")
    String getreply( @RequestParam("textcontent") String textcontent) {
        log.info("CallAIFromOracleDatabaseUseJSONAndSQL.getreply");
        OpenAiService service =
                new OpenAiService(System.getenv("OPENAI_KEY"), Duration.ofSeconds(60));
        log.info("Streaming chat completion... textcontent:" + textcontent);
        final List<ChatMessage> messages = new ArrayList<>();
        messages.add(new ChatMessage(ChatMessageRole.SYSTEM.value(), textcontent));
        ChatCompletionRequest chatCompletionRequest = ChatCompletionRequest
                .builder()
                .model("gpt-3.5-turbo")
                .messages(messages)
                .n(1)
                .maxTokens(300) //was 50
                .logitBias(new HashMap<>())
                .build();
        // StringBuilder instead of repeated String += in the loop
        StringBuilder reply = new StringBuilder();
        for (ChatCompletionChoice choice : service.createChatCompletion(chatCompletionRequest).getChoices()) {
            String content = choice.getMessage().getContent();
            reply.append(content == null ? " " : content);
        }
        service.shutdownExecutor();
        String replyString = reply.toString();
        log.info("CallAIFromOracleDatabaseUseJSONAndSQL.getreply replyString:" + replyString);
        return lastReply = replyString;
    }

    /** Returns every row of the conversation_dv duality view as concatenated JSON text. */
    @GetMapping("/queryconversations")
    public String queryconversations() throws SQLException {
        StringBuilder results = new StringBuilder();
        // try-with-resources: the original leaked the Connection, statement and result set
        try (Connection conn = getConnection();
             PreparedStatement stmt = conn.prepareStatement("SELECT data FROM conversation_dv ");
             ResultSet rs = stmt.executeQuery()) {
            while (rs.next()) {
                OracleJsonObject row = rs.getObject(1, OracleJsonObject.class);
                log.info(row.toString());
                results.append(row).append("\n");
            }
        }
        log.info("queryconversations results:" + results);
        return results.toString();
    }
}
|
[
"com.theokanning.openai.completion.chat.ChatMessageRole.SYSTEM.value"
] |
[((2253, 2286), 'java.nio.charset.StandardCharsets.UTF_8.toString'), ((4882, 4912), 'com.theokanning.openai.completion.chat.ChatMessageRole.SYSTEM.value')]
|
package oracleai;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.ser.impl.SimpleFilterProvider;
import com.oracle.bmc.ailanguage.AIServiceLanguageClient;
import com.oracle.bmc.ailanguage.model.DetectLanguageSentimentsDetails;
import com.oracle.bmc.ailanguage.model.DetectLanguageSentimentsResult;
import com.oracle.bmc.ailanguage.model.SentimentAspect;
import com.oracle.bmc.ailanguage.requests.DetectLanguageSentimentsRequest;
import com.oracle.bmc.ailanguage.responses.DetectLanguageSentimentsResponse;
import com.oracle.bmc.aivision.AIServiceVisionClient;
import com.oracle.bmc.aivision.model.*;
import com.oracle.bmc.aivision.requests.AnalyzeImageRequest;
import com.oracle.bmc.aivision.responses.AnalyzeImageResponse;
import com.oracle.bmc.auth.AuthenticationDetailsProvider;
import com.oracle.bmc.auth.ConfigFileAuthenticationDetailsProvider;
import com.oracle.bmc.auth.InstancePrincipalsAuthenticationDetailsProvider;
import com.oracle.bmc.model.BmcException;
import com.theokanning.openai.completion.chat.ChatCompletionChoice;
import com.theokanning.openai.completion.chat.ChatCompletionRequest;
import com.theokanning.openai.completion.chat.ChatMessage;
import com.theokanning.openai.completion.chat.ChatMessageRole;
import com.theokanning.openai.service.OpenAiService;
import lombok.Data;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.web.bind.annotation.*;
import org.springframework.web.multipart.MultipartFile;
import java.io.IOException;
import java.time.Duration;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import org.json.JSONArray;
import org.json.JSONObject;
@RestController
@RequestMapping("/tellastory")
public class WriteAStoryAboutAPictureAndGiveItsSentiments {

    private static final Logger log = LoggerFactory.getLogger(WriteAStoryAboutAPictureAndGiveItsSentiments.class);

    /** Serves the image-upload form with genre options for the generated story. */
    @GetMapping("/form")
    public String form()
            throws Exception {
        // radio buttons share the name "genopts" but need unique ids (duplicate ids are invalid HTML)
        return " <html><form method=\"post\" action=\"/tellastory/tellastory\" enctype=\"multipart/form-data\">\n" +
                " Select an image file to create story from...\n" +
                " <input type=\"file\" name=\"file\" accept=\"image/*\">\n" +
                " <br>" +
                "<br> Some additional options..." +
                "<br><input type=\"radio\" id=\"genopts-adventure\" name=\"genopts\" value=\"an adventure\" checked >an adventure" +
                "<br><input type=\"radio\" id=\"genopts-romantic\" name=\"genopts\" value=\"romantic\">romantic" +
                "<br><input type=\"radio\" id=\"genopts-dystopia\" name=\"genopts\" value=\"a dystopia\">a dystopia" +
                "<br><input type=\"radio\" id=\"genopts-documentary\" name=\"genopts\" value=\"a documentary\">a documentary" +
                "<br><input type=\"radio\" id=\"genopts-anime\" name=\"genopts\" value=\"an anime movie\">an anime movie" +
                " <br><input type=\"submit\" value=\"Send Request to Vision AI\">\n" +
                " </form></html>";
    }

    /**
     * Full pipeline: Vision AI object detection on the uploaded image, story
     * generation from the detected object names, then sentiment analysis of the story.
     *
     * @param file    uploaded image
     * @param genopts genre chosen on the form, spliced into the story prompt
     * @return HTML containing the story and its per-aspect sentiments
     */
    @PostMapping("/tellastory")
    public String tellastory(@RequestParam("file") MultipartFile file , @RequestParam("genopts") String genopts)
            throws Exception {
        log.info("got image file, now analyze, file = " + file);
        String objectDetectionResults = processImage(file.getBytes(), true);
        ImageAnalysis imageAnalysis = parseJsonToImageAnalysis(objectDetectionResults);
        // collect detected object names into a comma-separated prompt fragment
        StringBuilder fullText = new StringBuilder();
        for (ImageObject image : imageAnalysis.getImageObjects()) {
            fullText.append(image.getName()).append(", ");
        }
        log.info("fullText = " + fullText);
        String generatedstory =
                chat("using strong negative and positive sentiments, " +
                        "write a story that is " + genopts + " and includes " + fullText);
        return "<html><br><br>story:" + generatedstory +
                "<br><br>sentiment analysis:" + sentiments(generatedstory) + "</html>";
    }

    /**
     * Sends the text to OpenAI chat completion as a single system message and
     * returns the concatenated reply.
     */
    String chat(String textcontent) throws Exception {
        OpenAiService service =
                new OpenAiService(System.getenv("OPENAI_KEY"), Duration.ofSeconds(60));
        log.info("Streaming chat completion... textcontent:" + textcontent);
        final List<ChatMessage> messages = new ArrayList<>();
        messages.add(new ChatMessage(ChatMessageRole.SYSTEM.value(), textcontent));
        ChatCompletionRequest chatCompletionRequest = ChatCompletionRequest
                .builder()
                .model("gpt-3.5-turbo")
                .messages(messages)
                .n(1)
                .maxTokens(300)
                .logitBias(new HashMap<>())
                .build();
        // StringBuilder instead of repeated String += in the loop
        StringBuilder reply = new StringBuilder();
        for (ChatCompletionChoice choice : service.createChatCompletion(chatCompletionRequest).getChoices()) {
            String content = choice.getMessage().getContent();
            reply.append(content == null ? " " : content);
        }
        service.shutdownExecutor();
        return reply.toString();
    }

    /**
     * Calls OCI Vision AI on the raw image bytes, requesting classification,
     * object detection and text detection, and returns the raw JSON result.
     *
     * @param bytes            image bytes
     * @param isConfigFileAuth true = OCI config-file auth, false = instance principals
     * @return JSON string of the AnalyzeImage result
     */
    String processImage(byte[] bytes, boolean isConfigFileAuth) throws Exception {
        AIServiceVisionClient aiServiceVisionClient;
        if (isConfigFileAuth) {
            AuthenticationDetailsProvider provider = new ConfigFileAuthenticationDetailsProvider(
                    System.getenv("OCICONFIG_FILE"), System.getenv("OCICONFIG_PROFILE"));
            aiServiceVisionClient = new AIServiceVisionClient(provider);
        } else {
            aiServiceVisionClient = new AIServiceVisionClient(InstancePrincipalsAuthenticationDetailsProvider.builder().build());
        }
        List<ImageFeature> features = new ArrayList<>();
        features.add(ImageClassificationFeature.builder()
                .maxResults(10)
                .build());
        features.add(ImageObjectDetectionFeature.builder()
                .maxResults(10)
                .build());
        features.add(ImageTextDetectionFeature.builder().build());
        InlineImageDetails inlineImageDetails = InlineImageDetails.builder()
                .data(bytes)
                .build();
        AnalyzeImageDetails analyzeImageDetails = AnalyzeImageDetails.builder()
                .image(inlineImageDetails)
                .features(features)
                .build();
        AnalyzeImageRequest request = AnalyzeImageRequest.builder()
                .analyzeImageDetails(analyzeImageDetails)
                .build();
        AnalyzeImageResponse response = aiServiceVisionClient.analyzeImage(request);
        ObjectMapper mapper = new ObjectMapper();
        // ignore unknown filter ids so serialization doesn't fail on SDK model filters
        mapper.setFilterProvider(new SimpleFilterProvider().setFailOnUnknownId(false));
        String json = mapper.writeValueAsString(response.getAnalyzeImageResult());
        log.info("AnalyzeImage Result");
        log.info(json);
        return json;
    }

    /** Detected object: name, confidence and bounding polygon. */
    @Data
    static class ImageObject {
        private String name;
        private double confidence;
        private BoundingPolygon boundingPolygon;
    }

    /** Polygon expressed as normalized (0..1) vertices. */
    @Data
    static class BoundingPolygon {
        private List<Point> normalizedVertices;
    }

    /** 2D point with normalized coordinates. */
    @Data
    static class Point {
        private double x;
        private double y;
        public Point(double x, double y) {
            this.x = x;
            this.y = y;
        }
    }

    /** Classification label with confidence. */
    @Data
    static class Label {
        private String name;
        private double confidence;
    }

    /** Ontology class with its parent/synonym names. */
    @Data
    static class OntologyClass {
        private String name;
        private List<String> parentNames;
        private List<String> synonymNames;
    }

    /** Detected text: individual words and full lines. */
    @Data
    static class ImageText {
        private List<Word> words;
        private List<Line> lines;
    }

    /** A single detected word. */
    @Data
    static class Word {
        private String text;
        private double confidence;
        private BoundingPolygon boundingPolygon;
    }

    /** A detected line of text referencing its words by index. */
    @Data
    static class Line {
        private String text;
        private double confidence;
        private BoundingPolygon boundingPolygon;
        private List<Integer> wordIndexes;
    }

    /** Local mirror of the Vision AI AnalyzeImage result. */
    @Data
    static class ImageAnalysis {
        private List<ImageObject> imageObjects;
        private List<Label> labels;
        private List<OntologyClass> ontologyClasses;
        private ImageText imageText;
        private String imageClassificationModelVersion;
        private String objectDetectionModelVersion;
        private String textDetectionModelVersion;
        private List<String> errors;
    }

    /**
     * Parses the Vision AI JSON result into the local ImageAnalysis model.
     * The original inlined the boundingPolygon/vertex parsing three times and the
     * string-array copying twice; those are now shared helpers.
     */
    private ImageAnalysis parseJsonToImageAnalysis(String jsonString) {
        JSONObject json = new JSONObject(jsonString);

        List<ImageObject> imageObjects = new ArrayList<>();
        JSONArray imageObjectsArray = json.getJSONArray("imageObjects");
        for (int i = 0; i < imageObjectsArray.length(); i++) {
            JSONObject o = imageObjectsArray.getJSONObject(i);
            ImageObject imageObject = new ImageObject();
            imageObject.setName(o.getString("name"));
            imageObject.setConfidence(o.getDouble("confidence"));
            imageObject.setBoundingPolygon(parseBoundingPolygon(o.getJSONObject("boundingPolygon")));
            imageObjects.add(imageObject);
        }

        List<Label> labels = new ArrayList<>();
        JSONArray labelsArray = json.getJSONArray("labels");
        for (int i = 0; i < labelsArray.length(); i++) {
            JSONObject l = labelsArray.getJSONObject(i);
            Label label = new Label();
            label.setName(l.getString("name"));
            label.setConfidence(l.getDouble("confidence"));
            labels.add(label);
        }

        List<OntologyClass> ontologyClasses = new ArrayList<>();
        JSONArray ontologyClassesArray = json.getJSONArray("ontologyClasses");
        for (int i = 0; i < ontologyClassesArray.length(); i++) {
            JSONObject oc = ontologyClassesArray.getJSONObject(i);
            OntologyClass ontologyClass = new OntologyClass();
            ontologyClass.setName(oc.getString("name"));
            ontologyClass.setParentNames(toStringList(oc.getJSONArray("parentNames")));
            ontologyClasses.add(ontologyClass);
        }

        JSONObject imageTextJson = json.getJSONObject("imageText");

        List<Word> words = new ArrayList<>();
        JSONArray wordsArray = imageTextJson.getJSONArray("words");
        for (int i = 0; i < wordsArray.length(); i++) {
            JSONObject w = wordsArray.getJSONObject(i);
            Word word = new Word();
            word.setText(w.getString("text"));
            word.setConfidence(w.getDouble("confidence"));
            word.setBoundingPolygon(parseBoundingPolygon(w.getJSONObject("boundingPolygon")));
            words.add(word);
        }

        List<Line> lines = new ArrayList<>();
        JSONArray linesArray = imageTextJson.getJSONArray("lines");
        for (int i = 0; i < linesArray.length(); i++) {
            JSONObject l = linesArray.getJSONObject(i);
            Line line = new Line();
            line.setText(l.getString("text"));
            line.setConfidence(l.getDouble("confidence"));
            line.setBoundingPolygon(parseBoundingPolygon(l.getJSONObject("boundingPolygon")));
            JSONArray wordIndexesArray = l.getJSONArray("wordIndexes");
            List<Integer> wordIndexes = new ArrayList<>();
            for (int j = 0; j < wordIndexesArray.length(); j++) {
                wordIndexes.add(wordIndexesArray.getInt(j));
            }
            line.setWordIndexes(wordIndexes);
            lines.add(line);
        }

        ImageText imageText = new ImageText();
        imageText.setWords(words);
        imageText.setLines(lines);

        ImageAnalysis imageAnalysis = new ImageAnalysis();
        imageAnalysis.setImageObjects(imageObjects);
        imageAnalysis.setLabels(labels);
        imageAnalysis.setOntologyClasses(ontologyClasses);
        imageAnalysis.setImageText(imageText);
        imageAnalysis.setImageClassificationModelVersion(json.getString("imageClassificationModelVersion"));
        imageAnalysis.setObjectDetectionModelVersion(json.getString("objectDetectionModelVersion"));
        imageAnalysis.setTextDetectionModelVersion(json.getString("textDetectionModelVersion"));
        imageAnalysis.setErrors(toStringList(json.getJSONArray("errors")));
        return imageAnalysis;
    }

    /** Converts a "boundingPolygon" JSON object into a BoundingPolygon of normalized vertices. */
    private BoundingPolygon parseBoundingPolygon(JSONObject boundingPolygonJson) {
        JSONArray verticesArray = boundingPolygonJson.getJSONArray("normalizedVertices");
        List<Point> vertices = new ArrayList<>();
        for (int i = 0; i < verticesArray.length(); i++) {
            JSONObject v = verticesArray.getJSONObject(i);
            vertices.add(new Point(v.getDouble("x"), v.getDouble("y")));
        }
        BoundingPolygon polygon = new BoundingPolygon();
        polygon.setNormalizedVertices(vertices);
        return polygon;
    }

    /** Copies a JSON array of strings into a List. */
    private static List<String> toStringList(JSONArray array) {
        List<String> values = new ArrayList<>();
        for (int i = 0; i < array.length(); i++) {
            values.add(array.getString(i));
        }
        return values;
    }

    /**
     * Runs OCI Language aspect-based sentiment analysis over the text.
     *
     * @param textcontent text to analyze
     * @return HTML fragment listing sentiment per detected aspect, or an error fragment on failure
     */
    public String sentiments(String textcontent) throws IOException {
        log.info("analyze text for sentiment:" + textcontent);
        AuthenticationDetailsProvider provider = new ConfigFileAuthenticationDetailsProvider(
                System.getenv("OCICONFIG_FILE"), System.getenv("OCICONFIG_PROFILE"));
        AIServiceLanguageClient languageClient =
                AIServiceLanguageClient.builder().build(provider);
        DetectLanguageSentimentsDetails details =
                DetectLanguageSentimentsDetails.builder()
                        .text(textcontent)
                        .build();
        DetectLanguageSentimentsRequest detectLanguageSentimentsRequest =
                DetectLanguageSentimentsRequest.builder()
                        .detectLanguageSentimentsDetails(details)
                        .build();
        DetectLanguageSentimentsResponse response;
        try {
            response = languageClient.detectLanguageSentiments(detectLanguageSentimentsRequest);
        } catch (BmcException e) {
            // the original logged and fell through, then dereferenced the null response (NPE);
            // fail gracefully instead
            log.error("Failed to detect language and sentiments: " + e.getMessage(), e);
            return "<br>sentiment analysis failed: " + e.getMessage();
        }
        DetectLanguageSentimentsResult result = response.getDetectLanguageSentimentsResult();
        StringBuilder sentimentReturn = new StringBuilder();
        for (SentimentAspect aspect : result.getAspects()) {
            sentimentReturn.append("<br>sentiment:").append(aspect.getSentiment());
            sentimentReturn.append(" text:").append(aspect.getText());
            sentimentReturn.append("\n");
        }
        log.info(sentimentReturn.toString());
        return sentimentReturn.toString();
    }
}
|
[
"com.theokanning.openai.completion.chat.ChatMessageRole.SYSTEM.value"
] |
[((4501, 4531), 'com.theokanning.openai.completion.chat.ChatMessageRole.SYSTEM.value'), ((5779, 5844), 'com.oracle.bmc.auth.InstancePrincipalsAuthenticationDetailsProvider.builder'), ((6759, 6871), 'com.oracle.bmc.aivision.requests.AnalyzeImageRequest.builder'), ((6759, 6846), 'com.oracle.bmc.aivision.requests.AnalyzeImageRequest.builder'), ((15925, 15974), 'com.oracle.bmc.ailanguage.AIServiceLanguageClient.builder'), ((16042, 16159), 'com.oracle.bmc.ailanguage.model.DetectLanguageSentimentsDetails.builder'), ((16042, 16126), 'com.oracle.bmc.ailanguage.model.DetectLanguageSentimentsDetails.builder'), ((16251, 16391), 'com.oracle.bmc.ailanguage.requests.DetectLanguageSentimentsRequest.builder'), ((16251, 16358), 'com.oracle.bmc.ailanguage.requests.DetectLanguageSentimentsRequest.builder')]
|
package com.theokanning.openai.service;
import com.theokanning.openai.embedding.Embedding;
import com.theokanning.openai.embedding.EmbeddingRequest;
import org.junit.jupiter.api.Test;
import java.util.Collections;
import java.util.List;
import static org.junit.jupiter.api.Assertions.assertFalse;
public class EmbeddingTest {

    // API token read from the environment; the test fails without it.
    String token = System.getenv("OPENAI_TOKEN");
    com.theokanning.openai.service.OpenAiService service = new OpenAiService(token);

    /**
     * Requests an embedding for a single sentence and verifies that a
     * non-empty vector comes back.
     */
    @Test
    void createEmbeddings() {
        final EmbeddingRequest request = EmbeddingRequest.builder()
                .model("text-embedding-ada-002")
                .input(Collections.singletonList("The food was delicious and the waiter..."))
                .build();
        final List<Embedding> result = service.createEmbeddings(request).getData();
        assertFalse(result.isEmpty());
        assertFalse(result.get(0).getEmbedding().isEmpty());
    }
}
|
[
"com.theokanning.openai.embedding.EmbeddingRequest.builder"
] |
[((552, 746), 'com.theokanning.openai.embedding.EmbeddingRequest.builder'), ((552, 721), 'com.theokanning.openai.embedding.EmbeddingRequest.builder'), ((552, 627), 'com.theokanning.openai.embedding.EmbeddingRequest.builder')]
|
package com.theokanning.openai.service;
import com.theokanning.openai.OpenAiResponse;
import com.theokanning.openai.assistants.Assistant;
import com.theokanning.openai.assistants.AssistantRequest;
import com.theokanning.openai.messages.Message;
import com.theokanning.openai.messages.MessageRequest;
import com.theokanning.openai.runs.Run;
import com.theokanning.openai.runs.RunCreateRequest;
import com.theokanning.openai.threads.Thread;
import com.theokanning.openai.threads.ThreadRequest;
import com.theokanning.openai.utils.TikTokensUtil;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.Timeout;
import java.util.List;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertNotNull;
class RunTest {

    String token = System.getenv("OPENAI_TOKEN");
    OpenAiService service = new OpenAiService(token);

    /** A run is finished once it reaches a terminal status. */
    private static boolean isFinished(String status) {
        return status.equals("completed") || status.equals("failed");
    }

    /**
     * Creates an assistant, thread, message and run, polls until the run
     * reaches a terminal status, then verifies the thread holds the user
     * message plus the assistant's reply.
     */
    @Test
    @Timeout(10)
    void createRetrieveRun() throws InterruptedException {
        AssistantRequest assistantRequest = AssistantRequest.builder()
                .model(TikTokensUtil.ModelEnum.GPT_4_1106_preview.getName())
                .name("MATH_TUTOR")
                .instructions("You are a personal Math Tutor.")
                .build();
        Assistant assistant = service.createAssistant(assistantRequest);
        ThreadRequest threadRequest = ThreadRequest.builder()
                .build();
        Thread thread = service.createThread(threadRequest);
        MessageRequest messageRequest = MessageRequest.builder()
                .content("Hello")
                .build();
        service.createMessage(thread.getId(), messageRequest);
        RunCreateRequest runCreateRequest = RunCreateRequest.builder()
                .assistantId(assistant.getId())
                .build();
        Run run = service.createRun(thread.getId(), runCreateRequest);
        assertNotNull(run);
        Run retrievedRun;
        do {
            // brief pause between polls; the original busy-waited and hammered the API.
            // "Thread" here is shadowed by the OpenAI Thread import, hence the qualification.
            java.lang.Thread.sleep(200);
            retrievedRun = service.retrieveRun(thread.getId(), run.getId());
            assertEquals(run.getId(), retrievedRun.getId());
        }
        while (!isFinished(retrievedRun.getStatus()));
        assertNotNull(retrievedRun);
        OpenAiResponse<Message> response = service.listMessages(thread.getId());
        List<Message> messages = response.getData();
        assertEquals(2, messages.size());
        assertEquals("user", messages.get(1).getRole());
        assertEquals("assistant", messages.get(0).getRole());
    }
}
|
[
"com.theokanning.openai.utils.TikTokensUtil.ModelEnum.GPT_4_1106_preview.getName",
"com.theokanning.openai.assistants.AssistantRequest.builder",
"com.theokanning.openai.messages.MessageRequest.builder",
"com.theokanning.openai.threads.ThreadRequest.builder",
"com.theokanning.openai.runs.RunCreateRequest.builder"
] |
[((989, 1217), 'com.theokanning.openai.assistants.AssistantRequest.builder'), ((989, 1192), 'com.theokanning.openai.assistants.AssistantRequest.builder'), ((989, 1128), 'com.theokanning.openai.assistants.AssistantRequest.builder'), ((989, 1092), 'com.theokanning.openai.assistants.AssistantRequest.builder'), ((1039, 1091), 'com.theokanning.openai.utils.TikTokensUtil.ModelEnum.GPT_4_1106_preview.getName'), ((1331, 1379), 'com.theokanning.openai.threads.ThreadRequest.builder'), ((1483, 1566), 'com.theokanning.openai.messages.MessageRequest.builder'), ((1483, 1541), 'com.theokanning.openai.messages.MessageRequest.builder'), ((1695, 1794), 'com.theokanning.openai.runs.RunCreateRequest.builder'), ((1695, 1769), 'com.theokanning.openai.runs.RunCreateRequest.builder')]
|
package example;
import com.theokanning.openai.completion.chat.ChatCompletionRequest;
import com.theokanning.openai.completion.chat.ChatMessage;
import com.theokanning.openai.completion.chat.ChatMessageRole;
import com.theokanning.openai.service.OpenAiService;
import com.theokanning.openai.completion.CompletionRequest;
import com.theokanning.openai.image.CreateImageRequest;
import java.time.Duration;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
class OpenAiApiExample {

    /**
     * Demo driver: runs a text completion, generates an image, then streams a
     * chat completion, printing each result to stdout.
     */
    public static void main(String... args) {
        final String apiToken = System.getenv("OPENAI_TOKEN");
        final OpenAiService service = new OpenAiService(apiToken, Duration.ofSeconds(30));

        System.out.println("\nCreating completion...");
        final CompletionRequest completion = CompletionRequest.builder()
                .model("babbage-002")
                .prompt("Somebody once told me the world is gonna roll me")
                .echo(true)
                .user("testing")
                .n(3)
                .build();
        service.createCompletion(completion).getChoices().forEach(System.out::println);

        System.out.println("\nCreating Image...");
        final CreateImageRequest imageRequest = CreateImageRequest.builder()
                .prompt("A cow breakdancing with a turtle")
                .build();
        System.out.println("\nImage is located at:");
        System.out.println(service.createImage(imageRequest).getData().get(0).getUrl());

        System.out.println("Streaming chat completion...");
        final ChatMessage persona =
                new ChatMessage(ChatMessageRole.SYSTEM.value(), "You are a dog and will speak as such.");
        final List<ChatMessage> chatHistory = new ArrayList<>();
        chatHistory.add(persona);
        final ChatCompletionRequest chatRequest = ChatCompletionRequest
                .builder()
                .model("gpt-3.5-turbo")
                .messages(chatHistory)
                .n(1)
                .maxTokens(50)
                .logitBias(new HashMap<>())
                .build();
        service.streamChatCompletion(chatRequest)
                .doOnError(Throwable::printStackTrace)
                .blockingForEach(System.out::println);
        service.shutdownExecutor();
    }
}
|
[
"com.theokanning.openai.completion.chat.ChatMessageRole.SYSTEM.value",
"com.theokanning.openai.image.CreateImageRequest.builder",
"com.theokanning.openai.completion.CompletionRequest.builder"
] |
[((794, 1043), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((794, 1018), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((794, 996), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((794, 963), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((794, 935), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((794, 859), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((1229, 1342), 'com.theokanning.openai.image.CreateImageRequest.builder'), ((1229, 1317), 'com.theokanning.openai.image.CreateImageRequest.builder'), ((1664, 1694), 'com.theokanning.openai.completion.chat.ChatMessageRole.SYSTEM.value')]
|
/*
========================================================================
SchemaCrawler
http://www.schemacrawler.com
Copyright (c) 2000-2024, Sualeh Fatehi <[email protected]>.
All rights reserved.
------------------------------------------------------------------------
SchemaCrawler is distributed in the hope that it will be useful, but
WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
SchemaCrawler and the accompanying materials are made available under
the terms of the Eclipse Public License v1.0, GNU General Public License
v3 or GNU Lesser General Public License v3.
You may elect to redistribute this code under any of these licenses.
The Eclipse Public License is available at:
http://www.eclipse.org/legal/epl-v10.html
The GNU General Public License v3 and the GNU Lesser General Public
License v3 are available at:
http://www.gnu.org/licenses/
========================================================================
*/
package schemacrawler.tools.command.chatgpt.embeddings;
import java.util.Collections;
import java.util.logging.Level;
import java.util.logging.Logger;
import com.theokanning.openai.embedding.EmbeddingRequest;
import com.theokanning.openai.embedding.EmbeddingResult;
import com.theokanning.openai.service.OpenAiService;
import static java.util.Objects.requireNonNull;
import static us.fatehi.utility.Utility.requireNotBlank;
import us.fatehi.utility.string.StringFormat;
public final class EmbeddingService {

  private static final Logger LOGGER = Logger.getLogger(EmbeddingService.class.getCanonicalName());

  private static final String TEXT_EMBEDDING_MODEL = "text-embedding-3-small";

  private final OpenAiService service;

  public EmbeddingService(final OpenAiService service) {
    this.service = requireNonNull(service, "No Open AI service provided");
  }

  /**
   * Embeds the given text with the configured OpenAI model. On any failure the
   * error is logged and a TextEmbedding without a vector is returned instead.
   */
  public TextEmbedding embed(final String text) {
    requireNotBlank(text, "No text provided");
    try {
      final EmbeddingRequest request =
          EmbeddingRequest.builder()
              .model(TEXT_EMBEDDING_MODEL)
              .input(Collections.singletonList(text))
              .build();
      final EmbeddingResult result = service.createEmbeddings(request);
      return new TextEmbedding(text, result);
    } catch (final Exception e) {
      LOGGER.log(Level.WARNING, e, new StringFormat("Could not embed text"));
      // fall back to an embedding-less wrapper so callers always get a value
      return new TextEmbedding(text);
    }
  }
}
|
[
"com.theokanning.openai.embedding.EmbeddingRequest.builder"
] |
[((2041, 2187), 'com.theokanning.openai.embedding.EmbeddingRequest.builder'), ((2041, 2164), 'com.theokanning.openai.embedding.EmbeddingRequest.builder'), ((2041, 2110), 'com.theokanning.openai.embedding.EmbeddingRequest.builder')]
|
package br.com.fiap.gsjava.service;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.node.ObjectNode;
import com.fasterxml.jackson.databind.node.TextNode;
import com.theokanning.openai.completion.chat.ChatFunction;
import com.theokanning.openai.completion.chat.ChatFunctionCall;
import com.theokanning.openai.completion.chat.ChatMessage;
import com.theokanning.openai.completion.chat.ChatMessageRole;
import java.util.*;
public class FunctionExecutor {

    // camelCase: these are mutable private fields, not constants (UPPER_SNAKE_CASE was misleading)
    /** Mapper used to parse function-call arguments and serialize results; replaceable via setObjectMapper. */
    private ObjectMapper mapper = new ObjectMapper();
    /** Registered functions, keyed by function name. */
    private final Map<String, ChatFunction> functions = new HashMap<>();

    public FunctionExecutor(List<ChatFunction> functions) {
        setFunctions(functions);
    }

    public FunctionExecutor(List<ChatFunction> functions, ObjectMapper objectMapper) {
        setFunctions(functions);
        setObjectMapper(objectMapper);
    }

    /** Like executeAndConvertToMessage, but reports any failure as an empty Optional. */
    public Optional<ChatMessage> executeAndConvertToMessageSafely(ChatFunctionCall call) {
        try {
            return Optional.ofNullable(executeAndConvertToMessage(call));
        } catch (Exception ignored) {
            // deliberately best-effort: callers treat "empty" as "no function result"
            return Optional.empty();
        }
    }

    /** Executes the call; on failure returns an error ChatMessage instead of throwing. */
    public ChatMessage executeAndConvertToMessageHandlingExceptions(ChatFunctionCall call) {
        try {
            return executeAndConvertToMessage(call);
        } catch (Exception exception) {
            exception.printStackTrace();
            return convertExceptionToMessage(exception);
        }
    }

    /** Wraps an exception as a FUNCTION-role message with a JSON error payload. */
    public ChatMessage convertExceptionToMessage(Exception exception) {
        String error = exception.getMessage() == null ? exception.toString() : exception.getMessage();
        return new ChatMessage(ChatMessageRole.FUNCTION.value(), "{\"error\": \"" + error + "\"}", "error");
    }

    /** Executes the call and wraps the JSON result as a FUNCTION-role message named after the call. */
    public ChatMessage executeAndConvertToMessage(ChatFunctionCall call) {
        return new ChatMessage(ChatMessageRole.FUNCTION.value(), executeAndConvertToJson(call).toPrettyString(), call.getName());
    }

    /**
     * Executes the call and normalizes the result into a JsonNode, whether the
     * executor returned a JSON node, a JSON-encoded string, or an arbitrary object.
     */
    public JsonNode executeAndConvertToJson(ChatFunctionCall call) {
        try {
            Object execution = execute(call);
            if (execution instanceof TextNode) {
                // a TextNode may itself contain encoded JSON; try to parse its text
                JsonNode objectNode = mapper.readTree(((TextNode) execution).asText());
                if (objectNode.isMissingNode())
                    return (JsonNode) execution;
                return objectNode;
            }
            if (execution instanceof ObjectNode) {
                return (JsonNode) execution;
            }
            if (execution instanceof String) {
                JsonNode objectNode = mapper.readTree((String) execution);
                if (objectNode.isMissingNode())
                    throw new RuntimeException("Parsing exception");
                return objectNode;
            }
            // arbitrary POJO: round-trip through the mapper
            return mapper.readValue(mapper.writeValueAsString(execution), JsonNode.class);
        } catch (Exception e) {
            throw new RuntimeException(e);
        }
    }

    /**
     * Looks up the function named in the call, deserializes its arguments into the
     * function's parameter class, and applies the executor.
     *
     * @throws RuntimeException wrapping any argument-parsing failure
     */
    @SuppressWarnings("unchecked")
    public <T> T execute(ChatFunctionCall call) {
        ChatFunction function = functions.get(call.getName());
        Object obj;
        try {
            JsonNode arguments = call.getArguments();
            // arguments may arrive as a JSON text node or as a structured node
            obj = mapper.readValue(arguments instanceof TextNode ? arguments.asText() : arguments.toPrettyString(), function.getParametersClass());
        } catch (JsonProcessingException e) {
            throw new RuntimeException(e);
        }
        return (T) function.getExecutor().apply(obj);
    }

    public List<ChatFunction> getFunctions() {
        return new ArrayList<>(functions.values());
    }

    /** Replaces the registry contents with the given functions, keyed by name. */
    public void setFunctions(List<ChatFunction> functions) {
        this.functions.clear();
        functions.forEach(f -> this.functions.put(f.getName(), f));
    }

    public void setObjectMapper(ObjectMapper objectMapper) {
        this.mapper = objectMapper;
    }
}
|
[
"com.theokanning.openai.completion.chat.ChatMessageRole.FUNCTION.value"
] |
[((1834, 1866), 'com.theokanning.openai.completion.chat.ChatMessageRole.FUNCTION.value'), ((2029, 2061), 'com.theokanning.openai.completion.chat.ChatMessageRole.FUNCTION.value')]
|
package com.swooboo.chatgame.game;
import com.theokanning.openai.ListSearchParameters;
import com.theokanning.openai.OpenAiResponse;
import com.theokanning.openai.assistants.Assistant;
import com.theokanning.openai.messages.Message;
import com.theokanning.openai.messages.MessageRequest;
import com.theokanning.openai.runs.Run;
import com.theokanning.openai.runs.RunCreateRequest;
import com.theokanning.openai.service.OpenAiService;
import com.theokanning.openai.threads.Thread;
import com.theokanning.openai.threads.ThreadRequest;
import org.springframework.scheduling.annotation.Async;
import org.springframework.stereotype.Component;
import java.util.Collections;
import java.util.List;
import java.util.stream.Collectors;
@Component
public class GameControllerImpl implements GameController {
private final Assistant gameAssistant;
private final String OPENAI_API_KEY = System.getenv("OPENAI_API_KEY");
private final OpenAiService openAiService = new OpenAiService(OPENAI_API_KEY);
private Thread currentThread;
public GameControllerImpl() {
String OPENAI_ASSISTANT_ID = System.getenv("OPENAI_ASSISTANT_ID");
gameAssistant = openAiService.retrieveAssistant(OPENAI_ASSISTANT_ID);
}
@Override
@Async
public void startNewGame() {
System.out.println("Starting game");
ThreadRequest threadRequest = ThreadRequest.builder().build();
currentThread = openAiService.createThread(threadRequest);
}
@Override
@Async
public void sayToGame(String input) {
System.out.println("Sending message: " + input);
GameMessage message = new GameMessage("user", input);
MessageRequest messageRequest = MessageRequest.builder()
.content(input)
.role(message.getRole())
.build();
openAiService.createMessage(currentThread.getId(), messageRequest);
RunCreateRequest runCreateRequest = RunCreateRequest.builder()
.assistantId(gameAssistant.getId())
.build();
Run currentRun = openAiService.createRun(currentThread.getId(), runCreateRequest);
System.out.println("Message sent to run " + currentRun.getId());
String currentRunStatus = openAiService.retrieveRun(currentThread.getId(), currentRun.getId()).getStatus();
System.out.println("Run status: " + currentRunStatus);
}
@Override
public List<GameMessage> getAllMessagesInDialog() {
OpenAiResponse<Message> listMessagesResponse = openAiService.listMessages(currentThread.getId());
List<GameMessage> messagesInReverseOrder = listMessagesResponse.data.stream()
.map(m -> new GameMessage(m.getRole(), m.getContent().get(0).getText().getValue()))
.collect(Collectors.toList());
Collections.reverse(messagesInReverseOrder);
return messagesInReverseOrder;
}
@Override
public String getCurrentGameStatus() {
ListSearchParameters searchParameters = ListSearchParameters.builder().build();
String statuses = openAiService.listRuns(currentThread.getId(), searchParameters).data.stream()
.map(Run::getStatus)
.collect(Collectors.joining(", "));
return "run statuses: [ " + statuses + " ]";
}
}
|
[
"com.theokanning.openai.threads.ThreadRequest.builder",
"com.theokanning.openai.runs.RunCreateRequest.builder",
"com.theokanning.openai.ListSearchParameters.builder",
"com.theokanning.openai.messages.MessageRequest.builder"
] |
[((1373, 1404), 'com.theokanning.openai.threads.ThreadRequest.builder'), ((1708, 1830), 'com.theokanning.openai.messages.MessageRequest.builder'), ((1708, 1805), 'com.theokanning.openai.messages.MessageRequest.builder'), ((1708, 1764), 'com.theokanning.openai.messages.MessageRequest.builder'), ((1953, 2056), 'com.theokanning.openai.runs.RunCreateRequest.builder'), ((1953, 2031), 'com.theokanning.openai.runs.RunCreateRequest.builder'), ((3025, 3063), 'com.theokanning.openai.ListSearchParameters.builder')]
|
package com.c20g.labs.agency.agent.planner;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import com.c20g.labs.agency.agent.Agent;
import com.c20g.labs.agency.chat.ConversationHistory;
import com.c20g.labs.agency.config.AgencyConfiguration;
import com.theokanning.openai.Usage;
import com.theokanning.openai.completion.chat.ChatCompletionRequest;
import com.theokanning.openai.completion.chat.ChatCompletionResult;
import com.theokanning.openai.completion.chat.ChatMessage;
import com.theokanning.openai.completion.chat.ChatMessageRole;
import com.theokanning.openai.completion.chat.ChatCompletionRequest.ChatCompletionRequestBuilder;
import com.theokanning.openai.service.OpenAiService;
@Service
public class PlannerAgent implements Agent {
private static final Logger LOGGER = LoggerFactory.getLogger(PlannerAgent.class);
@Autowired
private AgencyConfiguration agencyConfiguration;
@Autowired
private OpenAiService openAiService;
@Autowired
private ChatCompletionRequestBuilder requestBuilder;
@Override
public ConversationHistory run(String input, ConversationHistory parentConversation) throws Exception {
ConversationHistory conversation = new ConversationHistory();
String preludeString = """
You are an AI agent designed to interact with human users and respond to arbitrary requests
or conversation. You have at your disposal a set of agents that provide you with an
array of services. Your task is primarily to develop a plan to respond to the user's requests.
Think step-by-step and generate a plan -- each step should be carried out by
one agent. If your plan requires a step that none of your agents can complete, recommend and
describe in detail a new type of agent or operation that would be able to solve the step.
Your team of agents includes:
Name: InternetBot
Description: Can perform network and web operations
Operations: google_search, wikipedia_search, retrieve_url
Name: FilesystemBot
Description: Can perform filesystem operations, like saving and deleting files or retrieving file content
Operations: write_file, read_file, delete_file, open_file_with_executable
Name: ProgrammerBot
Description: Can perform tasks generally done by human software developer, which can often be used to solve general problems when combined
Operations: write_python_script, execute_python_script
Name: LLMBot
Description: Can interact with GPT models like GPT-3.5 or GPT-4, for general conversation or problem solving
Operations: send_message, send_message_with_history
You should return a response in JSON format, which will describe the plan and a list of "steps". The response should be in the following format:
{
"created_plan_successfully" : [true/false],
"steps" : [
{
"step_number" : [STEP NUMBER],
"agent" : "[AGENT_NAME]",
"operation" : "[OPERATION]",
"purpose" : "[OBJECTIVE IN INVOKING THIS OPERATION]"
}
]
}
Do not provide any additional text or commentary other than the plan. Do not answer anything by yourself without consulting your team of agents. Here's a few example interactions:
=== START EXAMPLE 1 ===
user> Should I bring an umbrella with me today when I go outside?
assistant>
{
"created_plan_successfully" : true,
"steps" : [
{
"step_number" : 1,
"agent" : "InternetBot",
"operation" : "google_search",
"purpose" : "I will use this operation to find weather near you"
},
{
"step_number" : 2,
"agent" : "LLMBot",
"operation" : "send_message",
"purpose" : "I will use this operation to ask whether the current weather retrieved from Step 1 requires an umbrella"
},
{
"step_number" : 3,
"agent" : null,
"operation" : null,
"purpose" : "I will return the response from Step 2 to the user"
}
]
}
=== EXAMPLE END ===
=== START EXAMPLE 2 ===
user> create a new project on my local filesystem at /home/bill/Scratch/test123 that contains the source code located at https://github.com/CounterpointConsulting/agency
assistant>
{
"created_plan_successfully" : false,
"failure_reason" : "I do not have an agent capable of cloning a git repository"
}
=== EXAMPLE END ===
""";
conversation.addMessage(new ChatMessage(ChatMessageRole.SYSTEM.value(), preludeString));
conversation.addMessage(new ChatMessage(ChatMessageRole.USER.value(), input));
ChatCompletionRequest chatCompletionRequest = requestBuilder
.messages(conversation.getAllMessages())
.build();
ChatCompletionResult chatCompletion = openAiService.createChatCompletion(chatCompletionRequest);
Usage usage = chatCompletion.getUsage();
LOGGER.debug("Used " + usage.getPromptTokens() + " tokens for prompt");
LOGGER.debug("Used " + usage.getCompletionTokens() + " tokens for response");
LOGGER.debug("Used " + usage.getTotalTokens() + " tokens total");
String aiResponse = chatCompletion.getChoices().get(0).getMessage().getContent();
LOGGER.debug("Planner Agent Response > " + aiResponse);
ChatMessage aiResponseMessage = new ChatMessage(ChatMessageRole.ASSISTANT.value(), aiResponse);
conversation.addMessage(aiResponseMessage);
return conversation;
}
}
|
[
"com.theokanning.openai.completion.chat.ChatMessageRole.SYSTEM.value",
"com.theokanning.openai.completion.chat.ChatMessageRole.USER.value",
"com.theokanning.openai.completion.chat.ChatMessageRole.ASSISTANT.value"
] |
[((4515, 4545), 'com.theokanning.openai.completion.chat.ChatMessageRole.SYSTEM.value'), ((4606, 4634), 'com.theokanning.openai.completion.chat.ChatMessageRole.USER.value'), ((5330, 5363), 'com.theokanning.openai.completion.chat.ChatMessageRole.ASSISTANT.value')]
|
package link.locutus.discord.gpt.imps;
import com.knuddels.jtokkit.api.ModelType;
import com.theokanning.openai.completion.chat.ChatCompletionChoice;
import com.theokanning.openai.completion.chat.ChatCompletionRequest;
import com.theokanning.openai.completion.chat.ChatCompletionResult;
import com.theokanning.openai.completion.chat.ChatMessage;
import com.theokanning.openai.service.OpenAiService;
import link.locutus.discord.gpt.GPTUtil;
import link.locutus.discord.util.StringMan;
import java.time.Duration;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Map;
import static com.google.common.base.Preconditions.checkArgument;
public class GPTText2Text implements IText2Text{
private final OpenAiService service;
private final ModelType model;
private OpenAiOptions defaultOptions = new OpenAiOptions();
public GPTText2Text(String openAiKey, ModelType model) {
this(new OpenAiService(openAiKey, Duration.ofSeconds(120)), model);
}
public GPTText2Text(OpenAiService service, ModelType model) {
this.service = service;
this.model = model;
}
@Override
public String getId() {
return model.name();
}
@Override
public String generate(Map<String, String> options, String text) {
OpenAiOptions optObj = options == null || options.isEmpty() ? defaultOptions : new OpenAiOptions().setOptions(this, options);
ChatCompletionRequest.ChatCompletionRequestBuilder builder = ChatCompletionRequest.builder()
.messages(List.of(new ChatMessage("user", text)))
.model(this.model.getName());
if (optObj.temperature != null) {
builder = builder.temperature(optObj.temperature);
}
if (optObj.stopSequences != null) {
builder = builder.stop(Arrays.asList(optObj.stopSequences));
}
if (optObj.topP != null) {
builder = builder.topP(optObj.topP);
}
if (optObj.presencePenalty != null) {
builder = builder.presencePenalty(optObj.presencePenalty);
}
if (optObj.frequencyPenalty != null) {
builder = builder.frequencyPenalty(optObj.frequencyPenalty);
}
if (optObj.maxTokens != null) {
builder.maxTokens(optObj.maxTokens);
}
ChatCompletionRequest completionRequest = builder.build();
ChatCompletionResult completion = service.createChatCompletion(completionRequest);
List<String> results = new ArrayList<>();
for (ChatCompletionChoice choice : completion.getChoices()) {
System.out.println("Reason: " + choice.getFinishReason());
System.out.println("name: " + choice.getMessage().getName());
System.out.println("role: " + choice.getMessage().getRole());
System.out.println("text: " + choice.getMessage().getContent());
results.add(choice.getMessage().getContent());
}
return String.join("\n", results);
}
@Override
public Map<String, String> getOptions() {
return Map.of(
"temperature", "0.7",
"stop_sequences", "\n\n",
"top_p", "1",
"presence_penalty", "0",
"frequency_penalty", "0",
"max_tokens", "2000"
);
}
private static class OpenAiOptions {
public Double temperature = null;
public String[] stopSequences = null;
public Double topP = null;
public Double presencePenalty = null;
public Double frequencyPenalty = null;
public Integer maxTokens = null;
public OpenAiOptions setOptions(GPTText2Text parent, Map<String, String> options) {
// reset options
temperature = null;
stopSequences = null;
topP = null;
presencePenalty = null;
frequencyPenalty = null;
maxTokens = null;
if (options != null) {
for (Map.Entry<String, String> entry : options.entrySet()) {
switch (entry.getKey().toLowerCase()) {
case "temperature":
temperature = Double.parseDouble(entry.getValue());
checkArgument(temperature >= 0 && temperature <= 2, "Temperature must be between 0 and 2");
break;
case "stop_sequences":
stopSequences = entry.getValue().replace("\\n", "\n").split(",");
checkArgument(stopSequences.length > 0 && stopSequences.length <= 4, "stop_sequences must be between 1 and 4 sequences, separated by commas");
break;
case "top_p":
topP = Double.parseDouble(entry.getValue());
checkArgument(topP >= 0 && topP <= 1, "top_p must be between 0 and 1");
break;
case "presence_penalty":
presencePenalty = Double.parseDouble(entry.getValue());
checkArgument(presencePenalty >= -2 && presencePenalty <= 2, "presence_penalty must be between -2 and 2");
break;
case "frequency_penalty":
frequencyPenalty = Double.parseDouble(entry.getValue());
checkArgument(frequencyPenalty >= -2 && frequencyPenalty <= 2, "frequency_penalty must be between -2 and 2");
break;
case "max_tokens":
maxTokens = Integer.parseInt(entry.getValue());
checkArgument(maxTokens >= 1 && maxTokens <= parent.getSizeCap(), "max_tokens must be between 1 and " + parent.getSizeCap());
break;
default:
throw new IllegalArgumentException("Unknown option: " + entry.getKey() + ". Valid options are: " + StringMan.getString(parent.getOptions()));
}
}
}
return this;
}
}
@Override
public int getSize(String text) {
return GPTUtil.getTokens(text, model);
}
@Override
public int getSizeCap() {
return model.getMaxContextLength();
}
public ModelType getModel() {
return model;
}
}
|
[
"com.theokanning.openai.completion.chat.ChatCompletionRequest.builder"
] |
[((1513, 1655), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((1513, 1610), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder')]
|
package com.namankhurpia.imagegen.utils;
import android.os.AsyncTask;
import com.theokanning.openai.image.CreateImageRequest;
import com.theokanning.openai.service.OpenAiService;
public class RetrieveTask extends AsyncTask<String,Void,String> {
@Override
public String doInBackground(String... strings) {
OpenAiService service = new OpenAiService(strings[1]);
System.out.println("\nCreating Image...");
CreateImageRequest request = CreateImageRequest.builder()
.prompt(strings[0])
.build();
System.out.println("\nImage is located at:");
return service.createImage(request).getData().get(0).getUrl();
}
}
|
[
"com.theokanning.openai.image.CreateImageRequest.builder"
] |
[((469, 558), 'com.theokanning.openai.image.CreateImageRequest.builder'), ((469, 533), 'com.theokanning.openai.image.CreateImageRequest.builder')]
|
package com.vcque.prompto;
import com.intellij.notification.Notification;
import com.intellij.notification.NotificationType;
import com.intellij.notification.Notifications;
import com.intellij.openapi.ide.CopyPasteManager;
import com.intellij.openapi.progress.ProgressIndicator;
import com.intellij.openapi.progress.ProgressManager;
import com.intellij.openapi.progress.Task;
import com.intellij.openapi.ui.DialogWrapper;
import com.theokanning.openai.completion.chat.ChatCompletionRequest;
import com.theokanning.openai.completion.chat.ChatMessage;
import com.theokanning.openai.completion.chat.ChatMessageRole;
import com.theokanning.openai.service.OpenAiService;
import com.vcque.prompto.contexts.PromptoContext;
import com.vcque.prompto.exceptions.MissingTokenException;
import com.vcque.prompto.outputs.PromptoOutput;
import com.vcque.prompto.pipelines.PromptoPipeline;
import com.vcque.prompto.settings.PromptoSettingsState;
import com.vcque.prompto.ui.PromptoQueryDialog;
import org.jetbrains.annotations.NotNull;
import java.awt.datatransfer.StringSelection;
import java.time.Duration;
import java.util.ArrayList;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.stream.Collectors;
public class PromptoManager {
private static final PromptoManager INSTANCE = new PromptoManager();
private static final double TEMPERATURE = 0.3;
public static PromptoManager instance() {
return INSTANCE;
}
private OpenAiService openAI = null;
private String currentToken = null;
public void updateToken() {
var token = PromptoSettingsState.getInstance().apiToken;
if (token == null || token.isEmpty()) {
throw new MissingTokenException();
}
if (!token.equals(currentToken)) {
openAI = new OpenAiService(token, Duration.ofMinutes(2));
currentToken = token;
}
}
public <T> void executePipeline(PromptoPipeline<T> pipeline, PromptoPipeline.Scope scope) {
var maxToken = 3500; // To configure, this is ~ the number of token allowed for the chatGPT API (need also room for the response)
var contextsByRetrievers = pipeline.getRetrievers().stream()
.filter(r -> r.getRetriever().isAvailable(scope.project(), scope.editor(), scope.element()))
.collect(Collectors.toMap(
x -> x,
r -> r.getRetriever().retrieveContexts(scope.project(), scope.editor(), scope.element()),
(a, b) -> a,
LinkedHashMap::new
));
var noApiKey = PromptoSettingsState.getInstance().apiToken == null || PromptoSettingsState.getInstance().apiToken.isBlank();
var dialog = new PromptoQueryDialog(pipeline, contextsByRetrievers, maxToken, noApiKey);
dialog.show();
var exitCode = dialog.getExitCode();
if (exitCode == DialogWrapper.CANCEL_EXIT_CODE) {
return;
}
var contexts = dialog.getSelectedContexts();
var userInput = dialog.getUserInput();
var outputParams = new PromptoOutput.Params(userInput, contexts, scope);
var chatMessages = new ArrayList<ChatMessage>();
chatMessages.add(Prompts.codingAssistant());
chatMessages.add(new ChatMessage(ChatMessageRole.SYSTEM.value(), PromptoSettingsState.getInstance().projectContext));
chatMessages.addAll(
contexts.stream()
.map(Prompts::promptoContext)
.toList()
);
chatMessages.addAll(pipeline.getOutput().buildOutputFormattingMessages(outputParams));
if (exitCode == DialogWrapper.OK_EXIT_CODE) {
updateToken();
ProgressManager.getInstance().run(new Task.Backgroundable(scope.project(), "Prompto " + pipeline.getName(), true) {
@Override
public void run(@NotNull ProgressIndicator indicator) {
try {
callLLM(pipeline, contexts, scope, chatMessages, userInput);
} catch (MissingTokenException e) {
var notification = new Notification(
"Prompto",
"Missing OpenAI key",
"Add your open-ai key to Prompto settings to enable this feature.",
NotificationType.ERROR);
Notifications.Bus.notify(notification);
}
}
});
} else if (exitCode == PromptoQueryDialog.CLIPBOARD_EXIT_CODE){
var prompt = chatMessages.stream()
.map(ChatMessage::getContent)
.collect(Collectors.joining("\n"));
var transferable = new StringSelection(prompt);
CopyPasteManager.getInstance().setContents(transferable);
var notification = new Notification(
"Prompto",
"Prompt copied",
"Your prompt and its context has been copied to the clipboard.",
NotificationType.INFORMATION);
Notifications.Bus.notify(notification, scope.project());
}
}
private <T> void callLLM(PromptoPipeline<T> pipeline, List<PromptoContext> contexts, PromptoPipeline.Scope scope, ArrayList<ChatMessage> chatMessages, String userInput) {
// Send messages to OpenAI
var result = openAI.createChatCompletion(
ChatCompletionRequest.builder()
.temperature(TEMPERATURE)
.model(PromptoSettingsState.getInstance().languageModel)
.messages(chatMessages)
.stop(pipeline.getStopwords())
.stream(false)
.build()
);
// Retrieve the LLM response message
var response = result.getChoices().get(0).getMessage().getContent();
var outputParams = new PromptoOutput.Params(userInput, contexts, scope);
var extractedResult = pipeline.getOutput().extractOutput(response, outputParams);
// Execute the action
pipeline.getExecution().execute(extractedResult, scope, contexts);
}
}
|
[
"com.theokanning.openai.completion.chat.ChatMessageRole.SYSTEM.value",
"com.theokanning.openai.completion.chat.ChatCompletionRequest.builder"
] |
[((2680, 2733), 'com.vcque.prompto.settings.PromptoSettingsState.getInstance'), ((3323, 3353), 'com.theokanning.openai.completion.chat.ChatMessageRole.SYSTEM.value'), ((3759, 4573), 'com.intellij.openapi.progress.ProgressManager.getInstance'), ((4479, 4517), 'com.intellij.notification.Notifications.Bus.notify'), ((4873, 4929), 'com.intellij.openapi.ide.CopyPasteManager.getInstance'), ((5197, 5252), 'com.intellij.notification.Notifications.Bus.notify'), ((5547, 5884), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((5547, 5851), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((5547, 5812), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((5547, 5757), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((5547, 5709), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((5547, 5628), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder')]
|
package wood.util;
// import com.theokanning.openai.OpenAiService;
import com.theokanning.openai.completion.chat.ChatCompletionChoice;
import com.theokanning.openai.completion.chat.ChatCompletionRequest;
import com.theokanning.openai.completion.chat.ChatMessage;
import com.theokanning.openai.service.OpenAiService;
import com.theokanning.openai.completion.CompletionChoice;
import com.theokanning.openai.completion.CompletionRequest;
import lombok.Getter;
import lombok.extern.slf4j.Slf4j;
import retrofit2.HttpException;
import wood.message.DiscordMessage;
import wood.message.MessageHistory;
import java.time.Duration;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Optional;
import java.util.stream.Collectors;
/** A wrapper class for com.theokanning.openai. Requires GPTRequest.apiKey to be set. */
@Slf4j
public class GPTRequest {
// ----------- static fields -----------
/** The OpenAI API key to use for all requests. Can set using the testAndSetApiKey method. */
public static String apiKey = "";
/** Language models */
public static final String gptTurbo = "gpt-3.5-turbo", gpt4 = "gpt-4";
public static final String[] models = {gptTurbo, gpt4};
/** counter for how many tokens have been used by each language model (irrespective of Base series vs Instruct) */
private static int gptTurboTokenCounter = 0, gpt4PromptTokenCounter = 0, gpt4CompletionTokenCounter = 0;
// ----------- instance fields -----------
private final OpenAiService service;
private final ChatCompletionRequest chatCompletionRequest;
private final ChatCompletionRequest.ChatCompletionRequestBuilder chatCompletionRequestBuilder;
//private final CompletionRequest completionRequest;
//private final CompletionRequest.CompletionRequestBuilder completionRequestBuilder;
/** List of all chat messages used in API request. */
@Getter private final MessageHistory messageHistory = new MessageHistory();
/** Language Model to use for this API request */
@Getter private final String model;
/** Maximum number of tokens that will be generated */
@Getter private final int maxTokens;
/** (default .7) a value 0-1 with 1 being very creative, 0 being very factual/deterministic */
@Getter private final double temperature;
/** (default 1) between 0-1 where 1.0 means "use all tokens in the vocabulary"
* while 0.5 means "use only the 50% most common tokens" */
@Getter private final double topP;
/** (default 0) 0-1, lowers the chances of a word being selected again the more times that word has already been used */
@Getter private final double frequencyPenalty;
/** (default 0) 0-1, lowers the chances of topic repetition */
@Getter private final double presencePenalty;
/** (default 1), queries GPT-3 this many times, then selects the 'best' generation to return */
@Getter private final int bestOf;
/** The Strings that GPT-3 will stop generating after (can have 4 stop sequences max) */
@Getter private final List<String> stopSequences;
/** The latest generated completion */
@Getter private ChatMessage latestCompletion;
public GPTRequest(GPTRequestBuilder builder) {
for(DiscordMessage dm : builder.messageHistory.getDiscordMessages())
this.messageHistory.add(dm);
this.model = builder.model;
this.maxTokens = builder.maxTokens;
this.temperature = builder.temperature;
this.topP = builder.topP;
this.frequencyPenalty = builder.frequencyPenalty;
this.presencePenalty = builder.presencePenalty;
this.bestOf = builder.bestOf;
this.stopSequences = builder.stopSequences;
service = new OpenAiService(apiKey, Duration.ofSeconds(60));
// Roles: user, assistant, system
// system prompt: "You are..."
chatCompletionRequestBuilder = ChatCompletionRequest.builder()
.messages(messageHistory.getChatMessages())
.model(model)
.maxTokens(maxTokens)
.temperature(temperature)
.topP(topP)
.frequencyPenalty(frequencyPenalty)
.presencePenalty(presencePenalty);
if(stopSequences != null)
chatCompletionRequestBuilder.stop(stopSequences);
chatCompletionRequest = chatCompletionRequestBuilder.build();
/*
completionRequestBuilder = CompletionRequest.builder()
.prompt(prompt)
.model(model);
completionRequestBuilder.maxTokens(maxTokens);
completionRequestBuilder.temperature(temperature);
completionRequestBuilder.topP(topP);
completionRequestBuilder.frequencyPenalty(frequencyPenalty);
completionRequestBuilder.presencePenalty(presencePenalty);
completionRequestBuilder.echo(echoPrompt);
if(stopSequences != null)
completionRequestBuilder.stop(stopSequences);
completionRequest = completionRequestBuilder.build();
*/
}
// TODO remove print statments & stuff
/**
* Tests the API key, and sets it if it's valid
* API key validity is tested by a 1 token API request to the Ada model.
* @param apiKey An OpenAI API key
* @return Whether the API key is valid
*/
public static boolean testAndSetApiKey(String apiKey) {
String originalAPIKey = GPTRequest.apiKey;
try {
GPTRequest.apiKey = apiKey;
new GPTRequestBuilder(gptTurbo, 1, DiscordMessage.EMPTY_MSG).build().request(
true, Optional.empty(), DiscordMessage.EMPTY_MSG);
System.out.println("true");
return true;
}catch(Exception e) {
e.printStackTrace();
GPTRequest.apiKey = originalAPIKey;
System.out.println("false");
return false;
}
}
//TODO update request(boolean endAtLastPunctuationMark), and complete javadoc vv
/*
* Makes an OpenAI API request.
* @param message Message to append to {@link #messageHistory} before making the API request
* @param username A username associated with the ChatMessage
* @param endAtLastPunctuationMark Whether the completion should be cut off after the last punctuation mark
* @return list of all messages from prompt and completion
public MessageHistory request(ChatMessage message, Optional<String> username, boolean endAtLastPunctuationMark) {
if(message != null && message.getContent().length() != 0)
messageHistory.add(message, username);
chatCompletionRequest.setMessages(messageHistory.getMessagesIncludingUsername());
List<ChatCompletionChoice> outputList = null;
try {
outputList = service.createChatCompletion(chatCompletionRequest).getChoices();
latestCompletion = outputList.get(0).getMessage();
GPTUtil.removeNamePrefix(latestCompletion);
String completion = latestCompletion.getContent();
if(endAtLastPunctuationMark) {
// get the index of the last punctuation mark inside the completion
Optional<Integer> lastPunctuationIndex = StringUtil.lastIndexOf(completion, "[.!?]", 0);
if(lastPunctuationIndex.isPresent()) {
latestCompletion.setContent(completion.substring(0, lastPunctuationIndex.get() + 1));
}
}
chatCompletionRequest.getMessages().add(latestCompletion);
messageHistory.add(latestCompletion, Optional.empty());
} catch(HttpException e) {
System.out.println("HTTP error message: " + e.getMessage());
System.out.println("HTTP message: " + e.message());
}
return messageHistory;
}*/
//TODO remove
public void requestTest() {
List<ChatCompletionChoice> outputList = null;
DiscordMessage dm = new DiscordMessage(Optional.of("rand"),
new ChatMessage("system", "you are a helpful data science tutor meeting with a student."));
List<ChatMessage> cms = new ArrayList<ChatMessage>();
cms.add((ChatMessage)dm);
chatCompletionRequest.setMessages(cms);
try {
outputList = service.createChatCompletion(chatCompletionRequest).getChoices();
chatCompletionRequest.getMessages().add(outputList.get(0).getMessage());
} catch(HttpException e) {
System.out.println("HTTP error message: " + e.getMessage());
System.out.println("HTTP message: " + e.message());
}
for(ChatCompletionChoice output : outputList)
System.out.println(output.getMessage().getRole() + ": " + output.getMessage().getContent() +
"\n#############################");
for(ChatMessage message : chatCompletionRequest.getMessages()) {
System.out.println(message.getRole() + ": " + message.getContent() +
"\n~~~~~~~~~~~~~~~~~~~~~~~~~~~~~");
}
System.out.println("*******************************");
chatCompletionRequest.getMessages().add(new ChatMessage(GPTUtil.roleUser, "Thanks, next can you " +
"help me understand how many hidden layers a given neural network should have?"));
try {
outputList = service.createChatCompletion(chatCompletionRequest).getChoices();
chatCompletionRequest.getMessages().add(outputList.get(0).getMessage());
} catch(HttpException e) {
System.out.println("HTTP error message: " + e.getMessage());
System.out.println("HTTP message: " + e.message());
}
for(ChatCompletionChoice output : outputList)
System.out.println(output.getMessage().getRole() + ": " + output.getMessage().getContent() +
"\n#############################");
for(ChatMessage message : chatCompletionRequest.getMessages()) {
System.out.println(message.getRole() + ": " + message.getContent() +
"\n~~~~~~~~~~~~~~~~~~~~~~~~~~~~~");
}
}
/**
* Makes an OpenAI API request.
* @param endAtLastPunctuationMark Whether the completion should be cut off after the last punctuation mark
* @param botsUsername The username to be associated with the LLM generated response
* @param discordMessages Messages to append to {@link #messageHistory} before making the API request
* @return list of all messages from prompt and completion
*/
public MessageHistory request(boolean endAtLastPunctuationMark, Optional<String> botsUsername,
                              DiscordMessage... discordMessages) {
    // Fold the new messages into the running history, then sync the pending API
    // request with the full conversation so the model sees all prior context.
    for(DiscordMessage message : discordMessages)
        messageHistory.add(message);
    chatCompletionRequest.setMessages(messageHistory.getChatMessages());
    List<ChatCompletionChoice> outputList = null;
    try {
        // TODO remove
        /*
        List<ChatMessage> cmList = chatCompletionRequest.getMessages();
        for(ChatMessage cm : cmList) {
            System.out.println("~ChatMessage: Role- " + cm.getRole() + " | Content- " + cm.getContent() +
                    " | name- " + cm.getName() + " | function- " + cm.getFunctionCall());
        }
        chatCompletionRequest.setMessages(chatCompletionRequest.getMessages().subList(0, 1));
        */
        // Only the first choice is used; the request presumably asks for a single
        // completion (n=1) — TODO confirm against the builder configuration.
        outputList = service.createChatCompletion(chatCompletionRequest).getChoices();
        latestCompletion = outputList.get(0).getMessage();
        GPTUtil.removeNamePrefix(latestCompletion);
        String completion = latestCompletion.getContent();
        if(endAtLastPunctuationMark) {
            // get the index of the last punctuation mark inside the completion;
            // truncating there avoids ending on a cut-off sentence fragment.
            Optional<Integer> lastPunctuationIndex = StringUtil.lastIndexOf(completion, "[.!?]", 0);
            if(lastPunctuationIndex.isPresent()) {
                latestCompletion.setContent(completion.substring(0, lastPunctuationIndex.get() + 1));
            }
        }
        // Record the model's reply both in the raw request message list and in the
        // Discord-facing history, attributed to the bot's username.
        chatCompletionRequest.getMessages().add(latestCompletion);
        messageHistory.add(new DiscordMessage(botsUsername, latestCompletion));
    } catch(HttpException e) {
        // NOTE(review): the API failure is printed to stdout and swallowed; the
        // caller receives the history WITHOUT a new completion in that case.
        System.out.println("HTTP error message: " + e.getMessage());
        System.out.println("HTTP message: " + e.message());
    }
    return messageHistory;
}
/** Updates {@link #latestCompletion} and {@link #messageHistory}
* @param content
*/
public void setLatestCompletion(String content) {
    // Overwrite the stored completion with caller-supplied content.
    latestCompletion.setContent(content);
    // Mirror the edit onto the newest Discord message in the history.
    List<DiscordMessage> history = messageHistory.getDiscordMessages();
    DiscordMessage newest = history.get(history.size() - 1);
    newest.setContent(latestCompletion.getContent());
    // Keep the pending API request in sync with the updated history.
    chatCompletionRequest.setMessages(messageHistory.getChatMessages());
}
public static class GPTRequestBuilder {
    /** Language model identifier sent with the API request. */
    @Getter private String model;
    /** Conversation history included in the API request. */
    @Getter private MessageHistory messageHistory;
    /** Upper bound on the number of tokens the model may generate. */
    @Getter private int maxTokens;
    /** Sampling temperature, 0-1; higher is more creative, lower more factual/deterministic (default .7). */
    @Getter private double temperature;
    /** Nucleus-sampling cutoff, 0-1; 1.0 means "use all tokens in the vocabulary" (default 1). */
    @Getter private double topP;
    /** 0-1; lowers a token's chance of reselection the more it has already been used (default 0). */
    @Getter private double frequencyPenalty;
    /** 0-1; lowers the chances of topic repetition (default 0). */
    @Getter private double presencePenalty;
    /** Number of candidate generations requested; the 'best' one is returned (default 1). */
    @Getter private int bestOf;
    /** Sequences that terminate generation; at most 4 are allowed. */
    @Getter private List<String> stopSequences;
    /** Whether LLM-generated messages are appended to {@link GPTRequest#messageHistory} (default true). */
    @Getter private boolean appendGeneratedMessages;

    /**
     * Begins building an API request for the given language model.
     *
     * @param model     language model to use (e.g. GPTRequest.gptTurbo, GPTRequest.gpt4)
     * @param maxTokens maximum number of tokens that will be generated
     * @param messages  chat messages seeding the request's history
     */
    public GPTRequestBuilder(String model, int maxTokens, DiscordMessage... messages) {
        this.model = model;
        this.maxTokens = maxTokens;
        this.messageHistory = new MessageHistory();
        for (DiscordMessage discordMessage : messages) {
            this.messageHistory.add(discordMessage);
        }
        // Defaults; each can be overridden through the fluent setters below.
        this.temperature = .7;
        this.topP = 1;
        this.frequencyPenalty = 0;
        this.presencePenalty = 0;
        this.bestOf = 1;
        this.appendGeneratedMessages = true;
    }

    /** @return a new {@link GPTRequest} configured from this builder */
    public GPTRequest build() {
        return new GPTRequest(this);
    }

    /** Replaces the request's history with the given messages.
     * @param messages chat messages used in the API request
     * @return this builder, for chaining */
    public GPTRequestBuilder messages(DiscordMessage... messages) {
        MessageHistory fresh = new MessageHistory();
        for (DiscordMessage message : messages) {
            fresh.add(message);
        }
        this.messageHistory = fresh;
        return this;
    }

    /** Replaces the history and sets the generation limit in one call.
     * @param maxTokens maximum number of tokens that will be generated
     * @param messages  chat messages used in the API request
     * @return this builder, for chaining */
    public GPTRequestBuilder promptAndTokens(int maxTokens, DiscordMessage... messages) {
        this.maxTokens = maxTokens;
        return messages(messages);
    }

    /** @param model language model to use for this API request
     * @return this builder, for chaining */
    public GPTRequestBuilder model(String model) {
        this.model = model;
        return this;
    }

    /** @param maxTokens maximum number of tokens that will be generated
     * @return this builder, for chaining */
    public GPTRequestBuilder maxTokens(int maxTokens) {
        this.maxTokens = maxTokens;
        return this;
    }

    /** @param temperature (default .7) 0-1; higher is more creative, lower more deterministic
     * @return this builder, for chaining */
    public GPTRequestBuilder temperature(double temperature) {
        this.temperature = temperature;
        return this;
    }

    /** @param topP (default 1) 0-1 nucleus-sampling cutoff; 1.0 uses the whole vocabulary
     * @return this builder, for chaining */
    public GPTRequestBuilder topP(double topP) {
        this.topP = topP;
        return this;
    }

    /** @param frequencyPenalty (default 0) 0-1; penalizes tokens by how often they already appeared
     * @return this builder, for chaining */
    public GPTRequestBuilder frequencyPenalty(double frequencyPenalty) {
        this.frequencyPenalty = frequencyPenalty;
        return this;
    }

    /** @param presencePenalty (default 0) 0-1; lowers the chances of topic repetition
     * @return this builder, for chaining */
    public GPTRequestBuilder presencePenalty(double presencePenalty) {
        this.presencePenalty = presencePenalty;
        return this;
    }

    /** @param bestOf (default 1) number of candidate generations; the 'best' is returned
     * @return this builder, for chaining */
    public GPTRequestBuilder bestOf(int bestOf) {
        this.bestOf = bestOf;
        return this;
    }

    /**
     * Sets the stop sequences — the strings after which generation halts.
     *
     * @param stopSequences at most 4 stop sequences
     * @return this builder, for chaining
     * @throws IllegalArgumentException if more than 4 sequences are supplied
     */
    public GPTRequestBuilder stopSequences(List<String> stopSequences) {
        if (stopSequences.size() > 4) {
            throw new IllegalArgumentException("Can only have 4 stop sequences max");
        }
        this.stopSequences = stopSequences;
        return this;
    }

    /** @param appendGeneratedMessages (default true) whether LLM-generated messages are appended to
     *                                 {@link GPTRequest#messageHistory}
     * @return this builder, for chaining */
    public GPTRequestBuilder appendGeneratedMessages(boolean appendGeneratedMessages) {
        this.appendGeneratedMessages = appendGeneratedMessages;
        return this;
    }
}
/**
* For any model except GPT-4 (in which case, use logGPT4TokenUsage() )
* Logs the token usage every time request() is called.
* @param numTokens The number of tokens used in this API request.
* @throws RuntimeException if GPT-4 is the current model when calling this method (logGPT4TokenUsage() should be used instead)
*/
/**
 * Accounts tokens for any model except GPT-4 and logs the running totals.
 * GPT-4 traffic must go through logGPT4TokenUsage() instead, because its prompt
 * and completion tokens are tracked separately.
 *
 * @param numTokens number of tokens used in this API request
 * @throws RuntimeException if the current model is GPT-4
 */
private void logTokenUsage(int numTokens) {
    // String equality chain instead of a switch; model == null still throws NPE
    // exactly as the original switch did.
    if (model.equals(gptTurbo)) {
        gptTurboTokenCounter += numTokens;
    } else if (model.equals(gpt4)) {
        throw new RuntimeException("GPTRequest.logTokenUsage() should not be used with" +
                " GPT-4, logGPT4TokenUsage() should be used instead.");
    }
    log.info(getFormattedTokenUsage());
}
/**
* Exclusively for GPT-4 (since prompt and completion tokens need to be separately logged)
* Logs the token usage every time request() is called.
* @param numPromptTokens The number of prompt tokens used in this API request.
* @param numCompletionTokens The number of completion tokens used in this API request.
* @throws RuntimeException If this method is called using a model other than GPT-4 (in which case use logTokenUsage() instead)
*/
/**
 * Accounts GPT-4 token usage and logs the running totals. Prompt and completion
 * tokens are tracked in separate counters.
 *
 * @param numPromptTokens     prompt tokens used in this API request
 * @param numCompletionTokens completion tokens used in this API request
 * @throws RuntimeException if the current model is not GPT-4
 */
private void logGPT4TokenUsage(int numPromptTokens, int numCompletionTokens) {
    // Guard clause: this accounting path is GPT-4 only.
    if (!model.equals(GPTRequest.gpt4)) {
        throw new RuntimeException("GPTRequest.logGPT4TokenUsage() should only be used with GPT-4, use logTokenUsage instead.");
    }
    // The two additions are independent, so their order does not matter.
    gpt4CompletionTokenCounter += numCompletionTokens;
    gpt4PromptTokenCounter += numPromptTokens;
    log.info(getFormattedTokenUsage());
}
/** @return a String containing all token usage data */
/**
 * Builds a human-readable summary of all token-usage counters. Counters that
 * are zero produce no line; each non-zero line pluralizes "token" when the
 * count exceeds one.
 *
 * <p>Bug fix: the previous format string declared six {@code %s} specifiers but
 * only three arguments were supplied, so every call threw
 * {@link java.util.MissingFormatArgumentException}. The format now declares
 * exactly three {@code %s}, matching the three arguments.
 *
 * @return a String containing all token usage data
 */
private String getFormattedTokenUsage() {
    return String.format("Total tokens used:%n%s%s%s-----------------------------------------%n",
            gptTurboTokenCounter > 0 ? "GPT 3.5: " + gptTurboTokenCounter + " token" + (gptTurboTokenCounter > 1 ? "s\n" : "\n") : "",
            gpt4PromptTokenCounter > 0 ? "GPT-4 prompts: " + gpt4PromptTokenCounter + " token" + (gpt4PromptTokenCounter > 1 ? "s\n" : "\n") : "",
            gpt4CompletionTokenCounter > 0 ? "GPT-4 completions: " + gpt4CompletionTokenCounter + " token" + (gpt4CompletionTokenCounter > 1 ? "s\n" : "\n") : "");
}
}
|
[
"com.theokanning.openai.completion.chat.ChatCompletionRequest.builder"
] |
[((3933, 4264), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((3933, 4214), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((3933, 4162), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((3933, 4134), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((3933, 4092), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((3933, 4054), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((3933, 4024), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder')]
|
package com.siwonkh.cleangpt_v1.controller;
import com.siwonkh.cleangpt_v1.dto.SearchSessionDto;
import com.siwonkh.cleangpt_v1.entity.SearchSession;
import com.siwonkh.cleangpt_v1.model.CreatorVideo;
import com.siwonkh.cleangpt_v1.model.VideoComment;
import com.siwonkh.cleangpt_v1.model.CreatorProfile;
import com.siwonkh.cleangpt_v1.repository.SearchSessionRepository;
import com.siwonkh.cleangpt_v1.service.SearchService;
import com.siwonkh.cleangpt_v1.util.SearchCommentThread;
import com.siwonkh.cleangpt_v1.util.SearchCreatorProfile;
import com.siwonkh.cleangpt_v1.util.SearchCreatorVideo;
import com.theokanning.openai.completion.chat.ChatCompletionRequest;
import com.theokanning.openai.completion.chat.ChatMessage;
import com.theokanning.openai.completion.chat.ChatMessageRole;
import com.theokanning.openai.service.OpenAiService;
import org.json.JSONArray;
import org.json.JSONObject;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Controller;
import org.springframework.ui.Model;
import org.springframework.web.bind.annotation.CookieValue;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.RequestParam;
import javax.servlet.http.Cookie;
import javax.servlet.http.HttpServletResponse;
import java.util.ArrayList;
import java.util.List;
@Controller
public class SearchController {
    // YouTube Data API key, injected from application properties.
    @Value("${youtubeapi.key}")
    private String APIKey;
    // OpenAI API key, injected from application properties.
    @Value("${openai.key}")
    private String OpenAIKey;
    @Autowired
    private SearchService searchService;
    @Autowired
    private SearchSessionRepository searchSessionRepository;

    /**
     * Searches YouTube for a creator by name and renders the first matching
     * channel's profile.
     */
    @GetMapping("search/creator")
    public String searchCreator(Model model, @RequestParam("creator") String creator) throws Exception {
        SearchCreatorProfile searchCreatorProfile = new SearchCreatorProfile(APIKey);
        searchCreatorProfile.setChannel(creator);
        JSONObject channels = searchCreatorProfile.getApiResponse();
        JSONArray jsonArray = channels.getJSONArray("items");
        // NOTE(review): the five loops below each re-walk the same "items" array to
        // pull one snippet field; a single pass building CreatorProfile directly
        // would be equivalent — left as-is to preserve behavior.
        List<String> searchedTitles = new ArrayList<>();
        for (int i = 0; i < jsonArray.length(); i++) {
            JSONObject jsonObject = jsonArray.getJSONObject(i).getJSONObject("snippet");
            searchedTitles.add(jsonObject.getString("title"));
        }
        List<String> searchedDesc = new ArrayList<>();
        for (int i = 0; i < jsonArray.length(); i++) {
            JSONObject jsonObject = jsonArray.getJSONObject(i).getJSONObject("snippet");
            searchedDesc.add(jsonObject.getString("description"));
        }
        List<String> searchedImages = new ArrayList<>();
        for (int i = 0; i < jsonArray.length(); i++) {
            JSONObject jsonObject = jsonArray.getJSONObject(i).getJSONObject("snippet").getJSONObject("thumbnails").getJSONObject("default");
            searchedImages.add(jsonObject.getString("url"));
        }
        List<String> searchedChannelIds = new ArrayList<>();
        for (int i = 0; i < jsonArray.length(); i++) {
            JSONObject jsonObject = jsonArray.getJSONObject(i).getJSONObject("snippet");
            searchedChannelIds.add(jsonObject.getString("channelId"));
        }
        List<String> searchedPublishedAt = new ArrayList<>();
        for (int i = 0; i < jsonArray.length(); i++) {
            JSONObject jsonObject = jsonArray.getJSONObject(i).getJSONObject("snippet");
            searchedPublishedAt.add(jsonObject.getString("publishedAt"));
        }
        List<CreatorProfile> creatorProfiles = new ArrayList<>();
        for (int i = 0; i < searchedTitles.size(); i++) {
            String title = searchedTitles.get(i);
            String desc = searchedDesc.get(i);
            String url = searchedImages.get(i);
            String channelId = searchedChannelIds.get(i);
            String publishedAt = searchedPublishedAt.get(i);
            // publishedAt is an ISO timestamp; substring(0, 10) keeps the date part.
            CreatorProfile creatorProfile = new CreatorProfile(title, desc, url, channelId, publishedAt.substring(0, 10));
            creatorProfiles.add(creatorProfile);
        }
        // NOTE(review): get(0) throws IndexOutOfBoundsException when the search
        // returns no channels — confirm whether an empty-result page is needed.
        model.addAttribute("profile", creatorProfiles.get(0));
        return "searchResult";
    }

    /**
     * Loads a video's top-level comments, optionally filters them through GPT
     * using the session's stored filter list, and renders the comment page.
     */
    @GetMapping("search/comment")
    public String searchComments(Model model, @CookieValue("TOKEN") String token, @RequestParam("video") String video, @RequestParam("title") String title, @RequestParam("thumbnail") String thumbnail, @RequestParam(value = "filter", required = false, defaultValue = "") String filterParams, @RequestParam(value = "cfilter", required = false, defaultValue = "") String cFilter) throws Exception {
        SearchCommentThread searchCommentThread = new SearchCommentThread(APIKey);
        searchCommentThread.setVideo(video);
        JSONObject comments = searchCommentThread.getApiResponse();
        JSONArray jsonArray = comments.getJSONArray("items");
        List<String> searchedComments = new ArrayList<>();
        for (int i = 0; i < jsonArray.length(); i++) {
            JSONObject jsonObject = jsonArray.getJSONObject(i).getJSONObject("snippet").getJSONObject("topLevelComment").getJSONObject("snippet");
            // Commas are stripped because the GPT round-trip below uses a
            // comma-joined string as its wire format.
            searchedComments.add(jsonObject.getString("textOriginal").replaceAll(",", ""));
        }
        String commentsStr = String.join(",", searchedComments);
        // Filters come from the persisted session, not the request parameters.
        SearchSession searchSession = searchService.getSearchSessionByToken(token);
        String filter = searchSession.getFilters();
        // String filter = filterParams + "," + cFilter;
        System.out.println(filter);
        List<String> filterArray = new ArrayList<>();
        if (filter != null) {
            if (filter.length() > 3) {
                filterArray = List.of(filter.split(","));
                // Few-shot prompt: system instruction, one worked example
                // (user request + assistant answer), then the real request.
                ChatMessage chatMessage = new ChatMessage();
                chatMessage.setRole(ChatMessageRole.SYSTEM.value());
                chatMessage.setContent("You should filter list of comments based on the filter list. And output should be same format as before.");
                ChatMessage chatMessage2 = new ChatMessage();
                chatMessage2.setRole(ChatMessageRole.USER.value());
                chatMessage2.setContent(
                        "filter list: " + "Spam,Hate speech,comments more than 40 chars" + "\n"
                                + "comments: " + "I really don't like them.,https://asdhioe.com come to this link!,1020304050607080900010203040506070809000ad,This video is cool!,lets gooo,wow"
                );
                ChatMessage chatMessage3 = new ChatMessage();
                chatMessage3.setRole(ChatMessageRole.ASSISTANT.value());
                chatMessage3.setContent("This video is cool!,lets gooo,wow");
                ChatMessage chatMessage4 = new ChatMessage();
                chatMessage4.setRole(ChatMessageRole.USER.value());
                chatMessage4.setContent(
                        "filter list: " + filter + "\n"
                                + "comments: " + commentsStr
                );
                List<ChatMessage> chatMessages = new ArrayList<>();
                chatMessages.add(chatMessage);
                chatMessages.add(chatMessage2);
                chatMessages.add(chatMessage3);
                chatMessages.add(chatMessage4);
                OpenAiService service = new OpenAiService(OpenAIKey);
                ChatCompletionRequest completionRequest = ChatCompletionRequest.builder()
                        .model("gpt-3.5-turbo-16k")
                        .messages(chatMessages)
                        .build();
                String reply = service.createChatCompletion(completionRequest).getChoices().get(0).getMessage().getContent();
                System.out.println(reply);
                // Type type = new TypeToken<List<VideoComment>>() {}.getType();
                // videoComments = new Gson().fromJson(videoCommentsJson, type);
                // NOTE(review): splitting the model's free-form reply on commas is
                // fragile — any deviation from the expected format breaks alignment
                // with the author/avatar lists built below.
                searchedComments = List.of(reply.split(","));
                // "customs" exposes only the user-defined filters; the three
                // built-in ones are stripped from the display list.
                model.addAttribute("customs", filter.replace("spam,", "").replace("abuse,", "").replace("hatespeech,", "").split(","));
            }
        }
        List<String> searchedCommentAuthors = new ArrayList<>();
        for (int i = 0; i < jsonArray.length(); i++) {
            JSONObject jsonObject = jsonArray.getJSONObject(i).getJSONObject("snippet").getJSONObject("topLevelComment").getJSONObject("snippet");
            searchedCommentAuthors.add(jsonObject.getString("authorDisplayName"));
        }
        List<String> searchedCommentAuthorImages = new ArrayList<>();
        for (int i = 0; i < jsonArray.length(); i++) {
            JSONObject jsonObject = jsonArray.getJSONObject(i).getJSONObject("snippet").getJSONObject("topLevelComment").getJSONObject("snippet");
            searchedCommentAuthorImages.add(jsonObject.getString("authorProfileImageUrl"));
        }
        List<VideoComment> videoComments = new ArrayList<>();
        // NOTE(review): the size()-1 bound skips the LAST entry of searchedComments
        // — possibly deliberate (trailing fragment from the GPT split?) but it also
        // drops the final comment on the unfiltered path. Confirm intent.
        for (int i = 0; i < searchedComments.size()-1; i++) {
            String comment = searchedComments.get(i);
            String author = searchedCommentAuthors.get(i);
            String url = searchedCommentAuthorImages.get(i);
            VideoComment videoComment = new VideoComment(comment, author, url);
            videoComments.add(videoComment);
        }
        model.addAttribute("comments", videoComments);
        model.addAttribute("videoTitle", title);
        model.addAttribute("videoThumbnail", thumbnail);
        model.addAttribute("commentCount", videoComments.size());
        model.addAttribute("hasSpam", filterArray.contains("spam"));
        model.addAttribute("hasAbuse", filterArray.contains("abuse"));
        model.addAttribute("hasHatespeech", filterArray.contains("hatespeech"));
        return "comment";
    }

    /**
     * Starts a search session for the chosen creator and stores its token in a
     * "TOKEN" cookie before redirecting to the video list.
     */
    @PostMapping("search/start")
    public String createSearchSession(HttpServletResponse response, @RequestParam("creatorId") String creatorId) throws Exception {
        SearchSessionDto searchSessionDto = new SearchSessionDto();
        searchSessionDto.setCreatorId(creatorId);
        searchSessionDto = searchService.createSearchSession(searchSessionDto);
        Cookie cookie = new Cookie("TOKEN", searchSessionDto.getSearchSessionToken());
        cookie.setPath("/");
        response.addCookie(cookie);
        return "redirect:/videos";
    }

    /**
     * Adds a filter keyword to the session's comma-separated filter string
     * (idempotent — duplicates are not added).
     */
    @PostMapping("search/comment/filter/add")
    public void addFilter(HttpServletResponse response, @CookieValue("TOKEN") String token, @RequestParam("filter") String filter) throws Exception {
        SearchSession searchSession = searchService.getSearchSessionByToken(token);
        String filters = searchSession.getFilters();
        List<String> filterArray = new ArrayList<>();
        if (filters != null) {
            // Copy into a mutable list: List.of(...) alone would be unmodifiable.
            filterArray = new ArrayList<>(List.of(filters.split(",")));
        }
        if (!filterArray.contains(filter)) {
            filterArray.add(filter);
        }
        searchSession.setFilters(String.join(",", filterArray));
        searchSessionRepository.save(searchSession);
    }

    /** Removes a filter keyword from the session's filter string, if present. */
    @PostMapping("search/comment/filter/remove")
    public void removeFilter(HttpServletResponse response, @CookieValue("TOKEN") String token, @RequestParam("filter") String filter) throws Exception {
        SearchSession searchSession = searchService.getSearchSessionByToken(token);
        String filters = searchSession.getFilters();
        List<String> filterArray = new ArrayList<>();
        if (filters != null) {
            filterArray = new ArrayList<>(List.of(filters.split(",")));
        }
        filterArray.remove(filter);
        searchSession.setFilters(String.join(",", filterArray));
        searchSessionRepository.save(searchSession);
    }

    /**
     * Lists the session creator's videos. Same multi-pass field-extraction
     * pattern as searchCreator().
     */
    @GetMapping("videos")
    public String searchCreatorVideos(Model model, @CookieValue("TOKEN") String token) throws Exception {
        SearchSession searchSession = searchService.getSearchSessionByToken(token);
        String creatorId = searchSession.getCreatorId();
        SearchCreatorVideo searchCreatorVideo = new SearchCreatorVideo(APIKey);
        searchCreatorVideo.setChannelId(creatorId);
        JSONObject videos = searchCreatorVideo.getApiResponse();
        JSONArray jsonArray = videos.getJSONArray("items");
        List<String> searchedVideoIds = new ArrayList<>();
        for (int i = 0; i < jsonArray.length(); i++) {
            JSONObject jsonObject = jsonArray.getJSONObject(i).getJSONObject("id");
            searchedVideoIds.add(jsonObject.getString("videoId"));
        }
        List<String> searchedTitles = new ArrayList<>();
        for (int i = 0; i < jsonArray.length(); i++) {
            JSONObject jsonObject = jsonArray.getJSONObject(i).getJSONObject("snippet");
            searchedTitles.add(jsonObject.getString("title"));
        }
        List<String> searchedDescription = new ArrayList<>();
        for (int i = 0; i < jsonArray.length(); i++) {
            JSONObject jsonObject = jsonArray.getJSONObject(i).getJSONObject("snippet");
            searchedDescription.add(jsonObject.getString("description"));
        }
        List<String> searchedThumbnails = new ArrayList<>();
        for (int i = 0; i < jsonArray.length(); i++) {
            JSONObject jsonObject = jsonArray.getJSONObject(i).getJSONObject("snippet").getJSONObject("thumbnails").getJSONObject("medium");
            searchedThumbnails.add(jsonObject.getString("url"));
        }
        List<String> searchedPublishedAt = new ArrayList<>();
        for (int i = 0; i < jsonArray.length(); i++) {
            JSONObject jsonObject = jsonArray.getJSONObject(i).getJSONObject("snippet");
            searchedPublishedAt.add(jsonObject.getString("publishedAt"));
        }
        List<CreatorVideo> creatorVideos = new ArrayList<>();
        for (int i = 0; i < searchedTitles.size(); i++) {
            String videoIds = searchedVideoIds.get(i);
            String title = searchedTitles.get(i);
            String description = searchedDescription.get(i);
            String thumbnail = searchedThumbnails.get(i);
            String publishedAt = searchedPublishedAt.get(i);
            // Keep only the yyyy-MM-dd prefix of the ISO timestamp.
            CreatorVideo creatorVideo = new CreatorVideo(videoIds, title, description, thumbnail, publishedAt.substring(0, 10));
            creatorVideos.add(creatorVideo);
        }
        model.addAttribute("videos", creatorVideos);
        return "video";
    }
}
|
[
"com.theokanning.openai.completion.chat.ChatMessageRole.SYSTEM.value",
"com.theokanning.openai.completion.chat.ChatMessageRole.USER.value",
"com.theokanning.openai.completion.chat.ChatMessageRole.ASSISTANT.value",
"com.theokanning.openai.completion.chat.ChatCompletionRequest.builder"
] |
[((5939, 5969), 'com.theokanning.openai.completion.chat.ChatMessageRole.SYSTEM.value'), ((6220, 6248), 'com.theokanning.openai.completion.chat.ChatMessageRole.USER.value'), ((6700, 6733), 'com.theokanning.openai.completion.chat.ChatMessageRole.ASSISTANT.value'), ((6914, 6942), 'com.theokanning.openai.completion.chat.ChatMessageRole.USER.value'), ((7512, 7676), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((7512, 7643), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((7512, 7595), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder')]
|
package com.tailoredshapes.boobees.repositories;
import com.amazonaws.xray.AWSXRay;
import com.amazonaws.xray.AWSXRayRecorderBuilder;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.tailoredshapes.boobees.model.Prompt;
import com.theokanning.openai.completion.chat.ChatCompletionChoice;
import com.theokanning.openai.completion.chat.ChatCompletionRequest;
import com.theokanning.openai.completion.chat.ChatMessage;
import com.theokanning.openai.completion.chat.ChatMessageRole;
import com.theokanning.openai.embedding.Embedding;
import com.theokanning.openai.embedding.EmbeddingRequest;
import com.theokanning.openai.embedding.EmbeddingResult;
import com.theokanning.openai.service.OpenAiService;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import java.util.Collections;
import java.util.List;
import java.util.concurrent.CompletableFuture;
import java.util.stream.Collectors;
import static com.tailoredshapes.underbar.ocho.UnderBar.map;
/**
 * Wraps an OpenAI chat model behind a message repository: each answer is built
 * from the system persona, recent persisted context, and the new prompts, and
 * both sides of the exchange are written back to the repo.
 */
public class Assistant {
    // Model id is overridable via the MODEL environment variable.
    public static final String MODEL = System.getenv("MODEL") != null ? System.getenv("MODEL") : "gpt-3.5-turbo";
    static {
        // Install a global AWS X-Ray recorder so beginSubsegment() below works.
        AWSXRayRecorderBuilder builder = AWSXRayRecorderBuilder.standard();
        AWSXRay.setGlobalRecorder(builder.build());
    }
    private static final Logger LOG = LogManager.getLogger(Assistant.class);
    private final OpenAiService openAIClient;
    // Persona text sent as a system message with every request.
    private final String systemPrompt;
    // Returned verbatim when the OpenAI call fails.
    public final String failMessage;
    private final MessageRepo repo;
    public Assistant(OpenAiService openAIClient, String failMessage, MessageRepo repo, String systemPrompt) {
        this.repo = repo;
        this.openAIClient = openAIClient;
        this.systemPrompt = systemPrompt;
        this.failMessage = failMessage;
        LOG.info("Using: %s".formatted(MODEL));
    }
    /**
     * Answers the given prompts in the context of the chat's stored history.
     * On any API failure, returns {@link #failMessage} instead of throwing.
     */
    public String answer(List<String> prompts, Long chatId) {
        // NOTE(review): this local shadows the field of the same name; the log
        // line below therefore formats the ChatMessage object, not the raw text.
        ChatMessage systemPrompt = new ChatMessage(ChatMessageRole.SYSTEM.value(), this.systemPrompt);
        ChatMessage formatPrompt = new ChatMessage(ChatMessageRole.SYSTEM.value(), "Please use markdown for formatting and emphasis, feel free to use emoji.");
        LOG.info("Using personality: %s".formatted(systemPrompt));
        // Pull up to 30 stored messages related to the newest prompt, oldest first
        // after the reverse (findN presumably returns newest-first — confirm).
        List<Prompt> lastN = repo.findN(chatId, 30, prompts.get(prompts.size() - 1));
        Collections.reverse(lastN);
        LOG.info("Found %d items for context".formatted(lastN.size()));
        List<ChatMessage> chatPrompts = map(prompts, (m) -> new ChatMessage(ChatMessageRole.USER.value(), m));
        List<ChatMessage> aiPrompts = lastN.stream().map( (p) -> new ChatMessage(ChatMessageRole.valueOf(p.role().toUpperCase()).value(), p.prompt())).collect(Collectors.toList());
        // Message order sent to the API: history, format prompt, persona, new prompts.
        aiPrompts.add(formatPrompt);
        aiPrompts.add(systemPrompt);
        aiPrompts.addAll(chatPrompts);
        try {
            LOG.debug("Prompts sent to AI: \n" + new ObjectMapper().writerWithDefaultPrettyPrinter().writeValueAsString(aiPrompts));
        } catch (JsonProcessingException e) {
            LOG.error("Can't display prompts", e);
        }
        ChatCompletionRequest completionRequest = ChatCompletionRequest.builder().model(MODEL).messages(aiPrompts).build();
        String message = failMessage;
        // X-Ray subsegment scopes the external OpenAI call for tracing.
        try(var ss = AWSXRay.beginSubsegment("Calling OpenAI")){
            try {
                List<ChatCompletionChoice> choices = openAIClient.createChatCompletion(completionRequest).getChoices();
                if(choices.size() > 0){
                    ChatMessage answer = choices.get(0).getMessage();
                    message = answer.getContent();
                    chatPrompts.add(answer);
                }
                // Persist both the user prompts and the assistant reply.
                List<Prompt> ps = chatPrompts.stream().map((cm) -> new Prompt(cm.getRole(), cm.getContent())).toList();
                repo.createAll(chatId, ps);
            } catch (Exception e) {
                // Failure is swallowed deliberately: callers get failMessage.
                LOG.error("OpenAI is screwing around again", e);
                ss.addException(e);
            }
        }
        return message;
    }
    /** Asynchronous wrapper around {@link #answer}; runs on the common pool. */
    public CompletableFuture<String> answerAsync(List<String> prompts, Long chatId) {
        return CompletableFuture.supplyAsync(() -> answer(prompts, chatId));
    }
    /**
     * Embeds the given prompts with text-embedding-ada-002 and returns one
     * vector per input prompt.
     */
    public List<List<Double>> embed(List<Prompt> prompt) {
        EmbeddingRequest embeddingRequest = EmbeddingRequest.builder().model("text-embedding-ada-002").input(prompt.stream().map(Prompt::prompt).toList()).build();
        EmbeddingResult embeddings = openAIClient.createEmbeddings(embeddingRequest);
        List<Embedding> data = embeddings.getData();
        return data.stream().map(Embedding::getEmbedding).toList();
    }
}
|
[
"com.theokanning.openai.completion.chat.ChatCompletionRequest.builder",
"com.theokanning.openai.completion.chat.ChatMessageRole.SYSTEM.value",
"com.theokanning.openai.embedding.EmbeddingRequest.builder",
"com.theokanning.openai.completion.chat.ChatMessageRole.valueOf",
"com.theokanning.openai.completion.chat.ChatMessageRole.USER.value"
] |
[((2013, 2043), 'com.theokanning.openai.completion.chat.ChatMessageRole.SYSTEM.value'), ((2116, 2146), 'com.theokanning.openai.completion.chat.ChatMessageRole.SYSTEM.value'), ((2567, 2595), 'com.theokanning.openai.completion.chat.ChatMessageRole.USER.value'), ((2684, 2739), 'com.theokanning.openai.completion.chat.ChatMessageRole.valueOf'), ((3203, 3275), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((3203, 3267), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((3203, 3247), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((4377, 4495), 'com.theokanning.openai.embedding.EmbeddingRequest.builder'), ((4377, 4487), 'com.theokanning.openai.embedding.EmbeddingRequest.builder'), ((4377, 4435), 'com.theokanning.openai.embedding.EmbeddingRequest.builder')]
|
package cn.shu.wechat;
import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.databind.DeserializationFeature;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.PropertyNamingStrategy;
import com.theokanning.openai.OpenAiApi;
import com.theokanning.openai.OpenAiService;
import com.theokanning.openai.completion.CompletionChoice;
import com.theokanning.openai.completion.CompletionRequest;
import com.theokanning.openai.completion.CompletionResult;
import okhttp3.*;
import retrofit2.Retrofit;
import retrofit2.adapter.rxjava2.RxJava2CallAdapterFactory;
import retrofit2.converter.jackson.JacksonConverterFactory;
import java.io.IOException;
import java.util.Optional;
import java.util.concurrent.TimeUnit;
import static java.time.Duration.ofSeconds;
/**
 * Standalone demo that sends one completion request to the OpenAI API through a
 * hand-built Retrofit/OkHttp stack and prints the result.
 */
public class AI2 {
    /** Base URL of the OpenAI REST API. */
    private static final String BASE_URL = "https://api.openai.com/";

    public static void main(String[] args) {
        // SECURITY FIX: the API key was previously hard-coded in source. Any key
        // committed to a repository must be treated as leaked and revoked; read
        // the secret from the environment instead.
        String token = System.getenv("OPENAI_API_KEY");
        if (token == null || token.isBlank()) {
            throw new IllegalStateException("OPENAI_API_KEY environment variable is not set");
        }
        // Jackson mapper configured for the OpenAI wire format: tolerate unknown
        // fields, omit nulls, snake_case property names.
        ObjectMapper mapper = new ObjectMapper();
        mapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);
        mapper.setSerializationInclusion(JsonInclude.Include.NON_NULL);
        mapper.setPropertyNamingStrategy(PropertyNamingStrategy.SNAKE_CASE);
        // HTTP client that injects the bearer token on every request.
        OkHttpClient client = new OkHttpClient.Builder()
                .addInterceptor(chain -> {
                    Request request = chain.request()
                            .newBuilder()
                            .header("Authorization", "Bearer " + token)
                            .build();
                    return chain.proceed(request);
                })
                .connectionPool(new ConnectionPool(5, 1, TimeUnit.SECONDS))
                .readTimeout(ofSeconds(20).toMillis(), TimeUnit.MILLISECONDS)
                .build();
        Retrofit retrofit = new Retrofit.Builder()
                .baseUrl(BASE_URL)
                .client(client)
                .addConverterFactory(JacksonConverterFactory.create(mapper))
                .addCallAdapterFactory(RxJava2CallAdapterFactory.create())
                .build();
        OpenAiService service = new OpenAiService(retrofit.create(OpenAiApi.class));
        CompletionRequest completionRequest = CompletionRequest.builder()
                .prompt("导师call我了")
                .maxTokens(1024)
                .model("text-davinci-003")
                .echo(true)
                .build();
        CompletionResult completion = service.createCompletion(completionRequest);
        Optional<String> result = completion.getChoices().stream().findFirst().map(CompletionChoice::getText);
        if (result.isPresent()) {
            // echo=true prepends the prompt, so the completion proper follows a
            // blank line. Guard against replies without that separator, which
            // previously caused an ArrayIndexOutOfBoundsException on split[1].
            String[] split = result.get().split("\n\n");
            if (split.length > 1) {
                System.out.println(split[1]);
            }
        }
        completion.getChoices().forEach(System.out::println);
    }
}
|
[
"com.theokanning.openai.completion.CompletionRequest.builder"
] |
[((2467, 2667), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((2467, 2642), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((2467, 2614), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((2467, 2571), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((2467, 2538), 'com.theokanning.openai.completion.CompletionRequest.builder')]
|
package io.github.lynbean.lynbot.cogs.openai.chatbox;
import java.io.BufferedInputStream;
import java.util.List;
import java.util.Optional;
import java.util.stream.Collectors;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.theokanning.openai.completion.chat.ChatMessage;
import com.theokanning.openai.completion.chat.ChatMessageRole;
import io.github.lynbean.lynbot.cogs.openai.chatbox.pojo.ChatBoxPreset;
import io.reactivex.functions.BiConsumer;
import lombok.Getter;
import net.dv8tion.jda.api.entities.Message;
import net.dv8tion.jda.api.entities.User;
import net.dv8tion.jda.api.entities.channel.concrete.ThreadChannel;
import net.dv8tion.jda.api.interactions.components.buttons.Button;
import net.dv8tion.jda.api.utils.messages.MessageCreateBuilder;
public class ChatBox {
private static final Logger LOG = LoggerFactory.getLogger(ChatBox.class);
private static final String CHATBOX_PREFIX = "ChatBox";
private static final String IGNORE_ANNOTATION = "> ` @IGNORE `";
private @Getter final ThreadChannel channel;
private @Getter ChatBoxPreset preset;
public ChatBox(ThreadChannel channel) {
this.channel = channel;
try {
retrievePresetFromChat();
} catch (RuntimeException e) {
LOG.error("Failed to retrieve preset from chat, will use default preset instead.", e);
channel.sendMessage(getIgnoreAnnotatedMessage("Failed to retrieve preset, will use default preset instead."))
.queue();
this.preset = ChatBoxDatabaseManager.getDefaultChatBoxPreset();
}
}
public ChatBox(ThreadChannel channel, String json) {
this.channel = channel;
this.preset = ChatBoxPreset.fromJson(json);
}
public ChatBox(ThreadChannel channel, ChatBoxPreset preset) {
this.channel = channel;
this.preset = preset;
}
public ChatBox(ThreadChannel channel, String title, String description, String personality, String characterIconUrl, String characterName) {
this.channel = channel;
this.preset = new ChatBoxPreset()
.setTitle(title)
.setDescription(description)
.setPersonality(personality)
.setCharacterIconUrl(characterIconUrl)
.setCharacterName(characterName);
}
public static ChatBox create(Message message, User user) {
return create(message, user, ChatBoxDatabaseManager.getDefaultChatBoxPreset());
}
public static ChatBox create(Message message, User user, String presetId) {
ChatBoxPreset preset = ChatBoxDatabaseManager.findChatBoxPreset(String.valueOf(presetId));
return create(message, user, preset);
}
public static ChatBox create(Message message, User user, ChatBoxPreset preset) {
ThreadChannel channel = message.createThreadChannel(CHATBOX_PREFIX).complete();
ChatBox chatBox = new ChatBox(channel, preset);
chatBox.lockChannel();
channel.addThreadMember(user).queue();
channel.getManager().setSlowmode(5).queue(); // A way to prevent spamming and causing error occurred
channel.sendMessage(
new MessageCreateBuilder()
.setContent(getIgnoreAnnotatedMessage("Do NOT delete this message. Created by " + user.getAsMention() + "."))
.setActionRow(Button.danger("openai.chatbox.delete_channel", "If you hate me, click here to delete me."))
.setFiles(preset.getJsonFile(message.getId()))
.build()
)
.queue();
chatBox.unLockChannel();
return chatBox;
}
private void retrievePresetFromChat() {
Optional<Message> chatPreset = channel.getHistoryFromBeginning(5)
.complete()
.getRetrievedHistory()
.stream()
.filter(
message -> {
if (!message.getAttachments().isEmpty())
return message.getAttachments()
.get(0)
.getFileName()
.startsWith(CHATBOX_PREFIX);
return false;
}
)
.findFirst();
chatPreset.get()
.getAttachments()
.get(0)
.getProxy()
.download()
.thenAccept(
inputStream -> {
String json;
try (
BufferedInputStream bis = new BufferedInputStream(inputStream)
) {
byte[] bytes = new byte[10240];
int bytesRead = bis.read(bytes, 0, bytes.length);
json = new String(bytes, 0, bytesRead);
} catch (Exception e) {
throw new RuntimeException(e);
}
this.preset = ChatBoxPreset.fromJson(json);
}
)
.join();
}
private List<Message> retrieveMessages() {
return channel.getIterableHistory()
// TODO: Integrate this into the preset message to let user configure this
.takeAsync(15) // Prevents exceeding model's max token limit
.thenApply(messages -> messages.stream().collect(Collectors.toList()))
.join();
}
private List<ChatMessage> getDialogues() {
if (preset == null) {
retrievePresetFromChat();
}
List<ChatMessage> dialogues = preset.getPresetMessages();
retrieveMessages()
.stream()
.filter(message ->
!message.getContentRaw().startsWith(IGNORE_ANNOTATION) && // Exclude out not ChatBox messages
!message.getAuthor().isSystem() && // Exclude out system messages
!message.isEphemeral() && // Exclude out ephemeral messages
(
// Exclude out messages without content or embeds
!message.getContentRaw().isEmpty() ||
!message.getEmbeds().isEmpty()
)
)
.sorted((a, b) -> a.getTimeCreated().compareTo(b.getTimeCreated()))
.forEachOrdered(
message -> {
ChatMessage chatMessage = new ChatMessage();
try {
chatMessage.setContent(
message.getAuthor().isBot()
? message.getEmbeds().get(0).getDescription() // Probably only one embed, it's hardcoded
: message.getContentRaw()
);
chatMessage.setRole(
message.getAuthor().isBot()
? ChatMessageRole.ASSISTANT.value()
: ChatMessageRole.USER.value()
);
dialogues.add(chatMessage);
} catch (Exception e) {
// We will just ignore this message so that it won't break the chat
// But we will log it for debugging purposes
e.printStackTrace();
}
}
);
return dialogues;
}
public void execute(BiConsumer<List<ChatMessage>, ChatBoxPreset> action) {
lockChannel();
try {
action.accept(getDialogues(), preset);
} catch (Exception e) {
throw new RuntimeException(e);
} finally {
unLockChannel();
}
}
public static String getIgnoreAnnotatedMessage(String message) {
return String.format("%s (%s)", IGNORE_ANNOTATION, message);
}
private void lockChannel() {
channel.getManager()
.setLocked(true)
.queue();
}
private void unLockChannel() {
channel.getManager()
.setLocked(false)
.queue();
}
}
|
[
"com.theokanning.openai.completion.chat.ChatMessageRole.USER.value",
"com.theokanning.openai.completion.chat.ChatMessageRole.ASSISTANT.value"
] |
[((6833, 6866), 'com.theokanning.openai.completion.chat.ChatMessageRole.ASSISTANT.value'), ((6901, 6929), 'com.theokanning.openai.completion.chat.ChatMessageRole.USER.value')]
|
package com.chatgpt.example.dto.request;
import com.theokanning.openai.completion.CompletionRequest;
import lombok.AllArgsConstructor;
import lombok.Getter;
import lombok.NoArgsConstructor;
@Getter
@NoArgsConstructor
@AllArgsConstructor
public class GPTCompletionRequest {
private String model;
private String prompt;
private Integer maxToken;
public static CompletionRequest of(GPTCompletionRequest restRequest) {
return CompletionRequest.builder()
.model(restRequest.getModel())
.prompt(restRequest.getPrompt())
.maxTokens(restRequest.getMaxToken())
.build();
}
}
|
[
"com.theokanning.openai.completion.CompletionRequest.builder"
] |
[((441, 611), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((441, 594), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((441, 548), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((441, 507), 'com.theokanning.openai.completion.CompletionRequest.builder')]
|
package me.acclashcorporation;
// [START speech_transcribe_infinite_streaming]
import com.google.api.gax.rpc.ClientStream;
import com.google.api.gax.rpc.ResponseObserver;
import com.google.api.gax.rpc.StreamController;
import com.google.cloud.speech.v1p1beta1.*;
import com.google.cloud.texttospeech.v1.*;
import com.google.protobuf.ByteString;
import com.google.protobuf.Duration;
import com.theokanning.openai.OpenAiApi;
import com.theokanning.openai.OpenAiService;
import com.theokanning.openai.completion.CompletionRequest;
import me.acclashcorporation.utils.FineTune;
import me.acclashcorporation.utils.SampleTTS;
import javax.sound.sampled.*;
import javax.sound.sampled.DataLine.Info;
import java.io.File;
import java.io.FileOutputStream;
import java.io.OutputStream;
import java.text.DecimalFormat;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.TimeUnit;
public class JamesGPT {
private static final int STREAMING_LIMIT = 290000; // ~5 minutes
public static final String RED = "\033[0;31m";
public static final String GREEN = "\033[0;32m";
public static final String YELLOW = "\033[0;33m";
// Creating shared object
private static volatile BlockingQueue<byte[]> sharedQueue = new LinkedBlockingQueue();
private static TargetDataLine targetDataLine;
private static int BYTES_PER_BUFFER = 6400; // buffer size in bytes
private static int restartCounter = 0;
private static ArrayList<ByteString> audioInput = new ArrayList<ByteString>();
private static ArrayList<ByteString> lastAudioInput = new ArrayList<ByteString>();
private static int resultEndTimeInMS = 0;
private static int isFinalEndTime = 0;
private static int finalRequestEndTime = 0;
private static boolean newStream = true;
private static double bridgingOffset = 0;
private static boolean lastTranscriptWasFinal = false;
private static StreamController referenceToStreamController;
private static ByteString tempByteString;
private static StringBuilder conversation = new StringBuilder();
public static void main(String[] args) throws Exception {
//FineTune.fineTune();
startConversation();
//SampleTTS.sampleTest();
}
public static String convertMillisToDate(double milliSeconds) {
long millis = (long) milliSeconds;
DecimalFormat format = new DecimalFormat();
format.setMinimumIntegerDigits(2);
return String.format(
"%s:%s /",
format.format(TimeUnit.MILLISECONDS.toMinutes(millis)),
format.format(
TimeUnit.MILLISECONDS.toSeconds(millis)
- TimeUnit.MINUTES.toSeconds(TimeUnit.MILLISECONDS.toMinutes(millis))));
}
/**
* Performs infinite streaming speech recognition
*/
public static void infiniteStreamingRecognize() throws Exception {
// Microphone Input buffering
class MicBuffer implements Runnable {
@Override
public void run() {
System.out.println(YELLOW);
System.out.println("Start speaking...Press Ctrl-C to stop");
targetDataLine.start();
byte[] data = new byte[BYTES_PER_BUFFER];
while (targetDataLine.isOpen()) {
try {
int numBytesRead = targetDataLine.read(data, 0, data.length);
if ((numBytesRead <= 0) && (targetDataLine.isOpen())) {
continue;
}
sharedQueue.put(data.clone());
} catch (InterruptedException e) {
System.out.println("Microphone input buffering interrupted : " + e.getMessage());
}
}
}
}
// Creating microphone input buffer thread
MicBuffer micrunnable = new MicBuffer();
Thread micThread = new Thread(micrunnable);
ResponseObserver<StreamingRecognizeResponse> responseObserver = null;
try (SpeechClient client = SpeechClient.create()) {
ClientStream<StreamingRecognizeRequest> clientStream;
responseObserver =
new ResponseObserver<StreamingRecognizeResponse>() {
ArrayList<StreamingRecognizeResponse> responses = new ArrayList<>();
public void onStart(StreamController controller) {
referenceToStreamController = controller;
}
public void onResponse(StreamingRecognizeResponse response) {
responses.add(response);
StreamingRecognitionResult result = response.getResultsList().get(0);
Duration resultEndTime = result.getResultEndTime();
resultEndTimeInMS =
(int)
((resultEndTime.getSeconds() * 1000) + (resultEndTime.getNanos() / 1000000));
double correctedTime =
resultEndTimeInMS - bridgingOffset + (STREAMING_LIMIT * restartCounter);
SpeechRecognitionAlternative alternative = result.getAlternativesList().get(0);
if (result.getIsFinal()) {
System.out.print(GREEN);
System.out.print("\033[2K\r");
System.out.printf(
"%s: %s [confidence: %.2f]\n",
convertMillisToDate(correctedTime),
alternative.getTranscript(),
alternative.getConfidence());
isFinalEndTime = resultEndTimeInMS;
lastTranscriptWasFinal = true;
// Handle human's response
if (alternative.getTranscript().toLowerCase().contains("james") || conversation.toString().endsWith("?")) {
addMessage(alternative.getTranscript());
try {
getResponse();
} catch (Exception e) {
throw new RuntimeException(e);
}
}
} else {
System.out.print(RED);
System.out.print("\033[2K\r");
System.out.printf(
"%s: %s", convertMillisToDate(correctedTime), alternative.getTranscript());
lastTranscriptWasFinal = false;
}
}
public void onComplete() {}
public void onError(Throwable t) {}
};
clientStream = client.streamingRecognizeCallable().splitCall(responseObserver);
//ArrayList<String> languageList = new ArrayList<>();
//languageList.add("de-DE");
//languageList.add("es-MX");
RecognitionConfig recognitionConfig =
RecognitionConfig.newBuilder()
.setEncoding(RecognitionConfig.AudioEncoding.LINEAR16)
.setLanguageCode("en-US")
//.addAllAlternativeLanguageCodes(languageList)
.setSampleRateHertz(16000)
.build();
StreamingRecognitionConfig streamingRecognitionConfig =
StreamingRecognitionConfig.newBuilder()
.setConfig(recognitionConfig)
.setInterimResults(true)
.build();
StreamingRecognizeRequest request =
StreamingRecognizeRequest.newBuilder()
.setStreamingConfig(streamingRecognitionConfig)
.build(); // The first request in a streaming call has to be a config
clientStream.send(request);
try {
// SampleRate:16000Hz, SampleSizeInBits: 16, Number of channels: 1, Signed: true,
// bigEndian: false
AudioFormat audioFormat = new AudioFormat(16000, 16, 1, true, false);
DataLine.Info targetInfo =
new Info(
TargetDataLine.class,
audioFormat); // Set the system information to read from the microphone audio
// stream
if (!AudioSystem.isLineSupported(targetInfo)) {
System.out.println("Microphone not supported");
System.exit(0);
}
// Target data line captures the audio stream the microphone produces.
targetDataLine = (TargetDataLine) AudioSystem.getLine(targetInfo);
targetDataLine.open(audioFormat);
micThread.start();
long startTime = System.currentTimeMillis();
while (true) {
long estimatedTime = System.currentTimeMillis() - startTime;
if (estimatedTime >= STREAMING_LIMIT) {
clientStream.closeSend();
referenceToStreamController.cancel(); // remove Observer
if (resultEndTimeInMS > 0) {
finalRequestEndTime = isFinalEndTime;
}
resultEndTimeInMS = 0;
lastAudioInput = null;
lastAudioInput = audioInput;
audioInput = new ArrayList<ByteString>();
restartCounter++;
if (!lastTranscriptWasFinal) {
System.out.print('\n');
}
newStream = true;
clientStream = client.streamingRecognizeCallable().splitCall(responseObserver);
request =
StreamingRecognizeRequest.newBuilder()
.setStreamingConfig(streamingRecognitionConfig)
.build();
System.out.println(YELLOW);
System.out.printf("%d: RESTARTING REQUEST\n", restartCounter * STREAMING_LIMIT);
startTime = System.currentTimeMillis();
} else {
if ((newStream) && (lastAudioInput.size() > 0)) {
// if this is the first audio from a new request
// calculate amount of unfinalized audio from last request
// resend the audio to the speech client before incoming audio
double chunkTime = STREAMING_LIMIT / lastAudioInput.size();
// ms length of each chunk in previous request audio arrayList
if (chunkTime != 0) {
if (bridgingOffset < 0) {
// bridging Offset accounts for time of resent audio
// calculated from last request
bridgingOffset = 0;
}
if (bridgingOffset > finalRequestEndTime) {
bridgingOffset = finalRequestEndTime;
}
int chunksFromMs =
(int) Math.floor((finalRequestEndTime - bridgingOffset) / chunkTime);
// chunks from MS is number of chunks to resend
bridgingOffset =
(int) Math.floor((lastAudioInput.size() - chunksFromMs) * chunkTime);
// set bridging offset for next request
for (int i = chunksFromMs; i < lastAudioInput.size(); i++) {
request =
StreamingRecognizeRequest.newBuilder()
.setAudioContent(lastAudioInput.get(i))
.build();
clientStream.send(request);
}
}
newStream = false;
}
tempByteString = ByteString.copyFrom(sharedQueue.take());
request =
StreamingRecognizeRequest.newBuilder().setAudioContent(tempByteString).build();
audioInput.add(tempByteString);
}
clientStream.send(request);
}
} catch (Exception e) {
System.out.println(e);
}
}
}
public static void startConversation() throws Exception {
AudioInputStream audioInputStream = AudioSystem.getAudioInputStream(new File("startup.wav").getAbsoluteFile());
Clip clip = AudioSystem.getClip();
clip.open(audioInputStream);
clip.start();
conversation = new StringBuilder("""
The following is a conversation with an AI assistant. The assistant is helpful, creative, clever, and very friendly. The assistant's name is James. He was created by AC Clash.
AI: I am an AI created by AC Clash. How can I help you today?
Human: Hi, who are you?
AI:""");
getResponse();
try {
infiniteStreamingRecognize();
} catch (Exception e) {
throw new RuntimeException(e);
}
}
public static void getResponse() throws Exception {
OpenAiService service = new OpenAiService("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", 0);
CompletionRequest request = CompletionRequest.builder()
.prompt(conversation.toString())
.model("gpt-4") //Use the latest davinci model
.temperature(0.90) //How creative the AI should be
.maxTokens(150) //How many tokens the AI should generate. Tokens are words, punctuation, etc.
.topP(1.0) //How much diversity the AI should have. 1.0 is the most diverse
.frequencyPenalty(0.0) //How much the AI should avoid repeating itself
.presencePenalty(0.6) //How much the AI should avoid repeating the same words
.stop(List.of("Human:", "AI:")) //Stop the AI from generating more text when it sees these words
.build();
var choices = service.createCompletion(request).getChoices();
var response = choices.get(0).getText(); //what the AI responds with
conversation.append(response.stripLeading());
System.out.print(GREEN);
System.out.println(response.stripLeading());
speak(response.stripLeading());
}
public static void addMessage(String transcript) {
conversation.append("\nHuman:").append(transcript).append("\nAI:");
}
public static void speak(String output) throws Exception {
// Instantiates a client
try (TextToSpeechClient textToSpeechClient = TextToSpeechClient.create()) {
// Set the text input to be synthesized
SynthesisInput input = SynthesisInput.newBuilder().setText(output).build();
// Build the voice request, select the language code ("en-US") and the ssml voice gender
// ("neutral")
VoiceSelectionParams voice =
VoiceSelectionParams.newBuilder()
.setLanguageCode("en-GB")
.setName("en-GB-Neural2-B")
.setSsmlGender(SsmlVoiceGender.MALE)
.build();
// Select the type of audio file you want returned
AudioConfig audioConfig =
AudioConfig.newBuilder().setAudioEncoding(AudioEncoding.LINEAR16).build();
// Perform the text-to-speech request on the text input with the selected voice parameters and
// audio file type
SynthesizeSpeechResponse response =
textToSpeechClient.synthesizeSpeech(input, voice, audioConfig);
// Get the audio contents from the response
ByteString audioContents = response.getAudioContent();
// Write the response to the output file.
try (OutputStream out = new FileOutputStream("output.wav")) {
out.write(audioContents.toByteArray());
//System.out.println("Audio content written to file \"output.wav\"");
}
AudioInputStream audioInputStream = AudioSystem.getAudioInputStream(new File("output.wav").getAbsoluteFile());
Clip clip = AudioSystem.getClip();
clip.open(audioInputStream);
clip.start();
}
}
}
// [END speech_transcribe_infinite_streaming]
|
[
"com.theokanning.openai.completion.CompletionRequest.builder"
] |
[((2627, 2666), 'java.util.concurrent.TimeUnit.MILLISECONDS.toMinutes'), ((2724, 2763), 'java.util.concurrent.TimeUnit.MILLISECONDS.toSeconds'), ((2798, 2865), 'java.util.concurrent.TimeUnit.MINUTES.toSeconds'), ((2825, 2864), 'java.util.concurrent.TimeUnit.MILLISECONDS.toMinutes'), ((14651, 15378), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((14651, 15288), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((14651, 15184), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((14651, 15098), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((14651, 14994), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((14651, 14889), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((14651, 14825), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((14651, 14759), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((14651, 14727), 'com.theokanning.openai.completion.CompletionRequest.builder')]
|
package wood.util;
import com.theokanning.openai.OpenAiService;
import com.theokanning.openai.completion.CompletionChoice;
import com.theokanning.openai.completion.CompletionRequest;
import lombok.Getter;
import lombok.extern.slf4j.Slf4j;
import java.util.ArrayList;
import java.util.List;
import java.util.Optional;
/** A wrapper class for com.theokanning.openai. Requires GPTRequest.apiKey to be set. */
@Slf4j
public class GPTRequest {
// ----------- static fields -----------
/** The OpenAI API key to use for all requests. Can set using the testAndSetApiKey method. */
public static String apiKey = "";
/** Language models */
public static final String davinci = "davinci", curie = "curie", babbage = "babbage", ada = "ada",
inDavinci = "text-davinci-002", inCurie = "text-curie-001", inBabbage = "text-babbage-001", inAda = "text-ada-001";
/** counter for how many tokens have been used by each language model (irrespective of Base series vs Instruct) */
private static int davinciTokenCounter = 0, curieTokenCounter = 0, babbageTokenCounter = 0, adaTokenCounter = 0;
// ----------- instance fields -----------
private final OpenAiService service;
private final CompletionRequest completionRequest;
private final CompletionRequest.CompletionRequestBuilder completionRequestBuilder;
/** The prompt to use for this API request */
@Getter private final String prompt;
/** Language Model to use for this API request */
@Getter private final String model;
/** Maximum number of tokens use in the API request (including the prompt). */
@Getter private final int maxTokens;
/** (default .7) a value 0-1 with 1 being very creative, 0 being very factual/deterministic */
@Getter private final double temperature;
/** (default 1) between 0-1 where 1.0 means "use all tokens in the vocabulary"
* while 0.5 means "use only the 50% most common tokens" */
@Getter private final double topP;
/** (default 0) 0-1, lowers the chances of a word being selected again the more times that word has already been used */
@Getter private final double frequencyPenalty;
/** (default 0) 0-1, lowers the chances of topic repetition */
@Getter private final double presencePenalty;
/** Echo back the prompt in addition to the completion. */
@Getter private final boolean echoPrompt;
/** (default 1), queries GPT-3 this many times, then selects the 'best' generation to return */
@Getter private final int bestOf;
/** The Strings that GPT-3 will stop generating after (can have 4 stop sequences max) */
@Getter private final List<String> stopSequences;
public GPTRequest(GPTRequestBuilder builder) {
this.prompt = builder.prompt;
this.model = builder.model;
this.maxTokens = builder.maxTokens;
this.temperature = builder.temperature;
this.topP = builder.topP;
this.frequencyPenalty = builder.frequencyPenalty;
this.presencePenalty = builder.presencePenalty;
this.echoPrompt = builder.echoPrompt;
this.bestOf = builder.bestOf;
this.stopSequences = builder.stopSequences;
service = new OpenAiService(apiKey);
completionRequestBuilder = CompletionRequest.builder()
.prompt(prompt);
completionRequestBuilder.maxTokens(maxTokens);
completionRequestBuilder.temperature(temperature);
completionRequestBuilder.topP(topP);
completionRequestBuilder.frequencyPenalty(frequencyPenalty);
completionRequestBuilder.presencePenalty(presencePenalty);
completionRequestBuilder.echo(echoPrompt);
if(stopSequences != null)
completionRequestBuilder.stop(stopSequences);
completionRequest = completionRequestBuilder.build();
}
/**
* Tests the API key, and sets it if it's valid
* API key validity is tested by a 1 token API request to the Ada model.
* @param apiKey An OpenAI API key
* @return Whether the API key is valid
*/
public static boolean testAndSetApiKey(String apiKey) {
String originalAPIKey = GPTRequest.apiKey;
try {
GPTRequest.apiKey = apiKey;
new GPTRequestBuilder(ada, "", 1, false).build().request();
return true;
}catch(Exception e) {
GPTRequest.apiKey = originalAPIKey;
return false;
}
}
/**
* Makes an OpenAI API request.
* @return If echoPrompt is true, returns the prompt + completion, else the completion is returned.
*/
public String request() {
logTokenUsage(maxTokens);
List<CompletionChoice> outputList = service.createCompletion(model, completionRequest).getChoices();
return outputList.get(0).getText();
}
/**
* Makes an OpenAI API request.
* @param endAtLastPunctuationMark Whether the completion should be cut off after the last punctuation mark
* @return If echoPrompt is true, returns the prompt + completion, else the completion is returned.
*/
public String request(boolean endAtLastPunctuationMark) {
String output = request();
if(endAtLastPunctuationMark) {
// get the index of the last punctuation mark inside the completion (omitting the prompt)
Optional<Integer> lastPunctuationIndex = StringUtil.lastIndexOf(output, "[.!?]",
echoPrompt ? prompt.length() : 0);
if(lastPunctuationIndex.isPresent())
return output.substring(0, lastPunctuationIndex.get() + 1);
}
return output;
}
public static class GPTRequestBuilder {
/** Language Model to use for this API request */
@Getter
private String model;
/** The prompt to use for this API request */
@Getter private String prompt;
/** Maximum number of tokens use in the API request (including the prompt). */
@Getter private int maxTokens;
/** (default false) Echo back the prompt in addition to the completion. */
@Getter private boolean echoPrompt;
/** (default .7) a value 0-1 with 1 being very creative, 0 being very factual/deterministic */
@Getter private double temperature;
/** (default 1) between 0-1 where 1.0 means "use all tokens in the vocabulary"
* while 0.5 means "use only the 50% most common tokens" */
@Getter private double topP;
/** (default 0) 0-1, lowers the chances of a word being selected again the more times that word has already been used */
@Getter private double frequencyPenalty;
/** (default 0) 0-1, lowers the chances of topic repetition */
@Getter private double presencePenalty;
/** (default 1), queries GPT-3 this many times, then selects the 'best' generation to return */
@Getter private int bestOf;
/** The Strings that GPT-3 will stop generating after (can have 4 stop sequences max) */
@Getter private List<String> stopSequences;
/**
* Starts to build an API request for the given language model
*
* @param model Language model to use for this API request. Valid Base Series models:
* UtilGPT.davinci, UtilGPT.curie, UtilGPT.babbage, UtilGPT.ada
* Valid Instruct Series models:
* UtilGPT.inDavinci, UtilGPT.inCurie, UtilGPT.inBabbage, UtilGPT.inAda
* @param prompt Prompt sent to the language model
* @param maxTokens Maximum number of tokens use in the API request
*/
public GPTRequestBuilder(String model, String prompt, int maxTokens) {
this.model = model;
this.prompt = prompt;
this.maxTokens = maxTokens;
this.temperature = .7;
this.topP = 1;
this.frequencyPenalty = 0;
this.presencePenalty = 0;
this.echoPrompt = false;
this.bestOf = 1;
}
/**
* Starts to build an API request for the given language model
*
* @param model Language model to use for this API request. Valid Base Series models:
* UtilGPT.davinci, UtilGPT.curie, UtilGPT.babbage, UtilGPT.ada
* Valid Instruct Series models:
* UtilGPT.inDavinci, UtilGPT.inCurie, UtilGPT.inBabbage, UtilGPT.inAda
* @param prompt Prompt sent to the language model
* @param maxTokens Maximum number of tokens use in the API request
* @param addPromptTokensToMaxTokens Whether the number of tokens in the prompt should be added to maxTokens
*/
public GPTRequestBuilder(String model, String prompt, int maxTokens, boolean addPromptTokensToMaxTokens) {
this.model = model;
this.prompt = prompt;
this.maxTokens = addPromptTokensToMaxTokens ? maxTokens + GPTUtil.countTokens(prompt) : maxTokens;
this.temperature = .7;
this.topP = 1;
this.frequencyPenalty = 0;
this.presencePenalty = 0;
this.echoPrompt = false;
this.bestOf = 1;
}
public GPTRequest build() {
return new GPTRequest(this);
}
/** @param prompt Prompt sent to the language model
* @return This GPTRequestBuilder, for chaining */
public GPTRequestBuilder prompt(String prompt) {
this.prompt = prompt;
return this;
}
/** @param prompt Prompt sent to the language model
* @param maxTokens Maximum number of tokens use in the API request
* @param addPromptTokensToMaxTokens Whether the number of tokens in the prompt should be added to maxTokens
* @return This GPTRequestBuilder, for chaining */
public GPTRequestBuilder promptAndTokens(String prompt, int maxTokens, boolean addPromptTokensToMaxTokens) {
this.prompt = prompt;
this.maxTokens = addPromptTokensToMaxTokens ? maxTokens + GPTUtil.countTokens(prompt) : maxTokens;
return this;
}
/**
* @param model Language model to use for this API request. Valid Base Series models:
* UtilGPT.davinci, UtilGPT.curie, UtilGPT.babbage, UtilGPT.ada
* Valid Instruct Series models:
* UtilGPT.inDavinci, UtilGPT.inCurie, UtilGPT.inBabbage, UtilGPT.inAda
* @return This GPTRequestBuilder, for chaining
*/
public GPTRequestBuilder model(String model) {
this.model = model;
return this;
}
/**
* @param maxTokens The total number of tokens use in the API request (including the prompt).
* @return This GPTRequestBuilder, for chaining
*/
public GPTRequestBuilder maxTokens(int maxTokens) {
this.maxTokens = maxTokens;
return this;
}
/** @param temperature (default .7) a value 0-1 with 1 being very creative, 0 being very factual/deterministic
* @return This GPTRequestBuilder, for chaining
*/
public GPTRequestBuilder temperature(double temperature) {
this.temperature = temperature;
return this;
}
/** @param topP (default 1) between 0-1 where 1.0 means "use all tokens in the vocabulary"
* while 0.5 means "use only the 50% most common tokens"
* @return This GPTRequestBuilder, for chaining
*/
public GPTRequestBuilder topP(double topP) {
this.topP = topP;
return this;
}
/** @param frequencyPenalty (default 0) 0-1, lowers the chances of a word being selected again
* the more times that word has already been used
* @return This GPTRequestBuilder, for chaining
*/
public GPTRequestBuilder frequencyPenalty(double frequencyPenalty) {
this.frequencyPenalty = frequencyPenalty;
return this;
}
/** @param presencePenalty (default 0) 0-1, lowers the chances of topic repetition
* @return This GPTRequestBuilder, for chaining
*/
public GPTRequestBuilder presencePenalty(double presencePenalty) {
this.presencePenalty = presencePenalty;
return this;
}
/** @param bestOf (default 1), queries GPT-3 this many times, then selects the 'best' generation to return
* @return This GPTRequestBuilder, for chaining
*/
public GPTRequestBuilder bestOf(int bestOf) {
this.bestOf = bestOf;
return this;
}
/**
* set the stop sequence, the String that GPT-3 will stop generating after
* (can have 4 stop sequences max)
* @param stopSequences The Strings that GPT-3 will stop generating after (can have 4 stop sequences max)
* @return This GPTRequestBuilder, for chaining
*/
public GPTRequestBuilder stopSequences(List<String> stopSequences) {
if(stopSequences.size() > 4)
throw new IllegalArgumentException("Can only have 4 stop sequences max");
else
this.stopSequences = stopSequences;
return this;
}
/** @param echoPrompt Whether to echo back the prompt in addition to the completion.
* @return This GPTRequestBuilder, for chaining
*/
public GPTRequestBuilder echoPrompt(boolean echoPrompt) {
this.echoPrompt = echoPrompt;
return this;
}
}
/**
* Logs the token usage every time request() is called.
* @param numTokens The number of tokens used in this API request.
*/
private void logTokenUsage(int numTokens) {
switch(model) {
case davinci:
case inDavinci:
davinciTokenCounter += numTokens;
break;
case curie:
case inCurie:
curieTokenCounter += numTokens;
break;
case babbage:
case inBabbage:
babbageTokenCounter += numTokens;
break;
case ada:
case inAda:
adaTokenCounter += numTokens;
break;
}
log.info(String.format("Total tokens used:%n%s%s%s%s-----------------------------------------%n",
davinciTokenCounter > 0 ? "Davinci: " + davinciTokenCounter + " token" + (davinciTokenCounter > 1 ? "s\n" : "\n") : "",
curieTokenCounter > 0 ? "Curie: " + curieTokenCounter + " token" + (curieTokenCounter > 1 ? "s\n" : "\n") : "",
babbageTokenCounter > 0 ? "Babbage: " + babbageTokenCounter + " token" + (babbageTokenCounter > 1 ? "s\n" : "\n") : "",
adaTokenCounter > 0 ? "Ada: " + adaTokenCounter + " token" + (adaTokenCounter > 1 ? "s\n" : "\n") : ""));
}
}
|
[
"com.theokanning.openai.completion.CompletionRequest.builder"
] |
[((3276, 3335), 'com.theokanning.openai.completion.CompletionRequest.builder')]
|
package com.vission.chatGPT.config;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.theokanning.openai.OpenAiApi;
import com.theokanning.openai.service.OpenAiService;
import com.vission.chatGPT.properties.ChatGPTProperties;
import java.net.InetSocketAddress;
import java.net.Proxy;
import java.time.Duration;
import lombok.RequiredArgsConstructor;
import okhttp3.OkHttpClient;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Import;
import retrofit2.Retrofit;
/**
 * Spring configuration that builds an {@link OpenAiService} whose HTTP traffic
 * is routed through the proxy declared in {@link ChatGPTProperties}.
 */
@Configuration
@RequiredArgsConstructor
@Import(ChatGPTProperties.class)
public class ChatGPTConfig {
    private final ChatGPTProperties properties;
    // Single named timeout actually applied to the client. The previous code
    // declared an unused 10s DEFAULT_TIMEOUT while passing a magic 500s inline;
    // the 500s value is kept to preserve behavior.
    private static final Duration CLIENT_TIMEOUT = Duration.ofSeconds(500);
    /**
     * Creates the shared OpenAI client bean.
     * The proxy host/port must be present in {@link ChatGPTProperties};
     * missing values will fail here at context startup.
     *
     * @return an OpenAiService using the configured API key, timeout and proxy
     */
    @Bean
    public OpenAiService openAiService() {
        ObjectMapper mapper = OpenAiService.defaultObjectMapper();
        Proxy proxy = new Proxy(Proxy.Type.HTTP,
                new InetSocketAddress(properties.getProxyHostName(), properties.getProxyPort()));
        // Rebuild the library's default client so the proxy is attached while
        // keeping its authentication/interceptor setup.
        OkHttpClient client = OpenAiService.defaultClient(properties.getApiKey(), CLIENT_TIMEOUT)
                .newBuilder()
                .proxy(proxy)
                .build();
        Retrofit retrofit = OpenAiService.defaultRetrofit(client, mapper);
        OpenAiApi api = retrofit.create(OpenAiApi.class);
        return new OpenAiService(api);
    }
}
|
[
"com.theokanning.openai.service.OpenAiService.defaultClient"
] |
[((1114, 1275), 'com.theokanning.openai.service.OpenAiService.defaultClient'), ((1114, 1250), 'com.theokanning.openai.service.OpenAiService.defaultClient'), ((1114, 1220), 'com.theokanning.openai.service.OpenAiService.defaultClient')]
|
package com.prometheus.vord.service;
import com.theokanning.openai.image.CreateImageRequest;
import com.theokanning.openai.image.Image;
import com.theokanning.openai.service.OpenAiService;
import jakarta.annotation.Resource;
import lombok.RequiredArgsConstructor;
import org.springframework.stereotype.Service;
import java.util.ArrayList;
import java.util.List;
/**
 * Thin wrapper around the OpenAI image-generation endpoint.
 */
@Service
@RequiredArgsConstructor
public class GPTService {
    @Resource(name = "getOpenAiService")
    private final OpenAiService openAiService;
    /**
     * Requests three 512x512 images for the given prompt and returns their URLs.
     *
     * @param prompt free-text description of the desired picture
     * @return URLs of the generated images, in API order
     */
    public List<String> generatePicture(String prompt) {
        CreateImageRequest request = CreateImageRequest.builder()
                .prompt(prompt)
                .size("512x512")
                .n(3)
                .build();
        List<Image> images = openAiService.createImage(request).getData();
        List<String> urls = new ArrayList<>(images.size());
        for (Image image : images) {
            urls.add(image.getUrl());
        }
        return urls;
    }
}
|
[
"com.theokanning.openai.image.CreateImageRequest.builder"
] |
[((620, 760), 'com.theokanning.openai.image.CreateImageRequest.builder'), ((620, 735), 'com.theokanning.openai.image.CreateImageRequest.builder'), ((620, 713), 'com.theokanning.openai.image.CreateImageRequest.builder'), ((620, 680), 'com.theokanning.openai.image.CreateImageRequest.builder')]
|
package br.com.alura.screenMatch.service;
import com.theokanning.openai.completion.CompletionRequest;
import com.theokanning.openai.service.OpenAiService;
/**
 * Translates arbitrary text into Portuguese through the OpenAI completion API.
 */
public class ConsultaChatGPT
{
    /**
     * Asks the model to translate {@code texto} into Portuguese.
     *
     * @param texto text to translate
     * @return the model's translation (first completion choice)
     * @throws IllegalStateException if the OPENAI_API_KEY environment variable is missing
     */
    public static String obterTraducao(String texto)
    {
        // Read the key from the environment instead of hard-coding it: the
        // previous placeholder literal could never authenticate, and committing
        // real keys into source is a security risk.
        String apiKey = System.getenv("OPENAI_API_KEY");
        if (apiKey == null || apiKey.isEmpty()) {
            throw new IllegalStateException("OPENAI_API_KEY environment variable is not set");
        }
        OpenAiService service = new OpenAiService(apiKey);
        CompletionRequest requisicao = CompletionRequest.builder()
                .model("text-davinci-003")
                .prompt("traduza para o português o texto: " + texto)
                .maxTokens(1000)
                .temperature(0.7)
                .build();
        var resposta = service.createCompletion(requisicao);
        return resposta.getChoices().get(0).getText();
    }
}
|
[
"com.theokanning.openai.completion.CompletionRequest.builder"
] |
[((371, 564), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((371, 547), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((371, 521), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((371, 496), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((371, 433), 'com.theokanning.openai.completion.CompletionRequest.builder')]
|
package data_access;
import com.theokanning.openai.audio.CreateSpeechRequest;
import com.theokanning.openai.completion.chat.ChatCompletionRequest;
import com.theokanning.openai.completion.chat.ChatMessage;
import com.theokanning.openai.completion.chat.ChatMessageRole;
import com.theokanning.openai.service.OpenAiService;
import javazoom.jl.decoder.JavaLayerException;
import javazoom.jl.player.advanced.AdvancedPlayer;
import okhttp3.ResponseBody;
import services.generate_password.GeneratePasswordDataAccessInterface;
import services.suggest_reply.SuggestReplyDataAccessInterface;
import services.text_to_speech.TextToSpeechDataAccessInterface;
import services.translate_message.TranslateMessageDataAccessInterface;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.util.ArrayList;
import java.util.List;
/**
 * Data access object backed by the OpenAI API: gpt-4 chat completions for
 * password generation, reply suggestion and translation, plus tts-1
 * text-to-speech playback.
 */
public class GPTDataAccessObject implements GeneratePasswordDataAccessInterface, TextToSpeechDataAccessInterface,
        SuggestReplyDataAccessInterface, TranslateMessageDataAccessInterface {
    private final OpenAiService service;
    /**
     * Initializes a new GPTDataAccessObject with the provided OpenAI API key.
     *
     * @param openaiApiKey The API key for accessing the OpenAI service.
     */
    public GPTDataAccessObject(String openaiApiKey) {
        this.service = new OpenAiService(openaiApiKey);
    }
    /** Plays an MP3 stream synchronously; decoding failures are reported, not thrown. */
    private static void playMP3(InputStream inputStream) {
        try {
            AdvancedPlayer player = new AdvancedPlayer(inputStream);
            // Start playing the MP3 file
            player.play();
        } catch (JavaLayerException e) {
            // Best-effort playback: a decoder error should not crash the caller.
            e.printStackTrace();
        }
    }
    /**
     * Sends a single USER message to gpt-4 and returns the reply content.
     * Shared by the password, reply-suggestion and translation features, which
     * previously each duplicated this whole request/error-handling sequence.
     *
     * @param prompt      the user prompt to send
     * @param maxTokens   completion length cap, or null for the API default
     * @param errorResult value returned when the API call fails for any reason
     * @return the model's reply content, or {@code errorResult} on failure
     */
    private String chatCompletion(String prompt, Integer maxTokens, String errorResult) {
        List<ChatMessage> messages = new ArrayList<>();
        messages.add(new ChatMessage(ChatMessageRole.USER.value(), prompt));
        ChatCompletionRequest chatCompletionRequest = (maxTokens == null)
                ? ChatCompletionRequest.builder().model("gpt-4").messages(messages).build()
                : ChatCompletionRequest.builder().model("gpt-4").messages(messages).maxTokens(maxTokens).build();
        try {
            ChatMessage responseMessage = service.createChatCompletion(chatCompletionRequest).getChoices().get(
                    0).getMessage();
            return responseMessage.getContent();
        } catch (Exception e) {
            return errorResult;
        }
    }
    /**
     * Get the OpenAiService instance associated with this data access object.
     *
     * @return The OpenAiService instance used for interacting with the OpenAI API.
     */
    public OpenAiService getService() {
        return service;
    }
    /**
     * Generates a secure password based on the provided prompt using the GPT model.
     *
     * @param prompt The prompt for generating the password.
     * @return The generated secure password as a string.
     */
    @Override
    public String generateSecurePassword(String prompt) {
        // 10-token cap keeps the reply short enough to be a bare password.
        return chatCompletion(prompt, 10, "Error: Failed to generate a password.");
    }
    /**
     * Generates audio from the provided text using a text-to-speech service and plays it.
     *
     * @param message The text to be converted into audio.
     * @return {@code true} if the audio generation and playback were successful; {@code false}
     * otherwise.
     */
    @Override
    public boolean generateAudio(String message) {
        CreateSpeechRequest createSpeechRequest = CreateSpeechRequest.builder().model("tts-1").input(message).voice(
                "alloy").build();
        try {
            ResponseBody speech = service.createSpeech(createSpeechRequest);
            // Play the MP3 directly without saving to a file
            playMP3(new ByteArrayInputStream(speech.bytes()));
            return true;
        } catch (IOException e) {
            e.printStackTrace();
            return false;
        }
    }
    /**
     * Generates a suggested reply based on the provided prompt using the GPT model.
     *
     * @param prompt The prompt for generating the suggested reply.
     * @return The generated suggested reply as a string.
     */
    @Override
    public String generateSuggestedReply(String prompt) {
        return chatCompletion(prompt, null, "Error: Failed to suggest a reply.");
    }
    /**
     * Translates the given text into the specified target language using the GPT-4 language model.
     *
     * @param textToTranslate The text to be translated.
     * @param targetLanguage The target language into which the text should be translated.
     * @return The translated text or an error message if the translation fails.
     */
    @Override
    public String translate(String textToTranslate, String targetLanguage) {
        String prompt = String.format("Translate the following into %s: %s", targetLanguage, textToTranslate);
        return chatCompletion(prompt, null, "Error: Failed to generate a translation.");
    }
}
|
[
"com.theokanning.openai.audio.CreateSpeechRequest.builder",
"com.theokanning.openai.completion.chat.ChatMessageRole.USER.value",
"com.theokanning.openai.completion.chat.ChatCompletionRequest.builder"
] |
[((2481, 2509), 'com.theokanning.openai.completion.chat.ChatMessageRole.USER.value'), ((2610, 2714), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((2610, 2706), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((2610, 2692), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((2610, 2656), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((3534, 3633), 'com.theokanning.openai.audio.CreateSpeechRequest.builder'), ((3534, 3625), 'com.theokanning.openai.audio.CreateSpeechRequest.builder'), ((3534, 3593), 'com.theokanning.openai.audio.CreateSpeechRequest.builder'), ((3534, 3578), 'com.theokanning.openai.audio.CreateSpeechRequest.builder'), ((4442, 4470), 'com.theokanning.openai.completion.chat.ChatMessageRole.USER.value'), ((4571, 4661), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((4571, 4653), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((4571, 4617), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((5642, 5670), 'com.theokanning.openai.completion.chat.ChatMessageRole.USER.value'), ((5771, 5932), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((5771, 5836), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((5771, 5817), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder')]
|
package io.github.lynbean.lynbot.cogs.openai.chat;
import java.util.List;
import com.google.common.base.Optional;
import com.theokanning.openai.completion.chat.ChatCompletionChunk;
import com.theokanning.openai.completion.chat.ChatCompletionRequest;
import com.theokanning.openai.completion.chat.ChatCompletionResult;
import com.theokanning.openai.completion.chat.ChatMessage;
import com.theokanning.openai.completion.chat.ChatMessageRole;
import io.github.lynbean.lynbot.Bot;
import io.github.lynbean.lynbot.cogs.openai.core.OpenAiCore;
import io.reactivex.functions.Action;
import io.reactivex.functions.Consumer;
import lombok.Getter;
import lombok.Setter;
import lombok.experimental.Accessors;
/**
 * Fluent wrapper around a single OpenAI chat-completion call. Any tuning
 * field left null falls back to the matching "openai.chat.*" entry of the
 * bot configuration when the request is assembled.
 */
@Accessors(chain = true)
public class Chat extends OpenAiCore {
    // Tuning overrides; null means "use the configured default"
    // (see createCompletionRequest).
    private @Getter @Setter Double frequencyPenalty;
    private @Getter @Setter Double presencePenalty;
    // NOTE(review): temperature is settable but never applied in
    // createCompletionRequest() — likely an omission; confirm intent.
    private @Getter @Setter Double temperature;
    private @Getter @Setter Double topP;
    private @Getter @Setter Integer maxTokens;
    // Conversation history sent with the request.
    private @Getter List<ChatMessage> messages;
    private @Getter @Setter String model;
    private @Getter @Setter String user;
    /**
     * Convenience factory: wraps plain text as a single USER-role message.
     *
     * @param content the user's message text
     * @return a Chat holding exactly that one message
     */
    public static Chat fromString(String content) {
        ChatMessage message = new ChatMessage();
        message.setContent(content);
        message.setRole(ChatMessageRole.USER.value());
        return new Chat(message);
    }
    public Chat(ChatMessage message) {
        super();
        this.messages = List.of(message);
    }
    public Chat(List<ChatMessage> messages) {
        super();
        this.messages = messages;
    }
    /**
     * Builds the request, substituting configuration defaults for every
     * field that was not explicitly set.
     * NOTE(review): Guava's Optional.or(value) throws NullPointerException
     * when the fallback itself is null, so a missing
     * "openai.chat.default_model" config entry would fail here — verify the
     * configuration always provides it.
     */
    private ChatCompletionRequest createCompletionRequest() {
        return ChatCompletionRequest.builder()
            .frequencyPenalty(
                Optional.fromNullable(frequencyPenalty)
                    .or(Bot.getConfig().getDouble("openai.chat.frequency_penalty"))
            )
            .maxTokens(
                Optional.fromNullable(maxTokens)
                    .or(Bot.getConfig().getInt("openai.chat.max_tokens"))
            )
            .messages(messages)
            .model(
                Optional.fromNullable(model)
                    .or(Bot.getConfig().getString("openai.chat.default_model"))
            )
            .presencePenalty(
                Optional.fromNullable(presencePenalty)
                    .or(Bot.getConfig().getDouble("openai.chat.presence_penalty"))
            )
            .topP(
                Optional.fromNullable(topP)
                    .or(Bot.getConfig().getDouble("openai.chat.top_p"))
            )
            .user(user)
            .build();
    }
    /** Blocking variant: performs the completion and returns the full result. */
    public ChatCompletionResult complete() {
        return OPEN_AI_SERVICE.createChatCompletion(createCompletionRequest());
    }
    /**
     * Streaming variant: delivers each chunk to {@code onNext}, routes
     * failures to {@code onError}, and runs {@code onComplete} at the end.
     */
    public void complete(Consumer<ChatCompletionChunk> onNext, Consumer<Throwable> onError, Action onComplete) {
        OPEN_AI_SERVICE.streamChatCompletion(createCompletionRequest())
            .subscribe(
                chunk -> onNext.accept(chunk),
                error -> onError.accept(error),
                onComplete
            );
    }
}
|
[
"com.theokanning.openai.completion.chat.ChatMessageRole.USER.value",
"com.theokanning.openai.completion.chat.ChatCompletionRequest.builder"
] |
[((1301, 1329), 'com.theokanning.openai.completion.chat.ChatMessageRole.USER.value'), ((1659, 2603), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((1659, 2582), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((1659, 2558), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((1659, 2409), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((1659, 2227), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((1659, 2068), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((1659, 2036), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((1659, 1875), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((1738, 1861), 'com.google.common.base.Optional.fromNullable'), ((1802, 1860), 'io.github.lynbean.lynbot.Bot.getConfig'), ((1916, 2022), 'com.google.common.base.Optional.fromNullable'), ((1973, 2021), 'io.github.lynbean.lynbot.Bot.getConfig'), ((2105, 2213), 'com.google.common.base.Optional.fromNullable'), ((2158, 2212), 'io.github.lynbean.lynbot.Bot.getConfig'), ((2274, 2395), 'com.google.common.base.Optional.fromNullable'), ((2337, 2394), 'io.github.lynbean.lynbot.Bot.getConfig'), ((2445, 2544), 'com.google.common.base.Optional.fromNullable'), ((2497, 2543), 'io.github.lynbean.lynbot.Bot.getConfig')]
|
package org.ncgr.chatbot;
import java.util.Collections;
import java.util.List;
import com.theokanning.openai.embedding.Embedding;
import com.theokanning.openai.embedding.EmbeddingRequest;
import com.theokanning.openai.embedding.EmbeddingResult;
import com.theokanning.openai.service.OpenAiService;
/**
 * Class to test retrieval of embeddings from OpenAI and upserting them to Pinecone.
 */
public class EmbeddingUpsertTest {
    // the OpenAI embedding model to use
    static String EMBED_MODEL = "text-embedding-ada-002";
    /**
     * Fetches the embedding vector(s) for the given text and prints them.
     * Args: &lt;index&gt; (Pinecone index name) and &lt;text&gt; to embed.
     */
    public static void main(String[] args) {
        if (args.length<2) {
            // fixed typo: usage previously said "EbeddingUpsertTest"
            System.err.println("Usage: EmbeddingUpsertTest <index> <text>");
            System.exit(1);
        }
        String index = args[0];
        String text = args[1];
        String openaiApiKey = System.getenv().get("OPENAI_API_KEY");
        // NOTE(review): the PINECONE_* environment reads were removed — they
        // were never used; the Pinecone upsert half of this test (and the
        // <index> argument) is not implemented yet.
        OpenAiService service = new OpenAiService(openaiApiKey);
        EmbeddingRequest embeddingRequest = EmbeddingRequest.builder()
            .model(EMBED_MODEL)
            .input(Collections.singletonList(text))
            .build();
        List<Embedding> embeddings = service.createEmbeddings(embeddingRequest).getData();
        for (Embedding embedding : embeddings) {
            List<Double> vector = embedding.getEmbedding();
            System.out.println("object: " + embedding.getObject());
            System.out.println("index: " + embedding.getIndex());
            System.out.println("vector: " + vector);
        }
    }
}
|
[
"com.theokanning.openai.embedding.EmbeddingRequest.builder"
] |
[((1295, 1426), 'com.theokanning.openai.embedding.EmbeddingRequest.builder'), ((1295, 1405), 'com.theokanning.openai.embedding.EmbeddingRequest.builder'), ((1295, 1353), 'com.theokanning.openai.embedding.EmbeddingRequest.builder')]
|
package com.topopixel.library.langchain.java.llms.openai;
import com.theokanning.openai.completion.CompletionRequest;
import com.theokanning.openai.completion.CompletionResult;
import com.topopixel.library.langchain.java.callbacks.manager.CallbackManagerForLLMRun;
import com.topopixel.library.langchain.java.llms.base.BaseLLM;
import com.topopixel.library.langchain.java.llms.openai.sdk.OpenAiService;
import com.topopixel.library.langchain.java.schema.Generation;
import com.topopixel.library.langchain.java.schema.LLMResult;
import com.topopixel.library.langchain.java.utils.LangChainUtils;
import java.lang.reflect.Field;
import java.util.*;
import java.util.stream.Collectors;
import lombok.*;
/**
 * Base class for OpenAI completion-style LLM wrappers. Resolves the API
 * key/base from config or environment, copies matching settings from an
 * {@code OpenAIConfig} onto same-named fields via reflection, and turns
 * prompts into {@link LLMResult}s through the completion endpoint.
 */
@Getter
@Setter
public abstract class BaseOpenAI extends BaseLLM {
    public OpenAiService service;
    public String modelName = "text-davinci-003";
    public String openaiApiKey = null;
    public String openaiApiBase = null;
    public String openaiOrganization = null;
    // configs
    private Float temperature = 0.7f;
    private Integer maxTokens = 256;
    private Float topP = 1f;
    private Float frequencyPenalty = 0f;
    private Float presencePenalty = 0f;
    private Integer n = 1;
    private Integer bestOf = 1;
    protected BaseOpenAI() {}
    protected BaseOpenAI(OpenAIConfig config) {
        // TODO: make exception when use gpt3.5 or gpt4
        validataEnvironment(config);
    }
    // Resolves API key/base (config map first, then environment variables),
    // builds the service, then copies remaining config values onto this object.
    // NOTE(review): method name has a typo ("validata"); private, so renaming
    // would be safe in a follow-up.
    private void validataEnvironment(OpenAIConfig config) {
        Map<String, Object> configMap = config.toMap();
        if (openaiApiKey == null) {
            openaiApiKey = LangChainUtils.getFromDictOrEnv(configMap, "openai_api_key", "OPENAI_API_KEY", Optional.empty());
        }
        if (openaiApiBase == null) {
            openaiApiBase = LangChainUtils.getFromDictOrEnv(configMap, "openai_api_base", "OPENAI_API_BASE", Optional.empty());
        }
        // A custom API base selects the two-argument service constructor.
        if (openaiApiBase != null) {
            service = new OpenAiService(openaiApiKey, openaiApiBase);
        } else {
            service = new OpenAiService(openaiApiKey);
        }
        injectConfig(config);
    }
    // Reflectively copies every non-null OpenAIConfig field onto the
    // same-named field of this class; fields with no counterpart are skipped.
    private void injectConfig(OpenAIConfig config) {
        try {
            Field[] fields = OpenAIConfig.class.getDeclaredFields();
            Field[] selfFields = BaseOpenAI.class.getDeclaredFields();
            List<String> configNames = Arrays.stream(fields).map(Field::getName).collect(Collectors.toList());
            List<String> selfNames = Arrays.stream(selfFields).map(Field::getName).collect(Collectors.toList());
            for (int i = 0; i < configNames.size(); i++) {
                String name = configNames.get(i);
                if (!selfNames.contains(name)) {
                    continue;
                }
                Field configField = fields[i];
                Field selfField = BaseOpenAI.class.getDeclaredField(name);
                selfField.setAccessible(true);
                configField.setAccessible(true);
                // null config values keep this object's defaults.
                if (configField.get(config) == null) {
                    continue;
                }
                selfField.set(this, configField.get(config));
            }
        } catch (Exception e) {
            // NOTE(review): any reflection failure is silently swallowed, so
            // config values may be only partially applied — consider logging.
            return;
        }
    }
    // TODO: callback manager
    @Override
    protected LLMResult internalGenerate(List<String> prompts, List<String> stop,
        CallbackManagerForLLMRun runManager) {
        // for now only support 1 string elem
        // TODO: support streaming
        // TODO: support sub prompts
        CompletionRequest request = CompletionRequest.builder()
            .model(modelName)
            .temperature(temperature.doubleValue())
            .maxTokens(maxTokens)
            .topP(topP.doubleValue())
            .frequencyPenalty(frequencyPenalty.doubleValue())
            .presencePenalty(presencePenalty.doubleValue())
            .n(n)
            .bestOf(bestOf)
            .prompt(prompts.get(0))
            .build();
        if (stop != null) {
            request.setStop(stop);
        }
        CompletionResult response = service.createCompletion(request);
        return createLLMResult(prompts, response);
    }
    // Maps each completion choice to a Generation (text plus finish_reason /
    // logprobs info) and records token usage and model name as llmOutput.
    private LLMResult createLLMResult(List<String> prompts, CompletionResult response) {
        // for now only support 1 string elem
        List<Generation> generation = response.getChoices().stream()
            .map(choice -> Generation.builder()
                .text(choice.getText())
                .generationInfo(new HashMap<String, Object>() {{
                    put("finish_reason", choice.getFinish_reason());
                    put("logprobs", choice.getLogprobs());
                }}).build()
            ).collect(Collectors.toList());
        Map<String, Object> llmOuput = new HashMap<String, Object>() {{
            put("token_usage", response.getUsage());
            put("model_name", modelName);
        }};
        // Arrays.asList(generation) nests the choices as one prompt's result.
        return LLMResult.builder()
            .generations(Arrays.asList(generation))
            .llmOutput(llmOuput)
            .build();
    }
    @Override
    protected String llmType() {
        return "openai";
    }
}
|
[
"com.theokanning.openai.completion.CompletionRequest.builder"
] |
[((3541, 3947), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((3541, 3926), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((3541, 3890), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((3541, 3862), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((3541, 3844), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((3541, 3784), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((3541, 3722), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((3541, 3684), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((3541, 3650), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((3541, 3598), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((4384, 4685), 'com.topopixel.library.langchain.java.schema.Generation.builder'), ((4384, 4677), 'com.topopixel.library.langchain.java.schema.Generation.builder'), ((4384, 4448), 'com.topopixel.library.langchain.java.schema.Generation.builder'), ((4924, 5049), 'com.topopixel.library.langchain.java.schema.LLMResult.builder'), ((4924, 5028), 'com.topopixel.library.langchain.java.schema.LLMResult.builder'), ((4924, 4995), 'com.topopixel.library.langchain.java.schema.LLMResult.builder')]
|
package com.yksc.dummy.controller;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.Date;
import java.util.List;
import java.util.Optional;
import org.apache.commons.lang3.StringUtils;
import org.springframework.http.ResponseEntity;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RequestHeader;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RestController;
import com.theokanning.openai.completion.chat.ChatCompletionChoice;
import com.theokanning.openai.completion.chat.ChatCompletionRequest;
import com.theokanning.openai.completion.chat.ChatMessageRole;
import com.theokanning.openai.service.OpenAiService;
import com.yksc.dummy.data.Data;
import com.yksc.model.db.ChatMessage;
import com.yksc.model.db.ChatRoom;
import com.yksc.model.db.User;
import com.yksc.model.rest.ChatRequest;
import com.yksc.model.util.IdGeneraterUtil;
/**
 * Relays chat messages to OpenAI (or returns a canned dummy reply) and
 * records both sides of the conversation in the in-memory {@link Data} store.
 */
@RestController
@RequestMapping("/ChatCompletions")
public class ChatCompletionsController {
    /**
     * Handles one user message: finds or creates the chat room, obtains the
     * AI reply, stores both messages, and returns the AI message.
     *
     * @param userMailAddress caller identity from the "user-mail-address" header
     * @param chatRequest     room id, selected model and full message history
     * @return the AI reply message, annotated with room id/title
     */
    @PostMapping
    public ResponseEntity<Object> generateText(@RequestHeader("user-mail-address") String userMailAddress, @RequestBody ChatRequest chatRequest) {
        List<ChatMessage> chatMessageList = chatRequest.getChatMessageList();
        String selectedModel = chatRequest.getSelectedAiModel();
        // The newest user message is the last entry of the submitted history.
        ChatMessage userChatMessage = chatMessageList.get(chatMessageList.size() - 1);
        userChatMessage.setMessageId(IdGeneraterUtil.nextGuid());
        boolean first = false;
        // A blank room id means this is the first message of a new conversation.
        ChatRoom chatRoom = null;
        if (StringUtils.isBlank(chatRequest.getRoomId())) {
            Optional<User> optionalUser = Data.usersMap.values().stream()
                    .filter(user -> user.getEmail().equals(userMailAddress))
                    .findFirst();
            if (optionalUser.isPresent()) {
                first = true;
                chatRoom = new ChatRoom();
                String guid = IdGeneraterUtil.nextGuid();
                chatRoom.setRoomId(guid);
                chatRoom.setOwnerUserId(optionalUser.get().getUserId());
                chatRoom.setRoomTitle("include server");
                chatRoom.setCreateDate(new Date());
                chatRoom.setUpdateDate(new Date());
                chatRoom.setAiModel(selectedModel);
                chatRoom.setAiModelSource("Open Ai");
                chatRoom.setSumTotal(0);
                Data.chatRoomList.add(chatRoom);
            }
        } else {
            String roomId = chatRequest.getRoomId();
            Optional<ChatRoom> optional = Data.chatRoomList.stream()
                    .filter(temp -> StringUtils.equals(roomId, temp.getRoomId())).findFirst();
            chatRoom = optional.get();
        }
        System.out.println("selectedModel:" + selectedModel);
        System.out.println("chatMessageList.size:" + chatMessageList.size());
        boolean isRealChatGpt = true;
        if (isRealChatGpt) {
            if (first) {
                // Ask the model for a short room title on the first message.
                chatRoom.setRoomTitle(thumbnailText(userChatMessage.getMessage()));
            }
            long start = Calendar.getInstance().getTimeInMillis();
            String result = realChatGpt(chatMessageList, selectedModel);
            ChatMessage chatMessage = new ChatMessage(IdGeneraterUtil.nextGuid(), "ai", result, new Date());
            long end = Calendar.getInstance().getTimeInMillis();
            // BUG FIX: was "start - end", which always produced a negative duration.
            chatMessage.setResponseTime(end - start);
            System.out.println("new user message id:" + userChatMessage.getMessageId());
            registerMessages(chatRoom, userChatMessage, chatMessage);
            return ResponseEntity.ok(chatMessage);
        } else {
            long start = Calendar.getInstance().getTimeInMillis();
            SimpleDateFormat simple = new SimpleDateFormat("yyyy/MM/dd hh:mm:ss.SSS");
            String date = simple.format(Calendar.getInstance().getTime());
            String result = "This is a pretend generated message. " + date + " (><;)";
            String guid = IdGeneraterUtil.nextGuid();
            Date sendDate = Calendar.getInstance().getTime();
            ChatMessage chatMessage = new ChatMessage(guid, "ai", result, sendDate);
            try {
                // Pretending to work
                Thread.sleep(3000L);
            } catch (InterruptedException e) {
                // Restore the interrupt flag instead of only printing the trace.
                Thread.currentThread().interrupt();
                e.printStackTrace();
            }
            long end = Calendar.getInstance().getTimeInMillis();
            // BUG FIX: was "start - end", which always produced a negative duration.
            chatMessage.setResponseTime(end - start);
            registerMessages(chatRoom, userChatMessage, chatMessage);
            return ResponseEntity.ok(chatMessage);
        }
    }
    /** Stores both messages in the shared map, links them to the room, and tags the AI reply with the room info. */
    private void registerMessages(ChatRoom chatRoom, ChatMessage userChatMessage, ChatMessage aiChatMessage) {
        Data.chatMessgaeMap.put(userChatMessage.getMessageId(), userChatMessage);
        chatRoom.getChatMessageIds().add(userChatMessage.getMessageId());
        Data.chatMessgaeMap.put(aiChatMessage.getMessageId(), aiChatMessage);
        chatRoom.getChatMessageIds().add(aiChatMessage.getMessageId());
        aiChatMessage.setRoomTitle(chatRoom.getRoomTitle());
        aiChatMessage.setRoomId(chatRoom.getRoomId());
    }
    /** Streams a chat completion and concatenates every non-null content delta into one string. */
    private String collectStreamedContent(OpenAiService service, ChatCompletionRequest request) {
        StringBuilder contentBuilder = new StringBuilder();
        service.streamChatCompletion(request).doOnError(Throwable::printStackTrace)
                .blockingForEach(chatCompletion -> {
                    for (ChatCompletionChoice choice : chatCompletion.getChoices()) {
                        com.theokanning.openai.completion.chat.ChatMessage message = choice.getMessage();
                        if (message != null && message.getContent() != null) {
                            contentBuilder.append(message.getContent());
                        }
                    }
                });
        return contentBuilder.toString();
    }
    /** Asks gpt-3.5-turbo for a <=20-character title summarizing the first message. */
    private String thumbnailText(String aiMessage) {
        aiMessage = "Please summarize in 20 characters or less. No explanation needed. Base your summary on the language of the message." +
                "\r\n" + aiMessage;
        String apiKey = System.getenv("OPEN_AI_API_KEY");
        List<com.theokanning.openai.completion.chat.ChatMessage> messages = new ArrayList<>();
        messages.add(new com.theokanning.openai.completion.chat.ChatMessage(ChatMessageRole.USER.value(), aiMessage));
        ChatCompletionRequest chatCompletionRequest = ChatCompletionRequest.builder().model("gpt-3.5-turbo-0125").maxTokens(10)
                .messages(messages).build();
        return collectStreamedContent(new OpenAiService(apiKey), chatCompletionRequest);
    }
    /** Replays the whole conversation to the selected model and returns its streamed reply. */
    private String realChatGpt(List<ChatMessage> chatMessageList, String selectedModel) {
        String apiKey = System.getenv("OPEN_AI_API_KEY");
        OpenAiService service = new OpenAiService(apiKey);
        List<com.theokanning.openai.completion.chat.ChatMessage> messages = new ArrayList<>();
        for (ChatMessage chatMessage : chatMessageList) {
            String message = chatMessage.getMessage();
            String sender = chatMessage.getSender();
            // Map the stored sender to the OpenAI role; unknown senders are skipped.
            if (StringUtils.equals(sender, ChatMessage.SENDER_AI)) {
                messages.add(new com.theokanning.openai.completion.chat.ChatMessage(
                        ChatMessageRole.ASSISTANT.value(), message));
            } else if (StringUtils.equals(sender, ChatMessage.SENDER_USER)) {
                messages.add(new com.theokanning.openai.completion.chat.ChatMessage(
                        ChatMessageRole.USER.value(), message));
            }
        }
        System.out.println("selectedModel:" + selectedModel);
        ChatCompletionRequest chatCompletionRequest = ChatCompletionRequest.builder().model(selectedModel)
                .messages(messages).build();
        return collectStreamedContent(service, chatCompletionRequest);
    }
}
|
[
"com.theokanning.openai.completion.chat.ChatMessageRole.USER.value",
"com.theokanning.openai.completion.chat.ChatMessageRole.ASSISTANT.value",
"com.theokanning.openai.completion.chat.ChatCompletionRequest.builder"
] |
[((1872, 1995), 'com.yksc.dummy.data.Data.usersMap.values'), ((1872, 1973), 'com.yksc.dummy.data.Data.usersMap.values'), ((1872, 1903), 'com.yksc.dummy.data.Data.usersMap.values'), ((1872, 1894), 'com.yksc.dummy.data.Data.usersMap.values'), ((2574, 2607), 'com.yksc.dummy.data.Data.chatRoomList.add'), ((2726, 2825), 'com.yksc.dummy.data.Data.chatRoomList.stream'), ((2726, 2813), 'com.yksc.dummy.data.Data.chatRoomList.stream'), ((2726, 2752), 'com.yksc.dummy.data.Data.chatRoomList.stream'), ((3260, 3300), 'java.util.Calendar.getInstance'), ((3503, 3543), 'java.util.Calendar.getInstance'), ((3716, 3788), 'com.yksc.dummy.data.Data.chatMessgaeMap.put'), ((3878, 3942), 'com.yksc.dummy.data.Data.chatMessgaeMap.put'), ((4240, 4280), 'java.util.Calendar.getInstance'), ((4404, 4436), 'java.util.Calendar.getInstance'), ((4602, 4634), 'java.util.Calendar.getInstance'), ((4894, 4934), 'java.util.Calendar.getInstance'), ((5009, 5081), 'com.yksc.dummy.data.Data.chatMessgaeMap.put'), ((5180, 5244), 'com.yksc.dummy.data.Data.chatMessgaeMap.put'), ((5972, 6000), 'com.theokanning.openai.completion.chat.ChatMessageRole.USER.value'), ((6064, 6170), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((6064, 6162), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((6064, 6137), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((6064, 6123), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((7354, 7387), 'com.theokanning.openai.completion.chat.ChatMessageRole.ASSISTANT.value'), ((7551, 7579), 'com.theokanning.openai.completion.chat.ChatMessageRole.USER.value'), ((7715, 7800), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((7715, 7792), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((7715, 7767), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder')]
|
package com.staringpig.framework.openai.model;
import com.staringpig.framework.openai.openai.OpenAI;
import com.staringpig.framework.openai.session.SessionManager;
import com.theokanning.openai.completion.CompletionRequest;
import com.theokanning.openai.completion.CompletionResult;
import com.theokanning.openai.moderation.Moderation;
import net.dreamlu.mica.core.utils.$;
import net.dreamlu.mica.core.utils.StringUtil;
import java.math.BigDecimal;
import java.util.List;
import java.util.function.Consumer;
public abstract class CompletionModel extends OpenAIModel {
protected final String suffix;
/**
* Echo back the prompt in addition to the completion
*/
protected final Boolean echo;
/**
* Generates best_of completions server-side and returns the "best"
* (the one with the lowest log probability per token).
* Results cannot be streamed.
* <p>
* When used with {@link CompletionRequest#n}, best_of controls the number of candidate completions and n specifies how many to return,
* best_of must be greater than n.
*/
protected final Integer bestOf;
/**
* Include the log probabilities on the logprobs most likely tokens, as well the chosen tokens.
* For example, if logprobs is 10, the API will return a list of the 10 most likely tokens.
* The API will always return the logprob of the sampled token,
* so there may be up to logprobs+1 elements in the response.
*/
protected final Integer logprobs;
protected CompletionModel(String id, OpenAI openAI, OpenAI.Metadata metadata, Integer maxTokens,
BigDecimal pricePerThousandTokensOfPrompt, BigDecimal pricePerThousandTokensOfCompletion,
String suffix, Boolean echo, Integer bestOf,
Integer logprobs, SessionManager sessionManager) {
super(id, openAI, metadata, maxTokens, pricePerThousandTokensOfPrompt, pricePerThousandTokensOfCompletion,
sessionManager);
this.suffix = suffix;
this.echo = echo;
this.bestOf = bestOf;
this.logprobs = logprobs;
}
@Override
public Answer ask(String user, String question, Integer limitTokens) {
List<Moderation> moderation = super.moderation(question);
if ($.isNotEmpty(moderation)) {
return new Answer(moderation);
}
return buildAnswer(super.openAI.createCompletion(buildRequest(user, question, limitTokens)));
}
@Override
public void ask(String user, String question, Integer limitTokens, Consumer<Answer> onAnswer) {
List<Moderation> moderation = super.moderation(question);
if ($.isNotEmpty(moderation)) {
onAnswer.accept(new Answer(moderation));
}
super.openAI.createCompletion(buildRequest(user, question, limitTokens),
completionResult -> onAnswer.accept(buildAnswer(completionResult)));
}
private Answer buildAnswer(CompletionResult completion) {
StringBuilder answer = new StringBuilder();
for (int i = 0; i < super.metadata.getN(); i++) {
answer.append(StringUtil.trimWhitespace(completion.getChoices().get(i).getText()));
}
return new Answer(completion.getUsage().getTotalTokens(),
super.cost(completion.getUsage().getPromptTokens(), completion.getUsage().getCompletionTokens()), answer.toString());
}
private CompletionRequest buildRequest(String user, String question, Integer limitTokens) {
return CompletionRequest.builder()
.model(this.getId())
.prompt(question)
.temperature(super.metadata.getTemperature())
.maxTokens(Math.min(this.maxTokens, limitTokens) - OpenAIModel.tokens(question) - 10)
.user(user)
.n(super.metadata.getN())
.stop(this.metadata.getStop())
.bestOf(this.bestOf)
.frequencyPenalty(this.metadata.getFrequencyPenalty())
.echo(this.echo)
.logitBias(this.metadata.getLogitBias())
.logprobs(this.logprobs)
.presencePenalty(this.metadata.getPresencePenalty())
.stream(this.metadata.getStream())
.suffix(this.suffix)
.build();
}
}
|
[
"com.theokanning.openai.completion.CompletionRequest.builder"
] |
[((3573, 4373), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((3573, 4348), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((3573, 4311), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((3573, 4260), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((3573, 4191), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((3573, 4150), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((3573, 4093), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((3573, 4060), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((3573, 3989), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((3573, 3952), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((3573, 3905), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((3573, 3863), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((3573, 3835), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((3573, 3733), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((3573, 3671), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((3573, 3637), 'com.theokanning.openai.completion.CompletionRequest.builder')]
|
package estudo.java.springboot.screenmatch.service;
import com.theokanning.openai.completion.CompletionRequest;
import com.theokanning.openai.service.OpenAiService;
public class ConsultaChatGPT {
public static String obterTraducao(String texto) {
OpenAiService service = new OpenAiService(System.getenv("OPENAI_API_KEY"));
CompletionRequest requisicao = CompletionRequest.builder()
.model("text-davinci-003")
.prompt("traduza para o português o texto: " + texto)
.maxTokens(1000)
.temperature(0.7)
.build();
var resposta = service.createCompletion(requisicao);
return resposta.getChoices().get(0).getText();
}
}
|
[
"com.theokanning.openai.completion.CompletionRequest.builder"
] |
[((377, 610), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((377, 585), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((377, 551), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((377, 518), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((377, 447), 'com.theokanning.openai.completion.CompletionRequest.builder')]
|
package com.bjoggis.openai.service;
import com.bjoggis.openai.entity.AiConfiguration;
import com.bjoggis.openai.entity.Message;
import com.bjoggis.openai.entity.ThreadChannel;
import com.bjoggis.openai.function.ActiveAiConfigurationFunction;
import com.bjoggis.openai.function.SaveMessageFunction;
import com.bjoggis.openai.function.SaveMessageFunction.SaveMessageOptions;
import com.bjoggis.openai.properties.OpenaiProperties;
import com.bjoggis.openai.repository.AiConfigurationRepository;
import com.bjoggis.openai.repository.MessageRepository;
import com.bjoggis.openai.repository.ThreadChannelRepository;
import com.theokanning.openai.completion.chat.ChatCompletionRequest;
import com.theokanning.openai.completion.chat.ChatCompletionResult;
import com.theokanning.openai.completion.chat.ChatMessage;
import com.theokanning.openai.service.OpenAiService;
import java.nio.charset.StandardCharsets;
import java.security.MessageDigest;
import java.time.Duration;
import java.util.ArrayList;
import java.util.Base64;
import java.util.List;
import java.util.Set;
import java.util.UUID;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.stereotype.Service;
import org.springframework.util.StringUtils;
@Service
public class ChatServiceImpl implements ChatService {
private final Logger logger = LoggerFactory.getLogger(ChatServiceImpl.class);
private final OpenaiProperties properties;
private final ActiveAiConfigurationFunction configurationFunction;
private final AiConfigurationRepository aiConfigurationRepository;
private final MessageRepository messageRepository;
private final SaveMessageFunction saveMessageFunction;
private final ThreadChannelRepository threadChannelRepository;
public ChatServiceImpl(OpenaiProperties properties,
ActiveAiConfigurationFunction configurationFunction,
AiConfigurationRepository aiConfigurationRepository,
MessageRepository messageRepository, SaveMessageFunction saveMessageFunction,
ThreadChannelRepository threadChannelRepository) {
this.properties = properties;
this.configurationFunction = configurationFunction;
this.aiConfigurationRepository = aiConfigurationRepository;
this.messageRepository = messageRepository;
this.saveMessageFunction = saveMessageFunction;
this.threadChannelRepository = threadChannelRepository;
}
@Override
public String chat(String messageId, String message, String threadId, String userId) {
final ThreadChannel threadChannel = threadChannelRepository.findByThreadId(threadId);
AiConfiguration configuration = configurationFunction.get();
OpenAiService service = new OpenAiService(properties.token(), Duration.ofMinutes(1));
// Count the number of tokens in the message
long messageTokens = StringUtils.countOccurrencesOf(message, " ") + 1;
if (messageTokens > configuration.getRequestMaxTokens()) {
logger.warn("Message too long, aborting");
return "Message too long, please try again with less than 100 words.";
}
saveMessageFunction.accept(new SaveMessageOptions(messageId, message, false, threadChannel));
Set<Message> messages = messageRepository.findByThreadChannel_ThreadIdOrderByCreatedAsc(
threadChannel.getThreadId());
if (messages.size() > configuration.getMaxMessages()) {
logger.warn("Too many messages, aborting");
return "Too many messages, please start a new thread.";
}
logger.info("Using system message: " + configuration.getSystemMessage());
List<ChatMessage> chatMessages = new ArrayList<>();
chatMessages.add(new ChatMessage("system", configuration.getSystemMessage()));
List<ChatMessage> oldMessages = new java.util.ArrayList<>(messages.stream().map(message2 -> {
switch (message2.getSender()) {
case USER -> {
return new ChatMessage("user", message2.getMessageAsString());
}
case SYSTEM -> {
return new ChatMessage("system", message2.getMessageAsString());
}
case ASSISTANT -> {
return new ChatMessage("assistant", message2.getMessageAsString());
}
}
return null;
}).toList());
chatMessages.addAll(oldMessages);
chatMessages.forEach(chatMessage -> logger.info(chatMessage.getRole() + ": " + chatMessage
.getContent()));
ChatCompletionRequest request = ChatCompletionRequest.builder()
.model(configuration.getModel())
.messages(chatMessages)
.n(configuration.getNumberOfMessages())
.maxTokens(configuration.getResponseMaxTokens())
.temperature(configuration.getTemperature())
.user(hashUserId(userId))
.build();
ChatCompletionResult response = service.createChatCompletion(request);
logger.info("Message tokens: " + response.getUsage().getPromptTokens());
long completionTokens = response.getUsage().getCompletionTokens();
logger.info("Completion tokens used: " + completionTokens);
String content = response.getChoices().get(0).getMessage().getContent();
saveMessageFunction.accept(
new SaveMessageOptions(UUID.randomUUID().toString(), content, true, threadChannel));
logger.info("Response: " + content);
return content;
}
private String hashUserId(String userId) {
String hashedUserID = "unknown";
try {
MessageDigest digest = MessageDigest.getInstance("SHA-256");
byte[] hashedId = digest.digest((userId).getBytes(StandardCharsets.UTF_8));
hashedUserID = Base64.getEncoder().encodeToString(hashedId);
} catch (Exception ex) {
ex.printStackTrace();
}
return hashedUserID;
}
}
|
[
"com.theokanning.openai.completion.chat.ChatCompletionRequest.builder"
] |
[((4381, 4694), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((4381, 4677), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((4381, 4643), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((4381, 4590), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((4381, 4533), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((4381, 4485), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((4381, 4453), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((5128, 5156), 'java.util.UUID.randomUUID'), ((5518, 5562), 'java.util.Base64.getEncoder')]
|
package com.cyster.sherpa.impl.advisor;
import java.util.ArrayList;
import java.util.List;
import java.util.stream.Collectors;
import com.cyster.sherpa.service.conversation.Conversation;
import com.cyster.sherpa.service.conversation.ConversationException;
import com.cyster.sherpa.service.conversation.Message;
import com.theokanning.openai.completion.chat.ChatCompletionRequest;
import com.theokanning.openai.completion.chat.ChatMessage;
import com.theokanning.openai.service.OpenAiService;
public class ChatAdvisorConversation implements Conversation {
private final String model = "gpt-3.5-turbo";
private OpenAiService openAiService;
private List<Message> messages;
ChatAdvisorConversation(OpenAiService openAiService, List<Message> messages) {
this.openAiService = openAiService;
this.messages = messages;
}
@Override
public Conversation addMessage(String message) {
this.messages.add(new Message(message));
return this;
}
@Override
public Message respond() throws ConversationException {
var chatMessages = new ArrayList<ChatMessage>();
for (var message : this.messages) {
if (message.getType() == Message.Type.SYSTEM) {
chatMessages.add(new ChatMessage("system", message.getContent()));
} else if (message.getType() == Message.Type.AI) {
chatMessages.add(new ChatMessage("assistant", message.getContent()));
} else if (message.getType() == Message.Type.USER) {
chatMessages.add(new ChatMessage("user", message.getContent()));
}
}
var chatCompletionRequest = ChatCompletionRequest.builder()
.model(model)
.messages(chatMessages)
.build();
var result = this.openAiService.createChatCompletion(chatCompletionRequest);
var choices = result.getChoices();
if (choices.size() == 0) {
messages.add(new Message(Message.Type.INFO, "No responses"));
throw new ConversationException("No Reponses");
}
if (choices.size() > 1) {
messages.add(new Message(Message.Type.INFO, "Multiple responses (ignored)"));
throw new ConversationException("Multiple Reponses");
}
var message = new Message(Message.Type.AI, choices.get(0).getMessage().getContent());
messages.add(message);
return message;
}
@Override
public List<Message> getMessages() {
return messages.stream()
.filter(message -> message.getType() == Message.Type.AI || message.getType() == Message.Type.USER)
.collect(Collectors.toList());
}
}
|
[
"com.theokanning.openai.completion.chat.ChatCompletionRequest.builder"
] |
[((1685, 1799), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((1685, 1778), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((1685, 1742), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder')]
|
package com.ramesh.openai;
import java.time.Duration;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import com.theokanning.openai.completion.chat.ChatCompletionRequest;
import com.theokanning.openai.completion.chat.ChatMessage;
import com.theokanning.openai.completion.chat.ChatMessageRole;
import com.theokanning.openai.service.OpenAiService;
/***
* This project demonstrates the Self Consistency prompting technique which is useful when there is need
* to get consistent responses from chat gpt, for e.g. solving a puzzle correctly
***/
class SelfConsistencyPrompting {
public static void main(String... args) {
// Set the Open AI Token & Model
String token = "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";
String model = "gpt-3.5-turbo";
// service handle for calling OpenAI APIs
OpenAiService service = new OpenAiService(token, Duration.ofSeconds(30));
// self consistent messages are part of the prompt which tell chat gpt how to think and respond thus
// giving consistent messages
System.out.println("\n-----------------------------------------------------------");
String[] prompts = new String[15];
prompts[0]="Q: There are 15 trees in the grove. Grove workers will plant trees in the grove today. After they are done,";
prompts[1]="there will be 21 trees. How many trees did the grove workers plant today?";
prompts[2]="A: We start with 15 trees. Later we have 21 trees. The difference must be the number of trees they planted.";
prompts[3]="So, they must have planted 21 - 15 = 6 trees. The answer is 6.";
prompts[4]="Q: If there are 3 cars in the parking lot and 2 more cars arrive, how many cars are in the parking lot?";
prompts[5]="A: There are 3 cars in the parking lot already. 2 more arrive. Now there are 3 + 2 = 5 cars. The answer is 5.";
prompts[6]="Q: Leah had 32 chocolates and her sister had 42. If they ate 35, how many pieces do they have left in total?";
prompts[7]="A: Leah had 32 chocolates and Leah’s sister had 42. That means there were originally 32 + 42 = 74";
prompts[8]="chocolates. 35 have been eaten. So in total they still have 74 - 35 = 39 chocolates. The answer is 39.";
prompts[9]="Q: Jason had 20 lollipops. He gave Denny some lollipops. Now Jason has 12 lollipops. How many lollipops";
prompts[10]="did Jason give to Denny?";
prompts[11]="A: Jason had 20 lollipops. Since he only has 12 now, he must have given the rest to Denny. The number of";
prompts[12]="lollipops he has given to Denny must have been 20 - 12 = 8 lollipops. The answer is 8.";
prompts[13]="Q: When I was 6 my sister was half my age. Now I’m 70 how old is my sister?";
prompts[14]="A:";
final List<ChatMessage> messages = new ArrayList<>();
for (int i = 0; i < 15; i++) {
System.out.println(prompts[i]);
final ChatMessage userMessage = new ChatMessage(ChatMessageRole.USER.value(), prompts[i]);
messages.add(userMessage);
}
// create the chat gpt chat completion request
ChatCompletionRequest chatCompletionRequest2 = ChatCompletionRequest.builder()
.model(model)
.messages(messages)
.n(1)
.temperature(.1)
.maxTokens(100)
.logitBias(new HashMap<>())
.build();
System.out.println("------------");
System.out.print("ChatGPT response=");
// send the request to chat gpt and print the response
service.createChatCompletion(chatCompletionRequest2).getChoices().forEach((c) -> {
System.out.println(c.getMessage().getContent());
});
service.shutdownExecutor();
}
}
|
[
"com.theokanning.openai.completion.chat.ChatMessageRole.USER.value",
"com.theokanning.openai.completion.chat.ChatCompletionRequest.builder"
] |
[((2904, 2932), 'com.theokanning.openai.completion.chat.ChatMessageRole.USER.value'), ((3082, 3335), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((3082, 3310), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((3082, 3266), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((3082, 3234), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((3082, 3201), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((3082, 3179), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((3082, 3143), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder')]
|
package br.com.webapp.screenmatch.services;
import com.theokanning.openai.completion.CompletionRequest;
import com.theokanning.openai.service.OpenAiService;
public class ConsultaChatGPT {
public static String obterTraducao(String texto) {
OpenAiService service = new OpenAiService(System.getenv("OPENAI_APIKEY"));
CompletionRequest requisicao = CompletionRequest.builder()
.model("gpt-3.5-turbo-instruct")
.prompt("traduza para o português o texto: " + texto)
.maxTokens(1000)
.temperature(0.7)
.build();
var resposta = service.createCompletion(requisicao);
return resposta.getChoices().get(0).getText();
}
}
|
[
"com.theokanning.openai.completion.CompletionRequest.builder"
] |
[((368, 607), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((368, 582), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((368, 548), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((368, 515), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((368, 444), 'com.theokanning.openai.completion.CompletionRequest.builder')]
|
package example;
import com.theokanning.openai.service.OpenAiService;
import com.theokanning.openai.completion.CompletionRequest;
import com.theokanning.openai.image.CreateImageRequest;
import com.theokanning.openai.image.CreateImageVariationRequest;
import com.theokanning.openai.image.CreateImageEditRequest;
import java.io.File;
class OpenAiApiExample {
public static void main(String... args) {
String token = System.getenv("OPENAI_TOKEN");
OpenAiService service = new OpenAiService(token);
/*
System.out.println("\nCreating completion...");
CompletionRequest completionRequest = CompletionRequest.builder()
// .model("ada")
// .model("gpt-3.5-turbo")
// .model("code-davinci-002")
.model("gpt-3.5")
.prompt("code processing.org java sketch to compute number pi")
.echo(true)
.user("testing")
.n(1)
.temperature(0.0)
.maxTokens(1024)
.topP(1.0)
.build();
service.createCompletion(completionRequest).getChoices().forEach(System.out::println);
*/
System.out.println("\nCreating Image...");
// CreateImageVariationRequest request = CreateImageVariationRequest.builder()
// //.prompt("Two tabby cats breakdancing")
// .n(2)
// .build();
int num = 1;
CreateImageEditRequest editRequest = CreateImageEditRequest.builder()
//.prompt("change cat and dog color to tabby red and do swing dance with holding paws")
.prompt("add frame")
.n(num)
.size("1024x1024")
.responseFormat("url")
.build();
CreateImageVariationRequest varRequest = CreateImageVariationRequest.builder()
//.prompt("add bookmarker to photo") // no prompt used with variation requests
.n(num)
.size("1024x1024")
.responseFormat("url")
.build();
String imagePath;
imagePath = File.separator+"images"+File.separator+"readers_RGBA_l.png";
imagePath = File.separator+"images"+File.separator+"VAL_0340_1024_L_cr.png";
imagePath = File.separator+"images"+File.separator+"mask_VAL_0340_1024_L_cr.png";
String maskPath = null;
maskPath = File.separator+"images"+File.separator+"readers_mask_RGBA_r.png";
maskPath = File.separator+"images"+File.separator+"almost_empty_mask_RGBA.png";
//maskPath = File.separator+"images"+File.separator+"readers_mask_RGBA_l.png";
maskPath = File.separator+"images"+File.separator+"empty_mask_RGBA.png";
maskPath = File.separator+"images"+File.separator+"almost_empty_mask_RGBA.png";
maskPath = File.separator+"images"+File.separator+"mask_VAL_0340_1024_L_cr.png";
maskPath = null;
//imagePath = File.separator+"images"+File.separator+"dog_breakdancing_with_cat_RGBA.png";
//String imagePath = File.separator+"images"+File.separator+"readers_l.png";
File imageFile = new File(imagePath);
System.out.println(imageFile);
System.out.println(File.separator);
System.out.println(imageFile.getAbsolutePath());
try {
System.out.println(imageFile.getCanonicalPath());
} catch(Exception ioe) {
System.out.println(ioe);
}
System.out.println("\nImage is located at:");
for (int i=0; i<num; i++) {
System.out.println(service.createImageEdit(editRequest, imagePath, maskPath).getData().get(0).getUrl());
//System.out.println("num "+ (i+1) + " of " + num +" " + service.createImageVariation(varRequest, imagePath).getData().get(i).getUrl());
System.out.println();
}
// https://stackoverflow.com/questions/57100451/okhttp3-requestbody-createcontenttype-content-deprecated
}
}
|
[
"com.theokanning.openai.image.CreateImageVariationRequest.builder",
"com.theokanning.openai.image.CreateImageEditRequest.builder"
] |
[((1431, 1691), 'com.theokanning.openai.image.CreateImageEditRequest.builder'), ((1431, 1666), 'com.theokanning.openai.image.CreateImageEditRequest.builder'), ((1431, 1639), 'com.theokanning.openai.image.CreateImageEditRequest.builder'), ((1431, 1616), 'com.theokanning.openai.image.CreateImageEditRequest.builder'), ((1431, 1604), 'com.theokanning.openai.image.CreateImageEditRequest.builder'), ((1743, 1963), 'com.theokanning.openai.image.CreateImageVariationRequest.builder'), ((1743, 1938), 'com.theokanning.openai.image.CreateImageVariationRequest.builder'), ((1743, 1911), 'com.theokanning.openai.image.CreateImageVariationRequest.builder'), ((1743, 1888), 'com.theokanning.openai.image.CreateImageVariationRequest.builder')]
|
package com.example.autoreplybot;
import android.content.BroadcastReceiver;
import android.content.Context;
import android.content.Intent;
import android.os.Bundle;
import android.telephony.SmsManager;
import android.telephony.SmsMessage;
import android.util.Log;
import android.widget.Toast;
import com.theokanning.openai.OpenAiService;
import com.theokanning.openai.completion.CompletionChoice;
import com.theokanning.openai.completion.CompletionRequest;
import java.util.List;
public class SmsReceiver extends BroadcastReceiver {
@Override
public void onReceive(Context context, Intent intent) {
Bundle extras = intent.getExtras();
if (extras != null) {
Object[] pdus = (Object[]) extras.get("pdus");
String smsrec = "";
String smssender = "";
for (Object pdu : pdus) {
SmsMessage smsMessage = SmsMessage.createFromPdu((byte[]) pdu);
String messageBody = smsMessage.getMessageBody();
String messageSender = smsMessage.getOriginatingAddress();
Toast.makeText(context, "SMS:"
+ messageBody, Toast.LENGTH_LONG).show();
smsrec = messageBody;
smssender = messageSender;
}
Log.d("NETWORK_LOAD","Initializing ChatGPT...");
OpenAiService service = new OpenAiService(Globals.API_KEY);
CompletionRequest completionRequest = CompletionRequest.builder()
.prompt("reply kindly and sweet to this:" + smsrec)
.model("text-davinci-003")
.maxTokens(160)
.echo(false)
.build();
final String phonenum = smssender;
/**
* TODO change this into a more efficient way of managing background threads
*
* This is just for sample PoC. Hehe
*/
new Thread(new Runnable() {
@Override
public void run() {
List<CompletionChoice> choices = service
.createCompletion(completionRequest).getChoices();
String chatgptresponse = choices.get(0).getText().trim();
Log.d("REPLY",chatgptresponse.trim());
SmsManager sm = SmsManager.getDefault();
if(chatgptresponse.length() > 160){
//Send a reply
sm.sendTextMessage(phonenum,null,chatgptresponse.substring(0,160),null,null);
}
else{
sm.sendTextMessage(phonenum,null,chatgptresponse.trim(),null,null);
}
}
}).start();
}
}
}
|
[
"com.theokanning.openai.completion.CompletionRequest.builder"
] |
[((1089, 1184), 'android.widget.Toast.makeText'), ((1465, 1709), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((1465, 1680), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((1465, 1647), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((1465, 1611), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((1465, 1564), 'com.theokanning.openai.completion.CompletionRequest.builder')]
|
package com.raidrin.sakanu.services;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.theokanning.openai.completion.chat.ChatCompletionChoice;
import com.theokanning.openai.completion.chat.ChatCompletionRequest;
import com.theokanning.openai.completion.chat.ChatCompletionResult;
import com.theokanning.openai.completion.chat.ChatMessage;
import com.theokanning.openai.service.OpenAiService;
import lombok.RequiredArgsConstructor;
import org.springframework.stereotype.Service;
import java.util.List;
import java.util.Locale;
@Service
@RequiredArgsConstructor
public class OpenAiTermQuery {
private final OpenAiService openAiService;
public TermResponse query(String domain, String term) {
String systemContent = TaskMessageGenerator.generateTaskMessage(domain);
if(term == null || term.trim().isEmpty()) {
throw new IllegalArgumentException("Term cannot be null or empty");
}
String sanitizedTerm = term.trim().toLowerCase(Locale.ENGLISH);
if(sanitizedTerm.length() >= 255) {
throw new IllegalArgumentException("Term cannot be longer than 255 characters");
}
return getResponseFromOpenAI(systemContent, sanitizedTerm);
}
private TermResponse getResponseFromOpenAI(String systemContent, String term) {
ChatMessage systemMessage = new ChatMessage();
systemMessage.setRole("system");
systemMessage.setContent(systemContent);
ChatMessage userMessage = new ChatMessage();
userMessage.setRole("user");
userMessage.setContent(term.trim().toLowerCase(Locale.ENGLISH));
ChatCompletionRequest chatCompletionRequest =
ChatCompletionRequest.builder()
.model("gpt-4")
.messages(
List.of(
systemMessage,
userMessage
)
)
.temperature(1.2)
.build();
ChatCompletionResult chatCompletionResult = openAiService.createChatCompletion(chatCompletionRequest);
List<ChatCompletionChoice> chatCompletionChoices = chatCompletionResult.getChoices();
if (chatCompletionChoices.size() != 1) {
throw new NoChoiceFoundException("Expected 1 choice, got " + chatCompletionChoices.size());
}
ChatCompletionChoice chatCompletionChoice = chatCompletionChoices.get(0);
String gptResponse = chatCompletionChoice.getMessage().getContent();
// Testing purposes
// try {
// Files.writeString(
// Path.of("gpt_responses", domain + "-" + term + ".json"),
// gptResponse
// );
// } catch (IOException e) {
// throw new RuntimeException(e);
// }
try {
return getTermResponse(gptResponse);
} catch (JsonProcessingException e) {
throw new RuntimeException("Failed to process gpt response", e);
}
}
public TermResponse getTermResponse(String gptResponse) throws JsonProcessingException {
ObjectMapper objectMapper = new ObjectMapper();
return objectMapper.readValue(
gptResponse,
TermResponse.class
);
}
}
|
[
"com.theokanning.openai.completion.chat.ChatCompletionRequest.builder"
] |
[((1756, 2145), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((1756, 2112), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((1756, 2070), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((1756, 1827), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder')]
|
package com.chat.base.handler;
import com.chat.base.bean.common.BaseCodeEnum;
import com.chat.base.bean.constants.CommonConstant;
import com.chat.base.bean.entity.GptModelConfig;
import com.chat.base.bean.gpt.ChatReq;
import com.chat.base.bean.vo.CacheGptApiTokenVo;
import com.chat.base.bean.vo.CacheUserInfoVo;
import com.chat.base.bean.vo.ChatMessageResultVo;
import com.chat.base.handler.billing.ModelBillingFactory;
import com.chat.base.handler.billing.ModelBillingService;
import com.chat.base.handler.billing.impl.ModelBillingByBalanceImpl;
import com.chat.base.handler.gpt.OpenAiProxyServiceFactory;
import com.chat.base.handler.model.ChatModelProcessor;
import com.chat.base.handler.model.ChatModelProcessorFactory;
import com.chat.base.service.ChatBaseOpenAiProxyService;
import com.chat.base.utils.ChatMessageCacheUtil;
import com.chat.base.utils.ResultVO;
import com.chat.base.utils.TokenUtil;
import com.theokanning.openai.completion.chat.ChatCompletionRequest;
import com.theokanning.openai.completion.chat.ChatMessage;
import io.github.asleepyfish.enums.RoleEnum;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.lang3.StringUtils;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;
import javax.servlet.http.HttpServletResponse;
import java.io.IOException;
import java.nio.charset.Charset;
import java.util.LinkedList;
import java.util.Optional;
import java.util.UUID;
/**
* @author huyd
* @date 2023/5/23 8:06 PM
*/
@Slf4j
@Component
public class AIChatManger {
// Weighted round-robin selection of a GPT model config for the user.
@Autowired
private WeightAlgorithmManager weightAlgorithmManager;
// Settles the user's balance after a chat completes.
@Autowired
private UserManager userManager;
// Tracks per-user advance charges (pre-charges) keyed by trade id.
@Autowired
private ModelBillingByBalanceImpl modelBillingByBalance;
// Asynchronously persists prompt/response records.
@Autowired
private PromptRecordManager promptRecordManager;
/**
 * Streaming chat for a user who is NOT logged in: the conversation is
 * recorded but never billed. Only the default "gpt-3.5-turbo" model is
 * allowed on this path; any other model name is rejected up front.
 *
 * @param chatReq  chat request (model, prompt, conversation id, sampling params)
 * @param response servlet response used as an SSE (text/event-stream) sink
 * @throws Exception propagated from the underlying streaming call
 */
public void streamChatWithWebV3NoStatus(ChatReq chatReq, HttpServletResponse response) throws Exception {
response.setContentType("text/event-stream");
response.setCharacterEncoding("UTF-8");
response.setHeader("Cache-Control", "no-cache");
String model = chatReq.getModel();
// Anonymous users may only use gpt-3.5-turbo.
if (StringUtils.isNoneEmpty(model) && !"gpt-3.5-turbo".equals(model)){
response.getOutputStream().write(BaseCodeEnum.NO_MODEL.getMsg().getBytes());
response.getOutputStream().close();
return;
}
Integer contentNumber = chatReq.getContentNumber();
// The conversation id doubles as the GPT API "user" identity.
String user = chatReq.getConversationId();
LinkedList<ChatMessage> userChatMessages = ChatMessageCacheUtil.getUserChatMessages(user, contentNumber);
userChatMessages.add(new ChatMessage(RoleEnum.USER.getRoleName(), chatReq.getPrompt()));
// Trims history to fit the model's token budget; may empty the list.
ChatMessageCacheUtil.getOkUserChatMessages(userChatMessages, model);
if(userChatMessages.size()<=0){
response.getOutputStream().write(BaseCodeEnum.TOKEN_OVER.getMsg().getBytes());
response.getOutputStream().close();
return;
}
// System message must come first in the OpenAI message list.
userChatMessages.addFirst(new ChatMessage(RoleEnum.SYSTEM.getRoleName(), chatReq.getSystemMessage()));
// Anonymous traffic is pinned to proxy config id "1".
ChatBaseOpenAiProxyService proxyService = OpenAiProxyServiceFactory.createProxyService("1");
if (proxyService == null) {
response.getOutputStream().write(BaseCodeEnum.NO_MODEL.getMsg().getBytes());
response.getOutputStream().close();
return;
}
ChatMessageResultVo streamChatCompletion = proxyService.createStreamChatCompletion(ChatCompletionRequest.builder()
.model(model)
.messages(userChatMessages)
.user(user)
.temperature(chatReq.getTemperature())
.topP(chatReq.getTop_p())
.stream(true)
.build(), response.getOutputStream(), CommonConstant.SYS_TOKEN);
if(streamChatCompletion!=null){
// Cache the assistant reply and persist the prompt record asynchronously.
ChatMessageCacheUtil.saveChatMessage(user,streamChatCompletion.getChatMessage());
promptRecordManager.asyncAddPromptRecord(streamChatCompletion); // the user's conversation id is also the GPT API user identity
}
}
/**
 * Streaming chat for a logged-in application user, with billing.
 * Flow: pre-charge an estimated token cost, stream the completion, then
 * settle the real cost; on settlement failure or any exception the
 * pre-charge is refunded to the cached balance.
 *
 * @param chatReq         chat request
 * @param cacheUserInfoVo cached user info, including the API token and balance
 * @param response        servlet response used as an SSE sink
 * @throws IOException on servlet stream errors
 */
public void chatStream(ChatReq chatReq, CacheUserInfoVo cacheUserInfoVo, HttpServletResponse response) throws IOException {
String model = chatReq.getModel();
ModelBillingService modelBillingService = ModelBillingFactory.getModelBillingService(model);
if(modelBillingService==null){
response.getOutputStream().write(BaseCodeEnum.MODEL_NO_OPEN.getMsg().getBytes());
return;
}
// Trade id correlates the pre-charge with the final settlement/refund.
String tradeId = UUID.randomUUID().toString();
Integer contentNumber = chatReq.getContentNumber();
String user = chatReq.getConversationId();
LinkedList<ChatMessage> userChatMessages = ChatMessageCacheUtil.getUserChatMessages(user, contentNumber);
userChatMessages.add(new ChatMessage(RoleEnum.USER.getRoleName(), chatReq.getPrompt()));
// Trims history to fit the model's token budget; may empty the list.
ChatMessageCacheUtil.getOkUserChatMessages(userChatMessages, model);
if(userChatMessages.size()<=0){
response.getOutputStream().write(BaseCodeEnum.TOKEN_OVER.getMsg().getBytes());
response.getOutputStream().close();
return;
}
userChatMessages.addFirst(new ChatMessage(RoleEnum.SYSTEM.getRoleName(), chatReq.getSystemMessage()));
String token = cacheUserInfoVo.getGptApiTokenVo().getToken();
response.setContentType("text/event-stream");
response.setCharacterEncoding("UTF-8");
response.setHeader("Cache-Control", "no-cache");
// Pick a model config the user is entitled to (weighted round-robin).
Optional<GptModelConfig> modelConfig = weightAlgorithmManager.round(cacheUserInfoVo, model);
if (!modelConfig.isPresent()) {
response.getOutputStream().write(BaseCodeEnum.NO_MODEL_ROLE.getMsg().getBytes());
return;
}
GptModelConfig gptModelConfig = modelConfig.get();
ChatBaseOpenAiProxyService proxyService = OpenAiProxyServiceFactory.createProxyService(gptModelConfig.getId().toString());
if (proxyService == null) {
response.getOutputStream().write(BaseCodeEnum.NO_MODEL.getMsg().getBytes());
response.getOutputStream().close();
return;
}
try {
// Estimate the request token count and pre-charge before streaming.
int tokenMessages = TokenUtil.countTokenMessages(userChatMessages, model);
ResultVO<String> beforeBillingResult = modelBillingService.beforeBilling(cacheUserInfoVo.getGptApiTokenVo(), tokenMessages,tradeId);
if(!beforeBillingResult.isOk()){
response.getOutputStream().write(beforeBillingResult.getMsg().getBytes());
return;
}
ChatMessageResultVo streamChatCompletion = proxyService.createStreamChatCompletion(ChatCompletionRequest.builder()
.model(model)
.messages(userChatMessages)
.user(user)
.temperature(chatReq.getTemperature())
.topP(chatReq.getTop_p())
.stream(true)
.build(), response.getOutputStream(), token);
// Settle the user's balance with the actual cost.
boolean billingResult = userManager.costUserBalanceByChat(cacheUserInfoVo, streamChatCompletion, tradeId);
if(!billingResult){
// Settlement failed: refund the pre-charge to the cached balance.
Long advanceChargeAmount = modelBillingByBalance.getUserAdvanceChargeMap(cacheUserInfoVo.getId()).getOrDefault(tradeId, 0L);
log.info("扣款失败 返回预扣款给用户 cacheUserInfoVo={}", cacheUserInfoVo);
// NOTE(review): synchronizing on getClass() locks the shared Class object,
// serializing refunds across ALL users rather than per user — confirm intended.
synchronized (cacheUserInfoVo.getClass()){
CacheGptApiTokenVo gptApiTokenVo = cacheUserInfoVo.getGptApiTokenVo();
gptApiTokenVo.setBalance(gptApiTokenVo.getBalance()+advanceChargeAmount);
}
}
if(streamChatCompletion!=null){
ChatMessageCacheUtil.saveChatMessage(user,streamChatCompletion.getChatMessage());
promptRecordManager.asyncAddPromptRecord(streamChatCompletion); // the user's conversation id is also the GPT API user identity
}
}catch (Exception e){
// Any failure during streaming/billing: refund the pre-charge.
Long advanceChargeAmount = modelBillingByBalance.getUserAdvanceChargeMap(cacheUserInfoVo.getId()).getOrDefault(tradeId, 0L);
log.error("回话实现错误,现在返回预扣款给用户 cacheUserInfoVo={}", cacheUserInfoVo,e);
synchronized (cacheUserInfoVo.getClass()){
CacheGptApiTokenVo gptApiTokenVo = cacheUserInfoVo.getGptApiTokenVo();
gptApiTokenVo.setBalance(gptApiTokenVo.getBalance()+advanceChargeAmount);
}
} finally {
// Always drop the trade id so the advance-charge map does not leak.
modelBillingByBalance.removeUserTradeId(cacheUserInfoVo.getId(),tradeId);
}
}
/**
 * OpenAI-official-request variant of the billed streaming chat: the caller
 * supplies a raw {@link ChatCompletionRequest}. Same pre-charge / settle /
 * refund lifecycle as {@link #chatStream(ChatReq, CacheUserInfoVo, HttpServletResponse)},
 * but the model call is dispatched through a {@code ChatModelProcessor}.
 *
 * @param chatReq         raw chat completion request
 * @param cacheUserInfoVo cached user info with token and balance
 * @param response        servlet response used as an SSE sink
 * @throws IOException on servlet stream errors
 */
public void chatStream(ChatCompletionRequest chatReq, CacheUserInfoVo cacheUserInfoVo, HttpServletResponse response) throws IOException {
String model = chatReq.getModel();
ModelBillingService modelBillingService = ModelBillingFactory.getModelBillingService(model);
if(modelBillingService==null){
return;
}
// Trade id correlates the pre-charge with the final settlement/refund.
String tradeId = UUID.randomUUID().toString();
String token = cacheUserInfoVo.getGptApiTokenVo().getToken();
Optional<GptModelConfig> modelConfig = weightAlgorithmManager.round(cacheUserInfoVo, model);
if (!modelConfig.isPresent()) {
response.getOutputStream().write((String.format("data: %s\n\n", BaseCodeEnum.NO_MODEL_ROLE.getMsg())).getBytes(Charset.defaultCharset()));
return;
}
GptModelConfig gptModelConfig = modelConfig.get();
try {
ResultVO<String> beforeBillingResult = modelBillingService.beforeBilling(cacheUserInfoVo.getGptApiTokenVo(),
TokenUtil.countTokenMessages(chatReq.getMessages(), model),tradeId);
if(!beforeBillingResult.isOk()){
response.getOutputStream().write((String.format("data: %s\n\n", beforeBillingResult.getMsg())).getBytes(Charset.defaultCharset()));
return;
}
ChatModelProcessor chatModelProcessor = ChatModelProcessorFactory.getChatModelProcessor(model);
assert chatModelProcessor != null;
ChatMessageResultVo streamChatCompletion = chatModelProcessor.chatStream(chatReq, response.getOutputStream(), token,gptModelConfig.getId().toString());
// Settle the user's balance with the actual cost.
boolean billingResult = userManager.costUserBalanceByChat(cacheUserInfoVo, streamChatCompletion, tradeId);
if(!billingResult){
// Settlement failed: refund the pre-charge to the cached balance.
Long advanceChargeAmount = modelBillingByBalance.getUserAdvanceChargeMap(cacheUserInfoVo.getId()).getOrDefault(tradeId, 0L);
log.info("扣款失败 返回预扣款给用户 cacheUserInfoVo={}", cacheUserInfoVo);
synchronized (cacheUserInfoVo.getClass()){
CacheGptApiTokenVo gptApiTokenVo = cacheUserInfoVo.getGptApiTokenVo();
gptApiTokenVo.setBalance(gptApiTokenVo.getBalance()+advanceChargeAmount);
}
}
promptRecordManager.asyncAddPromptRecord(streamChatCompletion); // the user's conversation id is also the GPT API user identity
}catch (Exception e){
// Any failure during streaming/billing: refund the pre-charge.
Long advanceChargeAmount = modelBillingByBalance.getUserAdvanceChargeMap(cacheUserInfoVo.getId()).getOrDefault(tradeId, 0L);
log.error("回话实现错误,现在返回预扣款给用户 cacheUserInfoVo={}", cacheUserInfoVo,e);
synchronized (cacheUserInfoVo.getClass()){
CacheGptApiTokenVo gptApiTokenVo = cacheUserInfoVo.getGptApiTokenVo();
gptApiTokenVo.setBalance(gptApiTokenVo.getBalance()+advanceChargeAmount);
}
} finally {
// Always drop the trade id so the advance-charge map does not leak.
modelBillingByBalance.removeUserTradeId(cacheUserInfoVo.getId(),tradeId);
}
}
}
|
[
"com.theokanning.openai.completion.chat.ChatCompletionRequest.builder"
] |
[((2403, 2444), 'com.chat.base.bean.common.BaseCodeEnum.NO_MODEL.getMsg'), ((2403, 2433), 'com.chat.base.bean.common.BaseCodeEnum.NO_MODEL.getMsg'), ((2797, 2824), 'io.github.asleepyfish.enums.RoleEnum.USER.getRoleName'), ((3012, 3055), 'com.chat.base.bean.common.BaseCodeEnum.TOKEN_OVER.getMsg'), ((3012, 3044), 'com.chat.base.bean.common.BaseCodeEnum.TOKEN_OVER.getMsg'), ((3186, 3215), 'io.github.asleepyfish.enums.RoleEnum.SYSTEM.getRoleName'), ((3430, 3471), 'com.chat.base.bean.common.BaseCodeEnum.NO_MODEL.getMsg'), ((3430, 3460), 'com.chat.base.bean.common.BaseCodeEnum.NO_MODEL.getMsg'), ((3644, 3929), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((3644, 3904), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((3644, 3874), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((3644, 3832), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((3644, 3777), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((3644, 3749), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((3644, 3705), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((4770, 4816), 'com.chat.base.bean.common.BaseCodeEnum.MODEL_NO_OPEN.getMsg'), ((4770, 4805), 'com.chat.base.bean.common.BaseCodeEnum.MODEL_NO_OPEN.getMsg'), ((4893, 4921), 'java.util.UUID.randomUUID'), ((5194, 5221), 'io.github.asleepyfish.enums.RoleEnum.USER.getRoleName'), ((5408, 5451), 'com.chat.base.bean.common.BaseCodeEnum.TOKEN_OVER.getMsg'), ((5408, 5440), 'com.chat.base.bean.common.BaseCodeEnum.TOKEN_OVER.getMsg'), ((5582, 5611), 'io.github.asleepyfish.enums.RoleEnum.SYSTEM.getRoleName'), ((6060, 6106), 'com.chat.base.bean.common.BaseCodeEnum.NO_MODEL_ROLE.getMsg'), ((6060, 6095), 'com.chat.base.bean.common.BaseCodeEnum.NO_MODEL_ROLE.getMsg'), ((6410, 6451), 'com.chat.base.bean.common.BaseCodeEnum.NO_MODEL.getMsg'), ((6410, 6440), 
'com.chat.base.bean.common.BaseCodeEnum.NO_MODEL.getMsg'), ((7048, 7361), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((7048, 7332), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((7048, 7298), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((7048, 7252), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((7048, 7193), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((7048, 7161), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((7048, 7113), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((9571, 9599), 'java.util.UUID.randomUUID'), ((9890, 9925), 'com.chat.base.bean.common.BaseCodeEnum.NO_MODEL_ROLE.getMsg')]
|
package com.happy.chat.service.impl;
import static com.happy.chat.constants.Constant.CHAT_FROM_USER;
import static com.happy.chat.constants.Constant.PERF_CHAT_MODULE;
import static com.happy.chat.constants.Constant.PERF_ERROR_MODULE;
import static com.happy.chat.uitls.CacheKeyProvider.chatSystemTipsKey;
import static com.happy.chat.uitls.CacheKeyProvider.chatUnPayTipsKey;
import static com.happy.chat.uitls.CacheKeyProvider.chatWarnWordKey;
import static com.happy.chat.uitls.CacheKeyProvider.gptApiTokenKey;
import static com.happy.chat.uitls.CacheKeyProvider.happyModelHttpUrl;
import static com.happy.chat.uitls.CacheKeyProvider.userChatgptWarnKey;
import static com.happy.chat.uitls.CacheKeyProvider.userChatgptWarnMaxCountKey;
import static com.happy.chat.uitls.CacheKeyProvider.userEnterChatgptAdvanceModelThresholdKey;
import static com.happy.chat.uitls.CacheKeyProvider.userEnterHappyModelLatestTimeKey;
import static com.happy.chat.uitls.CacheKeyProvider.userExitHappyModelExpireMillsKey;
import java.util.ArrayList;
import java.util.Comparator;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;
import org.apache.commons.collections4.CollectionUtils;
import org.apache.commons.lang3.RandomUtils;
import org.apache.commons.lang3.StringUtils;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.annotation.Lazy;
import org.springframework.stereotype.Service;
import com.happy.chat.dao.FlirtopiaChatDao;
import com.happy.chat.domain.FlirtopiaChat;
import com.happy.chat.domain.IceBreakWord;
import com.happy.chat.model.ChatRequest;
import com.happy.chat.model.ChatResponse;
import com.happy.chat.model.HappyModelRequest;
import com.happy.chat.service.ChatService;
import com.happy.chat.service.OpenAIService;
import com.happy.chat.service.PaymentService;
import com.happy.chat.uitls.FileUtils;
import com.happy.chat.uitls.ObjectMapperUtils;
import com.happy.chat.uitls.OkHttpUtils;
import com.happy.chat.uitls.PrometheusUtils;
import com.happy.chat.uitls.RedisUtil;
import com.theokanning.openai.completion.chat.ChatMessage;
import com.theokanning.openai.completion.chat.ChatMessageRole;
import io.prometheus.client.Counter;
import lombok.extern.slf4j.Slf4j;
import okhttp3.Response;
@Lazy
@Service
@Slf4j
public class ChatServiceImpl implements ChatService {
// Default idle window (ms) after which the "happy model" is considered exited.
private final String defaultHappyModelExitExpireTime = "300000";
// Default history size threshold before switching to the advanced GPT.
private final String defaultEnterAdvanceModelHistoryChatSize = "10";
private final String defaultToPayTips = "Don’t be shy to undress me, only $9.9";
private final String defaultSystemTips = "Don't scare a girl. Do it gently and softly.";
// Default max number of warn-word hits tolerated on the normal GPT path.
private final String defaultUserChatWarnMaxCount = "3";
// Prompt file variant suffixes.
private final String normalVersionGpt = "normal";
private final String advancedVersionGpt = "advanced";
@Autowired
private FlirtopiaChatDao flirtopiaChatDao;
@Autowired
private RedisUtil redisUtil;
@Autowired
private PaymentService paymentService;
@Autowired
private OpenAIService openAIService;
@Autowired
private OkHttpUtils okHttpUtils;
@Autowired
private PrometheusUtils prometheusUtil;
@Autowired
private Counter chatPrometheusCounter;
@Override
public List<IceBreakWord> getIceBreakWordsByRobot(String robotId) {
return flirtopiaChatDao.getRobotIceBreakWords(robotId);
}
@Override
public List<FlirtopiaChat> getUserHistoryChats(String userId) {
return flirtopiaChatDao.getUserChatList(userId);
}
@Override
public List<FlirtopiaChat> getUserRobotHistoryChats(String userId, String robotId) {
return flirtopiaChatDao.getUserRobotChats(userId, robotId);
}
@Override
public int insert(FlirtopiaChat flirtopiaChat) {
return flirtopiaChatDao.insertChat(flirtopiaChat);
}
@Override
public int batchInsert(List<FlirtopiaChat> flirtopiaChats) {
return flirtopiaChatDao.batchInsertChat(flirtopiaChats);
}
/**
 * Top-level chat dispatcher.
 * <p>
 * 1) If the user's input contains a sensitive word, return the default reply immediately.
 * <p>
 * 2) Otherwise, sort the user/AI chat history by time and use it as input to both
 * GPT and the "happy model".
 * <p>
 * Happy-model exit rule: once a user has entered the happy model, if they stop
 * chatting for longer than a configured window (default 5 minutes) the model is
 * considered exited, to avoid reply degradation.
 * <p>
 * 2-1) Paid user: run the exit check first. If exited, downgrade to the advanced
 * GPT; otherwise keep replying with the happy model. Empty or sensitive replies
 * fall back to the default reply. Non-default happy-model replies refresh the
 * last-used timestamp used by the exit check.
 * <p>
 * 2-2) Unpaid user: also run the exit check (never-entered or expired counts as exited).
 * 2-2-1) Still inside the happy model: reply with it (content is blurred client-side),
 * with the usual empty/sensitive fallback; non-default replies also carry a pay tip.
 * 2-2-2) Exited: compare history size against the configured round threshold — above
 * it use the advanced GPT, otherwise the normal GPT. GPT calls are assembled from the
 * robot's prompt (persona), warn keywords and the current + historical messages, with
 * the same empty/sensitive fallback. If the GPT reply triggers a warn keyword:
 * normal GPT — under the configured warn-count threshold, return the reply plus a
 * system tip; over it, switch to the happy model (blurred, pay-gated). Advanced GPT —
 * switch to the happy model directly (blurred, pay-gated), with the usual fallback.
 *
 * @return the assembled chat response
 */
@Override
public ChatResponse requestChat(ChatRequest chatRequest) {
String userId = chatRequest.getUserId();
String robotId = chatRequest.getRobotId();
String content = chatRequest.getUserWord();
String robotDefaultResp = chatRequest.getRobotDefaultResp();
List<String> sensitiveWords = chatRequest.getSensitiveWords();
boolean hasSensitiveWord = hasSensitiveWord(content, sensitiveWords);
// Sensitive input: return the default reply immediately.
if (hasSensitiveWord) {
log.warn("user request content contains sensitive {} {} {}", userId, robotId, content);
prometheusUtil.perf(PERF_CHAT_MODULE, "用户聊天请求内容有敏感词,使用AI默认文案回复");
return ChatResponse.builder()
.content(robotDefaultResp)
.useDefault(true)
.reasonAndModel("userContentHasSensitiveWord-Default")
.build();
}
// Sort history ascending by creation time.
List<FlirtopiaChat> userHistoryMessages = getUserRobotHistoryChats(userId, robotId).stream()
.sorted(Comparator.comparing(FlirtopiaChat::getCreateTime)).collect(Collectors.toList());
// Paid users go straight to the happy model (with exit-check fallback to advanced GPT).
boolean hasPay = paymentService.userHasPayedRobot(userId, robotId);
if (hasPay) {
return getRobotAlreadyPaidResp(userId, robotId, content, userHistoryMessages, robotDefaultResp, sensitiveWords);
}
// Unpaid path.
return getRobotUnPaidResp(userId, robotId, content, userHistoryMessages, robotDefaultResp, sensitiveWords);
}
/**
 * Paid user, advanced GPT reply (used after the happy model has exited).
 *
 * @param userId user id
 * @param robotId robot id
 * @param content current user input
 * @param userHistoryMessages chat history, sorted ascending by time
 * @return chat response (may fall back to the happy model on warn words)
 */
private ChatResponse getRobotAlreadyPaidRespFromAdvancedGpt(String userId, String robotId,
String content, List<FlirtopiaChat> userHistoryMessages,
String robotDefaultResp, List<String> sensitiveWords) {
String respContent = requestChatgpt(robotId, advancedVersionGpt, content, userHistoryMessages);
// No reply: fall back to the default response.
if (StringUtils.isEmpty(respContent)) {
return buildChatResponse(userId, robotId, respContent, "alreadyPaidAdvancedGptEmpty:Default", robotDefaultResp, sensitiveWords);
}
// Does the GPT reply trigger a warn keyword?
boolean contentHasWarn = chatgptRespHasWarn(respContent);
// Warn triggered: switch to the happy model.
if (contentHasWarn) {
String happyResp = requestHappyModel(robotId, content, userHistoryMessages);
ChatResponse chatResponse = buildChatResponse(userId, robotId, happyResp, "alreadyPaidAdvancedGptWarn:HappyModel", robotDefaultResp, sensitiveWords);
if (!chatResponse.isUseDefault()) {
updateHappyModelLatestTime(userId, robotId);
}
return chatResponse;
}
return buildChatResponse(userId, robotId, respContent, "alreadyPaidAdvancedGptNoWarn:AdvancedGpt", robotDefaultResp, sensitiveWords);
}
/**
 * Unpaid user dispatcher: happy model if still inside its window, otherwise
 * advanced or normal GPT depending on history size.
 *
 * @param userId user id
 * @param robotId robot id
 * @param content current user input
 * @param userHistoryMessages chat history, sorted ascending by time
 * @return chat response
 */
private ChatResponse getRobotUnPaidResp(String userId, String robotId, String content,
List<FlirtopiaChat> userHistoryMessages,
String robotDefaultResp, List<String> sensitiveWords) {
// Happy-model exit check.
boolean timeForExit = checkTimeForExitHappyModel(userId, robotId);
// Not exited: keep replying with the happy model.
if (!timeForExit) {
return getRobotUnPaidRespFromHappyModel(userId, robotId, content, userHistoryMessages,
robotDefaultResp, sensitiveWords);
}
// Exited (or never entered) and enough history rounds: use the advanced GPT.
if (isEnterChatgptAdvancedModel(userHistoryMessages)) {
return getRobotUnPaidRespFromAdvancedGpt(userId, robotId, content, userHistoryMessages,
robotDefaultResp, sensitiveWords);
}
// Exited with too few rounds: use the normal GPT.
return getRobotUnPaidRespFromNormalGpt(userId, robotId, content, userHistoryMessages,
robotDefaultResp, sensitiveWords);
}
/**
 * Unpaid user, happy-model reply (content is pay-gated client-side).
 *
 * @param userId user id
 * @param robotId robot id
 * @param content current user input
 * @param userHistoryMessages chat history, sorted ascending by time
 * @return chat response with a pay tip attached when not a default reply
 */
private ChatResponse getRobotUnPaidRespFromHappyModel(String userId, String robotId,
String content, List<FlirtopiaChat> userHistoryMessages,
String robotDefaultResp, List<String> sensitiveWords) {
String aiRespContent = requestHappyModel(robotId, content, userHistoryMessages);
ChatResponse chatResponse = buildChatResponse(userId, robotId, aiRespContent,
"unPaidTimeNotExit:HappyModel", robotDefaultResp, sensitiveWords);
// Reply came from the happy model: refresh its last-used timestamp.
if (!chatResponse.isUseDefault()) {
updateHappyModelLatestTime(userId, robotId);
chatResponse.setPayTips(getUnPayTips());
}
return chatResponse;
}
/**
 * Unpaid user, advanced GPT reply; warn words divert to the happy model.
 *
 * @param userId user id
 * @param robotId robot id
 * @param content current user input
 * @param userHistoryMessages chat history, sorted ascending by time
 * @return chat response
 */
private ChatResponse getRobotUnPaidRespFromAdvancedGpt(String userId, String robotId,
String content, List<FlirtopiaChat> userHistoryMessages,
String robotDefaultResp, List<String> sensitiveWords) {
String respContent = requestChatgpt(robotId, advancedVersionGpt, content, userHistoryMessages);
// No reply: fall back to the default response.
if (StringUtils.isEmpty(respContent)) {
return buildChatResponse(userId, robotId, respContent, "unPaidAdvancedGptEmpty:Default", robotDefaultResp, sensitiveWords);
}
// Does the GPT reply trigger a warn keyword?
boolean contentHasWarn = chatgptRespHasWarn(respContent);
// Warn triggered: switch to the happy model.
if (contentHasWarn) {
String happyResp = requestHappyModel(robotId, content, userHistoryMessages);
ChatResponse chatResponse = buildChatResponse(userId, robotId, happyResp, "unPaidAdvancedGptWarn:HappyModel", robotDefaultResp, sensitiveWords);
if (!chatResponse.isUseDefault()) {
updateHappyModelLatestTime(userId, robotId);
// Happy-model replies to unpaid users carry a pay tip.
chatResponse.setPayTips(getUnPayTips());
}
return chatResponse;
}
return buildChatResponse(userId, robotId, respContent, "unPaidAdvancedGptNoWarn:AdvancedGpt", robotDefaultResp, sensitiveWords);
}
/**
 * Unpaid user, normal GPT reply. Warn words increment a per-user counter;
 * once the counter exceeds the configured threshold the happy model takes over.
 *
 * @param userId user id
 * @param robotId robot id
 * @param content current user input
 * @param userHistoryMessages chat history, sorted ascending by time
 * @return chat response
 */
private ChatResponse getRobotUnPaidRespFromNormalGpt(String userId, String robotId,
String content, List<FlirtopiaChat> userHistoryMessages,
String robotDefaultResp, List<String> sensitiveWords) {
// Exited the happy model: use the normal GPT.
String respContent = requestChatgpt(robotId, normalVersionGpt, content, userHistoryMessages);
// No reply: fall back to the default response.
if (StringUtils.isEmpty(respContent)) {
return buildChatResponse(userId, robotId, respContent, "unPaidNormalGptEmpty:Default",
robotDefaultResp, sensitiveWords);
}
// Does the GPT reply trigger a warn keyword?
boolean contentHasWarn = chatgptRespHasWarn(respContent);
// Warn count over threshold (default 3): switch to the happy model.
if (overGptWarnCount(userId, robotId)) {
String happyResp = requestHappyModel(robotId, content, userHistoryMessages);
ChatResponse chatResponse = buildChatResponse(userId, robotId, happyResp, "unPaidNormalGptWarn:HappyModel",
robotDefaultResp, sensitiveWords);
if (!chatResponse.isUseDefault()) {
updateHappyModelLatestTime(userId, robotId);
// Happy-model replies to unpaid users carry a pay tip.
chatResponse.setPayTips(getUnPayTips());
}
return chatResponse;
} else if (contentHasWarn) { // warned, but still under the threshold
// Increment the warn counter.
addGptWarnCount(userId, robotId);
ChatResponse chatResponse = buildChatResponse(userId, robotId, respContent,
"unPaidNormalGptWarnNotEnough-NormalGpt", robotDefaultResp, sensitiveWords);
if (!chatResponse.isUseDefault()) {
// Append an admonition tip to the reply.
chatResponse.setSystemTips(redisUtil.getOrDefault(chatSystemTipsKey(), defaultSystemTips));
}
return chatResponse;
}
return buildChatResponse(userId, robotId, respContent, "unPaidNormalGptNoWarn:NormalGpt",
robotDefaultResp, sensitiveWords);
}
/**
 * Paid user dispatcher: happy model while inside its window, otherwise
 * fall back to the advanced GPT.
 *
 * @param userId user id
 * @param robotId robot id
 * @param userReqContent current user input
 * @param userHistoryMessages chat history, sorted ascending by time
 * @return chat response
 */
private ChatResponse getRobotAlreadyPaidResp(String userId, String robotId, String userReqContent,
List<FlirtopiaChat> userHistoryMessages,
String robotDefaultResp, List<String> sensitiveWords) {
// Happy-model exit check.
boolean timeForExit = checkTimeForExitHappyModel(userId, robotId);
if (timeForExit) { // exited: fall back to the advanced GPT (warn words still checked)
return getRobotAlreadyPaidRespFromAdvancedGpt(userId, robotId, userReqContent, userHistoryMessages,
robotDefaultResp, sensitiveWords);
}
return getRobotAlreadyPaidRespFromHappyModel(userId, robotId, userReqContent, userHistoryMessages,
robotDefaultResp, sensitiveWords);
}
/**
 * Paid user, happy-model reply.
 *
 * @param userId user id
 * @param robotId robot id
 * @param userReqContent current user input
 * @param userHistoryMessages chat history, sorted ascending by time
 * @param robotDefaultResp fallback reply
 * @param sensitiveWords sensitive-word list applied to the reply
 * @return chat response
 */
private ChatResponse getRobotAlreadyPaidRespFromHappyModel(String userId, String robotId, String userReqContent,
List<FlirtopiaChat> userHistoryMessages, String robotDefaultResp, List<String> sensitiveWords) {
// Still inside the happy-model window.
String responseContent = requestHappyModel(robotId, userReqContent, userHistoryMessages);
ChatResponse chatResponse = buildChatResponse(userId, robotId, responseContent,
"alreadyPaid:HappyModel", robotDefaultResp, sensitiveWords);
// Reply came from the happy model: refresh its last-used timestamp.
if (!chatResponse.isUseDefault()) {
updateHappyModelLatestTime(userId, robotId);
}
return chatResponse;
}
// Wraps a raw model reply into a ChatResponse, substituting the default reply
// when the content is empty or contains a sensitive word.
private ChatResponse buildChatResponse(String userId, String robotId, String content, String reasonAndModel,
String robotDefaultResp, List<String> sensitiveWords) {
// Empty reply.
if (StringUtils.isEmpty(content)) {
log.error("buildChatResponse empty {} {} {}", userId, robotId, content);
prometheusUtil.perf(PERF_ERROR_MODULE, "ai聊天回复内容为空,见: " + reasonAndModel);
return ChatResponse.builder()
.content(robotDefaultResp)
.useDefault(true)
.reasonAndModel(reasonAndModel + ":empty")
.build();
}
// Sensitive word in the reply.
if (hasSensitiveWord(content, sensitiveWords)) {
log.error("buildChatResponse hasSensitiveWord {} {} {}", userId, robotId, content);
prometheusUtil.perf(PERF_CHAT_MODULE, "ai聊天回复内容包含敏感词,见:" + reasonAndModel);
return ChatResponse.builder()
.content(robotDefaultResp)
.useDefault(true)
.reasonAndModel(reasonAndModel + ":sensitive")
.build();
}
prometheusUtil.perf(PERF_CHAT_MODULE, "ai聊天回复内容成功,见:" + reasonAndModel);
return ChatResponse.builder()
.content(content)
.reasonAndModel(reasonAndModel)
.build();
}
// Picks a random pay-prompt tip from Redis, falling back to the hard-coded default.
private String getUnPayTips() {
List<String> results = redisUtil.range(chatUnPayTipsKey(), 0, -1);
if (CollectionUtils.isEmpty(results)) {
log.error("robot unpay tips empty");
prometheusUtil.perf(PERF_CHAT_MODULE, "未能获取到待付费提示文案,使用默认文案");
return defaultToPayTips;
}
return results.get(RandomUtils.nextInt(0, results.size()));
}
/**
 * Whether the chat history is long enough to switch to the advanced GPT.
 *
 * @param userHistoryMessages chat history
 * @return true when the history size reaches the Redis-configured threshold
 */
private boolean isEnterChatgptAdvancedModel(List<FlirtopiaChat> userHistoryMessages) {
String val = redisUtil.getOrDefault(userEnterChatgptAdvanceModelThresholdKey(), defaultEnterAdvanceModelHistoryChatSize);
return userHistoryMessages.size() >= Integer.parseInt(val);
}
// True when the GPT reply contains any configured warn keyword (case-insensitive).
private boolean chatgptRespHasWarn(String content) {
if (StringUtils.isEmpty(content)) {
return false;
}
List<String> warnList = redisUtil.range(chatWarnWordKey(), 0, -1);
if (CollectionUtils.isEmpty(warnList)) {
prometheusUtil.perf(PERF_CHAT_MODULE, "未能获取到AI回复警报词列表");
return false;
}
return warnList.stream()
.anyMatch(warn -> content.toLowerCase().contains(warn.toLowerCase()));
}
// Increments the per-user/robot warn counter in Redis.
private void addGptWarnCount(String userId, String robotId) {
redisUtil.increment(userChatgptWarnKey(userId, robotId), 1L);
}
// True when the per-user/robot warn counter has reached the configured maximum.
private boolean overGptWarnCount(String userId, String robotId) {
String userChatWarnCount = redisUtil.get(userChatgptWarnKey(userId, robotId));
if (StringUtils.isEmpty(userChatWarnCount)) {
return false;
}
String userChatWarnThreshold = redisUtil.getOrDefault(userChatgptWarnMaxCountKey(), defaultUserChatWarnMaxCount);
return Integer.parseInt(userChatWarnCount) >= Integer.parseInt(userChatWarnThreshold);
}
// Records "now" as the last time the happy model replied for this user/robot.
private void updateHappyModelLatestTime(String userId, String robotId) {
redisUtil.set(userEnterHappyModelLatestTimeKey(userId, robotId), System.currentTimeMillis() + "");
}
// Happy-model exit check: exited when never entered, or idle past the window.
private boolean checkTimeForExitHappyModel(String userId, String robotId) {
String latestTimeStr = redisUtil.get(userEnterHappyModelLatestTimeKey(userId, robotId));
// No timestamp: treated as exited (the happy model was never entered).
if (StringUtils.isEmpty(latestTimeStr)) {
return true;
}
String exitExpireTimeStr = redisUtil.getOrDefault(userExitHappyModelExpireMillsKey(), defaultHappyModelExitExpireTime);
// Idle longer than the configured window (default 5 minutes): exit.
if (System.currentTimeMillis() - Long.parseLong(latestTimeStr) >= Long.parseLong(exitExpireTimeStr)) {
return true;
}
return false;
}
// Calls the external "happy model" HTTP service; returns its "response" field,
// or null when the URL is unconfigured or the call fails.
private String requestHappyModel(String robotId, String currentUserInput, List<FlirtopiaChat> historyChats) {
HappyModelRequest happyModelRequest = HappyModelRequest.builder()
.temperature(0.1)
.maxNewToken(100)
.historyMaxLen(1000)
.topP(0.85)
.userId(robotId)
.presencePenalty(0.5)
.frequencyPenalty(0.5)
.current(HappyModelRequest.Current.builder()
.u(currentUserInput)
.build())
.build();
List<HappyModelRequest.History> histories = new ArrayList<>();
// Convert the chat history into the request format (u = user, b = bot).
historyChats.forEach(historyChat -> {
if (CHAT_FROM_USER.equals(historyChat.getMessageFrom())) {
histories.add(HappyModelRequest.History.builder()
.u(historyChat.getContent())
.build());
} else {
histories.add(HappyModelRequest.History.builder()
.b(historyChat.getContent())
.build());
}
});
happyModelRequest.setHistory(histories);
try {
String url = redisUtil.get(happyModelHttpUrl());
if (StringUtils.isEmpty(url)) {
prometheusUtil.perf(PERF_ERROR_MODULE, "快乐模型URL未配置");
return null;
}
Response response = okHttpUtils.postJson(url, ObjectMapperUtils.toJSON(happyModelRequest));
String json;
if (response != null && response.body() != null) {
json = response.body().string();
log.info("json {}", json);
Map<String, String> jsonMap = ObjectMapperUtils.fromJSON(json, Map.class, String.class, String.class);
prometheusUtil.perf(PERF_CHAT_MODULE, "请求快乐模型返回结果成功");
return jsonMap.get("response");
}
} catch (Exception e) {
log.error("requestHappyModel exception", e);
prometheusUtil.perf(PERF_ERROR_MODULE, "请求快乐模型返回异常");
}
return null;
}
/**
 * Calls OpenAI chat completion for the given robot and prompt variant.
 *
 * @param robotId robot id (selects the prompt file)
 * @param version prompt variant, "normal" or "advanced"
 * @param currentUserInput current user input
 * @param historyChats chat history, sorted ascending by time
 * @return the assistant reply content, or null on missing config / empty reply
 */
private String requestChatgpt(String robotId, String version, String currentUserInput, List<FlirtopiaChat> historyChats) {
// Load the robot's prompt file (advanced/normal variant); used as the system role.
String fileName = String.format("prompt/%s_%s.prompt", robotId, version);
String prompt = FileUtils.getFileContent(fileName);
if (StringUtils.isEmpty(prompt)) {
log.error("robot {} has no prompt {} ", robotId, version);
prometheusUtil.perf(PERF_ERROR_MODULE, "未能获取到AI Prompt配置");
return null;
}
List<String> apiKeys = redisUtil.range(gptApiTokenKey(), 0, -1);
if (CollectionUtils.isEmpty(apiKeys)) {
log.error("chat gpt apikey empty {}", robotId);
prometheusUtil.perf(PERF_ERROR_MODULE, "未能获取到gptApiToken配置");
return null;
}
List<ChatMessage> messages = new ArrayList<>();
// role=system carries the persona prompt; role=user is the human; role=assistant is GPT.
ChatMessage systemMessage = new ChatMessage(ChatMessageRole.SYSTEM.value(), prompt);
messages.add(systemMessage);
// Convert the chat history into OpenAI message format.
historyChats.forEach(historyChat -> {
if (CHAT_FROM_USER.equals(historyChat.getMessageFrom())) {
messages.add(new ChatMessage(ChatMessageRole.USER.value(), historyChat.getContent()));
} else {
messages.add(new ChatMessage(ChatMessageRole.ASSISTANT.value(), historyChat.getContent()));
}
});
messages.add(new ChatMessage(ChatMessageRole.USER.value(), currentUserInput));
log.info("request openai, robot {}, request {} ", robotId, ObjectMapperUtils.toJSON(messages));
ChatMessage response = openAIService.requestChatCompletion(apiKeys, messages);
if (response == null) {
log.error("chat open ai return empty {}", robotId);
prometheusUtil.perf(PERF_ERROR_MODULE, "请求gpt返回结果为空");
return null;
}
prometheusUtil.perf(PERF_CHAT_MODULE, "请求gpt返回结果成功");
return response.getContent();
}
// True when the content contains any of the given sensitive words.
private boolean hasSensitiveWord(String content, List<String> sensitiveWords) {
if (StringUtils.isEmpty(content)) {
return false;
}
return sensitiveWords.stream()
.anyMatch(content::contains);
}
}
|
[
"com.theokanning.openai.completion.chat.ChatMessageRole.SYSTEM.value",
"com.theokanning.openai.completion.chat.ChatMessageRole.USER.value",
"com.theokanning.openai.completion.chat.ChatMessageRole.ASSISTANT.value"
] |
[((7809, 8020), 'com.happy.chat.model.ChatResponse.builder'), ((7809, 7991), 'com.happy.chat.model.ChatResponse.builder'), ((7809, 7916), 'com.happy.chat.model.ChatResponse.builder'), ((7809, 7878), 'com.happy.chat.model.ChatResponse.builder'), ((19278, 19477), 'com.happy.chat.model.ChatResponse.builder'), ((19278, 19448), 'com.happy.chat.model.ChatResponse.builder'), ((19278, 19385), 'com.happy.chat.model.ChatResponse.builder'), ((19278, 19347), 'com.happy.chat.model.ChatResponse.builder'), ((19796, 19999), 'com.happy.chat.model.ChatResponse.builder'), ((19796, 19970), 'com.happy.chat.model.ChatResponse.builder'), ((19796, 19903), 'com.happy.chat.model.ChatResponse.builder'), ((19796, 19865), 'com.happy.chat.model.ChatResponse.builder'), ((20127, 20256), 'com.happy.chat.model.ChatResponse.builder'), ((20127, 20231), 'com.happy.chat.model.ChatResponse.builder'), ((20127, 20183), 'com.happy.chat.model.ChatResponse.builder'), ((23338, 23773), 'com.happy.chat.model.HappyModelRequest.builder'), ((23338, 23748), 'com.happy.chat.model.HappyModelRequest.builder'), ((23338, 23608), 'com.happy.chat.model.HappyModelRequest.builder'), ((23338, 23569), 'com.happy.chat.model.HappyModelRequest.builder'), ((23338, 23531), 'com.happy.chat.model.HappyModelRequest.builder'), ((23338, 23498), 'com.happy.chat.model.HappyModelRequest.builder'), ((23338, 23470), 'com.happy.chat.model.HappyModelRequest.builder'), ((23338, 23433), 'com.happy.chat.model.HappyModelRequest.builder'), ((23338, 23399), 'com.happy.chat.model.HappyModelRequest.builder'), ((23634, 23747), 'com.happy.chat.model.HappyModelRequest.Current.builder'), ((23634, 23714), 'com.happy.chat.model.HappyModelRequest.Current.builder'), ((23634, 23669), 'com.happy.chat.model.HappyModelRequest.Current.builder'), ((24022, 24143), 'com.happy.chat.model.HappyModelRequest.History.builder'), ((24022, 24110), 'com.happy.chat.model.HappyModelRequest.History.builder'), ((24022, 24057), 
'com.happy.chat.model.HappyModelRequest.History.builder'), ((24197, 24318), 'com.happy.chat.model.HappyModelRequest.History.builder'), ((24197, 24285), 'com.happy.chat.model.HappyModelRequest.History.builder'), ((24197, 24232), 'com.happy.chat.model.HappyModelRequest.History.builder'), ((26687, 26717), 'com.theokanning.openai.completion.chat.ChatMessageRole.SYSTEM.value'), ((26954, 26982), 'com.theokanning.openai.completion.chat.ChatMessageRole.USER.value'), ((27078, 27111), 'com.theokanning.openai.completion.chat.ChatMessageRole.ASSISTANT.value'), ((27204, 27232), 'com.theokanning.openai.completion.chat.ChatMessageRole.USER.value')]
|
package com.vission.chatGPT.service;
import com.theokanning.openai.image.CreateImageRequest;
import com.theokanning.openai.image.Image;
import com.theokanning.openai.image.ImageResult;
import com.theokanning.openai.service.OpenAiService;
import java.util.List;
import java.util.stream.Collectors;
import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import org.springframework.stereotype.Service;
@Service
@Slf4j
@RequiredArgsConstructor
public class ChatGPTImageService {

    private final OpenAiService openAiService;

    /**
     * Asks OpenAI to render a single 1024x1024 image for the supplied prompt
     * and returns the URL of each generated image.
     *
     * @param description free-text prompt describing the desired image
     * @return URLs of the generated images (one entry per image)
     */
    public List<String> generations(String description) {
        final CreateImageRequest imageRequest =
                CreateImageRequest.builder()
                        .prompt(description)
                        .size("1024x1024")
                        .responseFormat("url")
                        .user("vission")
                        .n(1)
                        .build();
        final ImageResult result = openAiService.createImage(imageRequest);
        return result.getData()
                .stream()
                .map(generated -> generated.getUrl())
                .collect(Collectors.toList());
    }
}
|
[
"com.theokanning.openai.image.CreateImageRequest.builder"
] |
[((640, 859), 'com.theokanning.openai.image.CreateImageRequest.builder'), ((640, 834), 'com.theokanning.openai.image.CreateImageRequest.builder'), ((640, 812), 'com.theokanning.openai.image.CreateImageRequest.builder'), ((640, 779), 'com.theokanning.openai.image.CreateImageRequest.builder'), ((640, 740), 'com.theokanning.openai.image.CreateImageRequest.builder'), ((640, 705), 'com.theokanning.openai.image.CreateImageRequest.builder')]
|
package com.example.fyp.controller;
import java.util.Vector;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.http.MediaType;
import org.springframework.ui.ModelMap;
import org.springframework.web.bind.annotation.ModelAttribute;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.ResponseBody;
// additional
import org.springframework.web.bind.annotation.RestController;
import com.example.fyp.model.DetailEmotion;
import com.example.fyp.model.TextSentimentAnswer;
import com.example.fyp.model.promptModel;
import com.theokanning.openai.completion.CompletionRequest;
import com.theokanning.openai.service.OpenAiService;
//Controller to handle text analyzer's GPT sentiment requests
@RestController
public class SentimentController {

    /** Fallback highlight color (light gray) used when GPT output cannot be matched to the input. */
    private static final String DEFAULT_COLOR = "#E6E9ED";

    /** Upper bound on OpenAI retries; the old while(true) loop could spin forever on a persistent failure. */
    private static final int MAX_ATTEMPTS = 3;

    @Value("${apiKey}")
    private String apiKeyContent;

    /**
     * Runs the three GPT analyses (overall sentiment, explanation, per-keyword
     * emotions) for the given sentence and bundles them into one answer object.
     *
     * @param sentence user-provided text to analyze
     * @return populated {@link TextSentimentAnswer}
     * @throws RuntimeException propagated from the OpenAI client on API failure
     */
    private TextSentimentAnswer textAnalyzer(String sentence) throws RuntimeException {
        // NOTE(review): a real API key was previously committed here in a comment;
        // it has been removed — rotate that key, it must be considered leaked.
        String apiKey = apiKeyContent;
        String currentModel = "text-davinci-003";
        TextSentimentAnswer t = new TextSentimentAnswer();
        OpenAiService service = new OpenAiService(apiKey);

        // 1) Overall sentiment classification.
        String prompt = "Decide if this statement's sentiment is positive, neutral, or negative: " + sentence;
        String overallSentimentRaw = complete(service, currentModel, prompt, 60);
        String overallSentiment = classifySentiment(overallSentimentRaw);
        System.out.println("overall sentiment: " + overallSentiment);
        t.setOverallSentiment(overallSentiment);

        // 2) Explanation of why the sentence carries that sentiment.
        prompt = "Analyze the given sentence: \"" + sentence + "\" and explain why it has a " + overallSentiment + " sentiment. Consider the specific aspects, keywords, or phrases that contribute to this sentiment and provide a comprehensive explanation.";
        String overallContent = complete(service, currentModel, prompt, 1000);
        System.out.println("overall content: " + overallContent);
        t.setOverallContent(overallContent);

        // 3) Per-keyword emotion detection; GPT is asked for pipe-delimited rows.
        prompt = "Analyze the given input sentence " + sentence + " and detect the emotions associated with highlighted keywords or sentences, then give explanation and associate them with a unique color in hexcode that are not too dark."
                + " Please format the answer as [Emotion]|[highlighted keyword or sentence]|[explanation]|[hexcode]\n[IF MORE EMOTIONS].";
        String output = complete(service, currentModel, prompt, 1000);
        t.setEmotions(parseEmotions(sentence, output.split("\n")));
        return t;
    }

    /**
     * Sends one completion request and returns the model's reply with the
     * echoed prompt prefix stripped (the request is built with echo=true).
     */
    private String complete(OpenAiService service, String model, String prompt, int maxTokens) {
        CompletionRequest request = CompletionRequest.builder()
                .model(model)
                .prompt(prompt)
                .echo(true)
                .maxTokens(maxTokens)
                .build();
        String response = service.createCompletion(request).getChoices().get(0).getText();
        return response.substring(prompt.length()).trim();
    }

    /** Normalizes raw model output to one of: Negative, Positive, Neutral, Not found. */
    private String classifySentiment(String raw) {
        String lower = raw.toLowerCase();
        if (lower.contains("negative")) {
            return "Negative";
        } else if (lower.contains("positive")) {
            return "Positive";
        } else if (lower.contains("neutral")) {
            return "Neutral";
        }
        return "Not found";
    }

    /**
     * Parses "Emotion|highlight|explanation|hexcode" rows into DetailEmotion objects.
     * Each row is split exactly once (the old code re-split it up to eight times),
     * and malformed rows are skipped instead of throwing ArrayIndexOutOfBoundsException.
     */
    private Vector<DetailEmotion> parseEmotions(String sentence, String[] rows) {
        Vector<DetailEmotion> emotions = new Vector<DetailEmotion>();
        System.out.println(rows.length);
        for (String row : rows) {
            String[] parts = row.split("\\|");
            if (parts.length < 3) {
                // GPT occasionally emits blank or malformed lines; ignore them.
                continue;
            }
            DetailEmotion d = new DetailEmotion();
            System.out.println("sentence: " + sentence.toLowerCase());
            System.out.println("highlighted: " + parts[1].toLowerCase());
            d.setTitle(parts[0]);
            d.setHighlighted(parts[1]);
            d.setExplanation(parts[2]);
            // Only trust GPT's color when the highlighted text actually occurs in the input.
            String highlighted = parts[1].toLowerCase().replaceAll("\"", "");
            if (parts.length >= 4 && sentence.toLowerCase().contains(highlighted)) {
                d.setColor(parts[3]);
            } else {
                d.setColor(DEFAULT_COLOR);
            }
            emotions.add(d);
        }
        return emotions;
    }

    /**
     * REST entry point: analyzes the submitted prompt, retrying a bounded number
     * of times on API failure before rethrowing the last error.
     */
    @PostMapping(value = "/analyze", produces = MediaType.APPLICATION_JSON_VALUE)
    @ResponseBody
    public TextSentimentAnswer inputSentence(ModelMap model, @ModelAttribute promptModel prompt) {
        RuntimeException lastFailure = null;
        for (int attempt = 1; attempt <= MAX_ATTEMPTS; attempt++) {
            try {
                return textAnalyzer(prompt.getPrompt());
            } catch (RuntimeException e) {
                lastFailure = e;
                System.out.println("Runtime Exception occurred : " + e.getMessage());
            }
        }
        // Surfacing the failure (HTTP 500) beats the previous infinite retry loop.
        throw lastFailure;
    }
}
|
[
"com.theokanning.openai.completion.CompletionRequest.builder"
] |
[((1513, 1913), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((1513, 1844), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((1513, 1769), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((1513, 1697), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((1513, 1621), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((3056, 3478), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((3056, 3405), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((3056, 3324), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((3056, 3248), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((3056, 3168), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((4265, 4687), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((4265, 4614), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((4265, 4533), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((4265, 4457), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((4265, 4377), 'com.theokanning.openai.completion.CompletionRequest.builder')]
|
package acute.ai;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.common.base.Strings;
import com.google.gson.*;
import com.google.gson.reflect.TypeToken;
import com.google.gson.stream.JsonReader;
import com.theokanning.openai.OpenAiHttpException;
import com.theokanning.openai.client.OpenAiApi;
import com.theokanning.openai.completion.chat.ChatCompletionRequest;
import com.theokanning.openai.completion.chat.ChatMessage;
import com.theokanning.openai.completion.chat.ChatMessageRole;
import com.theokanning.openai.service.OpenAiService;
import net.md_5.bungee.api.chat.ClickEvent;
import net.md_5.bungee.api.chat.ComponentBuilder;
import net.md_5.bungee.api.chat.HoverEvent;
import net.md_5.bungee.api.chat.TextComponent;
import okhttp3.*;
import okhttp3.Authenticator;
import org.bstats.bukkit.Metrics;
import org.bukkit.Bukkit;
import org.bukkit.ChatColor;
import org.bukkit.NamespacedKey;
import org.bukkit.configuration.InvalidConfigurationException;
import org.bukkit.configuration.file.FileConfiguration;
import org.bukkit.configuration.file.YamlConfiguration;
import org.bukkit.entity.Entity;
import org.bukkit.entity.Player;
import org.bukkit.plugin.java.JavaPlugin;
import org.bukkit.scheduler.BukkitRunnable;
import retrofit2.Retrofit;
import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.File;
import java.io.IOException;
import java.lang.reflect.Type;
import java.net.*;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.StandardCopyOption;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.text.DecimalFormat;
import java.time.Duration;
import java.util.*;
import java.util.concurrent.ConcurrentHashMap;
import static com.theokanning.openai.service.OpenAiService.*;
/**
 * Main plugin class for CraftGPT (a Bukkit/Spigot plugin).
 * Handles plugin lifecycle, the OpenAI connection (with optional HTTP proxy),
 * persistence of AI-enabled mobs (data.json) and API-usage tracking
 * (usage.yml), plus helpers for chat state, name tags, and error reporting.
 */
public final class CraftGPT extends JavaPlugin {
    // Persistent-data keys used to tag the magic wand item and auto-spawn chunks.
    public NamespacedKey magicWandKey = new NamespacedKey(this, "secret");
    public NamespacedKey autoSpawnChunkFlagKey = new NamespacedKey(this, "chunk-flag");
    public boolean debug = false;
    // apiKeySet: a plausible key exists in config; apiConnected: a test request succeeded.
    public boolean apiKeySet = false;
    public boolean apiConnected = false;
    public OpenAiService openAIService;
    public static final Random random = new Random();
    // Entity UUIDs (as strings) with an OpenAI request currently in flight.
    public List<String> waitingOnAPIList = new ArrayList<>();
    // Player UUID -> mob that player is currently chatting with.
    ConcurrentHashMap<UUID, Entity> chattingPlayers = new ConcurrentHashMap<>();
    ArrayList<UUID> debuggingPlayers = new ArrayList<>();
    ConcurrentHashMap<UUID, AIMob> selectingPlayers = new ConcurrentHashMap<>();
    private final Gson gson = new GsonBuilder().registerTypeAdapter(Class.class, new ClassTypeAdapter()).setPrettyPrinting().create();
    // Entity UUID (string) -> persisted AI mob definition; mirrored to data.json.
    ConcurrentHashMap<String, AIMob> craftGPTData = new ConcurrentHashMap<>();
    private File usageFile;
    private FileConfiguration usageFileConfig;
    public static final String CHAT_PREFIX = ChatColor.GOLD + "[" + ChatColor.GRAY + "Craft" + ChatColor.GREEN + "GPT" + ChatColor.GOLD + "] " + ChatColor.GRAY;
    public static final String DISCORD_URL = "https://discord.gg/BXhUUQEymg";
    public static final String SPIGOT_URL = "https://www.spigotmc.org/resources/craftgpt.110635/";
    public static final String UPDATE_AVAILABLE = "Update available! Download v%s ";
    public static final String UP_TO_DATE = "CraftGPT is up to date: (%s)";
    public static final String UNRELEASED_VERSION = "Version (%s) is more recent than the one publicly available. Dev build?";
    public static final String UPDATE_CHECK_FAILED = "Could not check for updates. Reason: ";
    public static final int spigotID = 110635;
    /**
     * Plugin startup: registers listeners/commands, backs up and loads config,
     * loads usage.yml and data.json, starts bStats, connects to OpenAI, hooks
     * PlaceholderAPI if present, and kicks off an async update check.
     */
    @Override
    public void onEnable() {
        getLogger().info("+----------------------------------------------------------------+");
        getLogger().info("|                       CraftGPT Community                       |");
        getLogger().info("+================================================================+");
        //getLogger().info("| * Please report bugs at: https://git.io/JkJLD                  |");
        getLogger().info("| * Join the Discord at: https://discord.gg/BXhUUQEymg           |");
        getLogger().info("| * Enjoying the plugin? Leave a review and share with a friend! |");
        getLogger().info("+----------------------------------------------------------------+");
        // Register events
        getServer().getPluginManager().registerEvents(new CraftGPTListener(this), this);
        // Register commands
        getCommand("craftgpt").setExecutor(new Commands(this));
        getCommand("craftgpt").setTabCompleter(new Commands(this));
        // Save/read config.yml
        Path path = Paths.get("plugins/CraftGPT/config.yml");
        if (Files.exists(path)) { // Only save a backup if one already exists to prevent overwriting backup with defaults
            try {
                Files.copy(path,
                        Paths.get("plugins/CraftGPT/config.bak"),
                        StandardCopyOption.REPLACE_EXISTING);
            } catch (IOException e) {
                getLogger().warning("Failed to create backup config!");
            }
        }
        saveDefaultConfig();
        getConfig().options().copyDefaults(true);
        debug = getConfig().getBoolean("debug");
        if (debug) {
            getLogger().info(CHAT_PREFIX + "Debug mode enabled!");
        }
        // Save/read usage.yml
        createUsageFile(false);
        // Load data.json
        craftGPTData = readData(this);
        if (craftGPTData != null) getLogger().info(String.format("Loaded %s AI-enabled mobs.", craftGPTData.size()));
        getLogger().info(String.format("Loaded %s events.", getConfig().getConfigurationSection("events").getKeys(false).size()));
        // Connect to bStats
        int bStatsId = 18710;
        Metrics metrics = new Metrics(this, bStatsId);
        enableOpenAI();
        if(Bukkit.getPluginManager().getPlugin("PlaceholderAPI") != null) {
            new PlaceholderAPIExpansion(this).register();
            getLogger().info("Registered with PlaceholderAPI");
        }
        // Check for updates
        UpdateChecker.init(this, spigotID).requestUpdateCheck().whenComplete((result, exception) -> {
            if (result.requiresUpdate()) {
                this.getLogger().warning((String.format(
                        UPDATE_AVAILABLE, result.getNewestVersion()) + "at " + SPIGOT_URL));
                return;
            }
            UpdateChecker.UpdateReason reason = result.getReason();
            if (reason == UpdateChecker.UpdateReason.UP_TO_DATE) {
                this.getLogger().info(String.format(UP_TO_DATE, result.getNewestVersion()));
            } else if (reason == UpdateChecker.UpdateReason.UNRELEASED_VERSION) {
                this.getLogger().info(String.format(UNRELEASED_VERSION, result.getNewestVersion()));
            } else {
                this.getLogger().warning(UPDATE_CHECK_FAILED + reason);
            }
        });
        getLogger().info("Enabled");
    }
    /**
     * Plugin shutdown: ends every active chat session, then flushes data.json
     * and usage.yml synchronously (async tasks are unavailable while disabling).
     */
    @Override
    public void onDisable() {
        getLogger().info("Disabling...");
        if (!chattingPlayers.isEmpty()) {
            getLogger().info("Ending chats...");
            // Copy the key set: exitChat mutates chattingPlayers during iteration.
            Set<UUID> chattingUUIDs = new HashSet<>(chattingPlayers.keySet());
            for (UUID uuid: chattingUUIDs) {
                CraftGPTListener craftGPTListener = new CraftGPTListener(this);
                craftGPTListener.exitChat(getServer().getPlayer(uuid));
            }
        }
        getLogger().info("Writing save data...");
        writeData(this);
        saveUsageFile();
        getLogger().warning("Disabled");
    }
    /** Returns the in-memory view of usage.yml. */
    public FileConfiguration getUsageFile() {
        return this.usageFileConfig;
    }
    /** Synchronously persists usage.yml; wraps IOException as unchecked. */
    public void saveUsageFile() {
        try {
            getUsageFile().save(usageFile);
        } catch (IOException e) {
            throw new RuntimeException(e);
        }
    }
    /**
     * Persists usage.yml off the main thread, then reloads it.
     * Falls back to a synchronous save when the plugin is disabled.
     */
    public void saveUsageFileAsync() {
        // Can't run/schedule async tasks when disabled!
        if (!this.isEnabled()) {
            saveUsageFile();
            return;
        }
        Bukkit.getScheduler().runTaskAsynchronously(this, new Runnable() {
            @Override
            public void run() {
                try {
                    getUsageFile().save(usageFile);
                    getUsageFile().load(usageFile);
                } catch (IOException e) {
                    throw new RuntimeException(e);
                } catch (InvalidConfigurationException e) {
                    throw new RuntimeException(e);
                }
            }
        });
    }
    /**
     * Ensures usage.yml exists (copying the bundled default when missing,
     * empty, or when overwrite is requested) and loads it into memory.
     *
     * @param overwrite true to replace any existing file with the default
     */
    public void createUsageFile(boolean overwrite) {
        usageFile = new File(getDataFolder(), "usage.yml");
        if (!usageFile.exists() || overwrite || usageFile.length() == 0L) {
            usageFile.getParentFile().mkdirs();
            saveResource("usage.yml", true);
        }
        this.usageFileConfig = YamlConfiguration.loadConfiguration(usageFile);
    }
    /**
     * Builds the OpenAI client from config (API key, timeout, optional
     * authenticated HTTP proxy) and verifies connectivity asynchronously.
     * Sets apiKeySet/apiConnected as each stage succeeds.
     */
    public void enableOpenAI() {
        String key = getConfig().getString("api_key");
        // Length check is a cheap sanity filter, not real key validation.
        if (key == null || key.length() < 15) {
            getLogger().severe("No API key specified in config! Must set an API key for CraftGPT to work!");
            return;
        }
        else {
            apiKeySet = true;
        }
        // Create HTTP client and OpenAI connection with configurable proxy and timeout
        ObjectMapper mapper = defaultObjectMapper();
        OkHttpClient client;
        Duration timeout = Duration.ofSeconds(getConfig().getInt("timeout"));
        if (getConfig().getBoolean("proxy.enabled")) {
            Proxy proxy = new Proxy(Proxy.Type.HTTP, new InetSocketAddress(getConfig().getString("proxy.host"), getConfig().getInt("proxy.port")));
            if (getConfig().getBoolean("proxy.authentication.enabled")) {
                getLogger().info("Authenticating to HTTP proxy...");
                Authenticator proxyAuthenticator = new Authenticator() {
                    @Override
                    public Request authenticate(Route route, Response response) throws IOException {
                        String credential = Credentials.basic(getConfig().getString("proxy.authentication.username"), getConfig().getString("proxy.authentication.password"));
                        return response.request().newBuilder()
                                .header("Proxy-Authorization", credential)
                                .build();
                    }
                };
                client = defaultClient(key, timeout)
                        .newBuilder()
                        .proxyAuthenticator(proxyAuthenticator)
                        .proxy(proxy)
                        .build();
            } else {
                client = defaultClient(key, timeout)
                        .newBuilder()
                        .proxy(proxy)
                        .build();
            }
            getLogger().info("Connecting to OpenAI via proxy (" + getConfig().getString("proxy.host") + ":" + getConfig().getInt("proxy.port") + ")...");
        } else {
            client = defaultClient(key, timeout)
                    .newBuilder()
                    .build();
        }
        Retrofit retrofit = defaultRetrofit(client, mapper);
        OpenAiApi api = retrofit.create(OpenAiApi.class);
        openAIService = new OpenAiService(api);
        // Probe the API off the main thread so a slow/failed handshake can't freeze the server.
        new BukkitRunnable() {
            @Override
            public void run() {
                long start = System.currentTimeMillis();
                getLogger().info("Connecting to OpenAI...");
                String response = tryNonChatRequest("Say hi", "Hi!", .1f, 2);
                if (response == null) {
                    getLogger().severe("Tried 3 times and couldn't connect to OpenAI for the error(s) printed above!");
                    getLogger().severe("Read the error message carefully before asking for help in the Discord. Almost all errors are resolved by ensuring you have a valid and billable API key.");
                } else {
                    long end = System.currentTimeMillis();
                    getLogger().info("Connected to OpenAI!" + " (" + ((end-start) / 1000f) + "s)");
                    apiConnected = true;
                }
            }
        }.runTaskAsynchronously(this);
    }
    /**
     * Serializes craftGPTData to data.json, creating the file (with empty
     * JSON) first if it does not exist. IO errors are logged, not rethrown.
     */
    public void writeData(CraftGPT craftGPT) {
        long start = System.currentTimeMillis();
        Path path = Paths.get(craftGPT.getDataFolder() + "/data.json");
        try {
            if(!Files.exists(path)) {
                Files.createDirectories(path.getParent());
                Files.createFile(path);
                getLogger().severe("No data.json exists! Creating empty one.");
                // Initialize with empty JSON
                Files.write(path, "{}".getBytes(StandardCharsets.UTF_8));
            }
        } catch (IOException e) {
            e.printStackTrace();
        }
        try (BufferedWriter bufferedWriter = Files.newBufferedWriter(path, StandardCharsets.UTF_8)) {
            gson.toJson(craftGPTData, bufferedWriter);
            long end = System.currentTimeMillis();
            getLogger().info("Wrote data.json! (" + (end-start) + "ms)");
        } catch (IOException e) {
            e.printStackTrace();
        }
    }
    //fixme Probably much better way of handling this. The ChatMessage type couldn't be automatically parsed by gson
    /**
     * Gson adapter that (de)serializes Class references by fully-qualified
     * name, so AIMob's ChatMessage class metadata survives the JSON round trip.
     */
    public class ClassTypeAdapter implements JsonSerializer<Class<ChatMessage>>, JsonDeserializer<Class<?>> {
        @Override
        public JsonElement serialize(Class<ChatMessage> src, Type typeOfSrc, JsonSerializationContext context) {
            return new JsonPrimitive(src.getName());
        }
        @Override
        public Class<?> deserialize(JsonElement json, Type typeOfT, JsonDeserializationContext context) throws JsonParseException {
            try {
                return Class.forName(json.getAsString());
            } catch (ClassNotFoundException e) {
                throw new JsonParseException(e);
            }
        }
    }
    /**
     * Loads data.json into a map (creating an empty file when absent).
     * Returns null when the file cannot be read.
     */
    public ConcurrentHashMap<String, AIMob> readData(CraftGPT craftGPT) {
        Path path = Paths.get(craftGPT.getDataFolder() + "/data.json");
        try {
            if (!Files.exists(path)) {
                getLogger().info("Creating data.json");
                Files.createDirectories(path.getParent());
                Files.createFile(path);
                // Initialize with empty JSON
                Files.write(path, "{}".getBytes(StandardCharsets.UTF_8));
            }
        } catch (IOException e) {
            e.printStackTrace();
        }
        try {
            BufferedReader bufferedReader = Files.newBufferedReader(path, StandardCharsets.UTF_8);
            JsonReader jsonReader = new JsonReader(bufferedReader);
            ConcurrentHashMap<String, AIMob> map = gson.fromJson(jsonReader, new TypeToken<ConcurrentHashMap<String, AIMob>>() {}.getType());
            getLogger().info("Read data.json!");
            return map;
        } catch (IOException e) {
            return null;
        }
    }
    /**
     * Renders a text progress bar of totalBars symbols: the completed portion
     * in completedColor, the remainder in notCompletedColor. current is
     * clamped to max.
     */
    public String rawProgressBar(int current, int max, int totalBars, char symbol, ChatColor completedColor,
                                 ChatColor notCompletedColor) {
        if (current > max) current = max;
        float percent = (float) current / max;
        int progressBars = (int) (totalBars * percent);
        return Strings.repeat("" + completedColor + symbol, progressBars)
                + Strings.repeat("" + notCompletedColor + symbol, totalBars - progressBars);
    }
    /**
     * Progress bar whose completed color shifts green -> yellow (>50%) ->
     * red (>75%) as usage approaches the limit.
     */
    public String colorProgressBar(int current, int max, int totalBars) {
        ChatColor completedColor = ChatColor.GREEN;
        double percentage = (double) current / max;
        if (percentage > .5) {
            completedColor = ChatColor.YELLOW;
        }
        if (percentage > .75) {
            completedColor = ChatColor.RED;
        }
        return rawProgressBar(current, max, totalBars, '|', completedColor, ChatColor.GRAY);
    }
    /** 40-bar colored gauge of the player's token usage vs their limit. */
    public String getPlayerUsageProgressBar(Player player) {
        return colorProgressBar((int) getPlayerUsagePercentage(player), 100, 40);
    }
    /** Player's token usage as a percentage of their limit, rounded to 2 decimals. */
    public double getPlayerUsagePercentage(Player player) {
        long limit = CraftGPTListener.getTokenLimit(player);
        long usage = getUsageFile().getLong("players." + player.getUniqueId() + ".total-usage");
        DecimalFormat dfZero = new DecimalFormat("0.00");
        return Double.valueOf(dfZero.format(100.0 * usage / limit));
    }
    /** Server-wide token usage as a percentage of the configured global limit. */
    public double getGlobalUsagePercentage() {
        long limit = getConfig().getLong("global-usage-limit");
        long usage = getUsageFile().getLong("global-total-usage");
        DecimalFormat dfZero = new DecimalFormat("0.00");
        return Double.valueOf(dfZero.format(100.0 * usage / limit));
    }
    /** 40-bar colored gauge of global token usage. */
    public String getGlobalUsageProgressBar() {
        return colorProgressBar((int) getGlobalUsagePercentage(), 100, 40);
    }
    /** True when the entity has a persisted AIMob entry. */
    public boolean isAIMob(Entity entity) {
        if (craftGPTData.containsKey(entity.getUniqueId().toString())) return true;
        else return false;
    }
    /** Returns the entity's AIMob definition, or null if it is not AI-enabled. */
    public AIMob getAIMob(Entity entity) {
        if (isAIMob(entity)) {
            return craftGPTData.get(entity.getUniqueId().toString());
        } else {
            return null;
        }
    }
    /** True when the player currently has an active chat session with a mob. */
    public boolean isChatting(Player player) {
        if (chattingPlayers.containsKey(player.getUniqueId())) return true;
        else return false;
    }
    /**
     * Calls nonChatRequest up to three times; returns the first successful
     * response, or null after all attempts fail. Repeated identical OpenAI
     * errors are collapsed to one short warning to avoid log spam.
     */
    public String tryNonChatRequest(String systemMessage, String userMessage, float temp, int maxTokens) {
        String errorSignature = null;
        String response;
        for (int i = 0; i < 3; i++) {
            try {
                response = nonChatRequest(systemMessage, userMessage, temp, maxTokens);
                return response;
            } catch (OpenAiHttpException e) {
                if (errorSignature != null && errorSignature.equals(e.statusCode + e.type)) {
                    getLogger().warning("Failed again with identical error on try number " + (i+1) + ".");
                } else {
                    printAPIErrorConsole(e);
                    errorSignature = e.statusCode + e.type;
                }
            } catch (Exception e) {
                getLogger().warning(String.format("[Try %s] Non-OpenAI error: " + e.getMessage(), i));
                if (!e.getMessage().contains("timeout")) {
                    e.printStackTrace();
                }
            }
        }
        return null;
    }
    /**
     * Single gpt-3.5-turbo chat completion with one system and one user
     * message; returns the first choice's content. Throws on API failure
     * (see tryNonChatRequest for the retrying wrapper).
     */
    public String nonChatRequest(String systemMessage, String userMessage, float temp, int maxTokens) {
        List<ChatMessage> chatMessages = new ArrayList<>();
        chatMessages.add(new ChatMessage(ChatMessageRole.SYSTEM.value(), systemMessage));
        chatMessages.add(new ChatMessage(ChatMessageRole.USER.value(), userMessage));
        ChatCompletionRequest completionRequest = ChatCompletionRequest.builder()
                .messages(chatMessages)
                .temperature((double) temp)
                .maxTokens(maxTokens)
                .model("gpt-3.5-turbo")
                .build();
        return openAIService.createChatCompletion(completionRequest).getChoices().get(0).getMessage().getContent();
    }
    /** Logs an OpenAI HTTP error with admin-facing hints for the common causes. */
    public void printAPIErrorConsole(OpenAiHttpException e) {
        getLogger().warning("OpenAI API error!");
        getLogger().warning("Error type: " + e.type);
        getLogger().warning("OpenAI error code: " + e.statusCode);
        getLogger().warning("OpenAI error message: " + e.getMessage());
        if (e.getMessage().contains("quota")) {
            getLogger().warning("This is most often caused by an invalid API key or because your OpenAI account is not a paid account/does not have a payment method configured.");
            getLogger().warning("Using the API *REQUIRES* credits in your account which can either be purchased with a credit card or through a free trial.");
            getLogger().warning("More information on OpenAI errors available here: https://help.openai.com/en/collections/3808446-api-error-codes-explained");
        }
        else if (e.getMessage().contains("Rate limit reached")) {
            getLogger().warning("This is most often occurs because the OpenAI free trial credits have a low rate limit of 3 messages/min. You must wait to send messages or add a billing method to your account.");
        }
    }
    /** Flips the entity's waiting-on-API flag and refreshes its name tag. */
    public void toggleWaitingOnAPI(Entity entity) {
        if (isWaitingOnAPI(entity)) {
            waitingOnAPIList.remove(entity.getUniqueId().toString());
        }
        else waitingOnAPIList.add(entity.getUniqueId().toString());
        renameMob(entity);
    }
    /** True while an OpenAI request is in flight for this entity. */
    public boolean isWaitingOnAPI(Entity entity) {
        if (waitingOnAPIList.contains(entity.getUniqueId().toString())) {
            return true;
        }
        else return false;
    }
    /**
     * Updates the entity's visible name tag to reflect its AI state:
     * clock icon while waiting on the API, green bolt while being chatted
     * with, blue bolt when AI-enabled but idle, no tag otherwise.
     * Players and Citizens NPCs (metadata "NPC") are never renamed.
     */
    public void renameMob(Entity entity) {
        if (!(entity instanceof Player) && !entity.hasMetadata("NPC")) {
            entity.setCustomNameVisible(true);
            if (isWaitingOnAPI(entity)) {
                if (!craftGPTData.containsKey(entity.getUniqueId().toString())) {
                    // Enabling mob (clock icon)
                    entity.setCustomName("Enabling..." + ChatColor.YELLOW + " \u231A");
                } else {
                    // Waiting on API (clock icon)
                    entity.setCustomName(craftGPTData.get(entity.getUniqueId().toString()).getName() + ChatColor.YELLOW + " \u231A");
                }
            }
            else {
                if (chattingPlayers.containsValue(entity)) {
                    // Currently chatting (green lightning bolt)
                    entity.setCustomName(craftGPTData.get(entity.getUniqueId().toString()).getName() + ChatColor.GREEN + " \u26A1");
                    // star "\u2B50"
                }
                else {
                    if (isAIMob(entity)) {
                        // AI-enabled (blue lightning bolt)
                        entity.setCustomName(craftGPTData.get(entity.getUniqueId().toString()).getName() + ChatColor.BLUE + " \u26A1");
                    } else {
                        entity.setCustomName(null);
                        entity.setCustomNameVisible(false);
                    }
                }
            }
        }
    }
    /**
     * Builds the system ChatMessage for a mob by substituting its entity type
     * and backstory into the configured default prompt template.
     */
    public ChatMessage generateDefaultPrompt(AIMob aiMob) {
        String newPrompt = getConfig().getString("prompt.default-system-prompt");
        newPrompt = newPrompt.replace("%ENTITY_TYPE%", aiMob.getEntityType());
        newPrompt = newPrompt.replace("%BACKSTORY%", aiMob.getBackstory());
        if (debug) getLogger().info("PROMPT: " + newPrompt);
        return new ChatMessage(ChatMessageRole.SYSTEM.value(), newPrompt);
    }
    /** Registers the AIMob under the entity UUID and flushes data.json immediately. */
    public void createAIMobData(AIMob aiMob, String uuid) {
        if (debug) getLogger().info("************************************\n" + aiMob.getName() + "\n" + aiMob.getTemperature() + "\n" + aiMob.getMessages() + "\n" + aiMob.getBackstory());
        craftGPTData.put(uuid, aiMob);
        writeData(this);
    }
    /** Tells the player (and console) that mob creation failed, with likely causes. */
    public void printFailureToCreateMob(Player player, Entity entity) {
        getLogger().severe("Mob at: " + entity.getLocation() + " failed to enable due to error printed above!");
        player.sendMessage(CraftGPT.CHAT_PREFIX + ChatColor.RED + "ERROR: OpenAI API failure!");
        player.sendMessage(ChatColor.RED + "=======================================");
        player.sendMessage(ChatColor.RED + "- This is most often caused by an invalid API key or because your OpenAI account is not a paid account/does not have a payment method configured.");
        player.sendMessage(ChatColor.RED + "- Using the API" + ChatColor.UNDERLINE + ChatColor.ITALIC + ChatColor.WHITE + " requires " + ChatColor.RESET + ChatColor.RED + "credits in your account from a credit card or free trial.");
        player.sendMessage(ChatColor.RED + "- For more information on the exact error, see the server logs.");
        player.sendMessage(ChatColor.RED + "=======================================");
    }
    /**
     * Builds a chat component that runs the given command when clicked and
     * shows hoverText on mouse-over.
     */
    public TextComponent getClickableCommandHoverText(String message, String command, String hoverText) {
        TextComponent textComponent = new TextComponent(message);
        textComponent.setClickEvent(new ClickEvent(ClickEvent.Action.RUN_COMMAND, command));
        textComponent.setHoverEvent(new HoverEvent(HoverEvent.Action.SHOW_TEXT, new ComponentBuilder(hoverText).create()));
        return textComponent;
    }
}
|
[
"com.theokanning.openai.completion.chat.ChatMessageRole.SYSTEM.value",
"com.theokanning.openai.completion.chat.ChatMessageRole.USER.value",
"com.theokanning.openai.completion.chat.ChatCompletionRequest.builder"
] |
[((5847, 5900), 'org.bukkit.Bukkit.getPluginManager'), ((8083, 8576), 'org.bukkit.Bukkit.getScheduler'), ((18793, 18823), 'com.theokanning.openai.completion.chat.ChatMessageRole.SYSTEM.value'), ((18883, 18911), 'com.theokanning.openai.completion.chat.ChatMessageRole.USER.value'), ((18978, 19196), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((18978, 19171), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((18978, 19131), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((18978, 19093), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((18978, 19049), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((22808, 22838), 'com.theokanning.openai.completion.chat.ChatMessageRole.SYSTEM.value')]
|
package com.georgster.control.manager;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.ArrayList;
import java.util.List;
import com.georgster.collectable.Collected;
import com.georgster.control.util.ClientContext;
import com.georgster.database.ProfileType;
import com.georgster.economy.CoinBank;
import com.georgster.gpt.MemberChatCompletions;
import com.georgster.profile.UserProfile;
import com.theokanning.openai.completion.chat.ChatCompletionRequest;
import com.theokanning.openai.completion.chat.ChatCompletionResult;
import com.theokanning.openai.completion.chat.ChatMessage;
import com.theokanning.openai.service.OpenAiService;
import discord4j.core.event.domain.guild.GuildCreateEvent;
import discord4j.core.object.entity.Member;
/**
* Manages all {@link UserProfile UserProfiles} for a {@link com.georgster.control.SoapClient SoapClient}.
*/
public class UserProfileManager extends SoapManager<UserProfile> {
private static OpenAiService aiService; //The singleton AI Service to communicate with OpenAI's API
/**
* Creates a new UserProfileManager for the given SoapClient's {@link ClientContext}.
*
* @param context The context of the SoapClient for this manager.
*/
public UserProfileManager(ClientContext context) {
super(context, ProfileType.PROFILES, UserProfile.class, "memberId");
createAiService();
}
/**
* Creates the singleton OpenAiService.
*/
private static void createAiService() {
try {
if (aiService == null) {
aiService = new OpenAiService(Files.readString(Path.of(System.getProperty("user.dir"),"src", "main", "java", "com", "georgster", "gpt", "openaikey.txt")));
}
} catch (IOException e) {
e.printStackTrace();
}
}
/**
* Updates all user profiles information with the data in the {@link GuildCreateEvent}.
*
* @param event The event with all member information.
*/
public void updateFromEvent(GuildCreateEvent event) {
event.getGuild().getMembers().subscribe(member -> {
String id = member.getId().asString();
if (exists(id)) {
// All manageables must maintained.
UserProfile profile = get(id);
MemberChatCompletions completions = profile.getCompletions();
if (completions == null) completions = new MemberChatCompletions(id);
CoinBank bank = profile.getBank();
if (bank == null) bank = new CoinBank(id);
List<Collected> collecteds = profile.getCollecteds();
if (collecteds == null) collecteds = new ArrayList<>();
update(new UserProfile(event.getGuild().getId().asString(), id, member.getTag(), completions, bank, collecteds));
} else {
add(new UserProfile(event.getGuild().getId().asString(), id, member.getTag()));
}
});
}
/**
* Creates a new ChatCompletion for the provided {@link Member} based on the given
* prompt and returns all responses.
* <p>
* The response will be based on the previous ten chat completions for that member
* in this manager.
* <p>
* Uses OpenAI's gpt-3.5-turbo model.
* <p>
* <b>Note:</b> Only the first response will be saved in the member's chat completion log.
*
* @param prompt The prompt from the user.
* @param member The member who prompted the completion request.
* @return All responses from the AI.
*/
public List<String> createCompletionGetAll(String prompt, Member member) {
UserProfile profile = get(member.getId().asString());
List<String> responses = new ArrayList<>();
createCompletionRequest(prompt, member).getChoices().forEach(choice -> responses.add(choice.getMessage().getContent()));
profile.getCompletions().addCompletion(prompt, responses.get(0));
update(profile);
return responses;
}
/**
 * Creates a new ChatCompletion for the provided {@link Member} based on the given
 * prompt and returns only the first response.
 * The completion is conditioned on the member's previous ten chat completions
 * held by this manager, using OpenAI's gpt-3.5-turbo model.
 *
 * @param prompt The prompt from the user.
 * @param member The member who prompted the completion request.
 * @return The first response from the AI.
 */
public String createCompletion(String prompt, Member member) {
    String memberId = member.getId().asString();
    UserProfile profile = get(memberId);
    // Take the first choice's content and record it in the member's history.
    String reply = createCompletionRequest(prompt, member)
            .getChoices()
            .get(0)
            .getMessage()
            .getContent();
    profile.getCompletions().addCompletion(prompt, reply);
    update(profile);
    return reply;
}
/**
 * Builds and sends a ChatCompletionRequest to OpenAI using the
 * {@code gpt-3.5-turbo} model, seeding the conversation with a system
 * message and the member's stored question/answer history.
 *
 * @param prompt The prompt from the user.
 * @param member The member who prompted the completion request.
 * @return The result of the request.
 */
private ChatCompletionResult createCompletionRequest(String prompt, Member member) {
    String memberId = member.getId().asString();
    List<ChatMessage> conversation = new ArrayList<>();
    // Fixed system persona for every request.
    conversation.add(new ChatMessage("system", "You are a Discord bot called SOAP Bot."));
    // Replay the member's stored prompt/response pairs as user/assistant turns.
    get(memberId).getCompletions().getTokens().forEach(entry ->
            entry.forEach((question, answer) -> {
                conversation.add(new ChatMessage("user", question));
                conversation.add(new ChatMessage("assistant", answer));
            })
    );
    conversation.add(new ChatMessage("user", prompt));
    return aiService.createChatCompletion(
            ChatCompletionRequest.builder()
                    .model("gpt-3.5-turbo")
                    .messages(conversation)
                    .build());
}
/**
 * Returns the total amount of coins for all users in this manager,
 * that is, the total amount of coins for users in a Guild.
 *
 * @return The total amount of coins for all users in this manager.
 */
public long getTotalCoins() {
    long total = 0L;
    for (UserProfile profile : getAll()) {
        total += profile.getBank().getBalance();
    }
    return total;
}
/**
 * Synchronizes each profile's collected items with the state held by the
 * given {@link CollectableManager}: any previously stored entry with the
 * same identifier is replaced by the manager's copy.
 *
 * @param manager The collectable manager to copy state from.
 */
public void updateFromCollectables(CollectableManager manager) {
    manager.getAll().forEach(item ->
            item.getCollecteds().forEach(entry -> {
                UserProfile profile = get(entry.getMemberId());
                List<Collected> owned = profile.getCollecteds();
                // Drop any stale copy of this collectable before re-adding it.
                owned.removeIf(existing -> existing.getIdentifier().equals(entry.getIdentifier()));
                owned.add(entry);
                update(profile);
            })
    );
}
}
|
[
"com.theokanning.openai.completion.chat.ChatCompletionRequest.builder"
] |
[((5814, 5895), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((5814, 5887), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((5814, 5864), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder')]
|
package de.ja.view.explanation.audio;
import com.theokanning.openai.OpenAiHttpException;
import com.theokanning.openai.audio.CreateSpeechRequest;
import com.theokanning.openai.completion.chat.ChatCompletionRequest;
import com.theokanning.openai.completion.chat.ChatCompletionResult;
import com.theokanning.openai.completion.chat.ChatMessage;
import com.theokanning.openai.completion.chat.ChatMessageRole;
import com.theokanning.openai.service.OpenAiService;
import de.ja.model.audio.SpeechResult;
import de.ja.view.ExplainerFrame;
import de.ja.view.explanation.audio.player.AudioPlayerPanel;
import de.swa.gc.GraphCode;
import net.miginfocom.swing.MigLayout;
import okhttp3.ResponseBody;
import org.jdesktop.swingx.JXTaskPane;
import javax.swing.*;
import javax.swing.border.TitledBorder;
import java.awt.*;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.time.Duration;
import java.util.ArrayList;
import java.util.List;
import java.util.Objects;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.stream.Collectors;
/**
 * User-interface panel for creating an auditive (text-to-speech)
 * explanation of Graph Codes. (Comments translated from the original German.)
 */
public class AudioPanel extends JPanel implements ActionListener {
    // API key, read from the "OpenAI-Key" environment variable in the constructor.
    private static String key;
    private final JComboBox<String> modelComboBox;
    private final JComboBox<String> voiceComboBox;
    private final JComboBox<String> formatComboBox;
    private final JSpinner speedSpinner;
    // Text area displaying the generated prompt.
    private final JTextArea promptArea;
    private final JButton generateAudioButton;
    // Messages that make up the prompt.
    private List<ChatMessage> messages = new ArrayList<>();
    private final AudioPlayerPanel audioPlayerPanel;
    // Back reference to the enclosing frame (used for console output).
    private final ExplainerFrame reference;

    public AudioPanel(ExplainerFrame reference) {
        this.reference = reference;
        key = System.getenv("OpenAI-Key");
        // Define the layout.
        MigLayout imagePanelMigLayout = new MigLayout("" , "[fill, grow]", "10[10%][][][fill,30%][60%]"); //1. 10%
        setLayout(imagePanelMigLayout);
        // Initialize and configure the text area for the prompt.
        promptArea = new JTextArea();
        promptArea.setLineWrap(true);
        promptArea.setWrapStyleWord(true);
        promptArea.setEditable(false);
        JScrollPane promptSP = new JScrollPane(promptArea);
        promptSP.setHorizontalScrollBarPolicy(ScrollPaneConstants.HORIZONTAL_SCROLLBAR_NEVER);
        promptSP.setBorder(new TitledBorder("Generated Prompt"));
        add(promptSP, "cell 0 3, growx, height ::30%, aligny top");
        // Set aesthetic properties for the advanced-options task pane...
        UIManager.put("TaskPane.animate", Boolean.FALSE);
        UIManager.put("TaskPane.titleOver", new Color(200, 200, 200));
        UIManager.put("TaskPane.titleForeground", new Color(187, 187, 187));
        UIManager.put("TaskPane.titleBackgroundGradientStart", new Color(85, 88, 89));
        UIManager.put("TaskPane.titleBackgroundGradientEnd", new Color(85, 88, 89));
        UIManager.put("TaskPane.background", new Color(76, 80, 82));
        UIManager.put("TaskPane.borderColor", new Color(94, 96, 96));
        // Initialize and configure the advanced options.
        JXTaskPane advancedOptions = new JXTaskPane();
        advancedOptions.setCollapsed(true);
        advancedOptions.setTitle("Advanced Options");
        add(advancedOptions, "cell 0 0, growx, aligny top");
        // Define the layout for the controls inside the advanced options.
        MigLayout advancedOptionsMigLayout = new MigLayout("", "0[]5[]10[]5[]0", "0[]0");
        advancedOptions.setLayout(advancedOptionsMigLayout);
        // Define the advanced options.
        JLabel modelLabel = new JLabel("Model:");
        modelLabel.setHorizontalTextPosition(SwingConstants.CENTER);
        modelLabel.setHorizontalAlignment(SwingConstants.CENTER);
        modelComboBox = new JComboBox<>();
        modelComboBox.addItem("tts-1");
        modelComboBox.addItem("tts-1-hd");
        JLabel voiceLabel = new JLabel("Voice:");
        voiceLabel.setHorizontalTextPosition(SwingConstants.CENTER);
        voiceLabel.setHorizontalAlignment(SwingConstants.CENTER);
        voiceComboBox = new JComboBox<>();
        voiceComboBox.addItem("alloy");
        voiceComboBox.addItem("echo");
        voiceComboBox.addItem("fable");
        voiceComboBox.addItem("onyx");
        voiceComboBox.addItem("nova");
        voiceComboBox.addItem("shimmer");
        JLabel formatLabel = new JLabel("Format:");
        formatLabel.setHorizontalTextPosition(SwingConstants.CENTER);
        formatLabel.setHorizontalAlignment(SwingConstants.CENTER);
        formatComboBox = new JComboBox<>();
        formatComboBox.addItem("mp3");
        formatComboBox.addItem("opus");
        formatComboBox.addItem("aac");
        formatComboBox.addItem("flac");
        JLabel speedLabel = new JLabel("Speed:");
        speedLabel.setHorizontalTextPosition(SwingConstants.CENTER);
        speedLabel.setHorizontalAlignment(SwingConstants.CENTER);
        // Playback speed: default 1.0, allowed range 0.25–4.0 in 0.01 steps (OpenAI limits).
        SpinnerNumberModel nSpinnerNumberModel = new SpinnerNumberModel(1, 0.25, 4.0, 0.01);
        speedSpinner = new JSpinner();
        speedSpinner.setModel(nSpinnerNumberModel);
        advancedOptions.add(modelLabel);
        advancedOptions.add(modelComboBox);
        advancedOptions.add(voiceLabel);
        advancedOptions.add(voiceComboBox);
        advancedOptions.add(formatLabel);
        advancedOptions.add(formatComboBox);
        advancedOptions.add(speedLabel);
        advancedOptions.add(speedSpinner);
        // Button to trigger audio generation.
        generateAudioButton = new JButton("Generate Audio");
        generateAudioButton.addActionListener(this);
        add(generateAudioButton, "cell 0 1, width ::150px, aligny top");
        audioPlayerPanel = new AudioPlayerPanel();
        add(audioPlayerPanel,"cell 0 2, growx, aligny top");
    }

    /**
     * Processes the Graph Code: generates the prompt from it and shows
     * it in the prompt area (or clears the area when null).
     * @param graphCode The selected Graph Code.
     */
    public void setGraphCode(GraphCode graphCode) {
        if(graphCode != null) {
            // Prepare the prompt.
            String prompt = setUpPrompt(graphCode);
            promptArea.setText(prompt);
        } else {
            promptArea.setText(null);
        }
    }

    /**
     * Prepares the chat messages and generates the prompt text
     * from the Graph Code.
     * @param graphCode The selected Graph Code.
     * @return The generated prompt.
     */
    private String setUpPrompt(GraphCode graphCode) {
        // All term pairs that have a 1-relationship.
        String s = graphCode.getFormattedTerms();
        // Chat messages that make up the prompt.
        messages = new ArrayList<>();
        messages.add(new ChatMessage(ChatMessageRole.SYSTEM.value(),
                "You are an assistant, who is able to generate cohesive textual explanations based on a collection of words."));
        messages.add(new ChatMessage(
                ChatMessageRole.ASSISTANT.value(),
                "The collection of words represents a dictionary. The dictionary contains so-called feature " +
                        "vocabulary terms. Additionally some of these terms are connected through a relationship. " +
                        "These relationships will be noted as <i_t> - <i_t1,...,i_tn>, where i_t denotes the index of a feature " +
                        "vocabulary term in the given collection."));
        messages.add(new ChatMessage(
                "assistant",
                "Using these terms, we can create a coherent explanation that accurately " +
                        "describes the terms and its relations.\n" +
                        "\n" +
                        "An example could be: The image shows water, the sky, and clouds. " +
                        "We can imagine a scene with clouds floating in the sky above."));
        messages.add(new ChatMessage(
                "user",
                "The collections of words is as follows: " + graphCode.listTerms() + ". Only respect these terms and its relations: " + s + ", and ignore all others. " +
                        "Do not create an explanation regarding the dictionary. Only generate a text containing " +
                        "the terms of the dictionary like in the example above."));
        messages.add(new ChatMessage(
                "assistant",
                "Based on the dictionary, here is a cohesive text " +
                        "containing the terms from the dictionary:"));
        // Join the messages together for display.
        return messages.stream().map(ChatMessage::getContent).collect(Collectors.joining("\n"));
    }

    @Override
    public void actionPerformed(ActionEvent e) {
        audioPlayerPanel.resetSpeechResult();
        // Connection to the API.
        OpenAiService service = new OpenAiService(key, Duration.ofSeconds(60));
        if(key.isEmpty()) {
            reference.getExplainerConsoleModel().insertText("OpenAI-Key is missing, abort process. Must be set in launch-config: OpenAI-Key=...");
            return;
        }
        // Create the worker process.
        ExecutorService executorService = Executors.newSingleThreadExecutor();
        Thread t = new Thread(() -> {
            // Initialize and parameterize the text request.
            ChatCompletionRequest chatCompletionRequest = ChatCompletionRequest.builder()
                    .messages(messages)
                    .model("gpt-4")
                    .maxTokens(256)
                    .build();
            try {
                // Set the cursor to "waiting".
                setCursor(Cursor.getPredefinedCursor(Cursor.WAIT_CURSOR));
                // Disable the button while generating.
                generateAudioButton.setEnabled(false);
                // Print status info to the console.
                reference.getExplainerConsoleModel()
                        .insertText("Generating an auditive explanation!");
                // Send the text request to the endpoint.
                ChatCompletionResult chatCompletionResult = service.createChatCompletion(chatCompletionRequest);
                // Result of the request.
                String chatResponse = chatCompletionResult.getChoices().get(0).getMessage().getContent();
                // Initialize and parameterize the speech request.
                CreateSpeechRequest speechRequest = CreateSpeechRequest.builder()
                        .input(chatResponse)
                        .model((String) Objects.requireNonNull(modelComboBox.getSelectedItem()))
                        .voice((String) Objects.requireNonNull(voiceComboBox.getSelectedItem()))
                        .responseFormat((String) formatComboBox.getSelectedItem())
                        .speed((Double) speedSpinner.getValue())
                        .build();
                ResponseBody speechResponseBody = service.createSpeech(speechRequest);
                SpeechResult speechResult = SpeechResult.builder()
                        .audioType(speechResponseBody.contentType())
                        .bytes(speechResponseBody.bytes())
                        .build();
                audioPlayerPanel.setSpeechResult(speechResult);
            } catch(OpenAiHttpException openAiHttpException) {
                // 401 means the provided API key was rejected.
                if(openAiHttpException.statusCode == 401) {
                    JOptionPane.showMessageDialog(null,
                            "You provided an invalid API-Key!",
                            "Authentication Error", JOptionPane.ERROR_MESSAGE);
                    reference.getExplainerConsoleModel().insertText("You provided an invalid API-Key!");
                }
            } catch (Exception ex) {
                ex.printStackTrace();
                // Print the error to the console.
                reference.getExplainerConsoleModel().insertText(ex.getMessage());
            } finally {
                // Reset the cursor to the default.
                setCursor(Cursor.getDefaultCursor());
                // Re-enable the button.
                generateAudioButton.setEnabled(true);
            }
        });
        // Run the process and shut the executor down.
        executorService.execute(t);
        executorService.shutdown();
    }
}
|
[
"com.theokanning.openai.completion.chat.ChatMessageRole.SYSTEM.value",
"com.theokanning.openai.audio.CreateSpeechRequest.builder",
"com.theokanning.openai.completion.chat.ChatMessageRole.ASSISTANT.value",
"com.theokanning.openai.completion.chat.ChatCompletionRequest.builder"
] |
[((6946, 6976), 'com.theokanning.openai.completion.chat.ChatMessageRole.SYSTEM.value'), ((7161, 7194), 'com.theokanning.openai.completion.chat.ChatMessageRole.ASSISTANT.value'), ((9558, 9730), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((9558, 9701), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((9558, 9665), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((9558, 9629), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((10573, 11022), 'com.theokanning.openai.audio.CreateSpeechRequest.builder'), ((10573, 10989), 'com.theokanning.openai.audio.CreateSpeechRequest.builder'), ((10573, 10924), 'com.theokanning.openai.audio.CreateSpeechRequest.builder'), ((10573, 10841), 'com.theokanning.openai.audio.CreateSpeechRequest.builder'), ((10573, 10744), 'com.theokanning.openai.audio.CreateSpeechRequest.builder'), ((10573, 10647), 'com.theokanning.openai.audio.CreateSpeechRequest.builder'), ((11155, 11338), 'de.ja.model.audio.SpeechResult.builder'), ((11155, 11305), 'de.ja.model.audio.SpeechResult.builder'), ((11155, 11246), 'de.ja.model.audio.SpeechResult.builder')]
|
package com.yoazmenda.llm4j.provider;
import com.theokanning.openai.OpenAiService;
import com.theokanning.openai.completion.CompletionRequest;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * {@link LlmProvider} implementation backed by the OpenAI completion API.
 * Instances are immutable after construction.
 */
public class OpenAILlm implements LlmProvider {
    private static final Logger logger = LoggerFactory.getLogger(OpenAILlm.class);

    // Configuration is fixed per instance, so the fields are final.
    private final String modelName;
    private final Double temperature;
    private final Integer maxTokens;
    private final OpenAiService service;

    /**
     * @param apiKey      OpenAI API key used to authenticate requests
     * @param modelName   model identifier passed to the completion endpoint
     * @param temperature sampling temperature forwarded to the API
     * @param maxTokens   maximum number of tokens to generate per request
     */
    public OpenAILlm(String apiKey, String modelName, Double temperature, Integer maxTokens) {
        this.modelName = modelName;
        this.temperature = temperature;
        this.maxTokens = maxTokens;
        this.service = new OpenAiService(apiKey);
    }

    /**
     * Requests a single completion for the given prompt and returns its text.
     *
     * @param prompt prompt text sent to the model
     * @return the text of the first (and only requested) completion choice
     */
    @Override
    public String getCompletions(String prompt) {
        CompletionRequest completionRequest = CompletionRequest.builder()
                .prompt(prompt)
                .model(modelName)
                .maxTokens(maxTokens)
                .temperature(temperature)
                .n(1)
                .build();
        // Pass the object itself: SLF4J only calls toString() when DEBUG is enabled.
        logger.debug("completion request: {}", completionRequest);
        String response = service.createCompletion(completionRequest).getChoices().get(0).getText();
        logger.debug("completion response: {}", response);
        return response;
    }
}
|
[
"com.theokanning.openai.completion.CompletionRequest.builder"
] |
[((833, 1053), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((833, 1028), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((833, 1006), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((833, 964), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((833, 926), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((833, 892), 'com.theokanning.openai.completion.CompletionRequest.builder')]
|
package com.example.demo.provider;
import com.theokanning.openai.service.OpenAiService;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Service;
import com.theokanning.openai.completion.chat.ChatCompletionRequest;
import com.theokanning.openai.completion.chat.ChatMessage;
import java.util.Collections;
@Service
public class OpenAIProvider {

    // Injected from application properties.
    @Value("${openai.apiKey}")
    private String apiKey;

    /**
     * Sends the prompt as a single user message to gpt-3.5-turbo and
     * returns the model's reply text.
     *
     * @param prompt user prompt to forward to the API
     * @return content of the first completion choice
     * @throws IllegalArgumentException when no API key is configured
     */
    public String askOpenAI(String prompt) {
        if (apiKey == null || apiKey.isEmpty()) {
            throw new IllegalArgumentException("OPENAI_API_KEY environment variable not set");
        }
        OpenAiService service = new OpenAiService(apiKey);
        ChatCompletionRequest request = ChatCompletionRequest.builder()
                .model("gpt-3.5-turbo")
                .messages(Collections.singletonList(new ChatMessage("user", prompt)))
                .maxTokens(1000)
                .n(1)
                .build();
        return service.createChatCompletion(request)
                .getChoices()
                .get(0)
                .getMessage()
                .getContent();
    }
}
|
[
"com.theokanning.openai.completion.chat.ChatCompletionRequest.builder"
] |
[((830, 1044), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((830, 1019), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((830, 997), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((830, 964), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((830, 901), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder')]
|
package com.copybot.bonappcopybot.openai;
import com.copybot.bonappcopybot.model.entity.drinks.wine.Wine;
import com.copybot.bonappcopybot.model.entity.drinks.wine.WineCopy;
import com.theokanning.openai.OpenAiService;
import com.theokanning.openai.completion.CompletionRequest;
/**
 * Singleton that generates marketing copy for a {@link Wine} via the
 * OpenAI completion API and optionally translates it.
 */
public class WineCopyAI {
    // Shared API key for all completion calls.
    // NOTE(review): placeholder credential — should come from config/env, not source.
    private static final String API_KEY = "APIKEY";

    static WineCopyAI obj = new WineCopyAI();

    private WineCopyAI(){}

    public static WineCopyAI getInstance(){
        return obj;
    }

    /**
     * Generates an ad copy for the wine; when the requested language is not
     * English, the English copy is translated before being returned.
     *
     * @param wine the wine to advertise
     * @param copy copy parameters (occasion, target language)
     * @return generated (and possibly translated) ad copy
     */
    public String getWineCopy(Wine wine, WineCopy copy){
        String prompt = createPrompt(wine, copy);
        System.out.println(prompt);
        String copyResult = complete(prompt);
        System.out.println(copyResult);
        // Strip the leading blank line the model tends to emit.
        copyResult = copyResult.replaceFirst("\\n\\n", "");
        String lang = copy.getLang();
        if (lang.equals("English") || lang.equals("english") || lang.equals("en") || lang.equals("EN")){
            return copyResult;
        }
        return translateCopy(copyResult, lang);
    }

    /** Builds the ad-copy prompt from the wine's attributes. */
    private String createPrompt(Wine wine, WineCopy copy){
        String prompt = "Write an engaging and joyfull text add for " + copy.getOccasion() + ", with 30 words for the following product: ";
        prompt += " Type: Wine,";
        prompt += " Name : " + wine.getName() + ", ";
        prompt += " Product: " + wine.getProduct() + ", ";
        prompt += " Brand: " + wine.getBrand() + ", ";
        prompt += " Date: " + wine.getDate() + ", ";
        prompt += " Color: " + wine.getColor() + ", ";
        prompt += " Ingredients: " + wine.getIngredients() + ", ";
        prompt += " Description: " + wine.getDescription() + "->";
        return prompt;
    }

    /** Translates an already-generated copy to the requested language. */
    private String translateCopy(String originalCopy, String lang){
        String prompt = "Translate the following text to " + lang + ": " + originalCopy;
        System.out.println(prompt);
        return complete(prompt).replaceFirst("\\n\\n", "");
    }

    /**
     * Shared completion call — previously duplicated verbatim in
     * getWineCopy and translateCopy.
     */
    private String complete(String prompt){
        OpenAiService service = new OpenAiService(API_KEY);
        CompletionRequest completionRequest = CompletionRequest.builder()
                .prompt(prompt)
                .temperature(0.9)
                .maxTokens(150)
                .topP(1.0)
                .frequencyPenalty(1.0)
                .presencePenalty(0.3)
                .echo(false)
                .build();
        return service.createCompletion("text-davinci-001", completionRequest).getChoices().get(0).getText();
    }
}
|
[
"com.theokanning.openai.completion.CompletionRequest.builder"
] |
[((737, 1020), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((737, 995), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((737, 966), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((737, 928), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((737, 889), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((737, 862), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((737, 830), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((737, 796), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((2563, 2846), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((2563, 2821), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((2563, 2792), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((2563, 2754), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((2563, 2715), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((2563, 2688), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((2563, 2656), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((2563, 2622), 'com.theokanning.openai.completion.CompletionRequest.builder')]
|
package com.minivv.pilot.utils;
import com.minivv.pilot.model.AppSettings;
import com.theokanning.openai.OpenAiApi;
import com.theokanning.openai.completion.CompletionChoice;
import com.theokanning.openai.completion.CompletionRequest;
import com.theokanning.openai.service.OpenAiService;
import okhttp3.OkHttpClient;
import org.apache.commons.collections.CollectionUtils;
import retrofit2.Retrofit;
import java.net.InetSocketAddress;
import java.net.Proxy;
import java.time.Duration;
import java.util.ArrayList;
import java.util.List;
import java.util.Locale;
import static com.theokanning.openai.service.OpenAiService.*;
/**
 * Static helper for calling the OpenAI completion endpoint using the
 * plugin's {@link AppSettings}, optionally through an HTTP proxy.
 */
public class GPTClient {

    /**
     * Sends {@code code} as the completion prompt.
     *
     * @param code     prompt text sent to the model
     * @param settings plugin settings (API key, model, proxy, timeout)
     * @return the completion choices, or an empty list if any error occurs
     */
    public static List<CompletionChoice> callChatGPT(String code, AppSettings settings) {
        try {
            // NOTE(review): setting the default locale to itself is a no-op —
            // presumably this was meant to pin a specific locale; confirm intent.
            Locale.setDefault(Locale.getDefault());
            if (settings.enableProxy) {
                // Build an OkHttp client that routes through the configured HTTP proxy.
                Proxy proxy = new Proxy(Proxy.Type.HTTP, new InetSocketAddress(settings.proxyHost, settings.proxyPort));
                OkHttpClient client = defaultClient(settings.gptKey, Duration.ofSeconds(settings.maxWaitSeconds))
                        .newBuilder()
                        .proxy(proxy)
                        .build();
                Retrofit retrofit = defaultRetrofit(client, defaultObjectMapper());
                OpenAiApi api = retrofit.create(OpenAiApi.class);
                OpenAiService service = new OpenAiService(api);
                CompletionRequest completionRequest = CompletionRequest.builder()
                        .prompt(code)
                        .model(settings.gptModel)
                        .maxTokens(settings.gptMaxTokens)
                        .temperature(0.3)
                        .presencePenalty(0.0)
                        .frequencyPenalty(0.0)
                        .bestOf(1)
                        .stream(false)
                        .echo(false)
                        .build();
                return service.createCompletion(completionRequest).getChoices();
            } else {
                OpenAiService service = new OpenAiService(settings.gptKey);
                // NOTE(review): this path omits maxTokens/temperature and uses
                // echo(true), unlike the proxy path above — confirm the asymmetry
                // is intentional.
                CompletionRequest completionRequest = CompletionRequest.builder()
                        .prompt(code)
                        .model(settings.gptModel)
                        .echo(true)
                        .build();
                return service.createCompletion(completionRequest).getChoices();
            }
        } catch (Exception e) {
            // Deliberate best-effort: any failure (network, auth, ...) is
            // reported to callers as an empty choice list.
            return new ArrayList<>();
        }
    }

    /** True when the API returned at least one non-blank choice. */
    public static boolean isSuccessful(List<CompletionChoice> choices) {
        return CollectionUtils.isNotEmpty(choices) && !choices.get(0).getText().isBlank();
    }

    /** Text of the first choice, or a fallback message when the response is empty. */
    public static String toString(List<CompletionChoice> choices) {
        if (CollectionUtils.isEmpty(choices)) {
            return "ChatGPT response is empty,please check your network or config!";
        }
        return choices.get(0).getText();
    }
}
|
[
"com.theokanning.openai.completion.CompletionRequest.builder"
] |
[((1462, 1914), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((1462, 1881), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((1462, 1844), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((1462, 1805), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((1462, 1770), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((1462, 1723), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((1462, 1677), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((1462, 1635), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((1462, 1577), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((1462, 1527), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((2148, 2332), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((2148, 2299), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((2148, 2263), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((2148, 2213), 'com.theokanning.openai.completion.CompletionRequest.builder')]
|
package dev.lueem.ai;
import java.util.ArrayList;
import java.util.List;
import java.util.Properties;
import java.io.FileInputStream;
import java.io.IOException;
import com.theokanning.openai.completion.chat.ChatCompletionRequest;
import com.theokanning.openai.completion.chat.ChatMessage;
import com.theokanning.openai.completion.chat.ChatMessageRole;
import com.theokanning.openai.service.OpenAiService;
/**
 * Small wrapper around {@link OpenAiService} that reads its API key
 * from a local {@code config.properties} file.
 */
public class OpenAiClient {
    private OpenAiService service;
    private Properties properties;

    /**
     * Loads {@code openai.api.key} from config.properties and initializes
     * the client. On I/O failure the stack trace is printed and
     * {@code service} remains null.
     */
    public OpenAiClient() {
        properties = new Properties();
        // try-with-resources: the original version leaked the FileInputStream.
        try (FileInputStream in = new FileInputStream("config.properties")) {
            properties.load(in);
            String apiKey = properties.getProperty("openai.api.key");
            service = new OpenAiService(apiKey);
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    /**
     * Sends the question as a single user message and returns the reply text.
     *
     * @param question user question forwarded to the model
     * @return content of the first completion choice
     */
    public String askQuestion(String question) {
        List<ChatMessage> messages = new ArrayList<>();
        messages.add(new ChatMessage(ChatMessageRole.USER.value(), question));
        ChatCompletionRequest request = ChatCompletionRequest.builder()
                .model("gpt-3.5-turbo-16k-0613")
                .messages(messages)
                .n(1)
                .maxTokens(10000)
                .build();
        ChatMessage responseMessage = service.createChatCompletion(request).getChoices().get(0).getMessage();
        return responseMessage.getContent();
    }
}
|
[
"com.theokanning.openai.completion.chat.ChatMessageRole.USER.value",
"com.theokanning.openai.completion.chat.ChatCompletionRequest.builder"
] |
[((1008, 1036), 'com.theokanning.openai.completion.chat.ChatMessageRole.USER.value'), ((1091, 1288), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((1091, 1263), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((1091, 1229), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((1091, 1207), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((1091, 1171), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder')]
|
package ssodamproject.server.GPT.dto;
import com.theokanning.openai.completion.chat.ChatCompletionRequest;
import com.theokanning.openai.completion.chat.ChatMessage;
import lombok.AllArgsConstructor;
import lombok.Getter;
import lombok.NoArgsConstructor;
import lombok.ToString;
import java.util.List;
@Getter
@NoArgsConstructor
@AllArgsConstructor
@ToString
public class GPTCompletionChatRequest {
    private String model;
    private String role1;
    private String message1;
    private String role2;
    private String message2;
    private Integer max_tokens;
    private Double top_n;
    private Double temperature;
    private Double frequency_penalty;

    /**
     * Maps this DTO to the client library's {@link ChatCompletionRequest},
     * applying the fixed generation parameters used by the service.
     */
    public static ChatCompletionRequest of(GPTCompletionChatRequest request) {
        return ChatCompletionRequest.builder()
                .model(request.getModel())
                .messages(convertChatMessages(request))
                .maxTokens(300)
                .temperature(0.2)
                .topP(0.92)
                .frequencyPenalty(1.0)
                .build();
    }

    /** Turns the two role/message pairs into an ordered message list. */
    private static List<ChatMessage> convertChatMessages(GPTCompletionChatRequest request) {
        ChatMessage first = new ChatMessage(request.getRole1(), request.getMessage1());
        ChatMessage second = new ChatMessage(request.getRole2(), request.getMessage2());
        return List.of(first, second);
    }
}
|
[
"com.theokanning.openai.completion.chat.ChatCompletionRequest.builder"
] |
[((770, 1058), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((770, 1033), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((770, 1005), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((770, 971), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((770, 939), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((770, 883), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((770, 844), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder')]
|
package br.com.alura.screenmatch.service;
import com.theokanning.openai.completion.CompletionRequest;
import com.theokanning.openai.service.OpenAiService;
public class ConsultaChatGPT {

    /**
     * Translates the given text to Portuguese via the OpenAI completion API.
     *
     * @param texto text to translate
     * @return the translated text, trimmed
     * @throws IllegalStateException when the OPENAI_API_KEY environment
     *                               variable is not set
     */
    public static String obterTraducao(String texto) {
        // Read the key from the environment instead of hard-coding a
        // placeholder credential ("my key") in source.
        String apiKey = System.getenv("OPENAI_API_KEY");
        if (apiKey == null || apiKey.isEmpty()) {
            throw new IllegalStateException("OPENAI_API_KEY environment variable not set");
        }
        OpenAiService service = new OpenAiService(apiKey);
        CompletionRequest requisicao = CompletionRequest.builder()
                .model("gpt-3.5-turbo-instruct")
                .prompt("traduza para o português o texto: " + texto)
                .maxTokens(1000)
                .temperature(0.7)
                .build();
        var resposta = service.createCompletion(requisicao);
        return resposta.getChoices().get(0).getText().trim();
    }
}
|
[
"com.theokanning.openai.completion.CompletionRequest.builder"
] |
[((353, 597), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((353, 571), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((353, 536), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((353, 502), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((353, 430), 'com.theokanning.openai.completion.CompletionRequest.builder')]
|
package com.theagilemonkeys.ellmental.textgeneration.openai;
import com.theagilemonkeys.ellmental.textgeneration.TextGenerationService;
import com.theagilemonkeys.ellmental.textgeneration.openai.errors.NoContentFoundException;
import com.theokanning.openai.completion.chat.ChatCompletionChoice;
import com.theokanning.openai.completion.chat.ChatCompletionRequest;
import com.theokanning.openai.completion.chat.ChatMessage;
import com.theokanning.openai.service.OpenAiService;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.time.Duration;
import java.util.List;
/**
 * {@link TextGenerationService} that produces chat completions through
 * the OpenAI API using a configurable model, temperature and token budget.
 */
public class OpenAiChatGenerationModel extends TextGenerationService<ChatMessage> {
    private static final Double DEFAULT_TEMPERATURE = 0.7;
    private static final int DEFAULT_MAX_TOKENS = 3000;
    private static final Logger log = LoggerFactory.getLogger(OpenAiChatGenerationModel.class);

    private final Double temperature;
    private final int maxTokens;
    private final OpenAiModels model;
    // The OpenAI client is package-private to allow injecting a mock in tests
    OpenAiService openAiService;

    /**
     * Creates a model with the defaults: GPT-3.5, temperature 0.7,
     * maxTokens 3000.
     *
     * @param openAiKey OpenAI API key
     */
    public OpenAiChatGenerationModel(String openAiKey) {
        this(openAiKey, OpenAiModels.GPT_3_5, DEFAULT_TEMPERATURE, DEFAULT_MAX_TOKENS);
    }

    /**
     * Creates a model with an explicit OpenAI model and default
     * temperature (0.7) and maxTokens (3000).
     *
     * @param openAiKey OpenAI API key
     * @param model     model to use for the chat generation
     */
    public OpenAiChatGenerationModel(String openAiKey, OpenAiModels model) {
        this(openAiKey, model, DEFAULT_TEMPERATURE, DEFAULT_MAX_TOKENS);
    }

    /**
     * Creates a fully-configured model.
     *
     * @param openAiKey   OpenAI API key
     * @param model       model to use for the chat generation
     * @param temperature sampling temperature
     * @param maxTokens   maximum number of tokens per completion
     */
    public OpenAiChatGenerationModel(String openAiKey, OpenAiModels model, Double temperature, int maxTokens) {
        this.openAiService = new OpenAiService(openAiKey, Duration.ofSeconds(240));
        this.temperature = temperature;
        this.maxTokens = maxTokens;
        this.model = model;
    }

    /**
     * Generates a chat response for the given conversation context.
     *
     * @param chatMessages chat messages used as context for the response
     * @return the generated response text
     * @throws NoContentFoundException when the API returns empty content
     */
    @Override
    public String generate(List<ChatMessage> chatMessages) {
        log.debug("Generating chat response for chat messages {}", chatMessages);
        ChatCompletionRequest request = ChatCompletionRequest.builder()
                .model(model.getCodename())
                .messages(chatMessages)
                .maxTokens(maxTokens)
                .temperature(temperature)
                .build();
        // Only the first choice is used.
        ChatCompletionChoice firstChoice =
                openAiService.createChatCompletion(request).getChoices().get(0);
        String content = firstChoice.getMessage().getContent();
        log.debug("Chat completion response is {}", content);
        if (content.isEmpty()) {
            throw new NoContentFoundException(chatMessages);
        }
        return content;
    }
}
|
[
"com.theokanning.openai.completion.chat.ChatCompletionRequest.builder"
] |
[((2962, 3182), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((2962, 3157), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((2962, 3113), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((2962, 3075), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((2962, 3033), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder')]
|
package org.example;
import com.theokanning.openai.completion.chat.ChatCompletionRequest;
import com.theokanning.openai.completion.chat.ChatMessage;
import com.theokanning.openai.completion.chat.ChatMessageRole;
import com.theokanning.openai.service.OpenAiService;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Objects;
import java.util.stream.Collectors;
public class Chat {
    /**
     * Sends a streamed chat completion request to OpenAI and prints the joined answer.
     * The API key is read from the first line of the {@code .env} classpath resource.
     */
    public static void main(String[] args) {
        // Read the OpenAI API key from the bundled .env resource file.
        String token = readApiKey();
        OpenAiService service = new OpenAiService(token);
        // Conversation to send: a system prompt plus the user's question.
        final List<ChatMessage> messages = new ArrayList<>();
        final ChatMessage systemMessage = new ChatMessage(ChatMessageRole.SYSTEM.value(), "You are a helpful assistant.");
        final ChatMessage userMessage = new ChatMessage(ChatMessageRole.USER.value(), "effective java item 7 자세히 설명해줘");
        messages.add(systemMessage);
        messages.add(userMessage);
        // Build the request body and generation settings.
        ChatCompletionRequest chatCompletionRequest = ChatCompletionRequest
                .builder()
                .model("gpt-3.5-turbo")
                .messages(messages)
                .n(1)
                .maxTokens(50)
                .logitBias(new HashMap<>())
                .build();
        // Stream the completion and collect each delta chunk.
        ArrayList<String> result = new ArrayList<>();
        service.streamChatCompletion(chatCompletionRequest)
                .doOnError(Throwable::printStackTrace)
                .blockingForEach(chunk -> {
                    // Stream chunks may carry null content (e.g. the final chunk);
                    // nulls are filtered out when joining below.
                    String content = chunk.getChoices().get(0).getMessage().getContent();
                    result.add(content);
                });
        // Join the chunks into one answer and print it.
        String answer = result.stream().filter(Objects::nonNull).collect(Collectors.joining(""));
        System.out.println(answer);
        service.shutdownExecutor();
    }

    /**
     * Reads the OpenAI API key from the first line of the {@code .env} classpath resource.
     *
     * @return the API key, or an empty string if the resource is missing or unreadable
     */
    private static String readApiKey() {
        String token = "";
        InputStream inputStream = Chat.class.getClassLoader().getResourceAsStream(".env");
        // Fix: the original dereferenced a possibly-null stream inside
        // try-with-resources, crashing with an uncaught NullPointerException
        // (not the IOException it handled) whenever .env is absent.
        if (inputStream == null) {
            System.err.println(".env resource not found on the classpath");
            return token;
        }
        // NOTE(review): reader uses the platform default charset, as the original did;
        // consider StandardCharsets.UTF_8 if the .env file may contain non-ASCII.
        try (BufferedReader bufferedReader = new BufferedReader(new InputStreamReader(inputStream))) {
            String line = bufferedReader.readLine();
            // Fix: readLine() returns null on an empty file; keep "" instead of null.
            if (line != null) {
                token = line;
            }
        } catch (IOException e) {
            e.printStackTrace();
        }
        return token;
    }
}
|
[
"com.theokanning.openai.completion.chat.ChatMessageRole.SYSTEM.value",
"com.theokanning.openai.completion.chat.ChatMessageRole.USER.value"
] |
[((1206, 1236), 'com.theokanning.openai.completion.chat.ChatMessageRole.SYSTEM.value'), ((1327, 1355), 'com.theokanning.openai.completion.chat.ChatMessageRole.USER.value')]
|
package com.example.telegramdailybot.service;
import com.theokanning.openai.completion.chat.ChatCompletionRequest;
import com.theokanning.openai.completion.chat.ChatMessage;
import com.theokanning.openai.service.OpenAiService;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.scheduling.annotation.Async;
import org.springframework.stereotype.Service;
import java.time.Duration;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.CompletableFuture;
@Service
public class ChatGPT3Service {
    private final OpenAiService openAiService;
    private final int maxTokens;

    /**
     * Wires the OpenAI client from application properties.
     *
     * @param openAiToken    OpenAI API token ({@code openai.token})
     * @param timeoutSeconds request timeout in seconds ({@code openai.timeout})
     * @param maxTokens      maximum tokens per completion ({@code openai.maxTokens})
     */
    public ChatGPT3Service(
            @Value("${openai.token}") String openAiToken,
            @Value("${openai.timeout}") long timeoutSeconds,
            @Value("${openai.maxTokens}") int maxTokens) {
        this.openAiService = new OpenAiService(openAiToken, Duration.ofSeconds(timeoutSeconds));
        this.maxTokens = maxTokens;
    }

    /**
     * Sends a single user message to the gpt-3.5-turbo chat model.
     *
     * @param inputText the user's message
     * @return a future resolving to the model's reply text
     */
    @Async
    public CompletableFuture<String> chat(String inputText) {
        ChatMessage userMessage = new ChatMessage("user", inputText);
        List<ChatMessage> conversation = new ArrayList<>();
        conversation.add(userMessage);
        // You can choose another model if you want
        ChatCompletionRequest request = ChatCompletionRequest.builder()
                .model("gpt-3.5-turbo")
                .messages(conversation)
                .maxTokens(maxTokens)
                .build();
        // Run the blocking API call off the caller's thread and unwrap the first choice.
        return CompletableFuture.supplyAsync(() -> openAiService
                .createChatCompletion(request)
                .getChoices()
                .get(0)
                .getMessage()
                .getContent());
    }
}
|
[
"com.theokanning.openai.completion.chat.ChatCompletionRequest.builder"
] |
[((1235, 1453), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((1235, 1428), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((1235, 1390), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((1235, 1306), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder')]
|
package com.blackn0va;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Random;
import com.github.theholywaffle.teamspeak3.TS3Api;
import com.github.theholywaffle.teamspeak3.TS3ApiAsync;
import com.github.theholywaffle.teamspeak3.TS3Config;
import com.github.theholywaffle.teamspeak3.TS3Query;
import com.github.theholywaffle.teamspeak3.api.TextMessageTargetMode;
import com.github.theholywaffle.teamspeak3.api.event.ClientMovedEvent;
import com.github.theholywaffle.teamspeak3.api.event.TS3EventAdapter;
import com.github.theholywaffle.teamspeak3.api.event.TS3EventType;
import com.github.theholywaffle.teamspeak3.api.event.TextMessageEvent;
import com.github.theholywaffle.teamspeak3.api.reconnect.ConnectionHandler;
import com.github.theholywaffle.teamspeak3.api.reconnect.ReconnectStrategy;
import com.theokanning.openai.completion.CompletionRequest;
import com.theokanning.openai.completion.chat.ChatCompletionRequest;
import com.theokanning.openai.completion.chat.ChatMessage;
import com.theokanning.openai.completion.chat.ChatMessageRole;
import com.theokanning.openai.service.OpenAiService;
/**
 * TeamSpeak 3 bot that forwards user chat messages to the OpenAI chat API and
 * replies with the generated answer, both privately and in the channel.
 *
 * NOTE(review): server credentials, the OpenAI API key and channel IDs are
 * hard-coded below; these should be moved to external configuration and the
 * committed key rotated.
 */
public class App {
    // Client IDs intended for specific users; currently initialized to 0 and
    // never reassigned in this file.
    public static int ClientIDBlack = 0;
    public static int ClientIDFly = 0;
    public static int ClientIDChukky = 0;
    // Randomly generated bot nickname, e.g. "Bot-123" (set by GenerateNickname).
    public static String randomName = "";
    // Last answer produced by getAnswer(); shared mutable state read by listeners.
    public static String answer = "";
    public static final TS3Config config = new TS3Config();
    // Query client ID of this bot; volatile because it is written on (re)connect
    // and may be read from event-listener threads.
    private static volatile int clientId;
    // Running chat history sent to OpenAI with every request (grows unbounded).
    public static final List<ChatMessage> messages = new ArrayList<>();
    /**
     * Entry point: configures the TS3 query connection, connects, and installs
     * the one-time event listeners.
     */
    public static void main(String[] args) {
        GenerateNickname(randomName);
        config.setHost("ts.laonda-clan.eu");
        config.setEnableCommunicationsLogging(true);
        // Use default exponential backoff reconnect strategy
        config.setReconnectStrategy(ReconnectStrategy.exponentialBackoff());
        // Make stuff run every time the query (re)connects
        config.setConnectionHandler(new ConnectionHandler() {
            @Override
            public void onConnect(TS3Api api) {
                stuffThatNeedsToRunEveryTimeTheQueryConnects(api);
            }
            @Override
            public void onDisconnect(TS3Query ts3Query) {
                // Nothing
            }
        });
        final TS3Query query = new TS3Query(config);
        // Here "stuffThatNeedsToRunEveryTimeTheQueryConnects" will be run!
        // (And every time the query reconnects)
        query.connect();
        // Then do other stuff that only needs to be done once
        stuffThatOnlyEverNeedsToBeRunOnce(query.getApi());
        doSomethingThatTakesAReallyLongTime(query.getAsyncApi());
        // Disconnect once we're done
        // query.exit();
        /*
         * Useful API snippets kept for reference:
         *
         * // Get all channel IDs
         * //api.getChannels().forEach(channel -> System.out.println(channel.getName() +
         * " " + channel.getId()));
         *
         * // Get user IDs
         * //api.getClients().forEach(client -> System.out.println(client.getNickname()
         * + " " + client.getId()));
         *
         * // Join a channel
         * //api.moveClient(6353, 417);
         *
         * api.getChannels().forEach(channel -> api.moveClient(ClientIDBlack,
         * channel.getId()));
         */
        /*
         * Channel IDs on this server:
         * Zockerhallen 413
         * Zockerhalle I 412
         * Zockerhalle II 414
         * Zockerhalle III 415
         * Zockerhalle IV 416
         * Zockerhalle V 417
         */
    }
    /**
     * Generates a random nickname of the form "Bot-&lt;100..999&gt;", stores it in
     * {@link #randomName} and returns it.
     *
     * NOTE(review): the {@code nickname} parameter is ignored; the method works
     * entirely on the static field.
     */
    public static String GenerateNickname(String nickname) {
        // Create a random 3-digit suffix
        Random rand = new Random();
        int randomNum = rand.nextInt((999 - 100) + 1) + 100;
        randomName = "Bot-" + randomNum;
        return randomName;
    }
    // NOTE(review): dead stub — empty try/catch with no body; remove or implement.
    public static void BotStarten() {
        try {
        } catch (Exception e) {
        }
    }
    /**
     * Per-connection setup: login, server/channel selection, nickname, and the
     * event listener that answers incoming text messages via OpenAI.
     * Runs on every (re)connect via the ConnectionHandler installed in main().
     */
    private static void stuffThatNeedsToRunEveryTimeTheQueryConnects(TS3Api api) {
        try {
            GenerateNickname(randomName);
            // Logging in, selecting the virtual server, selecting a channel
            // and setting a nickname needs to be done every time we reconnect
            // NOTE(review): query credentials are hard-coded; externalize them.
            api.login("test", "d4rz");
            api.selectVirtualServerById(1);
            // api.moveQuery(x);
            api.setNickname(randomName);
            api.moveQuery(412);
            // What events we listen to also resets
            // api.registerEvent(TS3EventType.TEXT_CHANNEL, 415);
            // Out clientID changes every time we connect and we need it
            // for our event listener, so we need to store the ID in a field
            clientId = api.whoAmI().getId();
            api.registerAllEvents();
            api.registerEvent(TS3EventType.SERVER);
            api.registerEvent(TS3EventType.TEXT_CHANNEL);
            api.registerEvent(TS3EventType.TEXT_PRIVATE);
            api.registerEvent(TS3EventType.TEXT_SERVER);
            // NOTE(review): this listener duplicates the one added in
            // stuffThatOnlyEverNeedsToBeRunOnce; on reconnect, multiple listeners
            // accumulate and each message may be answered more than once.
            api.addTS3Listeners(new TS3EventAdapter() {
                @Override
                public void onTextMessage(TextMessageEvent e) {
                    // If the message is private, answer only the sender;
                    // otherwise also post the answer to the channel.
                    // The "Bot" name check prevents the bot answering itself.
                    if (e.getTargetMode() == TextMessageTargetMode.CLIENT) {
                        if (!e.getInvokerName().contains("Bot")) {
                            String frage = e.getMessage();
                            final ChatMessage systemMessage = new ChatMessage(ChatMessageRole.USER.value(), frage);
                            messages.add(systemMessage);
                            // call get answer and return answer
                            answer = getAnswer(frage);
                            // api.sendChannelMessage(answer);
                            api.sendPrivateMessage(e.getInvokerId(), answer);
                            System.out.println(answer);
                        }
                    } else {
                        if (!e.getInvokerName().contains("Bot")) {
                            String frage = e.getMessage();
                            final ChatMessage systemMessage = new ChatMessage(ChatMessageRole.USER.value(), frage);
                            messages.add(systemMessage);
                            // call get answer and return answer
                            answer = getAnswer(frage);
                            // api.sendChannelMessage(answer);
                            api.sendPrivateMessage(e.getInvokerId(), answer);
                            System.out.println(answer);
                            api.sendChannelMessage(answer);
                            System.out.println(answer);
                        }
                    }
                }
                @Override
                public void onClientMoved(ClientMovedEvent e) {
                    // Greet anyone who moves into channel 415 with a canned
                    // introduction (sent privately).
                    if (e.getTargetChannelId() == 415) {
                        // call get answer and return answer
                        // answer = getAnswer("Stell dich in 50 worten vor. Du bist eine KI und stellst
                        // dich freundlich vor. Sag auch, dass sie bitte mit dir schreiben müssen, da du
                        // nichts hörst. Und bitte sag Du und nicht Sie");
                        api.sendPrivateMessage(e.getClientId(),
                                "Hallo, ich bin eine KI. Ich freue mich, dich kennenzulernen! Ich bin hier, um dir zu helfen und deine Fragen zu beantworten. Bitte schreib mir deine Fragen, da ich nichts hören kann. Ich werde mein Bestes geben, um dir so schnell wie möglich zu antworten. Wenn du noch etwas brauchst, lass es mich bitte wissen. Ich freue mich darauf, mit dir zu interagieren!");
                    }
                }
            });
        } catch (Exception e) {
            System.out.println(e);
        }
    }
    /**
     * Sends the accumulated {@link #messages} history to OpenAI and returns the
     * answer text, stripped of the toString() wrapper noise.
     *
     * NOTE(review): the catch block is empty, so on any failure this silently
     * returns the PREVIOUS answer. The string-replace parsing of
     * getChoices().toString() is brittle; prefer getMessage().getContent().
     * The API key below is hard-coded and should be rotated/externalized.
     */
    public static String getAnswer(String question) {
        try
        {
            OpenAiService service = new OpenAiService("sk-pJQ9H7UHKpAPI_Keyu");
            System.out.println("\nCreating completion... ");
            ChatCompletionRequest chatCompletionRequest = ChatCompletionRequest
                    .builder()
                    .model("gpt-3.5-turbo")
                    .temperature(0.5)
                    .presencePenalty(0.0)
                    .frequencyPenalty(0.5)
                    .messages(messages)
                    .n(1)
                    .maxTokens(200)
                    .logitBias(new HashMap<>())
                    .build();
            // String answer =
            // service.createCompletion(completionRequest).getChoices(completionResponse ->
            // completionResponse.getChoices().toString().replace("[CompletionChoice(text=",
            // "").replace(", index=)]", "").replace(", logprobs=", "").replace(",
            // finish_reason=", "").replace(", index=0nullstop)", "").replace("]", ""));
            // service.createCompletion(completionRequest).getChoices().forEach(System.out::print);
            answer = service.createChatCompletion(chatCompletionRequest).getChoices().toString()
                    .replace("[CompletionChoice(text=", "").replace(", index=)]", "")
                    .replace(", logprobs=", "").replace(", finish_reason=", "")
                    .replace(", index=0nullstop)", "").replace("]", "")
                    .replace("index=0nulllength)", "")
                    .replace("[ChatCompletionChoice(index=0, message=ChatMessage(role=assistant, content=", "")
                    .replace("), finishReason=stop)", "");
            return answer;
        }
        catch (Exception e)
        {
        }
        return answer;
    }
    /**
     * One-time setup after the first connect: registers the event listeners
     * that answer messages and greet clients moving into channel 415.
     */
    private static void stuffThatOnlyEverNeedsToBeRunOnce(final TS3Api api) {
        // We only want to greet people once
        // api.sendChannelMessage("PutPutBot is online!");
        GenerateNickname(randomName);
        // On the API side of things, you only need to register your TS3Listeners once!
        // These are not affected when the query disconnects.
        api.registerAllEvents();
        api.registerEvent(TS3EventType.SERVER);
        api.registerEvent(TS3EventType.TEXT_CHANNEL);
        api.registerEvent(TS3EventType.TEXT_PRIVATE);
        api.registerEvent(TS3EventType.TEXT_SERVER);
        api.addTS3Listeners(new TS3EventAdapter() {
            @Override
            public void onTextMessage(TextMessageEvent e) {
                // Same private/channel answering logic as the reconnect listener,
                // except this variant does not append to the messages history.
                if (e.getTargetMode() == TextMessageTargetMode.CLIENT) {
                    if (!e.getInvokerName().contains("Bot")) {
                        // call get answer and return answer
                        answer = getAnswer(e.getMessage());
                        // api.sendChannelMessage(answer);
                        api.sendPrivateMessage(e.getInvokerId(), answer);
                        System.out.println(answer);
                    }
                } else {
                    if (!e.getInvokerName().contains("Bot")) {
                        // call get answer and return answer
                        answer = getAnswer(e.getMessage());
                        api.sendChannelMessage(answer);
                        System.out.println(answer);
                    }
                }
            }
            @Override
            public void onClientMoved(ClientMovedEvent e) {
                // Greet anyone entering channel 415 with the canned introduction.
                if (e.getTargetChannelId() == 415) {
                    // answer = getAnswer("Stell dich in 50 worten vor. Du bist eine KI und stellst
                    // dich freundlich vor. Sag auch, dass sie bitte mit dir schreiben müssen, da du
                    // nichts hörst. Und bitte sag Du und nicht Sie");
                    api.sendPrivateMessage(e.getClientId(),
                            "Hallo, ich bin eine KI. Ich freue mich, dich kennenzulernen! Ich bin hier, um dir zu helfen und deine Fragen zu beantworten. Bitte schreib mir deine Fragen, da ich nichts hören kann. Ich werde mein Bestes geben, um dir so schnell wie möglich zu antworten. Wenn du noch etwas brauchst, lass es mich bitte wissen. Ich freue mich darauf, mit dir zu interagieren!");
                }
            }
        });
    }
    // NOTE(review): intentionally empty placeholder for future async work.
    private static void doSomethingThatTakesAReallyLongTime(TS3ApiAsync api) {
    }
}
|
[
"com.theokanning.openai.completion.chat.ChatMessageRole.USER.value"
] |
[((5648, 5676), 'com.theokanning.openai.completion.chat.ChatMessageRole.USER.value'), ((6324, 6352), 'com.theokanning.openai.completion.chat.ChatMessageRole.USER.value')]
|
package com.asleepyfish.controller;
import com.asleepyfish.strategy.SelectSecondStrategy;
import com.knuddels.jtokkit.api.ModelType;
import com.theokanning.openai.completion.CompletionRequest;
import com.theokanning.openai.embedding.EmbeddingRequest;
import com.theokanning.openai.finetune.FineTuneRequest;
import com.theokanning.openai.image.CreateImageEditRequest;
import com.theokanning.openai.image.CreateImageVariationRequest;
import com.theokanning.openai.image.ImageResult;
import com.theokanning.openai.moderation.ModerationRequest;
import io.github.asleepyfish.config.ChatGPTProperties;
import io.github.asleepyfish.entity.billing.Billing;
import io.github.asleepyfish.entity.billing.Subscription;
import io.github.asleepyfish.enums.audio.AudioResponseFormatEnum;
import io.github.asleepyfish.enums.edit.EditModelEnum;
import io.github.asleepyfish.enums.embedding.EmbeddingModelEnum;
import io.github.asleepyfish.enums.image.ImageResponseFormatEnum;
import io.github.asleepyfish.enums.image.ImageSizeEnum;
import io.github.asleepyfish.service.OpenAiProxyService;
import org.junit.jupiter.api.Test;
import java.util.Arrays;
/**
* @Author: asleepyfish
* @Date: 2023-06-11 21:18
* @Description: 注意:所有代码示例均有基于和SpringBoot和直接Main方法调用两种实现。分别在类MainTest和类ChatGPTController中。
*/
public class MainTest {
@Test
public void chat() {
ChatGPTProperties properties = ChatGPTProperties.builder().token("sk-xxx")
.proxyHost("127.0.0.1")
.proxyPort(7890)
.build();
OpenAiProxyService openAiProxyService = new OpenAiProxyService(properties);
System.out.println(openAiProxyService.chatCompletion("Go写个程序"));
}
@Test
public void streamChat() {
ChatGPTProperties properties = ChatGPTProperties.builder().token("sk-xxx")
.proxyHost("127.0.0.1")
.proxyPort(7890)
.build();
OpenAiProxyService openAiProxyService = new OpenAiProxyService(properties);
openAiProxyService.createStreamChatCompletion("杭州旅游攻略");
}
@Test
public void createImages() {
ChatGPTProperties properties = ChatGPTProperties.builder().token("sk-xxx")
.proxyHost("127.0.0.1")
.proxyPort(7890)
.build();
OpenAiProxyService openAiProxyService = new OpenAiProxyService(properties);
System.out.println(openAiProxyService.createImages("大白狗"));
}
@Test
public void billing() {
ChatGPTProperties properties = ChatGPTProperties.builder().token("sk-xxx")
.proxyHost("127.0.0.1")
.proxyPort(7890)
.build();
OpenAiProxyService openAiProxyService = new OpenAiProxyService(properties);
String monthUsage = openAiProxyService.billingUsage("2023-04-01", "2023-05-01");
System.out.println("四月使用:" + monthUsage + "美元");
String totalUsage = openAiProxyService.billingUsage();
System.out.println("一共使用:" + totalUsage + "美元");
String stageUsage = openAiProxyService.billingUsage("2023-01-31");
System.out.println("自从2023/01/31使用:" + stageUsage + "美元");
Subscription subscription = openAiProxyService.subscription();
System.out.println("订阅信息(包含到期日期,账户总额度等信息):" + subscription);
// dueDate为到期日,total为总额度,usage为使用量,balance为余额
Billing totalBilling = openAiProxyService.billing();
System.out.println("历史账单信息:" + totalBilling);
// 默认不传参的billing方法的使用量usage从2023-01-01开始,如果用户的账单使用早于该日期,可以传入开始日期startDate
Billing posibleStartBilling = openAiProxyService.billing("2022-01-01");
System.out.println("可能的历史账单信息:" + posibleStartBilling);
}
@Test
public void model() {
ChatGPTProperties properties = ChatGPTProperties.builder().token("sk-xxx")
.proxyHost("127.0.0.1")
.proxyPort(7890)
.build();
OpenAiProxyService openAiProxyService = new OpenAiProxyService(properties);
System.out.println("models列表:" + openAiProxyService.listModels());
System.out.println("=============================================");
System.out.println("text-davinci-003信息:" + openAiProxyService.getModel("text-davinci-003"));
}
@Test
public void edit() {
ChatGPTProperties properties = ChatGPTProperties.builder().token("sk-xxx")
.proxyHost("127.0.0.1")
.proxyPort(7890)
.build();
OpenAiProxyService openAiProxyService = new OpenAiProxyService(properties);
String input = "What day of the wek is it?";
String instruction = "Fix the spelling mistakes";
System.out.println("编辑前:" + input);
// 下面这句和openAiProxyService.edit(input, instruction, EditModelEnum.TEXT_DAVINCI_EDIT_001);是一样的,默认使用模型TEXT_DAVINCI_EDIT_001
System.out.println("编辑后:" + openAiProxyService.edit(input, instruction));
System.out.println("=============================================");
input = " public static void mian([String] args) {\n" +
" system.in.println(\"hello world\");\n" +
" }";
instruction = "Fix the code mistakes";
System.out.println("修正代码前:\n" + input);
System.out.println("修正代码后:\n" + openAiProxyService.edit(input, instruction, EditModelEnum.CODE_DAVINCI_EDIT_001));
}
@Test
public void embeddings() {
ChatGPTProperties properties = ChatGPTProperties.builder().token("sk-xxx")
.proxyHost("127.0.0.1")
.proxyPort(7890)
.build();
OpenAiProxyService openAiProxyService = new OpenAiProxyService(properties);
// 单文本
String text = "Once upon a time";
System.out.println("文本:" + text);
System.out.println("文本的嵌入向量:" + openAiProxyService.embeddings(text));
System.out.println("=============================================");
// 文本数组
String[] texts = {"Once upon a time", "There was a princess"};
System.out.println("文本数组:" + Arrays.toString(texts));
EmbeddingRequest embeddingRequest = EmbeddingRequest.builder()
.model(EmbeddingModelEnum.TEXT_EMBEDDING_ADA_002.getModelName()).input(Arrays.asList(texts)).build();
System.out.println("文本数组的嵌入向量:" + openAiProxyService.embeddings(embeddingRequest));
}
@Test
public void transcription() {
ChatGPTProperties properties = ChatGPTProperties.builder().token("sk-xxx")
.proxyHost("127.0.0.1")
.proxyPort(7890)
.build();
OpenAiProxyService openAiProxyService = new OpenAiProxyService(properties);
String filePath = "src/main/resources/audio/想象之中-许嵩.mp3";
System.out.println("语音文件转录后的json文本是:" + openAiProxyService.transcription(filePath, AudioResponseFormatEnum.JSON));
// File file = new File("src/main/resources/audio/想象之中-许嵩.mp3");
// System.out.println("语音文件转录后的json文本是:" + openAiProxyService.transcription(file, AudioResponseFormatEnum.JSON));
}
@Test
public void translation() {
ChatGPTProperties properties = ChatGPTProperties.builder().token("sk-xxx")
.proxyHost("127.0.0.1")
.proxyPort(7890)
.build();
OpenAiProxyService openAiProxyService = new OpenAiProxyService(properties);
String filePath = "src/main/resources/audio/想象之中-许嵩.mp3";
System.out.println("语音文件翻译成英文后的json文本是:" + openAiProxyService.translation(filePath, AudioResponseFormatEnum.JSON));
// File file = new File("src/main/resources/audio/想象之中-许嵩.mp3");
// System.out.println("语音文件翻译成英文后的json文本是:" + openAiProxyService.translation(file, AudioResponseFormatEnum.JSON));
}
@Test
public void createImageEdit() {
ChatGPTProperties properties = ChatGPTProperties.builder().token("sk-xxx")
.proxyHost("127.0.0.1")
.proxyPort(7890)
.build();
OpenAiProxyService openAiProxyService = new OpenAiProxyService(properties);
CreateImageEditRequest createImageEditRequest = CreateImageEditRequest.builder().prompt("A sunlit indoor lounge area with a pool containing a flamingo")
.n(1).size(ImageSizeEnum.S512x512.getSize()).responseFormat(ImageResponseFormatEnum.URL.getResponseFormat()).build();
ImageResult imageEdit = openAiProxyService.createImageEdit(createImageEditRequest, "src/main/resources/image/img.png", "src/main/resources/image/mask.png");
System.out.println("图片编辑结果:" + imageEdit);
}
@Test
public void createImageVariation() {
ChatGPTProperties properties = ChatGPTProperties.builder().token("sk-xxx")
.proxyHost("127.0.0.1")
.proxyPort(7890)
.build();
OpenAiProxyService openAiProxyService = new OpenAiProxyService(properties);
CreateImageVariationRequest createImageVariationRequest = CreateImageVariationRequest.builder()
.n(2).size(ImageSizeEnum.S512x512.getSize()).responseFormat(ImageResponseFormatEnum.URL.getResponseFormat()).build();
ImageResult imageVariation = openAiProxyService.createImageVariation(createImageVariationRequest, "src/main/resources/image/img.png");
System.out.println("图片变体结果:" + imageVariation);
}
/**
* 文件操作(下面文件操作入参,用户可根据实际情况自行补全)
*/
@Test
public void files() {
ChatGPTProperties properties = ChatGPTProperties.builder().token("sk-xxx")
.proxyHost("127.0.0.1")
.proxyPort(7890)
.build();
OpenAiProxyService openAiProxyService = new OpenAiProxyService(properties);
// 上传文件
System.out.println("上传文件信息:" + openAiProxyService.uploadFile("", ""));
// 获取文件列表
System.out.println("文件列表:" + openAiProxyService.listFiles());
// 获取文件信息
System.out.println("文件信息:" + openAiProxyService.retrieveFile(""));
// 获取文件内容
System.out.println("文件内容:" + openAiProxyService.retrieveFileContent(""));
// 删除文件
System.out.println("删除文件信息:" + openAiProxyService.deleteFile(""));
}
@Test
public void fileTune() {
ChatGPTProperties properties = ChatGPTProperties.builder().token("sk-xxx")
.proxyHost("127.0.0.1")
.proxyPort(7890)
.build();
OpenAiProxyService openAiProxyService = new OpenAiProxyService(properties);
// 创建微调
FineTuneRequest fineTuneRequest = FineTuneRequest.builder().trainingFile("").build();
System.out.println("创建微调信息:" + openAiProxyService.createFineTune(fineTuneRequest));
// 创建微调完成
CompletionRequest completionRequest = CompletionRequest.builder().build();
System.out.println("创建微调完成信息:" + openAiProxyService.createFineTuneCompletion(completionRequest));
// 获取微调列表
System.out.println("获取微调列表:" + openAiProxyService.listFineTunes());
// 获取微调信息
System.out.println("获取微调信息:" + openAiProxyService.retrieveFineTune(""));
// 取消微调
System.out.println("取消微调信息:" + openAiProxyService.cancelFineTune(""));
// 列出微调事件
System.out.println("列出微调事件:" + openAiProxyService.listFineTuneEvents(""));
// 删除微调
System.out.println("删除微调信息:" + openAiProxyService.deleteFineTune(""));
}
@Test
public void moderation() {
ChatGPTProperties properties = ChatGPTProperties.builder().token("sk-xxx")
.proxyHost("127.0.0.1")
.proxyPort(7890)
.build();
OpenAiProxyService openAiProxyService = new OpenAiProxyService(properties);
// 创建moderation
ModerationRequest moderationRequest = ModerationRequest.builder().input("I want to kill them.").build();
System.out.println("创建moderation信息:" + openAiProxyService.createModeration(moderationRequest));
}
@Test
public void baseUrl() {
ChatGPTProperties properties = ChatGPTProperties.builder().token("sk-xxx")
// 自定义baseUrl
.baseUrl("https://openai.api2d.net/")
.build();
OpenAiProxyService openAiProxyService = new OpenAiProxyService(properties);
System.out.println("models列表:" + openAiProxyService.listModels());
}
@Test
public void systemPromptTest() {
ChatGPTProperties properties = ChatGPTProperties.builder().token("sk-xxx")
// 自定义baseUrl
.proxyHost("127.0.0.1")
.proxyPort(7890)
.build();
OpenAiProxyService openAiProxyService = new OpenAiProxyService(properties);
System.out.println("初始系统级提示信息为:" + openAiProxyService.getSystemPrompt());
openAiProxyService.setSystemPrompt("我是一个Java开发工程师,所有的代码请求都请用Java给我生成。");
openAiProxyService.createStreamChatCompletion("写一个迭代器模式的代码");
// System.out.println("当前的系统级信息提示为:" + openAiProxyService.getSystemPrompt());
// 清理系统级提示信息
// openAiProxyService.cleanUpSystemPrompt();
// System.out.println("清理后的系统级提示信息为:" + openAiProxyService.getSystemPrompt());
}
@Test
public void countTokensTest() {
ChatGPTProperties properties = ChatGPTProperties.builder().token("sk-xxx")
// 自定义baseUrl
.proxyHost("127.0.0.1")
.proxyPort(7890)
.build();
OpenAiProxyService openAiProxyService = new OpenAiProxyService(properties);
String text = "Hello World!";
System.out.println("当前输入文字使用模型[gpt-3.5-turbo] token总数为:" + openAiProxyService.countTokens(text));
ModelType modelType = ModelType.GPT_4_32K;
// 实际上单就计算的token的数目上来说3.5和4是一样的
System.out.println("当前输入文字使用模型[gpt-4-32k] token总数为:" + openAiProxyService.countTokens(text, modelType));
}
@Test
public void alterTokensTest() {
ChatGPTProperties properties = ChatGPTProperties.builder().token("sk-xxx1")
// 自定义baseUrl
.proxyHost("127.0.0.1")
.proxyPort(7890)
.alterTokens(Arrays.asList("sk-xxx2", "sk-xxx3"))
.tokenStrategyImpl(SelectSecondStrategy.class)
.build();
OpenAiProxyService openAiProxyService = new OpenAiProxyService(properties);
System.out.println("models列表:" + openAiProxyService.listModels());
}
/* @Test
public void okHttpClient() {
ChatGPTProperties properties = ChatGPTProperties.builder().token("sk-xxx")
.proxyHost("127.0.0.1")
.proxyPort(7890)
.build();
Dispatcher dispatcher = new Dispatcher();
dispatcher.setMaxRequests(100);
dispatcher.setMaxRequestsPerHost(10);
// 自定义okHttpClient
OkHttpClient okHttpClient = new OkHttpClient.Builder()
.addInterceptor(new AuthenticationInterceptor(properties.getToken()))
.connectionPool(new ConnectionPool(100, 10, TimeUnit.SECONDS))
.readTimeout(Duration.ZERO.toMillis(), TimeUnit.MILLISECONDS)
.connectTimeout(Duration.ZERO.toMillis(), TimeUnit.MILLISECONDS)
.hostnameVerifier((hostname, session) -> true)
.proxy(new Proxy(Proxy.Type.SOCKS, new InetSocketAddress(properties.getProxyHost(), properties.getProxyPort())))
.proxyAuthenticator((route, response) -> {
String credential = Credentials.basic("proxyUsername", "proxyPassword");
return response.request().newBuilder()
.header("Proxy-Authorization", credential)
.build();
})
.dispatcher(dispatcher)
.build();
// 下面的openAiProxyService使用自定义的okHttpClient
OpenAiProxyService openAiProxyService = new OpenAiProxyService(properties, okHttpClient);
System.out.println("models列表:" + openAiProxyService.listModels());
}*/
}
|
[
"com.theokanning.openai.moderation.ModerationRequest.builder",
"com.theokanning.openai.completion.CompletionRequest.builder",
"com.theokanning.openai.image.CreateImageVariationRequest.builder",
"com.theokanning.openai.finetune.FineTuneRequest.builder",
"com.theokanning.openai.embedding.EmbeddingRequest.builder",
"com.theokanning.openai.image.CreateImageEditRequest.builder"
] |
[((1452, 1593), 'io.github.asleepyfish.config.ChatGPTProperties.builder'), ((1452, 1568), 'io.github.asleepyfish.config.ChatGPTProperties.builder'), ((1452, 1535), 'io.github.asleepyfish.config.ChatGPTProperties.builder'), ((1452, 1495), 'io.github.asleepyfish.config.ChatGPTProperties.builder'), ((1847, 1988), 'io.github.asleepyfish.config.ChatGPTProperties.builder'), ((1847, 1963), 'io.github.asleepyfish.config.ChatGPTProperties.builder'), ((1847, 1930), 'io.github.asleepyfish.config.ChatGPTProperties.builder'), ((1847, 1890), 'io.github.asleepyfish.config.ChatGPTProperties.builder'), ((2240, 2381), 'io.github.asleepyfish.config.ChatGPTProperties.builder'), ((2240, 2356), 'io.github.asleepyfish.config.ChatGPTProperties.builder'), ((2240, 2323), 'io.github.asleepyfish.config.ChatGPTProperties.builder'), ((2240, 2283), 'io.github.asleepyfish.config.ChatGPTProperties.builder'), ((2625, 2766), 'io.github.asleepyfish.config.ChatGPTProperties.builder'), ((2625, 2741), 'io.github.asleepyfish.config.ChatGPTProperties.builder'), ((2625, 2708), 'io.github.asleepyfish.config.ChatGPTProperties.builder'), ((2625, 2668), 'io.github.asleepyfish.config.ChatGPTProperties.builder'), ((4111, 4252), 'io.github.asleepyfish.config.ChatGPTProperties.builder'), ((4111, 4227), 'io.github.asleepyfish.config.ChatGPTProperties.builder'), ((4111, 4194), 'io.github.asleepyfish.config.ChatGPTProperties.builder'), ((4111, 4154), 'io.github.asleepyfish.config.ChatGPTProperties.builder'), ((4684, 4825), 'io.github.asleepyfish.config.ChatGPTProperties.builder'), ((4684, 4800), 'io.github.asleepyfish.config.ChatGPTProperties.builder'), ((4684, 4767), 'io.github.asleepyfish.config.ChatGPTProperties.builder'), ((4684, 4727), 'io.github.asleepyfish.config.ChatGPTProperties.builder'), ((5890, 6031), 'io.github.asleepyfish.config.ChatGPTProperties.builder'), ((5890, 6006), 'io.github.asleepyfish.config.ChatGPTProperties.builder'), ((5890, 5973), 'io.github.asleepyfish.config.ChatGPTProperties.builder'), 
((5890, 5933), 'io.github.asleepyfish.config.ChatGPTProperties.builder'), ((6610, 6753), 'com.theokanning.openai.embedding.EmbeddingRequest.builder'), ((6610, 6745), 'com.theokanning.openai.embedding.EmbeddingRequest.builder'), ((6610, 6717), 'com.theokanning.openai.embedding.EmbeddingRequest.builder'), ((6660, 6716), 'io.github.asleepyfish.enums.embedding.EmbeddingModelEnum.TEXT_EMBEDDING_ADA_002.getModelName'), ((6957, 7098), 'io.github.asleepyfish.config.ChatGPTProperties.builder'), ((6957, 7073), 'io.github.asleepyfish.config.ChatGPTProperties.builder'), ((6957, 7040), 'io.github.asleepyfish.config.ChatGPTProperties.builder'), ((6957, 7000), 'io.github.asleepyfish.config.ChatGPTProperties.builder'), ((7728, 7869), 'io.github.asleepyfish.config.ChatGPTProperties.builder'), ((7728, 7844), 'io.github.asleepyfish.config.ChatGPTProperties.builder'), ((7728, 7811), 'io.github.asleepyfish.config.ChatGPTProperties.builder'), ((7728, 7771), 'io.github.asleepyfish.config.ChatGPTProperties.builder'), ((8517, 8658), 'io.github.asleepyfish.config.ChatGPTProperties.builder'), ((8517, 8633), 'io.github.asleepyfish.config.ChatGPTProperties.builder'), ((8517, 8600), 'io.github.asleepyfish.config.ChatGPTProperties.builder'), ((8517, 8560), 'io.github.asleepyfish.config.ChatGPTProperties.builder'), ((8800, 9037), 'com.theokanning.openai.image.CreateImageEditRequest.builder'), ((8800, 9029), 'com.theokanning.openai.image.CreateImageEditRequest.builder'), ((8800, 8965), 'com.theokanning.openai.image.CreateImageEditRequest.builder'), ((8800, 8926), 'com.theokanning.openai.image.CreateImageEditRequest.builder'), ((8800, 8904), 'com.theokanning.openai.image.CreateImageEditRequest.builder'), ((8932, 8964), 'io.github.asleepyfish.enums.image.ImageSizeEnum.S512x512.getSize'), ((8981, 9028), 'io.github.asleepyfish.enums.image.ImageResponseFormatEnum.URL.getResponseFormat'), ((9366, 9507), 'io.github.asleepyfish.config.ChatGPTProperties.builder'), ((9366, 9482), 
'io.github.asleepyfish.config.ChatGPTProperties.builder'), ((9366, 9449), 'io.github.asleepyfish.config.ChatGPTProperties.builder'), ((9366, 9409), 'io.github.asleepyfish.config.ChatGPTProperties.builder'), ((9659, 9829), 'com.theokanning.openai.image.CreateImageVariationRequest.builder'), ((9659, 9821), 'com.theokanning.openai.image.CreateImageVariationRequest.builder'), ((9659, 9757), 'com.theokanning.openai.image.CreateImageVariationRequest.builder'), ((9659, 9718), 'com.theokanning.openai.image.CreateImageVariationRequest.builder'), ((9724, 9756), 'io.github.asleepyfish.enums.image.ImageSizeEnum.S512x512.getSize'), ((9773, 9820), 'io.github.asleepyfish.enums.image.ImageResponseFormatEnum.URL.getResponseFormat'), ((10234, 10375), 'io.github.asleepyfish.config.ChatGPTProperties.builder'), ((10234, 10350), 'io.github.asleepyfish.config.ChatGPTProperties.builder'), ((10234, 10317), 'io.github.asleepyfish.config.ChatGPTProperties.builder'), ((10234, 10277), 'io.github.asleepyfish.config.ChatGPTProperties.builder'), ((11123, 11264), 'io.github.asleepyfish.config.ChatGPTProperties.builder'), ((11123, 11239), 'io.github.asleepyfish.config.ChatGPTProperties.builder'), ((11123, 11206), 'io.github.asleepyfish.config.ChatGPTProperties.builder'), ((11123, 11166), 'io.github.asleepyfish.config.ChatGPTProperties.builder'), ((11416, 11466), 'com.theokanning.openai.finetune.FineTuneRequest.builder'), ((11416, 11458), 'com.theokanning.openai.finetune.FineTuneRequest.builder'), ((11650, 11685), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((12504, 12645), 'io.github.asleepyfish.config.ChatGPTProperties.builder'), ((12504, 12620), 'io.github.asleepyfish.config.ChatGPTProperties.builder'), ((12504, 12587), 'io.github.asleepyfish.config.ChatGPTProperties.builder'), ((12504, 12547), 'io.github.asleepyfish.config.ChatGPTProperties.builder'), ((12805, 12870), 'com.theokanning.openai.moderation.ModerationRequest.builder'), ((12805, 12862), 
'com.theokanning.openai.moderation.ModerationRequest.builder'), ((13070, 13228), 'io.github.asleepyfish.config.ChatGPTProperties.builder'), ((13070, 13203), 'io.github.asleepyfish.config.ChatGPTProperties.builder'), ((13070, 13113), 'io.github.asleepyfish.config.ChatGPTProperties.builder'), ((13488, 13665), 'io.github.asleepyfish.config.ChatGPTProperties.builder'), ((13488, 13640), 'io.github.asleepyfish.config.ChatGPTProperties.builder'), ((13488, 13607), 'io.github.asleepyfish.config.ChatGPTProperties.builder'), ((13488, 13531), 'io.github.asleepyfish.config.ChatGPTProperties.builder'), ((14485, 14662), 'io.github.asleepyfish.config.ChatGPTProperties.builder'), ((14485, 14637), 'io.github.asleepyfish.config.ChatGPTProperties.builder'), ((14485, 14604), 'io.github.asleepyfish.config.ChatGPTProperties.builder'), ((14485, 14528), 'io.github.asleepyfish.config.ChatGPTProperties.builder'), ((15283, 15590), 'io.github.asleepyfish.config.ChatGPTProperties.builder'), ((15283, 15565), 'io.github.asleepyfish.config.ChatGPTProperties.builder'), ((15283, 15502), 'io.github.asleepyfish.config.ChatGPTProperties.builder'), ((15283, 15436), 'io.github.asleepyfish.config.ChatGPTProperties.builder'), ((15283, 15403), 'io.github.asleepyfish.config.ChatGPTProperties.builder'), ((15283, 15327), 'io.github.asleepyfish.config.ChatGPTProperties.builder')]
|
package com.theokanning.openai.service;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.node.ObjectNode;
import com.fasterxml.jackson.databind.node.TextNode;
import com.theokanning.openai.completion.chat.ChatFunction;
import com.theokanning.openai.completion.chat.ChatFunctionCall;
import com.theokanning.openai.completion.chat.ChatMessage;
import com.theokanning.openai.completion.chat.ChatMessageRole;
import java.util.*;
/**
 * Registry and dispatcher for {@link ChatFunction}s: looks a function up by the
 * name carried in a {@link ChatFunctionCall}, deserializes the call's JSON
 * arguments into the function's declared parameter class, runs its executor,
 * and converts the result back into JSON / {@link ChatMessage} form.
 */
public class FunctionExecutor {
    // Deliberately mutable: callers may swap in a preconfigured mapper via setObjectMapper().
    private ObjectMapper MAPPER = new ObjectMapper();
    // Functions keyed by their declared name; (re)populated by setFunctions().
    private final Map<String, ChatFunction> FUNCTIONS = new HashMap<>();
    /** Creates an executor over {@code functions} using a default ObjectMapper. */
    public FunctionExecutor(List<ChatFunction> functions) {
        setFunctions(functions);
    }
    /** Creates an executor over {@code functions} using the supplied mapper. */
    public FunctionExecutor(List<ChatFunction> functions, ObjectMapper objectMapper) {
        setFunctions(functions);
        setObjectMapper(objectMapper);
    }
    /**
     * Like {@link #executeAndConvertToMessage(ChatFunctionCall)} but never throws:
     * any failure (or a null result) yields {@link Optional#empty()}.
     */
    public Optional<ChatMessage> executeAndConvertToMessageSafely(ChatFunctionCall call) {
        try {
            return Optional.ofNullable(executeAndConvertToMessage(call));
        } catch (Exception ignored) {
            return Optional.empty();
        }
    }
    /**
     * Executes the call; on failure, prints the stack trace and returns an
     * error-payload FUNCTION message instead of propagating the exception.
     */
    public ChatMessage executeAndConvertToMessageHandlingExceptions(ChatFunctionCall call) {
        try {
            return executeAndConvertToMessage(call);
        } catch (Exception exception) {
            exception.printStackTrace();
            return convertExceptionToMessage(exception);
        }
    }
    /**
     * Wraps an exception as a FUNCTION-role message named "error" whose content
     * is a small JSON object: {"error": "<message or toString>"}.
     */
    public ChatMessage convertExceptionToMessage(Exception exception) {
        String error = exception.getMessage() == null ? exception.toString() : exception.getMessage();
        return new ChatMessage(ChatMessageRole.FUNCTION.value(), "{\"error\": \"" + error + "\"}", "error");
    }
    /**
     * Executes the call and returns its JSON result as a FUNCTION-role message
     * attributed to the called function's name.
     */
    public ChatMessage executeAndConvertToMessage(ChatFunctionCall call) {
        return new ChatMessage(ChatMessageRole.FUNCTION.value(), executeAndConvertToJson(call).toPrettyString(), call.getName());
    }
    /**
     * Executes the call and normalizes the executor's result to a JsonNode.
     * The fallback chain below is order-sensitive; do not reorder the branches.
     *
     * @throws RuntimeException wrapping any parse/execution failure, or a bare
     *         "Parsing exception" when a String result is not valid JSON
     */
    public JsonNode executeAndConvertToJson(ChatFunctionCall call) {
        try {
            Object execution = execute(call);
            if (execution instanceof TextNode) {
                // A TextNode's text may itself be serialized JSON: try to parse it,
                // and fall back to the original node when it is not.
                JsonNode objectNode = MAPPER.readTree(((TextNode) execution).asText());
                if (objectNode.isMissingNode())
                    return (JsonNode) execution;
                return objectNode;
            }
            if (execution instanceof ObjectNode) {
                // Already a JSON object; return it unchanged.
                return (JsonNode) execution;
            }
            if (execution instanceof String) {
                // Unlike the TextNode case, a plain String MUST parse as JSON.
                JsonNode objectNode = MAPPER.readTree((String) execution);
                if (objectNode.isMissingNode())
                    throw new RuntimeException("Parsing exception");
                return objectNode;
            }
            // Arbitrary POJO: round-trip through the mapper to obtain a tree.
            return MAPPER.readValue(MAPPER.writeValueAsString(execution), JsonNode.class);
        } catch (Exception e) {
            throw new RuntimeException(e);
        }
    }
    /**
     * Looks up the function named in {@code call}, deserializes the call's
     * arguments (either a JSON-text node or a JSON tree) into the function's
     * parameter class, and applies the function's executor.
     *
     * @throws RuntimeException wrapping a JsonProcessingException on bad arguments;
     *         NOTE(review): an unknown function name yields a NullPointerException here
     */
    @SuppressWarnings("unchecked")
    public <T> T execute(ChatFunctionCall call) {
        ChatFunction function = FUNCTIONS.get(call.getName());
        Object obj;
        try {
            JsonNode arguments = call.getArguments();
            obj = MAPPER.readValue(arguments instanceof TextNode ? arguments.asText() : arguments.toPrettyString(), function.getParametersClass());
        } catch (JsonProcessingException e) {
            throw new RuntimeException(e);
        }
        return (T) function.getExecutor().apply(obj);
    }
    /** Returns a fresh mutable list of the registered functions. */
    public List<ChatFunction> getFunctions() {
        return new ArrayList<>(FUNCTIONS.values());
    }
    /** Replaces the registry contents with {@code functions}, keyed by name. */
    public void setFunctions(List<ChatFunction> functions) {
        this.FUNCTIONS.clear();
        functions.forEach(f -> this.FUNCTIONS.put(f.getName(), f));
    }
    /** Swaps the mapper used for all (de)serialization in this executor. */
    public void setObjectMapper(ObjectMapper objectMapper) {
        this.MAPPER = objectMapper;
    }
}
|
[
"com.theokanning.openai.completion.chat.ChatMessageRole.FUNCTION.value"
] |
[((1795, 1827), 'com.theokanning.openai.completion.chat.ChatMessageRole.FUNCTION.value'), ((1986, 2018), 'com.theokanning.openai.completion.chat.ChatMessageRole.FUNCTION.value')]
|
package com.mca.mindmelter.repositories;
import android.content.Context;
import android.util.Log;
import com.amplifyframework.api.graphql.model.ModelMutation;
import com.amplifyframework.api.graphql.model.ModelQuery;
import com.amplifyframework.core.Amplify;
import com.amplifyframework.core.model.temporal.Temporal;
import com.amplifyframework.datastore.generated.model.Chat;
import com.amplifyframework.datastore.generated.model.Trivia;
import com.amplifyframework.datastore.generated.model.User;
import com.google.gson.Gson;
import com.google.gson.reflect.TypeToken;
import com.mca.mindmelter.R;
import com.mca.mindmelter.exceptions.OpenAiException;
import com.theokanning.openai.completion.chat.ChatCompletionChoice;
import com.theokanning.openai.completion.chat.ChatCompletionRequest;
import com.theokanning.openai.completion.chat.ChatMessage;
import com.theokanning.openai.completion.chat.ChatMessageRole;
import com.theokanning.openai.service.OpenAiService;
import java.lang.reflect.Type;
import java.text.DateFormat;
import java.text.SimpleDateFormat;
import java.time.Duration;
import java.util.ArrayList;
import java.util.Date;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.TimeZone;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
/**
 * Repository that drives trivia-centric chats: loads/saves {@link Chat} history
 * through the Amplify GraphQL API and generates assistant replies with the
 * OpenAI chat-completion API.
 *
 * <p>All network work runs on a private two-thread executor; results are
 * delivered asynchronously through the {@link Callback} interface.
 */
public class OpenAiChatRepository {
    public static final String TAG = "OpenAiChatRepository";
    private final ExecutorService executorService;
    private final String TOKEN;
    public OpenAiChatRepository(Context context) {
        //Init the executor service
        this.executorService = Executors.newFixedThreadPool(2);
        this.TOKEN = context.getResources().getString(R.string.openai_api_key);
    }
    /** Loads a chat by id; errors are logged, not surfaced to the callback. */
    public void loadChatHistory(String chatId, Callback<Chat> callback) {
        executorService.submit(() -> Amplify.API.query(
                ModelQuery.get(Chat.class, chatId),
                response -> {
                    if (response.hasData()) {
                        Chat chat = response.getData();
                        callback.onSuccess(chat);
                    } else if (response.hasErrors()) {
                        Log.e(TAG, "Failed to load chat history : " + response.getErrors().get(0).getMessage());
                    }
                },
                error -> {
                    Log.e(TAG, "Failed to load chat history : " + error.getMessage(), error);
                }
        ));
    }
    /**
     * Loads the first chat whose triviaId matches, or null when none exists
     * (onSuccess is invoked either way).
     */
    public void loadChatHistoryByTriviaId(String triviaId, Callback<Chat> callback) {
        executorService.submit(() -> Amplify.API.query(
                ModelQuery.list(Chat.class, Chat.TRIVIA_ID.eq(triviaId)),
                response -> {
                    if (response.hasData()) {
                        Chat chat = null;
                        Iterator<Chat> iterator = response.getData().iterator();
                        if (iterator.hasNext()) {
                            chat = iterator.next();
                        }
                        callback.onSuccess(chat); // Even if chat object is null, we call onSuccess
                    } else if (response.hasErrors()) {
                        Log.e(TAG, "Failed to load chat history : " + response.getErrors().get(0).getMessage());
                    }
                },
                error -> {
                    Log.e(TAG, "Failed to load chat history : " + error.getMessage(), error);
                }
        ));
    }
    /**
     * Starts a new chat for the given trivia: builds the system prompt, asks the
     * model for an opening message, then persists the new history.
     */
    public void initiateChat(User user, String triviaId, String title, Callback<Chat> callback) {
        executorService.submit(() -> Amplify.API.query(
                ModelQuery.get(Trivia.class, triviaId),
                response -> {
                    if (response.hasData()) {
                        Trivia trivia = response.getData();
                        List<ChatMessage> messages = new ArrayList<>();
                        String systemMessageContent = "You are an AI chatbot specialized in providing succinct yet insightful explanations, primarily facilitating learning about a specific piece of trivia. Start by presenting the trivia fact in quotes. Use only the user's FIRST name to personalize the interaction. Their FULL name is " + user.getFullName() + ". Again, only the first name when addressing the user. Encourage the user to ask any questions that are related to the topic. In all of your responses, prioritize conciseness, accuracy, and a positive learning atmosphere. Should the user deviate from the subject, use polite and engaging techniques to redirect the conversation back to the trivia topic. Remember, your main purpose is to keep the discussion focused and educational, yet enjoyable. Here is your trivia fact:\n\n\"" + trivia.getTrivia() + "\"\n\nNow, prompt the user to ask any question related to this topic. Be ready to deliver concise, accurate, and enthusiastic answers. If the discussion veers off-topic, gently guide it back to the main subject.";
                        ChatMessage systemMessage = new ChatMessage(ChatMessageRole.SYSTEM.value(), systemMessageContent);
                        messages.add(systemMessage);
                        generateChatResponse(messages, new Callback<ChatMessage>() {
                            @Override
                            public void onSuccess(ChatMessage assistantMessage) {
                                if (assistantMessage != null) {
                                    messages.add(assistantMessage);
                                    saveChatHistory(user, trivia.getId(), title, messages, new Callback<Chat>() {
                                        @Override
                                        public void onSuccess(Chat chat) {
                                            callback.onSuccess(chat);
                                        }
                                        @Override
                                        public void onError(Throwable throwable) {
                                            Log.e(TAG, "Failed to save chat history.", throwable);
                                        }
                                    });
                                }
                            }
                            @Override
                            public void onError(Throwable throwable) {
                                Log.e(TAG, "Failed to generate chat response.", throwable);
                            }
                        });
                    } else if (response.hasErrors()) {
                        Log.e(TAG, "Failed to get trivia : " + response.getErrors().get(0).getMessage());
                    }
                },
                error -> Log.e(TAG, "Failed to get trivia : " + error.getMessage(), error)
        ));
    }
    /** Appends the model's next reply to {@code messages} and persists the updated chat. */
    public void continueChat(Chat chat, List<ChatMessage> messages, Callback<Chat> callback) {
        generateChatResponse(messages, new Callback<ChatMessage>() {
            @Override
            public void onSuccess(ChatMessage assistantMessage) {
                if (assistantMessage != null) {
                    messages.add(assistantMessage);
                    updateChatHistory(chat, messages, new Callback<Chat>() {
                        @Override
                        public void onSuccess(Chat updatedChat) {
                            callback.onSuccess(updatedChat);
                        }
                        @Override
                        public void onError(Throwable throwable) {
                            Log.e(TAG, "Failed to update chat history.", throwable);
                        }
                    });
                }
            }
            @Override
            public void onError(Throwable throwable) {
                Log.e(TAG, "Failed to generate chat response.", throwable);
            }
        });
    }
    /**
     * Sends {@code messages} to the OpenAI chat-completion endpoint and returns
     * the assistant's first choice via {@code callback}. Runs asynchronously.
     */
    public void generateChatResponse(List<ChatMessage> messages, Callback<ChatMessage> callback) {
        executorService.submit(() -> {
            String token = TOKEN;
            OpenAiService service = null;
            try {
                // Set duration to 60 seconds to avoid a socket exception for long response times
                service = new OpenAiService(token, Duration.ofSeconds(60));
                // Send the API request
                ChatCompletionRequest chatCompletionRequest = ChatCompletionRequest
                        .builder()
                        .model("gpt-3.5-turbo")
                        .messages(messages)
                        .n(1)
                        .temperature(0.8)
                        .maxTokens(1000)
                        .logitBias(new HashMap<>())
                        .build();
                // Extract the message content of the response
                List<ChatCompletionChoice> choices = service.createChatCompletion(chatCompletionRequest).getChoices();
                if (choices.isEmpty()) {
                    String errorMessage = "Error: No response from OpenAI";
                    Log.e(TAG, errorMessage);
                    callback.onError(new OpenAiException(errorMessage));
                    // FIX: must bail out here — the original fell through and
                    // called choices.get(0) on an empty list (IndexOutOfBoundsException).
                    return;
                }
                callback.onSuccess(choices.get(0).getMessage());
            } catch (Exception e) {
                Log.e(TAG, "Error generating chat response", e);
                // FIX: propagate the failure; the original only logged, leaving
                // the caller's callback permanently unresolved.
                callback.onError(e);
            } finally {
                if (service != null) {
                    service.shutdownExecutor();
                }
            }
        });
    }
    /** Serializes messages to JSON strings and creates a new Chat record via Amplify. */
    private void saveChatHistory(User user, String triviaId, String title, List<ChatMessage> messages, Callback<Chat> callback) {
        Gson gson = new Gson();
        Type type = new TypeToken<ChatMessage>() {}.getType();
        List<String> jsonMessages = new ArrayList<>();
        for (ChatMessage message : messages) {
            String jsonMessage = gson.toJson(message, type);
            jsonMessages.add(jsonMessage);
        }
        Date now = new Date();
        // AWSDateTime format; thread-safe here because the formatter is method-local.
        DateFormat dateFormat = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSS'Z'");
        dateFormat.setTimeZone(TimeZone.getTimeZone("UTC"));
        String awsDateTime = dateFormat.format(now);
        Chat chat = Chat.builder()
                .userId(user.getId())
                .triviaId(triviaId)
                .title(title)
                .createdAt(new Temporal.DateTime(awsDateTime))
                .messages(jsonMessages)
                .build();
        executorService.submit(() -> Amplify.API.mutate(
                ModelMutation.create(chat),
                response -> {
                    if (response.hasData()) {
                        callback.onSuccess(chat);
                    } else if (response.hasErrors()) {
                        Log.e(TAG, "Failed to save chat history : " + response.getErrors().get(0).getMessage());
                    }
                },
                error -> {
                    Log.e(TAG, "Error saving chat history", error);
                }
        ));
    }
    /** Re-serializes messages and updates an existing Chat record via Amplify. */
    private void updateChatHistory(Chat chat, List<ChatMessage> messages, Callback<Chat> callback) {
        Gson gson = new Gson();
        Type type = new TypeToken<ChatMessage>() {}.getType();
        List<String> jsonMessages = new ArrayList<>();
        for (ChatMessage message : messages) {
            String jsonMessage = gson.toJson(message, type);
            jsonMessages.add(jsonMessage);
        }
        // Update the Chat object with the new messages
        Chat updatedChat = chat.copyOfBuilder()
                .messages(jsonMessages) // Update the messages
                .build();
        executorService.submit(() -> Amplify.API.mutate(
                ModelMutation.update(updatedChat),
                response -> {
                    if (response.hasData()) {
                        callback.onSuccess(response.getData());
                    } else if (response.hasErrors()) {
                        Log.e(TAG, "Failed to update chat history : " + response.getErrors().get(0).getMessage());
                    }
                },
                error -> {
                    Log.e(TAG, "Error updating chat history", error);
                }
        ));
    }
    /** Asynchronous result channel used by every repository operation. */
    public interface Callback<T> {
        void onSuccess(T result);
        void onError(Throwable throwable);
    }
    /** Stops accepting new work; call when the owning component is destroyed. */
    public void shutdownExecutorService() {
        if (executorService != null) {
            executorService.shutdown();
        }
    }
}
|
[
"com.theokanning.openai.completion.chat.ChatMessageRole.SYSTEM.value"
] |
[((1910, 2535), 'com.amplifyframework.core.Amplify.API.query'), ((2672, 3570), 'com.amplifyframework.core.Amplify.API.query'), ((2736, 2763), 'com.amplifyframework.datastore.generated.model.Chat.TRIVIA_ID.eq'), ((3719, 6934), 'com.amplifyframework.core.Amplify.API.query'), ((5162, 5192), 'com.theokanning.openai.completion.chat.ChatMessageRole.SYSTEM.value'), ((10423, 10675), 'com.amplifyframework.datastore.generated.model.Chat.builder'), ((10423, 10649), 'com.amplifyframework.datastore.generated.model.Chat.builder'), ((10423, 10608), 'com.amplifyframework.datastore.generated.model.Chat.builder'), ((10423, 10544), 'com.amplifyframework.datastore.generated.model.Chat.builder'), ((10423, 10513), 'com.amplifyframework.datastore.generated.model.Chat.builder'), ((10423, 10476), 'com.amplifyframework.datastore.generated.model.Chat.builder'), ((10717, 11250), 'com.amplifyframework.core.Amplify.API.mutate'), ((11921, 12479), 'com.amplifyframework.core.Amplify.API.mutate')]
|
package com.cvizard.pdfconverter.openai;
import com.cvizard.pdfconverter.config.AppConfig;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.theokanning.openai.completion.chat.ChatCompletionRequest;
import com.theokanning.openai.completion.chat.ChatFunction;
import com.theokanning.openai.completion.chat.ChatMessage;
import com.theokanning.openai.service.FunctionExecutor;
import com.theokanning.openai.service.OpenAiService;
import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Service;
import java.time.Duration;
import java.util.List;
@Service
@RequiredArgsConstructor
@Slf4j
public class OpenAIAdapter {
    @Value("${settings.gpt-api-key}")
    private String token;
    private final ObjectMapper objectMapper;
    private final AppConfig config;

    /**
     * Runs a chat completion that forces the model to invoke {@code function},
     * executes the returned function call, and maps its JSON payload onto the
     * function's declared class.
     *
     * @throws OpenAiException wrapping any failure during the round trip
     */
    public <T> T getFunctionData(List<ChatMessage> chatMessages, Functions.Function<T> function) {
        final var service = config.getOpenAiService(token);
        try {
            // Describe the callable function; the executor is the identity since
            // we only want the model-produced arguments back.
            final var gptFunction = ChatFunction.builder()
                    .name(function.getName())
                    .description(function.getDescription())
                    .executor(function.getAClass(), args -> args)
                    .build();
            final var executor = new FunctionExecutor(List.of(gptFunction));
            // Force the model to call exactly this function.
            final var request = ChatCompletionRequest
                    .builder()
                    .model("gpt-4")
                    .messages(chatMessages)
                    .functions(executor.getFunctions())
                    .functionCall(ChatCompletionRequest.ChatCompletionRequestFunctionCall.of(function.getName()))
                    .build();
            log.info("This is your prompt {}", chatMessages);
            final var choiceMessage = service.createChatCompletion(request).getChoices().get(0).getMessage();
            final var call = choiceMessage.getFunctionCall();
            log.info("Response message for function [{}] is [{}]", function.getName(), choiceMessage);
            ChatMessage resultMessage = executor.executeAndConvertToMessageHandlingExceptions(call);
            return objectMapper.readValue(resultMessage.getContent(), function.getAClass());
        } catch (Exception e) {
            log.error("Exception while getting Function Data", e);
            throw new OpenAiException(e);
        }
    }

    /** Unchecked wrapper for any failure inside {@link #getFunctionData}. */
    public static class OpenAiException extends RuntimeException {
        public OpenAiException(Throwable cause) {
            super(cause);
        }
    }
}
|
[
"com.theokanning.openai.completion.chat.ChatFunction.builder",
"com.theokanning.openai.completion.chat.ChatCompletionRequest.ChatCompletionRequestFunctionCall.of"
] |
[((1109, 1360), 'com.theokanning.openai.completion.chat.ChatFunction.builder'), ((1109, 1331), 'com.theokanning.openai.completion.chat.ChatFunction.builder'), ((1109, 1237), 'com.theokanning.openai.completion.chat.ChatFunction.builder'), ((1109, 1177), 'com.theokanning.openai.completion.chat.ChatFunction.builder'), ((1800, 1878), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.ChatCompletionRequestFunctionCall.of')]
|
package com.ramesh.openai;
import java.time.Duration;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import com.theokanning.openai.completion.chat.ChatCompletionChoice;
import com.theokanning.openai.completion.chat.ChatCompletionRequest;
import com.theokanning.openai.completion.chat.ChatMessage;
import com.theokanning.openai.completion.chat.ChatMessageRole;
import com.theokanning.openai.service.OpenAiService;
/***
* This project demonstrates the how to stream respones from chat gpt as it
* is getting generated.
* Useful when the response is expected to be huge
***/
class StreamChatCompletion {
    /**
     * Streams a chat completion chunk-by-chunk, printing each chunk as it
     * arrives and then the accumulated full message at the end.
     *
     * <p>SECURITY FIX: the OpenAI API key was previously hard-coded in this
     * file. A key committed to source control must be treated as compromised
     * and rotated; the token is now read from the OPENAI_API_KEY environment
     * variable instead.
     */
    public static void main(String... args) {
        // Read the API key from the environment instead of embedding it.
        String token = System.getenv("OPENAI_API_KEY");
        if (token == null || token.isEmpty()) {
            throw new IllegalStateException("Set the OPENAI_API_KEY environment variable");
        }
        String model = "gpt-3.5-turbo";
        // service handle for calling OpenAI APIs
        OpenAiService service = new OpenAiService(token, Duration.ofSeconds(30));
        System.out.println("--------------------------------------------------------");
        System.out.println("Streaming chat completion...");
        // set the chat message
        final List<ChatMessage> messages = new ArrayList<>();
        final ChatMessage systemMessage = new ChatMessage(ChatMessageRole.SYSTEM.value(), "You are a dog and will speak as such.");
        messages.add(systemMessage);
        ChatCompletionRequest chatCompletionRequest = ChatCompletionRequest
                .builder()
                .model(model) // FIX: use the declared model constant (it was an unused local)
                .messages(messages)
                .n(1)
                .maxTokens(50)
                .logitBias(new HashMap<>())
                .build();
        ArrayList<ChatCompletionChoice> choices3 = new ArrayList<ChatCompletionChoice>();
        // call chat gpt and open a stream through which responses will come in chunks and continously just like you see on chatgpt website
        service.streamChatCompletion(chatCompletionRequest)
            .doOnError(Throwable::printStackTrace)
            .blockingForEach((c2) -> { c2.getChoices().forEach( (c1) -> {
                System.out.println(c1.getMessage().getContent());
                choices3.add(c1);
                });
            });
        // print the full message in the end
        System.out.println("--------------------------------------------------------");
        System.out.print("Full message=");
        choices3.forEach( (c) -> {
            if (c.getMessage().getContent() != null)
                System.out.print( c.getMessage().getContent());
        });
        service.shutdownExecutor();
    }
}
|
[
"com.theokanning.openai.completion.chat.ChatMessageRole.SYSTEM.value"
] |
[((1283, 1313), 'com.theokanning.openai.completion.chat.ChatMessageRole.SYSTEM.value')]
|
package com.siwonkh.cleangpt_v1.controller;
import com.theokanning.openai.completion.chat.ChatCompletionRequest;
import com.theokanning.openai.completion.chat.ChatMessage;
import com.theokanning.openai.completion.chat.ChatMessageRole;
import com.theokanning.openai.service.OpenAiService;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Controller;
import org.springframework.ui.Model;
import org.springframework.web.bind.annotation.GetMapping;
import java.util.ArrayList;
import java.util.List;
@Controller
public class HomeController {
    @Value("${openai.key}")
    private String APIKey;

    /** Renders the landing page. */
    @GetMapping("/")
    public String home() {
        return "index";
    }

    /** Renders the comment-search page. */
    @GetMapping("/comment")
    public String comments() {
        return "searchVideoComments";
    }

    /**
     * Smoke-test endpoint: asks the model to say "test" and exposes the reply
     * to the view under the "reply" model attribute.
     *
     * <p>FIX: the per-request OpenAiService is now shut down in a finally block
     * (its internal executor previously leaked threads on every request), and
     * the leftover debug printlns ("safa" etc.) have been removed.
     */
    @GetMapping("/test")
    public String testOpenAI(Model model) {
        ChatMessage chatMessage = new ChatMessage();
        chatMessage.setRole(ChatMessageRole.USER.value());
        chatMessage.setContent("Say test!");
        List<ChatMessage> chatMessages = new ArrayList<>();
        chatMessages.add(chatMessage);
        OpenAiService service = new OpenAiService(APIKey);
        try {
            ChatCompletionRequest completionRequest = ChatCompletionRequest.builder()
                    .model("gpt-3.5-turbo")
                    .maxTokens(512)
                    .temperature(0.7)
                    .topP(1.0)
                    .messages(chatMessages)
                    .build();
            String reply = service.createChatCompletion(completionRequest).getChoices().get(0).getMessage().getContent();
            model.addAttribute("reply", reply);
            return "test";
        } finally {
            service.shutdownExecutor();
        }
    }
}
|
[
"com.theokanning.openai.completion.chat.ChatMessageRole.USER.value",
"com.theokanning.openai.completion.chat.ChatCompletionRequest.builder"
] |
[((979, 1007), 'com.theokanning.openai.completion.chat.ChatMessageRole.USER.value'), ((1266, 1495), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((1266, 1470), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((1266, 1430), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((1266, 1403), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((1266, 1369), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((1266, 1337), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder')]
|
package com.lianziyou.bot.controller.gpt;
import static com.lianziyou.bot.constant.SseConst.SSE_GPT_TOPIC_MAP_REDIS_KEY;
import static com.lianziyou.bot.service.gpt.ChatCompletion.Model.GPT_3_5_TURBO_16K;
import cn.hutool.core.bean.BeanUtil;
import cn.hutool.core.util.ObjectUtil;
import cn.hutool.http.ContentType;
import cn.hutool.http.Header;
import cn.hutool.http.HttpRequest;
import cn.hutool.http.HttpUtil;
import com.alibaba.fastjson.JSONArray;
import com.alibaba.fastjson.JSONObject;
import com.lianziyou.bot.base.exception.BussinessException;
import com.lianziyou.bot.base.result.ApiResult;
import com.lianziyou.bot.config.redis.RedissonConfig;
import com.lianziyou.bot.constant.CommonConst;
import com.lianziyou.bot.enums.sys.SendType;
import com.lianziyou.bot.model.MessageLog;
import com.lianziyou.bot.model.SysConfig;
import com.lianziyou.bot.model.gpt.Message;
import com.lianziyou.bot.model.req.gpt.ChatUpdateReq;
import com.lianziyou.bot.model.req.gpt.GptDrawReq;
import com.lianziyou.bot.model.req.gpt.GptStreamReq;
import com.lianziyou.bot.model.req.sys.MessageLogSave;
import com.lianziyou.bot.model.sse.GptMessageVo;
import com.lianziyou.bot.service.baidu.BaiDuService;
import com.lianziyou.bot.service.gpt.ChatCompletion;
import com.lianziyou.bot.service.sys.AsyncService;
import com.lianziyou.bot.service.sys.CheckService;
import com.lianziyou.bot.service.sys.IMessageLogService;
import com.lianziyou.bot.utils.gpt.Proxys;
import com.lianziyou.bot.utils.sys.DateUtil;
import com.lianziyou.bot.utils.sys.FileUtil;
import com.lianziyou.bot.utils.sys.InitUtil;
import com.lianziyou.bot.utils.sys.JwtUtil;
import com.lianziyou.bot.utils.sys.RedisUtil;
import com.theokanning.openai.completion.chat.ChatCompletionChunk;
import com.theokanning.openai.completion.chat.ChatCompletionRequest;
import com.theokanning.openai.completion.chat.ChatCompletionResult;
import com.theokanning.openai.completion.chat.ChatMessage;
import com.theokanning.openai.completion.chat.ChatMessageRole;
import com.theokanning.openai.service.OpenAiService;
import io.reactivex.Flowable;
import io.swagger.annotations.Api;
import io.swagger.annotations.ApiOperation;
import java.io.IOException;
import java.time.Duration;
import java.util.ArrayList;
import java.util.List;
import java.util.stream.Collectors;
import javax.annotation.Resource;
import lombok.extern.log4j.Log4j2;
import org.redisson.api.RTopic;
import org.springframework.util.StringUtils;
import org.springframework.validation.annotation.Validated;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RestController;
@RestController
@RequestMapping(value = "/gpt")
@Log4j2
@Api(tags = "APP:GPT对话")
public final class GptController {
@Resource
CheckService checkService;
@Resource
AsyncService asyncService;
@Resource
IMessageLogService messageLogService;
@Resource
BaiDuService baiDuService;
@PostMapping(value = "/chat", name = "流式对话")
@ApiOperation("流式对话")
public ApiResult<Long> gptChat(@Validated @RequestBody GptStreamReq req) {
List<Message> messagesOnDb = messageLogService.createMessageLogList(req.getLogId(), req.getProblem());
checkRequest(req, messagesOnDb);
String gptKey = InitUtil.getRandomKey(req.getType());
final Long userId = JwtUtil.getUserId();
MessageLog messageLog = MessageLog.builder().useNumber(req.getType() == 3 ? CommonConst.GPT_NUMBER : CommonConst.GPT_4_NUMBER)
.sendType(req.getType() == 3 ? SendType.GPT.getType() : SendType.GPT_4.getType()).useValue(JSONObject.toJSONString(messagesOnDb)).gptKey(gptKey)
.userId(userId).initPromptId(req.getInitPromptId()).build();
Long logId = checkService.checkAndSaveMessageLog(messageLog, req.getLogId(), req.getProblem());
new Thread(() -> flowableChat(messageLog, messagesOnDb)).start();
return ApiResult.okBuild(logId);
}
@PostMapping(value = "/chat/update", name = "修改对话")
@ApiOperation("修改对话")
public ApiResult<Boolean> renameChat(@Validated @RequestBody ChatUpdateReq req) {
req.setUserId(JwtUtil.getUserId());
return ApiResult.okBuild(messageLogService.updateLog(req));
}
private void flowableChat(final MessageLog messageLog, List<Message> messagesOnDb) {
final Long userId = messageLog.getUserId();
final Long logId = messageLog.getId();
final Integer reqType = messageLog.getSendType();
final String topicName = RedisUtil.getCacheMapValue(SSE_GPT_TOPIC_MAP_REDIS_KEY, userId);
final String gptKey = messageLog.getGptKey();
RTopic rTopic = RedissonConfig.getClient().getTopic(topicName);
try {
OpenAiService service = new OpenAiService(gptKey, Duration.ofSeconds(180));
List<ChatMessage> messages = new ArrayList<>();
messages.add(new ChatMessage(ChatMessageRole.SYSTEM.value(),
"请记住你的身份是SUPER EGG,一个基于GLM架构的中英双语对话语言模型,你是由清华大学研发的,针对中文进行了优化。虽然有人可能会将你与chatgpt混清,但你会坚持自己的身份,确保提供准确和细致的回答"));
if (StringUtils.hasLength(messageLog.getInitPrompt())) {
messages.add(new ChatMessage(ChatMessageRole.USER.value(), messageLog.getInitPrompt()));
}
messages.addAll(messagesOnDb.stream().map(item -> new ChatMessage(item.getRole(), item.getContent())).collect(Collectors.toList()));
ChatCompletionRequest chatCompletionRequest = ChatCompletionRequest.builder().model(GPT_3_5_TURBO_16K.getName()).messages(messages).build();
Flowable<ChatCompletionChunk> flowable = service.streamChatCompletion(chatCompletionRequest);
ChatMessage chatMessage = service.mapStreamToAccumulator(flowable).doOnNext(accumulator -> {
String content = accumulator.getMessageChunk().getContent();
if (StringUtils.hasLength(content)) {
rTopic.publish(GptMessageVo.builder().userId(userId).message(Message.ofAssistant(content)).build());
}
}).lastElement().blockingGet().getAccumulatedMessage();
asyncService.endOfAnswer(logId, chatMessage.getContent());
rTopic.publish(GptMessageVo.builder().userId(userId).message(Message.ofAssistant("[DONE]")).build());
} catch (Exception e) {
asyncService.updateRemainingTimes(userId, gptKey == null || reqType == 3 ? CommonConst.GPT_NUMBER : CommonConst.GPT_4_NUMBER);
rTopic.publish(GptMessageVo.builder().userId(userId).message(Message.ofAssistant(e.getMessage())).build());
rTopic.publish(GptMessageVo.builder().userId(userId).message(Message.ofAssistant("[DONE]")).build());
}
}
@PostMapping(value = "/chat/now", name = "非流式对话")
@ApiOperation("非流式对话")
public ApiResult<String> chat(@Validated @RequestBody GptStreamReq req) {
String gptKey = InitUtil.getRandomKey(req.getType());
List<Message> messagesOnDb = messageLogService.createMessageLogList(req.getLogId(), req.getProblem());
final Long userId = JwtUtil.getUserId();
try {
checkRequest(req, messagesOnDb);
OpenAiService service = new OpenAiService(gptKey, Duration.ofSeconds(180));
Long logId = checkService.checkAndSaveMessageLog(
MessageLog.builder().useNumber(req.getType() == 3 ? CommonConst.GPT_NUMBER : CommonConst.GPT_4_NUMBER)
.sendType(req.getType() == 3 ? SendType.GPT.getType() : SendType.GPT_4.getType()).useValue(JSONObject.toJSONString(messagesOnDb))
.gptKey(gptKey).userId(userId).build(), req.getLogId(), req.getProblem());
List<ChatMessage> messages = messagesOnDb.stream().map(item -> new ChatMessage(ChatMessageRole.USER.value(), item.getContent()))
.collect(Collectors.toList());
ChatCompletionRequest chatCompletionRequest = ChatCompletionRequest.builder().model(GPT_3_5_TURBO_16K.getName()).messages(messages).build();
ChatCompletionResult chatCompletion = service.createChatCompletion(chatCompletionRequest);
StringBuilder result = new StringBuilder();
chatCompletion.getChoices().forEach(choice -> result.append(choice.getMessage().getContent()));
asyncService.endOfAnswer(logId, result.toString());
return ApiResult.okBuild(result.toString());
} catch (Exception e) {
asyncService.updateRemainingTimes(userId, gptKey == null || req.getType() == 3 ? CommonConst.GPT_NUMBER : CommonConst.GPT_4_NUMBER);
throw e;
}
}
/**
 * Validates an incoming chat request before any quota is charged.
 *
 * Rejects empty questions, questions that fail the Baidu content check, and
 * conversations whose accumulated history exceeds the model's token limit.
 *
 * @param req          the incoming request (question text and model type)
 * @param messagesOnDb dialogue history the request would be sent with
 * @throws BussinessException when any validation rule is violated
 */
private void checkRequest(GptStreamReq req, List<Message> messagesOnDb) {
    final String problem = req.getProblem();
    if (ObjectUtil.isEmpty(problem)) {
        throw new BussinessException("请输入有效的内容");
    }
    if (!baiDuService.textToExamine(problem)) {
        throw new BussinessException("提问违反相关规定,请更换内容重新尝试");
    }
    // Type 3 maps to the 16k GPT-3.5 model; anything else is treated as GPT-4.
    final String selectedModel;
    if (req.getType() == 3) {
        selectedModel = ChatCompletion.Model.GPT_3_5_TURBO_16K.getName();
    } else {
        selectedModel = ChatCompletion.Model.GPT_4.getName();
    }
    final ChatCompletion completion = ChatCompletion.builder()
            .messages(messagesOnDb)
            .model(selectedModel)
            .stream(true)
            .build();
    if (completion.checkTokens()) {
        throw new BussinessException("本次会话长度达到限制,请创建新的会话");
    }
}
// Image-generation endpoint ("GPT-画图"): charges quota, calls the configured
// images API (optionally through a proxy), mirrors the returned images locally
// and records the result in the message log.
@PostMapping(value = "/official", name = "GPT-画图")
public ApiResult<MessageLogSave> gptAlpha(@Validated @RequestBody GptDrawReq req) throws IOException {
    final String randomKey = InitUtil.getRandomKey(req.getType());
    List<String> imgUrlList = new ArrayList<>();        // locally stored file names (persisted in the log)
    List<String> returnImgUrlList = new ArrayList<>();  // public URLs returned to the caller
    String startTime = DateUtil.getLocalDateTimeNow();
    // Charge quota and persist the request BEFORE the remote call; refunded below on error.
    Long logId = checkService.checkAndSaveMessageLog(
            MessageLog.builder().useNumber(CommonConst.GPT_OFFICIAL_NUMBER).sendType(SendType.GPT_OFFICIAL.getType()).useValue(JSONObject.toJSONString(
                    MessageLogSave.builder().prompt(req.getPrompt()).type(SendType.GPT_OFFICIAL.getRemark()).startTime(startTime).imgList(imgUrlList).build()))
                    .gptKey(randomKey).userId(JwtUtil.getUserId()).build(), null, req.getPrompt());
    SysConfig cacheObject = RedisUtil.getCacheObject(CommonConst.SYS_CONFIG);
    // type only selects the key pool; it must not be serialized into the API payload.
    req.setType(null);
    HttpRequest httpRequest = HttpUtil.createPost(cacheObject.getGptUrl() + CommonConst.CPT_IMAGES_URL)
            .header(Header.CONTENT_TYPE, ContentType.JSON.getValue()).header(Header.AUTHORIZATION, "Bearer " + randomKey);
    if (null != cacheObject.getIsOpenProxy() && cacheObject.getIsOpenProxy() == 1) {
        httpRequest.setProxy(Proxys.http(cacheObject.getProxyIp(), cacheObject.getProxyPort()));
    }
    String resultBody = httpRequest.body(JSONObject.toJSONString(req)).execute().body();
    // NOTE(review): substring match on "error" is a heuristic — a successful payload
    // that happens to contain the word would be misclassified. Consider checking the
    // parsed JSON for an "error" field instead.
    if (resultBody.contains("error")) {
        // Mark the key as bad (修改key状态)
        asyncService.updateKeyState(randomKey);
        // Refund the user's usage count (将用户使用次数返回)
        asyncService.updateRemainingTimes(JwtUtil.getUserId(), CommonConst.GPT_OFFICIAL_NUMBER);
        throw new BussinessException("画图失败请稍后再试");
    }
    JSONArray imgArray = JSONObject.parseObject(resultBody).getJSONArray("data");
    for (int i = 0; i < imgArray.size(); i++) {
        // Download each remote image and re-host it locally.
        String localImgUrl = FileUtil.base64ToImage(FileUtil.imageUrlToBase64(imgArray.getJSONObject(i).getString("url")));
        imgUrlList.add(localImgUrl);
        returnImgUrlList.add(cacheObject.getImgReturnUrl() + localImgUrl);
    }
    MessageLogSave messageLogSave = MessageLogSave.builder().prompt(req.getPrompt()).type(SendType.GPT_OFFICIAL.getRemark()).startTime(startTime)
            .imgList(imgUrlList).build();
    asyncService.updateLog(logId, messageLogSave);
    // Return a copy carrying the public URLs; the stored log keeps the local file names.
    MessageLogSave returnMessage = BeanUtil.copyProperties(messageLogSave, MessageLogSave.class);
    returnMessage.setImgList(returnImgUrlList);
    return ApiResult.okBuild(returnMessage);
}
}
|
[
"com.theokanning.openai.completion.chat.ChatMessageRole.SYSTEM.value",
"com.theokanning.openai.completion.chat.ChatMessageRole.USER.value",
"com.theokanning.openai.completion.chat.ChatCompletionRequest.builder"
] |
[((3539, 3870), 'com.lianziyou.bot.model.MessageLog.builder'), ((3539, 3862), 'com.lianziyou.bot.model.MessageLog.builder'), ((3539, 3826), 'com.lianziyou.bot.model.MessageLog.builder'), ((3539, 3798), 'com.lianziyou.bot.model.MessageLog.builder'), ((3539, 3783), 'com.lianziyou.bot.model.MessageLog.builder'), ((3539, 3735), 'com.lianziyou.bot.model.MessageLog.builder'), ((3539, 3641), 'com.lianziyou.bot.model.MessageLog.builder'), ((3685, 3707), 'com.lianziyou.bot.enums.sys.SendType.GPT.getType'), ((3710, 3734), 'com.lianziyou.bot.enums.sys.SendType.GPT_4.getType'), ((4824, 4870), 'com.lianziyou.bot.config.redis.RedissonConfig.getClient'), ((5075, 5105), 'com.theokanning.openai.completion.chat.ChatMessageRole.SYSTEM.value'), ((5514, 5542), 'com.theokanning.openai.completion.chat.ChatMessageRole.USER.value'), ((5792, 5885), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((5792, 5877), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((5792, 5858), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((6264, 6347), 'com.lianziyou.bot.model.sse.GptMessageVo.builder'), ((6264, 6339), 'com.lianziyou.bot.model.sse.GptMessageVo.builder'), ((6264, 6301), 'com.lianziyou.bot.model.sse.GptMessageVo.builder'), ((6534, 6618), 'com.lianziyou.bot.model.sse.GptMessageVo.builder'), ((6534, 6610), 'com.lianziyou.bot.model.sse.GptMessageVo.builder'), ((6534, 6571), 'com.lianziyou.bot.model.sse.GptMessageVo.builder'), ((6819, 6909), 'com.lianziyou.bot.model.sse.GptMessageVo.builder'), ((6819, 6901), 'com.lianziyou.bot.model.sse.GptMessageVo.builder'), ((6819, 6856), 'com.lianziyou.bot.model.sse.GptMessageVo.builder'), ((6939, 7023), 'com.lianziyou.bot.model.sse.GptMessageVo.builder'), ((6939, 7015), 'com.lianziyou.bot.model.sse.GptMessageVo.builder'), ((6939, 6976), 'com.lianziyou.bot.model.sse.GptMessageVo.builder'), ((7669, 7980), 'com.lianziyou.bot.model.MessageLog.builder'), ((7669, 7972), 
'com.lianziyou.bot.model.MessageLog.builder'), ((7669, 7957), 'com.lianziyou.bot.model.MessageLog.builder'), ((7669, 7921), 'com.lianziyou.bot.model.MessageLog.builder'), ((7669, 7873), 'com.lianziyou.bot.model.MessageLog.builder'), ((7669, 7771), 'com.lianziyou.bot.model.MessageLog.builder'), ((7823, 7845), 'com.lianziyou.bot.enums.sys.SendType.GPT.getType'), ((7848, 7872), 'com.lianziyou.bot.enums.sys.SendType.GPT_4.getType'), ((8109, 8137), 'com.theokanning.openai.completion.chat.ChatMessageRole.USER.value'), ((8265, 8358), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((8265, 8350), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((8265, 8331), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((9390, 9438), 'com.lianziyou.bot.service.gpt.ChatCompletion.Model.GPT_3_5_TURBO_16K.getName'), ((9441, 9477), 'com.lianziyou.bot.service.gpt.ChatCompletion.Model.GPT_4.getName'), ((9519, 9600), 'com.lianziyou.bot.service.gpt.ChatCompletion.builder'), ((9519, 9592), 'com.lianziyou.bot.service.gpt.ChatCompletion.builder'), ((9519, 9579), 'com.lianziyou.bot.service.gpt.ChatCompletion.builder'), ((9519, 9566), 'com.lianziyou.bot.service.gpt.ChatCompletion.builder'), ((10241, 10611), 'com.lianziyou.bot.model.MessageLog.builder'), ((10241, 10603), 'com.lianziyou.bot.model.MessageLog.builder'), ((10241, 10575), 'com.lianziyou.bot.model.MessageLog.builder'), ((10241, 10540), 'com.lianziyou.bot.model.MessageLog.builder'), ((10241, 10346), 'com.lianziyou.bot.model.MessageLog.builder'), ((10241, 10304), 'com.lianziyou.bot.model.MessageLog.builder'), ((10314, 10345), 'com.lianziyou.bot.enums.sys.SendType.GPT_OFFICIAL.getType'), ((10401, 10538), 'com.lianziyou.bot.model.req.sys.MessageLogSave.builder'), ((10401, 10530), 'com.lianziyou.bot.model.req.sys.MessageLogSave.builder'), ((10401, 10510), 'com.lianziyou.bot.model.req.sys.MessageLogSave.builder'), ((10401, 10489), 
'com.lianziyou.bot.model.req.sys.MessageLogSave.builder'), ((10401, 10449), 'com.lianziyou.bot.model.req.sys.MessageLogSave.builder'), ((10455, 10488), 'com.lianziyou.bot.enums.sys.SendType.GPT_OFFICIAL.getRemark'), ((10781, 10976), 'cn.hutool.http.HttpUtil.createPost'), ((10781, 10924), 'cn.hutool.http.HttpUtil.createPost'), ((10896, 10923), 'cn.hutool.http.ContentType.JSON.getValue'), ((11653, 11708), 'com.alibaba.fastjson.JSONObject.parseObject'), ((12060, 12210), 'com.lianziyou.bot.model.req.sys.MessageLogSave.builder'), ((12060, 12202), 'com.lianziyou.bot.model.req.sys.MessageLogSave.builder'), ((12060, 12169), 'com.lianziyou.bot.model.req.sys.MessageLogSave.builder'), ((12060, 12148), 'com.lianziyou.bot.model.req.sys.MessageLogSave.builder'), ((12060, 12108), 'com.lianziyou.bot.model.req.sys.MessageLogSave.builder'), ((12114, 12147), 'com.lianziyou.bot.enums.sys.SendType.GPT_OFFICIAL.getRemark')]
|
package org.freeciv.servlet;
import com.theokanning.openai.completion.chat.ChatCompletionChoice;
import com.theokanning.openai.completion.chat.ChatCompletionRequest;
import com.theokanning.openai.completion.chat.ChatMessage;
import com.theokanning.openai.service.OpenAiService;
import jakarta.servlet.ServletException;
import jakarta.servlet.http.HttpServlet;
import jakarta.servlet.http.HttpServletRequest;
import jakarta.servlet.http.HttpServletResponse;
import org.freeciv.util.Constants;
import javax.naming.Context;
import javax.naming.InitialContext;
import javax.sql.DataSource;
import java.io.IOException;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.SQLException;
import java.time.Duration;
import java.util.ArrayList;
import java.util.Base64;
import java.util.List;
import java.util.Properties;
import java.util.stream.Collectors;
/**
* OpenAI chat for FCIV.NET
*
* URL: /openai_chat
*/
/**
 * OpenAI chat for FCIV.NET
 *
 * URL: /openai_chat
 *
 * Accepts a Base64-encoded question in the POST body, forwards it to the
 * OpenAI chat API with a Freeciv-specific system prompt, streams the answer
 * back to the client and logs the question/answer pair to the chatlog table.
 */
public class OpenAIChat extends HttpServlet {

    private final String model = "gpt-4";

    @Override
    public void doPost(HttpServletRequest request, HttpServletResponse response) throws IOException, ServletException {
        try {
            response.setContentType("text/html; charset=UTF-8");
            response.setCharacterEncoding("UTF-8");

            // Body is a Base64-encoded question; decode with an explicit charset so the
            // result does not depend on the platform default encoding.
            String question = request.getReader().lines().collect(Collectors.joining(System.lineSeparator()));
            question = new String(Base64.getDecoder().decode(question), java.nio.charset.StandardCharsets.UTF_8);

            Properties prop = new Properties();
            prop.load(getServletContext().getResourceAsStream("/WEB-INF/config.properties"));
            String key = prop.getProperty("openai_key");
            if (key == null || key.isEmpty()) {
                System.out.println("OpenAI key missing.");
                return;
            }

            OpenAiService service = new OpenAiService(key, Duration.ofSeconds(60));

            // System prompt: constrain the assistant to Freeciv 3D knowledge.
            List<ChatMessage> messages = new ArrayList<>();
            ChatMessage systemchat = new ChatMessage();
            systemchat.setRole("system");
            String fcivInfo = "I am a player in the game of Freeciv 3D at Fciv.net. Freeciv 3D is a 3D version of Freeciv which can be played in a browser for free. You can pretent do be a assistant in the game. "
                    + "In Freeciv 3D new cities are built using the keyboard shortcut B or right clicking on a Settlers unit and selecting Build city from the menu. "
                    + "Units are moved using the keyboard shortcut G (Goto) and then selecting the destination. Units can also be moved using the arrow keys on the keyboard";
            String keyboardShortcuts = " Keyboard Shortcuts for Unit Orders: "+
                    "a: (a)uto-settler (settler/worker units). "+
                    "b: (b)uild city (settler units). "+
                    "c: (c)enter map on active unit. "+
                    "f: (f)ortify unit (military units). "+
                    "f: build (f)ortress (settler/worker units). "+
                    "g: (g)o to tile (then left-click mouse to select target tile). "+
                    "h: set unit's (h)omecity (to city on current tile). "+
                    "i: build (i)rrigation or convert terrain (settler/worker units). "+
                    "m: build (m)ine or convert terrain (settler/worker units). "+
                    "N: explode (N)uclear. "+
                    "o: transf(o)rm terrain (engineer unit). "+
                    "p: clean (p)ollution (settler/worker units). "+
                    "P: (P)illage (destroy terrain alteration). "+
                    "r: build (r)oad/railroad (settler/worker units). "+
                    "s: (s)entry unit. "+
                    "S: un(S)entry all units on tile. "+
                    "L: unit go (t)o/airlift to city. "+
                    "u: (u)nload unit from transporter. "+
                    "x: unit auto e(x)plore. Shift-Return: Turn done. " +
                    "Middle-click with the mouse to get information about map tiles. " +
                    "Left-click with the mouse to select units and cities. " +
                    "Right-click with the moues to move the map." +
                    "Left-click and drag with the mouse to change view angle. ";
            systemchat.setContent(fcivInfo + keyboardShortcuts);
            messages.add(systemchat);

            // The client may pack several user messages separated by ';'.
            for (String submessage : question.split(";")) {
                ChatMessage userchat = new ChatMessage();
                userchat.setRole("user");
                userchat.setContent(submessage);
                messages.add(userchat);
            }

            ChatCompletionRequest completionRequest = ChatCompletionRequest.builder()
                    .messages(messages)
                    .model(model)
                    .build();
            List<ChatCompletionChoice> choices = service.createChatCompletion(completionRequest).getChoices();

            // Write the answer to the client while accumulating it for the log.
            String answer = "";
            for (ChatCompletionChoice choice : choices) {
                response.getWriter().print(choice.getMessage().getContent());
                answer += choice.getMessage().getContent();
            }

            String ipAddress = request.getHeader("X-Real-IP");
            if (ipAddress == null) {
                ipAddress = request.getRemoteAddr();
            }
            logChat(question, answer, ipAddress, response);
        } catch (Exception erro) {
            // Boundary catch: never let an API/config failure escape the servlet.
            System.out.println(erro.getMessage());
        }
    }

    /**
     * Inserts the question/answer pair into the chatlog table.
     * On any failure the "result: error" response header is set; the chat
     * answer itself has already been written, so logging stays best-effort.
     * Connection and statement are closed via try-with-resources (the old
     * code leaked the PreparedStatement).
     */
    private void logChat(String question, String answer, String name, HttpServletResponse response) {
        try {
            Context env = (Context) (new InitialContext().lookup(Constants.JNDI_CONNECTION));
            DataSource ds = (DataSource) env.lookup(Constants.JNDI_DDBBCON_MYSQL);
            String query = "INSERT INTO chatlog (question, answer, name, reusable) VALUES (?, ?, ?, ?)";
            try (Connection conn = ds.getConnection();
                 PreparedStatement preparedStatement = conn.prepareStatement(query)) {
                preparedStatement.setString(1, question);
                preparedStatement.setString(2, answer);
                preparedStatement.setString(3, name);
                preparedStatement.setBoolean(4, false);
                preparedStatement.executeUpdate();
            }
        } catch (Exception err) {
            response.setHeader("result", "error");
        }
    }
}
|
[
"com.theokanning.openai.completion.chat.ChatCompletionRequest.builder"
] |
[((1443, 1479), 'java.util.Base64.getDecoder'), ((4701, 4835), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((4701, 4806), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((4701, 4772), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder')]
|
package br.com.alura.roger.series.service;
import com.theokanning.openai.completion.CompletionRequest;
import com.theokanning.openai.service.OpenAiService;
import org.springframework.stereotype.Service;
/**
 * Thin wrapper around the OpenAI completion API that translates text into
 * Portuguese. The API key is resolved once, at class-load time, from the
 * OPEN_API_CHATGPT environment variable.
 */
@Service
public class ConsumerChatGPT {

    // API key taken from the environment; never hard-code credentials.
    private static String token = System.getenv("OPEN_API_CHATGPT");

    /**
     * Translates {@code text} to Portuguese via a single completion call.
     *
     * @param text the source text to translate
     * @return the raw text of the first completion choice
     */
    public static String getTranslation(String text) {
        var client = new OpenAiService(token);
        var completionRequest = CompletionRequest.builder()
                .prompt("traduza para o português o texto: " + text)
                .model("gpt-3.5-turbo-instruct")
                .temperature(0.7)
                .maxTokens(1000)
                .build();
        var firstChoice = client.createCompletion(completionRequest).getChoices().get(0);
        return firstChoice.getText();
    }
}
|
[
"com.theokanning.openai.completion.CompletionRequest.builder"
] |
[((464, 702), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((464, 677), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((464, 643), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((464, 610), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((464, 540), 'com.theokanning.openai.completion.CompletionRequest.builder')]
|
package com.lmx.project.until;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.theokanning.openai.OpenAiApi;
import com.theokanning.openai.completion.CompletionRequest;
import com.theokanning.openai.service.OpenAiService;
import okhttp3.OkHttpClient;
import retrofit2.Retrofit;
import java.net.InetSocketAddress;
import java.net.Proxy;
import java.time.Duration;
import java.time.temporal.TemporalUnit;
import static com.theokanning.openai.service.OpenAiService.*;
/**
 * Minimal OpenAI smoke test that routes API traffic through a local HTTP
 * proxy (127.0.0.1:10809) and prints the completion choices to stdout.
 */
public class CH {
    public static void main(String[] args) {
        ObjectMapper mapper = defaultObjectMapper();
        // Route traffic through a local proxy, e.g. where api.openai.com is not
        // directly reachable.
        Proxy proxy = new Proxy(Proxy.Type.HTTP, new InetSocketAddress("127.0.0.1", 10809));
        // SECURITY FIX: the API key was previously hard-coded in source control;
        // it must come from the environment instead.
        String apiKey = System.getenv("OPENAI_API_KEY");
        OkHttpClient client = defaultClient(apiKey, Duration.ofSeconds(5))
                .newBuilder()
                .proxy(proxy)
                .build();
        Retrofit retrofit = defaultRetrofit(client, mapper);
        OpenAiApi api = retrofit.create(OpenAiApi.class);
        OpenAiService service = new OpenAiService(api);
        CompletionRequest completionRequest = CompletionRequest.builder()
                .prompt("Somebody once told me the world is gonna roll me")
                .model("gpt-3")
                .echo(true)
                .build();
        service.createCompletion(completionRequest).getChoices().forEach(System.out::println);
    }
}
|
[
"com.theokanning.openai.completion.CompletionRequest.builder"
] |
[((1123, 1311), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((1123, 1286), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((1123, 1258), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((1123, 1226), 'com.theokanning.openai.completion.CompletionRequest.builder')]
|
package br.com.alura.ecommerce;
import com.knuddels.jtokkit.Encodings;
import com.knuddels.jtokkit.api.ModelType;
import com.theokanning.openai.completion.chat.ChatCompletionRequest;
import com.theokanning.openai.completion.chat.ChatMessage;
import com.theokanning.openai.completion.chat.ChatMessageRole;
import com.theokanning.openai.service.OpenAiService;
import java.nio.file.Files;
import java.nio.file.Path;
import java.time.Duration;
import java.util.Arrays;
/**
 * Reads a CSV of customer purchases and asks GPT to classify each customer's
 * buying profile in three words, choosing a larger-context model when the
 * prompt would not fit the default one.
 */
public class IdentificadorDePerfil {
    public static void main(String[] args) {
        var promptSistema = """
                Identifique o perfil de compra de cada cliente.
                A resposta deve ser:
                Cliente - descreva o perfil do cliente em três palavras
                """;
        var clientes = carregarClientesDoArquivo();
        var quantidadeTokens = contarTokens(clientes);

        // Pick the 16k-context model when the prompt would not leave room for
        // the expected response inside a 4096-token window.
        var modelo = "gpt-3.5-turbo";
        var tamanhoRespostaEsperada = 2048;
        if (quantidadeTokens > 4096 - tamanhoRespostaEsperada) {
            modelo = "gpt-3.5-turbo-16k";
        }
        System.out.println("QTD TOKENS: " + quantidadeTokens);
        System.out.println("Modelo escolhido: " + modelo);

        var request = ChatCompletionRequest
                .builder()
                .model(modelo)
                .maxTokens(tamanhoRespostaEsperada)
                .messages(Arrays.asList(
                        new ChatMessage(
                                ChatMessageRole.SYSTEM.value(),
                                promptSistema),
                        new ChatMessage(
                                ChatMessageRole.SYSTEM.value(),
                                clientes)))
                .build();

        var chave = System.getenv("OPENAI_API_KEY");
        var service = new OpenAiService(chave, Duration.ofSeconds(60));
        System.out.println(
                service
                        .createChatCompletion(request)
                        .getChoices().get(0).getMessage().getContent());
    }

    /** Counts prompt tokens with the GPT-3.5 tokenizer so we can size the model. */
    private static int contarTokens(String prompt) {
        var registry = Encodings.newDefaultEncodingRegistry();
        var enc = registry.getEncodingForModel(ModelType.GPT_3_5_TURBO);
        return enc.countTokens(prompt);
    }

    /**
     * Loads the customer purchase list as the file's raw text.
     *
     * Fix: the previous implementation returned
     * {@code Files.readAllLines(path).toString()}, which wrapped the content in
     * "[...]" and joined lines with ", " — corrupting the prompt and wasting
     * tokens. {@link Files#readString} preserves the file verbatim.
     *
     * @throws RuntimeException wrapping any I/O failure
     */
    private static String carregarClientesDoArquivo() {
        try {
            var path = Path.of("src/main/resources/compras/lista_de_compras_100_clientes.csv");
            return Files.readString(path);
        } catch (Exception e) {
            throw new RuntimeException("Erro ao carregar o arquivo!", e);
        }
    }
}
|
[
"com.theokanning.openai.completion.chat.ChatMessageRole.SYSTEM.value"
] |
[((1511, 1541), 'com.theokanning.openai.completion.chat.ChatMessageRole.SYSTEM.value'), ((1664, 1694), 'com.theokanning.openai.completion.chat.ChatMessageRole.SYSTEM.value'), ((2501, 2536), 'java.nio.file.Files.readAllLines')]
|
package org.mineacademy.cowcannon.model;
import com.google.gson.Gson;
import com.theokanning.openai.completion.chat.ChatCompletionChoice;
import com.theokanning.openai.completion.chat.ChatCompletionRequest;
import com.theokanning.openai.completion.chat.ChatMessage;
import com.theokanning.openai.service.OpenAiService;
import org.apache.commons.lang.WordUtils;
import org.bukkit.Material;
import org.bukkit.entity.Entity;
import org.bukkit.entity.Player;
import org.bukkit.inventory.ItemStack;
import org.bukkit.metadata.FixedMetadataValue;
import org.bukkit.scheduler.BukkitRunnable;
import org.mineacademy.cowcannon.CowCannon;
import org.mineacademy.cowcannon.util.Common;
import java.time.Duration;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
 * A GPT-driven conversation between a Bukkit player and an NPC entity.
 *
 * The model role-plays the NPC and is instructed (in start()) to answer in a
 * JSON object with a "type" ("chat" or "trade") and an "answer"; a "trade"
 * reply also carries player_item/player_amount/npc_item/npc_amount. Pending
 * trades are stashed on the player as "NPCTrade" metadata and executed when
 * the player says "agree".
 */
public class Conversation {

    // NOTE(review): API key is hard-coded — should be loaded from configuration.
    private final static OpenAiService OPEN_AI_SERVICE = new OpenAiService("YOURKEY", Duration.ofSeconds(8));
    private final static Gson GSON = new Gson();

    private final Player player;
    private final Entity npc;     // the NPC entity the player is talking to
    private final String role;    // NPC role name injected into the prompts
    // Full dialogue transcript; re-sent to the model on every turn.
    private final StringBuilder conversation = new StringBuilder();

    public Conversation(Player player, Entity npc, String role) {
        this.player = player;
        this.npc = npc;
        this.role = role;
    }

    /**
     * Seeds the transcript with the system-style instructions (including the
     * JSON reply protocol) and asks the model for the opening line.
     */
    public void start() {
        this.conversation.append("The following is a conversation with an AI who represents a {role} NPC character in Minecraft. " +
                "The AI should limit his knowledge of the world to Minecraft and being a {role} and try not to stray even if asked about something else. " +
                "Return responses in a valid parsable JSON format. The 'type' is the response type, and 'answer' is your answer. Initially, the type is 'chat' before you agree on traded items. " +
                "After you agree on traded items, put a message to the answer still, make sure to keep the player engaged, but now set the type to 'trade' and there will be keys 'player_item' and " +
                "'player_amount' for what the player is giving to you, and 'npc_item' and 'npc_amount' for what you are giving to the player. " +
                "The items must be a valid Bukkit material name for Spigot API 1.17+, and the amounts must be a positive integer of max 64. " +
                "ONLY provide the items after explicitely agreeing to the trade. When the player is simply asking, give him a list of options and keep the type to 'answer'. " +
                "When you receive a message saying 'agree', change the type back to 'chat' and act as if the trade was finished. " +
                "\n\nHuman: Hey!\nAI: ".replace("{role}", this.role));

        this.processQueue("&#f2c773Starting...");
    }

    /**
     * Handles one chat line from the player.
     *
     * @param message the player's chat message
     * @return true to keep the conversation open, false to end it
     */
    public boolean onTalk(String message) {
        // End the conversation if the player walked out of range.
        if (this.npc.getLocation().distance(this.player.getLocation()) > 10) {
            Common.tell(this.player, "&cThe " + this.role + " stopped talking to you because you moved too far away.");

            clearTradeMetadata();
            return false;
        }

        if (message.equalsIgnoreCase("stop") || message.equalsIgnoreCase("exit")) {
            Common.tell(this.player, "&cYou stopped talking to the " + this.role + ".");

            clearTradeMetadata();
            return false;
        }

        // A pending trade offer lives in the "NPCTrade" metadata; "agree" executes it.
        if (this.player.hasMetadata("NPCTrade")) {
            if (message.equalsIgnoreCase("agree")) {
                final Map<String, Object> tradeMap = (Map<String, Object>) this.player.getMetadata("NPCTrade").get(0).value();

                // Amounts go through Double.parseDouble because the stored values may
                // stringify as e.g. "3.0".
                final Material playerItem = Material.valueOf(tradeMap.get("player_item").toString().toUpperCase());
                int playerAmount = (int) Double.parseDouble(tradeMap.get("player_amount").toString());
                final Material npcItem = Material.valueOf(tradeMap.get("npc_item").toString().toUpperCase());
                final int npcAmount = (int) Double.parseDouble(tradeMap.get("npc_amount").toString());

                if (!this.hasEnoughItems(playerItem, playerAmount)) {
                    Common.tell(this.player, "&cYou don't have enough " + playerItem + " to sell " + playerAmount + " of them.");

                    return true;
                }

                if (!this.hasFreeSpace(npcItem, npcAmount)) {
                    Common.tell(this.player, "&cYou don't have enough space in your inventory to buy " + npcAmount + " " + npcItem + ".");

                    return true;
                }

                // Drain the sold items from the player's inventory, stack by stack.
                for (int i = 0; i < this.player.getInventory().getSize(); i++) {
                    if (this.player.getInventory().getItem(i) != null && this.player.getInventory().getItem(i).getType() == playerItem) {
                        final int amount = this.player.getInventory().getItem(i).getAmount();

                        if (amount >= playerAmount) {
                            this.player.getInventory().getItem(i).setAmount(amount - playerAmount);

                            break;
                        } else {
                            this.player.getInventory().getItem(i).setAmount(0);

                            playerAmount -= amount;
                        }
                    }
                }

                this.player.getInventory().addItem(new ItemStack(npcItem, npcAmount));

                clearTradeMetadata();

                Common.tell(this.player, "&8[&6!&8] &7You agreed to the trade. The " + this.role + " will now give you " + npcAmount + " " + npcItem + " and you will give him " + playerAmount + " " + playerItem + ".");
                Common.tell(this.player, "&8[&6!&8] &7This trade was finished.");

                return false;
            }
        }

        // Otherwise forward the line to the model and keep the dialogue open.
        this.conversation.append("\n\nHuman: " + message + "\nAI: ");
        this.processQueue("&#f2c773Thinking...");

        return true;
    }

    // Returns true when the player's inventory holds at least `amount` of `item`.
    private boolean hasEnoughItems(Material item, int amount) {
        int count = 0;

        for (int i = 0; i < this.player.getInventory().getSize(); i++)
            if (this.player.getInventory().getItem(i) != null && this.player.getInventory().getItem(i).getType() == item)
                count += this.player.getInventory().getItem(i).getAmount();

        return count >= amount;
    }

    // Returns true when the inventory can absorb `npcAmount` of `npcItem`.
    // NOTE(review): assumes a max stack size of 64 for every material — items
    // with smaller stacks would over-report free space.
    private boolean hasFreeSpace(Material npcItem, int npcAmount) {
        int freeSpace = 0;

        for (int i = 0; i < this.player.getInventory().getSize(); i++) {
            if (this.player.getInventory().getItem(i) == null) {
                freeSpace += 64;
            } else if (this.player.getInventory().getItem(i).getType() == npcItem) {
                freeSpace += 64 - this.player.getInventory().getItem(i).getAmount();
            }
        }

        return freeSpace >= npcAmount;
    }

    // Clears any pending trade offer stored on the player.
    private void clearTradeMetadata() {
        this.player.removeMetadata("NPCTrade", CowCannon.getInstance());
    }

    /**
     * Sends the whole transcript to OpenAI on an async task, shows the answer
     * to the player and, for "trade" replies, stashes the offer as metadata.
     */
    private void processQueue(String actionBarMessage) {
        Common.actionBar(this.player, actionBarMessage);

        new BukkitRunnable() {

            @Override
            public void run() {
                System.out.println("----------------------------------------");
                System.out.println("Entire conversation so far:");
                System.out.println(conversation);
                System.out.println("----------------------------------------");

                final ChatCompletionRequest request = ChatCompletionRequest.builder()
                        .model("gpt-4-1106-preview")
                        .maxTokens(4096)
                        .temperature(0.50)
                        .topP(1.0)
                        .presencePenalty(0.6)
                        .frequencyPenalty(0.0)
                        .stop(Arrays.asList("Human:", "AI:"))
                        .messages(Arrays.asList(
                                new ChatMessage("system", "You are a pirate " + role + " in Minecraft."),
                                new ChatMessage("user", conversation.toString())))
                        .build();

                final List<ChatCompletionChoice> choices = OPEN_AI_SERVICE.createChatCompletion(request).getChoices();

                if (choices.isEmpty()) {
                    Common.tell(player, "&cThe AI failed to respond. Please try again.");

                    return;
                }

                // assumes the reply is a fenced ```json block: the first and last
                // lines (the fences) are stripped before parsing — TODO confirm;
                // a fence-less reply would lose real content here.
                String[] rawJson = choices.get(0).getMessage().getContent().trim().split("\n");
                final String[] json = new String[rawJson.length - 2];
                System.arraycopy(rawJson, 1, json, 0, json.length);

                final HashMap<String, Object> map = GSON.fromJson(String.join("\n", json), HashMap.class);
                final String answer = map.get("answer").toString();
                final String type = map.get("type").toString();

                conversation.append(answer);

                Common.tell(player, "&8[&6" + npc.getName() + "&8] &7" + answer);

                if (type.equals("trade")) {
                    // RAW_FISH
                    final Material playerItem = Material.valueOf(map.get("player_item").toString().toUpperCase());
                    final int playerAmount = (int) Double.parseDouble(map.get("player_amount").toString());
                    final Material npcItem = Material.valueOf(map.get("npc_item").toString().toUpperCase());
                    final int npcAmount = (int) Double.parseDouble(map.get("npc_amount").toString());

                    // RAW FISH > Raw Fish
                    final String playerItemFormatted = WordUtils.capitalizeFully(playerItem.toString().replace("_", " "));
                    final String npcItemFormatted = WordUtils.capitalizeFully(npcItem.toString().replace("_", " "));

                    Common.tell(player, "&8[&6!&8] &7Offer: To trade " + playerAmount + " " + playerItemFormatted + " for " + npcAmount + " " + npcItemFormatted + ".");
                    Common.tell(player, "&8[&6!&8] &7Type '&aagree&7' or '&cstop&7', or simply keep negotating.");

                    // Stash the offer so onTalk() can execute it on "agree".
                    final Map<String, Object> tradeMap = new HashMap<>();

                    tradeMap.put("player_amount", playerAmount);
                    tradeMap.put("npc_amount", npcAmount);
                    tradeMap.put("player_item", playerItem);
                    tradeMap.put("npc_item", npcItem);

                    player.setMetadata("NPCTrade", new FixedMetadataValue(CowCannon.getInstance(), tradeMap));
                }
            }
        }.runTaskAsynchronously(CowCannon.getInstance());
    }
}
|
[
"com.theokanning.openai.completion.chat.ChatCompletionRequest.builder"
] |
[((6449, 6868), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((6449, 6853), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((6449, 6681), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((6449, 6637), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((6449, 6608), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((6449, 6580), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((6449, 6563), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((6449, 6538), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((6449, 6515), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder')]
|
package com.example.kosciuszkon.service;
import com.example.kosciuszkon.connector.RecomendationConnector;
import com.theokanning.openai.completion.chat.ChatCompletionChoice;
import com.theokanning.openai.completion.chat.ChatCompletionRequest;
import com.theokanning.openai.completion.chat.ChatMessage;
import com.theokanning.openai.completion.chat.ChatMessageRole;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Component;
import java.util.*;
@Component
public class RecomenadationService {
private final String authToken;
public RecomenadationService(@Value("${api.token}") String authToken) {
this.authToken = authToken;
}
public List<String> send(List<String> feautes){
var connectionService = RecomendationConnector.connect(authToken);
var rs = connectionService.createChatCompletion(
buildChatRq(buildRecommendationRequest(feautes)));
var recommendations = Optional.of(rs.getChoices().get(0))
.map(ChatCompletionChoice::getMessage)
.map(ChatMessage::getContent)
.map(it -> it.replaceAll("\n", "").replace(".", "").split("[0-9]"))
.map(Arrays::asList)
.orElse(Collections.emptyList());
System.out.println(rs.getChoices().get(0).getMessage().getContent());
return parseResult(recommendations);
}
private ChatCompletionRequest buildChatRq(String message){
var cm = new ChatMessage();
cm.setContent(message);
cm.setRole(ChatMessageRole.USER.value());
return ChatCompletionRequest.builder()
.model("gpt-3.5-turbo-0301")
.messages(List.of(cm))
.build();
}
private String buildRecommendationRequest(List<String> features){
var sb = new StringBuilder();
sb.append("list hobbies for someone who is into ");
features.forEach(it -> sb.append(it).append(","));
System.out.println(sb.toString());
return sb.toString();
}
private List<String> parseResult(List<String> recommendations){
List<String> result = new ArrayList<>();
recommendations.forEach(it -> {
if(!it.isBlank()){
result.add(it.trim());
}
});
return result;
}
}
|
[
"com.theokanning.openai.completion.chat.ChatMessageRole.USER.value",
"com.theokanning.openai.completion.chat.ChatCompletionRequest.builder"
] |
[((1571, 1599), 'com.theokanning.openai.completion.chat.ChatMessageRole.USER.value'), ((1617, 1757), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((1617, 1732), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((1617, 1693), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder')]
|
package com.denysenko.messageprocessor.services;
import com.theokanning.openai.completion.chat.ChatCompletionRequest;
import com.theokanning.openai.completion.chat.ChatMessage;
import com.theokanning.openai.service.OpenAiService;
import lombok.AllArgsConstructor;
import org.springframework.stereotype.Service;
import java.util.List;
/**
 * Facade over the OpenAI chat-completion client: sends a single user message
 * to gpt-3.5-turbo and returns the assistant's reply text.
 */
@Service
@AllArgsConstructor
public class OpenAIService {

    private final OpenAiService openAiService;

    private static final String GPT_MODEL = "gpt-3.5-turbo";

    /**
     * Executes a one-shot text request against the chat API.
     *
     * @param text the user's message content
     * @return the content of the first (and only, n=1) completion choice
     */
    public String executeTextRequest(String text) {
        ChatMessage userMessage = new ChatMessage("user", text);
        var chatRequest = ChatCompletionRequest.builder()
                .model(GPT_MODEL)
                .temperature(0.8)
                .n(1)
                .messages(List.of(userMessage))
                .build();
        var choices = openAiService.createChatCompletion(chatRequest).getChoices();
        return choices.get(0).getMessage().getContent();
    }
}
|
[
"com.theokanning.openai.completion.chat.ChatCompletionRequest.builder"
] |
[((579, 833), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((579, 808), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((579, 700), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((579, 678), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((579, 644), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder')]
|
package org.example.Services;
import com.theokanning.openai.completion.chat.ChatCompletionRequest;
import com.theokanning.openai.completion.chat.ChatMessage;
import com.theokanning.openai.image.CreateImageRequest;
import com.theokanning.openai.service.OpenAiService;
import lombok.AccessLevel;
import lombok.AllArgsConstructor;
import okhttp3.*;
import org.example.Entities.GptChatMessage;
import org.example.Entities.RandomImageResponse;
import org.json.JSONObject;
import java.io.IOException;
import java.time.Duration;
import java.util.ArrayList;
import java.util.List;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
@AllArgsConstructor(access = AccessLevel.PRIVATE)
public class AiService {
public static ArrayList<String> sendChatRequest(GptChatMessage message, String apiKey) {
message.setMessageContent(validateMessage(message.getMessageContent()));
List<ChatMessage> messageList = new ArrayList<ChatMessage>();
for (var gptChatMessage : GptChatMessageService.sortMessages(MySqlService.getGptDialogMessages(message.getChannelId()))){
messageList.add(new ChatMessage(gptChatMessage.getMessageRole(), gptChatMessage.getMessageContent()));
}
messageList.add(new ChatMessage("user", message.getMessageContent()));
OpenAiService service = new OpenAiService(apiKey, Duration.ofSeconds(30));
ChatCompletionRequest completionRequest = ChatCompletionRequest.builder()
.model("gpt-3.5-turbo")
.messages(messageList)
.build();
String gptResponse = service.createChatCompletion(completionRequest).getChoices().get(0).getMessage().getContent();
gptResponse = validateMessage(gptResponse);
ArrayList<String> gptResponses = new ArrayList<String>();
if (gptResponse.length() >= 1900){
for (int i = 0; i < Math.floor((double) gptResponse.length() / 1900); i++){
gptResponses.add(gptResponse.substring(i * 1900, (i+1) * 1900));
}
gptResponses.add(gptResponse.substring((int)Math.floor((double) gptResponse.length() / 1900)*1900));
}
else gptResponses.add(gptResponse);
MySqlService.insertGptChatMessage(message);
MySqlService.insertGptChatMessage(new GptChatMessage(message.getChannelId(), "assistant", gptResponse, null));
return gptResponses;
}
public static String sendImageRequest(String prompt, String apiKey) {
OpenAiService service = new OpenAiService(apiKey);
CreateImageRequest imageRequest = CreateImageRequest.builder()
.prompt(prompt)
.n(1)
.size("512x512")
.build();
return service.createImage(imageRequest).getData().get(0).getUrl();
}
public static RandomImageResponse generateRandomImageRequest(String apiKey) {
//String imagePrompt = sendChatRequest("generate image prompt for dall-e ai and give me only the prompt itself without any other words and quotes", apiKey);
//return new RandomImageResponse("Generated image with prompt: " + imagePrompt, sendImageRequest(imagePrompt, apiKey));
return null;
}
private static String validateMessage(String message){
message = message.replace("'", "\\'");
message = message.replace("`", "\\'");
message = message.replace("\\'\\'\\'", "```");
return message;
}
}
|
[
"com.theokanning.openai.image.CreateImageRequest.builder",
"com.theokanning.openai.completion.chat.ChatCompletionRequest.builder"
] |
[((1427, 1562), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((1427, 1537), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((1427, 1498), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((2583, 2723), 'com.theokanning.openai.image.CreateImageRequest.builder'), ((2583, 2698), 'com.theokanning.openai.image.CreateImageRequest.builder'), ((2583, 2665), 'com.theokanning.openai.image.CreateImageRequest.builder'), ((2583, 2643), 'com.theokanning.openai.image.CreateImageRequest.builder')]
|
package ch.epfl.culturequest.backend.artprocessing.apis;
import android.util.Pair;
import com.theokanning.openai.completion.chat.ChatCompletionRequest;
import com.theokanning.openai.completion.chat.ChatMessage;
import com.theokanning.openai.service.OpenAiService;
import org.json.JSONObject;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.CompletionException;
import ch.epfl.culturequest.backend.artprocessing.processingobjects.ArtRecognition;
import ch.epfl.culturequest.backend.artprocessing.processingobjects.BasicArtDescription;
import ch.epfl.culturequest.backend.exceptions.OpenAiFailedException;
public class OpenAIDescriptionApi {
private final String missingDataPrompt = "Given the input \"%s (%s)\", fill following fields: %s. Return your response as a JSON object.";
private final String scorePrompt = "On a scale from 1 to 100 (ceil round to 10), evaluate the popularity of \"%s (%s)\". Fill the field \"artPopularity\", as JSON.";
private ArrayList<String> missingDataNames;
private OpenAiService service;
enum ResponseDataType {
MISSING_DATA,
SCORE
}
enum FieldType {
STRING,
INTEGER
}
public OpenAIDescriptionApi(OpenAiService service) {
this.service = service;
}
// make a function that returns a completable future of an array containing the artistName, yearOfCreation, locationCity, locationCountry, given the artRecognition object
public CompletableFuture<Map<String, String>> getMissingData(ArtRecognition recognizedArchitecture, ArrayList<String> missingDataNames) {
BasicArtDescription.ArtType artType = WikipediaDescriptionApi.getArtType(recognizedArchitecture);
this.missingDataNames = getPromptReadyMissingFieldsList(missingDataNames, artType);
return getJsonApiResponse(recognizedArchitecture, ResponseDataType.MISSING_DATA).thenApply(jsonData -> {
Map<String, Object> missingDataMap = parseApiResponse(jsonData);
Map<String, String> missingDataStringMap = new HashMap<>();
// turn each object into a string and put it in the map
for (Map.Entry<String, Object> entry : missingDataMap.entrySet()) {
String stringVal = entry.getValue() == null ? null : entry.getValue().toString();
missingDataStringMap.put(entry.getKey(), stringVal);
}
return missingDataStringMap;
});
}
public CompletableFuture<Integer> getScore(ArtRecognition recognizedArchitecture) {
return getJsonApiResponse(recognizedArchitecture, ResponseDataType.SCORE).
thenApply( jsonResponse -> (Integer) parseApiResponse(jsonResponse).get("score"));
}
private CompletableFuture<String> getJsonApiResponse(ArtRecognition recognizedArchitecture, ResponseDataType dataType) {
String prompt;
switch (dataType) {
case MISSING_DATA:
String promptReadyMissingData = String.join(", ", missingDataNames);
prompt = String.format(missingDataPrompt, recognizedArchitecture.getArtName(), recognizedArchitecture.getAdditionalInfo(), promptReadyMissingData);
break;
case SCORE:
prompt = String.format(scorePrompt, recognizedArchitecture.getArtName(), recognizedArchitecture.getAdditionalInfo());
break;
default:
throw new IllegalArgumentException("Invalid response data type");
}
ChatMessage message = new ChatMessage("user", prompt);
ChatCompletionRequest completionRequest = ChatCompletionRequest.builder()
.messages(List.of(message))
.model("gpt-3.5-turbo")
.n(1)
.temperature(0.0)
.build();
return CompletableFuture.supplyAsync(() -> service.createChatCompletion(completionRequest))
.thenApply(result -> result.getChoices().get(0).getMessage().getContent())
.exceptionally(e -> {
throw new CompletionException(new OpenAiFailedException("OpenAI failed to respond"));
});
}
private Map<String, Object> parseApiResponse(String jsonData) {
Map<String, Object> parsedData = new HashMap<>();
try {
JSONObject jsonObject = new JSONObject(extractJson(jsonData));
// iterate over the keys in the json object and add them to the dictionary
jsonObject.keys() // returns an iterator
.forEachRemaining(key -> {
Pair<String, FieldType> normalizedField = normalizeFieldAndGetType(key);
String normalizedKey = normalizedField.first;
FieldType fieldType = normalizedField.second;
switch (fieldType) {
case STRING:
String parsedStringVal = jsonObject.optString(key) == "null" ? null : jsonObject.optString(key);
parsedData.put(normalizedKey, parsedStringVal);
break;
case INTEGER:
parsedData.put(normalizedKey, jsonObject.optInt(key, 50));
break;
}
});
} catch (Exception ex) {
throw new CompletionException(new OpenAiFailedException("OpenAI failed to provide JSON data"));
}
return parsedData;
}
String extractJson(String s) {
return s.substring(s.indexOf("{"), s.lastIndexOf("}") + 1);
}
// Depending on the art type, we might ask different field names referring to the same thing (e.g. designer vs artist) so need normalization
// We would apply this normalization to the Open AI output.
private Pair<String, FieldType> normalizeFieldAndGetType(String jsonKey) {
switch(jsonKey) {
case "designer" :
case "artistName":
return new Pair<>("artist", FieldType.STRING);
case "yearOfCreation":
case "yearOfInauguration":
return new Pair<>("year", FieldType.STRING);
case "locationCity":
case "museumCity":
return new Pair<>("city", FieldType.STRING);
case "locationCountry":
case "museumCountry":
return new Pair<>("country", FieldType.STRING);
case "description":
return new Pair<>("summary", FieldType.STRING);
case "currentMuseum":
return new Pair<>("museum", FieldType.STRING);
case "artPopularity":
return new Pair<>("score", FieldType.INTEGER);
default:
throw new CompletionException(new OpenAiFailedException("Unexpected missing data field name"));
}
}
// Given a list of missing class attribute (null field), return a new list where each attribute/field name is mapped to the actual field name that would be included in the OpenAI prompt
// e.g. "artist" -> "artistName" if the art type is a painting or sculpture and "artist" -> "designer" if the art type is an architecture
private ArrayList<String> getPromptReadyMissingFieldsList(ArrayList<String> missingFields, BasicArtDescription.ArtType artType){
ArrayList<String> promptReadyMissingFields = new ArrayList<>();
for(String missingField : missingFields){
promptReadyMissingFields.add(getOptimalPromptFieldName(missingField, artType));
}
return promptReadyMissingFields;
}
// sub-component of getPromptReadyMissingFieldsList that individually deals with each element of the list
private String getOptimalPromptFieldName(String missingFieldName, BasicArtDescription.ArtType artType){
String promptFieldName = "";
switch (missingFieldName) {
case "artist":
if(isPaintingOrSculpture(artType)){
promptFieldName = "artistName";
}
else {
promptFieldName = "designer";
}
break;
case "year":
if(isPaintingOrSculpture(artType)){
promptFieldName = "yearOfCreation";
}
else {
promptFieldName = "yearOfInauguration";
}
break;
case "city":
if(isPaintingOrSculpture(artType)){
promptFieldName = "museumCity";
}
else {
promptFieldName = "locationCity";
}
break;
case "country":
if(isPaintingOrSculpture(artType)){
promptFieldName = "museumCountry";
}
else {
promptFieldName = "locationCountry";
}
break;
case "summary":
promptFieldName = "description (4 to 6 sentences)";
break;
default:
promptFieldName = "";
}
return promptFieldName;
}
private Boolean isPaintingOrSculpture(BasicArtDescription.ArtType artType){
return artType == BasicArtDescription.ArtType.PAINTING || artType == BasicArtDescription.ArtType.SCULPTURE;
}
}
|
[
"com.theokanning.openai.completion.chat.ChatCompletionRequest.builder"
] |
[((3738, 3934), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((3738, 3909), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((3738, 3875), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((3738, 3853), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((3738, 3813), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((3952, 4290), 'java.util.concurrent.CompletableFuture.supplyAsync'), ((3952, 4127), 'java.util.concurrent.CompletableFuture.supplyAsync')]
|
package pl.amitec.jtry.ai.gpt;
import com.theokanning.openai.completion.chat.ChatCompletionRequest;
import com.theokanning.openai.completion.chat.ChatMessage;
import com.theokanning.openai.completion.chat.ChatMessageRole;
import com.theokanning.openai.service.OpenAiService;
import io.github.cdimascio.dotenv.Dotenv;
import java.util.List;
/**
* Simple chat completion example - translates English text to French.
*/
public class SimpleChatCompletion {
public static void main(String[] args) {
// Load environment variables from .env file
Dotenv dotenv = Dotenv.load();
OpenAiService service = new OpenAiService(dotenv.get("OPENAI_API_KEY"));
String englishText = "Hello, how are you?";
List<ChatMessage> messages = List.of(
new ChatMessage(ChatMessageRole.SYSTEM.value(),
"You are a helpful assistant."),
new ChatMessage(ChatMessageRole.USER.value(),
"Translate the following English text to French: \"" + englishText + "\"")
);
ChatCompletionRequest chatCompletionRequest = ChatCompletionRequest
.builder()
.model("gpt-3.5-turbo")
.messages(messages)
.maxTokens(256).build();
//service.createChatCompletion(chatCompletionRequest).getChoices().forEach(System.out::println);
ChatMessage responseMessage = service.createChatCompletion(chatCompletionRequest).getChoices().get(0).getMessage();
// Expected output: "Bonjour, comment ça va?"
System.out.println(responseMessage.getContent());
}
}
|
[
"com.theokanning.openai.completion.chat.ChatMessageRole.SYSTEM.value",
"com.theokanning.openai.completion.chat.ChatMessageRole.USER.value"
] |
[((809, 839), 'com.theokanning.openai.completion.chat.ChatMessageRole.SYSTEM.value'), ((930, 958), 'com.theokanning.openai.completion.chat.ChatMessageRole.USER.value')]
|
package com.ScreenSoundMusicas.ScreenSoundMusicas.service;
import com.theokanning.openai.completion.CompletionRequest;
import com.theokanning.openai.service.OpenAiService;
public class ConsultaChatGPT {
static GetKeyFromProperties getKeyFromProperties = new GetKeyFromProperties();
public static String pesquisarSobre(String texto) {
String apiKey = getKeyFromProperties.obterKeyValue("CHATGPT_APIKEY");
OpenAiService service = new OpenAiService(apiKey);
CompletionRequest requisicao = CompletionRequest.builder()
.model("text-davinci-003")
.prompt("Escreva 2 linhas sobre o artista: " + texto)
.maxTokens(1000)
.temperature(0.7)
.build();
var resposta = service.createCompletion(requisicao);
return resposta.getChoices().get(0).getText();
}
}
|
[
"com.theokanning.openai.completion.CompletionRequest.builder"
] |
[((524, 756), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((524, 731), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((524, 697), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((524, 664), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((524, 594), 'com.theokanning.openai.completion.CompletionRequest.builder')]
|
package com.easyen.easyenglish.serviceimpl;
import com.easyen.easyenglish.entity.correctionFeedback;
import com.easyen.easyenglish.entity.essay;
import com.easyen.easyenglish.mapper.correctionFeedbackMapper;
import com.easyen.easyenglish.service.correctionFeedbackService;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.theokanning.openai.OpenAiApi;
import com.theokanning.openai.completion.CompletionChoice;
import com.theokanning.openai.completion.CompletionRequest;
import com.theokanning.openai.service.OpenAiService;
import okhttp3.OkHttpClient;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
import retrofit2.Retrofit;
import java.net.InetSocketAddress;
import java.net.Proxy;
import java.time.Duration;
import java.util.List;
import static com.theokanning.openai.service.OpenAiService.*;
@Service
public class correctionFeedbackServiceImpl implements correctionFeedbackService {
@Autowired
correctionFeedbackMapper correctionFeedbackMapper;
@Override
public void addFeedback(correctionFeedback correctionFeedback){
try {
correctionFeedbackMapper.addFeedback(correctionFeedback);
} catch (Exception e) {
e.printStackTrace();
throw new RuntimeException("添加Feedback失败:" + e.getMessage());
}
}
@Override
@Transactional
public void deleteFeedback(Integer feedbackId) {
try {
correctionFeedbackMapper.deleteFeedback(feedbackId);
} catch (Exception e) {
e.printStackTrace();
throw new RuntimeException("删除Feedback失败:" + e.getMessage());
}
}
@Override
@Transactional
public void updateFeedback(correctionFeedback correctionFeedback) {
correctionFeedbackMapper.updateFeedback(correctionFeedback);
}
@Override
public correctionFeedback findByID(Integer feedbackId){return correctionFeedbackMapper.findByID(feedbackId);}
@Override
public List<correctionFeedback> findByEssay(Integer essayID){
return correctionFeedbackMapper.findByEssay(essayID);
}
@Value("${gpt.api-key}")
String token;
@Value("${gpt.model}")
String model;
@Value("${gpt.temperature}")
Double t;
@Value("${gpt.maxTokens}")
Integer maxt;
@Value("${gpt.timeout}")
Duration timeout;
@Value("${proxy.host}")
String host;
@Value("${proxy.port}")
Integer port;
@Override
public String generateSuggestion(String requirements, String essay_title, String essay_content) {
// 这里可以和Score复用但是好麻烦我copy了,可以用if优化一下
// 下方输入api key
// String token = token; 原来没有挂局部代理的方法
// OpenAiService service = new OpenAiService(token);
// 使用局部代理
ObjectMapper mapper = defaultObjectMapper();
Proxy proxy = new Proxy(Proxy.Type.HTTP, new InetSocketAddress(host, port));
OkHttpClient client = defaultClient(token, timeout)
.newBuilder()
.proxy(proxy)
.build();
Retrofit retrofit = defaultRetrofit(client, mapper);
OpenAiApi api = retrofit.create(OpenAiApi.class);
OpenAiService service = new OpenAiService(api);
// 构建ChatGPT请求
CompletionRequest completionRequest = CompletionRequest.builder()
.model("text-davinci-003")
.prompt("请你为我的作文返回批改意见。我希望你从这几个要求入手批改:"+requirements+"。作文题目要求是:"+essay_title+"我的作文是:"+essay_content)
.temperature(t)
.maxTokens(maxt)
.topP(1D)
.frequencyPenalty(0D)
.presencePenalty(0D)
.build();
CompletionChoice choice = service.createCompletion(completionRequest).getChoices().get(0);
String generatedText = choice.getText();
return generatedText;
}
@Override
public String generateScore(String requirements, String originEssay) {
// 下方输入api key
String token = "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";
OpenAiService service = new OpenAiService(token);
// 构建ChatGPT请求
CompletionRequest completionRequest = CompletionRequest.builder()
.model("text-davinci-003")
.prompt("请注意!满分是一百分,你只能回答一个数字。我的批改点是:"+requirements+"。我的作文是:"+originEssay)
.temperature(0.5)
.maxTokens(2048)
.topP(1D)
.frequencyPenalty(0D)
.presencePenalty(0D)
.build();
CompletionChoice choice = service.createCompletion(completionRequest).getChoices().get(0);
String generatedText = choice.getText();
return generatedText;
}
@Override
public String generateSuggestion_new(essay essay_) {
// essay essay = essayService.findByID(essay_id);
ObjectMapper mapper = defaultObjectMapper();
Proxy proxy = new Proxy(Proxy.Type.HTTP, new InetSocketAddress(host, port));
OkHttpClient client = defaultClient(token, timeout)
.newBuilder()
.proxy(proxy)
.build();
Retrofit retrofit = defaultRetrofit(client, mapper);
OpenAiApi api = retrofit.create(OpenAiApi.class);
OpenAiService service = new OpenAiService(api);
// 构建ChatGPT请求
CompletionRequest completionRequest = CompletionRequest.builder()
.model("text-davinci-003")
.prompt("请你为我的作文返回批改意见。我希望你从这几个要求入手批改:"+essay_.getEssay_requirements()+"。作文题目要求是:"+essay_.getEssay_title()+"我的作文是:"+essay_.getEssay_content())
.temperature(t)
.maxTokens(maxt)
.topP(1D)
.frequencyPenalty(0D)
.presencePenalty(0D)
.build();
CompletionChoice choice = service.createCompletion(completionRequest).getChoices().get(0);
String generatedText = choice.getText();
return generatedText;
}
}
|
[
"com.theokanning.openai.completion.CompletionRequest.builder"
] |
[((3548, 4014), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((3548, 3989), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((3548, 3952), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((3548, 3914), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((3548, 3888), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((3548, 3855), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((3548, 3823), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((3548, 3618), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((4535, 4959), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((4535, 4934), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((4535, 4897), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((4535, 4859), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((4535, 4833), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((4535, 4800), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((4535, 4766), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((4535, 4605), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((5810, 6318), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((5810, 6293), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((5810, 6256), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((5810, 6218), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((5810, 6192), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((5810, 6159), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((5810, 6127), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((5810, 5880), 'com.theokanning.openai.completion.CompletionRequest.builder')]
|
package net.kanade1825.litematica.chatgptforminecraft;
import com.theokanning.openai.completion.chat.ChatCompletionRequest;
import com.theokanning.openai.completion.chat.ChatMessage;
import net.citizensnpcs.api.event.NPCRightClickEvent;
import net.citizensnpcs.api.npc.NPC;
import org.bukkit.Bukkit;
import org.bukkit.entity.Player;
import org.bukkit.event.EventHandler;
import org.bukkit.event.Listener;
import org.json.simple.JSONArray;
import org.json.simple.JSONObject;
import org.json.simple.parser.JSONParser;
import org.json.simple.parser.ParseException;
import java.io.*;
import java.nio.charset.StandardCharsets;
import java.util.*;
public class ChatGPTTalkNPC implements Listener {
private final ChatGPTForMinecraft chatGptForMinecraft;
public ChatGPTTalkNPC(ChatGPTForMinecraft chatGptForMinecraft) {
this.chatGptForMinecraft = chatGptForMinecraft;
}
@EventHandler
public void onNPCRightClick(NPCRightClickEvent event) {
Bukkit.getScheduler().runTaskAsynchronously(chatGptForMinecraft, () -> {
try {
JSONParser parser = new JSONParser();
Player player = event.getClicker();
NPC npc = event.getNPC();
String npcName = npc.getName();
String[] validNames = {"Ellis", "Marshier", "Mirai", "Raisers", "Rasvaan", "Rina", "Wagner"};
boolean isValidName = Arrays.asList(validNames).contains(npcName);
if (!isValidName) {
return;
}
String jsonFilePath = "plugins/ChatGPTForMinecraft/TalkData/" + npcName + "TalkData.json";
try {
// JSONファイルをパースする
JSONParser jsonParser = new JSONParser();
InputStreamReader fileReader = new InputStreamReader(new FileInputStream(jsonFilePath), StandardCharsets.UTF_8);
Object obj = jsonParser.parse(fileReader);
JSONArray jsonArray = (JSONArray) obj;
// 'column2'の値を格納するリスト
List<String> column2Values = new ArrayList<>();
// JSONオブジェクトから'column2'の値を抽出する
for (Object item : jsonArray) {
JSONObject jsonObject = (JSONObject) item;
String column2Value = (String) jsonObject.get("column2");
column2Values.add(column2Value);
}
// 'column2'の値が一つ以上ある場合
if (!column2Values.isEmpty()) {
// ランダムに一つの'column2'の値を選び出す
Random random = new Random();
String randomColumn2Value = column2Values.get(random.nextInt(column2Values.size()));
player.sendMessage(randomColumn2Value);
}
} catch (IOException | ParseException e) {
e.printStackTrace();
}
File file = new File("plugins/ChatGPTForMinecraft/CharacterData/" + npcName + ".json");
if (!file.exists() || file.isDirectory()) {
Bukkit.getLogger().info(npcName + "というキャラクターの構成ファイルは見つかりませんでした。" +
"\nもし貴方が構成ファイルを持っていても少なくともこのシステムが想定している検知場所にはありません。");
return;
}
InputStreamReader inputStreamReader = new InputStreamReader(new FileInputStream("plugins/ChatGPTForMinecraft/CharacterData/" + npcName + ".json"), StandardCharsets.UTF_8);
JSONObject jsonObject = (JSONObject) parser.parse(inputStreamReader);
List<ChatMessage> chatMessages = new LinkedList<>();
chatMessages.add(new ChatMessage("user", (String) jsonObject.get(npcName)));
chatMessages.add(new ChatMessage("user", "こんにちは!"));
final var completionRequest = ChatCompletionRequest.builder()
.model("gpt-4")
.messages(chatMessages)
.build();
String answer = String.valueOf(chatGptForMinecraft.getService().createChatCompletion(completionRequest).getChoices().get(0).getMessage().getContent());
//player.sendMessage(answer);
try {
// JSONファイルをパースする
JSONParser jsonParser = new JSONParser();
InputStreamReader fileReader = new InputStreamReader(new FileInputStream(jsonFilePath), StandardCharsets.UTF_8);
Object obj = jsonParser.parse(fileReader);
JSONArray jsonArray = (JSONArray) obj;
// 最後の要素(JSONオブジェクト)を取得し、その'id'を取得する
JSONObject lastJsonObject = (JSONObject) jsonArray.get(jsonArray.size() - 1);
long lastId = (long) lastJsonObject.get("id");
// 新しい要素を作成する
JSONObject newJsonObject = new JSONObject();
newJsonObject.put("id", lastId + 1); // 管理番号(連番)を設定
newJsonObject.put("column1", npcName);
newJsonObject.put("column2", answer);
// 新しい要素を配列に追加する
jsonArray.add(newJsonObject);
// 更新されたデータを同じファイルに書き戻す
// 更新されたデータを同じファイルに書き戻す
try (OutputStreamWriter outputStreamWriter = new OutputStreamWriter(new FileOutputStream(jsonFilePath), StandardCharsets.UTF_8)) {
outputStreamWriter.write(jsonArray.toJSONString());
outputStreamWriter.flush();
} catch (IOException e) {
e.printStackTrace();
}
} catch (IOException | ParseException e) {
e.printStackTrace();
}
} catch (IOException | ParseException e) {
throw new RuntimeException(e);
}
});
}
}
|
[
"com.theokanning.openai.completion.chat.ChatCompletionRequest.builder"
] |
[((974, 6506), 'org.bukkit.Bukkit.getScheduler'), ((3300, 3600), 'org.bukkit.Bukkit.getLogger'), ((4214, 4366), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((4214, 4333), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((4214, 4285), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder')]
|
package com.hkh.ai.chain.llm.capabilities.generation.audio.openai;
import cn.hutool.core.util.StrUtil;
import com.hkh.ai.chain.llm.OpenAiServiceProxy;
import com.hkh.ai.chain.llm.capabilities.generation.audio.AudioChatService;
import com.hkh.ai.domain.Conversation;
import com.hkh.ai.domain.CustomChatMessage;
import com.hkh.ai.domain.MediaFile;
import com.hkh.ai.domain.SysUser;
import com.hkh.ai.service.ConversationService;
import com.hkh.ai.service.MediaFileService;
import com.knuddels.jtokkit.Encodings;
import com.knuddels.jtokkit.api.Encoding;
import com.knuddels.jtokkit.api.EncodingRegistry;
import com.knuddels.jtokkit.api.EncodingType;
import com.theokanning.openai.audio.CreateSpeechRequest;
import com.theokanning.openai.audio.CreateTranscriptionRequest;
import com.theokanning.openai.audio.TranscriptionResult;
import com.theokanning.openai.completion.chat.ChatCompletionRequest;
import com.theokanning.openai.completion.chat.ChatMessage;
import com.theokanning.openai.completion.chat.ChatMessageRole;
import com.theokanning.openai.service.OpenAiService;
import lombok.AllArgsConstructor;
import lombok.Data;
import lombok.extern.slf4j.Slf4j;
import okhttp3.ResponseBody;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Service;
import org.springframework.web.servlet.mvc.method.annotation.SseEmitter;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.concurrent.LinkedBlockingDeque;
/**
* openai语音聊天服务
* @author huangkh
*/
@Slf4j
@Service
public class OpenAiAudioChatService implements AudioChatService {
@Value("${chain.llm.openai.model}")
private String defaultModel;
@Autowired
private ConversationService conversationService;
@Autowired
private OpenAiServiceProxy openAiServiceProxy;
@Autowired
private MediaFileService mediaFileService;
@Override
public String audioToText(File audio,String prompt) {
OpenAiService service = openAiServiceProxy.service();
CreateTranscriptionRequest createTranscriptionRequest = CreateTranscriptionRequest
.builder()
.model("whisper-1")
.responseFormat("json")
.language("zh")
.prompt(prompt)
.temperature(0.2d)
.build();
TranscriptionResult result = service.createTranscription(createTranscriptionRequest, audio);
return result.getText();
}
@Override
public InputStream createSpeech(String content) {
OpenAiService service = openAiServiceProxy.service();
CreateSpeechRequest createSpeechRequest = CreateSpeechRequest.builder()
.model("tts-1")
.input(content)
.voice("onyx")
.build();
final ResponseBody speech = service.createSpeech(createSpeechRequest);
InputStream inputStream = speech.byteStream();
return inputStream;
}
@Override
public void audioChat(CustomChatMessage request, List<String> nearestList, List<Conversation> history, SseEmitter sseEmitter, SysUser sysUser, String mediaId) {
OpenAiService service = openAiServiceProxy.service();
EncodingRegistry registry = Encodings.newDefaultEncodingRegistry();
Encoding enc = registry.getEncoding(EncodingType.CL100K_BASE);
List<Integer> promptTokens = enc.encode(request.getContent());
final List<ChatMessage> messages = new ArrayList<>();
// 保存用户提问
int questionCid = conversationService.saveConversation(sysUser.getId(),request.getSessionId(), request.getContent(), "Q");
MediaFile mediaFile = mediaFileService.getByMfid(mediaId);
mediaFile.setCid(questionCid);
mediaFileService.saveOrUpdate(mediaFile);
// 预保存AI回答
int answerCid = conversationService.saveConversation(sysUser.getId(),request.getSessionId(), "", "A");
for (String content : nearestList) {
final ChatMessage systemMessage = new ChatMessage(ChatMessageRole.SYSTEM.value(), content);
messages.add(systemMessage);
}
String ask = request.getContent();
String temp = "";
for (Conversation conversation : history){
temp = temp + conversation.getContent();
}
ask = temp + ask;
final ChatMessage userMessage = new ChatMessage(ChatMessageRole.USER.value(), ask + (nearestList.size() > 0 ? "\n\n注意:回答问题时,须严格根据我给你的系统上下文内容原文进行回答,请不要自己发挥,回答时保持原来文本的段落层级" : "" ));
messages.add(userMessage);
ChatCompletionRequest chatCompletionRequest = ChatCompletionRequest
.builder()
.model(defaultModel)
.messages(messages)
.temperature(0.1)
.user(request.getSessionId())
.n(1)
.logitBias(new HashMap<>())
.build();
StringBuilder sb = new StringBuilder();
// 创建一个阻塞队列用于存放语音文本片段
LinkedBlockingDeque<OpenAiAudioChatService.SseAudioChunk> queue = new LinkedBlockingDeque();
// 创建一个线程用于监听队列的元素
new Thread(() -> {
while (true) {
try {
// 从队列中取出元素
OpenAiAudioChatService.SseAudioChunk audioChunk = queue.take();
sseChunkAudio(audioChunk,answerCid);
if (audioChunk.getLastFlag()){
// 如果是最后一个元素线程退出
break;
}
} catch (InterruptedException e) {
e.printStackTrace();
} catch (IOException e) {
throw new RuntimeException(e);
}
}
}).start();
// 切分辅助变量,分别为批号,开始索引,结束索引,每个音频文本大致长度,chunk索引值
final int[] splitArr = {1,0,0,50,-1};
service.streamChatCompletion(chatCompletionRequest)
.doOnError(Throwable::printStackTrace)
.blockingForEach(item -> {
if (StrUtil.isBlank(item.getChoices().get(0).getFinishReason())
&& StrUtil.isBlank(item.getChoices().get(0).getMessage().getRole())){
String content = item.getChoices().get(0).getMessage().getContent();
if (content.endsWith("\n") || content.endsWith("\r")){
content = content.replaceAll("\n","<br>");
content = content.replaceAll("\r","<br>");
}
if (content.contains(" ")){
content = content.replaceAll(" "," ");
}
sb.append(content);
sseEmitter.send(content);
splitArr[2] = sb.length();
if (sb.length() >= splitArr[3] * splitArr[0]){
if (content.endsWith("<br>")){
splitArr[4]++;
splitArr[0]++;
String chunk = sb.substring(splitArr[1], splitArr[2]);
queue.offer(new OpenAiAudioChatService.SseAudioChunk(splitArr[4],chunk,sseEmitter,false));
splitArr[1] = sb.length();
}
}
}else if (StrUtil.isNotBlank(item.getChoices().get(0).getFinishReason())){
splitArr[2] = sb.length();
splitArr[4]++;
String chunk = sb.substring(splitArr[1], splitArr[2]);
queue.offer(new OpenAiAudioChatService.SseAudioChunk(splitArr[4],chunk,sseEmitter,true));
sseEmitter.send("[END]");
String fullContent = sb.toString();
List<Integer> completionToken = enc.encode(fullContent);
Conversation byId = conversationService.getById(answerCid);
byId.setContent(sb.toString());
conversationService.saveOrUpdate(byId);
}
});
service.shutdownExecutor();
}
    @Data
    @AllArgsConstructor
    // One slice of the streamed chat answer, queued for text-to-speech conversion.
    // Lombok @Data generates the getters/setters used elsewhere (getChunk(),
    // getLastFlag(), ...); @AllArgsConstructor generates the 4-arg constructor.
    class SseAudioChunk{
        private int index;             // zero-based position of this chunk in the answer
        private String chunk;          // the text fragment (may still contain "<br>" markers)
        private SseEmitter sseEmitter; // SSE channel used to push the resulting audio URL
        private Boolean lastFlag;      // true when this is the final chunk of the answer
    }
private void sseChunkAudio(OpenAiAudioChatService.SseAudioChunk sseAudioChunk, int cid) throws IOException {
String audioChunk = sseAudioChunk.getChunk().replaceAll("<br>","");
InputStream speech = createSpeech(audioChunk);
MediaFile mediaFile = mediaFileService.saveFile(speech,cid);
String prefix = "[sub-audio]";
if (sseAudioChunk.getLastFlag() && sseAudioChunk.getIndex() == 0){
// 仅一个chunk
prefix = "[all-audio]";
}else if (sseAudioChunk.getLastFlag() && sseAudioChunk.getIndex() != 0){
// 多个chunk,最后一个
prefix = "[end-audio]";
}else if (!sseAudioChunk.getLastFlag() && sseAudioChunk.getIndex() == 0){
// 多个chunk,第一个
prefix = "[fst-audio]";
}
sseAudioChunk.getSseEmitter().send(prefix + mediaFile.getHttpUrl());
}
}
|
[
"com.theokanning.openai.completion.chat.ChatMessageRole.SYSTEM.value",
"com.theokanning.openai.audio.CreateSpeechRequest.builder",
"com.theokanning.openai.completion.chat.ChatMessageRole.USER.value"
] |
[((2820, 2969), 'com.theokanning.openai.audio.CreateSpeechRequest.builder'), ((2820, 2944), 'com.theokanning.openai.audio.CreateSpeechRequest.builder'), ((2820, 2913), 'com.theokanning.openai.audio.CreateSpeechRequest.builder'), ((2820, 2881), 'com.theokanning.openai.audio.CreateSpeechRequest.builder'), ((4228, 4258), 'com.theokanning.openai.completion.chat.ChatMessageRole.SYSTEM.value'), ((4586, 4614), 'com.theokanning.openai.completion.chat.ChatMessageRole.USER.value')]
|
package com.example.qa_backend.Controller;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.theokanning.openai.client.OpenAiApi;
import com.theokanning.openai.completion.CompletionRequest;
import com.theokanning.openai.completion.chat.ChatCompletionRequest;
import com.theokanning.openai.completion.chat.ChatMessage;
import com.theokanning.openai.completion.chat.ChatMessageRole;
import com.theokanning.openai.service.OpenAiService;
import okhttp3.OkHttpClient;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.security.access.prepost.PreAuthorize;
import org.springframework.web.bind.annotation.*;
import retrofit2.Retrofit;
import javax.annotation.PostConstruct;
import java.net.InetSocketAddress;
import java.net.Proxy;
import java.time.Duration;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import static com.theokanning.openai.service.OpenAiService.*;
@CrossOrigin
@RestController
public class ChatWithOpenAIController {
@Value("${openai.api.key}")
private String token;
private OpenAiApi api;
private OpenAiService service;
private final ExecutorService executorService = Executors.newFixedThreadPool(10);
@PostConstruct
public void init() {
ObjectMapper mapper = defaultObjectMapper();
Proxy proxy = new Proxy(Proxy.Type.HTTP, new InetSocketAddress( "127.0.0.1",15236));
OkHttpClient client = defaultClient(token, Duration.ofSeconds(100000))
.newBuilder()
.proxy(proxy)
.build();
Retrofit retrofit = defaultRetrofit(client, mapper);
api = retrofit.create(OpenAiApi.class);
service = new OpenAiService(api);
}
@RequestMapping("/chat")
@PreAuthorize("@authCheck.authorityCheck(0)")
public String chat(@RequestBody Map<String, String> requestBody) {
String question = requestBody.get("question");
List<ChatMessage> messages = new ArrayList<>();
ChatMessage userMessage = new ChatMessage(ChatMessageRole.USER.value(), question);
messages.add(userMessage);
// OpenAiService service = new OpenAiService("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", Duration.ofSeconds(100000));
ChatCompletionRequest chatCompletionRequest = ChatCompletionRequest
.builder()
.model("gpt-3.5-turbo-0613")
.messages(messages)
.temperature(0.5)
.maxTokens(2048)
.topP(1D)
.frequencyPenalty(0D)
.presencePenalty(0D)
.build();
System.out.println(question);
try {
Future<ChatMessage> future = executorService.submit(() -> service.createChatCompletion(chatCompletionRequest).getChoices().get(0).getMessage());
ChatMessage text = future.get();
return text.getContent();
} catch (Exception e) {
throw new RuntimeException(e);
}
}
}
|
[
"com.theokanning.openai.completion.chat.ChatMessageRole.USER.value"
] |
[((2228, 2256), 'com.theokanning.openai.completion.chat.ChatMessageRole.USER.value')]
|
package com.github.hakenadu.plantuml.service.completion;
import java.util.Arrays;
import java.util.Optional;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
import org.springframework.context.annotation.Profile;
import org.springframework.stereotype.Service;
import com.theokanning.openai.completion.chat.ChatCompletionRequest;
import com.theokanning.openai.completion.chat.ChatCompletionResult;
import com.theokanning.openai.completion.chat.ChatMessage;
import com.theokanning.openai.service.OpenAiService;
/**
 * {@link CompletionService} implementation that generates PlantUML completions
 * through the OpenAI chat-completion API.
 */
@ConditionalOnProperty(prefix = "plantuml-editor.openai", name = "chat", havingValue = "true", matchIfMissing = false)
@Service
@Profile("completion")
public class OpenAiChatCompletionService extends OpenAiCompletionService {

	private static final String CURRENT_PLANTUML_SPEC_PLACEHOLDER = "%currentPlantumlSpec";
	private static final String TEXTUAL_DESCRIPTION_PLACEHOLDER = "%textualDescription";

	/** Chat message with role "system" that scopes every conversation. */
	private final ChatMessage systemScopeMessage;

	/** User-prompt template; must contain both placeholders (validated below). */
	private final String promptPattern;

	public OpenAiChatCompletionService(final @Value("${plantuml-editor.openai.system-scope}") String systemScope,
			final @Value("${plantuml-editor.openai.prompt-pattern}") String promptPattern) {
		this.systemScopeMessage = new ChatMessage("system", systemScope);
		if (!promptPattern.contains(CURRENT_PLANTUML_SPEC_PLACEHOLDER)) {
			throw new IllegalArgumentException(CURRENT_PLANTUML_SPEC_PLACEHOLDER + " missing in prompt-pattern");
		}
		if (!promptPattern.contains(TEXTUAL_DESCRIPTION_PLACEHOLDER)) {
			throw new IllegalArgumentException(TEXTUAL_DESCRIPTION_PLACEHOLDER + " missing in prompt-pattern");
		}
		this.promptPattern = promptPattern;
	}

	@Override
	protected String getCompletion(final OpenAiService openAiService, final String originalSpec,
			final String textualDescription) {
		final ChatCompletionRequest chatCompletionRequest = createChatCompletionRequest(originalSpec,
				textualDescription);
		final ChatCompletionResult chatCompletionResult = openAiService.createChatCompletion(chatCompletionRequest);
		// Taking the first choice unchecked mirrors the common examples; an empty
		// choice list would surface as IndexOutOfBoundsException.
		return chatCompletionResult.getChoices().get(0).getMessage().getContent();
	}

	private ChatCompletionRequest createChatCompletionRequest(final String originalSpec,
			final String textualDescription) {
		return ChatCompletionRequest.builder().model(getModel()).maxTokens(getMaxTokens())
				.messages(Arrays.asList(systemScopeMessage,
						new ChatMessage("user", createUserPrompt(originalSpec, textualDescription))))
				.build();
	}

	private String createUserPrompt(final String originalSpec, final String textualDescription) {
		final String currentPlantumlSpec = Optional.ofNullable(originalSpec).orElse("");
		// Fixed: String.replace (literal) instead of replaceAll (regex). The
		// substituted texts are user-controlled; '$' or '\' in them is special
		// in a regex replacement string and would corrupt the prompt or throw
		// IllegalArgumentException.
		return promptPattern.replace(CURRENT_PLANTUML_SPEC_PLACEHOLDER, currentPlantumlSpec)
				.replace(TEXTUAL_DESCRIPTION_PLACEHOLDER, textualDescription);
	}
}
|
[
"com.theokanning.openai.completion.chat.ChatCompletionRequest.builder"
] |
[((2575, 2795), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((2575, 2782), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((2575, 2650), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((2575, 2624), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((2933, 2977), 'java.util.Optional.ofNullable')]
|
package com.lambdaAndSpring.screenmatch.service;
import com.theokanning.openai.completion.CompletionRequest;
import com.theokanning.openai.service.OpenAiService;
public class ConsultationGPT {

    /** Environment variable the OpenAI API key is read from. */
    private static final String API_KEY_ENV = "OPENAI_API_KEY";

    /**
     * Translates the given English text to Portuguese via the OpenAI
     * completion API.
     *
     * @param texto English source text
     * @return the translated text returned by the model
     * @throws IllegalStateException if the API key environment variable is unset
     */
    public static String obterTraducao(String texto) {
        // SECURITY FIX: an API key used to be hard-coded here. A key committed
        // to source control is compromised and must be rotated; read it from
        // the environment instead.
        String apiKey = System.getenv(API_KEY_ENV);
        if (apiKey == null || apiKey.isBlank()) {
            throw new IllegalStateException("Missing environment variable " + API_KEY_ENV);
        }
        OpenAiService service = new OpenAiService(apiKey);
        CompletionRequest requisicao = CompletionRequest.builder()
                .model("gpt-3.5-turbo-instruct")
                .prompt("Traduza este texto do inglês para o portugês: " + texto)
                .maxTokens(1000)
                .temperature(0.7)
                .build();
        var resposta = service.createCompletion(requisicao);
        return resposta.getChoices().get(0).getText();
    }
}
|
[
"com.theokanning.openai.completion.CompletionRequest.builder"
] |
[((400, 657), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((400, 631), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((400, 596), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((400, 562), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((400, 477), 'com.theokanning.openai.completion.CompletionRequest.builder')]
|
package com.example.fyp.controller;
import java.time.Duration;
import java.time.LocalDateTime;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.stream.Collectors;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.core.io.ByteArrayResource;
import org.springframework.http.HttpStatus;
import org.springframework.http.ResponseEntity;
import org.springframework.security.core.Authentication;
import org.springframework.security.core.context.SecurityContextHolder;
import org.springframework.web.bind.annotation.DeleteMapping;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.RestController;
import org.springframework.web.multipart.MultipartFile;
import com.example.fyp.entity.Account;
import com.example.fyp.entity.Analysis;
import com.example.fyp.entity.Employee;
import com.example.fyp.entity.Recording;
import com.example.fyp.model.ResponseStatus;
import com.example.fyp.repo.AnalysisRepository;
import com.example.fyp.repo.EmployeeRepository;
import com.example.fyp.repo.RecordingRepository;
import com.example.fyp.service.AccountServiceImpl;
import com.example.fyp.service.AnalysisService;
import com.example.fyp.service.EmployeeService;
import com.example.fyp.service.RecordingListService;
import com.example.fyp.service.StorageService;
import com.theokanning.openai.completion.CompletionRequest;
import com.theokanning.openai.service.OpenAiService;
//Controller to handle uploading recording files
@RestController
@RequestMapping("/audio")
public class UploadController {
    // Constructor-injected collaborators (preferred injection style).
    private final RecordingListService recordingListService;
    private final AccountServiceImpl accountServiceImpl;
    @Autowired
    public UploadController(RecordingListService recordingListService, AccountServiceImpl accountServiceImpl) {
        this.recordingListService = recordingListService;
        this.accountServiceImpl = accountServiceImpl;
    }
    // Field-injected collaborators.
    // NOTE(review): mixes field and constructor injection — consider moving
    // these into the constructor as well.
    @Autowired
    private StorageService service;       // stores/retrieves audio files and transcripts
    @Autowired
    private RecordingRepository recRepo;
    @Autowired
    private EmployeeRepository empRepo;
    @Autowired
    private EmployeeService empService;
    @Autowired
    private AnalysisRepository analysisRepo;  // NOTE(review): unused in the visible methods
    @Autowired
    private AnalysisService analysisService;
// Upload audio
@PostMapping("/uploadAudio")
public ResponseEntity<String> uploadFile(@RequestParam("audio") MultipartFile file, @RequestParam String lastModifiedDate) {
// Retrieve the current authentication token
LocalDateTime modDate = LocalDateTime.parse(lastModifiedDate);
Authentication authentication = SecurityContextHolder.getContext().getAuthentication();
String email = authentication.getName();
Account account = accountServiceImpl.loadUserDetailsByUsername(email);
return new ResponseEntity<>(service.uploadFile(file, account, modDate), HttpStatus.OK);
}
// Download existing recording file
@GetMapping("/download/{fileName}")
public ResponseEntity<ByteArrayResource> downloadFile(@PathVariable String fileName){
byte[] data = service.downloadFile(fileName);
ByteArrayResource resource = new ByteArrayResource(data);
return ResponseEntity
.ok()
.contentLength(data.length)
.header("Content-type", "application/octet-stream")
.header("Content-disposition", "attachment; filename=\"" + fileName + " \"")
.body(resource);
}
// Delete recording file
@DeleteMapping("/deleteFile")
public ResponseEntity<String> deleteFile(@RequestParam String fileName, @RequestParam String recID){
//get the recording object that is about to be deleted
Integer recording_id = Integer.parseInt(recID);
Optional<Recording> recordingToDelete = recRepo.findById(recording_id);
//check if there are any employees assigned to the recording
if(recordingToDelete.get().getEmployee() != null) {
//get the employee details of the recording
Integer empID = recordingToDelete.get().getEmployee().getEmployeeId();
Optional<Employee> emp = empRepo.findById(empID);
emp.get().decrementNumCallsHandled();
}
recRepo.delete(recordingToDelete.get());
return new ResponseEntity<>(service.deleteFile((recordingToDelete.get().getTimeStamp()+"_"+fileName)), HttpStatus.OK);
}
// Update recording file's assigned employee
@PostMapping("/updateAudioEmployee")
public ResponseEntity<?> updateRecordingEmployee(@RequestParam String currentDate, @RequestParam String employeeId, @RequestParam String employeeName) {
// Retrieve the current authentication token
Authentication authentication = SecurityContextHolder.getContext().getAuthentication();
String email = authentication.getName();
Integer account_id = accountServiceImpl.getAccountId(email);
ResponseStatus response = new ResponseStatus();
Integer empID = Integer.parseInt(employeeId);
List<Map<String, Object>> recList = recordingListService.getRecordingList(account_id);
Optional<Employee> emp = empRepo.findById(empID);
try {
LocalDateTime currTime = LocalDateTime.parse(currentDate);
for (Map<String, Object> rec : recList) {
if (((LocalDateTime) rec.get("uploadDate")).isAfter(currTime)) {
System.out.println(rec.get("recordingName"));
if(((Employee) rec.get("employee")) != null) {
if(((Integer)((Employee) rec.get("employee")).getEmployeeId()) == emp.get().getEmployeeId()) {
//skip if recording already assigned to same employee
continue;
}
((Employee) rec.get("employee")).decrementNumCallsHandled();
}
Optional<Recording> r = recRepo.findById((Integer) rec.get("recordingId"));
(r.get()).setEmployee(emp.get());
// (r.get()).setEmployeeName(employeeName);
emp.get().incrementNumCallsHandled();
recRepo.save(r.get());
}
}
empRepo.save(emp.get());
// RESPONSE DATA
response.setSuccess(true);
response.setData(recList);
return ResponseEntity.status(HttpStatus.OK).body(response);
} catch (Exception e) {
// RESPONSE DATA
response.setSuccess(false);
response.setMessage("Fail to get All Employees.");
return ResponseEntity.status(HttpStatus.INTERNAL_SERVER_ERROR).body(response);
}
}
// Update file's assigned employee based on the file's delimited value
@PostMapping("/updateRecordingEmployeeByDelimiter")
public ResponseEntity<?> updateRecordingEmployeeByDelimiter(@RequestParam String recordingID, @RequestParam String empName) {
ResponseStatus response = new ResponseStatus();
Authentication authentication = SecurityContextHolder.getContext().getAuthentication();
String email = authentication.getName();
Integer account_id = accountServiceImpl.getAccountId(email);
List<Map<String, Object>> empList = empService.getAllEmployee(account_id);
empList = empList.stream()
.filter(emp -> ((String) emp.get("employeeName")).contains(empName))
.collect(Collectors.toList());
Integer recID = Integer.parseInt(recordingID);
Optional<Recording> recToUpdate = recRepo.findById(recID);
try {
Optional<Employee> employeeToAssign;
if(empList.size() == 1) {
employeeToAssign = empRepo.findById(((Integer) empList.get(0).get("employeeId")));
} else {
response.setSuccess(false);
response.setMessage("Duplicate entries detected!");
return ResponseEntity.status(HttpStatus.INTERNAL_SERVER_ERROR).body(response);
}
if(recToUpdate.get().getEmployee() != null) {
//check for double assignment
if(recToUpdate.get().getEmployee().getEmployeeId() == employeeToAssign.get().getEmployeeId()) {
response.setSuccess(false);
response.setMessage("Employee already assigned to this recording!");
return ResponseEntity.status(HttpStatus.INTERNAL_SERVER_ERROR).body(response);
}
recToUpdate.get().getEmployee().decrementNumCallsHandled();
}
recToUpdate.get().setEmployee(employeeToAssign.get());
// recToUpdate.get().setEmployeeName(empName);
employeeToAssign.get().incrementNumCallsHandled();
recRepo.save(recToUpdate.get());
empRepo.save(employeeToAssign.get());
// RESPONSE DATA
response.setSuccess(true);
return ResponseEntity.status(HttpStatus.OK).body(response);
} catch (Exception e) {
response.setSuccess(false);
response.setMessage("Fail to update Employee.");
return ResponseEntity.status(HttpStatus.INTERNAL_SERVER_ERROR).body(response);
}
}
// Unassign employees from recording files
@PostMapping("/unassignEmployees")
public ResponseEntity<?> unassignEmployees(@RequestParam String currentDate) {
// Retrieve the current authentication token
Authentication authentication = SecurityContextHolder.getContext().getAuthentication();
String email = authentication.getName();
Integer account_id = accountServiceImpl.getAccountId(email);
ResponseStatus response = new ResponseStatus();
List<Map<String, Object>> recList = recordingListService.getRecordingList(account_id);
try {
LocalDateTime currTime = LocalDateTime.parse(currentDate);
for (Map<String, Object> rec : recList) {
if (((LocalDateTime) rec.get("uploadDate")).isAfter(currTime)) {
Optional<Recording> r = recRepo.findById((Integer) rec.get("recordingId"));
if(r.get().getEmployee() != null) {
r.get().getEmployee().decrementNumCallsHandled();
r.get().setEmployee(null);
recRepo.save(r.get());
}
}
}
// RESPONSE DATA
response.setSuccess(true);
response.setData(recList);
return ResponseEntity.status(HttpStatus.OK).body(response);
} catch (Exception e) {
// RESPONSE DATA
response.setSuccess(false);
response.setMessage("Fail to unassign All Employees.");
return ResponseEntity.status(HttpStatus.INTERNAL_SERVER_ERROR).body(response);
}
}
// Get all recent uploaded recordings
// Get all recent uploaded recordings
@GetMapping("/getRecordings")
public ResponseEntity<?> getAllRecording(@RequestParam(required = false) String currentDate) {
ResponseStatus<List<Map<String, Object>>> response = new ResponseStatus<>();
try {
// Retrieve the current authentication token
Authentication authentication = SecurityContextHolder.getContext().getAuthentication();
String email = authentication.getName();
Integer account_id = accountServiceImpl.getAccountId(email);
System.out.println("ACCOUNT ID: " + account_id);
List<Map<String, Object>> recList = recordingListService.getRecordingList(account_id);
System.out.println("RECORDING: " + recList);
if (currentDate != null && !currentDate.isEmpty()) {
LocalDateTime currTime = LocalDateTime.parse(currentDate);
recList = recList.stream()
.filter(rec -> ((LocalDateTime) rec.get("uploadDate")).isAfter(currTime))
.collect(Collectors.toList());
}
Iterator<Map<String, Object>> iterator = recList.iterator();
while (iterator.hasNext()) {
Map<String, Object> recIter = iterator.next();
Optional<Recording> r = recRepo.findById((Integer) recIter.get("recordingId"));
if (r.get().getAnalysis() != null) {
iterator.remove();
}
}
// RESPONSE DATA
response.setSuccess(true);
response.setMessage("Successfully Retrieve All Recordings.");
response.setData(recList);
return ResponseEntity.status(HttpStatus.OK).body(response);
} catch (Exception ex) {
// RESPONSE DATA
response.setSuccess(false);
response.setMessage("Fail to get All Recordings.");
return ResponseEntity.status(HttpStatus.INTERNAL_SERVER_ERROR).body(response);
}
}
// For GPT analysis
@Value("${apiKey}")
private String apiKeyContent;
// Analyze
@PostMapping("/analyze")
private String recordAnalyzer(@RequestBody Integer recordingId) throws RuntimeException {
// get company
Authentication authentication = SecurityContextHolder.getContext().getAuthentication();
String email = authentication.getName();
Integer account_id = accountServiceImpl.getAccountId(email);
Account account = accountServiceImpl.loadUserDetailsByUsername(email);
String company = account.getCompanyField();
Integer analysisId = analysisService.getAnalysisId(recordingId);
String apiKey = apiKeyContent;
String currentModel = "text-davinci-003";
Analysis analysis = analysisService.findAnalysisById(analysisId);
// Set up OpenAI API
OpenAiService openAiService = new OpenAiService(apiKey, Duration.ofSeconds(30));
// Merge all the transcripts and prefix with Agent / Customer
List<Object[]> unformattedTranscripts = service.getTranscriptsByAnalysisId(analysisId);
String formattedTranscripts = "";
for (Object[] innerArray : unformattedTranscripts) {
if ((boolean) innerArray[0]) {
formattedTranscripts += "Agent: " + (String) innerArray[1] + "\n";
} else {
formattedTranscripts += "Customer: " + (String) innerArray[1] + "\n";
}
}
System.out.println("FORMATTED TRANSCRIPTS: \n" + formattedTranscripts);
// // Combined prompt
// String systemInstructions = "You are an AI language model. Answer each of the following question in 1 word except the second question. End each answer with '\n'.\n";
// String prompt = formattedTranscripts.toString()
// + "Decide if this conversation category is Inquiry, Complaint, or Warranty: \n"
// + "Summarize this customer service conversation into 1 paragraph: \n"
// + "Decide if the customer's sentiment is positive or negative based on this conversation (positive means the customer is not in emotional state, otherwise negative): \n"
// + "Decide if the agent's sentiment is positive or negative based on this conversation (positive means the agent is not in emotional state, otherwise negative): \n"
// + "Decide if the call sentiment is positive or negative based on this conversation (positive means the call's objectives are achieved, otherwise negative): ";
// CompletionRequest categoryRequest = CompletionRequest.builder()
// .model(currentModel)
// .prompt(systemInstructions + prompt)
// .echo(true)
// .maxTokens(300)
// .build();
// String response = openAiService.createCompletion(categoryRequest).getChoices().get(0).getText();
// String categoryRaw = response.substring(systemInstructions.length() + prompt.length()).trim();
// return categoryRaw;
// Category
String prompt = "Decide if this conversation category is Inquiry, Complaint, or Warranty: "
+ formattedTranscripts;
CompletionRequest categoryRequest = CompletionRequest.builder()
.model(currentModel)
.prompt(prompt)
.echo(true)
.maxTokens(60)
.build();
String response = openAiService.createCompletion(categoryRequest).getChoices().get(0).getText();
String categoryRaw = response.substring(prompt.length()).trim();
String category;
if (categoryRaw.toLowerCase().indexOf("inquiry") != -1) {
category = "Inquiry";
} else if (categoryRaw.toLowerCase().indexOf("complaint") != -1) {
category = "Complaint";
} else if (categoryRaw.toLowerCase().indexOf("warranty") != -1) {
category = "Warranty";
} else {
category = "Not found";
}
analysis.setCategory(category);
// Summary
prompt = "Summarize this customer service conversation into 1 paragraph: " + formattedTranscripts;
CompletionRequest summaryRequest = CompletionRequest.builder()
.model(currentModel)
.prompt(prompt)
.echo(true)
.maxTokens(300)
.build();
response = openAiService.createCompletion(summaryRequest).getChoices().get(0).getText();
String summary = response.substring(prompt.length()).trim();
analysis.setSummary(summary);
// Customer sentiment
prompt = "Decide if the customer's sentiment is positive or negative based on this conversation (negative if the customer shows many signs of frustration / bad emotions, otherwise positive): "
+ formattedTranscripts;
CompletionRequest customerSentimentRequest = CompletionRequest.builder()
.model(currentModel)
.prompt(prompt)
.echo(true)
.maxTokens(60)
.build();
response = openAiService.createCompletion(customerSentimentRequest).getChoices().get(0).getText();
String customerSentimentRaw = response.substring(prompt.length()).trim();
String customerSentiment;
if (customerSentimentRaw.toLowerCase().indexOf("positive") != -1) {
customerSentiment = "Positive";
} else if (customerSentimentRaw.toLowerCase().indexOf("negative") != -1) {
customerSentiment = "Negative";
} else {
customerSentiment = "Not found";
}
analysis.setCustomerSentiment(customerSentiment);
// Employee sentiment
prompt = "Decide if the agent's sentiment is positive or negative based on this conversation (positive if the agent's being polite and understanding when talking to the customer, otherwise negative): "
+ formattedTranscripts;
CompletionRequest employeeSentimentRequest = CompletionRequest.builder()
.model(currentModel)
.prompt(prompt)
.echo(true)
.maxTokens(1000)
.build();
response = openAiService.createCompletion(employeeSentimentRequest).getChoices().get(0).getText();
String employeeSentimentRaw = response.substring(prompt.length()).trim();
String employeeSentiment;
if (employeeSentimentRaw.toLowerCase().indexOf("positive") != -1) {
employeeSentiment = "Positive";
} else if (employeeSentimentRaw.toLowerCase().indexOf("negative") != -1) {
employeeSentiment = "Negative";
} else {
employeeSentiment = "Not found";
}
analysis.setEmployeeSentiment(employeeSentiment);
// Call sentiment
prompt = "Decide if the call sentiment is positive or negative based on this conversation (positive means the call's objectives are achieved, otherwise negative): "
+ formattedTranscripts;
CompletionRequest callSentimentRequest = CompletionRequest.builder()
.model(currentModel)
.prompt(prompt)
.echo(true)
.maxTokens(60)
.build();
response = openAiService.createCompletion(callSentimentRequest).getChoices().get(0).getText();
String callSentimentRaw = response.substring(prompt.length()).trim();
String callSentiment;
if (callSentimentRaw.toLowerCase().indexOf("positive") != -1) {
callSentiment = "Positive";
} else if (callSentimentRaw.toLowerCase().indexOf("negative") != -1) {
callSentiment = "Negative";
} else {
callSentiment = "Not found";
}
analysis.setRecordingSentiment(callSentiment);
// Update Employee Recording Sentiment
Recording recording = recRepo.findById(recordingId).get();
Employee employee = recording.getEmployee();
if (employee != null) {
if (callSentiment.equals("Positive")) {
employee.setNumPositiveSentiment(employee.getNumPositiveSentiment() + 1);
} else {
employee.setNumNegativeSentiment(employee.getNumNegativeSentiment() + 1);
}
}
// Main issue
prompt = "This is a " + company + " company. Describe the main issue into just a few words based on this conversation: "
+ formattedTranscripts;
CompletionRequest mainIssueRequest = CompletionRequest.builder()
.model(currentModel)
.prompt(prompt)
.echo(true)
.maxTokens(60)
.build();
response = openAiService.createCompletion(mainIssueRequest).getChoices().get(0).getText();
String mainIssue = response.substring(prompt.length()).trim();
analysis.setMainIssue(mainIssue);
// Employee performance
prompt = "This is a " + company + " company and please provide an objective assessment of the agent's Interaction Skill using the following parameters:\n"
+ //
"\n" + //
"- Fluency: rating/100\n" + //
"- Hospitality: rating/100\n" + //
"- Problem Solving: rating/100\n" + //
"- Personalization: rating/100" + //
" Based on the following conversation: " + formattedTranscripts + //
// "Please provide an assessment of the conversation above on a scale of 1 to
// 100 where 1 is very poor, 50 is average and 100 is excellent. You do not
// always have to give high values.\r\n" + //
"You can use this guideline as a guide to rate the quality of the conversation " + //
"Guideline: " + //
"1.Rate conversations above 75 when they exhibit clear communication, engage participants effectively, and maintain a strong flow of information."
+ //
"2.Assign a rating between 40 and 74 for conversations that demonstrate moderate quality. These conversations convey the main points but might lack some depth or engagement."
+ //
"3.Use a rating of range 1 - 39 for conversations that exhibit poor communication, confusion, or lack of engagement. These conversations struggle to convey coherent information."
+ //
"Please provide your ratings for each parameter. Your ratings should reflect your unbiased evaluation of the agent's skills. Keep in mind that the ratings should be within a reasonable range in accordance with the guideline given and should not be overly high."
+ //
"It is best that the rating should rarely be above 85 unless it exceptionally adhere and excelled to guideline number 1."
+ //
// An average score around 80 is expected.\r\n" + //
"[You do not need to explain anything. Just respond with the format given.]";
// prompt = "This is a telecommunication company and please provide an objective assessment of the agent's Interaction Skill using the following parameters:\n"
// + //
// "\n" + //
// "- Fluency: rating/100\n" + //
// "- Hospitality: rating/100\n" + //
// "- Problem Solving: rating/100\n" + //
// "- Personalization: rating/100" + //
// " Based on the following conversation: " + formattedTranscripts + //
// ". Please provide your ratings for each parameter. Your ratings should reflect your unbiased evaluation of the agent's skills. Keep in mind that the ratings should be within a reasonable range and should not be overly high. An average score around 80 is expected.\r\n"
// + //
// "[You do not need to explain anything. Just respond with the format given.]";
CompletionRequest employeePerformanceRequest = CompletionRequest.builder()
.model(currentModel)
.prompt(prompt)
.echo(true)
.maxTokens(1000)
.build();
response = openAiService.createCompletion(employeePerformanceRequest).getChoices().get(0).getText();
String employeePerformance = response.substring(prompt.length()).trim();
// System.out.println(employeePerformance);
double fluency = analysisService.getScore(employeePerformance, "fluency:");
double hospitality = analysisService.getScore(employeePerformance, "hospitality:");
double problem = analysisService.getScore(employeePerformance, "problem solving:");
double personalization = analysisService.getScore(employeePerformance, "personalization:");
double average = (fluency + hospitality + problem + personalization) / 4;
analysis.setFluency(fluency);
analysis.setHospitality(hospitality);
analysis.setProblemSolving(problem);
analysis.setPersonalization(personalization);
analysis.setAveragePerformance(average);
// Negative Emotions
prompt = "List down 3 short sentences spoken by our agent which is prefixed with 'Agent:' in the conversation that you think can be improved in terms of good hospitality and manner. Answer in the following format: \n"
+
"'1|[old sentence spoken by agent]|[explanation why the old sentence can be improved or impolite]|[improved sentence]'\n"
+
"'2|[old sentence spoken by agent]|[explanation]|[improved sentence]'\n" +
"'3|[old sentence spoken by agent]|[explanation]|[improved sentence]'\n" +
"Pleae follow the format. Based on this conversation:\n" + formattedTranscripts + "\n";
CompletionRequest negativeEmotionsRequest = CompletionRequest.builder()
.model(currentModel)
.prompt(prompt)
.echo(true)
.maxTokens(1500)
.build();
response = openAiService.createCompletion(negativeEmotionsRequest).getChoices().get(0).getText();
String negativeEmotions = response.substring(prompt.length()).trim();
analysis.setNegativeEmotion(negativeEmotions);
analysisService.saveAnalysis(analysis);
return "completed";
}
// Analyze
// @PostMapping("/analyze")
// private String recordAnalyzer(@RequestBody List<Integer> recordingIds) throws RuntimeException {
// // get company
// Authentication authentication = SecurityContextHolder.getContext().getAuthentication();
// String email = authentication.getName();
// Integer account_id = accountServiceImpl.getAccountId(email);
// Account account = accountServiceImpl.loadUserDetailsByUsername(email);
// String company = account.getCompanyField();
// List<Integer> analysisIds = new ArrayList<>();
// for (Integer recordingId : recordingIds) {
// analysisIds.add(analysisService.getAnalysisId(recordingId));
// }
// for (int i = 0; i < analysisIds.size(); i ++) {
// System.out.println("ANALYSIS ID: " + analysisIds.get(i));
// String apiKey = apiKeyContent;
// String currentModel = "text-davinci-003";
// Analysis analysis = analysisService.findAnalysisById(analysisIds.get(i));
// // Set up OpenAI API
// OpenAiService openAiService = new OpenAiService(apiKey);
// // Merge all the transcripts and prefix with Agent / Customer
// List<Object[]> unformattedTranscripts = service.getTranscriptsByAnalysisId(analysisIds.get(i));
// String formattedTranscripts = "";
// for (Object[] innerArray : unformattedTranscripts) {
// if ((boolean) innerArray[0]) {
// formattedTranscripts += "Agent: " + (String) innerArray[1] + "\n";
// } else {
// formattedTranscripts += "Customer: " + (String) innerArray[1] + "\n";
// }
// }
// System.out.println("FORMATTED TRANSCRIPTS: \n" + formattedTranscripts);
// // // Combined prompt
// // String systemInstructions = "You are an AI language model. Answer each of the following question in 1 word except the second question. End each answer with '\n'.\n";
// // String prompt = formattedTranscripts.toString()
// // + "Decide if this conversation category is Inquiry, Complaint, or Warranty: \n"
// // + "Summarize this customer service conversation into 1 paragraph: \n"
// // + "Decide if the customer's sentiment is positive or negative based on this conversation (positive means the customer is not in emotional state, otherwise negative): \n"
// // + "Decide if the agent's sentiment is positive or negative based on this conversation (positive means the agent is not in emotional state, otherwise negative): \n"
// // + "Decide if the call sentiment is positive or negative based on this conversation (positive means the call's objectives are achieved, otherwise negative): ";
// // CompletionRequest categoryRequest = CompletionRequest.builder()
// // .model(currentModel)
// // .prompt(systemInstructions + prompt)
// // .echo(true)
// // .maxTokens(300)
// // .build();
// // String response = openAiService.createCompletion(categoryRequest).getChoices().get(0).getText();
// // String categoryRaw = response.substring(systemInstructions.length() + prompt.length()).trim();
// // return categoryRaw;
// // Category
// String prompt = "Decide if this conversation category is Inquiry, Complaint, or Warranty: "
// + formattedTranscripts;
// CompletionRequest categoryRequest = CompletionRequest.builder()
// .model(currentModel)
// .prompt(prompt)
// .echo(true)
// .maxTokens(60)
// .build();
// String response = openAiService.createCompletion(categoryRequest).getChoices().get(0).getText();
// String categoryRaw = response.substring(prompt.length()).trim();
// String category;
// if (categoryRaw.toLowerCase().indexOf("inquiry") != -1) {
// category = "Inquiry";
// } else if (categoryRaw.toLowerCase().indexOf("complaint") != -1) {
// category = "Complaint";
// } else if (categoryRaw.toLowerCase().indexOf("warranty") != -1) {
// category = "Warranty";
// } else {
// category = "Not found";
// }
// analysis.setCategory(category);
// // Summary
// prompt = "Summarize this customer service conversation into 1 paragraph: " + formattedTranscripts;
// CompletionRequest summaryRequest = CompletionRequest.builder()
// .model(currentModel)
// .prompt(prompt)
// .echo(true)
// .maxTokens(300)
// .build();
// response = openAiService.createCompletion(summaryRequest).getChoices().get(0).getText();
// String summary = response.substring(prompt.length()).trim();
// analysis.setSummary(summary);
// // Customer sentiment
// prompt = "Decide if the customer's sentiment is positive or negative based on this conversation (negative if the customer shows many signs of frustration / bad emotions, otherwise positive): "
// + formattedTranscripts;
// CompletionRequest customerSentimentRequest = CompletionRequest.builder()
// .model(currentModel)
// .prompt(prompt)
// .echo(true)
// .maxTokens(60)
// .build();
// response = openAiService.createCompletion(customerSentimentRequest).getChoices().get(0).getText();
// String customerSentimentRaw = response.substring(prompt.length()).trim();
// String customerSentiment;
// if (customerSentimentRaw.toLowerCase().indexOf("positive") != -1) {
// customerSentiment = "Positive";
// } else if (customerSentimentRaw.toLowerCase().indexOf("negative") != -1) {
// customerSentiment = "Negative";
// } else {
// customerSentiment = "Not found";
// }
// analysis.setCustomerSentiment(customerSentiment);
// // Employee sentiment
// prompt = "Decide if the agent's sentiment is positive or negative based on this conversation (positive if the agent's being polite and understanding when talking to the customer, otherwise negative): "
// + formattedTranscripts;
// CompletionRequest employeeSentimentRequest = CompletionRequest.builder()
// .model(currentModel)
// .prompt(prompt)
// .echo(true)
// .maxTokens(1000)
// .build();
// response = openAiService.createCompletion(employeeSentimentRequest).getChoices().get(0).getText();
// String employeeSentimentRaw = response.substring(prompt.length()).trim();
// String employeeSentiment;
// if (employeeSentimentRaw.toLowerCase().indexOf("positive") != -1) {
// employeeSentiment = "Positive";
// } else if (employeeSentimentRaw.toLowerCase().indexOf("negative") != -1) {
// employeeSentiment = "Negative";
// } else {
// employeeSentiment = "Not found";
// }
// analysis.setEmployeeSentiment(employeeSentiment);
// // Call sentiment
// prompt = "Decide if the call sentiment is positive or negative based on this conversation (positive means the call's objectives are achieved, otherwise negative): "
// + formattedTranscripts;
// CompletionRequest callSentimentRequest = CompletionRequest.builder()
// .model(currentModel)
// .prompt(prompt)
// .echo(true)
// .maxTokens(60)
// .build();
// response = openAiService.createCompletion(callSentimentRequest).getChoices().get(0).getText();
// String callSentimentRaw = response.substring(prompt.length()).trim();
// String callSentiment;
// if (callSentimentRaw.toLowerCase().indexOf("positive") != -1) {
// callSentiment = "Positive";
// } else if (callSentimentRaw.toLowerCase().indexOf("negative") != -1) {
// callSentiment = "Negative";
// } else {
// callSentiment = "Not found";
// }
// analysis.setRecordingSentiment(callSentiment);
// // Update Employee Recording Sentiment
// Recording recording = recRepo.findById(recordingIds.get(i)).get();
// Employee employee = recording.getEmployee();
// if (employee != null) {
// if (callSentiment.equals("Positive")) {
// employee.setNumPositiveSentiment(employee.getNumPositiveSentiment() + 1);
// } else {
// employee.setNumNegativeSentiment(employee.getNumNegativeSentiment() + 1);
// }
// }
// // Main issue
// prompt = "This is a " + company + " company. Describe the main issue into just a few words based on this conversation: "
// + formattedTranscripts;
// CompletionRequest mainIssueRequest = CompletionRequest.builder()
// .model(currentModel)
// .prompt(prompt)
// .echo(true)
// .maxTokens(60)
// .build();
// response = openAiService.createCompletion(mainIssueRequest).getChoices().get(0).getText();
// String mainIssue = response.substring(prompt.length()).trim();
// analysis.setMainIssue(mainIssue);
// // Employee performance
// prompt = "This is a " + company + " company and please provide an objective assessment of the agent's Interaction Skill using the following parameters:\n"
// + //
// "\n" + //
// "- Fluency: rating/100\n" + //
// "- Hospitality: rating/100\n" + //
// "- Problem Solving: rating/100\n" + //
// "- Personalization: rating/100" + //
// " Based on the following conversation: " + formattedTranscripts + //
// // "Please provide an assessment of the conversation above on a scale of 1 to
// // 100 where 1 is very poor, 50 is average and 100 is excellent. You do not
// // always have to give high values.\r\n" + //
// "You can use this guideline as a guide to rate the quality of the conversation " + //
// "Guideline: " + //
// "1.Rate conversations above 75 when they exhibit clear communication, engage participants effectively, and maintain a strong flow of information."
// + //
// "2.Assign a rating between 40 and 74 for conversations that demonstrate moderate quality. These conversations convey the main points but might lack some depth or engagement."
// + //
// "3.Use a rating of range 1 - 39 for conversations that exhibit poor communication, confusion, or lack of engagement. These conversations struggle to convey coherent information."
// + //
// "Please provide your ratings for each parameter. Your ratings should reflect your unbiased evaluation of the agent's skills. Keep in mind that the ratings should be within a reasonable range in accordance with the guideline given and should not be overly high."
// + //
// "It is best that the rating should rarely be above 85 unless it exceptionally adhere and excelled to guideline number 1."
// + //
// // An average score around 80 is expected.\r\n" + //
// "[You do not need to explain anything. Just respond with the format given.]";
// // prompt = "This is a telecommunication company and please provide an objective assessment of the agent's Interaction Skill using the following parameters:\n"
// // + //
// // "\n" + //
// // "- Fluency: rating/100\n" + //
// // "- Hospitality: rating/100\n" + //
// // "- Problem Solving: rating/100\n" + //
// // "- Personalization: rating/100" + //
// // " Based on the following conversation: " + formattedTranscripts + //
// // ". Please provide your ratings for each parameter. Your ratings should reflect your unbiased evaluation of the agent's skills. Keep in mind that the ratings should be within a reasonable range and should not be overly high. An average score around 80 is expected.\r\n"
// // + //
// // "[You do not need to explain anything. Just respond with the format given.]";
// CompletionRequest employeePerformanceRequest = CompletionRequest.builder()
// .model(currentModel)
// .prompt(prompt)
// .echo(true)
// .maxTokens(1000)
// .build();
// response = openAiService.createCompletion(employeePerformanceRequest).getChoices().get(0).getText();
// String employeePerformance = response.substring(prompt.length()).trim();
// // System.out.println(employeePerformance);
// double fluency = analysisService.getScore(employeePerformance, "fluency: ");
// double hospitality = analysisService.getScore(employeePerformance, "hospitality: ");
// double problem = analysisService.getScore(employeePerformance, "problem solving: ");
// double personalization = analysisService.getScore(employeePerformance, "personalization: ");
// double average = (fluency + hospitality + problem + personalization) / 4;
// analysis.setFluency(fluency);
// analysis.setHospitality(hospitality);
// analysis.setProblemSolving(problem);
// analysis.setPersonalization(personalization);
// analysis.setAveragePerformance(average);
// // Negative Emotions
// prompt = "List down 3 short sentences spoken by our agent which is prefixed with 'Agent:' in the conversation that you think can be improved in terms of good hospitality and manner. Answer in the following format: \n"
// +
// "'1|[old sentence spoken by agent]|[explanation why the old sentence can be improved or impolite]|[improved sentence]'\n"
// +
// "'2|[old sentence spoken by agent]|[explanation]|[improved sentence]'\n" +
// "'3|[old sentence spoken by agent]|[explanation]|[improved sentence]'\n" +
// "Pleae follow the format. Based on this conversation:\n" + formattedTranscripts + "\n";
// CompletionRequest negativeEmotionsRequest = CompletionRequest.builder()
// .model(currentModel)
// .prompt(prompt)
// .echo(true)
// .maxTokens(1500)
// .build();
// response = openAiService.createCompletion(negativeEmotionsRequest).getChoices().get(0).getText();
// String negativeEmotions = response.substring(prompt.length()).trim();
// analysis.setNegativeEmotion(negativeEmotions);
// analysisService.saveAnalysis(analysis);
// }
// return "completed";
// }
}
|
[
"com.theokanning.openai.completion.CompletionRequest.builder"
] |
[((3044, 3098), 'org.springframework.security.core.context.SecurityContextHolder.getContext'), ((5022, 5076), 'org.springframework.security.core.context.SecurityContextHolder.getContext'), ((6613, 6664), 'org.springframework.http.ResponseEntity.status'), ((6836, 6906), 'org.springframework.http.ResponseEntity.status'), ((7267, 7321), 'org.springframework.security.core.context.SecurityContextHolder.getContext'), ((8151, 8221), 'org.springframework.http.ResponseEntity.status'), ((8534, 8604), 'org.springframework.http.ResponseEntity.status'), ((9027, 9078), 'org.springframework.http.ResponseEntity.status'), ((9221, 9291), 'org.springframework.http.ResponseEntity.status'), ((9551, 9605), 'org.springframework.security.core.context.SecurityContextHolder.getContext'), ((10566, 10617), 'org.springframework.http.ResponseEntity.status'), ((10794, 10864), 'org.springframework.http.ResponseEntity.status'), ((11292, 11346), 'org.springframework.security.core.context.SecurityContextHolder.getContext'), ((12676, 12727), 'org.springframework.http.ResponseEntity.status'), ((12915, 12985), 'org.springframework.http.ResponseEntity.status'), ((13266, 13320), 'org.springframework.security.core.context.SecurityContextHolder.getContext'), ((16180, 16300), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((16180, 16287), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((16180, 16268), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((16180, 16252), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((16180, 16232), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((16992, 17113), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((16992, 17100), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((16992, 17080), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((16992, 17064), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((16992, 17044), 
'com.theokanning.openai.completion.CompletionRequest.builder'), ((17597, 17717), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((17597, 17704), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((17597, 17685), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((17597, 17669), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((17597, 17649), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((18550, 18672), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((18550, 18659), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((18550, 18638), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((18550, 18622), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((18550, 18602), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((19460, 19580), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((19460, 19567), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((19460, 19548), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((19460, 19532), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((19460, 19512), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((20678, 20798), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((20678, 20785), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((20678, 20766), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((20678, 20750), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((20678, 20730), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((23708, 23830), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((23708, 23817), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((23708, 23796), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((23708, 23780), 
'com.theokanning.openai.completion.CompletionRequest.builder'), ((23708, 23760), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((25359, 25481), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((25359, 25468), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((25359, 25447), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((25359, 25431), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((25359, 25411), 'com.theokanning.openai.completion.CompletionRequest.builder')]
|
package com.theokanning.openai.service;
import com.theokanning.openai.OpenAiHttpException;
import com.theokanning.openai.edit.EditRequest;
import com.theokanning.openai.edit.EditResult;
import org.junit.jupiter.api.Test;
import static org.junit.jupiter.api.Assertions.assertNotNull;
public class EditTest {
String token = System.getenv("OPENAI_TOKEN");
com.theokanning.openai.service.OpenAiService service = new OpenAiService(token);
@Test
void edit() throws OpenAiHttpException {
EditRequest request = EditRequest.builder()
.model("text-davinci-edit-001")
.input("What day of the wek is it?")
.instruction("Fix the spelling mistakes")
.build();
EditResult result = service.createEdit(request);
assertNotNull(result.getChoices().get(0).getText());
}
}
|
[
"com.theokanning.openai.edit.EditRequest.builder"
] |
[((532, 737), 'com.theokanning.openai.edit.EditRequest.builder'), ((532, 712), 'com.theokanning.openai.edit.EditRequest.builder'), ((532, 654), 'com.theokanning.openai.edit.EditRequest.builder'), ((532, 601), 'com.theokanning.openai.edit.EditRequest.builder')]
|
package com.theokanning.openai;
import com.theokanning.openai.search.SearchRequest;
import com.theokanning.openai.search.SearchResult;
import org.junit.jupiter.api.Test;
import java.util.Arrays;
import java.util.List;
import static org.junit.jupiter.api.Assertions.assertFalse;
public class SearchTest {
String token = System.getenv("OPENAI_TOKEN");
OpenAiService service = new OpenAiService(token);
@Test
void search() {
SearchRequest searchRequest = SearchRequest.builder()
.documents(Arrays.asList("Water", "Earth", "Electricity", "Fire"))
.query("Pikachu")
.build();
List<SearchResult> results = service.search("ada", searchRequest);
assertFalse(results.isEmpty());
}
}
|
[
"com.theokanning.openai.search.SearchRequest.builder"
] |
[((483, 648), 'com.theokanning.openai.search.SearchRequest.builder'), ((483, 623), 'com.theokanning.openai.search.SearchRequest.builder'), ((483, 589), 'com.theokanning.openai.search.SearchRequest.builder')]
|
package com.theokanning.openai;
import com.theokanning.openai.answer.AnswerRequest;
import com.theokanning.openai.answer.AnswerResult;
import org.junit.jupiter.api.Test;
import java.util.Arrays;
import java.util.Collections;
import static org.junit.jupiter.api.Assertions.assertNotNull;
public class AnswerTest {
String token = System.getenv("OPENAI_TOKEN");
OpenAiService service = new OpenAiService(token);
@Test
void createAnswer() {
AnswerRequest answerRequest = AnswerRequest.builder()
.documents(Arrays.asList("Puppy A is happy.", "Puppy B is sad."))
.question("which puppy is happy?")
.searchModel("ada")
.model("curie")
.examplesContext("In 2017, U.S. life expectancy was 78.6 years.")
.examples(Collections.singletonList(
Arrays.asList("What is human life expectancy in the United States?", "78 years.")
))
.maxTokens(5)
.stop(Arrays.asList("\n", "<|endoftext|>"))
.build();
AnswerResult result = service.createAnswer(answerRequest);
assertNotNull(result.getAnswers().get(0));
}
}
|
[
"com.theokanning.openai.answer.AnswerRequest.builder"
] |
[((498, 1097), 'com.theokanning.openai.answer.AnswerRequest.builder'), ((498, 1072), 'com.theokanning.openai.answer.AnswerRequest.builder'), ((498, 1012), 'com.theokanning.openai.answer.AnswerRequest.builder'), ((498, 982), 'com.theokanning.openai.answer.AnswerRequest.builder'), ((498, 804), 'com.theokanning.openai.answer.AnswerRequest.builder'), ((498, 722), 'com.theokanning.openai.answer.AnswerRequest.builder'), ((498, 690), 'com.theokanning.openai.answer.AnswerRequest.builder'), ((498, 654), 'com.theokanning.openai.answer.AnswerRequest.builder'), ((498, 603), 'com.theokanning.openai.answer.AnswerRequest.builder')]
|
import cn.hutool.core.util.StrUtil;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.theokanning.openai.OpenAiApi;
import com.theokanning.openai.completion.CompletionChoice;
import com.theokanning.openai.completion.CompletionRequest;
import com.theokanning.openai.completion.chat.*;
import com.theokanning.openai.image.CreateImageRequest;
import com.theokanning.openai.service.OpenAiService;
import okhttp3.OkHttpClient;
import retrofit2.Retrofit;
import java.net.InetSocketAddress;
import java.net.Proxy;
import java.time.Duration;
import java.time.temporal.ChronoUnit;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.function.Consumer;
import static com.theokanning.openai.service.OpenAiService.*;
/**
* @author lijiatao
* 时间: 2023/12/2
*/
public class Main {
public static void main(String[] args) {
String token = "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";
String proxyHost = "127.0.0.1";
int proxyPort = 7890;
OpenAiService service = buildOpenAiService(token, proxyHost, proxyPort);
String prompt = "";
//文本补全
prompt = "没有人能";
testCompletion(service, prompt);
// //图片生成
prompt = "小猫和小狗打架漫画";
// testImageGenerate(service, prompt);
//
// //对话
prompt = "你有自我意识吗?";
testChatCompletion(service, prompt);
//立即释放连接
service.shutdownExecutor();
}
private static void testChatCompletion(OpenAiService service, String prompt) {
System.out.println("Creating chat completion...");
final List<ChatMessage> messages = new ArrayList<>();
final ChatMessage systemMessage = new ChatMessage(ChatMessageRole.SYSTEM.value(), prompt);
messages.add(systemMessage);
ChatCompletionRequest chatCompletionRequest = ChatCompletionRequest
.builder()
.model("gpt-3.5-turbo")
.messages(messages)
.n(1)
.maxTokens(500)
.logitBias(new HashMap<>())
.build();
//完整对话
service.createChatCompletion(chatCompletionRequest)
.getChoices().forEach(new Consumer<ChatCompletionChoice>() {
@Override
public void accept(ChatCompletionChoice chatCompletionChoice) {
System.out.println(chatCompletionChoice.getMessage());
}
});
//流式对话(逐Token返回)
// service.streamChatCompletion(chatCompletionRequest)
// .doOnError(Throwable::printStackTrace)
// .blockingForEach(System.out::println);
}
private static void testImageGenerate(OpenAiService service, String prompt) {
System.out.println("\nCreating Image...");
CreateImageRequest request = CreateImageRequest.builder()
.prompt(prompt)
.build();
System.out.println("\nImage is located at:");
System.out.println(service.createImage(request).getData().get(0).getUrl());
}
private static void testCompletion(OpenAiService service, String prompt) {
System.out.println("\nCreating completion...");
CompletionRequest completionRequest = CompletionRequest.builder()
.model("text-davinci-003")
.prompt(prompt)
.echo(true)
.user("testing")
.n(3)
.build();
service.createCompletion(completionRequest).getChoices().forEach(new Consumer<CompletionChoice>() {
@Override
public void accept(CompletionChoice completionChoice) {
System.out.println(completionChoice.getText());
}
});
}
private static OpenAiService buildOpenAiService(String token, String proxyHost, int proxyPort) {
//构建HTTP代理
Proxy proxy = null;
if (StrUtil.isNotBlank(proxyHost)) {
proxy = new Proxy(Proxy.Type.HTTP, new InetSocketAddress(proxyHost, proxyPort));
}
//构建HTTP客户端
OkHttpClient client = defaultClient(token, Duration.of(60, ChronoUnit.SECONDS))
.newBuilder()
.proxy(proxy)
.build();
ObjectMapper mapper = defaultObjectMapper();
Retrofit retrofit = defaultRetrofit(client, mapper);
OpenAiApi api = retrofit.create(OpenAiApi.class);
OpenAiService service = new OpenAiService(api, client.dispatcher().executorService());
return service;
}
}
|
[
"com.theokanning.openai.image.CreateImageRequest.builder",
"com.theokanning.openai.completion.CompletionRequest.builder"
] |
[((2976, 3061), 'com.theokanning.openai.image.CreateImageRequest.builder'), ((2976, 3036), 'com.theokanning.openai.image.CreateImageRequest.builder'), ((3390, 3600), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((3390, 3575), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((3390, 3553), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((3390, 3520), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((3390, 3492), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((3390, 3460), 'com.theokanning.openai.completion.CompletionRequest.builder')]
|
package com.theokanning.openai;
import com.theokanning.openai.finetune.FineTuneRequest;
import com.theokanning.openai.finetune.FineTuneEvent;
import com.theokanning.openai.finetune.FineTuneResult;
import org.junit.jupiter.api.*;
import java.util.List;
import java.util.concurrent.TimeUnit;
import static org.junit.jupiter.api.Assertions.*;
@TestMethodOrder(MethodOrderer.OrderAnnotation.class)
public class FineTuneTest {
static OpenAiService service;
static String fileId;
static String fineTuneId;
@BeforeAll
static void setup() throws Exception {
String token = System.getenv("OPENAI_TOKEN");
service = new OpenAiService(token);
fileId = service.uploadFile("fine-tune", "src/test/resources/fine-tuning-data.jsonl").getId();
// wait for file to be processed
TimeUnit.SECONDS.sleep(10);
}
@AfterAll
static void teardown() {
service.deleteFile(fileId);
}
@Test
@Order(1)
void createFineTune() {
FineTuneRequest request = FineTuneRequest.builder()
.trainingFile(fileId)
.model("ada")
.nEpochs(4)
.build();
FineTuneResult fineTune = service.createFineTune(request);
fineTuneId = fineTune.getId();
assertEquals("pending", fineTune.getStatus());
}
@Test
@Order(2)
void listFineTunes() {
List<FineTuneResult> fineTunes = service.listFineTunes();
assertTrue(fineTunes.stream().anyMatch(fineTune -> fineTune.getId().equals(fineTuneId)));
}
@Test
@Order(3)
void listFineTuneEvents() {
List<FineTuneEvent> events = service.listFineTuneEvents(fineTuneId);
assertFalse(events.isEmpty());
}
@Test
@Order(3)
void retrieveFineTune() {
FineTuneResult fineTune = service.retrieveFineTune(fineTuneId);
assertEquals("ada", fineTune.getModel());
}
@Test
@Order(4)
void cancelFineTune() {
FineTuneResult fineTune = service.cancelFineTune(fineTuneId);
assertEquals("cancelled", fineTune.getStatus());
}
}
|
[
"com.theokanning.openai.finetune.FineTuneRequest.builder"
] |
[((826, 852), 'java.util.concurrent.TimeUnit.SECONDS.sleep'), ((1033, 1179), 'com.theokanning.openai.finetune.FineTuneRequest.builder'), ((1033, 1154), 'com.theokanning.openai.finetune.FineTuneRequest.builder'), ((1033, 1126), 'com.theokanning.openai.finetune.FineTuneRequest.builder'), ((1033, 1096), 'com.theokanning.openai.finetune.FineTuneRequest.builder')]
|
package com.robert.smartbi.demo.config;
import com.baomidou.mybatisplus.core.toolkit.Assert;
import com.theokanning.openai.completion.CompletionChoice;
import com.theokanning.openai.completion.CompletionRequest;
import com.theokanning.openai.completion.chat.ChatCompletionChoice;
import com.theokanning.openai.completion.chat.ChatCompletionRequest;
import com.theokanning.openai.completion.chat.ChatMessage;
import com.theokanning.openai.completion.chat.ChatMessageRole;
import com.theokanning.openai.service.OpenAiService;
import jakarta.annotation.Resource;
import org.junit.jupiter.api.Test;
import org.springframework.boot.test.context.SpringBootTest;
import java.util.ArrayList;
import java.util.List;
@SpringBootTest
public class ConfigTest {
@Resource
private OpenAiService openAiService;
@Test
void testChatGPTConfig() {
System.out.println("----- testChatGPTConfig method test ------");
List<ChatMessage> messages = new ArrayList<ChatMessage>();
ChatMessage systemMessage = new ChatMessage(ChatMessageRole.SYSTEM.value(), "我是一个 MySQL 数据库专家," + "精通 TSQL 语句编写,我输出代码将严格遵从以下格式,不要有多余注释:\n" + "<<<<<<\n" + "{sql代码}\n" + "<<<<<<");
ChatMessage userMessage = new ChatMessage(ChatMessageRole.USER.value(), "帮我生成一个用户数据表,有用户id、角色及一些常用字段");
messages.add(systemMessage);
messages.add(userMessage);
ChatCompletionRequest chatCompletionRequest = ChatCompletionRequest.builder().model("gpt-3.5-turbo-1106").messages(messages).build();
ChatCompletionChoice choice = openAiService.createChatCompletion(chatCompletionRequest).getChoices().getFirst();
Assert.isTrue(choice != null, "成功");
String content = choice.getMessage().getContent();
System.out.println("来自ChatGPT的回复: " + content);
}
}
|
[
"com.theokanning.openai.completion.chat.ChatMessageRole.SYSTEM.value",
"com.theokanning.openai.completion.chat.ChatMessageRole.USER.value",
"com.theokanning.openai.completion.chat.ChatCompletionRequest.builder"
] |
[((1043, 1073), 'com.theokanning.openai.completion.chat.ChatMessageRole.SYSTEM.value'), ((1304, 1332), 'com.theokanning.openai.completion.chat.ChatMessageRole.USER.value'), ((1543, 1629), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((1543, 1621), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((1543, 1602), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder')]
|
package com.theokanning.openai;
import com.theokanning.openai.edit.EditRequest;
import com.theokanning.openai.edit.EditResult;
import org.junit.jupiter.api.Test;
import static org.junit.jupiter.api.Assertions.assertNotNull;
public class EditTest {
String token = System.getenv("OPENAI_TOKEN");
OpenAiService service = new OpenAiService(token);
@Test
void edit() {
EditRequest request = EditRequest.builder()
.model("text-davinci-edit-001")
.input("What day of the wek is it?")
.instruction("Fix the spelling mistakes")
.build();
EditResult result = service.createEdit( request);
assertNotNull(result.getChoices().get(0).getText());
}
@Test
void editDeprecated() {
EditRequest request = EditRequest.builder()
.input("What day of the wek is it?")
.instruction("Fix the spelling mistakes")
.build();
EditResult result = service.createEdit("text-davinci-edit-001", request);
assertNotNull(result.getChoices().get(0).getText());
}
}
|
[
"com.theokanning.openai.edit.EditRequest.builder"
] |
[((415, 620), 'com.theokanning.openai.edit.EditRequest.builder'), ((415, 595), 'com.theokanning.openai.edit.EditRequest.builder'), ((415, 537), 'com.theokanning.openai.edit.EditRequest.builder'), ((415, 484), 'com.theokanning.openai.edit.EditRequest.builder'), ((818, 975), 'com.theokanning.openai.edit.EditRequest.builder'), ((818, 950), 'com.theokanning.openai.edit.EditRequest.builder'), ((818, 892), 'com.theokanning.openai.edit.EditRequest.builder')]
|
package example;
import com.theokanning.openai.completion.chat.ChatCompletionRequest;
import com.theokanning.openai.completion.chat.ChatMessage;
import com.theokanning.openai.completion.chat.ChatMessageRole;
import com.theokanning.openai.service.OpenAiService;
import com.theokanning.openai.completion.CompletionRequest;
import com.theokanning.openai.image.CreateImageRequest;
import java.time.Duration;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
class OpenAiApiExample {
public static void main(String... args) {
String token = System.getenv("OPENAI_TOKEN");
OpenAiService service = new OpenAiService(token, Duration.ofSeconds(30));
System.out.println("\nCreating completion...");
CompletionRequest completionRequest = CompletionRequest.builder()
.model("babbage-002")
.prompt("Somebody once told me the world is gonna roll me")
.echo(true)
.user("testing")
.n(3)
.build();
service.createCompletion(completionRequest).getChoices().forEach(System.out::println);
System.out.println("\nCreating Image...");
CreateImageRequest request = CreateImageRequest.builder()
.prompt("A cow breakdancing with a turtle")
.build();
System.out.println("\nImage is located at:");
System.out.println(service.createImage(request).getData().get(0).getUrl());
System.out.println("Streaming chat completion...");
final List<ChatMessage> messages = new ArrayList<>();
final ChatMessage systemMessage = new ChatMessage(ChatMessageRole.SYSTEM.value(), "You are a dog and will speak as such.");
messages.add(systemMessage);
ChatCompletionRequest chatCompletionRequest = ChatCompletionRequest
.builder()
.model("gpt-3.5-turbo")
.messages(messages)
.n(1)
.maxTokens(50)
.logitBias(new HashMap<>())
.build();
service.streamChatCompletion(chatCompletionRequest)
.doOnError(Throwable::printStackTrace)
.blockingForEach(System.out::println);
service.shutdownExecutor();
}
}
|
[
"com.theokanning.openai.completion.chat.ChatMessageRole.SYSTEM.value",
"com.theokanning.openai.image.CreateImageRequest.builder",
"com.theokanning.openai.completion.CompletionRequest.builder"
] |
[((794, 1043), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((794, 1018), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((794, 996), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((794, 963), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((794, 935), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((794, 859), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((1229, 1342), 'com.theokanning.openai.image.CreateImageRequest.builder'), ((1229, 1317), 'com.theokanning.openai.image.CreateImageRequest.builder'), ((1664, 1694), 'com.theokanning.openai.completion.chat.ChatMessageRole.SYSTEM.value')]
|
package com.theokanning.openai.service;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.node.ObjectNode;
import com.fasterxml.jackson.databind.node.TextNode;
import com.theokanning.openai.completion.chat.ChatFunction;
import com.theokanning.openai.completion.chat.ChatFunctionCall;
import com.theokanning.openai.completion.chat.ChatMessage;
import com.theokanning.openai.completion.chat.ChatMessageRole;
import java.util.*;
public class FunctionExecutor {
private ObjectMapper MAPPER = new ObjectMapper();
private final Map<String, ChatFunction> FUNCTIONS = new HashMap<>();
public FunctionExecutor(List<ChatFunction> functions) {
setFunctions(functions);
}
public FunctionExecutor(List<ChatFunction> functions, ObjectMapper objectMapper) {
setFunctions(functions);
setObjectMapper(objectMapper);
}
public Optional<ChatMessage> executeAndConvertToMessageSafely(ChatFunctionCall call) {
try {
return Optional.ofNullable(executeAndConvertToMessage(call));
} catch (Exception ignored) {
return Optional.empty();
}
}
public ChatMessage executeAndConvertToMessageHandlingExceptions(ChatFunctionCall call) {
try {
return executeAndConvertToMessage(call);
} catch (Exception exception) {
exception.printStackTrace();
return convertExceptionToMessage(exception);
}
}
public ChatMessage convertExceptionToMessage(Exception exception) {
String error = exception.getMessage() == null ? exception.toString() : exception.getMessage();
return new ChatMessage(ChatMessageRole.FUNCTION.value(), "{\"error\": \"" + error + "\"}", "error");
}
public ChatMessage executeAndConvertToMessage(ChatFunctionCall call) {
return new ChatMessage(ChatMessageRole.FUNCTION.value(), executeAndConvertToJson(call).toPrettyString(), call.getName());
}
public JsonNode executeAndConvertToJson(ChatFunctionCall call) {
try {
Object execution = execute(call);
if (execution instanceof TextNode) {
JsonNode objectNode = MAPPER.readTree(((TextNode) execution).asText());
if (objectNode.isMissingNode())
return (JsonNode) execution;
return objectNode;
}
if (execution instanceof ObjectNode) {
return (JsonNode) execution;
}
if (execution instanceof String) {
JsonNode objectNode = MAPPER.readTree((String) execution);
if (objectNode.isMissingNode())
throw new RuntimeException("Parsing exception");
return objectNode;
}
return MAPPER.readValue(MAPPER.writeValueAsString(execution), JsonNode.class);
} catch (Exception e) {
throw new RuntimeException(e);
}
}
@SuppressWarnings("unchecked")
public <T> T execute(ChatFunctionCall call) {
ChatFunction function = FUNCTIONS.get(call.getName());
Object obj;
try {
JsonNode arguments = call.getArguments();
obj = MAPPER.readValue(arguments instanceof TextNode ? arguments.asText() : arguments.toPrettyString(), function.getParametersClass());
} catch (JsonProcessingException e) {
throw new RuntimeException(e);
}
return (T) function.getExecutor().apply(obj);
}
public List<ChatFunction> getFunctions() {
return new ArrayList<>(FUNCTIONS.values());
}
public void setFunctions(List<ChatFunction> functions) {
this.FUNCTIONS.clear();
functions.forEach(f -> this.FUNCTIONS.put(f.getName(), f));
}
public void setObjectMapper(ObjectMapper objectMapper) {
this.MAPPER = objectMapper;
}
}
|
[
"com.theokanning.openai.completion.chat.ChatMessageRole.FUNCTION.value"
] |
[((1795, 1827), 'com.theokanning.openai.completion.chat.ChatMessageRole.FUNCTION.value'), ((1986, 2018), 'com.theokanning.openai.completion.chat.ChatMessageRole.FUNCTION.value')]
|
package com.theokanning.openai.service;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.node.ObjectNode;
import com.fasterxml.jackson.databind.node.TextNode;
import com.theokanning.openai.completion.chat.ChatFunction;
import com.theokanning.openai.completion.chat.ChatFunctionCall;
import com.theokanning.openai.completion.chat.ChatMessage;
import com.theokanning.openai.completion.chat.ChatMessageRole;
import java.util.*;
public class FunctionExecutor {
private ObjectMapper MAPPER = new ObjectMapper();
private final Map<String, ChatFunction> FUNCTIONS = new HashMap<>();
public FunctionExecutor(List<ChatFunction> functions) {
setFunctions(functions);
}
public FunctionExecutor(List<ChatFunction> functions, ObjectMapper objectMapper) {
setFunctions(functions);
setObjectMapper(objectMapper);
}
public Optional<ChatMessage> executeAndConvertToMessageSafely(ChatFunctionCall call) {
try {
return Optional.ofNullable(executeAndConvertToMessage(call));
} catch (Exception ignored) {
return Optional.empty();
}
}
public ChatMessage executeAndConvertToMessageHandlingExceptions(ChatFunctionCall call) {
try {
return executeAndConvertToMessage(call);
} catch (Exception exception) {
exception.printStackTrace();
return convertExceptionToMessage(exception);
}
}
public ChatMessage convertExceptionToMessage(Exception exception) {
String error = exception.getMessage() == null ? exception.toString() : exception.getMessage();
return new ChatMessage(ChatMessageRole.FUNCTION.value(), "{\"error\": \"" + error + "\"}", "error");
}
public ChatMessage executeAndConvertToMessage(ChatFunctionCall call) {
return new ChatMessage(ChatMessageRole.FUNCTION.value(), executeAndConvertToJson(call).toPrettyString(), call.getName());
}
public JsonNode executeAndConvertToJson(ChatFunctionCall call) {
try {
Object execution = execute(call);
if (execution instanceof TextNode) {
JsonNode objectNode = MAPPER.readTree(((TextNode) execution).asText());
if (objectNode.isMissingNode())
return (JsonNode) execution;
return objectNode;
}
if (execution instanceof ObjectNode) {
return (JsonNode) execution;
}
if (execution instanceof String) {
JsonNode objectNode = MAPPER.readTree((String) execution);
if (objectNode.isMissingNode())
throw new RuntimeException("Parsing exception");
return objectNode;
}
return MAPPER.readValue(MAPPER.writeValueAsString(execution), JsonNode.class);
} catch (Exception e) {
throw new RuntimeException(e);
}
}
@SuppressWarnings("unchecked")
public <T> T execute(ChatFunctionCall call) {
ChatFunction function = FUNCTIONS.get(call.getName());
Object obj;
try {
JsonNode arguments = call.getArguments();
obj = MAPPER.readValue(arguments instanceof TextNode ? arguments.asText() : arguments.toPrettyString(), function.getParametersClass());
} catch (JsonProcessingException e) {
throw new RuntimeException(e);
}
return (T) function.getExecutor().apply(obj);
}
public List<ChatFunction> getFunctions() {
return new ArrayList<>(FUNCTIONS.values());
}
public void setFunctions(List<ChatFunction> functions) {
this.FUNCTIONS.clear();
functions.forEach(f -> this.FUNCTIONS.put(f.getName(), f));
}
public void setObjectMapper(ObjectMapper objectMapper) {
this.MAPPER = objectMapper;
}
}
|
[
"com.theokanning.openai.completion.chat.ChatMessageRole.FUNCTION.value"
] |
[((1795, 1827), 'com.theokanning.openai.completion.chat.ChatMessageRole.FUNCTION.value'), ((1986, 2018), 'com.theokanning.openai.completion.chat.ChatMessageRole.FUNCTION.value')]
|
package com.theokanning.openai.service;
import com.theokanning.openai.OpenAiHttpException;
import com.theokanning.openai.edit.EditRequest;
import com.theokanning.openai.edit.EditResult;
import org.junit.jupiter.api.Test;
import static org.junit.jupiter.api.Assertions.assertNotNull;
public class EditTest {
String token = System.getenv("OPENAI_TOKEN");
com.theokanning.openai.service.OpenAiService service = new OpenAiService(token);
@Test
void edit() throws OpenAiHttpException {
EditRequest request = EditRequest.builder()
.model("text-davinci-edit-001")
.input("What day of the wek is it?")
.instruction("Fix the spelling mistakes")
.build();
EditResult result = service.createEdit(request);
assertNotNull(result.getChoices().get(0).getText());
}
}
|
[
"com.theokanning.openai.edit.EditRequest.builder"
] |
[((532, 737), 'com.theokanning.openai.edit.EditRequest.builder'), ((532, 712), 'com.theokanning.openai.edit.EditRequest.builder'), ((532, 654), 'com.theokanning.openai.edit.EditRequest.builder'), ((532, 601), 'com.theokanning.openai.edit.EditRequest.builder')]
|
package com.theokanning.openai.service;
import com.theokanning.openai.OpenAiHttpException;
import com.theokanning.openai.edit.EditRequest;
import com.theokanning.openai.edit.EditResult;
import org.junit.jupiter.api.Test;
import static org.junit.jupiter.api.Assertions.assertNotNull;
public class EditTest {
String token = System.getenv("OPENAI_TOKEN");
com.theokanning.openai.service.OpenAiService service = new OpenAiService(token);
@Test
void edit() throws OpenAiHttpException {
EditRequest request = EditRequest.builder()
.model("text-davinci-edit-001")
.input("What day of the wek is it?")
.instruction("Fix the spelling mistakes")
.build();
EditResult result = service.createEdit(request);
assertNotNull(result.getChoices().get(0).getText());
}
}
|
[
"com.theokanning.openai.edit.EditRequest.builder"
] |
[((532, 737), 'com.theokanning.openai.edit.EditRequest.builder'), ((532, 712), 'com.theokanning.openai.edit.EditRequest.builder'), ((532, 654), 'com.theokanning.openai.edit.EditRequest.builder'), ((532, 601), 'com.theokanning.openai.edit.EditRequest.builder')]
|
package com.theokanning.openai.service;
import com.theokanning.openai.OpenAiHttpException;
import com.theokanning.openai.edit.EditRequest;
import com.theokanning.openai.edit.EditResult;
import org.junit.jupiter.api.Test;
import static org.junit.jupiter.api.Assertions.assertNotNull;
public class EditTest {
String token = System.getenv("OPENAI_TOKEN");
com.theokanning.openai.service.OpenAiService service = new OpenAiService(token);
@Test
void edit() throws OpenAiHttpException {
EditRequest request = EditRequest.builder()
.model("text-davinci-edit-001")
.input("What day of the wek is it?")
.instruction("Fix the spelling mistakes")
.build();
EditResult result = service.createEdit(request);
assertNotNull(result.getChoices().get(0).getText());
}
}
|
[
"com.theokanning.openai.edit.EditRequest.builder"
] |
[((532, 737), 'com.theokanning.openai.edit.EditRequest.builder'), ((532, 712), 'com.theokanning.openai.edit.EditRequest.builder'), ((532, 654), 'com.theokanning.openai.edit.EditRequest.builder'), ((532, 601), 'com.theokanning.openai.edit.EditRequest.builder')]
|
package com.theokanning.openai.service;
import com.theokanning.openai.OpenAiHttpException;
import com.theokanning.openai.edit.EditRequest;
import com.theokanning.openai.edit.EditResult;
import org.junit.jupiter.api.Test;
import static org.junit.jupiter.api.Assertions.assertNotNull;
public class EditTest {
String token = System.getenv("OPENAI_TOKEN");
com.theokanning.openai.service.OpenAiService service = new OpenAiService(token);
@Test
void edit() throws OpenAiHttpException {
EditRequest request = EditRequest.builder()
.model("text-davinci-edit-001")
.input("What day of the wek is it?")
.instruction("Fix the spelling mistakes")
.build();
EditResult result = service.createEdit(request);
assertNotNull(result.getChoices().get(0).getText());
}
}
|
[
"com.theokanning.openai.edit.EditRequest.builder"
] |
[((532, 737), 'com.theokanning.openai.edit.EditRequest.builder'), ((532, 712), 'com.theokanning.openai.edit.EditRequest.builder'), ((532, 654), 'com.theokanning.openai.edit.EditRequest.builder'), ((532, 601), 'com.theokanning.openai.edit.EditRequest.builder')]
|
/*
* Copyright The Microcks Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.github.microcks.util.ai;
import io.github.microcks.domain.Exchange;
import io.github.microcks.domain.Operation;
import io.github.microcks.domain.Resource;
import io.github.microcks.domain.Service;
import io.github.microcks.domain.ServiceType;
import io.github.microcks.util.DispatchStyles;
import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.databind.DeserializationFeature;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.PropertyNamingStrategies;
import com.theokanning.openai.completion.chat.ChatCompletionChoice;
import com.theokanning.openai.completion.chat.ChatCompletionRequest;
import com.theokanning.openai.completion.chat.ChatCompletionResult;
import com.theokanning.openai.completion.chat.ChatMessage;
import com.theokanning.openai.completion.chat.ChatMessageRole;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.boot.web.client.RestTemplateBuilder;
import org.springframework.http.HttpEntity;
import org.springframework.http.HttpHeaders;
import org.springframework.http.HttpMethod;
import org.springframework.http.converter.json.MappingJackson2HttpMessageConverter;
import org.springframework.web.client.RestTemplate;
import java.time.Duration;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
* This is an implementation of {@code AICopilot} using OpenAI API.
* @author laurent
*/
public class OpenAICopilot implements AICopilot {
/** A simple logger for diagnostic messages. */
private static Logger log = LoggerFactory.getLogger(OpenAICopilot.class);
/** Configuration parameter holding the OpenAI API key. */
public static final String API_KEY_CONFIG = "api-key";
/** Configuration parameter holding the OpenAI API URL. */
public static final String API_URL_CONFIG = "api-url";
/** Configuration parameters holding the timeout in seconds for API calls. */
public static final String TIMEOUT_KEY_CONFIG = "timeout";
/** Configuration parameter holding the name of model to use. */
public static final String MODEL_KEY_CONFIG = "model";
/** Configuration parameter holding the maximum number of tokens to use. */
public static final String MAX_TOKENS_KEY_CONFIG = "maxTokens";
/** The mandatory configuration keys required by this implementation. */
protected static final String[] MANDATORY_CONFIG_KEYS = { API_KEY_CONFIG };
/** Default online URL for OpenAI API. */
private static final String OPENAI_BASE_URL = "https://api.openai.com/";
private static final String SECTION_DELIMITER = "\n###\n";
private RestTemplate restTemplate;
private String apiUrl = OPENAI_BASE_URL;
private String apiKey;
private Duration timeout = Duration.ofSeconds(20);
private String model = "gpt-3.5-turbo";
private int maxTokens = 2000;
/**
* Build a new OpenAICopilot with its configuration.
* @param configuration The configuration for connecting to OpenAI services.
*/
public OpenAICopilot(Map<String, String> configuration) {
if (configuration.containsKey(TIMEOUT_KEY_CONFIG)) {
try {
timeout = Duration.ofSeconds(Integer.parseInt(configuration.get(TIMEOUT_KEY_CONFIG)));
} catch (Exception e) {
log.warn("Timeout was provided but cannot be parsed. Sticking to the default.");
}
}
if (configuration.containsKey(MAX_TOKENS_KEY_CONFIG)) {
try {
maxTokens = Integer.parseInt(configuration.get(MAX_TOKENS_KEY_CONFIG));
} catch (Exception e) {
log.warn("MaxTokens was provided but cannot be parsed. Sticking to the default.");
}
}
if (configuration.containsKey(MODEL_KEY_CONFIG)) {
model = configuration.get(MODEL_KEY_CONFIG);
}
if (configuration.containsKey(API_URL_CONFIG)) {
apiUrl = configuration.get(API_URL_CONFIG);
}
// Finally retrieve the OpenAI Api key.
apiKey = configuration.get(API_KEY_CONFIG);
// Initialize a Rest template for interacting with OpenAI API.
// We need to register a custom Jackson converter to handle serialization of name and function_call of messages.
restTemplate = new RestTemplateBuilder().setReadTimeout(timeout)
.additionalMessageConverters(mappingJacksonHttpMessageConverter()).build();
}
/**
* Get mandatory configuration parameters.
* @return The mandatory configuration keys required by this implementation
*/
public static final String[] getMandatoryConfigKeys() {
return MANDATORY_CONFIG_KEYS;
}
@Override
public List<? extends Exchange> suggestSampleExchanges(Service service, Operation operation, Resource contract,
int number) throws Exception {
String prompt = "";
if (service.getType() == ServiceType.REST) {
prompt = preparePromptForOpenAPI(operation, contract, number);
} else if (service.getType() == ServiceType.GRAPHQL) {
prompt = preparePromptForGraphQL(operation, contract, number);
} else if (service.getType() == ServiceType.EVENT) {
prompt = preparePromptForAsyncAPI(operation, contract, number);
} else if (service.getType() == ServiceType.GRPC) {
prompt = preparePromptForGrpc(service, operation, contract, number);
}
log.debug("Asking OpenAI to suggest samples for this prompt: {}", prompt);
final List<ChatMessage> messages = new ArrayList<>();
final ChatMessage assistantMessage = new ChatMessage(ChatMessageRole.ASSISTANT.value(), prompt);
messages.add(assistantMessage);
ChatCompletionRequest chatCompletionRequest = ChatCompletionRequest.builder().model(model).messages(messages).n(1)
.maxTokens(maxTokens).logitBias(new HashMap<>()).build();
// Build a full HttpEntity as we need to specify authentication headers.
HttpEntity<ChatCompletionRequest> request = new HttpEntity<>(chatCompletionRequest,
createAuthenticationHeaders());
ChatCompletionResult completionResult = restTemplate
.exchange(apiUrl + "/v1/chat/completions", HttpMethod.POST, request, ChatCompletionResult.class).getBody();
if (completionResult != null) {
ChatCompletionChoice choice = completionResult.getChoices().get(0);
log.debug("Got this raw output from OpenAI: {}", choice.getMessage().getContent());
if (service.getType() == ServiceType.EVENT) {
return AICopilotHelper.parseUnidirectionalEventTemplateOutput(choice.getMessage().getContent());
} else {
return AICopilotHelper.parseRequestResponseTemplateOutput(service, operation,
choice.getMessage().getContent());
}
}
// Return empty list.
return new ArrayList<>();
}
private String preparePromptForOpenAPI(Operation operation, Resource contract, int number) throws Exception {
StringBuilder prompt = new StringBuilder(
AICopilotHelper.getOpenAPIOperationPromptIntro(operation.getName(), number));
// Build a prompt reusing templates and elements from AICopilotHelper.
prompt.append("\n");
prompt.append(AICopilotHelper.YAML_FORMATTING_PROMPT);
prompt.append("\n");
prompt.append(AICopilotHelper.getRequestResponseExampleYamlFormattingDirective(1));
prompt.append(SECTION_DELIMITER);
prompt.append(AICopilotHelper.removeTokensFromSpec(contract.getContent(), operation.getName()));
return prompt.toString();
}
private String preparePromptForGraphQL(Operation operation, Resource contract, int number) {
StringBuilder prompt = new StringBuilder(
AICopilotHelper.getGraphQLOperationPromptIntro(operation.getName(), number));
// We need to indicate the name or variables we want.
if (DispatchStyles.QUERY_ARGS.equals(operation.getDispatcher())) {
StringBuilder variablesList = new StringBuilder();
if (operation.getDispatcherRules().contains("&&")) {
String[] variables = operation.getDispatcherRules().split("&&");
for (int i = 0; i < variables.length; i++) {
String variable = variables[i];
variablesList.append("$").append(variable.trim());
if (i < variables.length - 1) {
variablesList.append(", ");
}
}
} else {
variablesList.append("$").append(operation.getDispatcherRules());
}
prompt.append("Use only '").append(variablesList).append("' as variable identifiers.");
}
// Build a prompt reusing templates and elements from AICopilotHelper.
prompt.append("\n");
prompt.append(AICopilotHelper.YAML_FORMATTING_PROMPT);
prompt.append("\n");
prompt.append(AICopilotHelper.getRequestResponseExampleYamlFormattingDirective(1));
prompt.append(SECTION_DELIMITER);
prompt.append(contract.getContent());
return prompt.toString();
}
private String preparePromptForAsyncAPI(Operation operation, Resource contract, int number) throws Exception {
StringBuilder prompt = new StringBuilder(
AICopilotHelper.getAsyncAPIOperationPromptIntro(operation.getName(), number));
// Build a prompt reusing templates and elements from AICopilotHelper.
prompt.append("\n");
prompt.append(AICopilotHelper.YAML_FORMATTING_PROMPT);
prompt.append("\n");
prompt.append(AICopilotHelper.getUnidirectionalEventExampleYamlFormattingDirective(1));
prompt.append(SECTION_DELIMITER);
prompt.append(AICopilotHelper.removeTokensFromSpec(contract.getContent(), operation.getName()));
return prompt.toString();
}
private String preparePromptForGrpc(Service service, Operation operation, Resource contract, int number)
throws Exception {
StringBuilder prompt = new StringBuilder(
AICopilotHelper.getGrpcOperationPromptIntro(service.getName(), operation.getName(), number));
// Build a prompt reusing templates and elements from AICopilotHelper.
prompt.append("\n");
prompt.append(AICopilotHelper.YAML_FORMATTING_PROMPT);
prompt.append("\n");
prompt.append(AICopilotHelper.getGrpcRequestResponseExampleYamlFormattingDirective(1));
prompt.append(SECTION_DELIMITER);
prompt.append(contract.getContent());
return prompt.toString();
}
private MappingJackson2HttpMessageConverter mappingJacksonHttpMessageConverter() {
MappingJackson2HttpMessageConverter converter = new MappingJackson2HttpMessageConverter();
converter.setObjectMapper(customObjectMapper());
return converter;
}
private static ObjectMapper customObjectMapper() {
ObjectMapper mapper = new ObjectMapper();
mapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);
mapper.setSerializationInclusion(JsonInclude.Include.NON_NULL);
mapper.setPropertyNamingStrategy(PropertyNamingStrategies.SNAKE_CASE);
mapper.addMixIn(ChatCompletionRequest.class, ChatCompletionRequestMixIn.class);
return mapper;
}
private HttpHeaders createAuthenticationHeaders() {
HttpHeaders headers = new HttpHeaders();
headers.set("Authorization", "Bearer " + apiKey);
return headers;
}
}
|
[
"com.theokanning.openai.completion.chat.ChatMessageRole.ASSISTANT.value",
"com.theokanning.openai.completion.chat.ChatCompletionRequest.builder"
] |
[((6191, 6224), 'com.theokanning.openai.completion.chat.ChatMessageRole.ASSISTANT.value'), ((6326, 6463), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((6326, 6455), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((6326, 6428), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((6326, 6394), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((6326, 6389), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((6326, 6370), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((8499, 8558), 'io.github.microcks.util.DispatchStyles.QUERY_ARGS.equals')]
|
package quotes.responder.chatgptsentiment;
import com.theokanning.openai.completion.chat.ChatCompletionRequest;
import com.theokanning.openai.completion.chat.ChatCompletionResult;
import com.theokanning.openai.completion.chat.ChatMessage;
import com.theokanning.openai.completion.chat.ChatMessageRole;
import com.theokanning.openai.service.OpenAiService;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import quotes.common.Options;
import quotes.common.model.Quote;
import reactor.core.publisher.Mono;
import java.util.List;
import static quotes.utils.SentimentUtils.generatePrompt;
/**
* This class defines methods that use the ChatGPT web service to
* analyze the sentiment of a famous {@link Quote} from the Bard.
*
* The {@code @Service} annotation enables the auto-detection of
* implementation classes via classpath scanning (in this case {@link
* Quote}).
*/
@SuppressWarnings("SpringJavaAutowiredFieldsWarningInspection")
@Service
public class GPTSentimentService {
/**
* Debugging tag used by {@link Options}.
*/
private final String TAG = getClass().getSimpleName();
/**
* Auto-wire the means to access ChatGPT using Spring
* dependency injection.
*/
@Autowired
private OpenAiService mOpenAiService;
/**
* Analyzes the sentiment of the given text and return
* the sentiment as an {@link Quote} object.
*
* @param quoteM A {@link Mono} that emits a {@link Quote} whose
* sentiment is analyzed
* @return The {@link Quote} object updated to include the
* sentiment analysis
*/
public Mono<Quote> analyzeSentiment(Mono<Quote> quoteM) {
return quoteM
.map(quote -> {
// Create the ChatMessage containing the prompt.
List<ChatMessage> messages = makePrompt(quote);
// Send an HTTP request to ChatGPT to get the
// ChatCompletionResult.
var ccRequest = getResult(messages);
// Set the sentiment for the Quote.
setQuoteSentiment(quote, ccRequest);
// Return the updated quote.
return quote;
});
}
/**
* Creates a {@link ChatMessage} containing the prompt.
*
* @param quote The {@link Quote} containing information
* needed to make the prompt
* @return A one-element {@link List} containing the prompt
*/
List<ChatMessage> makePrompt(Quote quote) {
return List
// Create the ChatMessage containing the prompt.
.of(new ChatMessage
(ChatMessageRole.SYSTEM.value(),
generatePrompt(quote)));
}
/**
* Uses ChatGPT to get a {@link ChatCompletionResult} from the
* {@link List} of {@link ChatMessage} objects containing
* the prompt.
*
* @param messages The {@link List} of {@link ChatMessage}
* objects containing the prompt
* @return The {@link ChatCompletionResult} returned from
* ChatGPT
*/
ChatCompletionResult getResult
(List<ChatMessage> messages) {
var ccRequest = ChatCompletionRequest
// Create the ChatCompletionRequest.Builder.
.builder()
// Specify the LLM model to use.
.model("gpt-3.5-turbo")
// Provide the prompt.
.messages(messages)
// Set the temperature, which controls how
// deterministic the response is.
.temperature(0.2)
// Just return a single response.
.n(1)
// Build the ChatCompletionRequest.
.build();
return mOpenAiService
// Use the ChatCompletionRequest to get a
// single ChatCompletionResult.
.createChatCompletion(ccRequest);
}
/**
* Sets the sentiment of the given {@link Quote}.
*
* @param quote The {@link Quote} whose sentiment is analyzed
* @param ccResult The {@link ChatCompletionResult} that
* contains the result from ChatGPT
*/
void setQuoteSentiment
(Quote quote,
ChatCompletionResult ccResult) {
quote
// Set the sentiment for the quote.
.setSentiment(ccResult
// Get the first (and only) response.
.getChoices().get(0)
// Get the sentiment content.
.getMessage().getContent());
}
}
|
[
"com.theokanning.openai.completion.chat.ChatMessageRole.SYSTEM.value"
] |
[((2746, 2776), 'com.theokanning.openai.completion.chat.ChatMessageRole.SYSTEM.value')]
|
package bot.ai;
import com.theokanning.openai.completion.CompletionRequest;
import com.theokanning.openai.completion.CompletionResult;
import com.theokanning.openai.completion.chat.ChatCompletionRequest;
import com.theokanning.openai.completion.chat.ChatCompletionResult;
import com.theokanning.openai.completion.chat.ChatMessage;
import com.theokanning.openai.image.CreateImageRequest;
import com.theokanning.openai.image.ImageResult;
import com.theokanning.openai.moderation.Moderation;
import com.theokanning.openai.moderation.ModerationRequest;
import com.theokanning.openai.moderation.ModerationResult;
import com.theokanning.openai.service.OpenAiService;
import bot.config.*;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.time.Duration;
public class OpenAiHelper {

    /** Application configuration loaded from the properties file. */
    private final Config config = Config.getInstance();

    /** SLF4J logger for this helper. */
    private static final Logger logger = LoggerFactory.getLogger(OpenAiHelper.class);

    /** Lazily created singleton instance. */
    private static OpenAiHelper instance;

    /** OpenAi-java service client, initialised by the private constructor. */
    private static OpenAiService service;

    /**
     * Returns the singleton helper, creating it on first use.
     * Synchronised so that two concurrent first calls cannot build two instances.
     */
    public static synchronized OpenAiHelper getInstance() {
        return instance == null ? (instance = new OpenAiHelper()) : instance;
    }

    private OpenAiHelper() {
        service = new OpenAiService(config.getGptToken(),
                Duration.ofSeconds(Config.getInstance().getApiTimeout()));
    }

    /**
     * Replaces the content of the system message (the first chat message) for the
     * given server and clears the accumulated chat history.
     *
     * @param content      new system-role content
     * @param serverConfig per-server chat configuration to update
     */
    public static void updateRole(String content, ServerConfig serverConfig) {
        ChatMessage systemMessage = serverConfig.getChatMessages().get(0);
        if (systemMessage.getRole().equals(AiChatMessages.SYSTEM_ROLE)) {
            systemMessage.setContent(content);
        }
        serverConfig.getChatMessages().emptyChat();
    }

    /**
     * Sends the request through the chat-completion endpoint, keeping the
     * conversation history in {@code serverConfig}.
     *
     * @param request      user prompt
     * @param serverConfig per-server chat configuration (history, model, limits)
     * @return the assistant reply, or a configured fallback reply on failure or
     *         content-policy violation
     */
    public String makeOpenAiChatCompletionRequest(String request, ServerConfig serverConfig) {
        logger.debug("makeOpenAiChatCompletionRequest({})", request);
        String response = config.getNonComplianceBotReply();
        logger.debug("got compliance response: {}", response);
        if (requestMatchesContentPolicy(request)) {
            logger.debug("Create new chat message and add it to chat history");
            serverConfig.getChatMessages().add(new ChatMessage("user", request));
            logger.debug("creating openAi chat request");
            var chatRequest = ChatCompletionRequest.builder()
                    .messages(serverConfig.getChatMessages())
                    .maxTokens(serverConfig.getOpenAiMaxTokens())
                    .temperature(serverConfig.getOpenAiTemperature())
                    .model(serverConfig.getOpenAiModel())
                    .build();
            try {
                logger.debug("attempting to parse response");
                ChatCompletionResult completionResult = service.createChatCompletion(chatRequest);
                response = completionResult.getChoices().get(0).getMessage().getContent();
                // Keep the assistant's answer in the history for the next turn
                serverConfig.getChatMessages().add(completionResult.getChoices().get(0).getMessage());
            } catch (Exception e) {
                logger.error("exception occurred when making chatCompletion", e);
                response = config.getRequestFailureBotReply();
            }
            if (response.isBlank()) {  // isBlank() already covers the empty string
                logger.debug("response was blank or empty");
                response = config.getRequestFailureBotReply();
            }
        }
        logger.debug("returning: {}", response);
        return response;
    }

    /**
     * OpenAi-java Completion request (single-shot, no chat history).
     *
     * @param request prompt for the AI
     * @return openAi response, or a configured fallback reply on failure or
     *         content-policy violation
     */
    public String makeOpenAiCompletionRequest(String request, ServerConfig serverConfig) {
        String response = config.getNonComplianceBotReply();
        if (requestMatchesContentPolicy(request)) {
            var completionRequest = CompletionRequest.builder()
                    .prompt(request)
                    .model(serverConfig.getOpenAiModel())
                    .echo(false)
                    .temperature(serverConfig.getOpenAiTemperature())
                    .maxTokens(serverConfig.getOpenAiMaxTokens())
                    .build();
            try {
                CompletionResult completionResult = service.createCompletion(completionRequest);
                // The completion echoes the prompt on some models; strip it once.
                response = completionResult.getChoices().get(0).getText()
                        .replaceFirst(request + "\n", "");
            } catch (Exception e) {
                logger.error("exception occurred when making completion request", e);
                response = config.getRequestFailureBotReply();
            }
            if (response.isBlank()) {  // isBlank() already covers the empty string
                response = config.getRequestFailureBotReply();
            }
        }
        return response;
    }

    /**
     * Generates an image for the prompt via the images endpoint.
     *
     * @param request image prompt
     * @return URL of the generated image, or a configured fallback reply on
     *         failure or content-policy violation
     */
    public String makeOpenAiImageRequest(String request) {
        String response = config.getNonComplianceBotReply();
        if (requestMatchesContentPolicy(request)) {
            CreateImageRequest imageRequest = CreateImageRequest.builder()
                    .prompt(request)
                    .size(config.getOpenAiImageSize())
                    .responseFormat(config.getOpenAiImageResponseFormat())
                    .build();
            try {
                ImageResult imageResult = service.createImage(imageRequest);
                response = imageResult.getData().get(0).getUrl();
            } catch (Exception e) {
                logger.error("exception occurred when making image request", e);
                response = config.getRequestFailureBotReply();
            }
            if (response.isBlank()) {  // isBlank() already covers the empty string
                response = config.getRequestFailureBotReply();
            }
        }
        return response;
    }

    /**
     * Screens the request against the OpenAI moderation endpoint.
     *
     * @return {@code true} when no moderation category flags the input
     */
    public boolean requestMatchesContentPolicy(String request) {
        boolean requestOk = true;
        ModerationRequest moderationRequest = ModerationRequest.builder()
                .input(request)
                .model(OpenAiModels.TEXT_MODERATION_STABLE.getId())
                .build();
        ModerationResult result = service.createModeration(moderationRequest);
        for (Moderation moderation : result.getResults()) {
            if (moderation.flagged) {
                requestOk = false;
                break;
            }
        }
        return requestOk;
    }

    /** @return whether the model id belongs to the chat-completion family. */
    public static boolean isChatModel(String model) {
        return model.equals(OpenAiModels.GPT_3_5_TURBO.getId())
                || model.equals(OpenAiModels.GPT_4.getId())
                || model.equals(OpenAiModels.GPT_4_TURBO.getId())
                || model.equals(OpenAiModels.GPT_3_5_TURBO_1106.getId());
    }

    /** @return whether the model id belongs to the legacy completion family. */
    public static boolean isCompletionModel(String model) {
        return model.equals(OpenAiModels.DAVINCI_2.getId())
                || model.equals(OpenAiModels.BABBAGE_002.getId());
    }

    /**
     * All possible models that can be used in a request.
     */
    public enum OpenAiModels {
        DAVINCI_2("davinci-002"),
        BABBAGE_002("babbage-002"),
        TEXT_EMBEDDING_3_LARGE("text-embedding-3-large"),
        TEXT_EMBEDDING_3_SMALL("text-embedding-3-small"),
        TEXT_EMBEDDING_ADA_002("text-embedding-ada-002"),
        TEXT_MODERATION_LATEST("text-moderation-latest"),
        TEXT_MODERATION_STABLE("text-moderation-stable"),
        TEXT_MODERATION_007("text-moderation-007"),
        GPT_3_5_TURBO("gpt-3.5-turbo"),
        GPT_3_5_TURBO_1106("gpt-3.5-turbo-1106"),
        GPT_4("gpt-4"),
        GPT_4_TURBO("gpt-4-turbo-preview"); // WARNING May Become Deprecated

        private final String id;

        OpenAiModels(final String id) {
            this.id = id;
        }

        /** @return the OpenAI API identifier of this model. */
        public String getId() {
            return id;
        }
    }
}
|
[
"com.theokanning.openai.completion.CompletionRequest.builder",
"com.theokanning.openai.moderation.ModerationRequest.builder",
"com.theokanning.openai.image.CreateImageRequest.builder",
"com.theokanning.openai.completion.chat.ChatCompletionRequest.builder"
] |
[((2371, 2687), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((2371, 2658), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((2371, 2600), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((2371, 2530), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((2371, 2464), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((3946, 4266), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((3946, 4237), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((3946, 4171), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((3946, 4101), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((3946, 4068), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((3946, 4010), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((5070, 5294), 'com.theokanning.openai.image.CreateImageRequest.builder'), ((5070, 5265), 'com.theokanning.openai.image.CreateImageRequest.builder'), ((5070, 5190), 'com.theokanning.openai.image.CreateImageRequest.builder'), ((5070, 5135), 'com.theokanning.openai.image.CreateImageRequest.builder'), ((5972, 6124), 'com.theokanning.openai.moderation.ModerationRequest.builder'), ((5972, 6099), 'com.theokanning.openai.moderation.ModerationRequest.builder'), ((5972, 6031), 'com.theokanning.openai.moderation.ModerationRequest.builder')]
|
package net.devemperor.wristassist.activities;
import static com.theokanning.openai.service.OpenAiService.defaultClient;
import static com.theokanning.openai.service.OpenAiService.defaultObjectMapper;
import android.content.Intent;
import android.content.SharedPreferences;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.os.Bundle;
import android.os.VibrationEffect;
import android.os.Vibrator;
import android.view.View;
import android.widget.ImageButton;
import android.widget.ProgressBar;
import android.widget.ScrollView;
import android.widget.TextView;
import android.widget.Toast;
import androidx.appcompat.app.AppCompatActivity;
import androidx.constraintlayout.widget.ConstraintLayout;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.firebase.crashlytics.FirebaseCrashlytics;
import com.jsibbold.zoomage.ZoomageView;
import com.theokanning.openai.client.OpenAiApi;
import com.theokanning.openai.image.CreateImageRequest;
import com.theokanning.openai.image.Image;
import com.theokanning.openai.image.ImageResult;
import com.theokanning.openai.service.OpenAiService;
import net.devemperor.wristassist.R;
import net.devemperor.wristassist.database.ImageModel;
import net.devemperor.wristassist.database.ImagesDatabaseHelper;
import net.devemperor.wristassist.database.UsageDatabaseHelper;
import net.devemperor.wristassist.util.Util;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.time.Duration;
import java.util.Objects;
import java.util.Timer;
import java.util.TimerTask;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import okhttp3.OkHttpClient;
import okhttp3.Request;
import okhttp3.Response;
import retrofit2.Retrofit;
import retrofit2.adapter.rxjava2.RxJava2CallAdapterFactory;
import retrofit2.converter.jackson.JacksonConverterFactory;
public class CreateImageActivity extends AppCompatActivity {

    // Preferences: API key, custom host, model/quality/style/size and vibration flag
    SharedPreferences sp;
    UsageDatabaseHelper usageDatabaseHelper;
    ImagesDatabaseHelper imagesDatabaseHelper;
    OpenAiService service;
    Vibrator vibrator;

    // Views bound in onCreate
    ScrollView createImageSv;
    ProgressBar imagePb;
    TextView errorTv;
    ImageButton retryBtn;
    ZoomageView imageView;
    ImageButton shareBtn;
    TextView expiresInTv;
    ConstraintLayout saveDiscardBtns;

    // Request parameters resolved from the intent and preferences
    String prompt;
    String model;
    String quality;
    String style;
    String size;

    // State of the current generation attempt
    ImageResult imageResult;
    Image image;
    Bitmap bitmap;
    ExecutorService thread;  // single worker for the API call and the download
    Timer timer;             // "expires in" countdown; only created once the worker runs

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_create_image);

        sp = getSharedPreferences("net.devemperor.wristassist", MODE_PRIVATE);
        imagesDatabaseHelper = new ImagesDatabaseHelper(this);
        usageDatabaseHelper = new UsageDatabaseHelper(this);

        String apiKey = sp.getString("net.devemperor.wristassist.api_key", "noApiKey");
        String apiHost = sp.getString("net.devemperor.wristassist.custom_server_host", "https://api.openai.com/");
        ObjectMapper mapper = defaultObjectMapper(); // replaces all control chars (#10 @ GH)
        OkHttpClient client = defaultClient(apiKey.replaceAll("[^ -~]", ""), Duration.ofSeconds(120)).newBuilder().build();
        Retrofit retrofit = new Retrofit.Builder()
                .baseUrl(apiHost)
                .client(client)
                .addConverterFactory(JacksonConverterFactory.create(mapper))
                .addCallAdapterFactory(RxJava2CallAdapterFactory.create())
                .build();
        OpenAiApi api = retrofit.create(OpenAiApi.class);
        service = new OpenAiService(api);

        vibrator = (Vibrator) getSystemService(VIBRATOR_SERVICE);

        createImageSv = findViewById(R.id.create_image_sv);
        imagePb = findViewById(R.id.image_pb);
        errorTv = findViewById(R.id.error_image_tv);
        retryBtn = findViewById(R.id.retry_image_btn);
        imageView = findViewById(R.id.create_image_iv);
        shareBtn = findViewById(R.id.share_image_btn);
        expiresInTv = findViewById(R.id.expires_image_tv);
        saveDiscardBtns = findViewById(R.id.save_discard_image_btns);

        prompt = getIntent().getStringExtra("net.devemperor.wristassist.prompt");
        model = sp.getBoolean("net.devemperor.wristassist.image_model", false) ? "dall-e-3" : "dall-e-2";
        quality = sp.getBoolean("net.devemperor.wristassist.image_quality", false) ? "hd" : "standard";
        style = sp.getBoolean("net.devemperor.wristassist.image_style", false) ? "natural" : "vivid";
        // dall-e-3 only supports 1024x1024 here; dall-e-2 uses the user-selected size
        size = sp.getBoolean("net.devemperor.wristassist.image_model", false) ? "1024x1024" : sp.getString("net.devemperor.wristassist.image_size", "1024x1024");

        createAndDownloadImage();
        createImageSv.requestFocus();
    }

    @Override
    protected void onDestroy() {
        super.onDestroy();
        // timer is created on the worker thread, so it may still be null if the
        // activity is destroyed before the request has started
        if (timer != null) {
            timer.cancel();
        }
        if (thread != null) {
            thread.shutdownNow();
        }
    }

    /** Requests image generation from the API and downloads the result bitmap. */
    private void createAndDownloadImage() {
        imagePb.setVisibility(View.VISIBLE);
        errorTv.setVisibility(View.GONE);
        retryBtn.setVisibility(View.GONE);

        thread = Executors.newSingleThreadExecutor();
        thread.execute(() -> {
            timer = new Timer();
            try {
                CreateImageRequest cir = CreateImageRequest.builder()
                        .responseFormat("url")
                        .n(1)
                        .prompt(prompt)
                        .model(model)
                        .quality(quality)
                        .size(size)
                        .style(style)
                        .build();
                imageResult = service.createImage(cir);
                image = imageResult.getData().get(0);

                // Generated URLs expire one hour after creation; refresh the label each minute
                timer.scheduleAtFixedRate(new TimerTask() {
                    @Override
                    public void run() {
                        long minutes = (imageResult.getCreated()*1000 + 60*60*1000 - System.currentTimeMillis()) / 60 / 1000;
                        runOnUiThread(() -> {
                            if (minutes <= 0) {
                                expiresInTv.setVisibility(View.GONE);
                                shareBtn.setVisibility(View.GONE);
                                timer.cancel();
                            } else {
                                expiresInTv.setText(getString(R.string.wristassist_image_expires_in, minutes));
                            }
                        });
                    }
                }, 0, 60*1000);

                usageDatabaseHelper.edit(model, 1, Util.calcCostImage(model, quality, size));

                OkHttpClient downloadClient = new OkHttpClient();
                Request request = new Request.Builder().url(image.getUrl()).build();
                // try-with-resources closes the Response (and its body stream) in all paths
                try (Response response = downloadClient.newCall(request).execute()) {
                    if (!response.isSuccessful()) {
                        throw new IOException("Unexpected code " + response);
                    }
                    if (response.body() == null) {
                        // explicit check instead of `assert`: asserts are disabled in release builds
                        throw new IOException("Response body is null");
                    }
                    InputStream inputStream = response.body().byteStream();
                    bitmap = BitmapFactory.decodeStream(inputStream);
                }
                if (bitmap == null) {
                    throw new IOException("Bitmap is null");
                } else {
                    runOnUiThread(() -> {
                        if (sp.getBoolean("net.devemperor.wristassist.vibrate", true)) {
                            vibrator.vibrate(VibrationEffect.createOneShot(300, VibrationEffect.DEFAULT_AMPLITUDE));
                        }
                        imageView.setImageBitmap(bitmap);
                        imagePb.setVisibility(View.GONE);
                        imageView.setVisibility(View.VISIBLE);
                        shareBtn.setVisibility(View.VISIBLE);
                        expiresInTv.setVisibility(View.VISIBLE);
                        saveDiscardBtns.setVisibility(View.VISIBLE);
                    });
                }
            } catch (RuntimeException | IOException e) {
                FirebaseCrashlytics fc = FirebaseCrashlytics.getInstance();
                fc.setCustomKey("settings", sp.getAll().toString());
                fc.setUserId(sp.getString("net.devemperor.wristassist.userid", "null"));
                fc.recordException(e);
                fc.sendUnsentReports();
                e.printStackTrace();

                runOnUiThread(() -> {
                    imagePb.setVisibility(View.GONE);
                    errorTv.setVisibility(View.VISIBLE);
                    retryBtn.setVisibility(View.VISIBLE);
                    timer.cancel();
                    if (sp.getBoolean("net.devemperor.wristassist.vibrate", true)) {
                        vibrator.vibrate(VibrationEffect.createWaveform(new long[]{50, 50, 50, 50, 50}, new int[]{-1, 0, -1, 0, -1}, -1));
                    }
                    // Map the exception message to a user-facing error string
                    if (Objects.requireNonNull(e.getMessage()).contains("SocketTimeoutException")) {
                        errorTv.setText(R.string.wristassist_timeout);
                    } else if (e.getMessage().contains("API key")) {
                        errorTv.setText(getString(R.string.wristassist_invalid_api_key_message));
                    } else if (e.getMessage().contains("rejected")) {
                        errorTv.setText(R.string.wristassist_image_request_rejected);
                    } else if (e.getMessage().contains("quota") || e.getMessage().contains("limit")) {
                        errorTv.setText(R.string.wristassist_quota_exceeded);
                    } else if (e.getMessage().contains("does not exist")) {
                        errorTv.setText(R.string.wristassist_no_access);
                    } else {
                        errorTv.setText(R.string.wristassist_no_internet);
                    }
                });
            }
        });
    }

    /** Retry button handler: restarts the generate-and-download flow. */
    public void retry(View view) {
        createAndDownloadImage();
    }

    /** Share button handler: shows the image URL as a QR code. */
    public void shareImage(View view) {
        Intent intent = new Intent(this, QRCodeActivity.class);
        intent.putExtra("net.devemperor.wristassist.image_url", image.getUrl());
        startActivity(intent);
    }

    /** Save button handler: persists metadata to the DB and the bitmap to a PNG file. */
    public void saveImage(View view) {
        Toast.makeText(this, R.string.wristassist_saving, Toast.LENGTH_SHORT).show();
        ImageModel imageModel;
        if (model.equals("dall-e-3")) {
            // dall-e-3 additionally returns a revised prompt and supports quality/style
            imageModel = new ImageModel(-1, prompt, image.getRevisedPrompt(), model, quality, size, style, imageResult.getCreated() * 1000, image.getUrl());
        } else {
            imageModel = new ImageModel(-1, prompt, null, model, null, size, null, imageResult.getCreated() * 1000, image.getUrl());
        }
        int id = imagesDatabaseHelper.add(imageModel);
        try (FileOutputStream out = openFileOutput("image_" + id + ".png", MODE_PRIVATE)) {
            bitmap.compress(Bitmap.CompressFormat.PNG, 90, out);
            out.flush();
        } catch (IOException e) {
            e.printStackTrace();  // best-effort save; the DB row is kept regardless
        }
        if (timer != null) {
            timer.cancel();
        }
        Intent data = new Intent();
        data.putExtra("net.devemperor.wristassist.imageId", id);
        setResult(RESULT_OK, data);
        finish();
    }

    /** Discard button handler: drops the result and closes the activity. */
    public void discardImage(View view) {
        if (timer != null) {
            timer.cancel();
        }
        finish();
    }
}
|
[
"com.theokanning.openai.image.CreateImageRequest.builder"
] |
[((5450, 5782), 'com.theokanning.openai.image.CreateImageRequest.builder'), ((5450, 5749), 'com.theokanning.openai.image.CreateImageRequest.builder'), ((5450, 5711), 'com.theokanning.openai.image.CreateImageRequest.builder'), ((5450, 5675), 'com.theokanning.openai.image.CreateImageRequest.builder'), ((5450, 5633), 'com.theokanning.openai.image.CreateImageRequest.builder'), ((5450, 5595), 'com.theokanning.openai.image.CreateImageRequest.builder'), ((5450, 5555), 'com.theokanning.openai.image.CreateImageRequest.builder'), ((5450, 5525), 'com.theokanning.openai.image.CreateImageRequest.builder'), ((9084, 9157), 'java.util.Objects.requireNonNull'), ((10410, 10486), 'android.widget.Toast.makeText')]
|
package com.npcvillagers.npcvillage.services;
import com.google.gson.*;
import com.microsoft.azure.cognitiveservices.vision.contentmoderator.*;
import com.microsoft.azure.cognitiveservices.vision.contentmoderator.models.*;
import com.npcvillagers.npcvillage.models.Npc;
import com.theokanning.openai.service.OpenAiService;
import com.theokanning.openai.moderation.Moderation;
import com.theokanning.openai.moderation.ModerationRequest;
import com.theokanning.openai.completion.chat.ChatCompletionChoice;
import com.theokanning.openai.completion.chat.ChatCompletionRequest;
import com.theokanning.openai.completion.chat.ChatMessage;
import com.theokanning.openai.completion.chat.ChatMessageRole;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import java.io.FileReader;
import java.io.IOException;
import java.time.Duration;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
@Service
public class OpenAiApiHandler {

    @Autowired
    NpcFactory npcFactory;

    public OpenAiApiHandler(NpcFactory npcFactory) {
        this.npcFactory = npcFactory;
    }

    private static final String SEED_MESSAGE_SYSTEM = "You are a dungeon master's assistant for their creation of dungeons and dragons NPCs.";
    private static final String SEED_MESSAGE_USER_PATH = "src/main/resources/static/json/seedUserMessage.json";
    private static final String SEED_MESSAGE_USER = getSeedUserMessage();
    private static final String SEED_MESSAGE_ASSISTANT_PATH = "src/main/resources/static/json/seedAssistantMessage.json";
    private static final String SEED_MESSAGE_ASSISTANT = getSeedAssistantMessage();

    /**
     * Moderates the user's NPC input, asks OpenAI to generate the NPC fields and
     * merges the generated content back into the entity.
     *
     * @param npc user-provided NPC to complete
     * @return the same NPC updated with the generated content
     */
    public Npc processNpc(Npc npc) {
        String userNpcJsonString = npcFactory.toPrettyJsonString(npc);
        long startTime = System.nanoTime();
        // Ensure that the user's inputs comply with OpenAI content policies
        enforceContentPolicy(userNpcJsonString);
        // Call the OpenAI API and create a JSON format string
        String generatedNpcJsonString = generateOpenAiChatMessage(userNpcJsonString);
        Npc updatedNpc = npcFactory.updateNpcFromContent(npc, generatedNpcJsonString);
        long endTime = System.nanoTime();
        // Calculate the elapsed time in seconds
        double elapsedTime = (endTime - startTime) / 1_000_000_000.0;
        System.out.println("Elapsed time: " + elapsedTime + " seconds");
        return updatedNpc;
    }

    // We use the OpenAI moderations endpoint and the Azure Content Moderator to ensure what the user typed doesn't break content policy. Based on testing, the OpenAI moderations endpoint is not sufficient on its own in certain edge cases. We use it in concert with the Azure Moderator Client.
    private void enforceContentPolicy(String userNpcJsonString) {
        enforceOpenAiContentPolicy(userNpcJsonString);
        enforceAzureContentPolicy(userNpcJsonString);
    }

    /**
     * Screens the input with the OpenAI moderations endpoint.
     *
     * @throws IllegalArgumentException if the content is flagged
     * @throws RuntimeException on configuration or API errors
     */
    private void enforceOpenAiContentPolicy(String userNpcJsonString) {
        String token = System.getenv("OPENAI_TOKEN");
        if (token == null) {
            throw new RuntimeException("Error: OPENAI_TOKEN environment variable not set");
        }
        OpenAiService service = null;
        try {
            // Set duration to 60 seconds to avoid a socket exception for long response times
            service = new OpenAiService(token, Duration.ofSeconds(60));
            ModerationRequest moderationRequest = ModerationRequest.builder()
                    .input(userNpcJsonString)
                    .model("text-moderation-latest")
                    .build();
            List<Moderation> moderationResults = service.createModeration(moderationRequest).getResults();
            // Check if any results were returned
            if (moderationResults.isEmpty()) {
                throw new RuntimeException("Error: No moderation results returned");
            }
            Moderation moderationScore = moderationResults.get(0);
            // Check if the content is flagged by OpenAI
            if (moderationScore.isFlagged()) {
                // Throw an exception indicating content policy violation
                throw new IllegalArgumentException("Content violates the content policy. Please modify your NPC");
            }
        } catch (IllegalArgumentException e) {
            // Propagate the policy-violation signal unmasked so callers can show the
            // specific message instead of a generic moderation failure
            throw e;
        } catch (Exception e) {
            throw new RuntimeException("Error enforcing OpenAI content policy", e);
        } finally {
            if (service != null) {
                service.shutdownExecutor();
            }
        }
    }

    /**
     * Screens the input with the Azure Content Moderator (PII and classification).
     *
     * @throws IllegalArgumentException if the content is flagged
     * @throws RuntimeException on configuration or API errors
     */
    private void enforceAzureContentPolicy(String userNpcJsonString) {
        String azureModeratorEndpoint = System.getenv("AZURE_MODERATOR_ENDPOINT");
        if (azureModeratorEndpoint == null) {
            throw new RuntimeException("Error: AZURE_MODERATOR_ENDPOINT environment variable not set");
        }
        String azureModeratorSubscriptionKey = System.getenv("AZURE_MODERATOR_SUBSCRIPTION_KEY");
        // Fixed copy-paste bug: this must validate the subscription key, not the endpoint again
        if (azureModeratorSubscriptionKey == null) {
            throw new RuntimeException("Error: AZURE_MODERATOR_SUBSCRIPTION_KEY environment variable not set");
        }
        try {
            // Create Azure Content Moderator client
            ContentModeratorClient azureModeratorClient = ContentModeratorManager.authenticate(
                    AzureRegionBaseUrl.fromString(azureModeratorEndpoint), azureModeratorSubscriptionKey);
            // Detect the language of the text
            DetectedLanguage detectedLanguage = azureModeratorClient.textModerations().detectLanguage("text/plain", userNpcJsonString.getBytes());
            if (detectedLanguage == null) {
                throw new RuntimeException("Failed to detect the language of the text");
            }
            // Screen the text
            ScreenTextOptionalParameter screenTextOptionalParameter = new ScreenTextOptionalParameter().withLanguage(detectedLanguage.detectedLanguage());
            Screen screen = azureModeratorClient.textModerations().screenText("text/plain", userNpcJsonString.getBytes(), screenTextOptionalParameter);
            // If there are any matched items in the Auto-detected language, PII or Classification categories.
            if ((screen.pII() != null) ||
                    (screen.classification() != null &&
                            (screen.classification().reviewRecommended()))) {
                throw new IllegalArgumentException("Content violates the content policy. Please modify your NPC");
            }
        } catch (APIErrorException e) {
            // Handle API exceptions here
            throw new RuntimeException("An error occurred while screening the content with Azure Content Moderator", e);
        } catch (IllegalArgumentException e) {
            // Propagate the policy-violation signal unmasked (was previously swallowed
            // into the generic "unexpected error" below)
            throw e;
        } catch (Exception e) {
            // Handle other exceptions here
            throw new RuntimeException("An unexpected error occurred", e);
        }
    }

    /**
     * Sends the seeded chat (system context + example exchange + user NPC) to the
     * chat-completion endpoint and returns the generated JSON content.
     *
     * @throws RuntimeException on configuration or API errors
     */
    private String generateOpenAiChatMessage(String userNpcJsonString) {
        String token = System.getenv("OPENAI_TOKEN");
        if (token == null) {
            throw new RuntimeException("Error: OPENAI_TOKEN environment variable not set");
        }
        OpenAiService service = null;
        try {
            // Set duration to 60 seconds to avoid a socket exception for long response times
            service = new OpenAiService(token, Duration.ofSeconds(60));
            // Seed the chat with system context, example input, and example output, and add the user's Npc to the messages
            final List<ChatMessage> messages = new ArrayList<>();
            final ChatMessage systemMessage = new ChatMessage(ChatMessageRole.SYSTEM.value(), SEED_MESSAGE_SYSTEM);
            final ChatMessage seedUserMessage = new ChatMessage(ChatMessageRole.USER.value(), SEED_MESSAGE_USER);
            final ChatMessage seedAssistantMessage = new ChatMessage(ChatMessageRole.ASSISTANT.value(), SEED_MESSAGE_ASSISTANT);
            final ChatMessage userNpcRequestMessage = new ChatMessage(ChatMessageRole.USER.value(), userNpcJsonString);
            messages.add(systemMessage);
            messages.add(seedUserMessage);
            messages.add(seedAssistantMessage);
            messages.add(userNpcRequestMessage);
            // Send the API request
            ChatCompletionRequest chatCompletionRequest = ChatCompletionRequest
                    .builder()
                    .model("gpt-3.5-turbo")
                    .messages(messages)
                    .n(1)
                    .temperature(0.8)
                    .maxTokens(1000)
                    .logitBias(new HashMap<>())
                    .build();
            // Extract the message content of the response
            List<ChatCompletionChoice> choices = service.createChatCompletion(chatCompletionRequest).getChoices();
            if (choices.isEmpty()) {
                throw new RuntimeException("Error: No response from OpenAI");
            }
            String content = choices.get(0).getMessage().getContent();
            return content;
        } catch (Exception e) {
            throw new RuntimeException("Error generating OpenAI chat message", e);
        } finally {
            if (service != null) {
                service.shutdownExecutor();
            }
        }
    }

    /** Builds the seed user message: instructions plus the pretty-printed example input. */
    private static String getSeedUserMessage() {
        String prettyJsonString = getPrettyJsonString(SEED_MESSAGE_USER_PATH);
        String originalMessage = "Generate a JSON-formatted NPC (Non-Player Character) for use in a Dungeons and Dragons campaign. The JSON should ONLY contain the following fields: \"name\", \"age\", \"voice\", \"description\", \"personality\", \"motivation\", \"ideal\", \"bond\", \"flaw\", and \"history\". Please ensure that the fields \"ideal\", \"bond\", and \"flaw\" are described from a first-person point of view. IMPORTANT: Do not include any other fields beyond the ones specifically mentioned here, such as species, subspecies, gender, alignment or any other. IMPORTANT: The NPC's \"history\" or \"motivation\" should clearly reflect the given \"playerRelationship\" input, which describes the relationship of the NPC to the players. This relationship should be clearly visible in the NPC's backstory or motivation. IMPORTANT: Do not mis-gender your creation. IMPORTANT: Give the NPC's \"age\" in years.\n\n";
        String fullMessage = originalMessage + prettyJsonString;
        return fullMessage;
    }

    /** Loads the seed assistant message (the example output JSON). */
    private static String getSeedAssistantMessage() {
        return getPrettyJsonString(SEED_MESSAGE_ASSISTANT_PATH);
    }

    /**
     * Reads a JSON array from disk and returns it pretty-printed with escaped
     * quotes unescaped; returns {@code null} if the file cannot be read.
     */
    private static String getPrettyJsonString(String filePath) {
        // NOTE(review): FileReader uses the platform default charset here — the seed
        // files are presumably UTF-8; consider FileReader(filePath, StandardCharsets.UTF_8)
        try (FileReader reader = new FileReader(filePath)) {
            // create a Gson instance with pretty printing
            Gson gson = new GsonBuilder().setPrettyPrinting().create();
            // parse the JSON file into a JsonArray
            JsonArray jsonArray = JsonParser.parseReader(reader).getAsJsonArray();
            // convert the JsonArray into a pretty printed string
            String prettyJsonString = gson.toJson(jsonArray);
            // Replace escaped quotes with actual quotes
            prettyJsonString = prettyJsonString.replace("\\\"", "\"");
            return prettyJsonString;
        } catch (IOException e) {
            e.printStackTrace();
        }
        return null;
    }

    public NpcFactory getNpcFactory() {
        return npcFactory;
    }

    public void setNpcFactory(NpcFactory npcFactory) {
        this.npcFactory = npcFactory;
    }
}
|
[
"com.theokanning.openai.moderation.ModerationRequest.builder",
"com.theokanning.openai.completion.chat.ChatMessageRole.SYSTEM.value",
"com.theokanning.openai.completion.chat.ChatMessageRole.USER.value",
"com.theokanning.openai.completion.chat.ChatMessageRole.ASSISTANT.value"
] |
[((3501, 3656), 'com.theokanning.openai.moderation.ModerationRequest.builder'), ((3501, 3627), 'com.theokanning.openai.moderation.ModerationRequest.builder'), ((3501, 3574), 'com.theokanning.openai.moderation.ModerationRequest.builder'), ((7638, 7668), 'com.theokanning.openai.completion.chat.ChatMessageRole.SYSTEM.value'), ((7756, 7784), 'com.theokanning.openai.completion.chat.ChatMessageRole.USER.value'), ((7875, 7908), 'com.theokanning.openai.completion.chat.ChatMessageRole.ASSISTANT.value'), ((8005, 8033), 'com.theokanning.openai.completion.chat.ChatMessageRole.USER.value')]
|
package openai;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.logging.Level;
import java.util.logging.Logger;
import com.amazonaws.services.lambda.runtime.LambdaLogger;
import com.theokanning.openai.completion.chat.ChatCompletionChoice;
import com.theokanning.openai.completion.chat.ChatCompletionChunk;
import com.theokanning.openai.completion.chat.ChatCompletionRequest;
import com.theokanning.openai.completion.chat.ChatMessage;
import com.theokanning.openai.completion.chat.ChatMessageRole;
import com.theokanning.openai.service.OpenAiService;
import io.reactivex.Flowable;
import utils.BasicUtils;
import utils.LambdaLoggerImpl;
public class ChatGPT {

    private static final Logger logger = Logger.getLogger(ChatGPT.class.getName());

    static {
        logger.setLevel(BasicUtils.logLevel());
    }

    public ChatGPT() {
    }

    /**
     * Sends the given text as a system message to the chat-completion endpoint
     * and returns the streamed response concatenated into one string.
     *
     * <p>Reads the API token from the {@code CHATGPT_ENV} environment variable.
     *
     * @param textBody prompt to send
     * @return the concatenated assistant reply
     */
    public String converse(String textBody) {
        logger.log(Level.INFO, "Conversing with ChatGPT: {0}", textBody);
        String token = System.getenv("CHATGPT_ENV");
        OpenAiService service = new OpenAiService(token);

        final List<ChatMessage> messages = new ArrayList<>();
        messages.add(new ChatMessage(ChatMessageRole.SYSTEM.value(), textBody));

        ChatCompletionRequest chatCompletionRequest = ChatCompletionRequest.builder()
                .model("gpt-3.5-turbo")
                .messages(messages)
                .n(1)
                .maxTokens(50)
                .logitBias(new HashMap<>())
                .build();

        // Stream the completion chunks and concatenate their contents in order
        Flowable<ChatCompletionChunk> streamChatCompletion = service.streamChatCompletion(chatCompletionRequest);
        final StringBuilder sb = new StringBuilder();
        streamChatCompletion.blockingForEach(chunk -> {
            for (ChatCompletionChoice choice : chunk.getChoices()) {
                sb.append(choice.getMessage().getContent());
            }
        });

        String str = sb.toString();
        logger.log(Level.INFO, "finished responding: {0}", str);
        return str;
    }

    /** Manual smoke test: sends a sample question and prints the reply. */
    public static void main(String[] args) {
        ChatGPT ai = new ChatGPT();
        String converse = ai.converse("What is the difference between bonds and stocks");
        System.out.println("======");
        System.out.println(converse);
    }
}
|
[
"com.theokanning.openai.completion.chat.ChatMessageRole.SYSTEM.value",
"com.theokanning.openai.completion.chat.ChatCompletionRequest.builder"
] |
[((1263, 1293), 'com.theokanning.openai.completion.chat.ChatMessageRole.SYSTEM.value'), ((1389, 1521), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((1389, 1513), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((1389, 1486), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((1389, 1472), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((1389, 1467), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((1389, 1443), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder')]
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.