13 Commits
1.0 ... 2.5

|SHA1|Message|Date|
|-|-|-|
|c753d51e6b|2.5: Folia support, fix #4|2024-08-16 00:08:39 +08:00|
|478a1077a6|Folia|2024-08-09 12:01:03 +08:00|
|b2e38e09d0|2.4|2024-08-07 19:57:14 +08:00|
|f021715cec|Remove the config-file auto-update feature|2024-08-07 19:34:33 +08:00|
|fa5d79e252|Support asynchronous sending|2024-08-07 18:15:15 +08:00|
|acdfcda8db|Support custom prompts|2024-08-07 17:55:29 +08:00|
|aa8aadac49|2.3: [BUG] Garbled questions / irrelevant answers #3|2024-07-22 13:24:04 +08:00|
|0944aa1a85|Add continuous conversation and debug mode|2024-07-21 14:18:47 +08:00|
|b8aaaaf6a3|2.2: fix the plugin failing to load; fix #2: model switching did not actually take effect|2024-07-14 10:57:49 +08:00|
|138b1337ac|2.1: integrate bStats; rework the config file and add an update mechanism, so updating the plugin should no longer require backing up the config; fix incorrect Gradle dependencies|2024-07-14 01:25:47 +08:00|
|73edeba78c|2.0: switch to the jodd-http library to shrink the plugin; add command tab completion; all message colors can now be changed in the config file; add more models to the default config|2024-07-12 19:39:55 +08:00|
|293c9257aa|Switch to jodd-http to further shrink the plugin|2024-07-12 19:32:44 +08:00|
|65665d88df|Replace okhttp with Apache HttpClient to try to reduce the plugin size|2024-07-12 19:04:05 +08:00|
11 changed files with 486 additions and 157 deletions

View File

@@ -4,7 +4,7 @@ plugins {
 }
 group = 'com'
-version = '1.0'
+version = '2.5'
 repositories {
     mavenCentral()
@@ -16,18 +16,23 @@ repositories {
         name = "sonatype"
         url = "https://oss.sonatype.org/content/groups/public/"
     }
+    maven {
+        url = 'https://repo.codemc.io/repository/maven-public/'
+    }
 }
 dependencies {
     compileOnly "org.spigotmc:spigot-api:1.13-R0.1-SNAPSHOT"
-    implementation 'com.squareup.okhttp3:okhttp:4.9.3'
+    implementation 'org.bstats:bstats-bukkit:3.0.2'
+    implementation 'org.jodd:jodd-http:6.3.0'
     implementation 'org.json:json:20231013'
 }
 shadowJar {
     archiveFileName = "MineChatGPT-${project.version}.jar"
-    relocate 'okhttp3', 'com.ddaodan.minechatgpt.libs.okhttp3'
+    relocate 'jodd', 'com.ddaodan.shaded.jodd'
     relocate 'org.json', 'com.ddaodan.minechatgpt.libs.org.json'
+    relocate 'org.bstats', 'com.ddaodan.minechatgpt.libs.org.bstats'
 }
 def targetJavaVersion = 8

View File

@@ -1,3 +1,23 @@
 # Changelog
+## 2.5
+- Allow the plugin to load on Folia
+- Fix #4 [BUG] Garbled replies in some cases
+## 2.4
+- Add custom prompt support
+- Remove the config auto-update feature, which never had any effect
+## 2.3
+- Fix #3 [BUG] Garbled questions / irrelevant answers
+## 2.2
+- Fix the plugin failing to load
+- Fix #2: model switching did not actually take effect
+## 2.1
+- Integrate bStats
+- Rework the config file and add an update mechanism; updating the plugin should no longer require backing up the config
+- Fix incorrect Gradle dependencies
+## 2.0
+- Switch to the jodd-http library to reduce the plugin size
+- Add command tab completion
+- All message colors can now be changed in the config file
+- Add more models to the default config
 ## 1.0
 - Plugin released

readme.md
View File

@@ -1,60 +1,138 @@
 # MineChatGPT
 Chat with ChatGPT in Minecraft
+In theory every version is supported; testing is welcome
 All of the code was written by ChatGPT
 ## Features
-- [x] OpenAI API format
-- [x] Custom models
-- [x] ChatGPT reverse proxy
-- [ ] Command tab completion
-- [ ] Contextual conversation
-- [ ] Custom prompt
+- OpenAI API format
+- Custom models
+- ChatGPT reverse proxy
+- Command tab completion
+- Contextual conversation
+- Custom prompt
+- Folia support
 ## Installation
-- Download the plugin and put it in the plugins folder
-- Restart the server
+1. Download the plugin and put it in the plugins folder
+2. Restart the server
+> To stay compatible with more versions, the plugin does not declare a Bukkit api-version, so the console may print the following messages when the plugin is loaded on newer versions. This is normal.
+> ```
+> [Server thread/WARN]: Initializing Legacy Material Support. Unless you have legacy plugins and/or data this is a bug!
+> [Server thread/WARN]: Legacy plugin MineChatGPT v1.0 does not specify an api-version.
+> ```
+3. Open the config file `config.yml` and change the following two settings:
+```yaml
+# API settings
+api:
+  # Your OpenAI API key, used for authentication
+  # To get an API key, visit https://platform.openai.com/account/api-keys and create a new one
+  key: "sk-your_openai_api_key"
+  # Base URL of the OpenAI API, used to build requests
+  base_url: "https://api.openai.com/v1"
+```
+4. Run `/chatgpt reload` in the console to reload the config file
+## Screenshots
+- Server console (Spigot 1.20.1)
+![](https://i.ddaodan.cn/images/CWindowssystem32cmd.exe_20240712406.png)
+- Plugin
+![](https://i.ddaodan.cn/images/Minecraft_1.20.1_-__20240712407.png)
+- Conversation (custom knowledge base built with FastGPT)
+![](https://i.ddaodan.cn/images/Minecraft_1.20.1_-__20240712408.png)
 ## Config file `config.yml`
 ```yaml
 # API settings
 api:
   # Your OpenAI API key, used for authentication
-  # To get an API key, visit //platform.openai.com/account/api-keys and create a new one
-  key: "your_openai_api_key"
+  # To get an API key, visit https://platform.openai.com/account/api-keys and create a new one
+  key: "sk-your_openai_api_key"
   # Base URL of the OpenAI API, used to build requests
   base_url: "https://api.openai.com/v1"
 # List of supported models
 models:
   # OpenAI ChatGPT
   - "gpt-3.5-turbo"
+  - "gpt-3.5-turbo-instruct"
   - "gpt-4"
+  - "gpt-4-turbo"
+  - "gpt-4-turbo-preview"
+  - "gpt-4o"
+  - "gpt-4o-mini"
+  # Google Gemini
+  # - "gemini-pro"
+  # - "gemini-1.5-pro"
+  # Anthropic Claude
+  # - "claude-3-opus"
+  # - "claude-3-5-sonnet"
+  # And more...
 # The default model to use
 default_model: "gpt-3.5-turbo"
+conversation:
+  # Continuous conversation switch
+  context_enabled: false
+  max_history_size: 10
+prompt: "You are a helpful assistant.use Chinese."
 # Message settings
 messages:
-  reload: "已重新加载配置文件!"
-  help: "===== MineChatGPT 帮助 ====="
-  help_ask: "/chatgpt <text> - 向ChatGPT提问"
-  help_reload: "/chatgpt reload - 重新加载配置文件"
-  help_model: "/chatgpt model <model_name> - 切换至其他模型"
-  help_modellist: "/chatgpt modellist - 可用的模型列表"
-  usage: "输入: /chatgpt model <model_name>"
-  model_switch: "已切换至模型 %s"
-  chatgpt_error: "无法联系ChatGPT。"
-  chatgpt_response: "ChatGPT: %s"
-  question: "你: %s"
-  invalid_model: "模型无效。使用 /chatgpt modellist 查看可用模型。"
-  available_models: "可用模型列表:"
-  no_permission: "你没有权限使用这个指令。需要的权限:%s"
+  reload: "&a已重新加载配置文件!"
+  clear: "&a对话历史已清空"
+  help: "&e===== MineChatGPT 帮助 ====="
+  help_ask: "&e/chatgpt <text> - 向ChatGPT提问"
+  help_reload: "&e/chatgpt reload - 重新加载配置文件"
+  help_model: "&e/chatgpt model <model_name> - 切换至其他模型"
+  help_modellist: "&e/chatgpt modellist - 可用的模型列表"
+  help_context: "&e/chatgpt context - 切换连续对话模式"
+  help_clear: "/chatgpt clear - 清空对话历史"
+  context_toggle: "&a连续对话模式已%s"
+  context_toggle_enabled: "开启"
+  context_toggle_disabled: "关闭"
+  current_model_info: "&e当前模型:%s,输入 /chatgpt model <model_name> 来切换模型。"
+  model_switch: "&a已切换至模型 %s"
+  chatgpt_error: "&c无法联系ChatGPT。"
+  chatgpt_response: "&bChatGPT: %s"
+  question: "&b你: %s"
+  invalid_model: "&c模型无效。使用 /chatgpt modellist 查看可用模型。"
+  available_models: "&e可用模型列表:"
+  no_permission: "&c你没有权限使用这个指令。需要的权限:%s"
+# If you don't know what this is, don't touch it
+debug: false
 ```
+## Commands and permissions
+|Command|Permission|Description|
+|-|-|-|
+|`/chatgpt`|minechatgpt.use|Show the plugin help|
+|`/chatgpt <text>`|minechatgpt.use|Ask ChatGPT a question|
+|`/chatgpt reload`|minechatgpt.reload|Reload the config file|
+|`/chatgpt model <model_name>`|minechatgpt.model|Switch to another model|
+|`/chatgpt modellist`|minechatgpt.modellist|List the available models|
+|`/chatgpt context`|minechatgpt.context|Toggle continuous conversation mode|
+|`/chatgpt clear`|minechatgpt.clear|Clear the conversation history|
+## Compatible versions
+Only versions that have actually been tested are listed.
+|Server|Status|
+|-|-|
+|Luminol 1.21|✔ Supported|
+|Mohist 1.20.1|✔ Supported|
+|Spigot 1.20.1|✔ Supported|
+|Spigot 1.12.2|✔ Supported|
+|KCauldron 1.7.10|× Not supported|
 ## FAQ
-### `Failed to contact ChatGPT.` `无法联系ChatGPT。`
+### Asking a question shows `Failed to contact ChatGPT.` / `无法联系ChatGPT。`
 Check the error output in the console.
-### `connect timeout` `connect reset`
+### Asking a question logs `connect timeout`, `connect reset`, or similar in the console
 Check whether the `base_url` in `config.yml` is reachable. If you cannot connect to the official OpenAI API endpoint, consider using another reverse proxy.
+### Can I add other models?
+Yes. Any model that supports the OpenAI API can be used.
+### I don't have a ChatGPT account. Can I still use this?
+Yes. There are many proxy sites that are easy to use, also support other models, and are usually cheaper than the official API. If you like, you can also use my proxy, which is currently only offered in my QQ group (226385797).
+### Will Folia be supported?
+In theory the plugin can run on Folia, but the code has not been optimized for it, so there may be some issues. You are welcome to try running the plugin on Folia, but correct operation is not guaranteed.
 ## Sponsor
-![afdian-ddaodan.jpeg](https://i.ddaodan.cn/images/afdian-ddaodan.jpeg)
+[![](https://i.ddaodan.cn/images/afdian-ddaodan.jpeg)](https://afdian.com/a/ddaodan)
+## Statistics
+[![](https://bstats.org/signatures/bukkit/MineChatGPT.svg)](https://bstats.org/plugin/bukkit/MineChatGPT/22635)
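As the FAQ above notes, any OpenAI-compatible endpoint and model can be plugged in through `config.yml`. A minimal sketch, assuming a self-hosted reverse proxy; the proxy URL and model name below are placeholders, not values shipped with the plugin:

```yaml
api:
  key: "sk-your_api_key"
  # Any OpenAI-compatible endpoint works here, e.g. a reverse proxy you control
  base_url: "https://your-proxy.example.com/v1"
models:
  # List whatever model names your endpoint accepts
  - "my-custom-model"
default_model: "my-custom-model"
```

After editing, run `/chatgpt reload` to apply it, or use `/chatgpt model my-custom-model` in-game to switch at runtime.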

View File

@@ -3,138 +3,206 @@ package com.ddaodan.MineChatGPT;
 import org.bukkit.command.Command;
 import org.bukkit.command.CommandExecutor;
 import org.bukkit.command.CommandSender;
-import org.bukkit.ChatColor;
-import okhttp3.*;
 import org.json.JSONArray;
 import org.json.JSONObject;
-import java.io.IOException;
 import java.util.logging.Level;
 import java.util.logging.Logger;
 import java.util.List;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.concurrent.CompletableFuture;
+import java.nio.charset.StandardCharsets;
+import jodd.http.HttpRequest;
+import jodd.http.HttpResponse;
 public class CommandHandler implements CommandExecutor {
     private final Main plugin;
     private final ConfigManager configManager;
     private static final Logger logger = Logger.getLogger(CommandHandler.class.getName());
+    private final Map<String, ConversationContext> userContexts;
+    private final Map<String, Boolean> userContextEnabled;
     public CommandHandler(Main plugin, ConfigManager configManager) {
         this.plugin = plugin;
         this.configManager = configManager;
+        this.userContexts = new HashMap<>();
+        this.userContextEnabled = new HashMap<>();
     }
     @Override
     public boolean onCommand(CommandSender sender, Command command, String label, String[] args) {
+        String userId = sender.getName();
+        if (!userContexts.containsKey(userId)) {
+            userContexts.put(userId, new ConversationContext(configManager.getMaxHistorySize()));
+            userContextEnabled.put(userId, configManager.isContextEnabled());
+        }
+        ConversationContext conversationContext = userContexts.get(userId);
+        boolean contextEnabled = userContextEnabled.get(userId);
         if (command.getName().equalsIgnoreCase("chatgpt")) {
             if (args.length == 0) {
                 sendHelpMessage(sender);
                 return true;
             }
             String subCommand = args[0];
             if (subCommand.equalsIgnoreCase("reload")) {
                 if (!sender.hasPermission("minechatgpt.reload")) {
-                    sender.sendMessage(ChatColor.RED + String.format(configManager.getNoPermissionMessage(), "minechatgpt.reload"));
+                    sender.sendMessage(configManager.getNoPermissionMessage().replace("%s", "minechatgpt.reload"));
                     return true;
                 }
                 configManager.reloadConfig();
-                sender.sendMessage(ChatColor.GREEN + configManager.getReloadMessage());
+                sender.sendMessage(configManager.getReloadMessage());
                 return true;
             } else if (subCommand.equalsIgnoreCase("model")) {
                 if (!sender.hasPermission("minechatgpt.model")) {
-                    sender.sendMessage(ChatColor.RED + String.format(configManager.getNoPermissionMessage(), "minechatgpt.model"));
+                    sender.sendMessage(configManager.getNoPermissionMessage().replace("%s", "minechatgpt.model"));
                     return true;
                 }
                 if (args.length < 2) {
-                    sender.sendMessage(ChatColor.RED + configManager.getUsageMessage());
+                    String currentModel = configManager.getCurrentModel();
+                    sender.sendMessage(configManager.getCurrentModelInfoMessage().replace("%s", currentModel));
                     return true;
                 }
                 String model = args[1];
                 List<String> models = configManager.getModels();
                 if (models.contains(model)) {
-                    // Logic to switch model
-                    sender.sendMessage(ChatColor.GREEN + String.format(configManager.getModelSwitchMessage(), model));
+                    configManager.setCurrentModel(model);
+                    sender.sendMessage(configManager.getModelSwitchMessage().replace("%s", model));
                 } else {
-                    sender.sendMessage(ChatColor.RED + configManager.getInvalidModelMessage());
+                    sender.sendMessage(configManager.getInvalidModelMessage());
                 }
                 return true;
             } else if (subCommand.equalsIgnoreCase("modellist")) {
                 if (!sender.hasPermission("minechatgpt.modellist")) {
-                    sender.sendMessage(ChatColor.RED + String.format(configManager.getNoPermissionMessage(), "minechatgpt.modellist"));
+                    sender.sendMessage(configManager.getNoPermissionMessage().replace("%s", "minechatgpt.modellist"));
                     return true;
                 }
                 List<String> models = configManager.getModels();
-                sender.sendMessage(ChatColor.YELLOW + configManager.getAvailableModelsMessage());
+                sender.sendMessage(configManager.getAvailableModelsMessage());
                 for (String model : models) {
-                    sender.sendMessage(ChatColor.YELLOW + "- " + model);
+                    sender.sendMessage("- " + model);
                 }
                 return true;
+            } else if (args.length > 0 && args[0].equalsIgnoreCase("context")) {
+                contextEnabled = !contextEnabled;
+                userContextEnabled.put(userId, contextEnabled);
+                String status = contextEnabled ? configManager.getContextToggleEnabledMessage() : configManager.getContextToggleDisabledMessage();
+                sender.sendMessage(configManager.getContextToggleMessage().replace("%s", status));
+                return true;
+            } else if (subCommand.equalsIgnoreCase("clear")) {
+                if (!sender.hasPermission("minechatgpt.clear")) {
+                    sender.sendMessage(configManager.getNoPermissionMessage().replace("%s", "minechatgpt.clear"));
+                    return true;
+                }
+                conversationContext.clearHistory();
+                sender.sendMessage(configManager.getClearMessage());
+                return true;
             } else {
                 if (!sender.hasPermission("minechatgpt.use")) {
-                    sender.sendMessage(ChatColor.RED + String.format(configManager.getNoPermissionMessage(), "minechatgpt.use"));
+                    sender.sendMessage(configManager.getNoPermissionMessage().replace("%s", "minechatgpt.use"));
                     return true;
                 }
                 String question = String.join(" ", args);
-                // Logic to send question to ChatGPT
-                sender.sendMessage(ChatColor.AQUA + String.format(configManager.getQuestionMessage(), question));
-                askChatGPT(sender, question);
+                if (contextEnabled) {
+                    conversationContext.addMessage(question);
+                }
+                sender.sendMessage(configManager.getQuestionMessage().replace("%s", question));
+                askChatGPT(sender, question, conversationContext, contextEnabled);
                 return true;
             }
         }
         return false;
     }
-    private void askChatGPT(CommandSender sender, String question) {
-        OkHttpClient client = new OkHttpClient();
-        MediaType mediaType = MediaType.parse("application/json");
+    private void askChatGPT(CommandSender sender, String question, ConversationContext conversationContext, boolean contextEnabled) {
+        String utf8Question = convertToUTF8(question);
         JSONObject json = new JSONObject();
         json.put("model", configManager.getDefaultModel());
         JSONArray messages = new JSONArray();
+        // Add the custom prompt
+        String customPrompt = configManager.getCustomPrompt();
+        if (!customPrompt.isEmpty()) {
+            JSONObject promptMessage = new JSONObject();
+            promptMessage.put("role", "system");
+            promptMessage.put("content", customPrompt);
+            messages.put(promptMessage);
+        }
         JSONObject message = new JSONObject();
         message.put("role", "user");
-        message.put("content", question);
+        message.put("content", utf8Question);
         messages.put(message);
+        if (contextEnabled) {
+            String history = conversationContext.getConversationHistory();
+            if (!history.isEmpty()) {
+                JSONObject historyMessage = new JSONObject();
+                historyMessage.put("role", "system");
+                historyMessage.put("content", history);
+                messages.put(historyMessage);
+            }
+        }
         json.put("messages", messages);
-        RequestBody body = RequestBody.create(mediaType, json.toString());
-        Request request = new Request.Builder()
-                .url(configManager.getBaseUrl() + "/chat/completions")
-                .post(body)
-                .addHeader("Content-Type", "application/json")
-                .addHeader("Authorization", "Bearer " + configManager.getApiKey())
-                .build();
-        client.newCall(request).enqueue(new Callback() {
-            @Override
-            public void onFailure(Call call, IOException e) {
-                e.printStackTrace();
-                logger.log(Level.SEVERE, "Failed to contact ChatGPT: " + e.getMessage(), e);
-                sender.sendMessage(ChatColor.RED + configManager.getChatGPTErrorMessage());
-            }
-            @Override
-            public void onResponse(Call call, Response response) throws IOException {
-                if (response.isSuccessful()) {
-                    String responseBody = response.body().string();
-                    JSONObject jsonResponse = new JSONObject(responseBody);
-                    String answer = jsonResponse.getJSONArray("choices").getJSONObject(0).getJSONObject("message").getString("content");
-                    sender.sendMessage(ChatColor.AQUA + String.format(configManager.getChatGPTResponseMessage(), answer));
-                } else {
-                    String errorBody = response.body().string();
-                    logger.log(Level.SEVERE, "Failed to get a response from ChatGPT: " + errorBody);
-                    sender.sendMessage(ChatColor.RED + configManager.getChatGPTErrorMessage());
-                }
-                response.close();
-            }
-        });
+        json.put("model", configManager.getCurrentModel());
+        if (configManager.isDebugMode()) {
+            logger.info("Built request: " + json.toString());
+        }
+        HttpRequest request = HttpRequest.post(configManager.getBaseUrl() + "/chat/completions")
+                .header("Content-Type", "application/json; charset=UTF-8")
+                .header("Authorization", "Bearer " + configManager.getApiKey())
+                .bodyText(json.toString());
+        if (configManager.isDebugMode()) {
+            logger.info("Sending request to ChatGPT: " + request.toString());
+        }
+        //HttpResponse response = request.send();
+        CompletableFuture.supplyAsync(() -> request.send())
+                .thenAccept(response -> {
+                    if (configManager.isDebugMode()) {
+                        logger.info("Received response from ChatGPT: " + response.toString());
+                    }
+                    if (response.statusCode() == 200) {
+                        String responseBody = response.bodyText();
+                        String utf8ResponseBody = new String(responseBody.getBytes(StandardCharsets.ISO_8859_1), StandardCharsets.UTF_8);
+                        JSONObject jsonResponse = new JSONObject(utf8ResponseBody);
+                        String answer = jsonResponse.getJSONArray("choices").getJSONObject(0).getJSONObject("message").getString("content");
+                        sender.sendMessage(configManager.getChatGPTResponseMessage().replace("%s", answer));
+                        if (contextEnabled) {
+                            conversationContext.addMessage(answer); // Only add the AI response to the history when context is enabled
+                        }
+                    } else {
+                        String errorBody = response.bodyText();
+                        logger.log(Level.SEVERE, "Failed to get a response from ChatGPT: " + errorBody);
+                        sender.sendMessage(configManager.getChatGPTErrorMessage());
+                    }
+                })
+                .exceptionally(e -> {
+                    logger.log(Level.SEVERE, "Exception occurred while processing request: " + e.getMessage(), e);
+                    sender.sendMessage(configManager.getChatGPTErrorMessage());
+                    return null;
+                });
     }
+    private String convertToUTF8(String input) {
+        try {
+            // Try to re-encode the input string as UTF-8
+            byte[] bytes = input.getBytes(StandardCharsets.UTF_8);
+            return new String(bytes, StandardCharsets.UTF_8);
+        } catch (Exception e) {
+            logger.severe("Failed to convert input to UTF-8: " + e.getMessage());
+            return input; // Return the original input if the conversion fails
+        }
+    }
     private void sendHelpMessage(CommandSender sender) {
-        sender.sendMessage(ChatColor.YELLOW + configManager.getHelpMessage());
-        sender.sendMessage(ChatColor.YELLOW + configManager.getHelpAskMessage());
-        sender.sendMessage(ChatColor.YELLOW + configManager.getHelpReloadMessage());
-        sender.sendMessage(ChatColor.YELLOW + configManager.getHelpModelMessage());
-        sender.sendMessage(ChatColor.YELLOW + configManager.getHelpModelListMessage());
+        sender.sendMessage(configManager.getHelpMessage());
+        sender.sendMessage(configManager.getHelpAskMessage());
+        sender.sendMessage(configManager.getHelpReloadMessage());
+        sender.sendMessage(configManager.getHelpModelMessage());
+        sender.sendMessage(configManager.getHelpModelListMessage());
+        sender.sendMessage(configManager.getHelpContextMessage());
+        sender.sendMessage(configManager.getHelpClearMessage());
     }
 }
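For reference, the body that the reworked `askChatGPT` assembles is a standard OpenAI chat-completions payload, POSTed to `<base_url>/chat/completions` with an `Authorization: Bearer <api key>` header. A rough sketch of the resulting JSON under default settings (the question and history text are placeholders; note that the code appends the conversation history as a `system` message after the user message):

```json
{
  "model": "gpt-3.5-turbo",
  "messages": [
    {"role": "system", "content": "You are a helpful assistant."},
    {"role": "user", "content": "How do I find diamonds?"},
    {"role": "system", "content": "previous questions and answers, joined with newlines"}
  ]
}
```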

View File

@@ -1,91 +1,115 @@
 package com.ddaodan.MineChatGPT;
+import org.bukkit.ChatColor;
 import org.bukkit.configuration.file.FileConfiguration;
 import java.util.List;
 public class ConfigManager {
     private final Main plugin;
     private FileConfiguration config;
+    private String currentModel;
     public ConfigManager(Main plugin) {
         this.plugin = plugin;
         reloadConfig();
     }
+    public boolean isDebugMode() {
+        return config.getBoolean("debug", false);
+    }
     public void reloadConfig() {
         plugin.reloadConfig();
         config = plugin.getConfig();
+        currentModel = config.getString("default_model");
     }
+    private String translateColorCodes(String message) {
+        return ChatColor.translateAlternateColorCodes('&', message);
+    }
+    public String getCurrentModel() {
+        return currentModel;
+    }
+    public void setCurrentModel(String model) {
+        currentModel = model;
+    }
     public String getApiKey() {
         return config.getString("api.key");
     }
     public String getBaseUrl() {
         return config.getString("api.base_url");
     }
     public String getDefaultModel() {
         return config.getString("default_model");
     }
     public String getReloadMessage() {
-        return config.getString("messages.reload");
+        return translateColorCodes(config.getString("messages.reload"));
     }
     public List<String> getModels() {
         return config.getStringList("models");
     }
+    public String getCustomPrompt() {
+        return config.getString("prompt", "You are a helpful assistant.");
+    }
     public String getHelpMessage() {
-        return config.getString("messages.help");
+        return translateColorCodes(config.getString("messages.help"));
     }
     public String getHelpAskMessage() {
-        return config.getString("messages.help_ask");
+        return translateColorCodes(config.getString("messages.help_ask"));
     }
-    public String getHelpModelListMessage() {
-        return config.getString("messages.help_modellist");
-    }
     public String getHelpReloadMessage() {
-        return config.getString("messages.help_reload");
+        return translateColorCodes(config.getString("messages.help_reload"));
     }
     public String getHelpModelMessage() {
-        return config.getString("messages.help_model");
+        return translateColorCodes(config.getString("messages.help_model"));
     }
-    public String getUsageMessage() {
-        return config.getString("messages.usage");
+    public String getHelpModelListMessage() {
+        return translateColorCodes(config.getString("messages.help_modellist"));
+    }
+    public String getHelpContextMessage() {
+        return translateColorCodes(config.getString("messages.help_context", "/chatgpt context - Toggle context mode."));
+    }
+    public String getHelpClearMessage() {
+        return translateColorCodes(config.getString("messages.help_clear", "/chatgpt clear - Clear conversation history."));
     }
     public String getModelSwitchMessage() {
-        return config.getString("messages.model_switch");
+        return translateColorCodes(config.getString("messages.model_switch"));
     }
     public String getChatGPTErrorMessage() {
-        return config.getString("messages.chatgpt_error");
+        return translateColorCodes(config.getString("messages.chatgpt_error"));
     }
     public String getChatGPTResponseMessage() {
-        return config.getString("messages.chatgpt_response");
+        return translateColorCodes(config.getString("messages.chatgpt_response"));
     }
     public String getQuestionMessage() {
-        return config.getString("messages.question");
+        return translateColorCodes(config.getString("messages.question"));
     }
     public String getInvalidModelMessage() {
-        return config.getString("messages.invalid_model");
+        return translateColorCodes(config.getString("messages.invalid_model"));
     }
     public String getAvailableModelsMessage() {
-        return config.getString("messages.available_models");
+        return translateColorCodes(config.getString("messages.available_models"));
     }
     public String getNoPermissionMessage() {
-        return config.getString("messages.no_permission");
+        return translateColorCodes(config.getString("messages.no_permission"));
     }
+    public String getCurrentModelInfoMessage() {
+        return translateColorCodes(config.getString("messages.current_model_info"));
+    }
+    public int getMaxHistorySize() {
+        return config.getInt("conversation.max_history_size", 10);
+    }
+    public boolean isContextEnabled() {
+        return config.getBoolean("conversation.context_enabled", false);
+    }
+    public String getContextToggleMessage() {
+        return translateColorCodes(config.getString("messages.context_toggle", "Context is now %s."));
+    }
+    public String getContextToggleEnabledMessage() {
+        return translateColorCodes(config.getString("messages.context_toggle_enabled", "enabled"));
+    }
+    public String getContextToggleDisabledMessage() {
+        return translateColorCodes(config.getString("messages.context_toggle_disabled", "disabled"));
+    }
+    public String getClearMessage() {
+        return translateColorCodes(config.getString("messages.clear", "Conversation history has been cleared."));
+    }
 }

View File

@@ -0,0 +1,29 @@
+package com.ddaodan.MineChatGPT;
+import java.util.LinkedList;
+import java.util.Queue;
+public class ConversationContext {
+    private Queue<String> conversationHistory;
+    private int maxHistorySize;
+    public ConversationContext(int maxHistorySize) {
+        this.maxHistorySize = maxHistorySize;
+        this.conversationHistory = new LinkedList<>();
+    }
+    public void addMessage(String message) {
+        if (conversationHistory.size() >= maxHistorySize) {
+            conversationHistory.poll();
+        }
+        conversationHistory.offer(message);
+    }
+    public String getConversationHistory() {
+        return String.join("\n", conversationHistory);
+    }
+    public void clearHistory() {
+        conversationHistory.clear();
+    }
+}

View File

@@ -1,19 +1,29 @@
 package com.ddaodan.MineChatGPT;
 import org.bukkit.plugin.java.JavaPlugin;
+import org.bstats.bukkit.Metrics;
 import java.util.Objects;
 public final class Main extends JavaPlugin {
     private ConfigManager configManager;
     private CommandHandler commandHandler;
+    private MineChatGPTTabCompleter tabCompleter;
     @Override
     public void onEnable() {
         saveDefaultConfig();
         configManager = new ConfigManager(this);
         commandHandler = new CommandHandler(this, configManager);
+        tabCompleter = new MineChatGPTTabCompleter(configManager);
         Objects.requireNonNull(getCommand("chatgpt")).setExecutor(commandHandler);
+        Objects.requireNonNull(getCommand("chatgpt")).setTabCompleter(tabCompleter);
+        if (configManager.isDebugMode()) {
+            getLogger().info("DEBUG MODE IS TRUE!!!!!");
+        }
+        // Initialize bStats
+        int pluginId = 22635;
+        new Metrics(this, pluginId);
     }
     @Override

View File

@@ -0,0 +1,38 @@
+package com.ddaodan.MineChatGPT;
+import org.bukkit.command.Command;
+import org.bukkit.command.CommandSender;
+import org.bukkit.command.TabCompleter;
+import java.util.ArrayList;
+import java.util.List;
+public class MineChatGPTTabCompleter implements TabCompleter {
+    private final ConfigManager configManager;
+    public MineChatGPTTabCompleter(ConfigManager configManager) {
+        this.configManager = configManager;
+    }
+    @Override
+    public List<String> onTabComplete(CommandSender sender, Command command, String alias, String[] args) {
+        List<String> completions = new ArrayList<>();
+        if (args.length == 1) {
+            // Complete subcommands
+            completions.add("reload");
+            completions.add("model");
+            completions.add("modellist");
+            completions.add("context");
+            completions.add("clear");
+        } else if (args.length == 2 && args[0].equalsIgnoreCase("model")) {
+            // Complete model names
+            completions.addAll(configManager.getModels());
+        }
+        // Filter the completion list to match the current input
+        completions.removeIf(completion -> !completion.toLowerCase().startsWith(args[args.length - 1].toLowerCase()));
+        return completions;
+    }
+}

View File

@@ -9,22 +9,47 @@ api:
 models:
   # OpenAI ChatGPT
   - "gpt-3.5-turbo"
+  - "gpt-3.5-turbo-instruct"
   - "gpt-4"
+  - "gpt-4-turbo"
+  - "gpt-4-turbo-preview"
+  - "gpt-4o"
+  - "gpt-4o-mini"
+  # Google Gemini
+  # - "gemini-pro"
+  # - "gemini-1.5-pro"
+  # Anthropic Claude
+  # - "claude-3-opus"
+  # - "claude-3-5-sonnet"
+  # And more...
 # The default model to use
 default_model: "gpt-3.5-turbo"
+conversation:
+  # Continuous conversation switch
+  context_enabled: false
+  max_history_size: 10
+prompt: "You are a helpful assistant."
 # Message settings
 messages:
-  reload: "Configuration reloaded successfully!"
-  help: "===== MineChatGPT Help ====="
-  help_ask: "/chatgpt <text> - Ask ChatGPT a question."
-  help_reload: "/chatgpt reload - Reload the configuration file."
-  help_model: "/chatgpt model <model_name> - Switch to a different model."
-  help_modellist: "/chatgpt modellist - List available models."
-  usage: "Usage: /chatgpt model <model_name>"
-  model_switch: "Model switched to %s"
-  chatgpt_error: "Failed to contact ChatGPT."
-  chatgpt_response: "ChatGPT: %s"
-  question: "Question: %s"
-  invalid_model: "Invalid model. Use /chatgpt modellist to see available models."
-  available_models: "Available models:"
-  no_permission: "You do not have permission to use this command. Required permission: %s"
+  reload: "&aConfiguration reloaded successfully!"
+  clear: "Conversation history has been cleared!"
+  help: "&e===== MineChatGPT Help ====="
+  help_ask: "&e/chatgpt <text> - Ask ChatGPT a question."
+  help_reload: "&e/chatgpt reload - Reload the configuration file."
+  help_model: "&e/chatgpt model <model_name> - Switch to a different model."
+  help_modellist: "&e/chatgpt modellist - List available models."
+  help_context: "/chatgpt context - Toggle context mode."
+  help_clear: "/chatgpt clear - Clear conversation history."
+  context_toggle: "Context is now %s."
+  context_toggle_enabled: "enabled"
+  context_toggle_disabled: "disabled"
+  current_model_info: "&eCurrent model: %s. Use /chatgpt model <model_name> to switch models."
+  model_switch: "&aModel switched to %s"
+  chatgpt_error: "&cFailed to contact ChatGPT."
+  chatgpt_response: "&bChatGPT: %s"
+  question: "&bYou: %s"
+  invalid_model: "&cInvalid model. Use /chatgpt modellist to see available models."
+  available_models: "&eAvailable models:"
+  no_permission: "&cYou do not have permission to use this command. Required permission: %s"
+# If you don't know what this is, don't change it
+debug: false

View File

@@ -1,30 +1,55 @@
 # API settings
 api:
   # Your OpenAI API key, used for authentication
-  # To get an API key, visit //platform.openai.com/account/api-keys and create a new one
-  key: "your_openai_api_key"
+  # To get an API key, visit https://platform.openai.com/account/api-keys and create a new one
+  key: "sk-your_openai_api_key"
   # Base URL of the OpenAI API, used to build requests
   base_url: "https://api.openai.com/v1"
 # List of supported models
 models:
   # OpenAI ChatGPT
   - "gpt-3.5-turbo"
+  - "gpt-3.5-turbo-instruct"
   - "gpt-4"
+  - "gpt-4-turbo"
+  - "gpt-4-turbo-preview"
+  - "gpt-4o"
+  - "gpt-4o-mini"
+  # Google Gemini
+  # - "gemini-pro"
+  # - "gemini-1.5-pro"
+  # Anthropic Claude
+  # - "claude-3-opus"
+  # - "claude-3-5-sonnet"
+  # And more...
 # The default model to use
 default_model: "gpt-3.5-turbo"
+conversation:
+  # Continuous conversation switch
+  context_enabled: false
+  max_history_size: 10
+prompt: "You are a helpful assistant.use Chinese."
 # Message settings
 messages:
-  reload: "已重新加载配置文件!"
-  help: "===== MineChatGPT 帮助 ====="
-  help_ask: "/chatgpt <text> - 向ChatGPT提问"
-  help_reload: "/chatgpt reload - 重新加载配置文件"
-  help_model: "/chatgpt model <model_name> - 切换至其他模型"
-  help_modellist: "/chatgpt modellist - 可用的模型列表"
-  usage: "输入: /chatgpt model <model_name>"
-  model_switch: "已切换至模型 %s"
-  chatgpt_error: "无法联系ChatGPT。"
-  chatgpt_response: "ChatGPT: %s"
-  question: "你: %s"
-  invalid_model: "模型无效。使用 /chatgpt modellist 查看可用模型。"
-  available_models: "可用模型列表:"
-  no_permission: "你没有权限使用这个指令。需要的权限:%s"
+  reload: "&a已重新加载配置文件!"
+  clear: "&a对话历史已清空"
+  help: "&e===== MineChatGPT 帮助 ====="
+  help_ask: "&e/chatgpt <text> - 向ChatGPT提问"
+  help_reload: "&e/chatgpt reload - 重新加载配置文件"
+  help_model: "&e/chatgpt model <model_name> - 切换至其他模型"
+  help_modellist: "&e/chatgpt modellist - 可用的模型列表"
+  help_context: "&e/chatgpt context - 切换连续对话模式"
+  help_clear: "/chatgpt clear - 清空对话历史"
+  context_toggle: "&a连续对话模式已%s"
+  context_toggle_enabled: "开启"
+  context_toggle_disabled: "关闭"
+  current_model_info: "&e当前模型:%s,输入 /chatgpt model <model_name> 来切换模型。"
+  model_switch: "&a已切换至模型 %s"
+  chatgpt_error: "&c无法联系ChatGPT。"
+  chatgpt_response: "&bChatGPT: %s"
+  question: "&b你: %s"
+  invalid_model: "&c模型无效。使用 /chatgpt modellist 查看可用模型。"
+  available_models: "&e可用模型列表:"
+  no_permission: "&c你没有权限使用这个指令。需要的权限:%s"
+# If you don't know what this is, don't touch it
+debug: false

View File

@@ -2,12 +2,13 @@ name: MineChatGPT
 version: '${version}'
 main: com.ddaodan.MineChatGPT.Main
 author: ddaodan
+folia-supported: true
 description: A Spigot plugin for interacting with ChatGPT
 commands:
   chatgpt:
     description: Interact with ChatGPT
-    usage: /chatgpt <question>
+    usage: /chatgpt <text>
 permissions:
   minechatgpt.use:
     description: Allows using the /chatgpt command to interact with AI
@@ -21,3 +22,9 @@ permissions:
   minechatgpt.modellist:
     description: Allows listing available models
     default: op
+  minechatgpt.context:
+    description: Allows toggling context mode
+    default: true
+  minechatgpt.clear:
+    description: Allows clearing conversation history
+    default: true