feat(box): 实现 AI 对话窗口并添加网络 API 调用

- 新增 LMApi 类实现 API 调用获取 AI 回复
- 修改 LocalWindow 类,添加系统提示信息
- 更新 Main 类,引入 Markdown 处理库
- 在 build.gradle 中添加相关依赖
This commit is contained in:
tzdwindows 7
2025-02-09 12:06:50 +08:00
parent 022446eb32
commit db5d4b06c6
6 changed files with 150 additions and 30 deletions

View File

@@ -24,28 +24,22 @@ repositories {
dependencies { dependencies {
testImplementation platform('org.junit:junit-bom:5.10.0') testImplementation platform('org.junit:junit-bom:5.10.0')
testImplementation 'org.junit.jupiter:junit-jupiter' testImplementation 'org.junit.jupiter:junit-jupiter'
// https://mvnrepository.com/artifact/org.commonmark/commonmark
implementation 'org.commonmark:commonmark:0.24.0'
// https://mvnrepository.com/artifact/org.commonjava.googlecode.markdown4j/markdown4j
implementation 'org.commonjava.googlecode.markdown4j:markdown4j:2.2-cj-1.1'
// https://mvnrepository.com/artifact/com.google.code.gson/gson
implementation 'com.google.code.gson:gson:2.8.9'
} }
application { application {
mainClass = 'com.axis.innovators.box.Main' mainClass = 'com.axis.innovators.box.Main'
} }
shadowJar { jar {
archiveBaseName = 'app' manifest {
archiveClassifier = '' attributes 'Main-Class': 'com.axis.innovators.box.Main'
archiveVersion = '' }
}
launch4j {
mainClassName = 'com.axis.innovators.box.Main'
outfile = version + '.exe'
icon = "${projectDir}/logo.ico"
jar = shadowJar.archiveFile.get()
bundledJrePath = 'jre'
}
tasks.named('launch4j') {
dependsOn shadowJar
} }
test { test {

BIN
logo.ico

Binary file not shown.

Before

Width:  |  Height:  |  Size: 4.2 KiB

View File

@@ -6,12 +6,13 @@ import com.axis.innovators.box.events.SubscribeEvent;
import com.axis.innovators.box.gui.FridaWindow; import com.axis.innovators.box.gui.FridaWindow;
import com.axis.innovators.box.gui.LocalWindow; import com.axis.innovators.box.gui.LocalWindow;
import com.axis.innovators.box.gui.MainWindow; import com.axis.innovators.box.gui.MainWindow;
import com.axis.innovators.box.tools.FolderCreator;
import com.axis.innovators.box.tools.LibraryLoad; import com.axis.innovators.box.tools.LibraryLoad;
import org.markdown4j.Markdown4jProcessor;
import javax.swing.*; import javax.swing.*;
import java.awt.*; import java.awt.*;
import java.awt.event.ActionEvent; import java.awt.event.ActionEvent;
import java.io.IOException;
/** /**
* 主类 * 主类
@@ -33,7 +34,7 @@ public class Main {
// 我不想写这个了你们自己实现 // 我不想写这个了你们自己实现
} }
public static void main(String[] args) { public static void main(String[] args) throws IOException {
// 注册事件 // 注册事件
GlobalEventBus.EVENT_BUS.register(new Main()); GlobalEventBus.EVENT_BUS.register(new Main());
// 设置系统外观 // 设置系统外观

View File

@@ -210,6 +210,7 @@ public class LocalWindow extends JDialog {
contextHandles.getLast(), contextHandles.getLast(),
temperature, temperature,
prompt, prompt,
"你是一个乐于助人的AI助手请用友好自然的语气回答用户问题。",
this::publish this::publish
); );
} }

View File

@@ -9,7 +9,7 @@ import com.axis.innovators.box.tools.LibraryLoad;
*/ */
public class LM { public class LM {
public static boolean CUDA = true; public static boolean CUDA = true;
public final static String DEEP_SEEK = "G:/deepseek/deepseek/DeepSeek-R1-Distill-Qwen-1.5B-Q8_0/DeepSeek-R1-Distill-Qwen-1.5B-Q8_0.gguf"; public final static String DEEP_SEEK = FolderCreator.getModelFolder() + "\\DeepSeek-R1-Distill-Qwen-1.5B-Q8_0.gguf";
static { static {
if (!CUDA) { if (!CUDA) {
@@ -28,12 +28,32 @@ public class LM {
} }
LibraryLoad.loadLibrary("LM"); LibraryLoad.loadLibrary("LM");
} }
public static long llamaLoadModelFromFile(String pathModel){
return llamaLoadModelFromFile(pathModel,
false,
false,
false,
false,
null
);
}
/** /**
* 加载模型 * 加载模型
* @param pathModel 模型路径 * @param pathModel 模型路径
* @param vocabOnly 只负重词汇,不负重
* @param useMmap 尽可能使用mmap
* @param useMlock 强制系统将模型保存在RAM中
* @param checkTensors 验证模型张量数据
* @param progressCallback 进度回调
* @return 模型句柄 * @return 模型句柄
*/ */
public static native long llamaLoadModelFromFile(String pathModel); public static native long llamaLoadModelFromFile(String pathModel,
boolean vocabOnly,
boolean useMmap,
boolean useMlock,
boolean checkTensors,
ProgressCallback progressCallback);
/** /**
* 释放模型资源 * 释放模型资源
@@ -97,19 +117,20 @@ public class LM {
long ctxHandle, long ctxHandle,
float temperature, float temperature,
String prompt, String prompt,
String system,
MessageCallback messageCallback){ MessageCallback messageCallback){
return inference(modelHandle, return inference(modelHandle,
ctxHandle, ctxHandle,
temperature, temperature,
0.05f, 0.1f,
40, 100,
0.95f, 0.9f,
0, 0,
5, 64,
2.0f, 1.1f,
0, 0.0f,
0, 0.0f,
prompt, system + "\n用户" + prompt + "\n助手",
messageCallback messageCallback
); );
} }
@@ -148,7 +169,7 @@ public class LM {
MessageCallback messageCallback); MessageCallback messageCallback);
/** /**
* 回调接口 * 消息回调接口
*/ */
public interface MessageCallback { public interface MessageCallback {
/** /**
@@ -158,12 +179,24 @@ public class LM {
void onMessage(String message); void onMessage(String message);
} }
/**
* 进度回调接口
*/
public interface ProgressCallback {
/**
* 进度回调
* @param progress 进度0.0到1.0之间)
* @return 如果提供的progress_callback返回true模型将继续加载。如果返回false则立即中止模型加载。
*/
boolean onModelLoad(float progress);
}
public static void main(String[] args) { public static void main(String[] args) {
// 加载模型 // 加载模型
long modelHandle = llamaLoadModelFromFile(DEEP_SEEK); long modelHandle = llamaLoadModelFromFile(DEEP_SEEK);
// 创建新的上下文 // 创建新的上下文
long ctxHandle = createContext(modelHandle); long ctxHandle = createContext(modelHandle);
inference(modelHandle, ctxHandle, 0.2f, "Tell me how gpt works in an easy way, in Chinese", new MessageCallback() { inference(modelHandle, ctxHandle, 0.2f, "Tell me who you are in English","你是一个乐于助人的AI助手", new MessageCallback() {
@Override @Override
public void onMessage(String message) { public void onMessage(String message) {
// 回调输出 // 回调输出

View File

@@ -0,0 +1,91 @@
package org.tzd.lm;
import java.io.*;
import java.net.HttpURLConnection;
import java.net.URI;
import java.net.URL;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import com.google.gson.Gson;
import com.google.gson.reflect.TypeToken;
/**
* @author tzdwindows 7
*/
public class LMApi {
    private static final String API_URL = "https://api.pearktrue.cn/api/deepseek/";
    private static final Gson GSON = new Gson();
    /** Abort connection attempts that cannot reach the endpoint (ms). */
    private static final int CONNECT_TIMEOUT_MS = 10_000;
    /** Abort reads once the endpoint stops responding (ms). */
    private static final int READ_TIMEOUT_MS = 30_000;

    /**
     * One chat message of the request payload; serialized by Gson as
     * {@code {"role": ..., "content": ...}}.
     */
    public static class Message {
        private String role;
        private String content;

        public Message(String role, String content) {
            this.role = role;
            this.content = content;
        }

        public String getRole() { return role; }
        public String getContent() { return content; }
        public void setRole(String role) { this.role = role; }
        public void setContent(String content) { this.content = content; }
    }

    /**
     * Calls the AI endpoint and returns its reply.
     *
     * @param messages conversation history, sent as the JSON body {@code {"messages": [...]}}
     * @return the {@code "message"} field of the JSON response, or {@code null} if the
     *         field is absent
     * @throws IOException if the request fails or times out, the server answers with a
     *         non-200 status, or the response body is empty/unparseable
     */
    public String getAIResponse(List<Message> messages) throws IOException {
        String jsonInput = GSON.toJson(Map.of("messages", messages));
        URL url = URI.create(API_URL).toURL();
        HttpURLConnection conn = (HttpURLConnection) url.openConnection();
        try {
            conn.setRequestMethod("POST");
            conn.setRequestProperty("Content-Type", "application/json");
            // Without explicit timeouts HttpURLConnection can block forever on a dead host.
            conn.setConnectTimeout(CONNECT_TIMEOUT_MS);
            conn.setReadTimeout(READ_TIMEOUT_MS);
            conn.setDoOutput(true);
            try (OutputStream os = conn.getOutputStream()) {
                os.write(jsonInput.getBytes(StandardCharsets.UTF_8));
            }
            int responseCode = conn.getResponseCode();
            if (responseCode != HttpURLConnection.HTTP_OK) {
                throw new IOException("API请求失败状态码" + responseCode);
            }
            // NOTE(review): the response is decoded as GBK while the request is sent as
            // UTF-8 — confirm the endpoint really answers in GBK; a JSON API would
            // normally use UTF-8.
            try (BufferedReader br = new BufferedReader(
                    new InputStreamReader(conn.getInputStream(), "GBK"))) {
                String response = readAllLines(br);
                Map<String, String> result = GSON.fromJson(response,
                        new TypeToken<Map<String, String>>(){}.getType());
                if (result == null) {
                    // Gson yields null for an empty body; surface that as an I/O failure
                    // instead of letting the caller hit a NullPointerException.
                    throw new IOException("API返回内容为空");
                }
                return result.get("message");
            }
        } finally {
            // Release the underlying socket even when an exception is thrown above.
            conn.disconnect();
        }
    }

    /** Concatenates every line of the reader (line separators dropped). */
    private String readAllLines(BufferedReader reader) throws IOException {
        StringBuilder sb = new StringBuilder();
        String line;
        while ((line = reader.readLine()) != null) {
            sb.append(line);
        }
        return sb.toString();
    }

    /** Smoke test: sends a short conversation and prints the reply. */
    public static void main(String[] args) {
        List<Message> messages = new ArrayList<>();
        messages.add(new Message("system", "You are a helpful assistant."));
        messages.add(new Message("user", "你好啊"));
        try {
            String reply = new LMApi().getAIResponse(messages);
            System.out.println(reply);
        } catch (IOException e) {
            e.printStackTrace();
        }
    }
}