feat(box): implement the AI chat window and add network API calls

- Add an LMApi class that calls the web API to fetch AI replies
- Modify the LocalWindow class to pass a system prompt
- Update the Main class to pull in the Markdown processing library
- Add the related dependencies to build.gradle
build.gradle (26 changed lines)
@@ -24,28 +24,22 @@ repositories {
dependencies {
    testImplementation platform('org.junit:junit-bom:5.10.0')
    testImplementation 'org.junit.jupiter:junit-jupiter'
    // https://mvnrepository.com/artifact/org.commonmark/commonmark
    implementation 'org.commonmark:commonmark:0.24.0'
    // https://mvnrepository.com/artifact/org.commonjava.googlecode.markdown4j/markdown4j
    implementation 'org.commonjava.googlecode.markdown4j:markdown4j:2.2-cj-1.1'
    // https://mvnrepository.com/artifact/com.google.code.gson/gson
    implementation 'com.google.code.gson:gson:2.8.9'
}

application {
    mainClass = 'com.axis.innovators.box.Main'
}

shadowJar {
    archiveBaseName = 'app'
    archiveClassifier = ''
    archiveVersion = ''
}

launch4j {
    mainClassName = 'com.axis.innovators.box.Main'
    outfile = version + '.exe'
    icon = "${projectDir}/logo.ico"
    jar = shadowJar.archiveFile.get()
    bundledJrePath = 'jre'
}

tasks.named('launch4j') {
    dependsOn shadowJar
}

jar {
    manifest {
        attributes 'Main-Class': 'com.axis.innovators.box.Main'
    }
}

test {
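(Note, not part of the diff: the dependency block above pulls in both commonmark and markdown4j. As a minimal sketch of the Markdown handling this enables, commonmark turns a Markdown reply into HTML roughly as shown below; which of the two libraries the chat window actually uses for rendering is not visible in this hunk.)

    import org.commonmark.node.Node;
    import org.commonmark.parser.Parser;
    import org.commonmark.renderer.html.HtmlRenderer;

    Parser parser = Parser.builder().build();
    Node document = parser.parse("**Bold** AI reply with a [link](https://example.com).");
    String html = HtmlRenderer.builder().build().render(document);
    // The HTML string can then be shown in a Swing component such as a JEditorPane ("text/html").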
Main.java

@@ -6,12 +6,13 @@ import com.axis.innovators.box.events.SubscribeEvent;
import com.axis.innovators.box.gui.FridaWindow;
import com.axis.innovators.box.gui.LocalWindow;
import com.axis.innovators.box.gui.MainWindow;
import com.axis.innovators.box.tools.FolderCreator;
import com.axis.innovators.box.tools.LibraryLoad;
+import org.markdown4j.Markdown4jProcessor;

import javax.swing.*;
import java.awt.*;
import java.awt.event.ActionEvent;
import java.io.IOException;

/**
 * Main class

@@ -33,7 +34,7 @@ public class Main {
        // I don't want to write this any more; implement it yourselves
    }

-   public static void main(String[] args) {
+   public static void main(String[] args) throws IOException {
        // Register events
        GlobalEventBus.EVENT_BUS.register(new Main());
        // Set the system look and feel
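(Note, not part of the diff: Markdown4jProcessor.process(String) is declared to throw IOException, which is presumably why main() now declares throws IOException. A minimal sketch of that call; how Main actually feeds text through the processor is not shown in these hunks.)

    import org.markdown4j.Markdown4jProcessor;
    import java.io.IOException;

    String html = new Markdown4jProcessor().process("# AI reply\nRendered from *Markdown* to HTML");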
LocalWindow.java

@@ -210,6 +210,7 @@ public class LocalWindow extends JDialog {
                contextHandles.getLast(),
                temperature,
                prompt,
+               "你是一个乐于助人的AI助手,请用友好自然的语气回答用户问题。",
                this::publish
        );
    }
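(Note, not part of the diff: the this::publish argument suggests this call runs inside a SwingWorker, whose publish/process pair moves streamed chunks onto the EDT. A hypothetical sketch of that pattern follows, assuming the six-argument convenience overload of LM.inference shown further down; the helper name, the JTextArea, and the handle/temperature parameters are assumptions, only LM.inference and the system prompt string come from this commit.)

    import javax.swing.JTextArea;
    import javax.swing.SwingWorker;
    import java.util.List;

    static void streamReply(long modelHandle, long ctxHandle, float temperature,
                            String prompt, JTextArea outputArea) {
        new SwingWorker<Void, String>() {
            @Override
            protected Void doInBackground() {
                // Runs off the EDT; each generated chunk is handed to publish(...).
                LM.inference(modelHandle,
                        ctxHandle,
                        temperature,
                        prompt,
                        "你是一个乐于助人的AI助手,请用友好自然的语气回答用户问题。",
                        this::publish);
                return null;
            }

            @Override
            protected void process(List<String> chunks) {
                // Runs on the EDT: append the streamed chunks to the UI.
                chunks.forEach(outputArea::append);
            }
        }.execute();
    }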
LM.java

@@ -9,7 +9,7 @@ import com.axis.innovators.box.tools.LibraryLoad;
 */
public class LM {
    public static boolean CUDA = true;
-   public final static String DEEP_SEEK = "G:/deepseek/deepseek/DeepSeek-R1-Distill-Qwen-1.5B-Q8_0/DeepSeek-R1-Distill-Qwen-1.5B-Q8_0.gguf";
+   public final static String DEEP_SEEK = FolderCreator.getModelFolder() + "\\DeepSeek-R1-Distill-Qwen-1.5B-Q8_0.gguf";

    static {
        if (!CUDA) {

@@ -28,12 +28,32 @@ public class LM {
        }
        LibraryLoad.loadLibrary("LM");
    }

    public static long llamaLoadModelFromFile(String pathModel){
        return llamaLoadModelFromFile(pathModel,
                false,
                false,
                false,
                false,
                null
        );
    }

    /**
     * Load a model
     * @param pathModel path to the model file
     * @param vocabOnly only load the vocabulary, not the weights
     * @param useMmap use mmap where possible
     * @param useMlock force the system to keep the model in RAM
     * @param checkTensors validate the model tensor data
     * @param progressCallback progress callback
     * @return the model handle
     */
-   public static native long llamaLoadModelFromFile(String pathModel);
+   public static native long llamaLoadModelFromFile(String pathModel,
+                                                     boolean vocabOnly,
+                                                     boolean useMmap,
+                                                     boolean useMlock,
+                                                     boolean checkTensors,
+                                                     ProgressCallback progressCallback);
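(Note, not part of the diff: a short illustrative call of the expanded loader with a progress callback. The flag values here are examples only and are not taken from the commit.)

    // Flag values are illustrative; ProgressCallback is the interface declared further down in LM.java.
    long modelHandle = LM.llamaLoadModelFromFile(
            LM.DEEP_SEEK,   // model path resolved via FolderCreator.getModelFolder()
            false,          // vocabOnly: load the weights as well, not just the vocabulary
            true,           // useMmap: map the file instead of reading it fully into memory
            false,          // useMlock: do not pin the model in RAM
            false,          // checkTensors: skip tensor validation
            progress -> {   // progress is between 0.0 and 1.0
                System.out.printf("loading: %.0f%%%n", progress * 100);
                return true;    // returning false aborts loading
            });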
    /**
     * Release model resources

@@ -97,19 +117,20 @@ public class LM {
            long ctxHandle,
            float temperature,
            String prompt,
+           String system,
            MessageCallback messageCallback){
        return inference(modelHandle,
                ctxHandle,
                temperature,
                0.05f,
                40,
                0.95f,
                0.1f,
                100,
                0.9f,
                0,
                5,
                2.0f,
                0,
                0,
                prompt,
                64,
                1.1f,
                0.0f,
                0.0f,
                system + "\n用户:" + prompt + "\n助手:",
                messageCallback
        );
    }
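(Note, not part of the diff: for reference, the chat template this overload builds from its system and prompt arguments looks like the following; the values are illustrative.)

    String system = "你是一个乐于助人的AI助手";
    String prompt = "你好";
    String assembled = system + "\n用户:" + prompt + "\n助手:";
    // assembled == "你是一个乐于助人的AI助手\n用户:你好\n助手:"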
@@ -148,7 +169,7 @@ public class LM {
            MessageCallback messageCallback);

    /**
-    * Callback interface
+    * Message callback interface
     */
    public interface MessageCallback {
        /**

@@ -158,12 +179,24 @@ public class LM {
        void onMessage(String message);
    }

    /**
     * Progress callback interface
     */
    public interface ProgressCallback {
        /**
         * Progress callback
         * @param progress progress (between 0.0 and 1.0)
         * @return if the supplied progress_callback returns true, model loading continues; if it returns false, loading is aborted immediately.
         */
        boolean onModelLoad(float progress);
    }

    public static void main(String[] args) {
        // Load the model
        long modelHandle = llamaLoadModelFromFile(DEEP_SEEK);
        // Create a new context
        long ctxHandle = createContext(modelHandle);
-       inference(modelHandle, ctxHandle, 0.2f, "Tell me how gpt works in an easy way, in Chinese", new MessageCallback() {
+       inference(modelHandle, ctxHandle, 0.2f, "Tell me who you are in English","你是一个乐于助人的AI助手", new MessageCallback() {
            @Override
            public void onMessage(String message) {
                // Callback output
src/main/java/org/tzd/lm/LMApi.java (new file, 91 lines)
@@ -0,0 +1,91 @@
package org.tzd.lm;

import java.io.*;
import java.net.HttpURLConnection;
import java.net.URI;
import java.net.URL;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;

import com.google.gson.Gson;
import com.google.gson.reflect.TypeToken;

/**
 * @author tzdwindows 7
 */
public class LMApi {
    private static final String API_URL = "https://api.pearktrue.cn/api/deepseek/";
    private static final Gson GSON = new Gson();

    public static class Message {
        private String role;
        private String content;

        public Message(String role, String content) {
            this.role = role;
            this.content = content;
        }

        public String getRole() { return role; }
        public String getContent() { return content; }
        public void setRole(String role) { this.role = role; }
        public void setContent(String content) { this.content = content; }
    }

    /**
     * Call the AI API and get a reply
     * @param messages list of messages
     * @return the AI reply content
     */
    public String getAIResponse(List<Message> messages) throws IOException {
        String jsonInput = GSON.toJson(Map.of("messages", messages));

        URL url = URI.create(API_URL).toURL();
        HttpURLConnection conn = (HttpURLConnection) url.openConnection();
        conn.setRequestMethod("POST");
        conn.setRequestProperty("Content-Type", "application/json");
        conn.setDoOutput(true);

        try(OutputStream os = conn.getOutputStream()) {
            byte[] input = jsonInput.getBytes(StandardCharsets.UTF_8);
            os.write(input, 0, input.length);
        }

        int responseCode = conn.getResponseCode();
        if (responseCode == HttpURLConnection.HTTP_OK) {
            try (BufferedReader br = new BufferedReader(
                    new InputStreamReader(conn.getInputStream(), "GBK"))) {
                String response = readAllLines(br);
                Map<String, String> result = GSON.fromJson(response,
                        new TypeToken<Map<String, String>>(){}.getType());
                return result.get("message");
            }
        } else {
            throw new IOException("API请求失败,状态码:" + responseCode);
        }
    }

    private String readAllLines(BufferedReader reader) throws IOException {
        StringBuilder sb = new StringBuilder();
        String line;
        while ((line = reader.readLine()) != null) {
            sb.append(line);
        }
        return sb.toString();
    }

    public static void main(String[] args) {
        List<Message> messages = new ArrayList<>();
        messages.add(new Message("system", "You are a helpful assistant."));
        messages.add(new Message("user", "你好啊"));

        try {
            String reply = new LMApi().getAIResponse(messages);
            System.out.println(reply);
        } catch (IOException e) {
            e.printStackTrace();
        }
    }
}
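(Note, not part of the diff: a sketch of a multi-turn exchange on top of the class above. The request body Gson serializes is of the form {"messages":[{"role":"...","content":"..."}, ...]} and the reply is read from the "message" field; whether the remote endpoint honours an "assistant" role for conversation history is an assumption, not verified here.)

    import java.io.IOException;
    import java.util.ArrayList;
    import java.util.List;

    public class LMApiChatDemo {
        public static void main(String[] args) throws IOException {
            LMApi api = new LMApi();
            List<LMApi.Message> messages = new ArrayList<>();
            messages.add(new LMApi.Message("system", "You are a helpful assistant."));

            // First turn
            messages.add(new LMApi.Message("user", "用一句话介绍一下你自己"));
            String first = api.getAIResponse(messages);
            System.out.println(first);

            // Keep the reply in the history so the next request has context (assumed behaviour).
            messages.add(new LMApi.Message("assistant", first));
            messages.add(new LMApi.Message("user", "再详细一点"));
            System.out.println(api.getAIResponse(messages));
        }
    }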