diff --git a/build.gradle b/build.gradle index 3223498..3d15d65 100644 --- a/build.gradle +++ b/build.gradle @@ -24,28 +24,22 @@ repositories { dependencies { testImplementation platform('org.junit:junit-bom:5.10.0') testImplementation 'org.junit.jupiter:junit-jupiter' + // https://mvnrepository.com/artifact/org.commonmark/commonmark + implementation 'org.commonmark:commonmark:0.24.0' + // https://mvnrepository.com/artifact/org.commonjava.googlecode.markdown4j/markdown4j + implementation 'org.commonjava.googlecode.markdown4j:markdown4j:2.2-cj-1.1' + // https://mvnrepository.com/artifact/com.google.code.gson/gson + implementation 'com.google.code.gson:gson:2.8.9' } application { mainClass = 'com.axis.innovators.box.Main' } -shadowJar { - archiveBaseName = 'app' - archiveClassifier = '' - archiveVersion = '' -} - -launch4j { - mainClassName = 'com.axis.innovators.box.Main' - outfile = version + '.exe' - icon = "${projectDir}/logo.ico" - jar = shadowJar.archiveFile.get() - bundledJrePath = 'jre' -} - -tasks.named('launch4j') { - dependsOn shadowJar +jar { + manifest { + attributes 'Main-Class': 'com.axis.innovators.box.Main' + } } test { diff --git a/logo.ico b/logo.ico deleted file mode 100644 index 57b1602..0000000 Binary files a/logo.ico and /dev/null differ diff --git a/src/main/java/com/axis/innovators/box/Main.java b/src/main/java/com/axis/innovators/box/Main.java index 855605a..261942c 100644 --- a/src/main/java/com/axis/innovators/box/Main.java +++ b/src/main/java/com/axis/innovators/box/Main.java @@ -6,12 +6,13 @@ import com.axis.innovators.box.events.SubscribeEvent; import com.axis.innovators.box.gui.FridaWindow; import com.axis.innovators.box.gui.LocalWindow; import com.axis.innovators.box.gui.MainWindow; -import com.axis.innovators.box.tools.FolderCreator; import com.axis.innovators.box.tools.LibraryLoad; +import org.markdown4j.Markdown4jProcessor; import javax.swing.*; import java.awt.*; import java.awt.event.ActionEvent; +import java.io.IOException; /** 
* 主类 @@ -33,7 +34,7 @@ public class Main { // 我不想写这个了你们自己实现 } - public static void main(String[] args) { + public static void main(String[] args) throws IOException { // 注册事件 GlobalEventBus.EVENT_BUS.register(new Main()); // 设置系统外观 diff --git a/src/main/java/com/axis/innovators/box/gui/LocalWindow.java b/src/main/java/com/axis/innovators/box/gui/LocalWindow.java index 473ee6a..a86c077 100644 --- a/src/main/java/com/axis/innovators/box/gui/LocalWindow.java +++ b/src/main/java/com/axis/innovators/box/gui/LocalWindow.java @@ -210,6 +210,7 @@ public class LocalWindow extends JDialog { contextHandles.getLast(), temperature, prompt, + "你是一个乐于助人的AI助手,请用友好自然的语气回答用户问题。", this::publish ); } diff --git a/src/main/java/org/tzd/lm/LM.java b/src/main/java/org/tzd/lm/LM.java index 694fe09..6e1b4b0 100644 --- a/src/main/java/org/tzd/lm/LM.java +++ b/src/main/java/org/tzd/lm/LM.java @@ -9,7 +9,7 @@ import com.axis.innovators.box.tools.LibraryLoad; */ public class LM { public static boolean CUDA = true; - public final static String DEEP_SEEK = "G:/deepseek/deepseek/DeepSeek-R1-Distill-Qwen-1.5B-Q8_0/DeepSeek-R1-Distill-Qwen-1.5B-Q8_0.gguf"; + public final static String DEEP_SEEK = FolderCreator.getModelFolder() + "\\DeepSeek-R1-Distill-Qwen-1.5B-Q8_0.gguf"; static { if (!CUDA) { @@ -28,12 +28,32 @@ public class LM { } LibraryLoad.loadLibrary("LM"); } + public static long llamaLoadModelFromFile(String pathModel){ + return llamaLoadModelFromFile(pathModel, + false, + false, + false, + false, + null + ); + } + /** * 加载模型 * @param pathModel 模型路径 + * @param vocabOnly 只加载词汇表,不加载权重 + * @param useMmap 尽可能使用mmap + * @param useMlock 强制系统将模型保存在RAM中 + * @param checkTensors 验证模型张量数据 + * @param progressCallback 进度回调 + * @return 模型句柄 */ - public static native long llamaLoadModelFromFile(String pathModel); + public static native long llamaLoadModelFromFile(String pathModel, + boolean vocabOnly, + boolean useMmap, + boolean useMlock, + boolean checkTensors, + ProgressCallback progressCallback); /** * 
释放模型资源 @@ -97,19 +117,20 @@ public class LM { long ctxHandle, float temperature, String prompt, + String system, MessageCallback messageCallback){ return inference(modelHandle, ctxHandle, temperature, - 0.05f, - 40, - 0.95f, + 0.1f, + 100, + 0.9f, 0, - 5, - 2.0f, - 0, - 0, - prompt, + 64, + 1.1f, + 0.0f, + 0.0f, + system + "\n用户:" + prompt + "\n助手:", messageCallback ); } @@ -148,7 +169,7 @@ public class LM { MessageCallback messageCallback); /** - * 回调接口 + * 消息回调接口 */ public interface MessageCallback { /** @@ -158,12 +179,24 @@ public class LM { void onMessage(String message); } + /** + * 进度回调接口 + */ + public interface ProgressCallback { + /** + * 进度回调 + * @param progress 进度(0.0到1.0之间) + * @return 如果提供的progress_callback返回true,模型将继续加载。如果返回false,则立即中止模型加载。 + */ + boolean onModelLoad(float progress); + } + public static void main(String[] args) { // 加载模型 long modelHandle = llamaLoadModelFromFile(DEEP_SEEK); // 创建新的上下文 long ctxHandle = createContext(modelHandle); - inference(modelHandle, ctxHandle, 0.2f, "Tell me how gpt works in an easy way, in Chinese", new MessageCallback() { + inference(modelHandle, ctxHandle, 0.2f, "Tell me who you are in English","你是一个乐于助人的AI助手", new MessageCallback() { @Override public void onMessage(String message) { // 回调输出 diff --git a/src/main/java/org/tzd/lm/LMApi.java b/src/main/java/org/tzd/lm/LMApi.java new file mode 100644 index 0000000..3f46beb --- /dev/null +++ b/src/main/java/org/tzd/lm/LMApi.java @@ -0,0 +1,91 @@ +package org.tzd.lm; + +import java.io.*; +import java.net.HttpURLConnection; +import java.net.URI; +import java.net.URL; +import java.nio.charset.StandardCharsets; +import java.util.ArrayList; +import java.util.List; +import java.util.Map; + +import com.google.gson.Gson; +import com.google.gson.reflect.TypeToken; + +/** + * @author tzdwindows 7 + */ +public class LMApi { + private static final String API_URL = "https://api.pearktrue.cn/api/deepseek/"; + private static final Gson GSON = new Gson(); + + public static class 
Message { + private String role; + private String content; + + public Message(String role, String content) { + this.role = role; + this.content = content; + } + + public String getRole() { return role; } + public String getContent() { return content; } + public void setRole(String role) { this.role = role; } + public void setContent(String content) { this.content = content; } + } + + /** + * 调用AI接口获取回复 + * @param messages 消息列表 + * @return AI回复内容 + */ + public String getAIResponse(List messages) throws IOException { + String jsonInput = GSON.toJson(Map.of("messages", messages)); + + URL url = URI.create(API_URL).toURL(); + HttpURLConnection conn = (HttpURLConnection) url.openConnection(); + conn.setRequestMethod("POST"); + conn.setRequestProperty("Content-Type", "application/json"); + conn.setDoOutput(true); + + try(OutputStream os = conn.getOutputStream()) { + byte[] input = jsonInput.getBytes(StandardCharsets.UTF_8); + os.write(input, 0, input.length); + } + + int responseCode = conn.getResponseCode(); + if (responseCode == HttpURLConnection.HTTP_OK) { + try (BufferedReader br = new BufferedReader( + new InputStreamReader(conn.getInputStream(), "GBK"))) { + String response = readAllLines(br); + Map result = GSON.fromJson(response, + new TypeToken>(){}.getType()); + return result.get("message"); + } + } else { + throw new IOException("API请求失败,状态码:" + responseCode); + } + } + + private String readAllLines(BufferedReader reader) throws IOException { + StringBuilder sb = new StringBuilder(); + String line; + while ((line = reader.readLine()) != null) { + sb.append(line); + } + return sb.toString(); + } + + public static void main(String[] args) { + List messages = new ArrayList<>(); + messages.add(new Message("system", "You are a helpful assistant.")); + messages.add(new Message("user", "你好啊")); + + try { + String reply = new LMApi().getAIResponse(messages); + System.out.println(reply); + } catch (IOException e) { + e.printStackTrace(); + } + } +} \ No newline at end 
of file