Second commit
build.gradle (34 lines changed)
@@ -1,12 +1,24 @@
plugins {
    id 'java'
    id 'application'
    id 'com.github.johnrengelman.shadow' version '7.1.2'
    id 'edu.sc.seis.launch4j' version '2.5.4'
}

def requiredJavaVersion = 20
def currentJavaVersion = JavaVersion.current().majorVersion.toInteger()
if (currentJavaVersion != requiredJavaVersion) {
    throw new GradleException("The build requires JDK ${requiredJavaVersion}, but the current JDK is ${currentJavaVersion}. Please switch to a matching JDK.")
}

group = 'com.axis.innovators.box'
version = '1.0-SNAPSHOT'

repositories {
    mavenLocal()
    maven { url "https://maven.aliyun.com/repository/public" }
    mavenCentral()
    jcenter()
}

dependencies {
@@ -14,6 +26,28 @@ dependencies {
    testImplementation 'org.junit.jupiter:junit-jupiter'
}

application {
    mainClass = 'com.axis.innovators.box.Main'
}

shadowJar {
    archiveBaseName = 'app'
    archiveClassifier = ''
    archiveVersion = ''
}

launch4j {
    mainClassName = 'com.axis.innovators.box.Main'
    outfile = version + '.exe'
    icon = "${projectDir}/logo.ico"
    jar = shadowJar.archiveFile.get()
    bundledJrePath = 'jre'
}

tasks.named('launch4j') {
    dependsOn shadowJar
}

test {
    useJUnitPlatform()
}
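Note: the guard above fails the build when run under the wrong JDK, but says nothing about the JVM the packaged application eventually runs on. A minimal sketch of an equivalent startup check (hypothetical; not part of this commit), using only the standard Runtime.version() API:

// Hypothetical runtime guard, not part of this commit.
public final class JavaVersionGuard {
    /** Throws if the running JVM's major version differs from the required one. */
    public static void require(int requiredFeature) {
        int current = Runtime.version().feature(); // major Java version, e.g. 20
        if (current != requiredFeature) {
            throw new IllegalStateException(
                    "Requires Java " + requiredFeature + ", but running on Java " + current);
        }
    }
}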
src/main/java/com/axis/innovators/box/tools/FolderCreator.java
@@ -9,6 +9,16 @@ import java.io.File;
public class FolderCreator {

    public static final String LIBRARY_NAME = "library";
    public static final String MODEL_PATH = "model";

    public static String getModelFolder() {
        String folder = createFolder(MODEL_PATH);
        if (folder == null) {
            System.out.println("Model folder creation failure");
            return null;
        }
        return folder;
    }

    public static String getLibraryFolder() {
        String folder = createFolder(LIBRARY_NAME);
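The hunk above relies on a createFolder helper that falls outside the diff context, so its behavior is not shown. For orientation only, a plausible sketch (hypothetical; the project's actual implementation may differ):

// Hypothetical sketch of the unshown createFolder helper.
private static String createFolder(String name) {
    java.io.File dir = new java.io.File(System.getProperty("user.dir"), name);
    // Return the absolute path if the directory exists or could be created, else null
    // (matching how getModelFolder() treats a null result as failure).
    return (dir.isDirectory() || dir.mkdirs()) ? dir.getAbsolutePath() : null;
}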
src/main/java/org/tzd/lm/LM.java (new file, 106 lines)
@@ -0,0 +1,106 @@
package org.tzd.lm;

import com.axis.innovators.box.tools.FolderCreator;
import com.axis.innovators.box.tools.LibraryLoad;

/**
 * LM inference class.
 * @author tzdwindows 7
 */
public class LM {
    public static boolean CUDA = false;
    public final static String DEEP_SEEK = FolderCreator.getModelFolder() + "/DeepSeek-R1-Distill-Qwen-1.5B-Q8_0.gguf";

    static {
        if (!CUDA) {
            LibraryLoad.loadLibrary("cpu/ggml-base");
            LibraryLoad.loadLibrary("cpu/ggml-cpu");
            LibraryLoad.loadLibrary("cpu/ggml");
            LibraryLoad.loadLibrary("cpu/llama");
        } else {
            LibraryLoad.loadLibrary("cuda/ggml-base");
            LibraryLoad.loadLibrary("cuda/ggml-cpu");
            LibraryLoad.loadLibrary("cuda/ggml-rpc");
            // CUDA build: cuda-cu12.4-x64 (make sure it is installed)
            LibraryLoad.loadLibrary("cuda/ggml-cuda");
            LibraryLoad.loadLibrary("cuda/ggml");
            LibraryLoad.loadLibrary("cuda/llama");
        }
        LibraryLoad.loadLibrary("LM");
    }

    /**
     * Loads a model.
     * @param pathModel model path
     * @return model handle
     */
    public static native long llamaLoadModelFromFile(String pathModel);

    /**
     * Frees model resources.
     * @param modelHandle model handle
     */
    public static native void llamaFreeModel(long modelHandle);

    /**
     * Creates a context.
     * @param modelHandle model handle
     * @return context handle
     */
    public static native long createContext(long modelHandle);

    /**
     * Frees context resources.
     * @param ctxHandle context handle
     */
    public static native void llamaFreeContext(long ctxHandle);

    /**
     * Runs inference on the model.
     * @param modelHandle model handle
     * @param ctxHandle model context handle
     * @param temperature sampling temperature
     * @param prompt question
     * @param messageCallback callback interface
     * @return the full generated content
     */
    public static native String inference(long modelHandle,
                                          long ctxHandle,
                                          float temperature,
                                          String prompt,
                                          MessageCallback messageCallback);

    /**
     * Callback interface.
     */
    public interface MessageCallback {
        /**
         * Invoked for each generated message.
         * @param message message
         */
        void onMessage(String message);
    }

    public static void main(String[] args) {
        // Load the model
        long modelHandle = llamaLoadModelFromFile(DEEP_SEEK);
        // Create a new context
        long ctxHandle = createContext(modelHandle);
        // Run inference; the prompt means "write an AI"
        inference(modelHandle, ctxHandle, 0.2f, "写一个ai", new MessageCallback() {
            @Override
            public void onMessage(String message) {
                // Stream output through the callback
                System.out.print(message);
            }
        });
        // Second turn on the same context; the prompt means "thank you"
        inference(modelHandle, ctxHandle, 0.2f, "谢谢你", new MessageCallback() {
            @Override
            public void onMessage(String message) {
                System.out.print(message);
            }
        });
        // Clean up the context and the model
        llamaFreeContext(ctxHandle);
        llamaFreeModel(modelHandle);
    }
}
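The main method above frees the native handles on the happy path only; if inference throws, they leak. A caller-side pattern that guarantees cleanup, sketched under the assumption that the native methods behave as documented (hypothetical wrapper, not part of this commit), using only the API declared above:

// Hypothetical caller-side wrapper; not part of this commit.
public final class LMSession {
    public static String ask(String modelPath, float temperature, String prompt) {
        long model = LM.llamaLoadModelFromFile(modelPath);
        long ctx = LM.createContext(model);
        try {
            // MessageCallback has a single abstract method, so a method reference works.
            return LM.inference(model, ctx, temperature, prompt, System.out::print);
        } finally {
            // Free native resources even if inference throws.
            LM.llamaFreeContext(ctx);
            LM.llamaFreeModel(model);
        }
    }
}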