@@ -59,6 +59,17 @@ public LlmModule(int modelType, String modulePath, String tokenizerPath, float t
59
59
mHybridData = initHybrid (modelType , modulePath , tokenizerPath , temperature , null );
60
60
}
61
61
62
/**
 * Constructs an LLM Module for a model with the given {@link LlmModuleConfig}.
 *
 * @param config configuration bundling the model type, module path, tokenizer path,
 *     temperature, and optional data path (may be null per the other constructor's usage)
 */
public LlmModule(LlmModuleConfig config) {
  // Delegates to the same native initializer as the explicit-argument constructor,
  // but sources every value from the config object (including dataPath, which the
  // other visible constructor passes as null).
  mHybridData =
      initHybrid(
          config.modelType,
          config.modulePath,
          config.tokenizerPath,
          config.temperature,
          config.dataPath);
}
62
73
public void resetNative () {
63
74
mHybridData .resetNative ();
64
75
}
@@ -107,6 +118,19 @@ public int generate(String prompt, int seqLen, LlmCallback llmCallback, boolean
107
118
return generate (null , 0 , 0 , 0 , prompt , seqLen , llmCallback , echo );
108
119
}
109
120
121
+ /**
122
+ * Start generating tokens from the module.
123
+ *
124
+ * @param prompt Input prompt
125
+ * @param config the config for generation
126
+ * @param llmCallback callback object to receive results
127
+ */
128
+ public int generate (String prompt , LlmGenerationConfig config , LlmCallback llmCallback ) {
129
+ int seqLen = config .getSeqLen ();
130
+ boolean echo = config .isEcho ();
131
+ return generate (null , 0 , 0 , 0 , prompt , seqLen , llmCallback , echo );
132
+ }
133
+
110
134
/**
111
135
* Start generating tokens from the module.
112
136
*
0 commit comments