1 ### MavenStyleProject using code reflection with a Java-based ONNX programming model.
 2 
 3 Running the demo:
 4 ```
 5 JAVA_HOME=<path to the Babylon JDK home> mvn process-test-classes exec:java -Dexec.classpathScope=test -Dexec.mainClass=oracle.code.onnx.MNISTDemo
 6 ```
 7 
 8 ### Onnx Generation API to create and run LLM Onnx models.
 9 
10 Example of direct execution of existing Onnx LLM model:
11 ```
12 // model-specific prompt format
13 static final String PROMPT_TEMPLATE = "<|...|>%s<|...|><|...|>";
14 
15 public static void main(String... args) {
16 
17     // compatible `libonnxruntime` library must be present in the same folder as `libonnxruntime-genai` library
18     // native library extension (.dylib, .so or .dll) is platform specific
19     System.load("path/To/libonnxruntime-genai.dylib");
20 
21     // model folder must contain the Onnx model file and all configuration and external data files
22     try (OnnxGenRuntimeSession session = new OnnxGenRuntimeSession(Path.of("path/To/Onnx/Model/Folder/"))) {
23         // each LLM model has a specific prompt format
24         session.prompt(PROMPT_TEMPLATE.formatted("Tell me a joke"), System.out::print);
25     }
26 }
27 ```
28 
29 Example of a custom LLM Onnx model generation from Java sources and execution:
30 ```
31 // model-specific prompt format
32 static final String PROMPT_TEMPLATE = "<|...|>%s<|...|><|...|>";
33 
34 public static void main(String... args) {
35 
36     // compatible `libonnxruntime` library must be present in the same folder as `libonnxruntime-genai` library
37     // native library extension (.dylib, .so or .dll) is platform specific
38     System.load("path/To/libonnxruntime-genai.dylib");
39 
40     // instance of a custom Onnx LLM model
41     MyCustomLLMModel myCustomModelInstance = ...;
42 
43     // target model folder must contain all configuration files
44     // `genai_config.json` must be configured in the following way:
45     //     - model filename to match generated model file name (below)
46     //     - model inputs to match main model method argument names
47     //     - model outputs to match main model result record component names
48     Path targetModelFolder = ...;
49 
50     // Onnx model file and external data file are generated to the target model folder
51     // and the session is created from the generated model
52     try (OnnxGenRuntimeSession session = OnnxGenRuntimeSession.buildFromCodeReflection(myCustomModelInstance, "myMainModelMethod", targetModelFolder, "MyModelFileName.onnx", "MyDataFileName")) {
53         // each LLM model has a specific prompt format
54         session.prompt(PROMPT_TEMPLATE.formatted("Tell me a joke"), System.out::print);
55     }
56 }
57 ```
58 
59 Example of a custom LLM Onnx model Java source:
60 ```
61 import oracle.code.onnx.Tensor;
62 import jdk.incubator.code.CodeReflection;
63 import static oracle.code.onnx.OnnxOperators.*;
64 
65 public final class MyCustomLLMModel {
66 
67      public final Tensor<Float> myModelWeights...
68      public final Tensor<Byte> otherMyModelWeights...
69 
70      public MyCustomLLMModel(...) {
71          // initialize all weight tensors
72          // large tensors data can be memory-mapped
73          this.myModelWeights = ...
74          this.otherMyModelWeights = ...
75          ...
76      }
77 
78      // custom record with main model method response
79      public record MyModelResponse(Tensor<Float> logits, Tensor<Float> presentKey0, Tensor<Float> presentValue0, ...) {
80      }
81 
82      @CodeReflection
83      public MyModelResponse myMainModelMethod(Tensor<Long> inputIds, Tensor<Long> attentionMask, Tensor<Float> pastKey0, Tensor<Float> pastValue0, ...) {
84 
85          // computation of the model using oracle.code.onnx.OnnxOperators.* method calls
86          ...
87          Tensor<Float> logits = MatMul(...
88 
89          // composition of the return record
90          return new MyModelResponse(logits, key0, value0, ...);
91      }
92 }
93 ```