diff --git a/docs/source/android-section.md b/docs/source/android-section.md index a5774352bc1..dedb7df22d7 100644 --- a/docs/source/android-section.md +++ b/docs/source/android-section.md @@ -16,8 +16,16 @@ Deploy ExecuTorch on Android devices with hardware acceleration support. - {doc}`android-examples` — Explore Android Examples & Demos +## API Reference + +- [Java API Reference (Javadoc)](https://pytorch.org/executorch/main/javadoc/index.html) — Full Java class and method reference + ```{toctree} +:maxdepth: 1 :hidden: + using-executorch-android android-backends android-examples +Java API Reference (Javadoc) <https://pytorch.org/executorch/main/javadoc/index.html> +``` diff --git a/docs/source/using-executorch-android.md b/docs/source/using-executorch-android.md index 443015b47be..4ebdee651a9 100644 --- a/docs/source/using-executorch-android.md +++ b/docs/source/using-executorch-android.md @@ -21,6 +21,7 @@ All ExecuTorch Android libraries are packaged into an Android library (AAR), exe The AAR artifact contains the Java library for users to integrate with their Java/Kotlin application code, as well as the corresponding JNI library (.so file), which is loaded by the Java code during initialization. - [Java library](https://github.com/pytorch/executorch/tree/main/extension/android/executorch_android/src/main/java/org/pytorch/executorch) +- [Java API Reference (Javadoc)](https://pytorch.org/executorch/main/javadoc/index.html) - JNI contains the JNI binding for the corresponding Java code, and ExecuTorch native library, including - Core ExecuTorch runtime libraries - XNNPACK backend @@ -240,4 +241,4 @@ using ExecuTorch AAR package. ## Java API reference -Please see [Java API reference](https://pytorch.org/executorch/main/javadoc/). +Please see [Java API reference](https://pytorch.org/executorch/main/javadoc/index.html). 
diff --git a/extension/android/executorch_android/build.gradle b/extension/android/executorch_android/build.gradle index f84aafe138c..1437fb9e846 100644 --- a/extension/android/executorch_android/build.gradle +++ b/extension/android/executorch_android/build.gradle @@ -114,3 +114,17 @@ repositories { url "https://central.sonatype.com/repository/maven-snapshots/" } } + +android.libraryVariants.all { variant -> + task("generate${variant.name.capitalize()}Javadoc", type: Javadoc) { + source = variant.javaCompileProvider.get().source + classpath += project.files(android.getBootClasspath().join(File.pathSeparator)) + classpath += variant.javaCompileProvider.get().classpath + options { + overview = "src/main/javadoc/overview.html" + windowTitle = "ExecuTorch Android Java API" + docTitle = "ExecuTorch Android Java API" + links("https://docs.oracle.com/en/java/javase/11/docs/api/") + } + } +} diff --git a/extension/android/executorch_android/src/main/java/org/pytorch/executorch/extension/llm/package-info.java b/extension/android/executorch_android/src/main/java/org/pytorch/executorch/extension/llm/package-info.java index 2fcc8c9ec6b..5327e562eab 100644 --- a/extension/android/executorch_android/src/main/java/org/pytorch/executorch/extension/llm/package-info.java +++ b/extension/android/executorch_android/src/main/java/org/pytorch/executorch/extension/llm/package-info.java @@ -1,2 +1,49 @@ -/** Extension for LLM related use cases for ExecuTorch Android Java/JNI package. */ +/** + * ExecuTorch LLM extension for Android. + * + *

This package provides Java bindings for running large language models (LLMs) + * on Android using ExecuTorch. It supports text generation, tokenization, + * and streaming token callbacks. + * + *

Quick Start

+ * + *
{@code
+ * import org.pytorch.executorch.extension.llm.LlmModule;
+ *
+ * // Load a Llama model
+ * LlmModule llm = new LlmModule(
+ *     "/data/local/tmp/llama.pte",
+ *     "/data/local/tmp/tokenizer.bin",
+ *     0.8f
+ * );
+ * llm.load();
+ *
+ * // Generate text token by token
+ * llm.generate("Hello, my name is", 200, new LlmCallback() {
+ *     public void onResult(String token) {
+ *         System.out.print(token);
+ *     }
+ *     public void onStats(String stats) {
+ *         System.out.println("\nStats: " + stats);
+ *     }
+ * });
+ * }
+ * + *

Key Classes

+ * + * + * + *

More Resources

+ * + * + */ package org.pytorch.executorch.extension.llm; diff --git a/extension/android/executorch_android/src/main/java/org/pytorch/executorch/package-info.java b/extension/android/executorch_android/src/main/java/org/pytorch/executorch/package-info.java index 01d55ebc72b..4c78995fe85 100644 --- a/extension/android/executorch_android/src/main/java/org/pytorch/executorch/package-info.java +++ b/extension/android/executorch_android/src/main/java/org/pytorch/executorch/package-info.java @@ -1,2 +1,59 @@ -/** ExecuTorch Android Java/JNI package. This is the main package for generic use cases. */ +/** + * ExecuTorch Android Java API. + * + *

This package provides Java bindings for running ExecuTorch models on Android. + * Use these classes to load a {@code .pte} model file and run inference directly + * from your Java or Kotlin Android app — no C++ required. + * + *

Quick Start

+ * + *

Step 1. Add the dependency to your {@code app/build.gradle.kts}: + * + *

{@code
+ * dependencies {
+ *     implementation("org.pytorch:executorch-android:${executorch_version}")
+ * }
+ * }
+ * + *

Step 2. Load your model and run inference: + * + *

{@code
+ * import org.pytorch.executorch.EValue;
+ * import org.pytorch.executorch.Module;
+ * import org.pytorch.executorch.Tensor;
+ *
+ * // Load your exported .pte model file
+ * Module module = Module.load("/data/local/tmp/model.pte");
+ *
 + * // Build an input tensor, e.g. a 1x3x224x224 image
+ * float[] inputData = new float[1 * 3 * 224 * 224];
+ * Tensor inputTensor = Tensor.fromBlob(inputData, new long[]{1, 3, 224, 224});
+ *
+ * // Run inference
+ * EValue[] output = module.forward(EValue.from(inputTensor));
+ *
+ * // Read the result
+ * float[] scores = output[0].toTensor().getDataAsFloatArray();
+ * }
+ * + *

Key Classes

+ * + * + * + *

More Resources

+ * + * + */ package org.pytorch.executorch; diff --git a/extension/android/executorch_android/src/main/javadoc/overview.html b/extension/android/executorch_android/src/main/javadoc/overview.html new file mode 100644 index 00000000000..01c15fb8b76 --- /dev/null +++ b/extension/android/executorch_android/src/main/javadoc/overview.html @@ -0,0 +1,89 @@ + + + + ExecuTorch Android Java API + + + +

+ The ExecuTorch Android Java API lets you run PyTorch models on Android + devices using a simple Java or Kotlin interface. +

+ +

+ ExecuTorch is PyTorch's solution for on-device AI — from smartphones to + microcontrollers. The Java API wraps the native ExecuTorch runtime and gives + you clean Java classes to load models, build tensors, and run inference. +

+ +

Quick Start

+ +

Add the library to your app:

+ +
+// app/build.gradle.kts
+dependencies {
+    implementation("org.pytorch:executorch-android:${executorch_version}")
+}
+
+ +

Load a model and run inference:

+ +
+import org.pytorch.executorch.EValue;
+import org.pytorch.executorch.Module;
+import org.pytorch.executorch.Tensor;
+
+// Load your exported .pte model
+Module module = Module.load("/data/local/tmp/model.pte");
+
+// Create an input tensor (1x3x224x224 image)
+float[] data = new float[1 * 3 * 224 * 224];
+Tensor input = Tensor.fromBlob(data, new long[]{1, 3, 224, 224});
+
+// Run inference
+EValue[] output = module.forward(EValue.from(input));
+float[] scores = output[0].toTensor().getDataAsFloatArray();
+
+ +

Packages

+ + + + + + + + + + + + + + +
org.pytorch.executorchCore API. Contains Module to load and run models, Tensor for tensor operations, + and EValue to wrap inputs and outputs.
org.pytorch.executorch.extension.llmLLM extension. Contains LlmModule for running large language models like Llama + with streaming token generation.
org.pytorch.executorch.annotationsAPI annotations. Experimental marks APIs that may change in future releases.
+ +

Resources

+ + + + +