From c4f0ed2beb7cc8060851d6343d3224dee79b868b Mon Sep 17 00:00:00 2001 From: Michal Harakal Date: Wed, 15 Apr 2026 11:29:43 +0200 Subject: [PATCH 1/3] Ops docs: fix unresolved partials, drop void backend, render LaTeX (#513) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Four follow-ups to the TensorOps surface-scan docs (#511) that landed with rendering bugs on the published site. Generator now only emits `include::partial$ops/<op>/<fn>.adoc[...]` directives for partials that actually exist on disk — Antora does not honour `[optional]` for resource-ref includes, so the prior blanket emission produced "unresolved include" errors on every op except matmul. Partial root is derived from the output dir's Antora `pages/` marker. Per-function backend chrome is removed entirely; platform coverage per op is not a concern yet. `@Backend` annotation gains `internal: Boolean = false`, applied to `VoidTensorOps` which is a shape-only sentinel, not a deployable backend. The KSP surface scan filters internal backends out, and the annotation-grouping path drops symbols whose enclosing class is `@Backend`-tagged (their coverage is already in the surface scan). Result: no standalone `voidtensorops.adoc` stub, no `void` column in badges or matrices. LaTeX in `stem:[…]` and `[stem]` blocks now renders as real math. The playbook sets `asciidoc.attributes.stem: latexmath` so expressions are parsed as LaTeX, and a supplemental UI override at `docs/supplemental-ui/partials/footer-scripts.hbs` injects MathJax v3 from jsDelivr with `\(…\)` / `\[…\]` delimiters matching Asciidoctor's output. Override is local so docs fixes don't require a skainet-docs-ui bundle release. 
Co-Authored-By: Claude Opus 4.6 (1M context) --- .../main/kotlin/GenerateDocumentationTask.kt | 115 +- docs/antora-playbook.yml | 12 + .../reference/operators/generated/index.adoc | 3 +- .../operators/generated/similarity.adoc | 26 - .../operators/generated/tensorops.adoc | 1306 +---------------- .../operators/generated/voidtensorops.adoc | 101 -- .../pages/reference/ops-status-matrix.adoc | 122 +- .../partials/footer-scripts.hbs | 27 + .../sk/ainet/lang/tensor/ops/VoidTensorOps.kt | 2 +- .../kotlin/sk/ainet/lang/ops/TensorOp.kt | 8 +- .../lang/ops/ksp/OperatorDocProcessor.kt | 21 +- 11 files changed, 184 insertions(+), 1559 deletions(-) delete mode 100644 docs/modules/ROOT/pages/reference/operators/generated/voidtensorops.adoc create mode 100644 docs/supplemental-ui/partials/footer-scripts.hbs diff --git a/build-logic/convention/src/main/kotlin/GenerateDocumentationTask.kt b/build-logic/convention/src/main/kotlin/GenerateDocumentationTask.kt index 6129ca8d..459d3fa6 100644 --- a/build-logic/convention/src/main/kotlin/GenerateDocumentationTask.kt +++ b/build-logic/convention/src/main/kotlin/GenerateDocumentationTask.kt @@ -165,9 +165,7 @@ abstract class GenerateDocumentationTask : DefaultTask() { /** * Short emoji-only rendering of a backend status, for use in the - * compact matrix cells. The long-form wording stays on the - * per-function backend-status table produced by - * [generateBackendStatusTable]. + * compact matrix cells. 
* * The vocabulary covers both the planning-style strings * (`supported` / `partial` / `not_supported` / `planned`) and @@ -261,6 +259,7 @@ abstract class GenerateDocumentationTask : DefaultTask() { private fun generateOperatorPage(operator: OperatorDoc, module: OperatorDocModule, outputDir: File) { val operatorFile = File(outputDir, "${operator.name.lowercase()}.adoc") + val partialsRoot = derivePartialsRoot(outputDir) operatorFile.writeText(buildString { appendLine("= ${operator.name}") appendLine("") @@ -270,11 +269,32 @@ abstract class GenerateDocumentationTask : DefaultTask() { appendLine("") operator.functions.forEach { function -> - generateFunctionSection(operator, function, this) + generateFunctionSection(operator, function, this, partialsRoot) } }) } + /** + * If [outputDir] lives under an Antora `modules//pages/...` tree, + * return the sibling `partials/` directory where hand-written prose + * snippets live. Returns `null` for flat doc layouts, in which case the + * generator assumes no partials exist and skips include directives. + * + * Antora resolves `partial$...` through its content catalog, and unlike + * plain AsciiDoctor it does *not* honor the `optional` attribute for + * missing resources — it logs an "unresolved include" error. So the + * generator must only emit include directives for partials that are + * actually present on disk. + */ + private fun derivePartialsRoot(outputDir: File): File? 
{ + val path = outputDir.absolutePath.replace(File.separatorChar, '/') + val marker = "/pages/" + val idx = path.indexOf(marker) + if (idx < 0) return null + val moduleRoot = File(path.substring(0, idx)) + return File(moduleRoot, "partials") + } + /** * Per-function section layout fuses auto-derived facts (signature, * parameters, return type, backend matrix) with optional hand-written @@ -284,8 +304,15 @@ abstract class GenerateDocumentationTask : DefaultTask() { * single file per function carries all the human content, and missing * tags render as empty via `optional`, keeping un-prosed ops valid. */ - private fun generateFunctionSection(operator: OperatorDoc, function: FunctionDoc, builder: StringBuilder) { - val partialBase = "ops/${operator.name.lowercase()}/${function.name.lowercase()}.adoc" + private fun generateFunctionSection( + operator: OperatorDoc, + function: FunctionDoc, + builder: StringBuilder, + partialsRoot: File?, + ) { + val partialRelative = "ops/${operator.name.lowercase()}/${function.name.lowercase()}.adoc" + val partialFile = partialsRoot?.let { File(it, partialRelative) } + val hasPartial = partialFile?.isFile == true builder.apply { appendLine("== ${function.name}") appendLine("") @@ -314,24 +341,24 @@ abstract class GenerateDocumentationTask : DefaultTask() { appendLine("`${function.returnType}`") appendLine("") - // Human prose: math first so LaTeX sits right under the signature, - // then intuition and examples before the backend table, references - // last. All optional — ops with no partial still render cleanly. 
- appendLine("=== Definition") - appendLine("") - appendLine("include::partial\$$partialBase[tag=math,optional]") - appendLine("") - appendLine("=== Intuition") - appendLine("") - appendLine("include::partial\$$partialBase[tag=intuition,optional]") - appendLine("") - appendLine("=== Examples") - appendLine("") - appendLine("include::partial\$$partialBase[tag=examples,optional]") - appendLine("") - - if (includeBackendStatus.getOrElse(true) && function.statusByBackend.isNotEmpty()) { - generateBackendStatusTable(function, this) + // Human prose: only emitted when a partial actually exists on + // disk. Antora does not honor the `optional` attribute for + // `partial$` resource refs, so emitting includes for missing + // partials produces "unresolved include" errors on the + // published site. + if (hasPartial) { + appendLine("=== Definition") + appendLine("") + appendLine("include::partial\$$partialRelative[tag=math,optional]") + appendLine("") + appendLine("=== Intuition") + appendLine("") + appendLine("include::partial\$$partialRelative[tag=intuition,optional]") + appendLine("") + appendLine("=== Examples") + appendLine("") + appendLine("include::partial\$$partialRelative[tag=examples,optional]") + appendLine("") } if (function.notes.isNotEmpty()) { @@ -343,42 +370,12 @@ abstract class GenerateDocumentationTask : DefaultTask() { } } - appendLine("=== References") - appendLine("") - appendLine("include::partial\$$partialBase[tag=references,optional]") - appendLine("") - } - } - - private fun generateBackendStatusTable(function: FunctionDoc, builder: StringBuilder) { - builder.apply { - appendLine("=== Backend Support") - appendLine("") - appendLine("[cols=\"1,1,3\", options=\"header\"]") - appendLine("|===") - appendLine("| Backend | Status | Notes") - - function.statusByBackend.forEach { (backend, status) -> - val formattedStatus = formatStatus(status) - val notes = function.notes - .filter { it.backend.equals(backend, ignoreCase = true) } - .joinToString("; ") { 
it.message } - - appendLine("| $backend | $formattedStatus | ${notes.ifEmpty { "-" }}") + if (hasPartial) { + appendLine("=== References") + appendLine("") + appendLine("include::partial\$$partialRelative[tag=references,optional]") + appendLine("") } - - appendLine("|===") - appendLine("") - } - } - - private fun formatStatus(status: String): String { - return when (status.lowercase()) { - "supported" -> "✅ Supported" - "partial" -> "⚠️ Partial" - "not_supported" -> "❌ Not Supported" - "planned" -> "📋 Planned" - else -> status } } diff --git a/docs/antora-playbook.yml b/docs/antora-playbook.yml index f5b32045..1606511b 100644 --- a/docs/antora-playbook.yml +++ b/docs/antora-playbook.yml @@ -19,6 +19,13 @@ content: branches: HEAD asciidoc: + attributes: + # LaTeX syntax for stem:[…] and [stem] blocks. Without this the + # partials under partials/ops/**/*.adoc would be parsed as AsciiMath, + # which doesn't understand `\in`, `\mathbb`, `\sum_{…}^{…}` etc. and + # passes them through as literal text. MathJax is loaded by + # supplemental-ui/partials/footer-scripts.hbs. + stem: latexmath extensions: # Local mermaid block processor — renders every `[mermaid]` block # inline by invoking the @mermaid-js/mermaid-cli binary baked into @@ -36,6 +43,11 @@ ui: # this URL to a newer version is a one-line PR. url: https://github.com/SKaiNET-developers/skainet-docs-ui/releases/download/v1.1.1/ui-bundle.zip snapshot: true + # Local overrides layered on top of the UI bundle. The footer-scripts + # partial is replaced to inject MathJax so stem:[…] / [stem] blocks + # render properly; upstream the change to skainet-docs-ui once the + # math-in-ops-docs surface stabilises. 
+ supplemental_files: ./supplemental-ui output: dir: ./build/site diff --git a/docs/modules/ROOT/pages/reference/operators/generated/index.adoc b/docs/modules/ROOT/pages/reference/operators/generated/index.adoc index 8ebeb8a3..54733c40 100644 --- a/docs/modules/ROOT/pages/reference/operators/generated/index.adoc +++ b/docs/modules/ROOT/pages/reference/operators/generated/index.adoc @@ -1,13 +1,12 @@ = AI-NET Operators Reference -Generated from version `1.0.0` on 2026-04-14 +Generated from version `1.0.0` on 2026-04-15 == Operators by Modality === Core * xref:reference/operators/generated/tensorops.adoc[TensorOps] -* xref:reference/operators/generated/voidtensorops.adoc[VoidTensorOps] === Composite diff --git a/docs/modules/ROOT/pages/reference/operators/generated/similarity.adoc b/docs/modules/ROOT/pages/reference/operators/generated/similarity.adoc index d93808b3..3e342f67 100644 --- a/docs/modules/ROOT/pages/reference/operators/generated/similarity.adoc +++ b/docs/modules/ROOT/pages/reference/operators/generated/similarity.adoc @@ -23,33 +23,7 @@ fun cosineDistance(other:Tensor, dim:Int, eps:Double): Tensor `Tensor` -=== Definition - -include::partial$ops/similarity/cosinedistance.adoc[tag=math,optional] - -=== Intuition - -include::partial$ops/similarity/cosinedistance.adoc[tag=intuition,optional] - -=== Examples - -include::partial$ops/similarity/cosinedistance.adoc[tag=examples,optional] - -=== Backend Support - -[cols="1,1,3", options="header"] -|=== -| Backend | Status | Notes -| cpu | implemented | - -| wasm | implemented | - -| apple | implemented | - -|=== - === Notes TIP: *all*: -=== References - -include::partial$ops/similarity/cosinedistance.adoc[tag=references,optional] - diff --git a/docs/modules/ROOT/pages/reference/operators/generated/tensorops.adoc b/docs/modules/ROOT/pages/reference/operators/generated/tensorops.adoc index 83af1cbd..656e25e9 100644 --- a/docs/modules/ROOT/pages/reference/operators/generated/tensorops.adoc +++ 
b/docs/modules/ROOT/pages/reference/operators/generated/tensorops.adoc @@ -22,30 +22,6 @@ fun add(a:Tensor, b:Tensor): Tensor `Tensor` -=== Definition - -include::partial$ops/tensorops/add.adoc[tag=math,optional] - -=== Intuition - -include::partial$ops/tensorops/add.adoc[tag=intuition,optional] - -=== Examples - -include::partial$ops/tensorops/add.adoc[tag=examples,optional] - -=== Backend Support - -[cols="1,1,3", options="header"] -|=== -| Backend | Status | Notes -| void | implemented | - -|=== - -=== References - -include::partial$ops/tensorops/add.adoc[tag=references,optional] - == subtract === Signature @@ -64,30 +40,6 @@ fun subtract(a:Tensor, b:Tensor): Tensor `Tensor` -=== Definition - -include::partial$ops/tensorops/subtract.adoc[tag=math,optional] - -=== Intuition - -include::partial$ops/tensorops/subtract.adoc[tag=intuition,optional] - -=== Examples - -include::partial$ops/tensorops/subtract.adoc[tag=examples,optional] - -=== Backend Support - -[cols="1,1,3", options="header"] -|=== -| Backend | Status | Notes -| void | implemented | - -|=== - -=== References - -include::partial$ops/tensorops/subtract.adoc[tag=references,optional] - == multiply === Signature @@ -106,30 +58,6 @@ fun multiply(a:Tensor, b:Tensor): Tensor `Tensor` -=== Definition - -include::partial$ops/tensorops/multiply.adoc[tag=math,optional] - -=== Intuition - -include::partial$ops/tensorops/multiply.adoc[tag=intuition,optional] - -=== Examples - -include::partial$ops/tensorops/multiply.adoc[tag=examples,optional] - -=== Backend Support - -[cols="1,1,3", options="header"] -|=== -| Backend | Status | Notes -| void | implemented | - -|=== - -=== References - -include::partial$ops/tensorops/multiply.adoc[tag=references,optional] - == divide === Signature @@ -148,30 +76,6 @@ fun divide(a:Tensor, b:Tensor): Tensor `Tensor` -=== Definition - -include::partial$ops/tensorops/divide.adoc[tag=math,optional] - -=== Intuition - -include::partial$ops/tensorops/divide.adoc[tag=intuition,optional] - 
-=== Examples - -include::partial$ops/tensorops/divide.adoc[tag=examples,optional] - -=== Backend Support - -[cols="1,1,3", options="header"] -|=== -| Backend | Status | Notes -| void | implemented | - -|=== - -=== References - -include::partial$ops/tensorops/divide.adoc[tag=references,optional] - == addScalar === Signature @@ -190,30 +94,6 @@ fun addScalar(a:Tensor, b:Number): Tensor `Tensor` -=== Definition - -include::partial$ops/tensorops/addscalar.adoc[tag=math,optional] - -=== Intuition - -include::partial$ops/tensorops/addscalar.adoc[tag=intuition,optional] - -=== Examples - -include::partial$ops/tensorops/addscalar.adoc[tag=examples,optional] - -=== Backend Support - -[cols="1,1,3", options="header"] -|=== -| Backend | Status | Notes -| void | implemented | - -|=== - -=== References - -include::partial$ops/tensorops/addscalar.adoc[tag=references,optional] - == subScalar === Signature @@ -232,30 +112,6 @@ fun subScalar(a:Tensor, b:Number): Tensor `Tensor` -=== Definition - -include::partial$ops/tensorops/subscalar.adoc[tag=math,optional] - -=== Intuition - -include::partial$ops/tensorops/subscalar.adoc[tag=intuition,optional] - -=== Examples - -include::partial$ops/tensorops/subscalar.adoc[tag=examples,optional] - -=== Backend Support - -[cols="1,1,3", options="header"] -|=== -| Backend | Status | Notes -| void | implemented | - -|=== - -=== References - -include::partial$ops/tensorops/subscalar.adoc[tag=references,optional] - == mulScalar === Signature @@ -274,30 +130,6 @@ fun mulScalar(a:Tensor, b:Number): Tensor `Tensor` -=== Definition - -include::partial$ops/tensorops/mulscalar.adoc[tag=math,optional] - -=== Intuition - -include::partial$ops/tensorops/mulscalar.adoc[tag=intuition,optional] - -=== Examples - -include::partial$ops/tensorops/mulscalar.adoc[tag=examples,optional] - -=== Backend Support - -[cols="1,1,3", options="header"] -|=== -| Backend | Status | Notes -| void | implemented | - -|=== - -=== References - 
-include::partial$ops/tensorops/mulscalar.adoc[tag=references,optional] - == divScalar === Signature @@ -316,30 +148,6 @@ fun divScalar(a:Tensor, b:Number): Tensor `Tensor` -=== Definition - -include::partial$ops/tensorops/divscalar.adoc[tag=math,optional] - -=== Intuition - -include::partial$ops/tensorops/divscalar.adoc[tag=intuition,optional] - -=== Examples - -include::partial$ops/tensorops/divscalar.adoc[tag=examples,optional] - -=== Backend Support - -[cols="1,1,3", options="header"] -|=== -| Backend | Status | Notes -| void | implemented | - -|=== - -=== References - -include::partial$ops/tensorops/divscalar.adoc[tag=references,optional] - == rsubScalar === Signature @@ -358,30 +166,6 @@ fun rsubScalar(a:Number, b:Tensor): Tensor `Tensor` -=== Definition - -include::partial$ops/tensorops/rsubscalar.adoc[tag=math,optional] - -=== Intuition - -include::partial$ops/tensorops/rsubscalar.adoc[tag=intuition,optional] - -=== Examples - -include::partial$ops/tensorops/rsubscalar.adoc[tag=examples,optional] - -=== Backend Support - -[cols="1,1,3", options="header"] -|=== -| Backend | Status | Notes -| void | implemented | - -|=== - -=== References - -include::partial$ops/tensorops/rsubscalar.adoc[tag=references,optional] - == rdivScalar === Signature @@ -400,30 +184,6 @@ fun rdivScalar(a:Number, b:Tensor): Tensor `Tensor` -=== Definition - -include::partial$ops/tensorops/rdivscalar.adoc[tag=math,optional] - -=== Intuition - -include::partial$ops/tensorops/rdivscalar.adoc[tag=intuition,optional] - -=== Examples - -include::partial$ops/tensorops/rdivscalar.adoc[tag=examples,optional] - -=== Backend Support - -[cols="1,1,3", options="header"] -|=== -| Backend | Status | Notes -| void | implemented | - -|=== - -=== References - -include::partial$ops/tensorops/rdivscalar.adoc[tag=references,optional] - == matmul === Signature @@ -454,14 +214,6 @@ include::partial$ops/tensorops/matmul.adoc[tag=intuition,optional] 
include::partial$ops/tensorops/matmul.adoc[tag=examples,optional] -=== Backend Support - -[cols="1,1,3", options="header"] -|=== -| Backend | Status | Notes -| void | implemented | - -|=== - === References include::partial$ops/tensorops/matmul.adoc[tag=references,optional] @@ -483,30 +235,6 @@ fun transpose(tensor:Tensor): Tensor `Tensor` -=== Definition - -include::partial$ops/tensorops/transpose.adoc[tag=math,optional] - -=== Intuition - -include::partial$ops/tensorops/transpose.adoc[tag=intuition,optional] - -=== Examples - -include::partial$ops/tensorops/transpose.adoc[tag=examples,optional] - -=== Backend Support - -[cols="1,1,3", options="header"] -|=== -| Backend | Status | Notes -| void | implemented | - -|=== - -=== References - -include::partial$ops/tensorops/transpose.adoc[tag=references,optional] - == conv1d === Signature @@ -530,30 +258,6 @@ fun conv1d(input:Tensor, weight:Tensor, bias:Tensor, stride:Int, padding:Int, di `Tensor` -=== Definition - -include::partial$ops/tensorops/conv1d.adoc[tag=math,optional] - -=== Intuition - -include::partial$ops/tensorops/conv1d.adoc[tag=intuition,optional] - -=== Examples - -include::partial$ops/tensorops/conv1d.adoc[tag=examples,optional] - -=== Backend Support - -[cols="1,1,3", options="header"] -|=== -| Backend | Status | Notes -| void | implemented | - -|=== - -=== References - -include::partial$ops/tensorops/conv1d.adoc[tag=references,optional] - == conv2d === Signature @@ -577,30 +281,6 @@ fun conv2d(input:Tensor, weight:Tensor, bias:Tensor, stride:Pair, padding:Pair, `Tensor` -=== Definition - -include::partial$ops/tensorops/conv2d.adoc[tag=math,optional] - -=== Intuition - -include::partial$ops/tensorops/conv2d.adoc[tag=intuition,optional] - -=== Examples - -include::partial$ops/tensorops/conv2d.adoc[tag=examples,optional] - -=== Backend Support - -[cols="1,1,3", options="header"] -|=== -| Backend | Status | Notes -| void | implemented | - -|=== - -=== References - 
-include::partial$ops/tensorops/conv2d.adoc[tag=references,optional] - == conv3d === Signature @@ -624,30 +304,6 @@ fun conv3d(input:Tensor, weight:Tensor, bias:Tensor, stride:Triple, padding:Trip `Tensor` -=== Definition - -include::partial$ops/tensorops/conv3d.adoc[tag=math,optional] - -=== Intuition - -include::partial$ops/tensorops/conv3d.adoc[tag=intuition,optional] - -=== Examples - -include::partial$ops/tensorops/conv3d.adoc[tag=examples,optional] - -=== Backend Support - -[cols="1,1,3", options="header"] -|=== -| Backend | Status | Notes -| void | implemented | - -|=== - -=== References - -include::partial$ops/tensorops/conv3d.adoc[tag=references,optional] - == convTranspose1d === Signature @@ -672,30 +328,6 @@ fun convTranspose1d(input:Tensor, weight:Tensor, bias:Tensor, stride:Int, paddin `Tensor` -=== Definition - -include::partial$ops/tensorops/convtranspose1d.adoc[tag=math,optional] - -=== Intuition - -include::partial$ops/tensorops/convtranspose1d.adoc[tag=intuition,optional] - -=== Examples - -include::partial$ops/tensorops/convtranspose1d.adoc[tag=examples,optional] - -=== Backend Support - -[cols="1,1,3", options="header"] -|=== -| Backend | Status | Notes -| void | implemented | - -|=== - -=== References - -include::partial$ops/tensorops/convtranspose1d.adoc[tag=references,optional] - == maxPool2d === Signature @@ -716,30 +348,6 @@ fun maxPool2d(input:Tensor, kernelSize:Pair, stride:Pair, padding:Pair): Tensor `Tensor` -=== Definition - -include::partial$ops/tensorops/maxpool2d.adoc[tag=math,optional] - -=== Intuition - -include::partial$ops/tensorops/maxpool2d.adoc[tag=intuition,optional] - -=== Examples - -include::partial$ops/tensorops/maxpool2d.adoc[tag=examples,optional] - -=== Backend Support - -[cols="1,1,3", options="header"] -|=== -| Backend | Status | Notes -| void | implemented | - -|=== - -=== References - -include::partial$ops/tensorops/maxpool2d.adoc[tag=references,optional] - == avgPool2d === Signature @@ -761,30 +369,6 @@ fun 
avgPool2d(input:Tensor, kernelSize:Pair, stride:Pair, padding:Pair, countInc `Tensor` -=== Definition - -include::partial$ops/tensorops/avgpool2d.adoc[tag=math,optional] - -=== Intuition - -include::partial$ops/tensorops/avgpool2d.adoc[tag=intuition,optional] - -=== Examples - -include::partial$ops/tensorops/avgpool2d.adoc[tag=examples,optional] - -=== Backend Support - -[cols="1,1,3", options="header"] -|=== -| Backend | Status | Notes -| void | implemented | - -|=== - -=== References - -include::partial$ops/tensorops/avgpool2d.adoc[tag=references,optional] - == upsample2d === Signature @@ -805,30 +389,6 @@ fun upsample2d(input:Tensor, scale:Pair, mode:UpsampleMode, alignCorners:Boolean `Tensor` -=== Definition - -include::partial$ops/tensorops/upsample2d.adoc[tag=math,optional] - -=== Intuition - -include::partial$ops/tensorops/upsample2d.adoc[tag=intuition,optional] - -=== Examples - -include::partial$ops/tensorops/upsample2d.adoc[tag=examples,optional] - -=== Backend Support - -[cols="1,1,3", options="header"] -|=== -| Backend | Status | Notes -| void | implemented | - -|=== - -=== References - -include::partial$ops/tensorops/upsample2d.adoc[tag=references,optional] - == reshape === Signature @@ -847,31 +407,7 @@ fun reshape(tensor:Tensor, newShape:Shape): Tensor `Tensor` -=== Definition - -include::partial$ops/tensorops/reshape.adoc[tag=math,optional] - -=== Intuition - -include::partial$ops/tensorops/reshape.adoc[tag=intuition,optional] - -=== Examples - -include::partial$ops/tensorops/reshape.adoc[tag=examples,optional] - -=== Backend Support - -[cols="1,1,3", options="header"] -|=== -| Backend | Status | Notes -| void | implemented | - -|=== - -=== References - -include::partial$ops/tensorops/reshape.adoc[tag=references,optional] - -== flatten +== flatten === Signature @@ -890,30 +426,6 @@ fun flatten(tensor:Tensor, startDim:Int, endDim:Int): Tensor `Tensor` -=== Definition - -include::partial$ops/tensorops/flatten.adoc[tag=math,optional] - -=== Intuition - 
-include::partial$ops/tensorops/flatten.adoc[tag=intuition,optional] - -=== Examples - -include::partial$ops/tensorops/flatten.adoc[tag=examples,optional] - -=== Backend Support - -[cols="1,1,3", options="header"] -|=== -| Backend | Status | Notes -| void | implemented | - -|=== - -=== References - -include::partial$ops/tensorops/flatten.adoc[tag=references,optional] - == concat === Signature @@ -932,30 +444,6 @@ fun concat(tensors:List, dim:Int): Tensor `Tensor` -=== Definition - -include::partial$ops/tensorops/concat.adoc[tag=math,optional] - -=== Intuition - -include::partial$ops/tensorops/concat.adoc[tag=intuition,optional] - -=== Examples - -include::partial$ops/tensorops/concat.adoc[tag=examples,optional] - -=== Backend Support - -[cols="1,1,3", options="header"] -|=== -| Backend | Status | Notes -| void | implemented | - -|=== - -=== References - -include::partial$ops/tensorops/concat.adoc[tag=references,optional] - == split === Signature @@ -975,30 +463,6 @@ fun split(tensor:Tensor, splitSize:Int, dim:Int): List `List` -=== Definition - -include::partial$ops/tensorops/split.adoc[tag=math,optional] - -=== Intuition - -include::partial$ops/tensorops/split.adoc[tag=intuition,optional] - -=== Examples - -include::partial$ops/tensorops/split.adoc[tag=examples,optional] - -=== Backend Support - -[cols="1,1,3", options="header"] -|=== -| Backend | Status | Notes -| void | implemented | - -|=== - -=== References - -include::partial$ops/tensorops/split.adoc[tag=references,optional] - == squeeze === Signature @@ -1017,30 +481,6 @@ fun squeeze(tensor:Tensor, dim:Int): Tensor `Tensor` -=== Definition - -include::partial$ops/tensorops/squeeze.adoc[tag=math,optional] - -=== Intuition - -include::partial$ops/tensorops/squeeze.adoc[tag=intuition,optional] - -=== Examples - -include::partial$ops/tensorops/squeeze.adoc[tag=examples,optional] - -=== Backend Support - -[cols="1,1,3", options="header"] -|=== -| Backend | Status | Notes -| void | implemented | - -|=== - -=== 
References - -include::partial$ops/tensorops/squeeze.adoc[tag=references,optional] - == unsqueeze === Signature @@ -1059,30 +499,6 @@ fun unsqueeze(tensor:Tensor, dim:Int): Tensor `Tensor` -=== Definition - -include::partial$ops/tensorops/unsqueeze.adoc[tag=math,optional] - -=== Intuition - -include::partial$ops/tensorops/unsqueeze.adoc[tag=intuition,optional] - -=== Examples - -include::partial$ops/tensorops/unsqueeze.adoc[tag=examples,optional] - -=== Backend Support - -[cols="1,1,3", options="header"] -|=== -| Backend | Status | Notes -| void | implemented | - -|=== - -=== References - -include::partial$ops/tensorops/unsqueeze.adoc[tag=references,optional] - == relu === Signature @@ -1100,30 +516,6 @@ fun relu(tensor:Tensor): Tensor `Tensor` -=== Definition - -include::partial$ops/tensorops/relu.adoc[tag=math,optional] - -=== Intuition - -include::partial$ops/tensorops/relu.adoc[tag=intuition,optional] - -=== Examples - -include::partial$ops/tensorops/relu.adoc[tag=examples,optional] - -=== Backend Support - -[cols="1,1,3", options="header"] -|=== -| Backend | Status | Notes -| void | implemented | - -|=== - -=== References - -include::partial$ops/tensorops/relu.adoc[tag=references,optional] - == leakyRelu === Signature @@ -1142,30 +534,6 @@ fun leakyRelu(tensor:Tensor, negativeSlope:Float): Tensor `Tensor` -=== Definition - -include::partial$ops/tensorops/leakyrelu.adoc[tag=math,optional] - -=== Intuition - -include::partial$ops/tensorops/leakyrelu.adoc[tag=intuition,optional] - -=== Examples - -include::partial$ops/tensorops/leakyrelu.adoc[tag=examples,optional] - -=== Backend Support - -[cols="1,1,3", options="header"] -|=== -| Backend | Status | Notes -| void | implemented | - -|=== - -=== References - -include::partial$ops/tensorops/leakyrelu.adoc[tag=references,optional] - == elu === Signature @@ -1184,30 +552,6 @@ fun elu(tensor:Tensor, alpha:Float): Tensor `Tensor` -=== Definition - -include::partial$ops/tensorops/elu.adoc[tag=math,optional] - -=== 
Intuition - -include::partial$ops/tensorops/elu.adoc[tag=intuition,optional] - -=== Examples - -include::partial$ops/tensorops/elu.adoc[tag=examples,optional] - -=== Backend Support - -[cols="1,1,3", options="header"] -|=== -| Backend | Status | Notes -| void | implemented | - -|=== - -=== References - -include::partial$ops/tensorops/elu.adoc[tag=references,optional] - == softmax === Signature @@ -1226,30 +570,6 @@ fun softmax(tensor:Tensor, dim:Int): Tensor `Tensor` -=== Definition - -include::partial$ops/tensorops/softmax.adoc[tag=math,optional] - -=== Intuition - -include::partial$ops/tensorops/softmax.adoc[tag=intuition,optional] - -=== Examples - -include::partial$ops/tensorops/softmax.adoc[tag=examples,optional] - -=== Backend Support - -[cols="1,1,3", options="header"] -|=== -| Backend | Status | Notes -| void | implemented | - -|=== - -=== References - -include::partial$ops/tensorops/softmax.adoc[tag=references,optional] - == logSoftmax === Signature @@ -1268,30 +588,6 @@ fun logSoftmax(tensor:Tensor, dim:Int): Tensor `Tensor` -=== Definition - -include::partial$ops/tensorops/logsoftmax.adoc[tag=math,optional] - -=== Intuition - -include::partial$ops/tensorops/logsoftmax.adoc[tag=intuition,optional] - -=== Examples - -include::partial$ops/tensorops/logsoftmax.adoc[tag=examples,optional] - -=== Backend Support - -[cols="1,1,3", options="header"] -|=== -| Backend | Status | Notes -| void | implemented | - -|=== - -=== References - -include::partial$ops/tensorops/logsoftmax.adoc[tag=references,optional] - == sigmoid === Signature @@ -1309,30 +605,6 @@ fun sigmoid(tensor:Tensor): Tensor `Tensor` -=== Definition - -include::partial$ops/tensorops/sigmoid.adoc[tag=math,optional] - -=== Intuition - -include::partial$ops/tensorops/sigmoid.adoc[tag=intuition,optional] - -=== Examples - -include::partial$ops/tensorops/sigmoid.adoc[tag=examples,optional] - -=== Backend Support - -[cols="1,1,3", options="header"] -|=== -| Backend | Status | Notes -| void | implemented | 
- -|=== - -=== References - -include::partial$ops/tensorops/sigmoid.adoc[tag=references,optional] - == silu === Signature @@ -1350,30 +622,6 @@ fun silu(tensor:Tensor): Tensor `Tensor` -=== Definition - -include::partial$ops/tensorops/silu.adoc[tag=math,optional] - -=== Intuition - -include::partial$ops/tensorops/silu.adoc[tag=intuition,optional] - -=== Examples - -include::partial$ops/tensorops/silu.adoc[tag=examples,optional] - -=== Backend Support - -[cols="1,1,3", options="header"] -|=== -| Backend | Status | Notes -| void | implemented | - -|=== - -=== References - -include::partial$ops/tensorops/silu.adoc[tag=references,optional] - == gelu === Signature @@ -1391,30 +639,6 @@ fun gelu(tensor:Tensor): Tensor `Tensor` -=== Definition - -include::partial$ops/tensorops/gelu.adoc[tag=math,optional] - -=== Intuition - -include::partial$ops/tensorops/gelu.adoc[tag=intuition,optional] - -=== Examples - -include::partial$ops/tensorops/gelu.adoc[tag=examples,optional] - -=== Backend Support - -[cols="1,1,3", options="header"] -|=== -| Backend | Status | Notes -| void | implemented | - -|=== - -=== References - -include::partial$ops/tensorops/gelu.adoc[tag=references,optional] - == sum === Signature @@ -1433,30 +657,6 @@ fun sum(tensor:Tensor, dim:Int): Tensor `Tensor` -=== Definition - -include::partial$ops/tensorops/sum.adoc[tag=math,optional] - -=== Intuition - -include::partial$ops/tensorops/sum.adoc[tag=intuition,optional] - -=== Examples - -include::partial$ops/tensorops/sum.adoc[tag=examples,optional] - -=== Backend Support - -[cols="1,1,3", options="header"] -|=== -| Backend | Status | Notes -| void | implemented | - -|=== - -=== References - -include::partial$ops/tensorops/sum.adoc[tag=references,optional] - == mean === Signature @@ -1475,30 +675,6 @@ fun mean(tensor:Tensor, dim:Int): Tensor `Tensor` -=== Definition - -include::partial$ops/tensorops/mean.adoc[tag=math,optional] - -=== Intuition - -include::partial$ops/tensorops/mean.adoc[tag=intuition,optional] 
- -=== Examples - -include::partial$ops/tensorops/mean.adoc[tag=examples,optional] - -=== Backend Support - -[cols="1,1,3", options="header"] -|=== -| Backend | Status | Notes -| void | implemented | - -|=== - -=== References - -include::partial$ops/tensorops/mean.adoc[tag=references,optional] - == variance === Signature @@ -1517,30 +693,6 @@ fun variance(tensor:Tensor, dim:Int): Tensor `Tensor` -=== Definition - -include::partial$ops/tensorops/variance.adoc[tag=math,optional] - -=== Intuition - -include::partial$ops/tensorops/variance.adoc[tag=intuition,optional] - -=== Examples - -include::partial$ops/tensorops/variance.adoc[tag=examples,optional] - -=== Backend Support - -[cols="1,1,3", options="header"] -|=== -| Backend | Status | Notes -| void | implemented | - -|=== - -=== References - -include::partial$ops/tensorops/variance.adoc[tag=references,optional] - == sqrt === Signature @@ -1558,30 +710,6 @@ fun sqrt(tensor:Tensor): Tensor `Tensor` -=== Definition - -include::partial$ops/tensorops/sqrt.adoc[tag=math,optional] - -=== Intuition - -include::partial$ops/tensorops/sqrt.adoc[tag=intuition,optional] - -=== Examples - -include::partial$ops/tensorops/sqrt.adoc[tag=examples,optional] - -=== Backend Support - -[cols="1,1,3", options="header"] -|=== -| Backend | Status | Notes -| void | implemented | - -|=== - -=== References - -include::partial$ops/tensorops/sqrt.adoc[tag=references,optional] - == abs === Signature @@ -1599,30 +727,6 @@ fun abs(tensor:Tensor): Tensor `Tensor` -=== Definition - -include::partial$ops/tensorops/abs.adoc[tag=math,optional] - -=== Intuition - -include::partial$ops/tensorops/abs.adoc[tag=intuition,optional] - -=== Examples - -include::partial$ops/tensorops/abs.adoc[tag=examples,optional] - -=== Backend Support - -[cols="1,1,3", options="header"] -|=== -| Backend | Status | Notes -| void | implemented | - -|=== - -=== References - -include::partial$ops/tensorops/abs.adoc[tag=references,optional] - == sign === Signature @@ -1640,30 
+744,6 @@ fun sign(tensor:Tensor): Tensor `Tensor` -=== Definition - -include::partial$ops/tensorops/sign.adoc[tag=math,optional] - -=== Intuition - -include::partial$ops/tensorops/sign.adoc[tag=intuition,optional] - -=== Examples - -include::partial$ops/tensorops/sign.adoc[tag=examples,optional] - -=== Backend Support - -[cols="1,1,3", options="header"] -|=== -| Backend | Status | Notes -| void | implemented | - -|=== - -=== References - -include::partial$ops/tensorops/sign.adoc[tag=references,optional] - == clamp === Signature @@ -1683,30 +763,6 @@ fun clamp(tensor:Tensor, minVal:Float, maxVal:Float): Tensor `Tensor` -=== Definition - -include::partial$ops/tensorops/clamp.adoc[tag=math,optional] - -=== Intuition - -include::partial$ops/tensorops/clamp.adoc[tag=intuition,optional] - -=== Examples - -include::partial$ops/tensorops/clamp.adoc[tag=examples,optional] - -=== Backend Support - -[cols="1,1,3", options="header"] -|=== -| Backend | Status | Notes -| void | implemented | - -|=== - -=== References - -include::partial$ops/tensorops/clamp.adoc[tag=references,optional] - == narrow === Signature @@ -1727,30 +783,6 @@ fun narrow(tensor:Tensor, dim:Int, start:Int, length:Int): Tensor `Tensor` -=== Definition - -include::partial$ops/tensorops/narrow.adoc[tag=math,optional] - -=== Intuition - -include::partial$ops/tensorops/narrow.adoc[tag=intuition,optional] - -=== Examples - -include::partial$ops/tensorops/narrow.adoc[tag=examples,optional] - -=== Backend Support - -[cols="1,1,3", options="header"] -|=== -| Backend | Status | Notes -| void | implemented | - -|=== - -=== References - -include::partial$ops/tensorops/narrow.adoc[tag=references,optional] - == pad2d === Signature @@ -1772,30 +804,6 @@ fun pad2d(tensor:Tensor, padLeft:Int, padRight:Int, padTop:Int, padBottom:Int): `Tensor` -=== Definition - -include::partial$ops/tensorops/pad2d.adoc[tag=math,optional] - -=== Intuition - -include::partial$ops/tensorops/pad2d.adoc[tag=intuition,optional] - -=== Examples - 
-include::partial$ops/tensorops/pad2d.adoc[tag=examples,optional] - -=== Backend Support - -[cols="1,1,3", options="header"] -|=== -| Backend | Status | Notes -| void | implemented | - -|=== - -=== References - -include::partial$ops/tensorops/pad2d.adoc[tag=references,optional] - == unfold === Signature @@ -1816,30 +824,6 @@ fun unfold(tensor:Tensor, dim:Int, size:Int, step:Int): Tensor `Tensor` -=== Definition - -include::partial$ops/tensorops/unfold.adoc[tag=math,optional] - -=== Intuition - -include::partial$ops/tensorops/unfold.adoc[tag=intuition,optional] - -=== Examples - -include::partial$ops/tensorops/unfold.adoc[tag=examples,optional] - -=== Backend Support - -[cols="1,1,3", options="header"] -|=== -| Backend | Status | Notes -| void | implemented | - -|=== - -=== References - -include::partial$ops/tensorops/unfold.adoc[tag=references,optional] - == lt === Signature @@ -1858,30 +842,6 @@ fun lt(tensor:Tensor, value:Float): Tensor `Tensor` -=== Definition - -include::partial$ops/tensorops/lt.adoc[tag=math,optional] - -=== Intuition - -include::partial$ops/tensorops/lt.adoc[tag=intuition,optional] - -=== Examples - -include::partial$ops/tensorops/lt.adoc[tag=examples,optional] - -=== Backend Support - -[cols="1,1,3", options="header"] -|=== -| Backend | Status | Notes -| void | implemented | - -|=== - -=== References - -include::partial$ops/tensorops/lt.adoc[tag=references,optional] - == ge === Signature @@ -1900,30 +860,6 @@ fun ge(tensor:Tensor, value:Float): Tensor `Tensor` -=== Definition - -include::partial$ops/tensorops/ge.adoc[tag=math,optional] - -=== Intuition - -include::partial$ops/tensorops/ge.adoc[tag=intuition,optional] - -=== Examples - -include::partial$ops/tensorops/ge.adoc[tag=examples,optional] - -=== Backend Support - -[cols="1,1,3", options="header"] -|=== -| Backend | Status | Notes -| void | implemented | - -|=== - -=== References - -include::partial$ops/tensorops/ge.adoc[tag=references,optional] - == tril === Signature @@ -1942,30 
+878,6 @@ fun tril(tensor:Tensor, k:Int): Tensor `Tensor` -=== Definition - -include::partial$ops/tensorops/tril.adoc[tag=math,optional] - -=== Intuition - -include::partial$ops/tensorops/tril.adoc[tag=intuition,optional] - -=== Examples - -include::partial$ops/tensorops/tril.adoc[tag=examples,optional] - -=== Backend Support - -[cols="1,1,3", options="header"] -|=== -| Backend | Status | Notes -| void | implemented | - -|=== - -=== References - -include::partial$ops/tensorops/tril.adoc[tag=references,optional] - == convert === Signature @@ -1984,30 +896,6 @@ fun convert(tensor:Tensor, targetType:TTo): Tensor `Tensor` -=== Definition - -include::partial$ops/tensorops/convert.adoc[tag=math,optional] - -=== Intuition - -include::partial$ops/tensorops/convert.adoc[tag=intuition,optional] - -=== Examples - -include::partial$ops/tensorops/convert.adoc[tag=examples,optional] - -=== Backend Support - -[cols="1,1,3", options="header"] -|=== -| Backend | Status | Notes -| void | implemented | - -|=== - -=== References - -include::partial$ops/tensorops/convert.adoc[tag=references,optional] - == gather === Signature @@ -2027,30 +915,6 @@ fun gather(input:Tensor, indices:Tensor, dim:Int): Tensor `Tensor` -=== Definition - -include::partial$ops/tensorops/gather.adoc[tag=math,optional] - -=== Intuition - -include::partial$ops/tensorops/gather.adoc[tag=intuition,optional] - -=== Examples - -include::partial$ops/tensorops/gather.adoc[tag=examples,optional] - -=== Backend Support - -[cols="1,1,3", options="header"] -|=== -| Backend | Status | Notes -| void | implemented | - -|=== - -=== References - -include::partial$ops/tensorops/gather.adoc[tag=references,optional] - == indexSelect === Signature @@ -2070,30 +934,6 @@ fun indexSelect(input:Tensor, indices:Tensor, dim:Int): Tensor `Tensor` -=== Definition - -include::partial$ops/tensorops/indexselect.adoc[tag=math,optional] - -=== Intuition - -include::partial$ops/tensorops/indexselect.adoc[tag=intuition,optional] - -=== Examples - 
-include::partial$ops/tensorops/indexselect.adoc[tag=examples,optional] - -=== Backend Support - -[cols="1,1,3", options="header"] -|=== -| Backend | Status | Notes -| void | implemented | - -|=== - -=== References - -include::partial$ops/tensorops/indexselect.adoc[tag=references,optional] - == exp === Signature @@ -2111,30 +951,6 @@ fun exp(tensor:Tensor): Tensor `Tensor` -=== Definition - -include::partial$ops/tensorops/exp.adoc[tag=math,optional] - -=== Intuition - -include::partial$ops/tensorops/exp.adoc[tag=intuition,optional] - -=== Examples - -include::partial$ops/tensorops/exp.adoc[tag=examples,optional] - -=== Backend Support - -[cols="1,1,3", options="header"] -|=== -| Backend | Status | Notes -| void | implemented | - -|=== - -=== References - -include::partial$ops/tensorops/exp.adoc[tag=references,optional] - == expm1 === Signature @@ -2152,30 +968,6 @@ fun expm1(tensor:Tensor): Tensor `Tensor` -=== Definition - -include::partial$ops/tensorops/expm1.adoc[tag=math,optional] - -=== Intuition - -include::partial$ops/tensorops/expm1.adoc[tag=intuition,optional] - -=== Examples - -include::partial$ops/tensorops/expm1.adoc[tag=examples,optional] - -=== Backend Support - -[cols="1,1,3", options="header"] -|=== -| Backend | Status | Notes -| void | implemented | - -|=== - -=== References - -include::partial$ops/tensorops/expm1.adoc[tag=references,optional] - == sin === Signature @@ -2193,30 +985,6 @@ fun sin(tensor:Tensor): Tensor `Tensor` -=== Definition - -include::partial$ops/tensorops/sin.adoc[tag=math,optional] - -=== Intuition - -include::partial$ops/tensorops/sin.adoc[tag=intuition,optional] - -=== Examples - -include::partial$ops/tensorops/sin.adoc[tag=examples,optional] - -=== Backend Support - -[cols="1,1,3", options="header"] -|=== -| Backend | Status | Notes -| void | implemented | - -|=== - -=== References - -include::partial$ops/tensorops/sin.adoc[tag=references,optional] - == cos === Signature @@ -2234,30 +1002,6 @@ fun cos(tensor:Tensor): Tensor 
`Tensor` -=== Definition - -include::partial$ops/tensorops/cos.adoc[tag=math,optional] - -=== Intuition - -include::partial$ops/tensorops/cos.adoc[tag=intuition,optional] - -=== Examples - -include::partial$ops/tensorops/cos.adoc[tag=examples,optional] - -=== Backend Support - -[cols="1,1,3", options="header"] -|=== -| Backend | Status | Notes -| void | implemented | - -|=== - -=== References - -include::partial$ops/tensorops/cos.adoc[tag=references,optional] - == tanh === Signature @@ -2275,30 +1019,6 @@ fun tanh(tensor:Tensor): Tensor `Tensor` -=== Definition - -include::partial$ops/tensorops/tanh.adoc[tag=math,optional] - -=== Intuition - -include::partial$ops/tensorops/tanh.adoc[tag=intuition,optional] - -=== Examples - -include::partial$ops/tensorops/tanh.adoc[tag=examples,optional] - -=== Backend Support - -[cols="1,1,3", options="header"] -|=== -| Backend | Status | Notes -| void | implemented | - -|=== - -=== References - -include::partial$ops/tensorops/tanh.adoc[tag=references,optional] - == scaledDotProductAttention === Signature @@ -2321,27 +1041,3 @@ fun scaledDotProductAttention(query:Tensor, key:Tensor, value:Tensor, mask:Tenso `Tensor` -=== Definition - -include::partial$ops/tensorops/scaleddotproductattention.adoc[tag=math,optional] - -=== Intuition - -include::partial$ops/tensorops/scaleddotproductattention.adoc[tag=intuition,optional] - -=== Examples - -include::partial$ops/tensorops/scaleddotproductattention.adoc[tag=examples,optional] - -=== Backend Support - -[cols="1,1,3", options="header"] -|=== -| Backend | Status | Notes -| void | implemented | - -|=== - -=== References - -include::partial$ops/tensorops/scaleddotproductattention.adoc[tag=references,optional] - diff --git a/docs/modules/ROOT/pages/reference/operators/generated/voidtensorops.adoc b/docs/modules/ROOT/pages/reference/operators/generated/voidtensorops.adoc deleted file mode 100644 index fdba6e90..00000000 --- 
a/docs/modules/ROOT/pages/reference/operators/generated/voidtensorops.adoc +++ /dev/null @@ -1,101 +0,0 @@ -= VoidTensorOps - -Package: `sk.ainet.lang.tensor.ops` - -Modality: Core - -== matmul - -=== Signature - -[source,kotlin] ----- -fun matmul(a:Tensor, b:Tensor): Tensor ----- - -=== Parameters - -* `a: Tensor` -* `b: Tensor` - -=== Return Type - -`Tensor` - -=== Definition - -include::partial$ops/voidtensorops/matmul.adoc[tag=math,optional] - -=== Intuition - -include::partial$ops/voidtensorops/matmul.adoc[tag=intuition,optional] - -=== Examples - -include::partial$ops/voidtensorops/matmul.adoc[tag=examples,optional] - -=== Backend Support - -[cols="1,1,3", options="header"] -|=== -| Backend | Status | Notes -| Metal | in_progress | ; -|=== - -=== Notes - -TIP: *Metal*: - -TIP: *Metal*: - -=== References - -include::partial$ops/voidtensorops/matmul.adoc[tag=references,optional] - -== transpose - -=== Signature - -[source,kotlin] ----- -fun transpose(tensor:Tensor): Tensor ----- - -=== Parameters - -* `tensor: Tensor` - -=== Return Type - -`Tensor` - -=== Definition - -include::partial$ops/voidtensorops/transpose.adoc[tag=math,optional] - -=== Intuition - -include::partial$ops/voidtensorops/transpose.adoc[tag=intuition,optional] - -=== Examples - -include::partial$ops/voidtensorops/transpose.adoc[tag=examples,optional] - -=== Backend Support - -[cols="1,1,3", options="header"] -|=== -| Backend | Status | Notes -| Metal | in_progress | ; -|=== - -=== Notes - -TIP: *Metal*: - -TIP: *Metal*: - -=== References - -include::partial$ops/voidtensorops/transpose.adoc[tag=references,optional] - diff --git a/docs/modules/ROOT/pages/reference/ops-status-matrix.adoc b/docs/modules/ROOT/pages/reference/ops-status-matrix.adoc index caf9cb73..d915ed11 100644 --- a/docs/modules/ROOT/pages/reference/ops-status-matrix.adoc +++ b/docs/modules/ROOT/pages/reference/ops-status-matrix.adoc @@ -1,74 +1,72 @@ = Operator Coverage Matrix :description: Cross-backend status for every 
operator function in SKaiNET. -Generated from `operators.json` version `1.0.0` on 2026-04-14. +Generated from `operators.json` version `1.0.0` on 2026-04-15. Rows are `Operator.function` pairs; columns are backends that appear in any function's `statusByBackend` map. A missing entry means the backend makes no claim about the function — treat it as "unknown", not "not supported". -[cols="2,1,1,1,1,1", options="header"] +[cols="2,1,1,1", options="header"] |=== -| Operator.function | Metal | apple | cpu | void | wasm +| Operator.function | apple | cpu | wasm -| `TensorOps.add` | — | — | — | ✅ | — -| `TensorOps.subtract` | — | — | — | ✅ | — -| `TensorOps.multiply` | — | — | — | ✅ | — -| `TensorOps.divide` | — | — | — | ✅ | — -| `TensorOps.addScalar` | — | — | — | ✅ | — -| `TensorOps.subScalar` | — | — | — | ✅ | — -| `TensorOps.mulScalar` | — | — | — | ✅ | — -| `TensorOps.divScalar` | — | — | — | ✅ | — -| `TensorOps.rsubScalar` | — | — | — | ✅ | — -| `TensorOps.rdivScalar` | — | — | — | ✅ | — -| `TensorOps.matmul` | — | — | — | ✅ | — -| `TensorOps.transpose` | — | — | — | ✅ | — -| `TensorOps.conv1d` | — | — | — | ✅ | — -| `TensorOps.conv2d` | — | — | — | ✅ | — -| `TensorOps.conv3d` | — | — | — | ✅ | — -| `TensorOps.convTranspose1d` | — | — | — | ✅ | — -| `TensorOps.maxPool2d` | — | — | — | ✅ | — -| `TensorOps.avgPool2d` | — | — | — | ✅ | — -| `TensorOps.upsample2d` | — | — | — | ✅ | — -| `TensorOps.reshape` | — | — | — | ✅ | — -| `TensorOps.flatten` | — | — | — | ✅ | — -| `TensorOps.concat` | — | — | — | ✅ | — -| `TensorOps.split` | — | — | — | ✅ | — -| `TensorOps.squeeze` | — | — | — | ✅ | — -| `TensorOps.unsqueeze` | — | — | — | ✅ | — -| `TensorOps.relu` | — | — | — | ✅ | — -| `TensorOps.leakyRelu` | — | — | — | ✅ | — -| `TensorOps.elu` | — | — | — | ✅ | — -| `TensorOps.softmax` | — | — | — | ✅ | — -| `TensorOps.logSoftmax` | — | — | — | ✅ | — -| `TensorOps.sigmoid` | — | — | — | ✅ | — -| `TensorOps.silu` | — | — | — | ✅ | — -| `TensorOps.gelu` | — | — | — | ✅ | — -| 
`TensorOps.sum` | — | — | — | ✅ | — -| `TensorOps.mean` | — | — | — | ✅ | — -| `TensorOps.variance` | — | — | — | ✅ | — -| `TensorOps.sqrt` | — | — | — | ✅ | — -| `TensorOps.abs` | — | — | — | ✅ | — -| `TensorOps.sign` | — | — | — | ✅ | — -| `TensorOps.clamp` | — | — | — | ✅ | — -| `TensorOps.narrow` | — | — | — | ✅ | — -| `TensorOps.pad2d` | — | — | — | ✅ | — -| `TensorOps.unfold` | — | — | — | ✅ | — -| `TensorOps.lt` | — | — | — | ✅ | — -| `TensorOps.ge` | — | — | — | ✅ | — -| `TensorOps.tril` | — | — | — | ✅ | — -| `TensorOps.convert` | — | — | — | ✅ | — -| `TensorOps.gather` | — | — | — | ✅ | — -| `TensorOps.indexSelect` | — | — | — | ✅ | — -| `TensorOps.exp` | — | — | — | ✅ | — -| `TensorOps.expm1` | — | — | — | ✅ | — -| `TensorOps.sin` | — | — | — | ✅ | — -| `TensorOps.cos` | — | — | — | ✅ | — -| `TensorOps.tanh` | — | — | — | ✅ | — -| `TensorOps.scaledDotProductAttention` | — | — | — | ✅ | — -| `VoidTensorOps.matmul` | 🚧 | — | — | — | — -| `VoidTensorOps.transpose` | 🚧 | — | — | — | — -| `Similarity.cosineDistance` | — | ✅ | ✅ | — | ✅ +| `TensorOps.add` | — | — | — +| `TensorOps.subtract` | — | — | — +| `TensorOps.multiply` | — | — | — +| `TensorOps.divide` | — | — | — +| `TensorOps.addScalar` | — | — | — +| `TensorOps.subScalar` | — | — | — +| `TensorOps.mulScalar` | — | — | — +| `TensorOps.divScalar` | — | — | — +| `TensorOps.rsubScalar` | — | — | — +| `TensorOps.rdivScalar` | — | — | — +| `TensorOps.matmul` | — | — | — +| `TensorOps.transpose` | — | — | — +| `TensorOps.conv1d` | — | — | — +| `TensorOps.conv2d` | — | — | — +| `TensorOps.conv3d` | — | — | — +| `TensorOps.convTranspose1d` | — | — | — +| `TensorOps.maxPool2d` | — | — | — +| `TensorOps.avgPool2d` | — | — | — +| `TensorOps.upsample2d` | — | — | — +| `TensorOps.reshape` | — | — | — +| `TensorOps.flatten` | — | — | — +| `TensorOps.concat` | — | — | — +| `TensorOps.split` | — | — | — +| `TensorOps.squeeze` | — | — | — +| `TensorOps.unsqueeze` | — | — | — +| `TensorOps.relu` | — | — | — +| 
`TensorOps.leakyRelu` | — | — | — +| `TensorOps.elu` | — | — | — +| `TensorOps.softmax` | — | — | — +| `TensorOps.logSoftmax` | — | — | — +| `TensorOps.sigmoid` | — | — | — +| `TensorOps.silu` | — | — | — +| `TensorOps.gelu` | — | — | — +| `TensorOps.sum` | — | — | — +| `TensorOps.mean` | — | — | — +| `TensorOps.variance` | — | — | — +| `TensorOps.sqrt` | — | — | — +| `TensorOps.abs` | — | — | — +| `TensorOps.sign` | — | — | — +| `TensorOps.clamp` | — | — | — +| `TensorOps.narrow` | — | — | — +| `TensorOps.pad2d` | — | — | — +| `TensorOps.unfold` | — | — | — +| `TensorOps.lt` | — | — | — +| `TensorOps.ge` | — | — | — +| `TensorOps.tril` | — | — | — +| `TensorOps.convert` | — | — | — +| `TensorOps.gather` | — | — | — +| `TensorOps.indexSelect` | — | — | — +| `TensorOps.exp` | — | — | — +| `TensorOps.expm1` | — | — | — +| `TensorOps.sin` | — | — | — +| `TensorOps.cos` | — | — | — +| `TensorOps.tanh` | — | — | — +| `TensorOps.scaledDotProductAttention` | — | — | — +| `Similarity.cosineDistance` | ✅ | ✅ | ✅ -| *Done* | *0 / 58* | *1 / 58* | *1 / 58* | *55 / 58* | *1 / 58* +| *Done* | *1 / 56* | *1 / 56* | *1 / 56* |=== Per-function detail including notes lives in xref:reference/operators/generated/index.adoc[Operator reference]. diff --git a/docs/supplemental-ui/partials/footer-scripts.hbs b/docs/supplemental-ui/partials/footer-scripts.hbs new file mode 100644 index 00000000..a5f9a867 --- /dev/null +++ b/docs/supplemental-ui/partials/footer-scripts.hbs @@ -0,0 +1,27 @@ + + +{{#if env.SITE_SEARCH_PROVIDER}} +{{> search-scripts}} +{{/if}} +{{!-- + MathJax v3 loaded from CDN so LaTeX in stem:[…] / [stem] blocks renders + as real math instead of literal \$…\$ passthrough. Configured to match + Asciidoctor's delimiter output: inline \(…\) and display \[…\]. The + `ignoreHtmlClass`/`processHtmlClass` pair scopes the scan to Asciidoctor's + stemblock + paragraph nodes so MathJax doesn't rewrite unrelated page + chrome. 
Kept here (supplemental-ui override of the skainet-docs-ui bundle) + rather than upstreamed so docs changes don't require a UI bundle release. +--}} + + diff --git a/skainet-lang/skainet-lang-core/src/commonMain/kotlin/sk/ainet/lang/tensor/ops/VoidTensorOps.kt b/skainet-lang/skainet-lang-core/src/commonMain/kotlin/sk/ainet/lang/tensor/ops/VoidTensorOps.kt index 4ff09b3a..eef1859c 100644 --- a/skainet-lang/skainet-lang-core/src/commonMain/kotlin/sk/ainet/lang/tensor/ops/VoidTensorOps.kt +++ b/skainet-lang/skainet-lang-core/src/commonMain/kotlin/sk/ainet/lang/tensor/ops/VoidTensorOps.kt @@ -9,7 +9,7 @@ import sk.ainet.lang.tensor.data.DenseTensorDataFactory import sk.ainet.lang.types.DType import sk.ainet.lang.tensor.data.views.UnsqueezedTensorData -@Backend(id = "void", displayName = "Shape-only") +@Backend(id = "void", displayName = "Shape-only", internal = true) public class VoidTensorOps : TensorOps { private val dataFactory = DenseTensorDataFactory() diff --git a/skainet-lang/skainet-lang-ksp-annotations/src/commonMain/kotlin/sk/ainet/lang/ops/TensorOp.kt b/skainet-lang/skainet-lang-ksp-annotations/src/commonMain/kotlin/sk/ainet/lang/ops/TensorOp.kt index 76358d2c..95ddb74f 100644 --- a/skainet-lang/skainet-lang-ksp-annotations/src/commonMain/kotlin/sk/ainet/lang/ops/TensorOp.kt +++ b/skainet-lang/skainet-lang-ksp-annotations/src/commonMain/kotlin/sk/ainet/lang/ops/TensorOp.kt @@ -63,10 +63,16 @@ public annotation class InProgress( * and lowercase. * @param displayName Human-readable label for rendered tables. Defaults * to [id] if left empty. + * @param internal Marks the backend as internal-only — a shape/dtype + * sentinel, test double, or profiling stub that should never appear in + * user-facing docs or coverage matrices. `VoidTensorOps` is the canonical + * example: it exists so the KMP build and shape propagation work without + * a real compute backend, but it has no runtime on any target. 
*/ @Target(AnnotationTarget.CLASS) @Retention(AnnotationRetention.SOURCE) public annotation class Backend( val id: String, - val displayName: String = "" + val displayName: String = "", + val internal: Boolean = false, ) diff --git a/skainet-lang/skainet-lang-ksp-processor/src/main/kotlin/sk/ainet/lang/ops/ksp/OperatorDocProcessor.kt b/skainet-lang/skainet-lang-ksp-processor/src/main/kotlin/sk/ainet/lang/ops/ksp/OperatorDocProcessor.kt index 9e4e42ba..799c6e4f 100644 --- a/skainet-lang/skainet-lang-ksp-processor/src/main/kotlin/sk/ainet/lang/ops/ksp/OperatorDocProcessor.kt +++ b/skainet-lang/skainet-lang-ksp-processor/src/main/kotlin/sk/ainet/lang/ops/ksp/OperatorDocProcessor.kt @@ -81,9 +81,20 @@ class OperatorDocProcessor( .filterIsInstance() .filter { it.validate() } - val allSymbols = (notImplementedSymbols + inProgressSymbols + testInProgressSymbols + dslOpSymbols).toList() + val rawSymbols = (notImplementedSymbols + inProgressSymbols + testInProgressSymbols + dslOpSymbols).toList() + + // Drop symbols whose enclosing class is `@Backend`-tagged: those are + // backend implementors of `TensorOps` and their coverage is already + // reflected in the TensorOps surface scan's backend matrix. Emitting a + // standalone page for them would duplicate info and, worse, produce a + // stub page showing only the handful of methods that happen to carry + // a status annotation. + val allSymbols = rawSymbols.filterNot { symbol -> + val parent = (symbol as? KSFunctionDeclaration)?.parentDeclaration as? 
KSClassDeclaration + parent?.annotations?.any { it.shortName.asString() == "Backend" } == true + } - logger.info("Found ${allSymbols.size} annotated symbols") + logger.info("Found ${allSymbols.size} annotated symbols (dropped ${rawSymbols.size - allSymbols.size} on @Backend classes)") // Group annotation-discovered symbols by their containing class/package to create operators val annotationOps = if (allSymbols.isNotEmpty()) groupSymbolsByOperator(allSymbols) else emptyList() @@ -135,11 +146,17 @@ class OperatorDocProcessor( val tensorOpsName = resolver.getKSNameFromString("sk.ainet.lang.tensor.ops.TensorOps") val tensorOps = resolver.getClassDeclarationByName(tensorOpsName) ?: return emptyList() + // Backend classes marked `internal = true` are shape/dtype + // sentinels or test doubles (e.g. `VoidTensorOps`). Drop them + // from the surface scan so they never appear in user-facing + // pages or coverage matrices. val backendClasses: List> = resolver .getSymbolsWithAnnotation("sk.ainet.lang.ops.Backend") .filterIsInstance() .mapNotNull { cls -> val ann = cls.annotations.find { it.shortName.asString() == "Backend" } ?: return@mapNotNull null + val isInternal = ann.arguments.find { it.name?.asString() == "internal" }?.value as? Boolean == true + if (isInternal) return@mapNotNull null val id = ann.arguments.find { it.name?.asString() == "id" }?.value?.toString() ?: return@mapNotNull null id to cls From 3327cba3dc8eed0ce3f2546f43f752d7a1adc513 Mon Sep 17 00:00:00 2001 From: Michal Harakal Date: Wed, 15 Apr 2026 11:47:06 +0200 Subject: [PATCH 2/3] Ops docs: real version stamp + KDoc @param extraction (#513) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Two related polish items on top of the partial/void/LaTeX fixes. 
Generated pages now stamp the canonical SKaiNET version — sourced from `VERSION_NAME` in the root `gradle.properties` (the same value published to Maven Central) — instead of the hardcoded "1.0.0" placeholder the KSP processor shipped with. The value is passed via a new `skainet.version` KSP option configured in `skainet-lang-core/build.gradle.kts` and read through `environment.options` on the processor side. Falls back to `"unknown"` when the option is absent so unit-test fixtures that drive the processor directly don't need to thread the value through. Parameter descriptions on the generated operator pages now come from KDoc `@param` directives on the underlying `TensorOps` interface. A new `parseKDocParams` helper walks KSP's raw `docString`, pulls `@param ` blocks (with continuation-line support up to the next `@` or blank line), and feeds the map into `extractParameters`. Params without KDoc still render as `name: Type` with no description, so ops that haven't been documented yet stay valid. `scaledDotProductAttention` is the reference example — its six params all now carry shape and semantics prose on the generated page. 
Co-Authored-By: Claude Opus 4.6 (1M context) --- .../reference/operators/generated/index.adoc | 2 +- .../operators/generated/similarity.adoc | 3 + .../operators/generated/tensorops.adoc | 6 ++ .../pages/reference/ops-status-matrix.adoc | 2 +- .../skainet-lang-core/build.gradle.kts | 10 +++ .../lang/ops/ksp/OperatorDocProcessor.kt | 74 ++++++++++++++++--- 6 files changed, 86 insertions(+), 11 deletions(-) diff --git a/docs/modules/ROOT/pages/reference/operators/generated/index.adoc b/docs/modules/ROOT/pages/reference/operators/generated/index.adoc index 54733c40..bcda32ca 100644 --- a/docs/modules/ROOT/pages/reference/operators/generated/index.adoc +++ b/docs/modules/ROOT/pages/reference/operators/generated/index.adoc @@ -1,6 +1,6 @@ = AI-NET Operators Reference -Generated from version `1.0.0` on 2026-04-15 +Generated from version `0.18.0` on 2026-04-15 == Operators by Modality diff --git a/docs/modules/ROOT/pages/reference/operators/generated/similarity.adoc b/docs/modules/ROOT/pages/reference/operators/generated/similarity.adoc index 3e342f67..0901bb89 100644 --- a/docs/modules/ROOT/pages/reference/operators/generated/similarity.adoc +++ b/docs/modules/ROOT/pages/reference/operators/generated/similarity.adoc @@ -16,8 +16,11 @@ fun cosineDistance(other:Tensor, dim:Int, eps:Double): Tensor === Parameters * `other: Tensor` + The other tensor to calculate the distance to. * `dim: Int` + The dimension along which to calculate the cosine distance. Default is -1 (last dimension). * `eps: Double` + A small value to avoid division by zero. Default is 1e-8. 
=== Return Type diff --git a/docs/modules/ROOT/pages/reference/operators/generated/tensorops.adoc b/docs/modules/ROOT/pages/reference/operators/generated/tensorops.adoc index 656e25e9..cdc9503d 100644 --- a/docs/modules/ROOT/pages/reference/operators/generated/tensorops.adoc +++ b/docs/modules/ROOT/pages/reference/operators/generated/tensorops.adoc @@ -1031,11 +1031,17 @@ fun scaledDotProductAttention(query:Tensor, key:Tensor, value:Tensor, mask:Tenso === Parameters * `query: Tensor` + [batch, nHeads, seqLen, headDim] * `key: Tensor` + [batch, nKVHeads, kvLen, headDim] * `value: Tensor` + [batch, nKVHeads, kvLen, headDim] * `mask: Tensor` + optional additive mask [batch, 1, seqLen, kvLen] (e.g. causal) * `scale: Float` + scaling factor, defaults to 1/sqrt(headDim) * `causal: Boolean` + if true, apply causal masking (ignore [mask] parameter) === Return Type diff --git a/docs/modules/ROOT/pages/reference/ops-status-matrix.adoc b/docs/modules/ROOT/pages/reference/ops-status-matrix.adoc index d915ed11..5d93a07b 100644 --- a/docs/modules/ROOT/pages/reference/ops-status-matrix.adoc +++ b/docs/modules/ROOT/pages/reference/ops-status-matrix.adoc @@ -1,7 +1,7 @@ = Operator Coverage Matrix :description: Cross-backend status for every operator function in SKaiNET. -Generated from `operators.json` version `1.0.0` on 2026-04-15. +Generated from `operators.json` version `0.18.0` on 2026-04-15. Rows are `Operator.function` pairs; columns are backends that appear in any function's `statusByBackend` map. A missing entry means the backend makes no claim about the function — treat it as "unknown", not "not supported". 
diff --git a/skainet-lang/skainet-lang-core/build.gradle.kts b/skainet-lang/skainet-lang-core/build.gradle.kts index c25b9c19..10a8f943 100644 --- a/skainet-lang/skainet-lang-core/build.gradle.kts +++ b/skainet-lang/skainet-lang-core/build.gradle.kts @@ -82,6 +82,16 @@ dependencies { add("kspCommonMainMetadata", project(":skainet-lang:skainet-lang-ksp-processor")) } +// Pass the canonical SKaiNET version (VERSION_NAME in the root +// gradle.properties — same value that's published to Maven Central) to +// the OperatorDocProcessor so generated ops pages stamp a real version +// instead of the hardcoded "1.0.0" placeholder. Read at configuration +// time via providers.gradleProperty so build-cache entries invalidate +// when the version bumps. +ksp { + arg("skainet.version", providers.gradleProperty("VERSION_NAME").getOrElse("unknown")) +} + tasks.matching { it.name.startsWith("dokka") }.configureEach { dependsOn("kspCommonMainKotlinMetadata") } diff --git a/skainet-lang/skainet-lang-ksp-processor/src/main/kotlin/sk/ainet/lang/ops/ksp/OperatorDocProcessor.kt b/skainet-lang/skainet-lang-ksp-processor/src/main/kotlin/sk/ainet/lang/ops/ksp/OperatorDocProcessor.kt index 799c6e4f..e50b6455 100644 --- a/skainet-lang/skainet-lang-ksp-processor/src/main/kotlin/sk/ainet/lang/ops/ksp/OperatorDocProcessor.kt +++ b/skainet-lang/skainet-lang-ksp-processor/src/main/kotlin/sk/ainet/lang/ops/ksp/OperatorDocProcessor.kt @@ -52,7 +52,8 @@ data class Note( */ class OperatorDocProcessor( private val codeGenerator: CodeGenerator, - private val logger: KSPLogger + private val logger: KSPLogger, + private val options: Map = emptyMap(), ) : SymbolProcessor { private var alreadyGenerated = false @@ -278,15 +279,63 @@ class OperatorDocProcessor( } private fun extractParameters(function: KSFunctionDeclaration): List { + val paramDocs = parseKDocParams(function.docString) return function.parameters.map { param -> + val name = param.name?.asString() ?: "" ParameterDoc( - param.name?.asString() 
?: "", - param.type.resolve().declaration.simpleName.asString(), - "" // TODO: Extract from KDoc if available + name = name, + type = param.type.resolve().declaration.simpleName.asString(), + description = paramDocs[name].orEmpty(), ) } } + /** + * Parse `@param ` blocks out of a KDoc comment + * and return a map from parameter name to description. Descriptions + * span subsequent indented continuation lines up until the next + * `@` or a blank line, matching how Dokka reads KDoc. + * + * Returns an empty map when [docString] is null or contains no + * `@param` directives — callers then fall back to no description, + * keeping pages for undocumented ops valid. + */ + private fun parseKDocParams(docString: String?): Map { + if (docString.isNullOrBlank()) return emptyMap() + val result = linkedMapOf() + var current: StringBuilder? = null + docString.lineSequence().forEach { raw -> + // KSP hands back the KDoc with leading `*` markers still + // attached on continuation lines; strip the canonical + // ` * ` / `*` prefix before pattern-matching. + val line = raw.trimStart().removePrefix("*").trimStart() + val paramMatch = Regex("^@param\\s+(\\S+)\\s*(.*)$").matchEntire(line) + when { + paramMatch != null -> { + val (name, rest) = paramMatch.destructured + val sb = StringBuilder(rest.trim()) + result[name] = sb + current = sb + } + line.startsWith("@") -> { + // Another KDoc tag ends the current @param block. 
+ current = null + } + line.isBlank() -> { + current = null + } + else -> { + current?.let { sb -> + if (sb.isNotEmpty()) sb.append(' ') + sb.append(line.trim()) + } + } + } + } + return result.mapValues { (_, sb) -> sb.toString().trim() } + .filterValues { it.isNotEmpty() } + } + private fun extractReturnType(function: KSFunctionDeclaration): String { return function.returnType?.resolve()?.declaration?.simpleName?.asString() ?: "Unit" } @@ -377,10 +426,17 @@ class OperatorDocProcessor( } } - private fun extractVersion(): String { - // TODO: Extract from project metadata - return "1.0.0" - } + /** + * Canonical SKaiNET version stamped into every generated operator + * page. Sourced from the `skainet.version` KSP option, which the + * `skainet-lang-core` build script populates from the root + * `gradle.properties` `VERSION_NAME` (the same value published to + * Maven Central). Falls back to `"unknown"` when the option isn't + * passed — e.g. when the processor is exercised from a unit test + * fixture that doesn't thread the option through. 
+ */ + private fun extractVersion(): String = + options["skainet.version"]?.takeIf { it.isNotBlank() } ?: "unknown" private fun extractCommitSha(): String { // TODO: Extract from git metadata @@ -492,6 +548,6 @@ class OperatorDocProcessor( */ class OperatorDocProcessorProvider : SymbolProcessorProvider { override fun create(environment: SymbolProcessorEnvironment): SymbolProcessor { - return OperatorDocProcessor(environment.codeGenerator, environment.logger) + return OperatorDocProcessor(environment.codeGenerator, environment.logger, environment.options) } } \ No newline at end of file From fdb4323e7737f81baedf4589341494412dc53516 Mon Sep 17 00:00:00 2001 From: Michal Harakal Date: Wed, 15 Apr 2026 12:01:38 +0200 Subject: [PATCH 3/3] Ops docs: KDoc matmul params as reference example (#513) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit matmul is the walk-through example for the ops docs pipeline — it already has a prose partial under partials/ops/tensorops/matmul.adoc and gets embedded into the explanation/theory page. Give its interface declaration the matching @param KDoc so the generator's new param-description extraction has something to render on the reference page, mirroring the pattern set by scaledDotProductAttention. Co-Authored-By: Claude Opus 4.6 (1M context) --- .../pages/reference/operators/generated/tensorops.adoc | 2 ++ .../kotlin/sk/ainet/lang/tensor/ops/TensorOps.kt | 10 ++++++++++ 2 files changed, 12 insertions(+) diff --git a/docs/modules/ROOT/pages/reference/operators/generated/tensorops.adoc b/docs/modules/ROOT/pages/reference/operators/generated/tensorops.adoc index cdc9503d..cd3c671b 100644 --- a/docs/modules/ROOT/pages/reference/operators/generated/tensorops.adoc +++ b/docs/modules/ROOT/pages/reference/operators/generated/tensorops.adoc @@ -196,7 +196,9 @@ fun matmul(a:Tensor, b:Tensor): Tensor === Parameters * `a: Tensor` + left operand, shape `[…, m, k]`. 
The last dimension `k` must match the second-to-last dimension of [b]. * `b: Tensor` + right operand, shape `[…, k, n]`. Leading dimensions are broadcast against [a] using the usual broadcasting rules. === Return Type diff --git a/skainet-lang/skainet-lang-core/src/commonMain/kotlin/sk/ainet/lang/tensor/ops/TensorOps.kt b/skainet-lang/skainet-lang-core/src/commonMain/kotlin/sk/ainet/lang/tensor/ops/TensorOps.kt index 3f730336..47c0001d 100644 --- a/skainet-lang/skainet-lang-core/src/commonMain/kotlin/sk/ainet/lang/tensor/ops/TensorOps.kt +++ b/skainet-lang/skainet-lang-core/src/commonMain/kotlin/sk/ainet/lang/tensor/ops/TensorOps.kt @@ -38,6 +38,16 @@ public interface TensorOps { public fun rdivScalar(a: Number, b: Tensor): Tensor // Linear algebra operations + /** + * Matrix product of two tensors. For 2-D inputs this is the standard + * matrix multiplication `C = A · B`; for higher-rank inputs the leading + * dimensions are broadcast and the last two are contracted. + * + * @param a left operand, shape `[…, m, k]`. The last dimension `k` must + * match the second-to-last dimension of [b]. + * @param b right operand, shape `[…, k, n]`. Leading dimensions are + * broadcast against [a] using the usual broadcasting rules. + */ @Diff public fun matmul(a: Tensor, b: Tensor): Tensor @Diff