Changes from all commits

87 commits
e29b9b4 add first draft for new AI job runner (hotzenklotz, Aug 25, 2025)
11c7306 updated credits per voxel information (hotzenklotz, Aug 25, 2025)
27bd4fa more refinement (hotzenklotz, Aug 25, 2025)
d1cacc6 close drawer on job start (hotzenklotz, Aug 26, 2025)
43332a4 added alignment job AI UI (hotzenklotz, Aug 26, 2025)
fc45bda more stuff (hotzenklotz, Aug 27, 2025)
5762b8f add training data section (hotzenklotz, Aug 28, 2025)
32850db WIP (hotzenklotz, Aug 29, 2025)
e233a68 added training annotation volume calculation (hotzenklotz, Sep 2, 2025)
50e1f6b properly connect AI job drawer open/close state (hotzenklotz, Sep 2, 2025)
933980a Merge branch 'master' of github.com:scalableminds/webknossos into new… (hotzenklotz, Sep 2, 2025)
4dff13f stuff (hotzenklotz, Sep 2, 2025)
b486d32 more fixes (hotzenklotz, Sep 3, 2025)
0e2b3bb stuff (hotzenklotz, Sep 3, 2025)
d6e9930 add more validation (hotzenklotz, Sep 3, 2025)
48a1e72 added more validation (hotzenklotz, Sep 4, 2025)
5d7c727 refinement (hotzenklotz, Sep 4, 2025)
21bcfe9 sdf (hotzenklotz, Sep 4, 2025)
c4cd9a0 get a CI build (hotzenklotz, Sep 5, 2025)
89589e1 type fixes (hotzenklotz, Sep 5, 2025)
e8ddb99 Merge branch 'master' of github.com:scalableminds/webknossos into new… (hotzenklotz, Sep 5, 2025)
fcc5cdb Merge branch 'master' of github.com:scalableminds/webknossos into new… (hotzenklotz, Sep 10, 2025)
e0beade Merge branch 'master' of github.com:scalableminds/webknossos into new… (hotzenklotz, Sep 11, 2025)
b3025e2 correctly connect tabs (hotzenklotz, Sep 11, 2025)
3a38dad integrate training with several annotations (hotzenklotz, Sep 12, 2025)
70f3f3b use react-query (hotzenklotz, Sep 12, 2025)
6780296 formatting (hotzenklotz, Sep 12, 2025)
26e0f58 fix job cost calculation (hotzenklotz, Sep 15, 2025)
7774cde Merge branch 'master' of github.com:scalableminds/webknossos into new… (hotzenklotz, Sep 15, 2025)
f95d18e move AI job drawer in dom tree (hotzenklotz, Sep 15, 2025)
ecb823a add highlighting on hover (hotzenklotz, Sep 15, 2025)
d278c70 Merge branch 'master' of github.com:scalableminds/webknossos into new… (hotzenklotz, Sep 16, 2025)
62a70e4 remove unused components (hotzenklotz, Sep 16, 2025)
820c24b simplify start_job_form for material_volume (hotzenklotz, Sep 16, 2025)
f459ff8 fix type errors (hotzenklotz, Sep 16, 2025)
0a2bdc8 moved ai job components to new directory (hotzenklotz, Sep 16, 2025)
fa8edb4 improved wording (hotzenklotz, Sep 16, 2025)
287f6c2 more fine tuning (hotzenklotz, Sep 16, 2025)
9b8c5a7 restore CI (hotzenklotz, Sep 16, 2025)
074ad0d fix typing (hotzenklotz, Sep 16, 2025)
654cc70 Update frontend/javascripts/viewer/view/ai_jobs/utils.ts (hotzenklotz, Sep 16, 2025)
3562126 apply coderabbit PR feedback (hotzenklotz, Sep 16, 2025)
86df8ad Merge branch 'new_ai_job_ui' of github.com:scalableminds/webknossos i… (hotzenklotz, Sep 16, 2025)
fda54f0 refinement (hotzenklotz, Sep 16, 2025)
e9c7c20 fix swapped job commands (hotzenklotz, Sep 16, 2025)
568bdab Merge branch 'master' of github.com:scalableminds/webknossos into new… (hotzenklotz, Sep 16, 2025)
e703629 update docs (hotzenklotz, Sep 18, 2025)
eba6428 more documentation (hotzenklotz, Sep 19, 2025)
9ebe694 restore tooltip (hotzenklotz, Sep 19, 2025)
717d643 Merge branch 'master' of github.com:scalableminds/webknossos into new… (hotzenklotz, Sep 19, 2025)
ea0ae64 more docs (hotzenklotz, Sep 22, 2025)
827014e Merge branch 'master' of github.com:scalableminds/webknossos into new… (hotzenklotz, Sep 22, 2025)
332f6d8 Merge branch 'master' of github.com:scalableminds/webknossos into new… (hotzenklotz, Sep 23, 2025)
6abbba4 Merge branch 'master' of github.com:scalableminds/webknossos into new… (hotzenklotz, Sep 23, 2025)
d48ba0a Update docs/automation/ai_segmentation.md (hotzenklotz, Sep 24, 2025)
da071fc Update docs/automation/ai_training.md (hotzenklotz, Sep 24, 2025)
e219568 Update docs/automation/ai_training.md (hotzenklotz, Sep 24, 2025)
cb5092d Update docs/automation/ai_segmentation.md (hotzenklotz, Sep 24, 2025)
b4ed39b Update docs/automation/ai_training.md (hotzenklotz, Sep 24, 2025)
1380067 Update docs/automation/alignment.md (hotzenklotz, Sep 24, 2025)
f46e9a0 Merge branch 'master' of github.com:scalableminds/webknossos into new… (hotzenklotz, Sep 25, 2025)
6c1d7d7 apply docs feedback (hotzenklotz, Sep 25, 2025)
1b9f46d Update frontend/javascripts/viewer/view/action_bar_view.tsx (hotzenklotz, Sep 25, 2025)
dca9d5a Update docs/automation/alignment.md (hotzenklotz, Sep 25, 2025)
178c018 apply docs feedback (hotzenklotz, Sep 25, 2025)
67227e4 Merge branch 'new_ai_job_ui' of github.com:scalableminds/webknossos i… (hotzenklotz, Sep 25, 2025)
e3afc27 fix coderabbit mistakes (hotzenklotz, Sep 25, 2025)
585660b Update frontend/javascripts/viewer/view/ai_jobs/run_ai_model/ai_model… (hotzenklotz, Sep 26, 2025)
c6bb705 Update frontend/javascripts/viewer/view/ai_jobs/utils.ts (hotzenklotz, Sep 26, 2025)
6b4b106 Update docs/automation/ai_training.md (hotzenklotz, Oct 10, 2025)
95d2251 Update docs/automation/alignment.md (hotzenklotz, Oct 10, 2025)
d4b2bab Merge branch 'master' of github.com:scalableminds/webknossos into new… (hotzenklotz, Oct 14, 2025)
7fb0d86 Update frontend/javascripts/viewer/view/ai_jobs/run_ai_model/ai_analy… (hotzenklotz, Oct 15, 2025)
dcd6fb9 apply review feedback (hotzenklotz, Oct 15, 2025)
02e2375 applied feedback (hotzenklotz, Oct 15, 2025)
570f329 Merge branch 'new_ai_job_ui' of github.com:scalableminds/webknossos i… (hotzenklotz, Oct 15, 2025)
393288c split fetch_annotation_info into sub methods (hotzenklotz, Oct 15, 2025)
9eb4d0d improve error handling for fetchAnnotationInfos (hotzenklotz, Oct 15, 2025)
6f99cd5 Update docs/automation/ai_training.md (hotzenklotz, Oct 15, 2025)
c98305a Update docs/automation/ai_training.md (hotzenklotz, Oct 15, 2025)
7f08de1 Update frontend/javascripts/viewer/view/ai_jobs/run_ai_model/ai_analy… (hotzenklotz, Oct 15, 2025)
c752393 Update docs/automation/ai_training.md (hotzenklotz, Oct 15, 2025)
23bb6d8 apply more feedback (hotzenklotz, Oct 15, 2025)
2172baa Merge branch 'new_ai_job_ui' of github.com:scalableminds/webknossos i… (hotzenklotz, Oct 15, 2025)
3e63eb6 Unify admonition syntax to include a space after “!!!” (hotzenklotz, Oct 15, 2025)
092c363 more PR feedback (hotzenklotz, Oct 15, 2025)
8c91307 Merge branch 'master' of github.com:scalableminds/webknossos into new… (hotzenklotz, Oct 15, 2025)
12 changes: 8 additions & 4 deletions app/models/job/JobService.scala
@@ -261,10 +261,14 @@ class JobService @Inject()(wkConf: WkConf,

private def getJobCostPerGVx(jobCommand: JobCommand): Fox[BigDecimal] =
jobCommand match {
-      case JobCommand.infer_neurons => Fox.successful(wkConf.Features.neuronInferralCostPerGVx)
-      case JobCommand.infer_mitochondria => Fox.successful(wkConf.Features.mitochondriaInferralCostPerGVx)
-      case JobCommand.align_sections => Fox.successful(wkConf.Features.alignmentCostPerGVx)
-      case _ => Fox.failure(s"Unsupported job command $jobCommand")
+      case JobCommand.infer_neurons => Fox.successful(wkConf.Features.neuronInferralCostPerGVx)
+      case JobCommand.infer_nuclei => Fox.successful(wkConf.Features.mitochondriaInferralCostPerGVx)
+      case JobCommand.infer_mitochondria => Fox.successful(wkConf.Features.mitochondriaInferralCostPerGVx)
+      case JobCommand.infer_instances => Fox.successful(wkConf.Features.mitochondriaInferralCostPerGVx)
+      case JobCommand.train_neuron_model => Fox.successful(BigDecimal(0))
+      case JobCommand.train_instance_model => Fox.successful(BigDecimal(0))
Comment on lines +268 to +269

Member: should also be read from application.conf, no?

Member Author: Yes, that would be good. We have to do another pass on the pricing models anyways.

+      case JobCommand.align_sections => Fox.successful(wkConf.Features.alignmentCostPerGVx)
+      case _ => Fox.failure(s"Unsupported job command $jobCommand")
}

def calculateJobCostInCredits(boundingBoxInTargetMag: BoundingBox, jobCommand: JobCommand): Fox[BigDecimal] =
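For context, `calculateJobCostInCredits` presumably multiplies the bounding-box volume in gigavoxels by the per-GVx rate returned by `getJobCostPerGVx`. A minimal TypeScript sketch of that arithmetic; the rate values and the `[x, y, z, width, height, depth]` bounding-box layout are illustrative assumptions, not the actual application.conf settings:

```typescript
// Hypothetical per-GVx rates keyed by job command, standing in for the
// wkConf.Features.*CostPerGVx values read from application.conf (numbers are made up).
const COST_PER_GVX: Record<string, number> = {
  infer_neurons: 1.0,
  infer_nuclei: 0.5,
  infer_mitochondria: 0.5,
  infer_instances: 0.5,
  align_sections: 0.25,
  train_neuron_model: 0,
  train_instance_model: 0,
};

// Bounding box in the target magnification, assumed as [x, y, z, width, height, depth].
type Vector6 = [number, number, number, number, number, number];

function estimateJobCostInCredits(boundingBoxInTargetMag: Vector6, jobCommand: string): number {
  const costPerGVx = COST_PER_GVX[jobCommand];
  if (costPerGVx == null) {
    throw new Error(`Unsupported job command ${jobCommand}`);
  }
  const [, , , width, height, depth] = boundingBoxInTargetMag;
  const gigaVoxels = (width * height * depth) / 1e9;
  return gigaVoxels * costPerGVx;
}

// Example: a 1024^3 voxel box for neuron inference is ~1.07 GVx, i.e. ~1.07 credits at rate 1.0.
console.log(estimateJobCostInCredits([0, 0, 0, 1024, 1024, 1024], "infer_neurons"));
```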
64 changes: 37 additions & 27 deletions docs/automation/ai_segmentation.md
@@ -4,40 +4,50 @@ While WEBKNOSSOS is great for manual annotation, some datasets are either too bi

The automated analysis features are designed to provide a general solution to a wide range of (EM) datasets. Since datasets differ in staining protocols, imaging modalities, imaging resolution & fidelity, your results may vary. [Please contact us](mailto:[email protected]) for customized, fine-tuned solutions for your dataset.

We plan to add more automated analysis features in the future. If you want to work with us on an automated analysis project, [please contact us](mailto:[email protected]).
We would love to integrate analysis solutions for more modalities and use cases.
You can launch the AI analysis dialog using the `AI Analysis` button in the toolbar at the top. This will open a dropdown menu with three options:
Contributor: ⚠️ Potential issue

Align terminology with UI: dialog → drawer flow.

Use “drawer” and clarify the dropdown triggers it.

Suggested change:
-You can launch the AI analysis dialog using the `AI Analysis` button in the toolbar at the top. This will open a dropdown menu with three options:
+Use the `AI Analysis` button in the top toolbar to open a dropdown with three options. Selecting an option opens the AI Jobs drawer:


!!!info
Automated analysis is only available on [webknossos.org](https://webknossos.org) at the moment.
If you want to set up on-premise automated analysis at your institute/workplace, then [please contact sales](mailto:[email protected]).
- **Run AI model:** Run pre-trained or custom AI models on your dataset.
- **Train AI model:** Train your own AI models on your WEBKNOSSOS annotations to match your specific needs. Coming soon.
- **AI Alignment:** Align datasets. Read more about [image alignment](./alignment.md).


## Pre-trained Models

WEBKNOSSOS offers several pre-trained models to get you started quickly:

* **Neuron Segmentation:** This analysis is designed to work with serial electron microscopy data of neural tissue (brain/cortex) and will segment all neurons within the dataset. It is tuned for serial block-face electron microscopy (SBEM), MultiSEM and focused ion beam-SEM (FIB-SEM) data.
* **Mitochondria Detection:** Run a pre-trained instance segmentation model for mitochondria detection. Optimized for EM data.
* **Nuclei Detection:** (Coming Soon) Run a pre-trained instance segmentation model for nuclei detection. Optimized for EM data.

## Your Custom Models

This section will list any custom models that you have trained or uploaded to your organization. While these build on the foundation of our pre-trained models, you can customize your models to identify or segment biological structures of interest to you or fine-tune models to the contrast and staining of your images. Training will be enabled soon.

## Neuron Segmentation
As a first trial, WEBKNOSSOS includes neuron segmentation. This analysis is designed to work with serial block-face electron microscopy (SBEM) data of neural tissue (brain/cortex) and will segment all neurons within the dataset.
## Analysis Settings

You can launch the AI analysis dialog using the `AI Analysis` button in the toolbar at the top. Use the `Start AI neuron segmentation` button in the dialog to start the analysis.
Before starting the analysis, you need to configure the following settings:

![Neuron segmentations can be launched from the tool bar.](../images/process_dataset.jpg)
/// caption
Neuron segmentations can be launched from the tool bar.
///
* **New Dataset Name:** The name of the new dataset that will be created with the segmentation results.
* **Image Data Layer:** The image layer from your current dataset that the model will use for analysis.
* **Bounding Box:** The region of interest that you want to analyze. You can choose to analyze the full dataset or a specific bounding box that you have created.

Computation time for this analysis depends directly on the size of your dataset.
You can also access **Advanced Settings** to further customize the analysis.

## Credit Information

This section provides an overview of your available credits in your organization and the estimated cost for the analysis. Cost varies depending on the size of your dataset and the type of model you want to run.

---

Computation time for any analysis depends directly on the size of your dataset.
Expect a few hours for medium-sized volumetric EM datasets.
The finished analysis will be available as a new dataset from your dashboard. You can monitor the status and progress of the analysis job from the [`Processing Jobs` page](./jobs.md) or wait for the email notification.

![Starting a new neuron segmentation.](../images/neuron_segmentation_start.jpeg)
/// caption
Starting a new neuron segmentation.
///
![Monitor the segmentation progress from the Jobs page.](../images/nuclei_segmentation_job.jpeg)
/// caption
Monitor the segmentation progress from the Jobs page.
///

## Mitochondria detection
Similarly to the neuron segmentation, you can now start a mitochondria detection in WEBKNOSSOS. In the AI analysis dialog, choose `Mitochondria detection`, select a bounding box, and go to `Processing Jobs` to look at the result.

## Custom Analysis
You'll soon be able to train custom classifiers directly in WEBKNOSSOS. [Contact us](mailto:[email protected]) for an invite to join the beta program.
We plan to add more AI analysis features in the future. If you want to work with us on an automated analysis project, [please contact us](mailto:[email protected]).
We would love to integrate analysis solutions for more modalities and use cases.

If you are interested in specialized, automated analysis, image segmentation, object detection etc., then feel free to [contact us](mailto:[email protected]). The WEBKNOSSOS development team offers [commercial analysis services](https://webknossos.org/services/automated-segmentation) for that.
<!-- Keep info in sync with docs/automation/ai_training.md -->
!!! info
Automated analysis is only available on [webknossos.org](https://webknossos.org) at the moment.
If you want to set up on-premise automated analysis at your institute/workplace, then [please contact sales](mailto:[email protected]).
58 changes: 58 additions & 0 deletions docs/automation/ai_training.md
@@ -0,0 +1,58 @@
# AI Model Training

WEBKNOSSOS allows you to train your own AI models for image segmentation. This feature is currently in early access.

<!-- Keep info in sync with docs/automation/ai_segmentation.md -->
!!! info
AI Model Training is only available on [webknossos.org](https://webknossos.org) at the moment.
If you want to set up on-premise automated analysis at your institute/workplace, then [please contact sales](mailto:[email protected]).
Comment on lines +7 to +8

Contributor: ⚠️ Potential issue | 🟡 Minor

Tighten wording and use “on-premises”.

Swap “at the moment” for “currently” (or drop it) and change “on-premise” to the correct “on-premises” phrasing to keep the copy concise and polished.



## Preparing Your Annotations

Before you can start training, prepare your ground truth annotation(s). The training process requires at least one volume annotation with at least one bounding box.

Here is a step-by-step guide to prepare your data:

1. **Create an annotation:** Start by creating a new annotation or opening an existing one.
2. **Define bounding boxes:** Create one or more bounding boxes that define the areas you want to use for training.
- It is important that the bounding boxes are not too small. WEBKNOSSOS checks that each bounding box has a minimum extent of **32 voxels in each dimension** (see the sketch after this list).
- Bounding boxes that are not aligned with the selected magnification will be automatically shrunk to fit.
- For optimal training, all bounding boxes should have dimensions that are multiples of the smallest box dimensions.
Member: might be clearer and more consistent with the other points.

Suggested change:
- For optimal training, all bounding boxes should have dimensions that are multiples of the smallest box dimensions.
- For optimal training, all bounding boxes should have extents that are multiples of the smallest box dimensions.

3. **Label segments:** Within your bounding boxes, label the segmentation of your structures of interest. Use the volume annotation tool to manually annotate structures. This will be your ground truth data. For neurons, we recommend densely labeling each structure with a unique ID. For instance segmentations, you only need to label the structures you want to train on, e.g. nuclei, mitochondria, soma, vesicles, etc.
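A minimal TypeScript sketch of the bounding-box size rules from step 2; the helper names are hypothetical and this is not the actual WEBKNOSSOS validation code:

```typescript
type BoxExtent = [number, number, number]; // width, height, depth in voxels

const MIN_EXTENT = 32; // minimum extent per dimension, as documented above

// True if every dimension meets the 32-voxel minimum.
function hasMinimumExtent(extent: BoxExtent): boolean {
  return extent.every((e) => e >= MIN_EXTENT);
}

// True if every dimension is a whole multiple of the smallest box's dimension,
// which the docs recommend for optimal training.
function isMultipleOfSmallestBox(extent: BoxExtent, smallest: BoxExtent): boolean {
  return extent.every((e, i) => e % smallest[i] === 0);
}

// Example: a 64x64x32 box passes both checks against a 32x32x32 smallest box.
const smallest: BoxExtent = [32, 32, 32];
const candidate: BoxExtent = [64, 64, 32];
console.log(hasMinimumExtent(candidate), isMultipleOfSmallestBox(candidate, smallest));
```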

## Configuring the Training
To start a training, click on the `AI Analysis` button in the toolbar and select `Train AI model` from the dropdown menu.
This will open a dialog where you can configure and start your training job.
### Select AI Training Task
Comment on lines +26 to +28

Contributor: ⚠️ Potential issue | 🟠 Major

Update instructions to match the drawer-based UI.

The AI job UI no longer opens a modal dialog—it now launches a drawer with a “Model Training” tab. Leaving the old wording misleads users following these steps. Please update this section to reference the drawer and tab so the docs stay accurate.


First, you need to select the type of model you want to train. Both models are optimized for SEM, FIB-SEM, SBEM, and Multi-SEM microscopes:

* **EM Neuron Model:** Train a new AI model for dense EM neuron segmentation.
* **EM Instances Model:** Train a new AI model for EM instance segmentation. This is optimized for nuclei, mitochondria, and other cellular structures.
### Training Data

In this section, you need to specify the data that will be used for training.

* **Image Data Layer:** Select the raw image layer.
* **Ground Truth Layer:** Select the segmentation layer that you created.
* **Magnification:** Choose the magnification that should be used for training.

You can also add more training data from other annotations by clicking the `+` button and referencing annotations by ID or WEBKNOSSOS URLs.
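Each entry in this training-data list corresponds roughly to the `AiModelTrainingAnnotationSpecification` type exported from `frontend/javascripts/admin/api/jobs.ts` further down in this diff. A hedged sketch of what a two-annotation specification could look like; only the fields visible in this diff are shown, and the IDs and layer names are placeholders:

```typescript
// Mirrors the visible fields of AiModelTrainingAnnotationSpecification; the real type
// may contain additional fields (e.g. the selected magnification).
type TrainingAnnotationSpec = {
  annotationId: string; // referenced by ID or taken from a WEBKNOSSOS URL
  colorLayerName: string; // raw image layer used as model input
  segmentationLayerName: string; // ground-truth volume layer
};

// Placeholder example combining two annotations into one training job.
const trainingData: TrainingAnnotationSpec[] = [
  { annotationId: "annotation-id-1", colorLayerName: "color", segmentationLayerName: "Volume" },
  { annotationId: "annotation-id-2", colorLayerName: "color", segmentationLayerName: "Volume" },
];

console.log(`Training on ${trainingData.length} annotations`);
```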

### Training Settings

* **Model Name:** Give your new model a descriptive name.
* **Comments:** Add any comments or notes about the training for future reference.
* **Max Distance (nm):** (Only for EM Instances Model) The maximum cross-section length ("diameter") for each identified object in nanometers, e.g. Nuclei: 1000 nm, Vesicles: 80 nm.

### Credit Information

This section provides an overview of your available credits in your organization and the estimated cost for the training. Cost varies depending on the size of your dataset and the type of model you want to train.

## Launching the Training

After configuring everything, you can start the training by clicking the `Start Training` button. You can monitor the progress of your training job from the [`Processing Jobs` page](./jobs.md) or wait for the email notification. Training might take a few hours depending on the size of your dataset.

Once the training is finished, you can find an overview of all your trained models on the `Admin` > `AI Models` page in the navbar. Please refer to the [AI Segmentation](./ai_segmentation.md) guide for more information on how to apply your trained models to your dataset.
22 changes: 19 additions & 3 deletions docs/automation/alignment.md
@@ -1,8 +1,24 @@
# Alignment
# Image Alignment

For single-tile image stacks, an alignment is directly possible from within WEBKNOSSOS.
Simply upload the dataset, open it and select the "Alignment" tab in the AI Analysis dialog.
Simply upload the dataset, open it and select the "AI Analysis" button in the toolbar. From the dropdown, choose "Run AI Alignment".

You can even annotate landmarks with the skeleton tool and use that to let WEBKNOSSOS align the dataset. Often these landmarks are not necessary, but for particularly hard to align sections, they can be quite useful. When manual landmarks are used, they don't need to cover the entire dataset.
This will open a dialog where you can configure and start the alignment.

## Select AI Alignment Task

* **Align Sections:** Align all sections of this dataset along the Z-axis using features in neighboring sections. This only supports datasets with a single tile per section (no in-plane stitching needed).
* **Align & stitch multiple tiles:** (Coming Soon) For stitching and aligning datasets with multiple tiles per section, please contact us via email for a quote. [Learn more about our alignment services.](https://webknossos.org/services/alignment)

## Alignment Settings

* **New Dataset Name:** The name of the new dataset that will be created with the aligned images.
* **Manual Matches:** You can use manual matches from a skeleton annotation to help the alignment process in specific spots. This can be useful for particularly hard-to-align sections. When manual landmarks are used, they don't need to cover the entire dataset. Instead, these manual landmarks can be placed to help with tricky cases such as a big gap, tear or jump between two sections.

## Credit Information

This section provides an overview of your available credits in your organization and the estimated cost for the alignment. Cost varies depending on the size of your dataset.

Computation time for the alignment depends directly on the size of your dataset. The finished analysis will be available as a new dataset from your dashboard. You can monitor the status and progress of the analysis job from the [`Processing Jobs` page](./jobs.md) or wait for the email notification.

For multi-tile image stacks, please refer to our [Alignment services](https://webknossos.org/services/alignment).
2 changes: 1 addition & 1 deletion docs/automation/jobs.md
@@ -6,7 +6,7 @@ WEBKNOSSOS will notify you via email upon completion or failure of any job.

Example workflows:

- [AI segmentation](./ai_segmentation.md), e.g., nuclei inferral
- [AI segmentation](./ai_segmentation.md), e.g., running an AI model for nuclei segmentation and mitochondria detection
- [converting datasets on upload](../data/upload_ui.md)
- [mesh file (pre)-computation](../meshes/precomputed_meshes.md)
- [applying a merger mode annotation](../proofreading/merger_mode.md)
2 changes: 1 addition & 1 deletion docs/data/wkw.md
@@ -56,7 +56,7 @@ Volume annotations can be downloaded and imported using ZIP files that contain [
The ZIP archive contains one NML file that holds meta information including the dataset name and the user's position.
Additionally, there is another embedded ZIP file that contains the volume annotations in WKW file format.

!!!info
!!! info
In contrast to on-disk WKW datasets, the WKW files in downloaded volume annotations only contain a single 32^3 bucket in each file.
Therefore, also the addressing of the WKW files (e.g. `z48/y5444/x5748.wkw`) is in steps of 32 instead of 1024.

2 changes: 1 addition & 1 deletion docs/proofreading/proofreading_tool.md
@@ -31,5 +31,5 @@ If case you want to reload, hide or remove a 3D mesh during proofreading, you ca

In addition to the handy shortcuts available from the right-click context menu, users can also directly modify the supervoxel graph like any other skeleton to manually add/remove nodes and edges for fine-grained control.

!!!info
!!! info
The proofreading tool requires a supervoxel graph representation of a segmentation to work. At this time, these can only be obtained from the [Voxelytics AI segmentation pipeline](https://voxelytics.com). We are actively working to make this available for more users, so please reach out to us to get you started and provide feedback: [[email protected]](mailto:[email protected])
2 changes: 1 addition & 1 deletion docs/sharing/dataset_sharing.md
@@ -66,5 +66,5 @@ To share a dataset publicly, follow these steps:
It is recommended to add a description before sharing a dataset publicly.
///

!!!info
!!! info
We recommend giving your datasets a meaningful name and description. Both are featured next to the data viewport in the `Info` tab in the main WEBKNOSSOS UI.
22 changes: 11 additions & 11 deletions frontend/javascripts/admin/api/jobs.ts
@@ -11,7 +11,7 @@ import type {
RenderAnimationOptions,
} from "types/api_types";
import type { UnitLong, Vector3, Vector6 } from "viewer/constants";
import type { SplitMergerEvaluationSettings } from "viewer/view/action-bar/ai_job_modals/components/collapsible_split_merger_evaluation_settings";
import type { SplitMergerEvaluationSettings } from "viewer/view/ai_jobs/components/collapsible_split_merger_evaluation_settings";
import { assertResponseLimit } from "./api_utils";

function transformBackendJobToAPIJob(job: any): APIJob {
@@ -188,7 +188,7 @@ export function startComputeSegmentIndexFileJob(
});
}

export function startNucleiInferralJob(
export function runPretrainedNucleiInferenceJob(
datasetId: string,
layerName: string,
newDatasetName: string,
@@ -204,7 +204,7 @@ export function startNucleiInferralJob(
});
}

export function startNeuronInferralJob(
export function runPretrainedNeuronInferencelJob(
datasetId: string,
layerName: string,
bbox: Vector6,
@@ -322,7 +322,7 @@ export function startMaterializingVolumeAnnotationJob(
);
}

export function startMitochondriaInferralJob(
export function runPretrainedMitochondriaInferenceJob(
datasetId: string,
layerName: string,
bbox: Vector6,
@@ -368,7 +368,7 @@ export enum APIAiModelCategory {
EM_NUCLEI = "em_nuclei",
}

type AiModelTrainingAnnotationSpecification = {
export type AiModelTrainingAnnotationSpecification = {
annotationId: string;
colorLayerName: string;
segmentationLayerName: string;
@@ -406,7 +406,7 @@ export function runInstanceModelTraining(params: RunInstanceModelTrainingParamet
});
}

export type BaseModelInferenceParameters = {
export type BaseCustomModelInferenceParameters = {
annotationId?: string;
aiModelId: string;
datasetDirectoryName: string;
@@ -418,21 +418,21 @@ export type BaseModelInferenceParameters = {
invertColorLayer: boolean;
// maskAnnotationLayerName?: string | null
};
type RunNeuronModelInferenceParameters = BaseModelInferenceParameters;
type RunCustomNeuronModelInferenceParameters = BaseCustomModelInferenceParameters;

type RunInstanceModelInferenceParameters = BaseModelInferenceParameters & {
type RunCustomInstanceModelInferenceParameters = BaseCustomModelInferenceParameters & {
seedGeneratorDistanceThreshold: number;
};

export function runNeuronModelInferenceWithAiModelJob(params: RunNeuronModelInferenceParameters) {
export function runCustomNeuronModelInferenceJob(params: RunCustomNeuronModelInferenceParameters) {
return Request.sendJSONReceiveJSON("/api/aiModels/inferences/runCustomNeuronModelInference", {
method: "POST",
data: JSON.stringify({ ...params, boundingBox: params.boundingBox.join(",") }),
});
}

export function runInstanceModelInferenceWithAiModelJob(
params: RunInstanceModelInferenceParameters,
export function runCustomInstanceModelInferenceJob(
params: RunCustomInstanceModelInferenceParameters,
) {
return Request.sendJSONReceiveJSON("/api/aiModels/inferences/runCustomInstanceModelInference", {
method: "POST",
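Both custom-inference helpers above serialize their Vector6 bounding box with `boundingBox.join(",")` before POSTing. A small sketch of that convention; the `[x, y, z, width, height, depth]` ordering and the field values are assumptions for illustration, and fields of `BaseCustomModelInferenceParameters` that are collapsed in this diff are omitted:

```typescript
type Vector6 = [number, number, number, number, number, number];

// Assumed layout: top-left corner followed by extent, i.e. [x, y, z, width, height, depth].
function serializeBoundingBox(bbox: Vector6): string {
  return bbox.join(",");
}

// Illustrative request body for the custom-inference endpoints shown above
// (placeholder values; collapsed fields of the parameter type are left out).
const body = {
  aiModelId: "my-model-id",
  datasetDirectoryName: "my_dataset",
  invertColorLayer: false,
  boundingBox: serializeBoundingBox([0, 0, 0, 1024, 1024, 512]), // "0,0,0,1024,1024,512"
};

console.log(JSON.stringify(body));
```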