diff --git a/build.gradle.kts b/build.gradle.kts index 533340dc6..04fa507d6 100644 --- a/build.gradle.kts +++ b/build.gradle.kts @@ -58,6 +58,7 @@ dependencies { } implementation("net.java.dev.jna:jna-platform:5.14.0") +// implementation("net.clearvolume:cleargl") implementation("org.janelia.saalfeldlab:n5") implementation("org.janelia.saalfeldlab:n5-imglib2") implementation("org.apache.logging.log4j:log4j-api:2.20.0") @@ -67,9 +68,6 @@ dependencies { // SciJava dependencies - implementation("org.yaml:snakeyaml") { - version { strictly("1.33") } - } implementation("org.scijava:scijava-common") implementation("org.scijava:ui-behaviour") implementation("org.scijava:script-editor") @@ -135,6 +133,8 @@ dependencies { // OME implementation("ome:formats-bsd") implementation("ome:formats-gpl") + + implementation("org.mastodon:mastodon:1.0.0-beta-34") } //kapt { @@ -148,6 +148,19 @@ dependencies { val isRelease: Boolean get() = System.getProperty("release") == "true" +//kotlin { +// jvmToolchain(21) +//// compilerOptions { +//// jvmTarget = JvmTarget.JVM_21 +//// freeCompilerArgs = listOf("-Xinline-classes", "-opt-in=kotlin.RequiresOptIn") +//// } +//} +// +//java { +// targetCompatibility = JavaVersion.VERSION_21 +// sourceCompatibility = JavaVersion.VERSION_21 +//} + tasks { withType().all { val version = System.getProperty("java.version").substringBefore('.').toInt() diff --git a/gradle.properties b/gradle.properties index 91aaef276..a380f1c73 100644 --- a/gradle.properties +++ b/gradle.properties @@ -1,7 +1,7 @@ org.gradle.jvmargs=-XX:MaxMetaspaceSize=2g org.gradle.caching=true jvmTarget=21 -#useLocalScenery=true +useLocalScenery=true kotlinVersion=1.9.23 dokkaVersion=1.9.10 scijavaParentPOMVersion=37.0.0 diff --git a/src/main/java/sc/iview/commands/file/OpenDirofTif.java b/src/main/java/sc/iview/commands/file/OpenDirofTif.java new file mode 100644 index 000000000..e565332c2 --- /dev/null +++ b/src/main/java/sc/iview/commands/file/OpenDirofTif.java @@ -0,0 +1,85 @@ 
+/*- + * #%L + * Scenery-backed 3D visualization package for ImageJ. + * %% + * Copyright (C) 2016 - 2021 SciView developers. + * %% + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * 1. Redistributions of source code must retain the above copyright notice, + * this list of conditions and the following disclaimer. + * 2. Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDERS OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. 
+ * #L% + */ +package sc.iview.commands.file; + +import org.scijava.command.Command; +import org.scijava.io.IOService; +import org.scijava.log.LogService; +import org.scijava.plugin.Menu; +import org.scijava.plugin.Parameter; +import org.scijava.plugin.Plugin; +import sc.iview.SciView; + +import java.io.File; +import java.io.IOException; + +import static sc.iview.commands.MenuWeights.FILE; +import static sc.iview.commands.MenuWeights.FILE_OPEN; + +/** + * Command to open a file in SciView + * + * @author Kyle Harrington + * + */ +@Plugin(type = Command.class, menuRoot = "SciView", // + menu = { @Menu(label = "File", weight = FILE), // + @Menu(label = "Open Directory of tif...", weight = FILE_OPEN) }) +public class OpenDirofTif implements Command { + + @Parameter + private IOService io; + + @Parameter + private LogService log; + + @Parameter + private SciView sciView; + + // TODO: Find a more extensible way than hard-coding the extensions. + @Parameter(style = "directory") + private File file; + + @Parameter + private int onlyFirst = 0; + + @Override + public void run() { + try { + if(onlyFirst > 0) { + sciView.openDirTiff(file.toPath(), onlyFirst); + } else { + sciView.openDirTiff(file.toPath(), null); + } + } + catch (final IOException | IllegalArgumentException exc) { + log.error( exc ); + } + } +} diff --git a/src/main/java/sc/iview/commands/file/OpenN5.kt b/src/main/java/sc/iview/commands/file/OpenN5.kt index 42d3a2202..4dcf650ce 100644 --- a/src/main/java/sc/iview/commands/file/OpenN5.kt +++ b/src/main/java/sc/iview/commands/file/OpenN5.kt @@ -139,6 +139,7 @@ class OpenN5 : DynamicCommand() { DataType.FLOAT64 -> N5Utils.openVolatile(reader, dataset) DataType.OBJECT -> TODO() null -> TODO() + DataType.STRING -> TODO() } val wrapped = VolatileViews.wrapAsVolatile(img) diff --git a/src/main/java/sc/iview/commands/file/OpenTrackFile.java b/src/main/java/sc/iview/commands/file/OpenTrackFile.java new file mode 100644 index 000000000..b2c3e7a7f --- /dev/null +++ 
b/src/main/java/sc/iview/commands/file/OpenTrackFile.java @@ -0,0 +1,79 @@ +/*- + * #%L + * Scenery-backed 3D visualization package for ImageJ. + * %% + * Copyright (C) 2016 - 2021 SciView developers. + * %% + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * 1. Redistributions of source code must retain the above copyright notice, + * this list of conditions and the following disclaimer. + * 2. Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDERS OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. 
+ * #L% + */ +package sc.iview.commands.file; + +import org.scijava.command.Command; +import org.scijava.io.IOService; +import org.scijava.log.LogService; +import org.scijava.plugin.Menu; +import org.scijava.plugin.Parameter; +import org.scijava.plugin.Plugin; +import sc.iview.SciView; + +import java.io.File; +import java.io.IOException; + +import static sc.iview.commands.MenuWeights.FILE; +import static sc.iview.commands.MenuWeights.FILE_OPEN; + +/** + * Command to open a file in SciView + * + * @author Kyle Harrington + * + */ +@Plugin(type = Command.class, menuRoot = "SciView", // + menu = { @Menu(label = "File", weight = FILE), // + @Menu(label = "Open Track File", weight = FILE_OPEN) }) +public class OpenTrackFile implements Command { + + @Parameter + private IOService io; + + @Parameter + private LogService log; + + @Parameter + private SciView sciView; + + // TODO: Find a more extensible way than hard-coding the extensions. + @Parameter(style = "open,extensions:csv") + private File file; + + @Override + public void run() { + try { + sciView.openTrackFile(file); + + } + catch (final IOException | IllegalArgumentException exc) { + log.error( exc ); + } + } +} diff --git a/src/main/java/sc/iview/event/NodeTaggedEvent.java b/src/main/java/sc/iview/event/NodeTaggedEvent.java new file mode 100644 index 000000000..c0dc6feb4 --- /dev/null +++ b/src/main/java/sc/iview/event/NodeTaggedEvent.java @@ -0,0 +1,10 @@ +package sc.iview.event; + +import graphics.scenery.Node; + + +public class NodeTaggedEvent extends NodeEvent { + public NodeTaggedEvent(final Node node ) { + super( node ); + } +} diff --git a/src/main/kotlin/sc/iview/SciView.kt b/src/main/kotlin/sc/iview/SciView.kt index 5e7d92578..29858c628 100644 --- a/src/main/kotlin/sc/iview/SciView.kt +++ b/src/main/kotlin/sc/iview/SciView.kt @@ -55,9 +55,9 @@ import graphics.scenery.utils.ExtractsNatives.Companion.getPlatform import graphics.scenery.utils.LogbackUtils import graphics.scenery.utils.SceneryPanel import 
graphics.scenery.utils.Statistics -import graphics.scenery.utils.extensions.times import graphics.scenery.volumes.Colormap import graphics.scenery.volumes.RAIVolume +import graphics.scenery.volumes.TransferFunction import graphics.scenery.volumes.Volume import graphics.scenery.volumes.Volume.Companion.fromXML import graphics.scenery.volumes.Volume.Companion.setupId @@ -85,6 +85,7 @@ import net.imglib2.type.numeric.integer.UnsignedByteType import net.imglib2.view.Views import org.joml.Quaternionf import org.joml.Vector3f +import org.joml.Vector4f import org.scijava.Context import org.scijava.`object`.ObjectService import org.scijava.display.Display @@ -100,6 +101,7 @@ import org.scijava.thread.ThreadService import org.scijava.util.ColorRGB import org.scijava.util.Colors import org.scijava.util.VersionUtils +import sc.iview.commands.demo.animation.ParticleDemo import sc.iview.commands.edit.InspectorInteractiveCommand import sc.iview.event.NodeActivatedEvent import sc.iview.event.NodeAddedEvent @@ -110,7 +112,6 @@ import sc.iview.ui.CustomPropertyUI import sc.iview.ui.MainWindow import sc.iview.ui.SwingMainWindow import sc.iview.ui.TaskManager -import ucar.units.ConversionException import java.awt.event.WindowListener import java.io.File import java.io.IOException @@ -118,6 +119,7 @@ import java.net.JarURLConnection import java.net.URL import java.nio.ByteBuffer import java.nio.FloatBuffer +import java.nio.file.Path import java.time.LocalDate import java.time.format.DateTimeFormatter import java.util.* @@ -134,6 +136,7 @@ import kotlin.concurrent.thread import javax.swing.JOptionPane import kotlin.math.cos import kotlin.math.sin +import kotlin.system.measureTimeMillis /** * Main SciView class. @@ -784,6 +787,132 @@ class SciView : SceneryBase, CalibratedRealInterval { } } + @Throws(IOException::class) + fun openDirTiff(source: Path, onlyFirst: Int? 
= null) + { + val v = Volume.fromPath(source, hub, onlyFirst) + v.name = "volume" + v.spatial().position = Vector3f(-3.0f, 10.0f, 0.0f) + v.colormap = Colormap.get("jet") + v.spatial().scale = Vector3f(15.0f, 15.0f,45.0f) + v.transferFunction = TransferFunction.ramp(0.05f, 0.8f) + v.metadata["animating"] = true + v.converterSetups.firstOrNull()?.setDisplayRange(0.0, 1500.0) + v.visible = true + + v.spatial().wantsComposeModel = true + v.spatial().updateWorld(true) +// System.out.println("v.model: " + v.model) + addChild(v) +// System.out.println("v.getDimensions: "+ v.getDimensions()) +// +// System.out.println(" v.pixelToWorldRatio: "+ v.pixelToWorldRatio) +// System.out.println("v.world.matrix: " + v.spatial().world) + } + + data class PointInTrack( + val t: Int, + val loc: Vector3f, + val cellId: Long, + val parentId: Long, + val nodeScore: Float, + val edgeScore: Float + ) + + data class Track( + val track: List, + val trackId: Int + ) + + @Throws(IOException::class) + fun openTrackFile(file: File) + { + val lines = file.readLines() + var track = ArrayList() + val tracks = ArrayList() + val separator = "," + + var lastTrackId = -1 + lines.drop(1).forEach { line -> + val tokens = line.split(separator) + val t = tokens[0].toInt() + val z = tokens[1].toFloat() -2000f + val y = tokens[2].toFloat() -800f + val x = tokens[3].toFloat() -1300f + val cellId = tokens[4].toLong() + val parentId = tokens[5].toLong() + val trackId = tokens[6].toInt() + val nodeScore = tokens[7].toFloat() + val edgeScore = tokens[8].toFloat()/45.0f + + val currentPointInTrack = PointInTrack( + t, + Vector3f(x,y,z), + cellId, + parentId, + nodeScore, + edgeScore + ) + if(lastTrackId != trackId) + { + lastTrackId = trackId + val sortedTrack = track.sortedBy { it.t } + tracks.add(Track(sortedTrack, trackId)) + + track.clear() + } + track.add(currentPointInTrack) + } + val timeCost = measureTimeMillis { + addTracks(tracks) + } + println("time: $timeCost") + } + + fun addTracks(tracks: ArrayList) 
+ { + val rng = Random(17) + for(track in tracks) + { + if(track.trackId > 10) + { + continue + } + System.out.println("add track: "+ track.trackId.toString() ) + val master = Cylinder(0.1f, 1.0f, 10) +// master.setMaterial (ShaderMaterial.fromFiles("DefaultDeferredInstanced.vert", "DefaultDeferred.frag")) + master.setMaterial(ShaderMaterial.fromClass(ParticleDemo::class.java)) + master.ifMaterial{ + ambient = Vector3f(0.1f, 0f, 0f) + diffuse = Vector3f(0.05f, 0f, 0f) + metallic = 0.01f + roughness = 0.5f + } + + val mInstanced = InstancedNode(master) + mInstanced.name = "TrackID-${track.trackId}" + mInstanced.instancedProperties["Color"] = { Vector4f(1.0f) } + addNode(mInstanced) + + var cnt = 0 + val a = rng.nextFloat() + val b = rng.nextFloat() + track.track.windowed(2,1).forEach { pair -> + cnt = cnt + 1 + val element = mInstanced.addInstance() + element.name ="EdgeID-$cnt" + element.instancedProperties["Color"] = { Vector4f( a,b,pair[0].edgeScore, 1.0f) } + element.spatial().orientBetweenPoints(Vector3f(pair[0].loc).mul(0.1f) , Vector3f(pair[1].loc).mul(0.1f) , rescale = true, reposition = true) + //mInstanced.instances.add(element) + + } + } + + } + + + + /** * Open a file specified by the source path. The file can be anything that SciView knows about: mesh, volume, point cloud * @param source string of a data source @@ -1687,38 +1816,73 @@ class SciView : SceneryBase, CalibratedRealInterval { return renderer } + private var originalFOV = camera?.fov + /** * Enable VR rendering */ fun toggleVRRendering() { var renderer = renderer ?: return + // Save camera's original settings if we switch from 2D to VR + if (!vrActive) { + originalFOV = camera?.fov + } + + // If turning off VR, store the controls state before deactivating + if (vrActive) { + // We're about to turn off VR + controls.stashControls() + } + vrActive = !vrActive val cam = scene.activeObserver as? DetachedHeadCamera ?: return var ti: TrackerInput? 
= null var hmdAdded = false - if (!hub.has(SceneryElement.HMDInput)) { - try { - val hmd = OpenVRHMD(false, true) - if (hmd.initializedAndWorking()) { - hub.add(SceneryElement.HMDInput, hmd) - ti = hmd - } else { - logger.warn("Could not initialise VR headset, just activating stereo rendering.") + + if (vrActive) { + + // VR activation logic + if (!hub.has(SceneryElement.HMDInput)) { + try { + val hmd = OpenVRHMD(false, true) + if (hmd.initializedAndWorking()) { + hub.add(SceneryElement.HMDInput, hmd) + ti = hmd + } else { + logger.warn("Could not initialise VR headset, just activating stereo rendering.") + } + } catch (e: Exception) { + logger.error("Could not add OpenVRHMD: $e") } hmdAdded = true - } catch (e: Exception) { - logger.error("Could not add OpenVRHMD: $e") + } else { + ti = hub.getWorkingHMD() + } + + // Set tracker on the DetachedHeadCamera + if (ti != null) { + cam.tracker = ti + logger.info("tracker set") } } else { - ti = hub.getWorkingHMD() - } - if (vrActive && ti != null) { - cam.tracker = ti - } else { + // VR deactivation logic + // Convert back to normal Camera + logger.info("Shutting down VR") cam.tracker = null + + // Reset FOV to original value when turning off VR + originalFOV?.let { camera?.fov = it } + + // Restore controls after turning off VR + controls.restoreControls() + + // Reset input controls to ensure proper camera behavior + inputSetup() } - renderer.pushMode = false + + // Enable push mode if VR is inactive, and the other way round + renderer.pushMode = !vrActive // we need to force reloading the renderer as the HMD might require device or instance extensions if (renderer is VulkanRenderer && hmdAdded) { @@ -1732,8 +1896,17 @@ class SciView : SceneryBase, CalibratedRealInterval { e.printStackTrace() } } - - renderer.toggleVR() + } + logger.debug("Replaced renderer.") + renderer.toggleVR() + // Cleanup HMD after VR has been toggled off + if (!vrActive) { + if (hub.has(SceneryElement.HMDInput)) { + val hmd = 
hub.get(SceneryElement.HMDInput) as? OpenVRHMD + hmd?.close() + // TODO hub.remove(hmd) + logger.debug("Closed HMD.") + } } } diff --git a/src/main/kotlin/sc/iview/commands/MenuWeights.kt b/src/main/kotlin/sc/iview/commands/MenuWeights.kt index 610deeadb..9cefbddd8 100644 --- a/src/main/kotlin/sc/iview/commands/MenuWeights.kt +++ b/src/main/kotlin/sc/iview/commands/MenuWeights.kt @@ -121,6 +121,7 @@ object MenuWeights { const val DEMO_ADVANCED_BDVSLICING = 2.0 const val DEMO_ADVANCED_MESHTEXTURE = 3.0 const val DEMO_ADVANCED_INSTANCE_BENCHMARK = 4.0 + const val DEMO_ADVANCED_EYETRACKING =5.0 // Help const val HELP_HELP = 0.0 const val HELP_ABOUT = 200.0 diff --git a/src/main/kotlin/sc/iview/commands/demo/advanced/CellTrackingBase.kt b/src/main/kotlin/sc/iview/commands/demo/advanced/CellTrackingBase.kt new file mode 100644 index 000000000..8268e200a --- /dev/null +++ b/src/main/kotlin/sc/iview/commands/demo/advanced/CellTrackingBase.kt @@ -0,0 +1,1021 @@ +package sc.iview.commands.demo.advanced + +import graphics.scenery.* +import graphics.scenery.attribute.material.Material +import graphics.scenery.controls.OpenVRHMD +import graphics.scenery.controls.TrackedDevice +import graphics.scenery.controls.TrackedDeviceType +import graphics.scenery.controls.TrackerRole +import graphics.scenery.controls.behaviours.AnalogInputWrapper +import graphics.scenery.controls.behaviours.VRTouch +import graphics.scenery.primitives.Cylinder +import graphics.scenery.primitives.TextBoard +import graphics.scenery.ui.* +import graphics.scenery.utils.MaybeIntersects +import graphics.scenery.utils.SystemHelpers +import graphics.scenery.utils.extensions.* +import graphics.scenery.utils.lazyLogger +import graphics.scenery.volumes.RAIVolume +import graphics.scenery.volumes.Volume +import org.joml.* +import org.mastodon.mamut.model.Spot +import org.scijava.ui.behaviour.ClickBehaviour +import org.scijava.ui.behaviour.DragBehaviour +import sc.iview.SciView +import 
sc.iview.commands.demo.advanced.HedgehogAnalysis.SpineGraphVertex +import sc.iview.controls.behaviours.MoveInstanceVR +import sc.iview.controls.behaviours.MultiButtonManager +import sc.iview.controls.behaviours.VR2HandNodeTransform +import sc.iview.controls.behaviours.VRGrabTheWorld +import java.io.BufferedWriter +import java.io.FileWriter +import java.nio.file.Path +import java.util.concurrent.atomic.AtomicInteger +import kotlin.concurrent.thread + +/** + * Base class for different VR cell tracking purposes. It includes functionality to add spines and edgehogs, + * as used by [EyeTracking], and registers controller bindings via [inputSetup]. It is possible to register observers + * that listen to timepoint changes with [registerObserver]. + * @param [sciview] The [SciView] instance to use + */ +open class CellTrackingBase( + open var sciview: SciView +) { + val logger by lazyLogger(System.getProperty("scenery.LogLevel", "info")) + + lateinit var sessionId: String + lateinit var sessionDirectory: Path + + lateinit var hmd: OpenVRHMD + + val hedgehogs = Mesh() + val hedgehogIds = AtomicInteger(0) + lateinit var volume: Volume + + val referenceTarget = Icosphere(0.004f, 2) + + @Volatile var eyeTrackingActive = false + var playing = false + var direction = PlaybackDirection.Backward + var volumesPerSecond = 6f + var skipToNext = false + var skipToPrevious = false + + var volumeScaleFactor = 1.0f + + private lateinit var lightTetrahedron: List + + val volumeTPWidget = TextBoard() + + /** determines whether the volume and hedgehogs should keep listening for updates or not */ + var cellTrackingActive: Boolean = false + + /** Takes a list of [SpineGraphVertex] and its positions to create the corresponding track in Mastodon. + * In the case of controller tracking, the points were already sent to Mastodon one by one via [singleLinkTrackedCallback] and the list is not needed. + * Set the first boolean to true if the coordinates are in world space. 
The bridge will convert them to Mastodon coords. + * The first Spot defines whether to start with an existing spot, so the lambda will use that as starting point. + * The second spot defines whether we want to merge into this spot. */ + var trackCreationCallback: ((List>?, Boolean, Spot?, Spot?) -> Unit)? = null + /** Passes the current time point, the cursor position and its radius to the bridge to either create a new spot + * or delete an existing spot if there is a spot selected. + * The deleteBranch flag indicates whether we want to delete the whole branch or just a spot. */ + var spotCreateDeleteCallback: ((tp: Int, sciviewPos: Vector3f, radius: Float, deleteBranch: Boolean) -> Unit)? = null + /** Select a spot based on the controller tip's position, current time point and a multiple of the radius + * in which a selection event is counted as valid. addOnly prevents deselection from clicking away. */ + var spotSelectCallback: ((sciviewPos: Vector3f, tp: Int, radiusFactor: Float, addOnly: Boolean) -> Pair)? = null + var spotMoveInitCallback: ((Vector3f) -> Unit)? = null + var spotMoveDragCallback: ((Vector3f) -> Unit)? = null + var spotMoveEndCallback: ((Vector3f) -> Unit)? = null + /** Links a selected spot to the closest spot to handle merge events. */ + var spotLinkCallback: (() -> Unit)? = null + /** Generates a single link between a new position and the previously annotated one. + * Sends the position data to the bridge for intermediary keeping. The integer is the timepoint. + * The Float contains the cursor's radius in sciview space. + * The boolean specifies whether the link preview should be rendered. */ + var singleLinkTrackedCallback: ((pos: Vector3f, tp: Int, radius: Float, preview: Boolean) -> Unit)? = null + var toggleTrackingPreviewCallback: ((Boolean) -> Unit)? = null + var rebuildGeometryCallback: (() -> Unit)? = null + + var stageSpotsCallback: (() -> Unit)? = null + var predictSpotsCallback: ((all: Boolean) -> Unit)? 
= null + var trainSpotsCallback: (() -> Unit)? = null + var neighborLinkingCallback: (() -> Unit)? = null + // TODO add train flow functionality + var trainFlowCallback: (() -> Unit)? = null + /** Reverts to the point previously saved by Mastodon's undo recorder. Also handles redo events if undo is set to false. */ + var mastodonUndoRedoCallback: ((undo: Boolean) -> Unit)? = null + /** Returns a list of spots currently selected in Mastodon. Used to determine whether to scale the cursor or the spots. */ + var getSelectionCallback: (() -> List)? = null + /** Adjusts the radii of spots, both in sciview and Mastodon. */ + var scaleSpotsCallback: ((radius: Float, update: Boolean) -> Unit)? = null + + enum class HedgehogVisibility { Hidden, PerTimePoint, Visible } + + enum class PlaybackDirection { Forward, Backward } + + enum class ElephantMode { StageSpots, TrainAll, PredictTP, PredictAll, NNLinking } + + var hedgehogVisibility = HedgehogVisibility.Hidden + + var leftVRController: TrackedDevice? = null + var rightVRController: TrackedDevice? = null + + var cursor = CursorTool + var leftElephantColumn: Column? = null + var leftColumnPredict: Column? = null + var leftColumnLink: Column? = null + var leftUndoMenu: Column? = null + + var enableTrackingPreview = true + + val leftMenuList = mutableListOf() + var leftMenuIndex = 0 + + val grabButtonManager = MultiButtonManager() + val resetRotationBtnManager = MultiButtonManager() + + val mapper = CellTrackingButtonMapper + + private val observers = mutableListOf() + + open fun run() { + sciview.toggleVRRendering() + logger.info("VR mode has been toggled") + hmd = sciview.hub.getWorkingHMD() as? 
OpenVRHMD ?: throw IllegalStateException("Could not find headset") + + // Try to load the correct button mapping corresponding to the controller layout + val isProfileLoaded = mapper.loadProfile(hmd.manufacturer) + if (!isProfileLoaded) { + throw IllegalStateException("Could not load profile, headset type unknown!") + } + val shell = Box(Vector3f(20.0f, 20.0f, 20.0f), insideNormals = true) + shell.ifMaterial { + cullingMode = Material.CullingMode.Front + diffuse = Vector3f(0.4f, 0.4f, 0.4f) + } + + shell.spatial().position = Vector3f(0.0f, 0.0f, 0.0f) + shell.name = "Shell" + sciview.addNode(shell) + + lightTetrahedron = Light.createLightTetrahedron( + Vector3f(0.0f, 0.0f, 0.0f), + spread = 5.0f, + radius = 15.0f, + intensity = 5.0f + ) + lightTetrahedron.forEach { sciview.addNode(it) } + + val volnodes = sciview.findNodes { node -> Volume::class.java.isAssignableFrom(node.javaClass) } + + val v = (volnodes.firstOrNull() as? Volume) + if(v == null) { + logger.warn("No volume found, bailing") + return + } else { + logger.info("found ${volnodes.size} volume nodes. 
Using the first one: ${volnodes.first()}") + volume = v + } + + thread { + logger.info("Adding onDeviceConnect handlers") + hmd.events.onDeviceConnect.add { hmd, device, timestamp -> + logger.info("onDeviceConnect called, cam=${sciview.camera}") + if(device.type == TrackedDeviceType.Controller) { + logger.info("Got device ${device.name} at $timestamp") + device.model?.let { hmd.attachToNode(device, it, sciview.camera) } + when (device.role) { + TrackerRole.Invalid -> {} + TrackerRole.LeftHand -> leftVRController = device + TrackerRole.RightHand -> rightVRController = device + } + if (device.role == TrackerRole.RightHand) { + attachCursorAndTPWidget() + device.model?.name = "rightHand" + } else if (device.role == TrackerRole.LeftHand) { + device.model?.name = "leftHand" + setupElephantMenu() + setupGeneralMenu() + inputSetup() + logger.info("Set up navigation and editing controls.") + } + } + } + } + + cellTrackingActive = true + launchUpdaterThread() + } + + /** Registers a new observer that will get updated whenever the VR user triggers a timepoint update. */ + fun registerObserver(observer: TimepointObserver) { + observers.add(observer) + } + + /** Unregisters the timepoint observer. */ + fun unregisterObserver(observer: TimepointObserver) { + observers.remove(observer) + } + + /** Notifies all active observers of a change of timepoint. */ + private fun notifyObservers(timepoint: Int) { + observers.forEach { it.onTimePointChanged(timepoint) } + } + + /** Attaches a column of [Gui3DElement]s to the left VR controller and adds the column to [leftMenuList]. 
*/ + protected fun createWristMenuColumn( + vararg elements: Gui3DElement, + debug: Boolean = false, + name: String = "Menu" + ): Column { + val column = Column(*elements, centerVertically = true, centerHorizontally = true) + column.ifSpatial { + scale = Vector3f(0.05f) + position = Vector3f(0.05f, 0.05f, column.height / 20f + 0.1f) + rotation = Quaternionf().rotationXYZ(-1.57f, 1.57f, 0f) + } + leftVRController?.model?.let { + sciview.addNode(column, parent = it, activePublish = false) + if (debug) { + column.children.forEach { child -> + val bb = BoundingGrid() + bb.node = child + bb.gridColor = Vector3f(0.5f, 1f, 0.4f) + sciview.addNode(bb, parent = it) + } + } + } + column.name = name + column.pack() + leftMenuList.add(column) + return column + } + + var controllerTrackingActive = false + + /** Intermediate storage for a single track created with the controllers. + * Once tracking is finished, this track is sent to Mastodon. */ + var controllerTrackList = mutableListOf>() + var startWithExistingSpot: Spot? = null + + /** This lambda is called every time the user performs a click with controller-based tracking. */ + val trackCellsWithController = ClickBehaviour { _, _ -> + if (!controllerTrackingActive) { + controllerTrackingActive = true + cursor.setTrackingColor() + // we dont want animation, because we track step by step + playing = false + // Assume the user didn't click on an existing spot to start the track. + startWithExistingSpot = null + } + // play the volume backwards, step by step, so cell split events can simply be turned into a merge event + if (volume.currentTimepoint > 0) { + val p = cursor.getPosition() + // did the user click on an existing cell and wants to merge the track into it? 
+ val (selected, isValidSelection) = + spotSelectCallback?.invoke(p, volume.currentTimepoint, cursor.radius, false) ?: (null to false) + // If this is the first spot we track, and its a valid existing spot, mark it as such + if (isValidSelection && controllerTrackList.size == 0) { + startWithExistingSpot = selected + logger.info("Set startWithExistingPost to $startWithExistingSpot") + } + logger.debug("Tracked a new spot at position $p") + logger.debug("Do we want to merge? $isValidSelection. Selected spot is $selected") + // Create a placeholder link during tracking for immediate feedback + singleLinkTrackedCallback?.invoke(p, volume.currentTimepoint, cursor.radius, enableTrackingPreview) + + volume.goToTimepoint(volume.currentTimepoint - 1) + // If the user clicked a cell and its *not* the first in the track, we assume it is a merge event and end the tracking + if (isValidSelection && controllerTrackList.size > 1) { + endControllerTracking(selected) + } + // This will also redraw all geometry using Mastodon as source + notifyObservers(volume.currentTimepoint) + } else { + sciview.camera?.showMessage("Reached the first time point!", centered = true, distance = 2f, size = 0.2f) + // Let's head back to the last timepoint for starting a new track fast-like + volume.goToLastTimepoint() + endControllerTracking() + } + } + + /** Stops the current controller tracking process and sends the created track to Mastodon. */ + private fun endControllerTracking(mergeSpot: Spot? 
= null) { + if (controllerTrackingActive) { + logger.info("Ending controller tracking now and sending ${controllerTrackList.size} spots to Mastodon to chew on.") + controllerTrackingActive = false + trackCreationCallback?.invoke(null, true, startWithExistingSpot, mergeSpot) + controllerTrackList.clear() + cursor.resetColor() + } + } + + fun setupElephantMenu() { + val unpressedColor = Vector3f(0.81f, 0.81f, 1f) + val touchingColor = Vector3f(0.7f, 0.65f, 1f) + val pressedColor = Vector3f(0.54f, 0.44f, 0.96f) + val stageSpotsButton = Button( + "Stage all", + command = { updateElephantActions(ElephantMode.StageSpots) }, byTouch = true, depressDelay = 500, + color = unpressedColor, touchingColor = touchingColor, pressedColor = pressedColor) + val trainAllButton = Button( + "Train All TPs", + command = { updateElephantActions(ElephantMode.TrainAll) }, byTouch = true, depressDelay = 500, + color = unpressedColor, touchingColor = touchingColor, pressedColor = pressedColor) + val predictAllButton = Button( + "Predict All", + command = { updateElephantActions(ElephantMode.PredictAll) }, byTouch = true, depressDelay = 500, + color = unpressedColor, touchingColor = touchingColor, pressedColor = pressedColor) + val predictTPButton = Button( + "Predict TP", + command = { updateElephantActions(ElephantMode.PredictTP) }, byTouch = true, depressDelay = 500, + color = unpressedColor, touchingColor = touchingColor, pressedColor = pressedColor) + val linkingButton = Button( + "NN linking", + command = { updateElephantActions(ElephantMode.NNLinking) }, byTouch = true, depressDelay = 500, + color = unpressedColor, touchingColor = touchingColor, pressedColor = pressedColor) + + leftElephantColumn = + createWristMenuColumn(stageSpotsButton, name = "Stage Menu") + leftElephantColumn?.visible = false + leftColumnPredict = createWristMenuColumn(trainAllButton, predictTPButton, predictAllButton, name = "Train/Predict Menu") + leftColumnPredict?.visible = false + leftColumnLink = 
createWristMenuColumn(linkingButton, name = "Linking Menu") + leftColumnLink?.visible = false + } + + var lastButtonTime = System.currentTimeMillis() + + /** Ensure that only a single Elephant action is triggered at a time */ + private fun updateElephantActions(mode: ElephantMode) { + val buttonTime = System.currentTimeMillis() + + if ((buttonTime - lastButtonTime) > 1000) { + + thread { + when (mode) { + ElephantMode.StageSpots -> stageSpotsCallback?.invoke() + ElephantMode.TrainAll -> trainSpotsCallback?.invoke() + ElephantMode.PredictTP -> predictSpotsCallback?.invoke(false) + ElephantMode.PredictAll -> predictSpotsCallback?.invoke(true) + ElephantMode.NNLinking -> neighborLinkingCallback?.invoke() + } + + logger.info("We locked the buttons for ${(buttonTime-lastButtonTime)} ms ") + lastButtonTime = buttonTime + } + + } else { + sciview.camera?.showMessage("Have some patience!", duration = 1500, distance = 2f, size = 0.2f, centered = true) + } + + } + + fun setupGeneralMenu() { + + val cam = sciview.camera ?: throw IllegalStateException("Could not find camera") + + val color = Vector3f(0.8f) + val pressedColor = Vector3f(0.95f, 0.35f, 0.25f) + val touchingColor = Vector3f(0.7f, 0.55f, 0.55f) + + val undoButton = Button( + "Undo", + command = { mastodonUndoRedoCallback?.invoke(true) }, byTouch = true, depressDelay = 250, + color = color, pressedColor = pressedColor, touchingColor = touchingColor + ) + val redoButton = Button( + "Redo", + command = {mastodonUndoRedoCallback?.invoke(false)}, byTouch = true, depressDelay = 250, + color = color, pressedColor = pressedColor, touchingColor = touchingColor + ) + val toggleTrackingPreviewBtn = ToggleButton( + "Preview Off", "Preview On", command = { + enableTrackingPreview = !enableTrackingPreview + toggleTrackingPreviewCallback?.invoke(enableTrackingPreview) + }, byTouch = true, + color = color, + touchingColor = Vector3f(0.67f, 0.9f, 0.63f), + pressedColor = Vector3f(0.35f, 0.95f, 0.25f), + default = true + ) + val 
togglePlaybackDirBtn = ToggleButton( + textFalse = "BW", textTrue = "FW", command = { + direction = if (direction == PlaybackDirection.Forward) { + PlaybackDirection.Backward + } else { + PlaybackDirection.Forward + } + }, byTouch = true, + color = Vector3f(0.52f, 0.87f, 0.86f), + touchingColor = color, + pressedColor = Vector3f(0.84f, 0.87f, 0.52f) + ) + val playSlowerBtn = Button( + "<", command = { + volumesPerSecond = maxOf(volumesPerSecond - 1f, 1f) + cam.showMessage( + "Speed: ${"%.0f".format(volumesPerSecond)} vol/s", + distance = 1.2f, size = 0.2f, centered = true + ) + }, byTouch = true, depressDelay = 250, + color = color, pressedColor = pressedColor, touchingColor = touchingColor + ) + val playFasterBtn = Button( + ">", command = { + volumesPerSecond = minOf(volumesPerSecond + 1f, 20f) + cam.showMessage( + "Speed: ${"%.0f".format(volumesPerSecond)} vol/s", + distance = 1.2f, size = 0.2f, centered = true + ) + }, byTouch = true, depressDelay = 250, + color = color, pressedColor = pressedColor, touchingColor = touchingColor + ) + val goToLastBtn = Button( + ">|", command = { + playing = false + volume.goToLastTimepoint() + notifyObservers(volume.currentTimepoint) + cam.showMessage("Jumped to timepoint ${volume.currentTimepoint}.", + distance = 1.2f, size = 0.2f, centered = true) + }, byTouch = true, depressDelay = 250, + color = color, pressedColor = pressedColor, touchingColor = touchingColor + ) + val goToFirstBtn = Button( + "|<", command = { + playing = false + volume.goToFirstTimepoint() + notifyObservers(volume.currentTimepoint) + cam.showMessage("Jumped to timepoint ${volume.currentTimepoint}.", + distance = 1.2f, size = 0.2f, centered = true) + }, byTouch = true, depressDelay = 250, + color = color, pressedColor = pressedColor, touchingColor = touchingColor + ) + + val timeControlRow = Row(goToFirstBtn, playSlowerBtn, togglePlaybackDirBtn, playFasterBtn, goToLastBtn) + + leftUndoMenu = createWristMenuColumn(undoButton, redoButton, name = "Left Undo 
Menu") + leftUndoMenu?.visible = false + val previewMenu = createWristMenuColumn(toggleTrackingPreviewBtn, name = "Preview Menu") + previewMenu.visible = false + val timeMenu = createWristMenuColumn(timeControlRow, name = "Time Menu") + timeMenu.visible = false + } + + + private fun cycleLeftMenus() { + leftMenuList.forEach { it.visible = false } + leftMenuIndex = (leftMenuIndex + 1) % leftMenuList.size + logger.debug("Cycling to ${leftMenuList[leftMenuIndex].name}") + leftMenuList[leftMenuIndex].visible = true + } + + + fun addHedgehog() { + logger.info("added hedgehog") + val hedgehog = Cylinder(0.005f, 1.0f, 16) + hedgehog.visible = false + hedgehog.setMaterial(ShaderMaterial.fromFiles("DeferredInstancedColor.frag", "DeferredInstancedColor.vert")) + val hedgehogInstanced = InstancedNode(hedgehog) + hedgehogInstanced.visible = false + hedgehogInstanced.instancedProperties["ModelMatrix"] = { hedgehog.spatial().world} + hedgehogInstanced.instancedProperties["Metadata"] = { Vector4f(0.0f, 0.0f, 0.0f, 0.0f) } + hedgehogs.addChild(hedgehogInstanced) + } + + /** Attach a spherical cursor to the right controller. */ + private fun attachCursorAndTPWidget(debug: Boolean = false) { + // Only attach if not already attached + if (sciview.findNodes { it.name == "VR Cursor" }.isNotEmpty()) { + return + } + + volumeTPWidget.text = volume.currentTimepoint.toString() + volumeTPWidget.name = "Volume Timepoint Widget" + volumeTPWidget.fontColor = Vector4f(0.4f, 0.45f, 1f, 1f) + volumeTPWidget.spatial { + scale = Vector3f(0.07f) + position = Vector3f(-0.05f, -0.05f, 0.12f) + rotation = Quaternionf().rotationXYZ(-1.57f, -1.57f, 0f) + } + + rightVRController?.model?.let { + cursor.attachCursor(sciview, it) + sciview.addNode(volumeTPWidget, activePublish = false, parent = it) + } + } + + /** Object that represents the 3D cursor in form of a sphere. It needs to be attached to a VR controller via [attachCursor]. + * The current cursor position can be obtained with [getPosition]. 
The current radius is stored in [radius]. + * The tool can be scaled up and down with [scaleByFactor]. + * [resetColor], [setSelectColor] and [setTrackingColor] allow changing the cursor's color to reflect the currently active operation. */ + object CursorTool { + private val logger by lazyLogger() + var radius: Float = 0.007f + private set + val cursor = Sphere(radius) + private val initPos = Vector3f(-0.01f, -0.05f, -0.03f) + + fun getPosition() = cursor.spatial().worldPosition() + + fun attachCursor(sciview: SciView, parent: Node, debug: Boolean = false) { + cursor.name = "VR Cursor" + cursor.material { + diffuse = Vector3f(0.15f, 0.2f, 1f) + } + cursor.spatial().position = initPos + sciview.addNode(cursor, parent = parent) + + if (debug) { + val bb = BoundingGrid() + bb.node = cursor + bb.name = "Cursor BB" + bb.lineWidth = 2f + bb.gridColor = Vector3f(1f, 0.3f, 0.25f) + sciview.addNode(bb, parent = parent) + } + logger.info("Attached cursor to controller.") + } + + fun scaleByFactor(factor: Float) { + var clampedFac = 1f + // Only apply the factor if we are in the radius range 0.001f - 0.1f + if ((factor < 1f && radius > 0.001f) || (factor > 1f && radius < 0.15f)) { + clampedFac = factor + } + radius *= clampedFac + cursor.spatial().scale = Vector3f(radius/0.007f) + cursor.spatial().position = Vector3f(initPos) + Vector3f(initPos).normalize().times(radius - 0.007f) + } + + fun resetColor() { + cursor.material().diffuse = Vector3f(0.15f, 0.2f, 1f) + } + + fun setSelectColor() { + cursor.material().diffuse = Vector3f(1f, 0.25f, 0.25f) + } + + fun setTrackingColor() { + cursor.material().diffuse = Vector3f(0.65f, 1f, 0.22f) + } + + } + + open fun inputSetup() + { + val cam = sciview.camera ?: throw IllegalStateException("Could not find camera") + + sciview.sceneryInputHandler?.let { handler -> + listOf( + "move_forward_fast", + "move_back_fast", + "move_left_fast", + "move_right_fast").forEach { name -> + handler.getBehaviour(name)?.let { behaviour -> + 
mapper.setKeyBindAndBehavior(hmd, name, behaviour) + } + } + } + + val toggleHedgehog = ClickBehaviour { _, _ -> + val current = HedgehogVisibility.entries.indexOf(hedgehogVisibility) + hedgehogVisibility = HedgehogVisibility.entries.get((current + 1) % 3) + + when(hedgehogVisibility) { + HedgehogVisibility.Hidden -> { + hedgehogs.visible = false + hedgehogs.runRecursive { it.visible = false } + cam.showMessage("Hedgehogs hidden",distance = 2f, size = 0.2f, centered = true) + } + + HedgehogVisibility.PerTimePoint -> { + hedgehogs.visible = true + cam.showMessage("Hedgehogs shown per timepoint",distance = 2f, size = 0.2f, centered = true) + } + + HedgehogVisibility.Visible -> { + hedgehogs.visible = true + cam.showMessage("Hedgehogs visible",distance = 2f, size = 0.2f, centered = true) + } + } + } + + val nextTimepoint = ClickBehaviour { _, _ -> + skipToNext = true + } + + val prevTimepoint = ClickBehaviour { _, _ -> + skipToPrevious = true + } + + class ScaleCursorOrSpotsBehavior(val factor: Float): DragBehaviour { + var isSelected = false + override fun init(p0: Int, p1: Int) { + // determine whether we selected spots or not + isSelected = getSelectionCallback?.invoke()?.isNotEmpty() ?: false + } + + override fun drag(p0: Int, p1: Int) { + if (isSelected) { + scaleSpotsCallback?.invoke(factor, false) + } else { + // Make cursor movement a little stronger than + cursor.scaleByFactor(factor * factor) + } + } + + override fun end(p0: Int, p1: Int) { + scaleSpotsCallback?.invoke(factor, true) + } + } + + val scaleCursorOrSpotsUp = AnalogInputWrapper(ScaleCursorOrSpotsBehavior(1.02f), sciview.currentScene) + + val scaleCursorOrSpotsDown = AnalogInputWrapper(ScaleCursorOrSpotsBehavior(0.98f), sciview.currentScene) + + val faster = ClickBehaviour { _, _ -> + volumesPerSecond = maxOf(minOf(volumesPerSecond+0.2f, 20f), 1f) + cam.showMessage("Speed: ${"%.1f".format(volumesPerSecond)} vol/s",distance = 1.2f, size = 0.2f, centered = true) + } + + val slower = ClickBehaviour { 
_, _ -> + volumesPerSecond = maxOf(minOf(volumesPerSecond-0.2f, 20f), 1f) + cam.showMessage("Speed: ${"%.1f".format(volumesPerSecond)} vol/s",distance = 2f, size = 0.2f, centered = true) + } + + val playPause = ClickBehaviour { _, _ -> + playing = !playing + if(playing) { + cam.showMessage("Playing",distance = 2f, size = 0.2f, centered = true) + } else { + cam.showMessage("Paused",distance = 2f, size = 0.2f, centered = true) + } + } + + val deleteLastHedgehog = ConfirmableClickBehaviour( + armedAction = { timeout -> + cam.showMessage("Deleting last track, press again to confirm.",distance = 2f, size = 0.2f, + messageColor = Vector4f(1.0f, 1.0f, 1.0f, 1.0f), + backgroundColor = Vector4f(1.0f, 0.2f, 0.2f, 1.0f), + duration = timeout.toInt(), + centered = true) + + }, + confirmAction = { + hedgehogs.children.removeLast() + volume.children.last { it.name.startsWith("Track-") }?.let { lastTrack -> + volume.removeChild(lastTrack) + } + val hedgehogId = hedgehogIds.get() + val hedgehogFile = sessionDirectory.resolve("Hedgehog_${hedgehogId}_${SystemHelpers.formatDateTime()}.csv").toFile() + val hedgehogFileWriter = BufferedWriter(FileWriter(hedgehogFile, true)) + hedgehogFileWriter.newLine() + hedgehogFileWriter.newLine() + hedgehogFileWriter.write("# WARNING: TRACK $hedgehogId IS INVALID\n") + hedgehogFileWriter.close() + + cam.showMessage("Last track deleted.",distance = 2f, size = 0.2f, + messageColor = Vector4f(1.0f, 0.2f, 0.2f, 1.0f), + backgroundColor = Vector4f(1.0f, 1.0f, 1.0f, 1.0f), + duration = 1000, + centered = true + ) + }) + + mapper.setKeyBindAndBehavior(hmd, "stepFwd", nextTimepoint) + mapper.setKeyBindAndBehavior(hmd, "stepBwd", prevTimepoint) +// mapper.setKeyBindAndBehavior(hmd, "faster", faster) +// mapper.setKeyBindAndBehavior(hmd, "slower", slower) + mapper.setKeyBindAndBehavior(hmd, "playback", playPause) + mapper.setKeyBindAndBehavior(hmd, "radiusIncrease", scaleCursorOrSpotsUp) + mapper.setKeyBindAndBehavior(hmd, "radiusDecrease", 
scaleCursorOrSpotsDown) + + /** Local class that handles double assignment of the left A key which is used to cycle menus as well as + * reset the rotation when pressed while the [VR2HandNodeTransform] is active. */ + class CycleMenuAndLockAxisBehavior(val button: OpenVRHMD.OpenVRButton, val role: TrackerRole) + : DragBehaviour { + fun registerConfig() { + logger.debug("Setting up keybinds for CycleMenuAndLockAxisBehavior") + resetRotationBtnManager.registerButtonConfig(button, role) + } + override fun init(x: Int, y: Int) { + resetRotationBtnManager.pressButton(button, role) + if (!resetRotationBtnManager.isTwoHandedActive()) { + cycleLeftMenus() + } + } + override fun drag(x: Int, y: Int) {} + override fun end(x: Int, y: Int) { + resetRotationBtnManager.releaseButton(button, role) + } + } + + val leftAButtonBehavior = CycleMenuAndLockAxisBehavior(OpenVRHMD.OpenVRButton.A, TrackerRole.LeftHand) + leftAButtonBehavior.let { + it.registerConfig() + mapper.setKeyBindAndBehavior(hmd, "cycleMenu", it) + } + + mapper.setKeyBindAndBehavior(hmd, "controllerTracking", trackCellsWithController) + + /** Several behaviors mapped per default to the right menu button. If controller tracking is active, + * end the tracking. If not, clicking will either create or delete a spot, depending on whether the user + * previously selected a spot. Holding the button for more than 0.5s deletes the whole connected branch. 
*/ + class AddDeleteResetBehavior : DragBehaviour { + var start = System.currentTimeMillis() + var wasExecuted = false + override fun init(x: Int, y: Int) { + start = System.currentTimeMillis() + wasExecuted = false + } + override fun drag(x: Int, y: Int) { + if (System.currentTimeMillis() - start > 500 && !wasExecuted) { + val p = cursor.getPosition() + spotCreateDeleteCallback?.invoke(volume.currentTimepoint, p, cursor.radius, true) + wasExecuted = true + } + } + override fun end(x: Int, y: Int) { + if (controllerTrackingActive) { + endControllerTracking() + } else { + val p = cursor.getPosition() + logger.debug("Got cursor position: $p") + if (!wasExecuted) { + spotCreateDeleteCallback?.invoke(volume.currentTimepoint, p, cursor.radius, false) + } + } + } + } + + mapper.setKeyBindAndBehavior(hmd, "addDeleteReset", AddDeleteResetBehavior()) + + class DragSelectBehavior: DragBehaviour { + var time = System.currentTimeMillis() + override fun init(x: Int, y: Int) { + time = System.currentTimeMillis() + val p = cursor.getPosition() + cursor.setSelectColor() + spotSelectCallback?.invoke(p, volume.currentTimepoint, cursor.radius, false) + } + override fun drag(x: Int, y: Int) { + // Only perform the selection method ten times a second + if (System.currentTimeMillis() - time > 100) { + val p = cursor.getPosition() + spotSelectCallback?.invoke(p, volume.currentTimepoint, cursor.radius, true) + time = System.currentTimeMillis() + } + } + override fun end(x: Int, y: Int) { + cursor.resetColor() + } + } + + mapper.setKeyBindAndBehavior(hmd, "select", DragSelectBehavior()) + + // this behavior is needed for touching the menu buttons + VRTouch.createAndSet(sciview.currentScene, hmd, listOf(TrackerRole.RightHand), false, customTip = cursor.cursor) + + VRGrabTheWorld.createAndSet( + sciview.currentScene, + hmd, + listOf(OpenVRHMD.OpenVRButton.Side), + listOf(TrackerRole.LeftHand), + grabButtonManager, + 1.5f + ) + + VR2HandNodeTransform.createAndSet( + hmd, + 
OpenVRHMD.OpenVRButton.Side, + sciview.currentScene, + lockYaxis = false, + target = volume, + onEndCallback = rebuildGeometryCallback, + resetRotationBtnManager = resetRotationBtnManager, + resetRotationButton = MultiButtonManager.ButtonConfig(leftAButtonBehavior.button, leftAButtonBehavior.role) + ) + + // drag behavior can stay enabled regardless of current tool mode + MoveInstanceVR.createAndSet( + sciview.currentScene, hmd, listOf(OpenVRHMD.OpenVRButton.Side), listOf(TrackerRole.RightHand), + grabButtonManager, + { cursor.getPosition() }, + spotMoveInitCallback, + spotMoveDragCallback, + spotMoveEndCallback, + ) + + hmd.allowRepeats += OpenVRHMD.OpenVRButton.Trigger to TrackerRole.LeftHand + logger.info("Registered VR controller bindings.") + + } + + /** + * Launches a thread that updates the volume time points, the hedgehog visibility and reference target color. + */ + fun launchUpdaterThread() { + thread { + while(!sciview.isInitialized) { Thread.sleep(200) } + + while(sciview.running && cellTrackingActive) { + if(playing || skipToNext || skipToPrevious) { + val oldTimepoint = volume.viewerState.currentTimepoint + if (skipToNext || playing) { + skipToNext = false + if(direction == PlaybackDirection.Forward) { + notifyObservers(oldTimepoint + 1) + } else { + notifyObservers(oldTimepoint - 1) + } + } else { + skipToPrevious = false + if(direction == PlaybackDirection.Forward) { + notifyObservers(oldTimepoint - 1) + } else { + notifyObservers(oldTimepoint + 1) + } + } + val newTimepoint = volume.viewerState.currentTimepoint + + + if(hedgehogs.visible) { + if(hedgehogVisibility == HedgehogVisibility.PerTimePoint) { + hedgehogs.children.forEach { hh -> + val hedgehog = hh as InstancedNode + hedgehog.instances.forEach { + if (it.metadata.isNotEmpty()) { + it.visible = (it.metadata["spine"] as SpineMetadata).timepoint == volume.viewerState.currentTimepoint + } + } + } + } else { + hedgehogs.children.forEach { hh -> + val hedgehog = hh as InstancedNode + 
hedgehog.instances.forEach { it.visible = true } + } + } + } + + if(eyeTrackingActive && newTimepoint == 0) { + eyeTrackingActive = false + playing = false + referenceTarget.ifMaterial { diffuse = Vector3f(0.5f, 0.5f, 0.5f)} + logger.info("Deactivated eye tracking by reaching timepoint 0.") + sciview.camera!!.showMessage("Tracking deactivated.",distance = 2f, size = 0.2f, centered = true) + dumpHedgehog() + } + } + + Thread.sleep((1000.0f/volumesPerSecond).toLong()) + } + logger.info("CellTracking updater thread has stopped.") + } + } + + open fun addSpine(center: Vector3f, direction: Vector3f, volume: Volume, confidence: Float, timepoint: Int) { + val cam = sciview.camera as? DetachedHeadCamera ?: return + val sphere = volume.boundingBox?.getBoundingSphere() ?: return + + val sphereDirection = sphere.origin.minus(center) + val sphereDist = Math.sqrt(sphereDirection.x * sphereDirection.x + sphereDirection.y * sphereDirection.y + sphereDirection.z * sphereDirection.z) - sphere.radius + + val p1 = center + val temp = direction.mul(sphereDist + 2.0f * sphere.radius) + val p2 = Vector3f(center).add(temp) + + val spine = (hedgehogs.children.last() as InstancedNode).addInstance() + spine.spatial().orientBetweenPoints(p1, p2, true, true) + spine.visible = false + + val intersection = volume.spatial().intersectAABB(p1, (p2 - p1).normalize(), true) + + if (volume.boundingBox?.isInside(cam.spatial().position)!!) { + logger.info("Can't track inside the volume! 
Please move out of the volume and try again") + return + } + if(intersection is MaybeIntersects.Intersection) { + // get local entry and exit coordinates, and convert to UV coords + val localEntry = (intersection.relativeEntry) + val localExit = (intersection.relativeExit) + // TODO We dont need the local direction for grid traversal, but its still in the spine metadata for now + val localDirection = Vector3f(0f) + val (samples, samplePos) = volume.sampleRayGridTraversal(localEntry, localExit) ?: (null to null) + val volumeScale = (volume as RAIVolume).getVoxelScale() + + if (samples != null && samplePos != null) { + val metadata = SpineMetadata( + timepoint, + center, + direction, + intersection.distance, + localEntry, + localExit, + localDirection, + cam.headPosition, + cam.headOrientation, + cam.spatial().position, + confidence, + samples.map { it ?: 0.0f }, + samplePos.map { it?.mul(volumeScale) ?: Vector3f(0f) } + ) + val count = samples.filterNotNull().count { it > 0.2f } + + spine.metadata["spine"] = metadata + spine.instancedProperties["ModelMatrix"] = { spine.spatial().world } + // TODO: Show confidence as color for the spine + spine.instancedProperties["Metadata"] = + { Vector4f(confidence, timepoint.toFloat() / volume.timepointCount, count.toFloat(), 0.0f) } + } + } + } + + /** + * Dumps a given hedgehog including created tracks to a file. + * If [hedgehog] is null, the last created hedgehog will be used, otherwise the given one. + * If [hedgehog] is not null, the cell track will not be added to the scene. 
+ */ + fun dumpHedgehog(){ + logger.info("dumping hedgehog...") + val lastHedgehog = hedgehogs.children.last() as InstancedNode + val hedgehogId = hedgehogIds.incrementAndGet() + + val hedgehogFile = sessionDirectory.resolve("Hedgehog_${hedgehogId}_${SystemHelpers.formatDateTime()}.csv").toFile() + val hedgehogFileWriter = hedgehogFile.bufferedWriter() + hedgehogFileWriter.write("Timepoint;Origin;Direction;LocalEntry;LocalExit;LocalDirection;HeadPosition;HeadOrientation;Position;Confidence;Samples\n") + + val trackFile = sessionDirectory.resolve("Tracks.tsv").toFile() + val trackFileWriter = BufferedWriter(FileWriter(trackFile, true)) + if(!trackFile.exists()) { + trackFile.createNewFile() + trackFileWriter.write("# BionicTracking cell track listing for ${sessionDirectory.fileName}\n") + trackFileWriter.write("# TIME\tX\tYt\t\tZ\tTRACK_ID\tPARENT_TRACK_ID\tSPOT\tLABEL\n") + } + + + val spines = lastHedgehog.instances.mapNotNull { spine -> + spine.metadata["spine"] as? SpineMetadata + } + + spines.forEach { metadata -> + hedgehogFileWriter.write("${metadata.timepoint};${metadata.origin};${metadata.direction};${metadata.localEntry};${metadata.localExit};${metadata.localDirection};${metadata.headPosition};${metadata.headOrientation};${metadata.position};${metadata.confidence};${metadata.samples.joinToString(";")}\n") + } + hedgehogFileWriter.close() + + val existingAnalysis = lastHedgehog.metadata["HedgehogAnalysis"] as? 
HedgehogAnalysis.Track + val track = if(existingAnalysis is HedgehogAnalysis.Track) { + existingAnalysis + } else { + val h = HedgehogAnalysis(spines, Matrix4f(volume.spatial().world)) + h.run() + } + + if(track == null) { + logger.warn("No track returned") + sciview.camera?.showMessage("No track returned", distance = 1.2f, size = 0.2f,messageColor = Vector4f(1.0f, 0.0f, 0.0f,1.0f)) + return + } + + lastHedgehog.metadata["HedgehogAnalysis"] = track + lastHedgehog.metadata["Spines"] = spines + + val parentId = 0 + val volumeDimensions = volume.getDimensions() + + trackFileWriter.newLine() + trackFileWriter.newLine() + trackFileWriter.write("# START OF TRACK $hedgehogId, child of $parentId\n") + if (trackCreationCallback != null && rebuildGeometryCallback != null) { + trackCreationCallback?.invoke(track.points, false, null, null) + rebuildGeometryCallback?.invoke() + } else { + logger.warn("Tried to send track data to Mastodon but couldn't find the callbacks!") + } + track.points.windowed(2, 1).forEach { pair -> + val p = Vector3f(pair[0].first).mul(Vector3f(volumeDimensions)) // direct product + val tp = pair[0].second.timepoint + trackFileWriter.write("$tp\t${p.x()}\t${p.y()}\t${p.z()}\t${hedgehogId}\t$parentId\t0\t0\n") + } + trackFileWriter.close() + } + + /** + * Stops the current tracking environment and restore the original state. + * This method should be overridden if functionality is extended, to make sure any extra objects are also deleted. 
+ */ + open fun stop() { + cellTrackingActive = false + lightTetrahedron.forEach { sciview.deleteNode(it) } + // Try to find and delete possibly existing VR objects + listOf("Shell", "leftHand", "rightHand").forEach { + val n = sciview.find(it) + n?.let { sciview.deleteNode(n) } + } + logger.info("Cleaned up basic VR objects.") + sciview.toggleVRRendering() + logger.info("Shut down eye tracking environment and disabled VR.") + } + +} \ No newline at end of file diff --git a/src/main/kotlin/sc/iview/commands/demo/advanced/CellTrackingButtonMapper.kt b/src/main/kotlin/sc/iview/commands/demo/advanced/CellTrackingButtonMapper.kt new file mode 100644 index 000000000..cadfe77e9 --- /dev/null +++ b/src/main/kotlin/sc/iview/commands/demo/advanced/CellTrackingButtonMapper.kt @@ -0,0 +1,163 @@ +package sc.iview.commands.demo.advanced + +import graphics.scenery.controls.OpenVRHMD +import graphics.scenery.controls.TrackerRole +import graphics.scenery.controls.OpenVRHMD.Manufacturer +import graphics.scenery.controls.OpenVRHMD.OpenVRButton +import graphics.scenery.utils.lazyLogger +import org.scijava.ui.behaviour.Behaviour + + +/** This input mapping manager provides several preconfigured profiles for different VR controller layouts. + * The active profile is stored in [currentProfile]. + * To change profile, call [loadProfile] with the new [Manufacturer] type. + * Note that for Quest-like layouts, the lower button always equals [OpenVRButton.A] + * and the upper button is always [OpenVRButton.Menu]. */ +object CellTrackingButtonMapper { + + var eyeTracking: ButtonConfig? = null + var controllerTracking: ButtonConfig? = null + var grabObserver: ButtonConfig? = null + var grabSpot: ButtonConfig? = null + var playback: ButtonConfig? = null + var cycleMenu: ButtonConfig? = null + var faster: ButtonConfig? = null + var slower: ButtonConfig? = null + var stepFwd: ButtonConfig? = null + var stepBwd: ButtonConfig? = null + var addDeleteReset: ButtonConfig? 
= null + var select: ButtonConfig? = null + var move_forward_fast: ButtonConfig? = null + var move_back_fast: ButtonConfig? = null + var move_left_fast: ButtonConfig? = null + var move_right_fast: ButtonConfig? = null + var radiusIncrease: ButtonConfig? = null + var radiusDecrease: ButtonConfig? = null + + private var currentProfile: Manufacturer = Manufacturer.Oculus + + val logger by lazyLogger(System.getProperty("scenery.LogLevel", "info")) + + private val profiles = mapOf( + Manufacturer.HTC to mapOf( + "eyeTracking" to ButtonConfig(TrackerRole.LeftHand, OpenVRButton.Trigger), + "controllerTracking" to ButtonConfig(TrackerRole.RightHand, OpenVRButton.Trigger), + "grabObserver" to ButtonConfig(TrackerRole.LeftHand, OpenVRButton.Side), + "grabSpot" to ButtonConfig(TrackerRole.RightHand, OpenVRButton.Side), + "playback" to ButtonConfig(TrackerRole.RightHand, OpenVRButton.Menu), + "cycleMenu" to ButtonConfig(TrackerRole.LeftHand, OpenVRButton.Menu), + "faster" to null, + "slower" to null, + "radiusIncrease" to null, + "radiusDecrease" to null, + "stepFwd" to ButtonConfig(TrackerRole.RightHand, OpenVRButton.Left), + "stepBwd" to ButtonConfig(TrackerRole.RightHand, OpenVRButton.Right), + "addDeleteReset" to ButtonConfig(TrackerRole.RightHand, OpenVRButton.Up), + "select" to ButtonConfig(TrackerRole.LeftHand, OpenVRButton.Down), + "move_forward_fast" to ButtonConfig(TrackerRole.LeftHand, OpenVRButton.Up), + "move_back_fast" to ButtonConfig(TrackerRole.LeftHand, OpenVRButton.Down), + "move_left_fast" to ButtonConfig(TrackerRole.LeftHand, OpenVRButton.Left), + "move_right_fast" to ButtonConfig(TrackerRole.LeftHand, OpenVRButton.Right), + ), + + Manufacturer.Oculus to mapOf( + "eyeTracking" to ButtonConfig(TrackerRole.LeftHand, OpenVRButton.Trigger), + "controllerTracking" to ButtonConfig(TrackerRole.RightHand, OpenVRButton.Trigger), + "grabObserver" to ButtonConfig(TrackerRole.LeftHand, OpenVRButton.Side), + "grabSpot" to ButtonConfig(TrackerRole.RightHand, 
OpenVRButton.Side), + "playback" to ButtonConfig(TrackerRole.LeftHand, OpenVRButton.A), + "cycleMenu" to ButtonConfig(TrackerRole.LeftHand, OpenVRButton.Menu), +// "faster" to ButtonConfig(TrackerRole.RightHand, OpenVRButton.Up), +// "slower" to ButtonConfig(TrackerRole.RightHand, OpenVRButton.Down), + "stepFwd" to ButtonConfig(TrackerRole.RightHand, OpenVRButton.Left), + "stepBwd" to ButtonConfig(TrackerRole.RightHand, OpenVRButton.Right), + "addDeleteReset" to ButtonConfig(TrackerRole.RightHand, OpenVRButton.Menu), + "select" to ButtonConfig(TrackerRole.RightHand, OpenVRButton.A), + "move_forward_fast" to ButtonConfig(TrackerRole.LeftHand, OpenVRButton.Up), + "move_back_fast" to ButtonConfig(TrackerRole.LeftHand, OpenVRButton.Down), + "move_left_fast" to ButtonConfig(TrackerRole.LeftHand, OpenVRButton.Left), + "move_right_fast" to ButtonConfig(TrackerRole.LeftHand, OpenVRButton.Right), + "radiusIncrease" to ButtonConfig(TrackerRole.RightHand, OpenVRButton.Up), + "radiusDecrease" to ButtonConfig(TrackerRole.RightHand, OpenVRButton.Down), + ) + ) + + init { + loadProfile(Manufacturer.Oculus) + } + + /** Load the current profile's button mapping */ + fun loadProfile(p: Manufacturer): Boolean { + currentProfile = p + val profile = profiles[currentProfile] ?: return false + eyeTracking = profile["eyeTracking"] + controllerTracking = profile["controllerTracking"] + grabObserver = profile["grabObserver"] + grabSpot = profile["grabSpot"] + playback = profile["playback"] + cycleMenu = profile["cycleMenu"] + faster = profile["faster"] + slower = profile["slower"] + stepFwd = profile["stepFwd"] + stepBwd = profile["stepBwd"] + addDeleteReset = profile["addDeleteReset"] + select = profile["select"] + move_forward_fast = profile["move_forward_fast"] + move_back_fast = profile["move_back_fast"] + move_left_fast = profile["move_left_fast"] + move_right_fast = profile["move_right_fast"] + radiusIncrease = profile["radiusIncrease"] + radiusDecrease = profile["radiusDecrease"] + 
return true + } + + fun getCurrentMapping(): Map?{ + return profiles[currentProfile] + } + + fun getMapFromName(name: String): ButtonConfig? { + return when (name) { + "eyeTracking" -> eyeTracking + "controllerTracking" -> controllerTracking + "grabObserver" -> grabObserver + "grabSpot" -> grabSpot + "playback" -> playback + "cycleMenu" -> cycleMenu + "faster" -> faster + "slower" -> slower + "stepFwd" -> stepFwd + "stepBwd" -> stepBwd + "addDeleteReset" -> addDeleteReset + "select" -> select + "move_forward_fast" -> move_forward_fast + "move_back_fast" -> move_back_fast + "move_left_fast" -> move_left_fast + "move_right_fast" -> move_right_fast + "radiusIncrease" -> radiusIncrease + "radiusDecrease" -> radiusDecrease + else -> null + } + } + + /** Sets a keybinding and behavior for an [hmd], using the [name] string, a [behavior] + * and the keybinding if found in the current profile. */ + fun setKeyBindAndBehavior(hmd: OpenVRHMD, name: String, behavior: Behaviour) { + val config = getMapFromName(name) + if (config != null) { + hmd.addKeyBinding(name, config.r, config.b) + hmd.addBehaviour(name, behavior) + logger.debug("Added behavior $behavior to ${config.r}, ${config.b}.") + } else { + logger.warn("No valid button mapping found for key '$name' in current profile!") + } + } +} + + +/** Combines the [TrackerRole] ([r]) and the [OpenVRHMD.OpenVRButton] ([b]) into a single configuration. */ +data class ButtonConfig ( + /** The [TrackerRole] of this button configuration. */ + var r: TrackerRole, + /** The [OpenVRButton] of this button configuration. 
*/ + var b: OpenVRButton +) \ No newline at end of file diff --git a/src/main/kotlin/sc/iview/commands/demo/advanced/ConfirmableClickBehaviour.kt b/src/main/kotlin/sc/iview/commands/demo/advanced/ConfirmableClickBehaviour.kt new file mode 100644 index 000000000..a846758ff --- /dev/null +++ b/src/main/kotlin/sc/iview/commands/demo/advanced/ConfirmableClickBehaviour.kt @@ -0,0 +1,50 @@ +package sc.iview.commands.demo.advanced + +import org.scijava.ui.behaviour.ClickBehaviour +import java.util.concurrent.atomic.AtomicBoolean +import kotlin.concurrent.thread + + +/** + * [ClickBehaviour] that waits [timeout] for confirmation by re-executing the behaviour. + * Executes [armedAction] on first invocation, and [confirmAction] on second invocation, if + * it happens within [timeout]. * If [delayedExecution] is true, + * the [armedAction] will only be executed after the [timeout] ran out and no second click was registered. + * @author Ulrik Guenther + */ +class ConfirmableClickBehaviour( + val armedAction: (Long) -> Any, + val confirmAction: (Long) -> Any, + var timeout: Long = 3000, + val delayedExecution: Boolean = false +) : ClickBehaviour { + /** Whether the action is armed at the moment. Action becomes disarmed after [timeout]. */ + private var armed: Boolean = false + + /** Whether the [confirmAction] was fired. Needed for [delayedExecution]. */ + private var confirmed: AtomicBoolean = AtomicBoolean(false) + + /** + * Action fired at position [x]/[y]. Parameters not used in VR actions. 
+ */ + override fun click(x: Int, y: Int) { + if (!armed) { + armed = true + if (!delayedExecution) { + armedAction.invoke(timeout) + } + + thread { + Thread.sleep(timeout) + armed = false + // Only trigger the delayed armedAction if no confirmedAction was triggered in the meantime + if (delayedExecution && !confirmed.get()) { + armedAction.invoke(timeout) + } + } + } else { + confirmed.set(true) + confirmAction.invoke(timeout) + } + } +} \ No newline at end of file diff --git a/src/main/kotlin/sc/iview/commands/demo/advanced/EyeTracking.kt b/src/main/kotlin/sc/iview/commands/demo/advanced/EyeTracking.kt new file mode 100644 index 000000000..811e9cba5 --- /dev/null +++ b/src/main/kotlin/sc/iview/commands/demo/advanced/EyeTracking.kt @@ -0,0 +1,295 @@ +package sc.iview.commands.demo.advanced + +import graphics.scenery.* +import graphics.scenery.controls.OpenVRHMD.OpenVRButton +import graphics.scenery.controls.TrackedDeviceType +import graphics.scenery.controls.TrackerRole +import graphics.scenery.controls.eyetracking.PupilEyeTracker +import graphics.scenery.primitives.Cylinder +import graphics.scenery.primitives.TextBoard +import graphics.scenery.textures.Texture +import graphics.scenery.ui.Button +import graphics.scenery.ui.Column +import graphics.scenery.ui.ToggleButton +import graphics.scenery.utils.SystemHelpers +import graphics.scenery.utils.extensions.minus +import graphics.scenery.utils.extensions.xyz +import graphics.scenery.utils.extensions.xyzw +import net.imglib2.type.numeric.integer.UnsignedByteType +import org.joml.* +import org.scijava.ui.behaviour.ClickBehaviour +import sc.iview.SciView +import java.awt.image.DataBufferByte +import java.io.ByteArrayInputStream +import java.nio.file.Files +import java.nio.file.Paths +import javax.imageio.ImageIO +import kotlin.concurrent.thread +import kotlin.math.PI + +/** + * Tracking class used for communicating with eye trackers, tracking cells with them in a sciview VR environment. 
+ * It calls the Hedgehog analysis on the eye tracking results and communicates the results to Mastodon via + * [trackCreationCallback], which is called on every spine graph vertex that is extracted + */ +class EyeTracking( + sciview: SciView +): CellTrackingBase(sciview) { + + val pupilTracker = PupilEyeTracker(calibrationType = PupilEyeTracker.CalibrationType.WorldSpace, port = System.getProperty("PupilPort", "50020").toInt()) + val calibrationTarget = Icosphere(0.02f, 2) + val laser = Cylinder(0.005f, 0.2f, 10) + + val confidenceThreshold = 0.60f + + private lateinit var debugBoard: TextBoard + + var leftEyeTrackColumn: Column? = null + + override fun run() { + // Do all the things for general VR startup before setting up the eye tracking environment + super.run() + + sessionId = "BionicTracking-generated-${SystemHelpers.formatDateTime()}" + sessionDirectory = Files.createDirectory(Paths.get(System.getProperty("user.home"), "Desktop", sessionId)) + + referenceTarget.visible = false + referenceTarget.ifMaterial{ + roughness = 1.0f + metallic = 0.0f + diffuse = Vector3f(0.8f, 0.8f, 0.8f) + } + referenceTarget.name = "Reference Target" + sciview.camera?.addChild(referenceTarget) + + calibrationTarget.visible = false + calibrationTarget.material { + roughness = 1.0f + metallic = 0.0f + diffuse = Vector3f(1.0f, 1.0f, 1.0f) + } + calibrationTarget.name = "Calibration Target" + sciview.camera?.addChild(calibrationTarget) + + laser.visible = false + laser.ifMaterial{diffuse = Vector3f(1.0f, 1.0f, 1.0f) } + laser.name = "Laser" + sciview.addNode(laser) + + val bb = BoundingGrid() + bb.node = volume + bb.visible = false + + sciview.addNode(hedgehogs) + + val eyeFrames = Mesh("eyeFrames") + val left = Box(Vector3f(1.0f, 1.0f, 0.001f)) + val right = Box(Vector3f(1.0f, 1.0f, 0.001f)) + left.spatial().position = Vector3f(-1.0f, 1.5f, 0.0f) + left.spatial().rotation = left.spatial().rotation.rotationZ(PI.toFloat()) + right.spatial().position = Vector3f(1.0f, 1.5f, 0.0f) + 
eyeFrames.addChild(left) + eyeFrames.addChild(right) + + sciview.addNode(eyeFrames) + + val pupilFrameLimit = 20 + var lastFrame = System.nanoTime() + + pupilTracker.subscribeFrames { eye, texture -> + if(System.nanoTime() - lastFrame < pupilFrameLimit*10e5) { + return@subscribeFrames + } + + val node = if(eye == 1) { + left + } else { + right + } + + val stream = ByteArrayInputStream(texture) + val image = ImageIO.read(stream) + val data = (image.raster.dataBuffer as DataBufferByte).data + + node.ifMaterial { + textures["diffuse"] = Texture( + Vector3i(image.width, image.height, 1), + 3, + UnsignedByteType(), + BufferUtils.allocateByteAndPut(data) + ) } + + lastFrame = System.nanoTime() + } + + // TODO: Replace with cam.showMessage() + debugBoard = TextBoard() + debugBoard.name = "debugBoard" + debugBoard.spatial().scale = Vector3f(0.05f, 0.05f, 0.05f) + debugBoard.spatial().position = Vector3f(0.0f, -0.3f, -0.9f) + debugBoard.text = "" + debugBoard.visible = false + sciview.camera?.addChild(debugBoard) + + hmd.events.onDeviceConnect.add { hmd, device, timestamp -> + if (device.type == TrackedDeviceType.Controller) { + setupEyeTracking() + setupEyeTrackingMenu() + } + } + + } + + + private fun setupEyeTracking() { + val cam = sciview.camera as? 
DetachedHeadCamera ?: return + + val toggleTracking = ClickBehaviour { _, _ -> + if (!pupilTracker.isCalibrated) { + logger.warn("Can't do eye tracking because eye trackers are not calibrated yet.") + return@ClickBehaviour + } + if (eyeTrackingActive) { + logger.info("deactivated tracking through user input.") + referenceTarget.ifMaterial { diffuse = Vector3f(0.5f, 0.5f, 0.5f) } + cam.showMessage("Tracking deactivated.",distance = 2f, size = 0.2f, centered = true) + dumpHedgehog() + playing = false + } else { + logger.info("activating tracking...") + playing = true + addHedgehog() + referenceTarget.ifMaterial { diffuse = Vector3f(1.0f, 0.0f, 0.0f) } + cam.showMessage("Tracking active.",distance = 2f, size = 0.2f, centered = true) + } + eyeTrackingActive = !eyeTrackingActive + } + + mapper.setKeyBindAndBehavior(hmd, "eyeTracking", toggleTracking) + } + + private fun calibrateEyeTrackers(force: Boolean = false) { + thread { + val cam = sciview.camera as? DetachedHeadCamera ?: return@thread + pupilTracker.gazeConfidenceThreshold = confidenceThreshold + if (!pupilTracker.isCalibrated || force) { + logger.info("Calibrating pupil trackers...") + + volume.visible = false + + pupilTracker.onCalibrationInProgress = { + cam.showMessage( + "Crunching equations ...", + distance = 2f, size = 0.2f, + messageColor = Vector4f(1.0f, 0.8f, 0.0f, 1.0f), + duration = 15000, centered = true + ) + } + + pupilTracker.onCalibrationFailed = { + cam.showMessage( + "Calibration failed.", + distance = 2f, size = 0.2f, + messageColor = Vector4f(1.0f, 0.0f, 0.0f, 1.0f), + centered = true + ) + } + + pupilTracker.onCalibrationSuccess = { + cam.showMessage( + "Calibration succeeded!", + distance = 2f, size = 0.2f, + messageColor = Vector4f(0.0f, 1.0f, 0.0f, 1.0f), + centered = true + ) + + for (i in 0 until 20) { + referenceTarget.ifMaterial{diffuse = Vector3f(0.0f, 1.0f, 0.0f) } + Thread.sleep(100) + referenceTarget.ifMaterial { diffuse = Vector3f(0.8f, 0.8f, 0.8f) } + Thread.sleep(30) + } + + 
if (!pupilTracker.isCalibrated) { + hmd.removeBehaviour("start_calibration") + hmd.removeKeyBinding("start_calibration") + } + + volume.visible = true + playing = false + } + + pupilTracker.unsubscribeFrames() + sciview.deleteNode(sciview.find("eyeFrames")) + + logger.info("Starting eye tracker calibration") + cam.showMessage("Follow the white rabbit.", distance = 2f, size = 0.2f,duration = 1500, centered = true) + + pupilTracker.calibrate(cam, hmd, + generateReferenceData = true, + calibrationTarget = calibrationTarget) + + pupilTracker.onGazeReceived = when (pupilTracker.calibrationType) { + + PupilEyeTracker.CalibrationType.WorldSpace -> { gaze -> + if (gaze.confidence > confidenceThreshold) { + val p = gaze.gazePoint() + referenceTarget.visible = true + // Pupil has mm units, so we divide by 1000 here to get to scenery units + referenceTarget.spatial().position = p + (cam.children.find { it.name == "debugBoard" } as? TextBoard)?.text = "${String.format("%.2f", p.x())}, ${String.format("%.2f", p.y())}, ${String.format("%.2f", p.z())}" + + val headCenter = cam.spatial().viewportToWorld(Vector2f(0.0f, 0.0f)) + val pointWorld = Matrix4f(cam.spatial().world).transform(p.xyzw()).xyz() + val direction = (pointWorld - headCenter).normalize() + + if (eyeTrackingActive) { + addSpine(headCenter, direction, volume, gaze.confidence, volume.viewerState.currentTimepoint) + } + } + } + } + logger.info("Calibration routine done.") + } + } + } + + private fun setupEyeTrackingMenu() { + + val calibrateButton = Button("Calibrate", + command = { calibrateEyeTrackers() }, + byTouch = true, depressDelay = 500) + + val toggleHedgehogsBtn = ToggleButton( + "Hedgehogs Off", + "Hedgehogs On", + command = { + hedgehogVisibility = if (hedgehogVisibility == HedgehogVisibility.Hidden) { + HedgehogVisibility.PerTimePoint + } else { + HedgehogVisibility.Hidden + } + }, + byTouch = true + ) + leftEyeTrackColumn = createWristMenuColumn(toggleHedgehogsBtn, calibrateButton, name = "Eye Tracking 
Menu") + leftEyeTrackColumn?.visible = false + } + + /** Toggles the VR rendering off, cleans up eyetracking-related scene objects and removes the light tetrahedron + * that was created for the calibration routine. */ + override fun stop() { + + pupilTracker.unsubscribeFrames() + logger.info("Stopped volume and hedgehog updater thread.") + val n = sciview.find("eyeFrames") + n?.let { sciview.deleteNode(it) } + // Delete definitely existing objects + listOf(referenceTarget, calibrationTarget, laser, debugBoard, hedgehogs).forEach { + sciview.deleteNode(it) + } + logger.info("Successfully cleaned up eye tracking environemt.") + super.stop() + } + +} \ No newline at end of file diff --git a/src/main/kotlin/sc/iview/commands/demo/advanced/EyeTrackingCommand.kt b/src/main/kotlin/sc/iview/commands/demo/advanced/EyeTrackingCommand.kt new file mode 100644 index 000000000..97d927c5f --- /dev/null +++ b/src/main/kotlin/sc/iview/commands/demo/advanced/EyeTrackingCommand.kt @@ -0,0 +1,48 @@ +package sc.iview.commands.demo.advanced + +import graphics.scenery.* +import org.joml.* +import org.scijava.command.Command +import org.scijava.command.CommandService +import org.scijava.plugin.Menu +import org.scijava.plugin.Parameter +import org.scijava.plugin.Plugin +import sc.iview.SciView +import sc.iview.commands.MenuWeights +import sc.iview.commands.demo.advanced.HedgehogAnalysis.SpineGraphVertex +import java.util.HashMap + +@Plugin( + type = Command::class, + menuRoot = "SciView", + menu = [Menu(label = "Demo", weight = MenuWeights.DEMO), + Menu(label = "Advanced", weight = MenuWeights.DEMO_ADVANCED), + Menu(label = "Utilize Eye Tracker for Cell Tracking", weight = MenuWeights.DEMO_ADVANCED_EYETRACKING)] +) +/** + * Command class that forwards to the [EyeTracking] class to perform the actual tracking and analysis. 
+ */ +class EyeTrackingCommand : Command { + + @Parameter + private lateinit var sv: SciView + + override fun run() { + // the actual eye tracking logic happens in here + val eyeTracking = EyeTracking(sv) + eyeTracking.run() + } + + companion object { + + @Throws(Exception::class) + @JvmStatic + fun main(args: Array) { + val sv = SciView.create() + val command = sv.scijavaContext!!.getService(CommandService::class.java) + val argmap = HashMap() + argmap["sv"] = sv + command.run(EyeTrackingCommand::class.java, true, argmap) + } + } +} \ No newline at end of file diff --git a/src/main/kotlin/sc/iview/commands/demo/advanced/HedgehogAnalysis.kt b/src/main/kotlin/sc/iview/commands/demo/advanced/HedgehogAnalysis.kt new file mode 100644 index 000000000..0d9c068cd --- /dev/null +++ b/src/main/kotlin/sc/iview/commands/demo/advanced/HedgehogAnalysis.kt @@ -0,0 +1,445 @@ +package sc.iview.commands.demo.advanced + +import org.joml.Vector3f +import org.joml.Matrix4f +import org.joml.Quaternionf +import graphics.scenery.utils.extensions.* +import graphics.scenery.utils.lazyLogger +import org.slf4j.LoggerFactory +import java.io.File +import kotlin.math.sqrt + +/** + * Performs analysis over a collection of eye-tracking spines (aka hedgehog). Extracts a list of local maxima from + * the sampled volume, removes statistical outliers and performs a graph optimization over the remaining maxima to + * extract the likeliest path of the cell. The companion object contains methods to load CSV files. 
+ * @author Ulrik Günther + */ +class HedgehogAnalysis(val spines: List, val localToWorld: Matrix4f) { + + private val logger by lazyLogger() + + val timepoints = LinkedHashMap>() + + var avgConfidence = 0.0f + private set + var totalSampleCount = 0 + private set + + data class Track( + val points: List>, + val confidence: Float + ) + + init { + logger.info("Starting analysis with ${spines.size} spines") + + spines.forEach { spine -> + val timepoint = spine.timepoint + val current = timepoints[timepoint] + + if(current == null) { + timepoints[timepoint] = arrayListOf(spine) + } else { + current.add(spine) + } + + avgConfidence += spine.confidence + totalSampleCount++ + } + + avgConfidence /= totalSampleCount + } + + /** + * From a [list] of Floats, return both the index of local maxima, and their value, + * packaged nicely as a Pair + */ + private fun localMaxima(list: List): List> { + return list.windowed(3, 1).mapIndexed { index, l -> + val left = l[0] + val center = l[1] + val right = l[2] + + // we have a match at center + if (left < center && center > right) { + index * 1 + 1 to center + } else { + null + } + }.filterNotNull() + } + + data class SpineGraphVertex(val timepoint: Int, + val position: Vector3f, + val worldPosition: Vector3f, + val index: Int, + val value: Float, + val metadata : SpineMetadata? = null, + var previous: SpineGraphVertex? = null, + var next: SpineGraphVertex? 
= null) { + + fun distance(): Float { + val n = next + return if(n != null) { + val t = (n.worldPosition - this.worldPosition) + sqrt(t.x*t.x + t.y*t.y + t.z*t.z) + } else { + 0.0f + } + } + + fun drop() { + previous?.next = next + next?.previous = previous + } + + override fun toString() : String { + return "SpineGraphVertex for t=$timepoint, pos=$position,index=$index, worldPos=$worldPosition, value=$value" + } + } + + fun Iterable.stddev() = sqrt((this.map { (it - this.average()) * (it - this.average()) }.sum() / this.count())) + + fun Vector3f.toQuaternionf(forward: Vector3f = Vector3f(0.0f, 0.0f, -1.0f)): Quaternionf { + val cross = forward.cross(this) + val q = Quaternionf(cross.x(), cross.y(), cross.z(), this.dot(forward)) + + val x = sqrt((q.w + sqrt(q.x*q.x + q.y*q.y + q.z*q.z + q.w*q.w)) / 2.0f) + + return Quaternionf(q.x/(2.0f * x), q.y/(2.0f * x), q.z/(2.0f * x), x) + } + + data class VertexWithDistance(val vertex: SpineGraphVertex, val distance: Float) + + fun run(): Track? 
{ + + // Adapt thresholds based on data from the first spine + val startingThreshold = timepoints.entries.first().value.first.samples.min() * 2f + 0.002f + val localMaxThreshold = timepoints.entries.first().value.first.samples.max() * 0.2f + val zscoreThreshold = 2.0f + val removeTooFarThreshold = 5.0f + + if(timepoints.isEmpty()) { + return null + } + + + //step1: find the startingPoint by using startingThreshold + val startingPoint = timepoints.entries.firstOrNull { entry -> + entry.value.any { metadata -> metadata.samples.any { it > startingThreshold } } + } ?: return null + + logger.info("Starting point is ${startingPoint.key}/${timepoints.size} (threshold=$startingThreshold), localMayThreshold=$localMaxThreshold") + + // filter timepoints, remove all before the starting point + timepoints.filter { it.key > startingPoint.key } + .forEach { timepoints.remove(it.key) } + + // Stop timepoints after reaching 0 + val result = mutableMapOf>() + var foundZero = false + + for ((time, value) in timepoints) { + if (foundZero) { + break + } + result[time] = value + if (time == 0) { + foundZero = true + } + } + timepoints.clear() + timepoints.putAll(result) + + logger.info("${timepoints.size} timepoints left") + + fun gaussSmoothing(samples: List, iterations: Int): List { + var smoothed = samples.toList() + val kernel = listOf(0.25f, 0.5f, 0.25f) + for (i in 0 until iterations) { + val newSmoothed = ArrayList(smoothed.size) + // Handle the first element + newSmoothed.add(smoothed[0] * 0.75f + smoothed[1] * 0.25f) + // Apply smoothing to the middle elements + for (j in 1 until smoothed.size - 1) { + val value = kernel[0] * smoothed[j-1] + kernel[1] * smoothed[j] + kernel[2] * smoothed[j+1] + newSmoothed.add(value) + } + // Handle the last element + newSmoothed.add(smoothed[smoothed.size - 2] * 0.25f + smoothed[smoothed.size - 1] * 0.75f) + + smoothed = newSmoothed + } + return smoothed + } + + // step2: find the maxIndices along the spine + // this will be a list of lists, 
where each entry in the first-level list + // corresponds to a time point, which then contains a list of vertices within that timepoint. + val candidates: List> = timepoints.map { tp -> + val vs = tp.value.mapIndexedNotNull { i, spine -> + // First apply a subtle smoothing kernel to prevent many close/similar local maxima + val smoothedSamples = gaussSmoothing(spine.samples, 4) + // determine local maxima (and their indices) along the spine, aka, actual things the user might have + // seen when looking into the direction of the spine + val maxIndices = localMaxima(smoothedSamples) + logger.debug("Local maxima at ${tp.key}/$i are: ${maxIndices.joinToString(",")}") + + // if there actually are local maxima, generate a graph vertex for them with all the necessary metadata + if(maxIndices.isNotEmpty()) { + //maxIndices. +// filter the maxIndices which are too far away, which can be removed + //filter { it.first <1200}. + maxIndices.map { index -> + logger.debug("Generating vertex at index $index") + // get the position of the current index along the spine + val position = spine.samplePosList[index.first] + val worldPosition = localToWorld.transform((Vector3f(position)).xyzw()).xyz() + SpineGraphVertex(tp.key, + position, + worldPosition, + index.first, + index.second, + spine) + } + } else { + null + } + } + vs + }.flatten() + + logger.info("SpineGraphVertices extracted") + + // step3: connect localMaximal points between 2 candidate spines according to the shortest path principle + // get the initial vertex, this one is assumed to always be in front, and have a local maximum - aka, what + // the user looks at first is assumed to be the actual cell they want to track + val initial = candidates.first().first { it.value > startingThreshold } + var current = initial + var shortestPath = candidates.drop(1).mapIndexedNotNull { time, vs -> + // calculate world-space distances between current point, and all candidate + // vertices, sorting them by distance + val vertices = vs 
+ .filter { it.value > localMaxThreshold } + .map { vertex -> + val t = current.worldPosition - vertex.worldPosition + val distance = t.length() + VertexWithDistance(vertex, distance) + } + .sortedBy { it.distance } + + val closest = vertices.firstOrNull() + if(closest != null && closest.distance > 0) { + // create a linked list between current and closest vertices + current.next = closest.vertex + closest.vertex.previous = current + current = closest.vertex + current + } else { + null + } + }.toMutableList() + + // calculate average path lengths over all + val beforeCount = shortestPath.size + var avgPathLength = shortestPath.map { it.distance() }.average().toFloat() + var stdDevPathLength = shortestPath.map { it.distance() }.stddev().toFloat() + logger.info("Average path length=$avgPathLength, stddev=$stdDevPathLength") + + fun zScore(value: Float, m: Float, sd: Float) = ((value - m)/sd) + + //step4: if some path is longer than multiple average length, it should be removed + // TODO Don't remove vertices along the path, as that doesn't translate well to Mastodon tracks. Find a different way? 
+// while (shortestPath.any { it.distance() >= removeTooFarThreshold * avgPathLength }) { +// shortestPath = shortestPath.filter { it.distance() < removeTooFarThreshold * avgPathLength }.toMutableList() +// shortestPath.windowed(3, 1, partialWindows = true).forEach { +// // this reconnects the neighbors after the offending vertex has been removed +// it.getOrNull(0)?.next = it.getOrNull(1) +// it.getOrNull(1)?.previous = it.getOrNull(0) +// it.getOrNull(1)?.next = it.getOrNull(2) +// it.getOrNull(2)?.previous = it.getOrNull(1) +// } +// } + + // recalculate statistics after offending vertex removal + avgPathLength = shortestPath.map { it.distance() }.average().toFloat() + stdDevPathLength = shortestPath.map { it.distance() }.stddev().toFloat() + + //step5: remove some vertices according to zscoreThreshold +// var remaining = shortestPath.count { zScore(it.distance(), avgPathLength, stdDevPathLength) > zscoreThreshold } +// logger.info("Iterating: ${shortestPath.size} vertices remaining, with $remaining failing z-score criterion") +// while(remaining > 0) { +// val outliers = shortestPath +// .filter { zScore(it.distance(), avgPathLength, stdDevPathLength) > zscoreThreshold } +// .map { +// val idx = shortestPath.indexOf(it) +// listOf(idx-1,idx,idx+1) +// }.flatten() +// +// shortestPath = shortestPath.filterIndexed { index, _ -> index !in outliers }.toMutableList() +// remaining = shortestPath.count { zScore(it.distance(), avgPathLength, stdDevPathLength) > zscoreThreshold } +// +// shortestPath.windowed(3, 1, partialWindows = true).forEach { +// it.getOrNull(0)?.next = it.getOrNull(1) +// it.getOrNull(1)?.previous = it.getOrNull(0) +// it.getOrNull(1)?.next = it.getOrNull(2) +// it.getOrNull(2)?.previous = it.getOrNull(1) +// } +// logger.info("Iterating: ${shortestPath.size} vertices remaining, with $remaining failing z-score criterion") +// } + +// val afterCount = shortestPath.size +// logger.info("Pruned ${beforeCount - afterCount} vertices due to path 
length") + val singlePoints = shortestPath + .groupBy { it.timepoint } + .mapNotNull { vs -> vs.value.maxByOrNull{ it.metadata?.confidence ?: 0f } } + .filter { + (it.metadata?.direction?.dot(it.previous!!.metadata?.direction) ?: 0f) > 0.5f + } + + + logger.info("Returning ${singlePoints.size} points") + + + return Track(singlePoints.map { it.position to it}, avgConfidence) + } + + companion object { + private val logger by lazyLogger(System.getProperty("scenery.LogLevel", "info")) + + fun fromIncompleteCSV(csv: File, separator: String = ","): HedgehogAnalysis { + logger.info("Loading spines from incomplete CSV at ${csv.absolutePath}") + + val lines = csv.readLines() + val spines = ArrayList(lines.size) + + lines.drop(1).forEach { line -> + val tokens = line.split(separator) + val timepoint = tokens[0].toInt() + val confidence = tokens[1].toFloat() + val samples = tokens.subList(2, tokens.size - 1).map { it.toFloat() } + + val currentSpine = SpineMetadata( + timepoint, + Vector3f(0.0f), + Vector3f(0.0f), + 0.0f, + Vector3f(0.0f), + Vector3f(0.0f), + Vector3f(0.0f), + Vector3f(0.0f), + Quaternionf(), + Vector3f(0.0f), + confidence, + samples) + + spines.add(currentSpine) + } + + return HedgehogAnalysis(spines, Matrix4f()) + } + + private fun String.toVector3f(): Vector3f { + val array = this.replace("(", "").replace(")", "").trim().split(" ").filterNot { it == ""} + + if (array[0] == "+Inf" || array[0] == "-Inf") + return Vector3f(0.0f,0.0f,0.0f) + + return Vector3f(array[0].toFloat(),array[1].toFloat(),array[2].toFloat()) + } + + private fun String.toQuaternionf(): Quaternionf { + val array = this.replace("(", "").replace(")", "").trim().split(" ").filterNot { it == ""} + return Quaternionf(array[0].toFloat(), array[1].toFloat(), array[2].toFloat(), array[3].toFloat()) + } + fun fromCSVWithMatrix(csv: File, matrix4f: Matrix4f,separator: String = ";"): HedgehogAnalysis { + logger.info("Loading spines from complete CSV with Matrix at ${csv.absolutePath}") + + val 
lines = csv.readLines() + val spines = ArrayList(lines.size) + logger.info("lines number: " + lines.size) + lines.drop(1).forEach { line -> + val tokens = line.split(separator) + val timepoint = tokens[0].toInt() + val origin = tokens[1].toVector3f() + val direction = tokens[2].toVector3f() + val localEntry = tokens[3].toVector3f() + val localExit = tokens[4].toVector3f() + val localDirection = tokens[5].toVector3f() + val headPosition = tokens[6].toVector3f() + val headOrientation = tokens[7].toQuaternionf() + val position = tokens[8].toVector3f() + val confidence = tokens[9].toFloat() + val samples = tokens.subList(10, tokens.size - 1).map { it.toFloat() } + + val currentSpine = SpineMetadata( + timepoint, + origin, + direction, + 0.0f, + localEntry, + localExit, + localDirection, + headPosition, + headOrientation, + position, + confidence, + samples) + + spines.add(currentSpine) + } + + return HedgehogAnalysis(spines, matrix4f) + } + + fun fromCSV(csv: File, separator: String = ";"): HedgehogAnalysis { + logger.info("Loading spines from complete CSV at ${csv.absolutePath}") + + val lines = csv.readLines() + val spines = ArrayList(lines.size) + + lines.drop(1).forEach { line -> + val tokens = line.split(separator) + val timepoint = tokens[0].toInt() + val origin = tokens[1].toVector3f() + val direction = tokens[2].toVector3f() + val localEntry = tokens[3].toVector3f() + val localExit = tokens[4].toVector3f() + val localDirection = tokens[5].toVector3f() + val headPosition = tokens[6].toVector3f() + val headOrientation = tokens[7].toQuaternionf() + val position = tokens[8].toVector3f() + val confidence = tokens[9].toFloat() + val samples = tokens.subList(10, tokens.size - 1).map { it.toFloat() } + + val currentSpine = SpineMetadata( + timepoint, + origin, + direction, + 0.0f, + localEntry, + localExit, + localDirection, + headPosition, + headOrientation, + position, + confidence, + samples) + + spines.add(currentSpine) + } + + return HedgehogAnalysis(spines, 
Matrix4f()) + } + } +} + +fun main(args: Array) { + val logger = LoggerFactory.getLogger("HedgehogAnalysisMain") + // main should only be called for testing purposes + val file = File("C:/path/to/your/test/CSV") + val analysis = HedgehogAnalysis.fromCSV(file) + val results = analysis.run() + logger.info("Results: \n$results") +} diff --git a/src/main/kotlin/sc/iview/commands/demo/advanced/SpineMetadata.kt b/src/main/kotlin/sc/iview/commands/demo/advanced/SpineMetadata.kt new file mode 100644 index 000000000..546c23573 --- /dev/null +++ b/src/main/kotlin/sc/iview/commands/demo/advanced/SpineMetadata.kt @@ -0,0 +1,23 @@ +package sc.iview.commands.demo.advanced + +import org.joml.Quaternionf +import org.joml.Vector3f + +/** + * Data class to store metadata for spines of the hedgehog. + */ +data class SpineMetadata( + val timepoint: Int, + val origin: Vector3f, + val direction: Vector3f, + val distance: Float, + val localEntry: Vector3f, + val localExit: Vector3f, + val localDirection: Vector3f, + val headPosition: Vector3f, + val headOrientation: Quaternionf, + val position: Vector3f, + val confidence: Float, + val samples: List, + val samplePosList: List = ArrayList() +) \ No newline at end of file diff --git a/src/main/kotlin/sc/iview/commands/demo/advanced/TimepointObserver.kt b/src/main/kotlin/sc/iview/commands/demo/advanced/TimepointObserver.kt new file mode 100644 index 000000000..130bc4ce0 --- /dev/null +++ b/src/main/kotlin/sc/iview/commands/demo/advanced/TimepointObserver.kt @@ -0,0 +1,13 @@ +package sc.iview.commands.demo.advanced + +/** + * Interface to allow subscription to timepoint updates, especially for updating sciview contents + * after a user triggered a timepoint change via controller input. + */ +interface TimepointObserver { + + /** + * Called when the timepoint was updated. 
+ */ + fun onTimePointChanged(timepoint: Int) +} \ No newline at end of file diff --git a/src/main/kotlin/sc/iview/commands/demo/advanced/TrackingDragBehaviour.kt b/src/main/kotlin/sc/iview/commands/demo/advanced/TrackingDragBehaviour.kt new file mode 100644 index 000000000..ce7b42b99 --- /dev/null +++ b/src/main/kotlin/sc/iview/commands/demo/advanced/TrackingDragBehaviour.kt @@ -0,0 +1,17 @@ +package sc.iview.commands.demo.advanced + +import org.scijava.ui.behaviour.DragBehaviour + +class TrackingDragBehaviour():DragBehaviour{ + override fun init(x: Int, y: Int) { + TODO("Not yet implemented") + } + + override fun drag(x: Int, y: Int) { + TODO("Not yet implemented") + } + + override fun end(x: Int, y: Int) { + TODO("Not yet implemented") + } +} \ No newline at end of file diff --git a/src/main/kotlin/sc/iview/commands/demo/advanced/TrackingTest.kt b/src/main/kotlin/sc/iview/commands/demo/advanced/TrackingTest.kt new file mode 100644 index 000000000..bafca1bf9 --- /dev/null +++ b/src/main/kotlin/sc/iview/commands/demo/advanced/TrackingTest.kt @@ -0,0 +1,244 @@ +package sc.iview.commands.demo.advanced + +import graphics.scenery.* +import graphics.scenery.attribute.material.Material +import graphics.scenery.numerics.Random +import graphics.scenery.primitives.Cylinder +import graphics.scenery.primitives.TextBoard +import graphics.scenery.utils.MaybeIntersects +import graphics.scenery.utils.SystemHelpers +import graphics.scenery.utils.extensions.minus +import graphics.scenery.utils.extensions.xyz +import graphics.scenery.utils.extensions.xyzw +import graphics.scenery.volumes.Volume +import org.joml.* +import sc.iview.SciView +import java.io.File +import java.nio.file.Files +import java.nio.file.Paths +import java.text.DecimalFormat +import kotlin.concurrent.thread + +/** + * A class to test to show tracks and perform track analysis from saved CSV tracking files without + * the requirement of a VR headset. 
/**
 * Test version of the eye-tracking-based cell tracking workflow that runs
 * without VR hardware or an eye tracker: two fixed spheres ([point1]/[point2])
 * stand in for the head position and the gaze target.
 */
class TrackingTest(
    sciview: SciView
) : CellTrackingBase(sciview) {

    val TestTarget = Icosphere(0.1f, 2)

    /** Visual stand-in for the gaze ray. */
    val laser = Cylinder(0.005f, 0.2f, 10)

    // Fixed stand-ins for head centre and gaze target, initialised in run().
    lateinit var point1: Icosphere
    lateinit var point2: Icosphere

    val confidenceThreshold = 0.60f

    override fun run() {
        sciview.addNode(TestTarget)
        TestTarget.visible = false

        sessionId = "BionicTracking-generated-${SystemHelpers.formatDateTime()}"
        sessionDirectory = Files.createDirectory(Paths.get(System.getProperty("user.home"), "Desktop", sessionId))

        referenceTarget.visible = false
        referenceTarget.ifMaterial {
            roughness = 1.0f
            metallic = 0.0f
            diffuse = Vector3f(0.8f, 0.8f, 0.8f)
        }
        sciview.camera!!.addChild(referenceTarget)

        laser.visible = false
        laser.ifMaterial { diffuse = Vector3f(1.0f, 1.0f, 1.0f) }
        sciview.addNode(laser)

        // Inside-out box enclosing the scene so the background is not empty.
        val shell = Box(Vector3f(20.0f, 20.0f, 20.0f), insideNormals = true)
        shell.ifMaterial {
            cullingMode = Material.CullingMode.Front
            diffuse = Vector3f(0.4f, 0.4f, 0.4f)
        }
        shell.name = "shell"
        shell.spatial().position = Vector3f(0.0f, 0.0f, 0.0f)
        sciview.addNode(shell)

        // Assumes a node named "volume" was loaded beforehand -- TODO confirm the caller guarantees this.
        volume = sciview.find("volume") as Volume
        volume.visible = false

        point1 = Icosphere(0.1f, 2)
        point1.spatial().position = Vector3f(1.858f, 2f, 8.432f)
        point1.ifMaterial { diffuse = Vector3f(0.5f, 0.3f, 0.8f) }
        sciview.addNode(point1)

        point2 = Icosphere(0.1f, 2)
        point2.spatial().position = Vector3f(1.858f, 2f, -10.39f)
        point2.ifMaterial { diffuse = Vector3f(0.3f, 0.8f, 0.3f) }
        sciview.addNode(point2)

        val connector = Cylinder.betweenPoints(point1.spatial().position, point2.spatial().position)
        connector.ifMaterial { diffuse = Vector3f(1.0f, 1.0f, 1.0f) }
        sciview.addNode(connector)

        val bb = BoundingGrid()
        bb.node = volume
        bb.visible = false

        sciview.addNode(hedgehogs)

        val pupilFrameLimit = 20
        var lastFrame = System.nanoTime()

        val debugBoard = TextBoard()
        debugBoard.name = "debugBoard"
        debugBoard.spatial().scale = Vector3f(0.05f, 0.05f, 0.05f)
        debugBoard.spatial().position = Vector3f(0.0f, -0.3f, -0.9f)
        debugBoard.text = ""
        debugBoard.visible = false
        sciview.camera?.addChild(debugBoard)

        val lights = Light.createLightTetrahedron(Vector3f(0.0f, 0.0f, 0.0f), spread = 5.0f, radius = 15.0f, intensity = 5.0f)
        lights.forEach { sciview.addNode(it) }

        thread {
            inputSetup()
        }

        launchUpdaterThread()
    }

    override fun inputSetup() {
        // Fail early when no camera is present; tracking is meaningless without one.
        sciview.camera ?: throw IllegalStateException("Could not find camera")
        setupControllerforTracking()
    }

    private fun setupControllerforTracking(keybindingTracking: String = "U") {
        thread {
            // Same guard as the VR variants: only proceed with a DetachedHeadCamera.
            if (sciview.camera !is DetachedHeadCamera) return@thread
            volume.visible = true
            volume.runRecursive { it.visible = true }
            playing = true
            eyeTrackingActive = true

            // Simulate a single gaze sample: point1 acts as the head centre,
            // point2 as the gaze target. (Removed a dead `if(true)` wrapper.)
            val headCenter = point1.spatial().position
            val pointWorld = point2.spatial().position
            val direction = (pointWorld - headCenter).normalize()

            if (eyeTrackingActive) {
                addSpine(headCenter, direction, volume, 0.8f, volume.viewerState.currentTimepoint)
                showTrack()
            }
            Thread.sleep(200)
        }
    }

    private fun showTrack() {
        // TODO: hard-coded, machine-specific CSV path -- replace with a file chooser or parameter.
        val file = File("C:\\Users\\lanru\\Desktop\\BionicTracking-generated-2022-10-19 13.48.51\\Hedgehog_1_2022-10-19 13.49.41.csv")

        val selfDefinedWorldMatrix = volume.spatial().world
        val analysis = HedgehogAnalysis.fromCSVWithMatrix(file, selfDefinedWorldMatrix)
        println("volume.getDimensions(): " + volume.getDimensions())
        println("volume.spatial().world: " + volume.spatial().world)

        // Nothing to display when the analysis produced no track.
        val track = analysis.run() ?: return

        // Master cylinder that every track-segment instance is derived from.
        val master = Cylinder(0.1f, 1.0f, 10)
        master.setMaterial(ShaderMaterial.fromFiles("DefaultDeferredInstanced.vert", "DefaultDeferred.frag"))
        master.ifMaterial {
            ambient = Vector3f(0.1f, 0f, 0f)
            diffuse = Random.random3DVectorFromRange(0.2f, 0.8f)
            metallic = 0.01f
            roughness = 0.5f
        }

        val mInstanced = InstancedNode(master)
        sciview.addNode(mInstanced)

        // One cylinder instance per consecutive pair of track points, plus a
        // sphere marker at the first point of each pair.
        track.points.windowed(2, 1).forEach { pair ->
            val element = mInstanced.addInstance()
            val p0 = Vector3f(pair[0].first)
            val p1 = Vector3f(pair[1].first)
            // Transform local track coordinates into world space via the volume's world matrix.
            val p0w = Matrix4f(volume.spatial().world).transform(p0.xyzw()).xyz()
            val p1w = Matrix4f(volume.spatial().world).transform(p1.xyzw()).xyz()
            element.spatial().orientBetweenPoints(p0w, p1w, rescale = true, reposition = true)

            val tp = pair[0].second.timepoint
            val pp = Icosphere(0.1f, 1)
            pp.name = "trackpoint_${tp}_${pair[0].first.x}_${pair[0].first.y}_${pair[0].first.z}"
            println("the local position of the point is:" + pair[0].first)
            println("the world position of the point is: " + p0w)
            pp.spatial().position = p0w
            pp.material().diffuse = Vector3f(0.5f, 0.3f, 0.8f)
            sciview.addNode(pp)
        }
    }

    override fun addSpine(center: Vector3f, direction: Vector3f, volume: Volume, confidence: Float, timepoint: Int) {
        val cam = sciview.camera as? DetachedHeadCamera ?: return
        val sphere = volume.boundingBox?.getBoundingSphere() ?: return

        // Extend the ray far enough to pass completely through the volume's bounding sphere.
        val sphereDirection = sphere.origin.minus(center)
        val sphereDist = Math.sqrt(
            sphereDirection.x * sphereDirection.x +
                sphereDirection.y * sphereDirection.y +
                sphereDirection.z * sphereDirection.z
        ) - sphere.radius

        val p1 = center
        val temp = direction.mul(sphereDist + 2.0f * sphere.radius)
        val p2 = Vector3f(center).add(temp)

        val intersection = volume.spatial().intersectAABB(p1, (p2 - p1).normalize())
        println(intersection)
        if (intersection is MaybeIntersects.Intersection) {
            // Local (volume-space) entry and exit coordinates of the ray.
            val localEntry = intersection.relativeEntry
            val localExit = intersection.relativeExit
            val nf = DecimalFormat("0.0000")
            println("Ray intersects volume at world=${intersection.entry.toString(nf)}/${intersection.exit.toString(nf)} local=${localEntry.toString(nf)}/${localExit.toString(nf)} ")

            val (samples, localDirection) = volume.sampleRay(localEntry, localExit) ?: (null to null)

            if (samples != null && localDirection != null) {
                val metadata = SpineMetadata(
                    timepoint,
                    center,
                    direction,
                    intersection.distance,
                    localEntry,
                    localExit,
                    localDirection,
                    cam.headPosition,
                    cam.headOrientation,
                    cam.position,
                    confidence,
                    samples.map { it ?: 0.0f }
                )
                val count = samples.filterNotNull().count { it > 0.002f }

                logger.info("count of samples: $count")
                // BUGFIX: was `samples.joinToString { ", " }`, which passes a
                // transform lambda mapping every sample to the literal ", "
                // (logging ", , , …"). The separator overload joins the values.
                logger.info(samples.joinToString(", "))
            }
        }
    }
}
/**
 * Menu command that launches [TrackingTest], the cell-tracking workflow test
 * that requires neither VR hardware nor an eye tracker.
 */
@Plugin(type = Command::class,
    menuRoot = "SciView",
    menu = [Menu(label = "Demo", weight = MenuWeights.DEMO),
        Menu(label = "Advanced", weight = MenuWeights.DEMO_ADVANCED),
        Menu(label = "Test without VR and Eye Tracker", weight = MenuWeights.DEMO_ADVANCED_EYETRACKING)])
class TrackingTestCommand : Command {

    @Parameter
    private lateinit var sv: SciView

    override fun run() {
        TrackingTest(sv).run()
    }

    companion object {

        /** Standalone entry point: starts sciview and invokes this command. */
        @Throws(Exception::class)
        @JvmStatic
        fun main(args: Array<String>) {
            val sv = SciView.create()
            val command = sv.scijavaContext!!.getService(CommandService::class.java)
            // FIX: generic type arguments restored after paste mangling.
            val argmap = HashMap<String, Any>()
            argmap["sv"] = sv
            command.run(TrackingTestCommand::class.java, true, argmap)
        }
    }
}
/**
 * This class utilizes VR controllers to track cells in volumetric datasets in a sciview environment.
 */
class VRControllerTracking(
    sciview: SciView
) : CellTrackingBase(sciview) {

    // Debug markers for the controller tip and the projected target point.
    val testTarget1 = Icosphere(0.01f, 2)
    val testTarget2 = Icosphere(0.04f, 2)

    /** Beam rendered between the controller tip and the target point. */
    val laser = Cylinder(0.0025f, 1f, 20)

    lateinit var rightController: TrackedDevice

    // FIX: generic argument restored after paste mangling -- TODO confirm element type.
    var hedgehogsList = mutableListOf<InstancedNode>()

    override fun run() {
        sciview.toggleVRRendering()
        hmd = sciview.hub.getWorkingHMD() as? OpenVRHMD ?: throw IllegalStateException("Could not find headset")

        sessionId = "BionicTracking-generated-${SystemHelpers.formatDateTime()}"
        sessionDirectory = Files.createDirectory(Paths.get(System.getProperty("user.home"), "Desktop", sessionId))

        laser.material().diffuse = Vector3f(5.0f, 0.0f, 0.02f)
        laser.material().metallic = 0.0f
        laser.material().roughness = 1.0f
        laser.visible = false
        sciview.addNode(laser)

        referenceTarget.visible = false
        referenceTarget.ifMaterial {
            roughness = 1.0f
            metallic = 0.0f
            diffuse = Vector3f(0.8f, 0.8f, 0.8f)
        }
        sciview.addNode(referenceTarget)

        testTarget1.visible = false
        testTarget1.ifMaterial {
            roughness = 1.0f
            metallic = 0.0f
            diffuse = Vector3f(0.8f, 0.8f, 0.8f)
        }
        sciview.addNode(testTarget1)

        testTarget2.visible = false
        testTarget2.ifMaterial {
            roughness = 1.0f
            metallic = 0.0f
            diffuse = Vector3f(0.8f, 0.8f, 0.8f)
        }
        sciview.addNode(testTarget2)

        // Inside-out box enclosing the scene so the background is not empty.
        val shell = Box(Vector3f(20.0f, 20.0f, 20.0f), insideNormals = true)
        shell.ifMaterial {
            cullingMode = Material.CullingMode.Front
            diffuse = Vector3f(0.4f, 0.4f, 0.4f)
        }
        shell.spatial().position = Vector3f(0.0f, 0.0f, 0.0f)
        sciview.addChild(shell)

        // Assumes a node named "volume" was loaded beforehand -- TODO confirm the caller guarantees this.
        volume = sciview.find("volume") as Volume

        val bb = BoundingGrid()
        bb.node = volume
        bb.visible = false

        val debugBoard = TextBoard()
        debugBoard.name = "debugBoard"
        debugBoard.scale = Vector3f(0.05f, 0.05f, 0.05f)
        debugBoard.position = Vector3f(0.0f, -0.3f, -0.9f)
        debugBoard.text = ""
        debugBoard.visible = false
        sciview.camera?.addChild(debugBoard)

        val lights = Light.createLightTetrahedron(Vector3f(0.0f, 0.0f, 0.0f), spread = 5.0f, radius = 15.0f, intensity = 5.0f)
        lights.forEach { sciview.addChild(it) }

        thread {
            logger.info("Adding onDeviceConnect handlers")
            hmd.events.onDeviceConnect.add { hmd, device, timestamp ->
                logger.info("onDeviceConnect called, cam=${sciview.camera}")
                if (device.type == TrackedDeviceType.Controller) {
                    logger.info("Got device ${device.name} at $timestamp")
                    device.model?.let { hmd.attachToNode(device, it, sciview.camera) }
                }
            }
        }
        thread {
            inputSetup()
            setupControllerforTracking()
        }

        launchUpdaterThread()
    }

    private fun setupControllerforTracking(
        // FIX: generic arguments of Pair restored after paste mangling.
        keybindingTracking: Pair<TrackerRole, OpenVRButton> = (TrackerRole.RightHand to OpenVRButton.Trigger)
    ) {
        thread {
            val cam = sciview.camera as? DetachedHeadCamera ?: return@thread

            // The trigger toggles tracking on/off; on deactivation the collected
            // hedgehog (set of spines) is dumped to disk.
            val toggleTracking = ClickBehaviour { _, _ ->
                if (eyeTrackingActive) {
                    referenceTarget.ifMaterial { diffuse = Vector3f(0.5f, 0.5f, 0.5f) }
                    cam.showMessage("Tracking deactivated.", distance = 1.2f, size = 0.2f)
                    eyeTrackingActive = false
                    dumpHedgehog()
                    println("before dumphedgehog: " + hedgehogsList.last().instances.size.toString())
                } else {
                    addHedgehog()
                    println("after addhedgehog: " + hedgehogsList.last().instances.size.toString())
                    referenceTarget.ifMaterial { diffuse = Vector3f(1.0f, 0.0f, 0.0f) }
                    cam.showMessage("Tracking active.", distance = 1.2f, size = 0.2f)
                    eyeTrackingActive = true
                }
            }
            hmd.addBehaviour("toggle_tracking", toggleTracking)
            hmd.addKeyBinding("toggle_tracking", keybindingTracking.first, keybindingTracking.second)

            volume.visible = true
            volume.runRecursive { it.visible = true }
            playing = true

            // Poll the right controller pose and cast a spine along its forward axis.
            while (true) {
                // BUGFIX: the original loop spun without ever yielding (a bare
                // `continue` when the controller was absent), pinning a CPU
                // core. Throttle polling to roughly 200 Hz.
                Thread.sleep(5)

                // "Controller-2" is the right-hand controller in this setup -- TODO confirm mapping.
                rightController = hmd.getTrackedDevices(TrackedDeviceType.Controller)["Controller-2"] ?: continue

                val controllerSpatial = rightController.model?.spatialOrNull() ?: continue

                // Two points along the controller's -Z axis define the tracking ray.
                val headCenter = Matrix4f(controllerSpatial.world).transform(Vector3f(0.0f, 0f, -0.1f).xyzw()).xyz()
                val pointWorld = Matrix4f(controllerSpatial.world).transform(Vector3f(0.0f, 0f, -2f).xyzw()).xyz()

                println(headCenter.toString())
                println(pointWorld.toString())
                testTarget1.visible = true
                testTarget1.ifSpatial { position = headCenter }

                testTarget2.visible = true
                testTarget2.ifSpatial { position = pointWorld }

                laser.visible = true
                laser.spatial().orientBetweenPoints(headCenter, pointWorld, true, true)

                referenceTarget.visible = true
                referenceTarget.ifSpatial { position = pointWorld }

                val direction = (pointWorld - headCenter).normalize()
                if (eyeTrackingActive) {
                    addSpine(headCenter, direction, volume, 0.8f, volume.viewerState.currentTimepoint)
                }
            }
        }
    }
}
/**
 * Menu command that launches [VRControllerTracking], which uses VR controller
 * poses to track cells in a volumetric dataset.
 */
@Plugin(
    type = Command::class,
    menuRoot = "SciView",
    menu = [Menu(label = "Demo", weight = MenuWeights.DEMO),
        Menu(label = "Advanced", weight = MenuWeights.DEMO_ADVANCED),
        Menu(label = "Utilize VR Controller for Cell Tracking", weight = MenuWeights.DEMO_ADVANCED_EYETRACKING)]
)
class VRControllerTrackingCommand : Command {

    @Parameter
    private lateinit var sv: SciView

    override fun run() {
        val tracking = VRControllerTracking(sv)
        tracking.run()
    }

    companion object {
        /** Standalone entry point: starts sciview and invokes this command. */
        @Throws(Exception::class)
        @JvmStatic
        fun main(args: Array<String>) {
            val sv = SciView.create()
            val command = sv.scijavaContext!!.getService(CommandService::class.java)
            // FIX: generic type arguments restored after paste mangling.
            val argmap = HashMap<String, Any>()
            argmap["sv"] = sv
            command.run(VRControllerTrackingCommand::class.java, true, argmap)
        }
    }
}
/**
 * Menu command that launches [VRHeadsetTracking], which uses the headset's
 * view direction to track cells in a volumetric dataset.
 */
@Plugin(
    type = Command::class,
    menuRoot = "SciView",
    menu = [Menu(label = "Demo", weight = MenuWeights.DEMO),
        Menu(label = "Advanced", weight = MenuWeights.DEMO_ADVANCED),
        Menu(label = "Utilize VR Headset for Cell Tracking", weight = MenuWeights.DEMO_ADVANCED_EYETRACKING)]
)
class VRHeadSetTrackingCommand : Command {

    @Parameter
    private lateinit var eventService: EventService

    @Parameter
    private lateinit var sv: SciView

    override fun run() {
        val tracking = VRHeadsetTracking(sv, eventService)
        tracking.run()
    }

    companion object {
        // Run from here: it automatically loads the volume for rendering -- please supply the correct volume location.
        @Throws(Exception::class)
        @JvmStatic
        fun main(args: Array<String>) {
            val sv = SciView.create()
            val command = sv.scijavaContext!!.getService(CommandService::class.java)
            // TODO this should probably open a file open dialog instead of hardcoding a path?
            // FIX: generic type arguments restored after paste mangling.
            command.run(OpenDirofTif::class.java, true,
                hashMapOf<String, Any>(
                    "file" to File("E:\\dataset\\Pdu_H2BeGFP_CAAXmCherry_0123_20130312_192018.corrected-histone"),
                    "onlyFirst" to 10
                ))
                .get()

            val argmap = HashMap<String, Any>()
            argmap["sv"] = sv
            command.run(VRHeadSetTrackingCommand::class.java, true, argmap)
                .get()
        }
    }
}
/**
 * This class uses the VR headset's orientation to track cells in volumetric datasets in a sciview environment.
 */
class VRHeadsetTracking(
    sciview: SciView,
    val eventService: EventService,
) : CellTrackingBase(sciview) {

    // FIX: generic argument restored after paste mangling -- TODO confirm element type.
    var hedgehogsList = mutableListOf<InstancedNode>()

    // Most recently selected node, if any.
    private var selectionStorage: Node? = null

    override fun run() {
        sciview.toggleVRRendering()
        hmd = sciview.hub.getWorkingHMD() as? OpenVRHMD ?: throw IllegalStateException("Could not find headset")
        sessionId = "BionicTracking-generated-${SystemHelpers.formatDateTime()}"
        sessionDirectory = Files.createDirectory(Paths.get(System.getProperty("user.home"), "Desktop", sessionId))

        referenceTarget.visible = false
        referenceTarget.ifMaterial {
            roughness = 1.0f
            metallic = 0.0f
            diffuse = Vector3f(0.8f, 0.8f, 0.8f)
        }
        sciview.camera!!.addChild(referenceTarget)

        // Inside-out box enclosing the scene so the background is not empty.
        val shell = Box(Vector3f(20.0f, 20.0f, 20.0f), insideNormals = true)
        shell.ifMaterial {
            cullingMode = Material.CullingMode.Front
            diffuse = Vector3f(0.4f, 0.4f, 0.4f)
        }
        shell.spatial().position = Vector3f(0.0f, 0.0f, 0.0f)
        sciview.addChild(shell)

        // Assumes a node named "volume" was loaded beforehand -- TODO confirm the caller guarantees this.
        volume = sciview.find("volume") as Volume

        val bb = BoundingGrid()
        bb.node = volume
        bb.visible = false

        val debugBoard = TextBoard()
        debugBoard.name = "debugBoard"
        debugBoard.spatial().scale = Vector3f(0.05f, 0.05f, 0.05f)
        debugBoard.spatial().position = Vector3f(0.0f, -0.3f, -0.9f)
        debugBoard.text = ""
        debugBoard.visible = false
        sciview.camera?.addChild(debugBoard)

        val lights = Light.createLightTetrahedron(Vector3f(0.0f, 0.0f, 0.0f), spread = 5.0f, radius = 15.0f, intensity = 5.0f)
        lights.forEach { sciview.addChild(it) }

        thread {
            logger.info("Adding onDeviceConnect handlers")
            hmd.events.onDeviceConnect.add { hmd, device, timestamp ->
                logger.info("onDeviceConnect called, cam=${sciview.camera}")
                if (device.type == TrackedDeviceType.Controller) {
                    logger.info("Got device ${device.name} at $timestamp")
                    device.model?.let { hmd.attachToNode(device, it, sciview.camera) }
                }
            }
        }
        thread {
            inputSetup()
            setupHeadsetTracking()
        }

        launchUpdaterThread()
    }

    private fun setupHeadsetTracking() {
        //VRGrab.createAndSet(scene = Scene(), hmd, listOf(OpenVRHMD.OpenVRButton.Trigger), listOf(TrackerRole.LeftHand))
        // Left trigger button can validate or delete a track; the function should be
        // arranged to two different buttons in the future.
        VRSelect.createAndSet(sciview.currentScene,
            hmd,
            listOf(OpenVRHMD.OpenVRButton.Trigger),
            listOf(TrackerRole.LeftHand),
            { n ->
                println("the spot ${n.name} is selected")
                // Validate the selected node from the volume; the tag event is
                // designed specially for tagging in Elephant.
                eventService.publish(NodeTaggedEvent(n))
            },
            true)

        VRTouch.createAndSet(sciview.currentScene, hmd, listOf(TrackerRole.LeftHand, TrackerRole.RightHand), true)

        VRGrab.createAndSet(sciview.currentScene, hmd, listOf(OpenVRHMD.OpenVRButton.Side), listOf(TrackerRole.RightHand))
        setupControllerforTracking()
    }

    private fun setupControllerforTracking(keybindingTracking: String = "U") {
        thread {
            val cam = sciview.camera as? DetachedHeadCamera ?: return@thread

            // Toggles tracking on/off; on deactivation the collected hedgehog
            // (set of spines) is dumped to disk on a background thread.
            val toggleTracking = ClickBehaviour { _, _ ->
                if (eyeTrackingActive) {
                    referenceTarget.ifMaterial { diffuse = Vector3f(0.5f, 0.5f, 0.5f) }
                    cam.showMessage("Tracking deactivated.", distance = 1.2f, size = 0.2f)
                    eyeTrackingActive = false
                    thread {
                        dumpHedgehog()
                        println("before dumphedgehog: " + hedgehogsList.last().instances.size.toString())
                    }
                } else {
                    addHedgehog()
                    println("after addhedgehog: " + hedgehogsList.last().instances.size.toString())
                    referenceTarget.ifMaterial { diffuse = Vector3f(1.0f, 0.0f, 0.0f) }
                    cam.showMessage("Tracking active.", distance = 1.2f, size = 0.2f)
                    eyeTrackingActive = true
                }
            }
            //RightController.trigger
            hmd.addBehaviour("toggle_tracking", toggleTracking)
            hmd.addKeyBinding("toggle_tracking", keybindingTracking)

            volume.visible = true
            volume.runRecursive { it.visible = true }

            // Continuously cast a spine along the headset's view direction.
            while (true) {
                // Two points along the camera's -Z axis define the tracking ray.
                val headCenter = Matrix4f(cam.spatial().world).transform(Vector3f(0.0f, 0f, -1f).xyzw()).xyz()
                val pointWorld = Matrix4f(cam.spatial().world).transform(Vector3f(0.0f, 0f, -2f).xyzw()).xyz()

                referenceTarget.visible = true
                // referenceTarget is a child of the camera, so this position is camera-local.
                referenceTarget.ifSpatial { position = Vector3f(0.0f, 0f, -1f) }

                val direction = (pointWorld - headCenter).normalize()
                if (eyeTrackingActive) {
                    addSpine(headCenter, direction, volume, 0.8f, volume.viewerState.currentTimepoint)
                }

                Thread.sleep(2)
            }
        }
    }
}
/**
 * Drag behavior that moves a spot/instance with a single VR controller.
 * The per-phase callbacks receive the current controller tip position.
 * The shared [MultiButtonManager] suppresses the single-hand callbacks while a
 * two-handed gesture is active, so the two behaviors never run simultaneously.
 */
class MoveInstanceVR(
    val buttonmanager: MultiButtonManager,
    val button: OpenVRHMD.OpenVRButton,
    val trackerRole: TrackerRole,
    val getTipPosition: () -> Vector3f,
    val spotMoveInitCallback: ((Vector3f) -> Unit)? = null,
    val spotMoveDragCallback: ((Vector3f) -> Unit)? = null,
    val spotMoveEndCallback: ((Vector3f) -> Unit)? = null,
) : DragBehaviour {

    override fun init(x: Int, y: Int) {
        buttonmanager.pressButton(button, trackerRole)
        if (!buttonmanager.isTwoHandedActive()) {
            spotMoveInitCallback?.invoke(getTipPosition())
        }
    }

    override fun drag(x: Int, y: Int) {
        // Only perform the single hand behavior when no other grab button is currently active
        // to prevent simultaneous execution of behaviors
        if (!buttonmanager.isTwoHandedActive()) {
            spotMoveDragCallback?.invoke(getTipPosition())
        }
    }

    override fun end(x: Int, y: Int) {
        if (!buttonmanager.isTwoHandedActive()) {
            spotMoveEndCallback?.invoke(getTipPosition())
        }
        buttonmanager.releaseButton(button, trackerRole)
    }

    companion object {

        /**
         * Convenience method for adding the move behaviour to [buttons] on the
         * controllers in [controllerSide] once they connect.
         * FIX: generic arguments of the List parameters restored after paste mangling.
         */
        fun createAndSet(
            scene: Scene,
            hmd: OpenVRHMD,
            buttons: List<OpenVRHMD.OpenVRButton>,
            controllerSide: List<TrackerRole>,
            buttonmanager: MultiButtonManager,
            getTipPosition: () -> Vector3f,
            spotMoveInitCallback: ((Vector3f) -> Unit)? = null,
            spotMoveDragCallback: ((Vector3f) -> Unit)? = null,
            spotMoveEndCallback: ((Vector3f) -> Unit)? = null,
        ) {
            hmd.events.onDeviceConnect.add { _, device, _ ->
                if (device.type == TrackedDeviceType.Controller) {
                    // FIX: lambda parameter was an unused `controller` binding;
                    // `let` is kept only for its null check on the model.
                    device.model?.let { _ ->
                        if (controllerSide.contains(device.role)) {
                            buttons.forEach { button ->
                                val name = "VRDrag:${hmd.trackingSystemName}:${device.role}:$button"
                                val grabBehaviour = MoveInstanceVR(
                                    buttonmanager,
                                    button,
                                    device.role,
                                    getTipPosition,
                                    spotMoveInitCallback,
                                    spotMoveDragCallback,
                                    spotMoveEndCallback
                                )
                                buttonmanager.registerButtonConfig(button, device.role)
                                hmd.addBehaviour(name, grabBehaviour)
                                hmd.addKeyBinding(name, device.role, button)
                            }
                        }
                    }
                }
            }
        }
    }
}
/** Keep track of which VR buttons are currently being pressed. This is useful if you want to assign the same button
 * to different behaviors with different combinations. This class helps with managing the button states.
 * Buttons to track first need to be registered with [registerButtonConfig]. Call [pressButton] and [releaseButton]
 * in your behavior init/end methods. You can check if both hands are in use with [isTwoHandedActive] or if a specific
 * button is currently pressed with [isButtonPressed]. */
class MultiButtonManager {
    /** A (button, hand) pair identifying one physical control. */
    data class ButtonConfig(
        val button: OpenVRHMD.OpenVRButton,
        val trackerRole: TrackerRole
    )

    val logger by lazyLogger()

    /** List of registered buttons, stored as [ButtonConfig] and whether the button is pressed right now.
     * FIX: generic type arguments restored after paste mangling. */
    private val buttons = ConcurrentHashMap<ButtonConfig, Boolean>()

    // Cached "both hands pressing something" flag, refreshed on every press/release.
    private val twoHandedActive = AtomicBoolean(false)

    // FIX: removed a dead `init` block that iterated over `buttons` and wrote
    // `false` for each entry -- the map is always empty at construction time,
    // so the block was a no-op.

    /** Add a new button configuration that the manager will keep track of. */
    fun registerButtonConfig(button: OpenVRHMD.OpenVRButton, trackerRole: TrackerRole) {
        logger.debug("Registered new button config: $button, $trackerRole")
        buttons[ButtonConfig(button, trackerRole)] = false
    }

    /** Add a button to the list of pressed buttons. Returns false for unregistered buttons. */
    fun pressButton(button: OpenVRHMD.OpenVRButton, role: TrackerRole): Boolean {
        val config = ButtonConfig(button, role)
        if (!buttons.containsKey(config)) { return false }
        buttons[config] = true
        updateTwoHandedState()
        return true
    }

    /** Overload function that takes a button config instead of separate button and trackerrole inputs. */
    fun pressButton(buttonConfig: ButtonConfig): Boolean {
        return pressButton(buttonConfig.button, buttonConfig.trackerRole)
    }

    /** Remove a button from the list of pressed buttons. Returns false for unregistered buttons. */
    fun releaseButton(button: OpenVRHMD.OpenVRButton, role: TrackerRole): Boolean {
        val config = ButtonConfig(button, role)
        if (!buttons.containsKey(config)) { return false }
        buttons[config] = false
        updateTwoHandedState()
        return true
    }

    /** Overload function that takes a button config instead of separate button and trackerrole inputs. */
    fun releaseButton(buttonConfig: ButtonConfig): Boolean {
        return releaseButton(buttonConfig.button, buttonConfig.trackerRole)
    }

    private fun updateTwoHandedState() {
        // Check if any buttons are pressed on both hands
        val leftPressed = buttons.any { it.key.trackerRole == TrackerRole.LeftHand && it.value }
        val rightPressed = buttons.any { it.key.trackerRole == TrackerRole.RightHand && it.value }
        twoHandedActive.set(leftPressed && rightPressed)
    }

    /** Returns true when the same button is currently pressed on both VR controllers. */
    fun isTwoHandedActive(): Boolean = twoHandedActive.get()

    /** Check if a button is currently being pressed. */
    fun isButtonPressed(button: OpenVRHMD.OpenVRButton, role: TrackerRole): Boolean {
        return buttons[ButtonConfig(button, role)] ?: false
    }

    /** Retrieve a list of currently registered buttons.
     * NOTE(review): this exposes the internal mutable map; callers can bypass
     * press/release bookkeeping -- consider returning a read-only view. */
    fun getRegisteredButtons(): ConcurrentHashMap<ButtonConfig, Boolean> {
        return buttons
    }
}
/** Transform a target node [target] by pressing the same buttons defined in [createAndSet] on both VR controllers.
 * The fastest way to attach the behavior is by using [createAndSet].
 * [onEndCallback] is an optional lambda that is executed once the behavior ends.
 * @author Jan Tiemann
 * @author Samuel Pantze */
class VR2HandNodeTransform(
    name: String,
    controller: Spatial,
    offhand: VRTwoHandDragOffhand,
    val scene: Scene,
    val scaleLocked: Boolean = false,
    val rotationLocked: Boolean = false,
    val positionLocked: Boolean = false,
    val lockYaxis: Boolean = true,
    val target: Node,
    private val onEndCallback: (() -> Unit)? = null,
    private val resetRotationBtnManager: MultiButtonManager? = null,
    private val resetRotationButton: MultiButtonManager.ButtonConfig? = null,
) : VRTwoHandDragBehavior(name, controller, offhand) {

    override fun init(x: Int, y: Int) {
        super.init(x, y)
        // Find the button that doesn't lock the y Axis and indicate that it is now pressed
        val transformBtn = resetRotationBtnManager?.getRegisteredButtons()
            ?.filter { it.key != resetRotationButton }?.map { it.key }?.firstOrNull()
        if (transformBtn != null) {
            resetRotationBtnManager?.pressButton(transformBtn)
        }
    }

    override fun dragDelta(
        currentPositionMain: Vector3f,
        currentPositionOff: Vector3f,
        lastPositionMain: Vector3f,
        lastPositionOff: Vector3f
    ) {
        val scaleDelta =
            VRScale.getScaleDelta(currentPositionMain, currentPositionOff, lastPositionMain, lastPositionOff)

        // Hand-to-hand direction before and after this drag step; with lockYaxis
        // the vertical component is zeroed so rotation stays around the Y axis.
        val currentDirection = (currentPositionMain - currentPositionOff).normalize()
        val lastDirection = (lastPositionMain - lastPositionOff).normalize()
        if (lockYaxis) {
            lastDirection.y = 0f
            currentDirection.y = 0f
        }

        // Rotation implementation: https://discussions.unity.com/t/two-hand-grabbing-of-objects-in-virtual-reality/219972

        target.let {
            if (!rotationLocked) {
                it.ifSpatial {
                    val rotationDelta = Quaternionf().rotationTo(lastDirection, currentDirection)
                    if (resetRotationBtnManager?.isTwoHandedActive() == true) {
                        // Reset the rotation when the reset button was pressed too
                        rotation = Quaternionf()
                    } else {
                        // Rotate node with respect to the world space delta
                        rotation = Quaternionf(rotationDelta).mul(Quaternionf(rotation))
                    }
                }
            }
            if (!scaleLocked) {
                target.ifSpatial {
                    scale *= scaleDelta
                }
            }
            if (!positionLocked) {
                // Translate by the movement of the midpoint between both hands.
                val positionDelta =
                    (currentPositionMain + currentPositionOff) / 2f - (lastPositionMain + lastPositionOff) / 2f
                target.ifSpatial {
                    position.add(positionDelta)
                }
            }
        }
    }

    override fun end(x: Int, y: Int) {
        super.end(x, y)
        onEndCallback?.invoke()
        // Find the button that doesn't lock the y Axis and indicate that it is now released
        val transformBtn = resetRotationBtnManager?.getRegisteredButtons()
            ?.filter { it.key != resetRotationButton }?.map { it.key }?.firstOrNull()
        if (transformBtn != null) {
            resetRotationBtnManager?.releaseButton(transformBtn)
        }
    }

    companion object {
        /**
         * Convenience method for adding scale behaviour.
         * FIX: generic argument of CompletableFuture restored after paste mangling.
         */
        fun createAndSet(
            hmd: OpenVRHMD,
            button: OpenVRHMD.OpenVRButton,
            scene: Scene,
            scaleLocked: Boolean = false,
            rotationLocked: Boolean = false,
            positionLocked: Boolean = false,
            lockYaxis: Boolean = true,
            target: Node,
            onEndCallback: (() -> Unit)? = null,
            resetRotationBtnManager: MultiButtonManager? = null,
            resetRotationButton: MultiButtonManager.ButtonConfig? = null,
        ): CompletableFuture<VR2HandNodeTransform> {
            @Suppress("UNCHECKED_CAST")
            return createAndSet(
                hmd, button
            ) { controller: Spatial, offhand: VRTwoHandDragOffhand ->
                // Assign the yLock button and the right grab button to the button manager to handle multi-button events
                resetRotationButton?.let {
                    resetRotationBtnManager?.registerButtonConfig(it.button, it.trackerRole)
                }
                resetRotationBtnManager?.registerButtonConfig(button, TrackerRole.RightHand)
                VR2HandNodeTransform(
                    "Scaling",
                    controller,
                    offhand,
                    scene,
                    scaleLocked,
                    rotationLocked,
                    positionLocked,
                    lockYaxis,
                    target,
                    onEndCallback,
                    resetRotationBtnManager,
                    resetRotationButton
                )
            } as CompletableFuture<VR2HandNodeTransform>
        }
    }
}
= null, + val button: OpenVRHMD.OpenVRButton, + private val trackerRole: TrackerRole, + private val multiplier: Float +) : DragBehaviour { + + private var camDiff = Vector3f() + + private val controllerSpatial: Spatial = controllerHitbox.spatialOrNull() + ?: throw IllegalArgumentException("controller hitbox needs a spatial attribute") + + + override fun init(x: Int, y: Int) { + grabButtonmanager?.pressButton(button, trackerRole) + camDiff = controllerSpatial.worldPosition() - cam.position + } + + override fun drag(x: Int, y: Int) { + // Only drag when no other grab button is currently active + // to prevent simultaneous behaviors with two-handed gestures + if (grabButtonmanager?.isTwoHandedActive() != true) { + //grabbed world + val newCamDiff = controllerSpatial.worldPosition() - cam.position + val diffTranslation = camDiff - newCamDiff //reversed + cam.position += diffTranslation * multiplier + camDiff = newCamDiff + } + } + + override fun end(x: Int, y: Int) { + grabButtonmanager?.releaseButton(button, trackerRole) + } + + companion object { + + /** + * Convenience method for adding grab behaviour + */ + fun createAndSet( + scene: Scene, + hmd: OpenVRHMD, + buttons: List, + controllerSide: List, + buttonManager: MultiButtonManager? 
= null, + multiplier: Float = 1f + ) { + hmd.events.onDeviceConnect.add { _, device, _ -> + if (device.type == TrackedDeviceType.Controller) { + device.model?.let { controller -> + if (controllerSide.contains(device.role)) { + buttons.forEach { button -> + val name = "VRDrag:${hmd.trackingSystemName}:${device.role}:$button" + val grabBehaviour = VRGrabTheWorld( + name, + controller.children.first(), + scene.findObserver()!!.spatial(), + buttonManager, + button, + device.role, + multiplier + ) + buttonManager?.registerButtonConfig(button, device.role) + hmd.addBehaviour(name, grabBehaviour) + hmd.addKeyBinding(name, device.role, button) + } + } + } + } + } + } + } +} \ No newline at end of file diff --git a/src/main/kotlin/sc/iview/node/Line3D.kt b/src/main/kotlin/sc/iview/node/Line3D.kt index 94b74e90c..63cdb78eb 100644 --- a/src/main/kotlin/sc/iview/node/Line3D.kt +++ b/src/main/kotlin/sc/iview/node/Line3D.kt @@ -147,7 +147,7 @@ class Line3D : Mesh { * geometry information into consideration if this Node implements [HasGeometry]. * In case a bounding box cannot be determined, the function will return null. */ - override fun generateBoundingBox(): OrientedBoundingBox? { + override fun generateBoundingBox(includeChildren: Boolean): OrientedBoundingBox? 
{
         var bb = OrientedBoundingBox(this, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f)
 
         for (n in children) {
diff --git a/src/main/resources/sc/iview/commands/demo/animation/ParticleDemo.vert b/src/main/resources/sc/iview/commands/demo/animation/ParticleDemo.vert
index 4a3d2b12f..1f612d645 100644
--- a/src/main/resources/sc/iview/commands/demo/animation/ParticleDemo.vert
+++ b/src/main/resources/sc/iview/commands/demo/animation/ParticleDemo.vert
@@ -58,7 +58,7 @@ mat4 mv;
     mv = (vrParameters.stereoEnabled ^ 1) * ViewMatrices[0] * iModelMatrix + (vrParameters.stereoEnabled * ViewMatrices[currentEye.eye] * iModelMatrix);
     projectionMatrix = (vrParameters.stereoEnabled ^ 1) * ProjectionMatrix + vrParameters.stereoEnabled * vrParameters.projectionMatrices[currentEye.eye];
 
-    if(ubo.isBillboard > 0) {
+    if(ubo.isBillboard == 1) {
         mv[0][0] = 1.0f;
         mv[0][1] = .0f;
         mv[0][2] = .0f;
diff --git a/src/test/kotlin/sc/iview/StartEyeTrackingDirectly.kt b/src/test/kotlin/sc/iview/StartEyeTrackingDirectly.kt
new file mode 100644
index 000000000..ece61eabf
--- /dev/null
+++ b/src/test/kotlin/sc/iview/StartEyeTrackingDirectly.kt
@@ -0,0 +1,34 @@
+import graphics.scenery.utils.extensions.times
+import graphics.scenery.volumes.RAIVolume
+import graphics.scenery.volumes.TransferFunction
+import org.scijava.command.CommandService
+import org.scijava.ui.UIService
+import sc.iview.SciView
+import sc.iview.commands.demo.advanced.EyeTrackingCommand
+
+// Test class with hardcoded path to open eyetracking directly.
+fun main() {
+    val sv = SciView.create()
+    val context = sv.scijavaContext
+    val uiService = context?.service(UIService::class.java)
+    uiService?.showUI()
+
+    sv.open("C:/Software/datasets/MastodonTutorialDataset1/datasethdf5.xml")
+    val volumes = sv.findNodes { it.javaClass == RAIVolume::class.java }
+    volumes.first().let {
+        it as RAIVolume
+        it.minDisplayRange = 400f
+        it.maxDisplayRange = 1500f
+        val tf = TransferFunction()
+        tf.addControlPoint(0f, 0f)
+        tf.addControlPoint(1f, 1f)
+        it.transferFunction = tf
+        it.spatial().scale *= 50f
+        it.spatial().scale.z *= -1f
+    }
+
+    val command = sv.scijavaContext!!.getService(CommandService::class.java)
+    val argmap = HashMap<String, Any>()
+    command.run(EyeTrackingCommand::class.java, true, argmap)
+
+}
\ No newline at end of file