Skip to content
Merged
Show file tree
Hide file tree
Changes from 54 commits
Commits
Show all changes
67 commits
Select commit Hold shift + click to select a range
7c42408
update readdy module to match cytosim as much as possible
Jun 25, 2024
0b17f02
script to copy readdy outputs
Jun 26, 2024
aa344d5
visualize individual trajectories, leverage readdy post processing co…
Jun 27, 2024
de1b0a5
visualize combined
Jun 27, 2024
037f3bf
add readdy baseline individual viz
Jul 1, 2024
fa06164
WIP tomography viz
Jul 1, 2024
9bd0675
tomography visualization
Jul 1, 2024
5cea593
Merge branch 'main' into update-viz
Jul 1, 2024
4bd4c60
calculate compression metrics on tomography data for viz
Jul 2, 2024
27050ce
WIP debugging tomography visualization and analysis
Jul 3, 2024
97b09e3
dim reduction viz, tomography viz in separate files
Jul 8, 2024
1ec3599
WIP adding options to pca viz
Jul 9, 2024
503c5b6
Update name to series_name in workflow docstrings
jessicasyu Jul 1, 2024
1866f55
Move copy readdy outputs into workflow notebooks
jessicasyu Jul 1, 2024
73c4527
Lint readdy processing notebooks
jessicasyu Jul 1, 2024
5164acf
Update docstrings in readdy data structures module
jessicasyu Jul 2, 2024
b599032
Update docstrings in readdy loader module
jessicasyu Jul 2, 2024
1366b98
Update docstrings and type hinting in readdy post processor module
jessicasyu Jul 2, 2024
5190623
Fix readdy no compression simulation series name
jessicasyu Jul 2, 2024
f6e7376
Update readdy simulation workflows
jessicasyu Jul 2, 2024
548f3aa
Fix autodoc for readdy simulation module
jessicasyu Jul 2, 2024
5497614
Fix floating point error when calculating control points
jessicasyu Jul 3, 2024
9ca451b
Fix readdy series name in dim reduction analysis
jessicasyu Jul 3, 2024
36065d0
Add compression metric descriptions and bounds to enum
jessicasyu Jul 3, 2024
6f7b3d5
Update compression workflow to include non compression cases
jessicasyu Jul 3, 2024
e89b6a0
Fix readdy post processor normal calculation to skip start and end
jessicasyu Jul 3, 2024
e9921d2
Remove outdated visualization files
jessicasyu Jul 3, 2024
881125b
Update individual simulator visualization workflows
jessicasyu Jul 3, 2024
b0ff9e6
Adjust number of sampled timepoints for Cytosim to match ReaDDy
jessicasyu Jul 8, 2024
fba0a71
Refactor combined trajectory visualization
jessicasyu Jul 8, 2024
11986b6
Add visualization notebooks to docs
jessicasyu Jul 8, 2024
07ca4eb
Add normalized time to compression metrics workflow
jessicasyu Jul 9, 2024
1d378e2
Update PCA transform points
jessicasyu Jul 9, 2024
a84eb7c
* refactor tomography visualization functions
mogres Jul 9, 2024
cf1ca60
Update tomography visualization
jessicasyu Jul 9, 2024
e4a7597
add column to check if files are ordered
mogres Jul 10, 2024
669374d
Merge branch 'main' into debug-update-viz
jessicasyu Jul 10, 2024
120ad45
Formatting for compression metrics analysis
jessicasyu Jul 10, 2024
6699c55
Update pca analysis workflow to save results to bucket
jessicasyu Jul 10, 2024
18c54a5
Update visualization README
jessicasyu Jul 10, 2024
7e5d06a
Refactor PCA visualization
jessicasyu Jul 10, 2024
0b1001f
merged in changes from testing PCA viz with options
Jul 15, 2024
d81f78c
update colors and camera position for combined viz
Jul 15, 2024
64c8762
PCA viz tested with all options
Jul 16, 2024
d4cf64d
test and tweak individual readdy viz
Jul 17, 2024
723343b
test and tweak individual cytosim viz
Jul 17, 2024
4dd48a3
updated camera views for individual and PCA viz
Jul 17, 2024
dfba623
finish commit
Jul 17, 2024
05bccff
add ipdb to requirements
mogres Jul 17, 2024
83fab65
Methods to calculate twist angle
mogres Jul 17, 2024
d8895fe
Methods to visualize twist angle
mogres Jul 17, 2024
1cb6bff
Remove tangent angle visualization. Fix doc strings.
mogres Jul 18, 2024
5cf8e74
Merge branch 'debug-update-viz' of github.com:simularium/subcell-pipe…
mogres Jul 18, 2024
a3e4740
lint
Jul 19, 2024
e85f117
more lint
Jul 19, 2024
65b4f9d
and more linting
Jul 19, 2024
491173e
fix saving tomography 2D plots
Jul 19, 2024
7c87800
tweaks to dim reduction and tomography viz
Jul 19, 2024
aa34e3b
lint
Jul 19, 2024
9d87bfe
Update subcell_pipeline/analysis/compression_metrics/README.md
Jul 25, 2024
7085345
changes from review
Jul 25, 2024
76b2ced
add h5py with pdm, remove unused imageio dep, remove broken tests
Jul 25, 2024
305b0d0
don't run tests on build since there aren't any
Jul 25, 2024
497bcb5
calculate total steps for readdy
Jul 25, 2024
d0328b5
remove .pdm-python and add to gitignore
Jul 29, 2024
d2c6c0a
Merge remote-tracking branch 'origin/feature/polymer_twist' into debu…
Jul 29, 2024
f2dfd59
more changes from review
Jul 29, 2024
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions .gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -123,3 +123,4 @@ ENV/
*.cym
*.simularium
**/analysis_outputs/**
*.h5
2 changes: 1 addition & 1 deletion docs/conf.py
Original file line number Diff line number Diff line change
Expand Up @@ -58,7 +58,7 @@

# List of modules to be mocked up. Useful when some external dependencies are
# not met at build time and break the building process.
autodoc_mock_imports = []
autodoc_mock_imports = ["readdy"]

# Controls how to represent typehints.
autodoc_typehints = "signature"
Expand Down
2 changes: 2 additions & 0 deletions docs/index.rst
Original file line number Diff line number Diff line change
Expand Up @@ -16,6 +16,7 @@

Simulations <simulation>
Analysis <analysis>
Visualization <visualization>

.. autosummary::
:toctree: _summary
Expand All @@ -25,6 +26,7 @@

subcell_pipeline.simulation
subcell_pipeline.analysis
subcell_pipeline.visualization

.. toctree::
:hidden:
Expand Down
5 changes: 5 additions & 0 deletions docs/visualization.rst
Original file line number Diff line number Diff line change
@@ -0,0 +1,5 @@
Visualization workflow notebooks
================================

.. include:: ../subcell_pipeline/visualization/README.md
:parser: myst_parser.sphinx_
2 changes: 2 additions & 0 deletions pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -27,6 +27,7 @@ dependencies = [
"setuptools>=70.0.0",
"io-collection>=0.10.2",
"python-dotenv>=1.0.1",
"imageio",
]

[project.urls]
Expand Down Expand Up @@ -95,6 +96,7 @@ extend-ignore = [
"D100", # Missing docstring in public module
"D101", # Missing docstring in public class
"D103", # Missing docstring in public function
"D105", # Missing docstring in magic method
"D107", # Missing docstring in __init__
"D202", # Blank lines between the function body and the function docstring
"D203", # 1 blank line required before class docstring
Expand Down
4 changes: 2 additions & 2 deletions subcell_pipeline/analysis/compression_metrics/README.md
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,6 @@

## Metrics for comparing traces of compressed fibers

Analysis combines compression simulations from Cytosim and Readdy and calculates various metrics to compare the compressed fibers.
Analysis combines compression simulations from Cytosim and Readdy and calculates various compression metrics to compare fibers.

- **Calculate compression metrics** ([source](https://github.com/simularium/subcell-pipeline/blob/main/subcell_pipeline/analysis/compression_metrics/_compare_compression_metrics.py) | [notebook](https://simularium.github.io/subcell-pipeline/_notebooks/analysis/compression_metrics/_compare_compression_metrics.html))
- **Compare compression metrics between simulators** ([source](https://github.com/simularium/subcell-pipeline/blob/main/subcell_pipeline/analysis/compression_metrics/_compare_compression_metrics.py) | [notebook](https://simularium.github.io/subcell-pipeline/_notebooks/analysis/compression_metrics/_compare_compression_metrics.html))
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
# %% [markdown]
# # Compare metrics across simulators
# # Compare compression metrics between simulators

# %% [markdown]
"""
Expand Down Expand Up @@ -39,12 +39,14 @@

Defines the `COMPRESSION_VELOCITY` simulation series, which compresses a single
500 nm actin fiber at four different velocities (4.7, 15, 47, and 150 μm/s) with
five replicates each (random seeds 1, 2, 3, 4, and 5).
five replicates each and the baseline `NO_COMPRESSION` simulation series, which
simulates a single actin fiber with a free barbed end across five replicates.
"""

# %%
# Name of the simulation series
series_name: str = "COMPRESSION_VELOCITY"
compression_series_name: str = "COMPRESSION_VELOCITY"
no_compression_series_name: str = "NO_COMPRESSION"

# S3 bucket Cytosim for input and output files
cytosim_bucket: str = "s3://cytosim-working-bucket"
Expand Down Expand Up @@ -89,39 +91,61 @@
"""

# %%
cytosim_metrics = get_compression_metric_data(
cytosim_metrics_compression = get_compression_metric_data(
bucket=cytosim_bucket,
series_name=series_name,
series_name=compression_series_name,
condition_keys=condition_keys,
random_seeds=random_seeds,
metrics=metrics,
recalculate=recalculate,
)
cytosim_metrics["simulator"] = "cytosim"
cytosim_metrics_compression["simulator"] = "cytosim"

# %%
cytosim_metrics_no_compression = get_compression_metric_data(
bucket=cytosim_bucket,
series_name=no_compression_series_name,
condition_keys=[""],
random_seeds=random_seeds,
metrics=metrics,
recalculate=recalculate,
)
cytosim_metrics_no_compression["simulator"] = "cytosim"

# %% [markdown]
"""
## Calculate metrics for ReaDDy data
"""

# %%
readdy_metrics = get_compression_metric_data(
readdy_metrics_compression = get_compression_metric_data(
bucket=readdy_bucket,
series_name=series_name,
series_name=f"ACTIN_{compression_series_name}",
condition_keys=condition_keys,
random_seeds=random_seeds,
metrics=metrics,
recalculate=recalculate,
)
readdy_metrics["simulator"] = "readdy"
readdy_metrics_compression["simulator"] = "readdy"

# %%
readdy_metrics_no_compression = get_compression_metric_data(
bucket=readdy_bucket,
series_name=f"ACTIN_{no_compression_series_name}",
condition_keys=[""],
random_seeds=random_seeds,
metrics=metrics,
recalculate=recalculate,
)
readdy_metrics_no_compression["simulator"] = "readdy"

# %% [markdown]
"""
## Combine metrics from both simulators
"""

# %%
combined_metrics = pd.concat([cytosim_metrics, readdy_metrics])
combined_metrics = pd.concat([cytosim_metrics_compression, readdy_metrics_compression])
combined_metrics["repeat"] = combined_metrics["seed"] - 1
combined_metrics["velocity"] = combined_metrics["key"].astype("int") / 10

Expand All @@ -135,7 +159,6 @@
combined_metrics, str(save_location), "actin_compression_combined_metrics.csv"
)


# %% [markdown]
"""
## Plot metrics vs time
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -131,7 +131,10 @@ def calculate_compression_metrics(
polymer_trace=polymer_trace, **options
)

return df_metrics.reset_index().rename(columns={"index": "time"})
metrics = df_metrics.reset_index().rename(columns={"index": "time"})
metrics["normalized_time"] = metrics["time"] / metrics["time"].max()

return metrics


def save_compression_metrics(
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -61,6 +61,58 @@ def label(self: Enum) -> str:
}
return labels.get(self.value, "")

def description(self: Enum) -> str:
    """
    Return the description for the compression metric.

    Parameters
    ----------
    self
        the CompressionMetric object

    Returns
    -------
    :
        The description (and units) for the compression metric.
    """
    # Map each metric's underlying value to a short human-readable
    # description (with units where applicable).
    metric_descriptions = {
        CompressionMetric.NON_COPLANARITY.value: "3rd component variance from PCA",
        CompressionMetric.PEAK_ASYMMETRY.value: "normalized peak distance",
        CompressionMetric.SUM_BENDING_ENERGY.value: "sum of bending energy",
        CompressionMetric.AVERAGE_PERP_DISTANCE.value: "distance (nm)",
        CompressionMetric.TOTAL_FIBER_TWIST.value: "total fiber twist",
        CompressionMetric.CALC_BENDING_ENERGY.value: "energy",
        CompressionMetric.CONTOUR_LENGTH.value: "filament contour length (nm)",
        CompressionMetric.COMPRESSION_RATIO.value: "compression ratio",
    }
    # Unknown metrics fall back to an empty description.
    if self.value in metric_descriptions:
        return metric_descriptions[self.value]
    return ""

def bounds(self: Enum) -> tuple[float, float]:
    """
    Return the default bounds for the compression metric.

    Parameters
    ----------
    self
        the CompressionMetric object

    Returns
    -------
    :
        The default bounds for the compression metric.
    """
    # Fallback used for metrics without an explicit entry below.
    default_bounds: tuple[float, float] = (0, 0)
    # Map each metric's underlying value to its default (lower, upper)
    # plotting bounds.
    metric_bounds = {
        CompressionMetric.NON_COPLANARITY.value: (0, 0.03),
        CompressionMetric.PEAK_ASYMMETRY.value: (0, 0.5),
        CompressionMetric.SUM_BENDING_ENERGY.value: (0, 0),  # TODO
        CompressionMetric.AVERAGE_PERP_DISTANCE.value: (0, 85.0),
        CompressionMetric.TOTAL_FIBER_TWIST.value: (0, 0),  # TODO
        CompressionMetric.CALC_BENDING_ENERGY.value: (0, 10),
        CompressionMetric.CONTOUR_LENGTH.value: (480, 505),
        CompressionMetric.COMPRESSION_RATIO.value: (0, 1),  # TODO
    }
    return metric_bounds.get(self.value, default_bounds)

def calculate_metric(
self, polymer_trace: np.ndarray, **options: dict[str, Any]
) -> Union[float, np.floating[Any]]:
Expand Down
24 changes: 16 additions & 8 deletions subcell_pipeline/analysis/compression_metrics/polymer_trace.py
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
"""Methods to calculate metrics from polymer trace data."""

from typing import Any, Dict, Tuple, Union
from typing import Any, Dict, Tuple

import numpy as np
from sklearn.decomposition import PCA
Expand Down Expand Up @@ -61,7 +61,7 @@ def get_end_to_end_axis_distances_and_projections(
def get_average_distance_from_end_to_end_axis(
polymer_trace: np.ndarray,
**options: Dict[str, Any],
) -> Union[float, np.floating[Any]]:
) -> float:
"""
Calculate the average perpendicular distance of polymer trace points from
the end-to-end axis.
Expand Down Expand Up @@ -152,7 +152,7 @@ def get_pca_polymer_trace_projection(
def get_contour_length_from_trace(
polymer_trace: np.ndarray,
**options: Dict[str, Any],
) -> Union[float, np.floating[Any]]:
) -> float:
"""
Calculate the sum of inter-monomer distances in the trace.

Expand All @@ -172,13 +172,13 @@ def get_contour_length_from_trace(
total_distance = np.float_(0)
for i in range(len(polymer_trace) - 1):
total_distance += np.linalg.norm(polymer_trace[i] - polymer_trace[i + 1])
return total_distance
return total_distance.item()


def get_bending_energy_from_trace(
polymer_trace: np.ndarray,
**options: Dict[str, Any],
) -> Union[float, np.floating[Any]]:
) -> float:
"""
Calculate the bending energy per monomer of a polymer trace.

Expand Down Expand Up @@ -207,6 +207,14 @@ def get_bending_energy_from_trace(
vec1 = polymer_trace[ind + 1] - polymer_trace[ind]
vec2 = polymer_trace[ind + 2] - polymer_trace[ind + 1]

if np.isclose(np.linalg.norm(vec1), 0.0) or np.isclose(
np.linalg.norm(vec2), 0.0
):
# TODO handle this differently?
cos_angle[ind] = 0.0
print("Warning: zero vector in bending energy calculation.")
continue

cos_angle[ind] = (
np.dot(vec1, vec2) / np.linalg.norm(vec1) / np.linalg.norm(vec2)
)
Expand All @@ -215,7 +223,7 @@ def get_bending_energy_from_trace(
# the type checker is unable to infer its type
energy = bending_constant * (1 - np.nanmean(cos_angle))

return energy
return energy.item()


def get_total_fiber_twist(
Expand Down Expand Up @@ -454,7 +462,7 @@ def get_sum_bending_energy(
def get_compression_ratio(
polymer_trace: np.ndarray,
**options: Dict[str, Any],
) -> Union[float, np.floating[Any]]:
) -> float:
"""
Calculate the compression ratio of a polymer trace.

Expand All @@ -474,5 +482,5 @@ def get_compression_ratio(
:
The compression ratio of the polymer trace.
"""
end_to_end_axis_length = np.linalg.norm(polymer_trace[-1] - polymer_trace[0])
end_to_end_axis_length = np.linalg.norm(polymer_trace[-1] - polymer_trace[0]).item()
return 1 - end_to_end_axis_length / get_contour_length_from_trace(polymer_trace)
Original file line number Diff line number Diff line change
Expand Up @@ -67,7 +67,9 @@
"""

# %%
readdy_data = get_merged_data(readdy_bucket, series_name, condition_keys, random_seeds)
readdy_data = get_merged_data(
readdy_bucket, f"ACTIN_{series_name}", condition_keys, random_seeds
)
readdy_data["simulator"] = "readdy"

# %%
Expand Down
Loading