
Add Sage 2.2.1 for lipids v1.0 dataset benchmarking #35

Open · wants to merge 5 commits into master
6 changes: 3 additions & 3 deletions .github/workflows/opt.yaml
@@ -146,7 +146,7 @@ jobs:

 input_file=${{ inputs.path }} # path to the input YAML file
 input_dir=$(dirname $input_file) # parent directory of input YAML file
-git add $input_dir/output/{dde,icrmsd,rmsd,tfd}.csv
+git add $input_dir/output/{dde,rmsd,tfd}.csv
 git add $input_dir/output/{dde,rmsd,rmsd_cdf,tfd,tfd_cdf,bonds,angles,dihedrals,impropers}.png
 git commit -m "Add benchmark results"
 git push
@@ -160,7 +160,7 @@ jobs:
 micromamba env export > env.yaml
 input_files=$(python get_files.py ${{ inputs.path }})
 tar cf results.tar \
-tmp.sqlite.bz2 $input_dir/output/{dde,icrmsd,rmsd,tfd}.csv \
+tmp.sqlite.bz2 $input_dir/output/{dde,rmsd,tfd}.csv \
 $input_dir/output/{dde,rmsd,rmsd_cdf,tfd,tfd_cdf,bonds,angles,dihedrals,impropers}.png \
 env.yaml main.py $input_files

@@ -179,7 +179,7 @@ jobs:
 run: |
 input_files=$(python get_files.py ${{ inputs.path }})
 python zenodo_upload.py --title "YDS Upload ${{ inputs.pr_number }}" \
-tmp.sqlite.bz2 $input_dir/output/{dde,icrmsd,rmsd,tfd}.csv \
+tmp.sqlite.bz2 $input_dir/output/{dde,rmsd,tfd}.csv \
 $input_dir/output/{dde,rmsd,rmsd_cdf,tfd,tfd_cdf,bonds,angles,dihedrals,impropers}.png \
 env.yaml main.py $input_files

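All three artifact lists in the workflow (the `git add` commit, the `results.tar` bundle, and the Zenodo upload) drop `icrmsd.csv`, while the internal-coordinate PNGs (`bonds`, `angles`, `dihedrals`, `impropers`) stay listed. A minimal Python sketch of the resulting artifact set, for anyone mirroring the workflow locally; the helper name and structure here are illustrative, not part of the repo:

```python
# Hypothetical helper mirroring the artifact lists in opt.yaml after this change.
from pathlib import Path

CSVS = ["dde", "rmsd", "tfd"]  # "icrmsd" removed in this PR
PNGS = ["dde", "rmsd", "rmsd_cdf", "tfd", "tfd_cdf",
        "bonds", "angles", "dihedrals", "impropers"]

def expected_outputs(input_dir: str) -> list[Path]:
    """Return every file the workflow commits, tars, and uploads."""
    out = Path(input_dir) / "output"
    return [out / f"{name}.csv" for name in CSVS] + [
        out / f"{name}.png" for name in PNGS
    ]
```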
8 changes: 4 additions & 4 deletions main.py
@@ -25,10 +25,10 @@ def make_csvs(store, forcefield, out_dir):
 store.get_rmsd(forcefield, skip_check=True).to_csv(f"{out_dir}/rmsd.csv")
 print("getting TFDs")
 store.get_tfd(forcefield, skip_check=True).to_csv(f"{out_dir}/tfd.csv")
-print("getting internal coordinate RMSDs")
-store.get_internal_coordinate_rmsd(forcefield, skip_check=True).to_csv(
-    f"{out_dir}/icrmsd.csv"
-)
+# print("getting internal coordinate RMSDs")
+# store.get_internal_coordinate_rmsd(forcefield, skip_check=True).to_csv(
+#     f"{out_dir}/icrmsd.csv"
+# )


 def _main(forcefield, dataset, sqlite_file, out_dir, procs, invalidate_cache):
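Rather than deleting the internal-coordinate RMSD export, the PR comments it out, so `icrmsd.csv` is simply never written for this run. A hedged alternative sketch (not what the PR does): gate the export behind an opt-in parameter, so datasets that still want `icrmsd.csv` can request it. The `include_icrmsd` flag is hypothetical; `store` and its methods are the ones already used in main.py:

```python
def make_csvs(store, forcefield, out_dir, include_icrmsd=False):
    # dde/rmsd/tfd exports unchanged from the function above
    ...
    if include_icrmsd:
        print("getting internal coordinate RMSDs")
        store.get_internal_coordinate_rmsd(forcefield, skip_check=True).to_csv(
            f"{out_dir}/icrmsd.csv"
        )
```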
9 changes: 5 additions & 4 deletions plot.py
@@ -30,9 +30,10 @@ def load_bench(d: Path) -> pandas.DataFrame:
 rmsd.columns = ["rec_id", "rmsd"]
 tfd = pandas.read_csv(d / "output" / "tfd.csv")
 tfd.columns = ["rec_id", "tfd"]
-icrmsd = pandas.read_csv(d / "output" / "icrmsd.csv")
-icrmsd.columns = ["rec_id", "bonds", "angles", "dihedrals", "impropers"]
-ret = dde.merge(rmsd).pipe(DF.merge, tfd).pipe(DF.merge, icrmsd)
+# icrmsd = pandas.read_csv(d / "output" / "icrmsd.csv")
+# icrmsd.columns = ["rec_id", "bonds", "angles", "dihedrals", "impropers"]
+# ret = dde.merge(rmsd).pipe(DF.merge, tfd).pipe(DF.merge, icrmsd)
+ret = dde.merge(rmsd).pipe(DF.merge, tfd)
 print(f"loaded {ret.shape} rows for {d}")
 return ret
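Since lipid runs no longer produce `icrmsd.csv`, `load_bench` skips the merge entirely. A hedged sketch of an alternative that would let one loader handle both old result directories (with the file) and new ones (without it); the helper name is hypothetical, but the column names match the commented-out code above:

```python
from pathlib import Path
import pandas

def maybe_merge_icrmsd(ret: pandas.DataFrame, d: Path) -> pandas.DataFrame:
    """Merge internal-coordinate RMSDs only when icrmsd.csv was produced."""
    icrmsd_path = d / "output" / "icrmsd.csv"
    if not icrmsd_path.exists():
        return ret  # lipid benchmarks no longer write this file
    icrmsd = pandas.read_csv(icrmsd_path)
    icrmsd.columns = ["rec_id", "bonds", "angles", "dihedrals", "impropers"]
    return ret.merge(icrmsd)
```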

@@ -148,7 +149,7 @@ def plot(ffs, out_dir: str):
 plot_ddes(dfs, names, out_dir)
 plot_rmsds(dfs, names, out_dir)
 plot_tfds(dfs, names, out_dir)
-plot_icrmsds(dfs, names, out_dir)
+# plot_icrmsds(dfs, names, out_dir)


 @click.command()
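The matching change in `plot`: with no internal-coordinate columns in the merged frames, `plot_icrmsds` is commented out. A guard keyed on the columns from `load_bench` would be the hedged alternative; this fragment assumes the `dfs`, `names`, and `out_dir` variables already in scope in `plot`:

```python
# Sketch: skip the internal-coordinate panels when the columns are absent.
ic_cols = ("bonds", "angles", "dihedrals", "impropers")
if all(col in df.columns for df in dfs for col in ic_cols):
    plot_icrmsds(dfs, names, out_dir)
```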
3 changes: 3 additions & 0 deletions submissions/2025-02-04-Sage-2.2.1/input.yaml
@@ -0,0 +1,3 @@
+forcefield: openff_unconstrained-2.2.1.offxml
+datasets:
+  - datasets/OpenFF-Lipid-Optimization-Training-Supplement-v1.0/cache.json
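The new submission file follows the existing layout: one force field plus a list of cached datasets, which the workflow passes to `main.py` via `get_files.py`. A minimal sketch of reading it, assuming PyYAML (the repo's actual loader may differ):

```python
import yaml  # assumes PyYAML is available

with open("submissions/2025-02-04-Sage-2.2.1/input.yaml") as f:
    cfg = yaml.safe_load(f)

print(cfg["forcefield"])  # openff_unconstrained-2.2.1.offxml
for dataset in cfg["datasets"]:
    print(dataset)  # datasets/OpenFF-Lipid-Optimization-Training-Supplement-v1.0/cache.json
```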