Skip to content

Commit 5fa9af7

Browse files
committed
Tools install without fabric for bcbio
Move away from the fabric requirement as a first step towards supporting Python 3 installs. Fabric 2.x has Python 3 support but would require a full migration of our fabric usage. Since we're no longer making use of cross-host capabilities, this transitions to using subprocess and local calls. This is step 1, which provides a target for tool installation without needing fabric.
1 parent e6d7d1e commit 5fa9af7

File tree

4 files changed

+111
-93
lines changed

4 files changed

+111
-93
lines changed

cloudbio/manifest.py

Lines changed: 14 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -66,12 +66,20 @@ def write_custom_pkg_info(out_dir, tooldir):
6666
if not os.path.exists(out_file):
6767
out = {}
6868
for modname in custom_names:
69-
mod = getattr(__import__("cloudbio.custom", globals(), locals(),
70-
[modname], -1),
71-
modname)
72-
for prog in [x for x in dir(mod) if x.startswith("install")]:
73-
pkg = _get_custom_pkg_info(prog, getattr(mod, prog))
74-
out[pkg["name"]] = pkg
69+
try:
70+
mod = getattr(__import__("cloudbio.custom", globals(), locals(),
71+
[modname], -1),
72+
modname)
73+
except ImportError as msg:
74+
# Skip fabric import errors as we transition away from it
75+
if "fabric" in str(msg):
76+
mod = None
77+
else:
78+
raise
79+
if mod:
80+
for prog in [x for x in dir(mod) if x.startswith("install")]:
81+
pkg = _get_custom_pkg_info(prog, getattr(mod, prog))
82+
out[pkg["name"]] = pkg
7583
with open(out_file, "w") as out_handle:
7684
yaml.safe_dump(out, out_handle, default_flow_style=False, allow_unicode=False)
7785
return out_file

cloudbio/package/__init__.py

Lines changed: 11 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -2,20 +2,19 @@
22
"""
33
import os
44

5-
from fabric.api import env, cd
6-
7-
from cloudbio.custom.shared import _make_tmp_dir
8-
from cloudbio.package import brew
9-
from cloudbio.package.deb import (_apt_packages, _add_apt_gpg_keys,
10-
_setup_apt_automation, _setup_apt_sources)
11-
from cloudbio.package.rpm import (_yum_packages, _setup_yum_bashrc,
12-
_setup_yum_sources)
135

146
def _configure_and_install_native_packages(env, pkg_install):
157
"""
168
Setups up native package repositories, determines list
179
of native packages to install, and installs them.
1810
"""
11+
from fabric.api import env
12+
from cloudbio.package import brew
13+
from cloudbio.package.deb import (_apt_packages, _add_apt_gpg_keys,
14+
_setup_apt_automation, _setup_apt_sources)
15+
from cloudbio.package.rpm import (_yum_packages, _setup_yum_bashrc,
16+
_setup_yum_sources)
17+
1918
home_dir = env.safe_run("echo $HOME")
2019
if home_dir:
2120
if env.shell_config.startswith("~"):
@@ -43,6 +42,7 @@ def _connect_native_packages(env, pkg_install, lib_install):
4342
This helps set up a non-sudo environment to handle software
4443
that needs a local version in our non-root directory tree.
4544
"""
45+
from fabric.api import env
4646
bin_dir = os.path.join(env.system_install, "bin")
4747
exports = _get_shell_exports(env)
4848
path = env.safe_run_output("echo $PATH")
@@ -70,6 +70,9 @@ def _print_shell_exports(env):
7070
def _create_local_virtualenv(target_dir):
7171
"""Create virtualenv in target directory for non-sudo installs.
7272
"""
73+
from fabric.api import cd
74+
from cloudbio.custom.shared import _make_tmp_dir
75+
7376
url = "https://raw.github.com/pypa/virtualenv/master/virtualenv.py"
7477
if not os.path.exists(os.path.join(target_dir, "bin", "python")):
7578
with _make_tmp_dir() as work_dir:

cloudbio/package/conda.py

Lines changed: 81 additions & 71 deletions
Original file line numberDiff line numberDiff line change
@@ -7,97 +7,105 @@
77
import subprocess
88
import yaml
99

10-
from distutils.version import LooseVersion
11-
12-
from cloudbio.custom import shared
13-
from cloudbio.fabutils import quiet
14-
from cloudbio.flavor.config import get_config_file
1510
from cloudbio.package.shared import _yaml_to_packages
1611

1712
ENV_PY_VERSIONS = collections.defaultdict(lambda: "python=2")
1813
ENV_PY_VERSIONS["python3"] = "python=3"
1914

2015
def install_packages(env, to_install=None, packages=None):
16+
"""Old installation, based on pre-configured fabric inputs.
17+
"""
18+
from cloudbio.flavor.config import get_config_file
19+
from cloudbio.custom import shared
20+
2121
if shared._is_anaconda(env):
2222
conda_bin = shared._conda_cmd(env)
2323
if hasattr(env, "conda_yaml"):
2424
Config = collections.namedtuple("Config", "base dist")
2525
config_file = Config(base=env.conda_yaml, dist=None)
2626
else:
2727
config_file = get_config_file(env, "packages-conda.yaml")
28-
if config_file.base is None and packages is None:
29-
packages = []
30-
channels = ""
31-
else:
32-
if to_install:
33-
(packages, _) = _yaml_to_packages(config_file.base, to_install, config_file.dist)
34-
with open(config_file.base) as in_handle:
35-
channels = " ".join(["-c %s" % x for x in yaml.safe_load(in_handle).get("channels", [])])
36-
conda_envs = _create_environments(env, conda_bin, packages)
37-
for env_dir in conda_envs.values():
38-
_clean_environment(env_dir)
39-
conda_info = json.loads(env.safe_run_output("{conda_bin} info --json".format(**locals())))
40-
# Uninstall old R packages that clash with updated versions
41-
# Temporary fix to allow upgrades from older versions that have migrated
42-
# r-tximport is now bioconductor-tximport
43-
# py2cairo is incompatible with r 3.4.1
44-
for problem in ["r-tximport", "py2cairo", "libedit"]:
45-
cur_packages = [x["name"] for x in
46-
json.loads(env.safe_run_output("{conda_bin} list --json {problem}".format(**locals())))]
47-
if problem in cur_packages:
48-
env.safe_run("{conda_bin} remove --force -y {problem}".format(**locals()))
49-
# install our customized packages
50-
if len(packages) > 0:
51-
for env_name, env_packages in _split_by_condaenv(packages):
52-
if env_name:
53-
assert env_name in conda_envs, (env_name, conda_envs)
54-
env_str = "-n %s" % env_name
55-
else:
56-
env_str = ""
57-
pkgs_str = " ".join(["'%s'" % x for x in sorted(env_packages)])
58-
py_version = ENV_PY_VERSIONS[env_name]
59-
if "deepvariant" in env_packages:
60-
# Ignore /etc/boto.cfg which creates conflicts with conda gsutils
61-
# https://github.com/GoogleCloudPlatform/gsutil/issues/516
62-
exports = "export BOTO_CONFIG=/ignoreglobal && "
63-
else:
64-
exports = ""
65-
env.safe_run("{exports}{conda_bin} install -y {env_str} {channels} "
66-
"{py_version} {pkgs_str}".format(**locals()))
67-
conda_pkg_list = json.loads(env.safe_run_output(
68-
"{conda_bin} list --json {env_str}".format(**locals())))
69-
for package in env_packages:
70-
_link_bin(package, env, conda_info, conda_bin, conda_pkg_list,
71-
conda_envdir=conda_envs.get(env_name))
72-
conda_pkg_list = json.loads(env.safe_run_output("{conda_bin} list --json".format(**locals())))
73-
for pkg in ["python", "conda", "pip"]:
74-
_link_bin(pkg, env, conda_info, conda_bin, conda_pkg_list, files=[pkg], prefix="bcbio_")
75-
76-
def _link_bin(package, env, conda_info, conda_bin, conda_pkg_list, files=None, prefix="", conda_env=None,
77-
conda_envdir=None):
28+
install_in(conda_bin, env.system_install, config_file.base, packages)
29+
30+
def install_in(conda_bin, system_installdir, config_file=None, packages=None):
31+
"""Install packages inside a given anaconda directory.
32+
33+
New approach, local only and not dependent on fabric.
34+
"""
35+
if config_file is None and packages is None:
36+
packages = []
37+
channels = ""
38+
else:
39+
(packages, _) = _yaml_to_packages(config_file)
40+
with open(config_file) as in_handle:
41+
channels = " ".join(["-c %s" % x for x in yaml.safe_load(in_handle).get("channels", [])])
42+
conda_envs = _create_environments(conda_bin, packages)
43+
for env_dir in conda_envs.values():
44+
_clean_environment(env_dir)
45+
conda_info = json.loads(subprocess.check_output("{conda_bin} info --json".format(**locals()), shell=True))
46+
# Uninstall old R packages that clash with updated versions
47+
# Temporary fix to allow upgrades from older versions that have migrated
48+
# r-tximport is now bioconductor-tximport
49+
# py2cairo is incompatible with r 3.4.1
50+
problems = ["r-tximport", "py2cairo", "libedit"]
51+
if problems:
52+
print("Checking for problematic packages: %s" % ", ".join(problems))
53+
cur_packages = [x["name"] for x in
54+
json.loads(subprocess.check_output("%s list --json '%s'" % (conda_bin, "|".join(problems)),
55+
shell=True)) if x["name"] in problems]
56+
for problem in cur_packages:
57+
subprocess.check_call("{conda_bin} remove --force -y {problem}".format(**locals()), shell=True)
58+
# install our customized packages
59+
if len(packages) > 0:
60+
for env_name, env_packages in _split_by_condaenv(packages):
61+
print("# Installing into conda environment %s: %s" % (env_name or "default", ", ".join(env_packages)))
62+
if env_name:
63+
assert env_name in conda_envs, (env_name, conda_envs)
64+
env_str = "-n %s" % env_name
65+
else:
66+
env_str = ""
67+
pkgs_str = " ".join(["'%s'" % x for x in sorted(env_packages)])
68+
py_version = ENV_PY_VERSIONS[env_name]
69+
if "deepvariant" in env_packages:
70+
# Ignore /etc/boto.cfg which creates conflicts with conda gsutils
71+
# https://github.com/GoogleCloudPlatform/gsutil/issues/516
72+
exports = "export BOTO_CONFIG=/ignoreglobal && "
73+
else:
74+
exports = ""
75+
subprocess.check_call("{exports}{conda_bin} install -y {env_str} {channels} "
76+
"{py_version} {pkgs_str}".format(**locals()), shell=True)
77+
conda_pkg_list = json.loads(subprocess.check_output(
78+
"{conda_bin} list --json {env_str}".format(**locals()), shell=True))
79+
for package in env_packages:
80+
_link_bin(package, system_installdir, conda_info, conda_bin, conda_pkg_list,
81+
conda_envdir=conda_envs.get(env_name))
82+
conda_pkg_list = json.loads(subprocess.check_output("{conda_bin} list --json".format(**locals()), shell=True))
83+
for pkg in ["python", "conda", "pip"]:
84+
_link_bin(pkg, system_installdir, conda_info, conda_bin, conda_pkg_list, files=[pkg], prefix="bcbio_")
85+
86+
def _link_bin(package, system_installdir, conda_info, conda_bin, conda_pkg_list, files=None,
87+
prefix="", conda_env=None, conda_envdir=None):
7888
"""Link files installed in the bin directory into the install directory.
7989
8090
This is imperfect but we're trying not to require injecting everything in the anaconda
8191
directory into a user's path.
8292
"""
8393
package = package.split("=")[0]
84-
final_bindir = os.path.join(env.system_install, "bin")
94+
final_bindir = os.path.join(system_installdir, "bin")
8595
if conda_envdir:
8696
base_bindir = os.path.join(conda_envdir, "bin")
8797
else:
8898
base_bindir = os.path.dirname(conda_bin)
8999
# resolve any symlinks in the final and base hierarchies
90-
with quiet():
91-
final_bindir = env.safe_run_output("cd %s && pwd -P" % final_bindir)
92-
base_bindir = env.safe_run_output("cd %s && pwd -P" % base_bindir)
100+
final_bindir = subprocess.check_output("cd %s && pwd -P" % final_bindir, shell=True)
101+
base_bindir = subprocess.check_output("cd %s && pwd -P" % base_bindir, shell=True)
93102
for pkg_subdir in [x for x in conda_pkg_list if x["name"] == package]:
94103
pkg_subdir = pkg_subdir["dist_name"].split("::")[-1]
95104
for pkg_dir in conda_info["pkgs_dirs"]:
96105
pkg_bindir = os.path.join(pkg_dir, pkg_subdir, "bin")
97-
if env.safe_exists(pkg_bindir):
106+
if os.path.exists(pkg_bindir):
98107
if not files:
99-
with quiet():
100-
files = env.safe_run_output("ls -1 {pkg_bindir}".format(**locals())).split()
108+
files = subprocess.check_output("ls -1 {pkg_bindir}".format(**locals()), shell=True).split()
101109
for fname in files:
102110
# symlink to the original file in the /anaconda/bin directory
103111
# this could be a hard or soft link
@@ -139,13 +147,13 @@ def _split_by_condaenv(packages):
139147
if k == "env":
140148
condaenv = v
141149
out[condaenv].append(name)
142-
return dict(out).items()
150+
return sorted(dict(out).items())
143151

144-
def _get_conda_envs(env, conda_bin):
145-
info = json.loads(env.safe_run_output("{conda_bin} info --envs --json".format(**locals())))
152+
def _get_conda_envs(conda_bin):
153+
info = json.loads(subprocess.check_output("{conda_bin} info --envs --json".format(**locals()), shell=True))
146154
return [e for e in info["envs"] if e.startswith(info["conda_prefix"])]
147155

148-
def _create_environments(env, conda_bin, packages):
156+
def _create_environments(conda_bin, packages):
149157
"""Creates custom local environments that conflict with global dependencies.
150158
151159
Available environments:
@@ -157,17 +165,19 @@ def _create_environments(env, conda_bin, packages):
157165
"""
158166
env_names = set([e for e, ps in _split_by_condaenv(packages) if e])
159167
out = {}
160-
conda_envs = _get_conda_envs(env, conda_bin)
168+
conda_envs = _get_conda_envs(conda_bin)
161169
if "python3" in env_names:
162170
if not any(x.endswith("/python3") for x in conda_envs):
163-
env.safe_run("{conda_bin} create --no-default-packages -y --name python3 python=3".format(**locals()))
164-
conda_envs = _get_conda_envs(env, conda_bin)
171+
subprocess.check_call("{conda_bin} create --no-default-packages -y --name python3 python=3"
172+
.format(**locals()), shell=True)
173+
conda_envs = _get_conda_envs(conda_bin)
165174
out["python3"] = [x for x in conda_envs if x.endswith("/python3")][0]
166175
for addenv in ["samtools0", "dv"]:
167176
if addenv in env_names:
168177
if not any(x.endswith("/%s" % addenv) for x in conda_envs):
169-
env.safe_run("{conda_bin} create --no-default-packages -y --name {addenv} python=2".format(**locals()))
170-
conda_envs = _get_conda_envs(env, conda_bin)
178+
subprocess.check_call("{conda_bin} create --no-default-packages -y --name {addenv} python=2"
179+
.format(**locals()), shell=True)
180+
conda_envs = _get_conda_envs(conda_bin)
171181
out[addenv] = [x for x in conda_envs if x.endswith("/%s" % addenv)][0]
172182
return out
173183

cloudbio/package/shared.py

Lines changed: 5 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -1,13 +1,11 @@
11
"""Shared functionality useful for multiple package managers.
22
"""
33
import yaml
4-
from fabric.api import *
5-
from fabric.contrib.files import *
64

7-
def _yaml_to_packages(yaml_file, to_install=None, subs_yaml_file=None, namesort=True):
5+
def _yaml_to_packages(yaml_file, to_install=None, subs_yaml_file=None, namesort=True, env=None):
86
"""Read a list of packages from a nested YAML configuration file.
97
"""
10-
env.logger.info("Reading %s" % yaml_file)
8+
print("Reading packages from %s" % yaml_file)
119
with open(yaml_file) as in_handle:
1210
full_data = yaml.load(in_handle)
1311
if full_data is None:
@@ -19,7 +17,7 @@ def _yaml_to_packages(yaml_file, to_install=None, subs_yaml_file=None, namesort=
1917
subs = {}
2018
# filter the data based on what we have configured to install
2119
data = [(k, v) for (k, v) in full_data.iteritems()
22-
if to_install is None or k in to_install]
20+
if (to_install is None or k in to_install) and k not in ["channels"]]
2321
data.sort()
2422
packages = []
2523
pkg_to_group = dict()
@@ -33,17 +31,16 @@ def _yaml_to_packages(yaml_file, to_install=None, subs_yaml_file=None, namesort=
3331
elif isinstance(cur_info, dict):
3432
for key, val in cur_info.iteritems():
3533
# if we are okay, propagate with the top level key
36-
if key == 'needs_64bit':
34+
if env and key == 'needs_64bit':
3735
if env.is_64bit:
3836
data.insert(0, (cur_key, val))
39-
elif key.startswith(env.distribution):
37+
elif env and key.startswith(env.distribution):
4038
if key.endswith(env.dist_name):
4139
data.insert(0, (cur_key, val))
4240
else:
4341
data.insert(0, (cur_key, val))
4442
else:
4543
raise ValueError(cur_info)
46-
env.logger.debug("Packages to install: {0}".format(",".join(packages)))
4744
return packages, pkg_to_group
4845

4946
def _filter_subs_packages(initial, subs, namesort=True):

0 commit comments

Comments
 (0)