-
Notifications
You must be signed in to change notification settings - Fork 134
Expand file tree
/
Copy pathbuild.py
More file actions
420 lines (339 loc) · 13.4 KB
/
build.py
File metadata and controls
420 lines (339 loc) · 13.4 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
import subprocess as sp
from collections import defaultdict, namedtuple
import os
import logging
import networkx as nx
from . import utils
from . import docker_utils
from . import pkg_test
from . import upload
# Module-level logger, namespaced to this module per logging convention.
logger = logging.getLogger(__name__)
# Outcome of building (and optionally mulled-testing) one recipe:
# `success` is a bool; `mulled_image` is the docker image name returned by
# pkg_test.get_image_name() when the mulled test ran, otherwise None.
BuildResult = namedtuple("BuildResult", ["success", "mulled_image"])
def purge(min_free_mb=10):
    """
    Purge conda-build intermediate work directories and, if disk space is
    still low afterwards, clean the entire conda package cache.

    Parameters
    ----------
    min_free_mb : int
        If the free disk space reported by ``utils.get_free_space()`` (in MB)
        is below this value after the purge, additionally run
        ``conda clean --all``. Defaults to 10, preserving the previously
        hard-coded threshold.
    """
    utils.run(["conda", "build", "purge"])
    free = utils.get_free_space()
    if free < min_free_mb:
        logger.info("CLEANING UP PACKAGE CACHE (free space: %iMB).", free)
        utils.run(["conda", "clean", "--all"])
        logger.info("CLEANED UP PACKAGE CACHE (free space: %iMB).",
                    utils.get_free_space())
def build(
    recipe,
    recipe_folder,
    env,
    testonly=False,
    mulled_test=True,
    force=False,
    channels=None,
    docker_builder=None,
    mulled_upload_target='biocontainers',
):
    """
    Build a single recipe for a single env.

    Parameters
    ----------
    recipe : str
        Path to recipe

    recipe_folder : str
        Root folder containing recipes (accepted for interface compatibility
        with callers; not used directly in this function).

    env : dict
        Environment (typically a single yielded dictionary from EnvMatrix
        instance)

    testonly : bool
        If True, skip building and instead run the test described in the
        meta.yaml.

    mulled_test : bool
        Test the built package in a minimal docker container

    force : bool
        If True, the recipe will be built even if it already exists. Note that
        typically you'd want to bump the build number rather than force
        a build.

    channels : list
        Channels to include via the `--channel` argument to conda-build. Higher
        priority channels should come first.

    docker_builder : docker_utils.RecipeBuilder object
        Use this docker builder to build the recipe, copying over the built
        recipe to the host's conda-bld directory.

    mulled_upload_target : str
        Namespace for the docker container (default ``'biocontainers'``).

    Returns
    -------
    BuildResult
        ``success`` is True if the build (and mulled test, when requested)
        succeeded; ``mulled_image`` is the image name, or None when no mulled
        test was run or on failure.
    """
    # Clean provided env and existing os.environ to only allow whitelisted env
    # vars
    _docker = docker_builder is not None
    _env = {}
    _env.update({k: str(v) for k, v in os.environ.items()
                 if utils.allowed_env_var(k, _docker)})
    _env.update({k: str(v) for k, v in dict(env).items()
                 if utils.allowed_env_var(k, _docker)})

    logger.info(
        "BUILD START %s, env: %s",
        recipe, ';'.join('='.join(map(str, i)) for i in sorted(_env.items()))
    )

    build_args = []
    if testonly:
        build_args.append("--test")
    else:
        build_args.append("--no-anaconda-upload")

    channel_args = []
    if channels:
        for channel in channels:
            channel_args += ['--channel', channel]

    logger.debug('build_args: %s', build_args)
    logger.debug('channel_args: %s', channel_args)

    CONDA_BUILD_CMD = [utils.bin_for('conda'), 'build']

    pkg_path = utils.built_package_path(recipe, _env)
    meta = utils.load_meta(recipe, _env)
    try:
        # Note we're not sending the contents of os.environ here. But we do
        # want to add TRAVIS* vars if that behavior is not disabled.
        if docker_builder is not None:
            docker_builder.build_recipe(
                recipe_dir=os.path.abspath(recipe),
                build_args=' '.join(channel_args + build_args),
                pkg=os.path.basename(pkg_path),
                env=_env,
                noarch=bool(utils.get_meta_value(meta, 'build', 'noarch'))
            )
            # The docker builder copies the package back to the host; if it
            # isn't there, the build effectively failed even without raising.
            if not os.path.exists(pkg_path):
                logger.error(
                    "BUILD FAILED: the built package %s "
                    "cannot be found", pkg_path)
                return BuildResult(False, None)
        else:
            # Temporarily reset os.environ to avoid leaking env vars to
            # conda-build, and explicitly provide `env` to `run()`.
            # We explicitly point to the meta.yaml in order to keep
            # conda-build from building all subdirectories.
            with utils.sandboxed_env(_env):
                cmd = CONDA_BUILD_CMD + build_args + channel_args + \
                    [os.path.join(recipe, 'meta.yaml')]
                logger.debug('command: %s', cmd)
                with utils.Progress():
                    utils.run(cmd, env=os.environ)

        logger.info(
            'BUILD SUCCESS %s, %s',
            utils.built_package_path(recipe, _env), utils.envstr(_env)
        )

    except (docker_utils.DockerCalledProcessError, sp.CalledProcessError):
        # Log the sanitized env for consistency with the other messages
        # (previously this one logged the raw `env`).
        logger.error('BUILD FAILED %s, %s', recipe, utils.envstr(_env))
        return BuildResult(False, None)

    if not mulled_test:
        return BuildResult(True, None)

    logger.info(
        'TEST START via mulled-build %s, %s',
        recipe, utils.envstr(_env))

    # Recipes can opt into an extended base image via
    # extra:container:extended-base in their meta.yaml.
    use_base_image = utils.get_meta_value(
        meta,
        'extra', 'container', 'extended-base')
    base_image = 'bioconda/extended-base-image' if use_base_image else None

    try:
        test_kwargs = dict(
            base_image=base_image,
            mulled_upload_target=mulled_upload_target,
        )
        # Only forward `channels` when explicitly provided, so that
        # pkg_test.test_package's own default stays in effect otherwise.
        if channels:
            test_kwargs['channels'] = channels
        pkg_test.test_package(pkg_path, **test_kwargs)

        logger.info("TEST SUCCESS %s, %s", recipe, utils.envstr(_env))
        mulled_image = pkg_test.get_image_name(pkg_path)
        return BuildResult(True, mulled_image)
    except sp.CalledProcessError:
        logger.error('TEST FAILED: %s, %s', recipe, utils.envstr(_env))
        return BuildResult(False, None)
def build_recipes(
    recipe_folder,
    config,
    packages="*",
    mulled_test=True,
    testonly=False,
    force=False,
    docker_builder=None,
    label=None,
    anaconda_upload=False,
    mulled_upload_target='biocontainers',
    check_channels=None,
):
    """
    Build one or many bioconda packages.

    Parameters
    ----------
    recipe_folder : str
        Directory containing possibly many, and possibly nested, recipes.

    config : str or dict
        If string, path to config file; if dict then assume it's an
        already-parsed config file.

    packages : str
        Glob indicating which packages should be considered. Note that packages
        matching the glob will still be filtered out by any blacklists
        specified in the config.

    mulled_test : bool
        If True, then test the package in a minimal container.

    testonly : bool
        If True, only run test.

    force : bool
        If True, build the recipe even though it would otherwise be filtered
        out.

    docker_builder : docker_utils.RecipeBuilder instance
        If not None, then use this RecipeBuilder to build all recipes.

    label : str
        Optional label to use when uploading packages. Useful for testing and
        debugging. Default is to use the "main" label.

    anaconda_upload : bool
        If True, upload the package to anaconda.org.

    mulled_upload_target : str
        If not None, upload the mulled docker image to the given target on
        quay.io (default ``'biocontainers'``).

    check_channels : list
        Channels to check to see if packages already exist in them. If None,
        then defaults to the highest-priority channel (that is,
        `config['channels'][0]`). If this list is empty, then do not check any
        channels.

    Returns
    -------
    bool
        True when every selected recipe built (and uploaded) successfully,
        False otherwise.
    """
    orig_config = config
    config = utils.load_config(config)
    env_matrix = utils.EnvMatrix(config['env_matrix'])
    blacklist = utils.get_blacklist(config['blacklists'], recipe_folder)

    if check_channels is None:
        if config['channels']:
            check_channels = [config['channels'][0]]
        else:
            check_channels = []

    logger.info('blacklist: %s', ', '.join(sorted(blacklist)))

    if packages == "*":
        packages = ["*"]
    recipes = []
    for package in packages:
        for recipe in utils.get_recipes(recipe_folder, package):
            if os.path.relpath(recipe, recipe_folder) in blacklist:
                logger.debug('blacklisted: %s', recipe)
                continue
            recipes.append(recipe)
            logger.debug(recipe)
    if not recipes:
        logger.info("Nothing to be done.")
        return True

    logger.debug('recipes: %s', recipes)
    logger.info('Filtering recipes')
    recipe_targets = dict(
        utils.filter_recipes(
            recipes, env_matrix, check_channels, force=force)
    )
    recipes = set(recipe_targets)

    dag, name2recipes = utils.get_dag(recipes, config=orig_config,
                                      blacklist=blacklist)
    # Invert name2recipes so we can look up the package name for a recipe.
    recipe2name = {}
    for name, recipe_list in name2recipes.items():
        for rec in recipe_list:
            recipe2name[rec] = name

    if not dag:
        logger.info("Nothing to be done.")
        return True
    else:
        logger.info("Building and testing %s recipes in total", len(dag))
        logger.info("Recipes to build: \n%s", "\n".join(dag.nodes()))

    subdags_n = int(os.environ.get("SUBDAGS", 1))
    subdag_i = int(os.environ.get("SUBDAG", 0))

    if subdag_i >= subdags_n:
        # Apply the format arguments; previously the placeholders were
        # left unformatted in the raised message.
        raise ValueError(
            "SUBDAG=%s (zero-based) but only SUBDAGS=%s "
            "subdags are available" % (subdag_i, subdags_n))

    # Get connected subdags and sort by nodes
    if testonly:
        # use each node as a subdag (they are grouped into equal sizes below)
        subdags = sorted([[n] for n in nx.nodes(dag)])
    else:
        # take connected components as subdags
        subdags = sorted(map(sorted, nx.connected_components(
            dag.to_undirected())))
    # chunk subdags such that we have at most subdags_n many
    if subdags_n < len(subdags):
        chunks = [[n for subdag in subdags[i::subdags_n] for n in subdag]
                  for i in range(subdags_n)]
    else:
        chunks = subdags
    if subdag_i >= len(chunks):
        logger.info("Nothing to be done.")
        return True
    # merge subdags of the selected chunk
    subdag = dag.subgraph(chunks[subdag_i])

    # ensure that packages which need a build are built in the right order
    recipes = [recipe
               for package in nx.topological_sort(subdag)
               for recipe in name2recipes[package]]

    logger.info(
        "Building and testing subdag %s of %s (%s recipes)",
        subdag_i + 1, subdags_n, len(recipes)
    )

    failed = []
    built_recipes = []
    skipped_recipes = []
    all_success = True
    failed_uploads = []
    skip_dependent = defaultdict(list)

    for recipe in recipes:
        recipe_success = True
        name = recipe2name[recipe]

        if name in skip_dependent:
            logger.info(
                'BUILD SKIP: '
                'skipping %s because it depends on %s '
                'which had a failed build.',
                recipe, skip_dependent[name])
            skipped_recipes.append(recipe)
            continue

        for target in recipe_targets[recipe]:
            res = build(
                recipe=recipe,
                recipe_folder=recipe_folder,
                env=target.env,
                testonly=testonly,
                mulled_test=mulled_test,
                force=force,
                channels=config['channels'],
                docker_builder=docker_builder,
                mulled_upload_target=mulled_upload_target,
            )

            all_success &= res.success
            recipe_success &= res.success

            if not res.success:
                failed.append((recipe, target))
                # Anything downstream of a failed package cannot be built.
                for n in nx.algorithms.descendants(subdag, name):
                    skip_dependent[n].append(recipe)
            elif not testonly:
                # upload build
                if anaconda_upload:
                    if not upload.anaconda_upload(target.pkg, label):
                        failed_uploads.append(target.pkg)
                if mulled_upload_target:
                    upload.mulled_upload(res.mulled_image, mulled_upload_target)

            # remove traces of the build
            purge()

        if recipe_success:
            built_recipes.append(recipe)

    if failed or failed_uploads:
        failed_recipes = set(i[0] for i in failed)
        logger.error(
            'BUILD SUMMARY: of %s recipes, '
            '%s failed and %s were skipped. '
            'Details of recipes and environments follow.',
            len(recipes), len(failed_recipes), len(skipped_recipes))

        if len(built_recipes) > 0:
            logger.error(
                'BUILD SUMMARY: while the entire build failed, '
                'the following recipes were built successfully:\n%s',
                '\n'.join(built_recipes))

        for recipe, target in failed:
            # Log the recipe for the "recipe" placeholder (previously
            # str(target) was passed for both placeholders).
            logger.error(
                'BUILD SUMMARY: FAILED recipe %s, environment %s',
                recipe, target.envstring())

        for name, dep in skip_dependent.items():
            logger.error(
                'BUILD SUMMARY: SKIPPED recipe %s '
                'due to failed dependencies %s', name, dep)

        if failed_uploads:
            logger.error(
                'UPLOAD SUMMARY: the following packages failed to upload:\n%s',
                '\n'.join(failed_uploads))

        return False

    logger.info(
        "BUILD SUMMARY: successfully built %s of %s recipes",
        len(built_recipes), len(recipes))

    return all_success