Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

refactor: correct Pylint alerts as part of intervention experiment 298 #1022

Open
wants to merge 10 commits into
base: master
Choose a base branch
from
2 changes: 1 addition & 1 deletion bioconda_utils/artifacts.py
Original file line number Diff line number Diff line change
Expand Up @@ -234,7 +234,7 @@ def parse_gha_build_id(url: str) -> str:

def get_gha_artifacts(check_run, platform, repo):
gha_workflow_id = parse_gha_build_id(check_run.details_url)
if (gha_workflow_id) :
if gha_workflow_id:
# The workflow run is different from the check run
run = repo.get_workflow_run(int(gha_workflow_id))
artifacts = run.get_artifacts()
Expand Down
54 changes: 29 additions & 25 deletions bioconda_utils/autobump.py
Original file line number Diff line number Diff line change
Expand Up @@ -1163,31 +1163,7 @@ async def apply(self, recipe: Recipe) -> None:
# check if we already have an open PR (=> update in progress)
pullreqs = await self.ghub.get_prs(from_branch=branch_name, from_user="bioconda")
if pullreqs:
if len(pullreqs) > 1:
logger.error("Multiple PRs updating %s: %s",
recipe,
", ".join(str(pull['number']) for pull in pullreqs))
for pull in pullreqs:
logger.debug("Found PR %i updating %s: %s",
pull["number"], recipe, pull["title"])
# update the PR if title or body changed
pull = pullreqs[0]
if body == pull["body"]:
body = None
if title == pull["title"]:
title = None
if not (body is None and title is None):
if await self.ghub.modify_issue(number=pull['number'], body=body, title=title):
logger.info("Updated PR %i updating %s to %s",
pull['number'], recipe, recipe.version)
else:
logger.error("Failed to update PR %i with title=%s and body=%s",
pull['number'], title, body)
else:
logger.debug("Not updating PR %i updating %s - no changes",
pull['number'], recipe)

raise self.UpdateInProgress(recipe)
title, body = await self._handle_open_PRs(recipe, title, body, pullreqs)

# check for matching closed PR (=> update rejected)
pullreqs = await self.ghub.get_prs(from_branch=branch_name, state=self.ghub.STATE.closed)
Expand All @@ -1206,6 +1182,34 @@ async def apply(self, recipe: Recipe) -> None:

logger.info("Created PR %i: %s", pull['number'], title)

async def _handle_open_PRs(self, recipe, title, body, pullreqs):
if len(pullreqs) > 1:
logger.error("Multiple PRs updating %s: %s",
recipe,
", ".join(str(pull['number']) for pull in pullreqs))
for pull in pullreqs:
logger.debug("Found PR %i updating %s: %s",
pull["number"], recipe, pull["title"])
# update the PR if title or body changed
pull = pullreqs[0]
if body == pull["body"]:
body = None
if title == pull["title"]:
title = None
if not (body is None and title is None):
if await self.ghub.modify_issue(number=pull['number'], body=body, title=title):
logger.info("Updated PR %i updating %s to %s",
pull['number'], recipe, recipe.version)
else:
logger.error("Failed to update PR %i with title=%s and body=%s",
pull['number'], title, body)
else:
logger.debug("Not updating PR %i updating %s - no changes",
pull['number'], recipe)

raise self.UpdateInProgress(recipe)
return title,body


class MaxUpdates(Filter):
"""Terminate pipeline after **max_updates** recipes have been updated."""
Expand Down
2 changes: 1 addition & 1 deletion bioconda_utils/bioconductor_skeleton.py
Original file line number Diff line number Diff line change
Expand Up @@ -775,7 +775,7 @@ def dependencies(self):

# Check SystemRequirements in the DESCRIPTION file to make sure
# packages with such requirements are provided correct recipes.
if (self.packages[self.package].get('SystemRequirements') is not None):
if self.packages[self.package].get('SystemRequirements') is not None:
logger.warning(
"The 'SystemRequirements' {} are needed".format(
self.packages[self.package].get('SystemRequirements')) +
Expand Down
109 changes: 67 additions & 42 deletions bioconda_utils/build.py
Original file line number Diff line number Diff line change
Expand Up @@ -106,14 +106,7 @@ def build(recipe: str, pkg_paths: List[str] = None,

logger.info("BUILD START %s", recipe)

args = ['--override-channels']
if testonly:
args += ["--test"]
else:
args += ["--no-anaconda-upload"]

for channel in channels or ['local']:
args += ['-c', channel]
args = _build_args(testonly, channels)

logger.debug('Build and Channel Args: %s', args)

Expand Down Expand Up @@ -155,17 +148,7 @@ def build(recipe: str, pkg_paths: List[str] = None,
"cannot be found", pkg_path)
return BuildResult(False, None)
else:
conda_build_cmd = [utils.bin_for('conda-build')]
# - Temporarily reset os.environ to avoid leaking env vars
# - Also pass filtered env to run()
# - Point conda-build to meta.yaml, to avoid building subdirs
with utils.sandboxed_env(whitelisted_env):
cmd = conda_build_cmd + args
for config_file in utils.get_conda_build_config_files():
cmd += [config_file.arg, config_file.path]
cmd += [os.path.join(recipe, 'meta.yaml')]
with utils.Progress():
utils.run(cmd, mask=False, live=live_logs)
_handle_conda_build(recipe, live_logs, whitelisted_env, args)

logger.info('BUILD SUCCESS %s',
' '.join(os.path.basename(p) for p in pkg_paths))
Expand All @@ -183,6 +166,28 @@ def build(recipe: str, pkg_paths: List[str] = None,
raise exc
return BuildResult(False, None)

return _handle_mulled_test(mulled_test
, recipe
, pkg_paths
, base_image
, mulled_conda_image
, live_logs)

def _handle_conda_build(recipe, live_logs, whitelisted_env, args):
    """Run conda-build for *recipe* inside a sandboxed environment.

    - Temporarily resets os.environ to avoid leaking env vars
    - Also passes the filtered env to run()
    - Points conda-build at meta.yaml, to avoid building subdirs
    """
    # Resolve the binary before the sandbox swaps out os.environ.
    conda_build_bin = utils.bin_for('conda-build')
    with utils.sandboxed_env(whitelisted_env):
        cmd = [conda_build_bin]
        cmd.extend(args)
        for config_file in utils.get_conda_build_config_files():
            cmd.extend((config_file.arg, config_file.path))
        cmd.append(os.path.join(recipe, 'meta.yaml'))
        with utils.Progress():
            utils.run(cmd, mask=False, live=live_logs)

def _handle_mulled_test(mulled_test, recipe, pkg_paths, base_image, mulled_conda_image, live_logs):

if mulled_test:
logger.info('TEST START via mulled-build %s', recipe)
mulled_images = []
Expand All @@ -200,6 +205,17 @@ def build(recipe: str, pkg_paths: List[str] = None,

return BuildResult(True, None)

def _build_args(testonly, channels):
args = ['--override-channels']
if testonly:
args += ["--test"]
else:
args += ["--no-anaconda-upload"]

for channel in channels or ['local']:
args += ['-c', channel]
return args


def store_build_failure_record(recipe, output, meta, dag, skiplist_leafs):
"""
Expand Down Expand Up @@ -421,6 +437,38 @@ def build_recipes(recipe_folder: str, config_path: str, recipes: List[str],
skipped_recipes = []
failed_uploads = []

_handle_recipes(recipe_folder, recipes, mulled_test, testonly, force, docker_builder, label
, anaconda_upload, mulled_upload_target, check_channels, keep_old_work, mulled_conda_image
, record_build_failures, skiplist_leafs, live_logs, config, linter, failed, dag
, skip_dependent, subdag, built_recipes, skipped_recipes, failed_uploads)

if failed or failed_uploads:
logger.error('BUILD SUMMARY: of %s recipes, '
'%s failed and %s were skipped. '
'Details of recipes and environments follow.',
len(recipes), len(failed), len(skipped_recipes))
if built_recipes:
logger.error('BUILD SUMMARY: while the entire build failed, '
'the following recipes were built successfully:\n%s',
'\n'.join(built_recipes))
for recipe in failed:
logger.error('BUILD SUMMARY: FAILED recipe %s', recipe)
for name, dep in skip_dependent.items():
logger.error('BUILD SUMMARY: SKIPPED recipe %s '
'due to failed dependencies %s', name, dep)
if failed_uploads:
logger.error('UPLOAD SUMMARY: the following packages failed to upload:\n%s',
'\n'.join(failed_uploads))
return False

logger.info("BUILD SUMMARY: successfully built %s of %s recipes",
len(built_recipes), len(recipes))
return True

def _handle_recipes(recipe_folder, recipes, mulled_test, testonly, force, docker_builder, label, anaconda_upload
, mulled_upload_target, check_channels, keep_old_work, mulled_conda_image, record_build_failures
, skiplist_leafs, live_logs, config, linter, failed, dag, skip_dependent, subdag, built_recipes
, skipped_recipes, failed_uploads):
for recipe, name in recipes:
platform = utils.RepoData().native_platform()
if not force and do_not_consider_for_additional_platform(recipe_folder, recipe, platform):
Expand Down Expand Up @@ -488,26 +536,3 @@ def build_recipes(recipe_folder: str, config_path: str, recipes: List[str],
# remove traces of the build
if not keep_old_work:
conda_build_purge()

if failed or failed_uploads:
logger.error('BUILD SUMMARY: of %s recipes, '
'%s failed and %s were skipped. '
'Details of recipes and environments follow.',
len(recipes), len(failed), len(skipped_recipes))
if built_recipes:
logger.error('BUILD SUMMARY: while the entire build failed, '
'the following recipes were built successfully:\n%s',
'\n'.join(built_recipes))
for recipe in failed:
logger.error('BUILD SUMMARY: FAILED recipe %s', recipe)
for name, dep in skip_dependent.items():
logger.error('BUILD SUMMARY: SKIPPED recipe %s '
'due to failed dependencies %s', name, dep)
if failed_uploads:
logger.error('UPLOAD SUMMARY: the following packages failed to upload:\n%s',
'\n'.join(failed_uploads))
return False

logger.info("BUILD SUMMARY: successfully built %s of %s recipes",
len(built_recipes), len(recipes))
return True
5 changes: 1 addition & 4 deletions bioconda_utils/cli.py
Original file line number Diff line number Diff line change
Expand Up @@ -466,10 +466,7 @@ def build(recipe_folder, config, packages="*", git_range=None, testonly=False,
build_script_template = open(build_script_template).read()
else:
build_script_template = docker_utils.BUILD_SCRIPT_TEMPLATE
if pkg_dir is None:
use_host_conda_bld = True
else:
use_host_conda_bld = False
use_host_conda_bld = pkg_dir is None

if not utils.is_stable_version(VERSION):
image_tag = utils.extract_stable_version(VERSION)
Expand Down
1 change: 0 additions & 1 deletion bioconda_utils/githandler.py
Original file line number Diff line number Diff line change
Expand Up @@ -510,7 +510,6 @@ def __init__(self, folder: str=".",
except:
# This will fail on CI nodes from forks, but we don't need to switch back and forth between branches there
logger.warning("Couldn't get the active branch name, we must be on detached HEAD")
pass

def checkout_master(self):
"""Check out master branch (original branch restored by `close()`)"""
Expand Down
27 changes: 18 additions & 9 deletions bioconda_utils/recipe.py
Original file line number Diff line number Diff line change
Expand Up @@ -313,6 +313,18 @@ def _rewrite_selector_block(text, block_top, block_left):
# can't handle "[py2k or osx]" style things
return None

new_lines = Recipe._compute_new_lines(block_left, variants)

logger.debug("Replacing: lines %i - %i with %i lines:\n%s\n---\n%s",
block_top, block_top+block_height, len(new_lines),
"\n".join(lines[block_top:block_top+block_height]),
"\n".join(new_lines))

lines[block_top:block_top+block_height] = new_lines
return "\n".join(lines)

@staticmethod
def _compute_new_lines(block_left, variants):
new_lines = []
for variant in variants.values():
first = True
Expand All @@ -322,14 +334,7 @@ def _rewrite_selector_block(text, block_top, block_left):
first = False
else:
new_lines.append("".join((" " * (block_left + 2), line)))

logger.debug("Replacing: lines %i - %i with %i lines:\n%s\n---\n%s",
block_top, block_top+block_height, len(new_lines),
"\n".join(lines[block_top:block_top+block_height]),
"\n".join(new_lines))

lines[block_top:block_top+block_height] = new_lines
return "\n".join(lines)
return new_lines

def get_template(self):
"""Create a Jinja2 template from the current raw recipe"""
Expand Down Expand Up @@ -636,7 +641,11 @@ def replace(self, before: str, after: str,
re_before = re.compile(before_pattern)
re_select = re.compile(before_pattern + r".*#.*\[")

# replace within those lines, erroring on "# [asd]" selectors

return self._handle_replacements(lines, re_before, re_select, after)

def _handle_replacements(self, lines, re_before, re_select, after):
# replace within those lines, erroring on "# [asd]" selectors
replacements = 0
for lineno in sorted(lines):
line = self.meta_yaml[lineno]
Expand Down
22 changes: 13 additions & 9 deletions bioconda_utils/update_pinnings.py
Original file line number Diff line number Diff line change
Expand Up @@ -338,6 +338,18 @@ def check(
return State.FAIL, recipe

flags = State(0)
maybe_bump = _handle_metas(skip_variant_keys, metas, flags)
if maybe_bump:
# Skip bump if we only add to the build matrix.
if will_build_only_missing(metas):
flags |= State.BUMPED
else:
flags |= State.BUMP
if not keep_metas:
recipe.conda_release()
return flags, recipe

def _handle_metas(skip_variant_keys, metas, flags):
maybe_bump = False
for meta in metas:
if meta.skip() or skip_for_variants(meta, skip_variant_keys):
Expand All @@ -352,12 +364,4 @@ def check(
logger.info("Package %s=%s=%s missing!",
meta.name(), meta.version(), meta.build_id())
maybe_bump = True
if maybe_bump:
# Skip bump if we only add to the build matrix.
if will_build_only_missing(metas):
flags |= State.BUMPED
else:
flags |= State.BUMP
if not keep_metas:
recipe.conda_release()
return flags, recipe
return maybe_bump
36 changes: 20 additions & 16 deletions bioconda_utils/utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -670,22 +670,7 @@ def handle_output(output_lines):
else:
masked_cmds = [do_mask(c) for c in cmds]

if proc.poll() is None:
mylogger.log(loglevel, 'Command closed STDOUT/STDERR but is still running')
waitfor = 30
waittimes = 5
for attempt in range(waittimes):
mylogger.log(loglevel, "Waiting %s seconds (%i/%i)", waitfor, attempt+1, waittimes)
try:
proc.wait(timeout=waitfor)
break;
except sp.TimeoutExpired:
pass
else:
mylogger.log(loglevel, "Terminating process")
proc.kill()
proc.wait()
returncode = proc.poll()
returncode = _handle_process(mylogger, loglevel, proc)

if returncode:
if not quiet_failure:
Expand All @@ -697,6 +682,25 @@ def handle_output(output_lines):

return sp.CompletedProcess(masked_cmds, returncode, stdout=output)

def _handle_process(mylogger, loglevel, proc):
if proc.poll() is None:
mylogger.log(loglevel, 'Command closed STDOUT/STDERR but is still running')
waitfor = 30
waittimes = 5
for attempt in range(waittimes):
mylogger.log(loglevel, "Waiting %s seconds (%i/%i)", waitfor, attempt+1, waittimes)
try:
proc.wait(timeout=waitfor)
break;
except sp.TimeoutExpired:
pass
else:
mylogger.log(loglevel, "Terminating process")
proc.kill()
proc.wait()
returncode = proc.poll()
return returncode


def envstr(env):
env = dict(env)
Expand Down
Loading
Loading