Skip to content

Commit

Permalink
Dump info about installed packages
Browse files (browse the repository at this point in the history)
  • Loading branch information
crusaderky committed Nov 19, 2024
1 parent 7727131 commit c688161
Show file tree
Hide file tree
Showing 3 changed files with 60 additions and 11 deletions.
45 changes: 34 additions & 11 deletions .github/workflows/benchmark.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,10 @@ on:
push:
branches:
- main
pull_request:
branches:
- '*'
workflow_dispatch: # allows to trigger manually
schedule:
- cron: "0 5 * * 4" # Once a week at 0500 on Thursday

Expand Down Expand Up @@ -37,7 +41,11 @@ jobs:
with:
repository: deshaw/versioned-hdf5
path: ./versioned-hdf5/
fetch-depth: 0 # Needed for asv to be able to run benchmarks on old commits
fetch-depth: 0 # Needed for asv to be able to run benchmarks on old commits

- name: Get commit hash
working-directory: ./versioned-hdf5
run: echo "PROJECT_SHA=$(git rev-parse HEAD)" >> $GITHUB_ENV

- uses: actions/checkout@v4
with:
Expand All @@ -46,17 +54,30 @@ jobs:
- name: Setup python with miniconda
uses: conda-incubator/setup-miniconda@v3
with:
python-version: 3.11
channels: conda-forge
miniforge-variant: Mambaforge
miniforge-version: latest
use-mamba: true
environment-file: versioned-hdf5-benchmarks/environment.yml
activate-environment: versioned-hdf5-benchmarks

- name: Install versioned-hdf5
shell: bash -el {0}
working-directory: ./versioned-hdf5
run: python -m pip install '.[bench]'

- name: Dump conda environment
shell: bash -el {0}
run: conda list

- name: Print hdf5 configuration
shell: bash -el {0}
working-directory: ./versioned-hdf5-benchmarks
run: |
echo "PROJECT_SHA=$(git rev-parse HEAD)" >> $GITHUB_ENV
conda install -n test pip hdf5 openmpi h5py ndindex -c conda-forge -y
conda run -n test pip install '.[bench]'
python -c 'import utils; utils.debug_libhdf5_so()'
h5cc -showconfig
- name: Run benchmarks
shell: bash -el {0}
working-directory: ./versioned-hdf5
run: |
# Copy the old asv results back into ./versioned-hdf5 to avoid
Expand All @@ -65,11 +86,11 @@ jobs:
# set the machine name; otherwise this is unique to each run
echo "Setting machine name to $MACHINE_NAME"
conda run -n test asv machine --machine $MACHINE_NAME --yes -v
asv machine --machine $MACHINE_NAME --yes -v
cat ~/.asv-machine.json
# Don't return exit code 1 if results are slower
conda run -n test asv run --skip-existing --machine $MACHINE_NAME 1.7.0.. || true;
asv run --skip-existing --machine $MACHINE_NAME 1.7.0.. || true;
# Copy the new benchmark results to the benchmarks repo to commit them
rsync -r .asv ../versioned-hdf5-benchmarks/
Expand All @@ -82,18 +103,20 @@ jobs:
message: "Update benchmarks for commit ${{ env.PROJECT_SHA }}"

- name: Generate html
shell: bash -el {0}
working-directory: ./versioned-hdf5
run: |
conda run -n test asv publish -o ./html
run: asv publish -o ./html

- name: Setup Pages
uses: actions/configure-pages@v5

- name: Upload artifact
uses: actions/upload-pages-artifact@v3
with:
path: './versioned-hdf5/html'
path: ./versioned-hdf5/html
retention-days: 30

- name: Deploy to GitHub Pages
if: github.event_name != 'pull_request'
id: deployment
uses: actions/deploy-pages@v4
10 changes: 10 additions & 0 deletions environment.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,10 @@
# Conda environment for the versioned-hdf5 benchmark runs.
# Referenced by the benchmark workflow via setup-miniconda's
# `environment-file:` input.
name: versioned-hdf5-benchmarks
channels:
# Single channel: everything below resolves from conda-forge.
- conda-forge
dependencies:
# Pinned interpreter minor version.
- python =3.11
- pip
# hdf5 provides libhdf5 at the conda level (see utils.debug_libhdf5_so,
# which verifies at runtime which libhdf5 .so is actually loaded).
- hdf5
- openmpi
- h5py
- ndindex
16 changes: 16 additions & 0 deletions utils.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,16 @@
import os
import subprocess


def debug_libhdf5_so():
    """Print out which .so file is actually loaded at runtime for libhdf5.

    Imports h5py first — presumably so that libhdf5 gets mapped into the
    current process before inspection (TODO confirm) — then scans the
    process's open-file table via ``lsof`` and prints every row that
    mentions ``libhdf5``.
    """
    import h5py  # noqa: F401  # side effect only: loads libhdf5

    listing = subprocess.check_output(["lsof", "-p", str(os.getpid())])
    for entry in listing.decode("utf-8").splitlines():
        entry = entry.strip()
        if "libhdf5" in entry:
            print(entry)

0 comments on commit c688161

Please sign in to comment.