Skip to content

Commit

Permalink
Merge pull request #169 from feelpp/139-add-integration-tests-for-ci
Browse files Browse the repository at this point in the history
139 add integration tests for ci
  • Loading branch information
vincentchabannes authored Nov 29, 2024
2 parents 0256b81 + eea8eef commit 0288884
Show file tree
Hide file tree
Showing 12 changed files with 272 additions and 12 deletions.
39 changes: 39 additions & 0 deletions .github/workflows/ci.yml
Original file line number Diff line number Diff line change
Expand Up @@ -35,6 +35,45 @@ jobs:
name: wheel-artifacts
path: dist/*.whl

tests:
  runs-on: self-ubuntu-22.04
  name: Integration Tests
  needs: build_wheel
  # Allow skipping this job by putting 'tests skip' in the commit message.
  if: "!contains(github.event.head_commit.message, 'tests skip')"
  steps:
  - uses: actions/checkout@v4
  - name: Download packages
    uses: actions/download-artifact@v3
    with:
      name: wheel-artifacts
      path: dist/
  - name: Install Python dependencies
    run: |
      python -m venv .venv
      source .venv/bin/activate
      pip3 install -I -r requirements.txt
      pip3 install dist/*.whl
  - name: Compile test applications
    run: |
      mpic++ -std=c++17 -O3 tests/data/parallelSum.cpp -o tests/data/parallelSum
  - name: Test benchmarks
    run: |
      source .venv/bin/activate
      # `run` scripts execute under `bash -e`, so a non-zero exit here fails the step.
      execute-benchmark -mc config/machines/gaya_ci.json -bc config/tests_parallelSum/parallelSum.json -pc config/tests_parallelSum/plots.json
  - name: Dry-run reports
    run: |
      source .venv/bin/activate
      render-benchmarks --config_file reports/website_config.json
  - name: Check files
    run: | #TODO: check if not empty (maybe)
      nb_rfm_report_files=$(ls -1q reports/parallelSum/parallel_sum/gaya|wc -l)
      nb_doc_reports=$(ls -1q docs/modules/ROOT/pages/reports/|wc -l)
      if [ "$nb_rfm_report_files" -ne 1 ] || [ "$nb_doc_reports" -ne 1 ]; then
        echo "Reports were not rendered."
        exit 1
      fi
docs:
runs-on: self-ubuntu-22.04
name: Build Antora Site
Expand Down
6 changes: 5 additions & 1 deletion .gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -62,4 +62,8 @@ project/report/*

#Docs
docs/modules/ROOT/pages/
!docs/modules/ROOT/pages/index.adoc
!docs/modules/ROOT/pages/index.adoc
reports/

#Test Outputs
tests/data/outputs/
4 changes: 2 additions & 2 deletions config/machines/gaya.json
Original file line number Diff line number Diff line change
Expand Up @@ -8,8 +8,8 @@
"output_app_dir":"/data/scratch/cladellash/feelppdb",
"containers":{
"apptainer":{
"cachedir": "/home/u2/cladellash/.apptainer/cache",
"tmpdir": "/data/scratch/cladellash/images/tmp",
// "cachedir": "/home/u2/cladellash/.apptainer/cache",
// "tmpdir": "/data/scratch/cladellash/images/tmp",
"image_base_dir":"/data/scratch/cladellash/images",
"options":[
"--sharens",
Expand Down
21 changes: 21 additions & 0 deletions config/machines/gaya_ci.json
Original file line number Diff line number Diff line change
@@ -0,0 +1,21 @@
{
"machine": "gaya",
"targets":["production:builtin:"],
// "partitions":["*"],
// "platform":"apptainer",
// "prog_environments":["*"],
"execution_policy": "async",
"reframe_base_dir":"$PWD/build/reframe",
"reports_base_dir":"$PWD/reports/",
"input_dataset_base_dir":"$PWD",
"output_app_dir":"$PWD",
"containers":{
"apptainer":{
"image_base_dir":"/data/scratch/",
"options":[
"--sharens",
"--bind /opt/:/opt/"
]
}
}
}
51 changes: 51 additions & 0 deletions config/tests_parallelSum/parallelSum.json
Original file line number Diff line number Diff line change
@@ -0,0 +1,51 @@
{
"executable": "{{machine.input_dataset_base_dir}}/tests/data/parallelSum",
"output_directory": "{{machine.output_app_dir}}/tests/data/outputs/parallelSum",
"use_case_name": "parallel_sum",
"timeout":"0-0:5:0",
"options": [
"{{parameters.elements.value}}",
"{{output_directory}}/{{instance}}"
],
"outputs": [
{
"filepath":"{{output_directory}}/{{instance}}/outputs.csv",
"format":"csv"
}
],
"scalability": {
"directory": "{{output_directory}}/{{instance}}/",
"stages": [
{
"name":"",
"filepath": "scalability.json",
"format": "json",
"variables_path":"*"
}
]
},
"sanity": {
"success": ["[SUCCESS]"],
"error": ["[OOPSIE]","Error"]
},
"parameters": [
{
"name": "nb_tasks",
"sequence": [
{"tasks" : 1, "exclusive_access":true},
{"tasks" : 2, "exclusive_access":true},
{"tasks" : 4, "exclusive_access":true},
{"tasks" : 8, "exclusive_access":true},
{"tasks" : 16, "exclusive_access":true}
]
},
{
"name":"elements",
"linspace":{
"min":100000000,
"max":1000000000,
"n_steps":4
}
}
]
}
54 changes: 54 additions & 0 deletions config/tests_parallelSum/plots.json
Original file line number Diff line number Diff line change
@@ -0,0 +1,54 @@
{
"plots":[
{
"title": "Absolute performance",
"plot_types": [ "stacked_bar", "grouped_bar" ],
"transformation": "performance",
"variables": [ "computation_time" ],
"names": ["Time"],
"xaxis":{
"parameter":"nb_tasks.tasks",
"label":"Number of tasks"
},
"yaxis":{"label":"Execution time (s)"},
"secondary_axis":{
"parameter":"elements",
"label":"N"
}
},
{
"title": "Absolute performance",
"plot_types": [ "stacked_bar", "table" ],
"transformation": "performance",
"variables": [ "computation_time" ],
"names": ["Time"],
"xaxis":{
"parameter":"elements",
"label":"N"
},
"yaxis":{"label":"Execution time (s)"},
"secondary_axis":{
"parameter":"nb_tasks.tasks",
"label":"Number of tasks"
}
},
{
"title": "Speedup",
"plot_types": [ "scatter" ],
"transformation": "speedup",
"variables": [ "computation_time" ],
"names": ["Time"],
"xaxis": {
"parameter": "nb_tasks.tasks",
"label": "Number of tasks"
},
"yaxis": {
"label": "Execution time (s)"
},
"secondary_axis":{
"parameter":"elements",
"label":"N"
}
}
]
}
2 changes: 1 addition & 1 deletion requirements.txt
Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,7 @@ plotly
setuptools
nbformat
Jinja2
ReFrame-HPC
ReFrame-HPC==4.6.3
pandas
nbmake
traitlets
Expand Down
6 changes: 3 additions & 3 deletions src/feelpp/benchmarking/reframe/config/configSchemas.py
Original file line number Diff line number Diff line change
Expand Up @@ -59,7 +59,7 @@ def extractProtocol(self):

class Platform(BaseModel):
image:Optional[Image] = None
input_dir:str
input_dir:Optional[str] = None
options:Optional[List[str]]= []
append_app_options:Optional[List[str]]= []

Expand All @@ -70,7 +70,7 @@ class AdditionalFiles(BaseModel):
class ConfigFile(BaseModel):
executable: str
timeout: str
platforms:Optional[Dict[str,Platform]] = None
platforms:Optional[Dict[str,Platform]] = {"builtin":Platform()}
output_directory:str
use_case_name: str
options: List[str]
Expand Down Expand Up @@ -101,7 +101,7 @@ def checkPlotAxisParameters(self):
for inner in outer.sequence[0].keys():
parameter_names.append(f"{outer.name}.{inner}")

parameter_names += [outer.name for outer in self.parameters if outer.sequence] + ["performance_variable"]
parameter_names += [outer.name for outer in self.parameters if outer] + ["performance_variable"]
for plot in self.plots:
for ax in [plot.xaxis,plot.secondary_axis,plot.yaxis,plot.color_axis]:
if ax and ax.parameter:
Expand Down
5 changes: 3 additions & 2 deletions src/feelpp/benchmarking/reframe/validation.py
Original file line number Diff line number Diff line change
@@ -1,4 +1,5 @@
import reframe.utility.sanity as sn
import re

class ValidationHandler:
""" Class to handle test validation and sanity functions"""
Expand All @@ -8,8 +9,8 @@ def __init__(self, sanity_config):

def check_success(self,stdout):
""" Checks that all given regex patterns are found in an output (can be sn.stdout) """
return all(sn.assert_found(rf"{pattern}",stdout) for pattern in self.success)
return all(sn.assert_found(rf"{re.escape(pattern)}",stdout) for pattern in self.success)

def check_errors(self,stdout):
""" Checks that no given regex patterns are found in an output (can be sn.stdout) """
return all(sn.assert_not_found(rf"{pattern}",stdout) for pattern in self.errors)
return all(sn.assert_not_found(rf"{re.escape(pattern)}",stdout) for pattern in self.errors)
6 changes: 3 additions & 3 deletions src/feelpp/benchmarking/report/config/overviewConfig.json
Original file line number Diff line number Diff line change
Expand Up @@ -66,7 +66,7 @@
]
},
{
"title": "Execution by environment",
"title": "Execution by environment (latest benchmarks)",
"plot_types": ["stacked_bar"],
"transformation": "performance",
"names": ["performance"],
Expand Down Expand Up @@ -150,7 +150,7 @@
]
},
{
"title": "Execution by environment",
"title": "Execution by environment (latest benchmarks)",
"plot_types": ["stacked_bar"],
"transformation": "performance",
"names": ["performance"],
Expand Down Expand Up @@ -234,7 +234,7 @@
]
},
{
"title": "Execution by environment",
"title": "Execution by environment (latest benchmarks)",
"plot_types": ["stacked_bar"],
"transformation": "performance",
"names": ["performance"],
Expand Down
Binary file added tests/data/parallelSum
Binary file not shown.
90 changes: 90 additions & 0 deletions tests/data/parallelSum.cpp
Original file line number Diff line number Diff line change
@@ -0,0 +1,90 @@
#include <mpi.h>

#include <algorithm>
#include <filesystem>
#include <fstream>
#include <iostream>
#include <numeric>
#include <stdexcept>
#include <string>
#include <vector>

namespace fs = std::filesystem;

/// MPI benchmark: sums N ones split across ranks, timing the local
/// accumulation (computation) and the reduction (communication) separately.
/// Usage: parallelSum <N> <output_directory>
/// Rank 0 writes scalability.json and outputs.csv into <output_directory>
/// and prints [SUCCESS] on completion; errors are tagged [OOPSIE] so the
/// benchmark harness's sanity patterns can detect them.
int main(int argc, char** argv)
{
    MPI_Init(&argc, &argv);

    int rank, size;
    MPI_Comm_rank(MPI_COMM_WORLD, &rank);
    MPI_Comm_size(MPI_COMM_WORLD, &size);

    if ( argc < 3 )
    {
        if (rank == 0)
            std::cerr << "Usage: " << argv[0] << " <N> <output_directory>\n";
        MPI_Finalize();
        return 1;
    }

    // Parse N defensively: std::stoi throws std::invalid_argument /
    // std::out_of_range, which would otherwise terminate the process
    // without calling MPI_Finalize.
    int N = 0;
    try
    {
        N = std::stoi(argv[1]);
    }
    catch (const std::exception&)
    {
        if (rank == 0)
            std::cerr << "[OOPSIE] Invalid value for <N>: " << argv[1] << std::endl;
        MPI_Finalize();
        return 1;
    }
    if ( N <= 0 )
    {
        // A non-positive N would yield negative chunk bounds and a bogus
        // (huge) vector size through the signed->size_t conversion below.
        if (rank == 0)
            std::cerr << "[OOPSIE] <N> must be a positive integer." << std::endl;
        MPI_Finalize();
        return 1;
    }
    fs::path output_dir = argv[2];

    // Split [0, N) into near-equal chunks; the first `remainder` ranks
    // receive one extra element each.
    int base_chunk_size = N / size;
    int remainder = N % size;

    int local_start = rank * base_chunk_size + std::min(rank, remainder);
    int local_end = local_start + base_chunk_size + (rank < remainder ? 1 : 0);

    std::vector<double> local_array(local_end - local_start, 1.0);

    // Computation phase: local sum of this rank's chunk.
    double start_time = MPI_Wtime();
    double local_sum = std::accumulate(local_array.begin(), local_array.end(), 0.0);
    double end_time = MPI_Wtime();

    // Communication phase: reduce partial sums onto rank 0.
    double start_comm_time = MPI_Wtime();
    double global_sum = 0.0;
    MPI_Reduce(&local_sum, &global_sum, 1, MPI_DOUBLE, MPI_SUM, 0, MPI_COMM_WORLD);
    double end_comm_time = MPI_Wtime();

    if ( rank == 0 )
    {
        double computation_time = end_time - start_time;
        double communication_time = end_comm_time - start_comm_time;
        std::cout << "Global sum = " << global_sum << "\n";
        std::cout << "Computation time: " << computation_time << " seconds\n";
        std::cout << "Communication time: " << communication_time << " seconds\n";

        if (!fs::exists(output_dir))
            fs::create_directories(output_dir);

        // scalability.json is consumed by the benchmark's "scalability" stage.
        fs::path filename = "scalability.json";

        std::ofstream scal_outfile(output_dir/filename);
        if ( scal_outfile.is_open() )
        {
            scal_outfile << "{\n";
            scal_outfile << "  \"computation_time\": " << computation_time << ",\n";
            scal_outfile << "  \"communication_time\": " << communication_time << ",\n";
            scal_outfile << "  \"num_processes\": " << size << ",\n";
            scal_outfile << "  \"N\": " << N << ",\n";
            scal_outfile << "  \"sum\": " << global_sum << "\n";
            scal_outfile << "}\n";
            scal_outfile.close();
        }
        else
            std::cerr << "[OOPSIE] Error opening file for writing." << std::endl;

        // outputs.csv is consumed by the benchmark's "outputs" stage.
        fs::path out_filename = "outputs.csv";
        std::ofstream out_outfile(output_dir/out_filename);
        if (out_outfile.is_open())
        {
            out_outfile << "N,sum\n";
            out_outfile << N << "," <<global_sum;
            out_outfile.close();
        }
        else
            std::cerr << "[OOPSIE] Error opening file for writing [outputs]." << std::endl;

        std::cout << "[SUCCESS]" << std::endl;
    }

    MPI_Finalize();

    return 0;
}

0 comments on commit 0288884

Please sign in to comment.