Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Include coverage from code executed remotely #255

Open
wants to merge 4 commits into
base: develop
Choose a base branch
from
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion CHANGELOG.md
Original file line number Diff line number Diff line change
Expand Up @@ -34,7 +34,7 @@
- how to limit number of jobs [\#195](https://github.com/Matgenix/jobflow-remote/issues/195)
- non-unique uuids as a problem in jobflow-remote [\#193](https://github.com/Matgenix/jobflow-remote/issues/193)
- Set `projects_folder` via env var [\#188](https://github.com/Matgenix/jobflow-remote/issues/188)
- Support for heterogenous computing resources? [\#184](https://github.com/Matgenix/jobflow-remote/issues/184)
- Support for heterogeneous computing resources? [\#184](https://github.com/Matgenix/jobflow-remote/issues/184)
- Jobflow remote logo [\#178](https://github.com/Matgenix/jobflow-remote/issues/178)
- How could I use SGE for job submission? [\#159](https://github.com/Matgenix/jobflow-remote/issues/159)
- Check there is not already a runner running [\#140](https://github.com/Matgenix/jobflow-remote/issues/140)
Expand Down
6 changes: 5 additions & 1 deletion pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -27,6 +27,7 @@ dependencies = [
"jobflow >= 0.1.19",
"psutil >= 5.9,< 7.0",
"pydantic ~= 2.4",
"pymongo < 4.11",
"python-dateutil>=2.8.2",
"qtoolkit >= 0.1.6",
"rich ~= 13.7",
Expand Down Expand Up @@ -169,7 +170,10 @@ parallel = true
branch = true

[tool.coverage.paths]
source = ["src/"]
source = [
"src/",
"/home/jobflow/.venv/lib/python3.10/site-packages/", # for coverage from docker containers
]

[tool.coverage.report]
skip_covered = true
Expand Down
3 changes: 2 additions & 1 deletion requirements/requirements.txt
Original file line number Diff line number Diff line change
@@ -1,5 +1,6 @@
jobflow==0.1.19
pydantic==2.10.5
pydantic==2.10.6
pymongo==4.10.1
fabric==3.2.2
tomlkit==0.13.2
qtoolkit==0.1.6
Expand Down
1 change: 1 addition & 0 deletions requirements/requirements_tests.txt
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
coverage==7.6.10
11 changes: 11 additions & 0 deletions tests/conftest.py
Original file line number Diff line number Diff line change
Expand Up @@ -15,6 +15,17 @@ def test_dir():
return test_dir.resolve()


@pytest.fixture(scope="session")
def coverage_file(request):
    """Return the data file path configured for pytest-cov, if it is active.

    Looks up the pytest-cov plugin ("_cov") on the plugin manager and, when
    coverage collection is running, returns the path of its data file.
    Returns None when pytest-cov is not installed/active or when its
    controller has not been set up (coverage tracking disabled).
    """
    plugin = request.config.pluginmanager.get_plugin("_cov")
    if not plugin:
        # pytest-cov is not active or coverage tracking is disabled
        return None
    controller = getattr(plugin, "cov_controller", None)
    if not controller:
        # plugin present but no controller: coverage tracking is disabled
        return None
    return controller.cov.config.data_file


@pytest.fixture(scope="session")
def log_to_stdout() -> None:
import logging
Expand Down
37 changes: 29 additions & 8 deletions tests/integration/conftest.py
Original file line number Diff line number Diff line change
Expand Up @@ -86,7 +86,7 @@ def bake_containers():

@pytest.fixture(scope="session", autouse=True)
def compose_containers(
slurm_ssh_port, sge_ssh_port, pbs_ssh_port, db_port, bake_containers
slurm_ssh_port, sge_ssh_port, pbs_ssh_port, db_port, bake_containers, coverage_file
):
compose_yaml = f"""
name: jobflow_remote_testing
Expand Down Expand Up @@ -194,6 +194,22 @@ def compose_containers(

yield docker_client
finally:
# After tests finish, copy coverage data from container(s) to local machine
if coverage_file:
coverage_dir = str(Path(coverage_file).parent)
print(" * Copying coverage data back...")
for c in containers:
if c.name in ("mongo_container",):
continue
flist = c.execute(
["ls", "-a", "/home/jobflow/coverage/"]
).splitlines()
for file in flist:
if file.startswith(".coverage"):
c.copy_from(
f"/home/jobflow/coverage/{file}",
f"{coverage_dir}/{file}",
)
try:
print("\n * Stopping containers...")
try:
Expand Down Expand Up @@ -248,6 +264,11 @@ def write_tmp_settings(
# config on import
from jobflow_remote.config import Project

prerun = (
"source /home/jobflow/.venv/bin/activate; "
"export COVERAGE_PROCESS_START=/home/jobflow/.coveragerc; "
"export COVERAGE_FILE=/home/jobflow/coverage/.coverage"
)
project = Project(
name=random_project_name,
jobstore={
Expand Down Expand Up @@ -301,7 +322,7 @@ def write_tmp_settings(
work_dir="/home/jobflow/jfr",
user="jobflow",
password="jobflow",
pre_run="source /home/jobflow/.venv/bin/activate",
pre_run=prerun,
resources={"partition": "debug", "ntasks": 1, "time": "00:01:00"},
connect_kwargs={"allow_agent": False, "look_for_keys": False},
),
Expand All @@ -314,7 +335,7 @@ def write_tmp_settings(
user="jobflow",
password="jobflow",
scheduler_username="jobflow",
pre_run="source /home/jobflow/.venv/bin/activate",
pre_run=prerun,
connect_kwargs={"allow_agent": False, "look_for_keys": False},
),
"test_remote_pbs_worker": dict(
Expand All @@ -325,7 +346,7 @@ def write_tmp_settings(
work_dir="/home/jobflow/jfr",
user="jobflow",
password="jobflow",
pre_run="source /home/jobflow/.venv/bin/activate",
pre_run=prerun,
connect_kwargs={"allow_agent": False, "look_for_keys": False},
resources={"walltime": "00:05:00", "select": "nodes=1:ppn=1"},
),
Expand All @@ -337,7 +358,7 @@ def write_tmp_settings(
work_dir="/home/jobflow/jfr",
user="jobflow",
password="jobflow",
pre_run="source /home/jobflow/.venv/bin/activate",
pre_run=prerun,
resources={"partition": "debug", "ntasks": 1, "time": "00:01:00"},
connect_kwargs={"allow_agent": False, "look_for_keys": False},
max_jobs=1,
Expand All @@ -350,7 +371,7 @@ def write_tmp_settings(
work_dir="/home/jobflow/jfr",
user="jobflow",
password="jobflow",
pre_run="source /home/jobflow/.venv/bin/activate",
pre_run=prerun,
resources={"partition": "debug", "ntasks": 1, "time": "00:01:00"},
connect_kwargs={"allow_agent": False, "look_for_keys": False},
batch={
Expand All @@ -368,7 +389,7 @@ def write_tmp_settings(
work_dir="/home/jobflow/jfr",
user="jobflow",
password="jobflow",
pre_run="source /home/jobflow/.venv/bin/activate",
pre_run=prerun,
resources={"partition": "debug", "ntasks": 1, "time": "00:01:00"},
connect_kwargs={"allow_agent": False, "look_for_keys": False},
batch={
Expand All @@ -394,7 +415,7 @@ def write_tmp_settings(
work_dir="/home/jobflow/jfr",
user="jobflow",
password="jobflow",
pre_run="source /home/jobflow/.venv/bin/activate",
pre_run=prerun,
resources={"partition": "debug", "ntasks": 1, "time": "00:01:00"},
connect_kwargs={"allow_agent": False, "look_for_keys": False},
sanitize_command=True,
Expand Down
5 changes: 5 additions & 0 deletions tests/integration/coverage/.coveragerc
Original file line number Diff line number Diff line change
@@ -0,0 +1,5 @@
[run]
branch = True
source = jobflow_remote
relative_files = True
parallel = True
21 changes: 20 additions & 1 deletion tests/integration/dockerfiles/Dockerfile
Original file line number Diff line number Diff line change
Expand Up @@ -23,6 +23,7 @@ WORKDIR /home/${USERNAME}/jobflow-remote
COPY src/jobflow_remote /home/${USERNAME}/jobflow-remote/src/jobflow_remote
COPY pyproject.toml /home/${USERNAME}/jobflow-remote/
COPY requirements/requirements.txt /home/${USERNAME}/jobflow-remote/requirements/
COPY requirements/requirements_tests.txt /home/${USERNAME}/jobflow-remote/requirements/

# versioningit refuses to install a package without its full git history
# so here we remove versioningit config from pyproject.toml as we don't need
Expand All @@ -32,7 +33,11 @@ RUN sed -i '/\[tool.versioningit.vcs\]/,+3d' /home/${USERNAME}/jobflow-remote/py
WORKDIR /home/${USERNAME}/jobflow-remote
RUN uv venv /home/${USERNAME}/.venv && \
uv pip install --python /home/${USERNAME}/.venv/bin/python -r /home/${USERNAME}/jobflow-remote/requirements/requirements.txt && \
uv pip install --python /home/${USERNAME}/.venv/bin/python --no-deps .
uv pip install --python /home/${USERNAME}/.venv/bin/python --no-deps . && \
uv pip install --python /home/${USERNAME}/.venv/bin/python -r /home/${USERNAME}/jobflow-remote/requirements/requirements_tests.txt

# Create a .pth file to auto-start coverage
RUN printf "import coverage; coverage.process_startup()\n" > /home/${USERNAME}/.venv/lib/python3.10/site-packages/coverage_autostart.pth

# Add desired queue system as a build stage:
# Each different queue system must provide an startup script, following the example
Expand Down Expand Up @@ -211,6 +216,8 @@ RUN mkdir jfr

# Copy the installed jobflow-remote virtualenv
COPY --from=jobflow-remote-install /home/${USERNAME}/.venv /home/${USERNAME}/.venv
# Copy the coveragerc config file
COPY tests/integration/coverage/.coveragerc /home/${USERNAME}/

# Finalize permissions on startup script and jobflow installation
USER root
Expand All @@ -220,4 +227,16 @@ RUN chown -R ${USERNAME} /home/${USERNAME} && \
chmod -R 755 /home/${USERNAME}
USER ${USERNAME}

# Note on the COVERAGE_PROCESS_START and COVERAGE_FILE environment variables:
# neither a Dockerfile ENV nor exporting them from .bashrc makes them visible
# to jobs executed under slurm/pbs/sge — the variables appear to be dropped
# from the job environment (root cause not yet understood). As a workaround,
# the coverage environment variables are exported in each worker's pre_run.
# ENV COVERAGE_PROCESS_START=/home/${USERNAME}/.coveragerc
# ENV COVERAGE_FILE=/home/${USERNAME}/coverage/.coverage
# RUN printf "\n# Environment variables for implicit coverage\n" >> /home/${USERNAME}/.bashrc
# RUN printf "\nexport COVERAGE_PROCESS_START=/home/${USERNAME}/.coveragerc\n" >> /home/${USERNAME}/.bashrc
# RUN printf "export COVERAGE_FILE=/home/${USERNAME}/coverage/.coverage\n" >> /home/${USERNAME}/.bashrc
RUN mkdir /home/${USERNAME}/coverage


CMD sudo /etc/startup.sh ; /bin/bash -l