Skip to content
Draft
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
39 commits
Select commit Hold shift + click to select a range
889b95f
feat: add CWL workflow submission endpoint and storage model
ryuwd Apr 2, 2026
b4644f8
feat: add CWL executor with replica map and sandbox integration
ryuwd Apr 2, 2026
281df17
feat: add CWL CLI submission commands and input pipeline
ryuwd Apr 7, 2026
8a6c65e
feat: wire up parametric jobs and fix worker-side issues
ryuwd Apr 9, 2026
a7250dd
feat: stream ApplicationStatus from cwltool stderr
ryuwd Apr 9, 2026
c7e38da
fix: sandbox upload/download and add sandbox CLI commands
ryuwd Apr 10, 2026
fe410c7
feat: add job monitoring with prmon heartbeats and stall detection
ryuwd Apr 14, 2026
73096d2
feat: align dirac:Job hint with matcher specification
ryuwd Apr 14, 2026
33920b6
docs(job_wrapper): added TODOs for prmon compress output
ryuwd Apr 14, 2026
551ee2a
feat: add OnlineCompressor for streaming prmon compression
ryuwd Apr 15, 2026
4e993ee
feat: add PrmonFifoReader for FIFO-based prmon streaming
ryuwd Apr 15, 2026
ca7137f
refactor: update JobMonitor and send_final_heartbeat to use PrmonFifo…
ryuwd Apr 15, 2026
220df39
test: add test for send_final_heartbeat with reader but no data
ryuwd Apr 15, 2026
7cbf7f0
feat: use FIFO streaming for prmon with 1s sampling and --fast-memmon
ryuwd Apr 15, 2026
c057458
fix: serialize HeartbeatData before sending to API
ryuwd Apr 15, 2026
e793040
fix: use location for CWL File URIs per CWL v1.2 spec
ryuwd Apr 15, 2026
650c288
test: add PathMapper target assertions for location-to-path resolution
ryuwd Apr 15, 2026
ac9fe71
fix: check location before path on cwl_utils File objects
ryuwd Apr 15, 2026
e2efca4
fix(executor): restore CommandLineTool subclass dispatch under mypyc …
ryuwd Apr 27, 2026
e0613b4
chore: rename CWL runner CLI from dirac-cwl-run to dirac-cwl-runner
ryuwd Apr 27, 2026
7a42493
fix(cli): backfill workflow input defaults before sandbox resolution
ryuwd May 4, 2026
e051233
fix(cli): inject sandbox-referenced defaults at sandbox-processing step
ryuwd May 4, 2026
075f491
refactor(api): drop cwl_utils round-trip on the worker
ryuwd May 5, 2026
17e45ac
fix(api): prepend CVMFS node to subprocess PATH for cwltool JS eval
ryuwd May 5, 2026
f9ea8e8
feat(executor): default cwl_utils JS sandbox to Singularity, not Docker
ryuwd May 5, 2026
81af869
fix(cli): tolerate complex CWL input types in parse_cli_args
ryuwd May 5, 2026
74f48f2
fix(api): also prepend job_path to PATH so sandbox-staged binaries ar…
ryuwd May 5, 2026
2fe6982
fix(executor): match NodeJSEngine.eval positional-arg signature in ov…
ryuwd May 5, 2026
7892aa7
fix(api): log cwltool output JSON and sandbox-upload decisions
ryuwd May 6, 2026
155bc9c
fix(api): post_process returns bool, runs on success and failure paths
ryuwd May 6, 2026
8219b2a
fix(cli): write DiracX token to tmpdir, not homedir
ryuwd May 6, 2026
b345f2c
feat(cli): page sandbox peek output through PAGER
ryuwd May 6, 2026
f062811
fix(cli): set LESS=-R so pager renders ANSI colours instead of escape…
ryuwd May 6, 2026
98f7b1e
fix: set JobMinorStatus.APP_ERRORS when cwltool fails
ryuwd May 6, 2026
fa5e403
fix(api): skip leading non-JSON noise (prmon errors) when parsing cwl…
ryuwd May 7, 2026
89c7680
chore: lockfile
ryuwd May 7, 2026
b584c77
fix(api): use generated-client HeartbeatData/JobCommand
ryuwd May 7, 2026
7c09022
test(api): rewrite job_wrapper integration tests for path-based run_job
ryuwd May 7, 2026
fa3d301
test(cli): point test_jobs at renamed job.submit.jdl / job.search mod…
ryuwd May 7, 2026
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 2 additions & 0 deletions .gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -37,6 +37,8 @@ pip-log.txt
.tox
.ruff_cache
.mypy_cache
workernode


# Eclipse
.project
Expand Down
1 change: 1 addition & 0 deletions diracx-api/pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -13,6 +13,7 @@ classifiers = [
"Topic :: System :: Distributed Computing",
]
dependencies = [
"cwl-utils",
"diracx-client",
"diracx-core",
"httpx",
Expand Down
270 changes: 270 additions & 0 deletions diracx-api/src/diracx/api/job_monitor.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,270 @@
"""Job monitor: prmon metrics, heartbeats, peek, stall/kill handling."""

from __future__ import annotations

import asyncio
import logging
import os
import signal
from collections import deque
from pathlib import Path

from diracx.api.job_report import JobReport
from diracx.api.prmon_reader import PrmonFifoReader
from diracx.client.models import HeartbeatData # type: ignore[attr-defined]

logger = logging.getLogger(__name__)

# TODO: replace with CS config options
# Maximum number of cwltool stderr lines forwarded in each heartbeat's
# peek content (see build_peek_content).
PEEK_LINES = 800


def build_heartbeat_data(
    *,
    prmon_row: dict[str, int],
    job_path: Path,
    peek_content: str,
) -> HeartbeatData:
    """Convert a prmon TSV row into a HeartbeatData payload.

    Field derivation (prmon column -> HeartbeatData field):
    - utime + stime -> cpu_consumed (seconds)
    - pss / 1024 -> memory_used (KB -> MB)
    - vmem / 1024 -> vsize (KB -> MB)
    - wtime -> wall_clock_time (seconds)
    - statvfs(job_path) free space -> available_disk_space (MB, None on failure)
    - os.getloadavg()[0] -> load_average (None on failure)
    - peek_content -> standard_output
    """
    user_time = prmon_row.get("utime", 0)
    sys_time = prmon_row.get("stime", 0)
    wall_time = float(prmon_row.get("wtime", 0))

    # Free disk space in the job directory; unavailable filesystems yield None.
    disk_mb: float | None
    try:
        fs = os.statvfs(job_path)
    except OSError:
        disk_mb = None
    else:
        disk_mb = fs.f_bavail * fs.f_frsize / (1024 * 1024)

    # 1-minute system load average; None where the platform cannot report it.
    load_avg: float | None
    try:
        load_avg = os.getloadavg()[0]
    except OSError:
        load_avg = None

    return HeartbeatData(
        cpu_consumed=float(user_time + sys_time),
        memory_used=prmon_row.get("pss", 0) / 1024,
        vsize=prmon_row.get("vmem", 0) / 1024,
        wall_clock_time=wall_time,
        available_disk_space=disk_mb,
        load_average=load_avg,
        standard_output=peek_content,
    )


def build_peek_content(
    cwltool_stderr: deque[str],
    *,
    max_lines: int = PEEK_LINES,
) -> str:
    """Return the tail of the cwltool stderr buffer for Watchdog display.

    Joins the last *max_lines* entries of the shared deque. Application
    stdout/stderr end up in the output sandbox and are deliberately not
    repeated here.
    """
    tail = list(cwltool_stderr)[-max_lines:]
    return "\n".join(tail)


async def send_final_heartbeat(
    *,
    job_path: Path,
    job_report: JobReport,
    cwltool_stderr: deque[str],
    fifo_reader: PrmonFifoReader | None = None,
) -> None:
    """Emit one last heartbeat carrying the job's exit metrics.

    Does nothing (beyond an info log) when no prmon data was ever read.
    A failed send is logged and swallowed: the job is already finishing.
    """
    row = None if fifo_reader is None else fifo_reader.latest_row
    if row is None:
        logger.info("No prmon data -- skipping final heartbeat")
        return

    heartbeat = build_heartbeat_data(
        prmon_row=row,
        job_path=job_path,
        peek_content=build_peek_content(cwltool_stderr),
    )
    try:
        await job_report.send_heartbeat(heartbeat)
    except Exception:
        logger.warning("Failed to send final heartbeat", exc_info=True)


class StallDetector:
    """Flag jobs whose CPU/wall-clock ratio stays low for too long.

    Each heartbeat cycle feeds in cumulative CPU time (prmon utime +
    stime) and cumulative wall-clock time (prmon wtime). The job counts
    as stalled once the ratio over the current observation window has
    remained below *threshold* for at least *window_seconds*.
    """

    def __init__(
        self,
        window_seconds: float = 1800,
        threshold: float = 0.05,
    ) -> None:
        self._window = window_seconds
        self._threshold = threshold
        # Origin of the current observation window (None until first sample).
        self._first_cpu: float | None = None
        self._first_wall: float | None = None
        # Wall-clock reading at which the low-ratio stretch began.
        self._stalled_since: float | None = None

    def check(self, *, cpu_seconds: float, wall_seconds: float) -> bool:
        """Record one sample; return True once the stall window has elapsed.

        :param cpu_seconds: Cumulative CPU time (prmon utime + stime).
        :param wall_seconds: Cumulative wall clock time (prmon wtime).
        """
        # The very first sample only establishes the window origin.
        if self._first_cpu is None:
            self._first_cpu, self._first_wall = cpu_seconds, wall_seconds
            return False

        assert self._first_wall is not None
        elapsed = wall_seconds - self._first_wall
        if elapsed <= 0:
            return False

        if (cpu_seconds - self._first_cpu) / elapsed >= self._threshold:
            # Healthy: restart the observation window from this sample.
            self._first_cpu, self._first_wall = cpu_seconds, wall_seconds
            self._stalled_since = None
            return False

        # Below threshold: remember where this low-ratio stretch started.
        if self._stalled_since is None:
            self._stalled_since = self._first_wall

        return (wall_seconds - self._stalled_since) >= self._window


class KillCommandReceived(Exception):  # noqa: N818
    """Signals that the server requested a job kill via a heartbeat reply."""


class JobMonitor:
    """Supervise a running job: heartbeats, peek, kill handling, stall detection.

    Launch with ``asyncio.create_task(monitor.run())`` once the subprocess
    is started, and cancel the task when the subprocess exits.

    prmon wraps the command itself (it is not a sidecar), so this class
    never manages the prmon process; it only consumes metrics streamed
    through a PrmonFifoReader.

    :param pid: PID of the subprocess (the prmon wrapper process).
    :param job_path: Working directory of the job.
    :param job_report: JobReport instance for sending heartbeats.
    :param cwltool_stderr: Shared deque of cwltool stderr lines.
    :param heartbeat_interval: Seconds between heartbeat cycles.
    :param fifo_reader: PrmonFifoReader instance for reading prmon metrics.
    :param stall_window: Stall detection window in seconds (default 1800).
    :param stall_threshold: CPU/wall ratio below which a job is stalled.
    :param kill_grace_period: Seconds between SIGTERM and SIGKILL.
    """

    def __init__(
        self,
        *,
        pid: int,
        job_path: Path,
        job_report: JobReport,
        cwltool_stderr: deque[str],
        heartbeat_interval: float = 60.0,
        fifo_reader: PrmonFifoReader | None = None,
        stall_window: float = 1800.0,
        stall_threshold: float = 0.05,
        kill_grace_period: float = 30.0,
    ) -> None:
        self._pid = pid
        self._job_path = job_path
        self._job_report = job_report
        self._cwltool_stderr = cwltool_stderr
        self._interval = heartbeat_interval
        self._fifo_reader = fifo_reader
        self._kill_grace = kill_grace_period
        self._stall_detector = StallDetector(
            window_seconds=stall_window, threshold=stall_threshold
        )

    def _kill_subprocess(self, sig: int = signal.SIGTERM) -> None:
        """Deliver *sig* to the subprocess's whole process group."""
        try:
            os.killpg(os.getpgid(self._pid), sig)
        except OSError:
            # Process group already gone, or not ours to signal.
            logger.debug("Could not signal process %d", self._pid)

    async def _escalate_kill(self) -> None:
        """SIGTERM the job, wait the grace period, then SIGKILL it."""
        self._kill_subprocess(signal.SIGTERM)
        await asyncio.sleep(self._kill_grace)
        self._kill_subprocess(signal.SIGKILL)

    async def run(self) -> None:
        """Loop heartbeat cycles until cancelled or a kill/stall fires.

        Raises KillCommandReceived if the server sends a Kill command or
        a stall is detected.
        """
        while True:
            await asyncio.sleep(self._interval)
            try:
                await self._heartbeat_cycle()
            except KillCommandReceived:
                raise
            except Exception:
                # A failed cycle must not end monitoring; log and carry on.
                logger.warning("Heartbeat cycle failed", exc_info=True)

    async def _heartbeat_cycle(self) -> None:
        """One iteration: read prmon metrics, report them, act on replies."""
        # 1. + 2. + 3. Assemble the heartbeat from the latest prmon row.
        row = None if self._fifo_reader is None else self._fifo_reader.latest_row
        if row is None:
            logger.debug("No prmon data yet -- skipping heartbeat")
            return

        heartbeat = build_heartbeat_data(
            prmon_row=row,
            job_path=self._job_path,
            peek_content=build_peek_content(self._cwltool_stderr),
        )

        # 4. Send it; the server piggybacks pending commands on the reply.
        commands = await self._job_report.send_heartbeat(heartbeat)

        # 5. Honour a server-side Kill before anything else.
        if any(cmd.command == "Kill" for cmd in commands):
            logger.warning("Kill command received for job")
            await self._escalate_kill()
            raise KillCommandReceived("Server sent Kill command")

        # 6. Stall detection on cumulative prmon CPU/wall figures.
        cpu_total = float(row.get("utime", 0) + row.get("stime", 0))
        wall_total = float(row.get("wtime", 0))
        if self._stall_detector.check(cpu_seconds=cpu_total, wall_seconds=wall_total):
            logger.warning("Job stalled: CPU/wall ratio below threshold")
            self._job_report.set_job_status(
                application_status="Stalled: low CPU/wall-clock ratio"
            )
            await self._job_report.commit()
            await self._escalate_kill()
            raise KillCommandReceived("Job stalled — killed by monitor")
79 changes: 79 additions & 0 deletions diracx-api/src/diracx/api/job_report.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,79 @@
"""All classes related to job reports."""

from __future__ import annotations

from datetime import datetime, timezone

from diracx.client.aio import AsyncDiracClient # type: ignore[attr-defined]
from diracx.client.models import HeartbeatData, JobCommand # type: ignore[attr-defined]
from diracx.core.models.job import (
JobMinorStatus,
JobStatus,
JobStatusUpdate,
)


class JobReport:
    """Accumulates job status updates and relays them to the DiracX server."""

    def __init__(self, job_id: int, source: str, client: AsyncDiracClient) -> None:
        """Initialize Job Report.

        :param job_id: the job ID
        :param source: source for the reports
        :param client: DiracX client instance
        """
        self.job_id = job_id
        self.source = source
        self._client = client
        # Pending status updates, keyed by the timestamp they were recorded at.
        self.job_status_info: dict[str, dict[str, str]] = {}

    def set_job_status(
        self,
        status: JobStatus | None = None,
        minor_status: JobMinorStatus | None = None,
        application_status: str | None = None,
    ) -> None:
        """Queue a job status record for the next commit.

        :param status: job status
        :param minor_status: job minor status
        :param application_status: application status
        """
        update = JobStatusUpdate(
            Status=status,
            MinorStatus=minor_status,
            ApplicationStatus=application_status,
            Source=self.source,
        )
        self.job_status_info[str(datetime.now(timezone.utc))] = update.model_dump()

    async def send_heartbeat(self, metrics: HeartbeatData) -> list[JobCommand]:
        """Send a heartbeat with metrics and return any pending commands.

        :param metrics: Resource metrics to report.
        :return: List of commands from the server (e.g. Kill).
        """
        return await self._client.jobs.add_heartbeat({str(self.job_id): metrics})

    async def send_stored_status_info(self):
        """Flush every queued status record to the server."""
        if not self.job_status_info:
            return
        ret = await self._client.jobs.set_job_statuses(
            {self.job_id: self.job_status_info}
        )
        if not ret.success:
            raise RuntimeError(f"Could not set job statuses: {ret}")
        # Only clear the queue once the server has accepted the batch.
        self.job_status_info = {}

    async def commit(self):
        """Send all the accumulated information."""
        await self.send_stored_status_info()
Loading
Loading