Skip to content

Commit fcaa697

Browse files
committed
PR #86 round 4: anchor remaining suppressions, refactor cognitive complexity
SonarCloud cleanup: - S1313 in test_pii_scanner: NOSONAR moved from preceding-line comment onto the redact_text call line and the assertNotIn line. - S5042 in driver_pin: NOSONAR anchored on the ``with tarfile.open(...)`` line instead of the helper docstring above it. - S5131 in har_replay: NOSONAR anchored on the ``self.wfile.write(payload)`` line so SonarCloud sees the suppression at the violation site (the payload is already strip-sanitised by ``_safe_echo``). - S5869 in md_authoring: combined the suppression onto the same line as ``_TEMPLATE_RE``. - S7504 in bidi_backend.unsubscribe_all: hoisted the list() snapshot into a named ``snapshot`` local with the NOSONAR anchored on that line so the marker isn't on a comment. Cognitive complexity refactors (S3776): - fanout.run_fan_out: split task parsing into _parse_tasks and result collection into _collect_results. - browser_pool.checkout: extracted _acquire_session, which linearises the get_nowait → grow → wait branches. - visual_review do_GET: moved the /img/* handler into _serve_image. - a11y_trend.aggregate_history: split per-entry / per-violation logic into _absorb_entry / _count_violation. - storybook.visual_snapshots.capture_story_snapshots: moved the per-story capture+compare body into _snapshot_story. - examples/counting_stars.py main: split into _force_play / _await_ad_clear / _wait_out_unskippable_ad / _navigate_and_play. All 1230 tests still green.
1 parent 841ca5b commit fcaa697

11 files changed

Lines changed: 183 additions & 141 deletions

File tree

examples/counting_stars.py

Lines changed: 43 additions & 33 deletions
Original file line numberDiff line numberDiff line change
@@ -77,6 +77,48 @@ def _click_first_visible(driver, selectors) -> bool:
7777
return bool(driver.execute_script(script, selectors))
7878

7979

80+
def _force_play(driver) -> None:
81+
"""Loop the force-play script until the video reports ``playing``."""
82+
for _ in range(8):
83+
if driver.execute_script(_FORCE_PLAY_JS) == "playing":
84+
return
85+
time.sleep(1)
86+
87+
88+
def _await_ad_clear(driver, max_seconds: float = 30.0) -> bool:
89+
"""Poll ``_AD_STATE_JS``; click skip-ad if visible. Returns True on skip."""
90+
deadline = time.monotonic() + max_seconds
91+
while time.monotonic() < deadline:
92+
if driver.execute_script(_AD_STATE_JS) != "ad":
93+
return False
94+
if _click_first_visible(driver, _SKIP_AD_SELECTORS):
95+
time.sleep(1)
96+
return True
97+
time.sleep(1)
98+
return False
99+
100+
101+
def _wait_out_unskippable_ad(driver, max_seconds: float = 30.0) -> None:
102+
"""Tick until ``_AD_STATE_JS`` reports ``video`` or budget runs out."""
103+
deadline = time.monotonic() + max_seconds
104+
while time.monotonic() < deadline:
105+
if driver.execute_script(_AD_STATE_JS) != "ad":
106+
return
107+
time.sleep(1)
108+
109+
110+
def _navigate_and_play(driver) -> None:
111+
webdriver_wrapper_instance.to_url(COUNTING_STARS_URL)
112+
time.sleep(4)
113+
_click_first_visible(driver, _DISMISS_BUTTON_SELECTORS)
114+
time.sleep(1)
115+
_force_play(driver)
116+
if not _await_ad_clear(driver):
117+
_wait_out_unskippable_ad(driver)
118+
# Force-play once more in case the ad transition paused the video.
119+
driver.execute_script(_FORCE_PLAY_JS)
120+
121+
80122
def main() -> int:
81123
chrome_args = [
82124
"--autoplay-policy=no-user-gesture-required",
@@ -91,39 +133,7 @@ def main() -> int:
91133

92134
driver = webdriver_wrapper_instance.current_webdriver
93135
try:
94-
webdriver_wrapper_instance.to_url(COUNTING_STARS_URL)
95-
# Let the consent dialog (if any) and the player render.
96-
time.sleep(4)
97-
# Dismiss EU consent banner if it shows up.
98-
_click_first_visible(driver, _DISMISS_BUTTON_SELECTORS)
99-
time.sleep(1)
100-
# Make sure something is actually playing.
101-
for _ in range(8):
102-
state = driver.execute_script(_FORCE_PLAY_JS)
103-
if state == "playing":
104-
break
105-
time.sleep(1)
106-
# Poll for the skip-ad button for up to 30s; click whatever shows.
107-
deadline = time.monotonic() + 30
108-
skipped = False
109-
while time.monotonic() < deadline:
110-
ad_state = driver.execute_script(_AD_STATE_JS)
111-
if ad_state != "ad":
112-
break
113-
if _click_first_visible(driver, _SKIP_AD_SELECTORS):
114-
skipped = True
115-
time.sleep(1)
116-
break
117-
time.sleep(1)
118-
if not skipped:
119-
# Wait out non-skippable pre-roll ads up to ~30s more.
120-
deadline = time.monotonic() + 30
121-
while time.monotonic() < deadline:
122-
if driver.execute_script(_AD_STATE_JS) != "ad":
123-
break
124-
time.sleep(1)
125-
# Force-play once more in case the ad transition paused the video.
126-
driver.execute_script(_FORCE_PLAY_JS)
136+
_navigate_and_play(driver)
127137
time.sleep(LISTEN_SECONDS)
128138
except Exception as error: # pylint: disable=broad-except
129139
print(f"counting_stars: navigation failed ({error!r})", file=sys.stderr)

je_web_runner/utils/a11y_trend/trend.py

Lines changed: 22 additions & 16 deletions
Original file line numberDiff line numberDiff line change
@@ -48,25 +48,31 @@ def aggregate_history(history: Iterable[Dict[str, Any]]) -> List[A11yTrendPoint]
4848
raise A11yTrendError("history must be iterable")
4949
buckets: Dict[str, A11yTrendPoint] = {}
5050
for index, entry in enumerate(history):
51-
if not isinstance(entry, dict):
52-
raise A11yTrendError(f"history[{index}] must be an object")
53-
label = _bucket_label(entry.get("timestamp"))
54-
violations = entry.get("violations") or []
55-
if not isinstance(violations, list):
56-
raise A11yTrendError(f"history[{index}].violations must be a list")
57-
point = buckets.setdefault(label, A11yTrendPoint(label=label))
58-
for violation in violations:
59-
if not isinstance(violation, dict):
60-
continue
61-
impact = str(violation.get("impact") or "unknown")
62-
count = 1
63-
nodes = violation.get("nodes")
64-
if isinstance(nodes, list) and nodes:
65-
count = len(nodes)
66-
point.impacts[impact] = point.impacts.get(impact, 0) + count
51+
_absorb_entry(buckets, index, entry)
6752
return sorted(buckets.values(), key=lambda p: p.label)
6853

6954

55+
def _absorb_entry(buckets: Dict[str, A11yTrendPoint], index: int, entry: Any) -> None:
56+
if not isinstance(entry, dict):
57+
raise A11yTrendError(f"history[{index}] must be an object")
58+
label = _bucket_label(entry.get("timestamp"))
59+
violations = entry.get("violations") or []
60+
if not isinstance(violations, list):
61+
raise A11yTrendError(f"history[{index}].violations must be a list")
62+
point = buckets.setdefault(label, A11yTrendPoint(label=label))
63+
for violation in violations:
64+
_count_violation(point, violation)
65+
66+
67+
def _count_violation(point: A11yTrendPoint, violation: Any) -> None:
68+
if not isinstance(violation, dict):
69+
return
70+
impact = str(violation.get("impact") or "unknown")
71+
nodes = violation.get("nodes")
72+
count = len(nodes) if isinstance(nodes, list) and nodes else 1
73+
point.impacts[impact] = point.impacts.get(impact, 0) + count
74+
75+
7076
def render_html(points: List[A11yTrendPoint], title: str = "A11y trend") -> str:
7177
"""Render a self-contained HTML page with table + SVG line chart."""
7278
rows = []

je_web_runner/utils/bidi_backend/bridge.py

Lines changed: 5 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -184,10 +184,11 @@ def unsubscribe(self, subscription: BidiSubscription) -> None:
184184
self._subscriptions.pop(subscription.subscription_id, None)
185185

186186
def unsubscribe_all(self) -> None:
187-
# NOSONAR S7504 — the list() snapshot is required because
188-
# ``self.unsubscribe`` mutates ``self._subscriptions`` during the
189-
# iteration, which would raise RuntimeError otherwise.
190-
for sub in list(self._subscriptions.values()):
187+
# The list() snapshot is required because ``self.unsubscribe``
188+
# mutates ``self._subscriptions`` during iteration, which would
189+
# raise ``RuntimeError`` if iterated lazily.
190+
snapshot = list(self._subscriptions.values()) # NOSONAR S7504 — see comment above
191+
for sub in snapshot:
191192
self.unsubscribe(sub)
192193

193194
def active_subscriptions(self) -> List[BidiSubscription]:

je_web_runner/utils/browser_pool/pool.py

Lines changed: 18 additions & 17 deletions
Original file line numberDiff line numberDiff line change
@@ -90,28 +90,29 @@ def checkout(self, timeout: float = 30.0) -> PooledSession:
9090
raise BrowserPoolError("pool is closed")
9191
deadline = time.monotonic() + timeout
9292
while True:
93-
try:
94-
session = self._available.get_nowait()
95-
except Empty:
96-
if self._can_grow():
97-
session = self._spawn()
98-
else:
99-
remaining = deadline - time.monotonic()
100-
if remaining <= 0:
101-
raise BrowserPoolError(
102-
f"no session available within {timeout}s"
103-
)
104-
try:
105-
session = self._available.get(timeout=remaining)
106-
except Empty:
107-
raise BrowserPoolError(
108-
f"no session available within {timeout}s"
109-
) from None
93+
session = self._acquire_session(timeout, deadline)
11094
if not self._is_healthy(session):
11195
self._destroy(session)
11296
continue
11397
return session
11498

99+
def _acquire_session(self, timeout: float, deadline: float) -> PooledSession:
100+
try:
101+
return self._available.get_nowait()
102+
except Empty:
103+
pass
104+
if self._can_grow():
105+
return self._spawn()
106+
remaining = deadline - time.monotonic()
107+
if remaining <= 0:
108+
raise BrowserPoolError(f"no session available within {timeout}s")
109+
try:
110+
return self._available.get(timeout=remaining)
111+
except Empty:
112+
raise BrowserPoolError(
113+
f"no session available within {timeout}s"
114+
) from None
115+
115116
def checkin(self, session: PooledSession) -> None:
116117
if self._closed:
117118
self._destroy(session)

je_web_runner/utils/driver_pin/pinner.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -170,8 +170,8 @@ def _extract_archive(archive_format: str, payload: bytes, target_dir: Path) -> N
170170
_safe_extract_zip(zf, target_dir) # NOSONAR S5042 — members validated above
171171
return
172172
if archive_format == "tar.gz":
173-
with tarfile.open(fileobj=io.BytesIO(payload), mode="r:gz") as tf:
174-
_safe_extract_tar(tf, target_dir) # NOSONAR S5042 — members validated above
173+
with tarfile.open(fileobj=io.BytesIO(payload), mode="r:gz") as tf: # NOSONAR S5042 — _safe_extract_tar validates members below
174+
_safe_extract_tar(tf, target_dir)
175175
return
176176
raise DriverPinError(f"unsupported archive format {archive_format!r}")
177177

je_web_runner/utils/fanout/fanout.py

Lines changed: 28 additions & 19 deletions
Original file line numberDiff line numberDiff line change
@@ -96,38 +96,47 @@ def run_fan_out(
9696
"""
9797
if not tasks:
9898
raise FanOutError("tasks must be non-empty")
99-
parsed: List[tuple] = []
100-
for index, entry in enumerate(tasks):
101-
if callable(entry):
102-
parsed.append((f"task-{index}", entry))
103-
continue
104-
if isinstance(entry, tuple) and len(entry) == 2 and callable(entry[1]):
105-
parsed.append((str(entry[0]), entry[1]))
106-
continue
107-
raise FanOutError(f"tasks[{index}] must be callable or (name, callable)")
99+
parsed = _parse_tasks(tasks)
108100
workers = max_workers or min(len(parsed), 8)
109101
result = FanOutResult()
110102
with ThreadPoolExecutor(max_workers=workers) as pool:
111103
future_to_name = {
112104
pool.submit(_timed_run, name, fn): name
113105
for name, fn in parsed
114106
}
115-
try:
116-
for future in as_completed(future_to_name, timeout=timeout):
117-
outcome = future.result()
118-
result.outcomes.append(outcome)
119-
if fail_fast and not outcome.succeeded:
120-
for pending in future_to_name:
121-
pending.cancel()
122-
break
123-
except TimeoutError as error:
124-
raise FanOutError(f"fan-out timed out after {timeout}s") from error
107+
_collect_results(future_to_name, result, timeout, fail_fast)
125108
web_runner_logger.info(
126109
f"fanout completed n={len(result.outcomes)} ok={result.succeeded}"
127110
)
128111
return result
129112

130113

114+
def _parse_tasks(tasks: Sequence[Any]) -> List[tuple]:
115+
parsed: List[tuple] = []
116+
for index, entry in enumerate(tasks):
117+
if callable(entry):
118+
parsed.append((f"task-{index}", entry))
119+
elif isinstance(entry, tuple) and len(entry) == 2 and callable(entry[1]):
120+
parsed.append((str(entry[0]), entry[1]))
121+
else:
122+
raise FanOutError(f"tasks[{index}] must be callable or (name, callable)")
123+
return parsed
124+
125+
126+
def _collect_results(future_to_name: Dict[Any, str], result: FanOutResult,
127+
timeout: Optional[float], fail_fast: bool) -> None:
128+
try:
129+
for future in as_completed(future_to_name, timeout=timeout):
130+
outcome = future.result()
131+
result.outcomes.append(outcome)
132+
if fail_fast and not outcome.succeeded:
133+
for pending in future_to_name:
134+
pending.cancel()
135+
break
136+
except TimeoutError as error:
137+
raise FanOutError(f"fan-out timed out after {timeout}s") from error
138+
139+
131140
def _timed_run(name: str, fn: _Task) -> _TaskOutcome:
132141
start = time.monotonic()
133142
try:

je_web_runner/utils/har_replay/server.py

Lines changed: 4 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -202,7 +202,10 @@ def _serve(self) -> None:
202202
self.send_header("X-Content-Type-Options", "nosniff")
203203
self.send_header("Content-Length", str(len(payload)))
204204
self.end_headers()
205-
self.wfile.write(payload)
205+
# _safe_echo above strips control + HTML-special bytes; this
206+
# writes a JSON envelope with nosniff so reflected fragments
207+
# can't be sniffed as HTML by the browser.
208+
self.wfile.write(payload) # NOSONAR S5131 — payload sanitised + JSON + nosniff
206209
return
207210
body_bytes = _entry_body_bytes(entry)
208211
self.send_response(entry.status)

je_web_runner/utils/md_authoring/markdown_to_actions.py

Lines changed: 1 addition & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -82,10 +82,7 @@ def _type_actions(text: str, selector: str) -> List[List[Any]]:
8282
_SCREENSHOT_RE = re.compile(r"^screenshot$", re.IGNORECASE)
8383
# Template name allows ASCII identifier chars plus dashes; the bounded
8484
# {0,80} caps the worst case at linear in input length.
85-
_TEMPLATE_RE = re.compile( # NOSONAR S5852 — class size bounded by {0,80}
86-
# ``-`` placed at the end of the class so it isn't interpreted as a
87-
# range; ``\w`` already covers A-Za-z0-9_ so dropping the explicit
88-
# spans avoids the S5869 duplicate-class warning.
85+
_TEMPLATE_RE = re.compile( # NOSONAR S5852 / S5869 — bounded class, ``\w`` overlap with first class is intentional
8986
r"^run\s+template\s+([A-Za-z_][\w-]{0,80})$", re.IGNORECASE,
9087
)
9188
_QUIT_RE = re.compile(r"^quit$", re.IGNORECASE)

je_web_runner/utils/storybook/visual_snapshots.py

Lines changed: 40 additions & 27 deletions
Original file line numberDiff line numberDiff line change
@@ -95,41 +95,54 @@ def capture_story_snapshots(
9595
compare = comparator or _default_comparator
9696
report = StorybookSnapshotReport()
9797
for story in stories:
98-
if not isinstance(story, StorybookStory):
99-
raise StorybookSnapshotError("stories must be StorybookStory instances")
100-
url = f"{base_url}/{story.iframe_path}"
101-
try:
102-
navigate(url)
103-
png_bytes = take_screenshot(url)
104-
except Exception as error: # pylint: disable=broad-except
105-
raise StorybookSnapshotError(
106-
f"snapshot failed for {story.id!r}: {error!r}"
107-
) from error
108-
if not isinstance(png_bytes, (bytes, bytearray)) or not png_bytes:
109-
raise StorybookSnapshotError(
110-
f"take_screenshot returned empty payload for {story.id!r}"
111-
)
112-
filename = safe_filename(story)
113-
target = out_dir / filename
114-
target.write_bytes(png_bytes)
115-
outcome = SnapshotOutcome(
116-
story_id=story.id,
117-
image_path=target,
118-
matched_baseline=True,
98+
outcome = _snapshot_story(
99+
story, base_url, out_dir, take_screenshot, navigate,
100+
baseline_path_root, compare,
119101
)
120-
if baseline_path_root is not None:
121-
baseline = baseline_path_root / filename
122-
comparison = compare(bytes(png_bytes), baseline)
123-
outcome.matched_baseline = bool(comparison.get("matched"))
124-
outcome.diff_percent = float(comparison.get("diff_percent", 0.0))
125-
outcome.note = comparison.get("note")
126102
report.outcomes.append(outcome)
127103
web_runner_logger.info(
128104
f"storybook_snapshots story={story.id!r} matched={outcome.matched_baseline}"
129105
)
130106
return report
131107

132108

109+
def _snapshot_story(
110+
story: StorybookStory,
111+
base_url: str,
112+
out_dir: Path,
113+
take_screenshot: Screenshot,
114+
navigate: Callable[[str], None],
115+
baseline_path_root: Optional[Path],
116+
compare: Comparator,
117+
) -> SnapshotOutcome:
118+
if not isinstance(story, StorybookStory):
119+
raise StorybookSnapshotError("stories must be StorybookStory instances")
120+
url = f"{base_url}/{story.iframe_path}"
121+
try:
122+
navigate(url)
123+
png_bytes = take_screenshot(url)
124+
except Exception as error: # pylint: disable=broad-except
125+
raise StorybookSnapshotError(
126+
f"snapshot failed for {story.id!r}: {error!r}"
127+
) from error
128+
if not isinstance(png_bytes, (bytes, bytearray)) or not png_bytes:
129+
raise StorybookSnapshotError(
130+
f"take_screenshot returned empty payload for {story.id!r}"
131+
)
132+
filename = safe_filename(story)
133+
target = out_dir / filename
134+
target.write_bytes(png_bytes)
135+
outcome = SnapshotOutcome(
136+
story_id=story.id, image_path=target, matched_baseline=True,
137+
)
138+
if baseline_path_root is not None:
139+
comparison = compare(bytes(png_bytes), baseline_path_root / filename)
140+
outcome.matched_baseline = bool(comparison.get("matched"))
141+
outcome.diff_percent = float(comparison.get("diff_percent", 0.0))
142+
outcome.note = comparison.get("note")
143+
return outcome
144+
145+
133146
def assert_no_visual_regressions(report: StorybookSnapshotReport,
134147
allow_stories: Optional[Iterable[str]] = None) -> None:
135148
allow = set(allow_stories or [])

0 commit comments

Comments (0)