Coverage for src/lilbee/cli/tui/widgets/crawl_dialog.py: 100%
90 statements
coverage.py v7.13.4, created at 2026-05-15 20:55 +0000
1"""Modal dialog for configuring a web crawl."""
3from __future__ import annotations
5from dataclasses import dataclass
6from typing import ClassVar
8from textual.app import ComposeResult
9from textual.binding import Binding, BindingType
10from textual.containers import Center, Vertical
11from textual.screen import ModalScreen
12from textual.widgets import Button, Checkbox, Collapsible, Input, Label, Static
14from lilbee.cli.tui import messages as msg
17@dataclass(frozen=True)
18class CrawlParams:
19 """Validated crawl parameters returned by CrawlDialog.
21 depth: None = whole-site unbounded. 0 = single URL only. Positive int =
22 explicit link-follow depth cap. max_pages: None = no cap. Positive int =
23 explicit page cap.
24 """
26 url: str
27 depth: int | None
28 max_pages: int | None
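
# Illustrative only (not part of the module): how the docstring's depth and
# max_pages cases map onto concrete values; example.com is a placeholder URL.
#
#   CrawlParams(url="https://example.com", depth=None, max_pages=None)  # whole site, no caps
#   CrawlParams(url="https://example.com", depth=0, max_pages=None)     # single URL only
#   CrawlParams(url="https://example.com", depth=2, max_pages=50)       # capped crawl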


class CrawlDialog(ModalScreen[CrawlParams | None]):
    """Modal dialog that collects URL, recursion toggle, and optional caps."""

    CSS_PATH = "crawl_dialog.tcss"
    AUTO_FOCUS = "#crawl-url-input"

    BINDINGS: ClassVar[list[BindingType]] = [
        Binding("escape", "cancel", "Cancel", show=False),
    ]

    def compose(self) -> ComposeResult:
        with Vertical():
            yield Static(msg.CRAWL_DIALOG_TITLE, id="crawl-title")
            yield Label(msg.CRAWL_DIALOG_URL_LABEL)
            yield Input(
                placeholder=msg.CRAWL_DIALOG_URL_PLACEHOLDER,
                id="crawl-url-input",
            )
            yield Checkbox(
                msg.CRAWL_DIALOG_RECURSIVE_LABEL,
                value=True,
                id="crawl-recursive-checkbox",
            )
            with Collapsible(title=msg.CRAWL_DIALOG_ADVANCED_TITLE, id="crawl-advanced"):
                yield Label(msg.CRAWL_DIALOG_DEPTH_LABEL, classes="crawl-field-label")
                yield Input(
                    placeholder=msg.CRAWL_DIALOG_DEPTH_PLACEHOLDER,
                    id="crawl-depth-input",
                )
                yield Label(msg.CRAWL_DIALOG_MAX_PAGES_LABEL, classes="crawl-field-label")
                yield Input(
                    placeholder=msg.CRAWL_DIALOG_MAX_PAGES_PLACEHOLDER,
                    id="crawl-max-pages-input",
                )
            yield Static("", id="crawl-error")
            with Center():
                yield Button(msg.CRAWL_DIALOG_SUBMIT, variant="primary", id="crawl-submit")
                yield Button(msg.CRAWL_DIALOG_CANCEL, variant="default", id="crawl-cancel")
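
    # Resulting widget tree (sketch of what compose builds above):
    #   Vertical > [title Static, URL Label+Input, recursive Checkbox,
    #               Collapsible > [depth Label+Input, max-pages Label+Input],
    #               error Static, Center > [submit Button, cancel Button]]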

    def on_button_pressed(self, event: Button.Pressed) -> None:
        if event.button.id == "crawl-submit":
            self._try_submit()
        else:
            self.dismiss(None)

    def on_input_submitted(self, _event: Input.Submitted) -> None:
        self._try_submit()

    @staticmethod
    def _parse_optional_non_negative_int(value: str) -> int | None:
        """Parse a non-negative integer from *value*; empty string returns None.

        None means "no cap" in the crawl API. Zero is meaningful for the
        depth field (single-URL crawl per the crawler contract). Raises
        ValueError on non-numeric input or negative integers.
        """
        if not value:
            return None
        n = int(value)
        if n < 0:
            raise ValueError
        return n

    @staticmethod
    def _parse_optional_positive_int(value: str) -> int | None:
        """Parse a positive integer from *value*; empty string returns None.

        None means "no cap" in the crawl API. Raises ValueError on non-numeric
        input or non-positive integers. Used for fields like max_pages where
        zero has no useful meaning.
        """
        if not value:
            return None
        n = int(value)
        if n <= 0:
            raise ValueError
        return n
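
    # Illustrative contract of the two parsers above (sketch, not executed):
    #   _parse_optional_non_negative_int("")   -> None  (no cap)
    #   _parse_optional_non_negative_int("0")  -> 0     (single-URL depth)
    #   _parse_optional_non_negative_int("-1") -> raises ValueError
    #   _parse_optional_positive_int("")       -> None  (no cap)
    #   _parse_optional_positive_int("0")      -> raises ValueError
    #   _parse_optional_positive_int("25")     -> 25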

    def _validate(self) -> CrawlParams | str:
        """Validate inputs. Returns CrawlParams on success, error message on failure."""
        from lilbee.crawler import is_url, require_valid_crawl_url

        url = self.query_one("#crawl-url-input", Input).value.strip()
        recursive = self.query_one("#crawl-recursive-checkbox", Checkbox).value
        depth_str = self.query_one("#crawl-depth-input", Input).value.strip()
        max_pages_str = self.query_one("#crawl-max-pages-input", Input).value.strip()

        if not url:
            return msg.CRAWL_DIALOG_URL_REQUIRED

        if not is_url(url):
            url = f"https://{url}"

        try:
            require_valid_crawl_url(url)
        except ValueError as exc:
            return msg.CRAWL_DIALOG_INVALID_URL.format(error=exc)

        if not recursive:
            return CrawlParams(url=url, depth=0, max_pages=None)

        try:
            # depth=0 means "single URL" per the crawler contract; allow it.
            depth = self._parse_optional_non_negative_int(depth_str)
        except ValueError:
            return msg.CRAWL_DIALOG_INVALID_NUMBER.format(field=msg.CRAWL_DIALOG_DEPTH_LABEL)

        try:
            max_pages = self._parse_optional_positive_int(max_pages_str)
        except ValueError:
            return msg.CRAWL_DIALOG_INVALID_NUMBER.format(field=msg.CRAWL_DIALOG_MAX_PAGES_LABEL)

        return CrawlParams(url=url, depth=depth, max_pages=max_pages)
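
    # Illustrative outcomes of _validate (sketch based on the logic above;
    # assumes is_url rejects scheme-less strings such as "example.com"):
    #   url=""                 -> msg.CRAWL_DIALOG_URL_REQUIRED
    #   url="example.com"      -> normalized to "https://example.com" first
    #   recursive unchecked    -> CrawlParams(url=url, depth=0, max_pages=None)
    #   depth="abc" or "-1"    -> msg.CRAWL_DIALOG_INVALID_NUMBER (depth field)
    #   both caps left blank   -> CrawlParams(url=url, depth=None, max_pages=None)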

    def _try_submit(self) -> None:
        """Validate inputs and dismiss with CrawlParams or show an error."""
        result = self._validate()
        error_widget = self.query_one("#crawl-error", Static)
        # _validate returns str (error) or CrawlParams; isinstance disambiguates.
        if isinstance(result, str):
            error_widget.update(result)
            return
        error_widget.update("")
        self.dismiss(result)

    def action_cancel(self) -> None:
        self.dismiss(None)
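

# Illustrative usage sketch (not part of this module). Assumes a Textual App
# subclass; CrawlHost and _start_crawl are hypothetical names for this example.
#
# from textual.app import App
#
# class CrawlHost(App[None]):
#     def action_open_crawl_dialog(self) -> None:
#         def _on_dismiss(params: CrawlParams | None) -> None:
#             # The dialog dismisses with CrawlParams on submit, None on cancel.
#             if params is not None:
#                 self._start_crawl(params.url, params.depth, params.max_pages)
#
#         self.push_screen(CrawlDialog(), _on_dismiss)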