Coverage for src / lilbee / server / handlers / config.py: 100%

66 statements  

« prev     ^ index     » next       coverage.py v7.13.4, created at 2026-05-15 20:55 +0000

1"""Config read/update handlers for the HTTP server.""" 

2 

3from __future__ import annotations 

4 

5import copy 

6import functools 

7from typing import Any 

8 

9from pydantic_core import PydanticUndefined 

10 

11from lilbee.config_meta import ( 

12 MODEL_ROLE_FIELDS as _MODEL_ROLE_FIELDS, 

13) 

14from lilbee.config_meta import ( 

15 PUBLIC_CONFIG_FIELDS as _PUBLIC_CONFIG_FIELDS, 

16) 

17from lilbee.config_meta import ( 

18 REINDEX_FIELDS, 

19 WRITABLE_CONFIG_FIELDS, 

20) 

21from lilbee.core import settings 

22from lilbee.core.config import Config, cfg 

23from lilbee.providers.sdk_backend import API_KEY_FIELDS 

24from lilbee.providers.sdk_llm_provider import inject_provider_keys 

25from lilbee.server.models import ConfigResponse, ConfigUpdateResponse 

26 

27_MIN_CHUNK_SIZE = 64 

28 

29 

def _validate_config_updates(updates: dict[str, Any]) -> None: 

    """Validate an update payload before any state is touched. 

    Checks, in order: every key is a known writable field, null is only 

    supplied for fields that allow it, and chunk_size (when given as an 

    int) meets the minimum. Raises ValueError on the first violation. 

    """ 

    for field, new_value in updates.items(): 

        if field not in WRITABLE_CONFIG_FIELDS: 

            raise ValueError(f"Unknown or read-only config field: {field}") 

        allows_null = WRITABLE_CONFIG_FIELDS[field] 

        if new_value is None and not allows_null: 

            raise ValueError(f"Field '{field}' does not accept null") 

    # Range check only applies when the caller actually sent an int; a bad
    # type here is left for pydantic's assignment validation to reject.
    size = updates.get("chunk_size") 

    if isinstance(size, int) and size < _MIN_CHUNK_SIZE: 

        raise ValueError(f"chunk_size must be >= {_MIN_CHUNK_SIZE}") 

40 

41 

def _apply_config_updates(updates: dict[str, Any]) -> tuple[dict[str, str], list[str]]: 

    """Mutate the live config in place, restoring every field on failure. 

    A null value clears the field (queued for deletion on disk); any other 

    value is assigned through setattr — pydantic's validate_assignment can 

    raise here — and queued for persistence as its post-coercion string. 

    If any assignment raises, every touched field is restored from the 

    pre-change snapshot and the exception propagates unchanged. 

    Returns: 

        (fields_to_persist, fields_to_delete) for the disk write. 

    """ 

    previous = {name: getattr(cfg, name) for name in updates} 

    persist: dict[str, str] = {} 

    remove: list[str] = [] 

    try: 

        for name, new_value in updates.items(): 

            setattr(cfg, name, new_value) 

            if new_value is None: 

                remove.append(name) 

            else: 

                # Re-read after assignment so the persisted string reflects
                # any coercion pydantic performed on the incoming value.
                persist[name] = str(getattr(cfg, name)) 

    except Exception: 

        # Roll back ALL fields, not just the failing one — snapshot values
        # came from cfg, so re-assigning them cannot fail validation.
        for name, old_value in previous.items(): 

            setattr(cfg, name, old_value) 

        raise 

    return persist, remove 

62 

63 

async def update_config(updates: dict[str, Any]) -> ConfigUpdateResponse: 

    """Apply a partial update to the writable configuration fields. 

    Three phases, so a bad payload can never half-apply: 

    1. Validation — every key and null-acceptability check runs before a 

       single mutation, catching typos and bad input up front. 

    2. In-memory apply — current values are snapshotted, then each update 

       is assigned (pydantic's validate_assignment rejects bad types); on 

       any failure ALL fields roll back so the config stays consistent. 

    3. Batch persistence — one file write for set fields and one for 

       cleared fields, rather than a write per field, avoiding partial 

       writes on crash. 

    A naive setattr-and-save loop would let a payload such as 

    {"chunk_size": 1024, "chunk_overlap": "bad"} error out with 

    chunk_size already changed — the caller sees an error while the 

    config was silently modified. The snapshot/rollback prevents that. 

    Side effect: provider API keys are re-injected when any key field 

    was part of the update. 

    Returns: 

        ConfigUpdateResponse listing the updated fields and whether a 

        reindex is now required. 

    """ 

    _validate_config_updates(updates) 

    persist, remove = _apply_config_updates(updates) 

    if persist: 

        settings.update_values(cfg.data_root, persist) 

    if remove: 

        settings.delete_values(cfg.data_root, remove) 

    touched = set(updates) 

    if API_KEY_FIELDS & touched: 

        inject_provider_keys() 

    return ConfigUpdateResponse( 

        updated=list(updates), 

        reindex_required=bool(REINDEX_FIELDS & touched), 

    ) 

92 

93 

async def get_config() -> ConfigResponse: 

    """Return the public subset of the current configuration values.""" 

    snapshot = cfg.model_dump() 

    public = {name: value for name, value in snapshot.items() if name in _PUBLIC_CONFIG_FIELDS} 

    return ConfigResponse(**public) 

99 

100 

@functools.cache 

def _compute_config_defaults() -> dict[str, Any]: 

    """Build the default-value map for exposed config fields (cached per process). 

    A field is exposed when it is either writable-and-public or one of the 

    model-role fields; fields whose default cannot be materialized are 

    skipped. 

    """ 

    out: dict[str, Any] = {} 

    for field_name, field_info in Config.model_fields.items(): 

        exposed = field_name in _MODEL_ROLE_FIELDS or ( 

            field_name in WRITABLE_CONFIG_FIELDS and field_name in _PUBLIC_CONFIG_FIELDS 

        ) 

        if not exposed: 

            continue 

        default = field_info.get_default(call_default_factory=True) 

        if default is PydanticUndefined:  # pragma: no cover 

            continue 

        out[field_name] = default 

    return out 

114 

115 

async def get_config_defaults() -> ConfigResponse: 

    """Return the canonical default for every public config field. 

    Covers writable fields (resettable via PATCH /api/config) and the 

    model-role fields (resettable via PUT /api/models/<role>). 

    The cached defaults dict is deep-copied before use so callers that 

    mutate the response (list-valued fields such as 

    ``crawl_exclude_patterns``) cannot corrupt subsequent calls. 

    """ 

    defaults = copy.deepcopy(_compute_config_defaults()) 

    return ConfigResponse(**defaults) 