2# +==== BEGIN CatFeeder =================+
5# ...............)..(.')
7# ...............\(__)|
8# Inspired by Joan Stark
9# source https://www.asciiart.eu/
13# FILE: redis_instance.py
14# CREATION DATE: 11-10-2025
15# LAST Modified: 14:51:35 19-12-2025
17# This is the backend server in charge of making the actual website work.
19# COPYRIGHT: (c) Cat Feeder
# 20# PURPOSE: The file in charge of handling the Redis connection as well as the cache.
22# +==== END CatFeeder =================+
import os
import json
import hashlib

from typing import Any, Callable, Dict, List, Optional, Union, cast
from platform import system

import redis

from display_tty import Disp, initialise_logger

from .redis_args import RedisArgs
from . import redis_constants as CONST
def _build_redis_client() -> "redis.Redis":
    """Create and return a configured Redis client.

    This function does not connect immediately; the client will establish
    a connection upon first command execution.

    Returns:
        redis.Redis: Configured Redis client instance.
    """
    socket_path = os.getenv(CONST.REDIS_SOCKET_KEY, CONST.REDIS_SOCKET_DEFAULT)
    password = os.getenv(CONST.REDIS_PASSWORD_KEY)
    # BUG FIX: platform.system() returns "Windows"/"Linux"/"Darwin"; after
    # .lower() the value can never equal "Windows" (capital W), so the
    # original comparison was always True and a unix socket path was used
    # even on Windows. Compare against the lowercase literal instead.
    if system().lower() != "windows":
        unix_socket_path = socket_path
    else:
        unix_socket_path = None
    # NOTE(review): the construction site is truncated in this view; only
    # unix_socket_path is visible and password is read just above — confirm
    # the remaining keyword arguments against version control.
    node: redis.Redis = redis.Redis(
        unix_socket_path=unix_socket_path,
        password=password,
    )
    return node
# NOTE(review): garbled extraction of the RedisCaching class docstring — the
# leading numbers fused into each line are the original file's line numbers,
# and the opening/closing triple quotes plus many lines are missing. No
# comments are inserted below because the span is (textually) inside a string
# literal; recover the exact text from version control before editing.
63 """High-level Redis cache facade for SQL-related data.
65 Provides read-through caching, key namespacing, and targeted invalidation for database metadata (version, schema objects), table data, and row counts. The layer is intentionally thin: actual SQL execution is delegated to caller‑supplied callables (``fetcher``/``writer``) so this class remains decoupled from any specific database driver.
68 - JSON Serialization: All cached values are stored as JSON for portability and human inspection. Tuples are normalized to lists.
69 - Deterministic Keys: Keys follow the pattern ``{namespace}:{db_label}:{category}:{name}[:{param_hash}]``. The optional hash is a stable SHA‑256 prefix over normalized parameters ensuring low collision probability while keeping keys concise.
70 - Lazy Client: A Redis client is created only on first access allowing import/use without forcing an immediate socket connection.
71 - Safe Invalidation: Category/table/trigger specific invalidation methods use ``SCAN`` + ``DELETE`` rather than broad ``FLUSHDB`` calls.
72 - Error Sentinel Respect: Methods can be given an ``error_token`` so failed results are not cached (preventing sticky error states).
75 cache = RedisCaching()
76 rows = cache.get_data_from_table(
78 column=["id", "name"],
81 fetcher=lambda: sql.get_data_from_table("users", ["id", "name"], ["active=1"], True),
82 ttl_seconds=cache.default_ttls["data"],
87 result = cache.update_data_in_table(
89 data=["42", "new_name"],
90 column=["id", "name"],
92 writer=lambda t, d, c, w: sql.update_data_in_table(t, d, c, w)
93 ) # On success, relevant keys are invalidated automatically.
96 namespace (str): Top‑level logical partition for all keys (default ``"sql"``).
97 db_label (str): Database label inserted into every key for multi‑DB isolation.
98 default_ttls (dict[str,int]): Per category TTL defaults (``version``, ``schema``, ``data``, ``count``).
99 client (redis.Redis | RedisArgs | None): Underlying Redis client or lazy args container.
100 disp (Disp): Logger used for debug and diagnostic messages.
# NOTE(review): garbled extraction — statements are split across lines and the
# leading numbers are the original file's line numbers.
# Class attribute: slot for a previously created instance, used by __new__
# for singleton-style reuse.
102 existing_instance: Optional[
"RedisCaching"] =
None
# Class attribute: shared class-level logger (debug output disabled here;
# __init__ toggles it via update_disp_debug).
103 disp: Disp = initialise_logger(__qualname__,
False)
def __new__(cls, *args, existing_instance: Optional["RedisCaching"] = None, **kwargs):
    """Return a new or previously existing instance.

    If ``existing_instance`` is provided and is an instance of ``cls`` (the
    exact class being constructed, potentially a subclass), it is returned
    as-is and ``__init__`` will not run again. Otherwise, a fresh instance
    of ``cls`` is allocated normally.

    Args:
        existing_instance (RedisCaching | None): An existing instance to reuse.

    Returns:
        RedisCaching: Either the supplied ``existing_instance`` or a freshly allocated object.
    """
    if existing_instance is not None and isinstance(existing_instance, cls):
        return existing_instance
    # Fallback allocation (this line was truncated out of the extraction):
    # allocate normally; extra args/kwargs are consumed by __init__, not here.
    return super().__new__(cls)
# NOTE(review): garbled extraction of __init__ — many original lines are
# missing (gaps in the fused line numbers) and statements are split mid-token.
# Per the generated signature index, the full signature is:
#   __init__(self, client=None, debug=False, *, namespace="sql",
#            db_label=None, default_ttls=None, existing_instance=None)
# Recover the exact body from version control before editing.
125 client: Optional[Union[redis.Redis, RedisArgs]] =
None,
128 namespace: str =
"sql",
129 db_label: Optional[str] =
None,
130 default_ttls: Optional[Dict[str, int]] =
None,
131 existing_instance: Optional[
"RedisCaching"] =
None,
133 """Initialize the Redis caching layer.
136 client (redis.Redis | None): Optional Redis client. When omitted, a client is built from environment variables.
137 namespace (str): Top-level namespace used to prefix keys.
138 db_label (str | None): Optional database label to isolate keys per DB. Defaults to the ``DB_NAME`` environment variable or ``"default"``.
139 default_ttls (dict[str, int] | None): Optional TTLs in seconds per category (``version``, ``schema``, ``data``, ``count``).
140 existing_instance (RedisCaching | None): When provided, indicates this __init__ call is for a reused instance and initialization should be skipped.
# Enable/disable debug logging first so subsequent log_debug calls honor it.
143 self.
disp.update_disp_debug(debug)
144 self.
disp.log_debug(
"Initialising...")
# Guard: when __new__ returned a reused instance, skip re-initialization
# (the body of this branch is truncated out of the extraction).
147 if existing_instance
is not None:
151 if client
is not None:
# Namespace/db_label resolution: prefer an explicit non-blank db_label,
# then the DB_NAME environment variable, finally the literal "default".
157 ns_candidate = namespace.strip()
163 label_candidate: Optional[str]
164 if db_label
is not None and db_label.strip():
165 label_candidate = db_label
167 env_label = os.getenv(
"DB_NAME")
168 if env_label
is not None and env_label.strip():
169 label_candidate = env_label
171 label_candidate =
"default"
# Default TTLs per cache category from CONST (the "data" entry is missing
# from this extraction — presumably between "schema" and "count").
176 "version": CONST.HOUR_24,
177 "schema": CONST.HOUR_1,
179 "count": CONST.MIN_1,
# Caller-supplied TTL overrides are merged over the defaults.
182 for k, v
in default_ttls.items():
# Diagnostic only: classify the supplied client (None / RedisArgs / Redis)
# for the initialization log line below.
187 client_kind: str =
"None"
191 client_kind =
"RedisArgs"
193 client_kind =
"Redis"
195 f
"Initialized RedisCaching namespace='{self.namespace}', db_label='{self.db_label}', client={client_kind}"
197 self.
disp.log_debug(
"Initialised")
def _stable_dump(self, obj: Any) -> str:
    """Return a stable JSON string for ``obj`` used in key hashing.

    Tuples are converted to lists for JSON compatibility; sets are
    converted to sorted lists. Dicts are sorted by key.

    Args:
        obj (Any): Any JSON-serializable Python object.

    Returns:
        str: A deterministically ordered JSON string.
    """
    def normalize(x: Any) -> Any:
        # Tuples -> lists (JSON has no tuple type).
        if isinstance(x, tuple):
            out_list: List[Any] = []
            for v in x:
                out_list.append(normalize(v))
            return out_list
        # Sets -> lists sorted by their JSON form, since sets are unordered.
        if isinstance(x, set):
            tmp: List[Any] = []
            for v in x:
                tmp.append(normalize(v))
            tmp.sort(key=lambda i: json.dumps(
                i, sort_keys=True, ensure_ascii=False))
            return tmp
        if isinstance(x, list):
            out_list2: List[Any] = []
            for v in x:
                out_list2.append(normalize(v))
            return out_list2
        # Dicts are rebuilt with keys in sorted order for determinism.
        if isinstance(x, dict):
            out_dict: Dict[str, Any] = {}
            for k in sorted(x.keys()):
                out_dict[k] = normalize(x[k])
            return out_dict
        # Scalars pass through unchanged.
        return x

    # Compact separators keep the dumped form (and thus hashed keys) short.
    return json.dumps(normalize(obj), separators=(",", ":"), ensure_ascii=False)
# NOTE(review): garbled extraction of _hash_params — the def line and the
# producer of ``dumped`` are missing; presumably ``dumped =
# self._stable_dump(params)`` given the method above — confirm in version
# control. Signature per the index: _hash_params(self, params: Any) -> str.
242 """Hash arbitrary parameters deterministically for compact keys.
245 params (Any): Parameters to be represented in the cache key.
248 str: A short SHA-256 hex digest prefix.
# A 20-hex-char (80-bit) SHA-256 prefix keeps keys short while making
# collisions unlikely.
251 digest = hashlib.sha256(dumped.encode(
"utf-8")).hexdigest()[:20]
252 self.
disp.log_debug(f
"Computed params hash='{digest}'")
def _key(self, category: str, name: str, params: Optional[Any] = None) -> str:
    """Build a namespaced Redis key.

    Key format: ``{namespace}:{db_label}:{category}:{name}[:{hash}]``

    Args:
        category (str): Logical grouping (e.g., ``schema``, ``data``).
        name (str): Base key name within the category.
        params (Any | None): Optional parameters to hash for uniqueness.

    Returns:
        str: Fully-qualified Redis key.
    """
    base = f"{self.namespace}:{self.db_label}:{category}:{name}"
    # Without params the base key is final; with params a stable hash suffix
    # disambiguates different parameterizations of the same name.
    if params is None:
        self.disp.log_debug(f"Key generated='{base}' (no params)")
        return base
    key = f"{base}:{self._hash_params(params)}"
    self.disp.log_debug(f"Key generated='{key}' (with params)")
    return key
def _serialize(self, value: Any) -> str:
    """Serialize a Python value to a JSON string.

    Tuples are converted to lists; on read we may convert back where needed.

    Args:
        value (Any): Value to serialize.

    Returns:
        str: JSON string representation.
    """
    def normalize(x: Any) -> Any:
        # Tuples -> lists (JSON has no tuple type).
        if isinstance(x, tuple):
            out_list: List[Any] = []
            for v in x:
                out_list.append(normalize(v))
            return out_list
        if isinstance(x, list):
            out_list2: List[Any] = []
            for v in x:
                out_list2.append(normalize(v))
            return out_list2
        # Unlike _stable_dump, dict key order is preserved here: this payload
        # is stored, not hashed, so determinism is not required.
        if isinstance(x, dict):
            out_dict: Dict[str, Any] = {}
            for k, v in x.items():
                out_dict[k] = normalize(v)
            return out_dict
        return x

    payload = json.dumps(normalize(value), ensure_ascii=False)
    self.disp.log_debug(f"Serialized payload length={len(payload)}")
    return payload
# NOTE(review): garbled extraction of _deserialize — the def line, the
# ``raw is None`` guard, the try statement and all return statements are
# missing. The docstring says None input yields None; the fallback returned
# after a JSONDecodeError is NOT visible here (could be None or the raw
# string) — confirm in version control. Signature per the index:
# _deserialize(self, raw: Optional[str]) -> Any.
310 """Deserialize a JSON string back to Python.
313 raw (str | None): Raw JSON string from Redis or ``None``.
316 Any: Deserialized Python value, or ``None`` if input is ``None``.
# Early-exit log for a cache miss (guard line truncated above this).
320 "Deserialize called with raw=None (cache miss)")
323 value = json.loads(raw)
324 self.
disp.log_debug(
"Deserialized payload successfully")
# Malformed payloads are logged as a warning rather than raised, so a
# corrupt cache entry cannot crash a read path.
326 except json.JSONDecodeError
as e:
327 self.
disp.log_warning(
328 f
"Failed to deserialize payload: {e}"
# NOTE(review): garbled extraction of _get_cached — the def line, the actual
# Redis GET call, the miss/hit branching and the return statements are all
# missing; only the three log sites remain. Presumably it fetches via the
# lazily-built client and funnels the raw value through _deserialize — confirm
# in version control. Signature per the index: _get_cached(self, key: str) -> Any.
333 """Get and deserialize a cached value for ``key``.
336 key (str): Redis key to fetch.
339 Any: Deserialized cached value, or ``None`` if not present.
341 self.
disp.log_debug(f
"GET key='{key}'")
345 self.
disp.log_debug(
"Cache miss")
347 self.
disp.log_debug(
"Cache hit")
# NOTE(review): garbled extraction of _set_cached — the serialization call and
# the actual SETEX/SET commands are missing; only the signature, docstring and
# the two log sites remain. The docstring says SETEX is used when
# ttl_seconds > 0, plain SET otherwise — confirm the exact client calls in
# version control.
350 def _set_cached(self, key: str, value: Any, ttl_seconds: int) ->
None:
351 """Serialize and set a value under ``key`` with TTL.
353 Uses ``SETEX`` when ``ttl_seconds > 0``; otherwise uses ``SET``.
356 key (str): Redis key.
357 value (Any): Python value to serialize and store.
358 ttl_seconds (int): Time to live in seconds; 0 or negative disables TTL.
# Branch 1 (ttl > 0): expiring write.
362 self.
disp.log_debug(f
"SETEX key='{key}', ttl={ttl_seconds}s")
# Branch 2: non-expiring write.
365 self.
disp.log_debug(f
"SET key='{key}' (no ttl)")
def _should_cache_union_result(self, result: Any, error_token: Optional[int]) -> bool:
    """Decide whether to cache a union-typed result.

    When ``error_token`` is provided, skip caching if result equals it.
    Otherwise, cache the value (including ints such as row counts).

    Args:
        result (Any): Value returned by the fetcher.
        error_token (int | None): Error sentinel value to avoid caching.

    Returns:
        bool: ``True`` if the value should be cached, else ``False``.
    """
    # No sentinel configured: everything (including falsy values) is cacheable.
    if error_token is None:
        self.disp.log_debug("No error_token provided; will cache result")
        return True
    # A result equal to the sentinel is an error and must not become sticky.
    decision = result != error_token
    self.disp.log_debug(
        f"Error token present; will_cache={decision} (result == error_token? {not decision})"
    )
    return decision
# NOTE(review): garbled extraction of _invalidate_patterns — the def line, the
# per-pattern loop and the SCAN/DELETE calls are missing; only the docstring
# fragment and two log sites (referencing loop variable ``p`` and a ``deleted``
# counter) remain. The class docstring states invalidation uses SCAN + DELETE
# rather than FLUSHDB — confirm the exact iteration in version control.
# Signature per the index: _invalidate_patterns(self, patterns: List[str]) -> None.
391 """Delete all keys matching the provided patterns.
394 patterns (list[str]): List of glob-style patterns for ``SCAN``.
398 self.
disp.log_debug(f
"Invalidating keys with pattern='{p}'")
403 f
"Invalidation pattern='{p}' deleted={deleted} keys")
# NOTE(review): garbled extraction of _ensure_client — the def line, the
# lazily-construct branch, the RedisArgs-vs-Redis type dispatch and most of the
# redis.Redis(...) keyword arguments are missing. The recurring split
# "self." / "clientclient." below looks like either a typo for ``self.client``
# (the RedisArgs container) or an extraction artifact — confirm the attribute
# name in version control before assuming a bug. Signature per the index:
# _ensure_client(self) -> redis.Redis.
406 """Return a Redis client, constructing it on first use.
409 redis.Redis: Active Redis client instance.
412 self.
disp.log_debug(
"Constructing Redis client via factory")
416 "Building Redis client from RedisArgs configuration")
# Remaining lines: a large redis.Redis(...) call mapping RedisArgs fields
# one-to-one onto client constructor keywords (timeouts, keepalive, SSL/OCSP
# options, connection behavior, maintenance notifications).
423 socket_connect_timeout=self.
clientclient.socket_connect_timeout,
425 socket_keepalive_options=self.
clientclient.socket_keepalive_options,
438 ssl_include_verify_flags=self.
clientclient.ssl_include_verify_flags,
439 ssl_exclude_verify_flags=self.
clientclient.ssl_exclude_verify_flags,
443 ssl_check_hostname=self.
clientclient.ssl_check_hostname,
446 ssl_validate_ocsp_stapled=self.
clientclient.ssl_validate_ocsp_stapled,
448 ssl_ocsp_expected_cert=self.
clientclient.ssl_ocsp_expected_cert,
452 single_connection_client=self.
clientclient.single_connection_client,
453 health_check_interval=self.
clientclient.health_check_interval,
458 redis_connect_func=self.
clientclient.redis_connect_func,
459 credential_provider=self.
clientclient.credential_provider,
464 maint_notifications_config=self.
clientclient.maint_notifications_config
# Final branch (truncated): a client instance supplied directly is used as-is.
468 self.
disp.log_debug(
"Using provided Redis client instance")
def _normalize_selector(self, selector: Union[str, List[str]]) -> Any:
    """Return selector as-is, normalizing list vs string types.

    Args:
        selector (str | list[str]): Column list or ``"*"``.

    Returns:
        Any: The selector unchanged, used for hashing and display.
    """
    # NOTE(review): reconstructed from a truncated extraction — per its own
    # docstring the selector is returned unchanged, with the branch only
    # selecting the log message. Confirm against version control.
    if isinstance(selector, list):
        self.disp.log_debug("Normalizing selector: list")
        return selector
    self.disp.log_debug("Normalizing selector: str")
    return selector
def invalidate_all(self) -> None:
    """Remove all keys for the current namespace and DB label."""
    prefix = f"{self.namespace}:{self.db_label}:*"
    self.disp.log_debug("Invalidating all keys for namespace/db_label")
    # NOTE(review): delegation line truncated in the extraction; by analogy
    # with the other invalidate_* methods it goes through _invalidate_patterns.
    self._invalidate_patterns([prefix])
def invalidate_schema(self) -> None:
    """Remove all schema-related keys (tables, columns, triggers, descriptions)."""
    base = f"{self.namespace}:{self.db_label}:schema:*"
    self.disp.log_debug("Invalidating all schema keys")
    # NOTE(review): delegation line truncated in the extraction; by analogy
    # with the other invalidate_* methods it goes through _invalidate_patterns.
    self._invalidate_patterns([base])
def invalidate_table(self, table: str) -> None:
    """Remove all cache entries related to a specific table.

    Args:
        table (str): Table name.
    """
    ns = f"{self.namespace}:{self.db_label}"
    self.disp.log_debug(f"Invalidating caches for table='{table}'")
    # Data rows, row counts and per-table schema metadata all become stale
    # together, so they are invalidated as one batch.
    self._invalidate_patterns([
        f"{ns}:data:{table}:*",
        f"{ns}:count:{table}:*",
        f"{ns}:schema:columns:{table}:*",
        f"{ns}:schema:describe:{table}:*",
    ])
def invalidate_trigger(self, trigger_name: Optional[str] = None) -> None:
    """Remove cache entries related to triggers.

    Args:
        trigger_name (str | None): Specific trigger to clean. When omitted,
            all trigger-related entries are removed.
    """
    ns = f"{self.namespace}:{self.db_label}"
    # NOTE(review): the guard line was truncated in the extraction; the
    # docstring says "when omitted", so a None check is used — confirm whether
    # the original also treated an empty string as "all".
    if trigger_name is not None:
        self.disp.log_debug(f"Invalidating caches for trigger='{trigger_name}'")
        self._invalidate_patterns(
            [f"{ns}:schema:trigger:{trigger_name}:*"])
    else:
        self.disp.log_debug("Invalidating all trigger-related caches")
        self._invalidate_patterns([
            f"{ns}:schema:trigger:*",
            f"{ns}:schema:triggers",
            f"{ns}:schema:trigger_names",
        ])
Any _deserialize(self, Optional[str] raw)
str _key(self, str category, str name, Optional[Any] params=None)
None _invalidate_patterns(self, List[str] patterns)
Any _normalize_selector(self, Union[str, List[str]] selector)
None invalidate_schema(self)
Any _get_cached(self, str key)
None invalidate_all(self)
str _hash_params(self, Any params)
None _set_cached(self, str key, Any value, int ttl_seconds)
Callable[[], redis.Redis] _client_factory
__new__(cls, *args, Optional["RedisCaching"] existing_instance=None, **kwargs)
None invalidate_trigger(self, Optional[str] trigger_name=None)
redis.Redis _ensure_client(self)
Optional[Union[redis.Redis, RedisArgs]] client
str _serialize(self, Any value)
Callable[[], redis.Redis] client
bool _should_cache_union_result(self, Any result, Optional[int] error_token)
None invalidate_table(self, str table)
None __init__(self, Optional[Union[redis.Redis, RedisArgs]] client=None, bool debug=False, *, str namespace="sql", Optional[str] db_label=None, Optional[Dict[str, int]] default_ttls=None, Optional["RedisCaching"] existing_instance=None)
str _stable_dump(self, Any obj)
redis.Redis _build_redis_client()