Coverage for functions/flipdare/service/search_service.py: 59%

103 statements  

« prev     ^ index     » next       coverage.py v7.13.0, created at 2026-05-08 12:22 +1000

1#!/usr/bin/env python 

2# Copyright (c) 2026 Flipdare Pty Ltd. All rights reserved. 

3# 

4# This file is part of Flipdare's proprietary software and contains 

5# confidential and copyrighted material. Unauthorised copying, 

6# modification, distribution, or use of this file is strictly 

7# prohibited without prior written permission from Flipdare Pty Ltd. 

8# 

9# This software includes third-party components licensed under MIT, 

10# BSD, and Apache 2.0 licences. See THIRD_PARTY_NOTICES for details. 

11# 

12 

13from __future__ import annotations 

14from typing import TYPE_CHECKING, Any 

15from cachetools import TTLCache 

16 

17from flipdare.app_env import get_app_environment 

18from flipdare.app_log import LOG 

19from flipdare.constants import IS_DEBUG, SEARCH_CACHE_MAX_SIZE, SEARCH_CACHE_TTL_SECONDS 

20from flipdare.service._service_provider import ServiceProvider 

21from flipdare.error.app_error import AppError, ErrorSchema, SearchError 

22from flipdare.message.error_message import ErrorMessage 

23from flipdare.request.data.search_request_adapter import SearchRequestAdapter 

24from flipdare.search.db.app_friend_search import AppFriendSearch 

25from flipdare.search.db.app_general_search import AppGeneralSearch 

26from flipdare.search.result.search_response_builder import SearchResponseBuilder 

27from flipdare.search.result.typesense_payload import TypesensePayload 

28 

# Explicit public API of this module.
__all__ = ["SearchService"]

# A search call yields either the serialized result payload or a structured
# error schema (PEP 695 ``type`` alias; requires Python 3.12+).
type ResponseType = dict[str, Any] | ErrorSchema

if TYPE_CHECKING:
    # Manager types are needed only for annotations; importing them under
    # TYPE_CHECKING avoids runtime import cost and potential circular imports.
    from flipdare.manager.db_manager import DbManager
    from flipdare.manager.backend_manager import BackendManager
    from flipdare.manager.search_manager import SearchManager

38 

class SearchService(ServiceProvider):
    """
    Orchestrates search operations with intelligent caching.

    Singleton for performance - caches recent searches to reduce load on the
    search client. Caching is automatically disabled outside the cloud
    environment (dev/emulator), and may be toggled at runtime via
    :meth:`enable_cache` / :meth:`disable_cache`.
    """

    def __init__(
        self,
        db_manager: DbManager | None = None,
        backend_manager: BackendManager | None = None,
        search_manager: SearchManager | None = None,
        cache_enabled: bool = True,
        cache_ttl_seconds: int = SEARCH_CACHE_TTL_SECONDS,
        cache_max_size: int = SEARCH_CACHE_MAX_SIZE,
    ) -> None:
        """
        Initialise the service and its result cache.

        Args:
            db_manager: Database manager; resolved by ServiceProvider if None.
            backend_manager: Backend manager; resolved by ServiceProvider if None.
            search_manager: Search manager; resolved by ServiceProvider if None.
            cache_enabled: Whether result caching starts enabled.
            cache_ttl_seconds: Time-to-live for each cached search result.
            cache_max_size: Maximum number of cached search results (LRU eviction).

        """
        super().__init__(
            backend_manager=backend_manager,
            db_manager=db_manager,
            search_manager=search_manager,
        )

        # LRU cache with TTL for search results.
        self._search_cache: TTLCache[str, dict[str, Any]] = TTLCache(
            maxsize=cache_max_size,
            ttl=cache_ttl_seconds,
        )
        self._cache_enabled = cache_enabled
        if not get_app_environment().in_cloud:
            # Caching is only meaningful against the real cloud search backend.
            self._cache_enabled = False
            # Fixed stale class name in log message ("SearchController").
            LOG().info("SearchService cache disabled in dev/emulator mode.")

    @property
    def friend(self) -> AppFriendSearch:
        """Friend-collection search client."""
        return self.search_manager.friend

    @property
    def general(self) -> AppGeneralSearch:
        """General-collection search client."""
        return self.search_manager.general

    def enable_cache(self) -> None:
        """Turn result caching on."""
        self._cache_enabled = True

    def disable_cache(self) -> None:
        """Turn result caching off (existing entries are retained)."""
        self._cache_enabled = False

    def clear_cache(self, pattern: str | None = None) -> int:
        """
        Clear cache entries.

        Args:
            pattern: If provided, only clear entries containing this pattern.
                If None, clear all entries.

        Returns:
            Number of entries cleared.

        """
        if pattern is None:
            count = len(self._search_cache)
            self._search_cache.clear()
            LOG().debug(f"Cleared all {count} search cache entries")
            return count
        # Collect matching keys first: deleting while iterating a TTLCache
        # would mutate the mapping mid-iteration.
        keys_to_remove = [k for k in self._search_cache if pattern in k]
        for key in keys_to_remove:
            del self._search_cache[key]
        LOG().debug(f"Cleared {len(keys_to_remove)} search cache entries matching '{pattern}'")
        return len(keys_to_remove)

    def search(
        self,
        search_request: SearchRequestAdapter,
    ) -> ResponseType:
        """
        Execute a search, serving from cache when possible.

        Args:
            search_request: Adapted inbound search request.

        Returns:
            The serialized result payload, or an error schema on failure.

        Raises:
            AppError: If the search query cannot be prepared.

        """
        LOG().debug(f"Processing search request for {search_request}")

        col = search_request.collection
        query = search_request.query
        try:
            # we need to prepare the query first ..
            # because if there is access to _filter_by and _prepare is not called,
            # an exception will be thrown..
            query.prepare(self.friend)
        except Exception as e:
            LOG().warning(f"Error preparing search query {query}: {e}")
            raise AppError(
                source=search_request.endpoint,
                message=ErrorMessage.SEARCH_QUERY_PREPARE_ERROR,
            ) from e

        if IS_DEBUG:
            LOG().debug(f"Executing search for collection {col} with query: {query}")

        # Check cache if enabled
        if self._cache_enabled:
            cache_value = self._retrieve_from_cache(search_request)
            if cache_value is not None:
                return cache_value

        # Execute search
        payload: TypesensePayload | None = None
        try:
            payload = (
                self.friend.search(query.search_params)
                if col.is_friend
                else self.general.search(query.search_params)
            )
            return self._process_results(search_request, payload)

        except AppError as e:
            LOG().warning(f"Search error for {search_request}: {e}")
            return e.to_dict()
        except Exception as e:
            LOG().error(f"Unexpected error during search for {search_request}: {e}")
            return SearchError(
                source=search_request.endpoint,
                message=ErrorMessage.SEARCH_ERROR,
            ).to_dict()

    def _process_results(
        self,
        search_request: SearchRequestAdapter,
        payload: TypesensePayload,
    ) -> ResponseType:
        """
        Build, cache, and serialize the response for a raw search payload.

        Args:
            search_request: The originating request (used for endpoint/cache key).
            payload: Raw payload returned by the search client.

        Returns:
            The serialized result payload, or an error schema on failure.

        """
        try:
            builder = SearchResponseBuilder(
                user_db=self.user_db,
                group_db=self.group_db,
                dare_db=self.dare_db,
            )
            result = builder.process(endpoint=search_request.endpoint, payload=payload)
            result_dict = result.model_dump(mode="json")
            self._add_to_cache(search_request, result_dict)
            return result_dict

        except AppError as e:
            LOG().warning(f"Error processing search results for {search_request}: {e}")
            return e.to_dict()
        except Exception as e:
            LOG().error(f"Unexpected error processing search results for {search_request}: {e}")
            return SearchError(
                source=search_request.endpoint,
                message=ErrorMessage.SEARCH_RESULT_PROCESS_ERROR,
            ).to_dict()

    def _retrieve_from_cache(
        self,
        search_request: SearchRequestAdapter,
    ) -> dict[str, Any] | None:
        """
        Look up a cached response for this request.

        Returns:
            The cached response dict, or None on miss / caching disabled /
            request not cacheable.

        """
        if not self._cache_enabled:
            return None

        cache_key = search_request.cache_key
        if cache_key is None:
            # Request is not cacheable (no stable key).
            return None

        # BUG FIX: single .get() instead of `in`-check + `[]`-access.
        # On a TTLCache the entry can expire between the membership test and
        # the read, which would raise KeyError.
        cached = self._search_cache.get(cache_key)
        if cached is None:
            LOG().debug(f"Cache miss for: {cache_key}")
            return None

        LOG().debug(f"Cache hit for: {cache_key}")
        return cached

    def _add_to_cache(
        self,
        request: SearchRequestAdapter,
        response: dict[str, Any],
    ) -> None:
        """
        Store a response in the cache if caching is enabled and the request
        has a cache key.

        Args:
            request: The originating request (provides the cache key).
            response: Serialized response to cache.

        """
        if not self._cache_enabled:
            return
        cache_key = request.cache_key
        if cache_key is None:
            return

        self._search_cache[cache_key] = response
        # Log after the insert so the message reflects what actually happened.
        if IS_DEBUG:
            LOG().debug(f"Added search request to cache with key: {cache_key}")