mirror of
https://github.com/Significant-Gravitas/AutoGPT.git
synced 2026-04-08 03:00:28 -04:00
invalidating cache for favorites
This commit is contained in:
@@ -141,14 +141,18 @@ def cached(
|
||||
# Fast path: check cache without lock
|
||||
if key in cache_storage:
|
||||
if ttl_seconds is None:
|
||||
logger.debug(f"Cache hit for {target_func.__name__}")
|
||||
logger.debug(
|
||||
f"Cache hit for {target_func.__name__} args: {args} kwargs: {kwargs}"
|
||||
)
|
||||
return cache_storage[key]
|
||||
else:
|
||||
cached_data = cache_storage[key]
|
||||
if isinstance(cached_data, tuple):
|
||||
result, timestamp = cached_data
|
||||
if current_time - timestamp < ttl_seconds:
|
||||
logger.debug(f"Cache hit for {target_func.__name__}")
|
||||
logger.debug(
|
||||
f"Cache hit for {target_func.__name__} args: {args} kwargs: {kwargs}"
|
||||
)
|
||||
return result
|
||||
|
||||
# Slow path: acquire lock for cache miss/expiry
|
||||
@@ -199,14 +203,18 @@ def cached(
|
||||
# Fast path: check cache without lock
|
||||
if key in cache_storage:
|
||||
if ttl_seconds is None:
|
||||
logger.debug(f"Cache hit for {target_func.__name__}")
|
||||
logger.debug(
|
||||
f"Cache hit for {target_func.__name__} args: {args} kwargs: {kwargs}"
|
||||
)
|
||||
return cache_storage[key]
|
||||
else:
|
||||
cached_data = cache_storage[key]
|
||||
if isinstance(cached_data, tuple):
|
||||
result, timestamp = cached_data
|
||||
if current_time - timestamp < ttl_seconds:
|
||||
logger.debug(f"Cache hit for {target_func.__name__}")
|
||||
logger.debug(
|
||||
f"Cache hit for {target_func.__name__} args: {args} kwargs: {kwargs}"
|
||||
)
|
||||
return result
|
||||
|
||||
# Slow path: acquire lock for cache miss/expiry
|
||||
|
||||
@@ -765,7 +765,7 @@ async def create_new_graph(
|
||||
cache.get_cached_graphs.cache_delete(user_id=user_id, page=1, page_size=250)
|
||||
for page in range(1, 20):
|
||||
library_cache.get_cached_library_agents.cache_delete(
|
||||
user_id=user_id, page=page, page_size=8
|
||||
user_id=user_id, page=page, page_size=10
|
||||
)
|
||||
|
||||
return await on_graph_activate(graph, user_id=user_id)
|
||||
|
||||
@@ -230,7 +230,7 @@ async def add_marketplace_agent_to_library(
|
||||
# Clear library caches after adding new agent
|
||||
for page in range(1, 20):
|
||||
library_cache.get_cached_library_agents.cache_delete(
|
||||
user_id=user_id, page=page, page_size=8
|
||||
user_id=user_id, page=page, page_size=10
|
||||
)
|
||||
|
||||
return result
|
||||
@@ -283,13 +283,20 @@ async def update_library_agent(
|
||||
HTTPException(500): If a server/database error occurs.
|
||||
"""
|
||||
try:
|
||||
return await library_db.update_library_agent(
|
||||
result = await library_db.update_library_agent(
|
||||
library_agent_id=library_agent_id,
|
||||
user_id=user_id,
|
||||
auto_update_version=payload.auto_update_version,
|
||||
is_favorite=payload.is_favorite,
|
||||
is_archived=payload.is_archived,
|
||||
)
|
||||
|
||||
for page in range(1, 20):
|
||||
library_cache.get_cached_library_agent_favorites.cache_delete(
|
||||
user_id=user_id, page=page, page_size=10
|
||||
)
|
||||
|
||||
return result
|
||||
except NotFoundError as e:
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_404_NOT_FOUND,
|
||||
@@ -347,7 +354,7 @@ async def delete_library_agent(
|
||||
)
|
||||
for page in range(1, 20):
|
||||
library_cache.get_cached_library_agents.cache_delete(
|
||||
user_id=user_id, page=page, page_size=8
|
||||
user_id=user_id, page=page, page_size=10
|
||||
)
|
||||
|
||||
return Response(status_code=status.HTTP_204_NO_CONTENT)
|
||||
|
||||
@@ -15,7 +15,7 @@ export const useLibraryAgentList = () => {
|
||||
} = useGetV2ListLibraryAgentsInfinite(
|
||||
{
|
||||
page: 1,
|
||||
page_size: 8,
|
||||
page_size: 10,
|
||||
search_term: searchTerm || undefined,
|
||||
sort_by: librarySort,
|
||||
},
|
||||
|
||||
Reference in New Issue
Block a user