|
|
@@ -1,30 +1,88 @@
|
|
|
|
|
|
|
|
from __future__ import annotations
|
|
|
|
|
|
|
|
|
|
|
|
from functools import lru_cache, wraps
|
|
|
|
from functools import lru_cache, wraps
|
|
|
|
from typing import Any
|
|
|
|
from typing import Any
|
|
|
|
|
|
|
|
|
|
|
|
# NOTE(review): this file is a mangled unified diff — both the removed and the
# added side of each hunk are present.  The first line below is the old
# group-to-keys index (removed side), the second is its replacement that maps a
# group name to an LRUCache instance (added side), and the third is the old
# flat key-to-value store that the LRUCache makes obsolete (removed side).
DEFAULT_CACHE: dict[str, list[str]] = {}

DEFAULT_CACHE: dict[str, "LRUCache"] = {}

DEFAULT_CACHE_VALUE: dict[str, Any] = {}
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
def set_cache(group: str, key: str, value: Any) -> None:
|
|
|
|
class Node:
    """A doubly-linked-list node holding one cache entry.

    Used by ``LRUCache`` to track recency: nodes are unlinked and
    re-appended near the tail on access.

    Attributes
    ----------
    key : str
        Cache key this node stores.
    value : Any
        Cached value.
    next : Node | None
        Next node in the list (``None`` until linked).
    prev : Node | None
        Previous node in the list (``None`` until linked).
    """

    def __init__(self, key: str, value: Any) -> None:
        self.key: str = key
        self.value: Any = value
        # Links start detached; LRUCache._add wires them up.
        self.next: Node | None = None
        self.prev: Node | None = None
|
class LRUCache:
    """Least-recently-used cache backed by a dict and a doubly linked list.

    ``items`` maps keys to their list nodes for O(1) lookup; the list runs
    ``head_node -> ... -> tail_node`` with the most recently used entries
    nearest the tail, so eviction removes ``head_node.next``.
    """

    def __init__(self, capacity: int = 100) -> None:
        """Create an empty cache holding at most *capacity* entries.

        Parameters
        ----------
        capacity : int
            Maximum number of entries before the LRU entry is evicted.
        """
        self.items: dict[str, Node] = {}
        # head and tail are dummy sentinel nodes; real nodes live between them.
        self.head_node = Node("", 0)
        self.tail_node = Node("", 0)
        self.head_node.next = self.tail_node  # head => tail
        self.tail_node.prev = self.head_node  # head <= tail
        self.capacity = capacity

    def _remove_node(self, node: Node) -> None:
        """Unlink *node* from the list and drop its entry from ``items``."""
        if node.prev:  # point the previous node past the node being removed
            node.prev.next = node.next
        if node.next:  # point the next node back past the node being removed
            node.next.prev = node.prev
        del self.items[node.key]

    def _add(self, node: Node) -> None:
        """Link *node* just before the tail sentinel (most-recent position)."""
        prev_node = self.tail_node.prev
        next_node = self.tail_node
        if prev_node:
            prev_node.next = node
            node.prev = prev_node
        if next_node:
            next_node.prev = node
            node.next = next_node
        self.items[node.key] = node

    def get(self, key: str) -> Any:
        """Return the value cached under *key*, marking it most recently used.

        Raises
        ------
        KeyError
            If *key* is not in the cache.
        """
        if key not in self.items:
            raise KeyError(key)
        node = self.items[key]
        # Remove and re-append so the node becomes the most recent entry.
        self._remove_node(node)
        self._add(node)
        return node.value

    def put(self, key: str, value: Any) -> None:
        """Store *value* under *key*, evicting the LRU entry when over capacity."""
        if key in self.items:
            # BUG FIX: the previous code only did `del self.items[key]`,
            # leaving the stale node linked in the list.  A later eviction
            # would unlink that stale node and delete the *fresh* dict entry
            # (or raise KeyError).  Fully unlink the old node instead.
            self._remove_node(self.items[key])

        node = Node(key, value)
        self._add(node)

        if len(self.items) > self.capacity:
            # Evict the least recently used entry (just after the head dummy).
            if self.head_node.next:  # should always exist; guard for typing
                self._remove_node(self.head_node.next)
|
|
|
|
def cache(group: str = "default", override: bool = False):
|
|
|
|
def cache(group: str = "default", override: bool = False):
|
|
|
@@ -34,12 +92,20 @@ def cache(group: str = "default", override: bool = False):
|
|
|
|
@wraps(func)
|
|
|
|
@wraps(func)
|
|
|
|
async def wrapper(self, *args, **kwargs):
|
|
|
|
async def wrapper(self, *args, **kwargs):
|
|
|
|
key = cache_key_builder(func, self, *args, **kwargs)
|
|
|
|
key = cache_key_builder(func, self, *args, **kwargs)
|
|
|
|
hit_item = DEFAULT_CACHE_VALUE.get(key)
|
|
|
|
target_cache = DEFAULT_CACHE.get(group)
|
|
|
|
if hit_item and override is False and kwargs.get("cache_override") is None:
|
|
|
|
|
|
|
|
return hit_item
|
|
|
|
if target_cache is None:
|
|
|
|
res = await func(self, *args, **kwargs)
|
|
|
|
target_cache = LRUCache()
|
|
|
|
set_cache(group, key, res)
|
|
|
|
DEFAULT_CACHE[group] = target_cache
|
|
|
|
return res
|
|
|
|
|
|
|
|
|
|
|
|
try:
|
|
|
|
|
|
|
|
hit_item = target_cache.get(key)
|
|
|
|
|
|
|
|
if hit_item and override is False and kwargs.get("cache_override") is None:
|
|
|
|
|
|
|
|
return hit_item
|
|
|
|
|
|
|
|
except KeyError:
|
|
|
|
|
|
|
|
res = await func(self, *args, **kwargs)
|
|
|
|
|
|
|
|
target_cache.put(key, res)
|
|
|
|
|
|
|
|
return res
|
|
|
|
|
|
|
|
|
|
|
|
return wrapper
|
|
|
|
return wrapper
|
|
|
|
|
|
|
|
|
|
|
|