From ccc6c4ffe40c8ee91e8ec937057ffbefb439b524 Mon Sep 17 00:00:00 2001 From: nipeone Date: Sun, 25 Feb 2024 19:11:07 +0800 Subject: [PATCH 01/10] Add redis as cache backend option --- .circleci/config.yml | 4 + docs/config_options.rst | 6 + large_image/cache_util/__init__.py | 7 +- large_image/cache_util/cachefactory.py | 3 + large_image/cache_util/rediscache.py | 171 +++++++++++++++++++++++++ large_image/config.py | 4 +- setup.py | 3 +- test/test_cache.py | 27 +++- test/test_cached_tiles.py | 7 + test/test_config.py | 2 + test/test_files/sample.girder.cfg | 2 + 11 files changed, 232 insertions(+), 4 deletions(-) create mode 100644 large_image/cache_util/rediscache.py diff --git a/.circleci/config.yml b/.circleci/config.yml index 0c9eb9063..8285912e2 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -103,6 +103,10 @@ commands: name: start memcached command: | docker run --rm -d -p 11211:11211 memcached -m 64 + - run: + name: start redis + command: | + docker run --rm -d -p 6379:6379 redis -m 64 - run: name: Use nvm # see https://discuss.circleci.com/t/nvm-does-not-change-node-version-on-machine/28973/14 diff --git a/docs/config_options.rst b/docs/config_options.rst index d0531d257..fc9f7e17c 100644 --- a/docs/config_options.rst +++ b/docs/config_options.rst @@ -19,6 +19,12 @@ Configuration parameters: - ``cache_memcached_password``: A password for the memcached server. Default ``None``. +- ``cache_redis_url``: If tiles are cached in redis, the url or list of urls where the redis server is located. Default '127.0.0.1:6379'. + +- ``cache_redis_username``: A username for the redis server. Default ``None``. + +- ``cache_redis_password``: A password for the redis server. Default ``None``. + - ``cache_tilesource_memory_portion``: Tilesources are cached on open so that subsequent accesses can be faster. These use file handles and memory. This limits the maximum based on a memory estimation and using no more than 1 / (``cache_tilesource_memory_portion``) of the available memory. - ``cache_tilesource_maximum``: If this is non-zero, this further limits the number of tilesources than can be cached to this value. diff --git a/large_image/cache_util/__init__.py b/large_image/cache_util/__init__.py index 6d7486143..01809104e 100644 --- a/large_image/cache_util/__init__.py +++ b/large_image/cache_util/__init__.py @@ -22,10 +22,15 @@ from .cachefactory import CacheFactory, pickAvailableCache MemCache: Any +RedisCache: Any try: from .memcache import MemCache except ImportError: MemCache = None +try: + from .rediscache import RedisCache +except ImportError: + RedisCache = None _cacheClearFuncs: List[Callable] = [] @@ -99,6 +104,6 @@ def cachesInfo(*args, **kwargs) -> Dict[str, Dict[str, int]]: return info -__all__ = ('CacheFactory', 'getTileCache', 'isTileCacheSetup', 'MemCache', +__all__ = ('CacheFactory', 'getTileCache', 'isTileCacheSetup', 'MemCache', 'RedisCache', 'strhash', 'LruCacheMetaclass', 'pickAvailableCache', 'methodcache', 'CacheProperties') diff --git a/large_image/cache_util/cachefactory.py b/large_image/cache_util/cachefactory.py index 1174ab995..da94154e0 100644 --- a/large_image/cache_util/cachefactory.py +++ b/large_image/cache_util/cachefactory.py @@ -31,6 +31,7 @@ from .. 
import config from ..exceptions import TileCacheError from .memcache import MemCache +from .rediscache import RedisCache # DO NOT MANUALLY ADD ANYTHING TO `_availableCaches` # use entrypoints and let loadCaches fill in `_availableCaches` @@ -66,6 +67,8 @@ def loadCaches( if MemCache is not None: # TODO: put this in an entry point for a new package _availableCaches['memcached'] = MemCache + if RedisCache is not None: + _availableCaches['redis'] = RedisCache # NOTE: `python` cache is viewed as a fallback and isn't listed in `availableCaches` diff --git a/large_image/cache_util/rediscache.py b/large_image/cache_util/rediscache.py new file mode 100644 index 000000000..e967b1e4d --- /dev/null +++ b/large_image/cache_util/rediscache.py @@ -0,0 +1,171 @@ +############################################################################# +# Copyright Kitware Inc. +# +# Licensed under the Apache License, Version 2.0 ( the "License" ); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +############################################################################# + +import pickle +import threading +import time +from typing import Any, Callable, List, Optional, Tuple, TypeVar, Union + +from .. import config +from .base import BaseCache + +_VT = TypeVar('_VT') + +class RedisCache(BaseCache): + """Use redis as the backing cache.""" + + def __init__( + self, url: Union[str, List[str]] = '127.0.0.1:6379', + username: Optional[str] = None, password: Optional[str] = None, + getsizeof: Optional[Callable[[_VT], float]] = None, + mustBeAvailable: bool = False) -> None: + import redis + + self.redis = redis + super().__init__(0, getsizeof=getsizeof) + self._cache_key_prefix = 'large_image_' + self._clientParams = (f"redis://{url}", dict( + username=username, password=password, db=0, retry_on_timeout=1)) + self._client = redis.Redis.from_url(self._clientParams[0], **self._clientParams[1]) + if mustBeAvailable: + # Try to ping server; this will throw an error if the server is + # unreachable, so we don't bother trying to use it. + self._client.ping() + + def __repr__(self) -> str: + return "Redis doesn't list its keys" + + def __iter__(self): + # return invalid iter + return None + + def __len__(self) -> int: + # return invalid length + keys = self._client.keys(f"{self._cache_key_prefix}*") + return len(keys) + + def __contains__(self, key) -> bool: + # cache never contains key + _key = self._cache_key_prefix + self._hashKey(key) + return self._client.exists(_key) + + def __delitem__(self, key: str) -> None: + if not self.__contains__(key): + raise KeyError + _key = self._cache_key_prefix + self._hashKey(key) + self._client.delete(_key) + + def __getitem__(self, key: str) -> Any: + _key = self._cache_key_prefix + self._hashKey(key) + try: + # must determine if tke key exists , otherwise cache_test can not be passed. 
+ if not self.__contains__(key): + raise KeyError + return pickle.loads(self._client.get(_key)) + except KeyError: + return self.__missing__(key) + except self.redis.ConnectionError: + self.logError(self.redis.ConnectionError, config.getLogger('logprint').info, + 'redis ConnectionError') + self._reconnect() + return self.__missing__(key) + except self.redis.RedisError: + self.logError(self.redis.RedisError, config.getLogger('logprint').exception, + 'redis RedisError') + return self.__missing__(key) + + def __setitem__(self, key: str, value: Any) -> None: + _key = self._cache_key_prefix + self._hashKey(key) + try: + self._client.set(_key, pickle.dumps(value)) + except (TypeError, KeyError) as exc: + valueSize = value.shape if hasattr(value, 'shape') else ( + value.size if hasattr(value, 'size') else ( + len(value) if hasattr(value, '__len__') else None)) + valueRepr = repr(value) + if len(valueRepr) > 500: + valueRepr = valueRepr[:500] + '...' + self.logError( + exc.__class__, config.getLogger('logprint').error, + '%s: Failed to save value (size %r) with key %s' % ( + exc.__class__.__name__, valueSize, key)) + except self.redis.ConnectionError: + self.logError(self.redis.ConnectionError, config.getLogger('logprint').info, + 'redis ConnectionError') + self._reconnect() + + @property + def curritems(self) -> int: + return self._client.dbsize() + + @property + def currsize(self) -> int: + return self._getStat('used_memory') + + @property + def maxsize(self) -> int: + maxmemory = self._getStat["maxmemory"] + if maxmemory: + return maxmemory + else: + return self._getStat["total_system_memory"] + + def _reconnect(self) -> None: + try: + self._lastReconnectBackoff = getattr(self, '_lastReconnectBackoff', 2) + if time.time() - getattr(self, '_lastReconnect', 0) > self._lastReconnectBackoff: + config.getLogger('logprint').info('Trying to reconnect to redis server') + self._client = self.redis.Redis.from_url(self._clientParams[0], **self._clientParams[1]) + self._lastReconnectBackoff = min(self._lastReconnectBackoff + 1, 30) + self._lastReconnect = time.time() + except Exception: + pass + + def _getStat(self, key: str) -> int: + try: + stats = self._client.info() + value = stats[key] + except Exception: + return 0 + return value + + def clear(self) -> None: + keys = self._client.keys(f"{self._cache_key_prefix}*") + if keys: + self._client.delete(*keys) + + @staticmethod + def getCache() -> Tuple[Optional['RedisCache'], threading.Lock]: + cacheLock = threading.Lock() + + # check if credentials and location exist, otherwise assume + # location is 127.0.0.1 (localhost) with no password + url = config.getConfig('cache_redis_url') + if not url: + url = '127.0.0.1:6379' + redisUsername = config.getConfig('cache_redis_username') + if not redisUsername: + redisUsername = None + redisPassword = config.getConfig('cache_redis_password') + if not redisPassword: + redisPassword = None + try: + cache = RedisCache(url, redisUsername, redisPassword, + mustBeAvailable=True) + except Exception: + config.getLogger().info('Cannot use redis for caching.') + cache = None + return cache, cacheLock \ No newline at end of file diff --git a/large_image/config.py b/large_image/config.py index 383eb3315..17076e24f 100644 --- a/large_image/config.py +++ b/large_image/config.py @@ -25,13 +25,15 @@ 'logprint': fallbackLogger, # For tiles - 'cache_backend': None, # 'python' or 'memcached' + 'cache_backend': None, # 'python', 'redis' or 'memcached' # 'python' cache can use 1/(val) of the available memory 
'cache_python_memory_portion': 32, # cache_memcached_url may be a list 'cache_memcached_url': '127.0.0.1', 'cache_memcached_username': None, 'cache_memcached_password': None, + 'cache_redis_url': '127.0.0.1:6379', + 'cache_redis_password': None, # If set to False, the default will be to not cache tile sources. This has # substantial performance penalties if sources are used multiple times, so diff --git a/setup.py b/setup.py index 3bff9d454..605e8a475 100644 --- a/setup.py +++ b/setup.py @@ -36,6 +36,7 @@ def prerelease_local_scheme(version): extraReqs = { 'memcached': ['pylibmc>=1.5.1 ; platform_system != "Windows"'], + 'redis': ['redis>=4.5.5'], 'converter': [f'large-image-converter{limit_version}'], 'colormaps': ['matplotlib'], 'tiledoutput': ['pyvips'], @@ -74,7 +75,7 @@ def prerelease_local_scheme(version): # The common packages are ones that will install on Ubuntu, OSX, and Windows # from pypi with all needed dependencies. extraReqs['common'] = list(set(itertools.chain.from_iterable(extraReqs[key] for key in { - 'memcached', 'colormaps', 'performance', + 'memcached', 'redis', 'colormaps', 'performance', 'deepzoom', 'dicom', 'multi', 'nd2', 'test', 'tifffile', 'zarr', })) | { f'large-image-source-pil[all]{limit_version}', diff --git a/test/test_cache.py b/test/test_cache.py index 24176a4cd..130afc4db 100644 --- a/test/test_cache.py +++ b/test/test_cache.py @@ -7,7 +7,7 @@ import large_image.cache_util.cache from large_image import config -from large_image.cache_util import (LruCacheMetaclass, MemCache, cachesClear, +from large_image.cache_util import (LruCacheMetaclass, MemCache, RedisCache, cachesClear, cachesInfo, getTileCache, methodcache, strhash) @@ -51,6 +51,23 @@ def testCheckCacheMemcached(): assert val == 354224848179261915075 +@pytest.mark.singular() +def testCacheRedis(): + cache_test(RedisCache()) + + +@pytest.mark.singular() +def testCheckCacheRedis(): + cache = RedisCache() + + cache_test(cache) + + val = cache['(2,)'] + assert val == 1 + val = cache['(100,)'] + assert val == 354224848179261915075 + + def testBadMemcachedUrl(): # go though and check if all 100 fib numbers are in cache # it is stored in cache as ('fib', #) @@ -79,6 +96,14 @@ def testGetTileCacheMemcached(): assert isinstance(tileCache, MemCache) +@pytest.mark.singular() +def testGetTileCacheRedis(): + large_image.cache_util.cache._tileCache = None + large_image.cache_util.cache._tileLock = None + config.setConfig('cache_backend', 'redis') + tileCache, tileLock = getTileCache() + assert isinstance(tileCache, RedisCache) + class TestClass: def testLRUThreadSafety(self): # The cachetools LRU cache is not thread safe, and if two threads ask diff --git a/test/test_cached_tiles.py b/test/test_cached_tiles.py index 1cbc521cb..b9a789ddd 100644 --- a/test/test_cached_tiles.py +++ b/test/test_cached_tiles.py @@ -161,6 +161,13 @@ def setup_class(cls): config.setConfig('cache_backend', 'memcached') +class TestRedisCache(LargeImageCachedTilesTest): + @classmethod + def setup_class(cls): + large_image.cache_util.cache._tileCache = None + large_image.cache_util.cache._tileLock = None + config.setConfig('cache_backend', 'redis') + class TestPythonCache(LargeImageCachedTilesTest): @classmethod def setup_class(cls): diff --git a/test/test_config.py b/test/test_config.py index fab9218a6..ad88fb5df 100644 --- a/test/test_config.py +++ b/test/test_config.py @@ -7,6 +7,8 @@ def testConfigFunctions(): assert getConfig('cache_backend') == 'python' setConfig('cache_backend', 'memcached') assert getConfig('cache_backend') == 
'memcached' + setConfig('cache_backend', 'redis') + assert getConfig('cache_backend') == 'redis' setConfig('cache_backend', None) assert getConfig('cache_backend') is None assert getConfig('unknown', 'python') == 'python' diff --git a/test/test_files/sample.girder.cfg b/test/test_files/sample.girder.cfg index daffc9633..2611829c2 100644 --- a/test/test_files/sample.girder.cfg +++ b/test/test_files/sample.girder.cfg @@ -8,6 +8,8 @@ cache_python_memory_portion = 32 cache_memcached_url = "127.0.0.1" cache_memcached_username = None cache_memcached_password = None +cache_redis_url = "127.0.0.1:6379" +cache_redis_password = None # The tilesource cache uses the lesser of a value based on available file # handles, the memory portion, and the maximum (if not 0) cache_tilesource_memory_portion = 8 From 76d671637eb0f93c2c05b80121d75a0ef4b1f33c Mon Sep 17 00:00:00 2001 From: nipeone Date: Sun, 25 Feb 2024 19:27:33 +0800 Subject: [PATCH 02/10] update tox cfg for redis --- tox.ini | 1 + 1 file changed, 1 insertion(+) diff --git a/tox.ini b/tox.ini index ef05e2848..a86aa68aa 100644 --- a/tox.ini +++ b/tox.ini @@ -13,6 +13,7 @@ toxworkdir = {toxinidir}/build/tox passenv = PYTEST_*,DICOMWEB_TEST_URL,DICOMWEB_TEST_TOKEN extras = memcached + redis performance setenv = PIP_FIND_LINKS=https://girder.github.io/large_image_wheels From 0552efcc94ea46bc9750cb323e13c60a15aaf3a8 Mon Sep 17 00:00:00 2001 From: nipeone Date: Sun, 25 Feb 2024 19:37:53 +0800 Subject: [PATCH 03/10] fix some bug in rediscache --- large_image/cache_util/rediscache.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/large_image/cache_util/rediscache.py b/large_image/cache_util/rediscache.py index e967b1e4d..5c5ffa442 100644 --- a/large_image/cache_util/rediscache.py +++ b/large_image/cache_util/rediscache.py @@ -117,11 +117,11 @@ def currsize(self) -> int: @property def maxsize(self) -> int: - maxmemory = self._getStat["maxmemory"] + maxmemory = self._getStat("maxmemory") if maxmemory: return maxmemory else: - return self._getStat["total_system_memory"] + return self._getStat("total_system_memory") def _reconnect(self) -> None: try: From d7456018408b32eafe02a5f712730a51c1987a99 Mon Sep 17 00:00:00 2001 From: nipeone Date: Thu, 23 May 2024 23:06:31 +0800 Subject: [PATCH 04/10] fix ci bug --- .circleci/config.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index 8285912e2..ddeeba4b0 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -106,7 +106,7 @@ commands: - run: name: start redis command: | - docker run --rm -d -p 6379:6379 redis -m 64 + docker run --rm -d -p 6379:6379 redis - run: name: Use nvm # see https://discuss.circleci.com/t/nvm-does-not-change-node-version-on-machine/28973/14 From da31e47a8da33090230bb0dc4006837b581a2095 Mon Sep 17 00:00:00 2001 From: nipeone Date: Thu, 23 May 2024 23:58:58 +0800 Subject: [PATCH 05/10] fix lint_and_docs bug --- large_image/cache_util/rediscache.py | 20 ++++++++++++-------- 1 file changed, 12 insertions(+), 8 deletions(-) diff --git a/large_image/cache_util/rediscache.py b/large_image/cache_util/rediscache.py index 5c5ffa442..8b8ed83af 100644 --- a/large_image/cache_util/rediscache.py +++ b/large_image/cache_util/rediscache.py @@ -17,6 +17,7 @@ import pickle import threading import time + from typing import Any, Callable, List, Optional, Tuple, TypeVar, Union from .. 
import config @@ -33,13 +34,15 @@ def __init__( getsizeof: Optional[Callable[[_VT], float]] = None, mustBeAvailable: bool = False) -> None: import redis + from redis.client import Redis self.redis = redis + self._redisCls = Redis super().__init__(0, getsizeof=getsizeof) self._cache_key_prefix = 'large_image_' - self._clientParams = (f"redis://{url}", dict( + self._clientParams = (f'redis://{url}', dict( username=username, password=password, db=0, retry_on_timeout=1)) - self._client = redis.Redis.from_url(self._clientParams[0], **self._clientParams[1]) + self._client = Redis.from_url(self._clientParams[0], **self._clientParams[1]) if mustBeAvailable: # Try to ping server; this will throw an error if the server is # unreachable, so we don't bother trying to use it. @@ -54,7 +57,7 @@ def __iter__(self): def __len__(self) -> int: # return invalid length - keys = self._client.keys(f"{self._cache_key_prefix}*") + keys = self._client.keys(f'{self._cache_key_prefix}*') return len(keys) def __contains__(self, key) -> bool: @@ -117,18 +120,19 @@ def currsize(self) -> int: @property def maxsize(self) -> int: - maxmemory = self._getStat("maxmemory") + maxmemory = self._getStat('maxmemory') if maxmemory: return maxmemory else: - return self._getStat("total_system_memory") + return self._getStat('total_system_memory') def _reconnect(self) -> None: try: self._lastReconnectBackoff = getattr(self, '_lastReconnectBackoff', 2) if time.time() - getattr(self, '_lastReconnect', 0) > self._lastReconnectBackoff: config.getLogger('logprint').info('Trying to reconnect to redis server') - self._client = self.redis.Redis.from_url(self._clientParams[0], **self._clientParams[1]) + self._client = self._redisCls.from_url(self._clientParams[0], + **self._clientParams[1]) self._lastReconnectBackoff = min(self._lastReconnectBackoff + 1, 30) self._lastReconnect = time.time() except Exception: @@ -143,7 +147,7 @@ def _getStat(self, key: str) -> int: return value def clear(self) -> None: - keys = self._client.keys(f"{self._cache_key_prefix}*") + keys = self._client.keys(f'{self._cache_key_prefix}*') if keys: self._client.delete(*keys) @@ -168,4 +172,4 @@ def getCache() -> Tuple[Optional['RedisCache'], threading.Lock]: except Exception: config.getLogger().info('Cannot use redis for caching.') cache = None - return cache, cacheLock \ No newline at end of file + return cache, cacheLock From a9ba44890ac451d230e70cfe8ce87a08603e51ae Mon Sep 17 00:00:00 2001 From: nipeone Date: Fri, 24 May 2024 00:12:10 +0800 Subject: [PATCH 06/10] fix ci --- large_image/cache_util/rediscache.py | 21 +++++++++++++-------- 1 file changed, 13 insertions(+), 8 deletions(-) diff --git a/large_image/cache_util/rediscache.py b/large_image/cache_util/rediscache.py index 8b8ed83af..bfa3b42e4 100644 --- a/large_image/cache_util/rediscache.py +++ b/large_image/cache_util/rediscache.py @@ -17,8 +17,8 @@ import pickle import threading import time - -from typing import Any, Callable, List, Optional, Tuple, TypeVar, Union +import asyncio +from typing import Any, Callable, List, Optional, Tuple, TypeVar, Union, Awaitable from .. 
import config from .base import BaseCache @@ -57,13 +57,13 @@ def __iter__(self): def __len__(self) -> int: # return invalid length - keys = self._client.keys(f'{self._cache_key_prefix}*') + keys = self._get_sync_result(self._client.keys(f'{self._cache_key_prefix}*')) return len(keys) def __contains__(self, key) -> bool: # cache never contains key _key = self._cache_key_prefix + self._hashKey(key) - return self._client.exists(_key) + return self._get_sync_result(self._client.exists(_key)) def __delitem__(self, key: str) -> None: if not self.__contains__(key): @@ -77,7 +77,7 @@ def __getitem__(self, key: str) -> Any: # must determine if tke key exists , otherwise cache_test can not be passed. if not self.__contains__(key): raise KeyError - return pickle.loads(self._client.get(_key)) + return pickle.loads(self._get_sync_result(self._client.get(_key))) except KeyError: return self.__missing__(key) except self.redis.ConnectionError: @@ -112,7 +112,7 @@ def __setitem__(self, key: str, value: Any) -> None: @property def curritems(self) -> int: - return self._client.dbsize() + return self._get_sync_result(self._client.dbsize()) @property def currsize(self) -> int: @@ -140,14 +140,14 @@ def _reconnect(self) -> None: def _getStat(self, key: str) -> int: try: - stats = self._client.info() + stats = self._get_sync_result(self._client.info()) value = stats[key] except Exception: return 0 return value def clear(self) -> None: - keys = self._client.keys(f'{self._cache_key_prefix}*') + keys = self._get_sync_result(self._client.keys(f'{self._cache_key_prefix}*')) if keys: self._client.delete(*keys) @@ -173,3 +173,8 @@ def getCache() -> Tuple[Optional['RedisCache'], threading.Lock]: config.getLogger().info('Cannot use redis for caching.') cache = None return cache, cacheLock + + def _get_sync_result(self, result: Union[Awaitable[Any], Any]) -> Any: + if isinstance(result, Awaitable): + return asyncio.run(result) + return result From 7b08fa855fd15f3a43399dbc73039f87e1fde8bc Mon Sep 17 00:00:00 2001 From: nipeone Date: Fri, 24 May 2024 00:30:22 +0800 Subject: [PATCH 07/10] fix ci --- large_image/cache_util/rediscache.py | 22 ++++++++-------------- 1 file changed, 8 insertions(+), 14 deletions(-) diff --git a/large_image/cache_util/rediscache.py b/large_image/cache_util/rediscache.py index bfa3b42e4..942c52d7d 100644 --- a/large_image/cache_util/rediscache.py +++ b/large_image/cache_util/rediscache.py @@ -17,8 +17,7 @@ import pickle import threading import time -import asyncio -from typing import Any, Callable, List, Optional, Tuple, TypeVar, Union, Awaitable +from typing import Any, Callable, List, Optional, Tuple, TypeVar, Union from .. import config from .base import BaseCache @@ -42,7 +41,7 @@ def __init__( self._cache_key_prefix = 'large_image_' self._clientParams = (f'redis://{url}', dict( username=username, password=password, db=0, retry_on_timeout=1)) - self._client = Redis.from_url(self._clientParams[0], **self._clientParams[1]) + self._client: Redis = Redis.from_url(self._clientParams[0], **self._clientParams[1]) if mustBeAvailable: # Try to ping server; this will throw an error if the server is # unreachable, so we don't bother trying to use it. 
@@ -57,13 +56,13 @@ def __iter__(self): def __len__(self) -> int: # return invalid length - keys = self._get_sync_result(self._client.keys(f'{self._cache_key_prefix}*')) + keys = self._client.keys(f'{self._cache_key_prefix}*') return len(keys) def __contains__(self, key) -> bool: # cache never contains key _key = self._cache_key_prefix + self._hashKey(key) - return self._get_sync_result(self._client.exists(_key)) + return self._client.exists(_key) def __delitem__(self, key: str) -> None: if not self.__contains__(key): @@ -77,7 +76,7 @@ def __getitem__(self, key: str) -> Any: # must determine if tke key exists , otherwise cache_test can not be passed. if not self.__contains__(key): raise KeyError - return pickle.loads(self._get_sync_result(self._client.get(_key))) + return pickle.loads(self._client.get(_key)) except KeyError: return self.__missing__(key) except self.redis.ConnectionError: @@ -112,7 +111,7 @@ def __setitem__(self, key: str, value: Any) -> None: @property def curritems(self) -> int: - return self._get_sync_result(self._client.dbsize()) + return self._client.dbsize() @property def currsize(self) -> int: @@ -140,14 +139,14 @@ def _reconnect(self) -> None: def _getStat(self, key: str) -> int: try: - stats = self._get_sync_result(self._client.info()) + stats = self._client.info() value = stats[key] except Exception: return 0 return value def clear(self) -> None: - keys = self._get_sync_result(self._client.keys(f'{self._cache_key_prefix}*')) + keys = self._client.keys(f'{self._cache_key_prefix}*') if keys: self._client.delete(*keys) @@ -173,8 +172,3 @@ def getCache() -> Tuple[Optional['RedisCache'], threading.Lock]: config.getLogger().info('Cannot use redis for caching.') cache = None return cache, cacheLock - - def _get_sync_result(self, result: Union[Awaitable[Any], Any]) -> Any: - if isinstance(result, Awaitable): - return asyncio.run(result) - return result From 8a3ad4880091553444e12106d9b80f0a59972eed Mon Sep 17 00:00:00 2001 From: nipeone Date: Fri, 24 May 2024 07:58:26 +0800 Subject: [PATCH 08/10] fix ci --- large_image/cache_util/rediscache.py | 22 ++++++++++++---------- test/test_cache.py | 7 ++++--- 2 files changed, 16 insertions(+), 13 deletions(-) diff --git a/large_image/cache_util/rediscache.py b/large_image/cache_util/rediscache.py index 942c52d7d..9119c662b 100644 --- a/large_image/cache_util/rediscache.py +++ b/large_image/cache_util/rediscache.py @@ -17,7 +17,9 @@ import pickle import threading import time -from typing import Any, Callable, List, Optional, Tuple, TypeVar, Union +from typing import Any, Callable, Iterable, List, Optional, Sized, Tuple, TypeVar, Union, cast + +from typing_extensions import Buffer from .. import config from .base import BaseCache @@ -34,7 +36,7 @@ def __init__( mustBeAvailable: bool = False) -> None: import redis from redis.client import Redis - + self.redis = redis self._redisCls = Redis super().__init__(0, getsizeof=getsizeof) @@ -57,12 +59,12 @@ def __iter__(self): def __len__(self) -> int: # return invalid length keys = self._client.keys(f'{self._cache_key_prefix}*') - return len(keys) + return len(cast(Sized, keys)) def __contains__(self, key) -> bool: # cache never contains key _key = self._cache_key_prefix + self._hashKey(key) - return self._client.exists(_key) + return bool(self._client.exists(_key)) def __delitem__(self, key: str) -> None: if not self.__contains__(key): @@ -76,7 +78,7 @@ def __getitem__(self, key: str) -> Any: # must determine if tke key exists , otherwise cache_test can not be passed. 
if not self.__contains__(key): raise KeyError - return pickle.loads(self._client.get(_key)) + return pickle.loads(cast(Buffer, self._client.get(_key))) except KeyError: return self.__missing__(key) except self.redis.ConnectionError: @@ -111,7 +113,7 @@ def __setitem__(self, key: str, value: Any) -> None: @property def curritems(self) -> int: - return self._client.dbsize() + return cast(int, self._client.dbsize()) @property def currsize(self) -> int: @@ -130,7 +132,7 @@ def _reconnect(self) -> None: self._lastReconnectBackoff = getattr(self, '_lastReconnectBackoff', 2) if time.time() - getattr(self, '_lastReconnect', 0) > self._lastReconnectBackoff: config.getLogger('logprint').info('Trying to reconnect to redis server') - self._client = self._redisCls.from_url(self._clientParams[0], + self._client = self._redisCls.from_url(self._clientParams[0], **self._clientParams[1]) self._lastReconnectBackoff = min(self._lastReconnectBackoff + 1, 30) self._lastReconnect = time.time() @@ -140,7 +142,7 @@ def _reconnect(self) -> None: def _getStat(self, key: str) -> int: try: stats = self._client.info() - value = stats[key] + value = cast(dict, stats)[key] except Exception: return 0 return value @@ -148,7 +150,7 @@ def _getStat(self, key: str) -> int: def clear(self) -> None: keys = self._client.keys(f'{self._cache_key_prefix}*') if keys: - self._client.delete(*keys) + self._client.delete(*list(cast(Iterable[Any], keys))) @staticmethod def getCache() -> Tuple[Optional['RedisCache'], threading.Lock]: @@ -167,7 +169,7 @@ def getCache() -> Tuple[Optional['RedisCache'], threading.Lock]: redisPassword = None try: cache = RedisCache(url, redisUsername, redisPassword, - mustBeAvailable=True) + mustBeAvailable=True) except Exception: config.getLogger().info('Cannot use redis for caching.') cache = None diff --git a/test/test_cache.py b/test/test_cache.py index 130afc4db..4ae22ecdb 100644 --- a/test/test_cache.py +++ b/test/test_cache.py @@ -7,9 +7,9 @@ import large_image.cache_util.cache from large_image import config -from large_image.cache_util import (LruCacheMetaclass, MemCache, RedisCache, cachesClear, - cachesInfo, getTileCache, methodcache, - strhash) +from large_image.cache_util import (LruCacheMetaclass, MemCache, RedisCache, + cachesClear, cachesInfo, getTileCache, + methodcache, strhash) class Fib: @@ -104,6 +104,7 @@ def testGetTileCacheRedis(): tileCache, tileLock = getTileCache() assert isinstance(tileCache, RedisCache) + class TestClass: def testLRUThreadSafety(self): # The cachetools LRU cache is not thread safe, and if two threads ask From 232c7ac46e70cb5b054bc4ed23d2b95e5fc9c5a1 Mon Sep 17 00:00:00 2001 From: nipeone Date: Fri, 24 May 2024 08:09:40 +0800 Subject: [PATCH 09/10] fix lint_and_docs --- large_image/cache_util/rediscache.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/large_image/cache_util/rediscache.py b/large_image/cache_util/rediscache.py index 9119c662b..3e67ac809 100644 --- a/large_image/cache_util/rediscache.py +++ b/large_image/cache_util/rediscache.py @@ -26,6 +26,7 @@ _VT = TypeVar('_VT') + class RedisCache(BaseCache): """Use redis as the backing cache.""" @@ -133,7 +134,7 @@ def _reconnect(self) -> None: if time.time() - getattr(self, '_lastReconnect', 0) > self._lastReconnectBackoff: config.getLogger('logprint').info('Trying to reconnect to redis server') self._client = self._redisCls.from_url(self._clientParams[0], - **self._clientParams[1]) + **self._clientParams[1]) self._lastReconnectBackoff = min(self._lastReconnectBackoff + 1, 30) 
self._lastReconnect = time.time() except Exception: From 3142b858eb1a1187d980972370b5631f4715fadf Mon Sep 17 00:00:00 2001 From: nipeone Date: Fri, 24 May 2024 08:19:15 +0800 Subject: [PATCH 10/10] fix lint_and_docs --- test/test_cached_tiles.py | 1 + 1 file changed, 1 insertion(+) diff --git a/test/test_cached_tiles.py b/test/test_cached_tiles.py index b9a789ddd..29d36ade9 100644 --- a/test/test_cached_tiles.py +++ b/test/test_cached_tiles.py @@ -168,6 +168,7 @@ def setup_class(cls): large_image.cache_util.cache._tileLock = None config.setConfig('cache_backend', 'redis') + class TestPythonCache(LargeImageCachedTilesTest): @classmethod def setup_class(cls):
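
Taken together, the series above adds redis as a selectable tile-cache backend. Enabling it at runtime follows the same pattern the added tests use: choose the backend (and, if needed, the cache_redis_* options) before the tile cache is first constructed. A minimal sketch, assuming a redis server is reachable at the documented default of 127.0.0.1:6379 with no credentials:

    import large_image.cache_util.cache
    from large_image import config
    from large_image.cache_util import RedisCache, getTileCache

    # Select redis before the tile cache is built; cache_redis_url defaults to
    # a local server and cache_redis_password to None (see config.py above).
    config.setConfig('cache_backend', 'redis')
    config.setConfig('cache_redis_url', '127.0.0.1:6379')

    # As in test_cache.py, reset any previously constructed tile cache so the
    # newly selected backend takes effect.
    large_image.cache_util.cache._tileCache = None
    large_image.cache_util.cache._tileLock = None

    tileCache, tileLock = getTileCache()
    assert isinstance(tileCache, RedisCache)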
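The class can also be exercised directly through its mapping interface, in the spirit of cache_test() in test_cache.py. Values are pickled and stored under hashed keys carrying the large_image_ prefix; again, this sketch assumes a reachable local server:

    from large_image.cache_util import RedisCache

    cache = RedisCache('127.0.0.1:6379', mustBeAvailable=True)  # pings on construction

    cache.clear()                   # deletes every key under the large_image_ prefix
    cache['(2,)'] = 1               # value is pickled and SET under a hashed, prefixed key
    assert '(2,)' in cache          # EXISTS on the prefixed key
    assert cache['(2,)'] == 1       # GET, then unpickle
    assert cache.curritems >= 1     # DBSIZE of the backing database
    del cache['(2,)']               # DELETE; raises KeyError if the key is absent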
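Under the hood, the constructor is roughly equivalent to the following redis-py calls, which can also be handy for inspecting what the cache has stored; the URL shown is the assumed default, not a value the patches require:

    import pickle
    from redis import Redis

    # Mirrors RedisCache._clientParams: the configured url is wrapped in a
    # redis:// URL, credentials are passed through, and database 0 is used.
    client = Redis.from_url('redis://127.0.0.1:6379',
                            username=None, password=None,
                            db=0, retry_on_timeout=1)

    # Cached tiles are pickled blobs under keys with the large_image_ prefix.
    for key in client.keys('large_image_*'):
        value = pickle.loads(client.get(key))
        print(key, type(value))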