Skip to content

Commit 8d76dee

Browse files
authored
cache configuration improvements (#429)
* cache configuration improvements * add variable `CACHE_GEOCODING` - `True` or `False` enable or disable caching for geocoding * add variable `CACHE_QUERIES` - `True` or `False` enable or disable caching for queries * fix md indentation
1 parent 32674f5 commit 8d76dee

File tree

7 files changed

+65
-27
lines changed

7 files changed

+65
-27
lines changed

docs/manuals/admin/configuration.md

+2
Original file line numberDiff line numberDiff line change
@@ -19,6 +19,8 @@ To configure QuantumLeap you can use the following environment variables:
1919
| `REDIS_HOST` | Redis Host |
2020
| `REDIS_PORT` | Redis Port |
2121
| `USE_GEOCODING` | `True` or `False` enable or disable geocoding |
22+
| `CACHE_GEOCODING` | `True` or `False` enable or disable caching for geocoding |
23+
| `CACHE_QUERIES` | `True` or `False` enable or disable caching for queries |
2224
| `DEFAULT_CACHE_TTL`| Time to live of metadata cache, default: 60 (seconds) | |
2325
| `QL_CONFIG` | Pathname for tenant configuration |
2426
| `QL_DEFAULT_DB` | Default backend: `timescale` or `crate` |

src/cache/factory.py

+5-3
Original file line numberDiff line numberDiff line change
@@ -25,6 +25,9 @@ def redis_port(self) -> int:
2525
def default_ttl(self) -> int:
2626
return self.env.read(IntVar('DEFAULT_CACHE_TTL', 60))
2727

28+
def cache_queries(self) -> bool:
29+
return self.env.read(BoolVar('CACHE_QUERIES', False))
30+
2831

2932
def log():
3033
return logging.getLogger(__name__)
@@ -51,9 +54,8 @@ def get_cache() -> MaybeCache:
5154
object otherwise.
5255
"""
5356
env = CacheEnvReader()
54-
if is_cache_available():
55-
log().info("Cache env variables set, building a cache.")
56-
57+
if is_cache_available() and env.cache_queries():
58+
log().debug("Cache env variables set, building a cache.")
5759
return QueryCache(env.redis_host(), env.redis_port(),
5860
env.default_ttl())
5961

src/geocoding/factory.py

+22-6
Original file line numberDiff line numberDiff line change
@@ -4,7 +4,6 @@
44
from .geocache import GeoCodingCache
55
from utils.cfgreader import EnvReader, BoolVar, IntVar, StrVar, MaybeString
66

7-
87
MaybeGeoCache = Union[GeoCodingCache, None]
98

109

@@ -26,6 +25,9 @@ def __init__(self):
2625
def use_geocoding(self) -> bool:
2726
return self.env.read(BoolVar('USE_GEOCODING', False))
2827

28+
def cache_geocoding(self) -> bool:
29+
return self.env.read(BoolVar('CACHE_GEOCODING', False))
30+
2931
def redis_host(self) -> MaybeString:
3032
return self.env.read(StrVar('REDIS_HOST', None))
3133

@@ -48,7 +50,20 @@ def is_geo_coding_available() -> bool:
4850
use geo-coding.
4951
"""
5052
env = GeoCodingEnvReader()
51-
if env.use_geocoding() and env.redis_host():
53+
if env.use_geocoding():
54+
return True
55+
return False
56+
57+
58+
def is_geo_cache_available() -> bool:
59+
"""
60+
Can we use cache? Yes if the Redis host env var is set. No otherwise.
61+
62+
:return: True or False depending on whether or not we're supposed to
63+
use the geo cache.
64+
"""
65+
env = GeoCodingEnvReader()
66+
if env.redis_host():
5267
return True
5368
return False
5469

@@ -62,9 +77,10 @@ def get_geo_cache() -> MaybeGeoCache:
6277
"""
6378
env = GeoCodingEnvReader()
6479
if is_geo_coding_available():
65-
log().debug("Geo Cache env variables set, building a cache.")
66-
67-
return GeoCodingCache(env.redis_host(), env.redis_port())
80+
log().debug("Geo Cache env variables set, try to build a cache.")
81+
if env.cache_geocoding() and is_geo_cache_available():
82+
return GeoCodingCache(env.redis_host(), env.redis_port())
83+
log().warning("Geo Cache is not enabled, check env variables.")
6884

69-
log().debug("Geo Cache env variables indicate cache should not be used.")
85+
log().debug("Geo Cache is not enabled")
7086
return None

src/reporter/tests/docker-compose.yml

+1
Original file line numberDiff line numberDiff line change
@@ -40,6 +40,7 @@ services:
4040
environment:
4141
- USE_GEOCODING=False
4242
- REDIS_HOST=redis
43+
- CACHE_QUERIES=True
4344
- LOGLEVEL=INFO
4445
- POSTGRES_HOST=timescale
4546
- QL_CONFIG=/config/ql-config.yml

src/tests/benchmark/docker-compose.yml

+1
Original file line numberDiff line numberDiff line change
@@ -45,6 +45,7 @@ services:
4545
environment:
4646
- CRATE_HOST=${CRATE_HOST:-crate}
4747
- USE_GEOCODING=False
48+
- CACHE_QUERIES=True
4849
- REDIS_HOST=redis
4950
- REDIS_PORT=6379
5051
- LOGLEVEL=INFO

src/tests/docker-compose.yml

+1
Original file line numberDiff line numberDiff line change
@@ -33,6 +33,7 @@ services:
3333
environment:
3434
- CRATE_HOST=${CRATE_HOST:-crate}
3535
- USE_GEOCODING=False
36+
- CACHE_QUERIES=True
3637
- REDIS_HOST=redis
3738
- REDIS_PORT=6379
3839
- LOGLEVEL=INFO

src/translators/sql_translator.py

+33-18
Original file line numberDiff line numberDiff line change
@@ -131,8 +131,8 @@ def __init__(self, host, port, db_name):
131131

132132
def dispose(self):
133133
dt = datetime.now() - self.start_time
134-
time_difference = (
135-
dt.days * 24 * 60 * 60 + dt.seconds) * 1000 + dt.microseconds / 1000.0
134+
time_difference = (dt.days * 24 * 60 * 60 + dt.seconds)\
135+
* 1000 + dt.microseconds / 1000.0
136136
self.logger.debug("Translation completed | time={} msec".format(
137137
str(time_difference)))
138138

@@ -143,8 +143,8 @@ def _refresh(self, entity_types, fiware_service=None):
143143
"""
144144
Used for testing purposes only!
145145
Refreshing ensures a query after an insert retrieves the inserted data.
146-
:param entity_types: list(str) list of entity types whose tables will be
147-
refreshed
146+
:param entity_types: list(str) list of entity types whose tables will
147+
be refreshed
148148
"""
149149
table_names = [self._et2tn(et, fiware_service) for et in entity_types]
150150
table_names.append(METADATA_TABLE_NAME)
@@ -365,8 +365,8 @@ def _insert_entity_rows(self, table_name: str, col_names: List[str],
365365
start_time = datetime.now()
366366
self.cursor.executemany(stmt, rows)
367367
dt = datetime.now() - start_time
368-
time_difference = (
369-
dt.days * 24 * 60 * 60 + dt.seconds) * 1000 + dt.microseconds / 1000.0
368+
time_difference = (dt.days * 24 * 60 * 60 + dt.seconds)\
369+
* 1000 + dt.microseconds / 1000.0
370370
self.logger.debug("Query completed | time={} msec".format(
371371
str(time_difference)))
372372
except Exception as e:
@@ -426,8 +426,8 @@ def _should_insert_original_entities(self,
426426
def _insert_original_entities_in_failed_batch(
427427
self, table_name: str, entities: List[dict],
428428
insert_error: Exception):
429-
cols = f"{ENTITY_ID_COL}, {ENTITY_TYPE_COL}, {self.TIME_INDEX_NAME}" + \
430-
f", {ORIGINAL_ENTITY_COL}"
429+
cols = f"{ENTITY_ID_COL}, {ENTITY_TYPE_COL}, {self.TIME_INDEX_NAME}" \
430+
+ f", {ORIGINAL_ENTITY_COL}"
431431
stmt = f"insert into {table_name} ({cols}) values (?, ?, ?, ?)"
432432
tix = current_timex()
433433
batch_id = uuid4().hex
@@ -438,7 +438,8 @@ def _insert_original_entities_in_failed_batch(
438438
self.cursor.executemany(stmt, rows)
439439

440440
def _attr_is_structured(self, a):
441-
if 'value' in a and a['value'] is not None and isinstance(a['value'], dict):
441+
if 'value' in a and a['value'] is not None \
442+
and isinstance(a['value'], dict):
442443
self.logger.debug("attribute {} has 'value' attribute of type dict"
443444
.format(a))
444445
return True
@@ -448,8 +449,8 @@ def _attr_is_structured(self, a):
448449
@staticmethod
449450
def is_text(attr_type):
450451
# TODO: verify: same logic in two different places!
451-
# The above kinda reproduces the tests done by the translator, we should
452-
# factor this logic out and keep it in just one place!
452+
# The above kinda reproduces the tests done by the translator,
453+
# we should factor this logic out and keep it in just one place!
453454
return attr_type == NGSI_TEXT or attr_type not in NGSI_TO_SQL
454455

455456
def _preprocess_values(self, e, original_attrs, col_names,
@@ -1352,20 +1353,29 @@ def _execute_query_via_cache(self, tenant_name, key, stmt, parameters=None,
13521353
res = pickle.loads(value)
13531354
return res
13541355
except Exception as e:
1355-
self.logger.warning(str(e), exc_info=True)
1356+
self.logger.warning("Caching not available, metadata data may "
1357+
"not be consistent: " + str(e),
1358+
exc_info=True)
13561359

13571360
self.cursor.execute(stmt, parameters)
13581361
res = self.cursor.fetchall()
1359-
if res:
1360-
self._cache(tenant_name, key, res, ex)
1362+
if res and self.cache:
1363+
try:
1364+
self._cache(tenant_name, key, res, ex)
1365+
except Exception as e:
1366+
self.logger.warning("Caching not available, metadata data may "
1367+
"not be consistent: " + str(e),
1368+
exc_info=True)
13611369
return res
13621370

13631371
def _is_query_in_cache(self, tenant_name, key):
13641372
if self.cache:
13651373
try:
13661374
return self.cache.exists(tenant_name, key)
13671375
except Exception as e:
1368-
self.logger.warning(str(e), exc_info=True)
1376+
self.logger.warning("Caching not available, metadata data may "
1377+
"not be consistent: " + str(e),
1378+
exc_info=True)
13691379
return False
13701380

13711381
def _cache(self, tenant_name, key, value=None, ex=None):
@@ -1375,14 +1385,18 @@ def _cache(self, tenant_name, key, value=None, ex=None):
13751385
value = pickle.dumps(value)
13761386
self.cache.put(tenant_name, key, value, ex)
13771387
except Exception as e:
1378-
self.logger.warning(str(e), exc_info=True)
1388+
self.logger.warning("Caching not available, metadata data may "
1389+
"not be consistent: " + str(e),
1390+
exc_info=True)
13791391

13801392
def _remove_from_cache(self, tenant_name, key):
13811393
if self.cache:
13821394
try:
13831395
self.cache.delete(tenant_name, key)
13841396
except Exception as e:
1385-
self.logger.warning(str(e), exc_info=True)
1397+
self.logger.warning("Caching not available, metadata data may "
1398+
"not be consistent: " + str(e),
1399+
exc_info=True)
13861400

13871401

13881402
class QueryCacheManager(Borg):
@@ -1394,7 +1408,8 @@ def __init__(self):
13941408
try:
13951409
self.cache = get_cache()
13961410
except Exception as e:
1397-
self.logger.warning(str(e), exc_info=True)
1411+
self.logger.warning("Caching not available:" + str(e),
1412+
exc_info=True)
13981413

13991414
def get_query_cache(self):
14001415
return self.cache

0 commit comments

Comments
 (0)