@@ -131,8 +131,8 @@ def __init__(self, host, port, db_name):

    def dispose(self):
        dt = datetime.now() - self.start_time
-       time_difference = (
-           dt.days * 24 * 60 * 60 + dt.seconds) * 1000 + dt.microseconds / 1000.0
+       time_difference = (dt.days * 24 * 60 * 60 + dt.seconds)\
+           * 1000 + dt.microseconds / 1000.0
        self.logger.debug("Translation completed | time={} msec".format(
            str(time_difference)))

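The rewrapped expression is the hand-rolled timedelta-to-milliseconds conversion (the identical line reappears in the `_insert_entity_rows` hunk below). A minimal standalone check of what it computes, assuming nothing beyond the standard library; `timedelta.total_seconds()` would express the same thing more directly:

```python
from datetime import timedelta

def elapsed_ms(dt: timedelta) -> float:
    # Identical arithmetic to the diff: whole days and seconds converted
    # to msec, plus the sub-second remainder.
    return (dt.days * 24 * 60 * 60 + dt.seconds) * 1000 \
        + dt.microseconds / 1000.0

dt = timedelta(days=1, seconds=3, microseconds=250000)
assert elapsed_ms(dt) == 86403250.0
assert elapsed_ms(dt) == dt.total_seconds() * 1000  # the idiomatic spelling
```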
@@ -143,8 +143,8 @@ def _refresh(self, entity_types, fiware_service=None):
        """
        Used for testing purposes only!
        Refreshing ensures a query after an insert retrieves the inserted data.
-        :param entity_types: list(str) list of entity types whose tables will be
-            refreshed
+        :param entity_types: list(str) list of entity types whose tables will
+            be refreshed
        """
        table_names = [self._et2tn(et, fiware_service) for et in entity_types]
        table_names.append(METADATA_TABLE_NAME)
@@ -365,8 +365,8 @@ def _insert_entity_rows(self, table_name: str, col_names: List[str],
            start_time = datetime.now()
            self.cursor.executemany(stmt, rows)
            dt = datetime.now() - start_time
-            time_difference = (
-                dt.days * 24 * 60 * 60 + dt.seconds) * 1000 + dt.microseconds / 1000.0
+            time_difference = (dt.days * 24 * 60 * 60 + dt.seconds)\
+                * 1000 + dt.microseconds / 1000.0
            self.logger.debug("Query completed | time={} msec".format(
                str(time_difference)))
        except Exception as e:
@@ -426,8 +426,8 @@ def _should_insert_original_entities(self,
    def _insert_original_entities_in_failed_batch(
            self, table_name: str, entities: List[dict],
            insert_error: Exception):
-        cols = f"{ENTITY_ID_COL}, {ENTITY_TYPE_COL}, {self.TIME_INDEX_NAME}" + \
-            f", {ORIGINAL_ENTITY_COL}"
+        cols = f"{ENTITY_ID_COL}, {ENTITY_TYPE_COL}, {self.TIME_INDEX_NAME}" \
+            + f", {ORIGINAL_ENTITY_COL}"
        stmt = f"insert into {table_name} ({cols}) values (?, ?, ?, ?)"
        tix = current_timex()
        batch_id = uuid4().hex
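The `?` placeholders in `stmt` mean the entity payloads are bound by the driver rather than spliced into the SQL string, and `executemany` binds one row of four values per entity. A sketch of the same pattern against the standard-library `sqlite3` driver; the table and column names below are illustrative stand-ins, not this module's constants:

```python
import sqlite3
from datetime import datetime, timezone
from uuid import uuid4

conn = sqlite3.connect(":memory:")
conn.execute("create table failed_batch (entity_id text, entity_type text, "
             "time_index text, original_entity text)")

stmt = "insert into failed_batch (entity_id, entity_type, time_index, " \
       "original_entity) values (?, ?, ?, ?)"
tix = datetime.now(timezone.utc).isoformat()
batch_id = uuid4().hex  # correlates all rows of one failed batch, as above
rows = [(f"urn:ngsi-ld:Room:{i}", "Room", tix,
         f'{{"failedBatchID": "{batch_id}"}}') for i in range(3)]
conn.executemany(stmt, rows)  # driver binds each 4-tuple to the placeholders
```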
@@ -438,7 +438,8 @@ def _insert_original_entities_in_failed_batch(
        self.cursor.executemany(stmt, rows)

    def _attr_is_structured(self, a):
-        if 'value' in a and a['value'] is not None and isinstance(a['value'], dict):
+        if 'value' in a and a['value'] is not None \
+                and isinstance(a['value'], dict):
            self.logger.debug("attribute {} has 'value' attribute of type dict"
                              .format(a))
            return True
@@ -448,8 +449,8 @@ def _attr_is_structured(self, a):
    @staticmethod
    def is_text(attr_type):
        # TODO: verify: same logic in two different places!
-        # The above kinda reproduces the tests done by the translator, we should
-        # factor this logic out and keep it in just one place!
+        # The above kinda reproduces the tests done by the translator,
+        # we should factor this logic out and keep it in just one place!
        return attr_type == NGSI_TEXT or attr_type not in NGSI_TO_SQL

    def _preprocess_values(self, e, original_attrs, col_names,
@@ -1352,20 +1353,29 @@ def _execute_query_via_cache(self, tenant_name, key, stmt, parameters=None,
                    res = pickle.loads(value)
                    return res
            except Exception as e:
-                self.logger.warning(str(e), exc_info=True)
+                self.logger.warning("Caching not available, metadata data may "
+                                    "not be consistent: " + str(e),
+                                    exc_info=True)

        self.cursor.execute(stmt, parameters)
        res = self.cursor.fetchall()
-        if res:
-            self._cache(tenant_name, key, res, ex)
+        if res and self.cache:
+            try:
+                self._cache(tenant_name, key, res, ex)
+            except Exception as e:
+                self.logger.warning("Caching not available, metadata data may "
+                                    "not be consistent: " + str(e),
+                                    exc_info=True)
        return res

    def _is_query_in_cache(self, tenant_name, key):
        if self.cache:
            try:
                return self.cache.exists(tenant_name, key)
            except Exception as e:
-                self.logger.warning(str(e), exc_info=True)
+                self.logger.warning("Caching not available, metadata data may "
+                                    "not be consistent: " + str(e),
+                                    exc_info=True)
        return False

    def _cache(self, tenant_name, key, value=None, ex=None):
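Taken together, the changes in this hunk make every cache interaction best-effort: a broken cache backend now downgrades to a logged warning while the query still runs against the database. A condensed sketch of that degradation pattern under the same assumptions, with a `cache` object exposing `get`/`put` and a `run_query` callable standing in for the cursor work (both hypothetical names):

```python
import logging
import pickle

logger = logging.getLogger(__name__)

def query_via_cache(cache, key, run_query, ex=None):
    """Serve from cache when possible; never let cache errors fail the query."""
    if cache:
        try:
            value = cache.get(key)
            if value:
                return pickle.loads(value)
        except Exception as e:
            logger.warning("Caching not available: %s", e, exc_info=True)
    res = run_query()  # the database stays the source of truth
    if res and cache:
        try:
            cache.put(key, pickle.dumps(res), ex)
        except Exception as e:
            logger.warning("Caching not available: %s", e, exc_info=True)
    return res
```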
@@ -1375,14 +1385,18 @@ def _cache(self, tenant_name, key, value=None, ex=None):
                value = pickle.dumps(value)
                self.cache.put(tenant_name, key, value, ex)
            except Exception as e:
-                self.logger.warning(str(e), exc_info=True)
+                self.logger.warning("Caching not available, metadata data may "
+                                    "not be consistent: " + str(e),
+                                    exc_info=True)

    def _remove_from_cache(self, tenant_name, key):
        if self.cache:
            try:
                self.cache.delete(tenant_name, key)
            except Exception as e:
-                self.logger.warning(str(e), exc_info=True)
+                self.logger.warning("Caching not available, metadata data may "
+                                    "not be consistent: " + str(e),
+                                    exc_info=True)


class QueryCacheManager(Borg):
@@ -1394,7 +1408,8 @@ def __init__(self):
        try:
            self.cache = get_cache()
        except Exception as e:
-            self.logger.warning(str(e), exc_info=True)
+            self.logger.warning("Caching not available:" + str(e),
+                                exc_info=True)

    def get_query_cache(self):
        return self.cache
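`QueryCacheManager` inherits from `Borg`, the shared-state alternative to a singleton: instances stay distinct objects, but they all alias one `__dict__`, so the cache handle obtained in `__init__` is visible to every later instance. A minimal sketch of the pattern as the classic recipe; this repo's actual `Borg` base may differ in detail:

```python
class Borg:
    _shared_state: dict = {}

    def __init__(self):
        # All instances share a single attribute namespace.
        self.__dict__ = self._shared_state


class CacheManager(Borg):
    def __init__(self):
        super().__init__()
        if "cache" not in self.__dict__:  # initialise shared state only once
            self.cache = {}               # stand-in for get_cache()

    def get_query_cache(self):
        return self.cache


a, b = CacheManager(), CacheManager()
a.cache["k"] = "v"
assert b.get_query_cache() == {"k": "v"}  # state is shared
assert a is not b                         # but the instances are not
```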