From 0783a0790a1faa9d7467b0a4e4811cd55d4ed1a7 Mon Sep 17 00:00:00 2001 From: sreeder Date: Mon, 6 Nov 2017 14:03:46 -0700 Subject: [PATCH 01/55] initial commit for sampling feature dataset query --- Examples/Sample.py | 8 +++- odm2api/ODM2/models.py | 1 + odm2api/ODM2/services/readService.py | 63 +++++++++++++++++++++++++++- 3 files changed, 69 insertions(+), 3 deletions(-) diff --git a/Examples/Sample.py b/Examples/Sample.py index dcd8896..fc09cf6 100644 --- a/Examples/Sample.py +++ b/Examples/Sample.py @@ -17,15 +17,18 @@ #connect to database # createconnection (dbtype, servername, dbname, username, password) # session_factory = dbconnection.createConnection('connection type: sqlite|mysql|mssql|postgresql', '/your/path/to/db/goes/here', 2.0)#sqlite -session_factory = dbconnection.createConnection('postgresql', 'localhost', 'odm2', 'ODM', 'odm') +# session_factory = dbconnection.createConnection('postgresql', 'localhost', 'odm2', 'ODM', 'odm') # session_factory = dbconnection.createConnection('mysql', 'localhost', 'odm2', 'ODM', 'odm')#mysql # session_factory= dbconnection.createConnection('mssql', "(local)", "ODM2", "ODM", "odm")#win MSSQL + # session_factory= dbconnection.createConnection('mssql', "arroyoodm2", "", "ODM", "odm")#mac/linux MSSQL # session_factory = dbconnection.createConnection('sqlite', '/Users/stephanie/DEV/YODA-Tools/tests/test_files/XL_specimen.sqlite', 2.0) +session_factory = dbconnection.createConnection('postgresql', 'odm2wofpy1.uwrl.usu.edu', 'odm2', 'dbadmin', 'pinkbananastastegross') + @@ -39,6 +42,9 @@ create = CreateODM2(session_factory) +ds= read.getSamplingFeatureDatasets(ids = [1]) + + # Run some basic sample queries. 
# ------------------------------ # Get all of the variables from the database and print their names to the console diff --git a/odm2api/ODM2/models.py b/odm2api/ODM2/models.py index 7b6d5b2..805d755 100644 --- a/odm2api/ODM2/models.py +++ b/odm2api/ODM2/models.py @@ -316,6 +316,7 @@ class DataSets(Base): DataSetTitle = Column('datasettitle', String(255), nullable=False) DataSetAbstract = Column('datasetabstract', String(500), nullable=False) + # DatasetsResultsObj = relationship(DataSetsResults, primaryjoin='DatasetsResults.DatasetID == DataSets.ActionID') class ProcessingLevels(Base): """ diff --git a/odm2api/ODM2/services/readService.py b/odm2api/ODM2/services/readService.py index 5816127..d111547 100644 --- a/odm2api/ODM2/services/readService.py +++ b/odm2api/ODM2/services/readService.py @@ -12,7 +12,7 @@ CalibrationActions, CalibrationReferenceEquipment, CalibrationStandards, CategoricalResultValueAnnotations, CategoricalResultValues, CitationExtensionPropertyValues, CitationExternalIdentifiers, DataLoggerFileColumns, DataLoggerFiles, DataLoggerProgramFiles, - DataQuality, DataSetCitations, DataSets, DerivationEquations, Directives, Equipment, + DataQuality, DataSetCitations, DataSets, DataSetsResults, DerivationEquations, Directives, Equipment, EquipmentActions, EquipmentAnnotations, EquipmentModels, EquipmentUsed, ExtensionProperties, ExternalIdentifierSystems, FeatureActions, InstrumentOutputVariables, MaintenanceActions, MeasurementResultValueAnnotations, MeasurementResultValues, MethodAnnotations, @@ -462,6 +462,8 @@ def getRelatedSamplingFeatures(self, sfid=None, rfid=None, relationshiptype=None print('Error running Query: {}'.format(e)) return None + + # Action def getActions(self, ids=None, type=None, sfid=None): """ @@ -694,13 +696,70 @@ def getDataSets(self, codes=None, uuids=None): if codes: q = q.filter(DataSets.DataSetCode.in_(codes)) if uuids: - q.q.filter(DataSets.DataSetUUID.in_(uuids)) + q.filter(DataSets.DataSetUUID.in_(uuids)) try: return 
q.all() except Exception as e: print('Error running Query {}'.format(e)) return None + def getSamplingFeatureDatasets(self, ids=None, codes=None, uuids=None, type=None): + """Retrieve a list of Sampling Feature objects. + Retrieve a list of Datasets associated with the given sampling feature data. + + Must specify either samplingFeatureID OR samplingFeatureUUID OR samplingFeatureCode) + + Args: + ids (list, optional): List of SamplingFeatureIDs. + codes (list, optional): List of SamplingFeature Codes. + uuids (list, optional): List of UUIDs string. + type (str, optional): Type of Dataset from + `controlled vocabulary name `_. + + + Returns: + list: List of sampling feature objects along with their associated datasets + + Examples: + >>> READ = ReadODM2(SESSION_FACTORY) + >>> READ.getSamplingFeatureDatasets(ids=[39, 40]) + >>> READ.getSamplingFeatureDatasets(codes=['HOME', 'FIELD']) + >>> READ.getSamplingFeatureDatasets(uuids=['a6f114f1-5416-4606-ae10-23be32dbc202', + ... '5396fdf3-ceb3-46b6-aaf9-454a37278bb4']) + >>> READ.getSamplingFeatureDatasets(type='singleTimeSeries') + + """ + + + # make sure one of the three arguments has been sent in + if all(v is None for v in [ids, codes, uuids]): + raise ValueError('Expected samplingFeatureID OR samplingFeatureUUID OR samplingFeatureCode argument') + + sf_query = self._session.query(SamplingFeatures.SamplingFeatureID) + + if ids: + sf_query = sf_query.filter(SamplingFeatures.SamplingFeatureID.in_(ids)) + if codes: + sf_query = sf_query.filter(SamplingFeatures.SamplingFeatureCode.in_(codes)) + if uuids: + sf_query = sf_query.filter(SamplingFeatures.SamplingFeatureUUID.in_(uuids)) + sf_list = sf_query.all() + + # , DataSetsResults)\ + q = self._session.query(DataSetsResults)\ + .join(Results)\ + .join(FeatureActions)\ + .filter(FeatureActions.SamplingFeatureID.in_(sf_list)) + + if type: + q = q.filter_by(DatasetTypeCV=type) + + try: + return q.all() + except Exception as e: + print('Error running Query: {}'.format(e)) + 
return None + # Data Quality def getDataQuality(self): """ From 959ba641b3ba6d871e408e572bbe54e24b375d97 Mon Sep 17 00:00:00 2001 From: sreeder Date: Mon, 6 Nov 2017 14:42:39 -0700 Subject: [PATCH 02/55] remove extra info --- Examples/Sample.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/Examples/Sample.py b/Examples/Sample.py index fc09cf6..81ad52c 100644 --- a/Examples/Sample.py +++ b/Examples/Sample.py @@ -27,7 +27,7 @@ # session_factory = dbconnection.createConnection('sqlite', '/Users/stephanie/DEV/YODA-Tools/tests/test_files/XL_specimen.sqlite', 2.0) -session_factory = dbconnection.createConnection('postgresql', 'odm2wofpy1.uwrl.usu.edu', 'odm2', 'dbadmin', 'pinkbananastastegross') + @@ -42,7 +42,7 @@ create = CreateODM2(session_factory) -ds= read.getSamplingFeatureDatasets(ids = [1]) +ds= read.getSamplingFeatureDatasets(ids = [1001]) # Run some basic sample queries. From d91ddc702526e7867e45821a2b166e3bb202bd14 Mon Sep 17 00:00:00 2001 From: sreeder Date: Mon, 6 Nov 2017 14:43:41 -0700 Subject: [PATCH 03/55] cleanup models comments --- odm2api/ODM2/models.py | 1 - 1 file changed, 1 deletion(-) diff --git a/odm2api/ODM2/models.py b/odm2api/ODM2/models.py index 805d755..7b6d5b2 100644 --- a/odm2api/ODM2/models.py +++ b/odm2api/ODM2/models.py @@ -316,7 +316,6 @@ class DataSets(Base): DataSetTitle = Column('datasettitle', String(255), nullable=False) DataSetAbstract = Column('datasetabstract', String(500), nullable=False) - # DatasetsResultsObj = relationship(DataSetsResults, primaryjoin='DatasetsResults.DatasetID == DataSets.ActionID') class ProcessingLevels(Base): """ From 9419af87de11197c2cfdbcc3057c8bdd5fb7da36 Mon Sep 17 00:00:00 2001 From: sreeder Date: Mon, 6 Nov 2017 14:46:05 -0700 Subject: [PATCH 04/55] update inline documentation --- odm2api/ODM2/services/readService.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/odm2api/ODM2/services/readService.py b/odm2api/ODM2/services/readService.py index 
d111547..9c71d49 100644 --- a/odm2api/ODM2/services/readService.py +++ b/odm2api/ODM2/services/readService.py @@ -714,11 +714,11 @@ def getSamplingFeatureDatasets(self, ids=None, codes=None, uuids=None, type=None codes (list, optional): List of SamplingFeature Codes. uuids (list, optional): List of UUIDs string. type (str, optional): Type of Dataset from - `controlled vocabulary name `_. + `controlled vocabulary name `_. Returns: - list: List of sampling feature objects along with their associated datasets + list: List of DataSetsResults Objects associated with the given sampling feature Examples: >>> READ = ReadODM2(SESSION_FACTORY) @@ -745,7 +745,7 @@ def getSamplingFeatureDatasets(self, ids=None, codes=None, uuids=None, type=None sf_query = sf_query.filter(SamplingFeatures.SamplingFeatureUUID.in_(uuids)) sf_list = sf_query.all() - # , DataSetsResults)\ + q = self._session.query(DataSetsResults)\ .join(Results)\ .join(FeatureActions)\ From 01b1933a6ec7990f6d8717379ba3de00f2a357b3 Mon Sep 17 00:00:00 2001 From: sreeder Date: Mon, 6 Nov 2017 15:02:08 -0700 Subject: [PATCH 05/55] cleanup documentation url --- odm2api/ODM2/services/readService.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/odm2api/ODM2/services/readService.py b/odm2api/ODM2/services/readService.py index 9c71d49..85cec4d 100644 --- a/odm2api/ODM2/services/readService.py +++ b/odm2api/ODM2/services/readService.py @@ -714,7 +714,7 @@ def getSamplingFeatureDatasets(self, ids=None, codes=None, uuids=None, type=None codes (list, optional): List of SamplingFeature Codes. uuids (list, optional): List of UUIDs string. type (str, optional): Type of Dataset from - `controlled vocabulary name `_. + `controlled vocabulary name `_. 
Returns: From e4de3e4418557a453d4a45e271a3072971df1b63 Mon Sep 17 00:00:00 2001 From: sreeder Date: Mon, 6 Nov 2017 15:10:06 -0700 Subject: [PATCH 06/55] bold required element in documentation --- Examples/Sample.py | 2 +- odm2api/ODM2/services/readService.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/Examples/Sample.py b/Examples/Sample.py index 81ad52c..2c0f41e 100644 --- a/Examples/Sample.py +++ b/Examples/Sample.py @@ -27,7 +27,7 @@ # session_factory = dbconnection.createConnection('sqlite', '/Users/stephanie/DEV/YODA-Tools/tests/test_files/XL_specimen.sqlite', 2.0) - +session_factory = dbconnection.createConnection('postgresql', 'odm2wofpy1.uwrl.usu.edu', 'odm2', 'dbadmin', 'pinkbananastastegross') diff --git a/odm2api/ODM2/services/readService.py b/odm2api/ODM2/services/readService.py index 85cec4d..54f3a29 100644 --- a/odm2api/ODM2/services/readService.py +++ b/odm2api/ODM2/services/readService.py @@ -707,7 +707,7 @@ def getSamplingFeatureDatasets(self, ids=None, codes=None, uuids=None, type=None """Retrieve a list of Sampling Feature objects. Retrieve a list of Datasets associated with the given sampling feature data. - Must specify either samplingFeatureID OR samplingFeatureUUID OR samplingFeatureCode) + >>>Must specify either samplingFeatureID OR samplingFeatureUUID OR samplingFeatureCode) Args: ids (list, optional): List of SamplingFeatureIDs. 
From 60ebf3f300136780961e407719354c07e6c7e154 Mon Sep 17 00:00:00 2001 From: sreeder Date: Mon, 6 Nov 2017 15:15:05 -0700 Subject: [PATCH 07/55] rename type to dstype --- odm2api/ODM2/services/readService.py | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/odm2api/ODM2/services/readService.py b/odm2api/ODM2/services/readService.py index 54f3a29..61dafeb 100644 --- a/odm2api/ODM2/services/readService.py +++ b/odm2api/ODM2/services/readService.py @@ -703,11 +703,11 @@ def getDataSets(self, codes=None, uuids=None): print('Error running Query {}'.format(e)) return None - def getSamplingFeatureDatasets(self, ids=None, codes=None, uuids=None, type=None): + def getSamplingFeatureDatasets(self, ids=None, codes=None, uuids=None, dstype=None): """Retrieve a list of Sampling Feature objects. Retrieve a list of Datasets associated with the given sampling feature data. - >>>Must specify either samplingFeatureID OR samplingFeatureUUID OR samplingFeatureCode) + >>> Must specify either samplingFeatureID OR samplingFeatureUUID OR samplingFeatureCode) Args: ids (list, optional): List of SamplingFeatureIDs. @@ -726,7 +726,7 @@ def getSamplingFeatureDatasets(self, ids=None, codes=None, uuids=None, type=None >>> READ.getSamplingFeatureDatasets(codes=['HOME', 'FIELD']) >>> READ.getSamplingFeatureDatasets(uuids=['a6f114f1-5416-4606-ae10-23be32dbc202', ... 
'5396fdf3-ceb3-46b6-aaf9-454a37278bb4']) - >>> READ.getSamplingFeatureDatasets(type='singleTimeSeries') + >>> READ.getSamplingFeatureDatasets(dstype='singleTimeSeries') """ @@ -751,8 +751,8 @@ def getSamplingFeatureDatasets(self, ids=None, codes=None, uuids=None, type=None .join(FeatureActions)\ .filter(FeatureActions.SamplingFeatureID.in_(sf_list)) - if type: - q = q.filter_by(DatasetTypeCV=type) + if dstype: + q = q.filter_by(DatasetTypeCV=dstype) try: return q.all() From 53e9631a081a6e0779ee7a45794b30adcc1987a8 Mon Sep 17 00:00:00 2001 From: sreeder Date: Mon, 6 Nov 2017 15:19:50 -0700 Subject: [PATCH 08/55] fix bold documentations --- odm2api/ODM2/services/readService.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/odm2api/ODM2/services/readService.py b/odm2api/ODM2/services/readService.py index 61dafeb..06252ad 100644 --- a/odm2api/ODM2/services/readService.py +++ b/odm2api/ODM2/services/readService.py @@ -704,16 +704,16 @@ def getDataSets(self, codes=None, uuids=None): return None def getSamplingFeatureDatasets(self, ids=None, codes=None, uuids=None, dstype=None): - """Retrieve a list of Sampling Feature objects. + """ Retrieve a list of Datasets associated with the given sampling feature data. - >>> Must specify either samplingFeatureID OR samplingFeatureUUID OR samplingFeatureCode) + **Must specify either samplingFeatureID OR samplingFeatureUUID OR samplingFeatureCode)** Args: ids (list, optional): List of SamplingFeatureIDs. codes (list, optional): List of SamplingFeature Codes. uuids (list, optional): List of UUIDs string. - type (str, optional): Type of Dataset from + dstype (str, optional): Type of Dataset from `controlled vocabulary name `_. 
From 4df7799f63fb288f5979b0368f0f4e16e85539b7 Mon Sep 17 00:00:00 2001 From: sreeder Date: Mon, 6 Nov 2017 15:24:47 -0700 Subject: [PATCH 09/55] all ready for merging --- Examples/Sample.py | 11 ++--------- 1 file changed, 2 insertions(+), 9 deletions(-) diff --git a/Examples/Sample.py b/Examples/Sample.py index 2c0f41e..3292780 100644 --- a/Examples/Sample.py +++ b/Examples/Sample.py @@ -17,7 +17,7 @@ #connect to database # createconnection (dbtype, servername, dbname, username, password) # session_factory = dbconnection.createConnection('connection type: sqlite|mysql|mssql|postgresql', '/your/path/to/db/goes/here', 2.0)#sqlite -# session_factory = dbconnection.createConnection('postgresql', 'localhost', 'odm2', 'ODM', 'odm') +session_factory = dbconnection.createConnection('postgresql', 'localhost', 'odm2', 'ODM', 'odm') # session_factory = dbconnection.createConnection('mysql', 'localhost', 'odm2', 'ODM', 'odm')#mysql # session_factory= dbconnection.createConnection('mssql', "(local)", "ODM2", "ODM", "odm")#win MSSQL @@ -27,13 +27,6 @@ # session_factory = dbconnection.createConnection('sqlite', '/Users/stephanie/DEV/YODA-Tools/tests/test_files/XL_specimen.sqlite', 2.0) -session_factory = dbconnection.createConnection('postgresql', 'odm2wofpy1.uwrl.usu.edu', 'odm2', 'dbadmin', 'pinkbananastastegross') - - - - - - @@ -42,7 +35,7 @@ create = CreateODM2(session_factory) -ds= read.getSamplingFeatureDatasets(ids = [1001]) + # Run some basic sample queries. 
From 2a226760f19b81b97599781db586bc9a21176a9b Mon Sep 17 00:00:00 2001 From: sreeder Date: Thu, 9 Nov 2017 10:46:26 -0700 Subject: [PATCH 10/55] update getDatasets and create getDatasetsResults functions --- odm2api/ODM2/services/readService.py | 73 ++++++++++++++++++++++++++-- 1 file changed, 69 insertions(+), 4 deletions(-) diff --git a/odm2api/ODM2/services/readService.py b/odm2api/ODM2/services/readService.py index 06252ad..7dde058 100644 --- a/odm2api/ODM2/services/readService.py +++ b/odm2api/ODM2/services/readService.py @@ -686,23 +686,88 @@ def getResults(self, ids=None, type=None, uuids=None, actionid=None, simulationi return None # Datasets - def getDataSets(self, codes=None, uuids=None): + def getDataSets(self, ids= None, codes=None, uuids=None, dstype=None): """ - * Pass nothing - returns a list of all DataSet objects - * Pass a list of DataSetCode - returns a single DataSet object for each code - * Pass a list of UUIDS - returns a single DataSet object for each UUID + Retrieve a list of Datasets + + Args: + ids (list, optional): List of DataSetsIDs. + codes (list, optional): List of DataSet Codes. + uuids (list, optional): List of Dataset UUIDs string. + dstype (str, optional): Type of Dataset from + `controlled vocabulary name `_. + + + Returns: + list: List of DataSets Objects + + Examples: + >>> READ = ReadODM2(SESSION_FACTORY) + >>> READ.getDataSets(ids=[39, 40]) + >>> READ.getDataSets(codes=['HOME', 'FIELD']) + >>> READ.getDataSets(uuids=['a6f114f1-5416-4606-ae10-23be32dbc202', + ... 
'5396fdf3-ceb3-46b6-aaf9-454a37278bb4']) + >>> READ.getDataSets(dstype='singleTimeSeries') + """ q = self._session.query(DataSets) + if ids: + q = q.filter(DataSets.DataSetID.in_(codes)) if codes: q = q.filter(DataSets.DataSetCode.in_(codes)) if uuids: q.filter(DataSets.DataSetUUID.in_(uuids)) + if dstype: + q = q.filter(DataSets.DataSetTypeCV == dstype) try: return q.all() except Exception as e: print('Error running Query {}'.format(e)) return None + # Datasets + + def getDataSetsResults(self, ids= None, codes=None, uuids=None, dstype=None): + """ + Retrieve a detailed list of Datasets along with detailed metadata about the datasets + and the results contained within them + + Args: + ids (list, optional): List of DataSetsIDs. + codes (list, optional): List of DataSet Codes. + uuids (list, optional): List of Dataset UUIDs string. + dstype (str, optional): Type of Dataset from + `controlled vocabulary name `_. + + + Returns: + list: List of DataSetsResults Objects + + Examples: + >>> READ = ReadODM2(SESSION_FACTORY) + >>> READ.getDataSetsResults(ids=[39, 40]) + >>> READ.getDataSetsResults(codes=['HOME', 'FIELD']) + >>> READ.getDataSetsResults(uuids=['a6f114f1-5416-4606-ae10-23be32dbc202', + ... '5396fdf3-ceb3-46b6-aaf9-454a37278bb4']) + >>> READ.getDataSetsResults(dstype='singleTimeSeries') + + """ + q = self._session.query(DataSetsResults)\ + .join(DataSets) + if ids: + q = q.filter(DataSets.DataSetID.in_(codes)) + if codes: + q = q.filter(DataSets.DataSetCode.in_(codes)) + if uuids: + q.filter(DataSets.DataSetUUID.in_(uuids)) + if dstype: + q = q.filter(DataSets.DataSetTypeCV == dstype) + try: + return q.all() + except Exception as e: + print('Error running Query {}'.format(e)) + return None + def getSamplingFeatureDatasets(self, ids=None, codes=None, uuids=None, dstype=None): """ Retrieve a list of Datasets associated with the given sampling feature data. 
From 1ce6c3712981c5faeabf52bd041df5811a47293a Mon Sep 17 00:00:00 2001 From: sreeder Date: Thu, 9 Nov 2017 10:48:18 -0700 Subject: [PATCH 11/55] add condition to getDatasetREsults function --- Examples/Sample.py | 9 ++++++++- odm2api/ODM2/services/readService.py | 6 ++++++ 2 files changed, 14 insertions(+), 1 deletion(-) diff --git a/Examples/Sample.py b/Examples/Sample.py index 3292780..27a4a15 100644 --- a/Examples/Sample.py +++ b/Examples/Sample.py @@ -17,7 +17,7 @@ #connect to database # createconnection (dbtype, servername, dbname, username, password) # session_factory = dbconnection.createConnection('connection type: sqlite|mysql|mssql|postgresql', '/your/path/to/db/goes/here', 2.0)#sqlite -session_factory = dbconnection.createConnection('postgresql', 'localhost', 'odm2', 'ODM', 'odm') +# session_factory = dbconnection.createConnection('postgresql', 'localhost', 'odm2', 'ODM', 'odm') # session_factory = dbconnection.createConnection('mysql', 'localhost', 'odm2', 'ODM', 'odm')#mysql # session_factory= dbconnection.createConnection('mssql', "(local)", "ODM2", "ODM", "odm")#win MSSQL @@ -27,6 +27,13 @@ # session_factory = dbconnection.createConnection('sqlite', '/Users/stephanie/DEV/YODA-Tools/tests/test_files/XL_specimen.sqlite', 2.0) +session_factory = dbconnection.createConnection('postgresql', 'odm2wofpy1.uwrl.usu.edu', 'odm2', 'dbadmin', 'pinkbananastastegross') + + + + + + diff --git a/odm2api/ODM2/services/readService.py b/odm2api/ODM2/services/readService.py index 7dde058..7269a69 100644 --- a/odm2api/ODM2/services/readService.py +++ b/odm2api/ODM2/services/readService.py @@ -732,6 +732,7 @@ def getDataSetsResults(self, ids= None, codes=None, uuids=None, dstype=None): Retrieve a detailed list of Datasets along with detailed metadata about the datasets and the results contained within them + **Must specify either DataSetID OR DataSetUUID OR DataSetCode)** Args: ids (list, optional): List of DataSetsIDs. codes (list, optional): List of DataSet Codes. 
@@ -752,6 +753,11 @@ def getDataSetsResults(self, ids= None, codes=None, uuids=None, dstype=None): >>> READ.getDataSetsResults(dstype='singleTimeSeries') """ + + # make sure one of the three arguments has been sent in + if all(v is None for v in [ids, codes, uuids]): + raise ValueError('Expected DataSetID OR DataSetUUID OR DataSetCode argument') + q = self._session.query(DataSetsResults)\ .join(DataSets) if ids: From 4ea939503d8c164d954094d93cdba127f07c6ad3 Mon Sep 17 00:00:00 2001 From: sreeder Date: Mon, 13 Nov 2017 09:42:03 -0700 Subject: [PATCH 12/55] fix typo in get from ids --- odm2api/ODM2/services/readService.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/odm2api/ODM2/services/readService.py b/odm2api/ODM2/services/readService.py index 7269a69..2e18772 100644 --- a/odm2api/ODM2/services/readService.py +++ b/odm2api/ODM2/services/readService.py @@ -712,7 +712,7 @@ def getDataSets(self, ids= None, codes=None, uuids=None, dstype=None): """ q = self._session.query(DataSets) if ids: - q = q.filter(DataSets.DataSetID.in_(codes)) + q = q.filter(DataSets.DataSetID.in_(ids)) if codes: q = q.filter(DataSets.DataSetCode.in_(codes)) if uuids: @@ -761,7 +761,7 @@ def getDataSetsResults(self, ids= None, codes=None, uuids=None, dstype=None): q = self._session.query(DataSetsResults)\ .join(DataSets) if ids: - q = q.filter(DataSets.DataSetID.in_(codes)) + q = q.filter(DataSets.DataSetID.in_(ids)) if codes: q = q.filter(DataSets.DataSetCode.in_(codes)) if uuids: From 11b0c36c28557f41f0ff62f8c840f5f6382f3dd6 Mon Sep 17 00:00:00 2001 From: sreeder Date: Mon, 13 Nov 2017 09:53:03 -0700 Subject: [PATCH 13/55] update Sample.py --- Examples/Sample.py | 8 +------- tests/test_odm2/test_readservice.py | 20 ++++++++++++++++++++ 2 files changed, 21 insertions(+), 7 deletions(-) diff --git a/Examples/Sample.py b/Examples/Sample.py index 27a4a15..dcd8896 100644 --- a/Examples/Sample.py +++ b/Examples/Sample.py @@ -17,18 +17,15 @@ #connect to database # 
createconnection (dbtype, servername, dbname, username, password) # session_factory = dbconnection.createConnection('connection type: sqlite|mysql|mssql|postgresql', '/your/path/to/db/goes/here', 2.0)#sqlite -# session_factory = dbconnection.createConnection('postgresql', 'localhost', 'odm2', 'ODM', 'odm') +session_factory = dbconnection.createConnection('postgresql', 'localhost', 'odm2', 'ODM', 'odm') # session_factory = dbconnection.createConnection('mysql', 'localhost', 'odm2', 'ODM', 'odm')#mysql # session_factory= dbconnection.createConnection('mssql', "(local)", "ODM2", "ODM", "odm")#win MSSQL - # session_factory= dbconnection.createConnection('mssql', "arroyoodm2", "", "ODM", "odm")#mac/linux MSSQL # session_factory = dbconnection.createConnection('sqlite', '/Users/stephanie/DEV/YODA-Tools/tests/test_files/XL_specimen.sqlite', 2.0) -session_factory = dbconnection.createConnection('postgresql', 'odm2wofpy1.uwrl.usu.edu', 'odm2', 'dbadmin', 'pinkbananastastegross') - @@ -42,9 +39,6 @@ create = CreateODM2(session_factory) - - - # Run some basic sample queries. 
# ------------------------------ # Get all of the variables from the database and print their names to the console diff --git a/tests/test_odm2/test_readservice.py b/tests/test_odm2/test_readservice.py index 6ea9154..3311ab8 100644 --- a/tests/test_odm2/test_readservice.py +++ b/tests/test_odm2/test_readservice.py @@ -92,6 +92,26 @@ def test_getSamplingFeatureByID(self): resapi = self.reader.getSamplingFeatures(ids=[sfid]) assert resapi is not None +#DataSets + def test_getDataSets(self): + # get all datasets from the database + ds = self.engine.execute('SELECT * FROM DataSets').fetchone() + dsid = ds[0] + + dsapi = self.reader.getDataSets(ids=[dsid]) + assert dsapi is not None + assert True + + def test_getDataSetsResults(self): + # get all datasetresults from the database + dsr = self.engine.execute('SELECT * FROM DataSetsResults').fetchone() + dsid = dsr[2] + + dsrapi = self.reader.getDataSetsResults(ids=[dsid]) + assert dsrapi is not None + assert True + + # Models """ TABLE Models From 8a8ffe7d795cab657798c7149c9d4355e5c47bb5 Mon Sep 17 00:00:00 2001 From: sreeder Date: Mon, 13 Nov 2017 13:27:56 -0700 Subject: [PATCH 14/55] update tests, update DateTime data type to be compatible with sqlite, some BigInteger values in models.py were the incorrect type --- odm2api/ODM2/models.py | 58 +++++++++++--------- odm2api/ODM2/services/readService.py | 17 +++++- tests/test_odm2/data/populated.sql | 82 ++++++++++++++-------------- tests/test_odm2/test_readservice.py | 15 +++++ 4 files changed, 103 insertions(+), 69 deletions(-) diff --git a/odm2api/ODM2/models.py b/odm2api/ODM2/models.py index 7b6d5b2..f238659 100644 --- a/odm2api/ODM2/models.py +++ b/odm2api/ODM2/models.py @@ -2,9 +2,10 @@ from odm2api.base import modelBase -from sqlalchemy import BigInteger, Boolean, Column, Date, DateTime, Float, ForeignKey, Integer, String, case +from sqlalchemy import BigInteger, Boolean, Column, Date, DateTime, Float, ForeignKey, Integer, String, case, types, Table, event from 
sqlalchemy.dialects import mysql, postgresql, sqlite from sqlalchemy.orm import relationship +from datetime import datetime, timedelta Base = modelBase.Base @@ -13,6 +14,9 @@ BigIntegerType = BigIntegerType.with_variant(postgresql.BIGINT(), 'postgresql') BigIntegerType = BigIntegerType.with_variant(mysql.BIGINT(), 'mysql') +DateTimeType = DateTime() +DateTimeType = DateTimeType.with_variant(sqlite.INTEGER(), 'sqlite') + def is_hex(s): try: @@ -404,9 +408,9 @@ class Results(Base): ProcessingLevelID = Column('processinglevelid', ForeignKey(ProcessingLevels.ProcessingLevelID), nullable=False) ResultDateTime = Column('resultdatetime', DateTime) - ResultDateTimeUTCOffset = Column('resultdatetimeutcoffset', BigInteger) + ResultDateTimeUTCOffset = Column('resultdatetimeutcoffset', BigIntegerType) ValidDateTime = Column('validdatetime', DateTime) - ValidDateTimeUTCOffset = Column('validdatetimeutcoffset', BigInteger) + ValidDateTimeUTCOffset = Column('validdatetimeutcoffset', BigIntegerType) StatusCV = Column('statuscv', ForeignKey(CVStatus.Name), index=True) SampledMediumCV = Column('sampledmediumcv', ForeignKey(CVMediumType.Name), nullable=False, index=True) ValueCount = Column('valuecount', Integer, nullable=False) @@ -503,7 +507,7 @@ class InstrumentOutputVariables(Base): class DataLoggerFileColumns(Base): DataLoggerFileColumnID = Column('dataloggerfilecolumnid', Integer, primary_key=True, nullable=False) - ResultID = Column('resultid', BigInteger, ForeignKey(Results.ResultID)) + ResultID = Column('resultid', BigIntegerType, ForeignKey(Results.ResultID)) DataLoggerFileID = Column('dataloggerfileid', Integer, ForeignKey(DataLoggerFiles.DataLoggerFileID), nullable=False) InstrumentOutputVariableID = Column('instrumentoutputvariableid', Integer, @@ -861,7 +865,7 @@ class ActionAnnotations(Base): class EquipmentAnnotations(Base): BridgeID = Column('bridgeid', Integer, primary_key=True, nullable=False) - EquipmentID = Column('valueid', BigInteger, 
ForeignKey(Equipment.EquipmentID), nullable=False) + EquipmentID = Column('valueid', BigIntegerType, ForeignKey(Equipment.EquipmentID), nullable=False) AnnotationID = Column('annotationid', ForeignKey(Annotations.AnnotationID), nullable=False) AnnotationObj = relationship(Annotations) @@ -1640,7 +1644,7 @@ class CategoricalResultValues(Base): ValueID = Column('valueid', BigIntegerType, primary_key=True) ResultID = Column('resultid', ForeignKey(CategoricalResults.ResultID), nullable=False) DataValue = Column('datavalue', String(255), nullable=False) - ValueDateTime = Column('valuedatetime', DateTime, nullable=False) + ValueDateTime = Column('valuedatetime', DateTimeType, nullable=False) ValueDateTimeUTCOffset = Column('valuedatetimeutcoffset', Integer, nullable=False) ResultObj = relationship(CategoricalResults) @@ -1651,7 +1655,7 @@ class MeasurementResultValues(Base): ValueID = Column('valueid', BigIntegerType, primary_key=True) ResultID = Column('resultid', ForeignKey(MeasurementResults.ResultID), nullable=False) DataValue = Column('datavalue', Float(53), nullable=False) - ValueDateTime = Column('valuedatetime', DateTime, nullable=False) + ValueDateTime = Column('valuedatetime', DateTimeType, nullable=False) ValueDateTimeUTCOffset = Column('valuedatetimeutcoffset', Integer, nullable=False) ResultObj = relationship(MeasurementResults) @@ -1661,8 +1665,8 @@ class PointCoverageResultValues(Base): ValueID = Column('valueid', BigIntegerType, primary_key=True) ResultID = Column('resultid', ForeignKey(PointCoverageResults.ResultID), nullable=False) - DataValue = Column('datavalue', BigInteger, nullable=False) - ValueDateTime = Column('valuedatetime', DateTime, nullable=False) + DataValue = Column('datavalue', BigIntegerType, nullable=False) + ValueDateTime = Column('valuedatetime', DateTimeType, nullable=False) ValueDateTimeUTCOffset = Column('valuedatetimeutcoffset', Integer, nullable=False) XLocation = Column('xlocation', Float(53), nullable=False) XLocationUnitsID = 
Column('xlocationunitsid', ForeignKey(Units.UnitsID), nullable=False) @@ -1687,7 +1691,7 @@ class ProfileResultValues(Base): ValueID = Column('valueid', BigIntegerType, primary_key=True) ResultID = Column('resultid', ForeignKey(ProfileResults.ResultID), nullable=False) DataValue = Column('datavalue', Float(53), nullable=False) - ValueDateTime = Column('valuedatetime', DateTime, nullable=False) + ValueDateTime = Column('valuedatetime', DateTimeType, nullable=False) ValueDateTimeUTCOffset = Column('valuedatetimeutcoffset', Integer, nullable=False) ZLocation = Column('zlocation', Float(53), nullable=False) ZAggregationInterval = Column('zaggregationinterval', Float(53), nullable=False) @@ -1714,12 +1718,12 @@ class SectionResultValues(Base): ValueID = Column('valueid', BigIntegerType, primary_key=True) ResultID = Column('resultid', ForeignKey(SectionResults.ResultID), nullable=False) DataValue = Column('datavalue', Float(53), nullable=False) - ValueDateTime = Column('valuedatetime', BigInteger, nullable=False) - ValueDateTimeUTCOffset = Column('valuedatetimeutcoffset', BigInteger, nullable=False) + ValueDateTime = Column('valuedatetime', DateTimeType, nullable=False) + ValueDateTimeUTCOffset = Column('valuedatetimeutcoffset', Integer, nullable=False) XLocation = Column('xlocation', Float(53), nullable=False) XAggregationInterval = Column('xaggregationinterval', Float(53), nullable=False) XLocationUnitsID = Column('xlocationunitsid', ForeignKey(Units.UnitsID), nullable=False) - ZLocation = Column('zlocation', BigInteger, nullable=False) + ZLocation = Column('zlocation', BigIntegerType, nullable=False) ZAggregationInterval = Column('zaggregationinterval', Float(53), nullable=False) ZLocationUnitsID = Column('zlocationunitsid', ForeignKey(Units.UnitsID), nullable=False) CensorCodeCV = Column('censorcodecv', ForeignKey(CVCensorCode.Name), nullable=False, index=True) @@ -1750,7 +1754,7 @@ class SpectraResultValues(Base): ValueID = Column('valueid', BigIntegerType, 
primary_key=True) ResultID = Column('resultid', ForeignKey(SpectraResults.ResultID), nullable=False) DataValue = Column('datavalue', Float(53), nullable=False) - ValueDateTime = Column('valuedatetime', DateTime, nullable=False) + ValueDateTime = Column('valuedatetime', DateTimeType, nullable=False) ValueDateTimeUTCOffset = Column('valuedatetimeutcoffset', Integer, nullable=False) ExcitationWavelength = Column('excitationwavelength', Float(53), nullable=False) EmissionWavelength = Column('emmistionwavelength', Float(53), nullable=False) @@ -1779,7 +1783,7 @@ class TimeSeriesResultValues(Base): ValueID = Column('valueid', BigIntegerType, primary_key=True) ResultID = Column('resultid', ForeignKey(TimeSeriesResults.ResultID), nullable=False) DataValue = Column('datavalue', Float(53), nullable=False) - ValueDateTime = Column('valuedatetime', DateTime, nullable=False) + ValueDateTime = Column('valuedatetime', DateTimeType, nullable=False) ValueDateTimeUTCOffset = Column('valuedatetimeutcoffset', Integer, nullable=False) CensorCodeCV = Column('censorcodecv', ForeignKey(CVCensorCode.Name), nullable=False, index=True) QualityCodeCV = Column('qualitycodecv', ForeignKey(CVQualityCode.Name), nullable=False, index=True) @@ -1805,7 +1809,7 @@ class TrajectoryResultValues(Base): ValueID = Column('valueid', BigIntegerType, primary_key=True) ResultID = Column('resultid', ForeignKey(TrajectoryResults.ResultID), nullable=False) DataValue = Column('datavalue', Float(53), nullable=False) - ValueDateTime = Column('valuedatetime', DateTime, nullable=False) + ValueDateTime = Column('valuedatetime', DateTimeType, nullable=False) ValueDateTimeUTCOffset = Column('valuedatetimeutcoffset', Integer, nullable=False) XLocation = Column('xlocation', Float(53), nullable=False) XLocationUnitsID = Column('xlocationunitsid', ForeignKey(Units.UnitsID), nullable=False) @@ -1850,8 +1854,8 @@ class TransectResultValues(Base): ValueID = Column('valueid', BigIntegerType, primary_key=True) ResultID = 
Column('resultid', ForeignKey(TransectResults.ResultID), nullable=False) DataValue = Column('datavalue', Float(53), nullable=False) - ValueDateTime = Column('valuedatetime', DateTime, nullable=False) - ValueDateTimeUTCOffset = Column('valuedatetimeutcoffset', DateTime, nullable=False) + ValueDateTime = Column('valuedatetime', DateTimeType, nullable=False) + ValueDateTimeUTCOffset = Column('valuedatetimeutcoffset', Integer, nullable=False) XLocation = Column('xlocation', Float(53), nullable=False) XLocationUnitsID = Column('xlocationunitsid', ForeignKey(Units.UnitsID), nullable=False) YLocation = Column('ylocation', Float(53), nullable=False) @@ -1896,7 +1900,7 @@ class TransectResultValues(Base): class CategoricalResultValueAnnotations(Base): BridgeID = Column('bridgeid', Integer, primary_key=True, nullable=False) - ValueID = Column('valueid', BigInteger, ForeignKey(CategoricalResultValues.ValueID), nullable=False) + ValueID = Column('valueid', BigIntegerType, ForeignKey(CategoricalResultValues.ValueID), nullable=False) AnnotationID = Column('annotationid', ForeignKey(Annotations.AnnotationID), nullable=False) AnnotationObj = relationship(Annotations) @@ -1906,7 +1910,7 @@ class CategoricalResultValueAnnotations(Base): class MeasurementResultValueAnnotations(Base): BridgeID = Column('bridgeid', Integer, primary_key=True, nullable=False) - ValueID = Column('valueid', BigInteger, ForeignKey(MeasurementResultValues.ValueID), nullable=False) + ValueID = Column('valueid', BigIntegerType, ForeignKey(MeasurementResultValues.ValueID), nullable=False) AnnotationID = Column('annotationid', ForeignKey(Annotations.AnnotationID), nullable=False) AnnotationObj = relationship(Annotations) @@ -1916,7 +1920,7 @@ class MeasurementResultValueAnnotations(Base): class PointCoverageResultValueAnnotations(Base): BridgeID = Column('bridgeid', Integer, primary_key=True, nullable=False) - ValueID = Column('valueid', BigInteger, ForeignKey(PointCoverageResultValues.ValueID), nullable=False) 
+ ValueID = Column('valueid', BigIntegerType, ForeignKey(PointCoverageResultValues.ValueID), nullable=False) AnnotationID = Column('annotationid', ForeignKey(Annotations.AnnotationID), nullable=False) AnnotationObj = relationship(Annotations) @@ -1926,7 +1930,7 @@ class PointCoverageResultValueAnnotations(Base): class ProfileResultValueAnnotations(Base): BridgeID = Column('bridgeid', Integer, primary_key=True, nullable=False) - ValueID = Column('valueid', BigInteger, ForeignKey(ProfileResultValues.ValueID), nullable=False) + ValueID = Column('valueid', BigIntegerType, ForeignKey(ProfileResultValues.ValueID), nullable=False) AnnotationID = Column('annotationid', ForeignKey(Annotations.AnnotationID), nullable=False) AnnotationObj = relationship(Annotations) @@ -1936,7 +1940,7 @@ class ProfileResultValueAnnotations(Base): class SectionResultValueAnnotations(Base): BridgeID = Column('bridgeid', Integer, primary_key=True, nullable=False) - ValueID = Column('valueid', BigInteger, ForeignKey(SectionResultValues.ValueID), nullable=False) + ValueID = Column('valueid', BigIntegerType, ForeignKey(SectionResultValues.ValueID), nullable=False) AnnotationID = Column('annotationid', ForeignKey(Annotations.AnnotationID), nullable=False) AnnotationObj = relationship(Annotations) @@ -1946,7 +1950,7 @@ class SectionResultValueAnnotations(Base): class SpectraResultValueAnnotations(Base): BridgeID = Column('bridgeid', Integer, primary_key=True, nullable=False) - ValueID = Column('valueid', BigInteger, ForeignKey(SpectraResultValues.ValueID), nullable=False) + ValueID = Column('valueid', BigIntegerType, ForeignKey(SpectraResultValues.ValueID), nullable=False) AnnotationID = Column('annotationid', ForeignKey(Annotations.AnnotationID), nullable=False) AnnotationObj = relationship(Annotations) @@ -1956,7 +1960,7 @@ class SpectraResultValueAnnotations(Base): class TimeSeriesResultValueAnnotations(Base): BridgeID = Column('bridgeid', Integer, primary_key=True, nullable=False) - ValueID = 
Column('valueid', BigInteger, ForeignKey(TimeSeriesResultValues.ValueID), nullable=False) + ValueID = Column('valueid', BigIntegerType, ForeignKey(TimeSeriesResultValues.ValueID), nullable=False) AnnotationID = Column('annotationid', ForeignKey(Annotations.AnnotationID), nullable=False) AnnotationObj = relationship(Annotations) @@ -1966,7 +1970,7 @@ class TimeSeriesResultValueAnnotations(Base): class TrajectoryResultValueAnnotations(Base): BridgeID = Column('bridgeid', Integer, primary_key=True, nullable=False) - ValueID = Column('valueid', BigInteger, ForeignKey(TrajectoryResultValues.ValueID), nullable=False) + ValueID = Column('valueid', BigIntegerType, ForeignKey(TrajectoryResultValues.ValueID), nullable=False) AnnotationID = Column('annotationid', ForeignKey(Annotations.AnnotationID), nullable=False) AnnotationObj = relationship(Annotations) @@ -1976,7 +1980,7 @@ class TrajectoryResultValueAnnotations(Base): class TransectResultValueAnnotations(Base): BridgeID = Column('bridgeid', Integer, primary_key=True, nullable=False) - ValueID = Column('valueid', BigInteger, ForeignKey(TransectResultValues.ValueID), nullable=False) + ValueID = Column('valueid', BigIntegerType, ForeignKey(TransectResultValues.ValueID), nullable=False) AnnotationID = Column('annotationid', ForeignKey(Annotations.AnnotationID), nullable=False) AnnotationObj = relationship(Annotations) diff --git a/odm2api/ODM2/services/readService.py b/odm2api/ODM2/services/readService.py index 2e18772..c921759 100644 --- a/odm2api/ODM2/services/readService.py +++ b/odm2api/ODM2/services/readService.py @@ -774,6 +774,21 @@ def getDataSetsResults(self, ids= None, codes=None, uuids=None, dstype=None): print('Error running Query {}'.format(e)) return None + def getDataSetsValues(self, ids=None, codes=None, uuids=None, dstype=None): + + dsr = self.getDataSetsResults(ids, codes, uuids, dstype) + + resids = [] + for ds in dsr: + resids.append(ds.ResultID) + + try: + return self.getResultValues(resultids = resids) 
+ except Exception as e: + print('Error running Query {}'.format(e)) + return None + + def getSamplingFeatureDatasets(self, ids=None, codes=None, uuids=None, dstype=None): """ Retrieve a list of Datasets associated with the given sampling feature data. @@ -1133,7 +1148,7 @@ def getResultValues(self, resultids, starttime=None, endtime=None): """ type = self._session.query(Results).filter_by(ResultID=resultids[0]).first().ResultTypeCV - ResultType = TimeSeriesResults + ResultType = TimeSeriesResultValues if 'categorical' in type.lower(): ResultType = CategoricalResultValues elif 'measurement' in type.lower(): diff --git a/tests/test_odm2/data/populated.sql b/tests/test_odm2/data/populated.sql index 84a1e23..c9f8689 100644 --- a/tests/test_odm2/data/populated.sql +++ b/tests/test_odm2/data/populated.sql @@ -11541,47 +11541,47 @@ INSERT INTO "TimeSeriesResultValues" VALUES(9982,1,11.9,'2014-12-31 23:30:00',-7 INSERT INTO "TimeSeriesResultValues" VALUES(9983,1,11.9,'2014-12-31 23:45:00',-7,'nc','Unknown',0.0,102); INSERT INTO "TimeSeriesResultValues" VALUES(9984,1,11.9,'2014-12-31 23:45:00',-7,'nc','Unknown',0.0,102); INSERT INTO "TimeSeriesResultValues" VALUES(9985,1,11.9,'2015-01-01 00:00:00',-7,'nc','Unknown',0.0,102); -INSERT INTO "TimeSeriesResultValues" VALUES(9986,1,184.0,'2013-06-15',-6,'nc','provisional',0.0,204); -INSERT INTO "TimeSeriesResultValues" VALUES(9987,1,200.0,'2013-05-06',-6,'nc','provisional',0.0,204); -INSERT INTO "TimeSeriesResultValues" VALUES(9988,1,201.0,'2013-06-14',-6,'nc','provisional',0.0,204); -INSERT INTO "TimeSeriesResultValues" VALUES(9989,1,208.0,'2013-06-13',-6,'nc','provisional',0.0,204); -INSERT INTO "TimeSeriesResultValues" VALUES(9990,1,214.0,'2013-05-07',-6,'nc','provisional',0.0,204); -INSERT INTO "TimeSeriesResultValues" VALUES(9991,1,221.0,'2013-06-12',-6,'nc','provisional',0.0,204); -INSERT INTO "TimeSeriesResultValues" VALUES(9992,1,229.0,'2013-05-08',-6,'nc','provisional',0.0,204); -INSERT INTO "TimeSeriesResultValues" 
VALUES(9993,1,233.0,'2013-05-09',-6,'nc','provisional',0.0,204); -INSERT INTO "TimeSeriesResultValues" VALUES(9994,1,242.0,'2013-06-06',-6,'nc','provisional',0.0,204); -INSERT INTO "TimeSeriesResultValues" VALUES(9995,1,243.0,'2013-06-07',-6,'nc','provisional',0.0,204); -INSERT INTO "TimeSeriesResultValues" VALUES(9996,1,245.0,'2013-06-11',-6,'nc','provisional',0.0,204); -INSERT INTO "TimeSeriesResultValues" VALUES(9997,1,249.0,'2013-06-02',-6,'nc','provisional',0.0,204); -INSERT INTO "TimeSeriesResultValues" VALUES(9998,1,251.0,'2013-06-08',-6,'nc','provisional',0.0,204); -INSERT INTO "TimeSeriesResultValues" VALUES(9999,1,254.0,'2013-06-01',-6,'nc','provisional',0.0,204); -INSERT INTO "TimeSeriesResultValues" VALUES(10000,1,256.0,'2013-06-10',-6,'nc','provisional',0.0,204); -INSERT INTO "TimeSeriesResultValues" VALUES(10001,1,258.0,'2013-06-05',-6,'nc','provisional',0.0,204); -INSERT INTO "TimeSeriesResultValues" VALUES(10002,1,260.0,'2013-06-09',-6,'nc','provisional',0.0,204); -INSERT INTO "TimeSeriesResultValues" VALUES(10003,1,263.0,'2013-06-03',-6,'nc','provisional',0.0,204); -INSERT INTO "TimeSeriesResultValues" VALUES(10004,1,265.0,'2013-05-10',-6,'nc','provisional',0.0,204); -INSERT INTO "TimeSeriesResultValues" VALUES(10005,1,265.0,'2013-06-04',-6,'nc','provisional',0.0,204); -INSERT INTO "TimeSeriesResultValues" VALUES(10006,1,270.0,'2013-05-31',-6,'nc','provisional',0.0,204); -INSERT INTO "TimeSeriesResultValues" VALUES(10007,1,291.0,'2013-05-11',-6,'nc','provisional',0.0,204); -INSERT INTO "TimeSeriesResultValues" VALUES(10008,1,306.0,'2013-05-30',-6,'nc','provisional',0.0,204); -INSERT INTO "TimeSeriesResultValues" VALUES(10009,1,315.0,'2013-05-27',-6,'nc','provisional',0.0,204); -INSERT INTO "TimeSeriesResultValues" VALUES(10010,1,316.0,'2013-05-12',-6,'nc','provisional',0.0,204); -INSERT INTO "TimeSeriesResultValues" VALUES(10011,1,317.0,'2013-05-29',-6,'nc','provisional',0.0,204); -INSERT INTO "TimeSeriesResultValues" 
VALUES(10012,1,320.0,'2013-05-25',-6,'nc','provisional',0.0,204); -INSERT INTO "TimeSeriesResultValues" VALUES(10013,1,321.0,'2013-05-28',-6,'nc','provisional',0.0,204); -INSERT INTO "TimeSeriesResultValues" VALUES(10014,1,322.0,'2013-05-26',-6,'nc','provisional',0.0,204); -INSERT INTO "TimeSeriesResultValues" VALUES(10015,1,338.0,'2013-05-24',-6,'nc','provisional',0.0,204); -INSERT INTO "TimeSeriesResultValues" VALUES(10016,1,341.0,'2013-05-22',-6,'nc','provisional',0.0,204); -INSERT INTO "TimeSeriesResultValues" VALUES(10017,1,346.0,'2013-05-21',-6,'nc','provisional',0.0,204); -INSERT INTO "TimeSeriesResultValues" VALUES(10018,1,347.0,'2013-05-23',-6,'nc','provisional',0.0,204); -INSERT INTO "TimeSeriesResultValues" VALUES(10019,1,358.0,'2013-05-13',-6,'nc','provisional',0.0,204); -INSERT INTO "TimeSeriesResultValues" VALUES(10020,1,367.0,'2013-05-20',-6,'nc','provisional',0.0,204); -INSERT INTO "TimeSeriesResultValues" VALUES(10021,1,393.0,'2013-05-19',-6,'nc','provisional',0.0,204); -INSERT INTO "TimeSeriesResultValues" VALUES(10022,1,422.0,'2013-05-14',-6,'nc','provisional',0.0,204); -INSERT INTO "TimeSeriesResultValues" VALUES(10023,1,453.0,'2013-05-18',-6,'nc','provisional',0.0,204); -INSERT INTO "TimeSeriesResultValues" VALUES(10024,1,458.0,'2013-05-15',-6,'nc','provisional',0.0,204); -INSERT INTO "TimeSeriesResultValues" VALUES(10025,1,478.0,'2013-05-16',-6,'nc','provisional',0.0,204); -INSERT INTO "TimeSeriesResultValues" VALUES(10026,1,480.0,'2013-05-17',-6,'nc','provisional',0.0,204); +INSERT INTO "TimeSeriesResultValues" VALUES(9986,1,184.0,'2013-06-15 00:00:00',-6,'nc','provisional',0.0,204); +INSERT INTO "TimeSeriesResultValues" VALUES(9987,1,200.0,'2013-05-06 00:00:00',-6,'nc','provisional',0.0,204); +INSERT INTO "TimeSeriesResultValues" VALUES(9988,1,201.0,'2013-06-14 00:00:00',-6,'nc','provisional',0.0,204); +INSERT INTO "TimeSeriesResultValues" VALUES(9989,1,208.0,'2013-06-13 00:00:00',-6,'nc','provisional',0.0,204); +INSERT INTO 
"TimeSeriesResultValues" VALUES(9990,1,214.0,'2013-05-07 00:00:00',-6,'nc','provisional',0.0,204); +INSERT INTO "TimeSeriesResultValues" VALUES(9991,1,221.0,'2013-06-12 00:00:00',-6,'nc','provisional',0.0,204); +INSERT INTO "TimeSeriesResultValues" VALUES(9992,1,229.0,'2013-05-08 00:00:00',-6,'nc','provisional',0.0,204); +INSERT INTO "TimeSeriesResultValues" VALUES(9993,1,233.0,'2013-05-09 00:00:00',-6,'nc','provisional',0.0,204); +INSERT INTO "TimeSeriesResultValues" VALUES(9994,1,242.0,'2013-06-06 00:00:00',-6,'nc','provisional',0.0,204); +INSERT INTO "TimeSeriesResultValues" VALUES(9995,1,243.0,'2013-06-07 00:00:00',-6,'nc','provisional',0.0,204); +INSERT INTO "TimeSeriesResultValues" VALUES(9996,1,245.0,'2013-06-11 00:00:00',-6,'nc','provisional',0.0,204); +INSERT INTO "TimeSeriesResultValues" VALUES(9997,1,249.0,'2013-06-02 00:00:00',-6,'nc','provisional',0.0,204); +INSERT INTO "TimeSeriesResultValues" VALUES(9998,1,251.0,'2013-06-08 00:00:00',-6,'nc','provisional',0.0,204); +INSERT INTO "TimeSeriesResultValues" VALUES(9999,1,254.0,'2013-06-01 00:00:00',-6,'nc','provisional',0.0,204); +INSERT INTO "TimeSeriesResultValues" VALUES(10000,1,256.0,'2013-06-10 00:00:00',-6,'nc','provisional',0.0,204); +INSERT INTO "TimeSeriesResultValues" VALUES(10001,1,258.0,'2013-06-05 00:00:00',-6,'nc','provisional',0.0,204); +INSERT INTO "TimeSeriesResultValues" VALUES(10002,1,260.0,'2013-06-09 00:00:00',-6,'nc','provisional',0.0,204); +INSERT INTO "TimeSeriesResultValues" VALUES(10003,1,263.0,'2013-06-03 00:00:00',-6,'nc','provisional',0.0,204); +INSERT INTO "TimeSeriesResultValues" VALUES(10004,1,265.0,'2013-05-10 00:00:00',-6,'nc','provisional',0.0,204); +INSERT INTO "TimeSeriesResultValues" VALUES(10005,1,265.0,'2013-06-04 00:00:00',-6,'nc','provisional',0.0,204); +INSERT INTO "TimeSeriesResultValues" VALUES(10006,1,270.0,'2013-05-31 00:00:00',-6,'nc','provisional',0.0,204); +INSERT INTO "TimeSeriesResultValues" VALUES(10007,1,291.0,'2013-05-11 
00:00:00',-6,'nc','provisional',0.0,204); +INSERT INTO "TimeSeriesResultValues" VALUES(10008,1,306.0,'2013-05-30 00:00:00',-6,'nc','provisional',0.0,204); +INSERT INTO "TimeSeriesResultValues" VALUES(10009,1,315.0,'2013-05-27 00:00:00',-6,'nc','provisional',0.0,204); +INSERT INTO "TimeSeriesResultValues" VALUES(10010,1,316.0,'2013-05-12 00:00:00',-6,'nc','provisional',0.0,204); +INSERT INTO "TimeSeriesResultValues" VALUES(10011,1,317.0,'2013-05-29 00:00:00',-6,'nc','provisional',0.0,204); +INSERT INTO "TimeSeriesResultValues" VALUES(10012,1,320.0,'2013-05-25 00:00:00',-6,'nc','provisional',0.0,204); +INSERT INTO "TimeSeriesResultValues" VALUES(10013,1,321.0,'2013-05-28 00:00:00',-6,'nc','provisional',0.0,204); +INSERT INTO "TimeSeriesResultValues" VALUES(10014,1,322.0,'2013-05-26 00:00:00',-6,'nc','provisional',0.0,204); +INSERT INTO "TimeSeriesResultValues" VALUES(10015,1,338.0,'2013-05-24 00:00:00',-6,'nc','provisional',0.0,204); +INSERT INTO "TimeSeriesResultValues" VALUES(10016,1,341.0,'2013-05-22 00:00:00',-6,'nc','provisional',0.0,204); +INSERT INTO "TimeSeriesResultValues" VALUES(10017,1,346.0,'2013-05-21 00:00:00',-6,'nc','provisional',0.0,204); +INSERT INTO "TimeSeriesResultValues" VALUES(10018,1,347.0,'2013-05-23 00:00:00',-6,'nc','provisional',0.0,204); +INSERT INTO "TimeSeriesResultValues" VALUES(10019,1,358.0,'2013-05-13 00:00:00',-6,'nc','provisional',0.0,204); +INSERT INTO "TimeSeriesResultValues" VALUES(10020,1,367.0,'2013-05-20 00:00:00',-6,'nc','provisional',0.0,204); +INSERT INTO "TimeSeriesResultValues" VALUES(10021,1,393.0,'2013-05-19 00:00:00',-6,'nc','provisional',0.0,204); +INSERT INTO "TimeSeriesResultValues" VALUES(10022,1,422.0,'2013-05-14 00:00:00',-6,'nc','provisional',0.0,204); +INSERT INTO "TimeSeriesResultValues" VALUES(10023,1,453.0,'2013-05-18 00:00:00',-6,'nc','provisional',0.0,204); +INSERT INTO "TimeSeriesResultValues" VALUES(10024,1,458.0,'2013-05-15 00:00:00',-6,'nc','provisional',0.0,204); +INSERT INTO 
"TimeSeriesResultValues" VALUES(10025,1,478.0,'2013-05-16 00:00:00',-6,'nc','provisional',0.0,204); +INSERT INTO "TimeSeriesResultValues" VALUES(10026,1,480.0,'2013-05-17 00:00:00',-6,'nc','provisional',0.0,204); INSERT INTO "TimeSeriesResultValues" VALUES(10027,2,0.0,'02/07/2013 00:00:00',-5,'nc','provisional',1.0,206); INSERT INTO "TimeSeriesResultValues" VALUES(10028,2,0.254,'02/07/2013 00:01:00',-5,'nc','provisional',1.0,206); INSERT INTO "TimeSeriesResultValues" VALUES(10029,2,0.254,'02/07/2013 00:02:00',-5,'nc','provisional',1.0,206); diff --git a/tests/test_odm2/test_readservice.py b/tests/test_odm2/test_readservice.py index 3311ab8..1675929 100644 --- a/tests/test_odm2/test_readservice.py +++ b/tests/test_odm2/test_readservice.py @@ -111,6 +111,21 @@ def test_getDataSetsResults(self): assert dsrapi is not None assert True + def test_getDataSetsValues(self): + + dsr = self.engine.execute('SELECT * FROM DataSetsResults').fetchone() + dsid = dsr[2] + + values= self.reader.getDataSetsValues(ids=[dsid]) + assert values is not None + assert len(values) > 0 + + + + #ToDo figure out how to actually test this function + def test_getSamplingFeatureDataSets(self): + assert True + # Models """ From 120008d29e7cfbbe72bc222c5ba2a6779529c033 Mon Sep 17 00:00:00 2001 From: sreeder Date: Mon, 13 Nov 2017 13:30:32 -0700 Subject: [PATCH 15/55] fix sample.py file --- Examples/Sample.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/Examples/Sample.py b/Examples/Sample.py index dcd8896..b441620 100644 --- a/Examples/Sample.py +++ b/Examples/Sample.py @@ -28,7 +28,9 @@ - +#_session = session_factory.getSession() +read = ReadODM2(session_factory) +create = CreateODM2(session_factory) From a88c8aec7410e65f54f4135b8b7c1e0796cdfcba Mon Sep 17 00:00:00 2001 From: sreeder Date: Mon, 13 Nov 2017 13:31:06 -0700 Subject: [PATCH 16/55] accidentally duplicated code --- Examples/Sample.py | 8 -------- 1 file changed, 8 deletions(-) diff --git a/Examples/Sample.py 
b/Examples/Sample.py index b441620..e0a315f 100644 --- a/Examples/Sample.py +++ b/Examples/Sample.py @@ -28,14 +28,6 @@ -#_session = session_factory.getSession() -read = ReadODM2(session_factory) -create = CreateODM2(session_factory) - - - - - #_session = session_factory.getSession() read = ReadODM2(session_factory) create = CreateODM2(session_factory) From 9624cc5b8e74da79e45fe3b0b11743814e58ee87 Mon Sep 17 00:00:00 2001 From: sreeder Date: Mon, 13 Nov 2017 13:31:52 -0700 Subject: [PATCH 17/55] remove unused imports in models.py --- odm2api/ODM2/models.py | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/odm2api/ODM2/models.py b/odm2api/ODM2/models.py index f238659..1b7a049 100644 --- a/odm2api/ODM2/models.py +++ b/odm2api/ODM2/models.py @@ -2,11 +2,9 @@ from odm2api.base import modelBase -from sqlalchemy import BigInteger, Boolean, Column, Date, DateTime, Float, ForeignKey, Integer, String, case, types, Table, event +from sqlalchemy import BigInteger, Boolean, Column, Date, DateTime, Float, ForeignKey, Integer, String, case from sqlalchemy.dialects import mysql, postgresql, sqlite from sqlalchemy.orm import relationship -from datetime import datetime, timedelta - Base = modelBase.Base BigIntegerType = BigInteger() From a4febb43a430f90b08e0f03c29e73e2e52117c7d Mon Sep 17 00:00:00 2001 From: sreeder Date: Mon, 13 Nov 2017 14:17:20 -0700 Subject: [PATCH 18/55] fix issue with getSamplingFeatureDatasets --- odm2api/ODM2/services/readService.py | 8 ++++--- tests/test_odm2/data/populated.sql | 28 +++++++++++------------ tests/test_odm2/test_readservice.py | 33 ++++++++++++++++++++++++---- 3 files changed, 48 insertions(+), 21 deletions(-) diff --git a/odm2api/ODM2/services/readService.py b/odm2api/ODM2/services/readService.py index c921759..66bfbca 100644 --- a/odm2api/ODM2/services/readService.py +++ b/odm2api/ODM2/services/readService.py @@ -632,7 +632,8 @@ def getResults(self, ids=None, type=None, uuids=None, actionid=None, simulationi simulationid 
(int, optional): SimulationID. sfid (int, optional): SamplingFeatureID. variableid (int, optional): VariableID. - siteid (int, optional): SiteID. + siteid (int, optional): SiteID. - goes through related features table and finds all of the measurement + values recorded at the given site Returns: list: List of Result objects @@ -829,8 +830,9 @@ def getSamplingFeatureDatasets(self, ids=None, codes=None, uuids=None, dstype=No sf_query = sf_query.filter(SamplingFeatures.SamplingFeatureCode.in_(codes)) if uuids: sf_query = sf_query.filter(SamplingFeatures.SamplingFeatureUUID.in_(uuids)) - sf_list = sf_query.all() - + sf_list = [] + for sf in sf_query.all(): + sf_list.append(sf[0]) q = self._session.query(DataSetsResults)\ .join(Results)\ diff --git a/tests/test_odm2/data/populated.sql b/tests/test_odm2/data/populated.sql index c9f8689..8188a01 100644 --- a/tests/test_odm2/data/populated.sql +++ b/tests/test_odm2/data/populated.sql @@ -11568,20 +11568,20 @@ INSERT INTO "TimeSeriesResultValues" VALUES(10009,1,315.0,'2013-05-27 00:00:00', INSERT INTO "TimeSeriesResultValues" VALUES(10010,1,316.0,'2013-05-12 00:00:00',-6,'nc','provisional',0.0,204); INSERT INTO "TimeSeriesResultValues" VALUES(10011,1,317.0,'2013-05-29 00:00:00',-6,'nc','provisional',0.0,204); INSERT INTO "TimeSeriesResultValues" VALUES(10012,1,320.0,'2013-05-25 00:00:00',-6,'nc','provisional',0.0,204); -INSERT INTO "TimeSeriesResultValues" VALUES(10013,1,321.0,'2013-05-28 00:00:00',-6,'nc','provisional',0.0,204); -INSERT INTO "TimeSeriesResultValues" VALUES(10014,1,322.0,'2013-05-26 00:00:00',-6,'nc','provisional',0.0,204); -INSERT INTO "TimeSeriesResultValues" VALUES(10015,1,338.0,'2013-05-24 00:00:00',-6,'nc','provisional',0.0,204); -INSERT INTO "TimeSeriesResultValues" VALUES(10016,1,341.0,'2013-05-22 00:00:00',-6,'nc','provisional',0.0,204); -INSERT INTO "TimeSeriesResultValues" VALUES(10017,1,346.0,'2013-05-21 00:00:00',-6,'nc','provisional',0.0,204); -INSERT INTO "TimeSeriesResultValues" 
VALUES(10018,1,347.0,'2013-05-23 00:00:00',-6,'nc','provisional',0.0,204); -INSERT INTO "TimeSeriesResultValues" VALUES(10019,1,358.0,'2013-05-13 00:00:00',-6,'nc','provisional',0.0,204); -INSERT INTO "TimeSeriesResultValues" VALUES(10020,1,367.0,'2013-05-20 00:00:00',-6,'nc','provisional',0.0,204); -INSERT INTO "TimeSeriesResultValues" VALUES(10021,1,393.0,'2013-05-19 00:00:00',-6,'nc','provisional',0.0,204); -INSERT INTO "TimeSeriesResultValues" VALUES(10022,1,422.0,'2013-05-14 00:00:00',-6,'nc','provisional',0.0,204); -INSERT INTO "TimeSeriesResultValues" VALUES(10023,1,453.0,'2013-05-18 00:00:00',-6,'nc','provisional',0.0,204); -INSERT INTO "TimeSeriesResultValues" VALUES(10024,1,458.0,'2013-05-15 00:00:00',-6,'nc','provisional',0.0,204); -INSERT INTO "TimeSeriesResultValues" VALUES(10025,1,478.0,'2013-05-16 00:00:00',-6,'nc','provisional',0.0,204); -INSERT INTO "TimeSeriesResultValues" VALUES(10026,1,480.0,'2013-05-17 00:00:00',-6,'nc','provisional',0.0,204); +INSERT INTO "TimeSeriesResultValues" VALUES(10013,1,321.0,'2013-05-28',-6,'nc','provisional',0.0,204); +INSERT INTO "TimeSeriesResultValues" VALUES(10014,1,322.0,'2013-05-26',-6,'nc','provisional',0.0,204); +INSERT INTO "TimeSeriesResultValues" VALUES(10015,1,338.0,'2013-05-24',-6,'nc','provisional',0.0,204); +INSERT INTO "TimeSeriesResultValues" VALUES(10016,1,341.0,'2013-05-22',-6,'nc','provisional',0.0,204); +INSERT INTO "TimeSeriesResultValues" VALUES(10017,1,346.0,'2013-05-21',-6,'nc','provisional',0.0,204); +INSERT INTO "TimeSeriesResultValues" VALUES(10018,1,347.0,'2013-05-23',-6,'nc','provisional',0.0,204); +INSERT INTO "TimeSeriesResultValues" VALUES(10019,1,358.0,'2013-05-13',-6,'nc','provisional',0.0,204); +INSERT INTO "TimeSeriesResultValues" VALUES(10020,1,367.0,'2013-05-20',-6,'nc','provisional',0.0,204); +INSERT INTO "TimeSeriesResultValues" VALUES(10021,1,393.0,'2013-05-19',-6,'nc','provisional',0.0,204); +INSERT INTO "TimeSeriesResultValues" 
VALUES(10022,1,422.0,'2013-05-14',-6,'nc','provisional',0.0,204); +INSERT INTO "TimeSeriesResultValues" VALUES(10023,1,453.0,'2013-05-18',-6,'nc','provisional',0.0,204); +INSERT INTO "TimeSeriesResultValues" VALUES(10024,1,458.0,'2013-05-15',-6,'nc','provisional',0.0,204); +INSERT INTO "TimeSeriesResultValues" VALUES(10025,1,478.0,'2013-05-16',-6,'nc','provisional',0.0,204); +INSERT INTO "TimeSeriesResultValues" VALUES(10026,1,480.0,'2013-05-17',-6,'nc','provisional',0.0,204); INSERT INTO "TimeSeriesResultValues" VALUES(10027,2,0.0,'02/07/2013 00:00:00',-5,'nc','provisional',1.0,206); INSERT INTO "TimeSeriesResultValues" VALUES(10028,2,0.254,'02/07/2013 00:01:00',-5,'nc','provisional',1.0,206); INSERT INTO "TimeSeriesResultValues" VALUES(10029,2,0.254,'02/07/2013 00:02:00',-5,'nc','provisional',1.0,206); diff --git a/tests/test_odm2/test_readservice.py b/tests/test_odm2/test_readservice.py index 1675929..a2ce719 100644 --- a/tests/test_odm2/test_readservice.py +++ b/tests/test_odm2/test_readservice.py @@ -124,7 +124,32 @@ def test_getDataSetsValues(self): #ToDo figure out how to actually test this function def test_getSamplingFeatureDataSets(self): - assert True + + #find a sampling feature that is associated with a dataset + sf = self.engine.execute( + 'SELECT * from SamplingFeatures as sf ' + 'inner join FeatureActions as fa on fa.SamplingFeatureID == sf.SamplingFeatureID ' + 'inner join Results as r on fa.FeatureActionID == r.FeatureActionID ' + 'inner join DataSetsResults as ds on r.ResultID == ds.ResultID ' + ).fetchone() + assert len(sf) > 0 + + #get the dataset associated with the sampling feature + ds = self.engine.execute( + 'SELECT * from DataSetsResults as ds ' + 'inner join Results as r on r.ResultID == ds.ResultID ' + 'inner join FeatureActions as fa on fa.FeatureActionID == r.FeatureActionID ' + 'where fa.SamplingFeatureID = ' + str(sf[0]) + ).fetchone() + assert len(ds) > 0 + + print (sf[0]) + # get the dataset associated with the sampling feature 
using hte api + dsapi = self.reader.getSamplingFeatureDatasets(ids=[sf[0]]) + + assert dsapi is not None + assert len(dsapi) > 0 + assert ds[1] == dsapi[0].DataSetID # Models @@ -178,7 +203,7 @@ def test_getRelatedModelsByCode(self): resapi = self.reader.getRelatedModels(code='swat') assert resapi is not None assert len(resapi) > 0 - print(resapi[0].ModelCode) + # print(resapi[0].ModelCode) assert resapi[0].ModelCode == 'swat' # test converter code that doesn't exist resapi = self.reader.getRelatedModels(code='None') @@ -213,7 +238,7 @@ def test_getRelatedModelsByCode(self): def test_getAllResults(self): # get all results from the database res = self.engine.execute('SELECT * FROM Results').fetchall() - print(res) + # print(res) # get all results using the api resapi = self.reader.getResults() assert len(res) == len(resapi) @@ -281,7 +306,7 @@ def test_getResultsBySimulationID(self): ).first() assert len(res) > 0 res = rawSql2Alchemy(res, models.Results) - print(res) + # print(res) # get simulation by id using the api # resapi = self.reader.getResultsBySimulationID(simulation.SimulationID) From 9401656d6a56a964c68d9047c94276d2a56c422d Mon Sep 17 00:00:00 2001 From: sreeder Date: Mon, 13 Nov 2017 14:18:01 -0700 Subject: [PATCH 19/55] fix issue with sf in datasets --- odm2api/ODM2/services/readService.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/odm2api/ODM2/services/readService.py b/odm2api/ODM2/services/readService.py index 66bfbca..c003019 100644 --- a/odm2api/ODM2/services/readService.py +++ b/odm2api/ODM2/services/readService.py @@ -822,7 +822,7 @@ def getSamplingFeatureDatasets(self, ids=None, codes=None, uuids=None, dstype=No if all(v is None for v in [ids, codes, uuids]): raise ValueError('Expected samplingFeatureID OR samplingFeatureUUID OR samplingFeatureCode argument') - sf_query = self._session.query(SamplingFeatures.SamplingFeatureID) + sf_query = self._session.query(SamplingFeatures) if ids: sf_query = 
sf_query.filter(SamplingFeatures.SamplingFeatureID.in_(ids)) @@ -832,7 +832,7 @@ def getSamplingFeatureDatasets(self, ids=None, codes=None, uuids=None, dstype=No sf_query = sf_query.filter(SamplingFeatures.SamplingFeatureUUID.in_(uuids)) sf_list = [] for sf in sf_query.all(): - sf_list.append(sf[0]) + sf_list.append(sf.SamplingFeatureID) q = self._session.query(DataSetsResults)\ .join(Results)\ From 91ee0c30d2905c2bcead055aef1e1c8910cbde6b Mon Sep 17 00:00:00 2001 From: sreeder Date: Mon, 13 Nov 2017 14:19:56 -0700 Subject: [PATCH 20/55] undo changes made to test sql query --- tests/test_odm2/data/populated.sql | 54 +++++++++++++++--------------- 1 file changed, 27 insertions(+), 27 deletions(-) diff --git a/tests/test_odm2/data/populated.sql b/tests/test_odm2/data/populated.sql index 8188a01..84a1e23 100644 --- a/tests/test_odm2/data/populated.sql +++ b/tests/test_odm2/data/populated.sql @@ -11541,33 +11541,33 @@ INSERT INTO "TimeSeriesResultValues" VALUES(9982,1,11.9,'2014-12-31 23:30:00',-7 INSERT INTO "TimeSeriesResultValues" VALUES(9983,1,11.9,'2014-12-31 23:45:00',-7,'nc','Unknown',0.0,102); INSERT INTO "TimeSeriesResultValues" VALUES(9984,1,11.9,'2014-12-31 23:45:00',-7,'nc','Unknown',0.0,102); INSERT INTO "TimeSeriesResultValues" VALUES(9985,1,11.9,'2015-01-01 00:00:00',-7,'nc','Unknown',0.0,102); -INSERT INTO "TimeSeriesResultValues" VALUES(9986,1,184.0,'2013-06-15 00:00:00',-6,'nc','provisional',0.0,204); -INSERT INTO "TimeSeriesResultValues" VALUES(9987,1,200.0,'2013-05-06 00:00:00',-6,'nc','provisional',0.0,204); -INSERT INTO "TimeSeriesResultValues" VALUES(9988,1,201.0,'2013-06-14 00:00:00',-6,'nc','provisional',0.0,204); -INSERT INTO "TimeSeriesResultValues" VALUES(9989,1,208.0,'2013-06-13 00:00:00',-6,'nc','provisional',0.0,204); -INSERT INTO "TimeSeriesResultValues" VALUES(9990,1,214.0,'2013-05-07 00:00:00',-6,'nc','provisional',0.0,204); -INSERT INTO "TimeSeriesResultValues" VALUES(9991,1,221.0,'2013-06-12 00:00:00',-6,'nc','provisional',0.0,204); 
-INSERT INTO "TimeSeriesResultValues" VALUES(9992,1,229.0,'2013-05-08 00:00:00',-6,'nc','provisional',0.0,204); -INSERT INTO "TimeSeriesResultValues" VALUES(9993,1,233.0,'2013-05-09 00:00:00',-6,'nc','provisional',0.0,204); -INSERT INTO "TimeSeriesResultValues" VALUES(9994,1,242.0,'2013-06-06 00:00:00',-6,'nc','provisional',0.0,204); -INSERT INTO "TimeSeriesResultValues" VALUES(9995,1,243.0,'2013-06-07 00:00:00',-6,'nc','provisional',0.0,204); -INSERT INTO "TimeSeriesResultValues" VALUES(9996,1,245.0,'2013-06-11 00:00:00',-6,'nc','provisional',0.0,204); -INSERT INTO "TimeSeriesResultValues" VALUES(9997,1,249.0,'2013-06-02 00:00:00',-6,'nc','provisional',0.0,204); -INSERT INTO "TimeSeriesResultValues" VALUES(9998,1,251.0,'2013-06-08 00:00:00',-6,'nc','provisional',0.0,204); -INSERT INTO "TimeSeriesResultValues" VALUES(9999,1,254.0,'2013-06-01 00:00:00',-6,'nc','provisional',0.0,204); -INSERT INTO "TimeSeriesResultValues" VALUES(10000,1,256.0,'2013-06-10 00:00:00',-6,'nc','provisional',0.0,204); -INSERT INTO "TimeSeriesResultValues" VALUES(10001,1,258.0,'2013-06-05 00:00:00',-6,'nc','provisional',0.0,204); -INSERT INTO "TimeSeriesResultValues" VALUES(10002,1,260.0,'2013-06-09 00:00:00',-6,'nc','provisional',0.0,204); -INSERT INTO "TimeSeriesResultValues" VALUES(10003,1,263.0,'2013-06-03 00:00:00',-6,'nc','provisional',0.0,204); -INSERT INTO "TimeSeriesResultValues" VALUES(10004,1,265.0,'2013-05-10 00:00:00',-6,'nc','provisional',0.0,204); -INSERT INTO "TimeSeriesResultValues" VALUES(10005,1,265.0,'2013-06-04 00:00:00',-6,'nc','provisional',0.0,204); -INSERT INTO "TimeSeriesResultValues" VALUES(10006,1,270.0,'2013-05-31 00:00:00',-6,'nc','provisional',0.0,204); -INSERT INTO "TimeSeriesResultValues" VALUES(10007,1,291.0,'2013-05-11 00:00:00',-6,'nc','provisional',0.0,204); -INSERT INTO "TimeSeriesResultValues" VALUES(10008,1,306.0,'2013-05-30 00:00:00',-6,'nc','provisional',0.0,204); -INSERT INTO "TimeSeriesResultValues" VALUES(10009,1,315.0,'2013-05-27 
00:00:00',-6,'nc','provisional',0.0,204); -INSERT INTO "TimeSeriesResultValues" VALUES(10010,1,316.0,'2013-05-12 00:00:00',-6,'nc','provisional',0.0,204); -INSERT INTO "TimeSeriesResultValues" VALUES(10011,1,317.0,'2013-05-29 00:00:00',-6,'nc','provisional',0.0,204); -INSERT INTO "TimeSeriesResultValues" VALUES(10012,1,320.0,'2013-05-25 00:00:00',-6,'nc','provisional',0.0,204); +INSERT INTO "TimeSeriesResultValues" VALUES(9986,1,184.0,'2013-06-15',-6,'nc','provisional',0.0,204); +INSERT INTO "TimeSeriesResultValues" VALUES(9987,1,200.0,'2013-05-06',-6,'nc','provisional',0.0,204); +INSERT INTO "TimeSeriesResultValues" VALUES(9988,1,201.0,'2013-06-14',-6,'nc','provisional',0.0,204); +INSERT INTO "TimeSeriesResultValues" VALUES(9989,1,208.0,'2013-06-13',-6,'nc','provisional',0.0,204); +INSERT INTO "TimeSeriesResultValues" VALUES(9990,1,214.0,'2013-05-07',-6,'nc','provisional',0.0,204); +INSERT INTO "TimeSeriesResultValues" VALUES(9991,1,221.0,'2013-06-12',-6,'nc','provisional',0.0,204); +INSERT INTO "TimeSeriesResultValues" VALUES(9992,1,229.0,'2013-05-08',-6,'nc','provisional',0.0,204); +INSERT INTO "TimeSeriesResultValues" VALUES(9993,1,233.0,'2013-05-09',-6,'nc','provisional',0.0,204); +INSERT INTO "TimeSeriesResultValues" VALUES(9994,1,242.0,'2013-06-06',-6,'nc','provisional',0.0,204); +INSERT INTO "TimeSeriesResultValues" VALUES(9995,1,243.0,'2013-06-07',-6,'nc','provisional',0.0,204); +INSERT INTO "TimeSeriesResultValues" VALUES(9996,1,245.0,'2013-06-11',-6,'nc','provisional',0.0,204); +INSERT INTO "TimeSeriesResultValues" VALUES(9997,1,249.0,'2013-06-02',-6,'nc','provisional',0.0,204); +INSERT INTO "TimeSeriesResultValues" VALUES(9998,1,251.0,'2013-06-08',-6,'nc','provisional',0.0,204); +INSERT INTO "TimeSeriesResultValues" VALUES(9999,1,254.0,'2013-06-01',-6,'nc','provisional',0.0,204); +INSERT INTO "TimeSeriesResultValues" VALUES(10000,1,256.0,'2013-06-10',-6,'nc','provisional',0.0,204); +INSERT INTO "TimeSeriesResultValues" 
VALUES(10001,1,258.0,'2013-06-05',-6,'nc','provisional',0.0,204); +INSERT INTO "TimeSeriesResultValues" VALUES(10002,1,260.0,'2013-06-09',-6,'nc','provisional',0.0,204); +INSERT INTO "TimeSeriesResultValues" VALUES(10003,1,263.0,'2013-06-03',-6,'nc','provisional',0.0,204); +INSERT INTO "TimeSeriesResultValues" VALUES(10004,1,265.0,'2013-05-10',-6,'nc','provisional',0.0,204); +INSERT INTO "TimeSeriesResultValues" VALUES(10005,1,265.0,'2013-06-04',-6,'nc','provisional',0.0,204); +INSERT INTO "TimeSeriesResultValues" VALUES(10006,1,270.0,'2013-05-31',-6,'nc','provisional',0.0,204); +INSERT INTO "TimeSeriesResultValues" VALUES(10007,1,291.0,'2013-05-11',-6,'nc','provisional',0.0,204); +INSERT INTO "TimeSeriesResultValues" VALUES(10008,1,306.0,'2013-05-30',-6,'nc','provisional',0.0,204); +INSERT INTO "TimeSeriesResultValues" VALUES(10009,1,315.0,'2013-05-27',-6,'nc','provisional',0.0,204); +INSERT INTO "TimeSeriesResultValues" VALUES(10010,1,316.0,'2013-05-12',-6,'nc','provisional',0.0,204); +INSERT INTO "TimeSeriesResultValues" VALUES(10011,1,317.0,'2013-05-29',-6,'nc','provisional',0.0,204); +INSERT INTO "TimeSeriesResultValues" VALUES(10012,1,320.0,'2013-05-25',-6,'nc','provisional',0.0,204); INSERT INTO "TimeSeriesResultValues" VALUES(10013,1,321.0,'2013-05-28',-6,'nc','provisional',0.0,204); INSERT INTO "TimeSeriesResultValues" VALUES(10014,1,322.0,'2013-05-26',-6,'nc','provisional',0.0,204); INSERT INTO "TimeSeriesResultValues" VALUES(10015,1,338.0,'2013-05-24',-6,'nc','provisional',0.0,204); From f33c2cbedf648e644e7cae4a3fb9d0dc5dc0342f Mon Sep 17 00:00:00 2001 From: sreeder Date: Mon, 13 Nov 2017 14:56:40 -0700 Subject: [PATCH 21/55] add datasetsvalues docstrings --- odm2api/ODM2/services/readService.py | 24 ++++++++++++++++++++++++ 1 file changed, 24 insertions(+) diff --git a/odm2api/ODM2/services/readService.py b/odm2api/ODM2/services/readService.py index c003019..3c725a1 100644 --- a/odm2api/ODM2/services/readService.py +++ 
b/odm2api/ODM2/services/readService.py @@ -776,6 +776,30 @@ def getDataSetsResults(self, ids= None, codes=None, uuids=None, dstype=None): return None def getDataSetsValues(self, ids=None, codes=None, uuids=None, dstype=None): + """ + Retrieve a list of datavalues associated with the given dataset info + + **Must specify either DataSetID OR DataSetUUID OR DataSetCode)** + Args: + ids (list, optional): List of DataSetsIDs. + codes (list, optional): List of DataSet Codes. + uuids (list, optional): List of Dataset UUIDs string. + dstype (str, optional): Type of Dataset from + `controlled vocabulary name `_. + + + Returns: + list: List of Result Values Objects + + Examples: + >>> READ = ReadODM2(SESSION_FACTORY) + >>> READ.getDataSetsValues(ids=[39, 40]) + >>> READ.getDataSetsValues(codes=['HOME', 'FIELD']) + >>> READ.getDataSetsValues(uuids=['a6f114f1-5416-4606-ae10-23be32dbc202', + ... '5396fdf3-ceb3-46b6-aaf9-454a37278bb4']) + >>> READ.getDataSetsValues(dstype='singleTimeSeries') + + """ dsr = self.getDataSetsResults(ids, codes, uuids, dstype) From 644cb6f9245b105ddf66c61f1397628e3e905755 Mon Sep 17 00:00:00 2001 From: sreeder Date: Mon, 13 Nov 2017 15:06:23 -0700 Subject: [PATCH 22/55] update doc strings --- odm2api/ODM2/services/readService.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/odm2api/ODM2/services/readService.py b/odm2api/ODM2/services/readService.py index 3c725a1..78ba586 100644 --- a/odm2api/ODM2/services/readService.py +++ b/odm2api/ODM2/services/readService.py @@ -632,8 +632,8 @@ def getResults(self, ids=None, type=None, uuids=None, actionid=None, simulationi simulationid (int, optional): SimulationID. sfid (int, optional): SamplingFeatureID. variableid (int, optional): VariableID. - siteid (int, optional): SiteID. - goes through related features table and finds all of the measurement - values recorded at the given site + siteid (int, optional): SiteID. 
- goes through related features table and finds all of results + recorded at the given site Returns: list: List of Result objects @@ -728,7 +728,7 @@ def getDataSets(self, ids= None, codes=None, uuids=None, dstype=None): # Datasets - def getDataSetsResults(self, ids= None, codes=None, uuids=None, dstype=None): + def getDataSetsResults(self, ids=None, codes=None, uuids=None, dstype=None): """ Retrieve a detailed list of Datasets along with detailed metadata about the datasets and the results contained within them From 00834dd21e009c6d21fdd5589fec5cdc53700c42 Mon Sep 17 00:00:00 2001 From: sreeder Date: Mon, 13 Nov 2017 15:39:11 -0700 Subject: [PATCH 23/55] add sfids, sfcodes and sfuuids to getResults --- odm2api/ODM2/services/readService.py | 24 ++++++++++++++++++++++-- 1 file changed, 22 insertions(+), 2 deletions(-) diff --git a/odm2api/ODM2/services/readService.py b/odm2api/ODM2/services/readService.py index 06252ad..26e8c00 100644 --- a/odm2api/ODM2/services/readService.py +++ b/odm2api/ODM2/services/readService.py @@ -615,7 +615,7 @@ def getAffiliations(self, ids=None, personfirst=None, personlast=None, orgcode=N # Results def getResults(self, ids=None, type=None, uuids=None, actionid=None, simulationid=None, sfid=None, - variableid=None, siteid=None): + variableid=None, siteid=None, sfids=None, sfuuids=None, sfcodes=None): # TODO what if user sends in both type and actionid vs just actionid """Retrieve a list of Result objects. @@ -633,6 +633,9 @@ def getResults(self, ids=None, type=None, uuids=None, actionid=None, simulationi sfid (int, optional): SamplingFeatureID. variableid (int, optional): VariableID. siteid (int, optional): SiteID. + sfids(list, optional): List of Sampling Feature IDs integer. + sfuuids(list, optional): List of Sampling Feature UUIDs string. + sfcodes=(list, optional): List of Sampling Feature codes string. 
Returns: list: List of Result objects @@ -640,7 +643,7 @@ def getResults(self, ids=None, type=None, uuids=None, actionid=None, simulationi Examples: >>> ReadODM2.getResults(ids=[39,40]) >>> ReadODM2.getResults(type='Time series coverage') - >>> ReadODM2.getResults(sfid=65) + >>> ReadODM2.getResults(sfids=[65]) >>> ReadODM2.getResults(uuids=['a6f114f1-5416-4606-ae10-23be32dbc202', ... '5396fdf3-ceb3-46b6-aaf9-454a37278bb4']) >>> ReadODM2.getResults(simulationid=50) @@ -667,9 +670,18 @@ def getResults(self, ids=None, type=None, uuids=None, actionid=None, simulationi if actionid: query = query.join(FeatureActions).filter_by(ActionID=actionid) if sfid: + import warnings + warnings.warn('the parameter sfid, will no longer be supported. please use the sfids parameter and send in a list. ') query = query.join(FeatureActions).filter_by(SamplingFeatureID=sfid) + if sfids or sfcodes or sfuuids: + sf_list = self.getSamplingFeatures(ids=sfids, codes=sfcodes, uuids=sfuuids) + sfids = [] + for sf in sf_list: + sfids.append(sf.SamplingFeatureID) + query = query.join(FeatureActions).filter(FeatureActions.SamplingFeatureID.in_(sfids)) if siteid: + sfids = [x[0] for x in self._session.query( distinct(SamplingFeatures.SamplingFeatureID)) .select_from(RelatedFeatures) @@ -677,6 +689,14 @@ def getResults(self, ids=None, type=None, uuids=None, actionid=None, simulationi .filter(RelatedFeatures.RelatedFeatureID == siteid) .all() ] + + # sf_list = self.getRelatedSamplingFeatures(rfid=siteid) + # sfids = [] + # for sf in sf_list: + # sfids.append(sf.SamplingFeatureID) + + + query = query.join(FeatureActions).filter(FeatureActions.SamplingFeatureID.in_(sfids)) try: From 69656fa524e525aa2f49c26237e337bfba6dd487 Mon Sep 17 00:00:00 2001 From: sreeder Date: Mon, 13 Nov 2017 16:01:38 -0700 Subject: [PATCH 24/55] test sfresult function --- tests/test_odm2/test_readservice.py | 130 ++++++++++++---------------- 1 file changed, 53 insertions(+), 77 deletions(-) diff --git 
a/tests/test_odm2/test_readservice.py b/tests/test_odm2/test_readservice.py index 6ea9154..15e2b91 100644 --- a/tests/test_odm2/test_readservice.py +++ b/tests/test_odm2/test_readservice.py @@ -76,7 +76,7 @@ def setup(self): self.db = globals_vars['db'] -# Sampling Features + # Sampling Features def test_getAllSamplingFeatures(self): # get all models from the database res = self.engine.execute('SELECT * FROM SamplingFeatures').fetchall() @@ -92,17 +92,57 @@ def test_getSamplingFeatureByID(self): resapi = self.reader.getSamplingFeatures(ids=[sfid]) assert resapi is not None -# Models - """ - TABLE Models - ModelID INTEGER NOT NULL PRIMARY KEY, - ModelCode VARCHAR (50) NOT NULL, - ModelName VARCHAR (255) NOT NULL, - ModelDescription VARCHAR (500) NULL, - Version VARCHAR (255) NULL, - ModelLink VARCHAR (255) NULL - """ + def test_getSamplingFeatureByCode(self): + # get all models from the database + res = self.engine.execute('SELECT * FROM SamplingFeatures').fetchone() + code = res[2] + # get all simulations using the api + resapi = self.reader.getSamplingFeatures(codes=[code]) + assert resapi is not None + + # Results + def test_getAllResults(self): + # get all results from the database + res = self.engine.execute('SELECT * FROM Results').fetchall() + print(res) + # get all results using the api + resapi = self.reader.getResults() + assert len(res) == len(resapi) + + def test_getResultsByID(self): + # get a result from the database + res = self.engine.execute('SELECT * FROM Results').fetchone() + resultid = res[1] + + # get the result using the api + resapi = self.reader.getResults(ids=[resultid]) + assert resapi is not None + + def test_getResultsBySFID(self): + sf = self.engine.execute( + 'SELECT * from SamplingFeatures as sf ' + 'inner join FeatureActions as fa on fa.SamplingFeatureID == sf.SamplingFeatureID ' + 'inner join Results as r on fa.FeatureActionID == r.FeatureActionID ' + ).fetchone() + assert len(sf) > 0 + sfid = sf[0] + + res = self.engine.execute( + 
'SELECT * from Results as r ' + 'inner join FeatureActions as fa on fa.FeatureActionID == r.FeatureActionID ' + 'where fa.SamplingFeatureID = ' + str(sfid) + ).fetchone() + + assert len(res) > 0 + + # get the result using the api + resapi = self.reader.getResults(sfids=[sfid]) + + assert resapi is not None + assert len(resapi) > 0 + assert resapi[0].ResultID == res[0] + # Models def test_getAllModels(self): # get all models from the database res = self.engine.execute('SELECT * FROM Models').fetchall() @@ -119,19 +159,7 @@ def test_getModelByCode(self): assert resapi is not None -# RelatedModels - """ - TABLE RelatedModels ( - RelatedID INTEGER NOT NULL PRIMARY KEY, - ModelID INTEGER NOT NULL, - RelationshipTypeCV VARCHAR (255) NOT NULL, - RelatedModelID INTEGER NOT NULL, - FOREIGN KEY (RelationshipTypeCV) REFERENCES CV_RelationshipType (Name) - ON UPDATE NO ACTION ON DELETE NO ACTION, - FOREIGN KEY (ModelID) REFERENCES Models (ModelID) - ON UPDATE NO ACTION ON DELETE NO ACTION - """ - + # RelatedModels def test_getRelatedModelsByID(self): # get related models by id using the api resapi = self.reader.getRelatedModels(id=1) @@ -156,59 +184,7 @@ def test_getRelatedModelsByCode(self): assert not resapi -# Results - """ - TABLE Results ( - ResultID INTEGER NOT NULL PRIMARY KEY, - ResultUUID VARCHAR(36) NOT NULL, - FeatureActionID INTEGER NOT NULL, - ResultTypeCV VARCHAR (255) NOT NULL, - VariableID INTEGER NOT NULL, - UnitsID INTEGER NOT NULL, - TaxonomicClassifierID INTEGER NULL, - ProcessingLevelID INTEGER NOT NULL, - ResultDateTime DATETIME NULL, - ResultDateTimeUTCOffset INTEGER NULL, - ValidDateTime DATETIME NULL, - ValidDateTimeUTCOffset INTEGER NULL, - StatusCV VARCHAR (255) NULL, - SampledMediumCV VARCHAR (255) NOT NULL, - ValueCount INTEGER NOT NULL - """ - def test_getAllResults(self): - # get all results from the database - res = self.engine.execute('SELECT * FROM Results').fetchall() - print(res) - # get all results using the api - resapi = 
self.reader.getResults() - assert len(res) == len(resapi) - - def test_getResultsByID(self): - # get a result from the database - res = self.engine.execute('SELECT * FROM Results').fetchone() - resultid = res[1] - - # get the result using the api - resapi = self.reader.getResults(ids=[resultid]) - assert resapi is not None - -# Simulations - """ - TABLE Simulations ( - SimulationID INTEGER NOT NULL PRIMARY KEY, - ActionID INTEGER NOT NULL, - SimulationName VARCHAR (255) NOT NULL, - SimulationDescription VARCHAR (500) NULL, - SimulationStartDateTime DATETIME NOT NULL, - SimulationStartDateTimeUTCOffset INTEGER NOT NULL, - SimulationEndDateTime DATETIME NOT NULL, - SimulationEndDateTimeUTCOffset INTEGER NOT NULL, - TimeStepValue FLOAT NOT NULL, - TimeStepUnitsID INTEGER NOT NULL, - InputDataSetID INTEGER NULL, - ModelID INTEGER NOT NULL, - """ - + # Simulations def test_getAllSimulations(self): # get all simulation from the database res = self.engine.execute('SELECT * FROM Simulations').fetchall() From 1559213ea064f0932775282402686455c06185d1 Mon Sep 17 00:00:00 2001 From: sreeder Date: Mon, 13 Nov 2017 16:02:42 -0700 Subject: [PATCH 25/55] add todo comment --- odm2api/ODM2/services/readService.py | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/odm2api/ODM2/services/readService.py b/odm2api/ODM2/services/readService.py index 26e8c00..e119d9a 100644 --- a/odm2api/ODM2/services/readService.py +++ b/odm2api/ODM2/services/readService.py @@ -690,13 +690,12 @@ def getResults(self, ids=None, type=None, uuids=None, actionid=None, simulationi .all() ] + #TODO does this code do the same thing as the code above? 
# sf_list = self.getRelatedSamplingFeatures(rfid=siteid) # sfids = [] # for sf in sf_list: # sfids.append(sf.SamplingFeatureID) - - - + query = query.join(FeatureActions).filter(FeatureActions.SamplingFeatureID.in_(sfids)) try: From 5ec7eb6e1fc038c128ba0c61141dcc96c79d2a5e Mon Sep 17 00:00:00 2001 From: sreeder Date: Mon, 13 Nov 2017 16:10:32 -0700 Subject: [PATCH 26/55] update warning message --- Examples/Sample.py | 13 +++++++------ odm2api/ODM2/services/readService.py | 4 ++-- tests/test_odm2/test_readservice.py | 1 - 3 files changed, 9 insertions(+), 9 deletions(-) diff --git a/Examples/Sample.py b/Examples/Sample.py index 3292780..a6c5ac1 100644 --- a/Examples/Sample.py +++ b/Examples/Sample.py @@ -17,14 +17,14 @@ #connect to database # createconnection (dbtype, servername, dbname, username, password) # session_factory = dbconnection.createConnection('connection type: sqlite|mysql|mssql|postgresql', '/your/path/to/db/goes/here', 2.0)#sqlite -session_factory = dbconnection.createConnection('postgresql', 'localhost', 'odm2', 'ODM', 'odm') -# session_factory = dbconnection.createConnection('mysql', 'localhost', 'odm2', 'ODM', 'odm')#mysql -# session_factory= dbconnection.createConnection('mssql', "(local)", "ODM2", "ODM", "odm")#win MSSQL +# session_factory = dbconnection.createConnection('postgresql', 'localhost', 'odm2', 'ODM', 'odm') +# session_factory = dbconnection.createConnection('mysql', 'localhost', 'odm2', 'ODM', 'odm')#mysql +session_factory= dbconnection.createConnection('mssql', "(local)", "ODM2", "ODM", "odm")#win MSSQL # session_factory= dbconnection.createConnection('mssql', "arroyoodm2", "", "ODM", "odm")#mac/linux MSSQL -# session_factory = dbconnection.createConnection('sqlite', '/Users/stephanie/DEV/YODA-Tools/tests/test_files/XL_specimen.sqlite', 2.0) +# session_factory = dbconnection.createConnection('sqlite', 'path/to/ODM2.sqlite', 2.0) @@ -34,7 +34,7 @@ read = ReadODM2(session_factory) create = CreateODM2(session_factory) - +sitetest = 
read.getResults(sfid = 1) @@ -66,6 +66,7 @@ try: print ("\n-------- Information about Sites ---------") siteFeatures = read.getSamplingFeatures(type= 'site') + # siteFeatures = read.getSamplingFeatures(type='Site') numSites = len(siteFeatures) print ("Successful query") @@ -149,7 +150,7 @@ # Get the values for a particular TimeSeriesResult print("\n-------- Example of Retrieving Time Series Result Values ---------") -tsValues = read.getResultValues(resultid = 1) # Return type is a pandas datafram +tsValues = read.getResultValues(resultids = [1]) # Return type is a pandas dataframe # Print a few Time Series Values to the console # tsValues.set_index('ValueDateTime', inplace=True) diff --git a/odm2api/ODM2/services/readService.py b/odm2api/ODM2/services/readService.py index e119d9a..21343dd 100644 --- a/odm2api/ODM2/services/readService.py +++ b/odm2api/ODM2/services/readService.py @@ -671,7 +671,7 @@ def getResults(self, ids=None, type=None, uuids=None, actionid=None, simulationi query = query.join(FeatureActions).filter_by(ActionID=actionid) if sfid: import warnings - warnings.warn('the parameter sfid, will no longer be supported. please use the sfids parameter and send in a list. ') + warnings.warn("The parameter 'sfid' is no longer be supported. 
Please use the sfids parameter and send in a list.") query = query.join(FeatureActions).filter_by(SamplingFeatureID=sfid) if sfids or sfcodes or sfuuids: sf_list = self.getSamplingFeatures(ids=sfids, codes=sfcodes, uuids=sfuuids) @@ -695,7 +695,7 @@ def getResults(self, ids=None, type=None, uuids=None, actionid=None, simulationi # sfids = [] # for sf in sf_list: # sfids.append(sf.SamplingFeatureID) - + query = query.join(FeatureActions).filter(FeatureActions.SamplingFeatureID.in_(sfids)) try: diff --git a/tests/test_odm2/test_readservice.py b/tests/test_odm2/test_readservice.py index 15e2b91..b88b3e4 100644 --- a/tests/test_odm2/test_readservice.py +++ b/tests/test_odm2/test_readservice.py @@ -137,7 +137,6 @@ def test_getResultsBySFID(self): # get the result using the api resapi = self.reader.getResults(sfids=[sfid]) - assert resapi is not None assert len(resapi) > 0 assert resapi[0].ResultID == res[0] From be1a758dbc3cb2115874e41005f4f00f2d51f630 Mon Sep 17 00:00:00 2001 From: sreeder Date: Tue, 14 Nov 2017 10:21:42 -0700 Subject: [PATCH 27/55] add restype to result function --- odm2api/ODM2/services/readService.py | 11 ++++++++--- 1 file changed, 8 insertions(+), 3 deletions(-) diff --git a/odm2api/ODM2/services/readService.py b/odm2api/ODM2/services/readService.py index 21343dd..56ea400 100644 --- a/odm2api/ODM2/services/readService.py +++ b/odm2api/ODM2/services/readService.py @@ -614,7 +614,7 @@ def getAffiliations(self, ids=None, personfirst=None, personlast=None, orgcode=N return None # Results - def getResults(self, ids=None, type=None, uuids=None, actionid=None, simulationid=None, sfid=None, + def getResults(self, ids=None, type=None, restype = None, uuids=None, actionid=None, simulationid=None, sfid=None, variableid=None, siteid=None, sfids=None, sfuuids=None, sfcodes=None): # TODO what if user sends in both type and actionid vs just actionid @@ -625,7 +625,7 @@ def getResults(self, ids=None, type=None, uuids=None, actionid=None, simulationi Args: ids 
(list, optional): List of ResultIDs. - type (str, optional): Type of Result from + restype (str, optional): Type of Result from `controlled vocabulary name `_. uuids (list, optional): List of UUIDs string. actionid (int, optional): ActionID. @@ -642,7 +642,7 @@ def getResults(self, ids=None, type=None, uuids=None, actionid=None, simulationi Examples: >>> ReadODM2.getResults(ids=[39,40]) - >>> ReadODM2.getResults(type='Time series coverage') + >>> ReadODM2.getResults(restype='Time series coverage') >>> ReadODM2.getResults(sfids=[65]) >>> ReadODM2.getResults(uuids=['a6f114f1-5416-4606-ae10-23be32dbc202', ... '5396fdf3-ceb3-46b6-aaf9-454a37278bb4']) @@ -655,7 +655,12 @@ def getResults(self, ids=None, type=None, uuids=None, actionid=None, simulationi query = self._session.query(Results) if type: + import warnings + warnings.warn( + "The parameter 'type' is no longer be supported. Please use the restype parameter instead.") query = query.filter_by(ResultTypeCV=type) + if restype: + query = query.filter_by(ResultTypeCV=restype) if variableid: query = query.filter_by(VariableID=variableid) if ids: From 7e7a34c6e66d710c2934cbf06a845d52839cd63a Mon Sep 17 00:00:00 2001 From: sreeder Date: Fri, 17 Nov 2017 09:48:50 -0700 Subject: [PATCH 28/55] update get samplingfeatureds to have new sf structur --- odm2api/ODM2/services/readService.py | 64 +++++++++++++++++++++++----- tests/test_odm2/test_readservice.py | 54 ++++++++++++----------- 2 files changed, 84 insertions(+), 34 deletions(-) diff --git a/odm2api/ODM2/services/readService.py b/odm2api/ODM2/services/readService.py index 0d105cc..4abe434 100644 --- a/odm2api/ODM2/services/readService.py +++ b/odm2api/ODM2/services/readService.py @@ -72,6 +72,43 @@ def __init__(self, affiliation, person, org): self.Organization = '(' + org.OrganizationCode + ') ' + org.OrganizationName +class SamplingFeatureDataSet(): + datasets={} + def __init__(self, samplingfeature, datasetresults): + sf = samplingfeature + + self.SamplingFeatureID = 
sf.SamplingFeatureID + self.SamplingFeatureUUID = sf.SamplingFeatureUUID + self.SamplingFeatureTypeCV = sf.SamplingFeatureTypeCV + self.SamplingFeatureCode = sf.SamplingFeatureCode + self.SamplingFeatureName = sf.SamplingFeatureName + self.SamplingFeatureDescription = sf.SamplingFeatureDescription + self.SamplingFeatureGeotypeCV = sf.SamplingFeatureGeotypeCV + self.Elevation_m = sf.Elevation_m + self.ElevationDatumCV = sf.ElevationDatumCV + self.FeatureGeometryWKT = sf.FeatureGeometryWKT + self.datasets = self.assignDatasets(datasetresults) + + print(self.datasets) + + + def assignDatasets(self, datasetresults): + for dsr in datasetresults: + if dsr.DataSetObj not in self.datasets: + #if the dataset is not in the dictionary, add it and the first result + self.datasets[dsr.DataSetObj]=[] + res = dsr.ResultObj + res.FeatureActionObj = None + self.datasets[dsr.DataSetObj].append(res) + else: + #if the dataset is in the dictionary, append the result object to the list + res = dsr.ResultObj + res.FeatureActionObj = None + self.datasets[dsr.DataSetObj].append(res) + + + + class ReadODM2(serviceBase): # Exists functions def resultExists(self, result): @@ -871,7 +908,6 @@ def getSamplingFeatureDatasets(self, ids=None, codes=None, uuids=None, dstype=No raise ValueError('Expected samplingFeatureID OR samplingFeatureUUID OR samplingFeatureCode argument') sf_query = self._session.query(SamplingFeatures) - if ids: sf_query = sf_query.filter(SamplingFeatures.SamplingFeatureID.in_(ids)) if codes: @@ -880,21 +916,29 @@ def getSamplingFeatureDatasets(self, ids=None, codes=None, uuids=None, dstype=No sf_query = sf_query.filter(SamplingFeatures.SamplingFeatureUUID.in_(uuids)) sf_list = [] for sf in sf_query.all(): - sf_list.append(sf.SamplingFeatureID) + sf_list.append(sf) - q = self._session.query(DataSetsResults)\ - .join(Results)\ - .join(FeatureActions)\ - .filter(FeatureActions.SamplingFeatureID.in_(sf_list)) + sfds = None + try: + sfds=[] + for sf in sf_list: - if dstype: - q = 
q.filter_by(DatasetTypeCV=dstype) + q = self._session.query(DataSetsResults)\ + .join(Results)\ + .join(FeatureActions)\ + .filter(FeatureActions.SamplingFeatureID == sf.SamplingFeatureID) - try: - return q.all() + if dstype: + q = q.filter_by(DatasetTypeCV=dstype) + + + vals = q.all() + + sfds.append(SamplingFeatureDataSet(sf, vals)) except Exception as e: print('Error running Query: {}'.format(e)) return None + return sfds # Data Quality def getDataQuality(self): diff --git a/tests/test_odm2/test_readservice.py b/tests/test_odm2/test_readservice.py index 120c23d..4f33390 100644 --- a/tests/test_odm2/test_readservice.py +++ b/tests/test_odm2/test_readservice.py @@ -130,35 +130,40 @@ def test_getDataSetsValues(self): def test_getSamplingFeatureDataSets(self): + try: + #find a sampling feature that is associated with a dataset + sf = self.engine.execute( + 'SELECT * from SamplingFeatures as sf ' + 'inner join FeatureActions as fa on fa.SamplingFeatureID == sf.SamplingFeatureID ' + 'inner join Results as r on fa.FeatureActionID == r.FeatureActionID ' + 'inner join DataSetsResults as ds on r.ResultID == ds.ResultID ' + ).fetchone() + assert len(sf) > 0 + + #get the dataset associated with the sampling feature + ds = self.engine.execute( + 'SELECT * from DataSetsResults as ds ' + 'inner join Results as r on r.ResultID == ds.ResultID ' + 'inner join FeatureActions as fa on fa.FeatureActionID == r.FeatureActionID ' + 'where fa.SamplingFeatureID = ' + str(sf[0]) + ).fetchone() + assert len(ds) > 0 - #find a sampling feature that is associated with a dataset - sf = self.engine.execute( - 'SELECT * from SamplingFeatures as sf ' - 'inner join FeatureActions as fa on fa.SamplingFeatureID == sf.SamplingFeatureID ' - 'inner join Results as r on fa.FeatureActionID == r.FeatureActionID ' - 'inner join DataSetsResults as ds on r.ResultID == ds.ResultID ' - ).fetchone() - assert len(sf) > 0 - - #get the dataset associated with the sampling feature - ds = self.engine.execute( - 
'SELECT * from DataSetsResults as ds ' - 'inner join Results as r on r.ResultID == ds.ResultID ' - 'inner join FeatureActions as fa on fa.FeatureActionID == r.FeatureActionID ' - 'where fa.SamplingFeatureID = ' + str(sf[0]) - ).fetchone() - assert len(ds) > 0 - - print (sf[0]) - # get the dataset associated with the sampling feature using hte api - dsapi = self.reader.getSamplingFeatureDatasets(ids=[sf[0]]) + print (sf[0]) + # get the dataset associated with the sampling feature using the api + dsapi = self.reader.getSamplingFeatureDatasets(ids=[sf[0]]) - assert dsapi is not None - assert len(dsapi) > 0 - assert ds[1] == dsapi[0].DataSetID + assert dsapi is not None + assert len(dsapi) > 0 + assert dsapi[0].datasets is not None + # assert ds[1] == dsapi[0].DataSetID + finally: + self.reader._session.rollback() + assert False # Results def test_getAllResults(self): + # get all results from the database res = self.engine.execute('SELECT * FROM Results').fetchall() print(res) @@ -166,6 +171,7 @@ def test_getAllResults(self): resapi = self.reader.getResults() assert len(res) == len(resapi) + def test_getResultsByID(self): # get a result from the database res = self.engine.execute('SELECT * FROM Results').fetchone() From 3ff16c15c93bb654f3c1d62462d461c169515308 Mon Sep 17 00:00:00 2001 From: sreeder Date: Fri, 17 Nov 2017 09:59:06 -0700 Subject: [PATCH 29/55] update assign datasets --- odm2api/ODM2/services/readService.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/odm2api/ODM2/services/readService.py b/odm2api/ODM2/services/readService.py index 4abe434..f4fdffe 100644 --- a/odm2api/ODM2/services/readService.py +++ b/odm2api/ODM2/services/readService.py @@ -87,7 +87,7 @@ def __init__(self, samplingfeature, datasetresults): self.Elevation_m = sf.Elevation_m self.ElevationDatumCV = sf.ElevationDatumCV self.FeatureGeometryWKT = sf.FeatureGeometryWKT - self.datasets = self.assignDatasets(datasetresults) + self.assignDatasets(datasetresults)
print(self.datasets) From d84c25223ab75f4bb2af18ecdcca26cd23690a3d Mon Sep 17 00:00:00 2001 From: sreeder Date: Fri, 17 Nov 2017 10:22:55 -0700 Subject: [PATCH 30/55] update sfdataset test --- odm2api/ODM2/services/readService.py | 4 ++-- tests/test_odm2/test_readservice.py | 7 +++++-- 2 files changed, 7 insertions(+), 4 deletions(-) diff --git a/odm2api/ODM2/services/readService.py b/odm2api/ODM2/services/readService.py index f4fdffe..e6295f9 100644 --- a/odm2api/ODM2/services/readService.py +++ b/odm2api/ODM2/services/readService.py @@ -98,12 +98,12 @@ def assignDatasets(self, datasetresults): #if the dataset is not in the dictionary, add it and the first result self.datasets[dsr.DataSetObj]=[] res = dsr.ResultObj - res.FeatureActionObj = None + # res.FeatureActionObj = None self.datasets[dsr.DataSetObj].append(res) else: #if the dataset is in the dictionary, append the result object to the list res = dsr.ResultObj - res.FeatureActionObj = None + # res.FeatureActionObj = None self.datasets[dsr.DataSetObj].append(res) diff --git a/tests/test_odm2/test_readservice.py b/tests/test_odm2/test_readservice.py index 4f33390..f1ca669 100644 --- a/tests/test_odm2/test_readservice.py +++ b/tests/test_odm2/test_readservice.py @@ -156,10 +156,13 @@ def test_getSamplingFeatureDataSets(self): assert dsapi is not None assert len(dsapi) > 0 assert dsapi[0].datasets is not None - # assert ds[1] == dsapi[0].DataSetID + assert dsapi[0].SamplingFeatureID == sf[0] + # assert ds[0] == dsapi[0] + except Exception as ex: + assert False finally: self.reader._session.rollback() - assert False + # Results def test_getAllResults(self): From 89a7b95eaaffba1f8f97d714dc4f1904b7e4c7d1 Mon Sep 17 00:00:00 2001 From: lsetiawan Date: Wed, 22 Nov 2017 13:23:26 -0800 Subject: [PATCH 31/55] Fix Processing Levels code and docstring --- odm2api/ODM2/services/readService.py | 23 ++++++++++++++++++----- 1 file changed, 18 insertions(+), 5 deletions(-) diff --git a/odm2api/ODM2/services/readService.py 
b/odm2api/ODM2/services/readService.py index e6295f9..830b0af 100644 --- a/odm2api/ODM2/services/readService.py +++ b/odm2api/ODM2/services/readService.py @@ -390,15 +390,28 @@ def getMethods(self, ids=None, codes=None, type=None): # ProcessingLevel def getProcessingLevels(self, ids=None, codes=None): """ - getProcessingLevels(self, ids=None, codes=None) - * Pass nothing - returns full list of ProcessingLevel objects - * Pass a list of ProcessingLevelID - returns a single processingLevel object for each given id - * Pass a list of ProcessingLevelCode - returns a single processingLevel object for each given code + Retrieve a list of Processing Levels + + If no arguments are passed to the function, or their values are None, + all Processing Levels objects in the database will be returned. + + Args: + ids (list, optional): List of Processing Levels IDs. + codes (list, optional): List of Processing Levels Codes. + + + Returns: + list: List of ProcessingLevels Objects + + Examples: + >>> READ = ReadODM2(SESSION_FACTORY) + >>> READ.getProcessingLevels(ids=[1, 3]) + >>> READ.getProcessingLevels(codes=['L1', 'L3']) """ q = self._session.query(ProcessingLevels) if ids: - q = q.filter(ProcessingLevels.ProcessingLevelsID.in_(ids)) + q = q.filter(ProcessingLevels.ProcessingLevelID.in_(ids)) if codes: q = q.filter(ProcessingLevels.ProcessingLevelCode.in_(codes)) From 2b0862495e916ab0c9e2c88066a0d29228bddfc6 Mon Sep 17 00:00:00 2001 From: Elijah West Date: Fri, 8 Dec 2017 13:38:43 -0700 Subject: [PATCH 32/55] Fixed getSamplingFeatureDatasets issue #130 --- odm2api/ODM2/services/readService.py | 30 ++++++++++++++++++++++------ 1 file changed, 24 insertions(+), 6 deletions(-) diff --git a/odm2api/ODM2/services/readService.py b/odm2api/ODM2/services/readService.py index e6295f9..b7b8c09 100644 --- a/odm2api/ODM2/services/readService.py +++ b/odm2api/ODM2/services/readService.py @@ -74,9 +74,11 @@ def __init__(self, affiliation, person, org): class SamplingFeatureDataSet(): 
datasets={} - def __init__(self, samplingfeature, datasetresults): + related_features={} + def __init__(self, samplingfeature, datasetresults, relatedfeatures): sf = samplingfeature + self.SamplingFeature = sf self.SamplingFeatureID = sf.SamplingFeatureID self.SamplingFeatureUUID = sf.SamplingFeatureUUID self.SamplingFeatureTypeCV = sf.SamplingFeatureTypeCV @@ -88,11 +90,13 @@ def __init__(self, samplingfeature, datasetresults): self.ElevationDatumCV = sf.ElevationDatumCV self.FeatureGeometryWKT = sf.FeatureGeometryWKT self.assignDatasets(datasetresults) + self.assignRelatedFeatures(relatedfeatures) - print(self.datasets) + print(self.datasets) def assignDatasets(self, datasetresults): + self.datasets = {} for dsr in datasetresults: if dsr.DataSetObj not in self.datasets: #if the dataset is not in the dictionary, add it and the first result @@ -107,6 +111,14 @@ def assignDatasets(self, datasetresults): self.datasets[dsr.DataSetObj].append(res) + def assignRelatedFeatures(self, relatedfeatures): + self.related_features = {} + for related in relatedfeatures: + if related.SamplingFeatureTypeCV == 'Site': + self.related_features = related + + + class ReadODM2(serviceBase): @@ -875,7 +887,7 @@ def getDataSetsValues(self, ids=None, codes=None, uuids=None, dstype=None): return None - def getSamplingFeatureDatasets(self, ids=None, codes=None, uuids=None, dstype=None): + def getSamplingFeatureDatasets(self, ids=None, codes=None, uuids=None, dstype=None, type=None): """ Retrieve a list of Datasets associated with the given sampling feature data. 
@@ -904,16 +916,20 @@ def getSamplingFeatureDatasets(self, ids=None, codes=None, uuids=None, dstype=No # make sure one of the three arguments has been sent in - if all(v is None for v in [ids, codes, uuids]): - raise ValueError('Expected samplingFeatureID OR samplingFeatureUUID OR samplingFeatureCode argument') + # if all(v is None for v in [ids, codes, uuids, type]): + # raise ValueError('Expected samplingFeatureID OR samplingFeatureUUID OR samplingFeatureCode OR samplingFeatureType ' + # 'argument') sf_query = self._session.query(SamplingFeatures) + if type: + sf_query = sf_query.filter(SamplingFeatures.SamplingFeatureTypeCV == type) if ids: sf_query = sf_query.filter(SamplingFeatures.SamplingFeatureID.in_(ids)) if codes: sf_query = sf_query.filter(SamplingFeatures.SamplingFeatureCode.in_(codes)) if uuids: sf_query = sf_query.filter(SamplingFeatures.SamplingFeatureUUID.in_(uuids)) + sf_list = [] for sf in sf_query.all(): sf_list.append(sf) @@ -934,7 +950,9 @@ def getSamplingFeatureDatasets(self, ids=None, codes=None, uuids=None, dstype=No vals = q.all() - sfds.append(SamplingFeatureDataSet(sf, vals)) + related = self.getRelatedSamplingFeatures(sf.SamplingFeatureID) + + sfds.append(SamplingFeatureDataSet(sf, vals, related)) except Exception as e: print('Error running Query: {}'.format(e)) return None From 9592471d1bb0dd35df48b9bb5806161f72620e0f Mon Sep 17 00:00:00 2001 From: lsetiawan Date: Wed, 13 Dec 2017 13:00:36 -0800 Subject: [PATCH 33/55] Fix getSamplingFeatureDataset --- odm2api/ODM2/services/readService.py | 49 +++++++++++++++------------- 1 file changed, 26 insertions(+), 23 deletions(-) diff --git a/odm2api/ODM2/services/readService.py b/odm2api/ODM2/services/readService.py index b7b8c09..168d20b 100644 --- a/odm2api/ODM2/services/readService.py +++ b/odm2api/ODM2/services/readService.py @@ -97,25 +97,27 @@ def __init__(self, samplingfeature, datasetresults, relatedfeatures): def assignDatasets(self, datasetresults): self.datasets = {} - for dsr in 
datasetresults: - if dsr.DataSetObj not in self.datasets: - #if the dataset is not in the dictionary, add it and the first result - self.datasets[dsr.DataSetObj]=[] - res = dsr.ResultObj - # res.FeatureActionObj = None - self.datasets[dsr.DataSetObj].append(res) - else: - #if the dataset is in the dictionary, append the result object to the list - res = dsr.ResultObj - # res.FeatureActionObj = None - self.datasets[dsr.DataSetObj].append(res) + if datasetresults: + for dsr in datasetresults: + if dsr.DataSetObj not in self.datasets: + #if the dataset is not in the dictionary, add it and the first result + self.datasets[dsr.DataSetObj]=[] + res = dsr.ResultObj + # res.FeatureActionObj = None + self.datasets[dsr.DataSetObj].append(res) + else: + #if the dataset is in the dictionary, append the result object to the list + res = dsr.ResultObj + # res.FeatureActionObj = None + self.datasets[dsr.DataSetObj].append(res) def assignRelatedFeatures(self, relatedfeatures): self.related_features = {} - for related in relatedfeatures: - if related.SamplingFeatureTypeCV == 'Site': - self.related_features = related + if relatedfeatures: + for related in relatedfeatures: + if related.SamplingFeatureTypeCV == 'Site': + self.related_features = related @@ -887,7 +889,7 @@ def getDataSetsValues(self, ids=None, codes=None, uuids=None, dstype=None): return None - def getSamplingFeatureDatasets(self, ids=None, codes=None, uuids=None, dstype=None, type=None): + def getSamplingFeatureDatasets(self, ids=None, codes=None, uuids=None, dstype=None, sftype=None): """ Retrieve a list of Datasets associated with the given sampling feature data. @@ -899,7 +901,8 @@ def getSamplingFeatureDatasets(self, ids=None, codes=None, uuids=None, dstype=No uuids (list, optional): List of UUIDs string. dstype (str, optional): Type of Dataset from `controlled vocabulary name `_. - + sftype (str, optional): Type of SamplingFeature from + `controlled vocabulary name `_. 
Returns: list: List of DataSetsResults Objects associated with the given sampling feature @@ -911,18 +914,19 @@ def getSamplingFeatureDatasets(self, ids=None, codes=None, uuids=None, dstype=No >>> READ.getSamplingFeatureDatasets(uuids=['a6f114f1-5416-4606-ae10-23be32dbc202', ... '5396fdf3-ceb3-46b6-aaf9-454a37278bb4']) >>> READ.getSamplingFeatureDatasets(dstype='singleTimeSeries') + >>> READ.getSamplingFeatureDatasets(sftype='Specimen') """ # make sure one of the three arguments has been sent in - # if all(v is None for v in [ids, codes, uuids, type]): - # raise ValueError('Expected samplingFeatureID OR samplingFeatureUUID OR samplingFeatureCode OR samplingFeatureType ' - # 'argument') + if all(v is None for v in [ids, codes, uuids, sftype]): + raise ValueError('Expected samplingFeatureID OR samplingFeatureUUID OR samplingFeatureCode OR samplingFeatureType ' + 'argument') sf_query = self._session.query(SamplingFeatures) - if type: - sf_query = sf_query.filter(SamplingFeatures.SamplingFeatureTypeCV == type) + if sftype: + sf_query = sf_query.filter(SamplingFeatures.SamplingFeatureTypeCV == sftype) if ids: sf_query = sf_query.filter(SamplingFeatures.SamplingFeatureID.in_(ids)) if codes: @@ -934,7 +938,6 @@ def getSamplingFeatureDatasets(self, ids=None, codes=None, uuids=None, dstype=No for sf in sf_query.all(): sf_list.append(sf) - sfds = None try: sfds=[] for sf in sf_list: From fcef187eff5f98778612d2c3488ae0b3ae7a6168 Mon Sep 17 00:00:00 2001 From: lsetiawan Date: Wed, 20 Dec 2017 08:13:32 -0800 Subject: [PATCH 34/55] fix issue #132 by Miguel C Leon --- odm2api/ODM2/models.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/odm2api/ODM2/models.py b/odm2api/ODM2/models.py index 1b7a049..af30d3a 100644 --- a/odm2api/ODM2/models.py +++ b/odm2api/ODM2/models.py @@ -457,10 +457,10 @@ class DataLoggerProgramFiles(Base): class DataLoggerFiles(Base): DataLoggerFileID = Column('dataloggerfileid', Integer, primary_key=True, nullable=False) - 
ProgramID = Column('actionid', Integer, ForeignKey(DataLoggerProgramFiles.ProgramID), nullable=False) + ProgramID = Column('programid', Integer, ForeignKey(DataLoggerProgramFiles.ProgramID), nullable=False) DataLoggerFileName = Column('dataloggerfilename', String(255), nullable=False) - DataLoggerOutputFileDescription = Column('dataloggeroutputfiledescription', String(500)) - DataLoggerOutputFileLink = Column('dataloggeroutputfilelink', String(255)) + DataLoggerOutputFileDescription = Column('dataloggerfiledescription', String(500)) + DataLoggerOutputFileLink = Column('dataloggerfilelink', String(255)) ProgramObj = relationship(DataLoggerProgramFiles) @@ -513,7 +513,7 @@ class DataLoggerFileColumns(Base): nullable=False) ColumnLabel = Column('columnlabel', String(50), nullable=False) ColumnDescription = Column('columndescription', String(500)) - MeasurementEquation = Column('measurmentequation', String(255)) + MeasurementEquation = Column('measurementequation', String(255)) ScanInterval = Column('scaninterval', Float(50)) ScanIntervalUnitsID = Column('scanintervalunitsid', Integer, ForeignKey(Units.UnitsID)) RecordingInterval = Column('recordinginterval', Float(50)) From baee8b5d1720d1d5aa768d26e2a9e3f87242fddc Mon Sep 17 00:00:00 2001 From: lsetiawan Date: Fri, 5 Jan 2018 10:51:20 -0800 Subject: [PATCH 35/55] Fix dataframe column names --- odm2api/ODM2/services/readService.py | 36 +++++++++++++++++++--------- 1 file changed, 25 insertions(+), 11 deletions(-) diff --git a/odm2api/ODM2/services/readService.py b/odm2api/ODM2/services/readService.py index a7ed9f9..46c6c2f 100644 --- a/odm2api/ODM2/services/readService.py +++ b/odm2api/ODM2/services/readService.py @@ -120,10 +120,23 @@ def assignRelatedFeatures(self, relatedfeatures): self.related_features = related +class ReadODM2(serviceBase): + def _get_columns(self, model): + """Internal helper function to get a dictionary of a model column properties. + Args: + model (object): Sqlalchemy object, Ex. ODM2 model. 
+ Returns: + dict: Dictionary of column properties Ex. {'resultid': 'ResultID'} + + """ + from sqlalchemy.orm.properties import ColumnProperty + columns = [(prop.key.lower(), prop.key) for prop in model.__mapper__.iterate_properties if + isinstance(prop, ColumnProperty)] + + return dict(columns) -class ReadODM2(serviceBase): # Exists functions def resultExists(self, result): """ @@ -1275,25 +1288,25 @@ def getResultValues(self, resultids, starttime=None, endtime=None): * Pass an endtime - Returns a dataframe with the values before the given end time """ - type = self._session.query(Results).filter_by(ResultID=resultids[0]).first().ResultTypeCV + restype = self._session.query(Results).filter_by(ResultID=resultids[0]).first().ResultTypeCV ResultType = TimeSeriesResultValues - if 'categorical' in type.lower(): + if 'categorical' in restype.lower(): ResultType = CategoricalResultValues - elif 'measurement' in type.lower(): + elif 'measurement' in restype.lower(): ResultType = MeasurementResultValues - elif 'point' in type.lower(): + elif 'point' in restype.lower(): ResultType = PointCoverageResultValues - elif 'profile' in type.lower(): + elif 'profile' in restype.lower(): ResultType = ProfileResultValues - elif 'section' in type.lower(): + elif 'section' in restype.lower(): ResultType = SectionResults - elif 'spectra' in type.lower(): + elif 'spectra' in restype.lower(): ResultType = SpectraResultValues - elif 'time' in type.lower(): + elif 'time' in restype.lower(): ResultType = TimeSeriesResultValues - elif 'trajectory' in type.lower(): + elif 'trajectory' in restype.lower(): ResultType = TrajectoryResultValues - elif 'transect' in type.lower(): + elif 'transect' in restype.lower(): ResultType = TransectResultValues q = self._session.query(ResultType).filter(ResultType.ResultID.in_(resultids)) @@ -1310,6 +1323,7 @@ def getResultValues(self, resultids, starttime=None, endtime=None): con=self._session_factory.engine, params=query.params ) + df.columns = 
[self._get_columns(ResultType)[c] for c in df.columns] return df except Exception as e: print('Error running Query: {}'.format(e)) From 32519f6a7aabc20ff9906419a93dc9edfde112ee Mon Sep 17 00:00:00 2001 From: lsetiawan Date: Fri, 5 Jan 2018 11:07:35 -0800 Subject: [PATCH 36/55] Update docstrings --- odm2api/ODM2/services/readService.py | 28 ++++++++++++++++++---------- 1 file changed, 18 insertions(+), 10 deletions(-) diff --git a/odm2api/ODM2/services/readService.py b/odm2api/ODM2/services/readService.py index 46c6c2f..04145b3 100644 --- a/odm2api/ODM2/services/readService.py +++ b/odm2api/ODM2/services/readService.py @@ -1274,18 +1274,26 @@ def getResultDerivationEquations(self): """ return self._session.query(ResultDerivationEquations).all() - # Results - # ResultValues def getResultValues(self, resultids, starttime=None, endtime=None): """ - getResultValues(self, resultids, starttime=None, endtime=None) - * Pass in a list of ResultID - Returns a pandas dataframe object of type - that is specific to the result type - The resultids must be associated - with the same value type - * Pass a ResultID and a date range - returns a pandas dataframe object - of type that is specific to the result type with values between the input date range - * Pass a starttime - Returns a dataframe with the values after the given start time - * Pass an endtime - Returns a dataframe with the values before the given end time + Retrieve result values associated with the given result. + + **The resultids must be associated with the same result type** + Args: + resultids (list): List of SamplingFeatureIDs. + starttime (object, optional): Start time to filter by as datetime object. + endtime (object, optional): End time to filter by as datetime object. + + Returns: + DataFrame: Pandas dataframe of result values. 
+ + Examples: + >>> READ = ReadODM2(SESSION_FACTORY) + >>> READ.getResultValues(resultids=[10, 11]) + >>> READ.getResultValues(resultids=[100, 20, 34], starttime=datetime.today()) + >>> READ.getResultValues(resultids=[1, 2, 3, 4], + >>> starttime=datetime(2000, 01, 01), + >>> endtime=datetime(2003, 02, 01)) """ restype = self._session.query(Results).filter_by(ResultID=resultids[0]).first().ResultTypeCV From 3c132a32cd8b77aebb9df338a69d5bf7c2160d9e Mon Sep 17 00:00:00 2001 From: lsetiawan Date: Fri, 5 Jan 2018 12:31:33 -0800 Subject: [PATCH 37/55] fix builtins in annotations and results --- odm2api/ODM2/services/readService.py | 57 +++++++++++++++------------- 1 file changed, 30 insertions(+), 27 deletions(-) diff --git a/odm2api/ODM2/services/readService.py b/odm2api/ODM2/services/readService.py index a7ed9f9..3d5db70 100644 --- a/odm2api/ODM2/services/readService.py +++ b/odm2api/ODM2/services/readService.py @@ -38,6 +38,8 @@ from sqlalchemy import distinct, exists +import warnings + __author__ = 'sreeder' @@ -145,7 +147,7 @@ def resultExists(self, result): return None # Annotations - def getAnnotations(self, type=None, codes=None, ids=None): + def getAnnotations(self, annottype=None, codes=None, ids=None, **kwargs): """ * Pass Nothing - return a list of all objects * Pass AnnotationTypeCV - return a list of all objects of the fiven type @@ -155,34 +157,38 @@ def getAnnotations(self, type=None, codes=None, ids=None): """ # TODO What keywords do I use for type. a = Annotations - if type: - if type == 'action': + if 'type' in kwargs: + warnings.warn( + "The parameter 'type' is deprecated. 
Please use the annottype parameter instead.") + annottype = kwargs['type'] + if annottype: + if annottype == 'action': a = ActionAnnotations - elif type == 'categoricalresultvalue': + elif annottype == 'categoricalresultvalue': a = CategoricalResultValueAnnotations - elif type == 'equipmentannotation': + elif annottype == 'equipmentannotation': a = EquipmentAnnotations - elif type == 'measurementresultvalue': + elif annottype == 'measurementresultvalue': a = MeasurementResultValueAnnotations - elif type == 'method': + elif annottype == 'method': a = MethodAnnotations - elif type == 'pointcoverageresultvalue': + elif annottype == 'pointcoverageresultvalue': a = PointCoverageResultValueAnnotations - elif type == 'profileresultvalue': + elif annottype == 'profileresultvalue': a = ProfileResultValueAnnotations - elif type == 'result': + elif annottype == 'result': a = ResultAnnotations - elif type == 'samplingfeature': + elif annottype == 'samplingfeature': a = SamplingFeatureAnnotations - elif type == 'sectionresultvalue': + elif annottype == 'sectionresultvalue': a = SectionResultValueAnnotations - elif type == 'spectraresultvalue': + elif annottype == 'spectraresultvalue': a = SpectraResultValueAnnotations - elif type == 'timeseriesresultvalue': + elif annottype == 'timeseriesresultvalue': a = TimeSeriesResultValueAnnotations - elif type == 'trajectoryresultvalue': + elif annottype == 'trajectoryresultvalue': a = TrajectoryResultValueAnnotations - elif type == 'transectresultvalue': + elif annottype == 'transectresultvalue': a = TransectResultValueAnnotations try: query = self._session.query(a) @@ -678,8 +684,8 @@ def getAffiliations(self, ids=None, personfirst=None, personlast=None, orgcode=N return None # Results - def getResults(self, ids=None, type=None, restype = None, uuids=None, actionid=None, simulationid=None, sfid=None, - variableid=None, siteid=None, sfids=None, sfuuids=None, sfcodes=None): + def getResults(self, ids=None, restype = None, uuids=None, 
actionid=None, simulationid=None, + variableid=None, siteid=None, sfids=None, sfuuids=None, sfcodes=None, **kwargs): # TODO what if user sends in both type and actionid vs just actionid """Retrieve a list of Result objects. @@ -694,7 +700,6 @@ def getResults(self, ids=None, type=None, restype = None, uuids=None, actionid=N uuids (list, optional): List of UUIDs string. actionid (int, optional): ActionID. simulationid (int, optional): SimulationID. - sfid (int, optional): SamplingFeatureID. variableid (int, optional): VariableID. siteid (int, optional): SiteID. - goes through related features table and finds all of results recorded at the given site @@ -719,11 +724,10 @@ def getResults(self, ids=None, type=None, restype = None, uuids=None, actionid=N """ query = self._session.query(Results) - if type: - import warnings + if 'type' in kwargs: warnings.warn( - "The parameter 'type' is no longer be supported. Please use the restype parameter instead.") - query = query.filter_by(ResultTypeCV=type) + "The parameter 'type' is deprecated. Please use the restype parameter instead.") + restype = kwargs['type'] if restype: query = query.filter_by(ResultTypeCV=restype) if variableid: @@ -739,10 +743,9 @@ def getResults(self, ids=None, type=None, restype = None, uuids=None, actionid=N .filter_by(SimulationID=simulationid) if actionid: query = query.join(FeatureActions).filter_by(ActionID=actionid) - if sfid: - import warnings - warnings.warn("The parameter 'sfid' is no longer be supported. Please use the sfids parameter and send in a list.") - query = query.join(FeatureActions).filter_by(SamplingFeatureID=sfid) + if 'sfid' in kwargs: + warnings.warn("The parameter 'sfid' is deprecated. 
Please use the sfids parameter and send in a list.") + query = query.join(FeatureActions).filter_by(SamplingFeatureID=kwargs['sfid']) if sfids or sfcodes or sfuuids: sf_list = self.getSamplingFeatures(ids=sfids, codes=sfcodes, uuids=sfuuids) sfids = [] From b87dab25a581a764c42df0d231b16545600fdb4d Mon Sep 17 00:00:00 2001 From: lsetiawan Date: Fri, 5 Jan 2018 12:34:07 -0800 Subject: [PATCH 38/55] change ResultType to ResultValues --- odm2api/ODM2/services/readService.py | 28 ++++++++++++++-------------- 1 file changed, 14 insertions(+), 14 deletions(-) diff --git a/odm2api/ODM2/services/readService.py b/odm2api/ODM2/services/readService.py index 04145b3..eabcaf3 100644 --- a/odm2api/ODM2/services/readService.py +++ b/odm2api/ODM2/services/readService.py @@ -1297,31 +1297,31 @@ def getResultValues(self, resultids, starttime=None, endtime=None): """ restype = self._session.query(Results).filter_by(ResultID=resultids[0]).first().ResultTypeCV - ResultType = TimeSeriesResultValues + ResultValues = TimeSeriesResultValues if 'categorical' in restype.lower(): - ResultType = CategoricalResultValues + ResultValues = CategoricalResultValues elif 'measurement' in restype.lower(): - ResultType = MeasurementResultValues + ResultValues = MeasurementResultValues elif 'point' in restype.lower(): - ResultType = PointCoverageResultValues + ResultValues = PointCoverageResultValues elif 'profile' in restype.lower(): - ResultType = ProfileResultValues + ResultValues = ProfileResultValues elif 'section' in restype.lower(): - ResultType = SectionResults + ResultValues = SectionResults elif 'spectra' in restype.lower(): - ResultType = SpectraResultValues + ResultValues = SpectraResultValues elif 'time' in restype.lower(): - ResultType = TimeSeriesResultValues + ResultValues = TimeSeriesResultValues elif 'trajectory' in restype.lower(): - ResultType = TrajectoryResultValues + ResultValues = TrajectoryResultValues elif 'transect' in restype.lower(): - ResultType = TransectResultValues + 
ResultValues = TransectResultValues - q = self._session.query(ResultType).filter(ResultType.ResultID.in_(resultids)) + q = self._session.query(ResultValues).filter(ResultValues.ResultID.in_(resultids)) if starttime: - q = q.filter(ResultType.ValueDateTime >= starttime) + q = q.filter(ResultValues.ValueDateTime >= starttime) if endtime: - q = q.filter(ResultType.ValueDateTime <= endtime) + q = q.filter(ResultValues.ValueDateTime <= endtime) try: # F841 local variable 'vals' is assigned to but never used # vals = q.order_by(ResultType.ValueDateTime) @@ -1331,7 +1331,7 @@ def getResultValues(self, resultids, starttime=None, endtime=None): con=self._session_factory.engine, params=query.params ) - df.columns = [self._get_columns(ResultType)[c] for c in df.columns] + df.columns = [self._get_columns(ResultValues)[c] for c in df.columns] return df except Exception as e: print('Error running Query: {}'.format(e)) From edb02fab2b6a144611408d3200452dd400320072 Mon Sep 17 00:00:00 2001 From: lsetiawan Date: Fri, 5 Jan 2018 13:05:58 -0800 Subject: [PATCH 39/55] Fix the rest of the builtins, added warnings --- odm2api/ODM2/services/readService.py | 113 ++++++++++++++++++--------- 1 file changed, 74 insertions(+), 39 deletions(-) diff --git a/odm2api/ODM2/services/readService.py b/odm2api/ODM2/services/readService.py index 3d5db70..394b5b0 100644 --- a/odm2api/ODM2/services/readService.py +++ b/odm2api/ODM2/services/readService.py @@ -158,8 +158,8 @@ def getAnnotations(self, annottype=None, codes=None, ids=None, **kwargs): # TODO What keywords do I use for type. a = Annotations if 'type' in kwargs: - warnings.warn( - "The parameter 'type' is deprecated. Please use the annottype parameter instead.") + warnings.warn('The parameter \'type\' is deprecated. 
Please use the annottype parameter instead.', + DeprecationWarning, stacklevel=2) annottype = kwargs['type'] if annottype: if annottype == 'action': @@ -385,7 +385,7 @@ def getVariables(self, ids=None, codes=None, sitecode=None, results=False): return None # Method - def getMethods(self, ids=None, codes=None, type=None): + def getMethods(self, ids=None, codes=None, medtype=None, **kwargs): """ * Pass nothing - returns full list of method objects * Pass a list of MethodIDs - returns a single method object for each given id @@ -393,13 +393,17 @@ def getMethods(self, ids=None, codes=None, type=None): * Pass a MethodType - returns a list of method objects of the given MethodType """ + if 'type' in kwargs: + warnings.warn('The parameter \'type\' is deprecated. Please use the medtype parameter instead.', + DeprecationWarning, stacklevel=2) + medtype = kwargs['type'] q = self._session.query(Methods) if ids: q = q.filter(Methods.MethodID.in_(ids)) if codes: q = q.filter(Methods.MethodCode.in_(codes)) - if type: - q = q.filter_by(MethodTypeCV=type) + if medtype: + q = q.filter_by(MethodTypeCV=medtype) try: return q.all() @@ -442,7 +446,8 @@ def getProcessingLevels(self, ids=None, codes=None): return None # Sampling Feature - def getSamplingFeatures(self, ids=None, codes=None, uuids=None, type=None, wkt=None, results=False): + def getSamplingFeatures(self, ids=None, codes=None, uuids=None, + sftype=None, wkt=None, results=False, **kwargs): """Retrieve a list of Sampling Feature objects. If no arguments are passed to the function, or their values are None, @@ -452,7 +457,7 @@ def getSamplingFeatures(self, ids=None, codes=None, uuids=None, type=None, wkt=N ids (list, optional): List of SamplingFeatureIDs. codes (list, optional): List of SamplingFeature Codes. uuids (list, optional): List of UUIDs string. - type (str, optional): Type of Sampling Feature from + sftype (str, optional): Type of Sampling Feature from `controlled vocabulary name `_. 
wkt (str, optional): SamplingFeature Well Known Text. results (bool, optional): Whether or not you want to return only the @@ -473,6 +478,10 @@ def getSamplingFeatures(self, ids=None, codes=None, uuids=None, type=None, wkt=N >>> READ.getSamplingFeatures(type='Site', results=True) """ + if 'type' in kwargs: + warnings.warn('The parameter \'type\' is deprecated. Please use the sftype parameter instead.', + DeprecationWarning, stacklevel=2) + sftype = kwargs['type'] if results: try: fas = [x[0] for x in self._session.query(distinct(Results.FeatureActionID)).all()] @@ -487,8 +496,8 @@ def getSamplingFeatures(self, ids=None, codes=None, uuids=None, type=None, wkt=N q = self._session.query(SamplingFeatures) - if type: - q = q.filter_by(SamplingFeatureTypeCV=type) + if sftype: + q = q.filter_by(SamplingFeatureTypeCV=sftype) if ids: q = q.filter(SamplingFeatures.SamplingFeatureID.in_(ids)) if codes: @@ -535,7 +544,7 @@ def getRelatedSamplingFeatures(self, sfid=None, rfid=None, relationshiptype=None # Action - def getActions(self, ids=None, type=None, sfid=None): + def getActions(self, ids=None, acttype=None, sfid=None, **kwargs): """ * Pass nothing - returns a list of all Actions * Pass a list of Action ids - returns a list of Action objects @@ -544,12 +553,16 @@ def getActions(self, ids=None, type=None, sfid=None): associated with that Sampling feature ID, Found through featureAction table """ + if 'type' in kwargs: + warnings.warn('The parameter \'type\' is deprecated. 
Please use the acttype parameter instead.', + DeprecationWarning, stacklevel=2) + acttype = kwargs['type'] a = Actions - if type == 'equipment': + if acttype == 'equipment': a = EquipmentActions - elif type == 'calibration': + elif acttype == 'calibration': a = CalibrationActions - elif type == 'maintenance': + elif acttype == 'maintenance': a = MaintenanceActions q = self._session.query(a) @@ -581,7 +594,7 @@ def getRelatedActions(self, actionid=None): return None # Unit - def getUnits(self, ids=None, name=None, type=None): + def getUnits(self, ids=None, name=None, unittype=None, **kwargs): """ * Pass nothing - returns a list of all units objects * Pass a list of UnitsID - returns a single units object for the given id @@ -589,13 +602,17 @@ def getUnits(self, ids=None, name=None, type=None): * Pass a type- returns a list of all objects of the given type """ + if 'type' in kwargs: + warnings.warn('The parameter \'type\' is deprecated. Please use the unittype parameter instead.', + DeprecationWarning, stacklevel=2) + unittype = kwargs['type'] q = self._session.query(Units) if ids: q = q.filter(Units.UnitsID.in_(ids)) if name: q = q.filter(Units.UnitsName.ilike(name)) - if type: - q = q.filter(Units.UnitsTypeCV.ilike(type)) + if unittype: + q = q.filter(Units.UnitsTypeCV.ilike(unittype)) try: return q.all() except Exception as e: @@ -725,8 +742,8 @@ def getResults(self, ids=None, restype = None, uuids=None, actionid=None, simula query = self._session.query(Results) if 'type' in kwargs: - warnings.warn( - "The parameter 'type' is deprecated. Please use the restype parameter instead.") + warnings.warn('The parameter \'type\' is deprecated. 
Please use the restype parameter instead.', + DeprecationWarning, stacklevel=2) restype = kwargs['type'] if restype: query = query.filter_by(ResultTypeCV=restype) @@ -744,7 +761,9 @@ def getResults(self, ids=None, restype = None, uuids=None, actionid=None, simula if actionid: query = query.join(FeatureActions).filter_by(ActionID=actionid) if 'sfid' in kwargs: - warnings.warn("The parameter 'sfid' is deprecated. Please use the sfids parameter and send in a list.") + warnings.warn('The parameter \'sfid\' is deprecated. ' + 'Please use the sfids parameter instead and send in a list.', + DeprecationWarning, stacklevel=2) query = query.join(FeatureActions).filter_by(SamplingFeatureID=kwargs['sfid']) if sfids or sfcodes or sfuuids: sf_list = self.getSamplingFeatures(ids=sfids, codes=sfcodes, uuids=sfuuids) @@ -1011,7 +1030,7 @@ def getResultsDataQuality(self): # TODO Equipment Schema Queries # Equipment - def getEquipment(self, codes=None, type=None, sfid=None, actionid=None): + def getEquipment(self, codes=None, equiptype=None, sfid=None, actionid=None, **kwargs): """ * Pass nothing - returns a list of all Equipment objects * Pass a list of EquipmentCodes- return a list of all Equipment objects that match each of the codes @@ -1020,6 +1039,10 @@ def getEquipment(self, codes=None, type=None, sfid=None, actionid=None): * Pass an ActionID - returns a single Equipment object """ + if 'type' in kwargs: + warnings.warn('The parameter \'type\' is deprecated. 
Please use the equiptype parameter instead.', + DeprecationWarning, stacklevel=2) + equiptype = kwargs['type'] e = self._session.query(Equipment) if sfid: e = e.join(EquipmentUsed) \ @@ -1108,25 +1131,29 @@ def RelatedEquipment(self, code=None): return r.all() # Extension Properties - def getExtensionProperties(self, type=None): + def getExtensionProperties(self, exptype=None, **kwargs): """ * Pass nothing - return a list of all objects * Pass type- return a list of all objects of the given type """ # Todo what values to use for extensionproperties type + if 'type' in kwargs: + warnings.warn('The parameter \'type\' is deprecated. Please use the exptype parameter instead.', + DeprecationWarning, stacklevel=2) + exptype = kwargs['type'] e = ExtensionProperties - if type == 'action': + if exptype == 'action': e = ActionExtensionPropertyValues - elif type == 'citation': + elif exptype == 'citation': e = CitationExtensionPropertyValues - elif type == 'method': + elif exptype == 'method': e = MethodExtensionPropertyValues - elif type == 'result': + elif exptype == 'result': e = ResultExtensionPropertyValues - elif type == 'samplingfeature': + elif exptype == 'samplingfeature': e = SamplingFeatureExtensionPropertyValues - elif type == 'variable': + elif exptype == 'variable': e = VariableExtensionPropertyValues try: return self._session.query(e).all() @@ -1135,28 +1162,32 @@ def getExtensionProperties(self, type=None): return None # External Identifiers - def getExternalIdentifiers(self, type=None): + def getExternalIdentifiers(self, eitype=None, **kwargs): """ * Pass nothing - return a list of all objects * Pass type- return a list of all objects of the given type """ + if 'type' in kwargs: + warnings.warn('The parameter \'type\' is deprecated. 
Please use the eitype parameter instead.', + DeprecationWarning, stacklevel=2) + eitype = kwargs['type'] e = ExternalIdentifierSystems - if type.lowercase == 'citation': + if eitype.lowercase == 'citation': e = CitationExternalIdentifiers - elif type == 'method': + elif eitype == 'method': e = MethodExternalIdentifiers - elif type == 'person': + elif eitype == 'person': e = PersonExternalIdentifiers - elif type == 'referencematerial': + elif eitype == 'referencematerial': e = ReferenceMaterialExternalIdentifiers - elif type == 'samplingfeature': + elif eitype == 'samplingfeature': e = SamplingFeatureExternalIdentifiers - elif type == 'spatialreference': + elif eitype == 'spatialreference': e = SpatialReferenceExternalIdentifiers - elif type == 'taxonomicclassifier': + elif eitype == 'taxonomicclassifier': e = TaxonomicClassifierExternalIdentifiers - elif type == 'variable': + elif eitype == 'variable': e = VariableExternalIdentifiers try: return self._session.query(e).all() @@ -1372,16 +1403,20 @@ def getModels(self, codes=None): print('Error running Query: {}'.format(e)) return None - def getRelatedModels(self, id=None, code=None): + def getRelatedModels(self, modid=None, code=None, **kwargs): """ getRelatedModels(self, id=None, code=None) * Pass a ModelID - get a list of converter objects related to the converter having ModelID * Pass a ModelCode - get a list of converter objects related to the converter having ModeCode """ + if 'id' in kwargs: + warnings.warn('The parameter \'id\' is deprecated. 
Please use the modid parameter instead.', + DeprecationWarning, stacklevel=2) + modid = kwargs['type'] m = self._session.query(Models).select_from(RelatedModels).join(RelatedModels.ModelObj) - if id: - m = m.filter(RelatedModels.ModelID == id) + if modid: + m = m.filter(RelatedModels.ModelID == modid) if code: m = m.filter(Models.ModelCode == code) From c493d61ff9bd476871f14c1389447064445b4ccd Mon Sep 17 00:00:00 2001 From: lsetiawan Date: Fri, 5 Jan 2018 13:15:37 -0800 Subject: [PATCH 40/55] Fix mistype --- odm2api/ODM2/services/readService.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/odm2api/ODM2/services/readService.py b/odm2api/ODM2/services/readService.py index 394b5b0..32dea3b 100644 --- a/odm2api/ODM2/services/readService.py +++ b/odm2api/ODM2/services/readService.py @@ -1413,7 +1413,7 @@ def getRelatedModels(self, modid=None, code=None, **kwargs): if 'id' in kwargs: warnings.warn('The parameter \'id\' is deprecated. Please use the modid parameter instead.', DeprecationWarning, stacklevel=2) - modid = kwargs['type'] + modid = kwargs['id'] m = self._session.query(Models).select_from(RelatedModels).join(RelatedModels.ModelObj) if modid: m = m.filter(RelatedModels.ModelID == modid) From 2d92edd3b425c6339a2c7c8769892394d421bdb2 Mon Sep 17 00:00:00 2001 From: lsetiawan Date: Fri, 5 Jan 2018 12:31:33 -0800 Subject: [PATCH 41/55] fix builtins in annotations and results --- odm2api/ODM2/services/readService.py | 57 +++++++++++++++------------- 1 file changed, 30 insertions(+), 27 deletions(-) diff --git a/odm2api/ODM2/services/readService.py b/odm2api/ODM2/services/readService.py index eabcaf3..9f45793 100644 --- a/odm2api/ODM2/services/readService.py +++ b/odm2api/ODM2/services/readService.py @@ -38,6 +38,8 @@ from sqlalchemy import distinct, exists +import warnings + __author__ = 'sreeder' @@ -158,7 +160,7 @@ def resultExists(self, result): return None # Annotations - def getAnnotations(self, type=None, codes=None, ids=None): + def 
getAnnotations(self, annottype=None, codes=None, ids=None, **kwargs): """ * Pass Nothing - return a list of all objects * Pass AnnotationTypeCV - return a list of all objects of the fiven type @@ -168,34 +170,38 @@ def getAnnotations(self, type=None, codes=None, ids=None): """ # TODO What keywords do I use for type. a = Annotations - if type: - if type == 'action': + if 'type' in kwargs: + warnings.warn( + "The parameter 'type' is deprecated. Please use the annottype parameter instead.") + annottype = kwargs['type'] + if annottype: + if annottype == 'action': a = ActionAnnotations - elif type == 'categoricalresultvalue': + elif annottype == 'categoricalresultvalue': a = CategoricalResultValueAnnotations - elif type == 'equipmentannotation': + elif annottype == 'equipmentannotation': a = EquipmentAnnotations - elif type == 'measurementresultvalue': + elif annottype == 'measurementresultvalue': a = MeasurementResultValueAnnotations - elif type == 'method': + elif annottype == 'method': a = MethodAnnotations - elif type == 'pointcoverageresultvalue': + elif annottype == 'pointcoverageresultvalue': a = PointCoverageResultValueAnnotations - elif type == 'profileresultvalue': + elif annottype == 'profileresultvalue': a = ProfileResultValueAnnotations - elif type == 'result': + elif annottype == 'result': a = ResultAnnotations - elif type == 'samplingfeature': + elif annottype == 'samplingfeature': a = SamplingFeatureAnnotations - elif type == 'sectionresultvalue': + elif annottype == 'sectionresultvalue': a = SectionResultValueAnnotations - elif type == 'spectraresultvalue': + elif annottype == 'spectraresultvalue': a = SpectraResultValueAnnotations - elif type == 'timeseriesresultvalue': + elif annottype == 'timeseriesresultvalue': a = TimeSeriesResultValueAnnotations - elif type == 'trajectoryresultvalue': + elif annottype == 'trajectoryresultvalue': a = TrajectoryResultValueAnnotations - elif type == 'transectresultvalue': + elif annottype == 'transectresultvalue': a 
= TransectResultValueAnnotations try: query = self._session.query(a) @@ -691,8 +697,8 @@ def getAffiliations(self, ids=None, personfirst=None, personlast=None, orgcode=N return None # Results - def getResults(self, ids=None, type=None, restype = None, uuids=None, actionid=None, simulationid=None, sfid=None, - variableid=None, siteid=None, sfids=None, sfuuids=None, sfcodes=None): + def getResults(self, ids=None, restype = None, uuids=None, actionid=None, simulationid=None, + variableid=None, siteid=None, sfids=None, sfuuids=None, sfcodes=None, **kwargs): # TODO what if user sends in both type and actionid vs just actionid """Retrieve a list of Result objects. @@ -707,7 +713,6 @@ def getResults(self, ids=None, type=None, restype = None, uuids=None, actionid=N uuids (list, optional): List of UUIDs string. actionid (int, optional): ActionID. simulationid (int, optional): SimulationID. - sfid (int, optional): SamplingFeatureID. variableid (int, optional): VariableID. siteid (int, optional): SiteID. - goes through related features table and finds all of results recorded at the given site @@ -732,11 +737,10 @@ def getResults(self, ids=None, type=None, restype = None, uuids=None, actionid=N """ query = self._session.query(Results) - if type: - import warnings + if 'type' in kwargs: warnings.warn( - "The parameter 'type' is no longer be supported. Please use the restype parameter instead.") - query = query.filter_by(ResultTypeCV=type) + "The parameter 'type' is deprecated. Please use the restype parameter instead.") + restype = kwargs['type'] if restype: query = query.filter_by(ResultTypeCV=restype) if variableid: @@ -752,10 +756,9 @@ def getResults(self, ids=None, type=None, restype = None, uuids=None, actionid=N .filter_by(SimulationID=simulationid) if actionid: query = query.join(FeatureActions).filter_by(ActionID=actionid) - if sfid: - import warnings - warnings.warn("The parameter 'sfid' is no longer be supported. 
Please use the sfids parameter and send in a list.") - query = query.join(FeatureActions).filter_by(SamplingFeatureID=sfid) + if 'sfid' in kwargs: + warnings.warn("The parameter 'sfid' is deprecated. Please use the sfids parameter and send in a list.") + query = query.join(FeatureActions).filter_by(SamplingFeatureID=kwargs['sfid']) if sfids or sfcodes or sfuuids: sf_list = self.getSamplingFeatures(ids=sfids, codes=sfcodes, uuids=sfuuids) sfids = [] From b8224814427680d22f9a3f19560b42abe3ced2b2 Mon Sep 17 00:00:00 2001 From: lsetiawan Date: Fri, 5 Jan 2018 13:05:58 -0800 Subject: [PATCH 42/55] Fix the rest of the builtins, added warnings --- odm2api/ODM2/services/readService.py | 113 ++++++++++++++++++--------- 1 file changed, 74 insertions(+), 39 deletions(-) diff --git a/odm2api/ODM2/services/readService.py b/odm2api/ODM2/services/readService.py index 9f45793..7f8e694 100644 --- a/odm2api/ODM2/services/readService.py +++ b/odm2api/ODM2/services/readService.py @@ -171,8 +171,8 @@ def getAnnotations(self, annottype=None, codes=None, ids=None, **kwargs): # TODO What keywords do I use for type. a = Annotations if 'type' in kwargs: - warnings.warn( - "The parameter 'type' is deprecated. Please use the annottype parameter instead.") + warnings.warn('The parameter \'type\' is deprecated. 
Please use the annottype parameter instead.', + DeprecationWarning, stacklevel=2) annottype = kwargs['type'] if annottype: if annottype == 'action': @@ -398,7 +398,7 @@ def getVariables(self, ids=None, codes=None, sitecode=None, results=False): return None # Method - def getMethods(self, ids=None, codes=None, type=None): + def getMethods(self, ids=None, codes=None, medtype=None, **kwargs): """ * Pass nothing - returns full list of method objects * Pass a list of MethodIDs - returns a single method object for each given id @@ -406,13 +406,17 @@ def getMethods(self, ids=None, codes=None, type=None): * Pass a MethodType - returns a list of method objects of the given MethodType """ + if 'type' in kwargs: + warnings.warn('The parameter \'type\' is deprecated. Please use the medtype parameter instead.', + DeprecationWarning, stacklevel=2) + medtype = kwargs['type'] q = self._session.query(Methods) if ids: q = q.filter(Methods.MethodID.in_(ids)) if codes: q = q.filter(Methods.MethodCode.in_(codes)) - if type: - q = q.filter_by(MethodTypeCV=type) + if medtype: + q = q.filter_by(MethodTypeCV=medtype) try: return q.all() @@ -455,7 +459,8 @@ def getProcessingLevels(self, ids=None, codes=None): return None # Sampling Feature - def getSamplingFeatures(self, ids=None, codes=None, uuids=None, type=None, wkt=None, results=False): + def getSamplingFeatures(self, ids=None, codes=None, uuids=None, + sftype=None, wkt=None, results=False, **kwargs): """Retrieve a list of Sampling Feature objects. If no arguments are passed to the function, or their values are None, @@ -465,7 +470,7 @@ def getSamplingFeatures(self, ids=None, codes=None, uuids=None, type=None, wkt=N ids (list, optional): List of SamplingFeatureIDs. codes (list, optional): List of SamplingFeature Codes. uuids (list, optional): List of UUIDs string. - type (str, optional): Type of Sampling Feature from + sftype (str, optional): Type of Sampling Feature from `controlled vocabulary name `_. 
wkt (str, optional): SamplingFeature Well Known Text. results (bool, optional): Whether or not you want to return only the @@ -486,6 +491,10 @@ def getSamplingFeatures(self, ids=None, codes=None, uuids=None, type=None, wkt=N >>> READ.getSamplingFeatures(type='Site', results=True) """ + if 'type' in kwargs: + warnings.warn('The parameter \'type\' is deprecated. Please use the sftype parameter instead.', + DeprecationWarning, stacklevel=2) + sftype = kwargs['type'] if results: try: fas = [x[0] for x in self._session.query(distinct(Results.FeatureActionID)).all()] @@ -500,8 +509,8 @@ def getSamplingFeatures(self, ids=None, codes=None, uuids=None, type=None, wkt=N q = self._session.query(SamplingFeatures) - if type: - q = q.filter_by(SamplingFeatureTypeCV=type) + if sftype: + q = q.filter_by(SamplingFeatureTypeCV=sftype) if ids: q = q.filter(SamplingFeatures.SamplingFeatureID.in_(ids)) if codes: @@ -548,7 +557,7 @@ def getRelatedSamplingFeatures(self, sfid=None, rfid=None, relationshiptype=None # Action - def getActions(self, ids=None, type=None, sfid=None): + def getActions(self, ids=None, acttype=None, sfid=None, **kwargs): """ * Pass nothing - returns a list of all Actions * Pass a list of Action ids - returns a list of Action objects @@ -557,12 +566,16 @@ def getActions(self, ids=None, type=None, sfid=None): associated with that Sampling feature ID, Found through featureAction table """ + if 'type' in kwargs: + warnings.warn('The parameter \'type\' is deprecated. 
Please use the acttype parameter instead.', + DeprecationWarning, stacklevel=2) + acttype = kwargs['type'] a = Actions - if type == 'equipment': + if acttype == 'equipment': a = EquipmentActions - elif type == 'calibration': + elif acttype == 'calibration': a = CalibrationActions - elif type == 'maintenance': + elif acttype == 'maintenance': a = MaintenanceActions q = self._session.query(a) @@ -594,7 +607,7 @@ def getRelatedActions(self, actionid=None): return None # Unit - def getUnits(self, ids=None, name=None, type=None): + def getUnits(self, ids=None, name=None, unittype=None, **kwargs): """ * Pass nothing - returns a list of all units objects * Pass a list of UnitsID - returns a single units object for the given id @@ -602,13 +615,17 @@ def getUnits(self, ids=None, name=None, type=None): * Pass a type- returns a list of all objects of the given type """ + if 'type' in kwargs: + warnings.warn('The parameter \'type\' is deprecated. Please use the unittype parameter instead.', + DeprecationWarning, stacklevel=2) + unittype = kwargs['type'] q = self._session.query(Units) if ids: q = q.filter(Units.UnitsID.in_(ids)) if name: q = q.filter(Units.UnitsName.ilike(name)) - if type: - q = q.filter(Units.UnitsTypeCV.ilike(type)) + if unittype: + q = q.filter(Units.UnitsTypeCV.ilike(unittype)) try: return q.all() except Exception as e: @@ -738,8 +755,8 @@ def getResults(self, ids=None, restype = None, uuids=None, actionid=None, simula query = self._session.query(Results) if 'type' in kwargs: - warnings.warn( - "The parameter 'type' is deprecated. Please use the restype parameter instead.") + warnings.warn('The parameter \'type\' is deprecated. 
Please use the restype parameter instead.', + DeprecationWarning, stacklevel=2) restype = kwargs['type'] if restype: query = query.filter_by(ResultTypeCV=restype) @@ -757,7 +774,9 @@ def getResults(self, ids=None, restype = None, uuids=None, actionid=None, simula if actionid: query = query.join(FeatureActions).filter_by(ActionID=actionid) if 'sfid' in kwargs: - warnings.warn("The parameter 'sfid' is deprecated. Please use the sfids parameter and send in a list.") + warnings.warn('The parameter \'sfid\' is deprecated. ' + 'Please use the sfids parameter instead and send in a list.', + DeprecationWarning, stacklevel=2) query = query.join(FeatureActions).filter_by(SamplingFeatureID=kwargs['sfid']) if sfids or sfcodes or sfuuids: sf_list = self.getSamplingFeatures(ids=sfids, codes=sfcodes, uuids=sfuuids) @@ -1024,7 +1043,7 @@ def getResultsDataQuality(self): # TODO Equipment Schema Queries # Equipment - def getEquipment(self, codes=None, type=None, sfid=None, actionid=None): + def getEquipment(self, codes=None, equiptype=None, sfid=None, actionid=None, **kwargs): """ * Pass nothing - returns a list of all Equipment objects * Pass a list of EquipmentCodes- return a list of all Equipment objects that match each of the codes @@ -1033,6 +1052,10 @@ def getEquipment(self, codes=None, type=None, sfid=None, actionid=None): * Pass an ActionID - returns a single Equipment object """ + if 'type' in kwargs: + warnings.warn('The parameter \'type\' is deprecated. 
Please use the equiptype parameter instead.', + DeprecationWarning, stacklevel=2) + equiptype = kwargs['type'] e = self._session.query(Equipment) if sfid: e = e.join(EquipmentUsed) \ @@ -1121,25 +1144,29 @@ def RelatedEquipment(self, code=None): return r.all() # Extension Properties - def getExtensionProperties(self, type=None): + def getExtensionProperties(self, exptype=None, **kwargs): """ * Pass nothing - return a list of all objects * Pass type- return a list of all objects of the given type """ # Todo what values to use for extensionproperties type + if 'type' in kwargs: + warnings.warn('The parameter \'type\' is deprecated. Please use the exptype parameter instead.', + DeprecationWarning, stacklevel=2) + exptype = kwargs['type'] e = ExtensionProperties - if type == 'action': + if exptype == 'action': e = ActionExtensionPropertyValues - elif type == 'citation': + elif exptype == 'citation': e = CitationExtensionPropertyValues - elif type == 'method': + elif exptype == 'method': e = MethodExtensionPropertyValues - elif type == 'result': + elif exptype == 'result': e = ResultExtensionPropertyValues - elif type == 'samplingfeature': + elif exptype == 'samplingfeature': e = SamplingFeatureExtensionPropertyValues - elif type == 'variable': + elif exptype == 'variable': e = VariableExtensionPropertyValues try: return self._session.query(e).all() @@ -1148,28 +1175,32 @@ def getExtensionProperties(self, type=None): return None # External Identifiers - def getExternalIdentifiers(self, type=None): + def getExternalIdentifiers(self, eitype=None, **kwargs): """ * Pass nothing - return a list of all objects * Pass type- return a list of all objects of the given type """ + if 'type' in kwargs: + warnings.warn('The parameter \'type\' is deprecated. 
Please use the eitype parameter instead.', + DeprecationWarning, stacklevel=2) + eitype = kwargs['type'] e = ExternalIdentifierSystems - if type.lowercase == 'citation': + if eitype.lowercase == 'citation': e = CitationExternalIdentifiers - elif type == 'method': + elif eitype == 'method': e = MethodExternalIdentifiers - elif type == 'person': + elif eitype == 'person': e = PersonExternalIdentifiers - elif type == 'referencematerial': + elif eitype == 'referencematerial': e = ReferenceMaterialExternalIdentifiers - elif type == 'samplingfeature': + elif eitype == 'samplingfeature': e = SamplingFeatureExternalIdentifiers - elif type == 'spatialreference': + elif eitype == 'spatialreference': e = SpatialReferenceExternalIdentifiers - elif type == 'taxonomicclassifier': + elif eitype == 'taxonomicclassifier': e = TaxonomicClassifierExternalIdentifiers - elif type == 'variable': + elif eitype == 'variable': e = VariableExternalIdentifiers try: return self._session.query(e).all() @@ -1394,16 +1425,20 @@ def getModels(self, codes=None): print('Error running Query: {}'.format(e)) return None - def getRelatedModels(self, id=None, code=None): + def getRelatedModels(self, modid=None, code=None, **kwargs): """ getRelatedModels(self, id=None, code=None) * Pass a ModelID - get a list of converter objects related to the converter having ModelID * Pass a ModelCode - get a list of converter objects related to the converter having ModeCode """ + if 'id' in kwargs: + warnings.warn('The parameter \'id\' is deprecated. 
Please use the modid parameter instead.', + DeprecationWarning, stacklevel=2) + modid = kwargs['type'] m = self._session.query(Models).select_from(RelatedModels).join(RelatedModels.ModelObj) - if id: - m = m.filter(RelatedModels.ModelID == id) + if modid: + m = m.filter(RelatedModels.ModelID == modid) if code: m = m.filter(Models.ModelCode == code) From 005f4233736c79f7fc9a6bb6c9e62fe64e1240c6 Mon Sep 17 00:00:00 2001 From: lsetiawan Date: Fri, 5 Jan 2018 13:15:37 -0800 Subject: [PATCH 43/55] Fix mistype --- odm2api/ODM2/services/readService.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/odm2api/ODM2/services/readService.py b/odm2api/ODM2/services/readService.py index 7f8e694..e7450c7 100644 --- a/odm2api/ODM2/services/readService.py +++ b/odm2api/ODM2/services/readService.py @@ -1435,7 +1435,7 @@ def getRelatedModels(self, modid=None, code=None, **kwargs): if 'id' in kwargs: warnings.warn('The parameter \'id\' is deprecated. Please use the modid parameter instead.', DeprecationWarning, stacklevel=2) - modid = kwargs['type'] + modid = kwargs['id'] m = self._session.query(Models).select_from(RelatedModels).join(RelatedModels.ModelObj) if modid: m = m.filter(RelatedModels.ModelID == modid) From 335254173ff63af893ae7f008b8ca65921e0d9fa Mon Sep 17 00:00:00 2001 From: lsetiawan Date: Fri, 5 Jan 2018 14:35:57 -0800 Subject: [PATCH 44/55] Add kwargs checking, show warnings for unexpected --- odm2api/ODM2/services/readService.py | 33 +++++++++++++++++++++++----- 1 file changed, 28 insertions(+), 5 deletions(-) diff --git a/odm2api/ODM2/services/readService.py b/odm2api/ODM2/services/readService.py index e7450c7..f5fca82 100644 --- a/odm2api/ODM2/services/readService.py +++ b/odm2api/ODM2/services/readService.py @@ -139,6 +139,19 @@ def _get_columns(self, model): return dict(columns) + def _check_kwargs(self, args, kwargs): + """Internal helper function to check for unused keyword arguments + + Args: + args (list): List of expected, valid arguments. 
+ kwargs (dict): Dictionary of keyword arguments from user + Returns: + None + """ + invkwd = filter(lambda x: x not in args, kwargs.keys()) + if invkwd: + warnings.warn('Got unexpected keyword argument(s) {}'.format(','.join(invkwd)), stacklevel=2) + # Exists functions def resultExists(self, result): """ @@ -170,6 +183,7 @@ def getAnnotations(self, annottype=None, codes=None, ids=None, **kwargs): """ # TODO What keywords do I use for type. a = Annotations + self._check_kwargs(['type'], kwargs) if 'type' in kwargs: warnings.warn('The parameter \'type\' is deprecated. Please use the annottype parameter instead.', DeprecationWarning, stacklevel=2) @@ -398,7 +412,7 @@ def getVariables(self, ids=None, codes=None, sitecode=None, results=False): return None # Method - def getMethods(self, ids=None, codes=None, medtype=None, **kwargs): + def getMethods(self, ids=None, codes=None, methodtype=None, **kwargs): """ * Pass nothing - returns full list of method objects * Pass a list of MethodIDs - returns a single method object for each given id @@ -406,17 +420,19 @@ def getMethods(self, ids=None, codes=None, medtype=None, **kwargs): * Pass a MethodType - returns a list of method objects of the given MethodType """ + self._check_kwargs(['type'], kwargs) if 'type' in kwargs: warnings.warn('The parameter \'type\' is deprecated. Please use the medtype parameter instead.', DeprecationWarning, stacklevel=2) - medtype = kwargs['type'] + methodtype = kwargs['type'] + q = self._session.query(Methods) if ids: q = q.filter(Methods.MethodID.in_(ids)) if codes: q = q.filter(Methods.MethodCode.in_(codes)) - if medtype: - q = q.filter_by(MethodTypeCV=medtype) + if methodtype: + q = q.filter_by(MethodTypeCV=methodtype) try: return q.all() @@ -491,6 +507,7 @@ def getSamplingFeatures(self, ids=None, codes=None, uuids=None, >>> READ.getSamplingFeatures(type='Site', results=True) """ + self._check_kwargs(['type'], kwargs) if 'type' in kwargs: warnings.warn('The parameter \'type\' is deprecated. 
Please use the sftype parameter instead.', DeprecationWarning, stacklevel=2) @@ -566,6 +583,7 @@ def getActions(self, ids=None, acttype=None, sfid=None, **kwargs): associated with that Sampling feature ID, Found through featureAction table """ + self._check_kwargs(['type'], kwargs) if 'type' in kwargs: warnings.warn('The parameter \'type\' is deprecated. Please use the acttype parameter instead.', DeprecationWarning, stacklevel=2) @@ -615,6 +633,7 @@ def getUnits(self, ids=None, name=None, unittype=None, **kwargs): * Pass a type- returns a list of all objects of the given type """ + self._check_kwargs(['type'], kwargs) if 'type' in kwargs: warnings.warn('The parameter \'type\' is deprecated. Please use the unittype parameter instead.', DeprecationWarning, stacklevel=2) @@ -753,7 +772,7 @@ def getResults(self, ids=None, restype = None, uuids=None, actionid=None, simula """ query = self._session.query(Results) - + self._check_kwargs(['type', 'sfid'], kwargs) if 'type' in kwargs: warnings.warn('The parameter \'type\' is deprecated. Please use the restype parameter instead.', DeprecationWarning, stacklevel=2) @@ -1052,6 +1071,7 @@ def getEquipment(self, codes=None, equiptype=None, sfid=None, actionid=None, **k * Pass an ActionID - returns a single Equipment object """ + self._check_kwargs(['type'], kwargs) if 'type' in kwargs: warnings.warn('The parameter \'type\' is deprecated. Please use the equiptype parameter instead.', DeprecationWarning, stacklevel=2) @@ -1151,6 +1171,7 @@ def getExtensionProperties(self, exptype=None, **kwargs): """ # Todo what values to use for extensionproperties type + self._check_kwargs(['type'], kwargs) if 'type' in kwargs: warnings.warn('The parameter \'type\' is deprecated. 
Please use the exptype parameter instead.', DeprecationWarning, stacklevel=2) @@ -1181,6 +1202,7 @@ def getExternalIdentifiers(self, eitype=None, **kwargs): * Pass type- return a list of all objects of the given type """ + self._check_kwargs(['type'], kwargs) if 'type' in kwargs: warnings.warn('The parameter \'type\' is deprecated. Please use the eitype parameter instead.', DeprecationWarning, stacklevel=2) @@ -1432,6 +1454,7 @@ def getRelatedModels(self, modid=None, code=None, **kwargs): * Pass a ModelCode - get a list of converter objects related to the converter having ModeCode """ + self._check_kwargs(['id'], kwargs) if 'id' in kwargs: warnings.warn('The parameter \'id\' is deprecated. Please use the modid parameter instead.', DeprecationWarning, stacklevel=2) From 9ca745940a3af50215e5b86dd2e157d047d509be Mon Sep 17 00:00:00 2001 From: lsetiawan Date: Fri, 5 Jan 2018 15:03:45 -0800 Subject: [PATCH 45/55] Missed builtin, should be last one --- odm2api/ODM2/services/readService.py | 63 +++++++++++++++------------- 1 file changed, 34 insertions(+), 29 deletions(-) diff --git a/odm2api/ODM2/services/readService.py b/odm2api/ODM2/services/readService.py index f5fca82..75b8e2f 100644 --- a/odm2api/ODM2/services/readService.py +++ b/odm2api/ODM2/services/readService.py @@ -229,66 +229,71 @@ def getAnnotations(self, annottype=None, codes=None, ids=None, **kwargs): return None # CV - def getCVs(self, type): + def getCVs(self, cvtype, **kwargs): """ getCVs(self, type): * Pass CVType - return a list of all objects of the given type """ - CV = CVActionType - if type == 'actiontype': + self._check_kwargs(['type'], kwargs) + if 'type' in kwargs: + warnings.warn('The parameter \'type\' is deprecated. 
Please use the cvtype parameter instead.', + DeprecationWarning, stacklevel=2) + cvtype = kwargs['type'] + + if cvtype == 'actiontype': CV = CVActionType - elif type == 'aggregationstatistic': + elif cvtype == 'aggregationstatistic': CV = CVAggregationStatistic - elif type == 'annotationtype': + elif cvtype == 'annotationtype': CV = CVAnnotationType - elif type == 'censorcode': + elif cvtype == 'censorcode': CV = CVCensorCode - elif type == 'dataqualitytype': + elif cvtype == 'dataqualitytype': CV = CVDataQualityType - elif type == 'dataset type': + elif cvtype == 'dataset type': CV = CVDataSetType - elif type == 'Directive Type': + elif cvtype == 'Directive Type': CV = CVDirectiveType - elif type == 'Elevation Datum': + elif cvtype == 'Elevation Datum': CV = CVElevationDatum - elif type == 'Equipment Type': + elif cvtype == 'Equipment Type': CV = CVEquipmentType - elif type == 'Medium': + elif cvtype == 'Medium': CV = CVMediumType - elif type == 'Method Type': + elif cvtype == 'Method Type': CV = CVMethodType - elif type == 'Organization Type': + elif cvtype == 'Organization Type': CV = CVOrganizationType - elif type == 'Property Data Type': + elif cvtype == 'Property Data Type': CV = CVPropertyDataType - elif type == 'Quality Code': + elif cvtype == 'Quality Code': CV = CVQualityCode - elif type == 'Relationship Type': + elif cvtype == 'Relationship Type': CV = CVRelationshipType - elif type == 'Result Type': + elif cvtype == 'Result Type': CV = CVResultType - elif type == 'Sampling Feature Geo-type': + elif cvtype == 'Sampling Feature Geo-type': CV = CVSamplingFeatureGeoType - elif type == 'Sampling Feature Type': + elif cvtype == 'Sampling Feature Type': CV = CVSamplingFeatureType - elif type == 'Site Type': + elif cvtype == 'Site Type': CV = CVSiteType - elif type == 'Spatial Offset Type': + elif cvtype == 'Spatial Offset Type': CV = CVSpatialOffsetType - elif type == 'Speciation': + elif cvtype == 'Speciation': CV = CVSpeciation - elif type == 'Specimen 
Type': + elif cvtype == 'Specimen Type': CV = CVSpecimenType - elif type == 'Status': + elif cvtype == 'Status': CV = CVStatus - elif type == 'Taxonomic Classifier Type': + elif cvtype == 'Taxonomic Classifier Type': CV = CVTaxonomicClassifierType - elif type == 'Units Type': + elif cvtype == 'Units Type': CV = CVUnitsType - elif type == 'Variable Name': + elif cvtype == 'Variable Name': CV = CVVariableName - elif type == 'Variable Type': + elif cvtype == 'Variable Type': CV = CVVariableType else: return None From fcc92864b38055a26727525736ccda12f449bca4 Mon Sep 17 00:00:00 2001 From: lsetiawan Date: Mon, 8 Jan 2018 14:43:33 -0800 Subject: [PATCH 46/55] Allow sfid to be None to work with older code --- odm2api/ODM2/services/readService.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/odm2api/ODM2/services/readService.py b/odm2api/ODM2/services/readService.py index 75b8e2f..ec183d1 100644 --- a/odm2api/ODM2/services/readService.py +++ b/odm2api/ODM2/services/readService.py @@ -801,7 +801,8 @@ def getResults(self, ids=None, restype = None, uuids=None, actionid=None, simula warnings.warn('The parameter \'sfid\' is deprecated. 
' 'Please use the sfids parameter instead and send in a list.', DeprecationWarning, stacklevel=2) - query = query.join(FeatureActions).filter_by(SamplingFeatureID=kwargs['sfid']) + if kwargs['sfid']: + query = query.join(FeatureActions).filter_by(SamplingFeatureID=kwargs['sfid']) if sfids or sfcodes or sfuuids: sf_list = self.getSamplingFeatures(ids=sfids, codes=sfcodes, uuids=sfuuids) sfids = [] From fba24a51dc762357609cdbb6576a0df91cc3c308 Mon Sep 17 00:00:00 2001 From: lsetiawan Date: Mon, 8 Jan 2018 15:55:04 -0800 Subject: [PATCH 47/55] Cleanup readServices and base --- odm2api/ODM2/services/readService.py | 100 ++++++++++++++------------- odm2api/base.py | 8 +-- 2 files changed, 55 insertions(+), 53 deletions(-) diff --git a/odm2api/ODM2/services/readService.py b/odm2api/ODM2/services/readService.py index 75b8e2f..8383062 100644 --- a/odm2api/ODM2/services/readService.py +++ b/odm2api/ODM2/services/readService.py @@ -1,5 +1,7 @@ from __future__ import (absolute_import, division, print_function) +import warnings + from odm2api.ODM2 import serviceBase from odm2api.ODM2.models import ( ActionAnnotations, ActionDirectives, ActionExtensionPropertyValues, Actions, @@ -29,7 +31,7 @@ SpatialReferenceExternalIdentifiers, SpatialReferences, SpecimenBatchPositions, SpectraResultValueAnnotations, SpectraResultValues, TaxonomicClassifierExternalIdentifiers, TaxonomicClassifiers, TimeSeriesResultValueAnnotations, TimeSeriesResultValues, - TimeSeriesResults, TrajectoryResultValueAnnotations, TrajectoryResultValues, + TrajectoryResultValueAnnotations, TrajectoryResultValues, TransectResultValueAnnotations, TransectResultValues, Units, VariableExtensionPropertyValues, VariableExternalIdentifiers, Variables, ) @@ -38,8 +40,6 @@ from sqlalchemy import distinct, exists -import warnings - __author__ = 'sreeder' @@ -75,8 +75,9 @@ def __init__(self, affiliation, person, org): class SamplingFeatureDataSet(): - datasets={} - related_features={} + datasets = {} + related_features = {} 
+ def __init__(self, samplingfeature, datasetresults, relatedfeatures): sf = samplingfeature @@ -94,7 +95,6 @@ def __init__(self, samplingfeature, datasetresults, relatedfeatures): self.assignDatasets(datasetresults) self.assignRelatedFeatures(relatedfeatures) - print(self.datasets) def assignDatasets(self, datasetresults): @@ -102,18 +102,17 @@ def assignDatasets(self, datasetresults): if datasetresults: for dsr in datasetresults: if dsr.DataSetObj not in self.datasets: - #if the dataset is not in the dictionary, add it and the first result - self.datasets[dsr.DataSetObj]=[] + # if the dataset is not in the dictionary, add it and the first result + self.datasets[dsr.DataSetObj] = [] res = dsr.ResultObj # res.FeatureActionObj = None self.datasets[dsr.DataSetObj].append(res) else: - #if the dataset is in the dictionary, append the result object to the list + # if the dataset is in the dictionary, append the result object to the list res = dsr.ResultObj # res.FeatureActionObj = None self.datasets[dsr.DataSetObj].append(res) - def assignRelatedFeatures(self, relatedfeatures): self.related_features = {} if relatedfeatures: @@ -169,7 +168,8 @@ def resultExists(self, result): ) return ret.scalar() - except: + except Exception as e: + print('Error running Query: {}'.format(e)) return None # Annotations @@ -225,7 +225,8 @@ def getAnnotations(self, annottype=None, codes=None, ids=None, **kwargs): query = query.filter(Annotations.AnnotationID.in_(ids)) return query.all() - except: + except Exception as e: + print('Error running Query: {}'.format(e)) return None # CV @@ -384,15 +385,16 @@ def getVariables(self, ids=None, codes=None, sitecode=None, results=False): variables = [ x[0] for x in self._session.query(distinct(Results.VariableID)) - .filter(Results.FeatureActionID == FeatureActions.FeatureActionID) - .filter(FeatureActions.SamplingFeatureID == SamplingFeatures.SamplingFeatureID) - .filter(SamplingFeatures.SamplingFeatureCode == sitecode).all() + 
.filter(Results.FeatureActionID == FeatureActions.FeatureActionID) + .filter(FeatureActions.SamplingFeatureID == SamplingFeatures.SamplingFeatureID) + .filter(SamplingFeatures.SamplingFeatureCode == sitecode).all() ] if ids: ids = list(set(ids).intersection(variables)) else: ids = variables - except: + except Exception as e: + print('Error running Query: {}'.format(e)) pass if results: @@ -402,7 +404,8 @@ def getVariables(self, ids=None, codes=None, sitecode=None, results=False): ids = list(set(ids).intersection(variables)) else: ids = variables - except: + except Exception as e: + print('Error running Query: {}'.format(e)) pass query = self._session.query(Variables) @@ -520,10 +523,10 @@ def getSamplingFeatures(self, ids=None, codes=None, uuids=None, if results: try: fas = [x[0] for x in self._session.query(distinct(Results.FeatureActionID)).all()] - except: + except Exception as e: + print('Error running Query: {}'.format(e)) return None - sf = [x[0] for x in self._session.query(distinct(FeatureActions.SamplingFeatureID)) - .filter(FeatureActions.FeatureActionID.in_(fas)).all()] + sf = [x[0] for x in self._session.query(distinct(FeatureActions.SamplingFeatureID)).filter(FeatureActions.FeatureActionID.in_(fas)).all()] # noqa if ids: ids = list(set(ids).intersection(sf)) else: @@ -557,8 +560,8 @@ def getRelatedSamplingFeatures(self, sfid=None, rfid=None, relationshiptype=None """ - sf = self._session.query(distinct(SamplingFeatures.SamplingFeatureID))\ - .select_from(RelatedFeatures) + sf = self._session.query(distinct(SamplingFeatures.SamplingFeatureID)) \ + .select_from(RelatedFeatures) if sfid: sf = sf.join(RelatedFeatures.RelatedFeatureObj).filter(RelatedFeatures.SamplingFeatureID == sfid) @@ -576,8 +579,6 @@ def getRelatedSamplingFeatures(self, sfid=None, rfid=None, relationshiptype=None print('Error running Query: {}'.format(e)) return None - - # Action def getActions(self, ids=None, acttype=None, sfid=None, **kwargs): """ @@ -656,7 +657,6 @@ def 
getUnits(self, ids=None, name=None, unittype=None, **kwargs): print('Error running Query: {}'.format(e)) return None - # Organization def getOrganizations(self, ids=None, codes=None): """ @@ -738,7 +738,7 @@ def getAffiliations(self, ids=None, personfirst=None, personlast=None, orgcode=N return None # Results - def getResults(self, ids=None, restype = None, uuids=None, actionid=None, simulationid=None, + def getResults(self, ids=None, restype=None, uuids=None, actionid=None, simulationid=None, variableid=None, siteid=None, sfids=None, sfuuids=None, sfcodes=None, **kwargs): # TODO what if user sends in both type and actionid vs just actionid @@ -791,10 +791,10 @@ def getResults(self, ids=None, restype = None, uuids=None, actionid=None, simula if uuids: query = query.filter(Results.ResultUUID.in_(uuids)) if simulationid: - query = query.join(FeatureActions)\ - .join(Actions)\ - .join(Simulations)\ - .filter_by(SimulationID=simulationid) + query = query.join(FeatureActions) \ + .join(Actions) \ + .join(Simulations) \ + .filter_by(SimulationID=simulationid) if actionid: query = query.join(FeatureActions).filter_by(ActionID=actionid) if 'sfid' in kwargs: @@ -810,16 +810,15 @@ def getResults(self, ids=None, restype = None, uuids=None, actionid=None, simula query = query.join(FeatureActions).filter(FeatureActions.SamplingFeatureID.in_(sfids)) if siteid: - sfids = [x[0] for x in self._session.query( distinct(SamplingFeatures.SamplingFeatureID)) .select_from(RelatedFeatures) .join(RelatedFeatures.SamplingFeatureObj) .filter(RelatedFeatures.RelatedFeatureID == siteid) .all() - ] + ] - #TODO does this code do the same thing as the code above? + # TODO does this code do the same thing as the code above? 
# sf_list = self.getRelatedSamplingFeatures(rfid=siteid) # sfids = [] # for sf in sf_list: @@ -834,7 +833,7 @@ def getResults(self, ids=None, restype = None, uuids=None, actionid=None, simula return None # Datasets - def getDataSets(self, ids= None, codes=None, uuids=None, dstype=None): + def getDataSets(self, ids=None, codes=None, uuids=None, dstype=None): """ Retrieve a list of Datasets @@ -906,7 +905,7 @@ def getDataSetsResults(self, ids=None, codes=None, uuids=None, dstype=None): if all(v is None for v in [ids, codes, uuids]): raise ValueError('Expected DataSetID OR DataSetUUID OR DataSetCode argument') - q = self._session.query(DataSetsResults)\ + q = self._session.query(DataSetsResults) \ .join(DataSets) if ids: q = q.filter(DataSets.DataSetID.in_(ids)) @@ -955,12 +954,11 @@ def getDataSetsValues(self, ids=None, codes=None, uuids=None, dstype=None): resids.append(ds.ResultID) try: - return self.getResultValues(resultids = resids) + return self.getResultValues(resultids=resids) except Exception as e: print('Error running Query {}'.format(e)) return None - def getSamplingFeatureDatasets(self, ids=None, codes=None, uuids=None, dstype=None, sftype=None): """ Retrieve a list of Datasets associated with the given sampling feature data. 
@@ -990,11 +988,12 @@ def getSamplingFeatureDatasets(self, ids=None, codes=None, uuids=None, dstype=No """ - # make sure one of the three arguments has been sent in if all(v is None for v in [ids, codes, uuids, sftype]): - raise ValueError('Expected samplingFeatureID OR samplingFeatureUUID OR samplingFeatureCode OR samplingFeatureType ' - 'argument') + raise ValueError( + 'Expected samplingFeatureID OR samplingFeatureUUID ' + 'OR samplingFeatureCode OR samplingFeatureType ' + 'argument') sf_query = self._session.query(SamplingFeatures) if sftype: @@ -1011,18 +1010,17 @@ def getSamplingFeatureDatasets(self, ids=None, codes=None, uuids=None, dstype=No sf_list.append(sf) try: - sfds=[] + sfds = [] for sf in sf_list: - q = self._session.query(DataSetsResults)\ - .join(Results)\ - .join(FeatureActions)\ + q = self._session.query(DataSetsResults) \ + .join(Results) \ + .join(FeatureActions) \ .filter(FeatureActions.SamplingFeatureID == sf.SamplingFeatureID) if dstype: q = q.filter_by(DatasetTypeCV=dstype) - vals = q.all() related = self.getRelatedSamplingFeatures(sf.SamplingFeatureID) @@ -1081,17 +1079,21 @@ def getEquipment(self, codes=None, equiptype=None, sfid=None, actionid=None, **k warnings.warn('The parameter \'type\' is deprecated. Please use the equiptype parameter instead.', DeprecationWarning, stacklevel=2) equiptype = kwargs['type'] + + # NOTE: Equiptype currently unused! 
+ if equiptype: + pass e = self._session.query(Equipment) if sfid: e = e.join(EquipmentUsed) \ - .join(Actions) \ - .join(FeatureActions) \ - .filter(FeatureActions.SamplingFeatureID == sfid) + .join(Actions) \ + .join(FeatureActions) \ + .filter(FeatureActions.SamplingFeatureID == sfid) if codes: e = e.filter(Equipment.EquipmentCode.in_(codes)) if actionid: e = e.join(EquipmentUsed).join(Actions) \ - .filter(Actions.ActionID == actionid) + .filter(Actions.ActionID == actionid) return e.all() def CalibrationReferenceEquipment(self): diff --git a/odm2api/base.py b/odm2api/base.py index a164a1d..6887017 100644 --- a/odm2api/base.py +++ b/odm2api/base.py @@ -1,5 +1,6 @@ from __future__ import (absolute_import, division, print_function) + class serviceBase(object): def __init__(self, session_factory, debug=False): """Must send in either a session_factory.""" @@ -42,10 +43,9 @@ def __repr__(self): if 'obj' in v.lower(): del valuedict[v] - if v == "_sa_instance_state": - del valuedict["_sa_instance_state"] - return "<%s(%s)>" % (self.__class__.__name__, str(valuedict)) - + if v == '_sa_instance_state': + del valuedict['_sa_instance_state'] + return '<%s(%s)>' % (self.__class__.__name__, str(valuedict)) class modelBase(): From 42d984099b1defe5c1bbf1f97b70755f7eca9413 Mon Sep 17 00:00:00 2001 From: lsetiawan Date: Wed, 10 Jan 2018 08:46:28 -0800 Subject: [PATCH 48/55] Make lowercase columns default, add option to make them same as model --- odm2api/ODM2/services/readService.py | 17 +++++++++++------ 1 file changed, 11 insertions(+), 6 deletions(-) diff --git a/odm2api/ODM2/services/readService.py b/odm2api/ODM2/services/readService.py index ec183d1..61edb0e 100644 --- a/odm2api/ODM2/services/readService.py +++ b/odm2api/ODM2/services/readService.py @@ -923,7 +923,7 @@ def getDataSetsResults(self, ids=None, codes=None, uuids=None, dstype=None): print('Error running Query {}'.format(e)) return None - def getDataSetsValues(self, ids=None, codes=None, uuids=None, dstype=None): 
+ def getDataSetsValues(self, ids=None, codes=None, uuids=None, dstype=None, lowercols=True): """ Retrieve a list of datavalues associated with the given dataset info @@ -934,6 +934,8 @@ def getDataSetsValues(self, ids=None, codes=None, uuids=None, dstype=None): uuids (list, optional): List of Dataset UUIDs string. dstype (str, optional): Type of Dataset from `controlled vocabulary name `_. + lowercols (bool, optional): Make column names to be lowercase. + Default to True. Returns: @@ -945,7 +947,7 @@ def getDataSetsValues(self, ids=None, codes=None, uuids=None, dstype=None): >>> READ.getDataSetsValues(codes=['HOME', 'FIELD']) >>> READ.getDataSetsValues(uuids=['a6f114f1-5416-4606-ae10-23be32dbc202', ... '5396fdf3-ceb3-46b6-aaf9-454a37278bb4']) - >>> READ.getDataSetsValues(dstype='singleTimeSeries') + >>> READ.getDataSetsValues(dstype='singleTimeSeries', lowercols=False) """ @@ -956,7 +958,7 @@ def getDataSetsValues(self, ids=None, codes=None, uuids=None, dstype=None): resids.append(ds.ResultID) try: - return self.getResultValues(resultids = resids) + return self.getResultValues(resultids=resids, lowercols=lowercols) except Exception as e: print('Error running Query {}'.format(e)) return None @@ -1336,7 +1338,7 @@ def getResultDerivationEquations(self): """ return self._session.query(ResultDerivationEquations).all() - def getResultValues(self, resultids, starttime=None, endtime=None): + def getResultValues(self, resultids, starttime=None, endtime=None, lowercols=True): """ Retrieve result values associated with the given result. @@ -1345,6 +1347,8 @@ def getResultValues(self, resultids, starttime=None, endtime=None): resultids (list): List of SamplingFeatureIDs. starttime (object, optional): Start time to filter by as datetime object. endtime (object, optional): End time to filter by as datetime object. + lowercols (bool, optional): Make column names to be lowercase. + Default to True. Returns: DataFrame: Pandas dataframe of result values. 
@@ -1355,7 +1359,7 @@ def getResultValues(self, resultids, starttime=None, endtime=None): >>> READ.getResultValues(resultids=[100, 20, 34], starttime=datetime.today()) >>> READ.getResultValues(resultids=[1, 2, 3, 4], >>> starttime=datetime(2000, 01, 01), - >>> endtime=datetime(2003, 02, 01)) + >>> endtime=datetime(2003, 02, 01), lowercols=False) """ restype = self._session.query(Results).filter_by(ResultID=resultids[0]).first().ResultTypeCV @@ -1393,7 +1397,8 @@ def getResultValues(self, resultids, starttime=None, endtime=None): con=self._session_factory.engine, params=query.params ) - df.columns = [self._get_columns(ResultValues)[c] for c in df.columns] + if not lowercols: + df.columns = [self._get_columns(ResultValues)[c] for c in df.columns] return df except Exception as e: print('Error running Query: {}'.format(e)) From 007550f5918001bf925ec3307c5d246b318953bd Mon Sep 17 00:00:00 2001 From: lsetiawan Date: Wed, 10 Jan 2018 09:48:42 -0800 Subject: [PATCH 49/55] Clean up updateService --- odm2api/ODM2/services/updateService.py | 67 ++++++-------------------- 1 file changed, 15 insertions(+), 52 deletions(-) diff --git a/odm2api/ODM2/services/updateService.py b/odm2api/ODM2/services/updateService.py index 1bec3ce..87e77b8 100644 --- a/odm2api/ODM2/services/updateService.py +++ b/odm2api/ODM2/services/updateService.py @@ -5,7 +5,7 @@ from datetime import datetime from odm2api.ODM2 import serviceBase -from odm2api.ODM2.models import * +from odm2api.ODM2.models import (Actions, Results) # ################################################################################ @@ -13,39 +13,39 @@ # ################################################################################ class UpdateODM2(serviceBase): - def update(self, value): self._session.add(value) self._session.commit() return value -# ################################################################################ -# Core -# ################################################################################ + # 
################################################################################ + # Core + # ################################################################################ def updateResultValidDateTime(self, resultId, dateTime): - #check type of "validdatetime' - #if not datetime do this: + # check type of "validdatetime' + # if not datetime do this: # dt = dateTime.to_datetime() - #else dt = dateTime + # else dt = dateTime if (type(dateTime) != datetime): dt = dateTime.to_datetime() else: dt = dateTime - q = self._session.query(Results).filter(Results.ResultID == int(resultId)).update({'ValidDateTime': dt}) + q = self._session.query(Results).filter(Results.ResultID == int(resultId)) + q.update({'ValidDateTime': dt}) + self._session.commit() def updateResult(self, resultID=None, valuecount=None, result=None): if resultID: q = self._session.query(Results).filter(Results.ResultID == int(resultID)) if valuecount: - q.update({"ValueCount": valuecount}) + q.update({'ValueCount': valuecount}) if result: self._session.add(result) self._session.commit() - - def updateAction(self, actionID=None, begin=None, end=None, action = None): + def updateAction(self, actionID=None, begin=None, end=None, action=None): if actionID: q = self._session.query(Actions).filter(Actions.ActionID == int(actionID)) # if (type(begin) != datetime): @@ -54,77 +54,40 @@ def updateAction(self, actionID=None, begin=None, end=None, action = None): # end = end.to_datetime() if begin: - q.update({"BeginDateTime": begin}) + q.update({'BeginDateTime': begin}) if end: - q.update({"EndDateTime": end}) + q.update({'EndDateTime': end}) elif action: self._session.add(action) self._session.commit() - # ################################################################################ # Data Quality # ################################################################################ - - - - # ################################################################################ # Equipment # 
################################################################################ - - - - - # ################################################################################ # Extension Properties # ################################################################################ - - - - # ################################################################################ # External Identifiers # ################################################################################ - - - - # ################################################################################ # Lab Analyses # ################################################################################ - - - - # ################################################################################ # Provenance # ################################################################################ - - - - # ################################################################################ # Results # ################################################################################ - - - - # ################################################################################ # Sampling Features # ################################################################################ - - - - # ################################################################################ # Sensors # ################################################################################ - - - -# ################################################################################ +################################################################################ # ODM2 # ################################################################################ From 26099a04a622d5e78c81ab33229b3a39fefbc540 Mon Sep 17 00:00:00 2001 From: lsetiawan Date: Wed, 10 Jan 2018 09:49:13 -0800 Subject: [PATCH 50/55] Cleanup model --- odm2api/ODM2/models.py | 110 ++++------------------------------------- 1 file changed, 10 insertions(+), 100 deletions(-) diff --git 
a/odm2api/ODM2/models.py b/odm2api/ODM2/models.py index af30d3a..5f8c4ac 100644 --- a/odm2api/ODM2/models.py +++ b/odm2api/ODM2/models.py @@ -5,6 +5,7 @@ from sqlalchemy import BigInteger, Boolean, Column, Date, DateTime, Float, ForeignKey, Integer, String, case from sqlalchemy.dialects import mysql, postgresql, sqlite from sqlalchemy.orm import relationship + Base = modelBase.Base BigIntegerType = BigInteger() @@ -27,7 +28,7 @@ def is_hex(s): ################################################################################ # CV ################################################################################ -class CV (object): +class CV(object): __table_args__ = {u'schema': 'odm2'} Term = Column('term', String(255), nullable=False) @@ -183,7 +184,6 @@ class Organizations(Base): class Affiliations(Base): - AffiliationID = Column('affiliationid', Integer, primary_key=True, nullable=False) PersonID = Column('personid', ForeignKey(People.PersonID), nullable=False) OrganizationID = Column('organizationid', ForeignKey(Organizations.OrganizationID)) @@ -232,7 +232,6 @@ class Actions(Base): class ActionBy(Base): - BridgeID = Column('bridgeid', Integer, primary_key=True, nullable=False) ActionID = Column('actionid', Integer, ForeignKey(Actions.ActionID), nullable=False) AffiliationID = Column('affiliationid', ForeignKey(Affiliations.AffiliationID), nullable=False) @@ -264,22 +263,22 @@ class SamplingFeatures(Base): index=True) """str: Dimensionality of SamplingFeature; point2d, line2d, etc.""" Elevation_m = Column('elevation_m', Float(53)) - """float: The elevation of the sampling feature in meters, or in the case of Specimen, - the elevation from where the SamplingFeature.Specimen was collected""" + """float: The elevation of the sampling feature in meters, or in the case of Specimen, + the elevation from where the SamplingFeature.Specimen was collected""" # noqa ElevationDatumCV = Column('elevationdatumcv', ForeignKey(CVElevationDatum.Name), index=True) """str: The code 
for the vertical geodetic datum that specifies the zero point for - the Sampling Feature Elevation""" + the Sampling Feature Elevation""" # noqa # FeatureGeometry = Column('featuregeometry', String(50)) """object: The location geometry of the sampling feature on the Earth expressed using a geometry data type. Can be a Point, Curve (profile, trajectory, etc), Surface (flat polygons, etc) or Solid/Volume (although often limited to - 2D geometries). """ + 2D geometries). """ # noqa FeatureGeometryWKT = Column('featuregeometrywkt', String(50)) """str: The location geometry of the sampling feature on the Earth expressed as well known text (WKT). Can be a Point, Curve (profile, trajectory, etc.), Surface (flat polygons, etc.), or Solid/Volume (although often limited to - 2D geometries).""" + 2D geometries).""" # noqa __mapper_args__ = { 'polymorphic_on': case( [ @@ -443,7 +442,6 @@ class Results(Base): class DataLoggerProgramFiles(Base): - ProgramID = Column('programid', Integer, primary_key=True, nullable=False) AffiliationID = Column('affiliationid', Integer, ForeignKey(Affiliations.AffiliationID), nullable=False) ProgramName = Column('programname', String(255), nullable=False) @@ -455,7 +453,6 @@ class DataLoggerProgramFiles(Base): class DataLoggerFiles(Base): - DataLoggerFileID = Column('dataloggerfileid', Integer, primary_key=True, nullable=False) ProgramID = Column('programid', Integer, ForeignKey(DataLoggerProgramFiles.ProgramID), nullable=False) DataLoggerFileName = Column('dataloggerfilename', String(255), nullable=False) @@ -466,7 +463,6 @@ class DataLoggerFiles(Base): class EquipmentModels(Base): - ModelID = Column('modelid', Integer, primary_key=True, nullable=False) ModelManufacturerID = Column('modelmanufacturerid', Integer, ForeignKey(Organizations.OrganizationID), nullable=False) @@ -481,7 +477,6 @@ class EquipmentModels(Base): class InstrumentOutputVariables(Base): - InstrumentOutputVariableID = Column( 'instrumentoutputvariableid', Integer, @@ -503,7 
+498,6 @@ class InstrumentOutputVariables(Base): class DataLoggerFileColumns(Base): - DataLoggerFileColumnID = Column('dataloggerfilecolumnid', Integer, primary_key=True, nullable=False) ResultID = Column('resultid', BigIntegerType, ForeignKey(Results.ResultID)) DataLoggerFileID = Column('dataloggerfileid', Integer, @@ -539,7 +533,6 @@ class DataLoggerFileColumns(Base): class Equipment(Base): - EquipmentID = Column('equipmentid', Integer, primary_key=True, nullable=False) EquipmentCode = Column('equipmentcode', String(50), nullable=False) EquipmentName = Column('equipmentname', String(255), nullable=False) @@ -559,7 +552,6 @@ class Equipment(Base): class CalibrationReferenceEquipment(Base): - BridgeID = Column('bridgeid', Integer, primary_key=True, nullable=False) ActionID = Column('actionid', Integer, ForeignKey(Actions.ActionID), nullable=False) EquipmentID = Column('equipmentid', Integer, ForeignKey(Equipment.EquipmentID), nullable=False) @@ -569,7 +561,6 @@ class CalibrationReferenceEquipment(Base): class EquipmentActions(Base): - BridgeID = Column('bridgeid', Integer, primary_key=True, nullable=False) EquipmentID = Column('equipmentid', ForeignKey(Equipment.EquipmentID), nullable=False) ActionID = Column('actionid', ForeignKey(Actions.ActionID), nullable=False) @@ -579,7 +570,6 @@ class EquipmentActions(Base): class EquipmentUsed(Base): - BridgeID = Column('bridgeid', Integer, primary_key=True, nullable=False) ActionID = Column('actionid', Integer, ForeignKey(Actions.ActionID), nullable=False) EquipmentID = Column('equipmentid', Integer, ForeignKey(Equipment.EquipmentID), nullable=False) @@ -589,8 +579,7 @@ class EquipmentUsed(Base): class MaintenanceActions(Base): - - ActionID = Column('actionid', Integer, ForeignKey(Actions.ActionID), primary_key=True, nullable=False) + ActionID = Column('actionid', Integer, ForeignKey(Actions.ActionID), primary_key=True, nullable=False) IsFactoryService = Column('isfactoryservce', Boolean, nullable=False) MaintenanceCode = 
Column('maintenancecode', String(50)) MantenanceReason = Column('maintenancereason', String(50)) @@ -599,7 +588,6 @@ class MaintenanceActions(Base): class RelatedEquipment(Base): - RelationID = Column('relationid', Integer, primary_key=True, nullable=True) EquipmentID = Column('equipmentid', Integer, ForeignKey(Equipment.EquipmentID), nullable=True) RelationshipTypeCV = Column('relationshiptypecv', String(255), nullable=True, index=True) @@ -625,8 +613,7 @@ class RelatedEquipment(Base): class CalibrationActions(Base): - - ActionID = Column('actionid', Integer, ForeignKey(Actions.ActionID), primary_key=True, nullable=False) + ActionID = Column('actionid', Integer, ForeignKey(Actions.ActionID), primary_key=True, nullable=False) CalibrationCheckValue = Column('calibrationcheckvalue', Float(53)) InstrumentOutputVariableID = Column('instrumentoutputvariableid', Integer, ForeignKey(InstrumentOutputVariables.VariableID), nullable=False) @@ -635,20 +622,19 @@ class CalibrationActions(Base): ActionObj = relationship(Actions) InstrumentOutputVariableObj = relationship(InstrumentOutputVariables) + # ################################################################################ # Lab Analyses # ################################################################################ class Directives(Base): - DirectiveID = Column('directiveid', Integer, primary_key=True, nullable=False) DirectiveTypeCV = Column('directivetypecv', ForeignKey(CVDirectiveType.Name), nullable=False, index=True) DirectiveDescription = Column('directivedescription', String(500), nullable=False) class ActionDirectives(Base): - BridgeID = Column('bridgeid', Integer, primary_key=True, nullable=False) ActionID = Column('actionid', ForeignKey(Actions.ActionID), nullable=False) DirectiveID = Column('directiveid', ForeignKey(Directives.DirectiveID), nullable=False) @@ -678,7 +664,6 @@ class SpecimenBatchPositions(Base): # Sampling Features # 
################################################################################ class SpatialReferences(Base): - SpatialReferenceID = Column('spatialreferenceid', Integer, primary_key=True, nullable=False) SRSCode = Column('srscode', String(50)) SRSName = Column('srsname', String(255), nullable=False) @@ -687,7 +672,6 @@ class SpatialReferences(Base): class Specimens(SamplingFeatures): - SamplingFeatureID = Column('samplingfeatureid', ForeignKey(SamplingFeatures.SamplingFeatureID), primary_key=True) SpecimenTypeCV = Column('specimentypecv', ForeignKey(CVSpecimenType.Name), nullable=False, index=True) @@ -700,7 +684,6 @@ class Specimens(SamplingFeatures): class SpatialOffsets(Base): - SpatialOffsetID = Column('spatialoffsetid', Integer, primary_key=True, nullable=False) SpatialOffsetTypeCV = Column('spatialoffsettypecv', ForeignKey(CVSpatialOffsetType.Name), nullable=False, index=True) @@ -717,7 +700,6 @@ class SpatialOffsets(Base): class Sites(SamplingFeatures): - SamplingFeatureID = Column('samplingfeatureid', ForeignKey(SamplingFeatures.SamplingFeatureID), primary_key=True) SpatialReferenceID = Column('spatialreferenceid', ForeignKey(SpatialReferences.SpatialReferenceID), @@ -734,7 +716,6 @@ class Sites(SamplingFeatures): class RelatedFeatures(Base): - RelationID = Column('relationid', Integer, primary_key=True, nullable=False) SamplingFeatureID = Column('samplingfeatureid', ForeignKey(SamplingFeatures.SamplingFeatureID), nullable=False) @@ -759,7 +740,6 @@ class RelatedFeatures(Base): class SpecimenTaxonomicClassifiers(Base): - BridgeID = Column('bridgeid', Integer, primary_key=True, nullable=False) SamplingFeatureID = Column('samplingfeatureid', ForeignKey(Specimens.SamplingFeatureID), nullable=False) TaxonomicClassifierID = Column('taxonomicclassifierid', @@ -774,7 +754,6 @@ class SpecimenTaxonomicClassifiers(Base): # Simulation # ################################################################################ class Models(Base): - ModelID = Column('modelid', 
Integer, primary_key=True, nullable=False) ModelCode = Column('modelcode', String(255), nullable=False) ModelName = Column('modelname', String(255), nullable=False) @@ -782,7 +761,6 @@ class Models(Base): class RelatedModels(Base): - RelatedID = Column('relatedid', Integer, primary_key=True, nullable=False) ModelID = Column('modelid', ForeignKey(Models.ModelID), nullable=False) RelationshipTypeCV = Column('relationshiptypecv', ForeignKey(CVRelationshipType.Name), nullable=False, @@ -794,7 +772,6 @@ class RelatedModels(Base): class Simulations(Base): - SimulationID = Column('simulationid', Integer, primary_key=True, nullable=False) ActionID = Column('actionid', ForeignKey(Actions.ActionID), nullable=False) SimulationName = Column('simulationname', String(255), nullable=False) @@ -817,7 +794,6 @@ class Simulations(Base): # Part of the Provenance table, needed here to meet dependencies class Citations(Base): - CitationID = Column('citationid', Integer, primary_key=True, nullable=False) Title = Column('title', String(255), nullable=False) Publisher = Column('publisher', String(255), nullable=False) @@ -829,7 +805,6 @@ class Citations(Base): # Annotations # ################################################################################ class Annotations(Base): - AnnotationID = Column('annotationid', Integer, primary_key=True, nullable=False) AnnotationTypeCV = Column( 'annotationtypecv', @@ -851,7 +826,6 @@ class Annotations(Base): class ActionAnnotations(Base): - BridgeID = Column('bridgeid', Integer, primary_key=True, nullable=False) ActionID = Column('actionid', ForeignKey(Actions.ActionID), nullable=False) AnnotationID = Column('annotationid', ForeignKey(Annotations.AnnotationID), nullable=False) @@ -861,7 +835,6 @@ class ActionAnnotations(Base): class EquipmentAnnotations(Base): - BridgeID = Column('bridgeid', Integer, primary_key=True, nullable=False) EquipmentID = Column('valueid', BigIntegerType, ForeignKey(Equipment.EquipmentID), nullable=False) AnnotationID = 
Column('annotationid', ForeignKey(Annotations.AnnotationID), nullable=False) @@ -871,7 +844,6 @@ class EquipmentAnnotations(Base): class MethodAnnotations(Base): - BridgeID = Column('bridgeid', Integer, primary_key=True, nullable=False) MethodID = Column('methodid', ForeignKey(Methods.MethodID), nullable=False) AnnotationID = Column('annotationid', ForeignKey(Annotations.AnnotationID), nullable=False) @@ -881,7 +853,6 @@ class MethodAnnotations(Base): class ResultAnnotations(Base): - BridgeID = Column('bridgeid', Integer, primary_key=True, nullable=False) ResultID = Column('resultid', ForeignKey(Results.ResultID), nullable=False) AnnotationID = Column('annotationid', ForeignKey(Annotations.AnnotationID), nullable=False) @@ -893,7 +864,6 @@ class ResultAnnotations(Base): class SamplingFeatureAnnotations(Base): - BridgeID = Column('bridgeid', Integer, primary_key=True, nullable=False) SamplingFeatureID = Column('samplingfeatureid', ForeignKey(SamplingFeatures.SamplingFeatureID), nullable=False) @@ -907,7 +877,6 @@ class SamplingFeatureAnnotations(Base): # Data Quality # ################################################################################ class DataSetsResults(Base): - BridgeID = Column('bridgeid', Integer, primary_key=True, nullable=False) DataSetID = Column('datasetid', ForeignKey(DataSets.DataSetID), nullable=False) ResultID = Column('resultid', ForeignKey(Results.ResultID), nullable=False) @@ -917,7 +886,6 @@ class DataSetsResults(Base): class DataQuality(Base): - DataQualityID = Column('dataqualityid', Integer, primary_key=True, nullable=False) DataQualityTypeCV = Column('dataqualitytypecv', ForeignKey(CVDataQualityType.Name), nullable=False, index=True) @@ -931,7 +899,6 @@ class DataQuality(Base): class ReferenceMaterials(Base): - ReferenceMaterialID = Column('referencematerialid', Integer, primary_key=True, nullable=False) ReferenceMaterialMediumCV = Column( 'referencematerialmediumcv', @@ -953,7 +920,6 @@ class ReferenceMaterials(Base): class 
CalibrationStandards(Base): - BridgeID = Column('bridgeid', Integer, primary_key=True, nullable=False) ActionID = Column('actionid', Integer, ForeignKey(Actions.ActionID), nullable=False) ReferenceMaterialID = Column( @@ -968,7 +934,6 @@ class CalibrationStandards(Base): class ReferenceMaterialValues(Base): - ReferenceMaterialValueID = Column('referencematerialvalueid', Integer, primary_key=True, nullable=False) ReferenceMaterialID = Column('referencematerialid', ForeignKey(ReferenceMaterials.ReferenceMaterialID), nullable=False) @@ -985,7 +950,6 @@ class ReferenceMaterialValues(Base): class ResultNormalizationValues(Base): - ResultID = Column(u'resultid', ForeignKey(Results.ResultID), primary_key=True) ReferenceMaterialValueID = Column(u'referencematerialvalueid', ForeignKey(ReferenceMaterialValues.ReferenceMaterialValueID), @@ -996,7 +960,6 @@ class ResultNormalizationValues(Base): class ResultsDataQuality(Base): - BridgeID = Column('bridgeid', Integer, primary_key=True, nullable=False) ResultID = Column('resultid', ForeignKey(Results.ResultID), nullable=False) DataQualityID = Column('dataqualityid', ForeignKey(DataQuality.DataQualityID), nullable=False) @@ -1009,7 +972,6 @@ class ResultsDataQuality(Base): # Extension Properties # ################################################################################ class ExtensionProperties(Base): - PropertyID = Column('propertyid', Integer, primary_key=True, nullable=False) PropertyName = Column('propertyname', String(255), nullable=False) PropertyDescription = Column('propertydescription', String(500)) @@ -1021,7 +983,6 @@ class ExtensionProperties(Base): class ActionExtensionPropertyValues(Base): - BridgeID = Column('bridgeid', Integer, primary_key=True, nullable=False) ActionID = Column('actionid', ForeignKey(Actions.ActionID), nullable=False) PropertyID = Column('propertyid', ForeignKey(ExtensionProperties.PropertyID), nullable=False) @@ -1032,7 +993,6 @@ class ActionExtensionPropertyValues(Base): class 
CitationExtensionPropertyValues(Base): - BridgeID = Column('bridgeid', Integer, primary_key=True, nullable=False) CitationID = Column('citationid', ForeignKey(Citations.CitationID), nullable=False) PropertyID = Column('propertyid', ForeignKey(ExtensionProperties.PropertyID), nullable=False) @@ -1043,7 +1003,6 @@ class CitationExtensionPropertyValues(Base): class MethodExtensionPropertyValues(Base): - BridgeID = Column('bridgeid', Integer, primary_key=True, nullable=False) MethodID = Column('methodid', ForeignKey(Methods.MethodID), nullable=False) PropertyID = Column('propertyid', ForeignKey(ExtensionProperties.PropertyID), nullable=False) @@ -1054,7 +1013,6 @@ class MethodExtensionPropertyValues(Base): class ResultExtensionPropertyValues(Base): - BridgeID = Column('bridgeid', Integer, primary_key=True, nullable=False) ResultID = Column('resultid', ForeignKey(Results.ResultID), nullable=False) PropertyID = Column('propertyid', ForeignKey(ExtensionProperties.PropertyID), nullable=False) @@ -1065,7 +1023,6 @@ class ResultExtensionPropertyValues(Base): class SamplingFeatureExtensionPropertyValues(Base): - BridgeID = Column('bridgeid', Integer, primary_key=True, nullable=False) SamplingFeatureID = Column('samplingfeatureid', ForeignKey(SamplingFeatures.SamplingFeatureID), nullable=False) @@ -1077,7 +1034,6 @@ class SamplingFeatureExtensionPropertyValues(Base): class VariableExtensionPropertyValues(Base): - BridgeID = Column('bridgeid', Integer, primary_key=True, nullable=False) VariableID = Column('variableid', ForeignKey(Variables.VariableID), nullable=False) PropertyID = Column('propertyid', ForeignKey(ExtensionProperties.PropertyID), nullable=False) @@ -1091,7 +1047,6 @@ class VariableExtensionPropertyValues(Base): # Extension Identifiers # ################################################################################ class ExternalIdentifierSystems(Base): - ExternalIdentifierSystemID = Column( 'externalidentifiersystemid', Integer, @@ -1108,7 +1063,6 @@ class 
ExternalIdentifierSystems(Base): class CitationExternalIdentifiers(Base): - BridgeID = Column('bridgeid', Integer, primary_key=True, nullable=False) CitationID = Column('citationid', ForeignKey(Citations.CitationID), nullable=False) ExternalIdentifierSystemID = Column('externalidentifiersystemid', @@ -1122,7 +1076,6 @@ class CitationExternalIdentifiers(Base): class MethodExternalIdentifiers(Base): - BridgeID = Column('bridgeid', Integer, primary_key=True, nullable=False) MethodID = Column('methodid', ForeignKey(Methods.MethodID), nullable=False) ExternalIdentifierSystemID = Column('externalidentifiersystemid', @@ -1137,7 +1090,6 @@ class MethodExternalIdentifiers(Base): class PersonExternalIdentifiers(Base): - BridgeID = Column('bridgeid', Integer, primary_key=True, nullable=False) PersonID = Column('personid', ForeignKey(People.PersonID), nullable=False) ExternalIdentifierSystemID = Column('externalidentifiersystemid', @@ -1151,7 +1103,6 @@ class PersonExternalIdentifiers(Base): class ReferenceMaterialExternalIdentifiers(Base): - BridgeID = Column('bridgeid', Integer, primary_key=True, nullable=False) ReferenceMaterialID = Column(ForeignKey(ReferenceMaterials.ReferenceMaterialID), nullable=False) ExternalIdentifierSystemID = Column('externalidentifiersystemid', @@ -1169,7 +1120,6 @@ class ReferenceMaterialExternalIdentifiers(Base): class SamplingFeatureExternalIdentifiers(Base): - BridgeID = Column('bridgeid', Integer, primary_key=True, nullable=False) SamplingFeatureID = Column('samplingfeatureid', ForeignKey(SamplingFeatures.SamplingFeatureID), nullable=False) @@ -1188,7 +1138,6 @@ class SamplingFeatureExternalIdentifiers(Base): class SpatialReferenceExternalIdentifiers(Base): - BridgeID = Column('bridgeid', Integer, primary_key=True, nullable=False) SpatialReferenceID = Column('spatialreferenceid', ForeignKey(SpatialReferences.SpatialReferenceID), nullable=False) @@ -1207,7 +1156,6 @@ class SpatialReferenceExternalIdentifiers(Base): class 
TaxonomicClassifierExternalIdentifiers(Base): - BridgeID = Column('bridgeid', Integer, primary_key=True, nullable=False) TaxonomicClassifierID = Column('taxonomicclassifierid', ForeignKey(TaxonomicClassifiers.TaxonomicClassifierID), nullable=False) @@ -1226,7 +1174,6 @@ class TaxonomicClassifierExternalIdentifiers(Base): class VariableExternalIdentifiers(Base): - BridgeID = Column('bridgeid', Integer, primary_key=True, nullable=False) VariableID = Column('variableid', ForeignKey(Variables.VariableID), nullable=False) ExternalIdentifierSystemID = Column('externalidentifiersystemid', @@ -1244,7 +1191,6 @@ class VariableExternalIdentifiers(Base): # ################################################################################ class AuthorLists(Base): - BridgeID = Column('bridgeid', Integer, primary_key=True, nullable=False) CitationID = Column('citationid', ForeignKey(Citations.CitationID), nullable=False) PersonID = Column('personid', ForeignKey(People.PersonID), nullable=False) @@ -1255,7 +1201,6 @@ class AuthorLists(Base): class DataSetCitations(Base): - BridgeID = Column('bridgeid', Integer, primary_key=True, nullable=False) DataSetID = Column('datasetid', ForeignKey(DataSets.DataSetID), nullable=False) RelationshipTypeCV = Column('relationshiptypecv', ForeignKey(CVRelationshipType.Name), nullable=False, @@ -1267,13 +1212,11 @@ class DataSetCitations(Base): class DerivationEquations(Base): - DerivationEquationID = Column('derivationequationid', Integer, primary_key=True, nullable=False) DerivationEquation = Column('derivationequation', String(255), nullable=False) class ResultDerivationEquations(Base): - ResultID = Column(u'resultid', ForeignKey(Results.ResultID), primary_key=True) DerivationEquationID = Column( u'derivationequationid', @@ -1286,7 +1229,6 @@ class ResultDerivationEquations(Base): class MethodCitations(Base): - BridgeID = Column('bridgeid', Integer, primary_key=True, nullable=False) MethodID = Column('methodid', ForeignKey(Methods.MethodID), 
nullable=False) RelationshipTypeCV = Column('relationshiptypecv', ForeignKey(CVRelationshipType.Name), nullable=False, @@ -1298,7 +1240,6 @@ class MethodCitations(Base): class RelatedAnnotations(Base): - RelationID = Column('relationid', Integer, primary_key=True, nullable=False) AnnotationID = Column('annotationid', ForeignKey(Annotations.AnnotationID), nullable=False) RelationshipTypeCV = Column('relationshiptypecv', ForeignKey(CVRelationshipType.Name), nullable=False, @@ -1316,7 +1257,6 @@ class RelatedAnnotations(Base): class RelatedCitations(Base): - RelationID = Column('relationid', Integer, primary_key=True, nullable=False) CitationID = Column('citationid', ForeignKey(Citations.CitationID), nullable=False) RelationshipTypeCV = Column('relationshiptypecv', ForeignKey(CVRelationshipType.Name), nullable=False, @@ -1331,7 +1271,6 @@ class RelatedCitations(Base): class RelatedDataSets(Base): - RelationID = Column('relationid', Integer, primary_key=True, nullable=False) DataSetID = Column('datasetid', ForeignKey(DataSets.DataSetID), nullable=False) RelationshipTypeCV = Column('relationshiptypecv', ForeignKey(CVRelationshipType.Name), nullable=False, @@ -1347,7 +1286,6 @@ class RelatedDataSets(Base): class RelatedResults(Base): - RelationID = Column('relationid', Integer, primary_key=True, nullable=False) ResultID = Column('resultid', ForeignKey(Results.ResultID), nullable=False) RelationshipTypeCV = Column('relationshiptypecv', ForeignKey(CVRelationshipType.Name), nullable=False, @@ -1364,7 +1302,6 @@ class RelatedResults(Base): # Results # ################################################################################ class PointCoverageResults(Results): - ResultID = Column('resultid', ForeignKey(Results.ResultID), primary_key=True) ZLocation = Column('zlocation', Float(53)) ZLocationUnitsID = Column('zlocationunitsid', ForeignKey(Units.UnitsID)) @@ -1396,7 +1333,6 @@ class PointCoverageResults(Results): class ProfileResults(Results): - ResultID = 
Column('resultid', ForeignKey(Results.ResultID), primary_key=True) XLocation = Column('xlocation', Float(53)) XLocationUnitsID = Column('xlocationunitsid', ForeignKey(Units.UnitsID)) @@ -1426,7 +1362,6 @@ class ProfileResults(Results): class CategoricalResults(Results): - ResultID = Column('resultid', ForeignKey(Results.ResultID), primary_key=True) XLocation = Column('xlocation', Float(53)) XLocationUnitsID = Column('xlocationunitsid', Integer, ForeignKey(Units.UnitsID)) @@ -1455,7 +1390,6 @@ class CategoricalResults(Results): class TransectResults(Results): - ResultID = Column('resultid', ForeignKey(Results.ResultID), primary_key=True) ZLocation = Column('zlocation', Float(53)) ZLocationUnitsID = Column('zlocationunitsid', ForeignKey(Units.UnitsID)) @@ -1482,7 +1416,6 @@ class TransectResults(Results): class SpectraResults(Results): - ResultID = Column('resultid', ForeignKey(Results.ResultID), primary_key=True) XLocation = Column('xlocation', Float(53)) XLocationUnitsID = Column('xlocationunitsid', ForeignKey(Units.UnitsID)) @@ -1509,7 +1442,6 @@ class SpectraResults(Results): class TimeSeriesResults(Results): - ResultID = Column('resultid', ForeignKey(Results.ResultID), primary_key=True) XLocation = Column('xlocation', Float(53)) XLocationUnitsID = Column('xlocationunitsid', ForeignKey(Units.UnitsID)) @@ -1536,7 +1468,6 @@ class TimeSeriesResults(Results): class SectionResults(Results): - ResultID = Column('resultid', ForeignKey(Results.ResultID), primary_key=True) YLocation = Column('ylocation', Float(53)) YLocationUnitsID = Column('ylocationunitsid', ForeignKey(Units.UnitsID)) @@ -1575,7 +1506,6 @@ class SectionResults(Results): class TrajectoryResults(Results): - ResultID = Column('resultid', ForeignKey(Results.ResultID), primary_key=True) SpatialReferenceID = Column('spatialreferenceid', ForeignKey(SpatialReferences.SpatialReferenceID)) IntendedTrajectorySpacing = Column('intendedtrajectoryspacing', Float(53)) @@ -1599,7 +1529,6 @@ class 
TrajectoryResults(Results): class MeasurementResults(Results): - ResultID = Column('resultid', ForeignKey(Results.ResultID), primary_key=True) XLocation = Column('xlocation', Float(53)) XLocationUnitsID = Column('xlocationunitsid', ForeignKey(Units.UnitsID)) @@ -1638,7 +1567,6 @@ class MeasurementResults(Results): class CategoricalResultValues(Base): - ValueID = Column('valueid', BigIntegerType, primary_key=True) ResultID = Column('resultid', ForeignKey(CategoricalResults.ResultID), nullable=False) DataValue = Column('datavalue', String(255), nullable=False) @@ -1649,7 +1577,6 @@ class CategoricalResultValues(Base): class MeasurementResultValues(Base): - ValueID = Column('valueid', BigIntegerType, primary_key=True) ResultID = Column('resultid', ForeignKey(MeasurementResults.ResultID), nullable=False) DataValue = Column('datavalue', Float(53), nullable=False) @@ -1660,7 +1587,6 @@ class MeasurementResultValues(Base): class PointCoverageResultValues(Base): - ValueID = Column('valueid', BigIntegerType, primary_key=True) ResultID = Column('resultid', ForeignKey(PointCoverageResults.ResultID), nullable=False) DataValue = Column('datavalue', BigIntegerType, nullable=False) @@ -1685,7 +1611,6 @@ class PointCoverageResultValues(Base): class ProfileResultValues(Base): - ValueID = Column('valueid', BigIntegerType, primary_key=True) ResultID = Column('resultid', ForeignKey(ProfileResults.ResultID), nullable=False) DataValue = Column('datavalue', Float(53), nullable=False) @@ -1712,7 +1637,6 @@ class ProfileResultValues(Base): class SectionResultValues(Base): - ValueID = Column('valueid', BigIntegerType, primary_key=True) ResultID = Column('resultid', ForeignKey(SectionResults.ResultID), nullable=False) DataValue = Column('datavalue', Float(53), nullable=False) @@ -1748,7 +1672,6 @@ class SectionResultValues(Base): class SpectraResultValues(Base): - ValueID = Column('valueid', BigIntegerType, primary_key=True) ResultID = Column('resultid', ForeignKey(SpectraResults.ResultID), 
nullable=False) DataValue = Column('datavalue', Float(53), nullable=False) @@ -1775,9 +1698,7 @@ class SpectraResultValues(Base): ) - class TimeSeriesResultValues(Base): - ValueID = Column('valueid', BigIntegerType, primary_key=True) ResultID = Column('resultid', ForeignKey(TimeSeriesResults.ResultID), nullable=False) DataValue = Column('datavalue', Float(53), nullable=False) @@ -1803,7 +1724,6 @@ def list_repr(self): class TrajectoryResultValues(Base): - ValueID = Column('valueid', BigIntegerType, primary_key=True) ResultID = Column('resultid', ForeignKey(TrajectoryResults.ResultID), nullable=False) DataValue = Column('datavalue', Float(53), nullable=False) @@ -1848,7 +1768,6 @@ class TrajectoryResultValues(Base): class TransectResultValues(Base): - ValueID = Column('valueid', BigIntegerType, primary_key=True) ResultID = Column('resultid', ForeignKey(TransectResults.ResultID), nullable=False) DataValue = Column('datavalue', Float(53), nullable=False) @@ -1896,7 +1815,6 @@ class TransectResultValues(Base): class CategoricalResultValueAnnotations(Base): - BridgeID = Column('bridgeid', Integer, primary_key=True, nullable=False) ValueID = Column('valueid', BigIntegerType, ForeignKey(CategoricalResultValues.ValueID), nullable=False) AnnotationID = Column('annotationid', ForeignKey(Annotations.AnnotationID), nullable=False) @@ -1906,7 +1824,6 @@ class CategoricalResultValueAnnotations(Base): class MeasurementResultValueAnnotations(Base): - BridgeID = Column('bridgeid', Integer, primary_key=True, nullable=False) ValueID = Column('valueid', BigIntegerType, ForeignKey(MeasurementResultValues.ValueID), nullable=False) AnnotationID = Column('annotationid', ForeignKey(Annotations.AnnotationID), nullable=False) @@ -1916,7 +1833,6 @@ class MeasurementResultValueAnnotations(Base): class PointCoverageResultValueAnnotations(Base): - BridgeID = Column('bridgeid', Integer, primary_key=True, nullable=False) ValueID = Column('valueid', BigIntegerType, 
ForeignKey(PointCoverageResultValues.ValueID), nullable=False) AnnotationID = Column('annotationid', ForeignKey(Annotations.AnnotationID), nullable=False) @@ -1926,7 +1842,6 @@ class PointCoverageResultValueAnnotations(Base): class ProfileResultValueAnnotations(Base): - BridgeID = Column('bridgeid', Integer, primary_key=True, nullable=False) ValueID = Column('valueid', BigIntegerType, ForeignKey(ProfileResultValues.ValueID), nullable=False) AnnotationID = Column('annotationid', ForeignKey(Annotations.AnnotationID), nullable=False) @@ -1936,7 +1851,6 @@ class ProfileResultValueAnnotations(Base): class SectionResultValueAnnotations(Base): - BridgeID = Column('bridgeid', Integer, primary_key=True, nullable=False) ValueID = Column('valueid', BigIntegerType, ForeignKey(SectionResultValues.ValueID), nullable=False) AnnotationID = Column('annotationid', ForeignKey(Annotations.AnnotationID), nullable=False) @@ -1946,7 +1860,6 @@ class SectionResultValueAnnotations(Base): class SpectraResultValueAnnotations(Base): - BridgeID = Column('bridgeid', Integer, primary_key=True, nullable=False) ValueID = Column('valueid', BigIntegerType, ForeignKey(SpectraResultValues.ValueID), nullable=False) AnnotationID = Column('annotationid', ForeignKey(Annotations.AnnotationID), nullable=False) @@ -1956,7 +1869,6 @@ class SpectraResultValueAnnotations(Base): class TimeSeriesResultValueAnnotations(Base): - BridgeID = Column('bridgeid', Integer, primary_key=True, nullable=False) ValueID = Column('valueid', BigIntegerType, ForeignKey(TimeSeriesResultValues.ValueID), nullable=False) AnnotationID = Column('annotationid', ForeignKey(Annotations.AnnotationID), nullable=False) @@ -1966,7 +1878,6 @@ class TimeSeriesResultValueAnnotations(Base): class TrajectoryResultValueAnnotations(Base): - BridgeID = Column('bridgeid', Integer, primary_key=True, nullable=False) ValueID = Column('valueid', BigIntegerType, ForeignKey(TrajectoryResultValues.ValueID), nullable=False) AnnotationID = 
Column('annotationid', ForeignKey(Annotations.AnnotationID), nullable=False) @@ -1976,7 +1887,6 @@ class TrajectoryResultValueAnnotations(Base): class TransectResultValueAnnotations(Base): - BridgeID = Column('bridgeid', Integer, primary_key=True, nullable=False) ValueID = Column('valueid', BigIntegerType, ForeignKey(TransectResultValues.ValueID), nullable=False) AnnotationID = Column('annotationid', ForeignKey(Annotations.AnnotationID), nullable=False) From b751673c5fb3772efc6a0338e00b2f18a90e99cd Mon Sep 17 00:00:00 2001 From: lsetiawan Date: Wed, 10 Jan 2018 09:50:18 -0800 Subject: [PATCH 51/55] Update Travis to exclude python code in forms, since this was auto created. --- .travis.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.travis.yml b/.travis.yml index 422e41d..14975c2 100644 --- a/.travis.yml +++ b/.travis.yml @@ -74,7 +74,7 @@ script: fi - if [[ $TEST_TARGET == 'coding_standards' ]]; then - find . -type f -name "*.py" ! -name 'conf.py' ! -name '_version.py' ! -name 'versioneer.py' | xargs flake8 --max-line-length=110 ; + find . -type f -name "*.py" ! -name 'conf.py' ! -name '_version.py' ! -name 'versioneer.py' ! 
-name '*DBConfig.py' | xargs flake8 --max-line-length=110 ; fi - if [[ $TEST_TARGET == 'docs' ]]; then From 7b59e773aebeda2e8a636043517e0ccbd3483f61 Mon Sep 17 00:00:00 2001 From: lsetiawan Date: Wed, 10 Jan 2018 09:53:48 -0800 Subject: [PATCH 52/55] Clean up tests --- tests/test_connection.py | 2 ++ tests/test_odm2/test_readservice.py | 38 +++++++++++++---------------- 2 files changed, 19 insertions(+), 21 deletions(-) diff --git a/tests/test_connection.py b/tests/test_connection.py index e233eff..a8bfacf 100644 --- a/tests/test_connection.py +++ b/tests/test_connection.py @@ -2,7 +2,9 @@ from odm2api.ODM2.models import CVElevationDatum from odm2api.ODMconnection import dbconnection + import pytest + from sqlalchemy.engine import reflection diff --git a/tests/test_odm2/test_readservice.py b/tests/test_odm2/test_readservice.py index f1ca669..f1189aa 100644 --- a/tests/test_odm2/test_readservice.py +++ b/tests/test_odm2/test_readservice.py @@ -11,7 +11,6 @@ import sqlalchemy from sqlalchemy.orm import class_mapper - globals_vars = {} @@ -75,7 +74,6 @@ def setup(self): self.engine = globals_vars['engine'] self.db = globals_vars['db'] - # Sampling Features def test_getAllSamplingFeatures(self): # get all models from the database @@ -91,7 +89,7 @@ def test_getSamplingFeatureByID(self): # get all simulations using the api resapi = self.reader.getSamplingFeatures(ids=[sfid]) assert resapi is not None - + def test_getSamplingFeatureByCode(self): # get all models from the database res = self.engine.execute('SELECT * FROM SamplingFeatures').fetchone() @@ -100,7 +98,7 @@ def test_getSamplingFeatureByCode(self): resapi = self.reader.getSamplingFeatures(codes=[code]) assert resapi is not None -#DataSets + # DataSets def test_getDataSets(self): # get all datasets from the database ds = self.engine.execute('SELECT * FROM DataSets').fetchone() @@ -124,14 +122,13 @@ def test_getDataSetsValues(self): dsr = self.engine.execute('SELECT * FROM DataSetsResults').fetchone() dsid = 
dsr[2] - values= self.reader.getDataSetsValues(ids=[dsid]) + values = self.reader.getDataSetsValues(ids=[dsid]) assert values is not None assert len(values) > 0 - def test_getSamplingFeatureDataSets(self): try: - #find a sampling feature that is associated with a dataset + # find a sampling feature that is associated with a dataset sf = self.engine.execute( 'SELECT * from SamplingFeatures as sf ' 'inner join FeatureActions as fa on fa.SamplingFeatureID == sf.SamplingFeatureID ' @@ -140,7 +137,7 @@ def test_getSamplingFeatureDataSets(self): ).fetchone() assert len(sf) > 0 - #get the dataset associated with the sampling feature + # get the dataset associated with the sampling feature ds = self.engine.execute( 'SELECT * from DataSetsResults as ds ' 'inner join Results as r on r.ResultID == ds.ResultID ' @@ -149,7 +146,7 @@ def test_getSamplingFeatureDataSets(self): ).fetchone() assert len(ds) > 0 - print (sf[0]) + print(sf[0]) # get the dataset associated with the sampling feature using hte api dsapi = self.reader.getSamplingFeatureDatasets(ids=[sf[0]]) @@ -159,11 +156,11 @@ def test_getSamplingFeatureDataSets(self): assert dsapi[0].SamplingFeatureID == sf[0] # assert ds[0] == dsapi[0] except Exception as ex: + print(ex) assert False finally: self.reader._session.rollback() - # Results def test_getAllResults(self): @@ -174,7 +171,6 @@ def test_getAllResults(self): resapi = self.reader.getResults() assert len(res) == len(resapi) - def test_getResultsByID(self): # get a result from the database res = self.engine.execute('SELECT * FROM Results').fetchone() @@ -186,18 +182,18 @@ def test_getResultsByID(self): def test_getResultsBySFID(self): sf = self.engine.execute( - 'SELECT * from SamplingFeatures as sf ' - 'inner join FeatureActions as fa on fa.SamplingFeatureID == sf.SamplingFeatureID ' - 'inner join Results as r on fa.FeatureActionID == r.FeatureActionID ' - ).fetchone() + 'SELECT * from SamplingFeatures as sf ' + 'inner join FeatureActions as fa on 
fa.SamplingFeatureID == sf.SamplingFeatureID ' + 'inner join Results as r on fa.FeatureActionID == r.FeatureActionID ' + ).fetchone() assert len(sf) > 0 sfid = sf[0] res = self.engine.execute( - 'SELECT * from Results as r ' - 'inner join FeatureActions as fa on fa.FeatureActionID == r.FeatureActionID ' - 'where fa.SamplingFeatureID = ' + str(sfid) - ).fetchone() + 'SELECT * from Results as r ' + 'inner join FeatureActions as fa on fa.FeatureActionID == r.FeatureActionID ' + 'where fa.SamplingFeatureID = ' + str(sfid) + ).fetchone() assert len(res) > 0 @@ -223,7 +219,6 @@ def test_getModelByCode(self): resapi = self.reader.getModels(codes=[modelCode]) assert resapi is not None - # RelatedModels def test_getRelatedModelsByID(self): # get related models by id using the api @@ -247,7 +242,8 @@ def test_getRelatedModelsByCode(self): # test invalid argument resapi = self.reader.getRelatedModels(code=234123) assert not resapi -# Simulations + + # Simulations def test_getAllSimulations(self): # get all simulation from the database res = self.engine.execute('SELECT * FROM Simulations').fetchall() From 27cf30b0e16553828160f5d385c347ae4c0acd40 Mon Sep 17 00:00:00 2001 From: lsetiawan Date: Wed, 10 Jan 2018 10:06:54 -0800 Subject: [PATCH 53/55] Ingore Sample.py --- .travis.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.travis.yml b/.travis.yml index 14975c2..5b99a41 100644 --- a/.travis.yml +++ b/.travis.yml @@ -74,7 +74,7 @@ script: fi - if [[ $TEST_TARGET == 'coding_standards' ]]; then - find . -type f -name "*.py" ! -name 'conf.py' ! -name '_version.py' ! -name 'versioneer.py' ! -name '*DBConfig.py' | xargs flake8 --max-line-length=110 ; + find . -type f -name "*.py" ! -name 'conf.py' ! -name '_version.py' ! -name 'versioneer.py' ! -name '*DBConfig.py' ! 
-name 'Sample.py' | xargs flake8 --max-line-length=110 ; fi - if [[ $TEST_TARGET == 'docs' ]]; then From badb49843479730bb9a8e6bbe2b5b811f05cd7c3 Mon Sep 17 00:00:00 2001 From: lsetiawan Date: Tue, 16 Jan 2018 13:02:15 -0800 Subject: [PATCH 54/55] Add warning message for lowercols parameter --- odm2api/ODM2/services/readService.py | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/odm2api/ODM2/services/readService.py b/odm2api/ODM2/services/readService.py index 61edb0e..88a5b85 100644 --- a/odm2api/ODM2/services/readService.py +++ b/odm2api/ODM2/services/readService.py @@ -1349,6 +1349,10 @@ def getResultValues(self, resultids, starttime=None, endtime=None, lowercols=Tru endtime (object, optional): End time to filter by as datetime object. lowercols (bool, optional): Make column names to be lowercase. Default to True. + **Please start upgrading your code to rely on CamelCase column names, + In a near-future release, + the default will be changed to False, + and later the parameter may be removed**. Returns: DataFrame: Pandas dataframe of result values. 
@@ -1399,6 +1403,12 @@ def getResultValues(self, resultids, starttime=None, endtime=None, lowercols=Tru ) if not lowercols: df.columns = [self._get_columns(ResultValues)[c] for c in df.columns] + else: + warnings.warn( + 'In a near-future release, ' + 'the parameter \'lowercols\' default will be changed to False, ' + 'and later the parameter may be removed.', + DeprecationWarning, stacklevel=2) return df except Exception as e: print('Error running Query: {}'.format(e)) From 58a3616ea70033119791c2cade29e0adb94549d6 Mon Sep 17 00:00:00 2001 From: lsetiawan Date: Tue, 16 Jan 2018 13:12:44 -0800 Subject: [PATCH 55/55] Add warning doc in getDataSetsValues --- odm2api/ODM2/services/readService.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/odm2api/ODM2/services/readService.py b/odm2api/ODM2/services/readService.py index 6d887df..cbe8cc4 100644 --- a/odm2api/ODM2/services/readService.py +++ b/odm2api/ODM2/services/readService.py @@ -935,6 +935,10 @@ def getDataSetsValues(self, ids=None, codes=None, uuids=None, dstype=None, lower `controlled vocabulary name `_. lowercols (bool, optional): Make column names to be lowercase. Default to True. + **Please start upgrading your code to rely on CamelCase column names, + In a near-future release, + the default will be changed to False, + and later the parameter may be removed**. Returns: