From 9fe2b3f573e834afb38e2cbe30492cef6600630d Mon Sep 17 00:00:00 2001 From: Aidar Negimatzhanov Date: Tue, 20 Aug 2024 13:26:26 +0200 Subject: [PATCH 1/5] Add digests verification --- src/verifier/core/reporting.py | 85 +++++++++++++++++-- tests/integration/test_service.py | 132 +++++++++++++++--------------- 2 files changed, 146 insertions(+), 71 deletions(-) diff --git a/src/verifier/core/reporting.py b/src/verifier/core/reporting.py index 68beca5..dfeb09c 100644 --- a/src/verifier/core/reporting.py +++ b/src/verifier/core/reporting.py @@ -4,11 +4,13 @@ import zipfile from collections import namedtuple from dataclasses import asdict +from hashlib import sha256 import falcon +import multibase from hio.base import doing from keri import kering -from keri.core import coring, Siger +from keri.core import coring, Siger, MtrDex from verifier.core.basing import ReportStats @@ -54,6 +56,13 @@ def loadEnds(app, hby, vdb, filer): app.add_route("/reports/{aid}/{dig}", reportEnd) +def get_non_prefixed_digest(dig): + prefix, digest = dig.split("_", 1) + if not digest: + raise kering.ValidationError(f"Digest ({dig}) must start with prefix") + return digest + + class Filer: """ Report status filer @@ -92,19 +101,82 @@ def create(self, aid, dig, filename, typ, stream): ) idx = 0 + non_pref_dig = get_non_prefixed_digest(dig) # Temporarily remove prefix + non_pref_dig = bytes.fromhex(non_pref_dig) + diger = coring.Diger(raw=non_pref_dig, code=MtrDex.SHA2_256) + + report = b'' while True: chunk = stream.read(4096) + report += chunk if not chunk: break - key = f"{dig}.{idx}".encode("utf-8") + key = f"{diger.qb64}.{idx}".encode("utf-8") self.vdb.setVal(db=self.vdb.imgs, key=key, val=chunk) idx += 1 stats.size += len(chunk) - diger = coring.Diger(qb64=dig) + if not diger.verify(report): + raise kering.ValidationError(f"Report digets({dig} verification failed)") + with tempfile.TemporaryFile("w+b") as tf: + tf.write(report) + tf.seek(0) + with tempfile.TemporaryDirectory() as tempdirname: + z = zipfile.ZipFile(tf) + z.extractall(path=tempdirname) + manifest = None + for root, dirs, _ in os.walk(tempdirname): + if "META-INF" not in dirs or 'reports' not in dirs: + continue + + metaDir = os.path.join(root, 'META-INF') + name = os.path.join(root, 'META-INF', 'reports.json') + if not os.path.exists(name): + continue + f = open(name, 'r') + manifest = json.load(f) + if "documentInfo" not in manifest: + raise kering.ValidationError("Invalid manifest file in report package, missing " + "'documentInfo") + reportsDir = os.path.join(root, 'reports') + files = os.listdir(reportsDir) + + if manifest is None: + raise kering.ValidationError("No manifest in file, invalid signed report package") + + docInfo = manifest["documentInfo"] + + if "digests" not in docInfo: + raise kering.ValidationError("No digests found in manifest file") + + digests = docInfo["digests"] + for digest in digests: + try: + file = digest["file"] + fullpath = os.path.normpath(os.path.join(metaDir, file)) + f = open(fullpath, 'rb') + file_object = f.read() + f.close() + non_pref_dig = get_non_prefixed_digest(digest["dig"]) # Remove prefix + non_pref_dig = bytes.fromhex(non_pref_dig) + tmp_diger = coring.Diger(raw=non_pref_dig, code=MtrDex.SHA2_256) + if not tmp_diger.verify(file_object): + raise kering.ValidationError(f"Invalid digest for file {fullpath}") + except KeyError as e: + raise kering.ValidationError(f"Invalid digest in manifest digest list" + f"missing '{e.args[0]}'") + except OSError: + raise kering.ValidationError(f"signature 
element={digest} point to invalid file") + + except Exception as e: + raise kering.ValidationError(f"{e}") + + + + self.vdb.rpts.add(keys=(aid,), val=diger) self.vdb.stts.add(keys=(stats.status,), val=diger) - self.vdb.stats.pin(keys=(dig,), val=stats) + self.vdb.stats.pin(keys=(diger.qb64,), val=stats) def get(self, dig): """ Return report stats for given report. @@ -116,7 +188,10 @@ def get(self, dig): ReportStats: Report stats for report with digest dig or None """ - if (stats := self.vdb.stats.get(keys=(dig,))) is None: + non_pref_dig = get_non_prefixed_digest(dig) # Temporarily remove prefix + non_pref_dig = bytes.fromhex(non_pref_dig) + diger = coring.Diger(raw=non_pref_dig, code=MtrDex.SHA2_256) + if (stats := self.vdb.stats.get(keys=(diger.qb64,))) is None: return None return stats diff --git a/tests/integration/test_service.py b/tests/integration/test_service.py index 0806fa7..8ba7f9e 100644 --- a/tests/integration/test_service.py +++ b/tests/integration/test_service.py @@ -112,79 +112,79 @@ def get(): if exceptions: raise exceptions[0] -# @pytest.mark.manual -# def test_service_integration(seeder): -# with habbing.openHab(name="sid", temp=True, salt=b'0123456789abcdef') as (hby, hab): - -# seeder.seedSchema(db=hby.db) -# regery, registry, verifier, seqner = reg_and_verf(hby, hab, registryName="qvireg") -# qvicred = get_qvi_cred(issuer=hab.pre, recipient=hab.pre, schema=Schema.QVI_SCHEMA, registry=registry) -# hab, qcrdntler, qsaid, qkmsgs, qtmsgs, qimsgs, qvimsgs = get_cred(hby, hab, regery, registry, verifier, Schema.QVI_SCHEMA, qvicred, seqner) +@pytest.mark.manual +def test_service_integration(seeder): + with habbing.openHab(name="sid", temp=True, salt=b'0123456789abcdef') as (hby, hab): + + seeder.seedSchema(db=hby.db) + regery, registry, verifier, seqner = reg_and_verf(hby, hab, registryName="qvireg") + qvicred = get_qvi_cred(issuer=hab.pre, recipient=hab.pre, schema=Schema.QVI_SCHEMA, registry=registry) + hab, qcrdntler, qsaid, qkmsgs, qtmsgs, qimsgs, qvimsgs = get_cred(hby, hab, regery, registry, verifier, Schema.QVI_SCHEMA, qvicred, seqner) -# qviedge = get_qvi_edge(qvicred.sad["d"], Schema.QVI_SCHEMA) + qviedge = get_qvi_edge(qvicred.sad["d"], Schema.QVI_SCHEMA) -# leicred = get_lei_cred(issuer=hab.pre, recipient=hab.pre, schema=Schema.LEI_SCHEMA, registry=registry, sedge=qviedge) -# hab, lcrdntler, lsaid, lkmsgs, ltmsgs, limsgs, leimsgs = get_cred(hby, hab, regery, registry, verifier, Schema.LEI_SCHEMA, leicred, seqner) + leicred = get_lei_cred(issuer=hab.pre, recipient=hab.pre, schema=Schema.LEI_SCHEMA, registry=registry, sedge=qviedge) + hab, lcrdntler, lsaid, lkmsgs, ltmsgs, limsgs, leimsgs = get_cred(hby, hab, regery, registry, verifier, Schema.LEI_SCHEMA, leicred, seqner) -# #chained ecr auth cred -# eaedge = get_ecr_auth_edge(lsaid,Schema.LEI_SCHEMA) + #chained ecr auth cred + eaedge = get_ecr_auth_edge(lsaid,Schema.LEI_SCHEMA) -# eacred = get_ecr_auth_cred(aid=hab.pre, issuer=hab.pre, recipient=hab.pre, schema=Schema.ECR_AUTH_SCHEMA, registry=registry, sedge=eaedge) -# hab, eacrdntler, easaid, eakmsgs, eatmsgs, eaimsgs, eamsgs = get_cred(hby, hab, regery, registry, verifier, Schema.ECR_AUTH_SCHEMA, eacred, seqner) + eacred = get_ecr_auth_cred(aid=hab.pre, issuer=hab.pre, recipient=hab.pre, schema=Schema.ECR_AUTH_SCHEMA, registry=registry, sedge=eaedge) + hab, eacrdntler, easaid, eakmsgs, eatmsgs, eaimsgs, eamsgs = get_cred(hby, hab, regery, registry, verifier, Schema.ECR_AUTH_SCHEMA, eacred, seqner) -# #chained ecr auth cred -# ecredge = 
get_ecr_edge(easaid,Schema.ECR_AUTH_SCHEMA) + #chained ecr auth cred + ecredge = get_ecr_edge(easaid,Schema.ECR_AUTH_SCHEMA) -# ecr = get_ecr_cred(issuer=hab.pre, recipient=hab.pre, schema=Schema.ECR_SCHEMA, registry=registry, sedge=ecredge) -# hab, eccrdntler, ecsaid, eckmsgs, ectmsgs, ecimsgs, ecmsgs = get_cred(hby, hab, regery, registry, verifier, Schema.ECR_SCHEMA, ecr, seqner) + ecr = get_ecr_cred(issuer=hab.pre, recipient=hab.pre, schema=Schema.ECR_SCHEMA, registry=registry, sedge=ecredge) + hab, eccrdntler, ecsaid, eckmsgs, ectmsgs, ecimsgs, ecmsgs = get_cred(hby, hab, regery, registry, verifier, Schema.ECR_SCHEMA, ecr, seqner) -# app = falcon.App( -# middleware=falcon.CORSMiddleware( -# allow_origins='*', -# allow_credentials='*', -# expose_headers=['cesr-attachment', 'cesr-date', 'content-type'])) -# vdb = basing.VerifierBaser(name=hby.name, temp=True) -# verifying.setup(app=app, hby=hby, vdb=vdb, reger=eccrdntler.rgy.reger) -# server = http.Server(port=port, app=app) -# httpServerDoer = http.ServerDoer(server=server) -# # class testCf: -# # def get(): -# # return dict(LEIs=[f"{LEI1}",f"{LEI2}"]) -# rootsCf = configing.Configer(name="verifier-config-public.json", -# headDirPath="/Users/meenyleeny/VSCode/vlei-verifier/scripts", -# base="", -# temp=False, reopen=True, clear=False) -# authDoers = authorizing.setup(hby, vdb=vdb, reger=eccrdntler.rgy.reger, cf=rootsCf) - -# reportDoers = reporting.setup(app=app, hby=hby, vdb=vdb) - -# doers = authDoers + reportDoers + [httpServerDoer] -# limit = 0.25 -# tock = 0.03125 -# doist = doing.Doist(limit=limit, tock=tock) -# doist.doers = doers -# doist.enter() -# # assert len(doist.deeds) == 2 -# # assert [val[1] for val in doist.deeds] == [0.0, 0.0] # retymes -# # for doer in doers: -# # assert doer.baser.opened -# # assert "_test/keri/db/test" in doer.baser.path -# try: -# doist.recur() -# except Exception as e: -# raise ValueError(f"Likely you have another service running on {port}") - -# issAndCred = bytearray() -# # issAndCred.extend(kmsgs) -# # issAndCred.extend(tmsgs) -# # issAndCred.extend(imsgs) -# issAndCred.extend(ecmsgs) -# acdc = issAndCred.decode("utf-8") - -# # use this for integration testing debugging sessions -# while True: -# time.sleep(1) -# doist.recur() + app = falcon.App( + middleware=falcon.CORSMiddleware( + allow_origins='*', + allow_credentials='*', + expose_headers=['cesr-attachment', 'cesr-date', 'content-type'])) + vdb = basing.VerifierBaser(name=hby.name, temp=True) + verifying.setup(app=app, hby=hby, vdb=vdb, reger=eccrdntler.rgy.reger) + server = http.Server(port=port, app=app) + httpServerDoer = http.ServerDoer(server=server) + # class testCf: + # def get(): + # return dict(LEIs=[f"{LEI1}",f"{LEI2}"]) + rootsCf = configing.Configer(name="verifier-config-public.json", + headDirPath="/home/aidar/Desktop/git/gleif/vlei-verifier/scripts", + base="", + temp=False, reopen=True, clear=False) + authDoers = authorizing.setup(hby, vdb=vdb, reger=eccrdntler.rgy.reger, cf=rootsCf) + + reportDoers = reporting.setup(app=app, hby=hby, vdb=vdb) + + doers = authDoers + reportDoers + [httpServerDoer] + limit = 0.25 + tock = 0.03125 + doist = doing.Doist(limit=limit, tock=tock) + doist.doers = doers + doist.enter() + # assert len(doist.deeds) == 2 + # assert [val[1] for val in doist.deeds] == [0.0, 0.0] # retymes + # for doer in doers: + # assert doer.baser.opened + # assert "_test/keri/db/test" in doer.baser.path + try: + doist.recur() + except Exception as e: + raise ValueError(f"Likely you have another service running on 
{port}") + + issAndCred = bytearray() + # issAndCred.extend(kmsgs) + # issAndCred.extend(tmsgs) + # issAndCred.extend(imsgs) + issAndCred.extend(ecmsgs) + acdc = issAndCred.decode("utf-8") + + # use this for integration testing debugging sessions + while True: + time.sleep(1) + doist.recur() def presentation_request(said, acdc, exceptions): try: From 54d0c499195229b1018bc4959b1cce162d6414cb Mon Sep 17 00:00:00 2001 From: Aidar Negimatzhanov Date: Tue, 20 Aug 2024 13:32:14 +0200 Subject: [PATCH 2/5] Fix: remove multibase package import --- src/verifier/core/reporting.py | 1 - 1 file changed, 1 deletion(-) diff --git a/src/verifier/core/reporting.py b/src/verifier/core/reporting.py index dfeb09c..3fcbad7 100644 --- a/src/verifier/core/reporting.py +++ b/src/verifier/core/reporting.py @@ -7,7 +7,6 @@ from hashlib import sha256 import falcon -import multibase from hio.base import doing from keri import kering from keri.core import coring, Siger, MtrDex From 44cf0a4329e65769e2a280fc638be3c4c41603ea Mon Sep 17 00:00:00 2001 From: Aidar Negimatzhanov Date: Tue, 20 Aug 2024 13:35:58 +0200 Subject: [PATCH 3/5] Fix: comment manual integration test --- tests/integration/test_service.py | 146 +++++++++++++++--------------- 1 file changed, 73 insertions(+), 73 deletions(-) diff --git a/tests/integration/test_service.py b/tests/integration/test_service.py index 8ba7f9e..c01f56f 100644 --- a/tests/integration/test_service.py +++ b/tests/integration/test_service.py @@ -112,79 +112,79 @@ def get(): if exceptions: raise exceptions[0] -@pytest.mark.manual -def test_service_integration(seeder): - with habbing.openHab(name="sid", temp=True, salt=b'0123456789abcdef') as (hby, hab): - - seeder.seedSchema(db=hby.db) - regery, registry, verifier, seqner = reg_and_verf(hby, hab, registryName="qvireg") - qvicred = get_qvi_cred(issuer=hab.pre, recipient=hab.pre, schema=Schema.QVI_SCHEMA, registry=registry) - hab, qcrdntler, qsaid, qkmsgs, qtmsgs, qimsgs, qvimsgs = get_cred(hby, hab, regery, registry, verifier, Schema.QVI_SCHEMA, qvicred, seqner) - - qviedge = get_qvi_edge(qvicred.sad["d"], Schema.QVI_SCHEMA) - - leicred = get_lei_cred(issuer=hab.pre, recipient=hab.pre, schema=Schema.LEI_SCHEMA, registry=registry, sedge=qviedge) - hab, lcrdntler, lsaid, lkmsgs, ltmsgs, limsgs, leimsgs = get_cred(hby, hab, regery, registry, verifier, Schema.LEI_SCHEMA, leicred, seqner) - - #chained ecr auth cred - eaedge = get_ecr_auth_edge(lsaid,Schema.LEI_SCHEMA) - - eacred = get_ecr_auth_cred(aid=hab.pre, issuer=hab.pre, recipient=hab.pre, schema=Schema.ECR_AUTH_SCHEMA, registry=registry, sedge=eaedge) - hab, eacrdntler, easaid, eakmsgs, eatmsgs, eaimsgs, eamsgs = get_cred(hby, hab, regery, registry, verifier, Schema.ECR_AUTH_SCHEMA, eacred, seqner) - - #chained ecr auth cred - ecredge = get_ecr_edge(easaid,Schema.ECR_AUTH_SCHEMA) - - ecr = get_ecr_cred(issuer=hab.pre, recipient=hab.pre, schema=Schema.ECR_SCHEMA, registry=registry, sedge=ecredge) - hab, eccrdntler, ecsaid, eckmsgs, ectmsgs, ecimsgs, ecmsgs = get_cred(hby, hab, regery, registry, verifier, Schema.ECR_SCHEMA, ecr, seqner) - - app = falcon.App( - middleware=falcon.CORSMiddleware( - allow_origins='*', - allow_credentials='*', - expose_headers=['cesr-attachment', 'cesr-date', 'content-type'])) - vdb = basing.VerifierBaser(name=hby.name, temp=True) - verifying.setup(app=app, hby=hby, vdb=vdb, reger=eccrdntler.rgy.reger) - server = http.Server(port=port, app=app) - httpServerDoer = http.ServerDoer(server=server) - # class testCf: - # def get(): - # return 
dict(LEIs=[f"{LEI1}",f"{LEI2}"]) - rootsCf = configing.Configer(name="verifier-config-public.json", - headDirPath="/home/aidar/Desktop/git/gleif/vlei-verifier/scripts", - base="", - temp=False, reopen=True, clear=False) - authDoers = authorizing.setup(hby, vdb=vdb, reger=eccrdntler.rgy.reger, cf=rootsCf) - - reportDoers = reporting.setup(app=app, hby=hby, vdb=vdb) - - doers = authDoers + reportDoers + [httpServerDoer] - limit = 0.25 - tock = 0.03125 - doist = doing.Doist(limit=limit, tock=tock) - doist.doers = doers - doist.enter() - # assert len(doist.deeds) == 2 - # assert [val[1] for val in doist.deeds] == [0.0, 0.0] # retymes - # for doer in doers: - # assert doer.baser.opened - # assert "_test/keri/db/test" in doer.baser.path - try: - doist.recur() - except Exception as e: - raise ValueError(f"Likely you have another service running on {port}") - - issAndCred = bytearray() - # issAndCred.extend(kmsgs) - # issAndCred.extend(tmsgs) - # issAndCred.extend(imsgs) - issAndCred.extend(ecmsgs) - acdc = issAndCred.decode("utf-8") - - # use this for integration testing debugging sessions - while True: - time.sleep(1) - doist.recur() +# @pytest.mark.manual +# def test_service_integration(seeder): +# with habbing.openHab(name="sid", temp=True, salt=b'0123456789abcdef') as (hby, hab): +# +# seeder.seedSchema(db=hby.db) +# regery, registry, verifier, seqner = reg_and_verf(hby, hab, registryName="qvireg") +# qvicred = get_qvi_cred(issuer=hab.pre, recipient=hab.pre, schema=Schema.QVI_SCHEMA, registry=registry) +# hab, qcrdntler, qsaid, qkmsgs, qtmsgs, qimsgs, qvimsgs = get_cred(hby, hab, regery, registry, verifier, Schema.QVI_SCHEMA, qvicred, seqner) +# +# qviedge = get_qvi_edge(qvicred.sad["d"], Schema.QVI_SCHEMA) +# +# leicred = get_lei_cred(issuer=hab.pre, recipient=hab.pre, schema=Schema.LEI_SCHEMA, registry=registry, sedge=qviedge) +# hab, lcrdntler, lsaid, lkmsgs, ltmsgs, limsgs, leimsgs = get_cred(hby, hab, regery, registry, verifier, Schema.LEI_SCHEMA, leicred, seqner) +# +# #chained ecr auth cred +# eaedge = get_ecr_auth_edge(lsaid,Schema.LEI_SCHEMA) +# +# eacred = get_ecr_auth_cred(aid=hab.pre, issuer=hab.pre, recipient=hab.pre, schema=Schema.ECR_AUTH_SCHEMA, registry=registry, sedge=eaedge) +# hab, eacrdntler, easaid, eakmsgs, eatmsgs, eaimsgs, eamsgs = get_cred(hby, hab, regery, registry, verifier, Schema.ECR_AUTH_SCHEMA, eacred, seqner) +# +# #chained ecr auth cred +# ecredge = get_ecr_edge(easaid,Schema.ECR_AUTH_SCHEMA) +# +# ecr = get_ecr_cred(issuer=hab.pre, recipient=hab.pre, schema=Schema.ECR_SCHEMA, registry=registry, sedge=ecredge) +# hab, eccrdntler, ecsaid, eckmsgs, ectmsgs, ecimsgs, ecmsgs = get_cred(hby, hab, regery, registry, verifier, Schema.ECR_SCHEMA, ecr, seqner) +# +# app = falcon.App( +# middleware=falcon.CORSMiddleware( +# allow_origins='*', +# allow_credentials='*', +# expose_headers=['cesr-attachment', 'cesr-date', 'content-type'])) +# vdb = basing.VerifierBaser(name=hby.name, temp=True) +# verifying.setup(app=app, hby=hby, vdb=vdb, reger=eccrdntler.rgy.reger) +# server = http.Server(port=port, app=app) +# httpServerDoer = http.ServerDoer(server=server) +# # class testCf: +# # def get(): +# # return dict(LEIs=[f"{LEI1}",f"{LEI2}"]) +# rootsCf = configing.Configer(name="verifier-config-public.json", +# headDirPath="/home/aidar/Desktop/git/gleif/vlei-verifier/scripts", +# base="", +# temp=False, reopen=True, clear=False) +# authDoers = authorizing.setup(hby, vdb=vdb, reger=eccrdntler.rgy.reger, cf=rootsCf) +# +# reportDoers = reporting.setup(app=app, hby=hby, vdb=vdb) 
+# +# doers = authDoers + reportDoers + [httpServerDoer] +# limit = 0.25 +# tock = 0.03125 +# doist = doing.Doist(limit=limit, tock=tock) +# doist.doers = doers +# doist.enter() +# # assert len(doist.deeds) == 2 +# # assert [val[1] for val in doist.deeds] == [0.0, 0.0] # retymes +# # for doer in doers: +# # assert doer.baser.opened +# # assert "_test/keri/db/test" in doer.baser.path +# try: +# doist.recur() +# except Exception as e: +# raise ValueError(f"Likely you have another service running on {port}") +# +# issAndCred = bytearray() +# # issAndCred.extend(kmsgs) +# # issAndCred.extend(tmsgs) +# # issAndCred.extend(imsgs) +# issAndCred.extend(ecmsgs) +# acdc = issAndCred.decode("utf-8") +# +# # use this for integration testing debugging sessions +# while True: +# time.sleep(1) +# doist.recur() def presentation_request(said, acdc, exceptions): try: From e8c66e2ceea4563030612139f7b76eaea54a07f4 Mon Sep 17 00:00:00 2001 From: Aidar Negimatzhanov Date: Tue, 20 Aug 2024 17:24:14 +0200 Subject: [PATCH 4/5] Add unit tests for digest generation --- src/verifier/core/reporting.py | 21 ++++----------------- src/verifier/core/utils.py | 24 ++++++++++++++++++++++++ tests/core/test_reporting.py | 32 ++++++++++++++++++++++++++++++++ 3 files changed, 60 insertions(+), 17 deletions(-) create mode 100644 src/verifier/core/utils.py create mode 100644 tests/core/test_reporting.py diff --git a/src/verifier/core/reporting.py b/src/verifier/core/reporting.py index 3fcbad7..48ccbf0 100644 --- a/src/verifier/core/reporting.py +++ b/src/verifier/core/reporting.py @@ -12,7 +12,7 @@ from keri.core import coring, Siger, MtrDex from verifier.core.basing import ReportStats - +from verifier.core.utils import DigerBuilder # Report Statuses. Reportage = namedtuple("Reportage", "accepted verified failed") @@ -55,13 +55,6 @@ def loadEnds(app, hby, vdb, filer): app.add_route("/reports/{aid}/{dig}", reportEnd) -def get_non_prefixed_digest(dig): - prefix, digest = dig.split("_", 1) - if not digest: - raise kering.ValidationError(f"Digest ({dig}) must start with prefix") - return digest - - class Filer: """ Report status filer @@ -100,10 +93,8 @@ def create(self, aid, dig, filename, typ, stream): ) idx = 0 - non_pref_dig = get_non_prefixed_digest(dig) # Temporarily remove prefix - non_pref_dig = bytes.fromhex(non_pref_dig) - diger = coring.Diger(raw=non_pref_dig, code=MtrDex.SHA2_256) + diger = DigerBuilder.sha256(dig) report = b'' while True: chunk = stream.read(4096) @@ -156,9 +147,7 @@ def create(self, aid, dig, filename, typ, stream): f = open(fullpath, 'rb') file_object = f.read() f.close() - non_pref_dig = get_non_prefixed_digest(digest["dig"]) # Remove prefix - non_pref_dig = bytes.fromhex(non_pref_dig) - tmp_diger = coring.Diger(raw=non_pref_dig, code=MtrDex.SHA2_256) + tmp_diger = DigerBuilder.sha256(digest["dig"]) if not tmp_diger.verify(file_object): raise kering.ValidationError(f"Invalid digest for file {fullpath}") except KeyError as e: @@ -187,9 +176,7 @@ def get(self, dig): ReportStats: Report stats for report with digest dig or None """ - non_pref_dig = get_non_prefixed_digest(dig) # Temporarily remove prefix - non_pref_dig = bytes.fromhex(non_pref_dig) - diger = coring.Diger(raw=non_pref_dig, code=MtrDex.SHA2_256) + diger = DigerBuilder.sha256(dig) if (stats := self.vdb.stats.get(keys=(diger.qb64,))) is None: return None diff --git a/src/verifier/core/utils.py b/src/verifier/core/utils.py new file mode 100644 index 0000000..2345710 --- /dev/null +++ b/src/verifier/core/utils.py @@ -0,0 +1,24 @@ +from keri 
import kering +from keri.core import MtrDex, coring + + +class DigerBuilder: + @staticmethod + def sha256(dig): + try: + non_pref_dig = DigerBuilder._get_non_prefixed_digest(dig) # Temporarily remove prefix + non_pref_dig = bytes.fromhex(non_pref_dig) + diger = coring.Diger(raw=non_pref_dig, code=MtrDex.SHA2_256) + return diger + except Exception as e: + raise e + + @staticmethod + def _get_non_prefixed_digest(dig): + try: + prefix, digest = dig.split("_", 1) + except ValueError: + raise kering.ValidationError(f"Digest ({dig}) must start with prefix") + except Exception: + raise kering.ValidationError(f"Invalid digest {dig}") + return digest diff --git a/tests/core/test_reporting.py b/tests/core/test_reporting.py new file mode 100644 index 0000000..bb2c6cc --- /dev/null +++ b/tests/core/test_reporting.py @@ -0,0 +1,32 @@ +import pytest +from hashlib import sha256 + +from keri import kering + +from src.verifier.core.utils import DigerBuilder + + +def test_diger_builder(): + BASE_STR = "fefUBIUhdo9032bfHf0UNONF0kubni9HnF22L0KD2".encode() + dig = sha256(BASE_STR).hexdigest() + dig = f"sha256_{dig}" + diger = DigerBuilder.sha256(dig) + assert diger.verify(BASE_STR) is True + + +def test_diger_builder_fail(): + BASE_STR = "fefUBIUhdo9032bfHf0UNONF0kubni9HnF22L0KD2".encode() + WRONG_BASE_STR = "fefUBIUhdo9032bfHf0UNONF0kubni9HnF22L0KDT".encode() + dig = sha256(BASE_STR).hexdigest() + dig = f"sha256_{dig}" + diger = DigerBuilder.sha256(dig) + assert diger.verify(WRONG_BASE_STR) is False + + +def test_diger_builder_wrong_dig(): + BASE_STR = "fefUBIUhdo9032bfHf0UNONF0kubni9HnF22L0KD2".encode() + dig = sha256(BASE_STR).hexdigest() + # Here the dig is not prefixed + with pytest.raises(kering.ValidationError) as exc_info: + diger = DigerBuilder.sha256(dig) + From 03a3445296a0569acff23d01825dc291960899a9 Mon Sep 17 00:00:00 2001 From: Aidar Negimatzhanov Date: Tue, 20 Aug 2024 18:02:45 +0200 Subject: [PATCH 5/5] Update unit tests --- src/verifier/core/reporting.py | 32 +++++++------------------------- src/verifier/core/utils.py | 9 ++++++--- 2 files changed, 13 insertions(+), 28 deletions(-) diff --git a/src/verifier/core/reporting.py b/src/verifier/core/reporting.py index 48ccbf0..ba6c1c8 100644 --- a/src/verifier/core/reporting.py +++ b/src/verifier/core/reporting.py @@ -108,6 +108,7 @@ def create(self, aid, dig, filename, typ, stream): if not diger.verify(report): raise kering.ValidationError(f"Report digets({dig} verification failed)") + with tempfile.TemporaryFile("w+b") as tf: tf.write(report) tf.seek(0) @@ -118,7 +119,6 @@ def create(self, aid, dig, filename, typ, stream): for root, dirs, _ in os.walk(tempdirname): if "META-INF" not in dirs or 'reports' not in dirs: continue - metaDir = os.path.join(root, 'META-INF') name = os.path.join(root, 'META-INF', 'reports.json') if not os.path.exists(name): @@ -128,22 +128,17 @@ def create(self, aid, dig, filename, typ, stream): if "documentInfo" not in manifest: raise kering.ValidationError("Invalid manifest file in report package, missing " "'documentInfo") - reportsDir = os.path.join(root, 'reports') - files = os.listdir(reportsDir) - if manifest is None: raise kering.ValidationError("No manifest in file, invalid signed report package") docInfo = manifest["documentInfo"] - if "digests" not in docInfo: raise kering.ValidationError("No digests found in manifest file") digests = docInfo["digests"] for digest in digests: try: - file = digest["file"] - fullpath = os.path.normpath(os.path.join(metaDir, file)) + fullpath = 
os.path.normpath(os.path.join(metaDir, digest["file"])) f = open(fullpath, 'rb') file_object = f.read() f.close() @@ -155,13 +150,9 @@ def create(self, aid, dig, filename, typ, stream): f"missing '{e.args[0]}'") except OSError: raise kering.ValidationError(f"signature element={digest} point to invalid file") - except Exception as e: raise kering.ValidationError(f"{e}") - - - self.vdb.rpts.add(keys=(aid,), val=diger) self.vdb.stts.add(keys=(stats.status,), val=diger) self.vdb.stats.pin(keys=(diger.qb64,), val=stats) @@ -177,10 +168,7 @@ def get(self, dig): """ diger = DigerBuilder.sha256(dig) - if (stats := self.vdb.stats.get(keys=(diger.qb64,))) is None: - return None - - return stats + return self.vdb.stats.get(keys=(diger.qb64,)) def getData(self, dig): """ Generator that yields image data in 4k chunks for identifier @@ -202,7 +190,7 @@ def getAcceptedIter(self): """ Generator that yields Diger values for all reports currently in Accepted status """ - for diger in self.vdb.stts.getIter(keys=(ReportStatus.accepted, )): + for diger in self.vdb.stts.getIter(keys=(ReportStatus.accepted,)): yield diger def update(self, diger, status, msg=None): @@ -344,6 +332,7 @@ def on_post(self, req, rep, aid, dig): rep.status = falcon.HTTP_202 rep.data = json.dumps(dict(msg=f"Upload {dig} received from {aid}")).encode("utf-8") + class ReportVerifier(doing.Doer): """ Doer (coroutine) capable of processing submitted report files @@ -355,7 +344,6 @@ class ReportVerifier(doing.Doer): 4. Verifies the signatures for each file against the contents of the file. 5. Validates that the submitter has signed all files in the report package. - """ def __init__(self, hby, vdb, filer, **kwargs): @@ -464,7 +452,7 @@ def recur(self, tyme): # raise kering.ValidationError(f"verfer {siger.verfer.qb64} invalid") # if siger.verfer.code != "D": # raise kering.ValidationError(f"verfer code {siger.verfer.code} invalid") - + verfed.append(os.path.basename(fullpath)) except KeyError as e: @@ -472,7 +460,7 @@ def recur(self, tyme): f"missing '{e.args[0]}'") except OSError: raise kering.ValidationError(f"signature element={signature} point to invalid file") - + except Exception as e: raise kering.ValidationError(f"{e}") @@ -490,9 +478,3 @@ def recur(self, tyme): except (kering.ValidationError, zipfile.BadZipFile) as e: self.filer.update(diger, ReportStatus.failed, e.args[0]) print(e.args[0]) - - - - - - diff --git a/src/verifier/core/utils.py b/src/verifier/core/utils.py index 2345710..8ef5819 100644 --- a/src/verifier/core/utils.py +++ b/src/verifier/core/utils.py @@ -8,7 +8,7 @@ def sha256(dig): try: non_pref_dig = DigerBuilder._get_non_prefixed_digest(dig) # Temporarily remove prefix non_pref_dig = bytes.fromhex(non_pref_dig) - diger = coring.Diger(raw=non_pref_dig, code=MtrDex.SHA2_256) + diger = DigerBuilder.build_diger(non_pref_dig, MtrDex.SHA2_256) return diger except Exception as e: raise e @@ -19,6 +19,9 @@ def _get_non_prefixed_digest(dig): prefix, digest = dig.split("_", 1) except ValueError: raise kering.ValidationError(f"Digest ({dig}) must start with prefix") - except Exception: - raise kering.ValidationError(f"Invalid digest {dig}") return digest + + @staticmethod + def build_diger(raw, code): + diger = coring.Diger(raw=raw, code=code) + return diger
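
Reviewer note: the digest strings handled throughout this series use the `sha256_<hex>` form that the new `DigerBuilder.sha256()` parses (prefix stripped, hex decoded, wrapped in a `keri.core.coring.Diger` with `MtrDex.SHA2_256`). Below is a minimal sketch of that round trip, assuming only the `keri.core` calls already used in the patches; the helper names and sample bytes are illustrative, not part of the change set.

```python
# Sketch: produce a prefixed digest a client could upload, then verify it the
# same way DigerBuilder.sha256() does on the verifier side.
from hashlib import sha256

from keri.core import MtrDex, coring


def prefixed_sha256(content: bytes) -> str:
    """Return a digest string in the "sha256_<hex>" form expected by the verifier."""
    return f"sha256_{sha256(content).hexdigest()}"


def verify_prefixed_digest(dig: str, content: bytes) -> bool:
    """Mirror DigerBuilder.sha256(): strip the prefix, rebuild a Diger, verify."""
    _prefix, hexdigest = dig.split("_", 1)  # ValueError here maps to a ValidationError in the patch
    diger = coring.Diger(raw=bytes.fromhex(hexdigest), code=MtrDex.SHA2_256)
    return diger.verify(content)


if __name__ == "__main__":
    report = b"example report bytes"            # stand-in for a zipped report payload
    dig = prefixed_sha256(report)
    assert verify_prefixed_digest(dig, report)           # matching content verifies
    assert not verify_prefixed_digest(dig, b"tampered")  # altered content fails
```

The same parsing path is exercised by the new unit tests in `tests/core/test_reporting.py`, which build the digest with `f"sha256_{sha256(...).hexdigest()}"` and assert `diger.verify(...)` against the original and a mismatched payload.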