Add digests verification #26

Merged · 5 commits · Aug 20, 2024
81 changes: 62 additions & 19 deletions src/verifier/core/reporting.py
@@ -4,14 +4,15 @@
import zipfile
from collections import namedtuple
from dataclasses import asdict
from hashlib import sha256

import falcon
from hio.base import doing
from keri import kering
from keri.core import coring, Siger
from keri.core import coring, Siger, MtrDex

from verifier.core.basing import ReportStats

from verifier.core.utils import DigerBuilder

# Report Statuses.
Reportage = namedtuple("Reportage", "accepted verified failed")
@@ -92,19 +93,69 @@ def create(self, aid, dig, filename, typ, stream):
)

idx = 0

diger = DigerBuilder.sha256(dig)
report = b''
while True:
chunk = stream.read(4096)
report += chunk
if not chunk:
break
key = f"{dig}.{idx}".encode("utf-8")
key = f"{diger.qb64}.{idx}".encode("utf-8")
self.vdb.setVal(db=self.vdb.imgs, key=key, val=chunk)
idx += 1
stats.size += len(chunk)

diger = coring.Diger(qb64=dig)
if not diger.verify(report):
raise kering.ValidationError(f"Report digets({dig} verification failed)")

with tempfile.TemporaryFile("w+b") as tf:
tf.write(report)
tf.seek(0)
with tempfile.TemporaryDirectory() as tempdirname:
z = zipfile.ZipFile(tf)
z.extractall(path=tempdirname)
manifest = None
for root, dirs, _ in os.walk(tempdirname):
if "META-INF" not in dirs or 'reports' not in dirs:
continue
metaDir = os.path.join(root, 'META-INF')
name = os.path.join(root, 'META-INF', 'reports.json')
if not os.path.exists(name):
continue
with open(name, 'r') as f:
    manifest = json.load(f)
if "documentInfo" not in manifest:
raise kering.ValidationError("Invalid manifest file in report package, missing "
"'documentInfo")
if manifest is None:
raise kering.ValidationError("No manifest in file, invalid signed report package")

docInfo = manifest["documentInfo"]
if "digests" not in docInfo:
raise kering.ValidationError("No digests found in manifest file")

digests = docInfo["digests"]
for digest in digests:
try:
fullpath = os.path.normpath(os.path.join(metaDir, digest["file"]))
with open(fullpath, 'rb') as f:
    file_object = f.read()
tmp_diger = DigerBuilder.sha256(digest["dig"])
if not tmp_diger.verify(file_object):
raise kering.ValidationError(f"Invalid digest for file {fullpath}")
except KeyError as e:
raise kering.ValidationError(f"Invalid digest in manifest digest list"
f"missing '{e.args[0]}'")
except OSError:
raise kering.ValidationError(f"signature element={digest} point to invalid file")
except Exception as e:
raise kering.ValidationError(f"{e}")

self.vdb.rpts.add(keys=(aid,), val=diger)
self.vdb.stts.add(keys=(stats.status,), val=diger)
self.vdb.stats.pin(keys=(dig,), val=stats)
self.vdb.stats.pin(keys=(diger.qb64,), val=stats)

def get(self, dig):
""" Return report stats for given report.
@@ -116,10 +167,8 @@ def get(self, dig):
ReportStats: Report stats for report with digest dig or None

"""
if (stats := self.vdb.stats.get(keys=(dig,))) is None:
return None

return stats
diger = DigerBuilder.sha256(dig)
return self.vdb.stats.get(keys=(diger.qb64,))

def getData(self, dig):
""" Generator that yields image data in 4k chunks for identifier
@@ -141,7 +190,7 @@ def getAcceptedIter(self):
""" Generator that yields Diger values for all reports currently in Accepted status

"""
for diger in self.vdb.stts.getIter(keys=(ReportStatus.accepted, )):
for diger in self.vdb.stts.getIter(keys=(ReportStatus.accepted,)):
yield diger

def update(self, diger, status, msg=None):
@@ -283,6 +332,7 @@ def on_post(self, req, rep, aid, dig):
rep.status = falcon.HTTP_202
rep.data = json.dumps(dict(msg=f"Upload {dig} received from {aid}")).encode("utf-8")


class ReportVerifier(doing.Doer):
""" Doer (coroutine) capable of processing submitted report files

@@ -294,7 +344,6 @@ class ReportVerifier(doing.Doer):
4. Verifies the signatures for each file against the contents of the file.
5. Validates that the submitter has signed all files in the report package.


"""

def __init__(self, hby, vdb, filer, **kwargs):
@@ -403,15 +452,15 @@ def recur(self, tyme):
# raise kering.ValidationError(f"verfer {siger.verfer.qb64} invalid")
# if siger.verfer.code != "D":
# raise kering.ValidationError(f"verfer code {siger.verfer.code} invalid")

verfed.append(os.path.basename(fullpath))

except KeyError as e:
raise kering.ValidationError(f"Invalid signature in manifest signature list"
f"missing '{e.args[0]}'")
except OSError:
raise kering.ValidationError(f"signature element={signature} point to invalid file")

except Exception as e:
raise kering.ValidationError(f"{e}")

@@ -429,9 +478,3 @@ def recur(self, tyme):
except (kering.ValidationError, zipfile.BadZipFile) as e:
self.filer.update(diger, ReportStatus.failed, e.args[0])
print(e.args[0])

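For orientation, here is a condensed, dependency-free sketch of the verification flow this diff adds to `Filer.create`: parse the prefixed digest, accumulate the streamed upload, verify the report-level digest, then verify each per-file digest listed in `META-INF/reports.json`. The function and helper names below are illustrative only; the PR itself routes digest handling through `DigerBuilder` and keri's `Diger`, so the digest also round-trips as a CESR qb64 database key.

```python
import hashlib
import io
import json
import os
import tempfile
import zipfile


def verify_report(dig: str, stream) -> None:
    # "sha256_<hex>" -> "<hex>"; mirrors DigerBuilder._get_non_prefixed_digest
    _, _, hexdigest = dig.partition("_")
    if not hexdigest:
        raise ValueError(f"Digest ({dig}) must start with a prefix")

    report = b""
    while True:  # same 4 KiB chunking as Filer.create
        chunk = stream.read(4096)
        if not chunk:
            break
        report += chunk

    # Report-level digest check (the PR uses diger.verify(report) instead).
    if hashlib.sha256(report).hexdigest() != hexdigest:
        raise ValueError(f"Report digest ({dig}) verification failed")

    # Per-file digest checks against the manifest inside the zipped package.
    with tempfile.TemporaryDirectory() as tmpdir:
        zipfile.ZipFile(io.BytesIO(report)).extractall(path=tmpdir)
        for root, dirs, _ in os.walk(tmpdir):
            if "META-INF" not in dirs or "reports" not in dirs:
                continue
            meta_dir = os.path.join(root, "META-INF")
            with open(os.path.join(meta_dir, "reports.json")) as f:
                manifest = json.load(f)
            for entry in manifest["documentInfo"]["digests"]:
                path = os.path.normpath(os.path.join(meta_dir, entry["file"]))
                with open(path, "rb") as f:
                    body = f.read()
                _, _, want = entry["dig"].partition("_")
                if hashlib.sha256(body).hexdigest() != want:
                    raise ValueError(f"Invalid digest for file {path}")
```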
27 changes: 27 additions & 0 deletions src/verifier/core/utils.py
@@ -0,0 +1,27 @@
from keri import kering
from keri.core import MtrDex, coring


class DigerBuilder:
@staticmethod
def sha256(dig):
non_pref_dig = DigerBuilder._get_non_prefixed_digest(dig)  # Temporarily remove prefix
non_pref_dig = bytes.fromhex(non_pref_dig)
return DigerBuilder.build_diger(non_pref_dig, MtrDex.SHA2_256)

@staticmethod
def _get_non_prefixed_digest(dig):
try:
prefix, digest = dig.split("_", 1)
except ValueError:
raise kering.ValidationError(f"Digest ({dig}) must start with prefix")
return digest

@staticmethod
def build_diger(raw, code):
diger = coring.Diger(raw=raw, code=code)
return diger
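
A minimal usage sketch of `DigerBuilder` (import path as used by the tests below); `diger.qb64` is the CESR-encoded form that `Filer.create` now uses as the chunk key:

```python
from hashlib import sha256

from src.verifier.core.utils import DigerBuilder

payload = b"example report bytes"
dig = f"sha256_{sha256(payload).hexdigest()}"  # prefixed hex digest, as submitted

diger = DigerBuilder.sha256(dig)  # keri Diger carrying code MtrDex.SHA2_256
assert diger.verify(payload)      # True only when payload matches the digest
print(diger.qb64)                 # CESR qb64 form used as the database key
```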
32 changes: 32 additions & 0 deletions tests/core/test_reporting.py
@@ -0,0 +1,32 @@
import pytest
from hashlib import sha256

from keri import kering

from src.verifier.core.utils import DigerBuilder


def test_diger_builder():
BASE_STR = "fefUBIUhdo9032bfHf0UNONF0kubni9HnF22L0KD2".encode()
dig = sha256(BASE_STR).hexdigest()
dig = f"sha256_{dig}"
diger = DigerBuilder.sha256(dig)
assert diger.verify(BASE_STR) is True


def test_diger_builder_fail():
BASE_STR = "fefUBIUhdo9032bfHf0UNONF0kubni9HnF22L0KD2".encode()
WRONG_BASE_STR = "fefUBIUhdo9032bfHf0UNONF0kubni9HnF22L0KDT".encode()
dig = sha256(BASE_STR).hexdigest()
dig = f"sha256_{dig}"
diger = DigerBuilder.sha256(dig)
assert diger.verify(WRONG_BASE_STR) is False


def test_diger_builder_wrong_dig():
BASE_STR = "fefUBIUhdo9032bfHf0UNONF0kubni9HnF22L0KD2".encode()
dig = sha256(BASE_STR).hexdigest()
# Here the dig is not prefixed
with pytest.raises(kering.ValidationError):
    DigerBuilder.sha256(dig)

28 changes: 14 additions & 14 deletions tests/integration/test_service.py
@@ -113,31 +113,31 @@ def get():
raise exceptions[0]

# @pytest.mark.manual
# def test_service_integration(seeder):
# def test_service_integration(seeder):
# with habbing.openHab(name="sid", temp=True, salt=b'0123456789abcdef') as (hby, hab):

#
# seeder.seedSchema(db=hby.db)
# regery, registry, verifier, seqner = reg_and_verf(hby, hab, registryName="qvireg")
# qvicred = get_qvi_cred(issuer=hab.pre, recipient=hab.pre, schema=Schema.QVI_SCHEMA, registry=registry)
# hab, qcrdntler, qsaid, qkmsgs, qtmsgs, qimsgs, qvimsgs = get_cred(hby, hab, regery, registry, verifier, Schema.QVI_SCHEMA, qvicred, seqner)
#
# qviedge = get_qvi_edge(qvicred.sad["d"], Schema.QVI_SCHEMA)

#
# leicred = get_lei_cred(issuer=hab.pre, recipient=hab.pre, schema=Schema.LEI_SCHEMA, registry=registry, sedge=qviedge)
# hab, lcrdntler, lsaid, lkmsgs, ltmsgs, limsgs, leimsgs = get_cred(hby, hab, regery, registry, verifier, Schema.LEI_SCHEMA, leicred, seqner)

#
# #chained ecr auth cred
# eaedge = get_ecr_auth_edge(lsaid,Schema.LEI_SCHEMA)
#
# eacred = get_ecr_auth_cred(aid=hab.pre, issuer=hab.pre, recipient=hab.pre, schema=Schema.ECR_AUTH_SCHEMA, registry=registry, sedge=eaedge)
# hab, eacrdntler, easaid, eakmsgs, eatmsgs, eaimsgs, eamsgs = get_cred(hby, hab, regery, registry, verifier, Schema.ECR_AUTH_SCHEMA, eacred, seqner)
#
# #chained ecr auth cred
# ecredge = get_ecr_edge(easaid,Schema.ECR_AUTH_SCHEMA)
#
# ecr = get_ecr_cred(issuer=hab.pre, recipient=hab.pre, schema=Schema.ECR_SCHEMA, registry=registry, sedge=ecredge)
# hab, eccrdntler, ecsaid, eckmsgs, ectmsgs, ecimsgs, ecmsgs = get_cred(hby, hab, regery, registry, verifier, Schema.ECR_SCHEMA, ecr, seqner)
#
# app = falcon.App(
# middleware=falcon.CORSMiddleware(
# allow_origins='*',
@@ -151,13 +151,13 @@ def get():
# # def get():
# # return dict(LEIs=[f"{LEI1}",f"{LEI2}"])
# rootsCf = configing.Configer(name="verifier-config-public.json",
# headDirPath="/Users/meenyleeny/VSCode/vlei-verifier/scripts",
# headDirPath="/home/aidar/Desktop/git/gleif/vlei-verifier/scripts",
# base="",
# temp=False, reopen=True, clear=False)
# authDoers = authorizing.setup(hby, vdb=vdb, reger=eccrdntler.rgy.reger, cf=rootsCf)

#
# reportDoers = reporting.setup(app=app, hby=hby, vdb=vdb)

#
# doers = authDoers + reportDoers + [httpServerDoer]
# limit = 0.25
# tock = 0.03125
@@ -173,14 +173,14 @@ def get():
# doist.recur()
# except Exception as e:
# raise ValueError(f"Likely you have another service running on {port}")

#
# issAndCred = bytearray()
# # issAndCred.extend(kmsgs)
# # issAndCred.extend(tmsgs)
# # issAndCred.extend(imsgs)
# issAndCred.extend(ecmsgs)
# acdc = issAndCred.decode("utf-8")

#
# # use this for integration testing debugging sessions
# while True:
# time.sleep(1)