From 302aa383e463892ca6424de70e421e1d10bef164 Mon Sep 17 00:00:00 2001 From: Leonard Lyubich Date: Mon, 13 Jan 2025 10:18:43 +0300 Subject: [PATCH] node/metabase: Provide API for SearchV2 There is a need to serve `ObjectService.SearchV2` RPC by the SN. In order not to expand the structure and configuration of the node, the best place to store metadata is metabase. Metabases are extended with per-container object metadata buckets. For each object, following indexes are created: - OID; - attribute->OID; - OID->attribute. Integers are stored specifically to reach lexicographic comparisons without decoding. New `Search` method is provided: it allows to filter out container's objects and receive specified attributes. Count is also limited, op is paged via cursor. In other words, the method follows SearchV2 behavior within single metabase. Refs #3058. Signed-off-by: Leonard Lyubich --- .../metabase/containers.go | 5 + pkg/local_object_storage/metabase/metadata.go | 762 +++++++++ .../metabase/metadata_test.go | 1399 +++++++++++++++++ pkg/local_object_storage/metabase/put.go | 23 + pkg/local_object_storage/metabase/select.go | 4 +- pkg/local_object_storage/metabase/util.go | 22 + .../metabase/util_test.go | 24 + 7 files changed, 2237 insertions(+), 2 deletions(-) create mode 100644 pkg/local_object_storage/metabase/metadata.go create mode 100644 pkg/local_object_storage/metabase/metadata_test.go create mode 100644 pkg/local_object_storage/metabase/util_test.go diff --git a/pkg/local_object_storage/metabase/containers.go b/pkg/local_object_storage/metabase/containers.go index 60ec058dcf..5053e941f7 100644 --- a/pkg/local_object_storage/metabase/containers.go +++ b/pkg/local_object_storage/metabase/containers.go @@ -184,6 +184,11 @@ func (db *DB) DeleteContainer(cID cid.ID) error { return fmt.Errorf("link objects' bucket cleanup: %w", err) } + // Metadata + if err = tx.DeleteBucket(metaBucketKey(cID)); err != nil && !errors.Is(err, bbolt.ErrBucketNotFound) { + return 
fmt.Errorf("metadata bucket cleanup: %w", err) + } + // indexes err = tx.DeleteBucket(ownerBucketName(cID, buff)) diff --git a/pkg/local_object_storage/metabase/metadata.go b/pkg/local_object_storage/metabase/metadata.go new file mode 100644 index 0000000000..f918f0e765 --- /dev/null +++ b/pkg/local_object_storage/metabase/metadata.go @@ -0,0 +1,762 @@ +package meta + +import ( + "bytes" + "crypto/sha256" + "encoding/base64" + "encoding/hex" + "errors" + "fmt" + "math/big" + "slices" + + "github.com/google/uuid" + "github.com/mr-tron/base58" + "github.com/nspcc-dev/neofs-sdk-go/client" + cid "github.com/nspcc-dev/neofs-sdk-go/container/id" + "github.com/nspcc-dev/neofs-sdk-go/object" + oid "github.com/nspcc-dev/neofs-sdk-go/object/id" + "github.com/nspcc-dev/neofs-sdk-go/user" + "github.com/nspcc-dev/neofs-sdk-go/version" + "github.com/nspcc-dev/tzhash/tz" + "go.etcd.io/bbolt" +) + +const ( + metaPrefixID = byte(iota) + metaPrefixAttrIDInt + metaPrefixAttrIDPlain + metaPrefixIDAttr +) + +const ( + intValLen = 33 // prefix byte for sign + fixed256 in metaPrefixAttrIDInt + attrIDFixedLen = 1 + oid.Size + utf8DelimiterLen // prefix first +) + +var ( + maxUint256 = new(big.Int).SetBytes([]byte{255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, + 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255}) + maxUint256Neg = new(big.Int).Neg(maxUint256) +) + +var errInvalidCursor = errors.New("invalid cursor") + +func invalidMetaBucketKeyErr(key []byte, cause error) error { + return fmt.Errorf("invalid meta bucket key (prefix 0x%X): %w", key[0], cause) +} + +// TODO: fill on migration. +// TODO: ROOT and PHY props. +// TODO: cleaning on obj removal. 
+func putMetadata(tx *bbolt.Tx, cnr cid.ID, id oid.ID, ver version.Version, owner user.ID, typ object.Type, creationEpoch uint64, + payloadLen uint64, pldHash, pldHmmHash, splitID []byte, parentID, firstID oid.ID, attrs []object.Attribute) error { + metaBkt, err := tx.CreateBucketIfNotExists(metaBucketKey(cnr)) + if err != nil { + return fmt.Errorf("create meta bucket for container: %w", err) + } + idk := [1 + oid.Size]byte{metaPrefixID} + copy(idk[1:], id[:]) + if err := metaBkt.Put(idk[:], nil); err != nil { + return fmt.Errorf("put object ID to container's meta bucket: %w", err) + } + + var keyBuf keyBuffer + if err = putPlainAttribute(metaBkt, &keyBuf, id, object.FilterVersion, ver.String()); err != nil { + return err + } + if err = putPlainAttribute(metaBkt, &keyBuf, id, object.FilterOwnerID, string(owner[:])); err != nil { + return err + } + if err = putPlainAttribute(metaBkt, &keyBuf, id, object.FilterType, typ.String()); err != nil { + return err + } + if err = putIntAttribute(metaBkt, &keyBuf, id, object.FilterCreationEpoch, new(big.Int).SetUint64(creationEpoch)); err != nil { + return err + } + if err = putIntAttribute(metaBkt, &keyBuf, id, object.FilterPayloadSize, new(big.Int).SetUint64(payloadLen)); err != nil { + return err + } + if err = putPlainAttribute(metaBkt, &keyBuf, id, object.FilterPayloadChecksum, string(pldHash)); err != nil { + return err + } + if err = putPlainAttribute(metaBkt, &keyBuf, id, object.FilterPayloadHomomorphicHash, string(pldHmmHash)); err != nil { + return err + } + if len(splitID) > 0 { + if err = putPlainAttribute(metaBkt, &keyBuf, id, object.FilterSplitID, string(splitID)); err != nil { + return err + } + } + if !firstID.IsZero() { + if err = putPlainAttribute(metaBkt, &keyBuf, id, object.FilterFirstSplitObject, string(firstID[:])); err != nil { + return err + } + } + if !parentID.IsZero() { + if err = putPlainAttribute(metaBkt, &keyBuf, id, object.FilterParentID, string(parentID[:])); err != nil { + return err + } + } + 
for i := range attrs { + ak, av := attrs[i].Key(), attrs[i].Value() + if n, isInt := parseInt(av); isInt && n.Cmp(maxUint256Neg) >= 0 && n.Cmp(maxUint256) <= 0 { + err = putIntAttribute(metaBkt, &keyBuf, id, ak, n) + } else { + err = putPlainAttribute(metaBkt, &keyBuf, id, ak, av) + } + if err != nil { + return err + } + } + + return nil +} + +// Search selects up to count container's objects from the given container +// matching the specified filters. +func (db *DB) Search(cnr cid.ID, fs object.SearchFilters, attrs []string, cursor string, count uint16) ([]client.SearchResultItem, string, error) { + if blindlyProcess(fs) { + return nil, "", nil + } + var res []client.SearchResultItem + var resCursor []byte + var err error + if len(fs) == 0 { + res, resCursor, err = db.searchUnfiltered(cnr, []byte(cursor), count) + } else { + res, resCursor, err = db.search(cnr, fs, attrs, []byte(cursor), count) + } + if err != nil { + return nil, "", err + } + return res, base64.StdEncoding.EncodeToString(resCursor), nil +} + +func (db *DB) search(cnr cid.ID, fs object.SearchFilters, attrs []string, cursor []byte, count uint16) ([]client.SearchResultItem, []byte, error) { + seekKey := make([]byte, 1+base64.StdEncoding.DecodedLen(len(cursor))) // TODO: limit? 
+ n, err := base64.StdEncoding.Decode(seekKey[1:], cursor) + if err != nil { + return nil, nil, fmt.Errorf("decode cursor from Base64: %w", err) + } + seekKey = seekKey[:1+n] + + var res []client.SearchResultItem + err = db.boltDB.View(func(tx *bbolt.Tx) error { + mb := tx.Bucket(metaBucketKey(cnr)) + if mb == nil { + return nil + } + var err error + res, cursor, err = db.searchInBucket(mb, fs, attrs, seekKey, count) + return err + }) + if err != nil { + return nil, nil, fmt.Errorf("view BoltDB: %w", err) + } + return res, cursor, nil +} + +func (db *DB) searchInBucket(metaBkt *bbolt.Bucket, fs object.SearchFilters, attrs []string, + primSeekKey []byte, count uint16) ([]client.SearchResultItem, []byte, error) { + // TODO: make as much as possible outside the Bolt tx + primMatcher := fs[0].Operation() + intPrimMatcher := isNumericOp(primMatcher) + notPresentPrimMatcher := primMatcher == object.MatchNotPresent + if notPresentPrimMatcher { + primSeekKey[0] = metaPrefixID + } else if intPrimMatcher { + primSeekKey[0] = metaPrefixAttrIDInt + } else { + primSeekKey[0] = metaPrefixAttrIDPlain + } + + primAttr := fs[0].Header() // attribute emptiness already prevented + repeated := len(primSeekKey) > 1 // 1st is prefix + var primSeekPrefix []byte + var prevResOID, prevResPrimVal []byte + if notPresentPrimMatcher { + primSeekPrefix = primSeekKey[:1] + } else if repeated { + ind := bytes.Index(primSeekKey[1:], utf8Delimiter) // 1st is prefix + if ind < 0 { + return nil, nil, fmt.Errorf("%w: missing delimiter", errInvalidCursor) + } + if !bytes.Equal(primSeekKey[1:1+ind], []byte(primAttr)) { + return nil, nil, fmt.Errorf("%w: wrong primary attribute", errInvalidCursor) + } + primSeekPrefix = primSeekKey[:1+len(primAttr)+len(utf8Delimiter)] + valID := primSeekKey[len(primSeekPrefix):] + if len(valID) <= oid.Size { + return nil, nil, fmt.Errorf("%w: too small VAL_OID len %d", errInvalidCursor, len(valID)) + } + prevResPrimVal, prevResOID = valID[:len(valID)-oid.Size], 
valID[len(valID)-oid.Size:] + } else { + if primMatcher == object.MatchStringEqual || primMatcher == object.MatchCommonPrefix || + primMatcher == object.MatchNumGT || primMatcher == object.MatchNumGE { + var err error + if primSeekKey, primSeekPrefix, err = encodeAttributeValue(primAttr, fs[0].Value()); err != nil { + return nil, nil, fmt.Errorf("invalid primary filter value: %w", err) + } + } else { + primSeekKey = slices.Concat(primSeekKey[:1], []byte(primAttr), utf8Delimiter) + primSeekPrefix = primSeekKey + } + } + + primCursor := metaBkt.Cursor() + primKey, _ := primCursor.Seek(primSeekKey) + if bytes.Equal(primKey, primSeekKey) { // points to the last response element, so go next + primKey, _ = primCursor.Next() + } + if primKey == nil { + return nil, nil, nil + } + + res := make([]client.SearchResultItem, count) + collectedPrimVals := make([][]byte, count) + collectedPrimKeys := make([][]byte, count) // TODO: can be done w/o slice + var n uint16 + var more bool + var id, dbVal []byte + var keyBuf keyBuffer + attrSkr := &metaAttributeSeeker{keyBuf: &keyBuf, bkt: metaBkt} + +nextPrimKey: + for ; bytes.HasPrefix(primKey, primSeekPrefix); primKey, _ = primCursor.Next() { + if notPresentPrimMatcher { + if id = primKey[1:]; len(id) != oid.Size { + return nil, nil, invalidMetaBucketKeyErr(primKey, fmt.Errorf("invalid OID len %d", len(id))) + } + } else { // apply primary filter + valID := primKey[len(primSeekPrefix):] // VAL_OID + if len(valID) <= oid.Size { + return nil, nil, fmt.Errorf("%w: too small VAL_OID len %d", errInvalidCursor, len(valID)) + } + dbVal, id = valID[:len(valID)-oid.Size], valID[len(valID)-oid.Size:] + if !intPrimMatcher && primKey[0] == metaPrefixAttrIDInt { + var err error + if dbVal, err = restoreIntAttributeVal(dbVal); err != nil { + return nil, nil, invalidMetaBucketKeyErr(primKey, fmt.Errorf("invalid integer value: %w", err)) + } + } + for i := range fs { + // there may be several filters by primary key, e.g. N >= 10 && N <= 20. 
We + // check them immediately before moving through the DB. + attr := fs[i].Header() + if i > 0 && attr != primAttr { + continue + } + checkedDBVal, fltVal, err := combineValues(attr, dbVal, fs[i].Value()) // TODO: deduplicate DB value preparation + if err != nil { + return nil, nil, fmt.Errorf("invalid key in meta bucket: invalid attribute %s value: %w", attr, err) + } + if !matchValues(checkedDBVal, fs[i].Operation(), fltVal) { + continue nextPrimKey + } + // TODO: attribute value can be requested, it can be collected here, or we can + // detect earlier when an object goes beyond the already collected result. The + // code can become even more complex. Same below + } + } + // apply other filters + for i := range fs { + if !notPresentPrimMatcher && i == 0 { // 1st already checked + continue + } + attr := fs[i].Header() // emptiness already prevented + for j := 1; j < i; j++ { + if fs[j].Header() == attr { // already match, checked in loop below + continue + } + } + var err error + if dbVal, err = attrSkr.get(id, attr); err != nil { + return nil, nil, err + } + var dbValInt *[]byte // nil means not yet checked, pointer to nil means non-int + for j := i; j < len(fs); j++ { + if j > 0 && fs[j].Header() != attr { + continue + } + m := fs[j].Operation() + if dbVal == nil { + if m == object.MatchNotPresent { + continue + } + continue nextPrimKey + } + if m == object.MatchNotPresent { + continue nextPrimKey + } + if dbValInt == nil { + if len(dbVal) != intValLen { + dbValInt = new([]byte) + } else { + // do the same as for primary attribute, but unlike there, here we don't know + // whether the attribute is expected to be integer or not. 
+ dbValInt = new([]byte) + if attrSkr.isInt(id, attr, dbVal) { + var err error + if *dbValInt, err = restoreIntAttributeVal(dbVal); err != nil { + return nil, nil, invalidMetaBucketKeyErr(primKey, fmt.Errorf("invalid integer value: %w", err)) + } + } + } + } + var checkedDBVal []byte + if isNumericOp(m) { + if *dbValInt == nil { + continue nextPrimKey + } + checkedDBVal = dbVal + } else if *dbValInt != nil { + checkedDBVal = *dbValInt + } else { + checkedDBVal = dbVal + } + checkedDBVal, fltVal, err := combineValues(attr, checkedDBVal, fs[j].Value()) // TODO: deduplicate DB value preparation + if err != nil { + return nil, nil, invalidMetaBucketKeyErr(primKey, fmt.Errorf("invalid attribute %s value: %w", attr, err)) + } + if !matchValues(checkedDBVal, m, fltVal) { + continue nextPrimKey + } + } + } + // object matches, collect attributes + collected := make([]string, len(attrs)) + var primDBVal []byte + var insertI uint16 + if len(attrs) > 0 { + var err error + if primDBVal, err = attrSkr.get(id, attrs[0]); err != nil { + return nil, nil, err + } + if repeated { // can be < than previous response chunk + if c := bytes.Compare(primDBVal, prevResPrimVal); c < 0 || c == 0 && bytes.Compare(id, prevResOID) <= 0 { + continue nextPrimKey + } + // note that if both values are integers, they are already sorted. Otherwise, the order is undefined. 
+ // We could treat non-int values as < then the int ones, but the code would have grown huge + } + for i := range n { + if c := bytes.Compare(primDBVal, collectedPrimVals[i]); c < 0 || c == 0 && bytes.Compare(id, res[i].ID[:]) < 0 { + break + } + if insertI++; insertI == count { + more = true + continue nextPrimKey + } + } + if collected[0], err = attrSkr.restoreVal(id, attrs[0], primDBVal); err != nil { + return nil, nil, err + } + } else { + if repeated { // can be < than previous response chunk + if bytes.Compare(id, prevResOID) <= 0 { + continue nextPrimKey + } + } + for i := insertI; i < n; i++ { + if bytes.Compare(id, res[i].ID[:]) >= 0 { + if insertI++; insertI == count { + more = true + continue nextPrimKey + } + } + } + } + for i := 1; i < len(attrs); i++ { + val, err := attrSkr.get(id, attrs[i]) + if err != nil { + return nil, nil, err + } + if collected[i], err = attrSkr.restoreVal(id, attrs[i], val); err != nil { + return nil, nil, err + } + } + if n == count { + more = true + } + copy(res[insertI+1:], res[insertI:]) + res[insertI].ID = oid.ID(id) + res[insertI].Attributes = collected + copy(collectedPrimVals[insertI+1:], collectedPrimVals[insertI:]) + collectedPrimVals[insertI] = primDBVal + copy(collectedPrimKeys[insertI+1:], collectedPrimKeys[insertI:]) + collectedPrimKeys[insertI] = primKey + if n < count { + n++ + } + } + var resCursor []byte + if more { + resCursor = collectedPrimKeys[n-1][1:] + } + return res[:n], slices.Clone(resCursor), nil +} + +// TODO: can be merged with filtered code? 
+func (db *DB) searchUnfiltered(cnr cid.ID, cursor []byte, count uint16) ([]client.SearchResultItem, []byte, error) { + seekKey := make([]byte, 1+base64.StdEncoding.DecodedLen(len(cursor))) + ln, err := base64.StdEncoding.Decode(seekKey[1:], cursor) + if err != nil { + return nil, nil, fmt.Errorf("decode cursor from Base64: %w", err) + } + if ln > oid.Size { + return nil, nil, fmt.Errorf("too big OID cursor len %d", ln) + } + seekKey[0] = metaPrefixID + seekKey = seekKey[:1+ln] + + res := make([]client.SearchResultItem, count) + var n uint16 + err = db.boltDB.View(func(tx *bbolt.Tx) error { + mb := tx.Bucket(metaBucketKey(cnr)) + if mb == nil { + return nil + } + + mbc := mb.Cursor() + k, _ := mbc.Seek(seekKey) + if len(cursor) > 0 && bytes.Equal(k, seekKey) { // cursor is the last response element, so go next + k, _ = mbc.Next() + } + for ; k[0] == metaPrefixID; k, _ = mbc.Next() { + if n == count { // there are still elements + cursor = res[n-1].ID[:] + return nil + } + if len(k) != oid.Size+1 { + return invalidMetaBucketKeyErr(k, fmt.Errorf("unexpected object key len %d", len(k))) + } + res[n].ID = oid.ID(k[1:]) + n++ + } + cursor = nil + return nil + }) + if err != nil { + return nil, nil, fmt.Errorf("view BoltDB: %w", err) + } + return res[:n], cursor, nil +} + +func encodeAttributeValue(attr, fltVal string) ([]byte, []byte, error) { + var dbVal []byte + switch attr { + default: + if n, ok := new(big.Int).SetString(fltVal, 10); ok && n.Cmp(maxUint256Neg) >= 0 && n.Cmp(maxUint256) <= 0 { + key := make([]byte, 1+len(attr)+utf8DelimiterLen+intValLen) // prefix 1st + key[0] = metaPrefixAttrIDInt + off := 1 + copy(key[1:], attr) + off += copy(key[off:], utf8Delimiter) + prefix := key[:off] + putInt(key[off:off+intValLen], n) + return key, prefix, nil + } + dbVal = []byte(fltVal) + case object.FilterOwnerID, object.FilterFirstSplitObject, object.FilterParentID: + var err error + if dbVal, err = base58.Decode(fltVal); err != nil { + return nil, nil, 
fmt.Errorf("decode %q attribute value from Base58: %w", attr, err) + } + case object.FilterPayloadChecksum, object.FilterPayloadHomomorphicHash: + var err error + if dbVal, err = hex.DecodeString(fltVal); err != nil { + return nil, nil, fmt.Errorf("decode %q attribute value from HEX: %w", attr, err) + } + case object.FilterSplitID: + uid, err := uuid.Parse(fltVal) + if err != nil { + return nil, nil, fmt.Errorf("decode %q UUID attribute: %w", attr, err) + } + dbVal = uid[:] + case object.FilterVersion, object.FilterType: + } + key := make([]byte, 1+len(attr)+utf8DelimiterLen+len(dbVal)) // prefix 1st + key[0] = metaPrefixAttrIDPlain + off := 1 + copy(key[1:], attr) + off += copy(key[off:], utf8Delimiter) + prefix := key[:off] + copy(key[off:], dbVal) + return key, prefix, nil +} + +// combines attribute's DB and NeoFS API SearchV2 values to the matchable +// format. Returns DB errors only. +func combineValues(attr string, dbVal []byte, fltVal string) ([]byte, []byte, error) { + switch attr { + case object.FilterOwnerID: + if len(dbVal) != user.IDSize { + return nil, nil, fmt.Errorf("invalid owner len %d != %d", len(dbVal), user.IDSize) + } + if b, _ := base58.Decode(fltVal); len(b) == user.IDSize { + return dbVal, b, nil + } + // consider filter 'owner PREFIX N': + // - any object matches it + // - decoded filter byte is always 21 while the DB one is always 53 + // so we'd get false mismatch. To avoid this, we have to decode each DB val. 
+ dbVal = []byte(base58.Encode(dbVal)) + case object.FilterFirstSplitObject, object.FilterParentID: + if len(dbVal) != oid.Size { + return nil, nil, fmt.Errorf("invalid OID len %d != %d", len(dbVal), oid.Size) + } + if b, _ := base58.Decode(fltVal); len(b) == oid.Size { + return dbVal, b, nil + } + // same as owner + dbVal = []byte(base58.Encode(dbVal)) + case object.FilterPayloadChecksum: + if len(dbVal) != sha256.Size { + return nil, nil, fmt.Errorf("invalid payload checksum len %d != %d", len(dbVal), sha256.Size) + } + if b, err := hex.DecodeString(fltVal); err == nil { + return dbVal, b, nil + } + dbVal = []byte(hex.EncodeToString(dbVal)) + case object.FilterPayloadHomomorphicHash: + if len(dbVal) != tz.Size { + return nil, nil, fmt.Errorf("invalid payload homomorphic hash len %d != %d", len(dbVal), tz.Size) + } + if b, err := hex.DecodeString(fltVal); err == nil { + return dbVal, b, nil + } + dbVal = []byte(hex.EncodeToString(dbVal)) + case object.FilterSplitID: + if len(dbVal) != 16 { + return nil, nil, fmt.Errorf("invalid split ID len %d != 16", len(dbVal)) + } + uid, err := uuid.Parse(fltVal) + if err == nil { + return dbVal, uid[:], nil + } + copy(uid[:], dbVal) + dbVal = []byte(uid.String()) + } + return dbVal, []byte(fltVal), nil +} + +func metaBucketKey(cnr cid.ID) []byte { + k := [1 + cid.Size]byte{metadataPrefix} + copy(k[1:], cnr[:]) + return k[:] +} + +func intBytes(n *big.Int) []byte { + b := make([]byte, intValLen) + putInt(b, n) + return b +} + +func putInt(b []byte, n *big.Int) { + if len(b) < intValLen { + panic(fmt.Errorf("insufficient buffer len %d", len(b))) + } + neg := n.Sign() < 0 + if neg { + b[0] = 0 + } else { + b[0] = 1 + } + n.FillBytes(b[1:intValLen]) + if neg { + for i := range b[1:] { + b[1+i] = ^b[1+i] + } + } +} + +func restoreIntAttributeVal(b []byte) ([]byte, error) { + n, err := restoreIntAttribute(b) + if err != nil { + return nil, err + } + return []byte(n.String()), nil +} + +func restoreIntAttribute(b []byte) (*big.Int, 
error) { + if len(b) != intValLen { + return nil, fmt.Errorf("invalid len %d", len(b)) + } + switch b[0] { + default: + return nil, fmt.Errorf("invalid sign byte %d", b[0]) + case 1: + return new(big.Int).SetBytes(b[1:]), nil + case 0: + cp := slices.Clone(b[1:]) + for i := range cp { + cp[i] = ^cp[i] + } + n := new(big.Int).SetBytes(cp) + return n.Neg(n), nil + } +} + +// matches object attribute's search query value to the DB-stored one. Matcher +// must be supported but not [object.MatchNotPresent]. +func matchValues(dbVal []byte, matcher object.SearchMatchType, fltVal []byte) bool { + switch { + default: + return false // TODO: check whether supported in blindlyProcess. Then panic here + case matcher == object.MatchNotPresent: + panic(errors.New("unexpected matcher NOT_PRESENT")) + case matcher == object.MatchStringEqual: + return bytes.Equal(dbVal, fltVal) + case matcher == object.MatchStringNotEqual: + return !bytes.Equal(dbVal, fltVal) + case matcher == object.MatchCommonPrefix: + return bytes.HasPrefix(dbVal, fltVal) + case isNumericOp(matcher): + var n big.Int + return n.UnmarshalText(fltVal) == nil && intMatches(dbVal, matcher, &n) + } +} + +func intMatches(dbVal []byte, matcher object.SearchMatchType, fltVal *big.Int) bool { + if c := fltVal.Cmp(maxUint256); c >= 0 { + if matcher == object.MatchNumGT || c > 0 && matcher == object.MatchNumGE { + return false + } + if matcher == object.MatchNumLE || c > 0 && matcher == object.MatchNumLT { + return true + } + } + if c := fltVal.Cmp(maxUint256Neg); c <= 0 { + if matcher == object.MatchNumLT || c < 0 && matcher == object.MatchNumLE { + return false + } + if matcher == object.MatchNumGE || c < 0 && matcher == object.MatchNumGT { + return true + } + } + fltValBytes := intBytes(fltVal) // TODO: buffer can be useful for other filters + switch matcher { + default: + panic(fmt.Errorf("unexpected integer matcher %d", matcher)) + case object.MatchNumGT: + return bytes.Compare(dbVal, fltValBytes) > 0 + case 
object.MatchNumGE: + return bytes.Compare(dbVal, fltValBytes) >= 0 + case object.MatchNumLT: + return bytes.Compare(dbVal, fltValBytes) < 0 + case object.MatchNumLE: + return bytes.Compare(dbVal, fltValBytes) <= 0 + } +} + +// makes PREFIX_ATTR_DELIM_VAL_OID with unset VAL space, and returns offset of +// the VAL. Reuses previously allocated buffer if it is sufficient. +func prepareMetaAttrIDKey(buf *keyBuffer, id oid.ID, attr string, valLen int, intAttr bool) ([]byte, int) { + k := buf.alloc(attrIDFixedLen + len(attr) + valLen) + if intAttr { + k[0] = metaPrefixAttrIDInt + } else { + k[0] = metaPrefixAttrIDPlain + } + off := 1 + copy(k[1:], attr) + off += copy(k[off:], utf8Delimiter) + valOff := off + off += valLen + copy(k[off:], id[:]) + return k, valOff +} + +// similar to prepareMetaAttrIDKey but makes PREFIX_OID_ATTR_DELIM_VAL. +func prepareMetaIDAttrKey(buf *keyBuffer, id oid.ID, attr string, valLen int) []byte { + k := buf.alloc(attrIDFixedLen + len(attr) + valLen) + k[0] = metaPrefixIDAttr + off := 1 + copy(k[1:], id[:]) + off += copy(k[off:], attr) + copy(k[off:], utf8Delimiter) + return k +} + +func putPlainAttribute[V []byte | string](bkt *bbolt.Bucket, buf *keyBuffer, id oid.ID, attr string, val V) error { + k, off := prepareMetaAttrIDKey(buf, id, attr, len(val), false) + copy(k[off:], val) + if err := bkt.Put(k, nil); err != nil { + return fmt.Errorf("put object attribute %q to container's meta bucket (attribute-to-ID): %w", attr, err) + } + k = prepareMetaIDAttrKey(buf, id, attr, len(val)) // TODO: ATTR_DELIM_VAL can just be moved + copy(k[len(k)-len(val):], val) + if err := bkt.Put(k, nil); err != nil { + return fmt.Errorf("put object attribute %q to container's meta bucket (ID-to-attribute): %w", attr, err) // TODO: distinguishable context + } + return nil +} + +func putIntAttribute(bkt *bbolt.Bucket, buf *keyBuffer, id oid.ID, attr string, val *big.Int) error { + k, off := prepareMetaAttrIDKey(buf, id, attr, intValLen, true) + 
putInt(k[off:off+intValLen], val) + if err := bkt.Put(k, nil); err != nil { + return fmt.Errorf("put integer object attribute %q to container's meta bucket (attribute-to-ID): %w", attr, err) + } + k = prepareMetaIDAttrKey(buf, id, attr, intValLen) // TODO: ATTR_DELIM_VAL can just be moved + putInt(k[len(k)-intValLen:], val) + if err := bkt.Put(k, nil); err != nil { + return fmt.Errorf("put integer object attribute %q to container's meta bucket (ID-to-attribute): %w", attr, err) + } + return nil +} + +type metaAttributeSeeker struct { + keyBuf *keyBuffer + bkt *bbolt.Bucket + crsr *bbolt.Cursor +} + +func (x *metaAttributeSeeker) get(id []byte, attr string) ([]byte, error) { + pref := x.keyBuf.alloc(attrIDFixedLen + len(attr)) + pref[0] = metaPrefixIDAttr + off := 1 + copy(pref[1:], id) + off += copy(pref[off:], attr) + copy(pref[off:], utf8Delimiter) + if x.crsr == nil { + x.crsr = x.bkt.Cursor() + } + key, _ := x.crsr.Seek(pref) + if !bytes.HasPrefix(key, pref) { + return nil, nil + } + if len(key[len(pref):]) == 0 { + return nil, invalidMetaBucketKeyErr(key, errors.New("missing attribute value")) + } + return key[len(pref):], nil +} + +func (x *metaAttributeSeeker) isInt(id []byte, attr string, val []byte) bool { + key := x.keyBuf.alloc(attrIDFixedLen + len(attr) + len(val)) + key[0] = metaPrefixAttrIDInt + off := 1 + copy(key[1:], attr) + off += copy(key[off:], utf8Delimiter) + off += copy(key[off:], val) + copy(key[off:], id) + return x.bkt.Get(key) != nil +} + +func (x *metaAttributeSeeker) restoreVal(id []byte, attr string, stored []byte) (string, error) { + if len(stored) == intValLen && x.isInt(id, attr, stored) { + n, err := restoreIntAttribute(stored) + if err != nil { + return "", invalidMetaBucketKeyErr([]byte{metaPrefixAttrIDInt}, fmt.Errorf("invalid integer value: %w", err)) + } + return n.String(), nil + } + switch attr { + case object.FilterOwnerID, object.FilterFirstSplitObject, object.FilterParentID: + return base58.Encode(stored), nil + case 
object.FilterPayloadChecksum, object.FilterPayloadHomomorphicHash: + return hex.EncodeToString(stored), nil + case object.FilterSplitID: + uid, err := uuid.ParseBytes(stored) + if err != nil { + return "", invalidMetaBucketKeyErr([]byte{metaPrefixAttrIDPlain}, fmt.Errorf("decode split ID: decode UUID: %w", err)) + } + return uid.String(), nil + } + return string(stored), nil +} diff --git a/pkg/local_object_storage/metabase/metadata_test.go b/pkg/local_object_storage/metabase/metadata_test.go new file mode 100644 index 0000000000..dab8eb692d --- /dev/null +++ b/pkg/local_object_storage/metabase/metadata_test.go @@ -0,0 +1,1399 @@ +package meta + +import ( + "bytes" + "encoding/base64" + "math" + "math/big" + "math/rand" + "slices" + "strconv" + "testing" + + "github.com/nspcc-dev/neofs-sdk-go/checksum" + checksumtest "github.com/nspcc-dev/neofs-sdk-go/checksum/test" + "github.com/nspcc-dev/neofs-sdk-go/client" + cid "github.com/nspcc-dev/neofs-sdk-go/container/id" + cidtest "github.com/nspcc-dev/neofs-sdk-go/container/id/test" + "github.com/nspcc-dev/neofs-sdk-go/object" + oid "github.com/nspcc-dev/neofs-sdk-go/object/id" + oidtest "github.com/nspcc-dev/neofs-sdk-go/object/id/test" + "github.com/nspcc-dev/neofs-sdk-go/user" + "github.com/nspcc-dev/neofs-sdk-go/version" + "github.com/stretchr/testify/require" + "go.etcd.io/bbolt" +) + +func sortObjectIDs(ids []oid.ID) []oid.ID { + s := slices.Clone(ids) + slices.SortFunc(s, func(a, b oid.ID) int { return bytes.Compare(a[:], b[:]) }) + return s +} + +func appendAttribute(obj *object.Object, k, v string) { + obj.SetAttributes(append(obj.Attributes(), *object.NewAttribute(k, v))...) +} + +func assertAttrPrefixed[T string | []byte](t testing.TB, mb *bbolt.Bucket, id oid.ID, prefix byte, attr string, val T) { + k := []byte{prefix} + k = append(k, attr...) + k = append(k, 0xFF) + k = append(k, val...) + k = append(k, id[:]...) + require.Equal(t, []byte{}, mb.Get(k)) + k = []byte{0x03} + k = append(k, id[:]...) 
+ k = append(k, attr...) + k = append(k, 0xFF) + k = append(k, val...) + require.Equal(t, []byte{}, mb.Get(k)) +} + +func assertAttr[T string | []byte](t testing.TB, mb *bbolt.Bucket, id oid.ID, attr string, val T) { + assertAttrPrefixed(t, mb, id, 0x02, attr, val) +} + +func assertIntAttr(t testing.TB, mb *bbolt.Bucket, id oid.ID, attr string, val []byte) { + assertAttrPrefixed(t, mb, id, 0x01, attr, val) +} + +func TestPutMetadata(t *testing.T) { + db := newDB(t) + cnr := cidtest.ID() + id := oidtest.ID() + owner := user.ID{53, 79, 133, 229, 135, 39, 60, 187, 194, 109, 18, 37, 225, 166, 197, 146, 118, 186, 18, 215, 33, 158, 202, 214, 188} + const creationEpoch = 7311064694303989735 + const payloadLen = 2091724451450177666 + const typ = 4 // can be any, max supported value at the moment + var ver version.Version + ver.SetMajor(2138538449) + ver.SetMinor(1476143219) + parentID := oid.ID{65, 202, 224, 1, 198, 23, 145, 189, 139, 236, 185, 132, 138, 222, 233, 224, 38, 204, 39, 52, 161, 38, 68, + 74, 8, 253, 255, 34, 110, 49, 90, 71} + firstID := oid.ID{207, 78, 197, 150, 88, 190, 144, 92, 46, 19, 159, 238, 189, 151, 253, 57, 82, 204, 23, 108, 6, 96, 55, 223, 108, + 74, 176, 135, 29, 55, 177, 219} + pldHashBytes := [32]byte{95, 165, 98, 74, 58, 67, 109, 195, 226, 238, 253, 241, 64, 7, 241, 240, 241, 46, 243, 182, 130, 17, 194, + 11, 7, 153, 171, 79, 131, 76, 154, 91} + pldHash := checksum.NewSHA256(pldHashBytes) + pldHmmHashBytes := [64]byte{124, 127, 67, 236, 186, 166, 150, 202, 4, 115, 163, 58, 242, 73, 149, 35, 153, 93, 4, 247, 62, 18, 13, 150, + 53, 141, 131, 172, 207, 164, 187, 240, 16, 30, 18, 30, 136, 0, 197, 213, 185, 62, 153, 223, 42, 213, 207, 86, 131, 144, 121, + 127, 251, 248, 253, 176, 145, 101, 69, 75, 12, 97, 27, 19} + pldHmmHash := checksum.NewTillichZemor(pldHmmHashBytes) + splitID := []byte{240, 204, 35, 185, 222, 70, 69, 124, 160, 224, 208, 185, 9, 114, 37, 109} + var attrs []object.Attribute + addAttr := func(k, v string) { attrs = append(attrs, 
*object.NewAttribute(k, v)) } + addAttr("attr_1", "val_1") + addAttr("attr_2", "val_2") + addAttr("num_negative_overflow", "-115792089237316195423570985008687907853269984665640564039457584007913129639936") + addAttr("num_negative_min", "-115792089237316195423570985008687907853269984665640564039457584007913129639935") + addAttr("num_negative_min64", "-9223372036854775808") + addAttr("num_negative_max", "-1") + addAttr("num_zero", "0") + addAttr("num_positive_min", "1") + addAttr("num_positive_max64", "18446744073709551615") + addAttr("num_positive_max", "115792089237316195423570985008687907853269984665640564039457584007913129639935") + addAttr("num_positive_overflow", "115792089237316195423570985008687907853269984665640564039457584007913129639936") + + var obj object.Object + obj.SetContainerID(cnr) + obj.SetID(id) + obj.SetOwnerID(&owner) + obj.SetCreationEpoch(creationEpoch) + obj.SetPayloadSize(payloadLen) + obj.SetType(typ) + obj.SetVersion(&ver) + obj.SetParentID(parentID) + obj.SetFirstID(firstID) + obj.SetPayloadChecksum(pldHash) + obj.SetPayloadHomomorphicHash(pldHmmHash) + obj.SetSplitID(object.NewSplitIDFromV2(splitID)) + obj.SetAttributes(attrs...) 
+ + err := db.Put(&obj, nil, nil) + require.NoError(t, err) + + err = db.boltDB.View(func(tx *bbolt.Tx) error { + mb := tx.Bucket(append([]byte{0xFF}, cnr[:]...)) + require.NotNil(t, mb, "missing container's meta bucket") + + require.Equal(t, []byte{}, mb.Get(append([]byte{0x00}, id[:]...))) + assertAttr(t, mb, id, "$Object:version", "v2138538449.1476143219") + assertAttr(t, mb, id, "$Object:ownerID", owner[:]) + assertAttr(t, mb, id, "$Object:objectType", "LINK") + assertAttr(t, mb, id, "$Object:payloadHash", pldHashBytes[:]) + assertAttr(t, mb, id, "$Object:homomorphicHash", pldHmmHashBytes[:]) + assertAttr(t, mb, id, "$Object:split.parent", parentID[:]) + assertAttr(t, mb, id, "$Object:split.first", firstID[:]) + assertIntAttr(t, mb, id, "$Object:creationEpoch", []byte{1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 101, 118, 30, 154, 145, 227, 159, 231}) + assertIntAttr(t, mb, id, "$Object:payloadLength", []byte{1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 29, 7, 76, 78, 96, 175, 200, 130}) + assertAttr(t, mb, id, "attr_1", "val_1") + assertAttr(t, mb, id, "attr_2", "val_2") + assertAttr(t, mb, id, "num_negative_overflow", "-115792089237316195423570985008687907853269984665640564039457584007913129639936") + assertIntAttr(t, mb, id, "num_negative_min", []byte{0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0}) + assertIntAttr(t, mb, id, "num_negative_min64", []byte{0, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, + 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 127, 255, 255, 255, 255, 255, 255, 255}) + assertIntAttr(t, mb, id, "num_negative_max", []byte{0, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, + 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 254}) + assertIntAttr(t, mb, id, "num_zero", []byte{1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0}) + assertIntAttr(t, mb, id, "num_positive_max64", []byte{1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 255, 255, 255, 255, 255, 255, 255, 255}) + assertIntAttr(t, mb, id, "num_positive_max", []byte{1, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, + 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255}) + assertAttr(t, mb, id, "num_positive_overflow", "115792089237316195423570985008687907853269984665640564039457584007913129639936") + + return nil + }) + require.NoError(t, err) +} + +func TestApplyFilter(t *testing.T) { + t.Run("unsupported matcher", func(t *testing.T) { + ok := matchValues(nil, 9, nil) + require.False(t, ok) + }) + t.Run("not present", func(t *testing.T) { + require.Panics(t, func() { _ = matchValues(nil, object.MatchNotPresent, nil) }) + }) + check := func(dbVal []byte, m object.SearchMatchType, fltVal []byte, exp bool) { + ok := matchValues(dbVal, m, fltVal) + require.Equal(t, exp, ok) + } + anyData := []byte("Hello, world!") + t.Run("EQ", func(t *testing.T) { + check := func(dbVal, fltVal []byte, exp bool) { check(dbVal, object.MatchStringEqual, fltVal, exp) } + check(nil, nil, true) + check([]byte{}, nil, true) + check(anyData, anyData, true) + check(anyData, anyData[:len(anyData)-1], false) + check(anyData, append(anyData, 1), false) + for i := range anyData { + dbVal := slices.Clone(anyData) + dbVal[i]++ + check(dbVal, anyData, false) + } + }) + t.Run("NE", func(t *testing.T) { + check := func(dbVal, fltVal []byte, exp bool) { check(dbVal, object.MatchStringNotEqual, fltVal, exp) } + check(nil, nil, false) + check([]byte{}, nil, false) + check(anyData, anyData, false) + check(anyData, anyData[:len(anyData)-1], true) + check(anyData, append(anyData, 1), true) + for i := range anyData { + dbVal := slices.Clone(anyData) + dbVal[i]++ + check(dbVal, anyData, true) + } + }) + t.Run("has prefix", func(t *testing.T) { + check 
:= func(dbVal, fltVal []byte, exp bool) { check(dbVal, object.MatchCommonPrefix, fltVal, exp) } + check(nil, nil, true) + check([]byte{}, nil, true) + check(anyData, anyData, true) + check(anyData, anyData[:len(anyData)-1], true) + check(anyData, append(anyData, 1), false) + for i := range anyData { + check(anyData, anyData[:i], true) + changed := slices.Concat(anyData[:i], []byte{anyData[i] + 1}, anyData[i+1:]) + check(anyData, changed[:i+1], false) + } + }) + t.Run("int", func(t *testing.T) { + t.Run("non-int filter value", func(t *testing.T) { + for _, matcher := range []object.SearchMatchType{ + object.MatchNumGT, object.MatchNumGE, object.MatchNumLT, object.MatchNumLE, + } { + ok := matchValues(make([]byte, intValLen), matcher, []byte("1.5")) + require.False(t, ok) + } + }) + check := func(dbVal *big.Int, matcher object.SearchMatchType, fltVal *big.Int, exp bool) { + check(intBytes(dbVal), matcher, []byte(fltVal.String()), exp) + } + one := big.NewInt(1) + max64 := new(big.Int).SetUint64(math.MaxUint64) + ltMin := new(big.Int).Sub(maxUint256Neg, one) + gtMax := new(big.Int).Add(maxUint256, one) + ns := []*big.Int{ + maxUint256Neg, + new(big.Int).Add(maxUint256Neg, big.NewInt(1)), + new(big.Int).Neg(max64), + big.NewInt(-1), + big.NewInt(0), + one, + max64, + new(big.Int).Sub(maxUint256, big.NewInt(1)), + maxUint256, + } + for i, n := range ns { + check(n, object.MatchNumGT, ltMin, true) + check(n, object.MatchNumGE, ltMin, true) + check(n, object.MatchNumLT, ltMin, false) + check(n, object.MatchNumLE, ltMin, false) + + check(n, object.MatchNumGT, gtMax, false) + check(n, object.MatchNumGE, gtMax, false) + check(n, object.MatchNumLT, gtMax, true) + check(n, object.MatchNumLE, gtMax, true) + + check(n, object.MatchNumGT, n, false) + check(n, object.MatchNumGE, n, true) + check(n, object.MatchNumLT, n, false) + check(n, object.MatchNumLE, n, true) + + for j := range i { + check(n, object.MatchNumGT, ns[j], true) + check(n, object.MatchNumGE, ns[j], true) + 
check(n, object.MatchNumLT, ns[j], false) + check(n, object.MatchNumLE, ns[j], false) + } + for j := i + 1; j < len(ns); j++ { + check(n, object.MatchNumGT, ns[j], false) + check(n, object.MatchNumGE, ns[j], false) + check(n, object.MatchNumLT, ns[j], true) + check(n, object.MatchNumLE, ns[j], true) + } + + minusOne := new(big.Int).Sub(n, one) + check(n, object.MatchNumGT, minusOne, true) + check(n, object.MatchNumGE, minusOne, true) + check(n, object.MatchNumLT, minusOne, false) + check(n, object.MatchNumLE, minusOne, false) + plusOne := new(big.Int).Add(n, one) + check(n, object.MatchNumGT, plusOne, false) + check(n, object.MatchNumGE, plusOne, false) + check(n, object.MatchNumLT, plusOne, true) + check(n, object.MatchNumLE, plusOne, true) + } + }) +} + +func TestIntBucketOrder(t *testing.T) { + db := newDB(t) + ns := []*big.Int{ + maxUint256Neg, + new(big.Int).Add(maxUint256Neg, big.NewInt(1)), + big.NewInt(math.MinInt64), + big.NewInt(-1), + big.NewInt(0), + big.NewInt(1), + new(big.Int).SetUint64(math.MaxUint64), + new(big.Int).Sub(maxUint256, big.NewInt(1)), + maxUint256, + } + rand.Shuffle(len(ns), func(i, j int) { ns[i], ns[j] = ns[j], ns[i] }) + + err := db.boltDB.Update(func(tx *bbolt.Tx) error { + b, err := tx.CreateBucketIfNotExists([]byte("any")) + if err != nil { + return err + } + for _, n := range ns { + if err := b.Put(intBytes(n), nil); err != nil { + return err + } + } + return nil + }) + require.NoError(t, err) + + var collected []string + err = db.boltDB.View(func(tx *bbolt.Tx) error { + c := tx.Bucket([]byte("any")).Cursor() + for k, _ := c.First(); k != nil; k, _ = c.Next() { + n, err := restoreIntAttribute(k) + require.NoError(t, err) + collected = append(collected, n.String()) + } + return nil + }) + require.NoError(t, err) + + require.Equal(t, []string{ + "-115792089237316195423570985008687907853269984665640564039457584007913129639935", + "-115792089237316195423570985008687907853269984665640564039457584007913129639934", + 
"-9223372036854775808", + "-1", + "0", + "1", + "18446744073709551615", + "115792089237316195423570985008687907853269984665640564039457584007913129639934", + "115792089237316195423570985008687907853269984665640564039457584007913129639935", + }, collected) +} + +func TestDB_SearchObjects(t *testing.T) { + db := newDB(t) + t.Run("no filters", func(t *testing.T) { + t.Run("BoltDB failure", func(t *testing.T) { + db := newDB(t) + require.NoError(t, db.boltDB.Close()) + _, _, err := db.Search(cidtest.ID(), nil, nil, "", 1) + require.ErrorContains(t, err, "view BoltDB") + require.ErrorIs(t, err, bbolt.ErrDatabaseNotOpen) + }) + + cnr := cidtest.ID() + + t.Run("no objects", func(t *testing.T) { + res, cursor, err := db.Search(cnr, nil, nil, "", 1) + require.NoError(t, err) + require.Empty(t, cursor) + require.Empty(t, res) + }) + + const n = 10 + ids := oidtest.IDs(n) + objs := make([]object.Object, n) + for i := range objs { + objs[i].SetContainerID(cnr) + objs[i].SetID(ids[i]) + objs[i].SetPayloadChecksum(checksumtest.Checksum()) // required to Put + + err := db.Put(&objs[i], nil, nil) + require.NoError(t, err, i) + } + + idsSorted := sortObjectIDs(ids) + + t.Run("all at once", func(t *testing.T) { + for _, tc := range []struct { + name string + count uint16 + }{ + {name: "exact", count: n}, + {name: "more", count: n + 1}, + } { + res, cursor, err := db.Search(cnr, nil, nil, "", tc.count) + require.NoError(t, err) + require.Empty(t, cursor) + require.Len(t, res, n) + for i := range idsSorted { + require.Equal(t, idsSorted[i], res[i].ID, i) + require.Empty(t, res[i].Attributes) + } + } + }) + t.Run("paginated", func(t *testing.T) { + // request 3 first + res, cursor, err := db.Search(cnr, nil, nil, "", 3) + require.NoError(t, err) + require.Len(t, res, 3) + require.Equal(t, base64.StdEncoding.EncodeToString(res[2].ID[:]), cursor) + for i := range 3 { + require.Equal(t, idsSorted[i], res[i].ID, i) + require.Empty(t, res[i].Attributes) + } + // then 6 more + res, cursor, 
err = db.Search(cnr, nil, nil, cursor, 6) + require.NoError(t, err) + require.Len(t, res, 6) + require.Equal(t, base64.StdEncoding.EncodeToString(res[5].ID[:]), cursor) + for i := range 6 { + require.Equal(t, idsSorted[3+i], res[i].ID, i) + require.Empty(t, res[i].Attributes) + } + // and up to 2 more + res, cursor, err = db.Search(cnr, nil, nil, cursor, 2) + require.NoError(t, err) + require.Empty(t, cursor) + require.Len(t, res, 1) + require.Equal(t, idsSorted[n-1], res[0].ID) + require.Empty(t, res[0].Attributes) + }) + t.Run("corrupted element", func(t *testing.T) { + err := db.boltDB.Update(func(tx *bbolt.Tx) error { + mbk := [1 + cid.Size]byte{0xFF} + copy(mbk[1:], cnr[:]) + mb := tx.Bucket(mbk[:]) + require.NotNil(t, mb) + + k := [1 + oid.Size]byte{0x00} + copy(k[1:], ids[rand.Intn(len(ids))][:]) + v := mb.Get(k[:]) + require.NotNil(t, v) + + return mb.Put(k[:len(k)-1], nil) + }) + require.NoError(t, err) + + _, _, err = db.Search(cnr, nil, nil, "", n) + require.EqualError(t, err, "view BoltDB: invalid meta bucket key (prefix 0x0): unexpected object key len 32") + }) + }) + t.Run("filters", func(t *testing.T) { + // this test is focused on correct filters' application only, so only sorting by + // IDs is checked + const nRoot = 2 + const nPhy = 2 * nRoot + const nAll = nRoot + nPhy + all := []uint{0, 1, 2, 3, 4, 5} + group1 := []uint{0, 2, 4} + group2 := []uint{1, 3, 5} + ids := [nAll]oid.ID{ + // RSYscGLzKw1nkeVRGpowYTGgtgodXJrMyyiHTGGJW3S + {6, 66, 212, 15, 99, 92, 193, 89, 165, 111, 36, 160, 35, 150, 126, 177, 208, 51, 229, 148, 1, 245, 188, 147, 68, 92, 227, 128, 184, 49, 150, 25}, + // 6dMvfyLF7HZ1WsBRgrLUDZP4pLkvNRjB6HWGeNXP4fJp + {83, 155, 1, 16, 139, 16, 27, 84, 238, 110, 215, 181, 245, 231, 129, 220, 192, 80, 168, 236, 35, 215, 29, 238, 133, 31, 176, 13, 250, 67, 126, 185}, + // 6hkrsFBPpAKTAKHeC5gycCZsz2BQdKtAn9ADriNdWf4E + {84, 187, 66, 103, 55, 176, 48, 220, 171, 101, 83, 187, 75, 89, 244, 128, 14, 43, 160, 118, 226, 60, 180, 113, 95, 41, 15, 27, 
151, 143, 183, 187}, + // BQY3VShN1BmU6XDKiQaDo2tk7s7rkYuaGeVgmcHcWsRY + {154, 156, 84, 7, 36, 243, 19, 205, 118, 179, 244, 56, 251, 80, 184, 244, 97, 142, 113, 120, 167, 50, 111, 94, 219, 78, 151, 180, 89, 102, 52, 15}, + // DsKLie7U2BVph5XkZttG8EERxt9DFQXkrowr6LFkxp8h + {191, 48, 5, 72, 64, 44, 163, 71, 127, 144, 18, 30, 134, 67, 189, 210, 243, 2, 101, 225, 63, 47, 174, 128, 41, 238, 107, 14, 87, 136, 50, 162}, + // Gv9XcEW7KREB8cnjFbW8HBdJesMnbNKknfGdBNsVtQmB + {236, 124, 186, 165, 234, 207, 5, 237, 62, 82, 41, 15, 133, 132, 132, 73, 55, 16, 69, 101, 214, 174, 160, 228, 101, 161, 18, 204, 241, 208, 155, 118}, + } + // HYFTEXkzpDWkXU6anQByuSPvV3imjzTKJBaAyD4VYg23 + cnr := cid.ID{245, 188, 86, 80, 170, 97, 147, 48, 75, 27, 115, 238, 61, 151, 182, 191, 95, 33, 160, 253, 239, 70, 174, 188, 220, 84, 57, 222, 9, 104, 4, 48} + // cnrStr := "HYFTEXkzpDWkXU6anQByuSPvV3imjzTKJBaAyD4VYg23" + owners := [nRoot]user.ID{ + // NfzJyPrn1hRGuVJNvMYLTfWZGW2ZVR9Qmj + {53, 220, 52, 178, 96, 0, 121, 121, 217, 160, 223, 119, 75, 71, 2, 233, 33, 138, 241, 182, 208, 164, 240, 222, 30}, + // NiUWeE8gb8njJmymdZTh229ojGeJ24WHSm + {53, 247, 122, 86, 36, 254, 120, 76, 10, 73, 62, 4, 132, 174, 224, 77, 32, 37, 224, 73, 102, 37, 121, 117, 46}, + } + checksums := [nAll][32]byte{ + // 8a61b9ff3de0983ed7ad7aa21db22ff91e5a2a07128cd45e3646282f90e4efd7 + {138, 97, 185, 255, 61, 224, 152, 62, 215, 173, 122, 162, 29, 178, 47, 249, 30, 90, 42, 7, 18, 140, 212, 94, 54, 70, 40, 47, 144, 228, 239, 215}, + // d501baff2dec96b7dec7d634e5ec13ed8be33048bfa4e8285a37dabc0537e677 + {213, 1, 186, 255, 45, 236, 150, 183, 222, 199, 214, 52, 229, 236, 19, 237, 139, 227, 48, 72, 191, 164, 232, 40, 90, 55, 218, 188, 5, 55, 230, 119}, + // 302b0610844a4da6874f566798018e9d79031a4cc8bf72357d8fc5413a54473e + {48, 43, 6, 16, 132, 74, 77, 166, 135, 79, 86, 103, 152, 1, 142, 157, 121, 3, 26, 76, 200, 191, 114, 53, 125, 143, 197, 65, 58, 84, 71, 62}, + // 9bcee80d024eb36a3dbb8e7948d1a9b672a82929950a85ccd350e31e34560672 + {155, 
206, 232, 13, 2, 78, 179, 106, 61, 187, 142, 121, 72, 209, 169, 182, 114, 168, 41, 41, 149, 10, 133, 204, 211, 80, 227, 30, 52, 86, 6, 114}, + // 35d6c9f1aa664aa163f2ec0bffe48af0bd4e8bc640626c12759f187876007529 + {53, 214, 201, 241, 170, 102, 74, 161, 99, 242, 236, 11, 255, 228, 138, 240, 189, 78, 139, 198, 64, 98, 108, 18, 117, 159, 24, 120, 118, 0, 117, 41}, + // cc6c36b379e9a77a845a021498e2e92875131af404f825aa56bea91602785ef2 + {204, 108, 54, 179, 121, 233, 167, 122, 132, 90, 2, 20, 152, 226, 233, 40, 117, 19, 26, 244, 4, 248, 37, 170, 86, 190, 169, 22, 2, 120, 94, 242}, + } + hmmChecksums := [nAll][64]byte{ + // a73a37d54475df580b324d70f3d1ac922200af91f196dd9cb0f8f1cca5fefdf0cb3dbc4aaac639416e3fdd4c540e616e6b44ac6b56a3b194e8011925192a8be2 + {167, 58, 55, 213, 68, 117, 223, 88, 11, 50, 77, 112, 243, 209, 172, 146, 34, 0, 175, 145, 241, 150, 221, 156, 176, 248, 241, 204, 165, 254, 253, 240, 203, 61, 188, 74, 170, 198, 57, 65, 110, 63, 221, 76, 84, 14, 97, 110, 107, 68, 172, 107, 86, 163, 177, 148, 232, 1, 25, 37, 25, 42, 139, 226}, + // f72b6eb562c6dd5e69930ab51ca8a98b13bfa18013cd89df3254dbc615f86b8f8c042649fe76e01f54bea7216957fe6716ec0a33d6b6de25ec15a53f295196d1 + {247, 43, 110, 181, 98, 198, 221, 94, 105, 147, 10, 181, 28, 168, 169, 139, 19, 191, 161, 128, 19, 205, 137, 223, 50, 84, 219, 198, 21, 248, 107, 143, 140, 4, 38, 73, 254, 118, 224, 31, 84, 190, 167, 33, 105, 87, 254, 103, 22, 236, 10, 51, 214, 182, 222, 37, 236, 21, 165, 63, 41, 81, 150, 209}, + // 55a8577889ed275d15509b202b084fb7876c08408b8c61a1ba9ab26834f08c667ccde2acf55fcfc1755cb2a6f8316e1c6185bd48549b150767979cf76ede4b1c + {85, 168, 87, 120, 137, 237, 39, 93, 21, 80, 155, 32, 43, 8, 79, 183, 135, 108, 8, 64, 139, 140, 97, 161, 186, 154, 178, 104, 52, 240, 140, 102, 124, 205, 226, 172, 245, 95, 207, 193, 117, 92, 178, 166, 248, 49, 110, 28, 97, 133, 189, 72, 84, 155, 21, 7, 103, 151, 156, 247, 110, 222, 75, 28}, + // 
4d97f1f4f17119efae4579ef916ca1535e68c4fa381c431ab4112cb5671ddb21e44dc78f02ae2b26c95d5f74bb5eb4350e00cdc5b270f60bf46deaafc1b84575 + {77, 151, 241, 244, 241, 113, 25, 239, 174, 69, 121, 239, 145, 108, 161, 83, 94, 104, 196, 250, 56, 28, 67, 26, 180, 17, 44, 181, 103, 29, 219, 33, 228, 77, 199, 143, 2, 174, 43, 38, 201, 93, 95, 116, 187, 94, 180, 53, 14, 0, 205, 197, 178, 112, 246, 11, 244, 109, 234, 175, 193, 184, 69, 117}, + // 80089235980bfbf6c01a93c4f507b2f1ff2ec8b0c29cfe6970ce95cbeb1739bef6a43626783d58f56c224cfb606c360301f632a198db63f599fca7be2e0c2566 + {128, 8, 146, 53, 152, 11, 251, 246, 192, 26, 147, 196, 245, 7, 178, 241, 255, 46, 200, 176, 194, 156, 254, 105, 112, 206, 149, 203, 235, 23, 57, 190, 246, 164, 54, 38, 120, 61, 88, 245, 108, 34, 76, 251, 96, 108, 54, 3, 1, 246, 50, 161, 152, 219, 99, 245, 153, 252, 167, 190, 46, 12, 37, 102}, + // f3b6eedc3f30b99309582a7e0ca09dd6a9234ce95bfa578ddfa6ef2a0fe9c56e8f6a86c82ce565d9216c02110c0fe44079a68275243ad2f9be6bf7dacdeed97c + {243, 182, 238, 220, 63, 48, 185, 147, 9, 88, 42, 126, 12, 160, 157, 214, 169, 35, 76, 233, 91, 250, 87, 141, 223, 166, 239, 42, 15, 233, 197, 110, 143, 106, 134, 200, 44, 229, 101, 217, 33, 108, 2, 17, 12, 15, 228, 64, 121, 166, 130, 117, 36, 58, 210, 249, 190, 107, 247, 218, 205, 238, 217, 124}, + } + groupAttrs := [nRoot]object.Attribute{ + *object.NewAttribute("group_attr_1", "group_val_1"), + *object.NewAttribute("group_attr_2", "group_val_2"), + } + types := [nRoot]object.Type{object.TypeRegular, object.TypeStorageGroup} + splitIDs := [nRoot][]byte{ + // 8b69e76d-5e95-4639-8213-46786c41ab73 + {139, 105, 231, 109, 94, 149, 70, 57, 130, 19, 70, 120, 108, 65, 171, 115}, + // 60c6b1ff-5e6d-4c0f-8699-15d54bf8a2e1 + {96, 198, 177, 255, 94, 109, 76, 15, 134, 153, 21, 213, 75, 248, 162, 225}, + } + firstIDs := [nRoot]oid.ID{ + // 61hnJaKip8c1QxvC2iT4Txfpxf37QBNRaw1XCeq72DbC + {74, 120, 139, 195, 149, 106, 19, 73, 151, 116, 227, 3, 83, 169, 108, 129, 20, 206, 146, 192, 140, 2, 85, 14, 244, 109, 
247, 28, 51, 101, 212, 183}, + // Cdf8vnK5xTxmkdc1GcjkxaEQFtEmwHPRky4KRQik6rQH + {172, 212, 150, 43, 17, 126, 75, 161, 99, 197, 238, 169, 62, 209, 96, 183, 79, 236, 237, 83, 141, 73, 125, 166, 186, 82, 68, 27, 147, 18, 24, 2}, + } + + initObj := func(obj *object.Object, nGlobal, nGroup int) { + ver := version.New(100+uint32(nGroup), 200+uint32(nGroup)) + obj.SetVersion(&ver) + obj.SetContainerID(cnr) + obj.SetID(ids[nGlobal]) + obj.SetType(types[nGroup]) + obj.SetOwnerID(&owners[nGroup]) + obj.SetCreationEpoch(10 + uint64(nGroup)) + obj.SetPayloadSize(20 + uint64(nGroup)) + obj.SetPayloadChecksum(checksum.NewSHA256(checksums[nGlobal])) + obj.SetPayloadHomomorphicHash(checksum.NewTillichZemor(hmmChecksums[nGlobal])) + si := strconv.Itoa(nGlobal) + obj.SetAttributes( + *object.NewAttribute("attr_common", "val_common"), + *object.NewAttribute("unique_attr_"+si, "unique_val_"+si), + groupAttrs[nGroup], + *object.NewAttribute("global_non_integer", "not an integer"), + ) + } + + var pars [nRoot]object.Object + for i := range nRoot { + initObj(&pars[i], i, i) + } + + var phys [nPhy]object.Object + for i := range phys { + nGroup := i % nRoot + initObj(&phys[i], nRoot+i, nGroup) + phys[i].SetSplitID(object.NewSplitIDFromV2(splitIDs[nGroup])) + phys[i].SetFirstID(firstIDs[nGroup]) + phys[i].SetParent(&pars[nGroup]) + } + + appendAttribute(&pars[0], "attr_int", "-115792089237316195423570985008687907853269984665640564039457584007913129639935") + appendAttribute(&phys[0], "attr_int", "-18446744073709551615") + appendAttribute(&phys[1], "attr_int", "0") + appendAttribute(&phys[2], "attr_int", "18446744073709551615") + appendAttribute(&pars[1], "attr_int", "115792089237316195423570985008687907853269984665640564039457584007913129639935") + + for i := range phys { + require.NoError(t, db.Put(&phys[i], nil, nil)) + } + + check := func(k string, m object.SearchMatchType, v string, matchInds []uint) { + var fs object.SearchFilters + fs.AddFilter(k, v, m) + + res, cursor, err := 
db.Search(cnr, fs, nil, "", nAll) + require.NoError(t, err) + require.Empty(t, cursor) + require.Len(t, res, len(matchInds)) + for i, ind := range matchInds { + require.Equal(t, ids[ind], res[i].ID) + require.Empty(t, res[i].Attributes) + } + } + + t.Run("all", func(t *testing.T) { + check("attr_common", object.MatchStringEqual, "val_common", all) + }) + t.Run("user attributes", func(t *testing.T) { + // unique + for i := range all { + si := strconv.Itoa(i) + key := "unique_attr_" + si + val := "unique_val_" + si + check(key, object.MatchStringEqual, val, []uint{uint(i)}) + check(key, object.MatchStringNotEqual, "other_val", []uint{uint(i)}) + for j := range val { + check(key, object.MatchCommonPrefix, val[:j], []uint{uint(i)}) + } + for _, matcher := range []object.SearchMatchType{ + object.MatchNumGT, object.MatchNumGE, object.MatchNumLT, object.MatchNumLE, + } { + check(key, matcher, val, nil) + } + var others []uint + for j := range all { + if j != i { + others = append(others, uint(j)) + } + } + check(key, object.MatchNotPresent, "", others) + } + // group + const val1 = "group_val_1" + check("group_attr_1", object.MatchStringEqual, val1, group1) + check("group_attr_1", object.MatchStringNotEqual, val1, nil) + check("group_attr_1", object.MatchNotPresent, val1, group2) + for i := range val1 { + check("group_attr_1", object.MatchCommonPrefix, val1[:i], group1) + } + const val2 = "group_val_2" + check("group_attr_2", object.MatchStringEqual, val2, group2) + check("group_attr_2", object.MatchStringNotEqual, val2, nil) + check("group_attr_2", object.MatchNotPresent, val2, group1) + for i := range val1 { + check("group_attr_2", object.MatchCommonPrefix, val2[:i], group2) + } + for _, matcher := range []object.SearchMatchType{ + object.MatchNumGT, object.MatchNumGE, object.MatchNumLT, object.MatchNumLE, + } { + check("group_attr_1", matcher, val1, nil) + check("group_attr_2", matcher, val2, nil) + } + }) + t.Run("ROOT", func(t *testing.T) { + t.Skip("not supported 
yet") + check("$Object:ROOT", 0, "", []uint{0, 1}) + for _, matcher := range []object.SearchMatchType{ + object.MatchStringEqual, object.MatchStringNotEqual, object.MatchNotPresent, object.MatchCommonPrefix, + object.MatchNumGT, object.MatchNumGE, object.MatchNumLT, object.MatchNumLE, + } { + check("$Object:ROOT", matcher, "", nil) + } + }) + t.Run("PHY", func(t *testing.T) { + t.Skip("not supported yet") + check("$Object:PHY", 0, "", []uint{0, 1, 2, 3}) + for _, matcher := range []object.SearchMatchType{ + object.MatchStringEqual, object.MatchStringNotEqual, object.MatchNotPresent, object.MatchCommonPrefix, + object.MatchNumGT, object.MatchNumGE, object.MatchNumLT, object.MatchNumLE, + } { + check("$Object:PHY", matcher, "", nil) + } + }) + t.Run("version", func(t *testing.T) { + check := func(m object.SearchMatchType, v string, matchInds []uint) { + check("$Object:version", m, v, matchInds) + } + check(object.MatchStringEqual, "v100.200", group1) + check(object.MatchStringNotEqual, "v100.200", group2) + check(object.MatchStringEqual, "v101.201", group2) + check(object.MatchStringNotEqual, "v101.201", group1) + check(object.MatchStringEqual, "v102.202", nil) // other + check(object.MatchStringNotEqual, "v102.202", all) + for _, m := range []object.SearchMatchType{ + object.MatchNotPresent, object.MatchNumGT, object.MatchNumGE, object.MatchNumLT, object.MatchNumLE, + } { + check(m, "v100.200", nil) + } + check(object.MatchCommonPrefix, "", all) + check(object.MatchCommonPrefix, "v", all) + check(object.MatchCommonPrefix, "v1", all) + check(object.MatchCommonPrefix, "v10", all) + check(object.MatchCommonPrefix, "v100", group1) + check(object.MatchCommonPrefix, "v100.200", group1) + check(object.MatchCommonPrefix, "v100.2001", nil) + check(object.MatchCommonPrefix, "v101", group2) + check(object.MatchCommonPrefix, "v101.201", group2) + check(object.MatchCommonPrefix, "v2", nil) + }) + t.Run("owner", func(t *testing.T) { + check := func(m object.SearchMatchType, v 
string, matchInds []uint) { + check("$Object:ownerID", m, v, matchInds) + } + check(object.MatchStringEqual, "NfzJyPrn1hRGuVJNvMYLTfWZGW2ZVR9Qmj", group1) + check(object.MatchStringNotEqual, "NfzJyPrn1hRGuVJNvMYLTfWZGW2ZVR9Qmj", group2) + check(object.MatchStringEqual, "NiUWeE8gb8njJmymdZTh229ojGeJ24WHSm", group2) + check(object.MatchStringNotEqual, "NiUWeE8gb8njJmymdZTh229ojGeJ24WHSm", group1) + check(object.MatchStringEqual, "NhP5vErYP9WCfPjtCb78xqPV5MgHyhVNeL", nil) // other + check(object.MatchStringNotEqual, "NhP5vErYP9WCfPjtCb78xqPV5MgHyhVNeL", all) + for _, m := range []object.SearchMatchType{ + object.MatchNotPresent, object.MatchNumGT, object.MatchNumGE, object.MatchNumLT, object.MatchNumLE, + } { + check(m, "NfzJyPrn1hRGuVJNvMYLTfWZGW2ZVR9Qmj", nil) + } + check(object.MatchCommonPrefix, "N", all) + check(object.MatchCommonPrefix, "Nf", group1) + check(object.MatchCommonPrefix, "NfzJyPrn1hRGuVJNvMYLTfWZGW2ZVR9Qmj", group1) + check(object.MatchCommonPrefix, "NfzJyPrn1hRGuVJNvMYLTfWZGW2ZVR9Qmj1", nil) + }) + t.Run("type", func(t *testing.T) { + check := func(m object.SearchMatchType, v string, matchInds []uint) { + check("$Object:objectType", m, v, matchInds) + } + check(object.MatchStringEqual, "REGULAR", group1) + check(object.MatchStringNotEqual, "REGULAR", group2) + check(object.MatchStringEqual, "STORAGE_GROUP", group2) + check(object.MatchStringNotEqual, "STORAGE_GROUP", group1) + check(object.MatchStringEqual, "STORAGE_GROUP", group2) + check(object.MatchStringEqual, "TOMBSTONE", nil) + check(object.MatchStringNotEqual, "TOMBSTONE", all) + check(object.MatchStringEqual, "0", nil) // numeric enum value + check(object.MatchStringEqual, "2", nil) + for _, matcher := range []object.SearchMatchType{ + object.MatchNotPresent, object.MatchNumGT, object.MatchNumGE, object.MatchNumLT, object.MatchNumLE, + } { + check(matcher, "", nil) + check(matcher, "TOMBSTONE", nil) + check(matcher, "LOCK", nil) + // check(matcher, "1", nil) + // check(matcher, "3", nil) + 
} + check(object.MatchCommonPrefix, "", all) + check(object.MatchCommonPrefix, "R", group1) + check(object.MatchCommonPrefix, "S", group2) + check(object.MatchCommonPrefix, "L", nil) + }) + t.Run("payload checksum", func(t *testing.T) { + check := func(m object.SearchMatchType, v string, matchInds []uint) { + check("$Object:payloadHash", m, v, matchInds) + } + check(object.MatchStringEqual, "8a61b9ff3de0983ed7ad7aa21db22ff91e5a2a07128cd45e3646282f90e4efd7", []uint{0}) + check(object.MatchStringEqual, "d501baff2dec96b7dec7d634e5ec13ed8be33048bfa4e8285a37dabc0537e677", []uint{1}) + check(object.MatchStringEqual, "302b0610844a4da6874f566798018e9d79031a4cc8bf72357d8fc5413a54473e", []uint{2}) + check(object.MatchStringEqual, "9bcee80d024eb36a3dbb8e7948d1a9b672a82929950a85ccd350e31e34560672", []uint{3}) + check(object.MatchStringEqual, "35d6c9f1aa664aa163f2ec0bffe48af0bd4e8bc640626c12759f187876007529", []uint{4}) + check(object.MatchStringEqual, "cc6c36b379e9a77a845a021498e2e92875131af404f825aa56bea91602785ef2", []uint{5}) + check(object.MatchStringNotEqual, "8a61b9ff3de0983ed7ad7aa21db22ff91e5a2a07128cd45e3646282f90e4efd7", []uint{1, 2, 3, 4, 5}) + check(object.MatchStringNotEqual, "d501baff2dec96b7dec7d634e5ec13ed8be33048bfa4e8285a37dabc0537e677", []uint{0, 2, 3, 4, 5}) + check(object.MatchStringNotEqual, "302b0610844a4da6874f566798018e9d79031a4cc8bf72357d8fc5413a54473e", []uint{0, 1, 3, 4, 5}) + check(object.MatchStringNotEqual, "9bcee80d024eb36a3dbb8e7948d1a9b672a82929950a85ccd350e31e34560672", []uint{0, 1, 2, 4, 5}) + check(object.MatchStringNotEqual, "35d6c9f1aa664aa163f2ec0bffe48af0bd4e8bc640626c12759f187876007529", []uint{0, 1, 2, 3, 5}) + check(object.MatchStringNotEqual, "cc6c36b379e9a77a845a021498e2e92875131af404f825aa56bea91602785ef2", []uint{0, 1, 2, 3, 4}) + check(object.MatchStringEqual, "cc6c36b379e9a77a845a021498e2e92875131af404f825aa56bea91602785ef1", nil) // other + check(object.MatchStringNotEqual, 
"cc6c36b379e9a77a845a021498e2e92875131af404f825aa56bea91602785ef1", all) + for _, m := range []object.SearchMatchType{ + object.MatchNotPresent, object.MatchNumGT, object.MatchNumGE, object.MatchNumLT, object.MatchNumLE, + } { + check(m, "8a61b9ff3de0983ed7ad7aa21db22ff91e5a2a07128cd45e3646282f90e4efd7", nil) + } + check(object.MatchCommonPrefix, "", all) + check(object.MatchCommonPrefix, "8a", []uint{0}) + check(object.MatchCommonPrefix, "8a61b9ff3de0983ed7ad7aa21db22ff91e5a2a07128cd45e3646282f90e4efd7", []uint{0}) + check(object.MatchCommonPrefix, "4a", nil) + }) + t.Run("payload homomorphic checksum", func(t *testing.T) { + check := func(m object.SearchMatchType, v string, matchInds []uint) { + check("$Object:homomorphicHash", m, v, matchInds) + } + check(object.MatchStringEqual, "a73a37d54475df580b324d70f3d1ac922200af91f196dd9cb0f8f1cca5fefdf0cb3dbc4aaac639416e3fdd4c540e616e6b44ac6b56a3b194e8011925192a8be2", []uint{0}) + check(object.MatchStringEqual, "f72b6eb562c6dd5e69930ab51ca8a98b13bfa18013cd89df3254dbc615f86b8f8c042649fe76e01f54bea7216957fe6716ec0a33d6b6de25ec15a53f295196d1", []uint{1}) + check(object.MatchStringEqual, "55a8577889ed275d15509b202b084fb7876c08408b8c61a1ba9ab26834f08c667ccde2acf55fcfc1755cb2a6f8316e1c6185bd48549b150767979cf76ede4b1c", []uint{2}) + check(object.MatchStringEqual, "4d97f1f4f17119efae4579ef916ca1535e68c4fa381c431ab4112cb5671ddb21e44dc78f02ae2b26c95d5f74bb5eb4350e00cdc5b270f60bf46deaafc1b84575", []uint{3}) + check(object.MatchStringEqual, "80089235980bfbf6c01a93c4f507b2f1ff2ec8b0c29cfe6970ce95cbeb1739bef6a43626783d58f56c224cfb606c360301f632a198db63f599fca7be2e0c2566", []uint{4}) + check(object.MatchStringEqual, "f3b6eedc3f30b99309582a7e0ca09dd6a9234ce95bfa578ddfa6ef2a0fe9c56e8f6a86c82ce565d9216c02110c0fe44079a68275243ad2f9be6bf7dacdeed97c", []uint{5}) + check(object.MatchStringNotEqual, "a73a37d54475df580b324d70f3d1ac922200af91f196dd9cb0f8f1cca5fefdf0cb3dbc4aaac639416e3fdd4c540e616e6b44ac6b56a3b194e8011925192a8be2", []uint{1, 2, 
3, 4, 5}) + check(object.MatchStringNotEqual, "f72b6eb562c6dd5e69930ab51ca8a98b13bfa18013cd89df3254dbc615f86b8f8c042649fe76e01f54bea7216957fe6716ec0a33d6b6de25ec15a53f295196d1", []uint{0, 2, 3, 4, 5}) + check(object.MatchStringNotEqual, "55a8577889ed275d15509b202b084fb7876c08408b8c61a1ba9ab26834f08c667ccde2acf55fcfc1755cb2a6f8316e1c6185bd48549b150767979cf76ede4b1c", []uint{0, 1, 3, 4, 5}) + check(object.MatchStringNotEqual, "4d97f1f4f17119efae4579ef916ca1535e68c4fa381c431ab4112cb5671ddb21e44dc78f02ae2b26c95d5f74bb5eb4350e00cdc5b270f60bf46deaafc1b84575", []uint{0, 1, 2, 4, 5}) + check(object.MatchStringNotEqual, "80089235980bfbf6c01a93c4f507b2f1ff2ec8b0c29cfe6970ce95cbeb1739bef6a43626783d58f56c224cfb606c360301f632a198db63f599fca7be2e0c2566", []uint{0, 1, 2, 3, 5}) + check(object.MatchStringNotEqual, "f3b6eedc3f30b99309582a7e0ca09dd6a9234ce95bfa578ddfa6ef2a0fe9c56e8f6a86c82ce565d9216c02110c0fe44079a68275243ad2f9be6bf7dacdeed97c", []uint{0, 1, 2, 3, 4}) + check(object.MatchStringEqual, "f3b6eedc3f30b99309582a7e0ca09dd6a9234ce95bfa578ddfa6ef2a0fe9c56e8f6a86c82ce565d9216c02110c0fe44079a68275243ad2f9be6bf7dacdeed97d", nil) // other + check(object.MatchStringNotEqual, "f3b6eedc3f30b99309582a7e0ca09dd6a9234ce95bfa578ddfa6ef2a0fe9c56e8f6a86c82ce565d9216c02110c0fe44079a68275243ad2f9be6bf7dacdeed97d", all) + for _, m := range []object.SearchMatchType{ + object.MatchNotPresent, object.MatchNumGT, object.MatchNumGE, object.MatchNumLT, object.MatchNumLE, + } { + check(m, "a73a37d54475df580b324d70f3d1ac922200af91f196dd9cb0f8f1cca5fefdf0cb3dbc4aaac639416e3fdd4c540e616e6b44ac6b56a3b194e8011925192a8be2", nil) + } + check(object.MatchCommonPrefix, "", all) + check(object.MatchCommonPrefix, "a7", []uint{0}) + check(object.MatchCommonPrefix, "f3", []uint{5}) + check(object.MatchCommonPrefix, "f3b6eedc3f30b99309582a7e0ca09dd6a9234ce95bfa578ddfa6ef2a0fe9c56e8f6a86c82ce565d9216c02110c0fe44079a68275243ad2f9be6bf7dacdeed97c", []uint{5}) + check(object.MatchCommonPrefix, 
"f3b6eedc3f30b99309582a7e0ca09dd6a9234ce95bfa578ddfa6ef2a0fe9c56e8f6a86c82ce565d9216c02110c0fe44079a68275243ad2f9be6bf7dacdeed97d", nil) + }) + t.Run("split ID", func(t *testing.T) { + check := func(m object.SearchMatchType, v string, matchInds []uint) { + check("$Object:split.splitID", m, v, matchInds) + } + group1, group2, all := []uint{2, 4}, []uint{3, 5}, []uint{2, 3, 4, 5} + check(object.MatchStringEqual, "8b69e76d-5e95-4639-8213-46786c41ab73", group1) + check(object.MatchStringNotEqual, "8b69e76d-5e95-4639-8213-46786c41ab73", group2) + check(object.MatchStringEqual, "60c6b1ff-5e6d-4c0f-8699-15d54bf8a2e1", group2) + check(object.MatchStringNotEqual, "60c6b1ff-5e6d-4c0f-8699-15d54bf8a2e1", group1) + check(object.MatchStringEqual, "2a6346f2-97de-4c8d-91bf-20145cf302d6", nil) // other + check(object.MatchStringNotEqual, "2a6346f2-97de-4c8d-91bf-20145cf302d6", all) + for _, m := range []object.SearchMatchType{ + object.MatchNotPresent, object.MatchNumGT, object.MatchNumGE, object.MatchNumLT, object.MatchNumLE, + } { + check(m, "60c6b1ff-5e6d-4c0f-8699-15d54bf8a2e1", nil) + } + check(object.MatchCommonPrefix, "8b69e76d-5e95-4639-8213-46786c41ab73", group1) + check(object.MatchCommonPrefix, "8b69e76d-5e95-4639-8213-46786c41ab74", nil) + }) + t.Run("first ID", func(t *testing.T) { + check := func(m object.SearchMatchType, v string, matchInds []uint) { + check("$Object:split.first", m, v, matchInds) + } + group1, group2, all := []uint{2, 4}, []uint{3, 5}, []uint{2, 3, 4, 5} + check(object.MatchStringEqual, "61hnJaKip8c1QxvC2iT4Txfpxf37QBNRaw1XCeq72DbC", group1) + check(object.MatchStringEqual, "Cdf8vnK5xTxmkdc1GcjkxaEQFtEmwHPRky4KRQik6rQH", group2) + check(object.MatchStringNotEqual, "61hnJaKip8c1QxvC2iT4Txfpxf37QBNRaw1XCeq72DbC", group2) + check(object.MatchStringNotEqual, "Cdf8vnK5xTxmkdc1GcjkxaEQFtEmwHPRky4KRQik6rQH", group1) + check(object.MatchStringEqual, "Dfot9FnhkJy9m8pXrF1fL5fmKmbHK8wL8PqExoQFNTrz", nil) // other + check(object.MatchStringNotEqual, 
"Dfot9FnhkJy9m8pXrF1fL5fmKmbHK8wL8PqExoQFNTrz", all) + for _, m := range []object.SearchMatchType{ + object.MatchNotPresent, object.MatchNumGT, object.MatchNumGE, object.MatchNumLT, object.MatchNumLE, + } { + check(m, "61hnJaKip8c1QxvC2iT4Txfpxf37QBNRaw1XCeq72DbC", nil) + } + check(object.MatchCommonPrefix, "6", group1) + check(object.MatchCommonPrefix, "C", group2) + check(object.MatchCommonPrefix, "Cdf8vnK5xTxmkdc1GcjkxaEQFtEmwHPRky4KRQik6rQH", group2) + check(object.MatchCommonPrefix, "Cdf8vnK5xTxmkdc1GcjkxaEQFtEmwHPRky4KRQik6rQH1", nil) + }) + t.Run("parent ID", func(t *testing.T) { + check := func(m object.SearchMatchType, v string, matchInds []uint) { + check("$Object:split.parent", m, v, matchInds) + } + group1, group2, all := []uint{2, 4}, []uint{3, 5}, []uint{2, 3, 4, 5} + check(object.MatchStringEqual, "RSYscGLzKw1nkeVRGpowYTGgtgodXJrMyyiHTGGJW3S", group1) + check(object.MatchStringEqual, "6dMvfyLF7HZ1WsBRgrLUDZP4pLkvNRjB6HWGeNXP4fJp", group2) + check(object.MatchStringNotEqual, "RSYscGLzKw1nkeVRGpowYTGgtgodXJrMyyiHTGGJW3S", group2) + check(object.MatchStringNotEqual, "6dMvfyLF7HZ1WsBRgrLUDZP4pLkvNRjB6HWGeNXP4fJp", group1) + check(object.MatchStringEqual, "Dfot9FnhkJy9m8pXrF1fL5fmKmbHK8wL8PqExoQFNTrz", nil) // other + check(object.MatchStringNotEqual, "Dfot9FnhkJy9m8pXrF1fL5fmKmbHK8wL8PqExoQFNTrz", all) + for _, m := range []object.SearchMatchType{ + object.MatchNotPresent, object.MatchNumGT, object.MatchNumGE, object.MatchNumLT, object.MatchNumLE, + } { + check(m, "RSYscGLzKw1nkeVRGpowYTGgtgodXJrMyyiHTGGJW3S", nil) + } + check(object.MatchCommonPrefix, "6dMvfyLF7HZ1WsBRgrLUDZP4pLkvNRjB6HWGeNXP4fJp", group2) + check(object.MatchCommonPrefix, "6dMvfyLF7HZ1WsBRgrLUDZP4pLkvNRjB6HWGeNXP4fJJ", nil) + }) + t.Run("integers", func(t *testing.T) { + allInt := []uint{0, 1, 2, 3, 4} + for _, matcher := range []object.SearchMatchType{ + object.MatchNumGT, object.MatchNumGE, object.MatchNumLT, object.MatchNumLE, + } { + check("global_non_integer", matcher, "123", nil) 
+ // TODO: also check that BoltDB is untouched in following cases + check("attr_int", matcher, "text", nil) + check("attr_int", matcher, "1.5", nil) + } + check("attr_int", object.MatchNumLT, "-115792089237316195423570985008687907853269984665640564039457584007913129639936", nil) + check("attr_int", object.MatchNumLE, "-115792089237316195423570985008687907853269984665640564039457584007913129639936", nil) + check("attr_int", object.MatchNumGT, "-115792089237316195423570985008687907853269984665640564039457584007913129639936", nil) + check("attr_int", object.MatchNumGE, "-115792089237316195423570985008687907853269984665640564039457584007913129639936", nil) + check("attr_int", object.MatchNumLT, "115792089237316195423570985008687907853269984665640564039457584007913129639936", allInt) + check("attr_int", object.MatchNumLE, "115792089237316195423570985008687907853269984665640564039457584007913129639936", allInt) + check("attr_int", object.MatchNumGT, "115792089237316195423570985008687907853269984665640564039457584007913129639936", nil) + check("attr_int", object.MatchNumGE, "115792089237316195423570985008687907853269984665640564039457584007913129639936", nil) + check("attr_int", object.MatchNumLT, "-115792089237316195423570985008687907853269984665640564039457584007913129639935", nil) + check("attr_int", object.MatchNumLT, "-18446744073709551615", []uint{0}) + check("attr_int", object.MatchNumLT, "0", []uint{0, 2}) + check("attr_int", object.MatchNumLT, "18446744073709551615", []uint{0, 2, 3}) + check("attr_int", object.MatchNumLT, "115792089237316195423570985008687907853269984665640564039457584007913129639935", []uint{0, 2, 3, 4}) + check("attr_int", object.MatchNumLE, "-115792089237316195423570985008687907853269984665640564039457584007913129639935", []uint{0}) + check("attr_int", object.MatchNumLE, "-18446744073709551615", []uint{0, 2}) + check("attr_int", object.MatchNumLE, "0", []uint{0, 2, 3}) + check("attr_int", object.MatchNumLE, "18446744073709551615", []uint{0, 2, 
3, 4}) + check("attr_int", object.MatchNumLE, "115792089237316195423570985008687907853269984665640564039457584007913129639935", []uint{0, 1, 2, 3, 4}) + check("attr_int", object.MatchNumGT, "-115792089237316195423570985008687907853269984665640564039457584007913129639935", []uint{1, 2, 3, 4}) + check("attr_int", object.MatchNumGT, "-18446744073709551615", []uint{1, 3, 4}) + check("attr_int", object.MatchNumGT, "0", []uint{1, 4}) + check("attr_int", object.MatchNumGT, "18446744073709551615", []uint{1}) + check("attr_int", object.MatchNumGT, "115792089237316195423570985008687907853269984665640564039457584007913129639935", nil) + check("attr_int", object.MatchNumGE, "-115792089237316195423570985008687907853269984665640564039457584007913129639935", []uint{0, 1, 2, 3, 4}) + check("attr_int", object.MatchNumGE, "-18446744073709551615", []uint{1, 2, 3, 4}) + check("attr_int", object.MatchNumGE, "0", []uint{1, 3, 4}) + check("attr_int", object.MatchNumGE, "18446744073709551615", []uint{1, 4}) + check("attr_int", object.MatchNumGE, "115792089237316195423570985008687907853269984665640564039457584007913129639935", []uint{1}) + for _, tc := range []struct { + name, key string + val1, val2 string + }{ + {name: "creation epoch", key: "$Object:creationEpoch", val1: "10", val2: "11"}, + {name: "payload length", key: "$Object:payloadLength", val1: "20", val2: "21"}, + } { + t.Run(tc.name, func(t *testing.T) { + check(tc.key, object.MatchNumLT, "-115792089237316195423570985008687907853269984665640564039457584007913129639935", nil) + check(tc.key, object.MatchNumLT, "0", nil) + check(tc.key, object.MatchNumLT, tc.val1, nil) + check(tc.key, object.MatchNumLT, tc.val2, group1) + check(tc.key, object.MatchNumLT, "18446744073709551615", all) + check(tc.key, object.MatchNumLT, "115792089237316195423570985008687907853269984665640564039457584007913129639935", all) + check(tc.key, object.MatchNumLE, "-115792089237316195423570985008687907853269984665640564039457584007913129639935", nil) + 
check(tc.key, object.MatchNumLE, "0", nil) + check(tc.key, object.MatchNumLE, tc.val1, group1) + check(tc.key, object.MatchNumLE, tc.val2, all) + check(tc.key, object.MatchNumLE, "18446744073709551615", all) + check(tc.key, object.MatchNumLE, "115792089237316195423570985008687907853269984665640564039457584007913129639935", all) + check(tc.key, object.MatchNumGT, "-115792089237316195423570985008687907853269984665640564039457584007913129639935", all) + check(tc.key, object.MatchNumGT, "0", all) + check(tc.key, object.MatchNumGT, tc.val1, group2) + check(tc.key, object.MatchNumGT, tc.val2, nil) + check(tc.key, object.MatchNumGT, "18446744073709551615", nil) + check(tc.key, object.MatchNumGT, "115792089237316195423570985008687907853269984665640564039457584007913129639935", nil) + check(tc.key, object.MatchNumGE, "-115792089237316195423570985008687907853269984665640564039457584007913129639935", all) + check(tc.key, object.MatchNumGE, "0", all) + check(tc.key, object.MatchNumGE, tc.val1, all) + check(tc.key, object.MatchNumGE, tc.val2, group2) + check(tc.key, object.MatchNumGE, "18446744073709551615", nil) + check(tc.key, object.MatchNumGE, "115792089237316195423570985008687907853269984665640564039457584007913129639935", nil) + }) + } + }) + t.Run("complex", func(t *testing.T) { + type filter struct { + k string + m object.SearchMatchType + v string + } + for _, tc := range []struct { + is []uint + fs []filter + }{ + {is: group1, fs: []filter{ + {k: "group_attr_1", m: object.MatchStringEqual, v: "group_val_1"}, + {k: "attr_int", m: object.MatchNumGE, v: "-115792089237316195423570985008687907853269984665640564039457584007913129639935"}, + }}, + {is: []uint{1, 3}, fs: []filter{ + {k: "group_attr_2", m: object.MatchStringNotEqual, v: "group_val_1"}, + {k: "attr_int", m: object.MatchNumLT, v: "115792089237316195423570985008687907853269984665640564039457584007913129639936"}, + }}, + {is: nil, fs: []filter{ + {k: "attr_common", m: object.MatchCommonPrefix, v: "val_c"}, + {k: 
"attr_int", m: object.MatchNumLT, v: "0"}, + {k: "attr_int", m: object.MatchNumGT, v: "0"}, + }}, + {is: []uint{0, 1, 2, 3, 4}, fs: []filter{ + {k: "attr_common", m: object.MatchStringEqual, v: "val_common"}, + {k: "attr_int", m: object.MatchNumGE, v: "-115792089237316195423570985008687907853269984665640564039457584007913129639935"}, + {k: "attr_int", m: object.MatchNumLE, v: "115792089237316195423570985008687907853269984665640564039457584007913129639935"}, + }}, + {is: []uint{0, 2, 3, 4, 5}, fs: []filter{ + {k: "unique_attr_1", m: object.MatchNotPresent}, + {k: "attr_common", m: object.MatchStringNotEqual, v: "wrong text"}, + }}, + {is: []uint{0, 2, 3, 4, 5}, fs: []filter{ + {k: "unique_attr_1", m: object.MatchNotPresent}, + {k: "attr_common", m: object.MatchStringNotEqual, v: "wrong text"}, + }}, + {is: []uint{4}, fs: []filter{ + {k: "attr_int", m: object.MatchNumGT, v: "-18446744073709551615"}, + {k: "group_attr_1", m: object.MatchStringNotEqual, v: "random"}, + {k: "global_non_integer", m: object.MatchCommonPrefix, v: "not"}, + {k: "random", m: object.MatchNotPresent}, + {k: "attr_int", m: object.MatchNumGE, v: "18446744073709551615"}, + }}, + {is: nil, fs: []filter{ // like previous but > instead of >= + {k: "attr_int", m: object.MatchNumGT, v: "-18446744073709551615"}, + {k: "group_attr_1", m: object.MatchStringNotEqual, v: "random"}, + {k: "global_non_integer", m: object.MatchCommonPrefix, v: "not"}, + {k: "random", m: object.MatchNotPresent}, + {k: "attr_int", m: object.MatchNumGT, v: "18446744073709551615"}, + }}, + {is: group2, fs: []filter{ + {k: "$Object:payloadLength", m: object.MatchNumGT, v: "20"}, + {k: "$Object:creationEpoch", m: object.MatchNumLT, v: "30"}, + }}, + {is: all, fs: []filter{ + {k: "$Object:payloadLength", m: object.MatchNumGT, v: "19"}, + {k: "$Object:creationEpoch", m: object.MatchNumLE, v: "21"}, + }}, + {is: []uint{2, 4}, fs: []filter{ + {k: "$Object:split.first", m: object.MatchStringEqual, v: 
"61hnJaKip8c1QxvC2iT4Txfpxf37QBNRaw1XCeq72DbC"}, + {k: "$Object:split.parent", m: object.MatchStringEqual, v: "RSYscGLzKw1nkeVRGpowYTGgtgodXJrMyyiHTGGJW3S"}, + }}, + {is: []uint{3, 5}, fs: []filter{ + {k: "$Object:split.parent", m: object.MatchStringNotEqual, v: "RSYscGLzKw1nkeVRGpowYTGgtgodXJrMyyiHTGGJW3S"}, + {k: "$Object:split.first", m: object.MatchStringEqual, v: "Cdf8vnK5xTxmkdc1GcjkxaEQFtEmwHPRky4KRQik6rQH"}, + }}, + {is: []uint{3, 5}, fs: []filter{ + {k: "random", m: object.MatchNotPresent}, + {k: "$Object:split.parent", m: object.MatchStringNotEqual, v: "RSYscGLzKw1nkeVRGpowYTGgtgodXJrMyyiHTGGJW3S"}, + }}, + {is: []uint{2, 4}, fs: []filter{ + {k: "$Object:split.splitID", m: object.MatchCommonPrefix, v: "8b69e76d-5e95-4639-8213-46786c41ab73"}, + {k: "random", m: object.MatchNotPresent}, + {k: "attr_common", m: object.MatchStringNotEqual, v: "random"}, + }}, + } { + t.Run("complex", func(t *testing.T) { + var fs object.SearchFilters + for _, f := range tc.fs { + fs.AddFilter(f.k, f.v, f.m) + } + res, cursor, err := db.Search(cnr, fs, nil, "", nAll) + require.NoError(t, err) + require.Empty(t, cursor) + require.Len(t, res, len(tc.is)) + for i, ind := range tc.is { + require.Equal(t, ids[ind], res[i].ID) + require.Empty(t, res[i].Attributes) + } + }) + } + }) + }) + t.Run("attributes", func(t *testing.T) { + t.Run("range over integer attribute", func(t *testing.T) { + // Similar scenario is used by NeoGo block fetcher storing blocks in the NeoFS. + // Note that the test does not copy the approach to constructing objects, only + // imitates. + // + // Let node store objects corresponding to the block with heights: 0, 1, 50, + // 100-102, 150, 4294967295 (max). Block#101 is presented thrice (undesired but + // possible). Additional integer attribute is also presented for testing. 
+ const heightAttr = "Height" + const otherAttr = "SomeHash" + otherAttrs := []string{ // sorted + "0de1d9f050abdfb0fd8a4ff061aaa305dbbc63bf03d0ae2b8c93fbb8954b0201", + "13912c19601cc2aa2c35347bc734c469907bcebe5f81812de77a4cc192f3892c", + "1c3cd7853cfb53a134101205b6d894355ccb02ad454ac33a5ced5771a6f6dd14", + "1c80940a5099c05680035f3fcfee6a1dc36335622428bcf40635bf86a75d512b", + "1db039d30914eacfdf71780961e4957d512cfae597969c892ed1b59d258968e8", + "80dbfec78f2d4b5128d8fe51c95f3bc42be741832c77c127d53ab32f4f341505", + "9811b76c7a2b6b0020c30d4a9895fad8f2edab60037139e2a2b01761e137fb1a", + "b7d56b41e13a4502dca18420816bb1ba1a0bc10644c5e3f2bc5c511026df5aef", + "d6c059ae6852e04826419b0381690a1d76906721f195644863931f32f2d23842", + "d6dc85d4c2bab1bbd6da3ebcd4c2c56f12c5c369b685cc301e9b61449abe390b", + } + ids := []oid.ID{ // sorted + {5, 254, 154, 170, 83, 237, 109, 56, 68, 68, 97, 248, 50, 161, 183, 217, 28, 94, 162, 37, 79, 45, 175, 120, 104, 7, 87, 127, 92, 17, 218, 117}, + {41, 204, 35, 189, 128, 42, 229, 31, 7, 157, 117, 193, 98, 150, 30, 172, 103, 253, 100, 69, 223, 91, 232, 120, 70, 86, 242, 110, 88, 161, 62, 182}, + {54, 88, 178, 234, 172, 94, 155, 197, 69, 215, 33, 181, 122, 70, 178, 21, 158, 201, 54, 74, 21, 250, 193, 135, 123, 236, 137, 8, 81, 250, 21, 201}, + {92, 89, 108, 190, 140, 175, 71, 21, 243, 27, 88, 40, 156, 231, 102, 194, 230, 6, 109, 91, 135, 25, 190, 62, 246, 144, 137, 45, 90, 87, 186, 140}, + {116, 181, 195, 91, 211, 242, 145, 117, 174, 58, 195, 47, 208, 182, 46, 246, 18, 85, 0, 40, 129, 154, 68, 97, 225, 189, 89, 187, 194, 109, 201, 95}, + {162, 20, 218, 85, 5, 146, 98, 157, 137, 168, 59, 54, 102, 59, 86, 136, 160, 217, 143, 195, 200, 186, 192, 175, 235, 211, 101, 210, 147, 14, 141, 162}, + {178, 29, 204, 231, 34, 173, 251, 163, 135, 160, 94, 96, 171, 183, 2, 198, 53, 69, 84, 160, 76, 213, 208, 32, 247, 144, 230, 167, 70, 91, 158, 136}, + {199, 65, 97, 53, 71, 144, 40, 246, 194, 114, 139, 109, 213, 129, 253, 106, 141, 36, 249, 20, 130, 126, 245, 11, 
110, 113, 50, 171, 153, 210, 119, 245}, + {237, 43, 4, 240, 144, 194, 224, 217, 7, 63, 14, 22, 147, 70, 8, 191, 226, 199, 69, 43, 131, 32, 37, 79, 151, 212, 149, 94, 172, 17, 137, 148}, + {245, 142, 55, 147, 121, 184, 29, 75, 74, 192, 85, 213, 243, 183, 80, 108, 181, 57, 119, 15, 84, 220, 143, 72, 202, 247, 28, 220, 245, 116, 128, 110}, + } + objs := make([]object.Object, len(ids)) // 2 more objects for #101 + appendAttribute(&objs[0], heightAttr, "0") + appendAttribute(&objs[1], heightAttr, "1") + appendAttribute(&objs[2], heightAttr, "50") + appendAttribute(&objs[3], heightAttr, "100") + appendAttribute(&objs[4], heightAttr, "101") + appendAttribute(&objs[5], heightAttr, "101") + appendAttribute(&objs[6], heightAttr, "101") + appendAttribute(&objs[7], heightAttr, "102") + appendAttribute(&objs[8], heightAttr, "150") + appendAttribute(&objs[9], heightAttr, "4294967295") + for i := range ids { + objs[i].SetID(ids[len(ids)-1-i]) // reverse order + } + for i := range otherAttrs { + appendAttribute(&objs[i], otherAttr, otherAttrs[i]) + } + heightSorted := []client.SearchResultItem{ + // attribute takes 1st order priority + {ID: ids[9], Attributes: []string{"0", otherAttrs[0]}}, + {ID: ids[8], Attributes: []string{"1", otherAttrs[1]}}, + {ID: ids[7], Attributes: []string{"50", otherAttrs[2]}}, + {ID: ids[6], Attributes: []string{"100", otherAttrs[3]}}, + // but if attribute equals, items are sorted by IDs. 
Secondary attributes have + // no effect, otherwise the order would not be reversed + {ID: ids[3], Attributes: []string{"101", otherAttrs[6]}}, + {ID: ids[4], Attributes: []string{"101", otherAttrs[5]}}, + {ID: ids[5], Attributes: []string{"101", otherAttrs[4]}}, + // attribute takes power again + {ID: ids[2], Attributes: []string{"102", otherAttrs[7]}}, + {ID: ids[1], Attributes: []string{"150", otherAttrs[8]}}, + {ID: ids[0], Attributes: []string{"4294967295", otherAttrs[9]}}, + } + // store + cnr := cidtest.ID() + for i := range objs { + objs[i].SetContainerID(cnr) + objs[i].SetPayloadChecksum(checksumtest.Checksum()) // Put requires + require.NoError(t, db.Put(&objs[i], nil, nil)) + } + t.Run("none", func(t *testing.T) { + for _, set := range []func(*object.SearchFilters){ + func(fs *object.SearchFilters) { fs.AddFilter(heightAttr, "", object.MatchNotPresent) }, + func(fs *object.SearchFilters) { fs.AddFilter(otherAttr, "", object.MatchNotPresent) }, + func(fs *object.SearchFilters) { fs.AddFilter(heightAttr, "0", object.MatchNumLT) }, + func(fs *object.SearchFilters) { fs.AddFilter(heightAttr, "4294967295", object.MatchNumGT) }, + func(fs *object.SearchFilters) { + fs.AddFilter(heightAttr, "0", object.MatchNumGE) + fs.AddFilter(heightAttr, "151", object.MatchStringEqual) + }, + } { + var fs object.SearchFilters + set(&fs) + res, cursor, err := db.Search(cnr, fs, nil, "", 1000) + require.NoError(t, err) + require.Empty(t, cursor) + require.Empty(t, res) + } + }) + t.Run("all", func(t *testing.T) { + t.Run("unfiltered", func(t *testing.T) { + res, cursor, err := db.Search(cnr, nil, nil, "", 1000) + require.NoError(t, err) + require.Empty(t, cursor) + require.Len(t, res, len(objs)) + for i := range res { + require.Equal(t, ids[i], res[i].ID) + require.Empty(t, res[i].Attributes) + } + }) + var fs object.SearchFilters + fs.AddFilter(heightAttr, "0", object.MatchNumGE) + t.Run("w/o attributes", func(t *testing.T) { + res, cursor, err := db.Search(cnr, fs, nil, "", 
1000) + require.NoError(t, err) + require.Empty(t, cursor) + require.Len(t, res, len(heightSorted)) + for i := range res { + // expected order by IDs + require.Equal(t, ids[i], res[i].ID) + require.Empty(t, res[i].Attributes) + } + t.Run("paging", func(t *testing.T) { + res, cursor, err := db.Search(cnr, fs, nil, "", 2) + require.NoError(t, err) + require.Len(t, res, 2) + for i := range 2 { + require.Equal(t, ids[i], res[i].ID) + require.Empty(t, res[i].Attributes) + } + require.NotEmpty(t, cursor) + //nolint:staticcheck // drop with t.Skip + res, cursor, err = db.Search(cnr, fs, nil, cursor, 6) + require.NoError(t, err) + t.Skip("paging is broken when prim attribute is not requested, see also https://github.com/nspcc-dev/neofs-node/issues/3058#issuecomment-2553193094") + require.Len(t, res, 6) + for i := range 6 { + require.Equal(t, ids[2+i], res[i].ID, i) + require.Empty(t, res[i].Attributes) + } + require.NotEmpty(t, cursor) + res, cursor, err = db.Search(cnr, fs, nil, cursor, 3) + require.NoError(t, err) + require.Len(t, res, 2) + for i := range 2 { + require.Equal(t, ids[8+i], res[i].ID) + require.Empty(t, res[i].Attributes) + } + require.Empty(t, cursor) + }) + }) + t.Run("single attribute", func(t *testing.T) { + res, cursor, err := db.Search(cnr, fs, []string{heightAttr}, "", 1000) + require.NoError(t, err) + require.Empty(t, cursor) + require.Len(t, res, len(heightSorted)) + for i, r := range heightSorted { + require.Equal(t, r.ID, res[i].ID) + require.Equal(t, []string{r.Attributes[0]}, res[i].Attributes) + } + }) + t.Run("two attributes", func(t *testing.T) { + res, cursor, err := db.Search(cnr, fs, []string{heightAttr, otherAttr}, "", 1000) + require.NoError(t, err) + require.Empty(t, cursor) + require.Equal(t, heightSorted, res) + t.Run("paging", func(t *testing.T) { + res, cursor, err := db.Search(cnr, fs, []string{heightAttr, otherAttr}, "", 2) + require.NoError(t, err) + require.Equal(t, heightSorted[:2], res) + require.NotEmpty(t, cursor) + res, 
cursor, err = db.Search(cnr, fs, []string{heightAttr, otherAttr}, cursor, 6) + require.NoError(t, err) + require.Equal(t, heightSorted[2:8], res) + require.NotEmpty(t, cursor) + res, cursor, err = db.Search(cnr, fs, []string{heightAttr, otherAttr}, cursor, 3) + require.NoError(t, err) + require.Equal(t, heightSorted[8:], res) + require.Empty(t, cursor) + }) + }) + }) + t.Run("partial", func(t *testing.T) { + var fs object.SearchFilters + fs.AddFilter(heightAttr, "50", object.MatchNumGE) + fs.AddFilter(heightAttr, "150", object.MatchNumLE) + heightSorted := heightSorted[2:9] + ids := ids[1:8] + t.Run("w/o attributes", func(t *testing.T) { + res, cursor, err := db.Search(cnr, fs, nil, "", 1000) + require.NoError(t, err) + require.Empty(t, cursor) + require.Len(t, res, len(ids)) + for i := range res { + // expected order by IDs + require.Equal(t, ids[i], res[i].ID) + require.Empty(t, res[i].Attributes) + } + t.Run("paging", func(t *testing.T) { + res, cursor, err := db.Search(cnr, fs, nil, "", 2) + require.NoError(t, err) + require.Len(t, res, 2) + for i := range 2 { + require.Equal(t, ids[i], res[i].ID) + require.Empty(t, res[i].Attributes) + } + require.NotEmpty(t, cursor) + //nolint:staticcheck // drop with t.Skip + res, cursor, err = db.Search(cnr, fs, nil, cursor, 6) + require.NoError(t, err) + t.Skip("paging is broken when prim attribute is not requested, see also https://github.com/nspcc-dev/neofs-node/issues/3058#issuecomment-2553193094") + require.Len(t, res, 6) + for i := range 6 { + require.Equal(t, ids[2+i], res[i].ID, i) + require.Empty(t, res[i].Attributes) + } + require.NotEmpty(t, cursor) + res, cursor, err = db.Search(cnr, fs, nil, cursor, 3) + require.NoError(t, err) + require.Len(t, res, 2) + for i := range 2 { + require.Equal(t, ids[8+i], res[i].ID) + require.Empty(t, res[i].Attributes) + } + require.Empty(t, cursor) + }) + }) + t.Run("single attribute", func(t *testing.T) { + res, cursor, err := db.Search(cnr, fs, []string{heightAttr}, "", 1000) + 
require.NoError(t, err) + require.Empty(t, cursor) + require.Len(t, res, len(heightSorted)) + for i, r := range heightSorted { + require.Equal(t, r.ID, res[i].ID) + require.Equal(t, []string{r.Attributes[0]}, res[i].Attributes) + } + }) + t.Run("two attributes", func(t *testing.T) { + res, cursor, err := db.Search(cnr, fs, []string{heightAttr, otherAttr}, "", 1000) + require.NoError(t, err) + require.Empty(t, cursor) + require.Equal(t, heightSorted, res) + t.Run("paging", func(t *testing.T) { + res, cursor, err := db.Search(cnr, fs, []string{heightAttr, otherAttr}, "", 2) + require.NoError(t, err) + require.Equal(t, heightSorted[:2], res) + require.NotEmpty(t, cursor) + res, cursor, err = db.Search(cnr, fs, []string{heightAttr, otherAttr}, cursor, 3) + require.NoError(t, err) + require.Equal(t, heightSorted[2:5], res) + require.NotEmpty(t, cursor) + res, cursor, err = db.Search(cnr, fs, []string{heightAttr, otherAttr}, cursor, 3) + require.NoError(t, err) + require.Equal(t, heightSorted[5:], res) + require.Empty(t, cursor) + }) + }) + }) + }) + t.Run("FilePath+Timestamp", func(t *testing.T) { + // REST GW use-case + ids := []oid.ID{ // sorted + {5, 254, 154, 170, 83, 237, 109, 56, 68, 68, 97, 248, 50, 161, 183, 217, 28, 94, 162, 37, 79, 45, 175, 120, 104, 7, 87, 127, 92, 17, 218, 117}, + {41, 204, 35, 189, 128, 42, 229, 31, 7, 157, 117, 193, 98, 150, 30, 172, 103, 253, 100, 69, 223, 91, 232, 120, 70, 86, 242, 110, 88, 161, 62, 182}, + {54, 88, 178, 234, 172, 94, 155, 197, 69, 215, 33, 181, 122, 70, 178, 21, 158, 201, 54, 74, 21, 250, 193, 135, 123, 236, 137, 8, 81, 250, 21, 201}, + {92, 89, 108, 190, 140, 175, 71, 21, 243, 27, 88, 40, 156, 231, 102, 194, 230, 6, 109, 91, 135, 25, 190, 62, 246, 144, 137, 45, 90, 87, 186, 140}, + } + objs := make([]object.Object, len(ids)) + appendAttribute(&objs[0], object.AttributeFilePath, "cat1.jpg") + appendAttribute(&objs[0], object.AttributeTimestamp, "1738760790") + appendAttribute(&objs[1], object.AttributeFilePath, 
"cat2.jpg") + appendAttribute(&objs[1], object.AttributeTimestamp, "1738760792") + appendAttribute(&objs[2], object.AttributeFilePath, "cat2.jpg") + appendAttribute(&objs[2], object.AttributeTimestamp, "1738760791") + appendAttribute(&objs[3], object.AttributeFilePath, "cat2.jpg") + appendAttribute(&objs[3], object.AttributeTimestamp, "1738760793") + // store + cnr := cidtest.ID() + for i := range objs { + objs[i].SetID(ids[i]) + objs[i].SetContainerID(cnr) + objs[i].SetPayloadChecksum(checksumtest.Checksum()) // Put requires + require.NoError(t, db.Put(&objs[i], nil, nil)) + } + t.Run("none", func(t *testing.T) { + var fs object.SearchFilters + fs.AddFilter(object.AttributeFilePath, "cat4.jpg", object.MatchStringEqual) + res, cursor, err := db.Search(cnr, fs, []string{object.AttributeFilePath, object.AttributeTimestamp}, "", 1000) + require.NoError(t, err) + require.Empty(t, cursor) + require.Empty(t, res) + }) + t.Run("single", func(t *testing.T) { + var fs object.SearchFilters + fs.AddFilter(object.AttributeFilePath, "cat1.jpg", object.MatchStringEqual) + res, cursor, err := db.Search(cnr, fs, []string{object.AttributeFilePath, object.AttributeTimestamp}, "", 1000) + require.NoError(t, err) + require.Empty(t, cursor) + require.Equal(t, []client.SearchResultItem{ + {ID: ids[0], Attributes: []string{"cat1.jpg", "1738760790"}}, + }, res) + }) + t.Run("multiple", func(t *testing.T) { + t.Run("both attributes", func(t *testing.T) { + fullRes := []client.SearchResultItem{ + {ID: ids[1], Attributes: []string{"cat2.jpg", "1738760792"}}, + {ID: ids[2], Attributes: []string{"cat2.jpg", "1738760791"}}, + {ID: ids[3], Attributes: []string{"cat2.jpg", "1738760793"}}, + } + var fs object.SearchFilters + fs.AddFilter(object.AttributeFilePath, "cat2.jpg", object.MatchStringEqual) + res, cursor, err := db.Search(cnr, fs, []string{object.AttributeFilePath, object.AttributeTimestamp}, "", 1000) + require.NoError(t, err) + require.Equal(t, fullRes, res) + require.Empty(t, cursor) + 
t.Run("paging", func(t *testing.T) { + res, cursor, err := db.Search(cnr, fs, []string{object.AttributeFilePath, object.AttributeTimestamp}, "", 2) + require.NoError(t, err) + require.Equal(t, fullRes[:2], res) + require.NotEmpty(t, cursor) + res, cursor, err = db.Search(cnr, fs, []string{object.AttributeFilePath, object.AttributeTimestamp}, cursor, 1000) + require.NoError(t, err) + require.Equal(t, fullRes[2:], res) + require.Empty(t, cursor) + }) + }) + }) + }) + t.Run("precise select with many attributes", func(t *testing.T) { + // S3 GW use-case + ids := []oid.ID{ // sorted + {5, 254, 154, 170, 83, 237, 109, 56, 68, 68, 97, 248, 50, 161, 183, 217, 28, 94, 162, 37, 79, 45, 175, 120, 104, 7, 87, 127, 92, 17, 218, 117}, + {41, 204, 35, 189, 128, 42, 229, 31, 7, 157, 117, 193, 98, 150, 30, 172, 103, 253, 100, 69, 223, 91, 232, 120, 70, 86, 242, 110, 88, 161, 62, 182}, + {54, 88, 178, 234, 172, 94, 155, 197, 69, 215, 33, 181, 122, 70, 178, 21, 158, 201, 54, 74, 21, 250, 193, 135, 123, 236, 137, 8, 81, 250, 21, 201}, + {92, 89, 108, 190, 140, 175, 71, 21, 243, 27, 88, 40, 156, 231, 102, 194, 230, 6, 109, 91, 135, 25, 190, 62, 246, 144, 137, 45, 90, 87, 186, 140}, + } + objs := make([]object.Object, len(ids)) + appendAttribute(&objs[0], object.AttributeFilePath, "/home/Downloads/dog.jpg") + appendAttribute(&objs[0], "Type", "JPEG") + appendAttribute(&objs[0], "attr1", "val1_1") + appendAttribute(&objs[0], "attr2", "val2_1") + appendAttribute(&objs[1], object.AttributeFilePath, "/usr/local/bin/go") + appendAttribute(&objs[1], "Type", "BIN") + appendAttribute(&objs[1], "attr1", "val1_2") + appendAttribute(&objs[1], "attr2", "val2_2") + appendAttribute(&objs[2], object.AttributeFilePath, "/home/Downloads/cat.jpg") + appendAttribute(&objs[2], "Type", "JPEG") + appendAttribute(&objs[2], "attr1", "val1_3") + appendAttribute(&objs[2], "attr2", "val2_3") + appendAttribute(&objs[3], object.AttributeFilePath, "/var/log/neofs/node") + appendAttribute(&objs[3], "Type", "TEXT") + 
appendAttribute(&objs[3], "attr1", "val1_4") + appendAttribute(&objs[3], "attr2", "val2_4") + // store + cnr := cidtest.ID() + for i := range objs { + objs[i].SetID(ids[len(ids)-i-1]) + objs[i].SetContainerID(cnr) + objs[i].SetPayloadChecksum(checksumtest.Checksum()) // Put requires + require.NoError(t, db.Put(&objs[i], nil, nil)) + } + + attrs := []string{object.AttributeFilePath, "attr1", "attr2"} + + var fs object.SearchFilters + fs.AddFilter(object.AttributeFilePath, "/home/Downloads/", object.MatchCommonPrefix) + fs.AddFilter("Type", "JPEG", object.MatchStringEqual) + res, cursor, err := db.Search(cnr, fs, attrs, "", 1000) + require.NoError(t, err) + require.Empty(t, cursor) + require.Equal(t, []client.SearchResultItem{ + {ID: ids[1], Attributes: []string{"/home/Downloads/cat.jpg", "val1_3", "val2_3"}}, + {ID: ids[3], Attributes: []string{"/home/Downloads/dog.jpg", "val1_1", "val2_1"}}, + }, res) + + fs = fs[:0] + fs.AddFilter(object.AttributeFilePath, "/usr", object.MatchCommonPrefix) + fs.AddFilter("Type", "BIN", object.MatchStringEqual) + res, cursor, err = db.Search(cnr, fs, attrs, "", 1000) + require.NoError(t, err) + require.Empty(t, cursor) + require.Equal(t, []client.SearchResultItem{ + {ID: ids[2], Attributes: []string{"/usr/local/bin/go", "val1_2", "val2_2"}}, + }, res) + + fs = fs[:0] + fs.AddFilter(object.AttributeFilePath, "/", object.MatchCommonPrefix) + fs.AddFilter("Type", "BIN", object.MatchStringNotEqual) + res, cursor, err = db.Search(cnr, fs, attrs, "", 1000) + require.NoError(t, err) + require.Empty(t, cursor) + require.Equal(t, []client.SearchResultItem{ + {ID: ids[1], Attributes: []string{"/home/Downloads/cat.jpg", "val1_3", "val2_3"}}, + {ID: ids[3], Attributes: []string{"/home/Downloads/dog.jpg", "val1_1", "val2_1"}}, + {ID: ids[0], Attributes: []string{"/var/log/neofs/node", "val1_4", "val2_4"}}, + }, res) + + t.Run("paging", func(t *testing.T) { + fs = fs[:0] + fs.AddFilter(object.AttributeFilePath, "/home/", 
object.MatchCommonPrefix) + fs.AddFilter("Type", "TEXT", object.MatchStringNotEqual) + res, cursor, err := db.Search(cnr, fs, attrs, "", 1) + require.NoError(t, err) + require.Equal(t, []client.SearchResultItem{ + {ID: ids[1], Attributes: []string{"/home/Downloads/cat.jpg", "val1_3", "val2_3"}}, + }, res) + require.NotEmpty(t, cursor) + res, cursor, err = db.Search(cnr, fs, attrs, cursor, 1) + require.NoError(t, err) + require.Equal(t, []client.SearchResultItem{ + {ID: ids[3], Attributes: []string{"/home/Downloads/dog.jpg", "val1_1", "val2_1"}}, + }, res) + require.Empty(t, cursor) + }) + }) + }) +} diff --git a/pkg/local_object_storage/metabase/put.go b/pkg/local_object_storage/metabase/put.go index 6b3185ce9e..3f8ad2adfb 100644 --- a/pkg/local_object_storage/metabase/put.go +++ b/pkg/local_object_storage/metabase/put.go @@ -13,6 +13,8 @@ import ( apistatus "github.com/nspcc-dev/neofs-sdk-go/client/status" objectSDK "github.com/nspcc-dev/neofs-sdk-go/object" oid "github.com/nspcc-dev/neofs-sdk-go/object/id" + "github.com/nspcc-dev/neofs-sdk-go/user" + "github.com/nspcc-dev/neofs-sdk-go/version" "go.etcd.io/bbolt" ) @@ -68,6 +70,14 @@ func (db *DB) put( if cnr.IsZero() { return errors.New("missing container in object") } + owner := obj.OwnerID() + if owner == nil { + return user.ErrZeroID + } + pldHash, ok := obj.PayloadChecksum() + if !ok { + return errors.New("missing payload checksum") + } isParent := si != nil @@ -152,6 +162,19 @@ func (db *DB) put( } } + var ver version.Version + if v := obj.Version(); v != nil { + ver = *v + } + var pldHmmHash []byte + if h, ok := obj.PayloadHomomorphicHash(); ok { + pldHmmHash = h.Value() + } + if err := putMetadata(tx, cnr, obj.GetID(), ver, *owner, obj.Type(), obj.CreationEpoch(), obj.PayloadSize(), + pldHash.Value(), pldHmmHash, obj.SplitID().ToV2(), obj.GetParentID(), obj.GetFirstID(), obj.Attributes()); err != nil { + return fmt.Errorf("put metadata: %w", err) + } + return nil } diff --git 
a/pkg/local_object_storage/metabase/select.go b/pkg/local_object_storage/metabase/select.go index 372523bc86..7dd3602715 100644 --- a/pkg/local_object_storage/metabase/select.go +++ b/pkg/local_object_storage/metabase/select.go @@ -279,7 +279,7 @@ func (db *DB) selectFromFKBT( if isNumOp { // TODO: big math takes less code but inefficient - filterNum, ok := new(big.Int).SetString(f.Value(), 10) + filterNum, ok := parseInt(f.Value()) if !ok { db.log.Debug("unexpected non-decimal numeric filter", zap.String("value", f.Value())) return @@ -648,7 +648,7 @@ func groupFilters(filters object.SearchFilters) (filterGroup, error) { } // TODO: big math takes less code but inefficient - _, ok := new(big.Int).SetString(filters[i].Value(), 10) + _, ok := parseInt(filters[i].Value()) if !ok { return res, fmt.Errorf("%w: invalid filter #%d: numeric filter with non-decimal value", objectcore.ErrInvalidSearchQuery, i) diff --git a/pkg/local_object_storage/metabase/util.go b/pkg/local_object_storage/metabase/util.go index 49cf7f6203..9e40073dd0 100644 --- a/pkg/local_object_storage/metabase/util.go +++ b/pkg/local_object_storage/metabase/util.go @@ -4,6 +4,7 @@ import ( "bytes" "crypto/sha256" "fmt" + "math/big" cid "github.com/nspcc-dev/neofs-sdk-go/container/id" "github.com/nspcc-dev/neofs-sdk-go/object" @@ -11,6 +12,11 @@ import ( "go.etcd.io/bbolt" ) +// UTF-8 data separator used in DB. +var utf8Delimiter = []byte{0xFF} + +const utf8DelimiterLen = 1 + var ( // graveyardBucketName stores rows with the objects that have been // covered with Tombstone objects. That objects should not be returned @@ -132,6 +138,11 @@ const ( firstObjectIDPrefix ) +// key prefix for per-container buckets storing objects' metadata required to +// serve ObjectService.SearchV2. See VERSION.md for details. +// This data can be completely migrated, so special byte is occupied. 
+const metadataPrefix = 255 + const ( cidSize = sha256.Size bucketKeySize = 1 + cidSize @@ -296,3 +307,14 @@ func isLockObject(tx *bbolt.Tx, idCnr cid.ID, obj oid.ID) bool { bucketNameLockers(idCnr, make([]byte, bucketKeySize)), objectKey(obj, make([]byte, objectKeySize))) } + +func parseInt(s string) (*big.Int, bool) { return new(big.Int).SetString(s, 10) } + +type keyBuffer []byte + +func (x *keyBuffer) alloc(ln int) []byte { + if len(*x) < ln { + *x = make([]byte, ln) + } + return (*x)[:ln] +} diff --git a/pkg/local_object_storage/metabase/util_test.go b/pkg/local_object_storage/metabase/util_test.go new file mode 100644 index 0000000000..49f0783864 --- /dev/null +++ b/pkg/local_object_storage/metabase/util_test.go @@ -0,0 +1,24 @@ +package meta + +import ( + "testing" + "unicode/utf8" + + "github.com/stretchr/testify/require" +) + +func TestNonUTF8Delimiter(t *testing.T) { + t.Run("len", func(t *testing.T) { require.Len(t, utf8Delimiter, utf8DelimiterLen) }) + t.Run("format", func(t *testing.T) { require.False(t, utf8.Valid(utf8Delimiter)) }) +} + +func TestKeyBuffer(t *testing.T) { + var b keyBuffer + b1 := b.alloc(10) + require.Len(t, b1, 10) + b2 := b.alloc(20) + require.Len(t, b2, 20) + b1 = b.alloc(10) + require.Len(t, b1, 10) + require.Equal(t, &b2[0], &b1[0]) +}