diff --git a/blockchain/go.mod b/blockchain/go.mod
index 016b007f..2c1a150e 100644
--- a/blockchain/go.mod
+++ b/blockchain/go.mod
@@ -3,6 +3,7 @@ module github.com/decred/dcrd/blockchain/v2
 go 1.11
 
 require (
+	github.com/dchest/blake256 v1.1.0 // indirect
 	github.com/decred/dcrd/blockchain/stake/v2 v2.0.1
 	github.com/decred/dcrd/blockchain/standalone v1.0.0
 	github.com/decred/dcrd/chaincfg/chainhash v1.0.2
diff --git a/blockchain/go.sum b/blockchain/go.sum
index 4b5fb27c..5309b6ee 100644
--- a/blockchain/go.sum
+++ b/blockchain/go.sum
@@ -10,6 +10,8 @@ github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c
 github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
 github.com/dchest/blake256 v1.0.0 h1:6gUgI5MHdz9g0TdrgKqXsoDX+Zjxmm1Sc6OsoGru50I=
 github.com/dchest/blake256 v1.0.0/go.mod h1:xXNWCE1jsAP8DAjP+rKw2MbeqLczjI3TRx2VK+9OEYY=
+github.com/dchest/blake256 v1.1.0 h1:4AuEhGPT/3TTKFhTfBpZ8hgZE7wJpawcYaEawwsbtqM=
+github.com/dchest/blake256 v1.1.0/go.mod h1:xXNWCE1jsAP8DAjP+rKw2MbeqLczjI3TRx2VK+9OEYY=
 github.com/dchest/siphash v1.2.1 h1:4cLinnzVJDKxTCl9B01807Yiy+W7ZzVHj/KIroQRvT4=
 github.com/dchest/siphash v1.2.1/go.mod h1:q+IRvb2gOSrUnYoPqHiyHXS0FOBBOdl6tONBlVnOnt4=
 github.com/decred/base58 v1.0.0 h1:BVi1FQCThIjZ0ehG+I99NJ51o0xcc9A/fDKhmJxY6+w=
diff --git a/blockchain/indexers/cfindex.go b/blockchain/indexers/cfindex.go
index ac3a79dc..0d12cbe0 100644
--- a/blockchain/indexers/cfindex.go
+++ b/blockchain/indexers/cfindex.go
@@ -174,7 +174,7 @@ func (idx *CFIndex) Create(dbTx database.Tx) error {
 
 // storeFilter stores a given filter, and performs the steps needed to
 // generate the filter's header.
-func storeFilter(dbTx database.Tx, block *dcrutil.Block, f *gcs.Filter, filterType wire.FilterType) error {
+func storeFilter(dbTx database.Tx, block *dcrutil.Block, f *gcs.FilterV1, filterType wire.FilterType) error {
 	if uint8(filterType) > maxFilterType {
 		return errors.New("unsupported filter type")
 	}
diff --git a/gcs/bench_test.go b/gcs/bench_test.go
index f71ade09..b22f0059 100644
--- a/gcs/bench_test.go
+++ b/gcs/bench_test.go
@@ -46,7 +46,7 @@ func BenchmarkFilterBuild50000(b *testing.B) {
 	b.ResetTimer()
 	var key [KeySize]byte
 	for i := 0; i < b.N; i++ {
-		_, err := NewFilter(P, key, contents)
+		_, err := NewFilterV1(P, key, contents)
 		if err != nil {
 			b.Fatalf("unable to generate filter: %v", err)
 		}
@@ -66,7 +66,7 @@ func BenchmarkFilterBuild100000(b *testing.B) {
 	b.ResetTimer()
 	var key [KeySize]byte
 	for i := 0; i < b.N; i++ {
-		_, err := NewFilter(P, key, contents)
+		_, err := NewFilterV1(P, key, contents)
 		if err != nil {
 			b.Fatalf("unable to generate filter: %v", err)
 		}
@@ -83,7 +83,7 @@ func BenchmarkFilterMatch(b *testing.B) {
 	}
 
 	var key [KeySize]byte
-	filter, err := NewFilter(P, key, contents)
+	filter, err := NewFilterV1(P, key, contents)
 	if err != nil {
 		b.Fatalf("Failed to build filter")
 	}
@@ -114,7 +114,7 @@ func BenchmarkFilterMatchAny(b *testing.B) {
 	}
 
 	var key [KeySize]byte
-	filter, err := NewFilter(P, key, contents)
+	filter, err := NewFilterV1(P, key, contents)
 	if err != nil {
 		b.Fatalf("Failed to build filter")
 	}
diff --git a/gcs/blockcf/blockcf.go b/gcs/blockcf/blockcf.go
index d40065e7..16beade1 100644
--- a/gcs/blockcf/blockcf.go
+++ b/gcs/blockcf/blockcf.go
@@ -28,8 +28,10 @@ import (
 	"github.com/decred/dcrd/wire"
 )
 
-// P is the collision probability used for block committed filters (2^-20)
-const P = 20
+const (
+	// P is the collision probability used for block committed filters (2^-20)
+	P = 20
+)
 
 // Entries describes all of the filter entries used to create a GCS filter and
 // provides methods for appending data structures found in blocks.
@@ -95,7 +97,7 @@ func Key(hash *chainhash.Hash) [gcs.KeySize]byte {
 // contain all the previous regular outpoints spent within a block, as well as
 // the data pushes within all the outputs created within a block which can be
 // spent by regular transactions.
-func Regular(block *wire.MsgBlock) (*gcs.Filter, error) {
+func Regular(block *wire.MsgBlock) (*gcs.FilterV1, error) {
 	var data Entries
 
 	// Add "regular" data from stake transactions. For each class of stake
@@ -163,14 +165,14 @@ func Regular(block *wire.MsgBlock) (*gcs.Filter, error) {
 
 	blockHash := block.BlockHash()
 	key := Key(&blockHash)
-	return gcs.NewFilter(P, key, data)
+	return gcs.NewFilterV1(P, key, data)
 }
 
 // Extended builds an extended GCS filter from a block. An extended filter
 // supplements a regular basic filter by including all transaction hashes of
 // regular and stake transactions, and adding the witness data (a.k.a. the
 // signature script) found within every non-coinbase regular transaction.
-func Extended(block *wire.MsgBlock) (*gcs.Filter, error) {
+func Extended(block *wire.MsgBlock) (*gcs.FilterV1, error) {
 	var data Entries
 
 	// For each stake transaction, commit the transaction hash. If the
@@ -207,5 +209,5 @@ func Extended(block *wire.MsgBlock) (*gcs.Filter, error) {
 
 	blockHash := block.BlockHash()
 	key := Key(&blockHash)
-	return gcs.NewFilter(P, key, data)
+	return gcs.NewFilterV1(P, key, data)
 }
diff --git a/gcs/gcs.go b/gcs/gcs.go
index d8bc09e4..ccc7a399 100644
--- a/gcs/gcs.go
+++ b/gcs/gcs.go
@@ -31,6 +31,20 @@ func (s *uint64s) Len() int { return len(*s) }
 func (s *uint64s) Less(i, j int) bool { return (*s)[i] < (*s)[j] }
 func (s *uint64s) Swap(i, j int) { (*s)[i], (*s)[j] = (*s)[j], (*s)[i] }
 
+// filter describes a versioned immutable filter that can be built from a set of
+// data elements, serialized, deserialized, and queried in a thread-safe manner.
+//
+// It is used internally to implement the exported filter version types.
+//
+// See FilterV1 for more details.
+type filter struct {
+	version     uint16
+	n           uint32
+	p           uint8
+	modulusNP   uint64
+	filterNData []byte // 4 bytes n big endian, remainder is filter data
+}
+
 // Filter describes an immutable filter that can be built from a set of data
 // elements, serialized, deserialized, and queried in a thread-safe manner. The
 // serialized form is compressed as a Golomb Coded Set (GCS), but does not
@@ -38,20 +52,14 @@ func (s *uint64s) Swap(i, j int) { (*s)[i], (*s)[j] = (*s)[j], (*s)[i] }
 // necessary. The hash function used is SipHash, a keyed function; the key used
 // in building the filter is required in order to match filter values and is
 // not included in the serialized form.
-type Filter struct {
-	n           uint32
-	p           uint8
-	modulusNP   uint64
-	filterNData []byte // 4 bytes n big endian, remainder is filter data
+type FilterV1 struct {
+	filter
 }
 
-// NewFilter builds a new GCS filter with the collision probability of
-// `1/(2**P)`, key `key`, and including every `[]byte` in `data` as a member of
-// the set.
-func NewFilter(P uint8, key [KeySize]byte, data [][]byte) (*Filter, error) {
-	// Some initial parameter checks: make sure we have data from which to
-	// build the filter, and make sure our parameters will fit the hash
-	// function we're using.
+// newFilter builds a new GCS filter of the specified version with the collision
+// probability of `1/(2**P)`, key `key`, and including every `[]byte` in `data`
+// as a member of the set.
+func newFilter(version uint16, P uint8, key [KeySize]byte, data [][]byte) (*filter, error) {
 	if len(data) > math.MaxInt32 {
 		str := fmt.Sprintf("unable to create filter with %d entries greater "+
 			"than max allowed %d", len(data), math.MaxInt32)
@@ -65,7 +73,8 @@ func NewFilter(P uint8, key [KeySize]byte, data [][]byte) (*Filter, error) {
 	// Create the filter object and insert metadata.
 	modP := uint64(1 << P)
 	modPMask := modP - 1
-	f := Filter{
+	f := filter{
+		version:   version,
 		n:         uint32(len(data)),
 		p:         P,
 		modulusNP: uint64(len(data)) * modP,
@@ -126,9 +135,20 @@ func NewFilter(P uint8, key [KeySize]byte, data [][]byte) (*Filter, error) {
 	return &f, nil
 }
 
-// FromBytes deserializes a GCS filter from a known N, P, and serialized filter
-// as returned by Bytes().
-func FromBytes(N uint32, P uint8, d []byte) (*Filter, error) {
+// NewFilterV1 builds a new version 1 GCS filter with the collision probability
+// of `1/(2**P)`, key `key`, and including every `[]byte` in `data` as a member
+// of the set.
+func NewFilterV1(P uint8, key [KeySize]byte, data [][]byte) (*FilterV1, error) {
+	filter, err := newFilter(1, P, key, data)
+	if err != nil {
+		return nil, err
+	}
+	return &FilterV1{filter: *filter}, nil
+}
+
+// FromBytesV1 deserializes a version 1 GCS filter from a known N, P, and
+// serialized filter as returned by Bytes().
+func FromBytesV1(N uint32, P uint8, d []byte) (*FilterV1, error) {
 	// Basic sanity check.
 	if P > 32 {
 		str := fmt.Sprintf("P value of %d is greater than max allowed 32", P)
@@ -140,18 +160,21 @@ func FromBytes(N uint32, P uint8, d []byte) (*Filter, error) {
 	binary.BigEndian.PutUint32(ndata, N)
 	copy(ndata[4:], d)
 
-	f := &Filter{
-		n:           N,
-		p:           P,
-		modulusNP:   uint64(N) * uint64(1<<P),
-		filterNData: ndata,
+	f := &FilterV1{
+		filter: filter{
+			version:     1,
+			n:           N,
+			p:           P,
+			modulusNP:   uint64(N) * uint64(1<<P),
+			filterNData: ndata,
+		},
 	}
 	return f, nil
 }
 
-// FromNBytes deserializes a GCS filter from a known P, and serialized N and
-// filter as returned by NBytes().
-func FromNBytes(P uint8, d []byte) (*Filter, error) {
+// FromNBytesV1 deserializes a version 1 GCS filter from a known P, and
+// serialized N and filter as returned by NBytes().
+func FromNBytesV1(P uint8, d []byte) (*FilterV1, error) {
 	var n uint32
 	if len(d) >= 4 {
 		n = binary.BigEndian.Uint32(d[:4])
@@ -160,18 +183,21 @@ func FromNBytes(P uint8, d []byte) (*Filter, error) {
-	f := &Filter{
-		n:           n,
-		p:           P,
-		modulusNP:   uint64(n) * uint64(1<<P),
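
Viewed from calling code, the change is mechanical: filters are built with gcs.NewFilterV1 (or the blockcf helpers) instead of gcs.NewFilter, deserialized with gcs.FromBytesV1, and passed around as *gcs.FilterV1. The following is a minimal sketch of an updated call site; matchScript is a hypothetical helper, and it assumes the Match, N, and Bytes methods and the blockcf helpers keep their existing signatures on the renamed type (only the V1 names themselves come from this diff).

package example

import (
	"github.com/decred/dcrd/gcs"
	"github.com/decred/dcrd/gcs/blockcf"
	"github.com/decred/dcrd/wire"
)

// matchScript builds the version 1 regular committed filter for a block and
// reports whether a serialized script is a probable member.  NewFilterV1,
// FromBytesV1, and FilterV1 are the names introduced by this change; Match,
// N, Bytes, and the blockcf helpers are assumed to keep their existing
// signatures.
func matchScript(block *wire.MsgBlock, script []byte) (bool, error) {
	f, err := blockcf.Regular(block) // now returns *gcs.FilterV1
	if err != nil {
		return false, err
	}

	// Committed filters are keyed by the block hash, exactly as before.
	blockHash := block.BlockHash()
	key := blockcf.Key(&blockHash)

	// Round-trip through the serialized form to exercise the renamed
	// deserialization entry point.
	restored, err := gcs.FromBytesV1(f.N(), blockcf.P, f.Bytes())
	if err != nil {
		return false, err
	}
	return restored.Match(key, script), nil
}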