diff --git a/token/core/fabtoken/v1/tokens.go b/token/core/fabtoken/v1/tokens.go index e0640109ac..a447113fc1 100644 --- a/token/core/fabtoken/v1/tokens.go +++ b/token/core/fabtoken/v1/tokens.go @@ -25,7 +25,7 @@ type TokensService struct { } func NewTokensService(pp *setup.PublicParams, identityDeserializer driver.Deserializer) (*TokensService, error) { - supportedTokens, err := SupportedTokenFormat(pp.QuantityPrecision) + supportedTokens, err := ComputeTokenFormat(pp.QuantityPrecision) if err != nil { return nil, errors.WithMessagef(err, "failed getting supported token types") } @@ -89,7 +89,7 @@ func (s *TokensUpgradeService) CheckUpgradeProof(ch driver.TokensUpgradeChalleng return false, errors.New("not supported") } -func SupportedTokenFormat(precision uint64) (token2.Format, error) { +func ComputeTokenFormat(precision uint64) (token2.Format, error) { hasher := utils.NewSHA256Hasher() if err := errors2.Join( hasher.AddInt32(fabtoken.Type), diff --git a/token/core/zkatdlog/nogh/v1/crypto/upgrade/service.go b/token/core/zkatdlog/nogh/v1/crypto/upgrade/service.go index dd2defb3a7..3bfe6ba32c 100644 --- a/token/core/zkatdlog/nogh/v1/crypto/upgrade/service.go +++ b/token/core/zkatdlog/nogh/v1/crypto/upgrade/service.go @@ -56,7 +56,7 @@ func NewService( // compute supported tokens var upgradeSupportedTokenFormatList []token.Format for _, precision := range []uint64{16, 32, 64} { - format, err := v1.SupportedTokenFormat(precision) + format, err := v1.ComputeTokenFormat(precision) if err != nil { return nil, errors.Wrapf(err, "failed computing fabtoken token format with precision [%d]", precision) } diff --git a/token/core/zkatdlog/nogh/v1/crypto/upgrade/service_test.go b/token/core/zkatdlog/nogh/v1/crypto/upgrade/service_test.go index 5cba548789..a3f0a4c929 100644 --- a/token/core/zkatdlog/nogh/v1/crypto/upgrade/service_test.go +++ b/token/core/zkatdlog/nogh/v1/crypto/upgrade/service_test.go @@ -47,7 +47,7 @@ func TestTokensService_GenUpgradeProof(t *testing.T) { } 
fabtokenOutputRaw, err := fabtokenOutput.Serialize() assert.NoError(t, err) - formatFabtoken16, err := v1.SupportedTokenFormat(16) + formatFabtoken16, err := v1.ComputeTokenFormat(16) assert.NoError(t, err) validTokens := []token.LedgerToken{{ ID: token.ID{TxId: "tx1", Index: 1}, @@ -178,7 +178,7 @@ func TestTokensService_CheckUpgradeProof(t *testing.T) { } fabtokenOutputRaw, err := fabtokenOutput.Serialize() assert.NoError(t, err) - formatFabtoken16, err := v1.SupportedTokenFormat(16) + formatFabtoken16, err := v1.ComputeTokenFormat(16) assert.NoError(t, err) validTokens := []token.LedgerToken{{ ID: token.ID{TxId: "tx1", Index: 1}, diff --git a/token/core/zkatdlog/nogh/v1/driver/driver.go b/token/core/zkatdlog/nogh/v1/driver/driver.go index 2ee5c8c453..0b761220d8 100644 --- a/token/core/zkatdlog/nogh/v1/driver/driver.go +++ b/token/core/zkatdlog/nogh/v1/driver/driver.go @@ -132,7 +132,7 @@ func (d *Driver) NewTokenService(tmsID driver.TMSID, publicParams []byte) (drive metricsProvider := metrics.NewTMSProvider(tmsConfig.ID(), d.metricsProvider) driverMetrics := v1.NewMetrics(metricsProvider) - tokensService, err := token3.NewTokensService(logger, ppm, deserializer) + tokensService, err := token3.NewTokensService(logger, ppm.PublicParams(), deserializer) if err != nil { return nil, errors.Wrapf(err, "failed to initiliaze token service for [%s:%s]", tmsID.Network, tmsID.Namespace) } diff --git a/token/core/zkatdlog/nogh/v1/token/fabtoken.go b/token/core/zkatdlog/nogh/v1/token/fabtoken.go index 8adc5b47c7..df13baf7b7 100644 --- a/token/core/zkatdlog/nogh/v1/token/fabtoken.go +++ b/token/core/zkatdlog/nogh/v1/token/fabtoken.go @@ -8,16 +8,18 @@ package token import ( "github.com/hyperledger-labs/fabric-smart-client/pkg/utils/errors" - "github.com/hyperledger-labs/fabric-token-sdk/token/core/fabtoken/v1/actions" + fabtoken "github.com/hyperledger-labs/fabric-token-sdk/token/core/fabtoken/v1/actions" "github.com/hyperledger-labs/fabric-token-sdk/token/token" ) -func 
ParseFabtokenToken(tok []byte, precision uint64, maxPrecision uint64) (*actions.Output, uint64, error) { - if precision < maxPrecision { +// ParseFabtokenToken unmarshals tok as a fabtoken.Output using precision to parse the quantity. +// If precision is larger than maxPrecision, it returns an error +func ParseFabtokenToken(tok []byte, precision uint64, maxPrecision uint64) (*fabtoken.Output, uint64, error) { + if precision > maxPrecision { return nil, 0, errors.Errorf("unsupported precision [%d], max [%d]", precision, maxPrecision) } - output := &actions.Output{} + output := &fabtoken.Output{} err := output.Deserialize(tok) if err != nil { return nil, 0, errors.Wrap(err, "failed to unmarshal fabtoken") diff --git a/token/core/zkatdlog/nogh/v1/token/fabtoken_test.go b/token/core/zkatdlog/nogh/v1/token/fabtoken_test.go new file mode 100644 index 0000000000..e0775e08cb --- /dev/null +++ b/token/core/zkatdlog/nogh/v1/token/fabtoken_test.go @@ -0,0 +1,113 @@ +/* +Copyright IBM Corp. All Rights Reserved. 
+ +SPDX-License-Identifier: Apache-2.0 +*/ + +package token + +import ( + "testing" + + "github.com/hyperledger-labs/fabric-token-sdk/token/core/fabtoken/v1/actions" + "github.com/stretchr/testify/assert" +) + +func TestParseFabtokenToken(t *testing.T) { + nilGetTokFunc := func() (*actions.Output, []byte, error) { + return nil, nil, nil + } + tests := []struct { + name string + tok func() (*actions.Output, []byte, error) + precision uint64 + maxPrecision uint64 + wantErr bool + expectedError string + expectedQuantity uint64 + }{ + { + name: "precision is larger than maxPrecision", + tok: nilGetTokFunc, + precision: 10, + maxPrecision: 5, + wantErr: true, + expectedError: "unsupported precision [10], max [5]", + }, + { + name: "invalid tok", + tok: nilGetTokFunc, + precision: 5, + maxPrecision: 10, + wantErr: true, + expectedError: "failed to unmarshal fabtoken: failed deserializing token: failed unmarshalling token: failed to unmarshal to TypedToken: asn1: syntax error: sequence truncated", + }, + { + name: "invalid tok 2", + tok: func() (*actions.Output, []byte, error) { + return nil, []byte{}, nil + }, + precision: 5, + maxPrecision: 10, + wantErr: true, + expectedError: "failed to unmarshal fabtoken: failed deserializing token: failed unmarshalling token: failed to unmarshal to TypedToken: asn1: syntax error: sequence truncated", + }, + { + name: "invalid tok 3", + tok: func() (*actions.Output, []byte, error) { + return nil, []byte{0, 1, 2}, nil + }, + precision: 5, + maxPrecision: 10, + wantErr: true, + expectedError: "failed to unmarshal fabtoken: failed deserializing token: failed unmarshalling token: failed to unmarshal to TypedToken: asn1: structure error: tags don't match (16 vs {class:0 tag:0 length:1 isCompound:false}) {optional:false explicit:false application:false private:false defaultValue: tag: stringType:0 timeType:0 set:false omitEmpty:false} TypedToken @2", + }, + { + name: "invalid quantity", + tok: func() (*actions.Output, []byte, error) { + 
output := &actions.Output{ + Owner: nil, + Type: "", + Quantity: "", + } + raw, err := output.Serialize() + return output, raw, err + }, + precision: 5, + maxPrecision: 10, + wantErr: true, + expectedError: "failed to create quantity: invalid input [,5]", + }, + { + name: "success", + tok: func() (*actions.Output, []byte, error) { + output := &actions.Output{ + Owner: nil, + Type: "", + Quantity: "10", + } + raw, err := output.Serialize() + return output, raw, err + }, + precision: 5, + maxPrecision: 10, + wantErr: false, + expectedQuantity: 10, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + tok, tokBytes, err := tt.tok() + assert.NoError(t, err) + output, quantity, err := ParseFabtokenToken(tokBytes, tt.precision, tt.maxPrecision) + if tt.wantErr { + assert.Error(t, err) + assert.EqualError(t, err, tt.expectedError) + } else { + assert.NoError(t, err) + assert.Equal(t, tok, output) + assert.Equal(t, tt.expectedQuantity, quantity) + } + }) + } +} diff --git a/token/core/zkatdlog/nogh/v1/token/mock/id.go b/token/core/zkatdlog/nogh/v1/token/mock/id.go new file mode 100644 index 0000000000..e7f5b67ce1 --- /dev/null +++ b/token/core/zkatdlog/nogh/v1/token/mock/id.go @@ -0,0 +1,117 @@ +// Code generated by counterfeiter. DO NOT EDIT. 
+package mock + +import ( + "sync" + + "github.com/hyperledger-labs/fabric-smart-client/platform/common/services/identity" + "github.com/hyperledger-labs/fabric-token-sdk/token/core/zkatdlog/nogh/v1/token" +) + +type IdentityDeserializer struct { + RecipientsStub func(identity.Identity) ([]identity.Identity, error) + recipientsMutex sync.RWMutex + recipientsArgsForCall []struct { + arg1 identity.Identity + } + recipientsReturns struct { + result1 []identity.Identity + result2 error + } + recipientsReturnsOnCall map[int]struct { + result1 []identity.Identity + result2 error + } + invocations map[string][][]interface{} + invocationsMutex sync.RWMutex +} + +func (fake *IdentityDeserializer) Recipients(arg1 identity.Identity) ([]identity.Identity, error) { + fake.recipientsMutex.Lock() + ret, specificReturn := fake.recipientsReturnsOnCall[len(fake.recipientsArgsForCall)] + fake.recipientsArgsForCall = append(fake.recipientsArgsForCall, struct { + arg1 identity.Identity + }{arg1}) + stub := fake.RecipientsStub + fakeReturns := fake.recipientsReturns + fake.recordInvocation("Recipients", []interface{}{arg1}) + fake.recipientsMutex.Unlock() + if stub != nil { + return stub(arg1) + } + if specificReturn { + return ret.result1, ret.result2 + } + return fakeReturns.result1, fakeReturns.result2 +} + +func (fake *IdentityDeserializer) RecipientsCallCount() int { + fake.recipientsMutex.RLock() + defer fake.recipientsMutex.RUnlock() + return len(fake.recipientsArgsForCall) +} + +func (fake *IdentityDeserializer) RecipientsCalls(stub func(identity.Identity) ([]identity.Identity, error)) { + fake.recipientsMutex.Lock() + defer fake.recipientsMutex.Unlock() + fake.RecipientsStub = stub +} + +func (fake *IdentityDeserializer) RecipientsArgsForCall(i int) identity.Identity { + fake.recipientsMutex.RLock() + defer fake.recipientsMutex.RUnlock() + argsForCall := fake.recipientsArgsForCall[i] + return argsForCall.arg1 +} + +func (fake *IdentityDeserializer) RecipientsReturns(result1 
[]identity.Identity, result2 error) { + fake.recipientsMutex.Lock() + defer fake.recipientsMutex.Unlock() + fake.RecipientsStub = nil + fake.recipientsReturns = struct { + result1 []identity.Identity + result2 error + }{result1, result2} +} + +func (fake *IdentityDeserializer) RecipientsReturnsOnCall(i int, result1 []identity.Identity, result2 error) { + fake.recipientsMutex.Lock() + defer fake.recipientsMutex.Unlock() + fake.RecipientsStub = nil + if fake.recipientsReturnsOnCall == nil { + fake.recipientsReturnsOnCall = make(map[int]struct { + result1 []identity.Identity + result2 error + }) + } + fake.recipientsReturnsOnCall[i] = struct { + result1 []identity.Identity + result2 error + }{result1, result2} +} + +func (fake *IdentityDeserializer) Invocations() map[string][][]interface{} { + fake.invocationsMutex.RLock() + defer fake.invocationsMutex.RUnlock() + fake.recipientsMutex.RLock() + defer fake.recipientsMutex.RUnlock() + copiedInvocations := map[string][][]interface{}{} + for key, value := range fake.invocations { + copiedInvocations[key] = value + } + return copiedInvocations +} + +func (fake *IdentityDeserializer) recordInvocation(key string, args []interface{}) { + fake.invocationsMutex.Lock() + defer fake.invocationsMutex.Unlock() + if fake.invocations == nil { + fake.invocations = map[string][][]interface{}{} + } + if fake.invocations[key] == nil { + fake.invocations[key] = [][]interface{}{} + } + fake.invocations[key] = append(fake.invocations[key], args) +} + +var _ token.IdentityDeserializer = new(IdentityDeserializer) diff --git a/token/core/zkatdlog/nogh/v1/token/service.go b/token/core/zkatdlog/nogh/v1/token/service.go index d1ea6864a0..b1f73b079a 100644 --- a/token/core/zkatdlog/nogh/v1/token/service.go +++ b/token/core/zkatdlog/nogh/v1/token/service.go @@ -12,7 +12,6 @@ import ( math2 "github.com/IBM/mathlib" "github.com/hyperledger-labs/fabric-smart-client/platform/common/utils" - 
"github.com/hyperledger-labs/fabric-token-sdk/token/core/common" v1 "github.com/hyperledger-labs/fabric-token-sdk/token/core/fabtoken/v1" "github.com/hyperledger-labs/fabric-token-sdk/token/core/fabtoken/v1/actions" "github.com/hyperledger-labs/fabric-token-sdk/token/core/zkatdlog/nogh/v1/crypto/math" @@ -26,36 +25,50 @@ import ( ) var Precisions = map[token.Format]uint64{ - utils.MustGet(v1.SupportedTokenFormat(16)): 16, - utils.MustGet(v1.SupportedTokenFormat(32)): 32, - utils.MustGet(v1.SupportedTokenFormat(64)): 64, + utils.MustGet(v1.ComputeTokenFormat(16)): 16, + utils.MustGet(v1.ComputeTokenFormat(32)): 32, + utils.MustGet(v1.ComputeTokenFormat(64)): 64, +} + +//go:generate counterfeiter -o mock/id.go -fake-name IdentityDeserializer . IdentityDeserializer + +type IdentityDeserializer interface { + // Recipients returns the recipient identities from the given serialized representation + Recipients(raw driver.Identity) ([]driver.Identity, error) } type TokensService struct { - Logger logging.Logger - PublicParametersManager common.PublicParametersManager[*setup.PublicParams] - IdentityDeserializer driver.Deserializer + Logger logging.Logger + PublicParameters *setup.PublicParams + IdentityDeserializer IdentityDeserializer OutputTokenFormat token.Format SupportedTokenFormatList []token.Format } -func NewTokensService(logger logging.Logger, publicParametersManager common.PublicParametersManager[*setup.PublicParams], identityDeserializer driver.Deserializer) (*TokensService, error) { +func NewTokensService(logger logging.Logger, publicParams *setup.PublicParams, identityDeserializer IdentityDeserializer) (*TokensService, error) { + // validate input + if publicParams == nil { + return nil, errors.New("publicParams cannot be nil") + } + if identityDeserializer == nil { + return nil, errors.New("identityDeserializer cannot be nil") + } + // compute supported tokens - pp := publicParametersManager.PublicParams() - maxPrecision := pp.RangeProofParams.BitLength + 
maxPrecision := publicParams.Precision() // dlog without graph hiding - outputTokenFormat, err := supportedTokenFormat(pp, maxPrecision) + outputTokenFormat, err := ComputeTokenFormat(publicParams, maxPrecision) if err != nil { return nil, errors.Wrapf(err, "failed computing comm token types") } - supportedTokenFormatList := make([]token.Format, 0, 3*len(pp.IdemixIssuerPublicKeys)) + supportedTokenFormatList := make([]token.Format, 0, 3*len(publicParams.IdemixIssuerPublicKeys)) for _, precision := range setup.SupportedPrecisions { // these Precisions are supported directly if precision <= maxPrecision { - format, err := supportedTokenFormat(pp, precision) + format, err := ComputeTokenFormat(publicParams, precision) if err != nil { return nil, errors.Wrapf(err, "failed computing comm token types") } @@ -65,7 +78,7 @@ func NewTokensService(logger logging.Logger, publicParametersManager common.Publ // in addition, we support all fabtoken with precision less than maxPrecision for _, precision := range []uint64{16, 32, 64} { - format, err := v1.SupportedTokenFormat(precision) + format, err := v1.ComputeTokenFormat(precision) if err != nil { return nil, errors.Wrapf(err, "failed computing fabtoken token format with precision [%d]", precision) } @@ -76,7 +89,7 @@ func NewTokensService(logger logging.Logger, publicParametersManager common.Publ return &TokensService{ Logger: logger, - PublicParametersManager: publicParametersManager, + PublicParameters: publicParams, IdentityDeserializer: identityDeserializer, OutputTokenFormat: outputTokenFormat, SupportedTokenFormatList: supportedTokenFormatList, @@ -107,8 +120,20 @@ func (s *TokensService) Deobfuscate(output driver.TokenOutput, outputMetadata dr if err == nil { return tok, issuer, recipients, format, nil } + err = errors.Wrapf(err, "failed to deobfuscate comm token") + // try fabtoken type - return s.deobfuscateAsFabtokenType(output, outputMetadata) + tok, issuer, recipients, format, err2 := 
s.deobfuscateAsFabtokenType(output, outputMetadata) + if err2 != nil { + return nil, nil, nil, "", errors.Wrapf( + errors2.Join( + err, + errors.Wrapf(err2, "failed to deobfuscate fabtoken token"), + ), + "failed to deobfuscate", + ) + } + return tok, issuer, recipients, format, nil } func (s *TokensService) deobfuscateAsCommType(output driver.TokenOutput, outputMetadata driver.TokenOutputMetadata) (*token.Token, driver.Identity, []driver.Identity, token.Format, error) { @@ -132,7 +157,7 @@ func (s *TokensService) deobfuscateAsFabtokenType(output driver.TokenOutput, out metadata := &actions.OutputMetadata{} if err := metadata.Deserialize(outputMetadata); err != nil { - return nil, nil, nil, "", errors.Wrap(err, "failed unmarshalling token information") + return nil, nil, nil, "", errors.Wrap(err, "failed unmarshalling token metadata") } recipients, err := s.IdentityDeserializer.Recipients(tok.Owner) @@ -173,13 +198,12 @@ func (s *TokensService) DeserializeToken(outputFormat token.Format, outputRaw [] if !ok { return nil, nil, nil, errors.Errorf("unsupported token format [%s]", outputFormat) } - fabToken, value, err := ParseFabtokenToken(outputRaw, precision, s.PublicParametersManager.PublicParams().RangeProofParams.BitLength) + fabToken, value, err := ParseFabtokenToken(outputRaw, precision, s.PublicParameters.RangeProofParams.BitLength) if err != nil { return nil, nil, nil, errors.Wrapf(err, "failed to unmarshal fabtoken token") } - pp := s.PublicParametersManager.PublicParams() - curve := math2.Curves[pp.Curve] - tokens, meta, err := GetTokensWithWitness([]uint64{value}, fabToken.Type, pp.PedersenGenerators, curve) + curve := math2.Curves[s.PublicParameters.Curve] + tokens, meta, err := GetTokensWithWitness([]uint64{value}, fabToken.Type, s.PublicParameters.PedersenGenerators, curve) if err != nil { return nil, nil, nil, errors.Wrapf(err, "failed to compute commitment") } @@ -226,9 +250,8 @@ func (s *TokensService) deserializeCommToken(outputRaw []byte, 
metadataRaw []byt if err != nil { return nil, nil, nil, errors.Wrapf(err, "failed to deserialize token metadata [%d][%v]", len(metadataRaw), metadataRaw) } - pp := s.PublicParametersManager.PublicParams() - tok, err := output.ToClear(metadata, pp) + tok, err := output.ToClear(metadata, s.PublicParameters) if err != nil { return nil, nil, nil, errors.Wrap(err, "failed to deserialize token") } @@ -243,13 +266,13 @@ func (s *TokensService) getOutput(outputRaw []byte, checkOwner bool) (*Token, er if checkOwner && len(output.Owner) == 0 { return nil, errors.Errorf("token owner not found in output") } - if err := math.CheckElement(output.Data, s.PublicParametersManager.PublicParams().Curve); err != nil { + if err := math.CheckElement(output.Data, s.PublicParameters.Curve); err != nil { return nil, errors.Wrap(err, "data in invalid in output") } return output, nil } -func supportedTokenFormat(pp *setup.PublicParams, precision uint64) (token.Format, error) { +func ComputeTokenFormat(pp *setup.PublicParams, precision uint64) (token.Format, error) { hasher := utils2.NewSHA256Hasher() if err := errors2.Join( hasher.AddInt32(comm.Type), diff --git a/token/core/zkatdlog/nogh/v1/token/service_test.go b/token/core/zkatdlog/nogh/v1/token/service_test.go new file mode 100644 index 0000000000..379ce3d826 --- /dev/null +++ b/token/core/zkatdlog/nogh/v1/token/service_test.go @@ -0,0 +1,376 @@ +/* +Copyright IBM Corp. All Rights Reserved. 
+ +SPDX-License-Identifier: Apache-2.0 +*/ + +package token_test + +import ( + "testing" + + math "github.com/IBM/mathlib" + "github.com/hyperledger-labs/fabric-smart-client/platform/common/utils/collections" + v1 "github.com/hyperledger-labs/fabric-token-sdk/token/core/fabtoken/v1" + "github.com/hyperledger-labs/fabric-token-sdk/token/core/fabtoken/v1/actions" + "github.com/hyperledger-labs/fabric-token-sdk/token/core/zkatdlog/nogh/v1/setup" + token2 "github.com/hyperledger-labs/fabric-token-sdk/token/core/zkatdlog/nogh/v1/token" + "github.com/hyperledger-labs/fabric-token-sdk/token/core/zkatdlog/nogh/v1/token/mock" + "github.com/hyperledger-labs/fabric-token-sdk/token/driver" + "github.com/hyperledger-labs/fabric-token-sdk/token/services/logging" + "github.com/hyperledger-labs/fabric-token-sdk/token/services/tokens/core/comm" + "github.com/hyperledger-labs/fabric-token-sdk/token/token" + "github.com/pkg/errors" + "github.com/stretchr/testify/assert" +) + +func TestNewTokensService(t *testing.T) { + tests := []struct { + name string + init func() (logging.Logger, *setup.PublicParams, token2.IdentityDeserializer, error) + check func(pp *setup.PublicParams, ts *token2.TokensService) error + wantErr bool + expectedError string + }{ + { + name: "publicParams cannot be nil", + init: func() (logging.Logger, *setup.PublicParams, token2.IdentityDeserializer, error) { + return nil, nil, nil, nil + }, + wantErr: true, + expectedError: "publicParams cannot be nil", + }, + { + name: "identityDeserializer cannot be nil", + init: func() (logging.Logger, *setup.PublicParams, token2.IdentityDeserializer, error) { + pp, err := setup.Setup(32, nil, math.FP256BN_AMCL) + if err != nil { + return nil, nil, nil, err + } + return nil, pp, nil, nil + }, + wantErr: true, + expectedError: "identityDeserializer cannot be nil", + }, + { + name: "success", + init: func() (logging.Logger, *setup.PublicParams, token2.IdentityDeserializer, error) { + pp, err := setup.Setup(32, []byte("issuer 
public key"), math.FP256BN_AMCL) + if err != nil { + return nil, nil, nil, err + } + return nil, pp, &mock.IdentityDeserializer{}, nil + }, + check: func(pp *setup.PublicParams, ts *token2.TokensService) error { + // check pp + if ts.PublicParameters != pp { + return errors.Errorf("public parameters not equal") + } + // check OutputTokenFormat + outputTokenFormat, err := token2.ComputeTokenFormat(ts.PublicParameters, 32) + if err != nil { + return err + } + if ts.OutputTokenFormat != outputTokenFormat { + return errors.Errorf("invalid token format [%s]", ts.OutputTokenFormat) + } + + if len(ts.SupportedTokenFormats()) != 4 { + return errors.Errorf("invalid number of supported token formats [%d]", len(ts.SupportedTokenFormats())) + } + dlog16, err1 := token2.ComputeTokenFormat(pp, 16) + dlog32, err2 := token2.ComputeTokenFormat(pp, 32) + ft16, err3 := v1.ComputeTokenFormat(16) + ft32, err4 := v1.ComputeTokenFormat(32) + if err1 != nil || err2 != nil || err3 != nil || err4 != nil { + return errors.Errorf("failed computing token format") + } + stf := collections.NewSet[token.Format](ts.SupportedTokenFormats()...) 
+ if !stf.Contains(dlog16) { + return errors.Errorf("stf does not contain dlog16") + } + if !stf.Contains(dlog32) { + return errors.Errorf("stf does not contain dlog32") + } + if !stf.Contains(ft16) { + return errors.Errorf("stf does not contain ft16") + } + if !stf.Contains(ft32) { + return errors.Errorf("stf does not contain ft32") + } + return nil + }, + wantErr: false, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + logger, pp, deserializer, err := tt.init() + assert.NoError(t, err) + ts, err := token2.NewTokensService(logger, pp, deserializer) + if tt.wantErr { + assert.Error(t, err) + assert.EqualError(t, err, tt.expectedError) + assert.Nil(t, ts) + } else { + assert.NoError(t, err) + assert.NotNil(t, ts) + assert.NoError(t, tt.check(pp, ts)) + } + }) + } +} + +func TestTokensService_Recipients(t *testing.T) { + pp, err := setup.Setup(32, []byte("issuer public key"), math.FP256BN_AMCL) + assert.NoError(t, err) + + tests := []struct { + name string + inputs func() (*token2.TokensService, driver.TokenOutput, error) + wantErr bool + expectedError string + expectedIdentities []driver.Identity + }{ + { + name: "failed to deserialize token", + inputs: func() (*token2.TokensService, driver.TokenOutput, error) { + ts, err := token2.NewTokensService(nil, pp, &mock.IdentityDeserializer{}) + if err != nil { + return nil, nil, err + } + return ts, nil, nil + }, + wantErr: true, + expectedError: "failed to deserialize token: failed deserializing token: failed unmarshalling token: failed to unmarshal to TypedToken: asn1: syntax error: sequence truncated", + }, + { + name: "failed to deserialize token 2", + inputs: func() (*token2.TokensService, driver.TokenOutput, error) { + ts, err := token2.NewTokensService(nil, pp, &mock.IdentityDeserializer{}) + if err != nil { + return nil, nil, err + } + return ts, []byte{}, nil + }, + wantErr: true, + expectedError: "failed to deserialize token: failed deserializing token: failed unmarshalling token: 
failed to unmarshal to TypedToken: asn1: syntax error: sequence truncated", + }, + { + name: "failed to deserialize token 3", + inputs: func() (*token2.TokensService, driver.TokenOutput, error) { + ts, err := token2.NewTokensService(nil, pp, &mock.IdentityDeserializer{}) + if err != nil { + return nil, nil, err + } + return ts, []byte{0, 1, 2, 3}, nil + }, + wantErr: true, + expectedError: "failed to deserialize token: failed deserializing token: failed unmarshalling token: failed to unmarshal to TypedToken: asn1: structure error: tags don't match (16 vs {class:0 tag:0 length:1 isCompound:false}) {optional:false explicit:false application:false private:false defaultValue: tag: stringType:0 timeType:0 set:false omitEmpty:false} TypedToken @2", + }, + { + name: "failed to deserialize token 4", + inputs: func() (*token2.TokensService, driver.TokenOutput, error) { + id := &mock.IdentityDeserializer{} + id.RecipientsReturns(nil, nil) + ts, err := token2.NewTokensService(nil, pp, id) + if err != nil { + return nil, nil, err + } + raw, err := comm.WrapTokenWithType([]byte{0, 1, 2, 3}) + if err != nil { + return nil, nil, err + } + return ts, driver.TokenOutput(raw), nil + }, + wantErr: true, + expectedError: "failed to deserialize token: failed unmarshalling token: proto: cannot parse invalid wire-format data", + }, + { + name: "failed identity deserialize", + inputs: func() (*token2.TokensService, driver.TokenOutput, error) { + id := &mock.IdentityDeserializer{} + id.RecipientsReturns(nil, errors.New("pineapple")) + ts, err := token2.NewTokensService(nil, pp, id) + if err != nil { + return nil, nil, err + } + tok := &token2.Token{} + raw, err := tok.Serialize() + if err != nil { + return nil, nil, err + } + return ts, raw, nil + }, + wantErr: true, + expectedError: "failed to get recipients: pineapple", + }, + { + name: "success", + inputs: func() (*token2.TokensService, driver.TokenOutput, error) { + id := &mock.IdentityDeserializer{} + 
id.RecipientsReturns([]driver.Identity{driver.Identity("alice")}, nil) + ts, err := token2.NewTokensService(nil, pp, id) + if err != nil { + return nil, nil, err + } + tok := &token2.Token{} + raw, err := tok.Serialize() + if err != nil { + return nil, nil, err + } + return ts, raw, nil + }, + wantErr: false, + expectedIdentities: []driver.Identity{driver.Identity("alice")}, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + ts, outputs, err := tt.inputs() + assert.NoError(t, err) + identities, err := ts.Recipients(outputs) + if tt.wantErr { + assert.Error(t, err) + assert.EqualError(t, err, tt.expectedError) + assert.Nil(t, identities) + } else { + assert.NoError(t, err) + assert.NotNil(t, ts) + assert.Equal(t, tt.expectedIdentities, identities) + } + }) + } +} + +func TestTokensService_Deobfuscate(t *testing.T) { + pp, err := setup.Setup(32, []byte("issuer public key"), math.FP256BN_AMCL) + assert.NoError(t, err) + + tests := []struct { + name string + inputs func() (*token2.TokensService, driver.TokenOutput, driver.TokenOutputMetadata, error) + wantErr bool + expectedError string + }{ + { + name: "failed to deserialize token", + inputs: func() (*token2.TokensService, driver.TokenOutput, driver.TokenOutputMetadata, error) { + ts, err := token2.NewTokensService(nil, pp, &mock.IdentityDeserializer{}) + if err != nil { + return nil, nil, nil, err + } + return ts, nil, nil, nil + }, + wantErr: true, + expectedError: "failed to deobfuscate: failed to deobfuscate comm token: failed to deobfuscate token: failed getting token output: failed to deserialize token: failed deserializing token: failed unmarshalling token: failed to unmarshal to TypedToken: asn1: syntax error: sequence truncated\nfailed to deobfuscate fabtoken token: failed unmarshalling token: failed deserializing token: failed unmarshalling token: failed to unmarshal to TypedToken: asn1: syntax error: sequence truncated", + }, + { + name: "failed to deserialize token 2", + inputs: 
func() (*token2.TokensService, driver.TokenOutput, driver.TokenOutputMetadata, error) { + ts, err := token2.NewTokensService(nil, pp, &mock.IdentityDeserializer{}) + if err != nil { + return nil, nil, nil, err + } + return ts, []byte{}, nil, nil + }, + wantErr: true, + expectedError: "failed to deobfuscate: failed to deobfuscate comm token: failed to deobfuscate token: failed getting token output: failed to deserialize token: failed deserializing token: failed unmarshalling token: failed to unmarshal to TypedToken: asn1: syntax error: sequence truncated\nfailed to deobfuscate fabtoken token: failed unmarshalling token: failed deserializing token: failed unmarshalling token: failed to unmarshal to TypedToken: asn1: syntax error: sequence truncated", + }, + { + name: "failed to deserialize token 3", + inputs: func() (*token2.TokensService, driver.TokenOutput, driver.TokenOutputMetadata, error) { + ts, err := token2.NewTokensService(nil, pp, &mock.IdentityDeserializer{}) + if err != nil { + return nil, nil, nil, err + } + return ts, []byte{0, 1, 2, 3}, nil, nil + }, + wantErr: true, + expectedError: "failed to deobfuscate: failed to deobfuscate comm token: failed to deobfuscate token: failed getting token output: failed to deserialize token: failed deserializing token: failed unmarshalling token: failed to unmarshal to TypedToken: asn1: structure error: tags don't match (16 vs {class:0 tag:0 length:1 isCompound:false}) {optional:false explicit:false application:false private:false defaultValue: tag: stringType:0 timeType:0 set:false omitEmpty:false} TypedToken @2\nfailed to deobfuscate fabtoken token: failed unmarshalling token: failed deserializing token: failed unmarshalling token: failed to unmarshal to TypedToken: asn1: structure error: tags don't match (16 vs {class:0 tag:0 length:1 isCompound:false}) {optional:false explicit:false application:false private:false defaultValue: tag: stringType:0 timeType:0 set:false omitEmpty:false} TypedToken @2", + }, + { + 
name: "failed to deserialize fabtoken metadata", + inputs: func() (*token2.TokensService, driver.TokenOutput, driver.TokenOutputMetadata, error) { + ts, err := token2.NewTokensService(nil, pp, &mock.IdentityDeserializer{}) + if err != nil { + return nil, nil, nil, err + } + tok := &actions.Output{} + raw, err := tok.Serialize() + if err != nil { + return nil, nil, nil, err + } + return ts, raw, nil, nil + }, + wantErr: true, + expectedError: "failed to deobfuscate: failed to deobfuscate comm token: failed to deobfuscate token: failed getting token output: failed to deserialize token: failed deserializing token: invalid token type [1]\nfailed to deobfuscate fabtoken token: failed unmarshalling token metadata: failed deserializing metadata: failed unmarshalling token: failed to unmarshal to TypedToken: asn1: syntax error: sequence truncated", + }, + { + name: "failed to deserialize fabtoken owner identity", + inputs: func() (*token2.TokensService, driver.TokenOutput, driver.TokenOutputMetadata, error) { + des := &mock.IdentityDeserializer{} + des.RecipientsReturns(nil, errors.New("pineapple")) + ts, err := token2.NewTokensService(nil, pp, des) + if err != nil { + return nil, nil, nil, err + } + tok := &actions.Output{} + raw, err := tok.Serialize() + if err != nil { + return nil, nil, nil, err + } + + meta := &actions.OutputMetadata{} + metaRaw, err := meta.Serialize() + if err != nil { + return nil, nil, nil, err + } + + return ts, raw, metaRaw, nil + }, + wantErr: true, + expectedError: "failed to deobfuscate: failed to deobfuscate comm token: failed to deobfuscate token: failed getting token output: failed to deserialize token: failed deserializing token: invalid token type [1]\nfailed to deobfuscate fabtoken token: failed to get recipients: pineapple", + }, + { + name: "fabtoken output, cannot deserialize output", + inputs: func() (*token2.TokensService, driver.TokenOutput, driver.TokenOutputMetadata, error) { + des := &mock.IdentityDeserializer{} + 
des.RecipientsReturns(nil, errors.New("pineapple")) + ts, err := token2.NewTokensService(nil, pp, des) + if err != nil { + return nil, nil, nil, err + } + tok := &actions.Output{} + raw, err := tok.Serialize() + if err != nil { + return nil, nil, nil, err + } + + meta := &actions.OutputMetadata{} + metaRaw, err := meta.Serialize() + if err != nil { + return nil, nil, nil, err + } + + return ts, raw, metaRaw, nil + }, + wantErr: true, + expectedError: "failed to deobfuscate: failed to deobfuscate comm token: failed to deobfuscate token: failed getting token output: failed to deserialize token: failed deserializing token: invalid token type [1]\nfailed to deobfuscate fabtoken token: failed to get recipients: pineapple", + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + ts, output, metadata, err := tt.inputs() + assert.NoError(t, err) + _, _, _, _, err = ts.Deobfuscate(output, metadata) + if tt.wantErr { + assert.Error(t, err) + assert.EqualError(t, err, tt.expectedError) + } else { + assert.NoError(t, err) + assert.NotNil(t, ts) + } + }) + } +} diff --git a/token/core/zkatdlog/nogh/v1/token/token.go b/token/core/zkatdlog/nogh/v1/token/token.go index 1d3040802d..07455d4d9e 100644 --- a/token/core/zkatdlog/nogh/v1/token/token.go +++ b/token/core/zkatdlog/nogh/v1/token/token.go @@ -53,9 +53,6 @@ func (t *Token) Deserialize(bytes []byte) error { if err != nil { return errors.Wrapf(err, "failed deserializing token") } - if typed.Type != comm.Type { - return errors.Errorf("invalid token type [%v]", typed.Type) - } token := &actions.Token{} if err := proto.Unmarshal(typed.Token, token); err != nil { return errors.Wrapf(err, "failed unmarshalling token") @@ -67,7 +64,7 @@ func (t *Token) Deserialize(bytes []byte) error { // ToClear returns Token in the clear func (t *Token) ToClear(meta *Metadata, pp *noghv1.PublicParams) (*token.Token, error) { - com, err := commit([]*math.Zr{math.Curves[pp.Curve].HashToZr([]byte(meta.Type)), meta.Value, 
meta.BlindingFactor}, pp.PedersenGenerators, math.Curves[pp.Curve]) + com, err := Commit([]*math.Zr{math.Curves[pp.Curve].HashToZr([]byte(meta.Type)), meta.Value, meta.BlindingFactor}, pp.PedersenGenerators, math.Curves[pp.Curve]) if err != nil { return nil, errors.Wrap(err, "cannot retrieve token in the clear: failed to check token data") } @@ -97,7 +94,7 @@ func computeTokens(tw []*Metadata, pp []*math.G1, c *math.Curve) ([]*math.G1, er var err error for i := 0; i < len(tw); i++ { hash := c.HashToZr([]byte(tw[i].Type)) - tokens[i], err = commit([]*math.Zr{hash, tw[i].Value, tw[i].BlindingFactor}, pp, c) + tokens[i], err = Commit([]*math.Zr{hash, tw[i].Value, tw[i].BlindingFactor}, pp, c) if err != nil { return nil, errors.WithMessagef(err, "failed to compute token [%d]", i) } @@ -144,13 +141,13 @@ func NewMetadata(curve math.CurveID, tokenType token.Type, values []uint64, bfs // Deserialize un-marshals Metadata func (m *Metadata) Deserialize(b []byte) error { - typed, err := comm.UnmarshalTypedToken(b) + typed, err := comm.UnmarshalTypedMetadata(b) if err != nil { - return errors.Wrapf(err, "failed deserializing metadata") + return errors.Wrapf(err, "failed to deserialize metadata") } metadata := &actions.TokenMetadata{} - if err := proto.Unmarshal(typed.Token, metadata); err != nil { - return errors.Wrapf(err, "failed unmarshalling metadata") + if err := proto.Unmarshal(typed.Metadata, metadata); err != nil { + return errors.Wrapf(err, "failed to deserialize metadata") } m.Type = token.Type(metadata.Type) m.Value, err = utils.FromZrProto(metadata.Value) @@ -171,11 +168,11 @@ func (m *Metadata) Deserialize(b []byte) error { func (m *Metadata) Serialize() ([]byte, error) { value, err := utils.ToProtoZr(m.Value) if err != nil { - return nil, errors.Wrapf(err, "failed to deserialize metadata") + return nil, errors.Wrapf(err, "failed to serialize metadata") } blindingFactor, err := utils.ToProtoZr(m.BlindingFactor) if err != nil { - return nil, errors.Wrapf(err, 
"failed to deserialize metadata") + return nil, errors.Wrapf(err, "failed to serialize metadata") } raw, err := proto.Marshal(&actions.TokenMetadata{ Type: string(m.Type), @@ -184,7 +181,7 @@ func (m *Metadata) Serialize() ([]byte, error) { Issuer: &pp.Identity{Raw: m.Issuer}, }) if err != nil { - return nil, errors.Wrapf(err, "failed serializing token") + return nil, errors.Wrapf(err, "failed to serialize metadata") } return comm.WrapMetadataWithType(raw) } @@ -198,7 +195,11 @@ func (m *Metadata) Clone() *Metadata { } } -func commit(vector []*math.Zr, generators []*math.G1, c *math.Curve) (*math.G1, error) { +// Commit computes the Pedersen commitment of the passed elements using the passed bases +func Commit(vector []*math.Zr, generators []*math.G1, c *math.Curve) (*math.G1, error) { + if len(generators) != len(vector) { + return nil, errors.Errorf("number of generators is not equal to number of vector elements, [%d]!=[%d]", len(generators), len(vector)) + } com := c.NewG1() for i := range vector { if vector[i] == nil { diff --git a/token/core/zkatdlog/nogh/v1/token/token_test.go b/token/core/zkatdlog/nogh/v1/token/token_test.go index 411fde65d1..baf64976d7 100644 --- a/token/core/zkatdlog/nogh/v1/token/token_test.go +++ b/token/core/zkatdlog/nogh/v1/token/token_test.go @@ -10,9 +10,13 @@ import ( "testing" math "github.com/IBM/mathlib" + fabtokenv1 "github.com/hyperledger-labs/fabric-token-sdk/token/core/fabtoken/v1/actions" v1 "github.com/hyperledger-labs/fabric-token-sdk/token/core/zkatdlog/nogh/v1/setup" token2 "github.com/hyperledger-labs/fabric-token-sdk/token/core/zkatdlog/nogh/v1/token" + "github.com/hyperledger-labs/fabric-token-sdk/token/services/tokens" + "github.com/hyperledger-labs/fabric-token-sdk/token/services/tokens/core/comm" token3 "github.com/hyperledger-labs/fabric-token-sdk/token/token" + "github.com/pkg/errors" "github.com/stretchr/testify/assert" ) @@ -63,12 +67,508 @@ func FuzzSerialization(f *testing.F) { assert.NoError(f, err) 
assert.NotNil(t, raw) - token2 := &token2.Token{} - err = token2.Deserialize(raw) + desToken := &token2.Token{} + err = desToken.Deserialize(raw) if err != nil { - t.Errorf("failed to deserialize token [owner: %s, putData: %v]: [%v]", owner, putData, err) + t.Errorf("failed to deserialize metadata [owner: %s, putData: %v]: [%v]", owner, putData, err) } - assert.Equal(t, len(token.Owner), len(token2.Owner), "owner mismatch [owner: %s, putData: %v]", owner, putData) - assert.Equal(t, token.Data, token2.Data) + assert.Equal(t, len(token.Owner), len(desToken.Owner), "owner mismatch [owner: %s, putData: %v]", owner, putData) + assert.Equal(t, token.Data, desToken.Data) }) } + +func TestTokenGetOwner(t *testing.T) { + token := &token2.Token{ + Owner: []byte("Alice"), + } + assert.Equal(t, token.GetOwner(), token.Owner) +} + +func TestTokenIsRedeem(t *testing.T) { + token := &token2.Token{ + Owner: []byte("Alice"), + } + assert.False(t, token.IsRedeem()) + + token = &token2.Token{} + assert.True(t, token.IsRedeem()) + + token = &token2.Token{ + Owner: []byte{}, + } + assert.True(t, token.IsRedeem()) +} + +func TestGetTokensWithWitness(t *testing.T) { + tests := []struct { + name string + values []uint64 + tokenType token3.Type + pp []*math.G1 + curve *math.Curve + validate func([]*math.G1, []*token2.Metadata) error + wantErr bool + expectedError string + }{ + { + name: "curve is nil", + wantErr: true, + expectedError: "cannot get tokens with witness: please initialize curve", + }, + { + name: "curve is not nil", + curve: math.Curves[math.FP256BN_AMCL], + wantErr: false, + validate: func(tokens []*math.G1, data []*token2.Metadata) error { + if len(tokens) != 0 { + return errors.New("tokens should be empty") + } + if len(data) != 0 { + return errors.New("tokens should be empty") + } + return nil + }, + }, + { + name: "number of generators is not equal to number of vector elements", + values: []uint64{10}, + tokenType: "token type", + pp: nil, + curve: 
math.Curves[math.FP256BN_AMCL], + wantErr: true, + expectedError: "cannot get tokens with witness: failed to compute token [0]: number of generators is not equal to number of vector elements, [0]!=[3]", + }, + { + name: "success", + values: []uint64{10}, + tokenType: "token type", + pp: []*math.G1{ + math.Curves[math.FP256BN_AMCL].NewG1(), + math.Curves[math.FP256BN_AMCL].NewG1(), + math.Curves[math.FP256BN_AMCL].NewG1(), + }, + curve: math.Curves[math.FP256BN_AMCL], + wantErr: false, + validate: func(toks []*math.G1, data []*token2.Metadata) error { + if len(toks) != 1 { + return errors.New("one token was expected") + } + if len(data) != 1 { + return errors.New("one data was expected") + } + c := math.Curves[math.FP256BN_AMCL] + pp := []*math.G1{ + math.Curves[math.FP256BN_AMCL].NewG1(), + math.Curves[math.FP256BN_AMCL].NewG1(), + math.Curves[math.FP256BN_AMCL].NewG1(), + } + + for i, token := range toks { + hash := c.HashToZr([]byte(data[i].Type)) + tok, err := token2.Commit( + []*math.Zr{ + hash, + data[i].Value, + data[i].BlindingFactor, + }, + pp, + c, + ) + if err != nil { + return err + } + if !token.Equals(tok) { + return errors.New("token does not match") + } + } + return nil + }, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + g1s, witnesses, err := token2.GetTokensWithWitness( + tt.values, + tt.tokenType, + tt.pp, + tt.curve, + ) + if tt.wantErr { + assert.Error(t, err) + assert.EqualError(t, err, tt.expectedError) + } else { + assert.NoError(t, err) + assert.NoError(t, tt.validate(g1s, witnesses)) + } + }) + } +} + +func TestTokenValidate(t *testing.T) { + tests := []struct { + name string + token func() (*token2.Token, error) + owner bool + wantErr bool + expectedError string + }{ + { + name: "owner is nil", + owner: true, + token: func() (*token2.Token, error) { + return &token2.Token{}, nil + }, + wantErr: true, + expectedError: "token owner cannot be empty", + }, + { + name: "owner is empty", + owner: true, + token: 
func() (*token2.Token, error) { + return &token2.Token{Owner: []byte{}}, nil + }, + wantErr: true, + expectedError: "token owner cannot be empty", + }, + { + name: "data is empty", + owner: true, + token: func() (*token2.Token, error) { + return &token2.Token{Owner: []byte("owner")}, nil + }, + wantErr: true, + expectedError: "token data cannot be empty", + }, + { + name: "data is empty with no owner", + owner: false, + token: func() (*token2.Token, error) { + return &token2.Token{}, nil + }, + wantErr: true, + expectedError: "token data cannot be empty", + }, + { + name: "valid with no owner", + owner: false, + token: func() (*token2.Token, error) { + return &token2.Token{Data: &math.G1{}}, nil + }, + wantErr: false, + }, + { + name: "valid with owner", + owner: true, + token: func() (*token2.Token, error) { + return &token2.Token{Owner: []byte("owner"), Data: &math.G1{}}, nil + }, + wantErr: false, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + tok, err := tt.token() + assert.NoError(t, err) + err = tok.Validate(tt.owner) + if tt.wantErr { + assert.Error(t, err) + assert.EqualError(t, err, tt.expectedError) + } else { + assert.NoError(t, err) + } + }) + } +} + +func TestTokenDeserialize(t *testing.T) { + tests := []struct { + name string + token func() (*token2.Token, []byte, error) + owner bool + wantErr bool + expectedError string + }{ + { + name: "nil raw", + owner: true, + token: func() (*token2.Token, []byte, error) { + return nil, nil, nil + }, + wantErr: true, + expectedError: "failed deserializing token: failed unmarshalling token: failed to unmarshal to TypedToken: asn1: syntax error: sequence truncated", + }, + { + name: "empty raw", + owner: true, + token: func() (*token2.Token, []byte, error) { + return nil, []byte{}, nil + }, + wantErr: true, + expectedError: "failed deserializing token: failed unmarshalling token: failed to unmarshal to TypedToken: asn1: syntax error: sequence truncated", + }, + { + name: "invalid 
raw", + owner: true, + token: func() (*token2.Token, []byte, error) { + return nil, []byte{0, 1, 2, 3}, nil + }, + wantErr: true, + expectedError: "failed deserializing token: failed unmarshalling token: failed to unmarshal to TypedToken: asn1: structure error: tags don't match (16 vs {class:0 tag:0 length:1 isCompound:false}) {optional:false explicit:false application:false private:false defaultValue: tag: stringType:0 timeType:0 set:false omitEmpty:false} TypedToken @2", + }, + { + name: "invalid token type", + owner: true, + token: func() (*token2.Token, []byte, error) { + raw, err := tokens.WrapWithType(-1, []byte{0, 1, 2, 3}) + return nil, raw, err + }, + wantErr: true, + expectedError: "failed deserializing token: invalid token type [-1]", + }, + { + name: "valid token raw, nil", + owner: true, + token: func() (*token2.Token, []byte, error) { + raw, err := tokens.WrapWithType(comm.Type, nil) + return &token2.Token{}, raw, err + }, + wantErr: false, + }, + { + name: "invalid token raw, invalid", + owner: true, + token: func() (*token2.Token, []byte, error) { + raw, err := tokens.WrapWithType(comm.Type, []byte{0, 1, 2, 3}) + return nil, raw, err + }, + wantErr: true, + expectedError: "failed unmarshalling token: proto: cannot parse invalid wire-format data", + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + tok, raw, err := tt.token() + assert.NoError(t, err) + tok2 := &token2.Token{} + err = tok2.Deserialize(raw) + if tt.wantErr { + assert.Error(t, err) + assert.EqualError(t, err, tt.expectedError) + } else { + assert.NoError(t, err) + assert.Equal(t, tok, tok2) + } + }) + } +} + +func TestMetadataDeserialize(t *testing.T) { + tests := []struct { + name string + metadata func() (*token2.Metadata, []byte, error) + owner bool + wantErr bool + expectedError string + }{ + { + name: "nil raw", + owner: true, + metadata: func() (*token2.Metadata, []byte, error) { + return nil, nil, nil + }, + wantErr: true, + expectedError: "failed to 
deserialize metadata: failed unmarshalling metadata: failed to unmarshal to TypedMetadata: asn1: syntax error: sequence truncated", + }, + { + name: "empty raw", + owner: true, + metadata: func() (*token2.Metadata, []byte, error) { + return nil, []byte{}, nil + }, + wantErr: true, + expectedError: "failed to deserialize metadata: failed unmarshalling metadata: failed to unmarshal to TypedMetadata: asn1: syntax error: sequence truncated", + }, + { + name: "invalid raw", + owner: true, + metadata: func() (*token2.Metadata, []byte, error) { + return nil, []byte{0, 1, 2, 3}, nil + }, + wantErr: true, + expectedError: "failed to deserialize metadata: failed unmarshalling metadata: failed to unmarshal to TypedMetadata: asn1: structure error: tags don't match (16 vs {class:0 tag:0 length:1 isCompound:false}) {optional:false explicit:false application:false private:false defaultValue: tag: stringType:0 timeType:0 set:false omitEmpty:false} TypedMetadata @2", + }, + { + name: "invalid metadata type", + owner: true, + metadata: func() (*token2.Metadata, []byte, error) { + raw, err := tokens.WrapWithType(-1, []byte{0, 1, 2, 3}) + return nil, raw, err + }, + wantErr: true, + expectedError: "failed to deserialize metadata: invalid metadata type [-1]", + }, + { + name: "valid metadata raw, nil", + owner: true, + metadata: func() (*token2.Metadata, []byte, error) { + raw, err := tokens.WrapWithType(comm.Type, nil) + return &token2.Metadata{}, raw, err + }, + wantErr: false, + }, + { + name: "invalid metadata raw, invalid", + owner: true, + metadata: func() (*token2.Metadata, []byte, error) { + raw, err := tokens.WrapWithType(comm.Type, []byte{0, 1, 2, 3}) + return nil, raw, err + }, + wantErr: true, + expectedError: "failed to deserialize metadata: proto: cannot parse invalid wire-format data", + }, + { + name: "invalid metadata raw, invalid", + owner: true, + metadata: func() (*token2.Metadata, []byte, error) { + raw, err := tokens.WrapWithType(comm.Type, []byte{0, 1, 2, 3}) + 
return nil, raw, err + }, + wantErr: true, + expectedError: "failed to deserialize metadata: proto: cannot parse invalid wire-format data", + }, + { + name: "valid metadata", + owner: true, + metadata: func() (*token2.Metadata, []byte, error) { + c := math.Curves[math.BN254] + rand, err := c.Rand() + assert.NoError(t, err) + metadata := &token2.Metadata{ + Type: "token type", + Value: c.NewRandomZr(rand), + BlindingFactor: c.NewRandomZr(rand), + Issuer: []byte("issuer"), + } + raw, err := metadata.Serialize() + return metadata, raw, err + }, + wantErr: false, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + metadata, raw, err := tt.metadata() + assert.NoError(t, err) + metadata2 := &token2.Metadata{} + err = metadata2.Deserialize(raw) + if tt.wantErr { + assert.Error(t, err) + assert.EqualError(t, err, tt.expectedError) + } else { + assert.NoError(t, err) + assert.Equal(t, metadata, metadata2) + } + }) + } +} + +func TestUpgradeWitnessValidate(t *testing.T) { + tests := []struct { + name string + token func() (*token2.UpgradeWitness, error) + wantErr bool + expectedError string + }{ + { + name: "missing FabToken", + token: func() (*token2.UpgradeWitness, error) { + return &token2.UpgradeWitness{}, nil + }, + wantErr: true, + expectedError: "missing FabToken", + }, + { + name: "missing FabToken.Owner", + token: func() (*token2.UpgradeWitness, error) { + return &token2.UpgradeWitness{ + FabToken: &fabtokenv1.Output{}, + BlindingFactor: nil, + }, nil + }, + wantErr: true, + expectedError: "missing FabToken.Owner", + }, + { + name: "missing FabToken.Type", + token: func() (*token2.UpgradeWitness, error) { + return &token2.UpgradeWitness{ + FabToken: &fabtokenv1.Output{ + Owner: []byte("owner"), + Type: "", + Quantity: "", + }, + BlindingFactor: nil, + }, nil + }, + wantErr: true, + expectedError: "missing FabToken.Type", + }, + { + name: "missing FabToken.Quantity", + token: func() (*token2.UpgradeWitness, error) { + return 
&token2.UpgradeWitness{ + FabToken: &fabtokenv1.Output{ + Owner: []byte("owner"), + Type: "token type", + Quantity: "", + }, + BlindingFactor: nil, + }, nil + }, + wantErr: true, + expectedError: "missing FabToken.Quantity", + }, + { + name: "missing BlindingFactor", + token: func() (*token2.UpgradeWitness, error) { + return &token2.UpgradeWitness{ + FabToken: &fabtokenv1.Output{ + Owner: []byte("owner"), + Type: "token type", + Quantity: "quantity", + }, + BlindingFactor: nil, + }, nil + }, + wantErr: true, + expectedError: "missing BlindingFactor", + }, + { + name: "success", + token: func() (*token2.UpgradeWitness, error) { + return &token2.UpgradeWitness{ + FabToken: &fabtokenv1.Output{ + Owner: []byte("owner"), + Type: "token type", + Quantity: "quantity", + }, + BlindingFactor: &math.Zr{}, + }, nil + }, + wantErr: false, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + tok, err := tt.token() + assert.NoError(t, err) + err = tok.Validate() + if tt.wantErr { + assert.Error(t, err) + assert.EqualError(t, err, tt.expectedError) + } else { + assert.NoError(t, err) + } + }) + } +}