diff --git a/.golangci.yml b/.golangci.yml
index d7cae30..fc3f925 100644
--- a/.golangci.yml
+++ b/.golangci.yml
@@ -11,7 +11,7 @@ run:
   tests: true
   build-tags: [""]
   skip-dirs:
-    - pkg/crypto/primitive/bbs12381g2pub/internal/kilic/bls12-381
+    - method/sidetreelongform/sidetree-core/docutil

 output:
   format: colored-line-number
diff --git a/doc/json/canonicalizer/canonicalizer.go b/doc/json/canonicalizer/canonicalizer.go
new file mode 100644
index 0000000..8b5325b
--- /dev/null
+++ b/doc/json/canonicalizer/canonicalizer.go
@@ -0,0 +1,29 @@
+/*
+Copyright SecureKey Technologies Inc. All Rights Reserved.
+
+SPDX-License-Identifier: Apache-2.0
+*/
+
+package canonicalizer
+
+import (
+	"encoding/json"
+
+	"github.com/trustbloc/did-go/doc/json/canonicalizer/internal/jsoncanonicalizer"
+)
+
+// MarshalCanonical marshals the given value into canonical JSON form using JCS (RFC 8785) canonicalization.
+func MarshalCanonical(value interface{}) ([]byte, error) {
+	valueBytes, ok := value.([]byte)
+
+	if !ok {
+		var err error
+
+		valueBytes, err = json.Marshal(value)
+		if err != nil {
+			return nil, err
+		}
+	}
+
+	return jsoncanonicalizer.Transform(valueBytes)
+}
diff --git a/doc/json/canonicalizer/canonicalizer_test.go b/doc/json/canonicalizer/canonicalizer_test.go
new file mode 100644
index 0000000..83e9cc3
--- /dev/null
+++ b/doc/json/canonicalizer/canonicalizer_test.go
@@ -0,0 +1,43 @@
+/*
+Copyright SecureKey Technologies Inc. All Rights Reserved.
+
+SPDX-License-Identifier: Apache-2.0
+*/
+
+package canonicalizer
+
+import (
+	"testing"
+
+	"github.com/stretchr/testify/require"
+)
+
+func TestMarshalCanonical(t *testing.T) {
+	t.Run("success", func(t *testing.T) {
+		test := struct {
+			Beta  string `json:"beta"`
+			Alpha string `json:"alpha"`
+		}{
+			Beta:  "beta",
+			Alpha: "alpha",
+		}
+
+		result, err := MarshalCanonical(test)
+		require.NoError(t, err)
+		require.Equal(t, `{"alpha":"alpha","beta":"beta"}`, string(result))
+	})
+
+	t.Run("success - accepts bytes", func(t *testing.T) {
+		result, err := MarshalCanonical([]byte(`{"beta":"beta","alpha":"alpha"}`))
+		require.NoError(t, err)
+		require.Equal(t, `{"alpha":"alpha","beta":"beta"}`, string(result))
+	})
+
+	t.Run("marshal error", func(t *testing.T) {
+		var c chan int
+		result, err := MarshalCanonical(c)
+		require.Error(t, err)
+		require.Empty(t, result)
+		require.Contains(t, err.Error(), "json: unsupported type: chan int")
+	})
+}
diff --git a/doc/json/canonicalizer/internal/jsoncanonicalizer/README.md b/doc/json/canonicalizer/internal/jsoncanonicalizer/README.md
new file mode 100644
index 0000000..5c91be7
--- /dev/null
+++ b/doc/json/canonicalizer/internal/jsoncanonicalizer/README.md
@@ -0,0 +1,4 @@
+## JSON Canonicalizer
+
+The files in this folder are copied AS-IS from the [Cyberphone JSON Canonicalization Go Library](https://github.com/cyberphone/json-canonicalization/tree/master/go/src/webpki.org/jsoncanonicalizer).
+The license details are available at [LICENSE](https://github.com/cyberphone/json-canonicalization/blob/master/LICENSE).
diff --git a/doc/json/canonicalizer/internal/jsoncanonicalizer/es6numfmt.go b/doc/json/canonicalizer/internal/jsoncanonicalizer/es6numfmt.go
new file mode 100644
index 0000000..2d9d426
--- /dev/null
+++ b/doc/json/canonicalizer/internal/jsoncanonicalizer/es6numfmt.go
@@ -0,0 +1,95 @@
+//
+// Copyright 2006-2019 WebPKI.org (http://webpki.org).
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
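Reviewer note: the canonical form produced by this new package is most often consumed by hashing it, so that JSON documents differing only in key order or number spelling yield the same digest. A minimal sketch of that pattern, assuming the module path added above (illustrative only, not part of this change set):

```go
package main

import (
	"crypto/sha256"
	"encoding/hex"
	"fmt"

	"github.com/trustbloc/did-go/doc/json/canonicalizer"
)

func main() {
	// Key order and number formatting differ, but the JCS form is identical.
	docA := []byte(`{"b":2,"a":1}`)
	docB := []byte(`{"a":1.0,"b":2}`)

	canonA, err := canonicalizer.MarshalCanonical(docA)
	if err != nil {
		panic(err)
	}

	canonB, _ := canonicalizer.MarshalCanonical(docB)

	fmt.Println(string(canonA))                   // {"a":1,"b":2}
	fmt.Println(string(canonA) == string(canonB)) // true

	// Hash the canonical form so logically equal documents share a digest.
	digest := sha256.Sum256(canonA)
	fmt.Println(hex.EncodeToString(digest[:]))
}
```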
+// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// + +// This package converts numbers in IEEE-754 double precision into the +// format specified for JSON in EcmaScript Version 6 and forward. +// The core application for this is canonicalization: +// https://tools.ietf.org/html/draft-rundgren-json-canonicalization-scheme-02 + +package jsoncanonicalizer + +import ( + "errors" + "math" + "strconv" + "strings" +) + +const invalidPattern uint64 = 0x7ff0000000000000 + +func NumberToJSON(ieeeF64 float64) (res string, err error) { //nolint:funlen,gocyclo,golint + ieeeU64 := math.Float64bits(ieeeF64) + + // Special case: NaN and Infinity are invalid in JSON + if (ieeeU64 & invalidPattern) == invalidPattern { + return "null", errors.New("Invalid JSON number: " + strconv.FormatUint(ieeeU64, 16)) + } + + // Special case: eliminate "-0" as mandated by the ES6-JSON/JCS specifications + if ieeeF64 == 0 { // Right, this line takes both -0 and 0 + return "0", nil + } + + // Deal with the sign separately + var sign string = "" + + if ieeeF64 < 0 { + ieeeF64 = -ieeeF64 + sign = "-" + } + + // ES6 has a unique "g" format + var format byte = 'e' + if ieeeF64 < 1e+21 && ieeeF64 >= 1e-6 { + format = 'f' + } + + // The following should (in "theory") do the trick: + es6Formatted := strconv.FormatFloat(ieeeF64, format, -1, 64) + + // Unfortunately Go version 1.11.4 is a bit buggy with respect to + // rounding for -1 precision which is dealt with below. + // https://github.com/golang/go/issues/29491 + exponent := strings.IndexByte(es6Formatted, 'e') + //nolint:nestif + if exponent > 0 { + gform := strconv.FormatFloat(ieeeF64, 'g', 17, 64) + if len(gform) == len(es6Formatted) { + // "g" occasionally produces another result which also is the correct one + es6Formatted = gform + } + // Go outputs "1e+09" which must be rewritten as "1e+9" + if es6Formatted[exponent+2] == '0' { + es6Formatted = es6Formatted[:exponent+2] + es6Formatted[exponent+3:] + } + } else if strings.IndexByte(es6Formatted, '.') < 0 && len(es6Formatted) >= 12 { + i := len(es6Formatted) + for es6Formatted[i-1] == '0' { + i-- + } + if i != len(es6Formatted) { + fix := strconv.FormatFloat(ieeeF64, 'f', 0, 64) + if fix[i] >= '5' { + // "f" with precision 0 occasionally produces another result which also is + // the correct one although it must be rounded to match the -1 precision + // (which fortunately seems to be correct with respect to trailing zeroes) + es6Formatted = fix[:i-1] + string(fix[i-1]+1) + es6Formatted[i:] + } + } + } + + return sign + es6Formatted, nil +} diff --git a/doc/json/canonicalizer/internal/jsoncanonicalizer/jsoncanonicalizer.go b/doc/json/canonicalizer/internal/jsoncanonicalizer/jsoncanonicalizer.go new file mode 100644 index 0000000..f3ca1f6 --- /dev/null +++ b/doc/json/canonicalizer/internal/jsoncanonicalizer/jsoncanonicalizer.go @@ -0,0 +1,422 @@ +// +// Copyright 2006-2019 WebPKI.org (http://webpki.org). +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
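Reviewer note: the ES6/JCS number-serialization rules implemented in es6numfmt.go above can be illustrated with a Go example test. This is a sketch only (it would have to live in the same package, and the vendored folder is intentionally kept AS-IS); the expected outputs follow the rules described in the comments above — negative zero collapses to "0", values in [1e-6, 1e21) use fixed notation, and the exponent's leading zero is stripped:

```go
package jsoncanonicalizer

import (
	"fmt"
	"math"
)

// ExampleNumberToJSON exercises the formatting rules above.
func ExampleNumberToJSON() {
	for _, f := range []float64{math.Copysign(0, -1), 1.0, 0.000001, 1e-7, 1e21} {
		s, err := NumberToJSON(f)
		if err != nil {
			fmt.Println(err)
			continue
		}
		fmt.Println(s)
	}
	// Output:
	// 0
	// 1
	// 0.000001
	// 1e-7
	// 1e+21
}
```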
+// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// + +// This package transforms JSON data in UTF-8 according to: +// https://tools.ietf.org/html/draft-rundgren-json-canonicalization-scheme-02 + +package jsoncanonicalizer + +import ( + "container/list" + "errors" + "fmt" + "strconv" + "strings" + "unicode/utf16" +) + +type nameValueType struct { + name string + sortKey []uint16 + value string +} + +// JSON standard escapes (modulo \u). +var asciiEscapes = []byte{'\\', '"', 'b', 'f', 'n', 'r', 't'} //nolint:gochecknoglobals +var binaryEscapes = []byte{'\\', '"', '\b', '\f', '\n', '\r', '\t'} //nolint:gochecknoglobals + +// JSON literals. +var literals = []string{"true", "false", "null"} //nolint:gochecknoglobals + +func Transform(jsonData []byte) (result []byte, e error) { //nolint:funlen,gocyclo,golint + // JSON data MUST be UTF-8 encoded + var jsonDataLength int = len(jsonData) + + // Current pointer in jsonData + var index int = 0 + + // "Forward" declarations are needed for closures referring each other + var parseElement func() string + + var parseSimpleType func() string + + var parseQuotedString func() string + + var parseObject func() string + + var parseArray func() string + + var globalError error = nil + + checkError := func(e error) { + // We only honor the first reported error + if globalError == nil { + globalError = e + } + } + + setError := func(msg string) { + checkError(errors.New(msg)) + } + + isWhiteSpace := func(c byte) bool { + return c == 0x20 || c == 0x0a || c == 0x0d || c == 0x09 + } + + nextChar := func() byte { + if index < jsonDataLength { + c := jsonData[index] + if c > 0x7f { + setError("Unexpected non-ASCII character") + } + index++ + + return c + } + + setError("Unexpected EOF reached") + + return '"' + } + + scan := func() byte { + for { + c := nextChar() + if isWhiteSpace(c) { + continue + } + + return c + } + } + + scanFor := func(expected byte) { + c := scan() + if c != expected { + setError("Expected '" + string(expected) + "' but got '" + string(c) + "'") + } + } + + getUEscape := func() rune { + start := index + + nextChar() + nextChar() + nextChar() + nextChar() + + if globalError != nil { + return 0 + } + + u16, err := strconv.ParseUint(string(jsonData[start:index]), 16, 64) + checkError(err) + + return rune(u16) + } + + testNextNonWhiteSpaceChar := func() byte { + save := index + c := scan() + index = save + + return c + } + + decorateString := func(rawUTF8 string) string { + var quotedString strings.Builder + + quotedString.WriteByte('"') + CoreLoop: + for _, c := range []byte(rawUTF8) { + // Is this within the JSON standard escapes? 
+ for i, esc := range binaryEscapes { + if esc == c { + quotedString.WriteByte('\\') + quotedString.WriteByte(asciiEscapes[i]) + continue CoreLoop + } + } + if c < 0x20 { + // Other ASCII control characters must be escaped with \uhhhh + quotedString.WriteString(fmt.Sprintf("\\u%04x", c)) + } else { + quotedString.WriteByte(c) + } + } + quotedString.WriteByte('"') + + return quotedString.String() + } + + parseQuotedString = func() string { + var rawString strings.Builder + CoreLoop: + for globalError == nil { + var c byte + if index < jsonDataLength { + c = jsonData[index] + index++ + } else { + nextChar() + break + } + if c == '"' { + break + } + + //nolint:nestif + if c < ' ' { + setError("Unterminated string literal") + } else if c == '\\' { + // Escape sequence + c = nextChar() + if c == 'u' { + // The \u escape + firstUTF16 := getUEscape() + if utf16.IsSurrogate(firstUTF16) { + // If the first UTF-16 code unit has a certain value there must be + // another succeeding UTF-16 code unit as well + if nextChar() != '\\' || nextChar() != 'u' { + setError("Missing surrogate") + } else { + // Output the UTF-32 code point as UTF-8 + rawString.WriteRune(utf16.DecodeRune(firstUTF16, getUEscape())) + } + } else { + // Single UTF-16 code identical to UTF-32. Output as UTF-8 + rawString.WriteRune(firstUTF16) + } + } else if c == '/' { + // Benign but useless escape + rawString.WriteByte('/') + } else { + // The JSON standard escapes + for i, esc := range asciiEscapes { + if esc == c { + rawString.WriteByte(binaryEscapes[i]) + continue CoreLoop + } + } + setError("Unexpected escape: \\" + string(c)) + } + } else { + // Just an ordinary ASCII character alternatively a UTF-8 byte + // outside of ASCII. + // Note that properly formatted UTF-8 never clashes with ASCII + // making byte per byte search for ASCII break characters work + // as expected. + rawString.WriteByte(c) + } + } + + return rawString.String() + } + + parseSimpleType = func() string { + var token strings.Builder + index-- + + for globalError == nil { + c := testNextNonWhiteSpaceChar() + if c == ',' || c == ']' || c == '}' { + break + } + + c = nextChar() + + if isWhiteSpace(c) { + break + } + + token.WriteByte(c) + } + + if token.Len() == 0 { + setError("Missing argument") + } + + value := token.String() + // Is it a JSON literal? 
+ for _, literal := range literals { + if literal == value { + return literal + } + } + // Apparently not so we assume that it is a I-JSON number + ieeeF64, err := strconv.ParseFloat(value, 64) + checkError(err) + value, err = NumberToJSON(ieeeF64) + checkError(err) + + return value + } + + parseElement = func() string { + switch scan() { + case '{': + return parseObject() + case '"': + return decorateString(parseQuotedString()) + case '[': + return parseArray() + default: + return parseSimpleType() + } + } + + parseArray = func() string { + var arrayData strings.Builder + + arrayData.WriteByte('[') + + var next bool = false + + for globalError == nil && testNextNonWhiteSpaceChar() != ']' { + if next { + scanFor(',') + arrayData.WriteByte(',') + } else { + next = true + } + + arrayData.WriteString(parseElement()) + } + scan() + arrayData.WriteByte(']') + + return arrayData.String() + } + + lexicographicallyPrecedes := func(sortKey []uint16, e *list.Element) bool { + // Find the minimum length of the sortKeys + oldSortKey := e.Value.(nameValueType).sortKey + minLength := len(oldSortKey) + + if minLength > len(sortKey) { + minLength = len(sortKey) + } + + for q := 0; q < minLength; q++ { + diff := int(sortKey[q]) - int(oldSortKey[q]) + if diff < 0 { + // Smaller => Precedes + return true + } else if diff > 0 { + // Bigger => No match + return false + } + } + // The sortKeys compared equal up to minLength + if len(sortKey) < len(oldSortKey) { + // Shorter => Precedes + return true + } + + if len(sortKey) == len(oldSortKey) { + setError("Duplicate key: " + e.Value.(nameValueType).name) + } + // Longer => No match + return false + } + + parseObject = func() string { + nameValueList := list.New() + + var next bool = false + CoreLoop: + for globalError == nil && testNextNonWhiteSpaceChar() != '}' { + if next { + scanFor(',') + } + next = true + scanFor('"') + rawUTF8 := parseQuotedString() + if globalError != nil { + break + } + // Sort keys on UTF-16 code units + // Since UTF-8 doesn't have endianess this is just a value transformation + // In the Go case the transformation is UTF-8 => UTF-32 => UTF-16 + sortKey := utf16.Encode([]rune(rawUTF8)) + scanFor(':') + + nameValue := nameValueType{rawUTF8, sortKey, parseElement()} + + for e := nameValueList.Front(); e != nil; e = e.Next() { + // Check if the key is smaller than a previous key + if lexicographicallyPrecedes(sortKey, e) { + // Precedes => Insert before and exit sorting + nameValueList.InsertBefore(nameValue, e) + continue CoreLoop + } + // Continue searching for a possibly succeeding sortKey + // (which is straightforward since the list is ordered) + } + // The sortKey is either the first or is succeeding all previous sortKeys + nameValueList.PushBack(nameValue) + } + // Scan away '}' + scan() + // Now everything is sorted so we can properly serialize the object + var objectData strings.Builder + + objectData.WriteByte('{') + + next = false + + for e := nameValueList.Front(); e != nil; e = e.Next() { + if next { + objectData.WriteByte(',') + } + + next = true + nameValue := e.Value.(nameValueType) //nolint: errcheck + objectData.WriteString(decorateString(nameValue.name)) + objectData.WriteByte(':') + objectData.WriteString(nameValue.value) + } + + objectData.WriteByte('}') + + return objectData.String() + } + + // /////////////////////////////////////////////// + // This is where Transform actually begins... 
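Reviewer note: the normalization rules implemented above — keys ordered by UTF-16 code units, escape sequences reduced to the JSON standard set, numbers re-serialized per ES6 — are easiest to see end to end through the exported wrapper. A hedged sketch, again assuming the module path introduced earlier in this diff:

```go
package main

import (
	"fmt"

	"github.com/trustbloc/did-go/doc/json/canonicalizer"
)

func main() {
	// Keys arrive unsorted, with a \u-escaped name, a \u-escaped control
	// character, and non-canonical number spellings.
	in := []byte(`{"numbers": [1E+2, 0.0], "literals": [true, null], "\u0061": "\u000a"}`)

	out, err := canonicalizer.MarshalCanonical(in)
	if err != nil {
		panic(err)
	}

	fmt.Println(string(out))
	// {"a":"\n","literals":[true,null],"numbers":[100,0]}
}
```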
// + // /////////////////////////////////////////////// + var transformed string + + if testNextNonWhiteSpaceChar() == '[' { + scan() + + transformed = parseArray() + } else { + scanFor('{') + + transformed = parseObject() + } + + for index < jsonDataLength { + if !isWhiteSpace(jsonData[index]) { + setError("Improperly terminated JSON object") + break + } + index++ + } + + return []byte(transformed), globalError +} diff --git a/go.mod b/go.mod index f89d3e8..3753828 100644 --- a/go.mod +++ b/go.mod @@ -7,26 +7,32 @@ module github.com/trustbloc/did-go go 1.21 require ( + github.com/btcsuite/btcd v0.22.3 github.com/btcsuite/btcutil v1.0.3-0.20201208143702-a53e38424cce github.com/cenkalti/backoff/v4 v4.1.3 + github.com/evanphx/json-patch v4.1.0+incompatible github.com/go-jose/go-jose/v3 v3.0.1-0.20221117193127-916db76e8214 github.com/google/uuid v1.3.0 + github.com/gorilla/mux v1.8.0 github.com/mitchellh/mapstructure v1.5.0 github.com/multiformats/go-multibase v0.1.1 + github.com/multiformats/go-multihash v0.0.14 github.com/piprate/json-gold v0.5.1-0.20230111113000-6ddbe6e6f19f + github.com/pkg/errors v0.9.1 + github.com/square/go-jose/v3 v3.0.0-20200630053402-0a67ce9b0693 github.com/stretchr/testify v1.8.2 github.com/trustbloc/kms-go v0.0.0-20230906134914-b9afaf3b793d - github.com/trustbloc/sidetree-core-go v1.0.0 + github.com/trustbloc/logutil-go v1.0.0-rc1 github.com/xeipuuv/gojsonschema v1.2.0 + go.uber.org/zap v1.23.0 + golang.org/x/crypto v0.1.0 ) require ( github.com/IBM/mathlib v0.0.3-0.20230605104224-932ab92f2ce0 // indirect - github.com/btcsuite/btcd v0.22.3 // indirect github.com/consensys/bavard v0.1.13 // indirect github.com/consensys/gnark-crypto v0.9.1 // indirect github.com/davecgh/go-spew v1.1.1 // indirect - github.com/evanphx/json-patch v4.1.0+incompatible // indirect github.com/hyperledger/fabric-amcl v0.0.0-20230602173724-9e02669dceb2 // indirect github.com/kilic/bls12-381 v0.1.1-0.20210503002446-7b7597926c69 // indirect github.com/minio/blake2b-simd v0.0.0-20160723061019-3f5f724cb5b1 // indirect @@ -35,24 +41,20 @@ require ( github.com/mr-tron/base58 v1.2.0 // indirect github.com/multiformats/go-base32 v0.1.0 // indirect github.com/multiformats/go-base36 v0.1.0 // indirect - github.com/multiformats/go-multihash v0.0.14 // indirect github.com/multiformats/go-varint v0.0.6 // indirect - github.com/pkg/errors v0.9.1 // indirect + github.com/niemeyer/pretty v0.0.0-20200227124842-a10e7caefd8e // indirect github.com/pmezard/go-difflib v1.0.0 // indirect github.com/pquerna/cachecontrol v0.1.0 // indirect github.com/spaolacci/murmur3 v1.1.0 // indirect - github.com/square/go-jose/v3 v3.0.0-20200630053402-0a67ce9b0693 // indirect github.com/teserakt-io/golang-ed25519 v0.0.0-20210104091850-3888c087a4c8 // indirect - github.com/trustbloc/logutil-go v1.0.0-rc1 // indirect github.com/xeipuuv/gojsonpointer v0.0.0-20190905194746-02993c407bfb // indirect github.com/xeipuuv/gojsonreference v0.0.0-20180127040603-bd5ef7bd5415 // indirect go.opentelemetry.io/otel v1.14.0 // indirect go.opentelemetry.io/otel/trace v1.14.0 // indirect go.uber.org/atomic v1.9.0 // indirect go.uber.org/multierr v1.6.0 // indirect - go.uber.org/zap v1.23.0 // indirect - golang.org/x/crypto v0.1.0 // indirect golang.org/x/sys v0.11.0 // indirect + gopkg.in/check.v1 v1.0.0-20200227125254-8fa46927fb4f // indirect gopkg.in/yaml.v3 v3.0.1 // indirect rsc.io/tmplfunc v0.0.3 // indirect ) diff --git a/go.sum b/go.sum index 34fe91d..a8fdef8 100644 --- a/go.sum +++ b/go.sum @@ -43,6 +43,8 @@ github.com/google/go-cmp 
v0.5.9/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeN github.com/google/subcommands v1.2.0/go.mod h1:ZjhPrFU+Olkh9WazFPsl27BQ4UPiG37m3yTrtFlrHVk= github.com/google/uuid v1.3.0 h1:t6JiXgmwXMjEs8VusXIJk2BXHsn+wx8BZdTaoZ5fu7I= github.com/google/uuid v1.3.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= +github.com/gorilla/mux v1.8.0 h1:i40aqfkR1h2SlN9hojwV5ZA91wcXFOvkdNIeFDP5koI= +github.com/gorilla/mux v1.8.0/go.mod h1:DVbg23sWSpFRCP0SfiEN6jmj59UnW/n46BH5rLB71So= github.com/hpcloud/tail v1.0.0/go.mod h1:ab1qPbhIpdTxEkNHXyeSf5vhxWSCs/tWer42PpOxQnU= github.com/hyperledger/fabric-amcl v0.0.0-20230602173724-9e02669dceb2 h1:B1Nt8hKb//KvgGRprk0h1t4lCnwhE9/ryb1WqfZbV+M= github.com/hyperledger/fabric-amcl v0.0.0-20230602173724-9e02669dceb2/go.mod h1:X+DIyUsaTmalOpmpQfIvFZjKHQedrURQ5t4YqquX7lE= @@ -51,6 +53,8 @@ github.com/jrick/logrotate v1.0.0/go.mod h1:LNinyqDIJnpAur+b8yyulnQw/wDuN1+BYKlT github.com/kilic/bls12-381 v0.1.1-0.20210503002446-7b7597926c69 h1:kMJlf8z8wUcpyI+FQJIdGjAhfTww1y0AbQEv86bpVQI= github.com/kilic/bls12-381 v0.1.1-0.20210503002446-7b7597926c69/go.mod h1:tlkavyke+Ac7h8R3gZIjI5LKBcvMlSWnXNMgT3vZXo8= github.com/kkdai/bstream v0.0.0-20161212061736-f391b8402d23/go.mod h1:J+Gs4SYgM6CZQHDETBtE9HaSEkGmuNXF86RwHhHUvq4= +github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ= +github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI= github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY= github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE= github.com/leanovate/gopter v0.2.9 h1:fQjYxZaynp97ozCzfOyOuAGOU4aU/z37zf/tOujFk7c= @@ -111,8 +115,6 @@ github.com/trustbloc/kms-go v0.0.0-20230906134914-b9afaf3b793d h1:g74oJV+MT440kU github.com/trustbloc/kms-go v0.0.0-20230906134914-b9afaf3b793d/go.mod h1:1rvrHRuIRT7qLapB0lSpFgy1CjFunx85EyEzcZpIxG8= github.com/trustbloc/logutil-go v1.0.0-rc1 h1:rRJbvgQfrlUfyej+mY0nuQJymGqjRW4oZEwKi544F4c= github.com/trustbloc/logutil-go v1.0.0-rc1/go.mod h1:JlxT0oZfNKgIlSNtgc001WEeDMxlnAvOM43gNm8DQVc= -github.com/trustbloc/sidetree-core-go v1.0.0 h1:kzfKZOJ0sgDy9D1AYNcoR3JHutqtMtKvF2P9UwUcDjU= -github.com/trustbloc/sidetree-core-go v1.0.0/go.mod h1:jdxAFuorlIwFOGVW6O455/lZqxg2mZkRHNTEolcZdDI= github.com/xeipuuv/gojsonpointer v0.0.0-20180127040702-4e3ac2762d5f/go.mod h1:N2zxlSyiKSe5eX1tZViRH5QA0qijqEDrYZiPEAiq3wU= github.com/xeipuuv/gojsonpointer v0.0.0-20190905194746-02993c407bfb h1:zGWFAtiMcyryUHoUjUJX0/lt1H2+i2Ka2n+D3DImSNo= github.com/xeipuuv/gojsonpointer v0.0.0-20190905194746-02993c407bfb/go.mod h1:N2zxlSyiKSe5eX1tZViRH5QA0qijqEDrYZiPEAiq3wU= diff --git a/method/jwk/creator.go b/method/jwk/creator.go index 2b1493e..f629847 100644 --- a/method/jwk/creator.go +++ b/method/jwk/creator.go @@ -11,9 +11,9 @@ import ( "fmt" "github.com/trustbloc/kms-go/doc/jose/jwk" - "github.com/trustbloc/sidetree-core-go/pkg/canonicalizer" "github.com/trustbloc/did-go/doc/did" + "github.com/trustbloc/did-go/doc/json/canonicalizer" vdrapi "github.com/trustbloc/did-go/vdr/api" ) diff --git a/method/jwk/creator_test.go b/method/jwk/creator_test.go index 83820a9..d25404d 100644 --- a/method/jwk/creator_test.go +++ b/method/jwk/creator_test.go @@ -15,9 +15,9 @@ import ( "github.com/stretchr/testify/require" jwkapi "github.com/trustbloc/kms-go/doc/jose/jwk" "github.com/trustbloc/kms-go/doc/jose/jwk/jwksupport" - "github.com/trustbloc/sidetree-core-go/pkg/canonicalizer" "github.com/trustbloc/did-go/doc/did" + "github.com/trustbloc/did-go/doc/json/canonicalizer" 
"github.com/trustbloc/did-go/method/jwk" ) diff --git a/method/jwk/resolver_test.go b/method/jwk/resolver_test.go index 623102e..f87cb81 100644 --- a/method/jwk/resolver_test.go +++ b/method/jwk/resolver_test.go @@ -12,9 +12,9 @@ import ( "testing" "github.com/stretchr/testify/require" - "github.com/trustbloc/sidetree-core-go/pkg/canonicalizer" "github.com/trustbloc/did-go/doc/did" + "github.com/trustbloc/did-go/doc/json/canonicalizer" "github.com/trustbloc/did-go/method/jwk" ) diff --git a/method/sidetreelongform/dochandler/dochandler.go b/method/sidetreelongform/dochandler/dochandler.go index 87d9504..38bbf88 100644 --- a/method/sidetreelongform/dochandler/dochandler.go +++ b/method/sidetreelongform/dochandler/dochandler.go @@ -14,13 +14,13 @@ import ( "fmt" "strings" - "github.com/trustbloc/sidetree-core-go/pkg/api/operation" - "github.com/trustbloc/sidetree-core-go/pkg/api/protocol" - "github.com/trustbloc/sidetree-core-go/pkg/canonicalizer" - "github.com/trustbloc/sidetree-core-go/pkg/dochandler" - "github.com/trustbloc/sidetree-core-go/pkg/document" - "github.com/trustbloc/sidetree-core-go/pkg/docutil" - "github.com/trustbloc/sidetree-core-go/pkg/encoder" + "github.com/trustbloc/did-go/doc/json/canonicalizer" + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/api/operation" + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/api/protocol" + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/dochandler" + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/document" + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/docutil" + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/encoder" "github.com/trustbloc/did-go/method/sidetreelongform/dochandler/protocol/nsprovider" "github.com/trustbloc/did-go/method/sidetreelongform/dochandler/protocol/verprovider" diff --git a/method/sidetreelongform/dochandler/dochandler_test.go b/method/sidetreelongform/dochandler/dochandler_test.go index bb5523c..5988c59 100644 --- a/method/sidetreelongform/dochandler/dochandler_test.go +++ b/method/sidetreelongform/dochandler/dochandler_test.go @@ -11,10 +11,11 @@ import ( "testing" "github.com/stretchr/testify/require" - "github.com/trustbloc/sidetree-core-go/pkg/document" - "github.com/trustbloc/sidetree-core-go/pkg/encoder" - "github.com/trustbloc/sidetree-core-go/pkg/mocks" - "github.com/trustbloc/sidetree-core-go/pkg/versions/1_0/model" + + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/document" + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/encoder" + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/mocks" + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/versions/1_0/model" ) const ( diff --git a/method/sidetreelongform/dochandler/protocol/nsprovider/namespaceprovider.go b/method/sidetreelongform/dochandler/protocol/nsprovider/namespaceprovider.go index 67c0a5e..d27f636 100644 --- a/method/sidetreelongform/dochandler/protocol/nsprovider/namespaceprovider.go +++ b/method/sidetreelongform/dochandler/protocol/nsprovider/namespaceprovider.go @@ -10,7 +10,7 @@ import ( "fmt" "sync" - "github.com/trustbloc/sidetree-core-go/pkg/api/protocol" + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/api/protocol" ) // New creates new client version provider per namespace. 
diff --git a/method/sidetreelongform/dochandler/protocol/nsprovider/namespaceprovider_test.go b/method/sidetreelongform/dochandler/protocol/nsprovider/namespaceprovider_test.go index c8499cb..a542c4b 100644 --- a/method/sidetreelongform/dochandler/protocol/nsprovider/namespaceprovider_test.go +++ b/method/sidetreelongform/dochandler/protocol/nsprovider/namespaceprovider_test.go @@ -10,8 +10,9 @@ import ( "testing" "github.com/stretchr/testify/require" - "github.com/trustbloc/sidetree-core-go/pkg/api/protocol" - coremocks "github.com/trustbloc/sidetree-core-go/pkg/mocks" + + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/api/protocol" + coremocks "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/mocks" "github.com/trustbloc/did-go/method/sidetreelongform/dochandler/protocol/nsprovider" "github.com/trustbloc/did-go/method/sidetreelongform/dochandler/protocol/verprovider" diff --git a/method/sidetreelongform/dochandler/protocol/verprovider/versionprovider.go b/method/sidetreelongform/dochandler/protocol/verprovider/versionprovider.go index 796afd5..960ae23 100644 --- a/method/sidetreelongform/dochandler/protocol/verprovider/versionprovider.go +++ b/method/sidetreelongform/dochandler/protocol/verprovider/versionprovider.go @@ -10,7 +10,7 @@ import ( "fmt" "sort" - "github.com/trustbloc/sidetree-core-go/pkg/api/protocol" + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/api/protocol" ) // ClientVersionProvider implements client versions. diff --git a/method/sidetreelongform/dochandler/protocol/verprovider/versionprovider_test.go b/method/sidetreelongform/dochandler/protocol/verprovider/versionprovider_test.go index 1bcc73e..3f4f16a 100644 --- a/method/sidetreelongform/dochandler/protocol/verprovider/versionprovider_test.go +++ b/method/sidetreelongform/dochandler/protocol/verprovider/versionprovider_test.go @@ -10,8 +10,9 @@ import ( "testing" "github.com/stretchr/testify/require" - "github.com/trustbloc/sidetree-core-go/pkg/api/protocol" - coremocks "github.com/trustbloc/sidetree-core-go/pkg/mocks" + + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/api/protocol" + coremocks "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/mocks" "github.com/trustbloc/did-go/method/sidetreelongform/dochandler/protocol/verprovider" ) diff --git a/method/sidetreelongform/dochandler/protocolversion/clientregistry/clientregistry.go b/method/sidetreelongform/dochandler/protocolversion/clientregistry/clientregistry.go index 7d5e030..54f83ea 100644 --- a/method/sidetreelongform/dochandler/protocolversion/clientregistry/clientregistry.go +++ b/method/sidetreelongform/dochandler/protocolversion/clientregistry/clientregistry.go @@ -10,7 +10,7 @@ import ( "fmt" "sync" - "github.com/trustbloc/sidetree-core-go/pkg/api/protocol" + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/api/protocol" vercommon "github.com/trustbloc/did-go/method/sidetreelongform/dochandler/protocolversion/common" "github.com/trustbloc/did-go/method/sidetreelongform/dochandler/protocolversion/versions/common" diff --git a/method/sidetreelongform/dochandler/protocolversion/clientregistry/clientregistry_test.go b/method/sidetreelongform/dochandler/protocolversion/clientregistry/clientregistry_test.go index 9cacdb9..77275a0 100644 --- a/method/sidetreelongform/dochandler/protocolversion/clientregistry/clientregistry_test.go +++ b/method/sidetreelongform/dochandler/protocolversion/clientregistry/clientregistry_test.go @@ -10,7 +10,8 @@ import ( 
"testing" "github.com/stretchr/testify/require" - coremocks "github.com/trustbloc/sidetree-core-go/pkg/mocks" + + coremocks "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/mocks" "github.com/trustbloc/did-go/method/sidetreelongform/dochandler/protocolversion/clientregistry" crmocks "github.com/trustbloc/did-go/method/sidetreelongform/dochandler/protocolversion/clientregistry/mocks" diff --git a/method/sidetreelongform/dochandler/protocolversion/clientregistry/mocks/clientfactory.gen.go b/method/sidetreelongform/dochandler/protocolversion/clientregistry/mocks/clientfactory.gen.go index e00229e..67b40a8 100644 --- a/method/sidetreelongform/dochandler/protocolversion/clientregistry/mocks/clientfactory.gen.go +++ b/method/sidetreelongform/dochandler/protocolversion/clientregistry/mocks/clientfactory.gen.go @@ -4,7 +4,7 @@ package mocks import ( "sync" - "github.com/trustbloc/sidetree-core-go/pkg/api/protocol" + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/api/protocol" "github.com/trustbloc/did-go/method/sidetreelongform/dochandler/protocolversion/versions/common" ) diff --git a/method/sidetreelongform/dochandler/protocolversion/versions/common/protocol.go b/method/sidetreelongform/dochandler/protocolversion/versions/common/protocol.go index 1b36b60..f983f7e 100644 --- a/method/sidetreelongform/dochandler/protocolversion/versions/common/protocol.go +++ b/method/sidetreelongform/dochandler/protocolversion/versions/common/protocol.go @@ -7,7 +7,7 @@ SPDX-License-Identifier: Apache-2.0 package common import ( - "github.com/trustbloc/sidetree-core-go/pkg/api/protocol" + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/api/protocol" ) // ProtocolVersion implements the protocol.Version interface. diff --git a/method/sidetreelongform/dochandler/protocolversion/versions/common/protocol_test.go b/method/sidetreelongform/dochandler/protocolversion/versions/common/protocol_test.go index c6d50c6..9823f6e 100644 --- a/method/sidetreelongform/dochandler/protocolversion/versions/common/protocol_test.go +++ b/method/sidetreelongform/dochandler/protocolversion/versions/common/protocol_test.go @@ -10,8 +10,9 @@ import ( "testing" "github.com/stretchr/testify/require" - "github.com/trustbloc/sidetree-core-go/pkg/api/protocol" - coremocks "github.com/trustbloc/sidetree-core-go/pkg/mocks" + + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/api/protocol" + coremocks "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/mocks" "github.com/trustbloc/did-go/method/sidetreelongform/dochandler/protocolversion/versions/common" ) diff --git a/method/sidetreelongform/dochandler/protocolversion/versions/v1_0/client/client.go b/method/sidetreelongform/dochandler/protocolversion/versions/v1_0/client/client.go index d63ba1d..16a9e3b 100644 --- a/method/sidetreelongform/dochandler/protocolversion/versions/v1_0/client/client.go +++ b/method/sidetreelongform/dochandler/protocolversion/versions/v1_0/client/client.go @@ -7,12 +7,12 @@ SPDX-License-Identifier: Apache-2.0 package client import ( - "github.com/trustbloc/sidetree-core-go/pkg/api/protocol" - "github.com/trustbloc/sidetree-core-go/pkg/versions/1_0/doccomposer" - "github.com/trustbloc/sidetree-core-go/pkg/versions/1_0/doctransformer/didtransformer" - "github.com/trustbloc/sidetree-core-go/pkg/versions/1_0/docvalidator/didvalidator" - "github.com/trustbloc/sidetree-core-go/pkg/versions/1_0/operationapplier" - "github.com/trustbloc/sidetree-core-go/pkg/versions/1_0/operationparser" + 
"github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/api/protocol" + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/versions/1_0/doccomposer" + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/versions/1_0/doctransformer/didtransformer" + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/versions/1_0/docvalidator/didvalidator" + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/versions/1_0/operationapplier" + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/versions/1_0/operationparser" vcommon "github.com/trustbloc/did-go/method/sidetreelongform/dochandler/protocolversion/versions/common" protocolcfg "github.com/trustbloc/did-go/method/sidetreelongform/dochandler/protocolversion/versions/v1_0/config" diff --git a/method/sidetreelongform/dochandler/protocolversion/versions/v1_0/config/protocol.go b/method/sidetreelongform/dochandler/protocolversion/versions/v1_0/config/protocol.go index d59aed4..26bf830 100644 --- a/method/sidetreelongform/dochandler/protocolversion/versions/v1_0/config/protocol.go +++ b/method/sidetreelongform/dochandler/protocolversion/versions/v1_0/config/protocol.go @@ -7,7 +7,7 @@ SPDX-License-Identifier: Apache-2.0 package config import ( - "github.com/trustbloc/sidetree-core-go/pkg/api/protocol" + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/api/protocol" ) // GetProtocolConfig returns protocol config for this version. diff --git a/method/sidetreelongform/sidetree-core/api/cas/client.go b/method/sidetreelongform/sidetree-core/api/cas/client.go new file mode 100644 index 0000000..ec47c90 --- /dev/null +++ b/method/sidetreelongform/sidetree-core/api/cas/client.go @@ -0,0 +1,17 @@ +/* +Copyright SecureKey Technologies Inc. All Rights Reserved. + +SPDX-License-Identifier: Apache-2.0 +*/ + +package cas + +// Client defines interface for accessing the underlying content addressable storage. +type Client interface { + // Write writes the given content to CASClient. + Write(content []byte) (string, error) + + // Read reads the content of the given address in CASClient. + // returns the content of the given address. + Read(address string) ([]byte, error) +} diff --git a/method/sidetreelongform/sidetree-core/api/operation/models.go b/method/sidetreelongform/sidetree-core/api/operation/models.go new file mode 100644 index 0000000..bbd4ec3 --- /dev/null +++ b/method/sidetreelongform/sidetree-core/api/operation/models.go @@ -0,0 +1,128 @@ +/* +Copyright SecureKey Technologies Inc. All Rights Reserved. + +SPDX-License-Identifier: Apache-2.0 +*/ + +package operation + +// Property contains a key-value pair. +type Property struct { + Key string + Value interface{} +} + +// Operation holds minimum information required for parsing/validating client request. +type Operation struct { + + // Type defines operation type. + Type Type + + // UniqueSuffix defines document unique suffix. + UniqueSuffix string + + // ID defines ID + ID string + + // OperationRequest is the original operation request + OperationRequest []byte + + // AnchorOrigin defines anchor origin. + AnchorOrigin interface{} + + // Properties contains an arbitrary set of implementation-specific name-value pairs. + Properties []Property +} + +// Reference holds minimum information about did operation (suffix and type). +type Reference struct { + + // UniqueSuffix defines document unique suffix. + UniqueSuffix string + + // Type defines operation type. 
+ Type Type + + // AnchorOrigin defines anchor origin. + AnchorOrigin interface{} +} + +// AnchoredOperation defines an anchored operation (stored in document operation store). +type AnchoredOperation struct { + + // Type defines operation type. + Type Type `json:"type"` + + // UniqueSuffix defines document unique suffix. + UniqueSuffix string `json:"uniqueSuffix"` + + // OperationRequest is the original operation request + OperationRequest []byte `json:"operation"` + + // TransactionTime is the logical anchoring time (block number in case of blockchain) for this operation in the + // anchoring system (blockchain). + TransactionTime uint64 `json:"transactionTime"` + + // TransactionNumber is the transaction number of the transaction this operation was batched within. + TransactionNumber uint64 `json:"transactionNumber"` + + // ProtocolVersion is the genesis time (version) of the protocol that was used for this operation. + ProtocolVersion uint64 `json:"protocolVersion"` + + // CanonicalReference contains canonical reference that applies to this operation. + CanonicalReference string `json:"canonicalReference,omitempty"` + + // EquivalenceReferences contains equivalence reference that applies to this operation. + EquivalentReferences []string `json:"equivalentReferences,omitempty"` + + // AnchorOrigin is anchor origin + AnchorOrigin interface{} `json:"anchorOrigin,omitempty"` +} + +// Type defines valid values for operation type. +type Type string + +const ( + + // TypeCreate captures "create" operation type. + TypeCreate Type = "create" + + // TypeUpdate captures "update" operation type. + TypeUpdate Type = "update" + + // TypeDeactivate captures "deactivate" operation type. + TypeDeactivate Type = "deactivate" + + // TypeRecover captures "recover" operation type. + TypeRecover Type = "recover" +) + +// QueuedOperation stores minimum required operation info for operations queue. +type QueuedOperation struct { + Type Type + OperationRequest []byte + UniqueSuffix string + Namespace string + AnchorOrigin interface{} + Properties []Property +} + +// QueuedOperationAtTime contains queued operation info with protocol genesis time. +type QueuedOperationAtTime struct { + QueuedOperation + ProtocolVersion uint64 +} + +// QueuedOperationsAtTime contains a collection of queued operations with protocol genesis time. +type QueuedOperationsAtTime []*QueuedOperationAtTime + +// QueuedOperations returns a collection of QueuedOperation. +func (o QueuedOperationsAtTime) QueuedOperations() []*QueuedOperation { + ops := make([]*QueuedOperation, len(o)) + + for i, op := range o { + ops[i] = &op.QueuedOperation + } + + return ops +} diff --git a/method/sidetreelongform/sidetree-core/api/protocol/protocol.go b/method/sidetreelongform/sidetree-core/api/protocol/protocol.go new file mode 100644 index 0000000..264fd6b --- /dev/null +++ b/method/sidetreelongform/sidetree-core/api/protocol/protocol.go @@ -0,0 +1,211 @@ +/* +Copyright SecureKey Technologies Inc. All Rights Reserved. + +SPDX-License-Identifier: Apache-2.0 +*/ + +package protocol + +import ( + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/api/operation" + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/api/txn" + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/document" + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/patch" +) + +//go:generate counterfeiter -o ../../mocks/txnprocessor.gen.go --fake-name TxnProcessor . 
TxnProcessor
+//go:generate counterfeiter -o ../../mocks/operationparser.gen.go --fake-name OperationParser . OperationParser
+//go:generate counterfeiter -o ../../mocks/operationapplier.gen.go --fake-name OperationApplier . OperationApplier
+//go:generate counterfeiter -o ../../mocks/protocolversion.gen.go --fake-name ProtocolVersion . Version
+//go:generate counterfeiter -o ../../mocks/documentcomposer.gen.go --fake-name DocumentComposer . DocumentComposer
+//go:generate counterfeiter -o ../../mocks/documentvalidator.gen.go --fake-name DocumentValidator . DocumentValidator
+//go:generate counterfeiter -o ../../mocks/documenttransformer.gen.go --fake-name DocumentTransformer . DocumentTransformer
+//go:generate counterfeiter -o ../../mocks/operationhandler.gen.go --fake-name OperationHandler . OperationHandler
+//go:generate counterfeiter -o ../../mocks/operationprovider.gen.go --fake-name OperationProvider . OperationProvider
+
+// Protocol defines protocol parameters.
+type Protocol struct {
+	// GenesisTime is the inclusive starting logical anchoring time that this protocol applies to
+	// (e.g. block number in a blockchain).
+	GenesisTime uint64 `json:"genesisTime"`
+
+	// MultihashAlgorithms are supported multihash algorithm codes.
+	MultihashAlgorithms []uint `json:"multihashAlgorithms"`
+
+	// MaxOperationCount defines maximum number of operations per batch.
+	MaxOperationCount uint `json:"maxOperationCount"`
+
+	// MaxOperationSize is maximum operation size in bytes (used to reject operations before parsing them).
+	// It has to be greater than max delta size (big) + max proof size (medium) + other small
+	// values (operation type, suffix-data).
+	MaxOperationSize uint `json:"maxOperationSize"`
+
+	// MaxOperationHashLength is maximum operation hash length.
+	MaxOperationHashLength uint `json:"maxOperationHashLength"`
+
+	// MaxDeltaSize is maximum size of operation's delta property.
+	MaxDeltaSize uint `json:"maxDeltaSize"`
+
+	// MaxCasURILength is the maximum length of a CAS URI in batch files.
+	MaxCasURILength uint `json:"maxCasUriLength"`
+
+	// CompressionAlgorithm is file compression algorithm.
+	CompressionAlgorithm string `json:"compressionAlgorithm"`
+
+	// MaxCoreIndexFileSize is maximum allowed size (in bytes) of core index file stored in CAS.
+	MaxCoreIndexFileSize uint `json:"maxCoreIndexFileSize"`
+
+	// MaxProofFileSize is maximum allowed size (in bytes) of proof files stored in CAS.
+	MaxProofFileSize uint `json:"maxProofFileSize"`
+
+	// MaxProvisionalIndexFileSize is maximum allowed size (in bytes) of provisional index file stored in CAS.
+	MaxProvisionalIndexFileSize uint `json:"maxProvisionalIndexFileSize"`
+
+	// MaxChunkFileSize is maximum allowed size (in bytes) of chunk file stored in CAS.
+	MaxChunkFileSize uint `json:"maxChunkFileSize"`
+
+	// Patches contains the list of allowed patches.
+	Patches []string `json:"patches"`
+
+	// SignatureAlgorithms contain supported signature algorithms for
+	// signed operations (e.g. EdDSA, ES256, ES384, ES512, ES256K).
+	SignatureAlgorithms []string `json:"signatureAlgorithms"`
+
+	// KeyAlgorithms contain supported key algorithms for signed
+	// operations (e.g. secp256k1, P-256, P-384, P-521, Ed25519).
+ KeyAlgorithms []string `json:"keyAlgorithms"` + + // MaxOperationTimeDelta is maximum time that operation should be valid before + // it expires; used with anchor from time + MaxOperationTimeDelta uint64 `json:"maxOperationTimeDelta"` + + // NonceSize is the number of bytes in nonce values + NonceSize uint64 `json:"nonceSize"` + + // MaxMemoryDecompressionFactor is maximum file size after decompression (e.g. 3 times maximum file size) + MaxMemoryDecompressionFactor uint `json:"maxMemoryDecompressionFactor"` +} + +// TxnProcessor defines the functions for processing a Sidetree transaction. +type TxnProcessor interface { + Process(sidetreeTxn txn.SidetreeTxn, suffixes ...string) (numProcessed int, err error) +} + +// OperationParser defines the functions for parsing operations. +type OperationParser interface { + Parse(namespace string, operation []byte) (*operation.Operation, error) + ParseDID(namespace, shortOrLongFormDID string) (string, []byte, error) + GetRevealValue(operation []byte) (string, error) + GetCommitment(operation []byte) (string, error) +} + +// ResolutionModel contains temporary data during document resolution. +type ResolutionModel struct { + Doc document.Document + CreatedTime uint64 + UpdatedTime uint64 + LastOperationTransactionTime uint64 + LastOperationTransactionNumber uint64 + LastOperationProtocolVersion uint64 + UpdateCommitment string + RecoveryCommitment string + Deactivated bool + AnchorOrigin interface{} + EquivalentReferences []string + CanonicalReference string + VersionID string + PublishedOperations []*operation.AnchoredOperation + UnpublishedOperations []*operation.AnchoredOperation +} + +// OperationApplier applies the given operation to the document. +type OperationApplier interface { + Apply(op *operation.AnchoredOperation, rm *ResolutionModel) (*ResolutionModel, error) +} + +// DocumentComposer applies patches to the given document. +type DocumentComposer interface { + ApplyPatches(doc document.Document, patches []patch.Patch) (document.Document, error) +} + +// AnchorDocumentType defines valid values for anchor document type. +type AnchorDocumentType string + +const ( + + // TypePermanent captures "permanent" anchor document type. + TypePermanent AnchorDocumentType = "permanent" + + // TypeProvisional captures "provisional" anchor document type. + TypeProvisional AnchorDocumentType = "provisional" +) + +// AnchorDocument describes Sidetree batch files. +type AnchorDocument struct { + ID string + Desc string + Type AnchorDocumentType +} + +// AnchoringInfo contains anchoring info plus additional batch information. +type AnchoringInfo struct { + AnchorString string + Artifacts []*AnchorDocument + OperationReferences []*operation.Reference + AdditionalOperations []*operation.QueuedOperation + ExpiredOperations []*operation.QueuedOperation +} + +// OperationHandler defines an interface for creating batch files. +type OperationHandler interface { + // PrepareTxnFiles operations will create relevant batch files, store them in CAS and return anchor string. + PrepareTxnFiles(ops []*operation.QueuedOperation) (*AnchoringInfo, error) +} + +// OperationProvider retrieves the anchored operations for the given Sidetree transaction. +type OperationProvider interface { + GetTxnOperations(sidetreeTxn *txn.SidetreeTxn) ([]*operation.AnchoredOperation, error) +} + +// DocumentValidator is an interface for validating document operations. 
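Reviewer note: a concrete parameter set makes the Protocol struct above easier to review. The values below are purely illustrative — none are normative Sidetree parameters; they only show plausible magnitudes and the JSON-tagged field names in use:

```go
package config

import "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/api/protocol"

// exampleProtocol returns an illustrative parameter set; real deployments
// tune these per network.
func exampleProtocol() protocol.Protocol {
	return protocol.Protocol{
		GenesisTime:                  0,
		MultihashAlgorithms:          []uint{18}, // 18 (0x12) is the SHA2-256 multihash code
		MaxOperationCount:            1000,
		MaxOperationSize:             2500,
		MaxOperationHashLength:       100,
		MaxDeltaSize:                 1700,
		MaxCasURILength:              100,
		CompressionAlgorithm:         "GZIP",
		MaxCoreIndexFileSize:         1_000_000,
		MaxProofFileSize:             2_500_000,
		MaxProvisionalIndexFileSize:  1_000_000,
		MaxChunkFileSize:             10_000_000,
		Patches:                      []string{"add-public-keys", "remove-public-keys", "add-services", "remove-services", "ietf-json-patch"},
		SignatureAlgorithms:          []string{"EdDSA", "ES256"},
		KeyAlgorithms:                []string{"Ed25519", "P-256"},
		MaxOperationTimeDelta:        600,
		NonceSize:                    16,
		MaxMemoryDecompressionFactor: 3,
	}
}
```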
+type DocumentValidator interface { + IsValidOriginalDocument(payload []byte) error + IsValidPayload(payload []byte) error +} + +// DocumentTransformer transforms internal resolution model into external document(resolution result). +type DocumentTransformer interface { + TransformDocument(rm *ResolutionModel, info TransformationInfo) (*document.ResolutionResult, error) +} + +// TransformationInfo contains document transformation info. +type TransformationInfo map[string]interface{} + +// Version contains the protocol and corresponding implementations that are compatible with the protocol version. +type Version interface { + Version() string + Protocol() Protocol + TransactionProcessor() TxnProcessor + OperationParser() OperationParser + OperationApplier() OperationApplier + OperationHandler() OperationHandler + OperationProvider() OperationProvider + DocumentComposer() DocumentComposer + DocumentValidator() DocumentValidator + DocumentTransformer() DocumentTransformer +} + +// Client defines interface for accessing protocol version/information. +type Client interface { + // Current returns latest version of protocol. + Current() (Version, error) + + // Get returns the version at the given transaction time. + Get(transactionTime uint64) (Version, error) +} + +// ClientProvider returns a protocol client for the given namespace. +type ClientProvider interface { + ForNamespace(namespace string) (Client, error) +} diff --git a/method/sidetreelongform/sidetree-core/api/txn/sidetree.go b/method/sidetreelongform/sidetree-core/api/txn/sidetree.go new file mode 100644 index 0000000..49a7014 --- /dev/null +++ b/method/sidetreelongform/sidetree-core/api/txn/sidetree.go @@ -0,0 +1,19 @@ +/* +Copyright SecureKey Technologies Inc. All Rights Reserved. + +SPDX-License-Identifier: Apache-2.0 +*/ + +package txn + +// SidetreeTxn defines info about sidetree transaction. +type SidetreeTxn struct { + TransactionTime uint64 + TransactionNumber uint64 + AnchorString string + Namespace string + ProtocolVersion uint64 + CanonicalReference string + EquivalentReferences []string + AlternateSources []string +} diff --git a/method/sidetreelongform/sidetree-core/batch/cutter/cutter.go b/method/sidetreelongform/sidetree-core/batch/cutter/cutter.go new file mode 100644 index 0000000..19da266 --- /dev/null +++ b/method/sidetreelongform/sidetree-core/batch/cutter/cutter.go @@ -0,0 +1,172 @@ +/* +Copyright SecureKey Technologies Inc. All Rights Reserved. + +SPDX-License-Identifier: Apache-2.0 +*/ + +package cutter + +import ( + "fmt" + + "github.com/trustbloc/logutil-go/pkg/log" + + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/api/operation" + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/api/protocol" + logfields "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/internal/log" +) + +var logger = log.New("sidetree-core-cutter") + +// OperationQueue defines the functions for adding and removing operations from a queue. +type OperationQueue interface { + // Add adds the given operation to the tail of the queue and returns the new length of the queue. + Add(data *operation.QueuedOperation, protocolVersion uint64) (uint, error) + // Remove removes (up to) the given number of items from the head of the queue and returns: + // - The operations that are to be removed. + // - The 'Ack' function that must be called to commit the remove. + // - The 'Nack' function that must be called to roll back the remove. 
+	Remove(num uint) (ops operation.QueuedOperationsAtTime, ack func() uint, nack func(error), err error)
+	// Peek returns (up to) the given number of operations from the head of the queue but does not remove them.
+	Peek(num uint) (operation.QueuedOperationsAtTime, error)
+	// Len returns the number of operations in the queue.
+	Len() uint
+}
+
+// Committer is invoked to commit a batch Cut. The new number of pending items
+// in the queue is returned.
+type Committer = func() (pending uint, err error)
+
+// Result is the result of a batch 'Cut'.
+type Result struct {
+	// Operations holds the operations that were cut from the queue
+	Operations []*operation.QueuedOperation
+	// ProtocolVersion is the genesis time of the protocol version that was used to add the operations to the queue
+	ProtocolVersion uint64
+	// Pending is the number of operations remaining in the queue
+	Pending uint
+	// Ack commits the remove from the queue and returns the number of pending operations.
+	Ack func() uint
+	// Nack rolls back the remove so that a retry may occur.
+	Nack func(error)
+}
+
+// BatchCutter implements batch cutting.
+type BatchCutter struct {
+	pendingBatch OperationQueue
+	client       protocol.Client
+}
+
+// New creates a Cutter implementation.
+func New(client protocol.Client, queue OperationQueue) *BatchCutter {
+	return &BatchCutter{
+		client:       client,
+		pendingBatch: queue,
+	}
+}
+
+// Add adds the given operation to the pending batch queue and returns the total
+// number of pending operations.
+func (r *BatchCutter) Add(op *operation.QueuedOperation, protocolVersion uint64) (uint, error) {
+	// Enqueue the operation into the pending batch.
+	return r.pendingBatch.Add(op, protocolVersion)
+}
+
+// Cut returns the current batch along with the number of items that should remain in the queue after the committer
+// is called. If force is false then the batch is cut only if it has reached the max batch size (as specified
+// in the protocol). If force is true then the batch is cut if it contains at least one operation.
+// Note that the operations are removed from the queue when Result.Ack is invoked;
+// otherwise Result.Nack should be called in order to place the operations back in the queue so that
+// they can be processed again.
+func (r *BatchCutter) Cut(force bool) (Result, error) { + pending := r.pendingBatch.Len() + + currentProtocol, err := r.client.Current() + if err != nil { + return Result{}, err + } + + maxOperationsPerBatch := currentProtocol.Protocol().MaxOperationCount + if !force && pending < maxOperationsPerBatch { + return Result{Pending: pending}, nil + } + + batchSize := min(pending, maxOperationsPerBatch) + + ops, err := r.pendingBatch.Peek(batchSize) + if err != nil { + return Result{Pending: pending}, nil //nolint: nilerr + } + + operations, protocolVersion := getOperationsAtProtocolVersion(ops) + + batchSize = uint(len(operations)) + + if batchSize == 0 { + return Result{Pending: pending}, nil + } + + pending -= batchSize + + logger.Info("Removing operations from queue.", logfields.WithTotalPending(pending), + logfields.WithMaxSize(int(maxOperationsPerBatch)), logfields.WithSize(int(batchSize))) + + ops, ack, nack, err := r.pendingBatch.Remove(batchSize) + if err != nil { + return Result{}, fmt.Errorf("pending batch queue remove: %w", err) + } + + return Result{ + Operations: ops.QueuedOperations(), + ProtocolVersion: protocolVersion, + Pending: pending, + Ack: ack, + Nack: nack, + }, nil +} + +// getOperationsAtProtocolVersion iterates through the operations and returns the operations +// which are at the same protocol genesis time. +func getOperationsAtProtocolVersion(opsAtTime []*operation.QueuedOperationAtTime, +) ([]*operation.QueuedOperation, uint64) { + var ( + ops []*operation.QueuedOperation + protocolVersion uint64 + ) + + for _, op := range opsAtTime { + if protocolVersion == 0 { + protocolVersion = op.ProtocolVersion + } + + if op.ProtocolVersion != protocolVersion { + // This operation was added using a different transaction time so it can't go into the same batch + logger.Info("Not adding operation since its protocol genesis time is different from the "+ + "protocol genesis time of the existing ops in the batch.", + logfields.WithOperationGenesisTime(op.ProtocolVersion), + logfields.WithGenesisTime(protocolVersion)) + + break + } + + ops = append(ops, + &operation.QueuedOperation{ + Type: op.Type, + OperationRequest: op.OperationRequest, + UniqueSuffix: op.UniqueSuffix, + Namespace: op.Namespace, + Properties: op.Properties, + }, + ) + } + + return ops, protocolVersion +} + +func min(i, j uint) uint { + if i < j { + return i + } + + return j +} diff --git a/method/sidetreelongform/sidetree-core/batch/cutter/cutter_test.go b/method/sidetreelongform/sidetree-core/batch/cutter/cutter_test.go new file mode 100644 index 0000000..1480e24 --- /dev/null +++ b/method/sidetreelongform/sidetree-core/batch/cutter/cutter_test.go @@ -0,0 +1,129 @@ +/* +Copyright SecureKey Technologies Inc. All Rights Reserved. 
+ +SPDX-License-Identifier: Apache-2.0 +*/ + +package cutter + +import ( + "errors" + "fmt" + "testing" + + "github.com/stretchr/testify/require" + + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/api/operation" + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/batch/opqueue" + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/mocks" +) + +//nolint:gochecknoglobals +var ( + operation1 = &operation.QueuedOperation{UniqueSuffix: "1", OperationRequest: []byte("operation1")} + operation2 = &operation.QueuedOperation{UniqueSuffix: "2", OperationRequest: []byte("operation2")} + operation3 = &operation.QueuedOperation{UniqueSuffix: "3", OperationRequest: []byte("operation3")} + operation4 = &operation.QueuedOperation{UniqueSuffix: "4", OperationRequest: []byte("operation4")} + operation5 = &operation.QueuedOperation{UniqueSuffix: "5", OperationRequest: []byte("operation5")} + operation6 = &operation.QueuedOperation{UniqueSuffix: "6", OperationRequest: []byte("operation6")} +) + +func TestBatchCutter(t *testing.T) { + c := mocks.NewMockProtocolClient() + c.Protocol.MaxOperationCount = 3 + c.CurrentVersion.ProtocolReturns(c.Protocol) + + r := New(c, &opqueue.MemQueue{}) + + c.Err = fmt.Errorf("injected protocol error") + result, err := r.Cut(false) + require.EqualError(t, err, c.Err.Error()) + require.Empty(t, result.Operations) + require.Zero(t, result.Pending) + require.Zero(t, result.ProtocolVersion) + + c.Err = nil + + result, err = r.Cut(false) + require.NoError(t, err) + require.Empty(t, result.Operations) + require.Zero(t, result.Pending) + require.Zero(t, result.ProtocolVersion) + + l, err := r.Add(operation1, 10) + require.NoError(t, err) + require.Equal(t, uint(1), l) + l, err = r.Add(operation2, 10) + require.NoError(t, err) + require.Equal(t, uint(2), l) + + result, err = r.Cut(false) + require.NoError(t, err) + require.Empty(t, result.Operations) + require.Equal(t, uint(2), result.Pending) + require.Equal(t, uint64(0), result.ProtocolVersion) + + result, err = r.Cut(true) + require.NoError(t, err) + require.Len(t, result.Operations, 2) + require.Equal(t, operation1, result.Operations[0]) + require.Equal(t, operation2, result.Operations[1]) + require.Zero(t, result.Pending) + require.Equal(t, uint64(10), result.ProtocolVersion) + + result.Nack(errors.New("injected error")) + + // After a rollback, the operations should still be in the queue + result, err = r.Cut(true) + require.NoError(t, err) + require.Len(t, result.Operations, 2) + require.Equal(t, operation1, result.Operations[0]) + require.Equal(t, operation2, result.Operations[1]) + require.Zero(t, result.Pending) + + require.Zero(t, result.Ack()) + + // After a commit, the operations should be gone + result, err = r.Cut(true) + require.NoError(t, err) + require.Empty(t, result.Operations) + require.Zero(t, result.Pending) + + l, err = r.Add(operation3, 10) + require.NoError(t, err) + require.Equal(t, uint(1), l) + l, err = r.Add(operation4, 10) + require.NoError(t, err) + require.Equal(t, uint(2), l) + + result, err = r.Cut(false) + require.NoError(t, err) + require.Empty(t, result.Operations) + require.Equal(t, uint(2), result.Pending) + + l, err = r.Add(operation5, 20) + require.NoError(t, err) + require.Equal(t, uint(3), l) + l, err = r.Add(operation6, 20) + require.NoError(t, err) + require.Equal(t, uint(4), l) + + result, err = r.Cut(false) + require.NoError(t, err) + require.Lenf(t, result.Operations, 2, "should have only cut two operations since the third "+ + 
"operation in the queue is using a different protocol version") + require.Equal(t, operation3, result.Operations[0]) + require.Equal(t, operation4, result.Operations[1]) + require.Equal(t, uint(2), result.Pending) + + require.Equal(t, uint(2), result.Ack()) + + result, err = r.Cut(true) + require.NoError(t, err) + require.Len(t, result.Operations, 2) + require.Equal(t, operation5, result.Operations[0]) + require.Equal(t, operation6, result.Operations[1]) + require.Zero(t, result.Pending) + + require.Zero(t, result.Ack()) +} diff --git a/method/sidetreelongform/sidetree-core/batch/opqueue/memqueue.go b/method/sidetreelongform/sidetree-core/batch/opqueue/memqueue.go new file mode 100644 index 0000000..492f41a --- /dev/null +++ b/method/sidetreelongform/sidetree-core/batch/opqueue/memqueue.go @@ -0,0 +1,83 @@ +/* +Copyright SecureKey Technologies Inc. All Rights Reserved. + +SPDX-License-Identifier: Apache-2.0 +*/ + +package opqueue + +import ( + "sync" + + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/api/operation" +) + +// MemQueue implements an in-memory operation queue. +type MemQueue struct { + items []*operation.QueuedOperationAtTime + mutex sync.RWMutex +} + +// Add adds the given data to the tail of the queue and returns the new length of the queue. +func (q *MemQueue) Add(data *operation.QueuedOperation, protocolVersion uint64) (uint, error) { + q.mutex.Lock() + defer q.mutex.Unlock() + + q.items = append(q.items, &operation.QueuedOperationAtTime{ + QueuedOperation: *data, + ProtocolVersion: protocolVersion, + }) + + return uint(len(q.items)), nil +} + +// Peek returns (up to) the given number of operations from the head of the queue but does not remove them. +func (q *MemQueue) Peek(num uint) (operation.QueuedOperationsAtTime, error) { + q.mutex.RLock() + defer q.mutex.RUnlock() + + n := int(num) + if len(q.items) < n { + n = len(q.items) + } + + return q.items[0:n], nil +} + +// Remove removes (up to) the given number of items from the head of the queue. +func (q *MemQueue) Remove(num uint) ( + ops operation.QueuedOperationsAtTime, ack func() uint, nack func(error), err error) { + q.mutex.Lock() + defer q.mutex.Unlock() + + n := int(num) + if len(q.items) < n { + n = len(q.items) + } + + items := q.items[0:n] + q.items = q.items[n:] + + return items, + func() uint { + q.mutex.RLock() + defer q.mutex.RUnlock() + + return uint(len(q.items)) + }, + func(error) { + q.mutex.Lock() + defer q.mutex.Unlock() + + // Add the items to the head of the queue. + q.items = append(items, q.items...) + }, nil +} + +// Len returns the length of the queue. +func (q *MemQueue) Len() uint { + q.mutex.RLock() + defer q.mutex.RUnlock() + + return uint(len(q.items)) +} diff --git a/method/sidetreelongform/sidetree-core/batch/opqueue/memqueue_test.go b/method/sidetreelongform/sidetree-core/batch/opqueue/memqueue_test.go new file mode 100644 index 0000000..4a43940 --- /dev/null +++ b/method/sidetreelongform/sidetree-core/batch/opqueue/memqueue_test.go @@ -0,0 +1,91 @@ +/* +Copyright SecureKey Technologies Inc. All Rights Reserved. 
+ +SPDX-License-Identifier: Apache-2.0 +*/ + +package opqueue + +import ( + "errors" + "testing" + + "github.com/stretchr/testify/require" + + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/api/operation" +) + +//nolint:gochecknoglobals +var ( + op1 = &operation.QueuedOperation{Namespace: "ns", UniqueSuffix: "op1", OperationRequest: []byte("op1")} + op2 = &operation.QueuedOperation{Namespace: "ns", UniqueSuffix: "op2", OperationRequest: []byte("op2")} + op3 = &operation.QueuedOperation{Namespace: "ns", UniqueSuffix: "op3", OperationRequest: []byte("op3")} +) + +func TestMemQueue(t *testing.T) { + q := &MemQueue{} + require.Zero(t, q.Len()) + + ops, err := q.Peek(1) + require.NoError(t, err) + require.Empty(t, ops) + + l, err := q.Add(op1, 10) + require.NoError(t, err) + require.Equal(t, uint(1), l) + require.Equal(t, uint(1), q.Len()) + + l, err = q.Add(op2, 10) + require.NoError(t, err) + require.Equal(t, uint(2), l) + require.Equal(t, uint(2), q.Len()) + + l, err = q.Add(op3, 10) + require.NoError(t, err) + require.Equal(t, uint(3), l) + require.Equal(t, uint(3), q.Len()) + + ops, err = q.Peek(1) + require.NoError(t, err) + require.Len(t, ops, 1) + require.Equal(t, *op1, ops[0].QueuedOperation) + + ops, err = q.Peek(4) + require.NoError(t, err) + require.Len(t, ops, 3) + require.Equal(t, *op1, ops[0].QueuedOperation) + require.Equal(t, *op2, ops[1].QueuedOperation) + require.Equal(t, *op3, ops[2].QueuedOperation) + + ops, ack, nack, err := q.Remove(1) + require.NoError(t, err) + require.NotNil(t, ack) + require.NotNil(t, nack) + require.Len(t, ops, 1) + require.Equal(t, *op1, ops[0].QueuedOperation) + + require.Equal(t, uint(2), ack()) + + ops, err = q.Peek(1) + require.NoError(t, err) + require.Len(t, ops, 1) + require.Equal(t, *op2, ops[0].QueuedOperation) + + ops, _, nack, err = q.Remove(5) + require.NoError(t, err) + require.NotNil(t, nack) + require.Len(t, ops, 2) + require.Equal(t, *op2, ops[0].QueuedOperation) + require.Equal(t, *op3, ops[1].QueuedOperation) + + nack(errors.New("injected error")) + + ops, ack, _, err = q.Remove(5) + require.NoError(t, err) + require.NotNil(t, ack) + require.Len(t, ops, 2) + require.Equal(t, *op2, ops[0].QueuedOperation) + require.Equal(t, *op3, ops[1].QueuedOperation) + + require.Zero(t, ack()) +} diff --git a/method/sidetreelongform/sidetree-core/batch/writer.go b/method/sidetreelongform/sidetree-core/batch/writer.go new file mode 100644 index 0000000..a75666d --- /dev/null +++ b/method/sidetreelongform/sidetree-core/batch/writer.go @@ -0,0 +1,336 @@ +/* +Copyright SecureKey Technologies Inc. All Rights Reserved. + +SPDX-License-Identifier: Apache-2.0 +*/ + +// Package batch batches multiple operations into batch files and stores the batch files in a distributed +// content-addressable storage (DCAS or CAS). A reference to the main batch file (core index) is then +// anchored on the anchoring system as Sidetree transaction. 
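Aside: the ack/nack contract returned by MemQueue.Remove above is the piece a consumer most easily gets wrong, so here is a minimal sketch of the expected call pattern. The main wrapper and printed values are illustrative only; the import paths are the ones introduced by this patch.

```go
package main

import (
	"errors"
	"fmt"

	"github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/api/operation"
	"github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/batch/opqueue"
)

func main() {
	q := &opqueue.MemQueue{}

	// Queue two operations at protocol version 10.
	_, _ = q.Add(&operation.QueuedOperation{UniqueSuffix: "1"}, 10)
	_, _ = q.Add(&operation.QueuedOperation{UniqueSuffix: "2"}, 10)

	// Remove detaches the items and returns ack/nack callbacks.
	ops, ack, nack, err := q.Remove(2)
	if err != nil {
		panic(err)
	}

	fmt.Println(len(ops)) // 2

	// A failed batch is rolled back: nack restores the items to the head.
	nack(errors.New("anchoring failed"))
	fmt.Println(q.Len()) // 2

	// On success, ack commits the removal and reports what is left.
	_, ack, _, err = q.Remove(2)
	if err != nil {
		panic(err)
	}

	fmt.Println(ack()) // 0
}
```

Because nack prepends the removed items back onto the head of the queue, a failed batch is retried in its original order.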
+// +// Batch Writer basic flow: +// +// 1) accept operations being delivered via Add method +// 2) 'cut' configurable number of operations into batch files +// 3) store batch files into CAS (content addressable storage) +// 4) write the anchor string referencing core index file URI to the underlying anchoring system +package batch + +import ( + "fmt" + "sync/atomic" + "time" + + "github.com/pkg/errors" + "github.com/trustbloc/logutil-go/pkg/log" + + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/api/operation" + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/api/protocol" + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/api/txn" + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/batch/cutter" + logfields "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/internal/log" +) + +const ( + loggerModule = "sidetree-core-writer" + + defaultBatchTimeout = 2 * time.Second + defaultMonitorInterval = time.Second +) + +// Option defines Writer options such as batch timeout. +type Option func(opts *Options) error + +type batchCutter interface { + Add(operation *operation.QueuedOperation, protocolVersion uint64) (uint, error) + Cut(force bool) (cutter.Result, error) +} + +// Writer implements batch writer. +type Writer struct { + namespace string + context Context + batchCutter batchCutter + exitChan chan struct{} + stopped uint32 + protocol protocol.Client + monitorTicker *time.Ticker + batchTimeoutTicker *time.Ticker + logger *log.Log +} + +// Context contains batch writer context. +// 1) protocol information client +// 2) content addressable storage client +// 3) anchor writer. +type Context interface { + Protocol() protocol.Client + Anchor() AnchorWriter + OperationQueue() cutter.OperationQueue +} + +// AnchorWriter defines an interface to access the underlying anchoring system. +type AnchorWriter interface { + // WriteAnchor writes the anchor string as a transaction to anchoring system + WriteAnchor( + anchor string, artifacts []*protocol.AnchorDocument, ops []*operation.Reference, protocolVersion uint64) error + // Read ledger transaction + Read(sinceTransactionNumber int) (bool, *txn.SidetreeTxn) +} + +// CompressionProvider defines an interface for handling different types of compression. +type CompressionProvider interface { + + // Compress will compress data using specified algorithm. + Compress(alg string, data []byte) ([]byte, error) +} + +// New creates a new Writer with the given namespace. +// Writer accepts operations being delivered via Add, orders them, and then uses the batch +// cutter to form the operations batch files. The URI of main batch file (index core) +// will be written as part of anchor string to the given ledger. +func New(namespace string, context Context, options ...Option) (*Writer, error) { + rOpts, err := prepareOptsFromOptions(options...) 
+ if err != nil { + return nil, fmt.Errorf("failed to read opts: %s", err) + } + + batchTimeout := defaultBatchTimeout + if rOpts.BatchTimeout != 0 { + batchTimeout = rOpts.BatchTimeout + } + + monitorInterval := defaultMonitorInterval + if rOpts.MonitorInterval != 0 { + monitorInterval = rOpts.MonitorInterval + } + + return &Writer{ + namespace: namespace, + batchCutter: cutter.New(context.Protocol(), context.OperationQueue()), + exitChan: make(chan struct{}), + context: context, + protocol: context.Protocol(), + batchTimeoutTicker: time.NewTicker(batchTimeout), + monitorTicker: time.NewTicker(monitorInterval), + logger: log.New(loggerModule, log.WithFields(logfields.WithNamespace(namespace))), + }, nil +} + +// Start periodic anchoring of operation batches to anchoring system. +func (r *Writer) Start() { + go r.main() +} + +// Stop frees the resources which were allocated by start. +func (r *Writer) Stop() { + if !atomic.CompareAndSwapUint32(&r.stopped, 0, 1) { + // Already stopped + return + } + + select { + case <-r.exitChan: + // Allow multiple halts without panic + default: + close(r.exitChan) + } +} + +// Stopped returns true if the writer has been stopped. +func (r *Writer) Stopped() bool { + return atomic.LoadUint32(&r.stopped) == 1 +} + +// Add the given operation to a queue of operations to be batched and anchored on anchoring system. +func (r *Writer) Add(op *operation.QueuedOperation, protocolVersion uint64) error { + if r.Stopped() { + return errors.New("writer is stopped") + } + + _, err := r.batchCutter.Add(op, protocolVersion) + if err != nil { + return err + } + + return nil +} + +func (r *Writer) main() { + // On startup, there may be operations in the queue. Process them immediately. + r.processAvailable(true) + + for { + select { + case <-r.monitorTicker.C: + r.processAvailable(false) + + case <-r.batchTimeoutTicker.C: + r.processAvailable(true) + + case <-r.exitChan: + r.logger.Info("Exiting batch writer") + + return + } + } +} + +func (r *Writer) processAvailable(forceCut bool) uint { + // First drain the queue of all of the operations that are ready to form a batch + pending, err := r.drain() + if err != nil { + r.logger.Warn("Error draining operations queue.", + log.WithError(err), logfields.WithTotalPending(pending)) + + return pending + } + + if pending == 0 || !forceCut { + return pending + } + + r.logger.Debug("Forcefully processing operations", + logfields.WithTotalPending(pending)) + + // Now process the remaining operations + n, pending, err := r.cutAndProcess(true) + if err != nil { + r.logger.Warn("Error processing operations", log.WithError(err), + logfields.WithTotalPending(pending)) + } else { + r.logger.Info("Successfully processed operations.", logfields.WithTotal(n), + logfields.WithTotalPending(pending)) + } + + return pending +} + +// drain cuts and processes all pending operations that are ready to form a batch. +func (r *Writer) drain() (pending uint, err error) { + for { + n, pending, err := r.cutAndProcess(false) + if err != nil { + r.logger.Error("Error draining operations: cutting and processing returned an error", + log.WithError(err)) + + return pending, err + } + + if n == 0 { + return pending, nil + } + + r.logger.Info(" ... 
drain processed operations into batch.", + logfields.WithTotal(n), logfields.WithTotalPending(pending)) + } +} + +func (r *Writer) cutAndProcess(forceCut bool) (numProcessed int, pending uint, err error) { + result, err := r.batchCutter.Cut(forceCut) + if err != nil { + r.logger.Error("Error cutting batch", log.WithError(err)) + + return 0, 0, err + } + + if len(result.Operations) == 0 { + return 0, result.Pending, nil + } + + r.logger.Info("Processing batch operations for protocol genesis time...", + logfields.WithTotal(len(result.Operations)), logfields.WithGenesisTime(result.ProtocolVersion)) + + err = r.process(result.Operations, result.ProtocolVersion) + if err != nil { + r.logger.Error("Error processing batch operations", logfields.WithTotal(len(result.Operations)), log.WithError(err)) + + result.Nack(err) + + return 0, result.Pending + uint(len(result.Operations)), err + } + + r.logger.Info("Successfully processed batch operations. Committing to batch cutter ...", + logfields.WithTotal(len(result.Operations))) + + pending = result.Ack() + + r.logger.Info("Successfully committed to batch cutter.", logfields.WithTotalPending(pending)) + + return len(result.Operations), pending, nil +} + +func (r *Writer) process(ops []*operation.QueuedOperation, protocolVersion uint64) error { + if len(ops) == 0 { + return errors.New("create batch called with no pending operations, should not happen") + } + + p, err := r.protocol.Get(protocolVersion) + if err != nil { + return err + } + + anchoringInfo, err := p.OperationHandler().PrepareTxnFiles(ops) + if err != nil { + return err + } + + r.logger.Info("Writing anchor string", logfields.WithAnchorString(anchoringInfo.AnchorString)) + + // Create Sidetree transaction in anchoring system (write anchor string) + err = r.context.Anchor().WriteAnchor(anchoringInfo.AnchorString, anchoringInfo.Artifacts, + anchoringInfo.OperationReferences, protocolVersion) + if err != nil { + return fmt.Errorf("write anchor [%s]: %w", anchoringInfo.AnchorString, err) + } + + // Sidetree spec allows for one operation per suffix in the batch + // Process additional operations for suffix in the next batch + for _, op := range anchoringInfo.AdditionalOperations { + if e := r.Add(op, protocolVersion); e != nil { + // this error should never happen since parsing of this operation has already been done for the previous batch + r.logger.Warn("Unable to add additional operation to the next batch", + logfields.WithSuffix(op.UniqueSuffix), log.WithError(e)) + } + } + + return nil +} + +// WithBatchTimeout allows for specifying batch timeout. +func WithBatchTimeout(batchTimeout time.Duration) Option { + return func(o *Options) error { + o.BatchTimeout = batchTimeout + + return nil + } +} + +// WithMonitorInterval specifies the interval in which the operation queue is monitored in order to see +// if the maximum batch size has been reached. +func WithMonitorInterval(interval time.Duration) Option { + return func(o *Options) error { + o.MonitorInterval = interval + + return nil + } +} + +// Options allows the user to specify more advanced options. +type Options struct { + BatchTimeout time.Duration + MonitorInterval time.Duration +} + +// prepareOptsFromOptions reads options. 
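Aside: a short sketch of how the functional options above compose when constructing a Writer. The newWriter helper and the did:example namespace are placeholders; the ctx argument must supply the protocol client, anchor writer, and operation queue via the batch.Context interface, and obtaining one is deployment-specific and not shown here.

```go
package example

import (
	"time"

	"github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/batch"
)

// newWriter wires a batch writer that force-cuts a batch at least every
// five seconds and checks the queue for a full batch once per second.
func newWriter(ctx batch.Context) (*batch.Writer, error) {
	return batch.New("did:example", ctx,
		batch.WithBatchTimeout(5*time.Second),
		batch.WithMonitorInterval(time.Second),
	)
}
```

After New returns, call Start to launch the processing loop and Stop to shut it down; Stop guards against repeated calls, so it is safe in defer paths.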
+func prepareOptsFromOptions(options ...Option) (Options, error) { + rOpts := Options{} + for _, option := range options { + err := option(&rOpts) + if err != nil { + return rOpts, err + } + } + + return rOpts, nil +} diff --git a/method/sidetreelongform/sidetree-core/batch/writer_test.go b/method/sidetreelongform/sidetree-core/batch/writer_test.go new file mode 100644 index 0000000..7c9fe4c --- /dev/null +++ b/method/sidetreelongform/sidetree-core/batch/writer_test.go @@ -0,0 +1,592 @@ +/* +Copyright SecureKey Technologies Inc. All Rights Reserved. + +SPDX-License-Identifier: Apache-2.0 +*/ + +package batch + +import ( + "encoding/json" + "errors" + "fmt" + "testing" + "time" + + "github.com/stretchr/testify/require" + + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/api/cas" + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/api/operation" + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/api/protocol" + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/batch/cutter" + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/batch/opqueue" + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/commitment" + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/compression" + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/jws" + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/mocks" + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/versions/1_0/client" + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/versions/1_0/doccomposer" + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/versions/1_0/operationapplier" + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/versions/1_0/operationparser" + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/versions/1_0/txnprovider" + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/versions/1_0/txnprovider/models" +) + +//go:generate counterfeiter -o ../mocks/operationqueue.gen.go --fake-name OperationQueue ./cutter OperationQueue + +const ( + sha2_256 = 18 + namespace = "did:sidetree" + compressionAlgorithm = "GZIP" +) + +func TestNew(t *testing.T) { + ctx := newMockContext() + writer, err := New(namespace, ctx) + require.Nil(t, err) + require.NotNil(t, writer) + + writer, err = New(namespace, ctx, WithBatchTimeout(10*time.Second)) + require.Nil(t, err) + require.NotNil(t, writer) + + writer, err = New(namespace, ctx, withError()) + require.Error(t, err) + require.Contains(t, err.Error(), "failed to read opts: test error") + require.Nil(t, writer) + + writer, err = New(namespace, ctx) + require.Nil(t, err) + require.NotNil(t, writer) + + writer, err = New(namespace, ctx) + require.Nil(t, err) + require.NotNil(t, writer) +} + +func TestStart(t *testing.T) { + ctx := newMockContext() + writer, err := New(namespace, ctx) + require.Nil(t, err) + + writer.Start() + defer writer.Stop() + + operations := generateOperations(8) + + for _, op := range operations { + err = writer.Add(op, 0) + require.Nil(t, err) + } + + time.Sleep(2 * time.Second) + + // we should have 4 anchors: 8 operations % max 2 operations per batch + require.Equal(t, 4, len(ctx.AnchorWriter.GetAnchors())) + + ad, err := txnprovider.ParseAnchorData(ctx.AnchorWriter.GetAnchors()[0]) + require.NoError(t, err) + + // Check that first anchor has two operations per batch + cif, pif, cf, err := getBatchFiles(ctx.ProtocolClient.CasClient, 
ad.CoreIndexFileURI) + require.Nil(t, err) + + require.Equal(t, 2, len(cif.Operations.Create)) + require.Nil(t, pif.Operations) + require.Equal(t, 2, len(cf.Deltas)) +} + +func getBatchFiles( + cc cas.Client, anchor string) (*models.CoreIndexFile, *models.ProvisionalIndexFile, *models.ChunkFile, error) { + bytes, err := cc.Read(anchor) + if err != nil { + return nil, nil, nil, err + } + + compression := compression.New(compression.WithDefaultAlgorithms()) + + content, err := compression.Decompress(compressionAlgorithm, bytes) + if err != nil { + return nil, nil, nil, err + } + + var cif models.CoreIndexFile + + err = json.Unmarshal(content, &cif) + if err != nil { + return nil, nil, nil, err + } + + bytes, err = cc.Read(cif.ProvisionalIndexFileURI) + if err != nil { + return nil, nil, nil, err + } + + content, err = compression.Decompress(compressionAlgorithm, bytes) + if err != nil { + return nil, nil, nil, err + } + + var pif models.ProvisionalIndexFile + + err = json.Unmarshal(content, &pif) + if err != nil { + return nil, nil, nil, err + } + + bytes, err = cc.Read(pif.Chunks[0].ChunkFileURI) + if err != nil { + return nil, nil, nil, err + } + + content, err = compression.Decompress(compressionAlgorithm, bytes) + if err != nil { + return nil, nil, nil, err + } + + var cf models.ChunkFile + + err = json.Unmarshal(content, &cf) + if err != nil { + return nil, nil, nil, err + } + + return &cif, &pif, &cf, nil +} + +func TestBatchTimer(t *testing.T) { + ctx := newMockContext() + writer, err := New(namespace, ctx, WithBatchTimeout(2*time.Second), WithMonitorInterval(time.Second)) + require.Nil(t, err) + + writer.Start() + defer writer.Stop() + + testOp, err := generateOperation(0) + require.NoError(t, err) + + err = writer.Add(testOp, 0) + require.Nil(t, err) + + // Batch will be cut after 2 seconds even though + // maximum operations(=2) have not been reached + time.Sleep(3 * time.Second) + + require.Equal(t, 1, len(ctx.AnchorWriter.GetAnchors())) + + ad, err := txnprovider.ParseAnchorData(ctx.AnchorWriter.GetAnchors()[0]) + require.NoError(t, err) + + cif, pif, cf, err := getBatchFiles(ctx.ProtocolClient.CasClient, ad.CoreIndexFileURI) + require.Nil(t, err) + + require.Equal(t, 1, len(cif.Operations.Create)) + require.Equal(t, 0, len(cif.Operations.Recover)) + require.Equal(t, 0, len(cif.Operations.Deactivate)) + + require.Nil(t, pif.Operations) + + require.Equal(t, 1, len(cf.Deltas)) +} + +func TestAdditionalSuffixInBatchFile(t *testing.T) { + ctx := newMockContext() + writer, err := New(namespace, ctx) + require.Nil(t, err) + + writer.Start() + defer writer.Stop() + + op, err := generateOperation(1) + require.NoError(t, err) + + err = writer.Add(op, 0) + require.Nil(t, err) + + // add same operation again + err = writer.Add(op, 0) + require.Nil(t, err) + + time.Sleep(3 * time.Second) + + // we should have 2 anchors because we have two operations for the same suffix in one batch + // second one will be processed in the next batch + require.Equal(t, 2, len(ctx.AnchorWriter.GetAnchors())) + + ad, err := txnprovider.ParseAnchorData(ctx.AnchorWriter.GetAnchors()[0]) + require.NoError(t, err) + + // Check that first anchor has one operation per batch; second one will be processed in the next batch + cif, pif, _, err := getBatchFiles(ctx.ProtocolClient.CasClient, ad.CoreIndexFileURI) + require.NoError(t, err) + + require.Equal(t, 1, len(cif.Operations.Create)) + require.Equal(t, 0, len(cif.Operations.Recover)) + require.Equal(t, 0, len(cif.Operations.Deactivate)) + + require.Nil(t, 
pif.Operations) + + ad, err = txnprovider.ParseAnchorData(ctx.AnchorWriter.GetAnchors()[1]) + require.NoError(t, err) + + // Check that first anchor has one operation per batch; second one has been discarded + cif, pif, cf, err := getBatchFiles(ctx.ProtocolClient.CasClient, ad.CoreIndexFileURI) + require.NoError(t, err) + + require.Equal(t, 1, len(cif.Operations.Create)) + require.Equal(t, 0, len(cif.Operations.Recover)) + require.Equal(t, 0, len(cif.Operations.Deactivate)) + + require.Nil(t, pif.Operations) + + require.Equal(t, 1, len(cf.Deltas)) +} + +func TestProcessOperationsError(t *testing.T) { + ctx := newMockContext() + ctx.ProtocolClient.CasClient.SetError(fmt.Errorf("CAS Error")) + + writer, err := New(namespace, ctx, WithBatchTimeout(2*time.Second)) + require.Nil(t, err) + + writer.Start() + defer writer.Stop() + + operations := generateOperations(3) + for _, op := range operations { + err = writer.Add(op, 0) + require.Nil(t, err) + } + + time.Sleep(3 * time.Second) + + require.Equal(t, 0, len(ctx.AnchorWriter.GetAnchors())) +} + +func TestAnchorError(t *testing.T) { + ctx := newMockContext() + writer, err := New(namespace, ctx, WithBatchTimeout(2*time.Second)) + require.Nil(t, err) + + ctx.AnchorWriter = mocks.NewMockAnchorWriter(fmt.Errorf("anchor writer error")) + + writer.Start() + defer writer.Stop() + + operations := generateOperations(3) + for _, op := range operations { + err = writer.Add(op, 0) + require.Nil(t, err) + } + + time.Sleep(3 * time.Second) + + require.Equal(t, 0, len(ctx.AnchorWriter.GetAnchors())) +} + +func TestAddAfterStop(t *testing.T) { + writer, err := New(namespace, newMockContext()) + require.Nil(t, err) + require.False(t, writer.Stopped()) + + writer.Stop() + // Should be able to call stop multiple times + writer.Stop() + + require.True(t, writer.Stopped()) + + testOp, err := generateOperation(0) + require.NoError(t, err) + + err = writer.Add(testOp, 0) + require.EqualError(t, err, "writer is stopped") +} + +func TestProcessBatchErrorRecovery(t *testing.T) { + ctx := newMockContext() + ctx.ProtocolClient.Protocol.MaxOperationCount = 2 + ctx.ProtocolClient.CasClient = mocks.NewMockCasClient(fmt.Errorf("CAS Error")) + + writer, err := New(namespace, ctx, WithBatchTimeout(500*time.Millisecond)) + require.Nil(t, err) + + writer.Start() + defer writer.Stop() + + const ( + n = 12 + numBatchesExpected = 7 + ) + + firstOp, err := generateOperation(0) + require.NoError(t, err) + + require.NoError(t, writer.Add(firstOp, 0)) + time.Sleep(1 * time.Second) + + for _, op := range generateOperations(n) { + require.NoError(t, writer.Add(op, 0)) + } + + // Clear the error. 
The batch writer should recover by processing all of the pending batches + ctx.ProtocolClient.CasClient.SetError(nil) + time.Sleep(1 * time.Second) + + require.Equal(t, numBatchesExpected, len(ctx.AnchorWriter.GetAnchors())) +} + +func TestAddError(t *testing.T) { + errExpected := errors.New("injected operation queue error") + q := &mocks.OperationQueue{} + q.AddReturns(0, errExpected) + + ctx := newMockContext() + ctx.OpQueue = q + + writer, err := New(namespace, ctx) + require.NoError(t, err) + require.EqualError(t, writer.Add(&operation.QueuedOperation{}, 0), errExpected.Error()) +} + +func TestStartWithExistingItems(t *testing.T) { + const ( + numOperations = 23 + maxOperationsPerBatch = 4 + numBatchesExpected = 6 + ) + + opQueue := &opqueue.MemQueue{} + + ctx := newMockContext() + ctx.ProtocolClient.Protocol.MaxOperationCount = maxOperationsPerBatch + ctx.ProtocolClient.CurrentVersion.ProtocolReturns(ctx.ProtocolClient.Protocol) + ctx.OpQueue = opQueue + + writer, err := New(namespace, ctx) + require.Nil(t, err) + + // Add operations to the queue directly + for _, op := range generateOperations(numOperations) { + _, err = opQueue.Add(op, 0) + require.Nil(t, err) + } + + writer.Start() + defer writer.Stop() + + time.Sleep(time.Second) + require.Equal(t, numBatchesExpected, len(ctx.AnchorWriter.GetAnchors())) +} + +func TestProcessError(t *testing.T) { + t.Run("process operation error", func(t *testing.T) { + q := &mocks.OperationQueue{} + + invalidQueue := []*operation.QueuedOperationAtTime{{ + QueuedOperation: operation.QueuedOperation{ + OperationRequest: []byte(""), + UniqueSuffix: "unique", + Namespace: "ns", + }}} + + q.LenReturns(1) + q.PeekReturns(invalidQueue, nil) + q.RemoveReturns(nil, func() uint { return 0 }, func(error) {}, nil) + + ctx := newMockContext() + ctx.ProtocolClient.Protocol.MaxOperationCount = 1 + ctx.OpQueue = q + + writer, err := New("test1", ctx, WithBatchTimeout(10*time.Millisecond)) + require.NoError(t, err) + + writer.Start() + defer writer.Stop() + + time.Sleep(50 * time.Millisecond) + + require.Zero(t, len(ctx.AnchorWriter.GetAnchors())) + }) + + t.Run("Cut error", func(t *testing.T) { + errExpected := errors.New("injected operation queue error") + q := &mocks.OperationQueue{} + + const numOperations = 3 + q.LenReturns(numOperations) + q.PeekReturns(nil, errExpected) + + ctx := newMockContext() + ctx.ProtocolClient.Protocol.MaxOperationCount = 2 + ctx.OpQueue = q + + writer, err := New("test1", ctx, WithBatchTimeout(10*time.Millisecond)) + require.NoError(t, err) + + writer.Start() + defer writer.Stop() + + time.Sleep(50 * time.Millisecond) + + require.Zero(t, len(ctx.AnchorWriter.GetAnchors())) + }) + + t.Run("Cutter commit error", func(t *testing.T) { + errExpected := errors.New("injected operation queue error") + q := &mocks.OperationQueue{} + + const numOperations = 3 + q.LenReturns(numOperations) + q.PeekReturns(generateOperationsAtTime(numOperations, 0), nil) + q.RemoveReturns(nil, nil, nil, errExpected) + + ctx := newMockContext() + ctx.ProtocolClient.Protocol.MaxOperationCount = 2 + ctx.OpQueue = q + + writer, err := New("test2", ctx, WithBatchTimeout(10*time.Millisecond)) + require.NoError(t, err) + + writer.Start() + defer writer.Stop() + + time.Sleep(50 * time.Millisecond) + }) +} + +// withError allows for testing an error in options. 
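Aside: withError (below) exercises the option-error path in New. For comparison, a user-defined option that validates its input might look like the following; WithPositiveBatchTimeout is hypothetical and not part of this patch.

```go
package example

import (
	"fmt"
	"time"

	"github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/batch"
)

// WithPositiveBatchTimeout rejects non-positive timeouts instead of letting
// a zero value silently fall back to the default, making batch.New fail fast.
func WithPositiveBatchTimeout(d time.Duration) batch.Option {
	return func(o *batch.Options) error {
		if d <= 0 {
			return fmt.Errorf("batch timeout must be positive, got %s", d)
		}

		o.BatchTimeout = d

		return nil
	}
}
```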
+func withError() Option { + return func(o *Options) error { + return fmt.Errorf("test error") + } +} + +func generateOperations(numOfOperations int) (ops []*operation.QueuedOperation) { + for j := 1; j <= numOfOperations; j++ { + op, err := generateOperation(j) + if err != nil { + panic(err) + } + + ops = append(ops, op) + } + + return +} + +func generateOperationsAtTime(numOfOperations int, protocolVersion uint64) (ops []*operation.QueuedOperationAtTime) { + for j := 1; j <= numOfOperations; j++ { + op, err := generateOperation(j) + if err != nil { + panic(err) + } + + ops = append(ops, &operation.QueuedOperationAtTime{ + QueuedOperation: *op, + ProtocolVersion: protocolVersion, + }) + } + + return +} + +func generateOperation(num int) (*operation.QueuedOperation, error) { + updateJwk := &jws.JWK{ + Crv: "crv", + Kty: "kty", + X: "x", + } + + recoverJWK := &jws.JWK{ + Crv: "crv", + Kty: "kty", + X: "x", + Y: "y", + } + + updateCommitment, err := commitment.GetCommitment(updateJwk, sha2_256) + if err != nil { + return nil, err + } + + recoverComitment, err := commitment.GetCommitment(recoverJWK, sha2_256) + if err != nil { + return nil, err + } + + doc := fmt.Sprintf(`{"test":%d}`, num) + info := &client.CreateRequestInfo{ + OpaqueDocument: doc, + RecoveryCommitment: recoverComitment, + UpdateCommitment: updateCommitment, + MultihashCode: sha2_256, + } + + request, err := client.NewCreateRequest(info) + if err != nil { + return nil, err + } + + op := &operation.QueuedOperation{ + Namespace: "did:sidetree", + UniqueSuffix: fmt.Sprint(num), + OperationRequest: request, + } + + return op, nil +} + +// mockContext implements mock batch writer context. +type mockContext struct { + ProtocolClient *mocks.MockProtocolClient + AnchorWriter *mocks.MockAnchorWriter + OpQueue cutter.OperationQueue +} + +// newMockContext returns a new mockContext object. +func newMockContext() *mockContext { + return &mockContext{ + ProtocolClient: newMockProtocolClient(), + AnchorWriter: mocks.NewMockAnchorWriter(nil), + OpQueue: &opqueue.MemQueue{}, + } +} + +// Protocol returns the Client. +func (m *mockContext) Protocol() protocol.Client { + return m.ProtocolClient +} + +// Anchor returns the block chain client. +func (m *mockContext) Anchor() AnchorWriter { + return m.AnchorWriter +} + +// OperationQueue returns the queue containing the pending operations. +func (m *mockContext) OperationQueue() cutter.OperationQueue { + return m.OpQueue +} + +func newMockProtocolClient() *mocks.MockProtocolClient { + pc := mocks.NewMockProtocolClient() + parser := operationparser.New(pc.Protocol) + dc := doccomposer.New() + oa := operationapplier.New(pc.Protocol, parser, dc) + + pc.CasClient = mocks.NewMockCasClient(nil) + th := txnprovider.NewOperationHandler(pc.Protocol, pc.CasClient, compression.New(compression.WithDefaultAlgorithms()), + parser, &mocks.MetricsProvider{}) + + pv := mocks.GetProtocolVersion(pc.Protocol) + + pv.OperationParserReturns(parser) + pv.OperationApplierReturns(oa) + pv.DocumentComposerReturns(dc) + pv.OperationHandlerReturns(th) + pv.OperationParserReturns(parser) + + pc.CurrentVersion = pv + pc.Versions = []*mocks.ProtocolVersion{pv} + + return pc +} diff --git a/method/sidetreelongform/sidetree-core/commitment/hash.go b/method/sidetreelongform/sidetree-core/commitment/hash.go new file mode 100644 index 0000000..7a58e89 --- /dev/null +++ b/method/sidetreelongform/sidetree-core/commitment/hash.go @@ -0,0 +1,73 @@ +/* +Copyright SecureKey Technologies Inc. All Rights Reserved. 
+ +SPDX-License-Identifier: Apache-2.0 +*/ + +package commitment + +import ( + "fmt" + + "github.com/trustbloc/logutil-go/pkg/log" + + "github.com/trustbloc/did-go/doc/json/canonicalizer" + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/encoder" + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/hashing" + logfields "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/internal/log" + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/jws" +) + +var logger = log.New("sidetree-core-commitment") + +// GetCommitment will calculate commitment from JWK. +func GetCommitment(jwk *jws.JWK, multihashCode uint) (string, error) { + data, err := canonicalizer.MarshalCanonical(jwk) + if err != nil { + return "", err + } + + logger.Debug("Calculating commitment from JWK", logfields.WithData(data)) + + hash, err := hashing.GetHashFromMultihash(multihashCode) + if err != nil { + return "", err + } + + dataHash, err := hashing.GetHash(hash, data) + if err != nil { + return "", err + } + + multiHash, err := hashing.ComputeMultihash(multihashCode, dataHash) + if err != nil { + return "", err + } + + return encoder.EncodeToString(multiHash), nil +} + +// GetRevealValue will calculate reveal value from JWK. +func GetRevealValue(jwk *jws.JWK, multihashCode uint) (string, error) { + rv, err := hashing.CalculateModelMultihash(jwk, multihashCode) + if err != nil { + return "", fmt.Errorf("failed to get reveal value: %s", err.Error()) + } + + return rv, nil +} + +// GetCommitmentFromRevealValue will calculate commitment from reveal value. +func GetCommitmentFromRevealValue(rv string) (string, error) { + mh, err := hashing.GetMultihash(rv) + if err != nil { + return "", fmt.Errorf("failed to get commitment from reveal value (get multihash): %s", err.Error()) + } + + multiHash, err := hashing.ComputeMultihash(uint(mh.Code), mh.Digest) + if err != nil { + return "", fmt.Errorf("failed to get commitment from reveal value (compute multihash): %s", err.Error()) + } + + return encoder.EncodeToString(multiHash), nil +} diff --git a/method/sidetreelongform/sidetree-core/commitment/hash_test.go b/method/sidetreelongform/sidetree-core/commitment/hash_test.go new file mode 100644 index 0000000..c7a492e --- /dev/null +++ b/method/sidetreelongform/sidetree-core/commitment/hash_test.go @@ -0,0 +1,115 @@ +/* +Copyright SecureKey Technologies Inc. All Rights Reserved. 
+ +SPDX-License-Identifier: Apache-2.0 +*/ + +package commitment + +import ( + "testing" + + "github.com/stretchr/testify/require" + + "github.com/trustbloc/did-go/doc/json/canonicalizer" + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/jws" +) + +const ( + sha2_256 uint = 18 // multihash code +) + +func TestGetCommitment(t *testing.T) { + jwk := &jws.JWK{ + Crv: "crv", + Kty: "kty", + X: "x", + Y: "y", + } + + t.Run("success", func(t *testing.T) { + commitment, err := GetCommitment(jwk, sha2_256) + require.NoError(t, err) + require.NotEmpty(t, commitment) + }) + + t.Run(" error - multihash not supported", func(t *testing.T) { + commitment, err := GetCommitment(jwk, 55) + require.Error(t, err) + require.Empty(t, commitment) + require.Contains(t, err.Error(), "algorithm not supported, unable to compute hash") + }) + + t.Run("error - canonicalization failed", func(t *testing.T) { + commitment, err := GetCommitment(nil, sha2_256) + require.Error(t, err) + require.Empty(t, commitment) + require.Contains(t, err.Error(), "Expected '{' but got 'n'") + }) + + t.Run("interop test", func(t *testing.T) { + jwk := &jws.JWK{ + Kty: "EC", + Crv: "secp256k1", + X: "5s3-bKjD1Eu_3NJu8pk7qIdOPl1GBzU_V8aR3xiacoM", + Y: "v0-Q5H3vcfAfQ4zsebJQvMrIg3pcsaJzRvuIYZ3_UOY", + } + + canonicalized, err := canonicalizer.MarshalCanonical(jwk) + require.NoError(t, err) + + //nolint:lll + expected := `{"crv":"secp256k1","kty":"EC","x":"5s3-bKjD1Eu_3NJu8pk7qIdOPl1GBzU_V8aR3xiacoM","y":"v0-Q5H3vcfAfQ4zsebJQvMrIg3pcsaJzRvuIYZ3_UOY"}` + require.Equal(t, string(canonicalized), expected) + }) +} + +func TestGetRevealValue(t *testing.T) { + jwk := &jws.JWK{ + Crv: "crv", + Kty: "kty", + X: "x", + Y: "y", + } + + t.Run("success", func(t *testing.T) { + rv, err := GetRevealValue(jwk, sha2_256) + require.NoError(t, err) + require.NotEmpty(t, rv) + }) + + t.Run("error - wrong multihash code", func(t *testing.T) { + rv, err := GetRevealValue(jwk, 55) + require.Error(t, err) + require.Empty(t, rv) + require.Contains(t, err.Error(), "failed to get reveal value: algorithm not supported, unable to compute hash") + }) +} + +func TestGetCommitmentFromRevealValue(t *testing.T) { + jwk := &jws.JWK{ + Crv: "crv", + Kty: "kty", + X: "x", + Y: "y", + } + + t.Run("success", func(t *testing.T) { + rv, err := GetRevealValue(jwk, sha2_256) + require.NoError(t, err) + + cFromRv, err := GetCommitmentFromRevealValue(rv) + require.NoError(t, err) + + c, err := GetCommitment(jwk, sha2_256) + require.NoError(t, err) + require.Equal(t, c, cFromRv) + }) + + t.Run("error - reveal value is not a multihash", func(t *testing.T) { + cFromRv, err := GetCommitmentFromRevealValue("reveal") + require.Error(t, err) + require.Empty(t, cFromRv) + require.Contains(t, err.Error(), "failed to get commitment from reveal value") + }) +} diff --git a/method/sidetreelongform/sidetree-core/compression/gzip/algorithm.go b/method/sidetreelongform/sidetree-core/compression/gzip/algorithm.go new file mode 100644 index 0000000..4f39d72 --- /dev/null +++ b/method/sidetreelongform/sidetree-core/compression/gzip/algorithm.go @@ -0,0 +1,74 @@ +/* +Copyright SecureKey Technologies Inc. All Rights Reserved. + +SPDX-License-Identifier: Apache-2.0 +*/ + +package gzip + +import ( + "bytes" + "compress/gzip" + "fmt" + "io" +) + +const algName = "GZIP" + +// Algorithm implements gzip compression/decompression. +type Algorithm struct { +} + +// New creates new gzip algorithm instance. 
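Aside: the relationship verified by TestGetCommitmentFromRevealValue above is that the commitment is the multihash of the hash of the canonicalized JWK, while the reveal value is the multihash of the JWK itself, so re-hashing the reveal value's digest reproduces the commitment. A standalone sketch (the key coordinates are placeholders, not a usable key):

```go
package main

import (
	"fmt"

	"github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/commitment"
	"github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/jws"
)

func main() {
	const sha2_256 = 18 // SHA-256 multihash code, as in the tests above

	key := &jws.JWK{Kty: "EC", Crv: "P-256", X: "x", Y: "y"}

	// Reveal value: multihash of the canonicalized key.
	rv, err := commitment.GetRevealValue(key, sha2_256)
	if err != nil {
		panic(err)
	}

	// Commitment computed directly from the key ...
	c1, err := commitment.GetCommitment(key, sha2_256)
	if err != nil {
		panic(err)
	}

	// ... and recomputed from the reveal value's digest.
	c2, err := commitment.GetCommitmentFromRevealValue(rv)
	if err != nil {
		panic(err)
	}

	fmt.Println(c1 == c2) // true
}
```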
+func New() *Algorithm { + return &Algorithm{} +} + +// Compress will compress data using gzip. +func (a *Algorithm) Compress(data []byte) ([]byte, error) { + var buf bytes.Buffer + zw := gzip.NewWriter(&buf) + + _, err := zw.Write(data) + if err != nil { + return nil, fmt.Errorf("failed to write data: %s", err.Error()) + } + + if err := zw.Close(); err != nil { + return nil, fmt.Errorf("failed to close writer: %s", err.Error()) + } + + return buf.Bytes(), nil +} + +// Decompress will decompress compressed data. +func (a *Algorithm) Decompress(data []byte) ([]byte, error) { + buf := bytes.NewBuffer(data) + + zr, err := gzip.NewReader(buf) + if err != nil { + return nil, fmt.Errorf("failed to create new reader: %s", err.Error()) + } + + zrBytes, err := io.ReadAll(zr) + if err != nil { + return nil, fmt.Errorf("failed to read compressed data: %s", err.Error()) + } + + if err := zr.Close(); err != nil { + return nil, fmt.Errorf("failed to close reader: %s", err.Error()) + } + + return zrBytes, nil +} + +// Accept algorithm. +func (a *Algorithm) Accept(alg string) bool { + return alg == algName +} + +// Close closes open resources. +func (a *Algorithm) Close() error { + // nothing to do for gzip + return nil +} diff --git a/method/sidetreelongform/sidetree-core/compression/gzip/algorithm_test.go b/method/sidetreelongform/sidetree-core/compression/gzip/algorithm_test.go new file mode 100644 index 0000000..ac8c530 --- /dev/null +++ b/method/sidetreelongform/sidetree-core/compression/gzip/algorithm_test.go @@ -0,0 +1,77 @@ +/* +Copyright SecureKey Technologies Inc. All Rights Reserved. + +SPDX-License-Identifier: Apache-2.0 +*/ + +package gzip + +import ( + "testing" + + "github.com/stretchr/testify/require" +) + +func TestAlgorithm_Accept(t *testing.T) { + t.Run("success", func(t *testing.T) { + alg := New() + require.True(t, alg.Accept("GZIP")) + require.False(t, alg.Accept("other")) + }) +} + +func TestAlgorithm_Compress(t *testing.T) { + t.Run("success", func(t *testing.T) { + alg := New() + + test := []byte("test data") + compressed, err := alg.Compress(test) + require.NoError(t, err) + require.NotEmpty(t, compressed) + + data, err := alg.Decompress(compressed) + require.NoError(t, err) + require.NotEmpty(t, data) + require.Equal(t, data, test) + }) + t.Run("error reading header", func(t *testing.T) { + alg := New() + + test := []byte("hello data") + compressed, err := alg.Compress(test) + require.NoError(t, err) + require.NotEmpty(t, compressed) + }) +} + +func TestAlgorithm_Decompress(t *testing.T) { + t.Run("success", func(t *testing.T) { + alg := New() + + test := []byte("hello world") + compressed, err := alg.Compress(test) + require.NoError(t, err) + require.NotEmpty(t, compressed) + + data, err := alg.Decompress(compressed) + require.NoError(t, err) + require.NotEmpty(t, data) + require.Equal(t, data, test) + }) + t.Run("error - data not compressed", func(t *testing.T) { + alg := New() + + test := []byte("test data") + data, err := alg.Decompress(test) + require.Error(t, err) + require.Empty(t, data) + require.Contains(t, err.Error(), "unexpected EOF") + }) +} + +func TestAlgorithm_Close(t *testing.T) { + t.Run("success", func(t *testing.T) { + alg := New() + require.NoError(t, alg.Close()) + }) +} diff --git a/method/sidetreelongform/sidetree-core/compression/registry.go b/method/sidetreelongform/sidetree-core/compression/registry.go new file mode 100644 index 0000000..b90d7f1 --- /dev/null +++ b/method/sidetreelongform/sidetree-core/compression/registry.go @@ -0,0 +1,110 @@ +/* 
+Copyright SecureKey Technologies Inc. All Rights Reserved. + +SPDX-License-Identifier: Apache-2.0 +*/ + +package compression + +import ( + "fmt" + + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/compression/gzip" +) + +// Option is a registry instance option. +type Option func(opts *Registry) + +// Registry contains compression algorithms. +type Registry struct { + algorithms []Algorithm +} + +// Algorithm defines compression/decompression algorithm functionality. +type Algorithm interface { + Compress(value []byte) ([]byte, error) + Decompress(value []byte) ([]byte, error) + Accept(alg string) bool + Close() error +} + +// New return new instance of compression algorithm registry. +func New(opts ...Option) *Registry { + registry := &Registry{} + + // apply options + for _, opt := range opts { + opt(registry) + } + + return registry +} + +// Compress data using specified algorithm. +func (r *Registry) Compress(alg string, data []byte) ([]byte, error) { + // resolve compression algorithm + algorithm, err := r.resolveAlgorithm(alg) + if err != nil { + return nil, err + } + + // compress data using specified algorithm + result, err := algorithm.Compress(data) + if err != nil { + return nil, fmt.Errorf("compression failed for algorithm[%s]: %s", alg, err.Error()) + } + + return result, nil +} + +// Decompress will decompress compressed data using specified algorithm. +func (r *Registry) Decompress(alg string, data []byte) ([]byte, error) { + // resolve compression algorithm + algorithm, err := r.resolveAlgorithm(alg) + if err != nil { + return nil, err + } + + // decompress data using specified algorithm + result, err := algorithm.Decompress(data) + if err != nil { + return nil, fmt.Errorf("decompression failed for alg[%s]: %s", alg, err.Error()) + } + + return result, nil +} + +// Close frees resources being maintained by compression algorithm. +func (r *Registry) Close() error { + for _, v := range r.algorithms { + if err := v.Close(); err != nil { + return fmt.Errorf("close algorithm: %w", err) + } + } + + return nil +} + +func (r *Registry) resolveAlgorithm(alg string) (Algorithm, error) { + for _, v := range r.algorithms { + if v.Accept(alg) { + return v, nil + } + } + + return nil, fmt.Errorf("compression algorithm '%s' not supported", alg) +} + +// WithAlgorithm adds compression algorithm to the list of available algorithms. +func WithAlgorithm(alg Algorithm) Option { + return func(opts *Registry) { + opts.algorithms = append(opts.algorithms, alg) + } +} + +// WithDefaultAlgorithms adds default compression algorithms to the list of available algorithms. +func WithDefaultAlgorithms() Option { + return func(opts *Registry) { + opts.algorithms = append(opts.algorithms, gzip.New()) + } +} diff --git a/method/sidetreelongform/sidetree-core/compression/registry_test.go b/method/sidetreelongform/sidetree-core/compression/registry_test.go new file mode 100644 index 0000000..0b3743b --- /dev/null +++ b/method/sidetreelongform/sidetree-core/compression/registry_test.go @@ -0,0 +1,151 @@ +/* +Copyright SecureKey Technologies Inc. All Rights Reserved. 
+ +SPDX-License-Identifier: Apache-2.0 +*/ + +package compression + +import ( + "errors" + "testing" + + "github.com/stretchr/testify/require" + + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/compression/gzip" +) + +const algGZIP = "GZIP" + +func TestNew(t *testing.T) { + t.Run("test new success", func(t *testing.T) { + registry := New() + require.NotNil(t, registry) + }) + t.Run("test new with gzip algorithm option", func(t *testing.T) { + registry := New(WithAlgorithm(gzip.New())) + require.NotNil(t, registry) + }) + t.Run("test new with default algorithms", func(t *testing.T) { + registry := New(WithDefaultAlgorithms()) + require.NotNil(t, registry) + }) +} + +func TestRegistry_Compress(t *testing.T) { + t.Run("success", func(t *testing.T) { + registry := New(WithAlgorithm(gzip.New())) + + test := []byte("hello world") + compressed, err := registry.Compress(algGZIP, test) + require.NoError(t, err) + require.NotEmpty(t, compressed) + + data, err := registry.Decompress(algGZIP, compressed) + require.NoError(t, err) + require.NotEmpty(t, data) + require.Equal(t, data, test) + }) + + t.Run("error - algorithm not supported", func(t *testing.T) { + registry := New() + + test := []byte("test data") + compressed, err := registry.Compress(algGZIP, test) + require.Error(t, err) + require.Empty(t, compressed) + require.Contains(t, err.Error(), "compression algorithm 'GZIP' not supported") + }) + + t.Run("error - compression error", func(t *testing.T) { + registry := New(WithAlgorithm(&mockAlgorithm{CompressErr: errors.New("test error")})) + + test := []byte("test data") + compressed, err := registry.Compress(algGZIP, test) + require.Error(t, err) + require.Empty(t, compressed) + require.Contains(t, err.Error(), "test error") + }) +} + +func TestRegistry_Decompress(t *testing.T) { + t.Run("success", func(t *testing.T) { + registry := New(WithAlgorithm(gzip.New())) + + test := []byte("hello world") + compressed, err := registry.Compress(algGZIP, test) + require.NoError(t, err) + require.NotEmpty(t, compressed) + + data, err := registry.Decompress(algGZIP, compressed) + require.NoError(t, err) + require.NotEmpty(t, data) + require.Equal(t, data, test) + }) + + t.Run("error - algorithm not supported", func(t *testing.T) { + registry := New() + + test := []byte("test data") + data, err := registry.Decompress("alg", test) + require.Error(t, err) + require.Empty(t, data) + require.Contains(t, err.Error(), "compression algorithm 'alg' not supported") + }) + + t.Run("error - compression error", func(t *testing.T) { + registry := New(WithAlgorithm(&mockAlgorithm{DecompressErr: errors.New("test error")})) + + test := []byte("test data") + compressed, err := registry.Decompress("mock", test) + require.Error(t, err) + require.Empty(t, compressed) + require.Contains(t, err.Error(), "test error") + }) +} + +func TestRegistry_Close(t *testing.T) { + t.Run("success", func(t *testing.T) { + registry := New(WithAlgorithm(gzip.New()), WithAlgorithm(&mockAlgorithm{})) + + require.NoError(t, registry.Close()) + }) + t.Run("success", func(t *testing.T) { + registry := New(WithAlgorithm(&mockAlgorithm{CloseErr: errors.New("close error")})) + require.Error(t, registry.Close()) + }) +} + +type mockAlgorithm struct { + CompressErr error + DecompressErr error + CloseErr error +} + +// Compress will mock compressing data. 
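Aside: end-to-end use of the registry with the default algorithm set. WithDefaultAlgorithms registers only GZIP in this patch, so "GZIP" is the one algorithm name the registry accepts out of the box; the main wrapper is illustrative.

```go
package main

import (
	"fmt"

	"github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/compression"
)

func main() {
	registry := compression.New(compression.WithDefaultAlgorithms())

	defer func() {
		_ = registry.Close() // gzip's Close is a no-op, but honour the contract
	}()

	compressed, err := registry.Compress("GZIP", []byte("hello world"))
	if err != nil {
		panic(err)
	}

	original, err := registry.Decompress("GZIP", compressed)
	if err != nil {
		panic(err)
	}

	fmt.Println(string(original)) // hello world
}
```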
+func (m *mockAlgorithm) Compress(data []byte) ([]byte, error) { + if m.CompressErr != nil { + return nil, m.CompressErr + } + + return data, nil +} + +// Decompress will mock decompressing compressed data. +func (m *mockAlgorithm) Decompress(data []byte) ([]byte, error) { + if m.DecompressErr != nil { + return nil, m.DecompressErr + } + + return data, nil +} + +// Accept algorithm. +func (m *mockAlgorithm) Accept(alg string) bool { + return true +} + +// Close will close resources. +func (m *mockAlgorithm) Close() error { + return m.CloseErr +} diff --git a/method/sidetreelongform/sidetree-core/dochandler/handler.go b/method/sidetreelongform/sidetree-core/dochandler/handler.go new file mode 100644 index 0000000..4764ce0 --- /dev/null +++ b/method/sidetreelongform/sidetree-core/dochandler/handler.go @@ -0,0 +1,641 @@ +/* +Copyright SecureKey Technologies Inc. All Rights Reserved. + +SPDX-License-Identifier: Apache-2.0 +*/ + +// Package dochandler performs document operation processing and document resolution. +// +// During operation processing it will use configured validator to validate document operation and then it will call +// batch writer to add it to the batch. +// +// Document resolution is based on ID or encoded original document. +// 1) ID - the latest document will be returned if found. +// +// 2) Encoded original document - The encoded document is hashed using the current supported hashing algorithm to +// compute ID, after which the resolution is done against the computed ID. If a document cannot be found, +// the supplied document is used directly to generate and return a resolved document. In this case the supplied document +// is subject to the same validation as an original document in a create operation. +package dochandler + +import ( + "errors" + "fmt" + "strings" + "time" + + "github.com/trustbloc/logutil-go/pkg/log" + + "github.com/trustbloc/did-go/doc/json/canonicalizer" + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/api/operation" + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/api/protocol" + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/document" + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/docutil" + logfields "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/internal/log" +) + +var logger = log.New("sidetree-core-dochandler") + +const ( + keyID = "id" + + badRequest = "bad request" +) + +// DocumentHandler implements document handler. +type DocumentHandler struct { + protocol protocol.Client + processor operationProcessor + decorator operationDecorator + writer batchWriter + namespace string + aliases []string // namespace aliases + domain string + label string + + unpublishedOperationStore unpublishedOperationStore + unpublishedOperationTypes []operation.Type + + metrics metricsProvider +} + +type unpublishedOperationStore interface { + // Put saves operation into unpublished operation store. + Put(op *operation.AnchoredOperation) error + // Delete deletes operation from unpublished operation store. + Delete(op *operation.AnchoredOperation) error +} + +// operationDecorator is an interface for validating/pre-processing operations. +type operationDecorator interface { + Decorate(operation *operation.Operation) (*operation.Operation, error) +} + +// operationProcessor is an interface which resolves the document based on the unique suffix. 
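Aside: WithOperationDecorator (defined further below) accepts any value with a matching Decorate method, since the operationDecorator interface is satisfied structurally. A hypothetical pass-through decorator that adds one business check might look like this; auditDecorator is not part of this patch.

```go
package example

import (
	"errors"

	"github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/api/operation"
)

// auditDecorator rejects operations without a unique suffix and passes
// everything else through unchanged.
type auditDecorator struct{}

func (d *auditDecorator) Decorate(op *operation.Operation) (*operation.Operation, error) {
	if op.UniqueSuffix == "" {
		return nil, errors.New("operation is missing its unique suffix")
	}

	return op, nil
}
```

Note that supplying a decorator replaces the defaultOperationDecorator, so a replacement must perform, or deliberately skip, the default pre-processing checks.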
+type operationProcessor interface { + Resolve(uniqueSuffix string, opts ...document.ResolutionOption) (*protocol.ResolutionModel, error) +} + +// batchWriter is an interface to add an operation to the batch. +type batchWriter interface { + Add(operation *operation.QueuedOperation, protocolVersion uint64) error +} + +// Option is an option for document handler. +type Option func(opts *DocumentHandler) + +// WithDomain sets optional domain hint for unpublished/interim documents. +func WithDomain(domain string) Option { + return func(opts *DocumentHandler) { + opts.domain = domain + } +} + +// WithLabel sets optional label for unpublished/interim documents. +func WithLabel(label string) Option { + return func(opts *DocumentHandler) { + opts.label = label + } +} + +// WithUnpublishedOperationStore stores unpublished operation into unpublished operation store. +func WithUnpublishedOperationStore(store unpublishedOperationStore, operationTypes []operation.Type) Option { + return func(opts *DocumentHandler) { + opts.unpublishedOperationStore = store + opts.unpublishedOperationTypes = operationTypes + } +} + +// WithOperationDecorator sets an optional operation decorator (used for additional business validation/pre-processing). +func WithOperationDecorator(decorator operationDecorator) Option { + return func(opts *DocumentHandler) { + opts.decorator = decorator + } +} + +type metricsProvider interface { + ProcessOperation(duration time.Duration) + GetProtocolVersionTime(since time.Duration) + ParseOperationTime(since time.Duration) + ValidateOperationTime(since time.Duration) + DecorateOperationTime(since time.Duration) + AddUnpublishedOperationTime(since time.Duration) + AddOperationToBatchTime(since time.Duration) + GetCreateOperationResultTime(since time.Duration) +} + +// New creates a new document handler with the context. +func New(namespace string, aliases []string, pc protocol.Client, writer batchWriter, processor operationProcessor, + metrics metricsProvider, opts ...Option) *DocumentHandler { + dh := &DocumentHandler{ + protocol: pc, + processor: processor, + decorator: &defaultOperationDecorator{processor: processor}, + writer: writer, + namespace: namespace, + aliases: aliases, + metrics: metrics, + unpublishedOperationStore: &noopUnpublishedOpsStore{}, + unpublishedOperationTypes: []operation.Type{}, + } + + // apply options + for _, opt := range opts { + opt(dh) + } + + return dh +} + +// Namespace returns the namespace of the document handler. +func (r *DocumentHandler) Namespace() string { + return r.namespace +} + +// ProcessOperation validates operation and adds it to the batch. 
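Aside: a minimal in-memory sketch of the unpublishedOperationStore contract used by WithUnpublishedOperationStore above. memUnpublishedStore is hypothetical; a production store would persist operations and coordinate across processes.

```go
package example

import (
	"sync"

	"github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/api/operation"
)

// memUnpublishedStore keeps unpublished operations in a map keyed by suffix.
type memUnpublishedStore struct {
	mu  sync.Mutex
	ops map[string]*operation.AnchoredOperation
}

func newMemUnpublishedStore() *memUnpublishedStore {
	return &memUnpublishedStore{ops: make(map[string]*operation.AnchoredOperation)}
}

// Put saves an operation that has been batched but not yet anchored.
func (s *memUnpublishedStore) Put(op *operation.AnchoredOperation) error {
	s.mu.Lock()
	defer s.mu.Unlock()

	s.ops[op.UniqueSuffix] = op

	return nil
}

// Delete removes an operation, e.g. after adding it to the batch failed.
func (s *memUnpublishedStore) Delete(op *operation.AnchoredOperation) error {
	s.mu.Lock()
	defer s.mu.Unlock()

	delete(s.ops, op.UniqueSuffix)

	return nil
}
```

Such a store would be wired in via WithUnpublishedOperationStore(newMemUnpublishedStore(), []operation.Type{operation.TypeCreate}).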
+// +//nolint:funlen +func (r *DocumentHandler) ProcessOperation( + operationBuffer []byte, protocolVersion uint64) (*document.ResolutionResult, error) { + startTime := time.Now() + + defer func() { + r.metrics.ProcessOperation(time.Since(startTime)) + }() + + getProtocolVersionTime := time.Now() + + pv, err := r.protocol.Get(protocolVersion) + if err != nil { + return nil, err + } + + r.metrics.GetProtocolVersionTime(time.Since(getProtocolVersionTime)) + + parseOperationStartTime := time.Now() + + op, err := pv.OperationParser().Parse(r.namespace, operationBuffer) + if err != nil { + return nil, fmt.Errorf("%s: %s", badRequest, err.Error()) + } + + r.metrics.ParseOperationTime(time.Since(parseOperationStartTime)) + + validateOperationStartTime := time.Now() + + // perform validation for operation request + err = r.validateOperation(op, pv) + if err != nil { + return nil, fmt.Errorf("%s: %s", badRequest, err.Error()) + } + + r.metrics.ValidateOperationTime(time.Since(validateOperationStartTime)) + + decorateOperationStartTime := time.Now() + + op, err = r.decorator.Decorate(op) + if err != nil { + return nil, fmt.Errorf("%s: %s", badRequest, err.Error()) + } + + r.metrics.DecorateOperationTime(time.Since(decorateOperationStartTime)) + + unpublishedOp := r.getUnpublishedOperation(op, pv) + + addUnpublishedOperationStartTime := time.Now() + + err = r.addOperationToUnpublishedOpsStore(unpublishedOp) + if err != nil { + return nil, fmt.Errorf( + "failed to add operation for suffix[%s] to unpublished operation store: %s", + op.UniqueSuffix, err.Error()) + } + + r.metrics.AddUnpublishedOperationTime(time.Since(addUnpublishedOperationStartTime)) + + addToBatchStartTime := time.Now() + + // validated operation will be added to the batch + if err := r.addToBatch(op, pv.Protocol().GenesisTime); err != nil { + logger.Error("Failed to add operation to batch", log.WithError(err)) + + r.deleteOperationFromUnpublishedOpsStore(unpublishedOp) + + return nil, err + } + + r.metrics.AddOperationToBatchTime(time.Since(addToBatchStartTime)) + + logger.Debug("Operation added to the batch", logfields.WithOperationID(op.ID)) + + // create operation will also return document + if op.Type == operation.TypeCreate { + return r.getCreateResponse(op, pv) + } + + return nil, nil +} + +func (r *DocumentHandler) getUnpublishedOperation( + op *operation.Operation, pv protocol.Version) *operation.AnchoredOperation { + if !contains(r.unpublishedOperationTypes, op.Type) { + return nil + } + + return &operation.AnchoredOperation{ + Type: op.Type, + UniqueSuffix: op.UniqueSuffix, + OperationRequest: op.OperationRequest, + TransactionTime: uint64(time.Now().Unix()), + ProtocolVersion: pv.Protocol().GenesisTime, + AnchorOrigin: op.AnchorOrigin, + } +} + +func (r *DocumentHandler) addOperationToUnpublishedOpsStore(unpublishedOp *operation.AnchoredOperation) error { + if unpublishedOp == nil { + // nothing to do + return nil + } + + return r.unpublishedOperationStore.Put(unpublishedOp) +} + +func (r *DocumentHandler) deleteOperationFromUnpublishedOpsStore(unpublishedOp *operation.AnchoredOperation) { + if unpublishedOp == nil { + // nothing to do + return + } + + err := r.unpublishedOperationStore.Delete(unpublishedOp) + if err != nil { + logger.Warn("Failed to delete operation from unpublished store", log.WithError(err)) + } +} + +func contains(values []operation.Type, value operation.Type) bool { + for _, v := range values { + if v == value { + return true + } + } + + return false +} + +//nolint:golint +func GetCreateResult(op 
*operation.Operation, pv protocol.Version) (*protocol.ResolutionModel, error) {
+	// we can use operation applier to generate create response even though operation is not anchored yet
+	anchored := &operation.AnchoredOperation{
+		Type:             op.Type,
+		UniqueSuffix:     op.UniqueSuffix,
+		OperationRequest: op.OperationRequest,
+		TransactionTime:  uint64(time.Now().Unix()),
+		ProtocolVersion:  pv.Protocol().GenesisTime,
+		AnchorOrigin:     op.AnchorOrigin,
+	}
+
+	rm := &protocol.ResolutionModel{UnpublishedOperations: []*operation.AnchoredOperation{anchored}}
+
+	rm, err := pv.OperationApplier().Apply(anchored, rm)
+	if err != nil {
+		return nil, err
+	}
+
+	// if returned document is empty (e.g. applying patches failed) we can reject this request at API level
+	if len(rm.Doc.JSONLdObject()) == 0 {
+		return nil, errors.New("applying delta resulted in an empty document (most likely due to an invalid patch)")
+	}
+
+	return rm, nil
+}
+
+func (r *DocumentHandler) getCreateResponse(
+	op *operation.Operation, pv protocol.Version) (*document.ResolutionResult, error) {
+	startTime := time.Now()
+
+	defer func() {
+		r.metrics.GetCreateOperationResultTime(time.Since(startTime))
+	}()
+
+	rm, err := GetCreateResult(op, pv)
+	if err != nil {
+		return nil, err
+	}
+
+	ti := GetTransformationInfoForUnpublished(r.namespace, r.domain, r.label, op.UniqueSuffix, "")
+
+	return pv.DocumentTransformer().TransformDocument(rm, ti)
+}
+
+// GetTransformationInfoForUnpublished will create transformation info object for unpublished document.
+func GetTransformationInfoForUnpublished(
+	namespace, domain, label, suffix, createRequestJCS string) protocol.TransformationInfo {
+	ti := make(protocol.TransformationInfo)
+	ti[document.PublishedProperty] = false
+
+	id := fmt.Sprintf("%s:%s", namespace, suffix)
+
+	// For interim/unpublished documents we should set optional label if specified.
+	if label != "" {
+		id = fmt.Sprintf("%s:%s:%s", namespace, label, suffix)
+	}
+
+	var equivalentIDs []string
+
+	if createRequestJCS != "" {
+		// we should always set short form equivalent id for long form resolution
+		equivalentIDs = append(equivalentIDs, id)
+	}
+
+	// Also, if optional domain is specified, we should set equivalent id with domain hint
+	if label != "" && domain != "" {
+		equivalentID := id
+		if !strings.Contains(label, domain) {
+			equivalentID = fmt.Sprintf("%s:%s:%s:%s", namespace, domain, label, suffix)
+		}
+
+		equivalentIDs = append(equivalentIDs, equivalentID)
+	}
+
+	if len(equivalentIDs) > 0 {
+		ti[document.EquivalentIDProperty] = equivalentIDs
+	}
+
+	if createRequestJCS != "" {
+		id = fmt.Sprintf("%s:%s", id, createRequestJCS)
+	}
+
+	ti[document.IDProperty] = id
+
+	return ti
+}
+
+// ResolveDocument fetches the latest DID Document of a DID. Two forms of string can be passed in the URI:
+//
+// 1. Standard DID format: did:METHOD:<did-suffix>
+//
+// 2. Long Form DID format:
+// did:METHOD:<did-suffix>:Base64url(JCS({suffix-data-object, delta-object}))
+//
+// Standard resolution is performed if the DID is found to be registered on the anchoring system.
+// If the DID Document cannot be found, the <suffix-data-object> and <delta-object> are used
+// to generate and return the resolved DID Document. In this case the supplied delta and suffix objects
+// are subject to the same validation as during processing of a create operation.
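+//
+// A short usage sketch (illustrative; the suffix and encoded create request below
+// are placeholders, not real values):
+//
+//	// short form - resolved from published (anchored) operations
+//	result, err := handler.ResolveDocument("did:example:<did-suffix>")
+//
+//	// long form - resolvable before the create operation is anchored
+//	result, err = handler.ResolveDocument("did:example:<did-suffix>:<encoded-create-request>")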
+func (r *DocumentHandler) ResolveDocument(shortOrLongFormDID string, + opts ...document.ResolutionOption) (*document.ResolutionResult, error) { + ns, err := r.getNamespace(shortOrLongFormDID) + if err != nil { + return nil, fmt.Errorf("%s: %s", badRequest, err.Error()) + } + + pv, err := r.protocol.Current() + if err != nil { + return nil, err + } + + // extract did and optional initial document value + shortFormDID, createReq, err := pv.OperationParser().ParseDID(ns, shortOrLongFormDID) + if err != nil { + return nil, fmt.Errorf("%s: %s", badRequest, err.Error()) + } + + uniquePortion, err := getSuffix(ns, shortFormDID) + if err != nil { + return nil, fmt.Errorf("%s: %s", badRequest, err.Error()) + } + + // resolve document from the blockchain + doc, err := r.resolveRequestWithID(shortFormDID, uniquePortion, pv, opts...) + if err == nil { + return doc, nil + } + + // if document was not found on the blockchain and initial value has been provided resolve using initial value + if createReq != nil && strings.Contains(err.Error(), "not found") { + return r.resolveRequestWithInitialState(uniquePortion, shortOrLongFormDID, createReq, pv) + } + + return nil, err +} + +func (r *DocumentHandler) getNamespace(shortOrLongFormDID string) (string, error) { + // check aliases first (if configured) + for _, ns := range r.aliases { + if strings.HasPrefix(shortOrLongFormDID, ns+docutil.NamespaceDelimiter) { + return ns, nil + } + } + + // check namespace + if strings.HasPrefix(shortOrLongFormDID, r.namespace+docutil.NamespaceDelimiter) { + return r.namespace, nil + } + + return "", fmt.Errorf("did must start with configured namespace[%s] or aliases%v", r.namespace, r.aliases) +} + +func (r *DocumentHandler) resolveRequestWithID(shortFormDid, uniquePortion string, pv protocol.Version, + opts ...document.ResolutionOption) (*document.ResolutionResult, error) { + internalResult, err := r.processor.Resolve(uniquePortion, opts...) + if err != nil { + logger.Debug("Failed to resolve uniquePortion", logfields.WithSuffix(uniquePortion), log.WithError(err)) + + return nil, err + } + + var ti protocol.TransformationInfo + + if len(internalResult.PublishedOperations) == 0 { + hint, err := GetHint(shortFormDid, r.namespace, uniquePortion) + if err != nil { + return nil, err + } + + ti = GetTransformationInfoForUnpublished(r.namespace, r.domain, hint, uniquePortion, "") + } else { + ti = GetTransformationInfoForPublished(r.namespace, shortFormDid, uniquePortion, internalResult) + } + + return pv.DocumentTransformer().TransformDocument(internalResult, ti) +} + +// GetHint returns hint from id. +func GetHint(id, namespace, suffix string) (string, error) { + posSuffix := strings.LastIndex(id, suffix) + if posSuffix == -1 { + return "", fmt.Errorf("invalid ID [%s]", id) + } + + if len(namespace)+1 > posSuffix-1 { + return "", nil + } + + hint := id[len(namespace)+1 : posSuffix-1] + + return hint, nil +} + +// GetTransformationInfoForPublished will create transformation info object for published document. 
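+//
+// A sketch of the resulting entries, assuming namespace "did:example", suffix "abc",
+// canonical reference "ref1" and equivalent reference "ref2" in the internal result:
+//
+//	ti[document.IDProperty]           // the id passed in
+//	ti[document.PublishedProperty]    // true
+//	ti[document.CanonicalIDProperty]  // "did:example:ref1:abc"
+//	ti[document.EquivalentIDProperty] // ["did:example:ref1:abc", "did:example:ref2:abc"]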
+func GetTransformationInfoForPublished(namespace, id, suffix string,
+	internalResult *protocol.ResolutionModel) protocol.TransformationInfo {
+	ti := make(protocol.TransformationInfo)
+	ti[document.IDProperty] = id
+	ti[document.PublishedProperty] = true
+
+	canonicalRef := ""
+	if internalResult.CanonicalReference != "" {
+		canonicalRef = docutil.NamespaceDelimiter + internalResult.CanonicalReference
+	}
+
+	canonicalID := namespace + canonicalRef + docutil.NamespaceDelimiter + suffix
+
+	// we should always set canonical id if document has been published
+	ti[document.CanonicalIDProperty] = canonicalID
+
+	equivalentIDs := []string{canonicalID}
+
+	if len(internalResult.EquivalentReferences) > 0 {
+		for _, eqRef := range internalResult.EquivalentReferences {
+			equivalentID := namespace + docutil.NamespaceDelimiter + eqRef + docutil.NamespaceDelimiter + suffix
+			equivalentIDs = append(equivalentIDs, equivalentID)
+		}
+	}
+
+	// equivalent ids should always include canonical id (if specified)
+	ti[document.EquivalentIDProperty] = equivalentIDs
+
+	return ti
+}
+
+func (r *DocumentHandler) resolveRequestWithInitialState(uniqueSuffix, longFormDID string, initialBytes []byte,
+	pv protocol.Version) (*document.ResolutionResult, error) {
+	op, err := pv.OperationParser().Parse(r.namespace, initialBytes)
+	if err != nil {
+		return nil, fmt.Errorf("%s: %s", badRequest, err.Error())
+	}
+
+	if uniqueSuffix != op.UniqueSuffix {
+		return nil, fmt.Errorf("%s: provided did doesn't match did created from initial state", badRequest)
+	}
+
+	rm, err := GetCreateResult(op, pv)
+	if err != nil {
+		return nil, err
+	}
+
+	docBytes, err := canonicalizer.MarshalCanonical(rm.Doc)
+	if err != nil {
+		return nil, err
+	}
+
+	err = pv.DocumentValidator().IsValidOriginalDocument(docBytes)
+	if err != nil {
+		return nil, fmt.Errorf("%s: validate initial document: %s", badRequest, err.Error())
+	}
+
+	createRequestJCS := longFormDID[strings.LastIndex(longFormDID, docutil.NamespaceDelimiter)+1:]
+
+	ti := GetTransformationInfoForUnpublished(r.namespace, r.domain, r.label, uniqueSuffix, createRequestJCS)
+
+	externalResult, err := pv.DocumentTransformer().TransformDocument(rm, ti)
+	if err != nil {
+		return nil, fmt.Errorf("failed to transform create with initial state to external document: %s", err.Error())
+	}
+
+	return externalResult, nil
+}
+
+// addToBatch is a helper for adding operations to the batch.
+func (r *DocumentHandler) addToBatch(op *operation.Operation, versionTime uint64) error {
+	return r.writer.Add(
+		&operation.QueuedOperation{
+			Type:             op.Type,
+			Namespace:        r.namespace,
+			UniqueSuffix:     op.UniqueSuffix,
+			OperationRequest: op.OperationRequest,
+			AnchorOrigin:     op.AnchorOrigin,
+			Properties:       op.Properties,
+		}, versionTime)
+}
+
+func (r *DocumentHandler) validateOperation(op *operation.Operation, pv protocol.Version) error {
+	if op.Type == operation.TypeCreate {
+		return r.validateCreateDocument(op, pv)
+	}
+
+	return pv.DocumentValidator().IsValidPayload(op.OperationRequest)
+}
+
+func (r *DocumentHandler) validateCreateDocument(op *operation.Operation, pv protocol.Version) error {
+	rm, err := GetCreateResult(op, pv)
+	if err != nil {
+		return err
+	}
+
+	docBytes, err := canonicalizer.MarshalCanonical(rm.Doc)
+	if err != nil {
+		return err
+	}
+
+	return pv.DocumentValidator().IsValidOriginalDocument(docBytes)
+}
+
+// getSuffix returns the unique portion of the ID (the string after the last namespace delimiter).
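+//
+// For example, given namespace "did:example":
+//
+//	getSuffix("did:example", "did:example:abc")      // "abc", nil
+//	getSuffix("did:example", "did:example:hint:abc") // "abc", nil
+//	getSuffix("did:example", "did:other:abc")        // error: did must start with configured namespace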
+func getSuffix(namespace, idOrDocument string) (string, error) { + ns := namespace + docutil.NamespaceDelimiter + + pos := strings.Index(idOrDocument, ns) + if pos == -1 { + return "", errors.New("did must start with configured namespace") + } + + lastDelimiter := strings.LastIndex(idOrDocument, docutil.NamespaceDelimiter) + + adjustedPos := lastDelimiter + 1 + if adjustedPos >= len(idOrDocument) { + return "", errors.New("did suffix is empty") + } + + return idOrDocument[adjustedPos:], nil +} + +type noopUnpublishedOpsStore struct { +} + +func (noop *noopUnpublishedOpsStore) Put(_ *operation.AnchoredOperation) error { + return nil +} + +func (noop *noopUnpublishedOpsStore) Delete(_ *operation.AnchoredOperation) error { + return nil +} + +type defaultOperationDecorator struct { + processor operationProcessor +} + +func (d *defaultOperationDecorator) Decorate(op *operation.Operation) (*operation.Operation, error) { + if op.Type != operation.TypeCreate { + internalResult, err := d.processor.Resolve(op.UniqueSuffix) + if err != nil { + logger.Debug("Failed to resolve suffix for operation", logfields.WithSuffix(op.UniqueSuffix), + logfields.WithOperationType(string(op.Type)), log.WithError(err)) + + return nil, err + } + + logger.Debug("Processor returned internal result for suffix", logfields.WithSuffix(op.UniqueSuffix), + logfields.WithOperationType(string(op.Type)), logfields.WithResolutionModel(internalResult)) + + if internalResult.Deactivated { + return nil, fmt.Errorf("document has been deactivated, no further operations are allowed") + } + + if op.Type == operation.TypeUpdate || op.Type == operation.TypeDeactivate { + op.AnchorOrigin = internalResult.AnchorOrigin + } + } + + return op, nil +} diff --git a/method/sidetreelongform/sidetree-core/dochandler/handler_test.go b/method/sidetreelongform/sidetree-core/dochandler/handler_test.go new file mode 100644 index 0000000..fec0af9 --- /dev/null +++ b/method/sidetreelongform/sidetree-core/dochandler/handler_test.go @@ -0,0 +1,1229 @@ +/* +Copyright SecureKey Technologies Inc. All Rights Reserved. 
+ +SPDX-License-Identifier: Apache-2.0 +*/ + +package dochandler + +import ( + "crypto/ecdsa" + "crypto/elliptic" + "crypto/rand" + "encoding/json" + "errors" + "fmt" + "testing" + + "github.com/stretchr/testify/require" + + "github.com/trustbloc/did-go/doc/json/canonicalizer" + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/api/cas" + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/api/operation" + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/api/protocol" + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/batch" + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/batch/cutter" + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/batch/opqueue" + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/commitment" + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/compression" + docmocks "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/dochandler/mocks" + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/document" + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/docutil" + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/encoder" + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/hashing" + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/jws" + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/mocks" + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/patch" + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/processor" + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/util/ecsigner" + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/util/pubkey" + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/versions/1_0/client" + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/versions/1_0/doccomposer" + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/versions/1_0/doctransformer/didtransformer" + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/versions/1_0/doctransformer/doctransformer" + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/versions/1_0/model" + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/versions/1_0/operationapplier" + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/versions/1_0/operationparser" + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/versions/1_0/txnprovider" +) + +//go:generate counterfeiter -o ./mocks/operationprocessor.gen.go --fake-name OperationProcessor . 
operationProcessor + +const ( + namespace = "did:sidetree" + alias = "did:domain.com" + + sha2_256 = 18 +) + +func TestDocumentHandler_New(t *testing.T) { + aliases := []string{"alias1", "alias2"} + dh := New(namespace, aliases, nil, nil, nil, &mocks.MetricsProvider{}) + require.Equal(t, namespace, dh.Namespace()) + require.Equal(t, aliases, dh.aliases) + require.Empty(t, dh.domain) + + const ( + domain = "domain.com" + label = "interim" + ) + + opDecorator := &mockOperationDecorator{} + + dh = New(namespace, nil, nil, nil, nil, &mocks.MetricsProvider{}, + WithLabel(label), WithDomain(domain), WithOperationDecorator(opDecorator)) + require.Equal(t, namespace, dh.Namespace()) + require.Equal(t, domain, dh.domain) + require.Equal(t, label, dh.label) + require.Equal(t, opDecorator, dh.decorator) +} + +func TestDocumentHandler_Protocol(t *testing.T) { + pc := newMockProtocolClient() + dh := New("", nil, pc, nil, nil, &mocks.MetricsProvider{}) + require.NotNil(t, dh) +} + +func TestDocumentHandler_ProcessOperation_Create(t *testing.T) { + dochandler, cleanup := getDocumentHandler(mocks.NewMockOperationStore(nil)) + require.NotNil(t, dochandler) + + defer cleanup() + + createOp := getCreateOperation() + + doc, err := dochandler.ProcessOperation(createOp.OperationRequest, 0) + require.NoError(t, err) + require.NotNil(t, doc) +} + +func TestDocumentHandler_DefaultDecorator(t *testing.T) { + t.Run("success - create", func(t *testing.T) { + processor := processor.New("test", mocks.NewMockOperationStore(nil), newMockProtocolClient()) + + decorator := &defaultOperationDecorator{processor: processor} + + updateOp := &operation.Operation{ + Type: operation.TypeCreate, + UniqueSuffix: "suffix", + } + + op, err := decorator.Decorate(updateOp) + require.NoError(t, err) + require.NotNil(t, op) + }) + t.Run("success - update", func(t *testing.T) { + store := mocks.NewMockOperationStore(nil) + + createOp := getCreateOperation() + + createOpBuffer, err := json.Marshal(createOp) + require.NoError(t, err) + + err = store.Put(&operation.AnchoredOperation{ + UniqueSuffix: createOp.UniqueSuffix, + Type: operation.TypeCreate, + OperationRequest: createOpBuffer}) + require.NoError(t, err) + + processor := processor.New("test", store, newMockProtocolClient()) + + decorator := &defaultOperationDecorator{processor: processor} + + updateOp := &operation.Operation{ + Type: operation.TypeUpdate, + UniqueSuffix: createOp.UniqueSuffix, + } + + op, err := decorator.Decorate(updateOp) + require.NoError(t, err) + require.NotNil(t, op) + require.Equal(t, op.AnchorOrigin, createOp.AnchorOrigin) + }) + + t.Run("error - processor error", func(t *testing.T) { + processor := &docmocks.OperationProcessor{} + processor.ResolveReturns(nil, fmt.Errorf("processor error")) + + decorator := &defaultOperationDecorator{processor: processor} + + updateOp := &operation.Operation{ + Type: operation.TypeUpdate, + UniqueSuffix: "suffix", + } + + op, err := decorator.Decorate(updateOp) + require.Error(t, err) + require.Nil(t, op) + require.Contains(t, err.Error(), "processor error") + }) + + t.Run("error - document has been deactivated, no further operations allowed", func(t *testing.T) { + rm := &protocol.ResolutionModel{ + Deactivated: true, + } + + processor := &docmocks.OperationProcessor{} + processor.ResolveReturns(rm, nil) + + decorator := &defaultOperationDecorator{processor: processor} + + updateOp := &operation.Operation{ + Type: operation.TypeUpdate, + UniqueSuffix: "suffix", + } + + op, err := decorator.Decorate(updateOp) + 
require.Error(t, err)
+		require.Nil(t, op)
+		require.Contains(t, err.Error(), "document has been deactivated, no further operations are allowed")
+	})
+}
+
+func TestDocumentHandler_ProcessOperation_Update(t *testing.T) {
+	t.Run("success", func(t *testing.T) {
+		store := mocks.NewMockOperationStore(nil)
+
+		dochandler, cleanup := getDocumentHandler(store)
+		require.NotNil(t, dochandler)
+		defer cleanup()
+
+		createOp := getCreateOperation()
+
+		createOpBuffer, err := json.Marshal(createOp)
+		require.NoError(t, err)
+
+		updateOp, err := generateUpdateOperation(createOp.UniqueSuffix)
+		require.NoError(t, err)
+
+		err = store.Put(&operation.AnchoredOperation{
+			UniqueSuffix: createOp.UniqueSuffix, Type: operation.TypeCreate, OperationRequest: createOpBuffer})
+		require.NoError(t, err)
+
+		doc, err := dochandler.ProcessOperation(updateOp, 0)
+		require.NoError(t, err)
+		require.Nil(t, doc)
+	})
+
+	t.Run("success - unpublished operation store option", func(t *testing.T) {
+		store := mocks.NewMockOperationStore(nil)
+
+		opt := WithUnpublishedOperationStore(&mockUnpublishedOpsStore{}, []operation.Type{operation.TypeUpdate})
+
+		dochandler, cleanup := getDocumentHandler(store, opt)
+		require.NotNil(t, dochandler)
+		defer cleanup()
+
+		createOp := getCreateOperation()
+
+		createOpBuffer, err := json.Marshal(createOp)
+		require.NoError(t, err)
+
+		updateOp, err := generateUpdateOperation(createOp.UniqueSuffix)
+		require.NoError(t, err)
+
+		err = store.Put(&operation.AnchoredOperation{
+			UniqueSuffix: createOp.UniqueSuffix, Type: operation.TypeCreate, OperationRequest: createOpBuffer})
+		require.NoError(t, err)
+
+		doc, err := dochandler.ProcessOperation(updateOp, 0)
+		require.NoError(t, err)
+		require.Nil(t, doc)
+	})
+
+	t.Run("success - unpublished operation store option(create and update)", func(t *testing.T) {
+		store := mocks.NewMockOperationStore(nil)
+
+		createOp := getCreateOperation()
+
+		updateOp, err := generateUpdateOperation(createOp.UniqueSuffix)
+		require.NoError(t, err)
+
+		unpublishedOperationStore := &mockUnpublishedOpsStore{
+			Ops: []*operation.AnchoredOperation{
+				{
+					Type:             "create",
+					OperationRequest: createOp.OperationRequest,
+					UniqueSuffix:     createOp.UniqueSuffix,
+				},
+			},
+		}
+
+		protocol := newMockProtocolClient()
+
+		processor := processor.New("test",
+			store, protocol, processor.WithUnpublishedOperationStore(unpublishedOperationStore))
+
+		ctx := &BatchContext{
+			ProtocolClient: protocol,
+			CasClient:      mocks.NewMockCasClient(nil),
+			AnchorWriter:   mocks.NewMockAnchorWriter(nil),
+			OpQueue:        &opqueue.MemQueue{},
+		}
+		writer, err := batch.New("test", ctx)
+		if err != nil {
+			panic(err)
+		}
+
+		// start go routine for cutting batches
+		writer.Start()
+
+		dochandler, cleanup := New(namespace, []string{alias}, protocol, writer, processor, &mocks.MetricsProvider{},
+			WithUnpublishedOperationStore(
+				unpublishedOperationStore,
+				[]operation.Type{operation.TypeCreate, operation.TypeUpdate})), func() { writer.Stop() }
+		require.NotNil(t, dochandler)
+		defer cleanup()
+
+		doc, err := dochandler.ProcessOperation(updateOp, 0)
+		require.NoError(t, err)
+		require.Nil(t, doc)
+
+		doc, err = dochandler.ResolveDocument(createOp.ID)
+		require.NoError(t, err)
+		require.NotNil(t, doc)
+
+		idWithHint := namespace + ":domain.com:" + createOp.UniqueSuffix
+
+		_, err = dochandler.ResolveDocument(idWithHint)
+		require.NoError(t, err)
+	})
+
+	t.Run("error - update without unpublished/published create", func(t *testing.T) {
+		store := mocks.NewMockOperationStore(nil)
+
+		createOp := 
getCreateOperation() + + updateOp, err := generateUpdateOperation(createOp.UniqueSuffix) + require.NoError(t, err) + + unpublishedOperationStore := &mockUnpublishedOpsStore{} + + protocol := newMockProtocolClient() + + processor := processor.New("test", + store, protocol, processor.WithUnpublishedOperationStore(unpublishedOperationStore)) + + ctx := &BatchContext{ + ProtocolClient: protocol, + CasClient: mocks.NewMockCasClient(nil), + AnchorWriter: mocks.NewMockAnchorWriter(nil), + OpQueue: &opqueue.MemQueue{}, + } + writer, err := batch.New("test", ctx) + if err != nil { + panic(err) + } + + // start go routine for cutting batches + writer.Start() + + dochandler, cleanup := New(namespace, []string{alias}, protocol, writer, processor, &mocks.MetricsProvider{}, + WithUnpublishedOperationStore( + unpublishedOperationStore, + []operation.Type{operation.TypeCreate, operation.TypeUpdate})), func() { writer.Stop() } + require.NotNil(t, dochandler) + defer cleanup() + + doc, err := dochandler.ProcessOperation(updateOp, 0) + require.Error(t, err) + require.Nil(t, doc) + require.Contains(t, err.Error(), "bad request: create operation not found") + }) + + t.Run("error - batch writer error (unpublished operation store option)", func(t *testing.T) { + store := mocks.NewMockOperationStore(nil) + + opt := WithUnpublishedOperationStore(&noopUnpublishedOpsStore{}, []operation.Type{operation.TypeUpdate}) + + dochandler, cleanup := getDocumentHandler(store, opt) + require.NotNil(t, dochandler) + defer cleanup() + + createOp := getCreateOperation() + + createOpBuffer, err := json.Marshal(createOp) + require.NoError(t, err) + + updateOp, err := generateUpdateOperation(createOp.UniqueSuffix) + require.NoError(t, err) + + err = store.Put(&operation.AnchoredOperation{ + UniqueSuffix: createOp.UniqueSuffix, Type: operation.TypeCreate, OperationRequest: createOpBuffer}) + require.NoError(t, err) + + dochandler.writer = &mockBatchWriter{Err: fmt.Errorf("batch writer error")} + + doc, err := dochandler.ProcessOperation(updateOp, 0) + require.Error(t, err) + require.Nil(t, doc) + require.Contains(t, err.Error(), "batch writer error") + }) + + t.Run("error - unpublished operation store put error", func(t *testing.T) { + store := mocks.NewMockOperationStore(nil) + + opt := WithUnpublishedOperationStore( + &mockUnpublishedOpsStore{PutErr: fmt.Errorf("put error")}, + []operation.Type{operation.TypeUpdate}) + + dochandler, cleanup := getDocumentHandler(store, opt) + require.NotNil(t, dochandler) + defer cleanup() + + createOp := getCreateOperation() + + createOpBuffer, err := json.Marshal(createOp) + require.NoError(t, err) + + updateOp, err := generateUpdateOperation(createOp.UniqueSuffix) + require.NoError(t, err) + + err = store.Put(&operation.AnchoredOperation{ + UniqueSuffix: createOp.UniqueSuffix, Type: operation.TypeCreate, OperationRequest: createOpBuffer}) + require.NoError(t, err) + + doc, err := dochandler.ProcessOperation(updateOp, 0) + require.Error(t, err) + require.Nil(t, doc) + require.Contains(t, err.Error(), "put error") + }) + + t.Run("error - unpublished operation store delete error", func(t *testing.T) { + store := mocks.NewMockOperationStore(nil) + + opt := WithUnpublishedOperationStore( + &mockUnpublishedOpsStore{DeleteErr: fmt.Errorf("delete error")}, + []operation.Type{operation.TypeUpdate}) + + dochandler, cleanup := getDocumentHandler(store, opt) + require.NotNil(t, dochandler) + defer cleanup() + + dochandler.deleteOperationFromUnpublishedOpsStore(&operation.AnchoredOperation{UniqueSuffix: 
"suffix"}) + }) + + t.Run("error - decorator error", func(t *testing.T) { + store := mocks.NewMockOperationStore(nil) + + dochandler, cleanup := getDocumentHandler(store) + require.NotNil(t, dochandler) + defer cleanup() + + processor := &docmocks.OperationProcessor{} + processor.ResolveReturns(nil, fmt.Errorf("processor error")) + + dochandler.decorator = &defaultOperationDecorator{processor: processor} + + updateOp, err := generateUpdateOperation("suffix") + require.NoError(t, err) + + doc, err := dochandler.ProcessOperation(updateOp, 0) + require.Error(t, err) + require.Nil(t, doc) + require.Contains(t, err.Error(), "processor error") + }) +} + +func TestDocumentHandler_ProcessOperation_Create_WithDomain(t *testing.T) { + dochandler, cleanup := getDocumentHandler(mocks.NewMockOperationStore(nil)) + require.NotNil(t, dochandler) + + defer cleanup() + + dochandler.domain = "https:domain.com" + dochandler.label = "interim" + + createOp := getCreateOperation() + + result, err := dochandler.ProcessOperation(createOp.OperationRequest, 0) + require.NoError(t, err) + require.NotNil(t, result) + + require.Contains(t, result.Document.ID(), namespace+":interim") + + equivalentIds := result.DocumentMetadata[document.EquivalentIDProperty].([]string) //nolint:errcheck + require.Len(t, equivalentIds, 1) + require.Contains(t, equivalentIds[0], namespace+":https:domain.com:interim") +} + +func TestDocumentHandler_ProcessOperation_Create_ApplyDeltaError(t *testing.T) { + dochandler, cleanup := getDocumentHandler(mocks.NewMockOperationStore(nil)) + require.NotNil(t, dochandler) + + defer cleanup() + + p, err := patch.NewJSONPatch(errorPatch) + require.NoError(t, err) + + delta := &model.DeltaModel{ + UpdateCommitment: encodedMultihash([]byte("updateReveal")), + Patches: []patch.Patch{p}, + } + + suffixData, err := getSuffixData(delta) + require.NoError(t, err) + + createOp, err := getCreateOperationWithInitialState(suffixData, delta) + require.NoError(t, err) + + doc, err := dochandler.ProcessOperation(createOp.OperationRequest, 0) + require.Error(t, err) + require.Nil(t, doc) + require.Contains(t, err.Error(), "applying delta resulted in an empty document (most likely due to an invalid patch)") +} + +func TestDocumentHandler_ProcessOperation_ProtocolError(t *testing.T) { + pc := newMockProtocolClient() + pc.Err = fmt.Errorf("injected protocol error") + dochandler, cleanup := getDocumentHandlerWithProtocolClient(mocks.NewMockOperationStore(nil), pc) + require.NotNil(t, dochandler) + + defer cleanup() + + createOp := getCreateOperation() + + doc, err := dochandler.ProcessOperation(createOp.OperationRequest, 0) + require.EqualError(t, err, pc.Err.Error()) + require.Nil(t, doc) +} + +func TestDocumentHandler_ResolveDocument_DID(t *testing.T) { + store := mocks.NewMockOperationStore(nil) + dochandler, cleanup := getDocumentHandler(store) + require.NotNil(t, dochandler) + + defer cleanup() + + docID := getCreateOperation().ID + uniqueSuffix := getCreateOperation().UniqueSuffix + + // scenario: not found in the store + result, err := dochandler.ResolveDocument(docID) + require.Error(t, err) + require.Nil(t, result) + require.Contains(t, err.Error(), "not found") + + // insert document in the store + err = store.Put(getAnchoredCreateOperation()) + require.NoError(t, err) + + // scenario: resolved document (success) + result, err = dochandler.ResolveDocument(docID) + require.NoError(t, err) + require.NotNil(t, result) + + methodMetadataEntry, ok := result.DocumentMetadata[document.MethodProperty] + 
require.True(t, ok) + methodMetadata, ok := methodMetadataEntry.(document.Metadata) + require.True(t, ok) + + require.Equal(t, true, methodMetadata[document.PublishedProperty]) + + // scenario: resolve document with alias namespace (success) + aliasID := alias + ":" + uniqueSuffix + result, err = dochandler.ResolveDocument(aliasID) + require.NoError(t, err) + require.NotNil(t, result) + require.Equal(t, true, methodMetadata[document.PublishedProperty]) + require.Equal(t, result.DocumentMetadata[document.CanonicalIDProperty], docID) + require.Equal(t, result.Document[keyID], aliasID) + + // scenario: invalid namespace + result, err = dochandler.ResolveDocument("doc:invalid") + require.Error(t, err) + require.Nil(t, result) + require.Contains(t, err.Error(), "must start with configured namespace") + + // scenario: invalid id + result, err = dochandler.ResolveDocument(namespace + docutil.NamespaceDelimiter) + require.Error(t, err) + require.Nil(t, result) + require.Contains(t, err.Error(), "did suffix is empty") +} + +func TestDocumentHandler_ResolveDocument_DID_With_References(t *testing.T) { + store := mocks.NewMockOperationStore(nil) + dochandler, cleanup := getDocumentHandler(store) + require.NotNil(t, dochandler) + + defer cleanup() + + const ( + reference = "reference" + equivalent1 = "equivalent1" + equivalent2 = "equivalent2" + ) + + anchoredOp := getAnchoredCreateOperation() + anchoredOp.CanonicalReference = reference + anchoredOp.EquivalentReferences = []string{equivalent1, equivalent2} + + err := store.Put(anchoredOp) + require.NoError(t, err) + + result, err := dochandler.ResolveDocument(namespace + docutil.NamespaceDelimiter + anchoredOp.UniqueSuffix) + require.NoError(t, err) + require.NotNil(t, result) + + expectedCanonical := namespace + + docutil.NamespaceDelimiter + reference + docutil.NamespaceDelimiter + anchoredOp.UniqueSuffix + require.Equal(t, expectedCanonical, result.DocumentMetadata[document.CanonicalIDProperty]) + + expectedEquivalent1 := namespace + + docutil.NamespaceDelimiter + equivalent1 + docutil.NamespaceDelimiter + anchoredOp.UniqueSuffix + expectedEquivalent2 := namespace + + docutil.NamespaceDelimiter + equivalent2 + docutil.NamespaceDelimiter + anchoredOp.UniqueSuffix + expectedEquivalence := []string{expectedCanonical, expectedEquivalent1, expectedEquivalent2} + + require.Equal(t, expectedEquivalence, result.DocumentMetadata[document.EquivalentIDProperty]) +} + +func TestDocumentHandler_ResolveDocument_InitialValue(t *testing.T) { + pc := newMockProtocolClient() + dochandler, cleanup := getDocumentHandlerWithProtocolClient(mocks.NewMockOperationStore(nil), pc) + require.NotNil(t, dochandler) + + defer cleanup() + + createOp := getCreateOperation() + docID := createOp.ID + + createReq, err := canonicalizer.MarshalCanonical(model.CreateRequest{ + Delta: createOp.Delta, + SuffixData: createOp.SuffixData, + }) + require.NoError(t, err) + + longFormPart := ":" + encoder.EncodeToString(createReq) + + t.Run("success - initial state", func(t *testing.T) { + result, err := dochandler.ResolveDocument(docID + longFormPart) + require.NoError(t, err) + require.NotNil(t, result) + + methodMetadataEntry, ok := result.DocumentMetadata[document.MethodProperty] + require.True(t, ok) + + methodMetadata, ok := methodMetadataEntry.(document.Metadata) + require.True(t, ok) + + require.Equal(t, false, methodMetadata[document.PublishedProperty]) + + equivalentIds := result.DocumentMetadata[document.EquivalentIDProperty].([]string) //nolint:errcheck + require.Len(t, 
equivalentIds, 1) + }) + + t.Run("success - initial state with label and domain", func(t *testing.T) { + docHandlerWithDomain, clean := getDocumentHandlerWithProtocolClient(mocks.NewMockOperationStore(nil), pc) + require.NotNil(t, docHandlerWithDomain) + defer clean() + + const label = "interim" + const domain = "domain.com" + + docHandlerWithDomain.label = label + docHandlerWithDomain.domain = domain + + result, err := docHandlerWithDomain.ResolveDocument(docID + longFormPart) + require.NoError(t, err) + require.NotNil(t, result) + + methodMetadataEntry, ok := result.DocumentMetadata[document.MethodProperty] + require.True(t, ok) + + methodMetadata, ok := methodMetadataEntry.(document.Metadata) + require.True(t, ok) + + require.Equal(t, false, methodMetadata[document.PublishedProperty]) + + require.Contains(t, result.Document.ID(), fmt.Sprintf("%s:%s", namespace, label)) + + equivalentIds := result.DocumentMetadata[document.EquivalentIDProperty].([]string) //nolint:errcheck + require.Len(t, equivalentIds, 2) + require.Contains(t, equivalentIds[0], fmt.Sprintf("%s:%s", namespace, label)) + require.NotContains(t, equivalentIds[0], fmt.Sprintf("%s:%s%s", namespace, label, domain)) + require.Contains(t, equivalentIds[1], fmt.Sprintf("%s:%s:%s", namespace, domain, label)) + }) + + t.Run("error - invalid initial state format (not encoded JCS)", func(t *testing.T) { + result, err := dochandler.ResolveDocument(docID + ":payload") + require.Error(t, err) + require.Nil(t, result) + require.Contains(t, err.Error(), "bad request: invalid character") + }) + + t.Run("error - did doesn't match the one created by parsing original create request", func(t *testing.T) { + result, err := dochandler.ResolveDocument(dochandler.namespace + ":someID" + longFormPart) + require.Error(t, err) + require.Nil(t, result) + require.Contains(t, err.Error(), "provided did doesn't match did created from initial state") + }) + + t.Run("error - transform create with initial state to external document", func(t *testing.T) { + transformer := &mocks.DocumentTransformer{} + transformer.TransformDocumentReturns(nil, errors.New("test error")) + + pc := newMockProtocolClient() + pc.CurrentVersion.DocumentTransformerReturns(transformer) + + dochandlerWithValidator, cleanup := + getDocumentHandlerWithProtocolClient(mocks.NewMockOperationStore(nil), pc) + require.NotNil(t, dochandlerWithValidator) + defer cleanup() + + result, err := dochandlerWithValidator.ResolveDocument(docID + longFormPart) + require.Error(t, err) + require.Nil(t, result) + require.Equal(t, err.Error(), + "failed to transform create with initial state to external document: test error") + }) + + t.Run("error - original (create) document is not valid", func(t *testing.T) { + dv := &mocks.DocumentValidator{} + dv.IsValidOriginalDocumentReturns(errors.New("test error")) + + pc := newMockProtocolClient() + pc.CurrentVersion.DocumentValidatorReturns(dv) + + dochandlerWithValidator, cleanup := getDocumentHandlerWithProtocolClient(mocks.NewMockOperationStore(nil), pc) + require.NotNil(t, dochandlerWithValidator) + defer cleanup() + + result, err := dochandlerWithValidator.ResolveDocument(docID + longFormPart) + require.Error(t, err) + require.Nil(t, result) + require.Equal(t, err.Error(), "bad request: validate initial document: test error") + }) + + t.Run("error - protocol error", func(t *testing.T) { + pc := newMockProtocolClient() + pc.Err = fmt.Errorf("injected protocol error") + + dochandler, cleanup := 
getDocumentHandlerWithProtocolClient(mocks.NewMockOperationStore(nil), pc) + require.NotNil(t, dochandler) + defer cleanup() + + result, err := dochandler.ResolveDocument(docID + longFormPart) + require.EqualError(t, err, pc.Err.Error()) + require.Nil(t, result) + }) +} + +func TestDocumentHandler_ResolveDocument_Interop(t *testing.T) { + pc := newMockProtocolClient() + pc.Protocol.Patches = []string{"replace", "add-public-keys", "remove-public-keys", "add-services", "remove-services", "ietf-json-patch"} //nolint:lll + + parser := operationparser.New(pc.Protocol) + oa := operationapplier.New(pc.Protocol, parser, doccomposer.New()) + transformer := didtransformer.New() + + pv := pc.CurrentVersion + pv.OperationParserReturns(parser) + pv.OperationApplierReturns(oa) + pv.DocumentTransformerReturns(transformer) + + pc.CurrentVersion.ProtocolReturns(pc.Protocol) + + dochandler, cleanup := getDocumentHandlerWithProtocolClient(mocks.NewMockOperationStore(nil), pc) + require.NotNil(t, dochandler) + + defer cleanup() + + dochandler.protocol = pc + + result, err := dochandler.ResolveDocument(interopResolveDidWithInitialState) + require.NoError(t, err) + require.NotNil(t, result) +} + +func TestDocumentHandler_ResolveDocument_InitialDocumentNotValid(t *testing.T) { + dochandler, cleanup := getDocumentHandler(mocks.NewMockOperationStore(nil)) + require.NotNil(t, dochandler) + + defer cleanup() + + createReq, err := getCreateRequestWithDoc(invalidDocNoKeyType) + require.NoError(t, err) + + createOp, err := getCreateOperationWithInitialState(createReq.SuffixData, createReq.Delta) + require.NoError(t, err) + + docID := createOp.ID + + initialReq, err := canonicalizer.MarshalCanonical(model.CreateRequest{ + Delta: createOp.Delta, + SuffixData: createOp.SuffixData, + }) + require.NoError(t, err) + + longFormPart := ":" + encoder.EncodeToString(initialReq) + + result, err := dochandler.ResolveDocument(docID + longFormPart) + require.Error(t, err) + require.Nil(t, result) + require.Contains(t, err.Error(), "bad request: key 'type' is required for public key") +} + +func TestGetUniquePortion(t *testing.T) { + const namespace = "did:sidetree" + + // id doesn't contain namespace + _, err := getSuffix(namespace, "invalid") + require.Error(t, err) + require.Contains(t, err.Error(), "did must start with configured namespace") + + // id equals namespace; unique portion is empty + _, err = getSuffix(namespace, namespace+docutil.NamespaceDelimiter) + require.Error(t, err) + require.Contains(t, err.Error(), "did suffix is empty") + + // valid unique portion + const unique = "exKwW0HjS5y4zBtJ7vYDwglYhtckdO15JDt1j5F5Q0A" + uniquePortion, err := getSuffix(namespace, namespace+docutil.NamespaceDelimiter+unique) + require.NoError(t, err) + require.Equal(t, unique, uniquePortion) +} + +func TestProcessOperation_ParseOperationError(t *testing.T) { + store := mocks.NewMockOperationStore(nil) + dochandler, cleanup := getDocumentHandler(store) + require.NotNil(t, dochandler) + + defer cleanup() + + // insert document in the store + err := store.Put(getAnchoredCreateOperation()) + require.NoError(t, err) + + doc, err := dochandler.ProcessOperation(getUpdateOperation().OperationRequest, 0) + require.Error(t, err) + require.Nil(t, doc) + require.Contains(t, err.Error(), "bad request: missing signed data") +} + +func TestGetHint(t *testing.T) { + const ( + namespace = "did:sidetree" + testID = "did:sidetree:unique" + ) + + t.Run("success", func(t *testing.T) { + hint, err := GetHint("did:sidetree:hint:unique", namespace, "unique") 
+ require.NoError(t, err) + require.Equal(t, "hint", hint) + }) + + t.Run("success - no hint", func(t *testing.T) { + t.Run("success", func(t *testing.T) { + hint, err := GetHint(testID, namespace, "unique") + require.NoError(t, err) + require.Empty(t, hint) + }) + }) + + t.Run("error - wrong suffix", func(t *testing.T) { + t.Run("success", func(t *testing.T) { + hint, err := GetHint(testID, namespace, "other") + require.Error(t, err) + require.Empty(t, hint) + }) + }) +} + +// BatchContext implements batch writer context. +type BatchContext struct { + ProtocolClient *mocks.MockProtocolClient + CasClient *mocks.MockCasClient + AnchorWriter *mocks.MockAnchorWriter + OpQueue cutter.OperationQueue +} + +// Protocol returns the ProtocolClient. +func (m *BatchContext) Protocol() protocol.Client { + return m.ProtocolClient +} + +// Anchor returns the block chain client. +func (m *BatchContext) Anchor() batch.AnchorWriter { + return m.AnchorWriter +} + +// CAS returns the CAS client. +func (m *BatchContext) CAS() cas.Client { + return m.CasClient +} + +// OperationQueue returns the queue of operations pending to be cut. +func (m *BatchContext) OperationQueue() cutter.OperationQueue { + return m.OpQueue +} + +type cleanup func() + +func getDocumentHandler(store *mocks.MockOperationStore, opts ...Option) (*DocumentHandler, cleanup) { + return getDocumentHandlerWithProtocolClient(store, newMockProtocolClient(), opts...) +} + +func getDocumentHandlerWithProtocolClient( + store *mocks.MockOperationStore, protocol *mocks.MockProtocolClient, opts ...Option, +) (*DocumentHandler, cleanup) { //nolint: interfacer + processor := processor.New("test", store, protocol) + + ctx := &BatchContext{ + ProtocolClient: protocol, + CasClient: mocks.NewMockCasClient(nil), + AnchorWriter: mocks.NewMockAnchorWriter(nil), + OpQueue: &opqueue.MemQueue{}, + } + + writer, err := batch.New("test", ctx) + if err != nil { + panic(err) + } + + // start go routine for cutting batches + writer.Start() + + return New(namespace, + []string{alias}, protocol, writer, processor, &mocks.MetricsProvider{}, opts...), func() { writer.Stop() } +} + +func getCreateOperation() *model.Operation { + request, err := getCreateRequest() + if err != nil { + panic(err) + } + + op, err := getCreateOperationWithInitialState(request.SuffixData, request.Delta) + if err != nil { + panic(err) + } + + return op +} + +func getCreateOperationWithInitialState(suffixData *model.SuffixDataModel, delta *model.DeltaModel, +) (*model.Operation, error) { + request := &model.CreateRequest{ + Operation: operation.TypeCreate, + SuffixData: suffixData, + Delta: delta, + } + + payload, err := canonicalizer.MarshalCanonical(request) + if err != nil { + return nil, err + } + + uniqueSuffix, err := hashing.CalculateModelMultihash(suffixData, sha2_256) + if err != nil { + return nil, err + } + + return &model.Operation{ + Type: operation.TypeCreate, + UniqueSuffix: uniqueSuffix, + ID: namespace + docutil.NamespaceDelimiter + uniqueSuffix, + OperationRequest: payload, + Delta: delta, + SuffixData: suffixData, + }, nil +} + +func getAnchoredCreateOperation() *operation.AnchoredOperation { + op := getCreateOperation() + + return getAnchoredOperation(op) +} + +func getAnchoredOperation(op *model.Operation) *operation.AnchoredOperation { + anchoredOp, err := model.GetAnchoredOperation(op) + if err != nil { + panic(err) + } + + return anchoredOp +} + +const validDoc = `{ + "publicKey": [{ + "id": "key1", + "type": "JsonWebKey2020", + "purposes": ["authentication"], + 
"publicKeyJwk": { + "kty": "EC", + "crv": "P-256K", + "x": "PUymIqdtF_qxaAqPABSw-C-owT1KYYQbsMKFM-L9fJA", + "y": "nM84jDHCMOTGTh_ZdHq4dBBdo4Z5PkEOW9jA8z8IsGc" + } + }] +}` + +const invalidDocNoKeyType = `{ + "publicKey": [{ + "id": "key1", + "publicKeyJwk": { + "kty": "EC", + "crv": "P-256K", + "x": "PUymIqdtF_qxaAqPABSw-C-owT1KYYQbsMKFM-L9fJA", + "y": "nM84jDHCMOTGTh_ZdHq4dBBdo4Z5PkEOW9jA8z8IsGc" + } + }] +}` + +const errorPatch = `[ +{ + "op": "move", + "path": "/test", + "value": "new value" +} +]` + +func getCreateRequest() (*model.CreateRequest, error) { + return getCreateRequestWithDoc(validDoc) +} + +func getCreateRequestWithDoc(doc string) (*model.CreateRequest, error) { + delta, err := getDeltaWithDoc(doc) + if err != nil { + return nil, err + } + + suffixData, err := getSuffixData(delta) + if err != nil { + return nil, err + } + + return &model.CreateRequest{ + Operation: operation.TypeCreate, + Delta: delta, + SuffixData: suffixData, + }, nil +} + +func getDeltaWithDoc(doc string) (*model.DeltaModel, error) { + patches, err := newAddPublicKeysPatch(doc) + if err != nil { + return nil, err + } + + return &model.DeltaModel{ + Patches: []patch.Patch{patches}, + UpdateCommitment: encodedMultihash([]byte("updateReveal")), + }, nil +} + +// newAddPublicKeysPatch creates new add public keys patch without validation. +func newAddPublicKeysPatch(doc string) (patch.Patch, error) { + parsed, err := document.FromBytes([]byte(doc)) + if err != nil { + return nil, err + } + + p := make(patch.Patch) + p[patch.ActionKey] = patch.AddPublicKeys + p[patch.PublicKeys] = parsed.PublicKeys() + + return p, nil +} + +func getSuffixData(delta *model.DeltaModel) (*model.SuffixDataModel, error) { + jwk := &jws.JWK{ + Kty: "kty", + Crv: "crv", + X: "x", + } + + c, err := commitment.GetCommitment(jwk, sha2_256) + if err != nil { + return nil, err + } + + deltaHash, err := hashing.CalculateModelMultihash(delta, sha2_256) + if err != nil { + return nil, err + } + + return &model.SuffixDataModel{ + DeltaHash: deltaHash, + RecoveryCommitment: c, + }, nil +} + +func encodedMultihash(data []byte) string { + mh, err := hashing.ComputeMultihash(sha2_256, data) + if err != nil { + panic(err) + } + + return encoder.EncodeToString(mh) +} + +func getUpdateDelta() *model.DeltaModel { + return &model.DeltaModel{ + UpdateCommitment: encodedMultihash([]byte("updateReveal")), + } +} + +func getUpdateOperation() *operation.Operation { + request := &model.UpdateRequest{ + Operation: operation.TypeUpdate, + DidSuffix: getCreateOperation().UniqueSuffix, + Delta: getUpdateDelta(), + } + + payload, err := json.Marshal(request) + if err != nil { + panic(err) + } + + return &operation.Operation{ + OperationRequest: payload, + Type: operation.TypeUpdate, + UniqueSuffix: request.DidSuffix, + ID: namespace + docutil.NamespaceDelimiter + request.DidSuffix, + } +} + +func generateUpdateRequestInfo(uniqueSuffix string) (*client.UpdateRequestInfo, error) { + privateKey, err := ecdsa.GenerateKey(elliptic.P256(), rand.Reader) + if err != nil { + return nil, err + } + + testPatch, err := getTestPatch() + if err != nil { + return nil, err + } + + updateCommitment, err := generateUniqueCommitment() + if err != nil { + return nil, err + } + + updatePubKey, err := pubkey.GetPublicKeyJWK(&privateKey.PublicKey) + if err != nil { + return nil, err + } + + rv, err := commitment.GetRevealValue(updatePubKey, sha2_256) + if err != nil { + return nil, err + } + + return &client.UpdateRequestInfo{ + DidSuffix: uniqueSuffix, + Signer: 
ecsigner.New(privateKey, "ES256", ""),
+		UpdateCommitment: updateCommitment,
+		UpdateKey:        updatePubKey,
+		Patches:          []patch.Patch{testPatch},
+		MultihashCode:    sha2_256,
+		RevealValue:      rv,
+	}, nil
+}
+
+func generateUpdateOperation(uniqueSuffix string) ([]byte, error) {
+	info, err := generateUpdateRequestInfo(uniqueSuffix)
+	if err != nil {
+		return nil, err
+	}
+
+	return client.NewUpdateRequest(info)
+}
+
+func getTestPatch() (patch.Patch, error) {
+	return patch.NewJSONPatch(`[{"op": "replace", "path": "/name", "value": "Jane"}]`)
+}
+
+func generateUniqueCommitment() (string, error) {
+	key, err := ecdsa.GenerateKey(elliptic.P256(), rand.Reader)
+	if err != nil {
+		return "", err
+	}
+
+	pubKey, err := pubkey.GetPublicKeyJWK(&key.PublicKey)
+	if err != nil {
+		return "", err
+	}
+
+	c, err := commitment.GetCommitment(pubKey, sha2_256)
+	if err != nil {
+		return "", err
+	}
+
+	return c, nil
+}
+
+// test value taken from reference implementation.
+//
+//nolint:lll
+const interopResolveDidWithInitialState = "did:sidetree:EiDyOQbbZAa3aiRzeCkV7LOx3SERjjH93EXoIM3UoN4oWg:eyJkZWx0YSI6eyJwYXRjaGVzIjpbeyJhY3Rpb24iOiJyZXBsYWNlIiwiZG9jdW1lbnQiOnsicHVibGljS2V5cyI6W3siaWQiOiJwdWJsaWNLZXlNb2RlbDFJZCIsInB1YmxpY0tleUp3ayI6eyJjcnYiOiJzZWNwMjU2azEiLCJrdHkiOiJFQyIsIngiOiJ0WFNLQl9ydWJYUzdzQ2pYcXVwVkpFelRjVzNNc2ptRXZxMVlwWG45NlpnIiwieSI6ImRPaWNYcWJqRnhvR0otSzAtR0oxa0hZSnFpY19EX09NdVV3a1E3T2w2bmsifSwicHVycG9zZXMiOlsiYXV0aGVudGljYXRpb24iLCJrZXlBZ3JlZW1lbnQiXSwidHlwZSI6IkVjZHNhU2VjcDI1NmsxVmVyaWZpY2F0aW9uS2V5MjAxOSJ9XSwic2VydmljZXMiOlt7ImlkIjoic2VydmljZTFJZCIsInNlcnZpY2VFbmRwb2ludCI6Imh0dHA6Ly93d3cuc2VydmljZTEuY29tIiwidHlwZSI6InNlcnZpY2UxVHlwZSJ9XX19XSwidXBkYXRlQ29tbWl0bWVudCI6IkVpREtJa3dxTzY5SVBHM3BPbEhrZGI4Nm5ZdDBhTnhTSFp1MnItYmhFem5qZEEifSwic3VmZml4RGF0YSI6eyJkZWx0YUhhc2giOiJFaUNmRFdSbllsY0Q5RUdBM2RfNVoxQUh1LWlZcU1iSjluZmlxZHo1UzhWRGJnIiwicmVjb3ZlcnlDb21taXRtZW50IjoiRWlCZk9aZE10VTZPQnc4UGs4NzlRdFotMkotOUZiYmpTWnlvYUFfYnFENHpoQSJ9fQ"
+
+func newMockProtocolClient() *mocks.MockProtocolClient {
+	pc := mocks.NewMockProtocolClient()
+
+	for _, v := range pc.Versions {
+		parser := operationparser.New(v.Protocol())
+		dc := doccomposer.New()
+		oa := operationapplier.New(v.Protocol(), parser, dc)
+		dv := &mocks.DocumentValidator{}
+		dt := doctransformer.New()
+
+		pc.CasClient = mocks.NewMockCasClient(nil)
+		cp := compression.New(compression.WithDefaultAlgorithms())
+		oh := txnprovider.NewOperationHandler(pc.Protocol, pc.CasClient, cp, parser, &mocks.MetricsProvider{})
+
+		v.OperationParserReturns(parser)
+		v.OperationApplierReturns(oa)
+		v.DocumentComposerReturns(dc)
+		v.DocumentValidatorReturns(dv)
+		v.DocumentTransformerReturns(dt)
+		v.OperationHandlerReturns(oh)
+	}
+
+	return pc
+}
+
+type mockUnpublishedOpsStore struct {
+	Ops       []*operation.AnchoredOperation
+	PutErr    error
+	DeleteErr error
+	GetErr    error
+}
+
+func (m *mockUnpublishedOpsStore) Put(_ *operation.AnchoredOperation) error {
+	return m.PutErr
+}
+
+func (m *mockUnpublishedOpsStore) Delete(_ *operation.AnchoredOperation) error {
+	return m.DeleteErr
+}
+
+func (m *mockUnpublishedOpsStore) Get(uniqueSuffix string) ([]*operation.AnchoredOperation, error) {
+	if m.GetErr != nil {
+		return nil, m.GetErr
+	}
+
+	return m.Ops, nil
+}
+
+type mockOperationDecorator struct {
+	Err error
+}
+
+func (m *mockOperationDecorator) Decorate(op *operation.Operation) (*operation.Operation, error) {
+	if m.Err != nil {
+		return nil, m.Err
+	}
+
+	return op, nil
+}
+
+type mockBatchWriter struct {
+	Err error
+}
+
+func (mbw *mockBatchWriter) Add(_ 
*operation.QueuedOperation, _ uint64) error { + return mbw.Err +} diff --git a/method/sidetreelongform/sidetree-core/dochandler/mocks/operationprocessor.gen.go b/method/sidetreelongform/sidetree-core/dochandler/mocks/operationprocessor.gen.go new file mode 100644 index 0000000..7726cb1 --- /dev/null +++ b/method/sidetreelongform/sidetree-core/dochandler/mocks/operationprocessor.gen.go @@ -0,0 +1,117 @@ +// Code generated by counterfeiter. DO NOT EDIT. +package mocks + +import ( + "sync" + + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/document" + + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/api/protocol" +) + +type OperationProcessor struct { + ResolveStub func(string, ...document.ResolutionOption) (*protocol.ResolutionModel, error) + resolveMutex sync.RWMutex + resolveArgsForCall []struct { + arg1 string + arg2 []document.ResolutionOption + } + resolveReturns struct { + result1 *protocol.ResolutionModel + result2 error + } + resolveReturnsOnCall map[int]struct { + result1 *protocol.ResolutionModel + result2 error + } + invocations map[string][][]interface{} + invocationsMutex sync.RWMutex +} + +func (fake *OperationProcessor) Resolve(arg1 string, arg2 ...document.ResolutionOption) (*protocol.ResolutionModel, error) { + fake.resolveMutex.Lock() + ret, specificReturn := fake.resolveReturnsOnCall[len(fake.resolveArgsForCall)] + fake.resolveArgsForCall = append(fake.resolveArgsForCall, struct { + arg1 string + arg2 []document.ResolutionOption + }{arg1, arg2}) + fake.recordInvocation("Resolve", []interface{}{arg1, arg2}) + fake.resolveMutex.Unlock() + if fake.ResolveStub != nil { + return fake.ResolveStub(arg1, arg2...) + } + if specificReturn { + return ret.result1, ret.result2 + } + fakeReturns := fake.resolveReturns + return fakeReturns.result1, fakeReturns.result2 +} + +func (fake *OperationProcessor) ResolveCallCount() int { + fake.resolveMutex.RLock() + defer fake.resolveMutex.RUnlock() + return len(fake.resolveArgsForCall) +} + +func (fake *OperationProcessor) ResolveCalls(stub func(string, ...document.ResolutionOption) (*protocol.ResolutionModel, error)) { + fake.resolveMutex.Lock() + defer fake.resolveMutex.Unlock() + fake.ResolveStub = stub +} + +func (fake *OperationProcessor) ResolveArgsForCall(i int) (string, []document.ResolutionOption) { + fake.resolveMutex.RLock() + defer fake.resolveMutex.RUnlock() + argsForCall := fake.resolveArgsForCall[i] + return argsForCall.arg1, argsForCall.arg2 +} + +func (fake *OperationProcessor) ResolveReturns(result1 *protocol.ResolutionModel, result2 error) { + fake.resolveMutex.Lock() + defer fake.resolveMutex.Unlock() + fake.ResolveStub = nil + fake.resolveReturns = struct { + result1 *protocol.ResolutionModel + result2 error + }{result1, result2} +} + +func (fake *OperationProcessor) ResolveReturnsOnCall(i int, result1 *protocol.ResolutionModel, result2 error) { + fake.resolveMutex.Lock() + defer fake.resolveMutex.Unlock() + fake.ResolveStub = nil + if fake.resolveReturnsOnCall == nil { + fake.resolveReturnsOnCall = make(map[int]struct { + result1 *protocol.ResolutionModel + result2 error + }) + } + fake.resolveReturnsOnCall[i] = struct { + result1 *protocol.ResolutionModel + result2 error + }{result1, result2} +} + +func (fake *OperationProcessor) Invocations() map[string][][]interface{} { + fake.invocationsMutex.RLock() + defer fake.invocationsMutex.RUnlock() + fake.resolveMutex.RLock() + defer fake.resolveMutex.RUnlock() + copiedInvocations := map[string][][]interface{}{} + for key, value := range 
fake.invocations {
+		copiedInvocations[key] = value
+	}
+	return copiedInvocations
+}
+
+func (fake *OperationProcessor) recordInvocation(key string, args []interface{}) {
+	fake.invocationsMutex.Lock()
+	defer fake.invocationsMutex.Unlock()
+	if fake.invocations == nil {
+		fake.invocations = map[string][][]interface{}{}
+	}
+	if fake.invocations[key] == nil {
+		fake.invocations[key] = [][]interface{}{}
+	}
+	fake.invocations[key] = append(fake.invocations[key], args)
+}
diff --git a/method/sidetreelongform/sidetree-core/document/diddocument.go b/method/sidetreelongform/sidetree-core/document/diddocument.go
new file mode 100644
index 0000000..032ec5f
--- /dev/null
+++ b/method/sidetreelongform/sidetree-core/document/diddocument.go
@@ -0,0 +1,199 @@
+/*
+Copyright SecureKey Technologies Inc. All Rights Reserved.
+
+SPDX-License-Identifier: Apache-2.0
+*/
+
+package document
+
+import (
+	"encoding/json"
+	"io"
+)
+
+const (
+
+	// ContextProperty defines key for context property.
+	ContextProperty = "@context"
+
+	// AlsoKnownAs defines also known as property.
+	AlsoKnownAs = "alsoKnownAs"
+
+	// ServiceProperty defines key for service property.
+	ServiceProperty = "service"
+
+	// PublicKeyProperty defines key for public key property.
+	PublicKeyProperty = "publicKey"
+
+	// VerificationMethodProperty defines key for verification method.
+	VerificationMethodProperty = "verificationMethod"
+
+	// AuthenticationProperty defines key for authentication property.
+	AuthenticationProperty = "authentication"
+
+	// AssertionMethodProperty defines key for assertion method property.
+	AssertionMethodProperty = "assertionMethod"
+
+	// KeyAgreementProperty defines key for key agreement property.
+	KeyAgreementProperty = "keyAgreement"
+
+	// DelegationKeyProperty defines key for delegation key property.
+	DelegationKeyProperty = "capabilityDelegation"
+
+	// InvocationKeyProperty defines key for invocation key property.
+	InvocationKeyProperty = "capabilityInvocation"
+)
+
+// DIDDocument defines the DID document data structure used by Sidetree for basic type safety checks.
+type DIDDocument map[string]interface{}
+
+// ID is the identifier for the DID subject (what the DID document is about).
+func (doc DIDDocument) ID() string {
+	return stringEntry(doc[IDProperty])
+}
+
+// Context is the context of the DID document.
+func (doc DIDDocument) Context() []interface{} {
+	return interfaceArray(doc[ContextProperty])
+}
+
+// PublicKeys are used for digital signatures, encryption and other cryptographic operations.
+func (doc DIDDocument) PublicKeys() []PublicKey {
+	return ParsePublicKeys(doc[PublicKeyProperty])
+}
+
+// VerificationMethods (formerly public keys) are used for digital signatures,
+// encryption and other cryptographic operations.
+func (doc DIDDocument) VerificationMethods() []PublicKey {
+	return ParsePublicKeys(doc[VerificationMethodProperty])
+}
+
+// AlsoKnownAs are alternate identifiers for the DID subject.
+func (doc DIDDocument) AlsoKnownAs() []string {
+	return StringArray(doc[AlsoKnownAs])
+}
+
+// ParsePublicKeys is a helper function for parsing public keys.
+func ParsePublicKeys(entry interface{}) []PublicKey {
+	if entry == nil {
+		return nil
+	}
+
+	typedEntry, ok := entry.([]interface{})
+	if !ok {
+		return nil
+	}
+
+	var result []PublicKey
+
+	for _, e := range typedEntry {
+		emap, ok := e.(map[string]interface{})
+		if !ok {
+			continue
+		}
+
+		result = append(result, NewPublicKey(emap))
+	}
+
+	return result
+}
+
+// Services is an array of service endpoints.
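+//
+// A small illustrative sketch (the JSON below is a made-up example):
+//
+//	doc, err := DidDocumentFromBytes([]byte(`{"service":[{"id":"hub","type":"IdentityHub"}]}`))
+//	if err == nil {
+//		for _, svc := range doc.Services() {
+//			_ = svc // each service is a generic map-backed value
+//		}
+//	}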
+func (doc DIDDocument) Services() []Service { + return ParseServices(doc[ServiceProperty]) +} + +// ParseServices is utility for parsing array of service endpoints. +func ParseServices(entry interface{}) []Service { + if entry == nil { + return nil + } + + typedEntry, ok := entry.([]interface{}) + if !ok { + return nil + } + + var result []Service + + for _, e := range typedEntry { + emap, ok := e.(map[string]interface{}) + if !ok { + continue + } + + result = append(result, NewService(emap)) + } + + return result +} + +// JSONLdObject returns map that represents JSON LD Object. +func (doc DIDDocument) JSONLdObject() map[string]interface{} { + return doc +} + +// Authentications returns authentication array (mixture of strings and objects). +func (doc DIDDocument) Authentications() []interface{} { + return interfaceArray(doc[AuthenticationProperty]) +} + +// AssertionMethods returns assertion method array (mixture of strings and objects). +func (doc DIDDocument) AssertionMethods() []interface{} { + return interfaceArray(doc[AssertionMethodProperty]) +} + +// AgreementKeys returns agreement method array (mixture of strings and objects). +func (doc DIDDocument) AgreementKeys() []interface{} { + return interfaceArray(doc[KeyAgreementProperty]) +} + +// DelegationKeys returns delegation method array (mixture of strings and objects). +func (doc DIDDocument) DelegationKeys() []interface{} { + return interfaceArray(doc[DelegationKeyProperty]) +} + +// InvocationKeys returns invocation method array (mixture of strings and objects). +func (doc DIDDocument) InvocationKeys() []interface{} { + return interfaceArray(doc[InvocationKeyProperty]) +} + +// DIDDocumentFromReader creates an instance of DIDDocument by reading a JSON document from Reader. +func DIDDocumentFromReader(r io.Reader) (DIDDocument, error) { + data, err := io.ReadAll(r) + if err != nil { + return nil, err + } + + return DidDocumentFromBytes(data) +} + +// DidDocumentFromBytes creates an instance of DIDDocument by reading a JSON document from bytes. +func DidDocumentFromBytes(data []byte) (DIDDocument, error) { + doc := make(DIDDocument) + err := json.Unmarshal(data, &doc) + + if err != nil { + return nil, err + } + + return doc, nil +} + +// DidDocumentFromJSONLDObject creates an instance of DIDDocument from json ld object. +func DidDocumentFromJSONLDObject(jsonldObject map[string]interface{}) DIDDocument { + return jsonldObject +} + +func interfaceArray(entry interface{}) []interface{} { + if entry == nil { + return nil + } + + entries, ok := entry.([]interface{}) + if !ok { + return nil + } + + return entries +} diff --git a/method/sidetreelongform/sidetree-core/document/diddocument_test.go b/method/sidetreelongform/sidetree-core/document/diddocument_test.go new file mode 100644 index 0000000..4af68fe --- /dev/null +++ b/method/sidetreelongform/sidetree-core/document/diddocument_test.go @@ -0,0 +1,115 @@ +/* +Copyright SecureKey Technologies Inc. All Rights Reserved. 
+ +SPDX-License-Identifier: Apache-2.0 +*/ + +package document + +import ( + "testing" + + "github.com/stretchr/testify/require" +) + +func TestValid(t *testing.T) { + r := reader(t, "testdata/pk-doc.json") + + doc, err := DIDDocumentFromReader(r) + require.Nil(t, err) + require.NotNil(t, doc) + require.Equal(t, "", doc.ID()) + + publicKeys := doc.PublicKeys() + require.Equal(t, []PublicKey{ + { + "id": "key1", + "type": "JsonWebKey2020", + "purposes": []interface{}{"authentication"}, + "publicKeyJwk": map[string]interface{}{ + "kty": "EC", + "crv": "P-256K", + "x": "PUymIqdtF_qxaAqPABSw-C-owT1KYYQbsMKFM-L9fJA", + "y": "nM84jDHCMOTGTh_ZdHq4dBBdo4Z5PkEOW9jA8z8IsGc", + }, + }, + }, publicKeys) + + services := doc.Services() + require.Equal(t, []Service{ + { + "id": "hub", + "type": "IdentityHub", + "serviceEndpoint": "https://example.com/hub/", + "routingKeys": "routingKeysValue", + "recipientKeys": "recipientKeysValue", + "priority": float64(0), + }, + }, services) + + jsonld := doc.JSONLdObject() + require.NotNil(t, jsonld) + + require.Empty(t, doc.Context()) + require.Equal(t, "whatever", doc.Authentications()[0]) + + require.Equal(t, 1, len(doc.AlsoKnownAs())) + require.Equal(t, "identityURI", doc.AlsoKnownAs()[0]) + + newDoc := DidDocumentFromJSONLDObject(doc.JSONLdObject()) + require.Equal(t, newDoc, doc) +} + +func TestValidWithVerificationMethods(t *testing.T) { + r := reader(t, "testdata/vm-doc.json") + + doc, err := DIDDocumentFromReader(r) + require.Nil(t, err) + require.NotNil(t, doc) + require.Equal(t, "", doc.ID()) + + publicKeys := doc.VerificationMethods() + require.Equal(t, []PublicKey{ + { + "id": "key1", + "type": "JsonWebKey2020", + "purposes": []interface{}{"authentication"}, + "publicKeyJwk": map[string]interface{}{ + "kty": "EC", + "crv": "P-256K", + "x": "PUymIqdtF_qxaAqPABSw-C-owT1KYYQbsMKFM-L9fJA", + "y": "nM84jDHCMOTGTh_ZdHq4dBBdo4Z5PkEOW9jA8z8IsGc", + }, + }, + }, publicKeys) +} + +func TestEmptyDoc(t *testing.T) { + var bytes = []byte(`{"@context":"https://w3id.org/did/v1"}`) //nolint:gofumpt + + doc, err := DidDocumentFromBytes(bytes) + require.Nil(t, err) + require.NotNil(t, doc) + + require.Equal(t, 0, len(doc.PublicKeys())) + require.Equal(t, 0, len(doc.Services())) + require.Equal(t, 0, len(doc.Authentications())) + require.Equal(t, 0, len(doc.AssertionMethods())) + require.Equal(t, 0, len(doc.AgreementKeys())) + require.Equal(t, 0, len(doc.DelegationKeys())) + require.Equal(t, 0, len(doc.InvocationKeys())) +} + +func TestInvalidLists(t *testing.T) { + r := reader(t, "testdata/invalid-lists.json") + + doc, err := DIDDocumentFromReader(r) + require.Nil(t, err) + require.NotNil(t, doc) + + services := doc.Services() + require.Equal(t, 0, len(services)) + + pubKeys := doc.PublicKeys() + require.Equal(t, 0, len(pubKeys)) +} diff --git a/method/sidetreelongform/sidetree-core/document/document.go b/method/sidetreelongform/sidetree-core/document/document.go new file mode 100644 index 0000000..37ea732 --- /dev/null +++ b/method/sidetreelongform/sidetree-core/document/document.go @@ -0,0 +1,104 @@ +/* +Copyright SecureKey Technologies Inc. All Rights Reserved. + +SPDX-License-Identifier: Apache-2.0 +*/ + +package document + +import ( + "encoding/json" + + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/docutil" +) + +// IDProperty describes id key. +const IDProperty = "id" + +// Document defines generic document data structure. 
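+// A minimal round-trip sketch (the JSON literal is illustrative only):
+//
+//	doc, err := FromBytes([]byte(`{"id":"doc1","name":"value"}`))
+//	if err != nil {
+//		// handle malformed JSON
+//	}
+//	name := doc.GetStringValue("name") // "value"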
+type Document map[string]interface{} + +// FromBytes creates an instance of Document by reading a JSON document from bytes. +func FromBytes(data []byte) (Document, error) { + doc := make(Document) + err := json.Unmarshal(data, &doc) + + if err != nil { + return nil, err + } + + return doc, nil +} + +// FromJSONLDObject creates an instance of Document from json ld object. +func FromJSONLDObject(jsonldObject map[string]interface{}) Document { + return jsonldObject +} + +// ID is document identifier. +func (doc Document) ID() string { + return stringEntry(doc[IDProperty]) +} + +// Context is the context of document. +func (doc Document) Context() []interface{} { + return interfaceArray(doc[ContextProperty]) +} + +// PublicKeys in generic document are used for managing operation keys. +func (doc Document) PublicKeys() []PublicKey { + return ParsePublicKeys(doc[PublicKeyProperty]) +} + +// GetStringValue returns string value for specified key or "" if not found or wrong type. +func (doc Document) GetStringValue(key string) string { + return stringEntry(doc[key]) +} + +// Bytes returns byte representation of did document. +func (doc Document) Bytes() ([]byte, error) { + return docutil.MarshalCanonical(doc) +} + +// JSONLdObject returns map that represents JSON LD Object. +func (doc Document) JSONLdObject() map[string]interface{} { + return doc +} + +func stringEntry(entry interface{}) string { + if entry == nil { + return "" + } + + id, ok := entry.(string) + if !ok { + return "" + } + + return id +} + +// StringArray is utility function to return string array from interface. +func StringArray(entry interface{}) []string { + if entry == nil { + return nil + } + + entries, ok := entry.([]interface{}) + if !ok { + return nil + } + + var result []string + + for _, e := range entries { + val, ok := e.(string) + if !ok { + continue + } + + result = append(result, val) + } + + return result +} diff --git a/method/sidetreelongform/sidetree-core/document/document_test.go b/method/sidetreelongform/sidetree-core/document/document_test.go new file mode 100644 index 0000000..e0ff93d --- /dev/null +++ b/method/sidetreelongform/sidetree-core/document/document_test.go @@ -0,0 +1,95 @@ +/* +Copyright SecureKey Technologies Inc. All Rights Reserved. 
+
+SPDX-License-Identifier: Apache-2.0
+*/
+
+package document
+
+import (
+	"io"
+	"os"
+	"testing"
+
+	"github.com/stretchr/testify/require"
+)
+
+func TestFromBytes(t *testing.T) {
+	r := reader(t, "testdata/pk-doc.json")
+
+	data, err := io.ReadAll(r)
+	require.Nil(t, err)
+
+	doc, err := FromBytes(data)
+	require.Nil(t, err)
+	require.NotNil(t, doc)
+	require.Equal(t, "", doc.ID())
+	require.Equal(t, 1, len(doc.PublicKeys()))
+	require.Equal(t, 0, len(doc.Context()))
+
+	bytes, err := doc.Bytes()
+	require.Nil(t, err)
+	require.NotEmpty(t, bytes)
+
+	jsonld := doc.JSONLdObject()
+	require.NotNil(t, jsonld)
+
+	jsonldObject := FromJSONLDObject(jsonld)
+	require.Equal(t, doc.ID(), jsonldObject.ID())
+}
+
+func TestFromBytesError(t *testing.T) {
+	doc, err := FromBytes([]byte("[test : 123]"))
+	require.NotNil(t, err)
+	require.Nil(t, doc)
+	require.Contains(t, err.Error(), "invalid character")
+}
+
+func TestMarshalError(t *testing.T) {
+	doc := Document{}
+	doc["test"] = make(chan int)
+
+	bytes, err := doc.Bytes()
+	require.NotNil(t, err)
+	require.Nil(t, bytes)
+	require.Contains(t, err.Error(), "json: unsupported type: chan int")
+}
+
+func TestGetStringValue(t *testing.T) {
+	const key = "key"
+
+	const value = "value"
+
+	doc := Document{}
+	doc[key] = value
+
+	require.Equal(t, value, doc.GetStringValue(key))
+
+	doc[key] = []string{"hello"}
+	require.Equal(t, "", doc.GetStringValue(key))
+}
+
+func TestStringEntry(t *testing.T) {
+	// not a string
+	str := stringEntry([]string{"hello"})
+	require.Empty(t, str)
+
+	str = stringEntry("hello")
+	require.Equal(t, "hello", str)
+}
+
+func TestArrayStringEntry(t *testing.T) {
+	arr := StringArray(nil)
+	require.Nil(t, arr)
+
+	// not an array
+	arr = StringArray("hello")
+	require.Nil(t, arr)
+}
+
+func reader(t *testing.T, filename string) io.Reader {
+	f, err := os.Open(filename)
+	require.Nil(t, err)
+
+	return f
+}
diff --git a/method/sidetreelongform/sidetree-core/document/jwk.go b/method/sidetreelongform/sidetree-core/document/jwk.go
new file mode 100644
index 0000000..c71cdc2
--- /dev/null
+++ b/method/sidetreelongform/sidetree-core/document/jwk.go
@@ -0,0 +1,56 @@
+/*
+Copyright SecureKey Technologies Inc. All Rights Reserved.
+
+SPDX-License-Identifier: Apache-2.0
+*/
+
+package document
+
+import "errors"
+
+// JWK represents a public key in JWK format.
+type JWK map[string]interface{}
+
+// NewJWK creates a new JWK.
+func NewJWK(jwk map[string]interface{}) JWK {
+	return jwk
+}
+
+// Kty is key type.
+func (jwk JWK) Kty() string {
+	return stringEntry(jwk["kty"])
+}
+
+// Crv is curve.
+func (jwk JWK) Crv() string {
+	return stringEntry(jwk["crv"])
+}
+
+// X is x.
+func (jwk JWK) X() string {
+	return stringEntry(jwk["x"])
+}
+
+// Y is y.
+func (jwk JWK) Y() string {
+	return stringEntry(jwk["y"])
+}
+
+// Validate will validate JWK properties.
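+// A hedged sketch of the checks below (field values are illustrative only):
+//
+//	jwk := NewJWK(map[string]interface{}{"kty": "EC", "crv": "P-256", "x": "..."})
+//	err := jwk.Validate() // nil: kty, crv and x are all present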
+func (jwk JWK) Validate() error {
+	// TODO: validation of the JWK fields depends on the algorithm (issue-409)
+	// For now, check the required fields for the currently supported algorithms: secp256k1, P-256, P-384, P-521 and Ed25519
+	if jwk.Crv() == "" {
+		return errors.New("JWK crv is missing")
+	}
+
+	if jwk.Kty() == "" {
+		return errors.New("JWK kty is missing")
+	}
+
+	if jwk.X() == "" {
+		return errors.New("JWK x is missing")
+	}
+
+	return nil
+}
diff --git a/method/sidetreelongform/sidetree-core/document/jwk_test.go b/method/sidetreelongform/sidetree-core/document/jwk_test.go
new file mode 100644
index 0000000..b63655a
--- /dev/null
+++ b/method/sidetreelongform/sidetree-core/document/jwk_test.go
@@ -0,0 +1,74 @@
+/*
+Copyright SecureKey Technologies Inc. All Rights Reserved.
+
+SPDX-License-Identifier: Apache-2.0
+*/
+
+package document
+
+import (
+	"testing"
+
+	"github.com/stretchr/testify/require"
+)
+
+func TestJWK(t *testing.T) {
+	jwk := NewJWK(map[string]interface{}{})
+	require.Empty(t, jwk.Kty())
+	require.Empty(t, jwk.Crv())
+	require.Empty(t, jwk.X())
+	require.Empty(t, jwk.Y())
+
+	jwk = NewJWK(map[string]interface{}{
+		"kty": "kty",
+		"crv": "crv",
+		"x":   "x",
+		"y":   "y",
+	})
+
+	require.Equal(t, "kty", jwk.Kty())
+	require.Equal(t, "crv", jwk.Crv())
+	require.Equal(t, "x", jwk.X())
+	require.Equal(t, "y", jwk.Y())
+}
+
+func TestValidate(t *testing.T) {
+	t.Run("missing kty", func(t *testing.T) {
+		jwk := JWK{
+			"kty": "",
+			"crv": "crv",
+			"x":   "x",
+			"y":   "y",
+		}
+
+		err := jwk.Validate()
+		require.Error(t, err)
+		require.Contains(t, err.Error(), "JWK kty is missing")
+	})
+
+	t.Run("missing crv", func(t *testing.T) {
+		jwk := JWK{
+			"kty": "kty",
+			"crv": "",
+			"x":   "x",
+			"y":   "y",
+		}
+
+		err := jwk.Validate()
+		require.Error(t, err)
+		require.Contains(t, err.Error(), "JWK crv is missing")
+	})
+
+	t.Run("missing x", func(t *testing.T) {
+		jwk := JWK{
+			"kty": "kty",
+			"crv": "crv",
+			"x":   "",
+			"y":   "y",
+		}
+
+		err := jwk.Validate()
+		require.Error(t, err)
+		require.Contains(t, err.Error(), "JWK x is missing")
+	})
+}
diff --git a/method/sidetreelongform/sidetree-core/document/publickey.go b/method/sidetreelongform/sidetree-core/document/publickey.go
new file mode 100644
index 0000000..6227c75
--- /dev/null
+++ b/method/sidetreelongform/sidetree-core/document/publickey.go
@@ -0,0 +1,102 @@
+/*
+Copyright SecureKey Technologies Inc. All Rights Reserved.
+
+SPDX-License-Identifier: Apache-2.0
+*/
+
+package document
+
+const (
+
+	// ControllerProperty defines key for controller.
+	ControllerProperty = "controller"
+
+	// PurposesProperty describes key purposes property.
+	PurposesProperty = "purposes"
+
+	// PublicKeyJwkProperty describes external public key JWK.
+	PublicKeyJwkProperty = "publicKeyJwk"
+
+	// TypeProperty describes type.
+	TypeProperty = "type"
+
+	// PublicKeyBase58Property defines base58 encoding for public key.
+	PublicKeyBase58Property = "publicKeyBase58"
+
+	// PublicKeyMultibaseProperty defines multibase encoding for public key.
+	PublicKeyMultibaseProperty = "publicKeyMultibase"
+)
+
+// KeyPurpose defines key purpose.
+type KeyPurpose string
+
+const (
+	// KeyPurposeAuthentication defines key purpose as authentication key.
+	KeyPurposeAuthentication = "authentication"
+	// KeyPurposeAssertionMethod defines key purpose as assertion key.
+	KeyPurposeAssertionMethod = "assertionMethod"
+	// KeyPurposeKeyAgreement defines key purpose as agreement key.
+ KeyPurposeKeyAgreement = "keyAgreement" + // KeyPurposeCapabilityDelegation defines key purpose as delegation key. + KeyPurposeCapabilityDelegation = "capabilityDelegation" + // KeyPurposeCapabilityInvocation defines key purpose as invocation key. + KeyPurposeCapabilityInvocation = "capabilityInvocation" +) + +// PublicKey must include id and type properties, and exactly one value property. +type PublicKey map[string]interface{} + +// NewPublicKey creates new public key. +func NewPublicKey(pk map[string]interface{}) PublicKey { + return pk +} + +// ID is public key ID. +func (pk PublicKey) ID() string { + return stringEntry(pk[IDProperty]) +} + +// Type is public key type. +func (pk PublicKey) Type() string { + return stringEntry(pk[TypeProperty]) +} + +// Controller identifies the entity that controls the corresponding private key. +func (pk PublicKey) Controller() string { + return stringEntry(pk[ControllerProperty]) +} + +// PublicKeyJwk is value property for JWK. +func (pk PublicKey) PublicKeyJwk() JWK { + entry, ok := pk[PublicKeyJwkProperty] + if !ok { + return nil + } + + json, ok := entry.(map[string]interface{}) + if !ok { + return nil + } + + return NewJWK(json) +} + +// PublicKeyBase58 is base58 encoded public key. +func (pk PublicKey) PublicKeyBase58() string { + return stringEntry(pk[PublicKeyBase58Property]) +} + +// PublicKeyMultibase is multibase public key. +func (pk PublicKey) PublicKeyMultibase() string { + return stringEntry(pk[PublicKeyMultibaseProperty]) +} + +// Purpose describes key purpose. +func (pk PublicKey) Purpose() []string { + return StringArray(pk[PurposesProperty]) +} + +// JSONLdObject returns map that represents JSON LD Object. +func (pk PublicKey) JSONLdObject() map[string]interface{} { + return pk +} diff --git a/method/sidetreelongform/sidetree-core/document/publickey_test.go b/method/sidetreelongform/sidetree-core/document/publickey_test.go new file mode 100644 index 0000000..9553bd6 --- /dev/null +++ b/method/sidetreelongform/sidetree-core/document/publickey_test.go @@ -0,0 +1,59 @@ +/* +Copyright SecureKey Technologies Inc. All Rights Reserved. 
+ +SPDX-License-Identifier: Apache-2.0 +*/ + +package document + +import ( + "testing" + + "github.com/stretchr/testify/require" +) + +func TestPublicKey(t *testing.T) { + pk := NewPublicKey(map[string]interface{}{}) + require.Empty(t, pk.ID()) + require.Empty(t, pk.Type()) + require.Empty(t, pk.Controller()) + + pk = NewPublicKey(map[string]interface{}{ + "id": "did:example:123456789abcdefghi#keys-1", + "type": "JsonWebKey2020", + "controller": "did:example:123456789abcdefghi", + }) + require.Equal(t, "did:example:123456789abcdefghi#keys-1", pk.ID()) + require.Equal(t, "JsonWebKey2020", pk.Type()) + require.Equal(t, "did:example:123456789abcdefghi", pk.Controller()) + require.Empty(t, pk.Purpose()) + require.Empty(t, pk.PublicKeyJwk()) + require.Empty(t, pk.PublicKeyBase58()) + require.Empty(t, pk.PublicKeyMultibase()) + + require.NotEmpty(t, pk.JSONLdObject()) +} + +func TestPublicKeyJWK(t *testing.T) { + pk := NewPublicKey(map[string]interface{}{ + "publicKeyJwk": map[string]interface{}{ + "kty": "kty", + "crv": "crv", + "x": "x", + "y": "y", + }, + }) + + jwk := pk.PublicKeyJwk() + require.Equal(t, "kty", jwk.Kty()) + require.Equal(t, "crv", jwk.Crv()) + require.Equal(t, "x", jwk.X()) + require.Equal(t, "y", jwk.Y()) + + pk = NewPublicKey(map[string]interface{}{ + "publicKeyJwk": "invalid", + }) + + jwk = pk.PublicKeyJwk() + require.Nil(t, jwk) +} diff --git a/method/sidetreelongform/sidetree-core/document/replace.go b/method/sidetreelongform/sidetree-core/document/replace.go new file mode 100644 index 0000000..32ddfd9 --- /dev/null +++ b/method/sidetreelongform/sidetree-core/document/replace.go @@ -0,0 +1,56 @@ +/* +Copyright SecureKey Technologies Inc. All Rights Reserved. + +SPDX-License-Identifier: Apache-2.0 +*/ + +package document + +import ( + "encoding/json" +) + +const ( + + // ReplaceServiceProperty defines key for service property. + ReplaceServiceProperty = "services" + + // ReplacePublicKeyProperty defines key for public key property. + ReplacePublicKeyProperty = "publicKeys" +) + +// ReplaceDocument defines replace document data structure. +type ReplaceDocument map[string]interface{} + +// ReplaceDocumentFromBytes creates an instance of replace document +// (for 'replace' patch, may be used for replace action). +func ReplaceDocumentFromBytes(data []byte) (ReplaceDocument, error) { + doc := make(ReplaceDocument) + + err := json.Unmarshal(data, &doc) + if err != nil { + return nil, err + } + + return doc, nil +} + +// ReplaceDocumentFromJSONLDObject creates an instance of ReplaceDocument from json ld object. +func ReplaceDocumentFromJSONLDObject(jsonldObject map[string]interface{}) ReplaceDocument { + return jsonldObject +} + +// PublicKeys returns public keys for replace document. +func (doc ReplaceDocument) PublicKeys() []PublicKey { + return ParsePublicKeys(doc[ReplacePublicKeyProperty]) +} + +// Services returns services for replace document. +func (doc ReplaceDocument) Services() []Service { + return ParseServices(doc[ReplaceServiceProperty]) +} + +// JSONLdObject returns map that represents JSON LD Object. +func (doc ReplaceDocument) JSONLdObject() map[string]interface{} { + return doc +} diff --git a/method/sidetreelongform/sidetree-core/document/replace_test.go b/method/sidetreelongform/sidetree-core/document/replace_test.go new file mode 100644 index 0000000..a9a69a7 --- /dev/null +++ b/method/sidetreelongform/sidetree-core/document/replace_test.go @@ -0,0 +1,55 @@ +/* +Copyright SecureKey Technologies Inc. All Rights Reserved. 
+
+SPDX-License-Identifier: Apache-2.0
+*/
+
+package document
+
+import (
+	"testing"
+
+	"github.com/stretchr/testify/require"
+)
+
+func TestReplaceDocumentFromBytes(t *testing.T) {
+	doc, err := ReplaceDocumentFromBytes([]byte(replaceDoc))
+	require.Nil(t, err)
+	require.NotNil(t, doc)
+	require.Equal(t, 1, len(doc.PublicKeys()))
+	require.Equal(t, 1, len(doc.Services()))
+
+	jsonld := doc.JSONLdObject()
+	require.NotNil(t, jsonld)
+
+	jsonldObject := ReplaceDocumentFromJSONLDObject(jsonld)
+	require.Equal(t, doc.PublicKeys()[0], jsonldObject.PublicKeys()[0])
+}
+
+func TestReplaceDocumentFromBytesError(t *testing.T) {
+	doc, err := ReplaceDocumentFromBytes([]byte("[test : 123]"))
+	require.NotNil(t, err)
+	require.Nil(t, doc)
+	require.Contains(t, err.Error(), "invalid character")
+}
+
+const replaceDoc = `{
+	"publicKeys": [
+	{
+		"id": "key-1",
+		"purposes": ["authentication"],
+		"type": "EcdsaSecp256k1VerificationKey2019",
+		"publicKeyJwk": {
+			"kty": "EC",
+			"crv": "P-256K",
+			"x": "PUymIqdtF_qxaAqPABSw-C-owT1KYYQbsMKFM-L9fJA",
+			"y": "nM84jDHCMOTGTh_ZdHq4dBBdo4Z5PkEOW9jA8z8IsGc"
+		}
+	}],
+	"services": [
+	{
+		"id": "sds3",
+		"type": "SecureDataStore",
+		"serviceEndpoint": "http://hub.my-personal-server.com"
+	}]
+}`
diff --git a/method/sidetreelongform/sidetree-core/document/resolution.go b/method/sidetreelongform/sidetree-core/document/resolution.go
new file mode 100644
index 0000000..2933f09
--- /dev/null
+++ b/method/sidetreelongform/sidetree-core/document/resolution.go
@@ -0,0 +1,106 @@
+/*
+Copyright SecureKey Technologies Inc. All Rights Reserved.
+
+SPDX-License-Identifier: Apache-2.0
+*/
+
+package document
+
+import "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/api/operation"
+
+// ResolutionResult describes resolution result.
+type ResolutionResult struct {
+	Context          interface{} `json:"@context"`
+	Document         Document    `json:"didDocument"`
+	DocumentMetadata Metadata    `json:"didDocumentMetadata,omitempty"`
+}
+
+// Metadata can contain various metadata, such as document metadata and method metadata.
+type Metadata map[string]interface{}
+
+const (
+	// UpdateCommitmentProperty is update commitment key.
+	UpdateCommitmentProperty = "updateCommitment"
+
+	// RecoveryCommitmentProperty is recovery commitment key.
+	RecoveryCommitmentProperty = "recoveryCommitment"
+
+	// PublishedProperty is published key.
+	PublishedProperty = "published"
+
+	// DeactivatedProperty is deactivated flag key.
+	DeactivatedProperty = "deactivated"
+
+	// AnchorOriginProperty is anchor origin key.
+	AnchorOriginProperty = "anchorOrigin"
+
+	// CreatedProperty is the time that document was created - anchoring time of first successful create operation.
+	CreatedProperty = "created"
+
+	// UpdatedProperty is the time of last document update - anchoring time of update/recover operations.
+	UpdatedProperty = "updated"
+
+	// VersionIDProperty is version ID key.
+	VersionIDProperty = "versionId"
+
+	// CanonicalIDProperty is canonical ID key.
+	CanonicalIDProperty = "canonicalId"
+
+	// EquivalentIDProperty is equivalent ID array.
+	EquivalentIDProperty = "equivalentId"
+
+	// MethodProperty is used for method metadata within did document metadata.
+	MethodProperty = "method"
+
+	// UnpublishedOperationsProperty holds unpublished did operations.
+	UnpublishedOperationsProperty = "unpublishedOperations"
+
+	// PublishedOperationsProperty holds published did operations.
+ PublishedOperationsProperty = "publishedOperations" +) + +// ResolutionOption is an option for specifying the resolution options for various resolvers. +type ResolutionOption func(opts *ResolutionOptions) + +// ResolutionOptions represent resolution options. +type ResolutionOptions struct { + AdditionalOperations []*operation.AnchoredOperation + VersionID string + VersionTime string +} + +// WithAdditionalOperations sets the additional operations to be used in a Resolve call. +func WithAdditionalOperations(additionalOperations []*operation.AnchoredOperation) ResolutionOption { + return func(opts *ResolutionOptions) { + if len(additionalOperations) > 0 { + opts.AdditionalOperations = additionalOperations + } + } +} + +// WithVersionID sets the version ID to be used in a Resolve call. +func WithVersionID(versionID string) ResolutionOption { + return func(opts *ResolutionOptions) { + opts.VersionID = versionID + } +} + +// WithVersionTime sets the version time to be used in a Resolve call. +func WithVersionTime(versionTime string) ResolutionOption { + return func(opts *ResolutionOptions) { + opts.VersionTime = versionTime + } +} + +// GetResolutionOptions returns resolution options. +func GetResolutionOptions(opts ...ResolutionOption) (ResolutionOptions, error) { + options := ResolutionOptions{} + + for _, option := range opts { + if option != nil { + option(&options) + } + } + + return options, nil +} diff --git a/method/sidetreelongform/sidetree-core/document/resolution_test.go b/method/sidetreelongform/sidetree-core/document/resolution_test.go new file mode 100644 index 0000000..2605e7c --- /dev/null +++ b/method/sidetreelongform/sidetree-core/document/resolution_test.go @@ -0,0 +1,29 @@ +/* +Copyright SecureKey Technologies Inc. All Rights Reserved. + +SPDX-License-Identifier: Apache-2.0 +*/ + +package document + +import ( + "testing" + + "github.com/stretchr/testify/require" + + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/api/operation" +) + +func TestGetOptions(t *testing.T) { + const ( + verTime = "2021-05-10T17:00:00Z" + verID = "ver" + ) + + opts, err := GetResolutionOptions(WithAdditionalOperations([]*operation.AnchoredOperation{{Type: "create"}}), + WithVersionID(verID), WithVersionTime(verTime)) + require.NoError(t, err) + require.Equal(t, 1, len(opts.AdditionalOperations)) + require.Equal(t, verID, opts.VersionID) + require.Equal(t, verTime, opts.VersionTime) +} diff --git a/method/sidetreelongform/sidetree-core/document/service.go b/method/sidetreelongform/sidetree-core/document/service.go new file mode 100644 index 0000000..beca673 --- /dev/null +++ b/method/sidetreelongform/sidetree-core/document/service.go @@ -0,0 +1,38 @@ +/* +Copyright SecureKey Technologies Inc. All Rights Reserved. + +SPDX-License-Identifier: Apache-2.0 +*/ + +package document + +// ServiceEndpointProperty describes external service endpoint property. +const ServiceEndpointProperty = "serviceEndpoint" + +// Service represents any type of service the entity wishes to advertise. +type Service map[string]interface{} + +// NewService creates new service. +func NewService(m map[string]interface{}) Service { + return m +} + +// ID is service ID. +func (s Service) ID() string { + return stringEntry(s[IDProperty]) +} + +// Type is service type. +func (s Service) Type() string { + return stringEntry(s[TypeProperty]) +} + +// ServiceEndpoint is service endpoint. 
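+// The value is returned as interface{} because a service endpoint may be a URI
+// string, a map, or an array. A hedged caller-side sketch (svc is an
+// illustrative Service value):
+//
+//	switch ep := svc.ServiceEndpoint().(type) {
+//	case string:
+//		// single endpoint URI
+//	case map[string]interface{}:
+//		// structured endpoint object
+//	default:
+//		_ = ep // array form, or absent
+//	}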
+func (s Service) ServiceEndpoint() interface{} { + return s[ServiceEndpointProperty] +} + +// JSONLdObject returns map that represents JSON LD Object. +func (s Service) JSONLdObject() map[string]interface{} { + return s +} diff --git a/method/sidetreelongform/sidetree-core/document/service_test.go b/method/sidetreelongform/sidetree-core/document/service_test.go new file mode 100644 index 0000000..7cd5373 --- /dev/null +++ b/method/sidetreelongform/sidetree-core/document/service_test.go @@ -0,0 +1,29 @@ +/* +Copyright SecureKey Technologies Inc. All Rights Reserved. + +SPDX-License-Identifier: Apache-2.0 +*/ + +package document + +import ( + "testing" + + "github.com/stretchr/testify/require" +) + +func TestService(t *testing.T) { + svc := NewService(map[string]interface{}{}) + require.Empty(t, svc.Type()) + + svc = NewService(map[string]interface{}{ + "id": "did:example:123456789abcdefghi;openid", + "type": "OpenIdConnectVersion3.1Service", + "serviceEndpoint": "https://openid.example.com/", + }) + require.Equal(t, "did:example:123456789abcdefghi;openid", svc.ID()) + require.Equal(t, "OpenIdConnectVersion3.1Service", svc.Type()) + require.Equal(t, "https://openid.example.com/", svc.ServiceEndpoint()) + + require.NotEmpty(t, svc.JSONLdObject()) +} diff --git a/method/sidetreelongform/sidetree-core/document/testdata/invalid-lists.json b/method/sidetreelongform/sidetree-core/document/testdata/invalid-lists.json new file mode 100644 index 0000000..9611b0b --- /dev/null +++ b/method/sidetreelongform/sidetree-core/document/testdata/invalid-lists.json @@ -0,0 +1,14 @@ +{ + "publicKey": + { + "id": "key2", + "type": "RsaVerificationKey2018", + "publicKeyPem": "-----BEGIN PUBLIC KEY.2.END PUBLIC KEY-----" + }, + "service": + { + "id": "IdentityHub", + "type": "IdentityHub", + "serviceEndpoint": "" + } +} \ No newline at end of file diff --git a/method/sidetreelongform/sidetree-core/document/testdata/pk-doc.json b/method/sidetreelongform/sidetree-core/document/testdata/pk-doc.json new file mode 100644 index 0000000..35c6f7a --- /dev/null +++ b/method/sidetreelongform/sidetree-core/document/testdata/pk-doc.json @@ -0,0 +1,27 @@ +{ + "publicKey": [ + { + "id": "key1", + "type": "JsonWebKey2020", + "purposes": ["authentication"], + "publicKeyJwk": { + "kty": "EC", + "crv": "P-256K", + "x": "PUymIqdtF_qxaAqPABSw-C-owT1KYYQbsMKFM-L9fJA", + "y": "nM84jDHCMOTGTh_ZdHq4dBBdo4Z5PkEOW9jA8z8IsGc" + } + } + ], + "service": [ + { + "id": "hub", + "type": "IdentityHub", + "serviceEndpoint": "https://example.com/hub/", + "routingKeys": "routingKeysValue", + "recipientKeys": "recipientKeysValue", + "priority": 0 + } + ], + "authentication": ["whatever"], + "alsoKnownAs": ["identityURI"] +} diff --git a/method/sidetreelongform/sidetree-core/document/testdata/vm-doc.json b/method/sidetreelongform/sidetree-core/document/testdata/vm-doc.json new file mode 100644 index 0000000..635cfce --- /dev/null +++ b/method/sidetreelongform/sidetree-core/document/testdata/vm-doc.json @@ -0,0 +1,15 @@ +{ + "verificationMethod": [ + { + "id": "key1", + "type": "JsonWebKey2020", + "purposes": ["authentication"], + "publicKeyJwk": { + "kty": "EC", + "crv": "P-256K", + "x": "PUymIqdtF_qxaAqPABSw-C-owT1KYYQbsMKFM-L9fJA", + "y": "nM84jDHCMOTGTh_ZdHq4dBBdo4Z5PkEOW9jA8z8IsGc" + } + } + ] +} diff --git a/method/sidetreelongform/sidetree-core/docutil/doc.go b/method/sidetreelongform/sidetree-core/docutil/doc.go new file mode 100644 index 0000000..82e34b5 --- /dev/null +++ b/method/sidetreelongform/sidetree-core/docutil/doc.go @@ -0,0 +1,40 @@ 
+/*
+Copyright SecureKey Technologies Inc. All Rights Reserved.
+
+SPDX-License-Identifier: Apache-2.0
+*/
+
+package docutil
+
+import (
+	"strings"
+
+	"github.com/pkg/errors"
+
+	"github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/hashing"
+)
+
+// NamespaceDelimiter is the delimiter that separates the namespace from the unique suffix.
+const NamespaceDelimiter = ":"
+
+// CalculateID calculates the ID from the model and namespace.
+func CalculateID(namespace string, value interface{}, hashAlgorithmAsMultihashCode uint) (string, error) {
+	uniqueSuffix, err := hashing.CalculateModelMultihash(value, hashAlgorithmAsMultihashCode)
+	if err != nil {
+		return "", err
+	}
+
+	didID := namespace + NamespaceDelimiter + uniqueSuffix
+
+	return didID, nil
+}
+
+// GetNamespaceFromID returns namespace from ID.
+func GetNamespaceFromID(id string) (string, error) {
+	pos := strings.LastIndex(id, ":")
+	if pos == -1 {
+		return "", errors.Errorf("invalid ID [%s]", id)
+	}
+
+	return id[0:pos], nil
+}
diff --git a/method/sidetreelongform/sidetree-core/docutil/doc_test.go b/method/sidetreelongform/sidetree-core/docutil/doc_test.go
new file mode 100644
index 0000000..9ce8a3c
--- /dev/null
+++ b/method/sidetreelongform/sidetree-core/docutil/doc_test.go
@@ -0,0 +1,75 @@
+/*
+Copyright SecureKey Technologies Inc. All Rights Reserved.
+
+SPDX-License-Identifier: Apache-2.0
+*/
+
+package docutil
+
+import (
+	"testing"
+
+	"github.com/stretchr/testify/require"
+)
+
+const (
+	sha2_256  uint = 18
+	namespace      = "did:sidetree"
+)
+
+func TestCalculateID(t *testing.T) {
+	t.Run("success", func(t *testing.T) {
+		id, err := CalculateID(namespace, suffixDataObject, sha2_256)
+		require.Nil(t, err)
+		require.Equal(t, namespace+NamespaceDelimiter+expectedSuffixForSuffixObject, id)
+	})
+
+	t.Run("error - multihash algorithm not supported", func(t *testing.T) {
+		id, err := CalculateID(namespace, suffixDataObject, 55)
+		require.NotNil(t, err)
+		require.Empty(t, id)
+		require.Contains(t, err.Error(), "algorithm not supported, unable to compute hash")
+	})
+}
+
+func TestDidCalculationError(t *testing.T) {
+	// non-supported multihash code will cause an error
+	id, err := CalculateID(namespace, suffixDataObject, 55)
+	require.NotNil(t, err)
+	require.Empty(t, id)
+	require.Contains(t, err.Error(), "algorithm not supported, unable to compute hash")
+
+	// payload has to be JSON object in order to canonicalize
+	id, err = CalculateID(namespace, "!!!", sha2_256)
+	require.NotNil(t, err)
+	require.Empty(t, id)
+	require.Contains(t, err.Error(), "Expected '{'")
+}
+
+func TestNamespaceFromID(t *testing.T) {
+	const namespace = "did:sidetree"
+	const suffix = "123456"
+
+	t.Run("Valid ID", func(t *testing.T) {
+		ns, err := GetNamespaceFromID(namespace + NamespaceDelimiter + suffix)
+		require.NoError(t, err)
+		require.Equal(t, namespace, ns)
+	})
+
+	t.Run("Invalid ID", func(t *testing.T) {
+		ns, err := GetNamespaceFromID(suffix)
+		require.Error(t, err)
+		require.Contains(t, err.Error(), "invalid ID")
+		require.Empty(t, ns)
+	})
+}
+
+var suffixDataObject = &struct {
+	DeltaHash          string `json:"deltaHash,omitempty"`
+	RecoveryCommitment string `json:"recoveryCommitment,omitempty"`
+}{
+	DeltaHash:          "EiBOmkP6kn7yjt0VocmcPu9OQOsZi199Evh-xB48ebubQA",
+	RecoveryCommitment: "EiAAZJYry29vICkwmso8FL92WAISMAhsL8xkCm8dYVnq_w",
+}
+
+const expectedSuffixForSuffixObject = "EiA5vyaRzJIxbkuZbvwEXiC__u8ieFx50TAAo98tBzCuyA"
diff --git a/method/sidetreelongform/sidetree-core/docutil/json.go
b/method/sidetreelongform/sidetree-core/docutil/json.go new file mode 100644 index 0000000..310d501 --- /dev/null +++ b/method/sidetreelongform/sidetree-core/docutil/json.go @@ -0,0 +1,98 @@ +/* +Copyright SecureKey Technologies Inc. All Rights Reserved. + +SPDX-License-Identifier: Apache-2.0 +*/ + +package docutil + +import ( + "bytes" + "encoding/json" +) + +// MarshalCanonical marshals the object into a canonical JSON format. +func MarshalCanonical(v interface{}) ([]byte, error) { + b, err := json.Marshal(v) + if err != nil { + return nil, err + } + + return getCanonicalContent(b) +} + +// MarshalIndentCanonical is like MarshalCanonical but applies Indent to format the output. +// Each JSON element in the output will begin on a new line beginning with prefix +// followed by one or more copies of indent according to the indentation nesting. +func MarshalIndentCanonical(v interface{}, prefix, indent string) ([]byte, error) { + b, err := MarshalCanonical(v) + if err != nil { + return nil, err + } + var buf bytes.Buffer + err = json.Indent(&buf, b, prefix, indent) + if err != nil { + return nil, err + } + + return buf.Bytes(), nil +} + +// getCanonicalContent ensures that fields in the JSON doc are marshaled in a deterministic order. +func getCanonicalContent(content []byte) ([]byte, error) { + m, err := unmarshalJSONMap(content) + if err != nil { + a, e := unmarshalJSONArray(content) + if e != nil { + return nil, e + } + + // Re-marshal it in order to ensure that the JSON fields are marshaled in a deterministic order. + aBytes, e := marshalJSONArray(a) + if e != nil { + return nil, e + } + + return aBytes, nil + } + + // Re-marshal it in order to ensure that the JSON fields are marshaled in a deterministic order. + mBytes, err := marshalJSONMap(m) + if err != nil { + return nil, err + } + + return mBytes, nil +} + +// marshalJSONMap marshals a JSON map. This variable may be overridden by unit tests. +var marshalJSONMap = func(m map[string]interface{}) ([]byte, error) { + return json.Marshal(&m) +} + +// unmarshalJSONMap unmarshals a JSON map from the given bytes. This variable may be overridden by unit tests. +var unmarshalJSONMap = func(bytes []byte) (map[string]interface{}, error) { + m := make(map[string]interface{}) + err := json.Unmarshal(bytes, &m) + if err != nil { + return nil, err + } + + return m, nil +} + +// unmarshalJSONArray unmarshals an array of JSON maps from the given bytes. This variable may be overridden by unit tests. +var unmarshalJSONArray = func(bytes []byte) ([]map[string]interface{}, error) { + var a []map[string]interface{} + err := json.Unmarshal(bytes, &a) + if err != nil { + return nil, err + } + + return a, nil +} + +// marshalJSONArray marshals an array of JSON maps. This variable may be overridden by unit tests. +var marshalJSONArray = func(a []map[string]interface{}) ([]byte, error) { + return json.Marshal(&a) +} diff --git a/method/sidetreelongform/sidetree-core/docutil/json_test.go b/method/sidetreelongform/sidetree-core/docutil/json_test.go new file mode 100644 index 0000000..ebf4366 --- /dev/null +++ b/method/sidetreelongform/sidetree-core/docutil/json_test.go @@ -0,0 +1,169 @@ +//nolint +/* +Copyright SecureKey Technologies Inc. All Rights Reserved. 
+ +SPDX-License-Identifier: Apache-2.0 +*/ + +package docutil + +import ( + "encoding/json" + "errors" + "fmt" + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +type testData struct { + FieldC string + FieldB int + FieldA string +} + +func TestMarshalCanonical(t *testing.T) { + value1 := &testData{ + FieldC: "valueC_1", + FieldB: 100, + FieldA: "valueA_1", + } + value2 := &testData{ + FieldC: "valueC_2", + FieldB: 200, + FieldA: "valueA_2", + } + + t.Run("Struct", func(t *testing.T) { + v1, err := MarshalCanonical(value1) + require.NoError(t, err) + assert.NotNil(t, v1) + fmt.Printf("%s\n", v1) + + v := &testData{} + err = json.Unmarshal(v1, v) + require.NoError(t, err) + + require.Equal(t, value1, v) + }) + + t.Run("Array", func(t *testing.T) { + arr := []*testData{value1, value2} + v1, err := MarshalCanonical(arr) + require.NoError(t, err) + assert.NotNil(t, v1) + fmt.Printf("%s\n", v1) + + var v []*testData + err = json.Unmarshal(v1, &v) + require.NoError(t, err) + + require.Equal(t, arr, v) + }) + + t.Run("Marshal struct error", func(t *testing.T) { + reset := SetJSONMarshaler(func(map[string]interface{}) (bytes []byte, e error) { + return nil, errors.New("injected marshal error") + }) + defer reset() + + _, err := MarshalCanonical(value1) + require.Error(t, err) + }) + + t.Run("Unmarshal struct error", func(t *testing.T) { + reset := SetJSONUnmarshaler(func(bytes []byte) (map[string]interface{}, error) { + return nil, errors.New("injected marshal error") + }) + defer reset() + + _, err := MarshalCanonical(value1) + require.Error(t, err) + }) + + t.Run("Marshal array error", func(t *testing.T) { + reset := SetJSONArrayMarshaler(func([]map[string]interface{}) (bytes []byte, e error) { + return nil, errors.New("injected marshal error") + }) + defer reset() + + _, err := MarshalCanonical([]*testData{value1, value2}) + require.Error(t, err) + }) + + t.Run("Unmarshal array error", func(t *testing.T) { + reset := SetJSONArrayUnmarshaler(func(bytes []byte) ([]map[string]interface{}, error) { + return nil, errors.New("injected marshal error") + }) + defer reset() + + _, err := MarshalCanonical([]*testData{value1, value2}) + require.Error(t, err) + }) +} + +func TestMarshalIndentCanonical(t *testing.T) { + value1 := &testData{ + FieldC: "valueC_1", + FieldB: 100, + FieldA: "valueA_1", + } + + t.Run("Success", func(t *testing.T) { + v1, err := MarshalIndentCanonical(value1, "", " ") + require.NoError(t, err) + assert.NotNil(t, v1) + fmt.Printf("%s\n", v1) + }) + + t.Run("Marshal error", func(t *testing.T) { + reset := SetJSONMarshaler(func(m map[string]interface{}) (bytes []byte, e error) { + return nil, errors.New("injected marshal error") + }) + defer reset() + + _, err := MarshalIndentCanonical(value1, "", " ") + require.Error(t, err) + }) +} + +func TestGetCanonicalContent(t *testing.T) { + t.Run("Struct", func(t *testing.T) { + value1 := []byte(`{"field1":"value1","field2":"value2"}`) + value2 := []byte(`{"field2":"value2","field1":"value1"}`) + + v1, err := getCanonicalContent(value1) + require.NoError(t, err) + assert.NotNil(t, v1) + + v2, err := getCanonicalContent(value2) + require.NoError(t, err) + assert.Equal(t, v1, v2) + }) + + t.Run("Array", func(t *testing.T) { + value1 := []byte(`[{"field1":"value1_1","field2":"value2_1"},{"field1":"value1_2","field2":"value2_2"}]`) + value2 := []byte(`[{"field2":"value2_1","field1":"value1_1"},{"field2":"value2_2","field1":"value1_2"}]`) + + v1, err := getCanonicalContent(value1) + 
require.NoError(t, err) + assert.NotNil(t, v1) + + v2, err := getCanonicalContent(value2) + require.NoError(t, err) + assert.Equal(t, v1, v2) + }) + + t.Run("Marshal error", func(t *testing.T) { + value1 := []byte(`{"field1":"value1","field2":"value2"}`) + + reset := SetJSONMarshaler(func(m map[string]interface{}) (bytes []byte, e error) { + return nil, errors.New("injected marshal error") + }) + defer reset() + + _, err := getCanonicalContent(value1) + require.Error(t, err) + }) +} diff --git a/method/sidetreelongform/sidetree-core/docutil/test_exports.go b/method/sidetreelongform/sidetree-core/docutil/test_exports.go new file mode 100644 index 0000000..19fb148 --- /dev/null +++ b/method/sidetreelongform/sidetree-core/docutil/test_exports.go @@ -0,0 +1,53 @@ +//go:build testing + +/* +Copyright SecureKey Technologies Inc. All Rights Reserved. + +SPDX-License-Identifier: Apache-2.0 +*/ + +package docutil + +// SetJSONMarshaler sets the JSON map marshaler for unit tests. +// Returns a function that resets the marshaler to the previous value. +func SetJSONMarshaler(marshaler func(m map[string]interface{}) ([]byte, error)) func() { + prevMarshaler := marshalJSONMap + marshalJSONMap = marshaler + + return func() { + marshalJSONMap = prevMarshaler + } +} + +// SetJSONUnmarshaler sets the JSON map unmarshaler for unit tests. +// Returns a function that resets the unmarshaler to the previous value. +func SetJSONUnmarshaler(unmarshaler func(bytes []byte) (map[string]interface{}, error)) func() { + prevUnmarshaler := unmarshalJSONMap + unmarshalJSONMap = unmarshaler + + return func() { + unmarshalJSONMap = prevUnmarshaler + } +} + +// SetJSONArrayMarshaler sets the JSON array marshaler for unit tests. +// Returns a function that resets the marshaler to the previous value. +func SetJSONArrayMarshaler(marshaler func(m []map[string]interface{}) ([]byte, error)) func() { + prevMarshaler := marshalJSONArray + marshalJSONArray = marshaler + + return func() { + marshalJSONArray = prevMarshaler + } +} + +// SetJSONArrayUnmarshaler sets the JSON array unmarshaler for unit tests. +// Returns a function that resets the unmarshaler to the previous value. +func SetJSONArrayUnmarshaler(unmarshaler func(bytes []byte) ([]map[string]interface{}, error)) func() { + prevUnmarshaler := unmarshalJSONArray + unmarshalJSONArray = unmarshaler + + return func() { + unmarshalJSONArray = prevUnmarshaler + } +} diff --git a/method/sidetreelongform/sidetree-core/encoder/encoder.go b/method/sidetreelongform/sidetree-core/encoder/encoder.go new file mode 100644 index 0000000..6e50764 --- /dev/null +++ b/method/sidetreelongform/sidetree-core/encoder/encoder.go @@ -0,0 +1,19 @@ +/* +Copyright SecureKey Technologies Inc. All Rights Reserved. + +SPDX-License-Identifier: Apache-2.0 +*/ + +package encoder + +import "encoding/base64" + +// EncodeToString encodes the bytes to string. +func EncodeToString(data []byte) string { + return base64.RawURLEncoding.EncodeToString(data) +} + +// DecodeString decodes the encoded content to Bytes. +func DecodeString(encodedContent string) ([]byte, error) { + return base64.RawURLEncoding.DecodeString(encodedContent) +} diff --git a/method/sidetreelongform/sidetree-core/encoder/encoder_test.go b/method/sidetreelongform/sidetree-core/encoder/encoder_test.go new file mode 100644 index 0000000..485a5c9 --- /dev/null +++ b/method/sidetreelongform/sidetree-core/encoder/encoder_test.go @@ -0,0 +1,24 @@ +/* +Copyright SecureKey Technologies Inc. All Rights Reserved. 
+
+SPDX-License-Identifier: Apache-2.0
+*/
+
+package encoder
+
+import (
+	"testing"
+
+	"github.com/stretchr/testify/require"
+)
+
+func TestEncodeAndDecodeAsString(t *testing.T) {
+	data := "Hello World"
+	encoded := EncodeToString([]byte(data))
+	require.NotNil(t, encoded)
+
+	decodedBytes, err := DecodeString(encoded)
+	require.Nil(t, err)
+	require.NotNil(t, decodedBytes)
+	require.EqualValues(t, "Hello World", decodedBytes)
+}
diff --git a/method/sidetreelongform/sidetree-core/hashing/hash.go b/method/sidetreelongform/sidetree-core/hashing/hash.go
new file mode 100644
index 0000000..166a971
--- /dev/null
+++ b/method/sidetreelongform/sidetree-core/hashing/hash.go
@@ -0,0 +1,145 @@
+/*
+Copyright SecureKey Technologies Inc. All Rights Reserved.
+
+SPDX-License-Identifier: Apache-2.0
+*/
+
+package hashing
+
+import (
+	"crypto"
+	"errors"
+	"fmt"
+
+	"github.com/multiformats/go-multihash"
+
+	"github.com/trustbloc/did-go/doc/json/canonicalizer"
+	"github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/encoder"
+)
+
+// ComputeMultihash will compute the hash for the supplied bytes using the given multihash code.
+func ComputeMultihash(multihashCode uint, bytes []byte) ([]byte, error) {
+	hash, err := GetHashFromMultihash(multihashCode)
+	if err != nil {
+		return nil, err
+	}
+
+	hashedBytes, err := GetHash(hash, bytes)
+	if err != nil {
+		return nil, err
+	}
+
+	return multihash.Encode(hashedBytes, uint64(multihashCode))
+}
+
+// GetHashFromMultihash will return the hash that corresponds to the specified multihash code.
+func GetHashFromMultihash(multihashCode uint) (h crypto.Hash, err error) {
+	switch multihashCode {
+	case multihash.SHA2_256:
+		h = crypto.SHA256
+	case multihash.SHA2_512:
+		h = crypto.SHA512
+	default:
+		err = fmt.Errorf("algorithm not supported, unable to compute hash")
+	}
+
+	return h, err
+}
+
+// IsSupportedMultihash checks to see if the given encoded hash has been hashed using a valid multihash code.
+func IsSupportedMultihash(encodedMultihash string) bool {
+	code, err := GetMultihashCode(encodedMultihash)
+	if err != nil {
+		return false
+	}
+
+	return multihash.ValidCode(code)
+}
+
+// IsComputedUsingMultihashAlgorithms checks to see if the given encoded
+// hash has been hashed using one of the supplied codes.
+func IsComputedUsingMultihashAlgorithms(encodedMultihash string, codes []uint) bool {
+	mhCode, err := GetMultihashCode(encodedMultihash)
+	if err != nil {
+		return false
+	}
+
+	for _, supported := range codes {
+		if mhCode == uint64(supported) {
+			return true
+		}
+	}
+
+	return false
+}
+
+// GetMultihashCode returns multihash code from encoded multihash.
+func GetMultihashCode(encodedMultihash string) (uint64, error) {
+	mh, err := GetMultihash(encodedMultihash)
+	if err != nil {
+		return 0, fmt.Errorf("failed to get decoded multihash: %s", err.Error())
+	}
+
+	return mh.Code, nil
+}
+
+// GetMultihash returns decoded multihash from encoded multihash.
+func GetMultihash(encodedMultihash string) (*multihash.DecodedMultihash, error) {
+	multihashBytes, err := encoder.DecodeString(encodedMultihash)
+	if err != nil {
+		return nil, err
+	}
+
+	return multihash.Decode(multihashBytes)
+}
+
+// IsValidModelMultihash compares the model's multihash with the provided multihash.
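+// A hedged round-trip sketch (18 is the sha2-256 multihash code, as in the tests):
+//
+//	mh, _ := CalculateModelMultihash(model, 18)
+//	err := IsValidModelMultihash(model, mh) // nil while model is unchanged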
+func IsValidModelMultihash(model interface{}, modelMultihash string) error { + code, err := GetMultihashCode(modelMultihash) + if err != nil { + return err + } + + encodedComputedMultihash, err := CalculateModelMultihash(model, uint(code)) + if err != nil { + return err + } + + if encodedComputedMultihash != modelMultihash { + return errors.New("supplied hash doesn't match original content") + } + + return nil +} + +// CalculateModelMultihash calculates model multihash. +func CalculateModelMultihash(value interface{}, alg uint) (string, error) { + bytes, err := canonicalizer.MarshalCanonical(value) + if err != nil { + return "", err + } + + multiHashBytes, err := ComputeMultihash(alg, bytes) + if err != nil { + return "", err + } + + return encoder.EncodeToString(multiHashBytes), nil +} + +// GetHash calculates hash of data using hash function identified by hash. +func GetHash(hash crypto.Hash, data []byte) ([]byte, error) { + if !hash.Available() { + return nil, fmt.Errorf("hash function not available for: %d", hash) + } + + h := hash.New() + + if _, hashErr := h.Write(data); hashErr != nil { + return nil, hashErr + } + + result := h.Sum(nil) + + return result, nil +} diff --git a/method/sidetreelongform/sidetree-core/hashing/hash_test.go b/method/sidetreelongform/sidetree-core/hashing/hash_test.go new file mode 100644 index 0000000..0ec7799 --- /dev/null +++ b/method/sidetreelongform/sidetree-core/hashing/hash_test.go @@ -0,0 +1,184 @@ +/* +Copyright SecureKey Technologies Inc. All Rights Reserved. + +SPDX-License-Identifier: Apache-2.0 +*/ + +package hashing + +import ( + "crypto" + "crypto/sha256" + "testing" + + "github.com/stretchr/testify/require" + + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/encoder" +) + +const ( + algSHA256 = 5 + + sha2_256 = 18 + sha2_512 = 19 +) + +//nolint:gochecknoglobals +var sample = []byte("test") + +func TestGetHashFromMultihash(t *testing.T) { + hash, err := GetHashFromMultihash(100) + require.NotNil(t, err) + require.Contains(t, err.Error(), "algorithm not supported") + require.Equal(t, crypto.Hash(0), hash) + + hash, err = GetHashFromMultihash(sha2_256) + require.Nil(t, err) + require.NotNil(t, hash) +} + +func TestComputeHash(t *testing.T) { + hash, err := ComputeMultihash(100, sample) + require.NotNil(t, err) + require.Contains(t, err.Error(), "algorithm not supported") + require.Nil(t, hash) + + hash, err = ComputeMultihash(sha2_256, sample) + require.Nil(t, err) + require.NotNil(t, hash) +} + +func TestIsSupportedMultihash(t *testing.T) { + // scenario: not base64 encoded (corrupted input) + supported := IsSupportedMultihash("XXXXXaGVsbG8=") + require.False(t, supported) + + // scenario: base64 encoded, however not multihash + supported = IsSupportedMultihash(encoder.EncodeToString(sample)) + require.False(t, supported) + + // scenario: valid encoded multihash + hash, err := ComputeMultihash(sha2_256, sample) + require.Nil(t, err) + require.NotNil(t, hash) + + key := encoder.EncodeToString(hash) + supported = IsSupportedMultihash(key) + require.True(t, supported) +} + +func TestIsComputedUsingHashAlgorithm(t *testing.T) { + hash, err := ComputeMultihash(sha2_256, sample) + require.Nil(t, err) + require.NotNil(t, hash) + + key := encoder.EncodeToString(hash) + ok := IsComputedUsingMultihashAlgorithms(key, []uint{sha2_256}) + require.True(t, ok) + + // use random code to fail + ok = IsComputedUsingMultihashAlgorithms(key, []uint{55}) + require.False(t, ok) + + ok = IsComputedUsingMultihashAlgorithms("invalid", 
[]uint{sha2_256}) + require.False(t, ok) +} + +func TestIsValidModelMultihash(t *testing.T) { + t.Run("success", func(t *testing.T) { + suffix, err := CalculateModelMultihash(suffixDataObject, sha2_256) + require.Nil(t, err) + require.Equal(t, expectedSuffixForSuffixObject, suffix) + + err = IsValidModelMultihash(suffixDataObject, suffix) + require.NoError(t, err) + }) + + t.Run("error - model multihash is not matching provided multihash", func(t *testing.T) { + differentMultihash, err := ComputeMultihash(sha2_256, []byte("test")) + require.NoError(t, err) + + err = IsValidModelMultihash(suffixDataObject, encoder.EncodeToString(differentMultihash)) + require.Error(t, err) + require.Contains(t, err.Error(), "supplied hash doesn't match original content") + }) + + t.Run("error - multihash is not encoded", func(t *testing.T) { + differentMultihash, err := ComputeMultihash(sha2_256, []byte("test")) + require.NoError(t, err) + + err = IsValidModelMultihash(suffixDataObject, string(differentMultihash)) + require.Error(t, err) + require.Contains(t, err.Error(), "illegal base64 data") + }) + + t.Run("error - invalid model", func(t *testing.T) { + differentMultihash, err := ComputeMultihash(sha2_256, []byte("test")) + require.NoError(t, err) + + var c chan int + err = IsValidModelMultihash(c, encoder.EncodeToString(differentMultihash)) + require.Error(t, err) + require.Contains(t, err.Error(), "json: unsupported type: chan int") + }) +} + +func TestCalculateModelMultihash(t *testing.T) { + t.Run("success", func(t *testing.T) { + suffix, err := CalculateModelMultihash(suffixDataObject, sha2_256) + require.Nil(t, err) + require.Equal(t, expectedSuffixForSuffixObject, suffix) + }) + + t.Run("success", func(t *testing.T) { + _, err := CalculateModelMultihash(suffixDataObject, sha2_512) + require.Nil(t, err) + }) + + t.Run("error - multihash algorithm not supported", func(t *testing.T) { + id, err := CalculateModelMultihash(suffixDataObject, 55) + require.NotNil(t, err) + require.Empty(t, id) + require.Contains(t, err.Error(), "algorithm not supported, unable to compute hash") + }) + + t.Run("error - marshal canonical", func(t *testing.T) { + var c chan int + result, err := CalculateModelMultihash(c, sha2_256) + require.Error(t, err) + require.Empty(t, result) + require.Contains(t, err.Error(), "json: unsupported type: chan int") + }) +} + +func TestHash(t *testing.T) { + t.Run("success", func(t *testing.T) { + test := []byte("hello world") + + h, err := GetHash(algSHA256, test) + require.NoError(t, err) + require.NotEmpty(t, h) + + expected := sha256.Sum256(test) + require.Equal(t, expected[:], h) + }) + + t.Run("error - hash code not supported", func(t *testing.T) { + test := []byte("test data") + h, err := GetHash(55, test) + require.Error(t, err) + require.Empty(t, h) + require.Contains(t, err.Error(), "hash function not available for: 55") + }) +} + +//nolint:gochecknoglobals +var suffixDataObject = &struct { + DeltaHash string `json:"deltaHash,omitempty"` + RecoveryCommitment string `json:"recoveryCommitment,omitempty"` +}{ + DeltaHash: "EiBOmkP6kn7yjt0VocmcPu9OQOsZi199Evh-xB48ebubQA", + RecoveryCommitment: "EiAAZJYry29vICkwmso8FL92WAISMAhsL8xkCm8dYVnq_w", +} + +const expectedSuffixForSuffixObject = "EiA5vyaRzJIxbkuZbvwEXiC__u8ieFx50TAAo98tBzCuyA" diff --git a/method/sidetreelongform/sidetree-core/internal/jws/jwk.go b/method/sidetreelongform/sidetree-core/internal/jws/jwk.go new file mode 100644 index 0000000..6c550a4 --- /dev/null +++ b/method/sidetreelongform/sidetree-core/internal/jws/jwk.go 
@@ -0,0 +1,293 @@ +/* +Copyright SecureKey Technologies Inc. All Rights Reserved. + +SPDX-License-Identifier: Apache-2.0 +*/ + +package jws + +import ( + "crypto/ecdsa" + "crypto/elliptic" + "crypto/x509" + "encoding/base64" + "errors" + "fmt" + "math/big" + "strings" + + "github.com/btcsuite/btcd/btcec" + "github.com/square/go-jose/v3" + "github.com/square/go-jose/v3/json" + "golang.org/x/crypto/ed25519" +) + +const ( + secp256k1Crv = "secp256k1" + secp256k1Kty = "EC" + secp256k1Size = 32 + bitsPerByte = 8 +) + +// JWK (JSON Web Key) is a JSON data structure that represents a cryptographic key. +type JWK struct { + jose.JSONWebKey + + Kty string + Crv string +} + +// PublicKeyBytes converts a public key to bytes. +func (j *JWK) PublicKeyBytes() ([]byte, error) { + if isSecp256k1(j.Kty, j.Crv) { + var ecPubKey *ecdsa.PublicKey + + ecPubKey, ok := j.Key.(*ecdsa.PublicKey) + if !ok { + ecPubKey = &j.Key.(*ecdsa.PrivateKey).PublicKey + } + + pubKey := &btcec.PublicKey{ + Curve: btcec.S256(), + X: ecPubKey.X, + Y: ecPubKey.Y, + } + + return pubKey.SerializeCompressed(), nil + } + + switch pubKey := j.Public().Key.(type) { + case *ecdsa.PublicKey, ed25519.PublicKey: + pubKBytes, err := x509.MarshalPKIXPublicKey(pubKey) + if err != nil { + return nil, errors.New("failed to read public key bytes") + } + + return pubKBytes, nil + default: + return nil, fmt.Errorf("unsupported public key type in kid '%s'", j.KeyID) + } +} + +// UnmarshalJSON reads a key from its JSON representation. +func (j *JWK) UnmarshalJSON(jwkBytes []byte) error { + var key jsonWebKey + + marshalErr := json.Unmarshal(jwkBytes, &key) + if marshalErr != nil { + return fmt.Errorf("unable to read JWK: %w", marshalErr) + } + + if isSecp256k1(key.Kty, key.Crv) { + jwk, err := unmarshalSecp256k1(&key) + if err != nil { + return fmt.Errorf("unable to read JWK: %w", err) + } + + *j = *jwk + } else { + var joseJWK jose.JSONWebKey + + err := json.Unmarshal(jwkBytes, &joseJWK) + if err != nil { + return fmt.Errorf("unable to read jose JWK, %w", err) + } + + j.JSONWebKey = joseJWK + } + + j.Kty = key.Kty + j.Crv = key.Crv + + return nil +} + +// MarshalJSON serializes the given key to its JSON representation. 
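+// A hedged round-trip sketch (key values are illustrative only):
+//
+//	var jwk JWK
+//	_ = json.Unmarshal([]byte(`{"kty":"EC","crv":"secp256k1","x":"...","y":"..."}`), &jwk)
+//	out, _ := json.Marshal(&jwk) // secp256k1 keys take the marshalSecp256k1 path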
+func (j *JWK) MarshalJSON() ([]byte, error) { + if isSecp256k1(j.Kty, j.Crv) { + return marshalSecp256k1(j) + } + + return (&j.JSONWebKey).MarshalJSON() +} + +func isSecp256k1(kty, crv string) bool { + return strings.EqualFold(kty, secp256k1Kty) && strings.EqualFold(crv, secp256k1Crv) +} + +func unmarshalSecp256k1(jwk *jsonWebKey) (*JWK, error) { + if jwk.X == nil { + return nil, ErrInvalidKey + } + + if jwk.Y == nil { + return nil, ErrInvalidKey + } + + curve := btcec.S256() + + if curveSize(curve) != len(jwk.X.data) { + return nil, ErrInvalidKey + } + + if curveSize(curve) != len(jwk.Y.data) { + return nil, ErrInvalidKey + } + + if jwk.D != nil && dSize(curve) != len(jwk.D.data) { + return nil, ErrInvalidKey + } + + x := jwk.X.bigInt() + y := jwk.Y.bigInt() + + if !curve.IsOnCurve(x, y) { + return nil, ErrInvalidKey + } + + var key interface{} + + if jwk.D != nil { + key = &ecdsa.PrivateKey{ + PublicKey: ecdsa.PublicKey{ + Curve: curve, + X: x, + Y: y, + }, + D: jwk.D.bigInt(), + } + } else { + key = &ecdsa.PublicKey{ + Curve: curve, + X: x, + Y: y, + } + } + + return &JWK{ + JSONWebKey: jose.JSONWebKey{ + Key: key, KeyID: jwk.Kid, Algorithm: jwk.Alg, Use: jwk.Use, + }, + }, nil +} + +func marshalSecp256k1(jwk *JWK) ([]byte, error) { + var raw jsonWebKey + + switch ecdsaKey := jwk.Key.(type) { + case *ecdsa.PublicKey: + raw = jsonWebKey{ + Kty: secp256k1Kty, + Crv: secp256k1Crv, + X: newFixedSizeBuffer(ecdsaKey.X.Bytes(), secp256k1Size), + Y: newFixedSizeBuffer(ecdsaKey.Y.Bytes(), secp256k1Size), + } + + case *ecdsa.PrivateKey: + raw = jsonWebKey{ + Kty: secp256k1Kty, + Crv: secp256k1Crv, + X: newFixedSizeBuffer(ecdsaKey.X.Bytes(), secp256k1Size), + Y: newFixedSizeBuffer(ecdsaKey.Y.Bytes(), secp256k1Size), + D: newFixedSizeBuffer(ecdsaKey.D.Bytes(), dSize(ecdsaKey.Curve)), + } + } + + raw.Kid = jwk.KeyID + raw.Alg = jwk.Algorithm + raw.Use = jwk.Use + + return json.Marshal(raw) +} + +// jsonWebKey contains subset of json web key json properties. +type jsonWebKey struct { + Use string `json:"use,omitempty"` + Kty string `json:"kty,omitempty"` + Kid string `json:"kid,omitempty"` + Crv string `json:"crv,omitempty"` + Alg string `json:"alg,omitempty"` + + X *byteBuffer `json:"x,omitempty"` + Y *byteBuffer `json:"y,omitempty"` + + D *byteBuffer `json:"d,omitempty"` +} + +// Get size of curve in bytes. +func curveSize(crv elliptic.Curve) int { + bits := crv.Params().BitSize + + div := bits / bitsPerByte + mod := bits % bitsPerByte + + if mod == 0 { + return div + } + + return div + 1 +} + +func dSize(curve elliptic.Curve) int { + order := curve.Params().P + bitLen := order.BitLen() + size := bitLen / bitsPerByte + + if bitLen%bitsPerByte != 0 { + size++ + } + + return size +} + +// byteBuffer represents a slice of bytes that can be serialized to url-safe base64. 
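+//
+// Fixed-width JWK members are left-padded with zero bytes before encoding;
+// for example (sketch): newFixedSizeBuffer([]byte{0x01}, 32) holds 31 zero
+// bytes followed by 0x01 and base64url-encodes to a 43-character coordinate.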
+type byteBuffer struct { + data []byte +} + +func (b *byteBuffer) UnmarshalJSON(data []byte) error { + var encoded string + + err := json.Unmarshal(data, &encoded) + if err != nil { + return err + } + + if encoded == "" { + return nil + } + + decoded, err := base64.RawURLEncoding.DecodeString(encoded) + if err != nil { + return err + } + + *b = byteBuffer{ + data: decoded, + } + + return nil +} + +func (b *byteBuffer) MarshalJSON() ([]byte, error) { + return json.Marshal(b.base64()) +} + +func (b *byteBuffer) base64() string { + return base64.RawURLEncoding.EncodeToString(b.data) +} + +func (b byteBuffer) bigInt() *big.Int { + return new(big.Int).SetBytes(b.data) +} + +func newFixedSizeBuffer(data []byte, length int) *byteBuffer { + paddedData := make([]byte, length-len(data)) + + return &byteBuffer{ + data: append(paddedData, data...), + } +} + +// ErrInvalidKey is returned when passed JWK is invalid. +var ErrInvalidKey = errors.New("invalid JWK") diff --git a/method/sidetreelongform/sidetree-core/internal/jws/jwk_test.go b/method/sidetreelongform/sidetree-core/internal/jws/jwk_test.go new file mode 100644 index 0000000..a0a305f --- /dev/null +++ b/method/sidetreelongform/sidetree-core/internal/jws/jwk_test.go @@ -0,0 +1,281 @@ +/* +Copyright SecureKey Technologies Inc. All Rights Reserved. + +SPDX-License-Identifier: Apache-2.0 +*/ + +package jws + +import ( + "crypto/ecdsa" + "crypto/elliptic" + "crypto/rand" + "testing" + + "github.com/btcsuite/btcd/btcec" + "github.com/square/go-jose/v3" + "github.com/square/go-jose/v3/json" + "github.com/stretchr/testify/require" +) + +func TestDecodePublicKey(t *testing.T) { + t.Run("Test decode public key success", func(t *testing.T) { + tests := []struct { + name string + jwkJSON string + }{ + { + name: "get public key bytes Ed25519 JWK", + jwkJSON: `{ + "kty": "OKP", + "use": "enc", + "crv": "Ed25519", + "kid": "sample@sample.id", + "x": "sEHL6KXs8bUz9Ss2qSWWjhhRMHVjrog0lzFENM132R8", + "alg": "EdDSA" + }`, + }, + { + name: "get public key bytes EC P-526 JWK", + jwkJSON: `{ + "kty": "EC", + "use": "enc", + "crv": "P-256", + "kid": "sample@sample.id", + "x": "JR7nhI47w7bxrNkp7Xt1nbmozNn-RB2Q-PWi7KHT8J0", + "y": "iXmKtH0caOgB1vV0CQwinwK999qdDvrssKhdbiAz9OI", + "alg": "ES256" + }`, + }, + { + name: "get public key bytes EC SECP256K1 JWK", + jwkJSON: `{ + "kty": "EC", + "use": "enc", + "crv": "secp256k1", + "kid": "sample@sample.id", + "x": "YRrvJocKf39GpdTnd-zBFE0msGDqawR-Cmtc6yKoFsM", + "y": "kE-dMH9S3mxnTXo0JFEhraCU_tVYFDfpu9tpP1LfVKQ", + "alg": "ES256K" + }`, + }, + { + name: "get private key bytes EC SECP256K1 JWK", + jwkJSON: `{ + "kty": "EC", + "d": "Lg5xrN8Usd_T-MfqBIs3bUWQCNsXY8hGU-Ru3Joom8E", + "use": "sig", + "crv": "secp256k1", + "kid": "sample@sample.id", + "x": "dv6X5DheBaFWR2H_yv9pUI2dcmL2XX8m7zgFc9Coaqg", + "y": "AUVSmytVWP350kV1RHhQ6AcCWaJj8AFt4aNLlDws7C4", + "alg": "ES256K" + }`, + }, + } + + t.Parallel() + + for _, test := range tests { + tc := test + t.Run(tc.name, func(t *testing.T) { + var jwk JWK + + err := json.Unmarshal([]byte(tc.jwkJSON), &jwk) + require.NoError(t, err) + + pkBytes, err := jwk.PublicKeyBytes() + require.NoError(t, err) + require.NotEmpty(t, pkBytes) + + jwkBytes, err := json.Marshal(&jwk) + require.NoError(t, err) + require.NotEmpty(t, jwkBytes) + }) + } + }) + + t.Run("Test decode public key failure", func(t *testing.T) { + tests := []struct { + name string + jwkJSON string + err string + }{ + { + name: "attempt public key bytes from invalid JSON bytes", + jwkJSON: `}`, + err: "invalid character", + }, + { 
+			name: "attempt public key bytes from invalid curve",
+			jwkJSON: `{
+							"kty": "EC",
+							"use": "enc",
+							"crv": "sec12341",
+							"kid": "sample@sample.id",
+							"x": "wQehEGTVCu32yp8IwTaBCqPUIYslyd-WoFRsfDKE9II",
+							"y": "rIJO8RmkExUecJ5i15L9OC7rl7pwmYFR8QQgdM1ERWI",
+							"alg": "ES256"
+						}`,
+			err: "unsupported elliptic curve 'sec12341'",
+		},
+		{
+			name: "empty X and Y coordinates",
+			jwkJSON: `{
+							"kty": "EC",
+							"use": "enc",
+							"crv": "secp256k1",
+							"kid": "sample@sample.id",
+							"x": "",
+							"y": "",
+							"alg": "ES256"
+						}`,
+			err: "unable to read JWK: invalid JWK",
+		},
+		{
+			name: "empty Y coordinate",
+			jwkJSON: `{
+							"kty": "EC",
+							"use": "enc",
+							"crv": "secp256k1",
+							"kid": "sample@sample.id",
+							"x": "wQehEGTVCu32yp8IwTaBCqPUIYslyd-WoFRsfDKE9II",
+							"y": "",
+							"alg": "ES256"
+						}`,
+			err: "unable to read JWK: invalid JWK",
+		},
+		{
+			name: "invalid X and Y coordinate values",
+			jwkJSON: `{
+							"kty": "EC",
+							"use": "enc",
+							"crv": "secp256k1",
+							"kid": "sample@sample.id",
+							"x": "x",
+							"y": "y",
+							"alg": "ES256"
+						}`,
+			err: "unable to read JWK",
+		},
+		{
+			name: "X is not defined",
+			jwkJSON: `{
+							"kty": "EC",
+							"use": "enc",
+							"crv": "secp256k1",
+							"kid": "sample@sample.id",
+							"y": "rIJO8RmkExUecJ5i15L9OC7rl7pwmYFR8QQgdM1ERWI",
+							"alg": "ES256"
+						}`,
+			err: "invalid JWK",
+		},
+		{
+			name: "Y is not defined",
+			jwkJSON: `{
+							"kty": "EC",
+							"use": "enc",
+							"crv": "secp256k1",
+							"kid": "sample@sample.id",
+							"x": "wQehEGTVCu32yp8IwTaBCqPUIYslyd-WoFRsfDKE9II",
+							"alg": "ES256"
+						}`,
+			err: "invalid JWK",
+		},
+		{
+			name: "empty D value",
+			jwkJSON: `{
+							"kty": "EC",
+							"use": "enc",
+							"crv": "secp256k1",
+							"kid": "sample@sample.id",
+							"x": "wQehEGTVCu32yp8IwTaBCqPUIYslyd-WoFRsfDKE9II",
+							"y": "rIJO8RmkExUecJ5i15L9OC7rl7pwmYFR8QQgdM1ERWI",
+							"d": "",
+							"alg": "ES256"
+						}`,
+			err: "invalid JWK",
+		},
+		{
+			name: "point is not on curve",
+			jwkJSON: `{
+							"kty": "EC",
+							"use": "enc",
+							"crv": "secp256k1",
+							"kid": "sample@sample.id",
+							"x": "wQehEGTVCu32yp8IwTaBCqPUIYslyd-WoFRsfDKE9II",
+							"y": "rIJO8RmkExUecJ5i15L9OC7rl7pwmYFR8QQgdM1ERWO",
+							"alg": "ES256"
+						}`,
+			err: "unable to read JWK: invalid JWK",
+		},
+		{
+			name: "X is not valid base64url",
+			jwkJSON: `{
+							"kty": "EC",
+							"use": "enc",
+							"crv": "secp256k1",
+							"kid": "sample@sample.id",
+							"x": "{",
+							"y": "y",
+							"alg": "ES256"
+						}`,
+			err: "unable to read JWK",
+		},
+	}
+
+	t.Parallel()
+
+	for _, test := range tests {
+		tc := test
+		t.Run(tc.name, func(t *testing.T) {
+			var jwk JWK
+			err := json.Unmarshal([]byte(tc.jwkJSON), &jwk)
+			require.Error(t, err)
+			require.Contains(t, err.Error(), tc.err)
+		})
+	}
+	})
+}
+
+func TestByteBufferUnmarshalFailure(t *testing.T) {
+	bb := &byteBuffer{}
+	err := bb.UnmarshalJSON([]byte("{"))
+	require.Error(t, err)
+}
+
+func TestCurveSize(t *testing.T) {
+	require.Equal(t, 32, curveSize(btcec.S256()))
+	require.Equal(t, 32, curveSize(elliptic.P256()))
+	require.Equal(t, 28, curveSize(elliptic.P224()))
+	require.Equal(t, 48, curveSize(elliptic.P384()))
+	require.Equal(t, 66, curveSize(elliptic.P521()))
+}
+
+func TestJWK_PublicKeyBytesValidation(t *testing.T) {
+	// invalid public key
+	privKey, err := ecdsa.GenerateKey(btcec.S256(), rand.Reader)
+	require.NoError(t, err)
+
+	jwk := &JWK{
+		JSONWebKey: jose.JSONWebKey{
+			Key:       &privKey.PublicKey,
+			Algorithm: "ES256",
+			KeyID:     "pubkey#123",
+		},
+		Crv: "P-256",
+		Kty: "EC",
+	}
+
+	pkBytes, err := jwk.PublicKeyBytes()
+	require.Error(t, err)
+	require.Contains(t, err.Error(), "failed to read public key bytes")
+	require.Empty(t, pkBytes)
+
+	// unsupported public key type
+	jwk.Key = "key of invalid type"
+	pkBytes, err = jwk.PublicKeyBytes()
+	require.Error(t, err)
+	require.Contains(t, err.Error(), "unsupported public key type in kid 'pubkey#123'")
+	require.Empty(t, pkBytes)
+}
diff --git a/method/sidetreelongform/sidetree-core/internal/jws/jws.go b/method/sidetreelongform/sidetree-core/internal/jws/jws.go
new file mode 100644
index 0000000..5b81d4f
--- /dev/null
+++ b/method/sidetreelongform/sidetree-core/internal/jws/jws.go
@@ -0,0 +1,297 @@
+/*
+Copyright SecureKey Technologies Inc. All Rights Reserved.
+
+SPDX-License-Identifier: Apache-2.0
+*/
+
+package jws
+
+import (
+	"encoding/base64"
+	"errors"
+	"fmt"
+	"strings"
+
+	"github.com/square/go-jose/v3/json"
+
+	"github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/jws"
+)
+
+const (
+	jwsPartsCount    = 3
+	jwsHeaderPart    = 0
+	jwsPayloadPart   = 1
+	jwsSignaturePart = 2
+)
+
+// JSONWebSignature defines JSON Web Signature (https://tools.ietf.org/html/rfc7515).
+type JSONWebSignature struct {
+	ProtectedHeaders   jws.Headers
+	UnprotectedHeaders jws.Headers
+	Payload            []byte
+
+	signature   []byte
+	joseHeaders jws.Headers
+}
+
+// Signer defines the JWS Signer interface. It signs data and provides custom JWS headers relevant to the signer.
+type Signer interface {
+	// Sign signs data and returns the signature.
+	Sign(data []byte) ([]byte, error)
+
+	// Headers provides JWS headers. "alg" header must be provided (see https://tools.ietf.org/html/rfc7515#section-4.1)
+	Headers() jws.Headers
+}
+
+// NewJWS creates JSON Web Signature.
+func NewJWS(
+	protectedHeaders, unprotectedHeaders jws.Headers, payload []byte, signer Signer) (*JSONWebSignature, error) {
+	headers := mergeHeaders(protectedHeaders, signer.Headers())
+	s := &JSONWebSignature{
+		ProtectedHeaders:   headers,
+		UnprotectedHeaders: unprotectedHeaders,
+		Payload:            payload,
+		joseHeaders:        headers,
+	}
+
+	signature, err := sign(s.joseHeaders, payload, signer)
+	if err != nil {
+		return nil, fmt.Errorf("sign JWS: %w", err)
+	}
+
+	s.signature = signature
+
+	return s, nil
+}
+
+// SerializeCompact produces the JWS Compact Serialization (https://tools.ietf.org/html/rfc7515#section-7.1).
+func (s JSONWebSignature) SerializeCompact(detached bool) (string, error) {
+	byteHeaders, err := json.Marshal(s.joseHeaders)
+	if err != nil {
+		return "", fmt.Errorf("marshal JWS JOSE Headers: %w", err)
+	}
+
+	b64Headers := base64.RawURLEncoding.EncodeToString(byteHeaders)
+
+	b64Payload := ""
+	if !detached {
+		b64Payload = base64.RawURLEncoding.EncodeToString(s.Payload)
+	}
+
+	b64Signature := base64.RawURLEncoding.EncodeToString(s.signature)
+
+	return fmt.Sprintf("%s.%s.%s",
+		b64Headers,
+		b64Payload,
+		b64Signature), nil
+}
+
+// Signature returns a copy of JWS signature.
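+//
+// The copy keeps callers from mutating the signature that SerializeCompact
+// encodes, e.g. (sketch):
+//
+//	sig := s.Signature()
+//	sig[0] ^= 0xff // local change only; s is unaffected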
+func (s JSONWebSignature) Signature() []byte {
+	if s.signature == nil {
+		return nil
+	}
+
+	sCopy := make([]byte, len(s.signature))
+	copy(sCopy, s.signature)
+
+	return sCopy
+}
+
+func mergeHeaders(h1, h2 jws.Headers) jws.Headers {
+	h := make(jws.Headers, len(h1)+len(h2))
+
+	for k, v := range h2 {
+		h[k] = v
+	}
+
+	for k, v := range h1 {
+		h[k] = v
+	}
+
+	return h
+}
+
+func sign(joseHeaders jws.Headers, payload []byte, signer Signer) ([]byte, error) {
+	err := checkJWSHeaders(joseHeaders)
+	if err != nil {
+		return nil, fmt.Errorf("check JOSE headers: %w", err)
+	}
+
+	sigInput, err := signingInput(joseHeaders, payload)
+	if err != nil {
+		return nil, fmt.Errorf("prepare JWS verification data: %w", err)
+	}
+
+	signature, err := signer.Sign(sigInput)
+	if err != nil {
+		return nil, fmt.Errorf("sign JWS verification data: %w", err)
+	}
+
+	return signature, nil
+}
+
+// jwsParseOpts holds options for JWS parsing.
+type jwsParseOpts struct {
+	detachedPayload []byte
+}
+
+// ParseOpt is a JWS parser option.
+type ParseOpt func(opts *jwsParseOpts)
+
+// WithJWSDetachedPayload option supplies a detached JWS payload.
+func WithJWSDetachedPayload(payload []byte) ParseOpt {
+	return func(opts *jwsParseOpts) {
+		opts.detachedPayload = payload
+	}
+}
+
+// ParseJWS parses serialized JWS. Currently only JWS Compact Serialization parsing is supported.
+func ParseJWS(jwsStr string, opts ...ParseOpt) (*JSONWebSignature, error) {
+	pOpts := &jwsParseOpts{}
+
+	for _, opt := range opts {
+		opt(pOpts)
+	}
+
+	if strings.HasPrefix(jwsStr, "{") {
+		// TODO support JWS JSON serialization format
+		// https://github.com/hyperledger/aries-framework-go/issues/1331
+		return nil, errors.New("JWS JSON serialization is not supported")
+	}
+
+	return parseCompacted(jwsStr, pOpts)
+}
+
+// VerifyJWS parses and validates serialized JWS. Currently only JWS Compact Serialization parsing is supported.
+func VerifyJWS(jwsStr string, jwk *jws.JWK, opts ...ParseOpt) (*JSONWebSignature, error) {
+	parsedJWS, err := ParseJWS(jwsStr, opts...)
+	if err != nil {
+		return nil, err
+	}
+
+	sInput, err := signingInput(parsedJWS.ProtectedHeaders, parsedJWS.Payload)
+	if err != nil {
+		return nil, fmt.Errorf("build signing input: %w", err)
+	}
+
+	err = VerifySignature(jwk, parsedJWS.signature, sInput)
+	if err != nil {
+		return nil, err
+	}
+
+	return parsedJWS, nil
+}
+
+// IsCompactJWS checks whether the input is a compact JWS (based on https://tools.ietf.org/html/rfc7516#section-9).
+func IsCompactJWS(s string) bool {
+	parts := strings.Split(s, ".")
+
+	return len(parts) == jwsPartsCount
+}
+
+func parseCompacted(jwsCompact string, opts *jwsParseOpts) (*JSONWebSignature, error) {
+	parts := strings.Split(jwsCompact, ".")
+	if len(parts) != jwsPartsCount {
+		return nil, errors.New("invalid JWS compact format")
+	}
+
+	joseHeaders, err := parseCompactedHeaders(parts)
+	if err != nil {
+		return nil, err
+	}
+
+	payload, err := parseCompactedPayload(parts[jwsPayloadPart], opts)
+	if err != nil {
+		return nil, err
+	}
+
+	signature, err := base64.RawURLEncoding.DecodeString(parts[jwsSignaturePart])
+	if err != nil {
+		return nil, fmt.Errorf("decode base64 signature: %w", err)
+	}
+
+	if len(signature) == 0 {
+		return nil, errors.New("compact jws signature is empty")
+	}
+
+	return &JSONWebSignature{
+		ProtectedHeaders: joseHeaders,
+		Payload:          payload,
+		signature:        signature,
+		joseHeaders:      joseHeaders,
+	}, nil
+}
+
+func parseCompactedPayload(jwsPayload string, opts *jwsParseOpts) ([]byte, error) {
+	if len(opts.detachedPayload) > 0 {
+		return opts.detachedPayload, nil
+	}
+
+	payload, err := base64.RawURLEncoding.DecodeString(jwsPayload)
+	if err != nil {
+		return nil, fmt.Errorf("decode base64 payload: %w", err)
+	}
+
+	if len(payload) == 0 {
+		return nil, errors.New("compact jws payload is empty")
+	}
+
+	return payload, nil
+}
+
+func parseCompactedHeaders(parts []string) (jws.Headers, error) {
+	headersBytes, err := base64.RawURLEncoding.DecodeString(parts[jwsHeaderPart])
+	if err != nil {
+		return nil, fmt.Errorf("decode base64 header: %w", err)
+	}
+
+	var joseHeaders jws.Headers
+
+	err = json.Unmarshal(headersBytes, &joseHeaders)
+	if err != nil {
+		return nil, fmt.Errorf("unmarshal JSON headers: %w", err)
+	}
+
+	err = checkJWSHeaders(joseHeaders)
+	if err != nil {
+		return nil, err
+	}
+
+	return joseHeaders, nil
+}
+
+func signingInput(headers jws.Headers, payload []byte) ([]byte, error) {
+	headersBytes, err := json.Marshal(headers)
+	if err != nil {
+		return nil, fmt.Errorf("serialize JWS headers: %w", err)
+	}
+
+	hBase64 := true
+
+	if b64, ok := headers[jws.HeaderB64Payload]; ok {
+		if hBase64, ok = b64.(bool); !ok {
+			return nil, errors.New("invalid b64 header")
+		}
+	}
+
+	headersStr := base64.RawURLEncoding.EncodeToString(headersBytes)
+
+	var payloadStr string
+
+	if hBase64 {
+		payloadStr = base64.RawURLEncoding.EncodeToString(payload)
+	} else {
+		payloadStr = string(payload)
+	}
+
+	return []byte(fmt.Sprintf("%s.%s", headersStr, payloadStr)), nil
+}
+
+func checkJWSHeaders(headers jws.Headers) error {
+	if _, ok := headers[jws.HeaderAlgorithm]; !ok {
+		return fmt.Errorf("%s JWS header is not defined", jws.HeaderAlgorithm)
+	}
+
+	return nil
+}
diff --git a/method/sidetreelongform/sidetree-core/internal/jws/jws_test.go b/method/sidetreelongform/sidetree-core/internal/jws/jws_test.go
new file mode 100644
index 0000000..555897c
--- /dev/null
+++ b/method/sidetreelongform/sidetree-core/internal/jws/jws_test.go
@@ -0,0 +1,279 @@
+/*
+Copyright SecureKey Technologies Inc. All Rights Reserved.
+
+SPDX-License-Identifier: Apache-2.0
+*/
+
+package jws
+
+import (
+	"crypto/ecdsa"
+	"crypto/ed25519"
+	"crypto/elliptic"
+	"crypto/rand"
+	"encoding/base64"
+	"errors"
+	"fmt"
+	"strings"
+	"testing"
+
+	"github.com/stretchr/testify/require"
+
+	"github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/jws"
+	"github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/util/ecsigner"
+	"github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/util/edsigner"
+)
+
+func TestHeaders_GetKeyID(t *testing.T) {
+	kid, ok := jws.Headers{"kid": "key id"}.KeyID()
+	require.True(t, ok)
+	require.Equal(t, "key id", kid)
+
+	kid, ok = jws.Headers{"kid": 777}.KeyID()
+	require.False(t, ok)
+	require.Empty(t, kid)
+
+	kid, ok = jws.Headers{}.KeyID()
+	require.False(t, ok)
+	require.Empty(t, kid)
+}
+
+func TestHeaders_GetAlgorithm(t *testing.T) {
+	alg, ok := jws.Headers{"alg": "EdDSA"}.Algorithm()
+	require.True(t, ok)
+	require.Equal(t, "EdDSA", alg)
+
+	alg, ok = jws.Headers{"alg": 777}.Algorithm()
+	require.False(t, ok)
+	require.Empty(t, alg)
+
+	alg, ok = jws.Headers{}.Algorithm()
+	require.False(t, ok)
+	require.Empty(t, alg)
+}
+
+func TestJSONWebSignature_SerializeCompact(t *testing.T) {
+	headers := jws.Headers{"alg": "EdDSA", "typ": "JWT"}
+	payload := []byte("payload")
+
+	newJWS, err := NewJWS(headers, nil, payload,
+		&testSigner{
+			headers:   jws.Headers{"alg": "dummy"},
+			signature: []byte("signature"),
+		})
+	require.NoError(t, err)
+
+	jwsCompact, err := newJWS.SerializeCompact(false)
+	require.NoError(t, err)
+	require.NotEmpty(t, jwsCompact)
+
+	// b64=false
+	newJWS, err = NewJWS(headers, nil, payload,
+		&testSigner{
+			headers:   jws.Headers{"alg": "dummy", "b64": false},
+			signature: []byte("signature"),
+		})
+	require.NoError(t, err)
+
+	jwsCompact, err = newJWS.SerializeCompact(false)
+	require.NoError(t, err)
+	require.NotEmpty(t, jwsCompact)
+
+	// signer error
+	newJWS, err = NewJWS(headers, nil, payload,
+		&testSigner{
+			headers: jws.Headers{"alg": "dummy"},
+			err:     errors.New("signer error"),
+		})
+	require.Error(t, err)
+	require.Contains(t, err.Error(), "sign JWS verification data")
+	require.Nil(t, newJWS)
+
+	// no alg defined
+	newJWS, err = NewJWS(jws.Headers{}, nil, payload,
+		&testSigner{
+			headers: jws.Headers{},
+		})
+	require.Error(t, err)
+	require.Contains(t, err.Error(), "alg JWS header is not defined")
+	require.Nil(t, newJWS)
+
+	// jose headers marshalling error
+	newJWS, err = NewJWS(jws.Headers{}, nil, payload,
+		&testSigner{
+			headers: getUnmarshallableMap(),
+		})
+	require.Error(t, err)
+	require.Contains(t, err.Error(), "serialize JWS headers")
+	require.Nil(t, newJWS)
+
+	// invalid b64
+	newJWS, err = NewJWS(jws.Headers{}, nil, payload,
+		&testSigner{
+			headers:   jws.Headers{"alg": "dummy", "b64": "invalid"},
+			signature: []byte("signature"),
+		})
+	require.Error(t, err)
+	require.Contains(t, err.Error(), "invalid b64 header")
+	require.Nil(t, newJWS)
+}
+
+func TestJSONWebSignature_Signature(t *testing.T) {
+	jws := &JSONWebSignature{
+		signature: []byte("signature"),
+	}
+	require.NotEmpty(t, jws.Signature())
+
+	jws.signature = nil
+	require.Empty(t, jws.Signature())
+}
+
+func TestParseJWS(t *testing.T) {
+	privateKey, err := ecdsa.GenerateKey(elliptic.P256(), rand.Reader)
+	require.NoError(t, err)
+
+	jwk, err := getPublicKeyJWK(&privateKey.PublicKey)
+	require.NoError(t, err)
+
+	corruptedBase64 := "XXXXXaGVsbG8="
+
+	signer := ecsigner.New(privateKey, "ES256", "key-1")
+	jws, err := NewJWS(signer.Headers(), nil, []byte("payload"),
+		signer)
+	require.NoError(t, err)
+
+	jwsCompact, err := jws.SerializeCompact(false)
+	require.NoError(t, err)
+	require.NotEmpty(t, jwsCompact)
+
+	validJWSParts := strings.Split(jwsCompact, ".")
+
+	parsedJWS, err := VerifyJWS(jwsCompact, jwk)
+	require.NoError(t, err)
+	require.NotNil(t, parsedJWS)
+	require.Equal(t, jws, parsedJWS)
+
+	jwsDetached := fmt.Sprintf("%s.%s.%s", validJWSParts[0], "", validJWSParts[2])
+
+	detachedPayload, err := base64.RawURLEncoding.DecodeString(validJWSParts[1])
+	require.NoError(t, err)
+
+	parsedJWS, err = VerifyJWS(jwsDetached, jwk, WithJWSDetachedPayload(detachedPayload))
+	require.NoError(t, err)
+	require.NotNil(t, parsedJWS)
+	require.Equal(t, jws, parsedJWS)
+
+	// Parse non-compact JWS format
+	parsedJWS, err = VerifyJWS(`{"some": "JSON"}`, jwk)
+	require.Error(t, err)
+	require.EqualError(t, err, "JWS JSON serialization is not supported")
+	require.Nil(t, parsedJWS)
+
+	// Parse invalid compact JWS format
+	parsedJWS, err = VerifyJWS("two_parts.only", jwk)
+	require.Error(t, err)
+	require.EqualError(t, err, "invalid JWS compact format")
+	require.Nil(t, parsedJWS)
+
+	// invalid headers
+	jwsWithInvalidHeaders := fmt.Sprintf("%s.%s.%s", "invalid", validJWSParts[1], validJWSParts[2])
+	parsedJWS, err = VerifyJWS(jwsWithInvalidHeaders, jwk)
+	require.Error(t, err)
+	require.Contains(t, err.Error(), "unmarshal JSON headers")
+	require.Nil(t, parsedJWS)
+
+	jwsWithInvalidHeaders = fmt.Sprintf("%s.%s.%s", corruptedBase64, validJWSParts[1], validJWSParts[2])
+	parsedJWS, err = VerifyJWS(jwsWithInvalidHeaders, jwk)
+	require.Error(t, err)
+	require.Contains(t, err.Error(), "decode base64 header")
+	require.Nil(t, parsedJWS)
+
+	emptyHeaders := base64.RawURLEncoding.EncodeToString([]byte("{}"))
+
+	jwsWithInvalidHeaders = fmt.Sprintf("%s.%s.%s", emptyHeaders, validJWSParts[1], validJWSParts[2])
+	parsedJWS, err = VerifyJWS(jwsWithInvalidHeaders, jwk)
+	require.Error(t, err)
+	require.Contains(t, err.Error(), "alg JWS header is not defined")
+	require.Nil(t, parsedJWS)
+
+	// invalid payload
+	jwsWithInvalidPayload := fmt.Sprintf("%s.%s.%s", validJWSParts[0], corruptedBase64, validJWSParts[2])
+	parsedJWS, err = VerifyJWS(jwsWithInvalidPayload, jwk)
+	require.Error(t, err)
+	require.Contains(t, err.Error(), "decode base64 payload")
+	require.Nil(t, parsedJWS)
+
+	// invalid signature
+	jwsWithInvalidSignature := fmt.Sprintf("%s.%s.%s", validJWSParts[0], validJWSParts[1], corruptedBase64)
+	parsedJWS, err = VerifyJWS(jwsWithInvalidSignature, jwk)
+	require.Error(t, err)
+	require.Contains(t, err.Error(), "decode base64 signature")
+	require.Nil(t, parsedJWS)
+
+	// missing signature
+	jwsMissingSignature := fmt.Sprintf("%s.%s.%s", validJWSParts[0], validJWSParts[1], "")
+	parsedJWS, err = VerifyJWS(jwsMissingSignature, jwk)
+	require.Error(t, err)
+	require.Contains(t, err.Error(), "compact jws signature is empty")
+	require.Nil(t, parsedJWS)
+
+	// missing payload
+	jwsMissingPayload := fmt.Sprintf("%s.%s.%s", validJWSParts[0], "", validJWSParts[2])
+	parsedJWS, err = VerifyJWS(jwsMissingPayload, jwk)
+	require.Error(t, err)
+	require.Contains(t, err.Error(), "compact jws payload is empty")
+	require.Nil(t, parsedJWS)
+
+	// signature verification error
+	jwk.Kty = "type"
+	parsedJWS, err = VerifyJWS(jwsCompact, jwk)
+	require.Error(t, err)
+	require.Contains(t, err.Error(), "key type is not supported for verifying signature")
+	require.Nil(t, parsedJWS)
+}
+
+func TestParseJWS_ED25519(t *testing.T) {
+	publicKey, privateKey, err := ed25519.GenerateKey(rand.Reader)
+	require.NoError(t, err)
+
+	jwk, err := getPublicKeyJWK(publicKey)
+	require.NoError(t, err)
+
+	signer := edsigner.New(privateKey, "EdDSA", "key-1")
+	jws, err := NewJWS(signer.Headers(), nil, []byte("payload"), signer)
+	require.NoError(t, err)
+
+	jwsCompact, err := jws.SerializeCompact(false)
+	require.NoError(t, err)
+	require.NotEmpty(t, jwsCompact)
+
+	parsedJWS, err := VerifyJWS(jwsCompact, jwk)
+	require.NoError(t, err)
+	require.NotNil(t, parsedJWS)
+	require.Equal(t, jws, parsedJWS)
+}
+
+func TestIsCompactJWS(t *testing.T) {
+	require.True(t, IsCompactJWS("a.b.c"))
+	require.False(t, IsCompactJWS("a.b"))
+	require.False(t, IsCompactJWS(`{"some": "JSON"}`))
+	require.False(t, IsCompactJWS(""))
+}
+
+type testSigner struct {
+	headers   jws.Headers
+	signature []byte
+	err       error
+}
+
+func (s testSigner) Sign(_ []byte) ([]byte, error) {
+	return s.signature, s.err
+}
+
+func (s testSigner) Headers() jws.Headers {
+	return s.headers
+}
+
+func getUnmarshallableMap() map[string]interface{} {
+	return map[string]interface{}{"alg": "JWS", "error": map[chan int]interface{}{make(chan int): 6}}
+}
diff --git a/method/sidetreelongform/sidetree-core/internal/jws/signature.go b/method/sidetreelongform/sidetree-core/internal/jws/signature.go
new file mode 100644
index 0000000..c9449bd
--- /dev/null
+++ b/method/sidetreelongform/sidetree-core/internal/jws/signature.go
@@ -0,0 +1,169 @@
+/*
+Copyright SecureKey Technologies Inc. All Rights Reserved.
+
+SPDX-License-Identifier: Apache-2.0
+*/
+
+package jws
+
+import (
+	"crypto"
+	"crypto/ecdsa"
+	"crypto/ed25519"
+	"crypto/elliptic"
+	"encoding/json"
+	"errors"
+	"fmt"
+	"math/big"
+
+	"github.com/btcsuite/btcd/btcec"
+
+	"github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/jws"
+)
+
+const (
+	p256KeySize      = 32
+	p384KeySize      = 48
+	p521KeySize      = 66
+	secp256k1KeySize = 32
+)
+
+// VerifySignature verifies signature against public key in JWK format.
+func VerifySignature(jwk *jws.JWK, signature, msg []byte) error {
+	switch jwk.Kty {
+	case "EC":
+		return verifyECSignature(jwk, signature, msg)
+	case "OKP":
+		return verifyEd25519Signature(jwk, signature, msg)
+	default:
+		return fmt.Errorf("'%s' key type is not supported for verifying signature", jwk.Kty)
+	}
+}
+
+func verifyEd25519Signature(jwk *jws.JWK, signature, msg []byte) error {
+	pubKey, err := GetED25519PublicKey(jwk)
+	if err != nil {
+		return err
+	}
+
+	verified := ed25519.Verify(pubKey, msg, signature)
+	if !verified {
+		return errors.New("ed25519: invalid signature")
+	}
+
+	return nil
+}
+
+// GetED25519PublicKey returns the ed25519 public key.
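+//
+// Illustrative usage (sketch): for a JWK with kty "OKP" and crv "Ed25519",
+//
+//	pub, err := GetED25519PublicKey(jwk)
+//	// on success, len(pub) == ed25519.PublicKeySize (32 bytes)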
+func GetED25519PublicKey(jwk *jws.JWK) (ed25519.PublicKey, error) { + jsonBytes, err := json.Marshal(jwk) + if err != nil { + return nil, err + } + + var internalJWK JWK + + err = internalJWK.UnmarshalJSON(jsonBytes) + if err != nil { + return nil, err + } + + pubKey, ok := internalJWK.Key.(ed25519.PublicKey) + if !ok { + return nil, errors.New("unexpected public key type for ed25519") + } + + // ed25519 panics if key size is wrong + if len(pubKey) != ed25519.PublicKeySize { + return nil, errors.New("ed25519: invalid key") + } + + return pubKey, nil +} + +func verifyECSignature(jwk *jws.JWK, signature, msg []byte) error { + ec := parseEllipticCurve(jwk.Crv) + if ec == nil { + return fmt.Errorf("ecdsa: unsupported elliptic curve '%s'", jwk.Crv) + } + + jwkBytes, err := json.Marshal(jwk) + if err != nil { + return err + } + + internalJWK := JWK{ + Kty: jwk.Kty, + Crv: jwk.Crv, + } + + err = internalJWK.UnmarshalJSON(jwkBytes) + if err != nil { + return err + } + + ecdsaPubKey, ok := internalJWK.JSONWebKey.Key.(*ecdsa.PublicKey) + if !ok { + return errors.New("not an EC public key") + } + + if len(signature) != 2*ec.keySize { + return errors.New("ecdsa: invalid signature size") + } + + hasher := ec.hash.New() + + _, err = hasher.Write(msg) + if err != nil { + return errors.New("ecdsa: hash error") + } + + hash := hasher.Sum(nil) + + r := big.NewInt(0).SetBytes(signature[:ec.keySize]) + s := big.NewInt(0).SetBytes(signature[ec.keySize:]) + + verified := ecdsa.Verify(ecdsaPubKey, hash, r, s) + if !verified { + return errors.New("ecdsa: invalid signature") + } + + return nil +} + +type ellipticCurve struct { + curve elliptic.Curve + keySize int + hash crypto.Hash +} + +func parseEllipticCurve(curve string) *ellipticCurve { + switch curve { + case "P-256": + return &ellipticCurve{ + curve: elliptic.P256(), + keySize: p256KeySize, + hash: crypto.SHA256, + } + case "P-384": + return &ellipticCurve{ + curve: elliptic.P384(), + keySize: p384KeySize, + hash: crypto.SHA384, + } + case "P-521": + return &ellipticCurve{ + curve: elliptic.P521(), + keySize: p521KeySize, + hash: crypto.SHA512, + } + case "secp256k1": + return &ellipticCurve{ + curve: btcec.S256(), + keySize: secp256k1KeySize, + hash: crypto.SHA256, + } + default: + return nil + } +} diff --git a/method/sidetreelongform/sidetree-core/internal/jws/signature_test.go b/method/sidetreelongform/sidetree-core/internal/jws/signature_test.go new file mode 100644 index 0000000..a0ed84b --- /dev/null +++ b/method/sidetreelongform/sidetree-core/internal/jws/signature_test.go @@ -0,0 +1,286 @@ +/* +Copyright SecureKey Technologies Inc. All Rights Reserved. 
+ +SPDX-License-Identifier: Apache-2.0 +*/ + +package jws + +import ( + "crypto" + "crypto/ecdsa" + "crypto/ed25519" + "crypto/elliptic" + "crypto/rand" + "fmt" + "reflect" + "testing" + + "github.com/btcsuite/btcd/btcec" + gojose "github.com/square/go-jose/v3" + "github.com/square/go-jose/v3/json" + "github.com/stretchr/testify/require" + + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/jws" +) + +func TestVerifySignature(t *testing.T) { + t.Run("success EC P-256", func(t *testing.T) { + privateKey, err := ecdsa.GenerateKey(elliptic.P256(), rand.Reader) + require.NoError(t, err) + + jwk, err := getPublicKeyJWK(&privateKey.PublicKey) + require.NoError(t, err) + + payload := []byte("test") + + signature := getECSignature(privateKey, payload, crypto.SHA256) + err = VerifySignature(jwk, signature, payload) + require.NoError(t, err) + }) + + t.Run("success EC P-384", func(t *testing.T) { + privateKey, err := ecdsa.GenerateKey(elliptic.P384(), rand.Reader) + require.NoError(t, err) + + jwk, err := getPublicKeyJWK(&privateKey.PublicKey) + require.NoError(t, err) + + payload := []byte("test") + + signature := getECSignature(privateKey, payload, crypto.SHA384) + err = VerifySignature(jwk, signature, payload) + require.NoError(t, err) + }) + + t.Run("success EC P-521", func(t *testing.T) { + privateKey, err := ecdsa.GenerateKey(elliptic.P521(), rand.Reader) + require.NoError(t, err) + + jwk, err := getPublicKeyJWK(&privateKey.PublicKey) + require.NoError(t, err) + + payload := []byte("test") + + signature := getECSignature(privateKey, payload, crypto.SHA512) + err = VerifySignature(jwk, signature, payload) + require.NoError(t, err) + }) + + t.Run("success EC secp256k1", func(t *testing.T) { + privateKey, err := ecdsa.GenerateKey(btcec.S256(), rand.Reader) + require.NoError(t, err) + + jwk, err := getPublicKeyJWK(&privateKey.PublicKey) + require.NoError(t, err) + + payload := []byte("test") + + signature := getECSignature(privateKey, payload, crypto.SHA256) + err = VerifySignature(jwk, signature, payload) + require.NoError(t, err) + }) + + t.Run("success ED25519", func(t *testing.T) { + publicKey, privateKey, err := ed25519.GenerateKey(rand.Reader) + require.NoError(t, err) + + payload := []byte("test message") + signature := ed25519.Sign(privateKey, payload) + + jwk, err := getPublicKeyJWK(publicKey) + require.NoError(t, err) + + err = VerifySignature(jwk, signature, payload) + require.NoError(t, err) + }) + + t.Run("unsupported key type", func(t *testing.T) { + privateKey, err := ecdsa.GenerateKey(elliptic.P256(), rand.Reader) + require.NoError(t, err) + + jwk, err := getPublicKeyJWK(&privateKey.PublicKey) + require.NoError(t, err) + + payload := []byte("test") + signature := getECSignatureSHA256(privateKey, payload) + + jwk.Kty = "not-supported" + err = VerifySignature(jwk, signature, payload) + require.Error(t, err) + require.Contains(t, err.Error(), "key type is not supported for verifying signature") + }) +} + +func TestVerifyECSignature(t *testing.T) { + privateKey, err := ecdsa.GenerateKey(elliptic.P256(), rand.Reader) + require.NoError(t, err) + + t.Run("success", func(t *testing.T) { + jwk, err := getPublicKeyJWK(&privateKey.PublicKey) + require.NoError(t, err) + + payload := []byte("test") + + signature := getECSignatureSHA256(privateKey, payload) + err = verifyECSignature(jwk, signature, payload) + require.NoError(t, err) + }) + t.Run("unsupported elliptic curve", func(t *testing.T) { + jwk, err := getPublicKeyJWK(&privateKey.PublicKey) + require.NoError(t, err) + + 
payload := []byte("test") + signature := getECSignatureSHA256(privateKey, payload) + + jwk.Crv = "invalid" + err = verifyECSignature(jwk, signature, payload) + require.Error(t, err) + require.Contains(t, err.Error(), "unsupported elliptic curve") + }) + t.Run("invalid signature size", func(t *testing.T) { + jwk, err := getPublicKeyJWK(&privateKey.PublicKey) + require.NoError(t, err) + + err = verifyECSignature(jwk, []byte("signature"), []byte("test")) + require.Error(t, err) + require.Contains(t, err.Error(), "invalid signature size") + }) + t.Run("invalid signature", func(t *testing.T) { + jwk, err := getPublicKeyJWK(&privateKey.PublicKey) + require.NoError(t, err) + + signature := getECSignatureSHA256(privateKey, []byte("test")) + + err = verifyECSignature(jwk, signature, []byte("different")) + require.Error(t, err) + require.Contains(t, err.Error(), "ecdsa: invalid signature") + }) +} + +func TestVerifyED25519Signature(t *testing.T) { + publicKey, privateKey, err := ed25519.GenerateKey(rand.Reader) + require.NoError(t, err) + + payload := []byte("test message") + signature := ed25519.Sign(privateKey, payload) + + t.Run("success", func(t *testing.T) { + jwk, err := getPublicKeyJWK(publicKey) + require.NoError(t, err) + + err = verifyEd25519Signature(jwk, signature, payload) + require.NoError(t, err) + }) + + t.Run("invalid payload", func(t *testing.T) { + jwk, err := getPublicKeyJWK(publicKey) + require.NoError(t, err) + + err = verifyEd25519Signature(jwk, signature, []byte("different payload")) + require.Error(t, err) + require.Contains(t, err.Error(), "ed25519: invalid signature") + }) + + t.Run("invalid signature", func(t *testing.T) { + jwk, err := getPublicKeyJWK(publicKey) + require.NoError(t, err) + + err = verifyEd25519Signature(jwk, []byte("signature"), payload) + require.Error(t, err) + require.Contains(t, err.Error(), "ed25519: invalid signature") + }) + + t.Run("invalid curve", func(t *testing.T) { + jwk, err := getPublicKeyJWK(publicKey) + require.NoError(t, err) + jwk.Crv = "invalid" + + err = verifyEd25519Signature(jwk, signature, payload) + require.Error(t, err) + require.Contains(t, err.Error(), "unknown curve") + }) + + t.Run("wrong key type - EC key", func(t *testing.T) { + ecPrivateKey, err := ecdsa.GenerateKey(elliptic.P256(), rand.Reader) + require.NoError(t, err) + + jwk, err := getPublicKeyJWK(&ecPrivateKey.PublicKey) + require.NoError(t, err) + + err = verifyEd25519Signature(jwk, signature, payload) + require.Error(t, err) + require.Contains(t, err.Error(), "unexpected public key type for ed25519") + }) +} + +func getECSignatureSHA256(privateKey *ecdsa.PrivateKey, payload []byte) []byte { + return getECSignature(privateKey, payload, crypto.SHA256) +} + +func getECSignature(privKey *ecdsa.PrivateKey, payload []byte, hash crypto.Hash) []byte { + hasher := hash.New() + + _, err := hasher.Write(payload) + if err != nil { + panic(err) + } + + hashed := hasher.Sum(nil) + + r, s, err := ecdsa.Sign(rand.Reader, privKey, hashed) + if err != nil { + panic(err) + } + + curveBits := privKey.Curve.Params().BitSize + + keyBytes := curveBits / 8 + if curveBits%8 > 0 { + keyBytes++ + } + + copyPadded := func(source []byte, size int) []byte { + dest := make([]byte, size) + copy(dest[size-len(source):], source) + + return dest + } + + return append(copyPadded(r.Bytes(), keyBytes), copyPadded(s.Bytes(), keyBytes)...) +} + +// getPublicKeyJWK returns public key in JWK format. 
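+//
+// Sketch of how the tests above use it (accepts *ecdsa.PublicKey or
+// ed25519.PublicKey):
+//
+//	jwk, err := getPublicKeyJWK(&privateKey.PublicKey)
+//	err = VerifySignature(jwk, signature, payload)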
+func getPublicKeyJWK(pubKey interface{}) (*jws.JWK, error) { + internalJWK := JWK{ + JSONWebKey: gojose.JSONWebKey{Key: pubKey}, + } + + switch key := pubKey.(type) { + case ed25519.PublicKey: + // handled automatically by gojose + case *ecdsa.PublicKey: + ecdsaPubKey := pubKey.(*ecdsa.PublicKey) //nolint: errcheck + // using internal jwk wrapper marshall feature since gojose doesn't handle secp256k1 curve + if ecdsaPubKey.Curve == btcec.S256() { + internalJWK.Kty = secp256k1Kty + internalJWK.Crv = secp256k1Crv + } + default: + return nil, fmt.Errorf("unknown key type '%s'", reflect.TypeOf(key)) + } + + jsonJWK, err := internalJWK.MarshalJSON() + if err != nil { + return nil, err + } + + var jwk jws.JWK + err = json.Unmarshal(jsonJWK, &jwk) + + if err != nil { + return nil, err + } + + return &jwk, nil +} diff --git a/method/sidetreelongform/sidetree-core/internal/log/fields.go b/method/sidetreelongform/sidetree-core/internal/log/fields.go new file mode 100644 index 0000000..ddf2f25 --- /dev/null +++ b/method/sidetreelongform/sidetree-core/internal/log/fields.go @@ -0,0 +1,332 @@ +/* +Copyright SecureKey Technologies Inc. All Rights Reserved. + +SPDX-License-Identifier: Apache-2.0 +*/ + +package log + +import ( + "encoding/json" + "fmt" + + "go.uber.org/zap" + "go.uber.org/zap/zapcore" +) + +// Log Fields. +const ( + FieldURI = "uri" + FieldServiceName = "service" + FieldData = "data" + FieldRequestBody = "requestBody" + FieldSize = "size" + FieldMaxSize = "maxSize" + FieldParameter = "parameter" + FieldTotal = "total" + FieldSuffix = "suffix" + FieldSuffixes = "suffixes" + FieldOperationType = "operationType" + FieldOperation = "operation" + FieldOperations = "operations" + FieldOperationID = "operationID" + FieldGenesisTime = "genesisTime" + FieldOperationGenesisTime = "opGenesisTime" + FieldSidetreeTxn = "sidetreeTxn" + FieldID = "id" + FieldResolutionModel = "resolutionModel" + FieldVersion = "version" + FieldNamespace = "namespace" + FieldAnchorString = "anchorString" + FieldSource = "source" + FieldTotalPending = "totalPending" + FieldTransactionTime = "transactionTime" + FieldTransactionNumber = "transactionNumber" + FieldCommitment = "commitment" + FieldRecoveryCommitment = "recoveryCommitment" + FieldUpdateCommitment = "updateCommitment" + FieldTotalCommitments = "totalCommitments" + FieldTotalOperations = "totalOperations" + FieldTotalCreateOperations = "totalCreateOperations" + FieldTotalUpdateOperations = "totalUpdateOperations" + FieldTotalRecoverOperations = "totalRecoverOperations" + FieldTotalDeactivateOperations = "totalDeactivateOperations" + FieldDocument = "document" + FieldDeactivated = "deactivated" + FieldVersionTime = "versionTime" + FieldPatch = "patch" + FieldIsBatch = "isBatch" + FieldContent = "content" + FieldSources = "sources" + FieldAlias = "alias" +) + +// WithURIString sets the uri field. +func WithURIString(value string) zap.Field { + return zap.String(FieldURI, value) +} + +// WithData sets the data field. +func WithData(value []byte) zap.Field { + return zap.String(FieldData, string(value)) +} + +// WithRequestBody sets the request-body field. +func WithRequestBody(value []byte) zap.Field { + return zap.String(FieldRequestBody, string(value)) +} + +// WithServiceName sets the service field. +func WithServiceName(value string) zap.Field { + return zap.String(FieldServiceName, value) +} + +// WithSize sets the size field. +func WithSize(value int) zap.Field { + return zap.Int(FieldSize, value) +} + +// WithMaxSize sets the max-size field. 
+func WithMaxSize(value int) zap.Field { + return zap.Int(FieldMaxSize, value) +} + +// WithParameter sets the parameter field. +func WithParameter(value string) zap.Field { + return zap.String(FieldParameter, value) +} + +// WithTotal sets the total field. +func WithTotal(value int) zap.Field { + return zap.Int(FieldTotal, value) +} + +// WithSuffix sets the suffix field. +func WithSuffix(value string) zap.Field { + return zap.String(FieldSuffix, value) +} + +// WithSuffixes sets the suffixes field. +func WithSuffixes(value ...string) zap.Field { + return zap.Array(FieldSuffixes, NewStringArrayMarshaller(value)) +} + +// WithOperationType sets the operation-type field. +func WithOperationType(value string) zap.Field { + return zap.Any(FieldOperationType, value) +} + +// WithOperation sets the operation field. +func WithOperation(value interface{}) zap.Field { + return zap.Inline(NewObjectMarshaller(FieldOperation, value)) +} + +// WithOperationID sets the operation-id field. +func WithOperationID(value string) zap.Field { + return zap.String(FieldOperationID, value) +} + +// WithGenesisTime sets the genesis-time field. +func WithGenesisTime(value uint64) zap.Field { + return zap.Uint64(FieldGenesisTime, value) +} + +// WithOperationGenesisTime sets the op-genesis-time field. +func WithOperationGenesisTime(value uint64) zap.Field { + return zap.Uint64(FieldOperationGenesisTime, value) +} + +// WithSidetreeTxn sets the sidetree-txn field. +func WithSidetreeTxn(value interface{}) zap.Field { + return zap.Inline(NewObjectMarshaller(FieldSidetreeTxn, value)) +} + +// WithID sets the id field. +func WithID(value string) zap.Field { + return zap.String(FieldID, value) +} + +// WithResolutionModel sets the resolution-model field. +func WithResolutionModel(value interface{}) zap.Field { + return zap.Inline(NewObjectMarshaller(FieldResolutionModel, value)) +} + +// WithVersion sets the version field. +func WithVersion(value string) zap.Field { + return zap.String(FieldVersion, value) +} + +// WithNamespace sets the namespace field. +func WithNamespace(value string) zap.Field { + return zap.String(FieldNamespace, value) +} + +// WithAnchorString sets the anchor-string field. +func WithAnchorString(value string) zap.Field { + return zap.String(FieldAnchorString, value) +} + +// WithSource sets the source field. +func WithSource(value string) zap.Field { + return zap.String(FieldSource, value) +} + +// WithTotalPending sets the total-pending field. +func WithTotalPending(value uint) zap.Field { + return zap.Uint(FieldTotalPending, value) +} + +// WithTransactionTime sets the transaction-time field. +func WithTransactionTime(value uint64) zap.Field { + return zap.Uint64(FieldTransactionTime, value) +} + +// WithTransactionNumber sets the transaction-number field. +func WithTransactionNumber(value uint64) zap.Field { + return zap.Uint64(FieldTransactionNumber, value) +} + +// WithCommitment sets the commitment field. +func WithCommitment(value string) zap.Field { + return zap.String(FieldCommitment, value) +} + +// WithRecoveryCommitment sets the recovery-commitment field. +func WithRecoveryCommitment(value string) zap.Field { + return zap.String(FieldRecoveryCommitment, value) +} + +// WithUpdateCommitment sets the update-commitment field. +func WithUpdateCommitment(value string) zap.Field { + return zap.String(FieldUpdateCommitment, value) +} + +// WithTotalCommitments sets the total-commitments field. 
+func WithTotalCommitments(value int) zap.Field { + return zap.Int(FieldTotalCommitments, value) +} + +// WithTotalOperations sets the total-operations field. +func WithTotalOperations(value int) zap.Field { + return zap.Int(FieldTotalOperations, value) +} + +// WithTotalCreateOperations sets the total-create-operations field. +func WithTotalCreateOperations(value int) zap.Field { + return zap.Int(FieldTotalCreateOperations, value) +} + +// WithTotalUpdateOperations sets the total-update-operations field. +func WithTotalUpdateOperations(value int) zap.Field { + return zap.Int(FieldTotalUpdateOperations, value) +} + +// WithTotalRecoverOperations sets the total-recover-operations field. +func WithTotalRecoverOperations(value int) zap.Field { + return zap.Int(FieldTotalRecoverOperations, value) +} + +// WithTotalDeactivateOperations sets the total-deactivate-operations field. +func WithTotalDeactivateOperations(value int) zap.Field { + return zap.Int(FieldTotalDeactivateOperations, value) +} + +// WithDocument sets the document field. +func WithDocument(value map[string]interface{}) zap.Field { + return zap.Inline(newJSONMarshaller(FieldDocument, value)) +} + +// WithDeactivated sets the deactivated field. +func WithDeactivated(value bool) zap.Field { + return zap.Bool(FieldDeactivated, value) +} + +// WithOperations sets the operation field. +func WithOperations(value interface{}) zap.Field { + return zap.Inline(NewObjectMarshaller(FieldOperations, value)) +} + +// WithVersionTime sets the version-time field. +func WithVersionTime(value string) zap.Field { + return zap.String(FieldVersionTime, value) +} + +// WithPatch sets the patch field. +func WithPatch(value interface{}) zap.Field { + return zap.Inline(NewObjectMarshaller(FieldPatch, value)) +} + +// WithIsBatch sets the is-batch field. +func WithIsBatch(value bool) zap.Field { + return zap.Bool(FieldIsBatch, value) +} + +// WithContent sets the content field. +func WithContent(value []byte) zap.Field { + return zap.String(FieldContent, string(value)) +} + +// WithSources sets the sources field. +func WithSources(value ...string) zap.Field { + return zap.Array(FieldSources, NewStringArrayMarshaller(value)) +} + +// WithAlias sets the alias field. +func WithAlias(value string) zap.Field { + return zap.String(FieldAlias, value) +} + +type jsonMarshaller struct { + key string + obj interface{} +} + +func newJSONMarshaller(key string, value interface{}) *jsonMarshaller { + return &jsonMarshaller{key: key, obj: value} +} + +func (m *jsonMarshaller) MarshalLogObject(e zapcore.ObjectEncoder) error { + b, err := json.Marshal(m.obj) + if err != nil { + return fmt.Errorf("marshal json: %w", err) + } + + e.AddString(m.key, string(b)) + + return nil +} + +// ObjectMarshaller uses reflection to marshal an object's fields. +type ObjectMarshaller struct { + key string + obj interface{} +} + +// NewObjectMarshaller returns a new ObjectMarshaller. +func NewObjectMarshaller(key string, obj interface{}) *ObjectMarshaller { + return &ObjectMarshaller{key: key, obj: obj} +} + +// MarshalLogObject marshals the object's fields. +func (m *ObjectMarshaller) MarshalLogObject(e zapcore.ObjectEncoder) error { + return e.AddReflected(m.key, m.obj) +} + +// StringArrayMarshaller marshals an array of strings into a log field. +type StringArrayMarshaller struct { + values []string +} + +// NewStringArrayMarshaller returns a new StringArrayMarshaller. 
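+//
+// It backs the variadic field helpers such as WithSuffixes and WithSources,
+// e.g. (sketch):
+//
+//	logger.Info("processing", zap.Array(FieldSuffixes, NewStringArrayMarshaller([]string{"a", "b"})))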
+func NewStringArrayMarshaller(values []string) *StringArrayMarshaller {
+	return &StringArrayMarshaller{values: values}
+}
+
+// MarshalLogArray marshals the array.
+func (m *StringArrayMarshaller) MarshalLogArray(e zapcore.ArrayEncoder) error {
+	for _, v := range m.values {
+		e.AppendString(v)
+	}
+
+	return nil
+}
diff --git a/method/sidetreelongform/sidetree-core/internal/log/fields_test.go b/method/sidetreelongform/sidetree-core/internal/log/fields_test.go
new file mode 100644
index 0000000..24f588d
--- /dev/null
+++ b/method/sidetreelongform/sidetree-core/internal/log/fields_test.go
@@ -0,0 +1,201 @@
+/*
+Copyright SecureKey Technologies Inc. All Rights Reserved.
+
+SPDX-License-Identifier: Apache-2.0
+*/
+
+package log
+
+import (
+	"bytes"
+	"encoding/json"
+	"net/url"
+	"testing"
+
+	"github.com/stretchr/testify/require"
+	"github.com/trustbloc/logutil-go/pkg/log"
+)
+
+func TestStandardFields(t *testing.T) {
+	const module = "test_module"
+
+	u1 := parseURL(t, "https://example1.com")
+
+	t.Run("json fields 1", func(t *testing.T) {
+		stdOut := newMockWriter()
+
+		logger := log.New(module, log.WithStdOut(stdOut), log.WithEncoding(log.JSON))
+
+		rm := &mockObject{Field1: "value33", Field2: 888}
+
+		logger.Info("Some message",
+			WithData([]byte(`{"field":"value"}`)), WithServiceName("service1"), WithSize(1234),
+			WithParameter("param1"), WithRequestBody([]byte(`request body`)),
+			WithTotal(12), WithSuffix("1234"), WithOperationType("Create"),
+			WithURIString(u1.String()), WithOperationID("op1"), WithGenesisTime(1233),
+			WithOperationGenesisTime(3321), WithID("id1"), WithResolutionModel(rm),
+		)
+
+		t.Log(stdOut.String())
+
+		l := unmarshalLogData(t, stdOut.Bytes())
+
+		require.Equal(t, `Some message`, l.Msg)
+		require.Equal(t, `{"field":"value"}`, l.Data)
+		require.Equal(t, `service1`, l.Service)
+		require.Equal(t, 1234, l.Size)
+		require.Equal(t, `param1`, l.Parameter)
+		require.Equal(t, `request body`, l.RequestBody)
+		require.Equal(t, 12, l.Total)
+		require.Equal(t, "1234", l.Suffix)
+		require.Equal(t, "Create", l.OperationType)
+		require.Equal(t, u1.String(), l.URI)
+		require.Equal(t, "op1", l.OperationID)
+		require.Equal(t, 1233, l.GenesisTime)
+		require.Equal(t, 3321, l.OperationGenesisTime)
+		require.Equal(t, "id1", l.ID)
+		require.Equal(t, rm, l.ResolutionModel)
+	})
+
+	t.Run("json fields 2", func(t *testing.T) {
+		stdOut := newMockWriter()
+
+		logger := log.New(module, log.WithStdOut(stdOut), log.WithEncoding(log.JSON))
+
+		op := &mockObject{Field1: "op1", Field2: 9486}
+		txn := &mockObject{Field1: "txn1", Field2: 5967}
+		patch := &mockObject{Field1: "patch1", Field2: 3265}
+
+		logger.Info("Some message",
+			WithSuffixes("suffix1", "suffix2"), WithVersion("v1"), WithMaxSize(20),
+			WithOperation(op), WithSidetreeTxn(txn), WithNamespace("ns1"), WithAnchorString("anchor1"),
+			WithSource("inbox"), WithTotalPending(36), WithTransactionTime(989), WithTransactionNumber(778),
+			WithCommitment("commit1"), WithRecoveryCommitment("recommit1"), WithUpdateCommitment("upcommit1"),
+			WithTotalCommitments(32), WithTotalOperations(54), WithTotalCreateOperations(12),
+			WithTotalUpdateOperations(87), WithTotalRecoverOperations(12), WithTotalDeactivateOperations(3),
+			WithDocument(map[string]interface{}{"field1": 1234}), WithDeactivated(true), WithOperations([]*mockObject{op}),
+			WithVersionTime("12"), WithPatch(patch), WithIsBatch(true), WithContent([]byte("content1")),
+			WithSources("source1", "source2"),
WithAlias("alias1"), + ) + + l := unmarshalLogData(t, stdOut.Bytes()) + + require.Equal(t, []string{"suffix1", "suffix2"}, l.Suffixes) + require.Equal(t, "v1", l.Version) + require.Equal(t, 20, l.MaxSize) + require.Equal(t, op, l.Operation) + require.Equal(t, txn, l.SidetreeTxn) + require.Equal(t, "ns1", l.Namespace) + require.Equal(t, "anchor1", l.AnchorString) + require.Equal(t, "inbox", l.Source) + require.Equal(t, 36, l.TotalPending) + require.Equal(t, 989, l.TransactionTime) + require.Equal(t, 778, l.TransactionNumber) + require.Equal(t, "commit1", l.Commitment) + require.Equal(t, "recommit1", l.RecoveryCommitment) + require.Equal(t, "upcommit1", l.UpdateCommitment) + require.Equal(t, 32, l.TotalCommitments) + require.Equal(t, 54, l.TotalOperations) + require.Equal(t, 12, l.TotalCreateOperations) + require.Equal(t, 87, l.TotalUpdateOperations) + require.Equal(t, 12, l.TotalRecoverOperations) + require.Equal(t, 3, l.TotalDeactivateOperations) + require.Equal(t, `{"field1":1234}`, l.Document) + require.Equal(t, true, l.Deactivated) + require.Equal(t, []*mockObject{op}, l.Operations) + require.Equal(t, "12", l.VersionTime) + require.Equal(t, patch, l.Patch) + require.Equal(t, true, l.IsBatch) + require.Equal(t, "content1", l.Content) + require.Equal(t, []string{"source1", "source2"}, l.Sources) + require.Equal(t, "alias1", l.Alias) + }) +} + +type mockObject struct { + Field1 string + Field2 int +} + +type logData struct { + Level string `json:"level"` + Time string `json:"time"` + Logger string `json:"logger"` + Caller string `json:"caller"` + Msg string `json:"msg"` + Error string `json:"error"` + + Data string `json:"data"` + Service string `json:"service"` + Size int `json:"size"` + Parameter string `json:"parameter"` + URI string `json:"uri"` + RequestBody string `json:"requestBody"` + Total int `json:"total"` + Suffix string `json:"suffix"` + OperationType string `json:"operationType"` + OperationID string `json:"operationID"` + GenesisTime int `json:"genesisTime"` + ID string `json:"id"` + ResolutionModel *mockObject `json:"resolutionModel"` + Suffixes []string `json:"suffixes"` + Version string `json:"version"` + MaxSize int `json:"maxSize"` + Operation *mockObject `json:"operation"` + SidetreeTxn *mockObject `json:"sidetreeTxn"` + Namespace string `json:"namespace"` + AnchorString string `json:"anchorString"` + Source string `json:"source"` + OperationGenesisTime int `json:"opGenesisTime"` + TotalPending int `json:"totalPending"` + TransactionTime int `json:"transactionTime"` + TransactionNumber int `json:"transactionNumber"` + Commitment string `json:"commitment"` + RecoveryCommitment string `json:"recoveryCommitment"` + UpdateCommitment string `json:"updateCommitment"` + TotalCommitments int `json:"totalCommitments"` + TotalOperations int `json:"totalOperations"` + TotalCreateOperations int `json:"totalCreateOperations"` + TotalUpdateOperations int `json:"totalUpdateOperations"` + TotalRecoverOperations int `json:"totalRecoverOperations"` + TotalDeactivateOperations int `json:"totalDeactivateOperations"` + Document string `json:"document"` + Deactivated bool `json:"deactivated"` + Operations []*mockObject `json:"operations"` + VersionTime string `json:"versionTime"` + Patch *mockObject `json:"patch"` + IsBatch bool `json:"isBatch"` + Content string `json:"content"` + Sources []string `json:"sources"` + Alias string `json:"alias"` +} + +func unmarshalLogData(t *testing.T, b []byte) *logData { + t.Helper() + + l := &logData{} + + require.NoError(t, json.Unmarshal(b, l)) + + 
return l
+}
+
+func parseURL(t *testing.T, raw string) *url.URL {
+	t.Helper()
+
+	u, err := url.Parse(raw)
+	require.NoError(t, err)
+
+	return u
+}
+
+type mockWriter struct {
+	*bytes.Buffer
+}
+
+func (m *mockWriter) Sync() error {
+	return nil
+}
+
+func newMockWriter() *mockWriter {
+	return &mockWriter{Buffer: bytes.NewBuffer(nil)}
+}
diff --git a/method/sidetreelongform/sidetree-core/internal/signutil/signature.go b/method/sidetreelongform/sidetree-core/internal/signutil/signature.go
new file mode 100644
index 0000000..f2d1e40
--- /dev/null
+++ b/method/sidetreelongform/sidetree-core/internal/signutil/signature.go
@@ -0,0 +1,50 @@
+/*
+Copyright SecureKey Technologies Inc. All Rights Reserved.
+
+SPDX-License-Identifier: Apache-2.0
+*/
+
+package signutil
+
+import (
+	"errors"
+
+	"github.com/trustbloc/did-go/doc/json/canonicalizer"
+	internaljws "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/internal/jws"
+	"github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/jws"
+)
+
+// Signer defines the JWS Signer interface that will be used to sign required data in a Sidetree request.
+type Signer interface {
+	// Sign signs data and returns signature value.
+	Sign(data []byte) ([]byte, error)
+
+	// Headers provides required JWS protected headers. It provides information about signing key and algorithm.
+	Headers() jws.Headers
+}
+
+// SignModel canonicalizes and signs the model.
+func SignModel(model interface{}, signer Signer) (string, error) {
+	// canonicalize the model first
+	signedDataBytes, err := canonicalizer.MarshalCanonical(model)
+	if err != nil {
+		return "", err
+	}
+
+	return SignPayload(signedDataBytes, signer)
+}
+
+// SignPayload allows for signing a payload.
+func SignPayload(payload []byte, signer Signer) (string, error) {
+	alg, ok := signer.Headers().Algorithm()
+	if !ok || alg == "" {
+		return "", errors.New("signing algorithm is required")
+	}
+
+	jwsSignature, err := internaljws.NewJWS(signer.Headers(), nil, payload, signer)
+	if err != nil {
+		return "", err
+	}
+
+	return jwsSignature.SerializeCompact(false)
+}
diff --git a/method/sidetreelongform/sidetree-core/internal/signutil/signature_test.go b/method/sidetreelongform/sidetree-core/internal/signutil/signature_test.go
new file mode 100644
index 0000000..b0e139e
--- /dev/null
+++ b/method/sidetreelongform/sidetree-core/internal/signutil/signature_test.go
@@ -0,0 +1,114 @@
+/*
+Copyright Gen Digital Inc. All Rights Reserved.
+
+SPDX-License-Identifier: Apache-2.0
+*/
+
+package signutil
+
+import (
+	"crypto/ecdsa"
+	"crypto/elliptic"
+	"crypto/rand"
+	"errors"
+	"testing"
+
+	"github.com/stretchr/testify/require"
+
+	internal "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/internal/jws"
+	"github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/jws"
+	"github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/util/ecsigner"
+	"github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/util/pubkey"
+)
+
+func TestSignModel(t *testing.T) {
+	t.Run("marshal error", func(t *testing.T) {
+		ch := make(chan int)
+		request, err := SignModel(ch, nil)
+		require.Error(t, err)
+		require.Empty(t, request)
+		require.Contains(t, err.Error(), "unsupported type: chan int")
+	})
+	t.Run("success", func(t *testing.T) {
+		privateKey, err := ecdsa.GenerateKey(elliptic.P256(), rand.Reader)
+		require.NoError(t, err)
+
+		signer := ecsigner.New(privateKey, "ES256", "key-1")
+
+		test := struct {
+			message string
+		}{
+			message: "test",
+		}
+
+		request, err := SignModel(test, signer)
+		require.NoError(t, err)
+		require.NotEmpty(t, request)
+	})
+}
+
+func TestSignPayload(t *testing.T) {
+	privateKey, err := ecdsa.GenerateKey(elliptic.P256(), rand.Reader)
+	require.NoError(t, err)
+
+	jwk, err := pubkey.GetPublicKeyJWK(&privateKey.PublicKey)
+	require.NoError(t, err)
+
+	t.Run("success", func(t *testing.T) {
+		signer := ecsigner.New(privateKey, "ES256", "key-1")
+
+		message := []byte("test")
+		jwsSignature, err := SignPayload(message, signer)
+		require.NoError(t, err)
+		require.NotEmpty(t, jwsSignature)
+
+		_, err = internal.VerifyJWS(jwsSignature, jwk)
+		require.NoError(t, err)
+	})
+	t.Run("signing algorithm required", func(t *testing.T) {
+		signer := ecsigner.New(privateKey, "", "kid")
+
+		jws, err := SignPayload([]byte("test"), signer)
+		require.Error(t, err)
+		require.Empty(t, jws)
+		require.Contains(t, err.Error(), "signing algorithm is required")
+	})
+	t.Run("signer error", func(t *testing.T) {
+		jws, err := SignPayload([]byte(""), NewMockSigner(errors.New("test error"), true))
+		require.Error(t, err)
+		require.Empty(t, jws)
+		require.Contains(t, err.Error(), "test error")
+	})
+}
+
+// MockSigner implements the signer interface.
+type MockSigner struct {
+	Recovery bool
+	Err      error
+}
+
+// NewMockSigner creates a new mock signer (defaults to recovery signer).
+func NewMockSigner(err error, recovery bool) *MockSigner {
+	return &MockSigner{Err: err, Recovery: recovery}
+}
+
+// Headers provides required JWS protected headers. It provides information about signing key and algorithm.
+func (ms *MockSigner) Headers() jws.Headers {
+	headers := make(jws.Headers)
+	headers[jws.HeaderAlgorithm] = "alg"
+
+	if !ms.Recovery {
+		headers[jws.HeaderKeyID] = "kid"
+	}
+
+	return headers
+}
+
+// Sign signs msg and returns a mock signature value.
+func (ms *MockSigner) Sign(msg []byte) ([]byte, error) {
+	if ms.Err != nil {
+		return nil, ms.Err
+	}
+
+	return []byte("signature"), nil
+}
diff --git a/method/sidetreelongform/sidetree-core/jws/header.go b/method/sidetreelongform/sidetree-core/jws/header.go
new file mode 100644
index 0000000..e417657
--- /dev/null
+++ b/method/sidetreelongform/sidetree-core/jws/header.go
@@ -0,0 +1,98 @@
+/*
+Copyright SecureKey Technologies Inc. All Rights Reserved.
+
+SPDX-License-Identifier: Apache-2.0
+*/
+
+package jws
+
+// IANA registered JOSE headers (https://tools.ietf.org/html/rfc7515#section-4.1)
+const (
+	// HeaderAlgorithm identifies:
+	// For JWS: the cryptographic algorithm used to secure the JWS.
+	// For JWE: the cryptographic algorithm used to encrypt or determine the value of the CEK.
+	HeaderAlgorithm = "alg" // string
+
+	// HeaderJWKSetURL is a URI that refers to a resource for a set of JSON-encoded public keys, one of which:
+	// For JWS: corresponds to the key used to digitally sign the JWS.
+	// For JWE: corresponds to the public key to which the JWE was encrypted.
+	HeaderJWKSetURL = "jku" // string
+
+	// HeaderJSONWebKey is:
+	// For JWS: the public key that corresponds to the key used to digitally sign the JWS.
+	// For JWE: the public key to which the JWE was encrypted.
+	HeaderJSONWebKey = "publicKeyJwk" // JSON
+
+	// HeaderKeyID is a hint:
+	// For JWS: indicating which key was used to secure the JWS.
+	// For JWE: which references the public key to which the JWE was encrypted.
+	HeaderKeyID = "kid" // string
+
+	// HeaderX509URL is a URI that refers to a resource for the X.509 public key certificate or certificate chain:
+	// For JWS: corresponding to the key used to digitally sign the JWS.
+	// For JWE: corresponding to the public key to which the JWE was encrypted.
+	HeaderX509URL = "x5u"
+
+	// HeaderX509CertificateChain contains the X.509 public key certificate or certificate chain:
+	// For JWS: corresponding to the key used to digitally sign the JWS.
+	// For JWE: corresponding to the public key to which the JWE was encrypted.
+	HeaderX509CertificateChain = "x5c"
+
+	// HeaderX509CertificateDigestSha1 (X.509 certificate SHA-1 thumbprint) is a base64url-encoded
+	// SHA-1 thumbprint (a.k.a. digest) of the DER encoding of the X.509 certificate:
+	// For JWS: corresponding to the key used to digitally sign the JWS.
+	// For JWE: corresponding to the public key to which the JWE was encrypted.
+	HeaderX509CertificateDigestSha1 = "x5t"
+
+	// HeaderX509CertificateDigestSha256 (X.509 certificate SHA-256 thumbprint) is a base64url-encoded SHA-256
+	// thumbprint (a.k.a. digest) of the DER encoding of the X.509 certificate:
+	// For JWS: corresponding to the key used to digitally sign the JWS.
+	// For JWE: corresponding to the public key to which the JWE was encrypted.
+	HeaderX509CertificateDigestSha256 = "x5t#S256" // string
+
+	// HeaderType is:
+	// For JWS: used by JWS applications to declare the media type of this complete JWS.
+	// For JWE: used by JWE applications to declare the media type of this complete JWE.
+	HeaderType = "typ" // string
+
+	// HeaderContentType is used by JWS applications to declare the media type of:
+	// For JWS: the secured content (the payload).
+	// For JWE: the secured content (the plaintext).
+	HeaderContentType = "cty" // string
+
+	// HeaderCritical indicates that extensions to:
+	// For JWS: this JWS header specification and/or JWA are being used that MUST be understood and processed.
+	// For JWE: this JWE header specification and/or JWA are being used that MUST be understood and processed.
+	HeaderCritical = "crit" // array
+)
+
+// Header defined in https://tools.ietf.org/html/rfc7797
+const (
+	// HeaderB64Payload determines whether the payload is represented in the JWS and the JWS Signing
+	// Input as ASCII(BASE64URL(JWS Payload)) or as the JWS Payload value itself with no encoding performed.
+	HeaderB64Payload = "b64" // bool
+)
+
+// Headers represents JOSE headers.
+type Headers map[string]interface{}
+
+// KeyID gets the key ID from JOSE headers.
+func (h Headers) KeyID() (string, bool) {
+	return h.stringValue(HeaderKeyID)
+}
+
+// Algorithm gets the algorithm from JOSE headers.
+func (h Headers) Algorithm() (string, bool) {
+	return h.stringValue(HeaderAlgorithm)
+}
+
+func (h Headers) stringValue(key string) (string, bool) {
+	kRaw, ok := h[key]
+	if !ok {
+		return "", false
+	}
+
+	kStr, ok := kRaw.(string)
+
+	return kStr, ok
+}
diff --git a/method/sidetreelongform/sidetree-core/jws/header_test.go b/method/sidetreelongform/sidetree-core/jws/header_test.go
new file mode 100644
index 0000000..e69498d
--- /dev/null
+++ b/method/sidetreelongform/sidetree-core/jws/header_test.go
@@ -0,0 +1,38 @@
+/*
+Copyright SecureKey Technologies Inc. All Rights Reserved.
+
+SPDX-License-Identifier: Apache-2.0
+*/
+
+package jws
+
+import (
+	"testing"
+
+	"github.com/stretchr/testify/require"
+)
+
+func TestHeader(t *testing.T) {
+	headers := make(Headers)
+
+	alg, ok := headers.Algorithm()
+	require.False(t, ok)
+	require.Empty(t, alg)
+
+	kid, ok := headers.KeyID()
+	require.False(t, ok)
+	require.Empty(t, kid)
+
+	headers = Headers(map[string]interface{}{
+		"alg": "alg",
+		"kid": "kid",
+	})
+
+	alg, ok = headers.Algorithm()
+	require.True(t, ok)
+	require.Equal(t, "alg", alg)
+
+	kid, ok = headers.KeyID()
+	require.True(t, ok)
+	require.Equal(t, "kid", kid)
+}
diff --git a/method/sidetreelongform/sidetree-core/jws/jwk.go b/method/sidetreelongform/sidetree-core/jws/jwk.go
new file mode 100644
index 0000000..2ab1289
--- /dev/null
+++ b/method/sidetreelongform/sidetree-core/jws/jwk.go
@@ -0,0 +1,35 @@
+/*
+Copyright SecureKey Technologies Inc. All Rights Reserved.
+
+SPDX-License-Identifier: Apache-2.0
+*/
+
+package jws
+
+import "errors"
+
+// JWK contains a public key in JWK format.
+type JWK struct {
+	Kty   string `json:"kty"`
+	Crv   string `json:"crv"`
+	X     string `json:"x"`
+	Y     string `json:"y"`
+	Nonce string `json:"nonce,omitempty"`
+}
+
+// Validate validates the JWK.
+func (jwk *JWK) Validate() error {
+	if jwk.Crv == "" {
+		return errors.New("JWK crv is missing")
+	}
+
+	if jwk.Kty == "" {
+		return errors.New("JWK kty is missing")
+	}
+
+	if jwk.X == "" {
+		return errors.New("JWK x is missing")
+	}
+
+	return nil
+}
diff --git a/method/sidetreelongform/sidetree-core/jws/jwk_test.go b/method/sidetreelongform/sidetree-core/jws/jwk_test.go
new file mode 100644
index 0000000..a9894be
--- /dev/null
+++ b/method/sidetreelongform/sidetree-core/jws/jwk_test.go
@@ -0,0 +1,60 @@
+/*
+Copyright Gen Digital Inc. All Rights Reserved.
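A short sketch, not from the diff, of the Headers accessors and JWK.Validate in use. Headers is a plain map, so the ok flag distinguishes a missing entry from one that is present but not a string; the JWK field values below are placeholders.

package main

import (
	"fmt"
	"log"

	"github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/jws"
)

func main() {
	headers := jws.Headers{
		jws.HeaderAlgorithm: "ES256",
		jws.HeaderKeyID:     "key-1",
	}

	if alg, ok := headers.Algorithm(); ok {
		fmt.Println("alg:", alg) // alg: ES256
	}

	// Validate only checks that kty, crv and x are populated; y is optional
	// (it is empty for OKP keys, for example).
	jwk := &jws.JWK{Kty: "EC", Crv: "P-256", X: "base64url-x"}
	if err := jwk.Validate(); err != nil {
		log.Fatal(err)
	}
}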
+ +SPDX-License-Identifier: Apache-2.0 +*/ + +package jws + +import ( + "testing" + + "github.com/stretchr/testify/require" +) + +func TestValidate(t *testing.T) { + t.Run("success ", func(t *testing.T) { + jwk := JWK{ + Kty: "kty", + Crv: "crv", + X: "x", + } + + err := jwk.Validate() + require.NoError(t, err) + }) + + t.Run("missing kty", func(t *testing.T) { + jwk := JWK{ + Kty: "", + Crv: "crv", + X: "x", + } + + err := jwk.Validate() + require.Error(t, err) + require.Contains(t, err.Error(), "kty is missing") + }) + + t.Run("missing crv", func(t *testing.T) { + jwk := JWK{ + Kty: "kty", + X: "x", + } + + err := jwk.Validate() + require.Error(t, err) + require.Contains(t, err.Error(), "crv is missing") + }) + + t.Run("missing x", func(t *testing.T) { + jwk := JWK{ + Kty: "kty", + Crv: "crv", + } + + err := jwk.Validate() + require.Error(t, err) + require.Contains(t, err.Error(), "x is missing") + }) +} diff --git a/method/sidetreelongform/sidetree-core/mocks/blockchain.go b/method/sidetreelongform/sidetree-core/mocks/blockchain.go new file mode 100644 index 0000000..97a9972 --- /dev/null +++ b/method/sidetreelongform/sidetree-core/mocks/blockchain.go @@ -0,0 +1,77 @@ +/* +Copyright SecureKey Technologies Inc. All Rights Reserved. + +SPDX-License-Identifier: Apache-2.0 +*/ + +package mocks + +import ( + "sync" + + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/api/operation" + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/api/protocol" + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/api/txn" +) + +// MockAnchorWriter mocks anchor writer for testing purposes. +type MockAnchorWriter struct { + mutex sync.RWMutex + namespace string + anchors []string + err error +} + +// NewMockAnchorWriter creates mock anchor writer. +func NewMockAnchorWriter(err error) *MockAnchorWriter { + return &MockAnchorWriter{err: err, namespace: DefaultNS} +} + +// WriteAnchor writes the anchor string as a transaction to anchoring system. +func (m *MockAnchorWriter) WriteAnchor( + anchor string, _ []*protocol.AnchorDocument, _ []*operation.Reference, _ uint64) error { + if m.err != nil { + return m.err + } + + m.mutex.Lock() + defer m.mutex.Unlock() + + m.anchors = append(m.anchors, anchor) + + return nil +} + +// Read reads transactions since transaction number. +func (m *MockAnchorWriter) Read(sinceTransactionNumber int) (bool, *txn.SidetreeTxn) { + m.mutex.RLock() + defer m.mutex.RUnlock() + + moreTransactions := false + if len(m.anchors) > 0 && sinceTransactionNumber < len(m.anchors)-2 { + moreTransactions = true + } + + if len(m.anchors) > 0 && sinceTransactionNumber < len(m.anchors)-1 { + hashIndex := sinceTransactionNumber + 1 + + t := &txn.SidetreeTxn{ + Namespace: m.namespace, + TransactionTime: uint64(hashIndex), + TransactionNumber: uint64(hashIndex), + AnchorString: m.anchors[hashIndex], + } + + return moreTransactions, t + } + + return moreTransactions, nil +} + +// GetAnchors returns anchors. +func (m *MockAnchorWriter) GetAnchors() []string { + m.mutex.RLock() + defer m.mutex.RUnlock() + + return m.anchors +} diff --git a/method/sidetreelongform/sidetree-core/mocks/cas.go b/method/sidetreelongform/sidetree-core/mocks/cas.go new file mode 100644 index 0000000..8cfe72b --- /dev/null +++ b/method/sidetreelongform/sidetree-core/mocks/cas.go @@ -0,0 +1,103 @@ +/* +Copyright SecureKey Technologies Inc. All Rights Reserved. 
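A sketch, not part of the diff, of how a consumer would drain MockAnchorWriter: each WriteAnchor becomes one mock Sidetree transaction, and Read(since) returns the transaction after the given number, with the boolean signalling whether more remain after the returned one.

package main

import (
	"fmt"

	"github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/mocks"
)

func main() {
	writer := mocks.NewMockAnchorWriter(nil)

	_ = writer.WriteAnchor("anchor1", nil, nil, 0)
	_ = writer.WriteAnchor("anchor2", nil, nil, 0)

	// Start below the first transaction number and read until nil is returned.
	since := -1

	for {
		more, sidetreeTxn := writer.Read(since)
		if sidetreeTxn == nil {
			break
		}

		fmt.Println(sidetreeTxn.AnchorString, more) // anchor1 true, then anchor2 false

		since = int(sidetreeTxn.TransactionNumber)
	}
}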
+ +SPDX-License-Identifier: Apache-2.0 +*/ + +package mocks + +import ( + "bytes" + "fmt" + "sync" + + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/encoder" + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/hashing" +) + +const sha2_256 = 18 + +// MockCasClient mocks CAS for testing purposes. +type MockCasClient struct { + mutex sync.RWMutex + m map[string][]byte + err error +} + +// NewMockCasClient creates mock client. +func NewMockCasClient(err error) *MockCasClient { + return &MockCasClient{m: make(map[string][]byte), err: err} +} + +// Write writes the given content to CAS. +// returns the SHA256 hash in base64url encoding which represents the address of the content. +func (m *MockCasClient) Write(content []byte) (string, error) { + err := m.GetError() + if err != nil { + return "", err + } + + hash, err := hashing.ComputeMultihash(sha2_256, content) + if err != nil { + return "", err + } + + key := encoder.EncodeToString(hash) + + m.mutex.Lock() + defer m.mutex.Unlock() + + m.m[key] = content + + return key, nil +} + +// Read reads the content of the given address in CAS. +// returns the content of the given address. +func (m *MockCasClient) Read(address string) ([]byte, error) { + err := m.GetError() + if err != nil { + return nil, err + } + + m.mutex.RLock() + defer m.mutex.RUnlock() + + value, ok := m.m[address] + if !ok { + return nil, fmt.Errorf("not found") + } + + // decode address to verify hashes + decoded, err := encoder.DecodeString(address) + if err != nil { + return nil, err + } + + valueHash, err := hashing.ComputeMultihash(sha2_256, value) + if err != nil { + return nil, err + } + + if !bytes.Equal(valueHash, decoded) { + return nil, fmt.Errorf("hashes don't match") + } + + return value, nil +} + +// SetError injects an error into the mock client. +func (m *MockCasClient) SetError(err error) { + m.mutex.Lock() + defer m.mutex.Unlock() + + m.err = err +} + +// GetError returns the injected error. +func (m *MockCasClient) GetError() error { + m.mutex.RLock() + defer m.mutex.RUnlock() + + return m.err +} diff --git a/method/sidetreelongform/sidetree-core/mocks/dochandler.go b/method/sidetreelongform/sidetree-core/mocks/dochandler.go new file mode 100644 index 0000000..b146774 --- /dev/null +++ b/method/sidetreelongform/sidetree-core/mocks/dochandler.go @@ -0,0 +1,224 @@ +/* +Copyright SecureKey Technologies Inc. All Rights Reserved. + +SPDX-License-Identifier: Apache-2.0 +*/ + +package mocks + +import ( + "encoding/json" + "errors" + "fmt" + "strings" + + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/api/operation" + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/api/protocol" + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/document" + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/docutil" + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/hashing" + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/versions/1_0/doccomposer" + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/versions/1_0/model" +) + +const deleted = "_deleted" + +// NewMockDocumentHandler returns a new mock document handler. +func NewMockDocumentHandler() *MockDocumentHandler { + return &MockDocumentHandler{ + client: NewMockProtocolClient(), + store: make(map[string]document.Document), + } +} + +// MockDocumentHandler mocks the document handler. 
+type MockDocumentHandler struct { + err error + namespace string + client protocol.Client + store map[string]document.Document +} + +// WithNamespace sets the namespace. +func (m *MockDocumentHandler) WithNamespace(ns string) *MockDocumentHandler { + m.namespace = ns + + return m +} + +// WithError injects an error into the mock handler. +func (m *MockDocumentHandler) WithError(err error) *MockDocumentHandler { + m.err = err + + return m +} + +// WithProtocolClient sets the protocol client. +func (m *MockDocumentHandler) WithProtocolClient(client protocol.Client) *MockDocumentHandler { + m.client = client + + return m +} + +// Namespace returns the namespace. +func (m *MockDocumentHandler) Namespace() string { + return m.namespace +} + +// Protocol returns the Protocol. +func (m *MockDocumentHandler) Protocol() protocol.Client { + return m.client +} + +// Operation is used for parsing operation request. +type Operation struct { + // Operation defines operation type + Operation operation.Type `json:"type,omitempty"` + + // SuffixData object + SuffixData *model.SuffixDataModel `json:"suffixData,omitempty"` + + // Delta object + Delta *model.DeltaModel `json:"delta,omitempty"` + + // DidSuffix is the suffix of the DID + DidSuffix string `json:"didSuffix"` +} + +// ProcessOperation mocks process operation. +func (m *MockDocumentHandler) ProcessOperation(operationBuffer []byte, _ uint64) (*document.ResolutionResult, error) { + if m.err != nil { + return nil, m.err + } + + var op Operation + + err := json.Unmarshal(operationBuffer, &op) + if err != nil { + return nil, fmt.Errorf("bad request: %s", err.Error()) + } + + var suffix string + + switch op.Operation { + case operation.TypeCreate: + suffix, err = hashing.CalculateModelMultihash(op.SuffixData, sha2_256) + if err != nil { + return nil, err + } + case operation.TypeUpdate, operation.TypeDeactivate, operation.TypeRecover: + suffix = op.DidSuffix + default: + return nil, fmt.Errorf("bad request: operation type [%s] not supported", op.Operation) + } + + id := m.namespace + docutil.NamespaceDelimiter + suffix + + if op.Operation == operation.TypeDeactivate { + empty := applyID(make(document.Document), id) + empty[deleted] = true + m.store[id] = empty + + return &document.ResolutionResult{ + Document: empty, + }, nil + } + + doc, ok := m.store[id] + if !ok { // create operation + doc = make(document.Document) + } + + doc, err = doccomposer.New().ApplyPatches(doc, op.Delta.Patches) + if err != nil { + return nil, err + } + + doc = applyID(doc, id) + + m.store[id] = doc + + return &document.ResolutionResult{ + Document: doc, + }, nil +} + +// ResolveDocument mocks resolve document. 
+func (m *MockDocumentHandler) ResolveDocument(didOrDocument string, opts ...document.ResolutionOption, +) (*document.ResolutionResult, error) { + if m.err != nil { + return nil, m.err + } + + const badRequest = "bad request" + if !strings.HasPrefix(didOrDocument, m.namespace) { + return nil, fmt.Errorf("%s: must start with supported namespace", badRequest) + } + + pv, err := m.Protocol().Current() + if err != nil { + return nil, err + } + + did, initial, err := pv.OperationParser().ParseDID(m.namespace, didOrDocument) + if err != nil { + return nil, fmt.Errorf("%s: %s", badRequest, err.Error()) + } + + if initial != nil { + return m.resolveWithInitialState(did, initial) + } + + if _, ok := m.store[didOrDocument]; !ok { + return nil, errors.New("not found") + } + + doc := m.store[didOrDocument] + + docMetadata := make(document.Metadata) + if isDeactivated(doc) { + docMetadata[document.DeactivatedProperty] = true + } + + return &document.ResolutionResult{ + Document: doc, + DocumentMetadata: docMetadata, + }, nil +} + +func isDeactivated(doc document.Document) bool { + deactivated, ok := doc[deleted] + if !ok { + return false + } + + return deactivated.(bool) +} + +// helper function to insert ID into document. +func applyID(doc document.Document, id string) document.Document { + // apply id to document + doc["id"] = id + + return doc +} + +func (m *MockDocumentHandler) resolveWithInitialState(did string, initial []byte) (*document.ResolutionResult, error) { + var createReq model.CreateRequest + + err := json.Unmarshal(initial, &createReq) + if err != nil { + return nil, err + } + + doc, err := doccomposer.New().ApplyPatches(make(document.Document), createReq.Delta.Patches) + if err != nil { + return nil, err + } + + doc = applyID(doc, did) + + return &document.ResolutionResult{ + Document: doc, + }, nil +} diff --git a/method/sidetreelongform/sidetree-core/mocks/documentcomposer.gen.go b/method/sidetreelongform/sidetree-core/mocks/documentcomposer.gen.go new file mode 100644 index 0000000..0e1d816 --- /dev/null +++ b/method/sidetreelongform/sidetree-core/mocks/documentcomposer.gen.go @@ -0,0 +1,124 @@ +// Code generated by counterfeiter. DO NOT EDIT. 
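A sketch, not part of the diff, of driving MockDocumentHandler directly with a deactivate operation, which is the one path that bypasses the protocol client. The expected ID assumes docutil.NamespaceDelimiter is ":"; the namespace and suffix are made-up values.

package main

import (
	"fmt"
	"log"

	"github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/mocks"
)

func main() {
	handler := mocks.NewMockDocumentHandler().WithNamespace("did:sidetree")

	// A deactivate request only needs the operation type and the DID suffix;
	// the handler stores an "_deleted" placeholder document under the full ID.
	result, err := handler.ProcessOperation([]byte(`{"type":"deactivate","didSuffix":"abc"}`), 0)
	if err != nil {
		log.Fatal(err)
	}

	fmt.Println(result.Document["id"])       // did:sidetree:abc
	fmt.Println(result.Document["_deleted"]) // true
}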
+package mocks + +import ( + "sync" + + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/api/protocol" + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/document" + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/patch" +) + +type DocumentComposer struct { + ApplyPatchesStub func(document.Document, []patch.Patch) (document.Document, error) + applyPatchesMutex sync.RWMutex + applyPatchesArgsForCall []struct { + arg1 document.Document + arg2 []patch.Patch + } + applyPatchesReturns struct { + result1 document.Document + result2 error + } + applyPatchesReturnsOnCall map[int]struct { + result1 document.Document + result2 error + } + invocations map[string][][]interface{} + invocationsMutex sync.RWMutex +} + +func (fake *DocumentComposer) ApplyPatches(arg1 document.Document, arg2 []patch.Patch) (document.Document, error) { + var arg2Copy []patch.Patch + if arg2 != nil { + arg2Copy = make([]patch.Patch, len(arg2)) + copy(arg2Copy, arg2) + } + fake.applyPatchesMutex.Lock() + ret, specificReturn := fake.applyPatchesReturnsOnCall[len(fake.applyPatchesArgsForCall)] + fake.applyPatchesArgsForCall = append(fake.applyPatchesArgsForCall, struct { + arg1 document.Document + arg2 []patch.Patch + }{arg1, arg2Copy}) + fake.recordInvocation("ApplyPatches", []interface{}{arg1, arg2Copy}) + fake.applyPatchesMutex.Unlock() + if fake.ApplyPatchesStub != nil { + return fake.ApplyPatchesStub(arg1, arg2) + } + if specificReturn { + return ret.result1, ret.result2 + } + fakeReturns := fake.applyPatchesReturns + return fakeReturns.result1, fakeReturns.result2 +} + +func (fake *DocumentComposer) ApplyPatchesCallCount() int { + fake.applyPatchesMutex.RLock() + defer fake.applyPatchesMutex.RUnlock() + return len(fake.applyPatchesArgsForCall) +} + +func (fake *DocumentComposer) ApplyPatchesCalls(stub func(document.Document, []patch.Patch) (document.Document, error)) { + fake.applyPatchesMutex.Lock() + defer fake.applyPatchesMutex.Unlock() + fake.ApplyPatchesStub = stub +} + +func (fake *DocumentComposer) ApplyPatchesArgsForCall(i int) (document.Document, []patch.Patch) { + fake.applyPatchesMutex.RLock() + defer fake.applyPatchesMutex.RUnlock() + argsForCall := fake.applyPatchesArgsForCall[i] + return argsForCall.arg1, argsForCall.arg2 +} + +func (fake *DocumentComposer) ApplyPatchesReturns(result1 document.Document, result2 error) { + fake.applyPatchesMutex.Lock() + defer fake.applyPatchesMutex.Unlock() + fake.ApplyPatchesStub = nil + fake.applyPatchesReturns = struct { + result1 document.Document + result2 error + }{result1, result2} +} + +func (fake *DocumentComposer) ApplyPatchesReturnsOnCall(i int, result1 document.Document, result2 error) { + fake.applyPatchesMutex.Lock() + defer fake.applyPatchesMutex.Unlock() + fake.ApplyPatchesStub = nil + if fake.applyPatchesReturnsOnCall == nil { + fake.applyPatchesReturnsOnCall = make(map[int]struct { + result1 document.Document + result2 error + }) + } + fake.applyPatchesReturnsOnCall[i] = struct { + result1 document.Document + result2 error + }{result1, result2} +} + +func (fake *DocumentComposer) Invocations() map[string][][]interface{} { + fake.invocationsMutex.RLock() + defer fake.invocationsMutex.RUnlock() + fake.applyPatchesMutex.RLock() + defer fake.applyPatchesMutex.RUnlock() + copiedInvocations := map[string][][]interface{}{} + for key, value := range fake.invocations { + copiedInvocations[key] = value + } + return copiedInvocations +} + +func (fake *DocumentComposer) recordInvocation(key string, args 
[]interface{}) { + fake.invocationsMutex.Lock() + defer fake.invocationsMutex.Unlock() + if fake.invocations == nil { + fake.invocations = map[string][][]interface{}{} + } + if fake.invocations[key] == nil { + fake.invocations[key] = [][]interface{}{} + } + fake.invocations[key] = append(fake.invocations[key], args) +} + +var _ protocol.DocumentComposer = new(DocumentComposer) diff --git a/method/sidetreelongform/sidetree-core/mocks/documenttransformer.gen.go b/method/sidetreelongform/sidetree-core/mocks/documenttransformer.gen.go new file mode 100644 index 0000000..ba2e219 --- /dev/null +++ b/method/sidetreelongform/sidetree-core/mocks/documenttransformer.gen.go @@ -0,0 +1,118 @@ +// Code generated by counterfeiter. DO NOT EDIT. +package mocks + +import ( + "sync" + + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/api/protocol" + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/document" +) + +type DocumentTransformer struct { + TransformDocumentStub func(*protocol.ResolutionModel, protocol.TransformationInfo) (*document.ResolutionResult, error) + transformDocumentMutex sync.RWMutex + transformDocumentArgsForCall []struct { + arg1 *protocol.ResolutionModel + arg2 protocol.TransformationInfo + } + transformDocumentReturns struct { + result1 *document.ResolutionResult + result2 error + } + transformDocumentReturnsOnCall map[int]struct { + result1 *document.ResolutionResult + result2 error + } + invocations map[string][][]interface{} + invocationsMutex sync.RWMutex +} + +func (fake *DocumentTransformer) TransformDocument(arg1 *protocol.ResolutionModel, arg2 protocol.TransformationInfo) (*document.ResolutionResult, error) { + fake.transformDocumentMutex.Lock() + ret, specificReturn := fake.transformDocumentReturnsOnCall[len(fake.transformDocumentArgsForCall)] + fake.transformDocumentArgsForCall = append(fake.transformDocumentArgsForCall, struct { + arg1 *protocol.ResolutionModel + arg2 protocol.TransformationInfo + }{arg1, arg2}) + fake.recordInvocation("TransformDocument", []interface{}{arg1, arg2}) + fake.transformDocumentMutex.Unlock() + if fake.TransformDocumentStub != nil { + return fake.TransformDocumentStub(arg1, arg2) + } + if specificReturn { + return ret.result1, ret.result2 + } + fakeReturns := fake.transformDocumentReturns + return fakeReturns.result1, fakeReturns.result2 +} + +func (fake *DocumentTransformer) TransformDocumentCallCount() int { + fake.transformDocumentMutex.RLock() + defer fake.transformDocumentMutex.RUnlock() + return len(fake.transformDocumentArgsForCall) +} + +func (fake *DocumentTransformer) TransformDocumentCalls(stub func(*protocol.ResolutionModel, protocol.TransformationInfo) (*document.ResolutionResult, error)) { + fake.transformDocumentMutex.Lock() + defer fake.transformDocumentMutex.Unlock() + fake.TransformDocumentStub = stub +} + +func (fake *DocumentTransformer) TransformDocumentArgsForCall(i int) (*protocol.ResolutionModel, protocol.TransformationInfo) { + fake.transformDocumentMutex.RLock() + defer fake.transformDocumentMutex.RUnlock() + argsForCall := fake.transformDocumentArgsForCall[i] + return argsForCall.arg1, argsForCall.arg2 +} + +func (fake *DocumentTransformer) TransformDocumentReturns(result1 *document.ResolutionResult, result2 error) { + fake.transformDocumentMutex.Lock() + defer fake.transformDocumentMutex.Unlock() + fake.TransformDocumentStub = nil + fake.transformDocumentReturns = struct { + result1 *document.ResolutionResult + result2 error + }{result1, result2} +} + +func (fake 
*DocumentTransformer) TransformDocumentReturnsOnCall(i int, result1 *document.ResolutionResult, result2 error) { + fake.transformDocumentMutex.Lock() + defer fake.transformDocumentMutex.Unlock() + fake.TransformDocumentStub = nil + if fake.transformDocumentReturnsOnCall == nil { + fake.transformDocumentReturnsOnCall = make(map[int]struct { + result1 *document.ResolutionResult + result2 error + }) + } + fake.transformDocumentReturnsOnCall[i] = struct { + result1 *document.ResolutionResult + result2 error + }{result1, result2} +} + +func (fake *DocumentTransformer) Invocations() map[string][][]interface{} { + fake.invocationsMutex.RLock() + defer fake.invocationsMutex.RUnlock() + fake.transformDocumentMutex.RLock() + defer fake.transformDocumentMutex.RUnlock() + copiedInvocations := map[string][][]interface{}{} + for key, value := range fake.invocations { + copiedInvocations[key] = value + } + return copiedInvocations +} + +func (fake *DocumentTransformer) recordInvocation(key string, args []interface{}) { + fake.invocationsMutex.Lock() + defer fake.invocationsMutex.Unlock() + if fake.invocations == nil { + fake.invocations = map[string][][]interface{}{} + } + if fake.invocations[key] == nil { + fake.invocations[key] = [][]interface{}{} + } + fake.invocations[key] = append(fake.invocations[key], args) +} + +var _ protocol.DocumentTransformer = new(DocumentTransformer) diff --git a/method/sidetreelongform/sidetree-core/mocks/documentvalidator.gen.go b/method/sidetreelongform/sidetree-core/mocks/documentvalidator.gen.go new file mode 100644 index 0000000..37a8f26 --- /dev/null +++ b/method/sidetreelongform/sidetree-core/mocks/documentvalidator.gen.go @@ -0,0 +1,193 @@ +// Code generated by counterfeiter. DO NOT EDIT. +package mocks + +import ( + "sync" + + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/api/protocol" +) + +type DocumentValidator struct { + IsValidOriginalDocumentStub func([]byte) error + isValidOriginalDocumentMutex sync.RWMutex + isValidOriginalDocumentArgsForCall []struct { + arg1 []byte + } + isValidOriginalDocumentReturns struct { + result1 error + } + isValidOriginalDocumentReturnsOnCall map[int]struct { + result1 error + } + IsValidPayloadStub func([]byte) error + isValidPayloadMutex sync.RWMutex + isValidPayloadArgsForCall []struct { + arg1 []byte + } + isValidPayloadReturns struct { + result1 error + } + isValidPayloadReturnsOnCall map[int]struct { + result1 error + } + invocations map[string][][]interface{} + invocationsMutex sync.RWMutex +} + +func (fake *DocumentValidator) IsValidOriginalDocument(arg1 []byte) error { + var arg1Copy []byte + if arg1 != nil { + arg1Copy = make([]byte, len(arg1)) + copy(arg1Copy, arg1) + } + fake.isValidOriginalDocumentMutex.Lock() + ret, specificReturn := fake.isValidOriginalDocumentReturnsOnCall[len(fake.isValidOriginalDocumentArgsForCall)] + fake.isValidOriginalDocumentArgsForCall = append(fake.isValidOriginalDocumentArgsForCall, struct { + arg1 []byte + }{arg1Copy}) + fake.recordInvocation("IsValidOriginalDocument", []interface{}{arg1Copy}) + fake.isValidOriginalDocumentMutex.Unlock() + if fake.IsValidOriginalDocumentStub != nil { + return fake.IsValidOriginalDocumentStub(arg1) + } + if specificReturn { + return ret.result1 + } + fakeReturns := fake.isValidOriginalDocumentReturns + return fakeReturns.result1 +} + +func (fake *DocumentValidator) IsValidOriginalDocumentCallCount() int { + fake.isValidOriginalDocumentMutex.RLock() + defer fake.isValidOriginalDocumentMutex.RUnlock() + return 
len(fake.isValidOriginalDocumentArgsForCall) +} + +func (fake *DocumentValidator) IsValidOriginalDocumentCalls(stub func([]byte) error) { + fake.isValidOriginalDocumentMutex.Lock() + defer fake.isValidOriginalDocumentMutex.Unlock() + fake.IsValidOriginalDocumentStub = stub +} + +func (fake *DocumentValidator) IsValidOriginalDocumentArgsForCall(i int) []byte { + fake.isValidOriginalDocumentMutex.RLock() + defer fake.isValidOriginalDocumentMutex.RUnlock() + argsForCall := fake.isValidOriginalDocumentArgsForCall[i] + return argsForCall.arg1 +} + +func (fake *DocumentValidator) IsValidOriginalDocumentReturns(result1 error) { + fake.isValidOriginalDocumentMutex.Lock() + defer fake.isValidOriginalDocumentMutex.Unlock() + fake.IsValidOriginalDocumentStub = nil + fake.isValidOriginalDocumentReturns = struct { + result1 error + }{result1} +} + +func (fake *DocumentValidator) IsValidOriginalDocumentReturnsOnCall(i int, result1 error) { + fake.isValidOriginalDocumentMutex.Lock() + defer fake.isValidOriginalDocumentMutex.Unlock() + fake.IsValidOriginalDocumentStub = nil + if fake.isValidOriginalDocumentReturnsOnCall == nil { + fake.isValidOriginalDocumentReturnsOnCall = make(map[int]struct { + result1 error + }) + } + fake.isValidOriginalDocumentReturnsOnCall[i] = struct { + result1 error + }{result1} +} + +func (fake *DocumentValidator) IsValidPayload(arg1 []byte) error { + var arg1Copy []byte + if arg1 != nil { + arg1Copy = make([]byte, len(arg1)) + copy(arg1Copy, arg1) + } + fake.isValidPayloadMutex.Lock() + ret, specificReturn := fake.isValidPayloadReturnsOnCall[len(fake.isValidPayloadArgsForCall)] + fake.isValidPayloadArgsForCall = append(fake.isValidPayloadArgsForCall, struct { + arg1 []byte + }{arg1Copy}) + fake.recordInvocation("IsValidPayload", []interface{}{arg1Copy}) + fake.isValidPayloadMutex.Unlock() + if fake.IsValidPayloadStub != nil { + return fake.IsValidPayloadStub(arg1) + } + if specificReturn { + return ret.result1 + } + fakeReturns := fake.isValidPayloadReturns + return fakeReturns.result1 +} + +func (fake *DocumentValidator) IsValidPayloadCallCount() int { + fake.isValidPayloadMutex.RLock() + defer fake.isValidPayloadMutex.RUnlock() + return len(fake.isValidPayloadArgsForCall) +} + +func (fake *DocumentValidator) IsValidPayloadCalls(stub func([]byte) error) { + fake.isValidPayloadMutex.Lock() + defer fake.isValidPayloadMutex.Unlock() + fake.IsValidPayloadStub = stub +} + +func (fake *DocumentValidator) IsValidPayloadArgsForCall(i int) []byte { + fake.isValidPayloadMutex.RLock() + defer fake.isValidPayloadMutex.RUnlock() + argsForCall := fake.isValidPayloadArgsForCall[i] + return argsForCall.arg1 +} + +func (fake *DocumentValidator) IsValidPayloadReturns(result1 error) { + fake.isValidPayloadMutex.Lock() + defer fake.isValidPayloadMutex.Unlock() + fake.IsValidPayloadStub = nil + fake.isValidPayloadReturns = struct { + result1 error + }{result1} +} + +func (fake *DocumentValidator) IsValidPayloadReturnsOnCall(i int, result1 error) { + fake.isValidPayloadMutex.Lock() + defer fake.isValidPayloadMutex.Unlock() + fake.IsValidPayloadStub = nil + if fake.isValidPayloadReturnsOnCall == nil { + fake.isValidPayloadReturnsOnCall = make(map[int]struct { + result1 error + }) + } + fake.isValidPayloadReturnsOnCall[i] = struct { + result1 error + }{result1} +} + +func (fake *DocumentValidator) Invocations() map[string][][]interface{} { + fake.invocationsMutex.RLock() + defer fake.invocationsMutex.RUnlock() + fake.isValidOriginalDocumentMutex.RLock() + defer 
fake.isValidOriginalDocumentMutex.RUnlock() + fake.isValidPayloadMutex.RLock() + defer fake.isValidPayloadMutex.RUnlock() + copiedInvocations := map[string][][]interface{}{} + for key, value := range fake.invocations { + copiedInvocations[key] = value + } + return copiedInvocations +} + +func (fake *DocumentValidator) recordInvocation(key string, args []interface{}) { + fake.invocationsMutex.Lock() + defer fake.invocationsMutex.Unlock() + if fake.invocations == nil { + fake.invocations = map[string][][]interface{}{} + } + if fake.invocations[key] == nil { + fake.invocations[key] = [][]interface{}{} + } + fake.invocations[key] = append(fake.invocations[key], args) +} + +var _ protocol.DocumentValidator = new(DocumentValidator) diff --git a/method/sidetreelongform/sidetree-core/mocks/metricsprovider.go b/method/sidetreelongform/sidetree-core/mocks/metricsprovider.go new file mode 100644 index 0000000..7909dbd --- /dev/null +++ b/method/sidetreelongform/sidetree-core/mocks/metricsprovider.go @@ -0,0 +1,56 @@ +/* +Copyright SecureKey Technologies Inc. All Rights Reserved. + +SPDX-License-Identifier: Apache-2.0 +*/ + +package mocks + +import "time" + +// MetricsProvider implements a mock metrics provider. +type MetricsProvider struct{} + +// ProcessOperation records the overall time to process operation. +func (m *MetricsProvider) ProcessOperation(value time.Duration) { +} + +// GetProtocolVersionTime records the time to get protocol version. +func (m *MetricsProvider) GetProtocolVersionTime(value time.Duration) { +} + +// ParseOperationTime records the time to parse operations. +func (m *MetricsProvider) ParseOperationTime(value time.Duration) { +} + +// ValidateOperationTime records the time to validate operation. +func (m *MetricsProvider) ValidateOperationTime(value time.Duration) { +} + +// DecorateOperationTime records the time to decorate operation. +func (m *MetricsProvider) DecorateOperationTime(value time.Duration) { +} + +// AddUnpublishedOperationTime records the time to add unpublished operation. +func (m *MetricsProvider) AddUnpublishedOperationTime(value time.Duration) { +} + +// AddOperationToBatchTime records the time to add operation to batch. +func (m *MetricsProvider) AddOperationToBatchTime(value time.Duration) { +} + +// GetCreateOperationResultTime records the time to create operation result response. +func (m *MetricsProvider) GetCreateOperationResultTime(value time.Duration) { +} + +// HTTPCreateUpdateTime records the time rest call for create or update. +func (m *MetricsProvider) HTTPCreateUpdateTime(value time.Duration) { +} + +// HTTPResolveTime records the time rest call for resolve. +func (m *MetricsProvider) HTTPResolveTime(value time.Duration) { +} + +// CASWriteSize records the size of the data written to CAS. +func (m *MetricsProvider) CASWriteSize(dataType string, size int) { +} diff --git a/method/sidetreelongform/sidetree-core/mocks/operationapplier.gen.go b/method/sidetreelongform/sidetree-core/mocks/operationapplier.gen.go new file mode 100644 index 0000000..cee64c3 --- /dev/null +++ b/method/sidetreelongform/sidetree-core/mocks/operationapplier.gen.go @@ -0,0 +1,118 @@ +// Code generated by counterfeiter. DO NOT EDIT. 
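All of the counterfeiter-generated fakes in this changeset follow the same pattern: set canned returns, exercise the code under test, then inspect the recorded calls. A sketch, not part of the diff, using DocumentValidator; the error text is arbitrary.

package main

import (
	"errors"
	"fmt"

	"github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/mocks"
)

func main() {
	validator := &mocks.DocumentValidator{}

	// Canned return for every call; ReturnsOnCall would pin a specific call index.
	validator.IsValidOriginalDocumentReturns(errors.New("missing id"))

	err := validator.IsValidOriginalDocument([]byte(`{}`))

	fmt.Println(err)                                             // missing id
	fmt.Println(validator.IsValidOriginalDocumentCallCount())    // 1
	fmt.Println(validator.IsValidOriginalDocumentArgsForCall(0)) // the raw bytes of the first call
}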
+package mocks + +import ( + "sync" + + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/api/operation" + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/api/protocol" +) + +type OperationApplier struct { + ApplyStub func(*operation.AnchoredOperation, *protocol.ResolutionModel) (*protocol.ResolutionModel, error) + applyMutex sync.RWMutex + applyArgsForCall []struct { + arg1 *operation.AnchoredOperation + arg2 *protocol.ResolutionModel + } + applyReturns struct { + result1 *protocol.ResolutionModel + result2 error + } + applyReturnsOnCall map[int]struct { + result1 *protocol.ResolutionModel + result2 error + } + invocations map[string][][]interface{} + invocationsMutex sync.RWMutex +} + +func (fake *OperationApplier) Apply(arg1 *operation.AnchoredOperation, arg2 *protocol.ResolutionModel) (*protocol.ResolutionModel, error) { + fake.applyMutex.Lock() + ret, specificReturn := fake.applyReturnsOnCall[len(fake.applyArgsForCall)] + fake.applyArgsForCall = append(fake.applyArgsForCall, struct { + arg1 *operation.AnchoredOperation + arg2 *protocol.ResolutionModel + }{arg1, arg2}) + fake.recordInvocation("Apply", []interface{}{arg1, arg2}) + fake.applyMutex.Unlock() + if fake.ApplyStub != nil { + return fake.ApplyStub(arg1, arg2) + } + if specificReturn { + return ret.result1, ret.result2 + } + fakeReturns := fake.applyReturns + return fakeReturns.result1, fakeReturns.result2 +} + +func (fake *OperationApplier) ApplyCallCount() int { + fake.applyMutex.RLock() + defer fake.applyMutex.RUnlock() + return len(fake.applyArgsForCall) +} + +func (fake *OperationApplier) ApplyCalls(stub func(*operation.AnchoredOperation, *protocol.ResolutionModel) (*protocol.ResolutionModel, error)) { + fake.applyMutex.Lock() + defer fake.applyMutex.Unlock() + fake.ApplyStub = stub +} + +func (fake *OperationApplier) ApplyArgsForCall(i int) (*operation.AnchoredOperation, *protocol.ResolutionModel) { + fake.applyMutex.RLock() + defer fake.applyMutex.RUnlock() + argsForCall := fake.applyArgsForCall[i] + return argsForCall.arg1, argsForCall.arg2 +} + +func (fake *OperationApplier) ApplyReturns(result1 *protocol.ResolutionModel, result2 error) { + fake.applyMutex.Lock() + defer fake.applyMutex.Unlock() + fake.ApplyStub = nil + fake.applyReturns = struct { + result1 *protocol.ResolutionModel + result2 error + }{result1, result2} +} + +func (fake *OperationApplier) ApplyReturnsOnCall(i int, result1 *protocol.ResolutionModel, result2 error) { + fake.applyMutex.Lock() + defer fake.applyMutex.Unlock() + fake.ApplyStub = nil + if fake.applyReturnsOnCall == nil { + fake.applyReturnsOnCall = make(map[int]struct { + result1 *protocol.ResolutionModel + result2 error + }) + } + fake.applyReturnsOnCall[i] = struct { + result1 *protocol.ResolutionModel + result2 error + }{result1, result2} +} + +func (fake *OperationApplier) Invocations() map[string][][]interface{} { + fake.invocationsMutex.RLock() + defer fake.invocationsMutex.RUnlock() + fake.applyMutex.RLock() + defer fake.applyMutex.RUnlock() + copiedInvocations := map[string][][]interface{}{} + for key, value := range fake.invocations { + copiedInvocations[key] = value + } + return copiedInvocations +} + +func (fake *OperationApplier) recordInvocation(key string, args []interface{}) { + fake.invocationsMutex.Lock() + defer fake.invocationsMutex.Unlock() + if fake.invocations == nil { + fake.invocations = map[string][][]interface{}{} + } + if fake.invocations[key] == nil { + fake.invocations[key] = [][]interface{}{} + } + fake.invocations[key] = 
append(fake.invocations[key], args) +} + +var _ protocol.OperationApplier = new(OperationApplier) diff --git a/method/sidetreelongform/sidetree-core/mocks/operationhandler.gen.go b/method/sidetreelongform/sidetree-core/mocks/operationhandler.gen.go new file mode 100644 index 0000000..b1ee9c7 --- /dev/null +++ b/method/sidetreelongform/sidetree-core/mocks/operationhandler.gen.go @@ -0,0 +1,121 @@ +// Code generated by counterfeiter. DO NOT EDIT. +package mocks + +import ( + "sync" + + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/api/operation" + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/api/protocol" +) + +type OperationHandler struct { + PrepareTxnFilesStub func([]*operation.QueuedOperation) (*protocol.AnchoringInfo, error) + prepareTxnFilesMutex sync.RWMutex + prepareTxnFilesArgsForCall []struct { + arg1 []*operation.QueuedOperation + } + prepareTxnFilesReturns struct { + result1 *protocol.AnchoringInfo + result2 error + } + prepareTxnFilesReturnsOnCall map[int]struct { + result1 *protocol.AnchoringInfo + result2 error + } + invocations map[string][][]interface{} + invocationsMutex sync.RWMutex +} + +func (fake *OperationHandler) PrepareTxnFiles(arg1 []*operation.QueuedOperation) (*protocol.AnchoringInfo, error) { + var arg1Copy []*operation.QueuedOperation + if arg1 != nil { + arg1Copy = make([]*operation.QueuedOperation, len(arg1)) + copy(arg1Copy, arg1) + } + fake.prepareTxnFilesMutex.Lock() + ret, specificReturn := fake.prepareTxnFilesReturnsOnCall[len(fake.prepareTxnFilesArgsForCall)] + fake.prepareTxnFilesArgsForCall = append(fake.prepareTxnFilesArgsForCall, struct { + arg1 []*operation.QueuedOperation + }{arg1Copy}) + fake.recordInvocation("PrepareTxnFiles", []interface{}{arg1Copy}) + fake.prepareTxnFilesMutex.Unlock() + if fake.PrepareTxnFilesStub != nil { + return fake.PrepareTxnFilesStub(arg1) + } + if specificReturn { + return ret.result1, ret.result2 + } + fakeReturns := fake.prepareTxnFilesReturns + return fakeReturns.result1, fakeReturns.result2 +} + +func (fake *OperationHandler) PrepareTxnFilesCallCount() int { + fake.prepareTxnFilesMutex.RLock() + defer fake.prepareTxnFilesMutex.RUnlock() + return len(fake.prepareTxnFilesArgsForCall) +} + +func (fake *OperationHandler) PrepareTxnFilesCalls(stub func([]*operation.QueuedOperation) (*protocol.AnchoringInfo, error)) { + fake.prepareTxnFilesMutex.Lock() + defer fake.prepareTxnFilesMutex.Unlock() + fake.PrepareTxnFilesStub = stub +} + +func (fake *OperationHandler) PrepareTxnFilesArgsForCall(i int) []*operation.QueuedOperation { + fake.prepareTxnFilesMutex.RLock() + defer fake.prepareTxnFilesMutex.RUnlock() + argsForCall := fake.prepareTxnFilesArgsForCall[i] + return argsForCall.arg1 +} + +func (fake *OperationHandler) PrepareTxnFilesReturns(result1 *protocol.AnchoringInfo, result2 error) { + fake.prepareTxnFilesMutex.Lock() + defer fake.prepareTxnFilesMutex.Unlock() + fake.PrepareTxnFilesStub = nil + fake.prepareTxnFilesReturns = struct { + result1 *protocol.AnchoringInfo + result2 error + }{result1, result2} +} + +func (fake *OperationHandler) PrepareTxnFilesReturnsOnCall(i int, result1 *protocol.AnchoringInfo, result2 error) { + fake.prepareTxnFilesMutex.Lock() + defer fake.prepareTxnFilesMutex.Unlock() + fake.PrepareTxnFilesStub = nil + if fake.prepareTxnFilesReturnsOnCall == nil { + fake.prepareTxnFilesReturnsOnCall = make(map[int]struct { + result1 *protocol.AnchoringInfo + result2 error + }) + } + fake.prepareTxnFilesReturnsOnCall[i] = struct { + result1 
*protocol.AnchoringInfo + result2 error + }{result1, result2} +} + +func (fake *OperationHandler) Invocations() map[string][][]interface{} { + fake.invocationsMutex.RLock() + defer fake.invocationsMutex.RUnlock() + fake.prepareTxnFilesMutex.RLock() + defer fake.prepareTxnFilesMutex.RUnlock() + copiedInvocations := map[string][][]interface{}{} + for key, value := range fake.invocations { + copiedInvocations[key] = value + } + return copiedInvocations +} + +func (fake *OperationHandler) recordInvocation(key string, args []interface{}) { + fake.invocationsMutex.Lock() + defer fake.invocationsMutex.Unlock() + if fake.invocations == nil { + fake.invocations = map[string][][]interface{}{} + } + if fake.invocations[key] == nil { + fake.invocations[key] = [][]interface{}{} + } + fake.invocations[key] = append(fake.invocations[key], args) +} + +var _ protocol.OperationHandler = new(OperationHandler) diff --git a/method/sidetreelongform/sidetree-core/mocks/operationparser.gen.go b/method/sidetreelongform/sidetree-core/mocks/operationparser.gen.go new file mode 100644 index 0000000..acb5160 --- /dev/null +++ b/method/sidetreelongform/sidetree-core/mocks/operationparser.gen.go @@ -0,0 +1,374 @@ +// Code generated by counterfeiter. DO NOT EDIT. +package mocks + +import ( + "sync" + + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/api/operation" + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/api/protocol" +) + +type OperationParser struct { + GetCommitmentStub func([]byte) (string, error) + getCommitmentMutex sync.RWMutex + getCommitmentArgsForCall []struct { + arg1 []byte + } + getCommitmentReturns struct { + result1 string + result2 error + } + getCommitmentReturnsOnCall map[int]struct { + result1 string + result2 error + } + GetRevealValueStub func([]byte) (string, error) + getRevealValueMutex sync.RWMutex + getRevealValueArgsForCall []struct { + arg1 []byte + } + getRevealValueReturns struct { + result1 string + result2 error + } + getRevealValueReturnsOnCall map[int]struct { + result1 string + result2 error + } + ParseStub func(string, []byte) (*operation.Operation, error) + parseMutex sync.RWMutex + parseArgsForCall []struct { + arg1 string + arg2 []byte + } + parseReturns struct { + result1 *operation.Operation + result2 error + } + parseReturnsOnCall map[int]struct { + result1 *operation.Operation + result2 error + } + ParseDIDStub func(string, string) (string, []byte, error) + parseDIDMutex sync.RWMutex + parseDIDArgsForCall []struct { + arg1 string + arg2 string + } + parseDIDReturns struct { + result1 string + result2 []byte + result3 error + } + parseDIDReturnsOnCall map[int]struct { + result1 string + result2 []byte + result3 error + } + invocations map[string][][]interface{} + invocationsMutex sync.RWMutex +} + +func (fake *OperationParser) GetCommitment(arg1 []byte) (string, error) { + var arg1Copy []byte + if arg1 != nil { + arg1Copy = make([]byte, len(arg1)) + copy(arg1Copy, arg1) + } + fake.getCommitmentMutex.Lock() + ret, specificReturn := fake.getCommitmentReturnsOnCall[len(fake.getCommitmentArgsForCall)] + fake.getCommitmentArgsForCall = append(fake.getCommitmentArgsForCall, struct { + arg1 []byte + }{arg1Copy}) + fake.recordInvocation("GetCommitment", []interface{}{arg1Copy}) + fake.getCommitmentMutex.Unlock() + if fake.GetCommitmentStub != nil { + return fake.GetCommitmentStub(arg1) + } + if specificReturn { + return ret.result1, ret.result2 + } + fakeReturns := fake.getCommitmentReturns + return fakeReturns.result1, fakeReturns.result2 +} 
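A sketch, not part of the diff, of the ReturnsOnCall mechanism on the OperationHandler fake above: call index 0 gets a pinned result, and later calls fall back to the blanket stub. The empty AnchoringInfo is a placeholder value.

package main

import (
	"errors"
	"fmt"

	"github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/api/protocol"
	"github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/mocks"
)

func main() {
	handler := &mocks.OperationHandler{}

	handler.PrepareTxnFilesReturnsOnCall(0, &protocol.AnchoringInfo{}, nil)
	handler.PrepareTxnFilesReturns(nil, errors.New("no more batches"))

	_, err := handler.PrepareTxnFiles(nil)
	fmt.Println(err) // <nil>

	_, err = handler.PrepareTxnFiles(nil)
	fmt.Println(err) // no more batches
}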
+ +func (fake *OperationParser) GetCommitmentCallCount() int { + fake.getCommitmentMutex.RLock() + defer fake.getCommitmentMutex.RUnlock() + return len(fake.getCommitmentArgsForCall) +} + +func (fake *OperationParser) GetCommitmentCalls(stub func([]byte) (string, error)) { + fake.getCommitmentMutex.Lock() + defer fake.getCommitmentMutex.Unlock() + fake.GetCommitmentStub = stub +} + +func (fake *OperationParser) GetCommitmentArgsForCall(i int) []byte { + fake.getCommitmentMutex.RLock() + defer fake.getCommitmentMutex.RUnlock() + argsForCall := fake.getCommitmentArgsForCall[i] + return argsForCall.arg1 +} + +func (fake *OperationParser) GetCommitmentReturns(result1 string, result2 error) { + fake.getCommitmentMutex.Lock() + defer fake.getCommitmentMutex.Unlock() + fake.GetCommitmentStub = nil + fake.getCommitmentReturns = struct { + result1 string + result2 error + }{result1, result2} +} + +func (fake *OperationParser) GetCommitmentReturnsOnCall(i int, result1 string, result2 error) { + fake.getCommitmentMutex.Lock() + defer fake.getCommitmentMutex.Unlock() + fake.GetCommitmentStub = nil + if fake.getCommitmentReturnsOnCall == nil { + fake.getCommitmentReturnsOnCall = make(map[int]struct { + result1 string + result2 error + }) + } + fake.getCommitmentReturnsOnCall[i] = struct { + result1 string + result2 error + }{result1, result2} +} + +func (fake *OperationParser) GetRevealValue(arg1 []byte) (string, error) { + var arg1Copy []byte + if arg1 != nil { + arg1Copy = make([]byte, len(arg1)) + copy(arg1Copy, arg1) + } + fake.getRevealValueMutex.Lock() + ret, specificReturn := fake.getRevealValueReturnsOnCall[len(fake.getRevealValueArgsForCall)] + fake.getRevealValueArgsForCall = append(fake.getRevealValueArgsForCall, struct { + arg1 []byte + }{arg1Copy}) + fake.recordInvocation("GetRevealValue", []interface{}{arg1Copy}) + fake.getRevealValueMutex.Unlock() + if fake.GetRevealValueStub != nil { + return fake.GetRevealValueStub(arg1) + } + if specificReturn { + return ret.result1, ret.result2 + } + fakeReturns := fake.getRevealValueReturns + return fakeReturns.result1, fakeReturns.result2 +} + +func (fake *OperationParser) GetRevealValueCallCount() int { + fake.getRevealValueMutex.RLock() + defer fake.getRevealValueMutex.RUnlock() + return len(fake.getRevealValueArgsForCall) +} + +func (fake *OperationParser) GetRevealValueCalls(stub func([]byte) (string, error)) { + fake.getRevealValueMutex.Lock() + defer fake.getRevealValueMutex.Unlock() + fake.GetRevealValueStub = stub +} + +func (fake *OperationParser) GetRevealValueArgsForCall(i int) []byte { + fake.getRevealValueMutex.RLock() + defer fake.getRevealValueMutex.RUnlock() + argsForCall := fake.getRevealValueArgsForCall[i] + return argsForCall.arg1 +} + +func (fake *OperationParser) GetRevealValueReturns(result1 string, result2 error) { + fake.getRevealValueMutex.Lock() + defer fake.getRevealValueMutex.Unlock() + fake.GetRevealValueStub = nil + fake.getRevealValueReturns = struct { + result1 string + result2 error + }{result1, result2} +} + +func (fake *OperationParser) GetRevealValueReturnsOnCall(i int, result1 string, result2 error) { + fake.getRevealValueMutex.Lock() + defer fake.getRevealValueMutex.Unlock() + fake.GetRevealValueStub = nil + if fake.getRevealValueReturnsOnCall == nil { + fake.getRevealValueReturnsOnCall = make(map[int]struct { + result1 string + result2 error + }) + } + fake.getRevealValueReturnsOnCall[i] = struct { + result1 string + result2 error + }{result1, result2} +} + +func (fake *OperationParser) Parse(arg1 string, arg2 
[]byte) (*operation.Operation, error) { + var arg2Copy []byte + if arg2 != nil { + arg2Copy = make([]byte, len(arg2)) + copy(arg2Copy, arg2) + } + fake.parseMutex.Lock() + ret, specificReturn := fake.parseReturnsOnCall[len(fake.parseArgsForCall)] + fake.parseArgsForCall = append(fake.parseArgsForCall, struct { + arg1 string + arg2 []byte + }{arg1, arg2Copy}) + fake.recordInvocation("Parse", []interface{}{arg1, arg2Copy}) + fake.parseMutex.Unlock() + if fake.ParseStub != nil { + return fake.ParseStub(arg1, arg2) + } + if specificReturn { + return ret.result1, ret.result2 + } + fakeReturns := fake.parseReturns + return fakeReturns.result1, fakeReturns.result2 +} + +func (fake *OperationParser) ParseCallCount() int { + fake.parseMutex.RLock() + defer fake.parseMutex.RUnlock() + return len(fake.parseArgsForCall) +} + +func (fake *OperationParser) ParseCalls(stub func(string, []byte) (*operation.Operation, error)) { + fake.parseMutex.Lock() + defer fake.parseMutex.Unlock() + fake.ParseStub = stub +} + +func (fake *OperationParser) ParseArgsForCall(i int) (string, []byte) { + fake.parseMutex.RLock() + defer fake.parseMutex.RUnlock() + argsForCall := fake.parseArgsForCall[i] + return argsForCall.arg1, argsForCall.arg2 +} + +func (fake *OperationParser) ParseReturns(result1 *operation.Operation, result2 error) { + fake.parseMutex.Lock() + defer fake.parseMutex.Unlock() + fake.ParseStub = nil + fake.parseReturns = struct { + result1 *operation.Operation + result2 error + }{result1, result2} +} + +func (fake *OperationParser) ParseReturnsOnCall(i int, result1 *operation.Operation, result2 error) { + fake.parseMutex.Lock() + defer fake.parseMutex.Unlock() + fake.ParseStub = nil + if fake.parseReturnsOnCall == nil { + fake.parseReturnsOnCall = make(map[int]struct { + result1 *operation.Operation + result2 error + }) + } + fake.parseReturnsOnCall[i] = struct { + result1 *operation.Operation + result2 error + }{result1, result2} +} + +func (fake *OperationParser) ParseDID(arg1 string, arg2 string) (string, []byte, error) { + fake.parseDIDMutex.Lock() + ret, specificReturn := fake.parseDIDReturnsOnCall[len(fake.parseDIDArgsForCall)] + fake.parseDIDArgsForCall = append(fake.parseDIDArgsForCall, struct { + arg1 string + arg2 string + }{arg1, arg2}) + fake.recordInvocation("ParseDID", []interface{}{arg1, arg2}) + fake.parseDIDMutex.Unlock() + if fake.ParseDIDStub != nil { + return fake.ParseDIDStub(arg1, arg2) + } + if specificReturn { + return ret.result1, ret.result2, ret.result3 + } + fakeReturns := fake.parseDIDReturns + return fakeReturns.result1, fakeReturns.result2, fakeReturns.result3 +} + +func (fake *OperationParser) ParseDIDCallCount() int { + fake.parseDIDMutex.RLock() + defer fake.parseDIDMutex.RUnlock() + return len(fake.parseDIDArgsForCall) +} + +func (fake *OperationParser) ParseDIDCalls(stub func(string, string) (string, []byte, error)) { + fake.parseDIDMutex.Lock() + defer fake.parseDIDMutex.Unlock() + fake.ParseDIDStub = stub +} + +func (fake *OperationParser) ParseDIDArgsForCall(i int) (string, string) { + fake.parseDIDMutex.RLock() + defer fake.parseDIDMutex.RUnlock() + argsForCall := fake.parseDIDArgsForCall[i] + return argsForCall.arg1, argsForCall.arg2 +} + +func (fake *OperationParser) ParseDIDReturns(result1 string, result2 []byte, result3 error) { + fake.parseDIDMutex.Lock() + defer fake.parseDIDMutex.Unlock() + fake.ParseDIDStub = nil + fake.parseDIDReturns = struct { + result1 string + result2 []byte + result3 error + }{result1, result2, result3} +} + +func (fake 
*OperationParser) ParseDIDReturnsOnCall(i int, result1 string, result2 []byte, result3 error) { + fake.parseDIDMutex.Lock() + defer fake.parseDIDMutex.Unlock() + fake.ParseDIDStub = nil + if fake.parseDIDReturnsOnCall == nil { + fake.parseDIDReturnsOnCall = make(map[int]struct { + result1 string + result2 []byte + result3 error + }) + } + fake.parseDIDReturnsOnCall[i] = struct { + result1 string + result2 []byte + result3 error + }{result1, result2, result3} +} + +func (fake *OperationParser) Invocations() map[string][][]interface{} { + fake.invocationsMutex.RLock() + defer fake.invocationsMutex.RUnlock() + fake.getCommitmentMutex.RLock() + defer fake.getCommitmentMutex.RUnlock() + fake.getRevealValueMutex.RLock() + defer fake.getRevealValueMutex.RUnlock() + fake.parseMutex.RLock() + defer fake.parseMutex.RUnlock() + fake.parseDIDMutex.RLock() + defer fake.parseDIDMutex.RUnlock() + copiedInvocations := map[string][][]interface{}{} + for key, value := range fake.invocations { + copiedInvocations[key] = value + } + return copiedInvocations +} + +func (fake *OperationParser) recordInvocation(key string, args []interface{}) { + fake.invocationsMutex.Lock() + defer fake.invocationsMutex.Unlock() + if fake.invocations == nil { + fake.invocations = map[string][][]interface{}{} + } + if fake.invocations[key] == nil { + fake.invocations[key] = [][]interface{}{} + } + fake.invocations[key] = append(fake.invocations[key], args) +} + +var _ protocol.OperationParser = new(OperationParser) diff --git a/method/sidetreelongform/sidetree-core/mocks/operationprovider.gen.go b/method/sidetreelongform/sidetree-core/mocks/operationprovider.gen.go new file mode 100644 index 0000000..39d7bd9 --- /dev/null +++ b/method/sidetreelongform/sidetree-core/mocks/operationprovider.gen.go @@ -0,0 +1,117 @@ +// Code generated by counterfeiter. DO NOT EDIT. 
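A sketch, not part of the diff, of stubbing ParseDID on the OperationParser fake: it hands back the short-form DID plus any initial state embedded in a long-form DID, which is how MockDocumentHandler.ResolveDocument decides whether to resolve from the store or from initial state. Both values here are canned.

package main

import (
	"fmt"

	"github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/mocks"
)

func main() {
	parser := &mocks.OperationParser{}

	parser.ParseDIDReturns("did:sidetree:abc", []byte(`{"suffixData":{},"delta":{}}`), nil)

	did, initialState, err := parser.ParseDID("did:sidetree", "did:sidetree:abc:<initial-state>")
	fmt.Println(did, len(initialState) > 0, err) // did:sidetree:abc true <nil>
}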
+package mocks + +import ( + "sync" + + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/api/operation" + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/api/protocol" + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/api/txn" +) + +type OperationProvider struct { + GetTxnOperationsStub func(*txn.SidetreeTxn) ([]*operation.AnchoredOperation, error) + getTxnOperationsMutex sync.RWMutex + getTxnOperationsArgsForCall []struct { + arg1 *txn.SidetreeTxn + } + getTxnOperationsReturns struct { + result1 []*operation.AnchoredOperation + result2 error + } + getTxnOperationsReturnsOnCall map[int]struct { + result1 []*operation.AnchoredOperation + result2 error + } + invocations map[string][][]interface{} + invocationsMutex sync.RWMutex +} + +func (fake *OperationProvider) GetTxnOperations(arg1 *txn.SidetreeTxn) ([]*operation.AnchoredOperation, error) { + fake.getTxnOperationsMutex.Lock() + ret, specificReturn := fake.getTxnOperationsReturnsOnCall[len(fake.getTxnOperationsArgsForCall)] + fake.getTxnOperationsArgsForCall = append(fake.getTxnOperationsArgsForCall, struct { + arg1 *txn.SidetreeTxn + }{arg1}) + fake.recordInvocation("GetTxnOperations", []interface{}{arg1}) + fake.getTxnOperationsMutex.Unlock() + if fake.GetTxnOperationsStub != nil { + return fake.GetTxnOperationsStub(arg1) + } + if specificReturn { + return ret.result1, ret.result2 + } + fakeReturns := fake.getTxnOperationsReturns + return fakeReturns.result1, fakeReturns.result2 +} + +func (fake *OperationProvider) GetTxnOperationsCallCount() int { + fake.getTxnOperationsMutex.RLock() + defer fake.getTxnOperationsMutex.RUnlock() + return len(fake.getTxnOperationsArgsForCall) +} + +func (fake *OperationProvider) GetTxnOperationsCalls(stub func(*txn.SidetreeTxn) ([]*operation.AnchoredOperation, error)) { + fake.getTxnOperationsMutex.Lock() + defer fake.getTxnOperationsMutex.Unlock() + fake.GetTxnOperationsStub = stub +} + +func (fake *OperationProvider) GetTxnOperationsArgsForCall(i int) *txn.SidetreeTxn { + fake.getTxnOperationsMutex.RLock() + defer fake.getTxnOperationsMutex.RUnlock() + argsForCall := fake.getTxnOperationsArgsForCall[i] + return argsForCall.arg1 +} + +func (fake *OperationProvider) GetTxnOperationsReturns(result1 []*operation.AnchoredOperation, result2 error) { + fake.getTxnOperationsMutex.Lock() + defer fake.getTxnOperationsMutex.Unlock() + fake.GetTxnOperationsStub = nil + fake.getTxnOperationsReturns = struct { + result1 []*operation.AnchoredOperation + result2 error + }{result1, result2} +} + +func (fake *OperationProvider) GetTxnOperationsReturnsOnCall(i int, result1 []*operation.AnchoredOperation, result2 error) { + fake.getTxnOperationsMutex.Lock() + defer fake.getTxnOperationsMutex.Unlock() + fake.GetTxnOperationsStub = nil + if fake.getTxnOperationsReturnsOnCall == nil { + fake.getTxnOperationsReturnsOnCall = make(map[int]struct { + result1 []*operation.AnchoredOperation + result2 error + }) + } + fake.getTxnOperationsReturnsOnCall[i] = struct { + result1 []*operation.AnchoredOperation + result2 error + }{result1, result2} +} + +func (fake *OperationProvider) Invocations() map[string][][]interface{} { + fake.invocationsMutex.RLock() + defer fake.invocationsMutex.RUnlock() + fake.getTxnOperationsMutex.RLock() + defer fake.getTxnOperationsMutex.RUnlock() + copiedInvocations := map[string][][]interface{}{} + for key, value := range fake.invocations { + copiedInvocations[key] = value + } + return copiedInvocations +} + +func (fake *OperationProvider) 
recordInvocation(key string, args []interface{}) { + fake.invocationsMutex.Lock() + defer fake.invocationsMutex.Unlock() + if fake.invocations == nil { + fake.invocations = map[string][][]interface{}{} + } + if fake.invocations[key] == nil { + fake.invocations[key] = [][]interface{}{} + } + fake.invocations[key] = append(fake.invocations[key], args) +} + +var _ protocol.OperationProvider = new(OperationProvider) diff --git a/method/sidetreelongform/sidetree-core/mocks/operationqueue.gen.go b/method/sidetreelongform/sidetree-core/mocks/operationqueue.gen.go new file mode 100644 index 0000000..06779cf --- /dev/null +++ b/method/sidetreelongform/sidetree-core/mocks/operationqueue.gen.go @@ -0,0 +1,349 @@ +// Code generated by counterfeiter. DO NOT EDIT. +package mocks + +import ( + "sync" + + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/api/operation" +) + +type OperationQueue struct { + AddStub func(*operation.QueuedOperation, uint64) (uint, error) + addMutex sync.RWMutex + addArgsForCall []struct { + arg1 *operation.QueuedOperation + arg2 uint64 + } + addReturns struct { + result1 uint + result2 error + } + addReturnsOnCall map[int]struct { + result1 uint + result2 error + } + LenStub func() uint + lenMutex sync.RWMutex + lenArgsForCall []struct { + } + lenReturns struct { + result1 uint + } + lenReturnsOnCall map[int]struct { + result1 uint + } + PeekStub func(uint) (operation.QueuedOperationsAtTime, error) + peekMutex sync.RWMutex + peekArgsForCall []struct { + arg1 uint + } + peekReturns struct { + result1 operation.QueuedOperationsAtTime + result2 error + } + peekReturnsOnCall map[int]struct { + result1 operation.QueuedOperationsAtTime + result2 error + } + RemoveStub func(uint) (operation.QueuedOperationsAtTime, func() uint, func(error), error) + removeMutex sync.RWMutex + removeArgsForCall []struct { + arg1 uint + } + removeReturns struct { + result1 operation.QueuedOperationsAtTime + result2 func() uint + result3 func(error) + result4 error + } + removeReturnsOnCall map[int]struct { + result1 operation.QueuedOperationsAtTime + result2 func() uint + result3 func(error) + result4 error + } + invocations map[string][][]interface{} + invocationsMutex sync.RWMutex +} + +func (fake *OperationQueue) Add(arg1 *operation.QueuedOperation, arg2 uint64) (uint, error) { + fake.addMutex.Lock() + ret, specificReturn := fake.addReturnsOnCall[len(fake.addArgsForCall)] + fake.addArgsForCall = append(fake.addArgsForCall, struct { + arg1 *operation.QueuedOperation + arg2 uint64 + }{arg1, arg2}) + stub := fake.AddStub + fakeReturns := fake.addReturns + fake.recordInvocation("Add", []interface{}{arg1, arg2}) + fake.addMutex.Unlock() + if stub != nil { + return stub(arg1, arg2) + } + if specificReturn { + return ret.result1, ret.result2 + } + return fakeReturns.result1, fakeReturns.result2 +} + +func (fake *OperationQueue) AddCallCount() int { + fake.addMutex.RLock() + defer fake.addMutex.RUnlock() + return len(fake.addArgsForCall) +} + +func (fake *OperationQueue) AddCalls(stub func(*operation.QueuedOperation, uint64) (uint, error)) { + fake.addMutex.Lock() + defer fake.addMutex.Unlock() + fake.AddStub = stub +} + +func (fake *OperationQueue) AddArgsForCall(i int) (*operation.QueuedOperation, uint64) { + fake.addMutex.RLock() + defer fake.addMutex.RUnlock() + argsForCall := fake.addArgsForCall[i] + return argsForCall.arg1, argsForCall.arg2 +} + +func (fake *OperationQueue) AddReturns(result1 uint, result2 error) { + fake.addMutex.Lock() + defer fake.addMutex.Unlock() + fake.AddStub = 
nil + fake.addReturns = struct { + result1 uint + result2 error + }{result1, result2} +} + +func (fake *OperationQueue) AddReturnsOnCall(i int, result1 uint, result2 error) { + fake.addMutex.Lock() + defer fake.addMutex.Unlock() + fake.AddStub = nil + if fake.addReturnsOnCall == nil { + fake.addReturnsOnCall = make(map[int]struct { + result1 uint + result2 error + }) + } + fake.addReturnsOnCall[i] = struct { + result1 uint + result2 error + }{result1, result2} +} + +func (fake *OperationQueue) Len() uint { + fake.lenMutex.Lock() + ret, specificReturn := fake.lenReturnsOnCall[len(fake.lenArgsForCall)] + fake.lenArgsForCall = append(fake.lenArgsForCall, struct { + }{}) + stub := fake.LenStub + fakeReturns := fake.lenReturns + fake.recordInvocation("Len", []interface{}{}) + fake.lenMutex.Unlock() + if stub != nil { + return stub() + } + if specificReturn { + return ret.result1 + } + return fakeReturns.result1 +} + +func (fake *OperationQueue) LenCallCount() int { + fake.lenMutex.RLock() + defer fake.lenMutex.RUnlock() + return len(fake.lenArgsForCall) +} + +func (fake *OperationQueue) LenCalls(stub func() uint) { + fake.lenMutex.Lock() + defer fake.lenMutex.Unlock() + fake.LenStub = stub +} + +func (fake *OperationQueue) LenReturns(result1 uint) { + fake.lenMutex.Lock() + defer fake.lenMutex.Unlock() + fake.LenStub = nil + fake.lenReturns = struct { + result1 uint + }{result1} +} + +func (fake *OperationQueue) LenReturnsOnCall(i int, result1 uint) { + fake.lenMutex.Lock() + defer fake.lenMutex.Unlock() + fake.LenStub = nil + if fake.lenReturnsOnCall == nil { + fake.lenReturnsOnCall = make(map[int]struct { + result1 uint + }) + } + fake.lenReturnsOnCall[i] = struct { + result1 uint + }{result1} +} + +func (fake *OperationQueue) Peek(arg1 uint) (operation.QueuedOperationsAtTime, error) { + fake.peekMutex.Lock() + ret, specificReturn := fake.peekReturnsOnCall[len(fake.peekArgsForCall)] + fake.peekArgsForCall = append(fake.peekArgsForCall, struct { + arg1 uint + }{arg1}) + stub := fake.PeekStub + fakeReturns := fake.peekReturns + fake.recordInvocation("Peek", []interface{}{arg1}) + fake.peekMutex.Unlock() + if stub != nil { + return stub(arg1) + } + if specificReturn { + return ret.result1, ret.result2 + } + return fakeReturns.result1, fakeReturns.result2 +} + +func (fake *OperationQueue) PeekCallCount() int { + fake.peekMutex.RLock() + defer fake.peekMutex.RUnlock() + return len(fake.peekArgsForCall) +} + +func (fake *OperationQueue) PeekCalls(stub func(uint) (operation.QueuedOperationsAtTime, error)) { + fake.peekMutex.Lock() + defer fake.peekMutex.Unlock() + fake.PeekStub = stub +} + +func (fake *OperationQueue) PeekArgsForCall(i int) uint { + fake.peekMutex.RLock() + defer fake.peekMutex.RUnlock() + argsForCall := fake.peekArgsForCall[i] + return argsForCall.arg1 +} + +func (fake *OperationQueue) PeekReturns(result1 operation.QueuedOperationsAtTime, result2 error) { + fake.peekMutex.Lock() + defer fake.peekMutex.Unlock() + fake.PeekStub = nil + fake.peekReturns = struct { + result1 operation.QueuedOperationsAtTime + result2 error + }{result1, result2} +} + +func (fake *OperationQueue) PeekReturnsOnCall(i int, result1 operation.QueuedOperationsAtTime, result2 error) { + fake.peekMutex.Lock() + defer fake.peekMutex.Unlock() + fake.PeekStub = nil + if fake.peekReturnsOnCall == nil { + fake.peekReturnsOnCall = make(map[int]struct { + result1 operation.QueuedOperationsAtTime + result2 error + }) + } + fake.peekReturnsOnCall[i] = struct { + result1 operation.QueuedOperationsAtTime + result2 error + 
}{result1, result2} +} + +func (fake *OperationQueue) Remove(arg1 uint) (operation.QueuedOperationsAtTime, func() uint, func(error), error) { + fake.removeMutex.Lock() + ret, specificReturn := fake.removeReturnsOnCall[len(fake.removeArgsForCall)] + fake.removeArgsForCall = append(fake.removeArgsForCall, struct { + arg1 uint + }{arg1}) + stub := fake.RemoveStub + fakeReturns := fake.removeReturns + fake.recordInvocation("Remove", []interface{}{arg1}) + fake.removeMutex.Unlock() + if stub != nil { + return stub(arg1) + } + if specificReturn { + return ret.result1, ret.result2, ret.result3, ret.result4 + } + return fakeReturns.result1, fakeReturns.result2, fakeReturns.result3, fakeReturns.result4 +} + +func (fake *OperationQueue) RemoveCallCount() int { + fake.removeMutex.RLock() + defer fake.removeMutex.RUnlock() + return len(fake.removeArgsForCall) +} + +func (fake *OperationQueue) RemoveCalls(stub func(uint) (operation.QueuedOperationsAtTime, func() uint, func(error), error)) { + fake.removeMutex.Lock() + defer fake.removeMutex.Unlock() + fake.RemoveStub = stub +} + +func (fake *OperationQueue) RemoveArgsForCall(i int) uint { + fake.removeMutex.RLock() + defer fake.removeMutex.RUnlock() + argsForCall := fake.removeArgsForCall[i] + return argsForCall.arg1 +} + +func (fake *OperationQueue) RemoveReturns(result1 operation.QueuedOperationsAtTime, result2 func() uint, result3 func(error), result4 error) { + fake.removeMutex.Lock() + defer fake.removeMutex.Unlock() + fake.RemoveStub = nil + fake.removeReturns = struct { + result1 operation.QueuedOperationsAtTime + result2 func() uint + result3 func(error) + result4 error + }{result1, result2, result3, result4} +} + +func (fake *OperationQueue) RemoveReturnsOnCall(i int, result1 operation.QueuedOperationsAtTime, result2 func() uint, result3 func(error), result4 error) { + fake.removeMutex.Lock() + defer fake.removeMutex.Unlock() + fake.RemoveStub = nil + if fake.removeReturnsOnCall == nil { + fake.removeReturnsOnCall = make(map[int]struct { + result1 operation.QueuedOperationsAtTime + result2 func() uint + result3 func(error) + result4 error + }) + } + fake.removeReturnsOnCall[i] = struct { + result1 operation.QueuedOperationsAtTime + result2 func() uint + result3 func(error) + result4 error + }{result1, result2, result3, result4} +} + +func (fake *OperationQueue) Invocations() map[string][][]interface{} { + fake.invocationsMutex.RLock() + defer fake.invocationsMutex.RUnlock() + fake.addMutex.RLock() + defer fake.addMutex.RUnlock() + fake.lenMutex.RLock() + defer fake.lenMutex.RUnlock() + fake.peekMutex.RLock() + defer fake.peekMutex.RUnlock() + fake.removeMutex.RLock() + defer fake.removeMutex.RUnlock() + copiedInvocations := map[string][][]interface{}{} + for key, value := range fake.invocations { + copiedInvocations[key] = value + } + return copiedInvocations +} + +func (fake *OperationQueue) recordInvocation(key string, args []interface{}) { + fake.invocationsMutex.Lock() + defer fake.invocationsMutex.Unlock() + if fake.invocations == nil { + fake.invocations = map[string][][]interface{}{} + } + if fake.invocations[key] == nil { + fake.invocations[key] = [][]interface{}{} + } + fake.invocations[key] = append(fake.invocations[key], args) +} diff --git a/method/sidetreelongform/sidetree-core/mocks/protocol.go b/method/sidetreelongform/sidetree-core/mocks/protocol.go new file mode 100644 index 0000000..3336a48 --- /dev/null +++ b/method/sidetreelongform/sidetree-core/mocks/protocol.go @@ -0,0 +1,159 @@ +/* +Copyright SecureKey Technologies Inc. 
All Rights Reserved.
+
+SPDX-License-Identifier: Apache-2.0
+*/
+
+package mocks
+
+import (
+	"fmt"
+
+	"github.com/pkg/errors"
+
+	"github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/api/protocol"
+)
+
+const (
+
+	// DefaultNS is the default namespace used in mocks.
+	DefaultNS = "did:sidetree"
+
+	// MaxBatchFileSize is the maximum batch file size in bytes.
+	MaxBatchFileSize = 20000
+
+	// MaxOperationByteSize is the maximum operation size in bytes.
+	MaxOperationByteSize = 2000
+
+	// MaxDeltaByteSize is the maximum delta size in bytes.
+	MaxDeltaByteSize = 1000
+
+	// CurrentVersion is the current protocol version.
+	CurrentVersion = "1.0"
+)
+
+// MockProtocolClient mocks protocol for testing purposes.
+type MockProtocolClient struct {
+	Protocol       protocol.Protocol // current version (separated for easier testing)
+	CurrentVersion *ProtocolVersion
+	Versions       []*ProtocolVersion
+	Err            error
+	CasClient      *MockCasClient
+}
+
+// NewMockProtocolClient creates a mock protocol client.
+func NewMockProtocolClient() *MockProtocolClient {
+	latest := GetDefaultProtocolParameters()
+
+	latestVersion := GetProtocolVersion(latest)
+
+	// versions have to be sorted for the mock client to work
+	versions := []*ProtocolVersion{latestVersion}
+
+	return &MockProtocolClient{
+		Protocol:       latest,
+		CurrentVersion: latestVersion,
+		Versions:       versions,
+	}
+}
+
+// Current mocks getting the latest protocol version.
+func (m *MockProtocolClient) Current() (protocol.Version, error) {
+	if m.Err != nil {
+		return nil, m.Err
+	}
+
+	return m.CurrentVersion, nil
+}
+
+// Get mocks getting the protocol version based on anchoring (transaction) time.
+func (m *MockProtocolClient) Get(transactionTime uint64) (protocol.Version, error) {
+	if m.Err != nil {
+		return nil, m.Err
+	}
+
+	for i := len(m.Versions) - 1; i >= 0; i-- {
+		if transactionTime >= m.Versions[i].Protocol().GenesisTime {
+			return m.Versions[i], nil
+		}
+	}
+
+	return nil, fmt.Errorf("protocol parameters are not defined for anchoring time: %d", transactionTime)
+}
+
+// NewMockProtocolClientProvider creates a new mock protocol client provider.
+func NewMockProtocolClientProvider() *MockProtocolClientProvider {
+	m := make(map[string]protocol.Client)
+
+	m[DefaultNS] = NewMockProtocolClient()
+
+	return &MockProtocolClientProvider{
+		ProtocolClients: m,
+	}
+}
+
+// MockProtocolClientProvider implements mock protocol client provider.
+type MockProtocolClientProvider struct {
+	ProtocolClients map[string]protocol.Client
+}
+
+// WithProtocolClient sets the protocol client.
+func (m *MockProtocolClientProvider) WithProtocolClient(ns string, pc protocol.Client) *MockProtocolClientProvider {
+	m.ProtocolClients[ns] = pc
+
+	return m
+}
+
+// ForNamespace returns the protocol client for the given namespace.
+func (m *MockProtocolClientProvider) ForNamespace(namespace string) (protocol.Client, error) {
+	pc, ok := m.ProtocolClients[namespace]
+	if !ok {
+		return nil, errors.Errorf("protocol client not found for namespace [%s]", namespace)
+	}
+
+	return pc, nil
+}
+
+// GetProtocolVersion returns a mock protocol version.
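+//
+// Illustrative usage in a test (a hedged sketch; error handling omitted):
+//
+//	v := GetProtocolVersion(GetDefaultProtocolParameters())
+//	v.TransactionProcessorReturns(&TxnProcessor{}) // override one component for the test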
+// +//nolint:gocritic +func GetProtocolVersion(p protocol.Protocol) *ProtocolVersion { + v := &ProtocolVersion{} + v.VersionReturns(CurrentVersion) + v.OperationApplierReturns(&OperationApplier{}) + v.OperationParserReturns(&OperationParser{}) + v.DocumentComposerReturns(&DocumentComposer{}) + v.DocumentValidatorReturns(&DocumentValidator{}) + v.OperationHandlerReturns(&OperationHandler{}) + v.OperationProviderReturns(&OperationProvider{}) + v.TransactionProcessorReturns(&TxnProcessor{}) + v.DocumentTransformerReturns(&DocumentTransformer{}) + + v.ProtocolReturns(p) + + return v +} + +// GetDefaultProtocolParameters returns mock protocol parameters. +func GetDefaultProtocolParameters() protocol.Protocol { + return protocol.Protocol{ + GenesisTime: 0, + MultihashAlgorithms: []uint{sha2_256}, + MaxOperationCount: 2, + MaxOperationSize: MaxOperationByteSize, + MaxOperationHashLength: 100, + MaxDeltaSize: MaxDeltaByteSize, + MaxCasURILength: 100, + CompressionAlgorithm: "GZIP", + MaxChunkFileSize: MaxBatchFileSize, + MaxProvisionalIndexFileSize: MaxBatchFileSize, + MaxCoreIndexFileSize: MaxBatchFileSize, + MaxProofFileSize: MaxBatchFileSize, + SignatureAlgorithms: []string{"EdDSA", "ES256"}, + KeyAlgorithms: []string{"Ed25519", "P-256"}, + Patches: []string{"add-public-keys", "remove-public-keys", "add-services", "remove-services", "ietf-json-patch"}, //nolint:lll + MaxOperationTimeDelta: 2 * 60 * 60, + NonceSize: 16, // 16 bytes = 128 bits + MaxMemoryDecompressionFactor: 3, + } +} diff --git a/method/sidetreelongform/sidetree-core/mocks/protocolversion.gen.go b/method/sidetreelongform/sidetree-core/mocks/protocolversion.gen.go new file mode 100644 index 0000000..20aa6d7 --- /dev/null +++ b/method/sidetreelongform/sidetree-core/mocks/protocolversion.gen.go @@ -0,0 +1,677 @@ +// Code generated by counterfeiter. DO NOT EDIT. 
+package mocks + +import ( + "sync" + + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/api/protocol" +) + +type ProtocolVersion struct { + DocumentComposerStub func() protocol.DocumentComposer + documentComposerMutex sync.RWMutex + documentComposerArgsForCall []struct { + } + documentComposerReturns struct { + result1 protocol.DocumentComposer + } + documentComposerReturnsOnCall map[int]struct { + result1 protocol.DocumentComposer + } + DocumentTransformerStub func() protocol.DocumentTransformer + documentTransformerMutex sync.RWMutex + documentTransformerArgsForCall []struct { + } + documentTransformerReturns struct { + result1 protocol.DocumentTransformer + } + documentTransformerReturnsOnCall map[int]struct { + result1 protocol.DocumentTransformer + } + DocumentValidatorStub func() protocol.DocumentValidator + documentValidatorMutex sync.RWMutex + documentValidatorArgsForCall []struct { + } + documentValidatorReturns struct { + result1 protocol.DocumentValidator + } + documentValidatorReturnsOnCall map[int]struct { + result1 protocol.DocumentValidator + } + OperationApplierStub func() protocol.OperationApplier + operationApplierMutex sync.RWMutex + operationApplierArgsForCall []struct { + } + operationApplierReturns struct { + result1 protocol.OperationApplier + } + operationApplierReturnsOnCall map[int]struct { + result1 protocol.OperationApplier + } + OperationHandlerStub func() protocol.OperationHandler + operationHandlerMutex sync.RWMutex + operationHandlerArgsForCall []struct { + } + operationHandlerReturns struct { + result1 protocol.OperationHandler + } + operationHandlerReturnsOnCall map[int]struct { + result1 protocol.OperationHandler + } + OperationParserStub func() protocol.OperationParser + operationParserMutex sync.RWMutex + operationParserArgsForCall []struct { + } + operationParserReturns struct { + result1 protocol.OperationParser + } + operationParserReturnsOnCall map[int]struct { + result1 protocol.OperationParser + } + OperationProviderStub func() protocol.OperationProvider + operationProviderMutex sync.RWMutex + operationProviderArgsForCall []struct { + } + operationProviderReturns struct { + result1 protocol.OperationProvider + } + operationProviderReturnsOnCall map[int]struct { + result1 protocol.OperationProvider + } + ProtocolStub func() protocol.Protocol + protocolMutex sync.RWMutex + protocolArgsForCall []struct { + } + protocolReturns struct { + result1 protocol.Protocol + } + protocolReturnsOnCall map[int]struct { + result1 protocol.Protocol + } + TransactionProcessorStub func() protocol.TxnProcessor + transactionProcessorMutex sync.RWMutex + transactionProcessorArgsForCall []struct { + } + transactionProcessorReturns struct { + result1 protocol.TxnProcessor + } + transactionProcessorReturnsOnCall map[int]struct { + result1 protocol.TxnProcessor + } + VersionStub func() string + versionMutex sync.RWMutex + versionArgsForCall []struct { + } + versionReturns struct { + result1 string + } + versionReturnsOnCall map[int]struct { + result1 string + } + invocations map[string][][]interface{} + invocationsMutex sync.RWMutex +} + +func (fake *ProtocolVersion) DocumentComposer() protocol.DocumentComposer { + fake.documentComposerMutex.Lock() + ret, specificReturn := fake.documentComposerReturnsOnCall[len(fake.documentComposerArgsForCall)] + fake.documentComposerArgsForCall = append(fake.documentComposerArgsForCall, struct { + }{}) + fake.recordInvocation("DocumentComposer", []interface{}{}) + fake.documentComposerMutex.Unlock() + if 
fake.DocumentComposerStub != nil { + return fake.DocumentComposerStub() + } + if specificReturn { + return ret.result1 + } + fakeReturns := fake.documentComposerReturns + return fakeReturns.result1 +} + +func (fake *ProtocolVersion) DocumentComposerCallCount() int { + fake.documentComposerMutex.RLock() + defer fake.documentComposerMutex.RUnlock() + return len(fake.documentComposerArgsForCall) +} + +func (fake *ProtocolVersion) DocumentComposerCalls(stub func() protocol.DocumentComposer) { + fake.documentComposerMutex.Lock() + defer fake.documentComposerMutex.Unlock() + fake.DocumentComposerStub = stub +} + +func (fake *ProtocolVersion) DocumentComposerReturns(result1 protocol.DocumentComposer) { + fake.documentComposerMutex.Lock() + defer fake.documentComposerMutex.Unlock() + fake.DocumentComposerStub = nil + fake.documentComposerReturns = struct { + result1 protocol.DocumentComposer + }{result1} +} + +func (fake *ProtocolVersion) DocumentComposerReturnsOnCall(i int, result1 protocol.DocumentComposer) { + fake.documentComposerMutex.Lock() + defer fake.documentComposerMutex.Unlock() + fake.DocumentComposerStub = nil + if fake.documentComposerReturnsOnCall == nil { + fake.documentComposerReturnsOnCall = make(map[int]struct { + result1 protocol.DocumentComposer + }) + } + fake.documentComposerReturnsOnCall[i] = struct { + result1 protocol.DocumentComposer + }{result1} +} + +func (fake *ProtocolVersion) DocumentTransformer() protocol.DocumentTransformer { + fake.documentTransformerMutex.Lock() + ret, specificReturn := fake.documentTransformerReturnsOnCall[len(fake.documentTransformerArgsForCall)] + fake.documentTransformerArgsForCall = append(fake.documentTransformerArgsForCall, struct { + }{}) + fake.recordInvocation("DocumentTransformer", []interface{}{}) + fake.documentTransformerMutex.Unlock() + if fake.DocumentTransformerStub != nil { + return fake.DocumentTransformerStub() + } + if specificReturn { + return ret.result1 + } + fakeReturns := fake.documentTransformerReturns + return fakeReturns.result1 +} + +func (fake *ProtocolVersion) DocumentTransformerCallCount() int { + fake.documentTransformerMutex.RLock() + defer fake.documentTransformerMutex.RUnlock() + return len(fake.documentTransformerArgsForCall) +} + +func (fake *ProtocolVersion) DocumentTransformerCalls(stub func() protocol.DocumentTransformer) { + fake.documentTransformerMutex.Lock() + defer fake.documentTransformerMutex.Unlock() + fake.DocumentTransformerStub = stub +} + +func (fake *ProtocolVersion) DocumentTransformerReturns(result1 protocol.DocumentTransformer) { + fake.documentTransformerMutex.Lock() + defer fake.documentTransformerMutex.Unlock() + fake.DocumentTransformerStub = nil + fake.documentTransformerReturns = struct { + result1 protocol.DocumentTransformer + }{result1} +} + +func (fake *ProtocolVersion) DocumentTransformerReturnsOnCall(i int, result1 protocol.DocumentTransformer) { + fake.documentTransformerMutex.Lock() + defer fake.documentTransformerMutex.Unlock() + fake.DocumentTransformerStub = nil + if fake.documentTransformerReturnsOnCall == nil { + fake.documentTransformerReturnsOnCall = make(map[int]struct { + result1 protocol.DocumentTransformer + }) + } + fake.documentTransformerReturnsOnCall[i] = struct { + result1 protocol.DocumentTransformer + }{result1} +} + +func (fake *ProtocolVersion) DocumentValidator() protocol.DocumentValidator { + fake.documentValidatorMutex.Lock() + ret, specificReturn := fake.documentValidatorReturnsOnCall[len(fake.documentValidatorArgsForCall)] + 
fake.documentValidatorArgsForCall = append(fake.documentValidatorArgsForCall, struct { + }{}) + fake.recordInvocation("DocumentValidator", []interface{}{}) + fake.documentValidatorMutex.Unlock() + if fake.DocumentValidatorStub != nil { + return fake.DocumentValidatorStub() + } + if specificReturn { + return ret.result1 + } + fakeReturns := fake.documentValidatorReturns + return fakeReturns.result1 +} + +func (fake *ProtocolVersion) DocumentValidatorCallCount() int { + fake.documentValidatorMutex.RLock() + defer fake.documentValidatorMutex.RUnlock() + return len(fake.documentValidatorArgsForCall) +} + +func (fake *ProtocolVersion) DocumentValidatorCalls(stub func() protocol.DocumentValidator) { + fake.documentValidatorMutex.Lock() + defer fake.documentValidatorMutex.Unlock() + fake.DocumentValidatorStub = stub +} + +func (fake *ProtocolVersion) DocumentValidatorReturns(result1 protocol.DocumentValidator) { + fake.documentValidatorMutex.Lock() + defer fake.documentValidatorMutex.Unlock() + fake.DocumentValidatorStub = nil + fake.documentValidatorReturns = struct { + result1 protocol.DocumentValidator + }{result1} +} + +func (fake *ProtocolVersion) DocumentValidatorReturnsOnCall(i int, result1 protocol.DocumentValidator) { + fake.documentValidatorMutex.Lock() + defer fake.documentValidatorMutex.Unlock() + fake.DocumentValidatorStub = nil + if fake.documentValidatorReturnsOnCall == nil { + fake.documentValidatorReturnsOnCall = make(map[int]struct { + result1 protocol.DocumentValidator + }) + } + fake.documentValidatorReturnsOnCall[i] = struct { + result1 protocol.DocumentValidator + }{result1} +} + +func (fake *ProtocolVersion) OperationApplier() protocol.OperationApplier { + fake.operationApplierMutex.Lock() + ret, specificReturn := fake.operationApplierReturnsOnCall[len(fake.operationApplierArgsForCall)] + fake.operationApplierArgsForCall = append(fake.operationApplierArgsForCall, struct { + }{}) + fake.recordInvocation("OperationApplier", []interface{}{}) + fake.operationApplierMutex.Unlock() + if fake.OperationApplierStub != nil { + return fake.OperationApplierStub() + } + if specificReturn { + return ret.result1 + } + fakeReturns := fake.operationApplierReturns + return fakeReturns.result1 +} + +func (fake *ProtocolVersion) OperationApplierCallCount() int { + fake.operationApplierMutex.RLock() + defer fake.operationApplierMutex.RUnlock() + return len(fake.operationApplierArgsForCall) +} + +func (fake *ProtocolVersion) OperationApplierCalls(stub func() protocol.OperationApplier) { + fake.operationApplierMutex.Lock() + defer fake.operationApplierMutex.Unlock() + fake.OperationApplierStub = stub +} + +func (fake *ProtocolVersion) OperationApplierReturns(result1 protocol.OperationApplier) { + fake.operationApplierMutex.Lock() + defer fake.operationApplierMutex.Unlock() + fake.OperationApplierStub = nil + fake.operationApplierReturns = struct { + result1 protocol.OperationApplier + }{result1} +} + +func (fake *ProtocolVersion) OperationApplierReturnsOnCall(i int, result1 protocol.OperationApplier) { + fake.operationApplierMutex.Lock() + defer fake.operationApplierMutex.Unlock() + fake.OperationApplierStub = nil + if fake.operationApplierReturnsOnCall == nil { + fake.operationApplierReturnsOnCall = make(map[int]struct { + result1 protocol.OperationApplier + }) + } + fake.operationApplierReturnsOnCall[i] = struct { + result1 protocol.OperationApplier + }{result1} +} + +func (fake *ProtocolVersion) OperationHandler() protocol.OperationHandler { + fake.operationHandlerMutex.Lock() + ret, 
specificReturn := fake.operationHandlerReturnsOnCall[len(fake.operationHandlerArgsForCall)] + fake.operationHandlerArgsForCall = append(fake.operationHandlerArgsForCall, struct { + }{}) + fake.recordInvocation("OperationHandler", []interface{}{}) + fake.operationHandlerMutex.Unlock() + if fake.OperationHandlerStub != nil { + return fake.OperationHandlerStub() + } + if specificReturn { + return ret.result1 + } + fakeReturns := fake.operationHandlerReturns + return fakeReturns.result1 +} + +func (fake *ProtocolVersion) OperationHandlerCallCount() int { + fake.operationHandlerMutex.RLock() + defer fake.operationHandlerMutex.RUnlock() + return len(fake.operationHandlerArgsForCall) +} + +func (fake *ProtocolVersion) OperationHandlerCalls(stub func() protocol.OperationHandler) { + fake.operationHandlerMutex.Lock() + defer fake.operationHandlerMutex.Unlock() + fake.OperationHandlerStub = stub +} + +func (fake *ProtocolVersion) OperationHandlerReturns(result1 protocol.OperationHandler) { + fake.operationHandlerMutex.Lock() + defer fake.operationHandlerMutex.Unlock() + fake.OperationHandlerStub = nil + fake.operationHandlerReturns = struct { + result1 protocol.OperationHandler + }{result1} +} + +func (fake *ProtocolVersion) OperationHandlerReturnsOnCall(i int, result1 protocol.OperationHandler) { + fake.operationHandlerMutex.Lock() + defer fake.operationHandlerMutex.Unlock() + fake.OperationHandlerStub = nil + if fake.operationHandlerReturnsOnCall == nil { + fake.operationHandlerReturnsOnCall = make(map[int]struct { + result1 protocol.OperationHandler + }) + } + fake.operationHandlerReturnsOnCall[i] = struct { + result1 protocol.OperationHandler + }{result1} +} + +func (fake *ProtocolVersion) OperationParser() protocol.OperationParser { + fake.operationParserMutex.Lock() + ret, specificReturn := fake.operationParserReturnsOnCall[len(fake.operationParserArgsForCall)] + fake.operationParserArgsForCall = append(fake.operationParserArgsForCall, struct { + }{}) + fake.recordInvocation("OperationParser", []interface{}{}) + fake.operationParserMutex.Unlock() + if fake.OperationParserStub != nil { + return fake.OperationParserStub() + } + if specificReturn { + return ret.result1 + } + fakeReturns := fake.operationParserReturns + return fakeReturns.result1 +} + +func (fake *ProtocolVersion) OperationParserCallCount() int { + fake.operationParserMutex.RLock() + defer fake.operationParserMutex.RUnlock() + return len(fake.operationParserArgsForCall) +} + +func (fake *ProtocolVersion) OperationParserCalls(stub func() protocol.OperationParser) { + fake.operationParserMutex.Lock() + defer fake.operationParserMutex.Unlock() + fake.OperationParserStub = stub +} + +func (fake *ProtocolVersion) OperationParserReturns(result1 protocol.OperationParser) { + fake.operationParserMutex.Lock() + defer fake.operationParserMutex.Unlock() + fake.OperationParserStub = nil + fake.operationParserReturns = struct { + result1 protocol.OperationParser + }{result1} +} + +func (fake *ProtocolVersion) OperationParserReturnsOnCall(i int, result1 protocol.OperationParser) { + fake.operationParserMutex.Lock() + defer fake.operationParserMutex.Unlock() + fake.OperationParserStub = nil + if fake.operationParserReturnsOnCall == nil { + fake.operationParserReturnsOnCall = make(map[int]struct { + result1 protocol.OperationParser + }) + } + fake.operationParserReturnsOnCall[i] = struct { + result1 protocol.OperationParser + }{result1} +} + +func (fake *ProtocolVersion) OperationProvider() protocol.OperationProvider { + 
fake.operationProviderMutex.Lock() + ret, specificReturn := fake.operationProviderReturnsOnCall[len(fake.operationProviderArgsForCall)] + fake.operationProviderArgsForCall = append(fake.operationProviderArgsForCall, struct { + }{}) + fake.recordInvocation("OperationProvider", []interface{}{}) + fake.operationProviderMutex.Unlock() + if fake.OperationProviderStub != nil { + return fake.OperationProviderStub() + } + if specificReturn { + return ret.result1 + } + fakeReturns := fake.operationProviderReturns + return fakeReturns.result1 +} + +func (fake *ProtocolVersion) OperationProviderCallCount() int { + fake.operationProviderMutex.RLock() + defer fake.operationProviderMutex.RUnlock() + return len(fake.operationProviderArgsForCall) +} + +func (fake *ProtocolVersion) OperationProviderCalls(stub func() protocol.OperationProvider) { + fake.operationProviderMutex.Lock() + defer fake.operationProviderMutex.Unlock() + fake.OperationProviderStub = stub +} + +func (fake *ProtocolVersion) OperationProviderReturns(result1 protocol.OperationProvider) { + fake.operationProviderMutex.Lock() + defer fake.operationProviderMutex.Unlock() + fake.OperationProviderStub = nil + fake.operationProviderReturns = struct { + result1 protocol.OperationProvider + }{result1} +} + +func (fake *ProtocolVersion) OperationProviderReturnsOnCall(i int, result1 protocol.OperationProvider) { + fake.operationProviderMutex.Lock() + defer fake.operationProviderMutex.Unlock() + fake.OperationProviderStub = nil + if fake.operationProviderReturnsOnCall == nil { + fake.operationProviderReturnsOnCall = make(map[int]struct { + result1 protocol.OperationProvider + }) + } + fake.operationProviderReturnsOnCall[i] = struct { + result1 protocol.OperationProvider + }{result1} +} + +func (fake *ProtocolVersion) Protocol() protocol.Protocol { + fake.protocolMutex.Lock() + ret, specificReturn := fake.protocolReturnsOnCall[len(fake.protocolArgsForCall)] + fake.protocolArgsForCall = append(fake.protocolArgsForCall, struct { + }{}) + fake.recordInvocation("Protocol", []interface{}{}) + fake.protocolMutex.Unlock() + if fake.ProtocolStub != nil { + return fake.ProtocolStub() + } + if specificReturn { + return ret.result1 + } + fakeReturns := fake.protocolReturns + return fakeReturns.result1 +} + +func (fake *ProtocolVersion) ProtocolCallCount() int { + fake.protocolMutex.RLock() + defer fake.protocolMutex.RUnlock() + return len(fake.protocolArgsForCall) +} + +func (fake *ProtocolVersion) ProtocolCalls(stub func() protocol.Protocol) { + fake.protocolMutex.Lock() + defer fake.protocolMutex.Unlock() + fake.ProtocolStub = stub +} + +func (fake *ProtocolVersion) ProtocolReturns(result1 protocol.Protocol) { + fake.protocolMutex.Lock() + defer fake.protocolMutex.Unlock() + fake.ProtocolStub = nil + fake.protocolReturns = struct { + result1 protocol.Protocol + }{result1} +} + +func (fake *ProtocolVersion) ProtocolReturnsOnCall(i int, result1 protocol.Protocol) { + fake.protocolMutex.Lock() + defer fake.protocolMutex.Unlock() + fake.ProtocolStub = nil + if fake.protocolReturnsOnCall == nil { + fake.protocolReturnsOnCall = make(map[int]struct { + result1 protocol.Protocol + }) + } + fake.protocolReturnsOnCall[i] = struct { + result1 protocol.Protocol + }{result1} +} + +func (fake *ProtocolVersion) TransactionProcessor() protocol.TxnProcessor { + fake.transactionProcessorMutex.Lock() + ret, specificReturn := fake.transactionProcessorReturnsOnCall[len(fake.transactionProcessorArgsForCall)] + fake.transactionProcessorArgsForCall = 
append(fake.transactionProcessorArgsForCall, struct { + }{}) + fake.recordInvocation("TransactionProcessor", []interface{}{}) + fake.transactionProcessorMutex.Unlock() + if fake.TransactionProcessorStub != nil { + return fake.TransactionProcessorStub() + } + if specificReturn { + return ret.result1 + } + fakeReturns := fake.transactionProcessorReturns + return fakeReturns.result1 +} + +func (fake *ProtocolVersion) TransactionProcessorCallCount() int { + fake.transactionProcessorMutex.RLock() + defer fake.transactionProcessorMutex.RUnlock() + return len(fake.transactionProcessorArgsForCall) +} + +func (fake *ProtocolVersion) TransactionProcessorCalls(stub func() protocol.TxnProcessor) { + fake.transactionProcessorMutex.Lock() + defer fake.transactionProcessorMutex.Unlock() + fake.TransactionProcessorStub = stub +} + +func (fake *ProtocolVersion) TransactionProcessorReturns(result1 protocol.TxnProcessor) { + fake.transactionProcessorMutex.Lock() + defer fake.transactionProcessorMutex.Unlock() + fake.TransactionProcessorStub = nil + fake.transactionProcessorReturns = struct { + result1 protocol.TxnProcessor + }{result1} +} + +func (fake *ProtocolVersion) TransactionProcessorReturnsOnCall(i int, result1 protocol.TxnProcessor) { + fake.transactionProcessorMutex.Lock() + defer fake.transactionProcessorMutex.Unlock() + fake.TransactionProcessorStub = nil + if fake.transactionProcessorReturnsOnCall == nil { + fake.transactionProcessorReturnsOnCall = make(map[int]struct { + result1 protocol.TxnProcessor + }) + } + fake.transactionProcessorReturnsOnCall[i] = struct { + result1 protocol.TxnProcessor + }{result1} +} + +func (fake *ProtocolVersion) Version() string { + fake.versionMutex.Lock() + ret, specificReturn := fake.versionReturnsOnCall[len(fake.versionArgsForCall)] + fake.versionArgsForCall = append(fake.versionArgsForCall, struct { + }{}) + fake.recordInvocation("Version", []interface{}{}) + fake.versionMutex.Unlock() + if fake.VersionStub != nil { + return fake.VersionStub() + } + if specificReturn { + return ret.result1 + } + fakeReturns := fake.versionReturns + return fakeReturns.result1 +} + +func (fake *ProtocolVersion) VersionCallCount() int { + fake.versionMutex.RLock() + defer fake.versionMutex.RUnlock() + return len(fake.versionArgsForCall) +} + +func (fake *ProtocolVersion) VersionCalls(stub func() string) { + fake.versionMutex.Lock() + defer fake.versionMutex.Unlock() + fake.VersionStub = stub +} + +func (fake *ProtocolVersion) VersionReturns(result1 string) { + fake.versionMutex.Lock() + defer fake.versionMutex.Unlock() + fake.VersionStub = nil + fake.versionReturns = struct { + result1 string + }{result1} +} + +func (fake *ProtocolVersion) VersionReturnsOnCall(i int, result1 string) { + fake.versionMutex.Lock() + defer fake.versionMutex.Unlock() + fake.VersionStub = nil + if fake.versionReturnsOnCall == nil { + fake.versionReturnsOnCall = make(map[int]struct { + result1 string + }) + } + fake.versionReturnsOnCall[i] = struct { + result1 string + }{result1} +} + +func (fake *ProtocolVersion) Invocations() map[string][][]interface{} { + fake.invocationsMutex.RLock() + defer fake.invocationsMutex.RUnlock() + fake.documentComposerMutex.RLock() + defer fake.documentComposerMutex.RUnlock() + fake.documentTransformerMutex.RLock() + defer fake.documentTransformerMutex.RUnlock() + fake.documentValidatorMutex.RLock() + defer fake.documentValidatorMutex.RUnlock() + fake.operationApplierMutex.RLock() + defer fake.operationApplierMutex.RUnlock() + fake.operationHandlerMutex.RLock() + defer 
fake.operationHandlerMutex.RUnlock() + fake.operationParserMutex.RLock() + defer fake.operationParserMutex.RUnlock() + fake.operationProviderMutex.RLock() + defer fake.operationProviderMutex.RUnlock() + fake.protocolMutex.RLock() + defer fake.protocolMutex.RUnlock() + fake.transactionProcessorMutex.RLock() + defer fake.transactionProcessorMutex.RUnlock() + fake.versionMutex.RLock() + defer fake.versionMutex.RUnlock() + copiedInvocations := map[string][][]interface{}{} + for key, value := range fake.invocations { + copiedInvocations[key] = value + } + return copiedInvocations +} + +func (fake *ProtocolVersion) recordInvocation(key string, args []interface{}) { + fake.invocationsMutex.Lock() + defer fake.invocationsMutex.Unlock() + if fake.invocations == nil { + fake.invocations = map[string][][]interface{}{} + } + if fake.invocations[key] == nil { + fake.invocations[key] = [][]interface{}{} + } + fake.invocations[key] = append(fake.invocations[key], args) +} + +var _ protocol.Version = new(ProtocolVersion) diff --git a/method/sidetreelongform/sidetree-core/mocks/store.go b/method/sidetreelongform/sidetree-core/mocks/store.go new file mode 100644 index 0000000..66e1217 --- /dev/null +++ b/method/sidetreelongform/sidetree-core/mocks/store.go @@ -0,0 +1,68 @@ +/* +Copyright SecureKey Technologies Inc. All Rights Reserved. + +SPDX-License-Identifier: Apache-2.0 +*/ + +package mocks + +import ( + "errors" + "sync" + + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/api/operation" +) + +// MockOperationStore mocks store for testing purposes. +type MockOperationStore struct { + mutex sync.RWMutex + operations map[string][]*operation.AnchoredOperation + Err error + Validate bool +} + +// NewMockOperationStore creates mock operations store. +func NewMockOperationStore(err error) *MockOperationStore { + return &MockOperationStore{operations: make(map[string][]*operation.AnchoredOperation), Err: err, Validate: true} +} + +// Put mocks storing operation. +func (m *MockOperationStore) Put(op *operation.AnchoredOperation) error { + if m.Err != nil { + return m.Err + } + + var opsSize int + + m.mutex.RLock() + opsSize = len(m.operations[op.UniqueSuffix]) + m.mutex.RUnlock() + + if m.Validate && op.Type == operation.TypeCreate && opsSize > 0 { + // Nothing to do; already created + return nil + } + + m.mutex.Lock() + defer m.mutex.Unlock() + + m.operations[op.UniqueSuffix] = append(m.operations[op.UniqueSuffix], op) + + return nil +} + +// Get mocks retrieving operations from the store. +func (m *MockOperationStore) Get(uniqueSuffix string) ([]*operation.AnchoredOperation, error) { + if m.Err != nil { + return nil, m.Err + } + + m.mutex.RLock() + defer m.mutex.RUnlock() + + if ops, ok := m.operations[uniqueSuffix]; ok { + return ops, nil + } + + return nil, errors.New("uniqueSuffix not found in the store") +} diff --git a/method/sidetreelongform/sidetree-core/mocks/transformer.go b/method/sidetreelongform/sidetree-core/mocks/transformer.go new file mode 100644 index 0000000..0a9bdb0 --- /dev/null +++ b/method/sidetreelongform/sidetree-core/mocks/transformer.go @@ -0,0 +1,46 @@ +/* +Copyright SecureKey Technologies Inc. All Rights Reserved. 
+ +SPDX-License-Identifier: Apache-2.0 +*/ + +package mocks + +import ( + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/api/protocol" + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/document" +) + +// MockDocumentTransformer is responsible for validating operations, +// original document and transforming to external document. +type MockDocumentTransformer struct { + Err error +} + +// NewDocumentTransformer creates a new mock document transformer. +func NewDocumentTransformer() *MockDocumentTransformer { + return &MockDocumentTransformer{} +} + +// TransformDocument mocks transformation from internal to external document. +func (m *MockDocumentTransformer) TransformDocument(internal *protocol.ResolutionModel, + info protocol.TransformationInfo) (*document.ResolutionResult, error) { + if m.Err != nil { + return nil, m.Err + } + + internal.Doc[document.IDProperty] = info[document.IDProperty] + + metadata := make(document.Metadata) + metadata[document.PublishedProperty] = info[document.PublishedProperty] + metadata[document.RecoveryCommitmentProperty] = internal.RecoveryCommitment + metadata[document.UpdateCommitmentProperty] = internal.UpdateCommitment + + docMetadata := make(document.Metadata) + docMetadata[document.MethodProperty] = metadata + + return &document.ResolutionResult{ + Document: internal.Doc, + DocumentMetadata: docMetadata, + }, nil +} diff --git a/method/sidetreelongform/sidetree-core/mocks/txnprocessor.gen.go b/method/sidetreelongform/sidetree-core/mocks/txnprocessor.gen.go new file mode 100644 index 0000000..f6d5be0 --- /dev/null +++ b/method/sidetreelongform/sidetree-core/mocks/txnprocessor.gen.go @@ -0,0 +1,119 @@ +// Code generated by counterfeiter. DO NOT EDIT. +package mocks + +import ( + "sync" + + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/api/protocol" + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/api/txn" +) + +type TxnProcessor struct { + ProcessStub func(txn.SidetreeTxn, ...string) (int, error) + processMutex sync.RWMutex + processArgsForCall []struct { + arg1 txn.SidetreeTxn + arg2 []string + } + processReturns struct { + result1 int + result2 error + } + processReturnsOnCall map[int]struct { + result1 int + result2 error + } + invocations map[string][][]interface{} + invocationsMutex sync.RWMutex +} + +func (fake *TxnProcessor) Process(arg1 txn.SidetreeTxn, arg2 ...string) (int, error) { + fake.processMutex.Lock() + ret, specificReturn := fake.processReturnsOnCall[len(fake.processArgsForCall)] + fake.processArgsForCall = append(fake.processArgsForCall, struct { + arg1 txn.SidetreeTxn + arg2 []string + }{arg1, arg2}) + stub := fake.ProcessStub + fakeReturns := fake.processReturns + fake.recordInvocation("Process", []interface{}{arg1, arg2}) + fake.processMutex.Unlock() + if stub != nil { + return stub(arg1, arg2...) 
+	}
+	if specificReturn {
+		return ret.result1, ret.result2
+	}
+	return fakeReturns.result1, fakeReturns.result2
+}
+
+func (fake *TxnProcessor) ProcessCallCount() int {
+	fake.processMutex.RLock()
+	defer fake.processMutex.RUnlock()
+	return len(fake.processArgsForCall)
+}
+
+func (fake *TxnProcessor) ProcessCalls(stub func(txn.SidetreeTxn, ...string) (int, error)) {
+	fake.processMutex.Lock()
+	defer fake.processMutex.Unlock()
+	fake.ProcessStub = stub
+}
+
+func (fake *TxnProcessor) ProcessArgsForCall(i int) (txn.SidetreeTxn, []string) {
+	fake.processMutex.RLock()
+	defer fake.processMutex.RUnlock()
+	argsForCall := fake.processArgsForCall[i]
+	return argsForCall.arg1, argsForCall.arg2
+}
+
+func (fake *TxnProcessor) ProcessReturns(result1 int, result2 error) {
+	fake.processMutex.Lock()
+	defer fake.processMutex.Unlock()
+	fake.ProcessStub = nil
+	fake.processReturns = struct {
+		result1 int
+		result2 error
+	}{result1, result2}
+}
+
+func (fake *TxnProcessor) ProcessReturnsOnCall(i int, result1 int, result2 error) {
+	fake.processMutex.Lock()
+	defer fake.processMutex.Unlock()
+	fake.ProcessStub = nil
+	if fake.processReturnsOnCall == nil {
+		fake.processReturnsOnCall = make(map[int]struct {
+			result1 int
+			result2 error
+		})
+	}
+	fake.processReturnsOnCall[i] = struct {
+		result1 int
+		result2 error
+	}{result1, result2}
+}
+
+func (fake *TxnProcessor) Invocations() map[string][][]interface{} {
+	fake.invocationsMutex.RLock()
+	defer fake.invocationsMutex.RUnlock()
+	fake.processMutex.RLock()
+	defer fake.processMutex.RUnlock()
+	copiedInvocations := map[string][][]interface{}{}
+	for key, value := range fake.invocations {
+		copiedInvocations[key] = value
+	}
+	return copiedInvocations
+}
+
+func (fake *TxnProcessor) recordInvocation(key string, args []interface{}) {
+	fake.invocationsMutex.Lock()
+	defer fake.invocationsMutex.Unlock()
+	if fake.invocations == nil {
+		fake.invocations = map[string][][]interface{}{}
+	}
+	if fake.invocations[key] == nil {
+		fake.invocations[key] = [][]interface{}{}
+	}
+	fake.invocations[key] = append(fake.invocations[key], args)
+}
+
+var _ protocol.TxnProcessor = new(TxnProcessor)
diff --git a/method/sidetreelongform/sidetree-core/mocks/validator.go b/method/sidetreelongform/sidetree-core/mocks/validator.go
new file mode 100644
index 0000000..140b590
--- /dev/null
+++ b/method/sidetreelongform/sidetree-core/mocks/validator.go
@@ -0,0 +1,31 @@
+/*
+Copyright SecureKey Technologies Inc. All Rights Reserved.
+
+SPDX-License-Identifier: Apache-2.0
+*/
+
+package mocks
+
+// MockDocumentValidator is responsible for validating operations and
+// the original document.
+type MockDocumentValidator struct {
+	IsValidPayloadErr          error
+	IsValidOriginalDocumentErr error
+}
+
+// New creates a new mock document validator.
+func New() *MockDocumentValidator {
+	return &MockDocumentValidator{}
+}
+
+// IsValidPayload mocks the check that the given payload is a valid Sidetree-specific payload
+// that can be accepted by the Sidetree update operations.
+func (m *MockDocumentValidator) IsValidPayload(payload []byte) error {
+	return m.IsValidPayloadErr
+}
+
+// IsValidOriginalDocument mocks the check that the given payload is a valid Sidetree-specific document that can
+// be accepted by the Sidetree create operation.
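+//
+// Illustrative usage (a hedged sketch; the error value is an assumption for the example):
+//
+//	v := New()
+//	v.IsValidOriginalDocumentErr = errors.New("bad doc")
+//	err := v.IsValidOriginalDocument([]byte(`{}`)) // returns the configured error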
+func (m *MockDocumentValidator) IsValidOriginalDocument(payload []byte) error {
+	return m.IsValidOriginalDocumentErr
+}
diff --git a/method/sidetreelongform/sidetree-core/observer/observer.go b/method/sidetreelongform/sidetree-core/observer/observer.go
new file mode 100644
index 0000000..531b902
--- /dev/null
+++ b/method/sidetreelongform/sidetree-core/observer/observer.go
@@ -0,0 +1,116 @@
+/*
+Copyright SecureKey Technologies Inc. All Rights Reserved.
+
+SPDX-License-Identifier: Apache-2.0
+*/
+
+package observer
+
+import (
+	"github.com/trustbloc/logutil-go/pkg/log"
+
+	"github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/api/operation"
+	"github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/api/protocol"
+	"github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/api/txn"
+	logfields "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/internal/log"
+)
+
+var logger = log.New("sidetree-core-observer")
+
+// Ledger is the interface used to access ledger transactions.
+type Ledger interface {
+	RegisterForSidetreeTxn() <-chan []txn.SidetreeTxn
+}
+
+// OperationStore is the interface used to access the operation store.
+type OperationStore interface {
+	Put(ops []*operation.AnchoredOperation) error
+}
+
+// OperationFilter filters out operations before they are persisted.
+type OperationFilter interface {
+	Filter(uniqueSuffix string, ops []*operation.AnchoredOperation) ([]*operation.AnchoredOperation, error)
+}
+
+// Providers contains all of the providers required by the Observer.
+type Providers struct {
+	Ledger                 Ledger
+	ProtocolClientProvider protocol.ClientProvider
+}
+
+// Observer receives transactions over a channel and processes them by storing them to an operation store.
+type Observer struct {
+	*Providers
+
+	stopCh chan struct{}
+}
+
+// New returns a new observer.
+func New(providers *Providers) *Observer {
+	return &Observer{
+		Providers: providers,
+		stopCh:    make(chan struct{}, 1),
+	}
+}
+
+// Start starts observer routines.
+func (o *Observer) Start() {
+	go o.listen(o.Ledger.RegisterForSidetreeTxn())
+}
+
+// Stop stops the observer.
+func (o *Observer) Stop() {
+	o.stopCh <- struct{}{}
+}
+
+func (o *Observer) listen(txnsCh <-chan []txn.SidetreeTxn) {
+	for {
+		select {
+		case <-o.stopCh:
+			logger.Info("The observer has been stopped. Exiting.")
+
+			return
+
+		case txns, ok := <-txnsCh:
+			if !ok {
+				logger.Warn("Notification channel was closed.
Exiting.") + + return + } + + o.process(txns) + } + } +} + +func (o *Observer) process(txns []txn.SidetreeTxn) { + for _, txn := range txns { + pc, err := o.ProtocolClientProvider.ForNamespace(txn.Namespace) + if err != nil { + logger.Warn( + "Failed to get protocol client for namespace", + logfields.WithNamespace(txn.Namespace), log.WithError(err)) + + continue + } + + v, err := pc.Get(txn.ProtocolVersion) + if err != nil { + logger.Warn( + "Failed to get processor for transaction time", logfields.WithGenesisTime(txn.ProtocolVersion), + log.WithError(err)) + + continue + } + + _, err = v.TransactionProcessor().Process(txn) + if err != nil { + logger.Warn("Failed to process anchor", + logfields.WithAnchorString(txn.AnchorString), log.WithError(err)) + + continue + } + + logger.Debug("Successfully processed anchor", logfields.WithAnchorString(txn.AnchorString)) + } +} diff --git a/method/sidetreelongform/sidetree-core/observer/observer_test.go b/method/sidetreelongform/sidetree-core/observer/observer_test.go new file mode 100644 index 0000000..8cf99a4 --- /dev/null +++ b/method/sidetreelongform/sidetree-core/observer/observer_test.go @@ -0,0 +1,140 @@ +/* +Copyright SecureKey Technologies Inc. All Rights Reserved. + +SPDX-License-Identifier: Apache-2.0 +*/ + +package observer + +import ( + "fmt" + "testing" + "time" + + "github.com/stretchr/testify/require" + + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/api/operation" + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/api/txn" + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/mocks" + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/versions/1_0/txnprocessor" +) + +func TestStartObserver(t *testing.T) { + const ( + namespace1 = "ns1" + namespace2 = "ns2" + ) + + t.Run("test channel close", func(t *testing.T) { + sidetreeTxnCh := make(chan []txn.SidetreeTxn, 100) + + providers := &Providers{ + Ledger: mockLedger{registerForSidetreeTxnValue: sidetreeTxnCh}, + } + + o := New(providers) + require.NotNil(t, o) + + o.Start() + defer o.Stop() + + close(sidetreeTxnCh) + time.Sleep(200 * time.Millisecond) + }) + + t.Run("test success", func(t *testing.T) { + sidetreeTxnCh := make(chan []txn.SidetreeTxn, 100) + + tp := &mocks.TxnProcessor{} + + pc := mocks.NewMockProtocolClient() + pc.Protocol.GenesisTime = 1 + pc.Versions[0].TransactionProcessorReturns(tp) + pc.Versions[0].ProtocolReturns(pc.Protocol) + + providers := &Providers{ + Ledger: mockLedger{registerForSidetreeTxnValue: sidetreeTxnCh}, + ProtocolClientProvider: mocks.NewMockProtocolClientProvider().WithProtocolClient(namespace1, pc), + } + + o := New(providers) + require.NotNil(t, o) + + o.Start() + defer o.Stop() + + sidetreeTxnCh <- []txn.SidetreeTxn{ + {Namespace: namespace1, ProtocolVersion: 0, TransactionTime: 10, TransactionNumber: 0, AnchorString: "1.address"}, + {Namespace: namespace1, ProtocolVersion: 20, TransactionTime: 21, TransactionNumber: 2, AnchorString: "1.address"}, + {Namespace: namespace2, ProtocolVersion: 100, TransactionTime: 200, TransactionNumber: 2, AnchorString: "2.address"}, + } + time.Sleep(200 * time.Millisecond) + + require.Equal(t, 1, tp.ProcessCallCount()) + }) +} + +func TestTxnProcessor_Process(t *testing.T) { + t.Run("test error from txn operations provider", func(t *testing.T) { + errExpected := fmt.Errorf("txn operations provider error") + + opp := &mockTxnOpsProvider{ + err: errExpected, + } + + providers := &txnprocessor.Providers{ + OpStore: &mockOperationStore{}, + 
OperationProtocolProvider: opp, + } + + p := txnprocessor.New(providers) + _, err := p.Process(txn.SidetreeTxn{}) + require.Error(t, err) + require.Contains(t, err.Error(), errExpected.Error()) + }) +} + +type mockLedger struct { + registerForSidetreeTxnValue chan []txn.SidetreeTxn +} + +func (m mockLedger) RegisterForSidetreeTxn() <-chan []txn.SidetreeTxn { + return m.registerForSidetreeTxnValue +} + +type mockOperationStore struct { + putFunc func(ops []*operation.AnchoredOperation) error + getFunc func(suffix string) ([]*operation.AnchoredOperation, error) +} + +func (m *mockOperationStore) Put(ops []*operation.AnchoredOperation) error { + if m.putFunc != nil { + return m.putFunc(ops) + } + + return nil +} + +func (m *mockOperationStore) Get(suffix string) ([]*operation.AnchoredOperation, error) { + if m.getFunc != nil { + return m.getFunc(suffix) + } + + return nil, nil +} + +type mockTxnOpsProvider struct { + err error +} + +func (m *mockTxnOpsProvider) GetTxnOperations(txn *txn.SidetreeTxn) ([]*operation.AnchoredOperation, error) { + if m.err != nil { + return nil, m.err + } + + op := &operation.AnchoredOperation{ + UniqueSuffix: "abc", + } + + return []*operation.AnchoredOperation{op}, nil +} diff --git a/method/sidetreelongform/sidetree-core/patch/patch.go b/method/sidetreelongform/sidetree-core/patch/patch.go new file mode 100644 index 0000000..88b6b5f --- /dev/null +++ b/method/sidetreelongform/sidetree-core/patch/patch.go @@ -0,0 +1,459 @@ +/* +Copyright SecureKey Technologies Inc. All Rights Reserved. + +SPDX-License-Identifier: Apache-2.0 +*/ + +package patch + +import ( + "encoding/json" + "errors" + "fmt" + "sort" + "strings" + + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/document" + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/docutil" +) + +const jsonPatchAddTemplate = `{ "op": "add", "path": "/%s", "value": %s }` + +// Action defines action of document patch. +type Action string + +const ( + + // Replace captures enum value "replace". + Replace Action = "replace" + + // AddPublicKeys captures enum value "add-public-keys". + AddPublicKeys Action = "add-public-keys" + + // RemovePublicKeys captures enum value "remove-public-keys". + RemovePublicKeys Action = "remove-public-keys" + + // AddServiceEndpoints captures "add-services". + AddServiceEndpoints Action = "add-services" + + // RemoveServiceEndpoints captures "remove-services". + RemoveServiceEndpoints Action = "remove-services" + + // JSONPatch captures enum value "json-patch". + JSONPatch Action = "ietf-json-patch" + + // AddAlsoKnownAs captures "add-also-known-as". + AddAlsoKnownAs Action = "add-also-known-as" + + // RemoveAlsoKnownAs captures "remove-also-known-as". + RemoveAlsoKnownAs Action = "remove-also-known-as" +) + +// Key defines key that will be used to get document patch information. +type Key string + +const ( + + // DocumentKey captures "document" key. + DocumentKey Key = "document" + + // PatchesKey captures "patches" key. + PatchesKey Key = "patches" + + // PublicKeys captures "publicKeys" key. + PublicKeys Key = "publicKeys" + + // ServicesKey captures "services" key. + ServicesKey Key = "services" + + // IdsKey captures "ids" key. + IdsKey Key = "ids" + + // ActionKey captures "action" key. + ActionKey Key = "action" + + // UrisKey captures "uris" key. 
+ UrisKey Key = "uris" +) + +//nolint:gochecknoglobals +var actionConfig = map[Action]Key{ + AddPublicKeys: PublicKeys, + RemovePublicKeys: IdsKey, + AddServiceEndpoints: ServicesKey, + RemoveServiceEndpoints: IdsKey, + JSONPatch: PatchesKey, + Replace: DocumentKey, + AddAlsoKnownAs: UrisKey, + RemoveAlsoKnownAs: UrisKey, +} + +// Patch defines generic patch structure. +type Patch map[Key]interface{} + +// PatchesFromDocument creates patches from opaque document. +func PatchesFromDocument(doc string) ([]Patch, error) { //nolint:gocyclo + parsed, err := document.FromBytes([]byte(doc)) + if err != nil { + return nil, err + } + + if err := validateDocument(parsed); err != nil { + return nil, err + } + + var ( + docPatches []Patch + jsonPatches []string + ) + + for _, key := range sortedKeys(parsed) { + jsonBytes, err := json.Marshal(parsed[key]) + if err != nil { + return nil, err + } + + var docPatch Patch + + switch key { + case document.PublicKeyProperty: + docPatch, err = NewAddPublicKeysPatch(string(jsonBytes)) + case document.ServiceProperty: + docPatch, err = NewAddServiceEndpointsPatch(string(jsonBytes)) + case document.AlsoKnownAs: + docPatch, err = NewAddAlsoKnownAs(string(jsonBytes)) + default: + jsonPatches = append(jsonPatches, fmt.Sprintf(jsonPatchAddTemplate, key, string(jsonBytes))) + } + + if err != nil { + return nil, err + } + + if docPatch != nil { + docPatches = append(docPatches, docPatch) + } + } + + if len(jsonPatches) > 0 { + combinedJSONPatch, err := NewJSONPatch(fmt.Sprintf("[%s]", strings.Join(jsonPatches, ","))) + if err != nil { + return nil, err + } + + docPatches = append(docPatches, combinedJSONPatch) + } + + return docPatches, nil +} + +// NewReplacePatch creates new replace patch. +func NewReplacePatch(doc string) (Patch, error) { + parsed, err := document.ReplaceDocumentFromBytes([]byte(doc)) + if err != nil { + return nil, err + } + + if err := validateReplaceDocument(parsed); err != nil { + return nil, err + } + + patch := make(Patch) + patch[ActionKey] = Replace + patch[DocumentKey] = parsed.JSONLdObject() + + return patch, nil +} + +// NewJSONPatch creates new generic update patch (will be used for generic updates). +func NewJSONPatch(patches string) (Patch, error) { + var generic []interface{} + + err := json.Unmarshal([]byte(patches), &generic) + if err != nil { + return nil, err + } + + patch := make(Patch) + patch[ActionKey] = JSONPatch + patch[PatchesKey] = generic + + return patch, nil +} + +// NewAddPublicKeysPatch creates new patch for adding public keys. +func NewAddPublicKeysPatch(publicKeys string) (Patch, error) { + pubKeys, err := getPublicKeys(publicKeys) + if err != nil { + return nil, err + } + + patch := make(Patch) + patch[ActionKey] = AddPublicKeys + patch[PublicKeys] = pubKeys + + return patch, nil +} + +// NewRemovePublicKeysPatch creates new patch for removing public keys. +func NewRemovePublicKeysPatch(publicKeyIds string) (Patch, error) { + ids, err := getStringArray(publicKeyIds) + if err != nil { + return nil, fmt.Errorf("public key ids not string array: %s", err.Error()) + } + + if len(ids) == 0 { + return nil, errors.New("missing public key ids") + } + + patch := make(Patch) + patch[ActionKey] = RemovePublicKeys + patch[IdsKey] = getGenericArray(ids) + + return patch, nil +} + +// NewAddServiceEndpointsPatch creates new patch for adding service endpoints. 
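+// A minimal usage sketch (the service values are illustrative only):
+//
+//	p, err := NewAddServiceEndpointsPatch(`[{"id":"svc1","type":"SecureDataStore","serviceEndpoint":"http://hub.example.com"}]`)
+//	if err == nil {
+//		_ = p[ServicesKey] // parsed services; p[ActionKey] == AddServiceEndpoints
+//	}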
+func NewAddServiceEndpointsPatch(serviceEndpoints string) (Patch, error) { + services, err := getServices(serviceEndpoints) + if err != nil { + return nil, err + } + + patch := make(Patch) + patch[ActionKey] = AddServiceEndpoints + patch[ServicesKey] = services + + return patch, nil +} + +// NewRemoveServiceEndpointsPatch creates new patch for removing service endpoints. +func NewRemoveServiceEndpointsPatch(serviceEndpointIds string) (Patch, error) { + ids, err := getStringArray(serviceEndpointIds) + if err != nil { + return nil, fmt.Errorf("service ids not string array: %s", err.Error()) + } + + if len(ids) == 0 { + return nil, errors.New("missing service ids") + } + + patch := make(Patch) + patch[ActionKey] = RemoveServiceEndpoints + patch[IdsKey] = getGenericArray(ids) + + return patch, nil +} + +// NewAddAlsoKnownAs creates new patch for adding also-known-as property. +func NewAddAlsoKnownAs(uris string) (Patch, error) { + urisToAdd, err := getStringArray(uris) + if err != nil { + return nil, fmt.Errorf("also known as uris is not string array: %s", err.Error()) + } + + if len(urisToAdd) == 0 { + return nil, errors.New("missing also known as uris") + } + + patch := make(Patch) + patch[ActionKey] = AddAlsoKnownAs + patch[UrisKey] = getGenericArray(urisToAdd) + + return patch, nil +} + +// NewRemoveAlsoKnownAs creates new patch for removing also-known-as URI. +func NewRemoveAlsoKnownAs(uris string) (Patch, error) { + urisToRemove, err := getStringArray(uris) + if err != nil { + return nil, fmt.Errorf("also known as uris is not string array: %s", err.Error()) + } + + if len(urisToRemove) == 0 { + return nil, errors.New("missing also known as uris") + } + + patch := make(Patch) + patch[ActionKey] = RemoveAlsoKnownAs + patch[UrisKey] = getGenericArray(urisToRemove) + + return patch, nil +} + +// GetValue returns patch value. +func (p Patch) GetValue() (interface{}, error) { + action, err := p.GetAction() + if err != nil { + return nil, err + } + + valueKey, ok := actionConfig[action] + if !ok { + return nil, fmt.Errorf("action '%s' is not supported", action) + } + + entry, ok := p[valueKey] + if !ok { + return nil, fmt.Errorf("%s patch is missing key: %s", action, valueKey) + } + + return entry, nil +} + +// GetAction returns string value for specified key or "" if not found or wrong type. +func (p Patch) GetAction() (Action, error) { + entry, ok := p[ActionKey] + if !ok { + return "", fmt.Errorf("patch is missing %s key", ActionKey) + } + + var action Action + switch v := entry.(type) { + case Action: + action = v + case string: + action = Action(v) + default: + return "", fmt.Errorf("action type not supported: %s", v) + } + + _, ok = actionConfig[action] + if !ok { + return "", fmt.Errorf("action '%s' is not supported", action) + } + + return action, nil +} + +// Bytes returns byte representation of patch. +func (p Patch) Bytes() ([]byte, error) { + return docutil.MarshalCanonical(p) +} + +// JSONLdObject returns map that represents JSON LD Object. +func (p Patch) JSONLdObject() map[Key]interface{} { + return p +} + +// FromBytes parses provided data into document patch. 
+func FromBytes(data []byte) (Patch, error) { + patch := make(Patch) + + err := json.Unmarshal(data, &patch) + if err != nil { + return nil, err + } + + _, err = patch.GetAction() + if err != nil { + return nil, err + } + + _, err = patch.GetValue() + if err != nil { + return nil, err + } + + return patch, nil +} + +func stringEntry(entry interface{}) string { + if entry == nil { + return "" + } + + id, ok := entry.(string) + if !ok { + return "" + } + + return id +} + +func validateReplaceDocument(doc document.ReplaceDocument) error { + allowedKeys := []string{document.ReplaceServiceProperty, document.ReplacePublicKeyProperty} + + for key := range doc { + if !contains(allowedKeys, key) { + return fmt.Errorf("key '%s' is not allowed in replace document", key) + } + } + + return nil +} + +func contains(keys []string, key string) bool { + for _, k := range keys { + if k == key { + return true + } + } + + return false +} + +func validateDocument(doc document.Document) error { + if doc.ID() != "" { + return errors.New("document must NOT have the id property") + } + + return nil +} + +func getPublicKeys(publicKeys string) (interface{}, error) { + // create an empty did document with public keys + pkDoc, err := document.DidDocumentFromBytes([]byte(fmt.Sprintf(`{%q:%s}`, document.PublicKeyProperty, publicKeys))) + if err != nil { + return nil, fmt.Errorf("public keys invalid: %s", err.Error()) + } + + return pkDoc[document.PublicKeyProperty], nil +} + +func getServices(serviceEndpoints string) (interface{}, error) { + // create an empty did document with service endpoints + svcDocStr := fmt.Sprintf(`{%q:%s}`, document.ServiceProperty, serviceEndpoints) + + svcDoc, err := document.DidDocumentFromBytes([]byte(svcDocStr)) + if err != nil { + return nil, fmt.Errorf("services invalid: %s", err.Error()) + } + + return svcDoc[document.ServiceProperty], nil +} + +func getStringArray(arr string) ([]string, error) { + var values []string + + err := json.Unmarshal([]byte(arr), &values) + if err != nil { + return nil, err + } + + return values, nil +} + +func getGenericArray(arr []string) []interface{} { + var values []interface{} + for _, v := range arr { + values = append(values, v) + } + + return values +} + +func sortedKeys(m map[string]interface{}) []string { + keys := make([]string, len(m)) + + i := 0 + + for k := range m { + keys[i] = k + i++ + } + + sort.Strings(keys) + + return keys +} diff --git a/method/sidetreelongform/sidetree-core/patch/patch_test.go b/method/sidetreelongform/sidetree-core/patch/patch_test.go new file mode 100644 index 0000000..9bbef2b --- /dev/null +++ b/method/sidetreelongform/sidetree-core/patch/patch_test.go @@ -0,0 +1,708 @@ +/* +Copyright SecureKey Technologies Inc. All Rights Reserved. 
+
+SPDX-License-Identifier: Apache-2.0
+*/
+
+package patch
+
+import (
+	"testing"
+
+	"github.com/stretchr/testify/require"
+
+	"github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/document"
+)
+
+func TestFromBytes(t *testing.T) {
+	t.Run("success", func(t *testing.T) {
+		patch, err := FromBytes([]byte(addPublicKeysPatch))
+		require.NoError(t, err)
+		require.NotNil(t, patch)
+
+		action, err := patch.GetAction()
+		require.NoError(t, err)
+		require.Equal(t, action, AddPublicKeys)
+
+		value, err := patch.GetValue()
+		require.NoError(t, err)
+		require.NotEmpty(t, value)
+		require.Equal(t, value, patch[PublicKeys])
+
+		bytes, err := patch.Bytes()
+		require.NoError(t, err)
+		require.NotEmpty(t, bytes)
+
+		jsonld := patch.JSONLdObject()
+		require.NotNil(t, jsonld)
+	})
+	t.Run("parse error - invalid character", func(t *testing.T) {
+		patch, err := FromBytes([]byte("[test : 123]"))
+		require.Error(t, err)
+		require.Nil(t, patch)
+		require.Contains(t, err.Error(), "invalid character")
+	})
+}
+
+func TestActionValidation(t *testing.T) {
+	t.Run("error - missing action", func(t *testing.T) {
+		patch, err := FromBytes([]byte(`{}`))
+		require.Error(t, err)
+		require.Nil(t, patch)
+		require.Contains(t, err.Error(), "patch is missing action key")
+	})
+	t.Run("error - action not supported", func(t *testing.T) {
+		patch, err := FromBytes([]byte(`{"action": "invalid"}`))
+		require.Error(t, err)
+		require.Nil(t, patch)
+		require.Equal(t, err.Error(), "action 'invalid' is not supported")
+	})
+	t.Run("error - action type not supported", func(t *testing.T) {
+		patch, err := FromBytes([]byte(`{"action": 0}`))
+		require.Error(t, err)
+		require.Nil(t, patch)
+		require.Contains(t, err.Error(), "action type not supported")
+	})
+}
+
+func TestPatchesFromDocument(t *testing.T) {
+	t.Run("success from new", func(t *testing.T) {
+		patches, err := PatchesFromDocument(testDoc)
+		require.NoError(t, err)
+		require.Equal(t, 3, len(patches))
+	})
+	t.Run("success from new with also known as", func(t *testing.T) {
+		patches, err := PatchesFromDocument(testDocWithAlsoKnownAs)
+		require.NoError(t, err)
+		require.Equal(t, 2, len(patches))
+	})
+	t.Run("error from new due to invalid uris format", func(t *testing.T) {
+		patches, err := PatchesFromDocument(testDocWithInvalidAlsoKnownAs)
+		require.Error(t, err)
+		require.Nil(t, patches)
+		require.Contains(t, err.Error(), "also known as uris is not string array")
+	})
+	t.Run("error - invalid json", func(t *testing.T) {
+		p, err := PatchesFromDocument(`invalid`)
+		require.Error(t, err)
+		require.Nil(t, p)
+		require.Contains(t, err.Error(), "invalid character")
+	})
+	t.Run("error - document has id", func(t *testing.T) {
+		p, err := PatchesFromDocument(`{"id": "abc"}`)
+		require.Error(t, err)
+		require.Nil(t, p)
+		require.Contains(t, err.Error(), "document must NOT have the id property")
+	})
+	t.Run("patches array is always in the same order", func(t *testing.T) {
+		var prev []Patch
+
+		for i := 1; i <= 100; i++ {
+			patches, err := PatchesFromDocument(testDoc)
+			require.NoError(t, err)
+
+			if prev != nil {
+				require.Equalf(t, prev, patches, "expecting the patches array to be in the same order")
+			}
+
+			prev = patches
+		}
+	})
+}
+
+func TestReplacePatch(t *testing.T) {
+	t.Run("success from bytes", func(t *testing.T) {
+		patch, err := 
FromBytes([]byte(replacePatch)) + require.NoError(t, err) + require.NotNil(t, patch) + + action, err := patch.GetAction() + require.NoError(t, err) + require.Equal(t, action, Replace) + + value, err := patch.GetValue() + require.NoError(t, err) + require.NotEmpty(t, value) + require.Equal(t, value, patch[DocumentKey]) + }) + t.Run("missing document", func(t *testing.T) { + patch, err := FromBytes([]byte(`{"action": "replace"}`)) + require.Error(t, err) + require.Nil(t, patch) + require.Contains(t, err.Error(), "replace patch is missing key: document") + }) + t.Run("success from new", func(t *testing.T) { + doc, err := document.FromBytes([]byte(replaceDoc)) + require.NoError(t, err) + + p, err := NewReplacePatch(replaceDoc) + require.NoError(t, err) + require.NotNil(t, p) + + action, err := p.GetAction() + require.NoError(t, err) + require.Equal(t, action, Replace) + + value, err := p.GetValue() + require.NoError(t, err) + require.Equal(t, value, doc.JSONLdObject()) + }) + t.Run("error - invalid json", func(t *testing.T) { + p, err := NewReplacePatch(`invalid`) + require.Error(t, err) + require.Nil(t, p) + require.Contains(t, err.Error(), "invalid character") + }) + t.Run("error - document has invalid property", func(t *testing.T) { + p, err := NewReplacePatch(`{"id": "abc"}`) + require.Error(t, err) + require.Nil(t, p) + require.Contains(t, err.Error(), "key 'id' is not allowed in replace document") + }) +} + +func TestIETFPatch(t *testing.T) { + t.Run("success", func(t *testing.T) { + patch, err := FromBytes([]byte(ietfPatch)) + require.NoError(t, err) + require.NotNil(t, patch) + + action, err := patch.GetAction() + require.NoError(t, err) + require.Equal(t, action, JSONPatch) + + value, err := patch.GetValue() + require.NoError(t, err) + require.NotEmpty(t, value) + require.Equal(t, value, patch[PatchesKey]) + }) + t.Run("missing patches", func(t *testing.T) { + patch, err := FromBytes([]byte(`{"action": "ietf-json-patch"}`)) + require.Error(t, err) + require.Nil(t, patch) + require.Contains(t, err.Error(), "ietf-json-patch patch is missing key: patches") + }) + t.Run("success from new", func(t *testing.T) { + p, err := NewJSONPatch(patches) + require.NoError(t, err) + require.NotNil(t, p) + + action, err := p.GetAction() + require.NoError(t, err) + require.Equal(t, action, JSONPatch) + + value, err := p.GetValue() + require.NoError(t, err) + require.NotEmpty(t, value) + require.Equal(t, value, p[PatchesKey]) + }) +} + +func TestAddPublicKeysPatch(t *testing.T) { + t.Run("success", func(t *testing.T) { + patch, err := FromBytes([]byte(addPublicKeysPatch)) + require.NoError(t, err) + require.NotNil(t, patch) + + action, err := patch.GetAction() + require.NoError(t, err) + require.Equal(t, action, AddPublicKeys) + + value, err := patch.GetValue() + require.NoError(t, err) + require.NotEmpty(t, value) + require.Equal(t, value, patch[PublicKeys]) + }) + t.Run("missing public keys", func(t *testing.T) { + patch, err := FromBytes([]byte(`{"action": "add-public-keys"}`)) + require.Error(t, err) + require.Nil(t, patch) + require.Contains(t, err.Error(), "add-public-keys patch is missing key: publicKeys") + }) + t.Run("success from new", func(t *testing.T) { + p, err := NewAddPublicKeysPatch(testAddPublicKeys) + require.NoError(t, err) + require.NotNil(t, p) + + action, err := p.GetAction() + require.NoError(t, err) + require.Equal(t, action, AddPublicKeys) + + value, err := p.GetValue() + require.NoError(t, err) + require.NotEmpty(t, value) + require.Equal(t, value, p[PublicKeys]) + }) + 
t.Run("error - invalid string", func(t *testing.T) { + p, err := NewAddPublicKeysPatch("invalid-json") + require.Error(t, err) + require.Nil(t, p) + require.Contains(t, err.Error(), "public keys invalid: invalid character") + }) +} + +func TestRemovePublicKeysPatch(t *testing.T) { + t.Run("success", func(t *testing.T) { + patch, err := FromBytes([]byte(removePublicKeysPatch)) + require.NoError(t, err) + require.NotNil(t, patch) + + action, err := patch.GetAction() + require.NoError(t, err) + require.Equal(t, action, RemovePublicKeys) + + value, err := patch.GetValue() + require.NoError(t, err) + require.NotEmpty(t, value) + require.Equal(t, value, patch[IdsKey]) + }) + t.Run("missing public key ids", func(t *testing.T) { + patch, err := FromBytes([]byte(`{"action": "remove-public-keys"}`)) + require.Error(t, err) + require.Nil(t, patch) + require.Contains(t, err.Error(), "remove-public-keys patch is missing key: ids") + }) + t.Run("success from new", func(t *testing.T) { + const ids = `["key1", "key2"]` + p, err := NewRemovePublicKeysPatch(ids) + require.NoError(t, err) + require.NotNil(t, p) + + action, err := p.GetAction() + require.NoError(t, err) + require.Equal(t, action, RemovePublicKeys) + + value, err := p.GetValue() + require.NoError(t, err) + require.NotEmpty(t, value) + require.Equal(t, value, p[IdsKey]) + }) + t.Run("empty public key ids", func(t *testing.T) { + const ids = `[]` + p, err := NewRemovePublicKeysPatch(ids) + require.Error(t, err) + require.Nil(t, p) + require.Contains(t, err.Error(), "missing public key ids") + }) + t.Run("error - ids not string array", func(t *testing.T) { + const ids = `[0, 1]` + p, err := NewRemovePublicKeysPatch(ids) + require.Error(t, err) + require.Nil(t, p) + require.Contains(t, err.Error(), "cannot unmarshal") + }) +} + +func TestAddServiceEndpointsPatch(t *testing.T) { + t.Run("success", func(t *testing.T) { + patch, err := FromBytes([]byte(addServiceEndpoints)) + require.NoError(t, err) + require.NotNil(t, patch) + + action, err := patch.GetAction() + require.NoError(t, err) + require.Equal(t, action, AddServiceEndpoints) + + value, err := patch.GetValue() + require.NoError(t, err) + require.NotEmpty(t, value) + require.Equal(t, value, patch[ServicesKey]) + }) + t.Run("missing service endpoints", func(t *testing.T) { + patch, err := FromBytes([]byte(`{"action": "add-services"}`)) + require.Error(t, err) + require.Nil(t, patch) + require.Contains(t, err.Error(), "add-services patch is missing key: services") + }) + t.Run("success from new", func(t *testing.T) { + p, err := NewAddServiceEndpointsPatch(testAddServiceEndpoints) + require.NoError(t, err) + require.NotNil(t, p) + + action, err := p.GetAction() + require.NoError(t, err) + require.Equal(t, action, AddServiceEndpoints) + + value, err := p.GetValue() + require.NoError(t, err) + require.NotEmpty(t, value) + require.Equal(t, value, p[ServicesKey]) + }) + t.Run("error - not json", func(t *testing.T) { + p, err := NewAddServiceEndpointsPatch("not-json") + require.Error(t, err) + require.Nil(t, p) + require.Contains(t, err.Error(), "services invalid: invalid character") + }) +} + +func TestRemoveServiceEndpointsPatch(t *testing.T) { + t.Run("success", func(t *testing.T) { + p, err := FromBytes([]byte(removeServiceEndpoints)) + require.NoError(t, err) + require.NotNil(t, p) + + action, err := p.GetAction() + require.NoError(t, err) + require.Equal(t, action, RemoveServiceEndpoints) + + value, err := p.GetValue() + require.NoError(t, err) + require.NotEmpty(t, value) + require.Equal(t, 
value, p[IdsKey]) + }) + t.Run("missing public key ids", func(t *testing.T) { + patch, err := FromBytes([]byte(`{"action": "remove-services"}`)) + require.Error(t, err) + require.Nil(t, patch) + require.Contains(t, err.Error(), "remove-services patch is missing key: ids") + }) + t.Run("success from new", func(t *testing.T) { + const ids = `["svc1", "svc2"]` + p, err := NewRemoveServiceEndpointsPatch(ids) + require.NoError(t, err) + require.NotNil(t, p) + + action, err := p.GetAction() + require.NoError(t, err) + require.Equal(t, action, RemoveServiceEndpoints) + + value, err := p.GetValue() + require.NoError(t, err) + require.NotEmpty(t, value) + require.Equal(t, value, p[IdsKey]) + }) + t.Run("empty service ids", func(t *testing.T) { + const ids = `[]` + p, err := NewRemoveServiceEndpointsPatch(ids) + require.Error(t, err) + require.Nil(t, p) + require.Contains(t, err.Error(), "missing service ids") + }) + t.Run("error - ids not string array", func(t *testing.T) { + const ids = `[0, 1]` + p, err := NewRemoveServiceEndpointsPatch(ids) + require.Error(t, err) + require.Nil(t, p) + require.Contains(t, err.Error(), "cannot unmarshal") + }) +} + +func TestAddAlsoKnownAsPatch(t *testing.T) { + t.Run("success", func(t *testing.T) { + patch, err := FromBytes([]byte(addAlsoKnownAs)) + require.NoError(t, err) + require.NotNil(t, patch) + + action, err := patch.GetAction() + require.NoError(t, err) + require.Equal(t, action, AddAlsoKnownAs) + + value, err := patch.GetValue() + require.NoError(t, err) + require.NotEmpty(t, value) + require.Equal(t, value, patch[UrisKey]) + }) + t.Run("missing URIs", func(t *testing.T) { + patch, err := FromBytes([]byte(`{"action": "add-also-known-as"}`)) + require.Error(t, err) + require.Nil(t, patch) + require.Contains(t, err.Error(), "add-also-known-as patch is missing key: uris") + }) + t.Run("success from new", func(t *testing.T) { + p, err := NewAddAlsoKnownAs(`["testURI"]`) + require.NoError(t, err) + require.NotNil(t, p) + + action, err := p.GetAction() + require.NoError(t, err) + require.Equal(t, action, AddAlsoKnownAs) + + value, err := p.GetValue() + require.NoError(t, err) + require.NotEmpty(t, value) + require.Equal(t, value, p[UrisKey]) + }) + t.Run("error - empty", func(t *testing.T) { + p, err := NewAddAlsoKnownAs("[]") + require.Error(t, err) + require.Nil(t, p) + require.Contains(t, err.Error(), "missing also known as uris") + }) + t.Run("error - not json", func(t *testing.T) { + p, err := NewAddAlsoKnownAs("not-json") + require.Error(t, err) + require.Nil(t, p) + require.Contains(t, err.Error(), "also known as uris is not string array") + }) +} + +func TestRemoveAlsoKnownAsPatch(t *testing.T) { + t.Run("success", func(t *testing.T) { + p, err := FromBytes([]byte(removeAlsoKnownAs)) + require.NoError(t, err) + require.NotNil(t, p) + + action, err := p.GetAction() + require.NoError(t, err) + require.Equal(t, action, RemoveAlsoKnownAs) + + value, err := p.GetValue() + require.NoError(t, err) + require.NotEmpty(t, value) + require.Equal(t, value, p[UrisKey]) + }) + t.Run("missing public key ids", func(t *testing.T) { + patch, err := FromBytes([]byte(`{"action": "remove-also-known-as"}`)) + require.Error(t, err) + require.Nil(t, patch) + require.Contains(t, err.Error(), "remove-also-known-as patch is missing key: uris") + }) + t.Run("success from new", func(t *testing.T) { + const uris = `["identity1", "identity2"]` + p, err := NewRemoveAlsoKnownAs(uris) + require.NoError(t, err) + require.NotNil(t, p) + + action, err := p.GetAction() + 
require.NoError(t, err) + require.Equal(t, action, RemoveAlsoKnownAs) + + value, err := p.GetValue() + require.NoError(t, err) + require.NotEmpty(t, value) + require.Equal(t, value, p[UrisKey]) + }) + t.Run("empty uris", func(t *testing.T) { + const uris = `[]` + p, err := NewRemoveAlsoKnownAs(uris) + require.Error(t, err) + require.Nil(t, p) + require.Contains(t, err.Error(), "missing also known as uris") + }) + t.Run("error - uris not string array", func(t *testing.T) { + const uris = `[0, 1]` + p, err := NewRemoveAlsoKnownAs(uris) + require.Error(t, err) + require.Nil(t, p) + require.Contains(t, err.Error(), "cannot unmarshal") + }) +} + +func TestBytes(t *testing.T) { + t.Run("success", func(t *testing.T) { + original, err := FromBytes([]byte(addPublicKeysPatch)) + require.NoError(t, err) + require.NotNil(t, original) + + bytes, err := original.Bytes() + require.NoError(t, err) + require.NotEmpty(t, bytes) + + patch, err := FromBytes(bytes) + require.NoError(t, err) + require.Equal(t, original, patch) + }) + t.Run("error from bytes", func(t *testing.T) { + patch := Patch{} + patch["test"] = make(chan int) + + bytes, err := patch.Bytes() + require.NotNil(t, err) + require.Nil(t, bytes) + require.Contains(t, err.Error(), "json: unsupported type: chan int") + }) +} + +func TestStringEntry(t *testing.T) { + t.Run("success", func(t *testing.T) { + str := stringEntry([]string{"hello"}) + require.Empty(t, str) + + str = stringEntry("hello") + require.Equal(t, "hello", str) + }) +} + +const ietfPatch = `{ + "action": "ietf-json-patch", + "patches": [{ + "op": "replace", + "path": "/name", + "value": "value" + }] +}` + +const patches = `[ + { + "op": "replace", + "path": "/some/object/0", + "value": "value" + } +]` + +const addPublicKeysPatch = `{ + "action": "add-public-keys", + "publicKeys": [{ + "id": "key1", + "type": "JsonWebKey2020", + "purposes": ["assertionMethod"], + "publicKeyJwk": { + "kty": "EC", + "crv": "P-256K", + "x": "PUymIqdtF_qxaAqPABSw-C-owT1KYYQbsMKFM-L9fJA", + "y": "nM84jDHCMOTGTh_ZdHq4dBBdo4Z5PkEOW9jA8z8IsGc" + } + }] +}` + +const testAddPublicKeys = `[{ + "id": "key1", + "type": "JsonWebKey2020", + "publicKeyJwk": { + "kty": "EC", + "crv": "P-256K", + "x": "PUymIqdtF_qxaAqPABSw-C-owT1KYYQbsMKFM-L9fJA", + "y": "nM84jDHCMOTGTh_ZdHq4dBBdo4Z5PkEOW9jA8z8IsGc" + } + }]` + +const removePublicKeysPatch = `{ + "action": "remove-public-keys", + "ids": ["key1", "key2"] +}` + +const addServiceEndpoints = `{ + "action": "add-services", + "services": [ + { + "id": "sds1", + "type": "SecureDataStore", + "serviceEndpoint": "http://hub.my-personal-server.com" + }, + { + "id": "sds2", + "type": "SecureDataStore", + "serviceEndpoint": "http://some-cloud.com/hub" + } + ] +}` + +const testAddServiceEndpoints = `[ + { + "id": "sds1", + "type": "SecureDataStore", + "serviceEndpoint": "http://hub.my-personal-server.com" + }, + { + "id": "sds2", + "type": "SecureDataStore", + "serviceEndpoint": "http://some-cloud.com/hub" + } + ]` + +const removeServiceEndpoints = `{ + "action": "remove-services", + "ids": ["sds1", "sds2"] +}` + +const testDoc = `{ + "publicKey": [{ + "id": "key1", + "type": "JsonWebKey2020", + "purposes": ["authentication"], + "publicKeyJwk": { + "kty": "EC", + "crv": "P-256K", + "x": "PUymIqdtF_qxaAqPABSw-C-owT1KYYQbsMKFM-L9fJA", + "y": "nM84jDHCMOTGTh_ZdHq4dBBdo4Z5PkEOW9jA8z8IsGc" + } + }], + "service": [{ + "id":"vcs", + "type": "VerifiableCredentialService", + "serviceEndpoint": "https://example.com/vc/" + }], + "test": "test", + "other": "value" +}` + +const replacePatch = 
`{ + "action": "replace", + "document": { + "publicKeys": [ + { + "id": "key-1", + "purposes": ["authentication"], + "type": "EcdsaSecp256k1VerificationKey2019", + "publicKeyJwk": { + "kty": "EC", + "crv": "P-256K", + "x": "PUymIqdtF_qxaAqPABSw-C-owT1KYYQbsMKFM-L9fJA", + "y": "nM84jDHCMOTGTh_ZdHq4dBBdo4Z5PkEOW9jA8z8IsGc" + } + }], + "services": [ + { + "id": "sds3", + "type": "SecureDataStore", + "serviceEndpoint": "http://hub.my-personal-server.com" + }] + } +}` + +const replaceDoc = `{ + "publicKeys": [ + { + "id": "key-1", + "purposes": ["authentication"], + "type": "EcdsaSecp256k1VerificationKey2019", + "publicKeyJwk": { + "kty": "EC", + "crv": "P-256K", + "x": "PUymIqdtF_qxaAqPABSw-C-owT1KYYQbsMKFM-L9fJA", + "y": "nM84jDHCMOTGTh_ZdHq4dBBdo4Z5PkEOW9jA8z8IsGc" + } + }], + "services": [ + { + "id": "sds3", + "type": "SecureDataStore", + "serviceEndpoint": "http://hub.my-personal-server.com" + }] +}` + +const addAlsoKnownAs = `{ + "action": "add-also-known-as", + "uris": ["testURI"] +}` + +const removeAlsoKnownAs = `{ + "action": "remove-also-known-as", + "uris": ["testURI", "nonExistentURI"] +}` + +const testDocWithAlsoKnownAs = `{ + "alsoKnownAs": ["authentication"], + "publicKey": [{ + "id": "key1", + "type": "JsonWebKey2020", + "purposes": ["authentication"], + "publicKeyJwk": { + "kty": "EC", + "crv": "P-256K", + "x": "PUymIqdtF_qxaAqPABSw-C-owT1KYYQbsMKFM-L9fJA", + "y": "nM84jDHCMOTGTh_ZdHq4dBBdo4Z5PkEOW9jA8z8IsGc" + } + }] +}` + +const testDocWithInvalidAlsoKnownAs = `{ + "alsoKnownAs": [123] +}` diff --git a/method/sidetreelongform/sidetree-core/processor/processor.go b/method/sidetreelongform/sidetree-core/processor/processor.go new file mode 100644 index 0000000..ee86a4a --- /dev/null +++ b/method/sidetreelongform/sidetree-core/processor/processor.go @@ -0,0 +1,585 @@ +/* +Copyright SecureKey Technologies Inc. All Rights Reserved. + +SPDX-License-Identifier: Apache-2.0 +*/ + +package processor + +import ( + "errors" + "fmt" + "sort" + "strings" + "time" + + "github.com/trustbloc/logutil-go/pkg/log" + + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/api/operation" + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/api/protocol" + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/commitment" + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/document" + logfields "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/internal/log" +) + +const loggerModule = "sidetree-core-processor" + +// OperationProcessor will process document operations in chronological order and create final +// document during resolution. +// It uses operation store client to retrieve all operations that are related to requested document. +type OperationProcessor struct { + store OperationStoreClient + pc protocol.Client + + unpublishedOperationStore unpublishedOperationStore + logger *log.Log +} + +// OperationStoreClient defines interface for retrieving all operations related to document. +type OperationStoreClient interface { + // Get retrieves all operations related to document + Get(uniqueSuffix string) ([]*operation.AnchoredOperation, error) +} + +type unpublishedOperationStore interface { + // Get retrieves unpublished operation related to document, we can have only one unpublished operation. + Get(uniqueSuffix string) ([]*operation.AnchoredOperation, error) +} + +// New returns new operation processor with the given name. (Note that name is only used for logging.) 
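+// A minimal wiring sketch (opStore and protocolClient are assumed to satisfy
+// OperationStoreClient and protocol.Client respectively):
+//
+//	p := New("did-method", opStore, protocolClient)
+//	result, err := p.Resolve("abc123")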
+func New(name string, store OperationStoreClient, pc protocol.Client, opts ...Option) *OperationProcessor {
+	op := &OperationProcessor{
+		store:                     store,
+		pc:                        pc,
+		unpublishedOperationStore: &noopUnpublishedOpsStore{},
+		logger:                    log.New(loggerModule, log.WithFields(logfields.WithNamespace(name))),
+	}
+
+	// apply options
+	for _, opt := range opts {
+		opt(op)
+	}
+
+	return op
+}
+
+// Option is an option for operation processor.
+type Option func(opts *OperationProcessor)
+
+// WithUnpublishedOperationStore sets the store that is used for unpublished operations.
+func WithUnpublishedOperationStore(store unpublishedOperationStore) Option {
+	return func(opts *OperationProcessor) {
+		opts.unpublishedOperationStore = store
+	}
+}
+
+// Resolve document based on the given unique suffix.
+// Parameters:
+// uniqueSuffix - unique portion of the ID to resolve, for example "abc123" in "did:sidetree:abc123".
+//nolint:funlen
+func (s *OperationProcessor) Resolve(uniqueSuffix string,
+	opts ...document.ResolutionOption) (*protocol.ResolutionModel, error) {
+	var unpublishedOps []*operation.AnchoredOperation
+
+	unpubOps, err := s.unpublishedOperationStore.Get(uniqueSuffix)
+	if err == nil {
+		s.logger.Debug("Found unpublished operations for unique suffix",
+			logfields.WithTotal(len(unpubOps)), logfields.WithSuffix(uniqueSuffix))
+
+		unpublishedOps = append(unpublishedOps, unpubOps...)
+	}
+
+	publishedOps, err := s.store.Get(uniqueSuffix)
+	if err != nil && !strings.Contains(err.Error(), "not found") {
+		return nil, err
+	}
+
+	publishedOps, unpublishedOps, filteredOps, err :=
+		s.processOperations(publishedOps, unpublishedOps, uniqueSuffix, opts...)
+	if err != nil {
+		return nil, err
+	}
+
+	// return all operations in the response - versionId is just a view of this information
+	rm := &protocol.ResolutionModel{PublishedOperations: publishedOps, UnpublishedOperations: unpublishedOps}
+
+	// split operations into 'create', 'update' and 'full' operations
+	createOps, updateOps, fullOps := splitOperations(filteredOps)
+	if len(createOps) == 0 {
+		return nil, fmt.Errorf("create operation not found")
+	}
+
+	// Ensure that all published 'create' operations are processed first (in case there are
+	// unpublished 'create' operations in the collection due to a race condition).
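+	// For illustration (values are made up): given createOps
+	// [{CanonicalReference: ""}, {CanonicalReference: "abc"}], the stable sort
+	// below moves the published operation first:
+	// [{CanonicalReference: "abc"}, {CanonicalReference: ""}].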
+	sort.SliceStable(createOps, func(i, j int) bool {
+		// published operations (with a canonical reference) sort ahead of unpublished ones
+		return createOps[i].CanonicalReference != "" && createOps[j].CanonicalReference == ""
+	})
+
+	// apply 'create' operations first
+	rm = s.applyFirstValidCreateOperation(createOps, rm)
+	if rm == nil {
+		return nil, errors.New("valid create operation not found")
+	}
+
+	// next, apply 'full' (recover/deactivate) operations
+	if len(fullOps) > 0 {
+		s.logger.Debug("Applying full operations", logfields.WithTotal(len(fullOps)),
+			logfields.WithSuffix(uniqueSuffix))
+
+		rm = s.applyOperations(fullOps, rm, getRecoveryCommitment)
+		if rm.Deactivated {
+			// document was deactivated, stop processing
+			return rm, nil
+		}
+	}
+
+	// next apply update ops since last 'full' transaction
+	filteredUpdateOps := getOpsWithTxnGreaterThanOrUnpublished(
+		updateOps, rm.LastOperationTransactionTime, rm.LastOperationTransactionNumber)
+	if len(filteredUpdateOps) > 0 {
+		s.logger.Debug("Applying update operations after last full operation",
+			logfields.WithTotal(len(filteredUpdateOps)),
+			logfields.WithSuffix(uniqueSuffix))
+
+		rm = s.applyOperations(filteredUpdateOps, rm, getUpdateCommitment)
+	}
+
+	return rm, nil
+}
+
+func (s *OperationProcessor) processOperations(
+	publishedOps []*operation.AnchoredOperation,
+	unpublishedOps []*operation.AnchoredOperation,
+	uniqueSuffix string,
+	opts ...document.ResolutionOption,
+) ([]*operation.AnchoredOperation, []*operation.AnchoredOperation, []*operation.AnchoredOperation, error) {
+	resOpts, err := document.GetResolutionOptions(opts...)
+	if err != nil {
+		return nil, nil, nil, err
+	}
+
+	pubOps, unpubOps, ops, err := s.applyResolutionOptions(uniqueSuffix, publishedOps, unpublishedOps, resOpts)
+	if err != nil {
+		return nil, nil, nil,
+			fmt.Errorf("failed to apply resolution options for document id[%s]: %s", uniqueSuffix, err.Error())
+	}
+
+	return pubOps, unpubOps, ops, nil
+}
+
+func (s *OperationProcessor) filterOps(ops []*operation.AnchoredOperation, opts document.ResolutionOptions,
+	uniqueSuffix string) ([]*operation.AnchoredOperation, error) {
+	if opts.VersionID != "" {
+		s.logger.Debug("Filtering operations for unique suffix by version", logfields.WithSuffix(uniqueSuffix),
+			logfields.WithVersion(opts.VersionID))
+
+		return filterOpsByVersionID(ops, opts.VersionID)
+	}
+
+	if opts.VersionTime != "" {
+		s.logger.Debug("Filtering operations for unique suffix by versionTime", logfields.WithSuffix(uniqueSuffix),
+			logfields.WithVersionTime(opts.VersionTime))
+
+		return filterOpsByVersionTime(ops, opts.VersionTime)
+	}
+
+	return ops, nil
+}
+
+func filterOpsByVersionID(ops []*operation.AnchoredOperation, versionID string,
+) ([]*operation.AnchoredOperation, error) {
+	for index, op := range ops {
+		if op.CanonicalReference == versionID {
+			return ops[:index+1], nil
+		}
+	}
+
+	return nil, fmt.Errorf("'%s' is not a valid versionId", versionID)
+}
+
+func filterOpsByVersionTime(ops []*operation.AnchoredOperation, timeStr string,
+) ([]*operation.AnchoredOperation, error) {
+	var filteredOps []*operation.AnchoredOperation
+
+	vt, err := time.Parse(time.RFC3339, timeStr)
+	if err != nil {
+		return nil, fmt.Errorf("failed to parse version time[%s]: %w", timeStr, err)
+	}
+
+	for _, op := range ops {
+		if op.TransactionTime <= uint64(vt.Unix()) {
+			filteredOps = append(filteredOps, op)
+		}
+	}
+
+	if len(filteredOps) == 0 {
+		return nil, fmt.Errorf("no operations found for version time %s", timeStr)
+	}
+
+	return filteredOps, nil
+}
+
+func (s *OperationProcessor) applyResolutionOptions(uniqueSuffix string, published,
+	unpublished []*operation.AnchoredOperation, opts 
document.ResolutionOptions, +) ([]*operation.AnchoredOperation, []*operation.AnchoredOperation, []*operation.AnchoredOperation, error) { + canonicalIds := getCanonicalMap(published) + + for _, op := range opts.AdditionalOperations { + if op.CanonicalReference == "" { + unpublished = append(unpublished, op) + } else if _, ok := canonicalIds[op.CanonicalReference]; !ok { + published = append(published, op) + } + } + + sortOperations(published) + sortOperations(unpublished) + + ops := append(published, unpublished...) //nolint:gocritic + + s.logger.Debug("Found operations for unique suffix", logfields.WithTotalOperations(len(ops)), + logfields.WithSuffix(uniqueSuffix), logfields.WithOperations(ops)) + + filteredOps, err := s.filterOps(ops, opts, uniqueSuffix) + if err != nil { + return nil, nil, nil, fmt.Errorf("failed to filter document id[%s] operations: %s", uniqueSuffix, err.Error()) + } + + if len(filteredOps) == len(ops) { + // base case : nothing got filtered + return published, unpublished, ops, nil + } + + var ( + filteredPublishedOps []*operation.AnchoredOperation + filteredUnpublishedOps []*operation.AnchoredOperation + ) + + for _, op := range filteredOps { + if op.CanonicalReference == "" { + filteredUnpublishedOps = append(filteredUnpublishedOps, op) + } else { + filteredPublishedOps = append(filteredPublishedOps, op) + } + } + + return filteredPublishedOps, filteredUnpublishedOps, filteredOps, nil +} + +func getCanonicalMap(published []*operation.AnchoredOperation) map[string]bool { + canonicalMap := make(map[string]bool) + + for _, op := range published { + canonicalMap[op.CanonicalReference] = true + } + + return canonicalMap +} + +func (s *OperationProcessor) createOperationHashMap(ops []*operation.AnchoredOperation, +) map[string][]*operation.AnchoredOperation { + opMap := make(map[string][]*operation.AnchoredOperation) + + for _, op := range ops { + rv, err := s.getRevealValue(op) + if err != nil { + s.logger.Info("Skipped bad operation while creating operation hash map", + logfields.WithSuffix(op.UniqueSuffix), + logfields.WithOperationType(string(op.Type)), logfields.WithTransactionTime(op.TransactionTime), + logfields.WithTransactionNumber(op.TransactionNumber), log.WithError(err)) + + continue + } + + c, err := commitment.GetCommitmentFromRevealValue(rv) + if err != nil { + s.logger.Info("Skipped calculating commitment while creating operation hash map", + logfields.WithSuffix(op.UniqueSuffix), + logfields.WithOperationType(string(op.Type)), logfields.WithTransactionTime(op.TransactionTime), + logfields.WithTransactionNumber(op.TransactionNumber), log.WithError(err)) + + continue + } + + opMap[c] = append(opMap[c], op) + } + + return opMap +} + +func splitOperations(ops []*operation.AnchoredOperation, +) (createOps, updateOps, fullOps []*operation.AnchoredOperation) { + for _, op := range ops { + switch op.Type { + case operation.TypeCreate: + createOps = append(createOps, op) + case operation.TypeUpdate: + updateOps = append(updateOps, op) + case operation.TypeRecover: + fullOps = append(fullOps, op) + case operation.TypeDeactivate: + fullOps = append(fullOps, op) + } + } + + return createOps, updateOps, fullOps +} + +func getOpsWithTxnGreaterThanOrUnpublished(ops []*operation.AnchoredOperation, txnTime, txnNumber uint64, +) []*operation.AnchoredOperation { + var selection []*operation.AnchoredOperation + + for _, op := range ops { + if isOpWithTxnGreaterThanOrUnpublished(op, txnTime, txnNumber) { + selection = append(selection, op) + } + } + + return selection +} + 
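+// isOpWithTxnGreaterThanOrUnpublished reports whether op is unpublished or was
+// anchored after the given transaction time/number. For example, with
+// txnTime=10 and txnNumber=2 (values illustrative):
+//
+//	{CanonicalReference: ""}                                             // true (unpublished)
+//	{CanonicalReference: "a", TransactionTime: 9}                        // false
+//	{CanonicalReference: "b", TransactionTime: 10, TransactionNumber: 3} // true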
+func isOpWithTxnGreaterThanOrUnpublished(op *operation.AnchoredOperation, txnTime, txnNumber uint64) bool { + if op.CanonicalReference == "" { + return true + } + + if op.TransactionTime < txnTime { + return false + } + + if op.TransactionTime > txnTime { + return true + } + + if op.TransactionNumber > txnNumber { + return true + } + + return false +} + +func (s *OperationProcessor) applyOperations(ops []*operation.AnchoredOperation, rm *protocol.ResolutionModel, + commitmentFnc fnc) *protocol.ResolutionModel { + // suffix for logging + uniqueSuffix := ops[0].UniqueSuffix + + state := rm + + opMap := s.createOperationHashMap(ops) + + // holds applied commitments + commitmentMap := make(map[string]bool) + + c := commitmentFnc(state) + + s.logger.Debug("Processing commitment", logfields.WithCommitment(c), logfields.WithSuffix(uniqueSuffix)) + + commitmentOps, ok := opMap[c] + for ok { + s.logger.Debug("Found operation(s) for commitment", logfields.WithTotal(len(commitmentOps)), + logfields.WithCommitment(c), logfields.WithSuffix(uniqueSuffix)) + + newState := s.applyFirstValidOperation(commitmentOps, state, c, commitmentMap) + + // can't find a valid operation to apply + if newState == nil { + s.logger.Info("Unable to apply valid operation for commitment", logfields.WithCommitment(c), + logfields.WithSuffixes(uniqueSuffix)) + + break + } + + // commitment has been processed successfully + commitmentMap[c] = true + state = newState + + s.logger.Debug("Successfully processed commitment", + logfields.WithCommitment(c), logfields.WithSuffix(uniqueSuffix)) + + // get next commitment to be processed + c = commitmentFnc(state) + + s.logger.Debug("Next commitment to process", + logfields.WithCommitment(c), logfields.WithSuffix(uniqueSuffix)) + + // stop if there is no next commitment + if c == "" { + return state + } + + commitmentOps, ok = opMap[c] + } + + if len(commitmentMap) != len(ops) { + s.logger.Debug("Number of commitments applied doesn't match number of operations", + logfields.WithTotalCommitments(len(commitmentMap)), logfields.WithTotalOperations(len(ops)), + logfields.WithSuffix(uniqueSuffix)) + } + + return state +} + +type fnc func(rm *protocol.ResolutionModel) string + +func getUpdateCommitment(rm *protocol.ResolutionModel) string { + return rm.UpdateCommitment +} + +func getRecoveryCommitment(rm *protocol.ResolutionModel) string { + return rm.RecoveryCommitment +} + +func (s *OperationProcessor) applyFirstValidCreateOperation(createOps []*operation.AnchoredOperation, + rm *protocol.ResolutionModel) *protocol.ResolutionModel { + for _, op := range createOps { + var ( + state *protocol.ResolutionModel + err error + ) + + if state, err = s.applyOperation(op, rm); err != nil { + s.logger.Info( + "Skipped bad operation", + logfields.WithSuffix(op.UniqueSuffix), + logfields.WithOperationType(string(op.Type)), + logfields.WithTransactionTime(op.TransactionTime), + logfields.WithTransactionNumber(op.TransactionNumber), + log.WithError(err)) + + continue + } + + s.logger.Debug( + "Applied create operation, recover commitment, update commitment which results in new document", + logfields.WithOperation(op), logfields.WithRecoveryCommitment(state.RecoveryCommitment), + logfields.WithUpdateCommitment(state.UpdateCommitment), logfields.WithDocument(state.Doc)) + + return state + } + + return nil +} + +// this function should be used for update, recover and deactivate operations (create is handled differently). 
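+// For example (sketch): with current update commitment C1, an update operation
+// whose reveal value hashes back to C1, and whose next commitment C2 has not
+// been used before, is applied; processing then continues with C2.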
+
+//nolint:funlen
+func (s *OperationProcessor) applyFirstValidOperation(ops []*operation.AnchoredOperation, rm *protocol.ResolutionModel,
+	currCommitment string, processedCommitments map[string]bool) *protocol.ResolutionModel {
+	for _, op := range ops {
+		var (
+			state *protocol.ResolutionModel
+			err   error
+		)
+
+		nextCommitment, err := s.getCommitment(op)
+		if err != nil {
+			s.logger.Info("Skipped bad operation",
+				logfields.WithSuffix(op.UniqueSuffix),
+				logfields.WithOperationType(string(op.Type)),
+				logfields.WithTransactionTime(op.TransactionTime),
+				logfields.WithTransactionNumber(op.TransactionNumber), log.WithError(err))
+
+			continue
+		}
+
+		if currCommitment == nextCommitment {
+			s.logger.Info(
+				"Skipped bad operation. Reason: operation commitment(key) equals next operation commitment(key)",
+				logfields.WithSuffix(op.UniqueSuffix),
+				logfields.WithOperationType(string(op.Type)),
+				logfields.WithTransactionTime(op.TransactionTime),
+				logfields.WithTransactionNumber(op.TransactionNumber))
+
+			continue
+		}
+
+		if nextCommitment != "" {
+			// for recovery and update operations check if next commitment has been used already; if so skip to next operation
+			_, processed := processedCommitments[nextCommitment]
+			if processed {
+				s.logger.Info("Skipped bad operation. Reason: next operation commitment(key) has already been used",
+					logfields.WithSuffix(op.UniqueSuffix), logfields.WithOperationType(string(op.Type)),
+					logfields.WithTransactionTime(op.TransactionTime), logfields.WithTransactionNumber(op.TransactionNumber))
+
+				continue
+			}
+		}
+
+		if state, err = s.applyOperation(op, rm); err != nil {
+			s.logger.Info("Skipped bad operation",
+				logfields.WithSuffix(op.UniqueSuffix),
+				logfields.WithOperationType(string(op.Type)),
+				logfields.WithTransactionTime(op.TransactionTime), logfields.WithTransactionNumber(op.TransactionNumber),
+				log.WithError(err))
+
+			continue
+		}
+
+		s.logger.Debug("Applied operation.",
+			logfields.WithOperation(op),
+			logfields.WithRecoveryCommitment(state.RecoveryCommitment),
+			logfields.WithUpdateCommitment(state.UpdateCommitment),
+			logfields.WithDeactivated(state.Deactivated), logfields.WithDocument(state.Doc))
+
+		return state
+	}
+
+	return nil
+}
+
+func (s *OperationProcessor) applyOperation(op *operation.AnchoredOperation,
+	rm *protocol.ResolutionModel) (*protocol.ResolutionModel, error) {
+	p, err := s.pc.Get(op.ProtocolVersion)
+	if err != nil {
+		return nil, fmt.Errorf("apply '%s' operation: %s", op.Type, err.Error())
+	}
+
+	return p.OperationApplier().Apply(op, rm)
+}
+
+func sortOperations(ops []*operation.AnchoredOperation) {
+	sort.Slice(ops, func(i, j int) bool {
+		// order by transaction time first, then by transaction number
+		if ops[i].TransactionTime != ops[j].TransactionTime {
+			return ops[i].TransactionTime < ops[j].TransactionTime
+		}
+
+		return ops[i].TransactionNumber < ops[j].TransactionNumber
+	})
+}
+
+func (s *OperationProcessor) getRevealValue(op *operation.AnchoredOperation) (string, error) {
+	if op.Type == operation.TypeCreate {
+		return "", errors.New("create operation doesn't have reveal value")
+	}
+
+	p, err := s.pc.Get(op.ProtocolVersion)
+	if err != nil {
+		return "", fmt.Errorf("get operation reveal value - retrieve protocol: %s", err.Error())
+	}
+
+	rv, err := p.OperationParser().GetRevealValue(op.OperationRequest)
+	if err != nil {
+		return "", fmt.Errorf("get operation reveal value from operation parser: %s", err.Error())
+	}
+
+	return rv, nil
+}
+
+func (s *OperationProcessor) getCommitment(op *operation.AnchoredOperation) (string, error) {
+	p, err := s.pc.Get(op.ProtocolVersion)
+	if err != nil {
+		return "", 
fmt.Errorf("get next operation commitment: %s", err.Error()) + } + + nextCommitment, err := p.OperationParser().GetCommitment(op.OperationRequest) + if err != nil { + return "", fmt.Errorf("get commitment from operation parser: %s", err.Error()) + } + + return nextCommitment, nil +} + +type noopUnpublishedOpsStore struct{} + +func (noop *noopUnpublishedOpsStore) Get(_ string) ([]*operation.AnchoredOperation, error) { + return nil, fmt.Errorf("not found") +} diff --git a/method/sidetreelongform/sidetree-core/processor/processor_test.go b/method/sidetreelongform/sidetree-core/processor/processor_test.go new file mode 100644 index 0000000..49eff78 --- /dev/null +++ b/method/sidetreelongform/sidetree-core/processor/processor_test.go @@ -0,0 +1,1568 @@ +/* +Copyright SecureKey Technologies Inc. All Rights Reserved. + +SPDX-License-Identifier: Apache-2.0 +*/ + +package processor + +import ( + "crypto/ecdsa" + "crypto/elliptic" + "crypto/rand" + "encoding/json" + "errors" + "fmt" + "strconv" + "testing" + "time" + + "github.com/stretchr/testify/require" + + "github.com/trustbloc/did-go/doc/json/canonicalizer" + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/api/operation" + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/api/protocol" + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/commitment" + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/document" + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/hashing" + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/internal/signutil" + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/mocks" + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/patch" + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/util/ecsigner" + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/util/pubkey" + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/versions/1_0/client" + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/versions/1_0/doccomposer" + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/versions/1_0/model" + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/versions/1_0/operationapplier" + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/versions/1_0/operationparser" +) + +const ( + sha2_256 = 18 + sha2_512 = 19 + + dummyUniqueSuffix = "dummy" + + defaultBlockNumber = 0 +) + +func TestResolve(t *testing.T) { + recoveryKey, err := ecdsa.GenerateKey(elliptic.P256(), rand.Reader) + require.NoError(t, err) + + updateKey, err := ecdsa.GenerateKey(elliptic.P256(), rand.Reader) + require.NoError(t, err) + + pc := newMockProtocolClient() + + t.Run("success", func(t *testing.T) { + store, uniqueSuffix := getDefaultStore(recoveryKey, updateKey) + op := New("test", store, pc) + + doc, err := op.Resolve(uniqueSuffix) + require.Nil(t, err) + require.NotNil(t, doc) + }) + + t.Run("success - with additional operations", func(t *testing.T) { + store, uniqueSuffix := getDefaultStore(recoveryKey, updateKey) + op := New("test", store, pc) + + additionalOps := []*operation.AnchoredOperation{ + {Type: operation.TypeUpdate}, // unpublished operation + {Type: operation.TypeUpdate, CanonicalReference: "abc"}, // published operation + } + + doc, err := op.Resolve(uniqueSuffix, document.WithAdditionalOperations(additionalOps)) + require.Nil(t, err) + require.NotNil(t, doc) + }) + + t.Run("success - with version id", 
func(t *testing.T) { + store, uniqueSuffix := getDefaultStore(recoveryKey, updateKey) + op := New("test", store, pc) + + now := uint64(time.Now().Unix()) + + additionalOps := []*operation.AnchoredOperation{ + { // unpublished operation + Type: operation.TypeUpdate, + TransactionTime: now, + }, + { // published operation + Type: operation.TypeUpdate, + CanonicalReference: "abc", + TransactionTime: now - 60, + }, + } + + doc, err := op.Resolve(uniqueSuffix, + document.WithAdditionalOperations(additionalOps), + document.WithVersionID("abc")) + require.NoError(t, err) + require.NotNil(t, doc) + require.Len(t, doc.PublishedOperations, 2) + require.Len(t, doc.UnpublishedOperations, 0) + }) + + t.Run("error - invalid version id", func(t *testing.T) { + store, uniqueSuffix := getDefaultStore(recoveryKey, updateKey) + op := New("test", store, pc) + + additionalOps := []*operation.AnchoredOperation{ + {Type: operation.TypeUpdate}, // unpublished operation + {Type: operation.TypeUpdate, CanonicalReference: "abc"}, // published operation + } + + doc, err := op.Resolve(uniqueSuffix, + document.WithAdditionalOperations(additionalOps), + document.WithVersionID("invalid")) + require.Error(t, err) + require.Nil(t, doc) + require.Contains(t, err.Error(), "'invalid' is not a valid versionId") + }) + + t.Run("success - with version time", func(t *testing.T) { + store, uniqueSuffix := getDefaultStore(recoveryKey, updateKey) + op := New("test", store, pc) + + now := uint64(time.Now().Unix()) + + nowStr := time.Now().UTC().Format(time.RFC3339) + + additionalOps := []*operation.AnchoredOperation{ + { // unpublished operation + Type: operation.TypeUpdate, + TransactionTime: now + 5, + }, + { // published operation + Type: operation.TypeUpdate, + CanonicalReference: "abc", + TransactionTime: now + 10, + }, + } + + doc, err := op.Resolve(uniqueSuffix, + document.WithAdditionalOperations(additionalOps), + document.WithVersionTime(nowStr)) + require.NoError(t, err) + require.NotNil(t, doc) + require.Len(t, doc.PublishedOperations, 1) + require.Len(t, doc.UnpublishedOperations, 0) + }) + + t.Run("success - with version time (includes unpublished)", func(t *testing.T) { + store, uniqueSuffix := getDefaultStore(recoveryKey, updateKey) + op := New("test", store, pc) + + now := uint64(time.Now().Unix()) + + nowStr := time.Now().UTC().Format(time.RFC3339) + + additionalOps := []*operation.AnchoredOperation{ + { // unpublished operation + Type: operation.TypeUpdate, + TransactionTime: now + 5, + }, + { // unpublished operation + Type: operation.TypeUpdate, + TransactionTime: now - 5, + }, + { // published operation + Type: operation.TypeUpdate, + CanonicalReference: "abc", + TransactionTime: now - 10, + }, + } + + doc, err := op.Resolve(uniqueSuffix, + document.WithAdditionalOperations(additionalOps), + document.WithVersionTime(nowStr)) + require.NoError(t, err) + require.NotNil(t, doc) + require.Len(t, doc.PublishedOperations, 2) + require.Len(t, doc.UnpublishedOperations, 1) + }) + + t.Run("success - no ops with version time", func(t *testing.T) { + store := mocks.NewMockOperationStore(nil) + op := New("test", store, pc) + + additionalOps := []*operation.AnchoredOperation{ + { // unpublished operation + Type: operation.TypeUpdate, + TransactionTime: uint64(time.Now().Unix()), + }, + { // published operation + Type: operation.TypeCreate, + CanonicalReference: "abc", + TransactionTime: uint64(time.Now().Unix() - 10), + }, + } + + doc, err := op.Resolve("uniqueSuffix", + document.WithAdditionalOperations(additionalOps), + 
document.WithVersionTime("2020-12-20T19:17:47Z")) + require.Error(t, err) + require.Nil(t, doc) + require.Contains(t, err.Error(), "no operations found for version time 2020-12-20T19:17:47Z") + }) + + t.Run("error - with invalid version time", func(t *testing.T) { + store, uniqueSuffix := getDefaultStore(recoveryKey, updateKey) + op := New("test", store, pc) + + additionalOps := []*operation.AnchoredOperation{ + {Type: operation.TypeUpdate}, // unpublished operation + {Type: operation.TypeUpdate, CanonicalReference: "abc"}, // published operation + } + + doc, err := op.Resolve(uniqueSuffix, + document.WithAdditionalOperations(additionalOps), + document.WithVersionTime("invalid")) + require.Error(t, err) + require.Nil(t, doc) + require.Contains(t, err.Error(), "failed to parse version time[invalid]") + }) + + t.Run("document not found error", func(t *testing.T) { + store, _ := getDefaultStore(recoveryKey, updateKey) + + op := New("test", store, pc) + doc, err := op.Resolve(dummyUniqueSuffix) + require.Nil(t, doc) + require.Error(t, err) + require.Equal(t, "create operation not found", err.Error()) + }) + + t.Run("store error", func(t *testing.T) { + testErr := errors.New("test store error") + store := mocks.NewMockOperationStore(testErr) + p := New("test", store, pc) + + doc, err := p.Resolve("suffix") + require.Nil(t, doc) + require.Error(t, err) + require.Equal(t, testErr, err) + }) + + t.Run("protocol error", func(t *testing.T) { + pcWithErr := mocks.NewMockProtocolClient() + pcWithErr.Versions = nil + + store, _ := getDefaultStore(recoveryKey, updateKey) + op := New("test", store, pcWithErr) + + createOp, err := getAnchoredCreateOperation(recoveryKey, updateKey) + require.NoError(t, err) + + doc, err := op.applyOperation(createOp, &protocol.ResolutionModel{}) + require.Nil(t, doc) + require.Error(t, err) + require.Contains(t, err.Error(), + "apply 'create' operation: protocol parameters are not defined for anchoring time") + }) + + t.Run("resolution error", func(t *testing.T) { + store := mocks.NewMockOperationStore(nil) + + createOp, err := getCreateOperation(recoveryKey, updateKey, defaultBlockNumber) + require.NoError(t, err) + + createOp.SuffixData = &model.SuffixDataModel{} + + err = store.Put(getAnchoredOperation(createOp, defaultBlockNumber)) + require.Nil(t, err) + + p := New("test", store, pc) + doc, err := p.Resolve(createOp.UniqueSuffix) + require.Error(t, err) + require.Nil(t, doc) + require.Contains(t, err.Error(), "valid create operation not found") + }) +} + +func TestUpdateDocument(t *testing.T) { + recoveryKey, e := ecdsa.GenerateKey(elliptic.P256(), rand.Reader) + require.NoError(t, e) + + updateKey, e := ecdsa.GenerateKey(elliptic.P256(), rand.Reader) + require.NoError(t, e) + + // protocol version switches at block 100 + pc := newMockProtocolClient() + + t.Run("success", func(t *testing.T) { + store, uniqueSuffix := getDefaultStore(recoveryKey, updateKey) + + updateOp, nextUpdateKey, err := getAnchoredUpdateOperation(updateKey, uniqueSuffix, 1) + require.Nil(t, err) + + err = store.Put(updateOp) + require.Nil(t, err) + + p := New("test", store, pc) + result, err := p.Resolve(uniqueSuffix) + require.Nil(t, err) + + // check if service type value is updated (done via json patch) + didDoc := document.DidDocumentFromJSONLDObject(result.Doc) + require.Equal(t, "special1", didDoc["test"]) + + // test consecutive update + updateOp, _, err = getAnchoredUpdateOperation(nextUpdateKey, uniqueSuffix, 2) + require.Nil(t, err) + err = store.Put(updateOp) + require.Nil(t, err) + + 
+		result, err = p.Resolve(uniqueSuffix)
+		require.Nil(t, err)
+
+		// check if service type value is updated again (done via json patch)
+		didDoc = document.DidDocumentFromJSONLDObject(result.Doc)
+		require.Equal(t, "special2", didDoc["test"])
+	})
+
+	t.Run("success - with unpublished operation store", func(t *testing.T) {
+		store, uniqueSuffix := getDefaultStore(recoveryKey, updateKey)
+
+		updateOp, _, err := getAnchoredUpdateOperation(updateKey, uniqueSuffix, 1)
+		require.Nil(t, err)
+
+		// Add an unpublished 'create' operation to test a race condition that may occur if
+		// both a published and an unpublished 'create' are retrieved.
+		unpublishedCreateOp, err := getAnchoredCreateOperation(recoveryKey, updateKey)
+		require.NoError(t, err)
+
+		unpublishedCreateOp.CanonicalReference = ""
+
+		p := New("test", store, pc,
+			WithUnpublishedOperationStore(
+				&mockUnpublishedOpsStore{AnchoredOps: []*operation.AnchoredOperation{unpublishedCreateOp, updateOp}}))
+		result, err := p.Resolve(uniqueSuffix)
+		require.Nil(t, err)
+
+		// check if service type value is updated (done via json patch)
+		didDoc := document.DidDocumentFromJSONLDObject(result.Doc)
+		require.Equal(t, "special1", didDoc["test"])
+	})
+
+	t.Run("success - protocol version changed between create/update", func(t *testing.T) {
+		store, uniqueSuffix := getDefaultStore(recoveryKey, updateKey)
+
+		pubJWK, err := pubkey.GetPublicKeyJWK(&updateKey.PublicKey)
+		require.NoError(t, err)
+
+		rv, err := commitment.GetRevealValue(pubJWK, getProtocol(1).MultihashAlgorithms[0])
+		require.NoError(t, err)
+
+		// protocol value for hashing algorithm changed at block 100
+		updateOp, _, err := getUpdateOperation(updateKey, uniqueSuffix, 200)
+		require.NoError(t, err)
+
+		updateOp.RevealValue = rv
+
+		anchoredOp := getAnchoredOperation(updateOp, 200)
+
+		err = store.Put(anchoredOp)
+		require.Nil(t, err)
+
+		p := New("test", store, pc)
+		result, err := p.Resolve(uniqueSuffix)
+		require.Nil(t, err)
+
+		// check if service type value is updated (done via json patch)
+		didDoc := document.DidDocumentFromJSONLDObject(result.Doc)
+		require.Equal(t, "special200", didDoc["test"])
+	})
+
+	t.Run("success - protocol version changed between consecutive updates", func(t *testing.T) {
+		store, uniqueSuffix := getDefaultStore(recoveryKey, updateKey)
+
+		updateOp, nextUpdateKey, err := getAnchoredUpdateOperation(updateKey, uniqueSuffix, 50)
+		require.Nil(t, err)
+
+		err = store.Put(updateOp)
+		require.Nil(t, err)
+
+		p := New("test", store, pc)
+		result, err := p.Resolve(uniqueSuffix)
+		require.Nil(t, err)
+
+		// check if service type value is updated (done via json patch)
+		didDoc := document.DidDocumentFromJSONLDObject(result.Doc)
+		require.Equal(t, "special50", didDoc["test"])
+
+		pubJWK, err := pubkey.GetPublicKeyJWK(&nextUpdateKey.PublicKey)
+		require.NoError(t, err)
+
+		// previous operation's commitment was calculated with the protocol in effect at block 50
+		rv, err := commitment.GetRevealValue(pubJWK, getProtocol(50).MultihashAlgorithms[0])
+		require.NoError(t, err)
+
+		// protocol value for hashing algorithm changed at block 100
+		op, nextUpdateKey, err := getUpdateOperation(nextUpdateKey, uniqueSuffix, 500)
+		require.NoError(t, err)
+
+		op.RevealValue = rv
+
+		updateOp = getAnchoredOperation(op, 500)
+		err = store.Put(updateOp)
+		require.Nil(t, err)
+
+		result, err = p.Resolve(uniqueSuffix)
+		require.Nil(t, err)
+
+		didDoc = document.DidDocumentFromJSONLDObject(result.Doc)
+		require.Equal(t, "special500", didDoc["test"])
+
+		// test consecutive update within new protocol
value + updateOp, _, err = getAnchoredUpdateOperation(nextUpdateKey, uniqueSuffix, 700) + require.Nil(t, err) + err = store.Put(updateOp) + require.Nil(t, err) + + result, err = p.Resolve(uniqueSuffix) + require.Nil(t, err) + + // check if service type value is updated again (done via json patch) + didDoc = document.DidDocumentFromJSONLDObject(result.Doc) + require.Equal(t, "special700", didDoc["test"]) + }) + + t.Run("success - operation with reused next commitment ignored", func(t *testing.T) { + // scenario: update 1 followed by update 2 followed by update 3 with reused commitment from 1 + + store, uniqueSuffix := getDefaultStore(recoveryKey, updateKey) + + updateOp, nextUpdateKey, err := getUpdateOperation(updateKey, uniqueSuffix, 1) + require.Nil(t, err) + + delta1 := updateOp.Delta + + err = store.Put(getAnchoredOperation(updateOp, 1)) + require.Nil(t, err) + + p := New("test", store, pc) + result, err := p.Resolve(uniqueSuffix) + require.Nil(t, err) + + // check if service type value is updated (done via json patch) + didDoc := document.DidDocumentFromJSONLDObject(result.Doc) + require.Equal(t, "special1", didDoc["test"]) + + // test consecutive update + updateOp, nextUpdateKey, err = getUpdateOperation(nextUpdateKey, uniqueSuffix, 2) + require.Nil(t, err) + + err = store.Put(getAnchoredOperation(updateOp, 2)) + require.Nil(t, err) + + result, err = p.Resolve(uniqueSuffix) + require.Nil(t, err) + + // service type value is updated since operation is valid + didDoc = document.DidDocumentFromJSONLDObject(result.Doc) + require.Equal(t, "special2", didDoc["test"]) + + // two successful update operations - next update with reused commitment from op 1 + updateOp, _, err = getUpdateOperation(nextUpdateKey, uniqueSuffix, 1) + require.Nil(t, err) + + delta3 := updateOp.Delta + delta3.UpdateCommitment = delta1.UpdateCommitment + + err = store.Put(getAnchoredOperation(updateOp, 1)) + require.Nil(t, err) + + result, err = p.Resolve(uniqueSuffix) + require.Nil(t, err) + + // service type value is not updated since commitment value was reused + didDoc = document.DidDocumentFromJSONLDObject(result.Doc) + require.Equal(t, "special2", didDoc["test"]) + }) + + t.Run("success - operation with same commitment as next operation commitment is ignored", func(t *testing.T) { + // scenario: update 1 followed by update 2 with same operation commitment as next operation commitment + + store, uniqueSuffix := getDefaultStore(recoveryKey, updateKey) + + updateOp, nextUpdateKey, err := getUpdateOperation(updateKey, uniqueSuffix, 1) + require.Nil(t, err) + + delta1 := updateOp.Delta + require.NoError(t, err) + + err = store.Put(getAnchoredOperation(updateOp, 1)) + require.Nil(t, err) + + p := New("test", store, pc) + result, err := p.Resolve(uniqueSuffix) + require.Nil(t, err) + + // check if service type value is updated (done via json patch) + didDoc := document.DidDocumentFromJSONLDObject(result.Doc) + require.Equal(t, "special1", didDoc["test"]) + + // update operation commitment is the same as next operation commitment + updateOp, _, err = getUpdateOperation(nextUpdateKey, uniqueSuffix, 1) + require.Nil(t, err) + + delta2 := updateOp.Delta + delta2.UpdateCommitment = delta1.UpdateCommitment + + err = store.Put(getAnchoredOperation(updateOp, 1)) + require.Nil(t, err) + + result, err = p.Resolve(uniqueSuffix) + require.Nil(t, err) + + // service type value is not updated since commitment value was reused + didDoc = document.DidDocumentFromJSONLDObject(result.Doc) + require.Equal(t, "special1", didDoc["test"]) + 
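+		// Note: both reused-commitment subtests above guard against commitment
+		// reuse (the "late publish" concern): an update whose commitment repeats
+		// one already used in the operation chain is skipped, and the document
+		// state from the last valid operation is kept.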
}) +} + +func TestProcessOperation(t *testing.T) { + recoveryKey, err := ecdsa.GenerateKey(elliptic.P256(), rand.Reader) + require.NoError(t, err) + + updateKey, err := ecdsa.GenerateKey(elliptic.P256(), rand.Reader) + require.NoError(t, err) + + pc := newMockProtocolClient() + parser := operationparser.New(pc.Protocol) + + t.Run("update is first operation error", func(t *testing.T) { + store := mocks.NewMockOperationStore(nil) + + const uniqueSuffix = "uniqueSuffix" + updateOp, _, err := getAnchoredUpdateOperation(updateKey, uniqueSuffix, 1) + require.Nil(t, err) + err = store.Put(updateOp) + require.Nil(t, err) + + p := New("test", store, pc) + doc, err := p.Resolve(uniqueSuffix) + require.Error(t, err) + require.Nil(t, doc) + require.Equal(t, "create operation not found", err.Error()) + }) + + t.Run("create is second operation error", func(t *testing.T) { + store := mocks.NewMockOperationStore(nil) + store.Validate = false + + createOp, err := getAnchoredCreateOperation(recoveryKey, updateKey) + require.NoError(t, err) + + a := operationapplier.New(pc.Protocol, parser, &mockDocComposer{}) + doc, err := a.Apply(createOp, &protocol.ResolutionModel{ + Doc: make(document.Document), + }) + require.Error(t, err) + require.Nil(t, doc) + require.Equal(t, "create has to be the first operation", err.Error()) + }) + + t.Run("apply recover to non existing document error", func(t *testing.T) { + store, uniqueSuffix := getDefaultStore(recoveryKey, updateKey) + recoverOp, _, err := getAnchoredRecoverOperation(recoveryKey, updateKey, uniqueSuffix, 2) + require.NoError(t, err) + err = store.Put(recoverOp) + require.Nil(t, err) + + p := New("test", store, pc) + doc, err := p.applyOperation(recoverOp, &protocol.ResolutionModel{}) + require.Error(t, err) + require.Contains(t, err.Error(), "recover can only be applied to an existing document") + require.Nil(t, doc) + }) + + t.Run("invalid operation type error", func(t *testing.T) { + store, _ := getDefaultStore(recoveryKey, updateKey) + + p := New("test", store, pc) + doc, err := p.applyOperation( + &operation.AnchoredOperation{Type: "invalid"}, &protocol.ResolutionModel{Doc: make(document.Document)}) + require.Error(t, err) + require.Equal(t, "operation type not supported for process operation", err.Error()) + require.Nil(t, doc) + }) +} + +func TestDeactivate(t *testing.T) { + recoveryKey, err := ecdsa.GenerateKey(elliptic.P256(), rand.Reader) + require.NoError(t, err) + + updateKey, err := ecdsa.GenerateKey(elliptic.P256(), rand.Reader) + require.NoError(t, err) + + pc := newMockProtocolClient() + + t.Run("success", func(t *testing.T) { + store, uniqueSuffix := getDefaultStore(recoveryKey, updateKey) + + deactivateOp, err := getAnchoredDeactivateOperation(recoveryKey, uniqueSuffix) + require.NoError(t, err) + + err = store.Put(deactivateOp) + require.Nil(t, err) + + p := New("test", store, pc) + doc, err := p.Resolve(uniqueSuffix) + require.NoError(t, err) + require.Equal(t, doc.Deactivated, true) + require.Empty(t, doc.UpdateCommitment) + require.Empty(t, doc.RecoveryCommitment) + }) +} + +func TestRecover(t *testing.T) { + recoveryKey, e := ecdsa.GenerateKey(elliptic.P256(), rand.Reader) + require.NoError(t, e) + + updateKey, e := ecdsa.GenerateKey(elliptic.P256(), rand.Reader) + require.NoError(t, e) + + pc := newMockProtocolClient() + + t.Run("success", func(t *testing.T) { + store, uniqueSuffix := getDefaultStore(recoveryKey, updateKey) + + recoverOp, nextRecoveryKey, err := + getAnchoredRecoverOperation(recoveryKey, updateKey, uniqueSuffix, 1) + 
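+		// The helper returns the next recovery private key; its commitment is
+		// embedded in this operation's signed data, so the follow-up recovery
+		// below must reveal exactly that key.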
+		require.NoError(t, err)
+		err = store.Put(recoverOp)
+		require.Nil(t, err)
+
+		p := New("test", store, pc)
+		result, err := p.Resolve(uniqueSuffix)
+		require.NoError(t, err)
+
+		// test for recovered key
+		docBytes, err := result.Doc.Bytes()
+		require.NoError(t, err)
+		require.Contains(t, string(docBytes), "recovered1")
+
+		// apply recover again - consecutive recoveries are valid
+		recoverOp, _, err = getAnchoredRecoverOperation(nextRecoveryKey, updateKey, uniqueSuffix, 2)
+		require.NoError(t, err)
+		err = store.Put(recoverOp)
+		require.Nil(t, err)
+
+		result, err = p.Resolve(uniqueSuffix)
+		require.NoError(t, err)
+		require.NotNil(t, result)
+
+		docBytes, err = result.Doc.Bytes()
+		require.NoError(t, err)
+		require.Contains(t, string(docBytes), "recovered2")
+	})
+
+	t.Run("success - protocol version changed between create and recover", func(t *testing.T) {
+		store, uniqueSuffix := getDefaultStore(recoveryKey, updateKey)
+
+		// hashing algorithm changed at block 100 - calculate the reveal value
+		// with the hashing protocol of the previous operation (block 1)
+		pubJWK, err := pubkey.GetPublicKeyJWK(&recoveryKey.PublicKey)
+		require.NoError(t, err)
+
+		rv, err := commitment.GetRevealValue(pubJWK, getProtocol(1).MultihashAlgorithms[0])
+		require.NoError(t, err)
+
+		// hashing algorithm changed at block 100
+		op, nextRecoveryKey, err := getRecoverOperationWithBlockNum(recoveryKey, updateKey, uniqueSuffix, 200)
+		require.NoError(t, err)
+
+		op.RevealValue = rv
+
+		recoverOp := getAnchoredOperation(op, 200)
+		err = store.Put(recoverOp)
+		require.Nil(t, err)
+
+		p := New("test", store, pc)
+		result, err := p.Resolve(uniqueSuffix)
+		require.NoError(t, err)
+
+		// test for recovered key
+		docBytes, err := result.Doc.Bytes()
+		require.NoError(t, err)
+		require.Contains(t, string(docBytes), "recovered200")
+
+		// apply recover again - consecutive recoveries within new protocol version
+		recoverOp, _, err = getAnchoredRecoverOperation(nextRecoveryKey, updateKey, uniqueSuffix, 300)
+		require.NoError(t, err)
+		err = store.Put(recoverOp)
+		require.Nil(t, err)
+
+		result, err = p.Resolve(uniqueSuffix)
+		require.NoError(t, err)
+		require.NotNil(t, result)
+
+		// test for recovered key
+		docBytes, err = result.Doc.Bytes()
+		require.NoError(t, err)
+		require.Contains(t, string(docBytes), "recovered300")
+	})
+
+	t.Run("success - protocol version changed between recoveries", func(t *testing.T) {
+		store, uniqueSuffix := getDefaultStore(recoveryKey, updateKey)
+
+		recoverOp, nextRecoveryKey, err :=
+			getAnchoredRecoverOperation(recoveryKey, updateKey, uniqueSuffix, 50)
+		require.NoError(t, err)
+		err = store.Put(recoverOp)
+		require.Nil(t, err)
+
+		p := New("test", store, pc)
+		result, err := p.Resolve(uniqueSuffix)
+		require.NoError(t, err)
+
+		// test for recovered key
+		docBytes, err := result.Doc.Bytes()
+		require.NoError(t, err)
+		require.Contains(t, string(docBytes), "recovered50")
+
+		// apply recover again - there was a protocol change at block 100 (new
+		// hashing algorithm), so calculate the reveal value with the hashing
+		// protocol of the previous operation (block 50)
+		pubJWK, err := pubkey.GetPublicKeyJWK(&nextRecoveryKey.PublicKey)
+		require.NoError(t, err)
+
+		rv, err := commitment.GetRevealValue(pubJWK, getProtocol(50).MultihashAlgorithms[0])
+		require.NoError(t, err)
+
+		// hashing algorithm changed at block 100
+		op, _, err := getRecoverOperationWithBlockNum(nextRecoveryKey, updateKey, uniqueSuffix, 200)
+		require.NoError(t, err)
+
+		op.RevealValue = rv
+
+		recoverOp =
getAnchoredOperation(op, 200) + err = store.Put(recoverOp) + require.Nil(t, err) + + result, err = p.Resolve(uniqueSuffix) + require.NoError(t, err) + require.NotNil(t, result) + + // test for recovered key + docBytes, err = result.Doc.Bytes() + require.NoError(t, err) + require.Contains(t, string(docBytes), "recovered200") + }) +} + +func TestGetOperationCommitment(t *testing.T) { + recoveryKey, err := ecdsa.GenerateKey(elliptic.P256(), rand.Reader) + require.NoError(t, err) + + updateKey, err := ecdsa.GenerateKey(elliptic.P256(), rand.Reader) + require.NoError(t, err) + + pc := newMockProtocolClient() + + store, uniqueSuffix := getDefaultStore(recoveryKey, updateKey) + p := New("test", store, pc) + + t.Run("success - recover", func(t *testing.T) { + recoverOp, _, err := getAnchoredRecoverOperation(recoveryKey, updateKey, uniqueSuffix, 1) + require.NoError(t, err) + + rv, err := p.getRevealValue(recoverOp) + require.NoError(t, err) + require.NotEmpty(t, rv) + + expected, err := commitment.GetCommitmentFromRevealValue(rv) + require.NoError(t, err) + + c, err := getCommitment(recoveryKey, getProtocol(1)) + require.NoError(t, err) + require.Equal(t, c, expected) + }) + + t.Run("success - update", func(t *testing.T) { + updateOp, _, err := getAnchoredUpdateOperation(updateKey, uniqueSuffix, 1) + require.NoError(t, err) + + rv, err := p.getRevealValue(updateOp) + require.NoError(t, err) + require.NotEmpty(t, rv) + + expected, err := commitment.GetCommitmentFromRevealValue(rv) + require.NoError(t, err) + + c, err := getCommitment(updateKey, getProtocol(1)) + require.NoError(t, err) + require.Equal(t, c, expected) + }) + + t.Run("success - deactivate", func(t *testing.T) { + deactivateOp, err := getAnchoredDeactivateOperation(recoveryKey, uniqueSuffix) + require.NoError(t, err) + + rv, err := p.getRevealValue(deactivateOp) + require.NoError(t, err) + require.NotEmpty(t, rv) + + expected, err := commitment.GetCommitmentFromRevealValue(rv) + require.NoError(t, err) + + c, err := getCommitment(recoveryKey, getProtocol(1)) + require.NoError(t, err) + require.Equal(t, c, expected) + }) + + t.Run("error - protocol error", func(t *testing.T) { + pcWithoutProtocols := mocks.NewMockProtocolClient() + pcWithoutProtocols.Versions = nil + store, _ := getDefaultStore(recoveryKey, updateKey) + + updateOp, _, err := getAnchoredUpdateOperation(updateKey, uniqueSuffix, 1) + require.NoError(t, err) + + value, err := New("test", store, pcWithoutProtocols).getRevealValue(updateOp) + require.Error(t, err) + require.Empty(t, value) + require.Contains(t, err.Error(), "protocol parameters are not defined for anchoring time") + }) + + t.Run("error - create operation doesn't have reveal value", func(t *testing.T) { + createOp, err := getAnchoredCreateOperation(recoveryKey, updateKey) + require.NoError(t, err) + + value, err := p.getRevealValue(createOp) + require.Error(t, err) + require.Empty(t, value) + require.Contains(t, err.Error(), "create operation doesn't have reveal value") + }) + + t.Run("error - missing signed data", func(t *testing.T) { + recoverOp, _, err := getRecoverOperation(recoveryKey, updateKey, uniqueSuffix) + require.NoError(t, err) + + recoverOp.SignedData = "" + + anchoredOp := getAnchoredOperation(recoverOp, 1) + + value, err := p.getRevealValue(anchoredOp) + require.Error(t, err) + require.Empty(t, value) + require.Contains(t, err.Error(), "missing signed data") + }) + + t.Run("error - unmarshall signed models", func(t *testing.T) { + // test recover signed model + recoverOp, _, err := 
+			getRecoverOperation(recoveryKey, updateKey, uniqueSuffix)
+		require.NoError(t, err)
+
+		recoverSigner := ecsigner.New(recoveryKey, "ES256", "")
+		recoverCompactJWS, err := signutil.SignPayload([]byte("recover payload"), recoverSigner)
+		require.NoError(t, err)
+
+		recoverOp.SignedData = recoverCompactJWS
+
+		anchoredOp := getAnchoredOperation(recoverOp, 1)
+
+		value, err := p.getRevealValue(anchoredOp)
+		require.Error(t, err)
+		require.Empty(t, value)
+		require.Contains(t, err.Error(), "failed to unmarshal signed data model for recover")
+
+		// test deactivate signed model
+		deactivateOp, err := getDeactivateOperation(recoveryKey, uniqueSuffix)
+		require.NoError(t, err)
+
+		deactivateOp.SignedData = recoverCompactJWS
+
+		anchoredOp = getAnchoredOperation(deactivateOp, 1)
+
+		value, err = p.getRevealValue(anchoredOp)
+		require.Error(t, err)
+		require.Empty(t, value)
+		require.Contains(t, err.Error(), "failed to unmarshal signed data model for deactivate")
+
+		// test update signed model
+		updateOp, _, err := getUpdateOperation(updateKey, uniqueSuffix, 1)
+		require.NoError(t, err)
+
+		updateSigner := ecsigner.New(recoveryKey, "ES256", "")
+		updateCompactJWS, err := signutil.SignPayload([]byte("update payload"), updateSigner)
+		require.NoError(t, err)
+
+		updateOp.SignedData = updateCompactJWS
+
+		anchoredOp = getAnchoredOperation(updateOp, 1)
+
+		value, err = p.getRevealValue(anchoredOp)
+		require.Error(t, err)
+		require.Empty(t, value)
+		require.Contains(t, err.Error(), "failed to unmarshal signed data model for update")
+	})
+}
+
+func TestGetNextOperationCommitment(t *testing.T) {
+	recoveryKey, err := ecdsa.GenerateKey(elliptic.P256(), rand.Reader)
+	require.NoError(t, err)
+
+	updateKey, err := ecdsa.GenerateKey(elliptic.P256(), rand.Reader)
+	require.NoError(t, err)
+
+	pc := newMockProtocolClient()
+
+	store, uniqueSuffix := getDefaultStore(recoveryKey, updateKey)
+	p := New("test", store, pc)
+
+	t.Run("success - recover", func(t *testing.T) {
+		recoverOp, nextRecoveryKey, err := getAnchoredRecoverOperation(recoveryKey, updateKey, uniqueSuffix, 1)
+		require.NoError(t, err)
+
+		value, err := p.getCommitment(recoverOp)
+		require.NoError(t, err)
+		require.NotEmpty(t, value)
+
+		c, err := getCommitment(nextRecoveryKey, getProtocol(1))
+		require.NoError(t, err)
+		require.Equal(t, c, value)
+	})
+
+	t.Run("success - update", func(t *testing.T) {
+		updateOp, nextUpdateKey, err := getAnchoredUpdateOperation(updateKey, uniqueSuffix, 1)
+		require.NoError(t, err)
+
+		value, err := p.getCommitment(updateOp)
+		require.NoError(t, err)
+		require.NotEmpty(t, value)
+
+		c, err := getCommitment(nextUpdateKey, getProtocol(1))
+		require.NoError(t, err)
+		require.Equal(t, c, value)
+	})
+
+	t.Run("success - deactivate", func(t *testing.T) {
+		deactivateOp, err := getAnchoredDeactivateOperation(recoveryKey, uniqueSuffix)
+		require.NoError(t, err)
+
+		value, err := p.getCommitment(deactivateOp)
+		require.NoError(t, err)
+		require.Empty(t, value)
+	})
+
+	t.Run("error - protocol error", func(t *testing.T) {
+		pcWithoutProtocols := mocks.NewMockProtocolClient()
+		pcWithoutProtocols.Versions = nil
+		store, _ := getDefaultStore(recoveryKey, updateKey)
+
+		updateOp, _, err := getAnchoredUpdateOperation(updateKey, uniqueSuffix, 1)
+		require.NoError(t, err)
+
+		value, err := New("test", store, pcWithoutProtocols).getCommitment(updateOp)
+		require.Error(t, err)
+		require.Empty(t, value)
+		require.Contains(t, err.Error(), "protocol parameters are not defined for anchoring time")
+	})
+
+	t.Run("error - create
operation is currently not supported", func(t *testing.T) { + createOp, err := getAnchoredCreateOperation(recoveryKey, updateKey) + require.NoError(t, err) + + value, err := p.getCommitment(createOp) + require.Error(t, err) + require.Empty(t, value) + require.Contains(t, err.Error(), "operation type 'create' not supported for getting next operation commitment") + }) + + t.Run("error - missing signed data", func(t *testing.T) { + recoverOp, _, err := getRecoverOperation(recoveryKey, updateKey, uniqueSuffix) + require.NoError(t, err) + + recoverOp.SignedData = "" + + anchoredOp := getAnchoredOperation(recoverOp, 1) + + value, err := p.getCommitment(anchoredOp) + require.Error(t, err) + require.Empty(t, value) + require.Contains(t, err.Error(), "missing signed data") + }) + + t.Run("error - operation type not supported", func(t *testing.T) { + request := model.RecoverRequest{ + Operation: "other", + } + + bytes, err := canonicalizer.MarshalCanonical(request) + require.NoError(t, err) + + value, err := p.getCommitment(&operation.AnchoredOperation{OperationRequest: bytes}) + require.Error(t, err) + require.Empty(t, value) + require.Contains(t, err.Error(), "operation type [other] not supported") + }) + + t.Run("error - unmarshall signed model for recovery", func(t *testing.T) { + // test recover signed model + recoverOp, _, err := getRecoverOperation(recoveryKey, updateKey, uniqueSuffix) + require.NoError(t, err) + + recoverSigner := ecsigner.New(recoveryKey, "ES256", "") + recoverCompactJWS, err := signutil.SignPayload([]byte("recover payload"), recoverSigner) + require.NoError(t, err) + + recoverOp.SignedData = recoverCompactJWS + + anchoredOp := getAnchoredOperation(recoverOp, 1) + + value, err := p.getCommitment(anchoredOp) + require.Error(t, err) + require.Empty(t, value) + require.Contains(t, err.Error(), "failed to unmarshal signed data model for recover") + }) +} + +func TestOpsWithTxnGreaterThan(t *testing.T) { + op1 := &operation.AnchoredOperation{ + TransactionTime: 1, + TransactionNumber: 1, + CanonicalReference: "ref", + } + + op2 := &operation.AnchoredOperation{ + TransactionTime: 1, + TransactionNumber: 2, + CanonicalReference: "ref", + } + + ops := []*operation.AnchoredOperation{op1, op2} + + txns := getOpsWithTxnGreaterThanOrUnpublished(ops, 0, 0) + require.Equal(t, 2, len(txns)) + + txns = getOpsWithTxnGreaterThanOrUnpublished(ops, 2, 1) + require.Equal(t, 0, len(txns)) + + txns = getOpsWithTxnGreaterThanOrUnpublished(ops, 1, 1) + require.Equal(t, 1, len(txns)) +} + +func getUpdateOperation(privateKey *ecdsa.PrivateKey, uniqueSuffix string, blockNum uint64, +) (*model.Operation, *ecdsa.PrivateKey, error) { + s := ecsigner.New(privateKey, "ES256", "") + + return getUpdateOperationWithSigner(s, privateKey, uniqueSuffix, blockNum) +} + +func getAnchoredUpdateOperation(privateKey *ecdsa.PrivateKey, uniqueSuffix string, blockNumber uint64, +) (*operation.AnchoredOperation, *ecdsa.PrivateKey, error) { + op, nextUpdateKey, err := getUpdateOperation(privateKey, uniqueSuffix, blockNumber) + if err != nil { + return nil, nil, err + } + + return getAnchoredOperation(op, blockNumber), nextUpdateKey, nil +} + +func getUpdateOperationWithSigner( + s client.Signer, privateKey *ecdsa.PrivateKey, uniqueSuffix string, blockNumber uint64, +) (*model.Operation, *ecdsa.PrivateKey, error) { + p := map[string]interface{}{ + "op": "replace", + "path": "/test", + "value": "special" + strconv.Itoa(int(blockNumber)), + } + + patchBytes, err := canonicalizer.MarshalCanonical([]map[string]interface{}{p}) 
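+	// MarshalCanonical emits JCS (RFC 8785) JSON with lexicographically sorted
+	// keys, e.g. for blockNumber 1:
+	//   [{"op":"replace","path":"/test","value":"special1"}]
+	// which keeps the delta hash computed below deterministic.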
+ if err != nil { + return nil, nil, err + } + + jsonPatch, err := patch.NewJSONPatch(string(patchBytes)) + if err != nil { + return nil, nil, err + } + + nextUpdateKey, updateCommitment, err := generateKeyAndCommitment(getProtocol(blockNumber)) + if err != nil { + return nil, nil, err + } + + delta := &model.DeltaModel{ + UpdateCommitment: updateCommitment, + Patches: []patch.Patch{jsonPatch}, + } + + deltaHash, err := hashing.CalculateModelMultihash(delta, getProtocol(blockNumber).MultihashAlgorithms[0]) + if err != nil { + return nil, nil, err + } + + updatePubKey, err := pubkey.GetPublicKeyJWK(&privateKey.PublicKey) + if err != nil { + return nil, nil, err + } + + signedData := &model.UpdateSignedDataModel{ + DeltaHash: deltaHash, + UpdateKey: updatePubKey, + } + + jws, err := signutil.SignModel(signedData, s) + if err != nil { + return nil, nil, err + } + + rv, err := commitment.GetRevealValue(updatePubKey, getProtocol(blockNumber).MultihashAlgorithms[0]) + if err != nil { + return nil, nil, err + } + + op := &model.Operation{ + Namespace: mocks.DefaultNS, + ID: "did:sidetree:" + uniqueSuffix, + UniqueSuffix: uniqueSuffix, + Delta: delta, + Type: operation.TypeUpdate, + SignedData: jws, + RevealValue: rv, + } + + return op, nextUpdateKey, nil +} + +func generateKeyAndCommitment(p protocol.Protocol) (*ecdsa.PrivateKey, string, error) { + key, err := ecdsa.GenerateKey(elliptic.P256(), rand.Reader) + if err != nil { + return nil, "", err + } + + pubKey, err := pubkey.GetPublicKeyJWK(&key.PublicKey) + if err != nil { + return nil, "", err + } + + c, err := commitment.GetCommitment(pubKey, p.MultihashAlgorithms[0]) + if err != nil { + return nil, "", err + } + + return key, c, nil +} + +func getDeactivateOperation(privateKey *ecdsa.PrivateKey, uniqueSuffix string) (*model.Operation, error) { + signer := ecsigner.New(privateKey, "ES256", "") + + return getDeactivateOperationWithSigner(signer, privateKey, uniqueSuffix) +} + +func getAnchoredDeactivateOperation(privateKey *ecdsa.PrivateKey, uniqueSuffix string, +) (*operation.AnchoredOperation, error) { + op, err := getDeactivateOperation(privateKey, uniqueSuffix) + if err != nil { + return nil, err + } + + return getAnchoredOperation(op, defaultBlockNumber), nil +} + +func getDeactivateOperationWithSigner(singer client.Signer, privateKey *ecdsa.PrivateKey, uniqueSuffix string, +) (*model.Operation, error) { + recoverPubKey, err := pubkey.GetPublicKeyJWK(&privateKey.PublicKey) + if err != nil { + return nil, err + } + + signedDataModel := model.DeactivateSignedDataModel{ + DidSuffix: uniqueSuffix, + RecoveryKey: recoverPubKey, + } + + jws, err := signutil.SignModel(signedDataModel, singer) + if err != nil { + return nil, err + } + + rv, err := commitment.GetRevealValue(signedDataModel.RecoveryKey, sha2_256) + if err != nil { + return nil, err + } + + return &model.Operation{ + Namespace: mocks.DefaultNS, + ID: "did:sidetree:" + uniqueSuffix, + UniqueSuffix: uniqueSuffix, + Type: operation.TypeDeactivate, + SignedData: jws, + RevealValue: rv, + }, nil +} + +func getRecoverOperation(recoveryKey, updateKey *ecdsa.PrivateKey, uniqueSuffix string, +) (*model.Operation, *ecdsa.PrivateKey, error) { + return getRecoverOperationWithBlockNum(recoveryKey, updateKey, uniqueSuffix, 1) +} + +func getRecoverOperationWithBlockNum(recoveryKey, updateKey *ecdsa.PrivateKey, uniqueSuffix string, blockNum uint64, +) (*model.Operation, *ecdsa.PrivateKey, error) { + signer := ecsigner.New(recoveryKey, "ES256", "") + + return getRecoverOperationWithSigner(signer, 
recoveryKey, updateKey, uniqueSuffix, blockNum) +} + +func getAnchoredRecoverOperation( + recoveryKey, updateKey *ecdsa.PrivateKey, uniqueSuffix string, blockNumber uint64, +) (*operation.AnchoredOperation, *ecdsa.PrivateKey, error) { + op, nextRecoveryKey, err := getRecoverOperationWithBlockNum(recoveryKey, updateKey, uniqueSuffix, blockNumber) + if err != nil { + return nil, nil, err + } + + return getAnchoredOperation(op, blockNumber), nextRecoveryKey, nil +} + +func getRecoverOperationWithSigner( + signer client.Signer, recoveryKey, updateKey *ecdsa.PrivateKey, uniqueSuffix string, blockNum uint64, +) (*model.Operation, *ecdsa.PrivateKey, error) { + recoverRequest, nextRecoveryKey, err := getDefaultRecoverRequest(signer, recoveryKey, updateKey, blockNum) + if err != nil { + return nil, nil, err + } + + return &model.Operation{ + Namespace: mocks.DefaultNS, + UniqueSuffix: uniqueSuffix, + Type: operation.TypeRecover, + OperationRequest: []byte(recoverRequest.Operation), + Delta: recoverRequest.Delta, + SignedData: recoverRequest.SignedData, + RevealValue: recoverRequest.RevealValue, + }, nextRecoveryKey, nil +} + +func getRecoverRequest( + signer client.Signer, deltaModel *model.DeltaModel, signedDataModel *model.RecoverSignedDataModel, blockNum uint64, +) (*model.RecoverRequest, error) { + deltaHash, err := hashing.CalculateModelMultihash(deltaModel, getProtocol(blockNum).MultihashAlgorithms[0]) + if err != nil { + return nil, err + } + + signedDataModel.DeltaHash = deltaHash + + jws, err := signutil.SignModel(signedDataModel, signer) + if err != nil { + return nil, err + } + + rv, err := commitment.GetRevealValue(signedDataModel.RecoveryKey, getProtocol(blockNum).MultihashAlgorithms[0]) + if err != nil { + return nil, err + } + + return &model.RecoverRequest{ + Operation: operation.TypeRecover, + DidSuffix: "suffix", + Delta: deltaModel, + SignedData: jws, + RevealValue: rv, + }, nil +} + +func getDefaultRecoverRequest(signer client.Signer, recoveryKey, updateKey *ecdsa.PrivateKey, blockNum uint64, +) (*model.RecoverRequest, *ecdsa.PrivateKey, error) { + p := getProtocol(blockNum) + + updateCommitment, err := getCommitment(updateKey, p) + if err != nil { + return nil, nil, err + } + + recoveredDoc := fmt.Sprintf(recoveredDocTemplate, strconv.Itoa(int(blockNum))) + + delta, err := getDeltaModel(recoveredDoc, updateCommitment) + if err != nil { + return nil, nil, err + } + + deltaHash, err := hashing.CalculateModelMultihash(delta, p.MultihashAlgorithms[0]) + if err != nil { + return nil, nil, err + } + + recoveryPubKey, err := pubkey.GetPublicKeyJWK(&recoveryKey.PublicKey) + if err != nil { + return nil, nil, err + } + + nextRecoveryKey, recoveryCommitment, err := generateKeyAndCommitment(p) + if err != nil { + return nil, nil, err + } + + recoverSignedData := &model.RecoverSignedDataModel{ + RecoveryKey: recoveryPubKey, + RecoveryCommitment: recoveryCommitment, + DeltaHash: deltaHash, + } + + req, err := getRecoverRequest(signer, delta, recoverSignedData, blockNum) + if err != nil { + return nil, nil, err + } + + return req, nextRecoveryKey, nil +} + +func getDefaultStore(recoveryKey, updateKey *ecdsa.PrivateKey) (*mocks.MockOperationStore, string) { + store := mocks.NewMockOperationStore(nil) + + createOp, err := getAnchoredCreateOperation(recoveryKey, updateKey) + if err != nil { + panic(err) + } + + // store default create operation + err = store.Put(createOp) + if err != nil { + panic(err) + } + + return store, createOp.UniqueSuffix +} + +func getCreateOperationWithDoc( + 
recoveryKey, updateKey *ecdsa.PrivateKey, doc string, blockNum uint64) (*model.Operation, error) { + p := getProtocol(blockNum) + + createRequest, err := getCreateRequest(recoveryKey, updateKey, p) + if err != nil { + return nil, err + } + + operationBuffer, err := json.Marshal(createRequest) + if err != nil { + return nil, err + } + + uniqueSuffix, err := hashing.CalculateModelMultihash(createRequest.SuffixData, sha2_256) + if err != nil { + return nil, err + } + + updateCommitment, err := getCommitment(updateKey, p) + if err != nil { + return nil, err + } + + delta, err := getDeltaModel(doc, updateCommitment) + if err != nil { + return nil, err + } + + suffixData, err := getSuffixData(recoveryKey, delta, p) + if err != nil { + return nil, err + } + + return &model.Operation{ + Namespace: mocks.DefaultNS, + ID: "did:sidetree:" + uniqueSuffix, + UniqueSuffix: uniqueSuffix, + Type: operation.TypeCreate, + OperationRequest: operationBuffer, + Delta: delta, + SuffixData: suffixData, + }, nil +} + +func getCreateOperation(recoveryKey, updateKey *ecdsa.PrivateKey, blockNum uint64) (*model.Operation, error) { + return getCreateOperationWithDoc(recoveryKey, updateKey, validDoc, blockNum) +} + +func getAnchoredCreateOperation(recoveryKey, updateKey *ecdsa.PrivateKey) (*operation.AnchoredOperation, error) { + op, err := getCreateOperation(recoveryKey, updateKey, defaultBlockNumber) + if err != nil { + return nil, err + } + + return getAnchoredOperation(op, defaultBlockNumber), nil +} + +func getAnchoredOperation(op *model.Operation, blockNum uint64) *operation.AnchoredOperation { + anchoredOp, err := model.GetAnchoredOperation(op) + if err != nil { + panic(err) + } + + anchoredOp.CanonicalReference = "ref" + anchoredOp.TransactionTime = blockNum + anchoredOp.ProtocolVersion = getProtocol(blockNum).GenesisTime + + return anchoredOp +} + +func getCreateRequest(recoveryKey, updateKey *ecdsa.PrivateKey, p protocol.Protocol) (*model.CreateRequest, error) { + updateCommitment, err := getCommitment(updateKey, p) + if err != nil { + return nil, err + } + + delta, err := getDeltaModel(validDoc, updateCommitment) + if err != nil { + return nil, err + } + + suffixData, err := getSuffixData(recoveryKey, delta, p) + if err != nil { + return nil, err + } + + return &model.CreateRequest{ + Operation: operation.TypeCreate, + Delta: delta, + SuffixData: suffixData, + }, nil +} + +func getProtocol(blockNum uint64) protocol.Protocol { + pc := newMockProtocolClient() + + pv, err := pc.Get(blockNum) + if err != nil { + panic(err) + } + + return pv.Protocol() +} + +func getDeltaModel(doc string, updateCommitment string) (*model.DeltaModel, error) { + patches, err := patch.PatchesFromDocument(doc) + if err != nil { + return nil, err + } + + return &model.DeltaModel{ + Patches: patches, + UpdateCommitment: updateCommitment, + }, nil +} + +func getCommitment(key *ecdsa.PrivateKey, p protocol.Protocol) (string, error) { + pubKey, err := pubkey.GetPublicKeyJWK(&key.PublicKey) + if err != nil { + return "", err + } + + return commitment.GetCommitment(pubKey, p.MultihashAlgorithms[0]) +} + +func getSuffixData( + privateKey *ecdsa.PrivateKey, delta *model.DeltaModel, p protocol.Protocol) (*model.SuffixDataModel, error) { + recoveryCommitment, err := getCommitment(privateKey, p) + if err != nil { + return nil, err + } + + deltaHash, err := hashing.CalculateModelMultihash(delta, p.MultihashAlgorithms[0]) + if err != nil { + return nil, err + } + + return &model.SuffixDataModel{ + DeltaHash: deltaHash, + RecoveryCommitment: 
recoveryCommitment, + }, nil +} + +const validDoc = `{ + "publicKey": [{ + "id": "key1", + "type": "JsonWebKey2020", + "publicKeyJwk": { + "kty": "EC", + "crv": "P-256K", + "x": "PUymIqdtF_qxaAqPABSw-C-owT1KYYQbsMKFM-L9fJA", + "y": "nM84jDHCMOTGTh_ZdHq4dBBdo4Z5PkEOW9jA8z8IsGc" + } + }] +}` + +const recoveredDocTemplate = `{ + "publicKey": [{ + "id": "recovered%s", + "type": "JsonWebKey2020", + "publicKeyJwk": { + "kty": "EC", + "crv": "P-256K", + "x": "PUymIqdtF_qxaAqPABSw-C-owT1KYYQbsMKFM-L9fJA", + "y": "nM84jDHCMOTGTh_ZdHq4dBBdo4Z5PkEOW9jA8z8IsGc" + } + }] +}` + +type mockDocComposer struct { + Err error +} + +// ApplyPatches mocks applying patches to the document. +func (m *mockDocComposer) ApplyPatches(doc document.Document, patches []patch.Patch) (document.Document, error) { + if m.Err != nil { + return nil, m.Err + } + + return make(document.Document), nil +} + +// mock protocol client with two protocol versions, first one effective at block 0, second at block 100. +func newMockProtocolClient() *mocks.MockProtocolClient { + pc := mocks.NewMockProtocolClient() + + latest := protocol.Protocol{ + GenesisTime: 100, + MultihashAlgorithms: []uint{sha2_512, sha2_256}, + MaxOperationCount: 2, + MaxOperationSize: mocks.MaxOperationByteSize, + MaxOperationHashLength: 100, + MaxDeltaSize: mocks.MaxDeltaByteSize, + MaxCasURILength: 100, + CompressionAlgorithm: "GZIP", + MaxChunkFileSize: mocks.MaxBatchFileSize, + MaxProvisionalIndexFileSize: mocks.MaxBatchFileSize, + MaxCoreIndexFileSize: mocks.MaxBatchFileSize, + SignatureAlgorithms: []string{"EdDSA", "ES256"}, + KeyAlgorithms: []string{"Ed25519", "P-256"}, + Patches: []string{"add-public-keys", "remove-public-keys", "add-services", "remove-services", "ietf-json-patch"}, //nolint:lll + } + + latestVersion := mocks.GetProtocolVersion(latest) + + // has to be sorted for mock client to work + pc.Versions = append(pc.Versions, latestVersion) + + pc.CurrentVersion = latestVersion + + for _, v := range pc.Versions { + parser := operationparser.New(v.Protocol()) + dc := doccomposer.New() + oa := operationapplier.New(v.Protocol(), parser, dc) + v.OperationParserReturns(parser) + v.OperationApplierReturns(oa) + v.DocumentComposerReturns(dc) + } + + return pc +} + +type mockUnpublishedOpsStore struct { + GetErr error + AnchoredOps []*operation.AnchoredOperation +} + +func (m *mockUnpublishedOpsStore) Get(_ string) ([]*operation.AnchoredOperation, error) { + if m.GetErr != nil { + return nil, m.GetErr + } + + return m.AnchoredOps, nil +} diff --git a/method/sidetreelongform/sidetree-core/util/ecsigner/signer.go b/method/sidetreelongform/sidetree-core/util/ecsigner/signer.go new file mode 100644 index 0000000..52cd419 --- /dev/null +++ b/method/sidetreelongform/sidetree-core/util/ecsigner/signer.go @@ -0,0 +1,100 @@ +/* +Copyright SecureKey Technologies Inc. All Rights Reserved. + +SPDX-License-Identifier: Apache-2.0 +*/ + +package ecsigner + +import ( + "crypto" + "crypto/ecdsa" + "crypto/elliptic" + "crypto/rand" + "errors" + + "github.com/btcsuite/btcd/btcec" + + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/jws" +) + +// Signer implements signer interface. +type Signer struct { + alg string + kid string + privateKey *ecdsa.PrivateKey +} + +// New creates new ECDSA signer. +func New(privKey *ecdsa.PrivateKey, alg, kid string) *Signer { + return &Signer{privateKey: privKey, kid: kid, alg: alg} +} + +// Headers provides required JWS protected headers. It provides information about signing key and algorithm. 
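+// Only the "alg" and "kid" headers are populated, and each is omitted when
+// its value is empty.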
+func (signer *Signer) Headers() jws.Headers { + headers := make(jws.Headers) + + if signer.alg != "" { + headers[jws.HeaderAlgorithm] = signer.alg + } + + if signer.kid != "" { + headers[jws.HeaderKeyID] = signer.kid + } + + return headers +} + +// Sign signs msg and returns signature value. +func (signer *Signer) Sign(msg []byte) ([]byte, error) { + if signer.privateKey == nil { + return nil, errors.New("private key not provided") + } + + hasher := getHasher(signer.privateKey.Curve).New() + + _, err := hasher.Write(msg) + if err != nil { + return nil, err + } + + hashed := hasher.Sum(nil) + + r, s, err := ecdsa.Sign(rand.Reader, signer.privateKey, hashed) + if err != nil { + return nil, err + } + + curveBits := signer.privateKey.Curve.Params().BitSize + + const bitsInByte = 8 + keyBytes := curveBits / bitsInByte + + if curveBits%bitsInByte > 0 { + keyBytes++ + } + + return append(copyPadded(r.Bytes(), keyBytes), copyPadded(s.Bytes(), keyBytes)...), nil +} + +func copyPadded(source []byte, size int) []byte { + dest := make([]byte, size) + copy(dest[size-len(source):], source) + + return dest +} + +func getHasher(curve elliptic.Curve) crypto.Hash { + switch curve { + case elliptic.P256(): + return crypto.SHA256 + case elliptic.P384(): + return crypto.SHA384 + case elliptic.P521(): + return crypto.SHA512 + case btcec.S256(): + return crypto.SHA256 + default: + return crypto.SHA256 + } +} diff --git a/method/sidetreelongform/sidetree-core/util/ecsigner/signer_test.go b/method/sidetreelongform/sidetree-core/util/ecsigner/signer_test.go new file mode 100644 index 0000000..a9a3d1c --- /dev/null +++ b/method/sidetreelongform/sidetree-core/util/ecsigner/signer_test.go @@ -0,0 +1,109 @@ +/* +Copyright SecureKey Technologies Inc. All Rights Reserved. + +SPDX-License-Identifier: Apache-2.0 +*/ + +package ecsigner + +import ( + "crypto/ecdsa" + "crypto/elliptic" + "crypto/rand" + "testing" + + "github.com/btcsuite/btcd/btcec" + "github.com/stretchr/testify/require" +) + +func TestSign(t *testing.T) { + msg := []byte("test message") + + t.Run("success EC P-256", func(t *testing.T) { + privateKey, err := ecdsa.GenerateKey(elliptic.P256(), rand.Reader) + require.NoError(t, err) + + signer := New(privateKey, "ES256", "key-1") + + signature, err := signer.Sign(msg) + require.NoError(t, err) + require.NotEmpty(t, signature) + }) + + t.Run("success EC P-384", func(t *testing.T) { + privateKey, err := ecdsa.GenerateKey(elliptic.P384(), rand.Reader) + require.NoError(t, err) + + signer := New(privateKey, "ES384", "key-1") + + signature, err := signer.Sign(msg) + require.NoError(t, err) + require.NotEmpty(t, signature) + }) + + t.Run("success EC P-521", func(t *testing.T) { + privateKey, err := ecdsa.GenerateKey(elliptic.P521(), rand.Reader) + require.NoError(t, err) + + signer := New(privateKey, "ES521", "key-1") + + signature, err := signer.Sign(msg) + require.NoError(t, err) + require.NotEmpty(t, signature) + }) + + t.Run("success EC secp256k1 ", func(t *testing.T) { + privateKey, err := ecdsa.GenerateKey(btcec.S256(), rand.Reader) + require.NoError(t, err) + + signer := New(privateKey, "ES256K", "key-1") + + signature, err := signer.Sign(msg) + require.NoError(t, err) + require.NotEmpty(t, signature) + }) + + t.Run("private key not provided", func(t *testing.T) { + privateKey, err := ecdsa.GenerateKey(btcec.S256(), rand.Reader) + require.NoError(t, err) + + signer := New(privateKey, "ES256K", "key-1") + signer.privateKey = nil + + signature, err := signer.Sign(msg) + require.Error(t, err) + require.Nil(t, 
signature) + require.Contains(t, err.Error(), "private key not provided") + }) +} + +func TestHeaders(t *testing.T) { + privateKey, err := ecdsa.GenerateKey(elliptic.P256(), rand.Reader) + require.NoError(t, err) + + t.Run("success - kid, alg provided", func(t *testing.T) { + signer := New(privateKey, "ES256", "key-1") + + // verify headers + kid, ok := signer.Headers().KeyID() + require.Equal(t, true, ok) + require.Equal(t, "key-1", kid) + + alg, ok := signer.Headers().Algorithm() + require.Equal(t, true, ok) + require.Equal(t, "ES256", alg) + }) + + t.Run("success - kid, alg not provided", func(t *testing.T) { + signer := New(privateKey, "", "") + + // verify headers + kid, ok := signer.Headers().KeyID() + require.Equal(t, false, ok) + require.Empty(t, kid) + + alg, ok := signer.Headers().Algorithm() + require.Equal(t, false, ok) + require.Empty(t, alg) + }) +} diff --git a/method/sidetreelongform/sidetree-core/util/edsigner/signer.go b/method/sidetreelongform/sidetree-core/util/edsigner/signer.go new file mode 100644 index 0000000..53d2653 --- /dev/null +++ b/method/sidetreelongform/sidetree-core/util/edsigner/signer.go @@ -0,0 +1,50 @@ +/* +Copyright SecureKey Technologies Inc. All Rights Reserved. + +SPDX-License-Identifier: Apache-2.0 +*/ + +package edsigner + +import ( + "crypto/ed25519" + "errors" + + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/jws" +) + +// Signer implements signer interface. +type Signer struct { + alg string + kid string + privateKey ed25519.PrivateKey +} + +// New returns ED25519 signer. +func New(privKey ed25519.PrivateKey, alg, kid string) *Signer { + return &Signer{privateKey: privKey, kid: kid, alg: alg} +} + +// Headers provides required JWS protected headers. It provides information about signing key and algorithm. +func (signer *Signer) Headers() jws.Headers { + headers := make(jws.Headers) + + if signer.alg != "" { + headers[jws.HeaderAlgorithm] = signer.alg + } + + if signer.kid != "" { + headers[jws.HeaderKeyID] = signer.kid + } + + return headers +} + +// Sign signs msg and returns signature value. +func (signer *Signer) Sign(msg []byte) ([]byte, error) { + if l := len(signer.privateKey); l != ed25519.PrivateKeySize { + return nil, errors.New("invalid private key size") + } + + return ed25519.Sign(signer.privateKey, msg), nil +} diff --git a/method/sidetreelongform/sidetree-core/util/edsigner/signer_test.go b/method/sidetreelongform/sidetree-core/util/edsigner/signer_test.go new file mode 100644 index 0000000..1e79a53 --- /dev/null +++ b/method/sidetreelongform/sidetree-core/util/edsigner/signer_test.go @@ -0,0 +1,74 @@ +/* +Copyright SecureKey Technologies Inc. All Rights Reserved. 
+ +SPDX-License-Identifier: Apache-2.0 +*/ + +package edsigner + +import ( + "crypto/ed25519" + "crypto/rand" + "testing" + + "github.com/stretchr/testify/require" +) + +func TestSign(t *testing.T) { + _, privateKey, err := ed25519.GenerateKey(rand.Reader) + require.NoError(t, err) + + msg := []byte("test message") + + t.Run("success", func(t *testing.T) { + signer := New(privateKey, "EdDSA", "key-1") + + signature, err := signer.Sign(msg) + require.NoError(t, err) + require.NotEmpty(t, signature) + }) + + t.Run("invalid key size", func(t *testing.T) { + signer := New(privateKey, "EdDSA", "key-1") + signer.privateKey = nil + + signature, err := signer.Sign(msg) + require.Error(t, err) + require.Nil(t, signature) + require.Contains(t, err.Error(), "invalid private key size") + }) +} + +func TestHeaders(t *testing.T) { + t.Run("success - kid, alg provided", func(t *testing.T) { + _, privateKey, err := ed25519.GenerateKey(rand.Reader) + require.NoError(t, err) + + signer := New(privateKey, "EdDSA", "key-1") + + // verify headers + kid, ok := signer.Headers().KeyID() + require.Equal(t, true, ok) + require.Equal(t, "key-1", kid) + + alg, ok := signer.Headers().Algorithm() + require.Equal(t, true, ok) + require.Equal(t, "EdDSA", alg) + }) + + t.Run("success - kid, alg not provided", func(t *testing.T) { + _, privateKey, err := ed25519.GenerateKey(rand.Reader) + require.NoError(t, err) + + signer := New(privateKey, "", "") + + // verify headers + kid, ok := signer.Headers().KeyID() + require.Equal(t, false, ok) + require.Empty(t, kid) + + alg, ok := signer.Headers().Algorithm() + require.Equal(t, false, ok) + require.Empty(t, alg) + }) +} diff --git a/method/sidetreelongform/sidetree-core/util/pubkey/jwk.go b/method/sidetreelongform/sidetree-core/util/pubkey/jwk.go new file mode 100644 index 0000000..7bdfa7b --- /dev/null +++ b/method/sidetreelongform/sidetree-core/util/pubkey/jwk.go @@ -0,0 +1,66 @@ +/* +Copyright SecureKey Technologies Inc. All Rights Reserved. + +SPDX-License-Identifier: Apache-2.0 +*/ + +package pubkey + +import ( + "crypto/ecdsa" + "crypto/ed25519" + "errors" + "fmt" + "reflect" + + "github.com/btcsuite/btcd/btcec" + gojose "github.com/square/go-jose/v3" + "github.com/square/go-jose/v3/json" + + internal "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/internal/jws" + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/jws" +) + +const ( + secp256k1Crv = "secp256k1" + secp256k1Kty = "EC" +) + +// GetPublicKeyJWK returns public key in JWK format. 
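+// Ed25519 and NIST-curve ECDSA keys are marshalled by gojose directly;
+// secp256k1 keys go through the internal JWK wrapper because gojose does not
+// support that curve.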
+func GetPublicKeyJWK(pubKey interface{}) (*jws.JWK, error) { + internalJWK := internal.JWK{ + JSONWebKey: gojose.JSONWebKey{Key: pubKey}, + } + + switch key := pubKey.(type) { + case ed25519.PublicKey: + // handled automatically by gojose + case *ecdsa.PublicKey: + ecdsaPubKey, ok := pubKey.(*ecdsa.PublicKey) + if !ok { + // check because linter complains; should never happen + return nil, errors.New("unexpected interface") + } + // using internal jwk wrapper marshall feature since gojose doesn't handle secp256k1 curve + if ecdsaPubKey.Curve == btcec.S256() { + internalJWK.Kty = secp256k1Kty + internalJWK.Crv = secp256k1Crv + } + default: + return nil, fmt.Errorf("unknown key type '%s'", reflect.TypeOf(key)) + } + + jsonJWK, err := internalJWK.MarshalJSON() + if err != nil { + return nil, err + } + + var jwk jws.JWK + + err = json.Unmarshal(jsonJWK, &jwk) + if err != nil { + return nil, err + } + + return &jwk, nil +} diff --git a/method/sidetreelongform/sidetree-core/util/pubkey/jwk_test.go b/method/sidetreelongform/sidetree-core/util/pubkey/jwk_test.go new file mode 100644 index 0000000..323c399 --- /dev/null +++ b/method/sidetreelongform/sidetree-core/util/pubkey/jwk_test.go @@ -0,0 +1,78 @@ +/* +Copyright SecureKey Technologies Inc. All Rights Reserved. + +SPDX-License-Identifier: Apache-2.0 +*/ + +package pubkey + +import ( + "crypto/ecdsa" + "crypto/ed25519" + "crypto/elliptic" + "crypto/rand" + "testing" + + "github.com/btcsuite/btcd/btcec" + "github.com/stretchr/testify/require" +) + +func TestGetPublicKeyJWK(t *testing.T) { + t.Run("success EC P-256", func(t *testing.T) { + privateKey, err := ecdsa.GenerateKey(elliptic.P256(), rand.Reader) + require.NoError(t, err) + + jwk, err := GetPublicKeyJWK(&privateKey.PublicKey) + require.NoError(t, err) + require.NotEmpty(t, jwk) + require.Equal(t, "P-256", jwk.Crv) + require.Equal(t, "EC", jwk.Kty) + }) + + t.Run("success EC secp256k1 ", func(t *testing.T) { + privateKey, err := ecdsa.GenerateKey(btcec.S256(), rand.Reader) + require.NoError(t, err) + + jwk, err := GetPublicKeyJWK(&privateKey.PublicKey) + require.NoError(t, err) + require.NotEmpty(t, jwk) + require.Equal(t, "secp256k1", jwk.Crv) + require.Equal(t, "EC", jwk.Kty) + }) + + t.Run("success ED25519", func(t *testing.T) { + publicKey, _, err := ed25519.GenerateKey(rand.Reader) + require.NoError(t, err) + + jwk, err := GetPublicKeyJWK(publicKey) + require.NoError(t, err) + require.NotEmpty(t, jwk) + require.Equal(t, "Ed25519", jwk.Crv) + require.Equal(t, "OKP", jwk.Kty) + }) + + t.Run("unknown key type", func(t *testing.T) { + _, privateKey, err := ed25519.GenerateKey(rand.Reader) + require.NoError(t, err) + + jwk, err := GetPublicKeyJWK(privateKey) + require.Error(t, err) + require.Nil(t, jwk) + require.Contains(t, err.Error(), "unknown key type") + }) + t.Run("marshall error", func(t *testing.T) { + privateKey, err := ecdsa.GenerateKey(elliptic.P256(), rand.Reader) + require.NoError(t, err) + + privateKey.PublicKey = ecdsa.PublicKey{ + Curve: nil, + X: nil, + Y: nil, + } + + jwk, err := GetPublicKeyJWK(&privateKey.PublicKey) + require.Error(t, err) + require.Nil(t, jwk) + require.Contains(t, err.Error(), "invalid EC key") + }) +} diff --git a/method/sidetreelongform/sidetree-core/versions/1_0/client/create.go b/method/sidetreelongform/sidetree-core/versions/1_0/client/create.go new file mode 100644 index 0000000..d20b95d --- /dev/null +++ b/method/sidetreelongform/sidetree-core/versions/1_0/client/create.go @@ -0,0 +1,124 @@ +/* +Copyright SecureKey Technologies Inc. 
All Rights Reserved. + +SPDX-License-Identifier: Apache-2.0 +*/ + +package client + +import ( + "errors" + "fmt" + + "github.com/multiformats/go-multihash" + + "github.com/trustbloc/did-go/doc/json/canonicalizer" + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/api/operation" + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/hashing" + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/patch" + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/versions/1_0/model" +) + +// CreateRequestInfo contains data for creating create payload. +type CreateRequestInfo struct { + + // opaque document content + // required + OpaqueDocument string + + // patches that will be used to create document + // required if opaque document is not specified + Patches []patch.Patch + + // the recovery commitment + // required + RecoveryCommitment string + + // the update commitment + // required + UpdateCommitment string + + // AnchorOrigin signifies the system(s) that know the most recent anchor for this DID (optional) + AnchorOrigin interface{} + + // Type signifies the type of entity a DID represents (optional) + Type string + + // latest hashing algorithm supported by protocol + MultihashCode uint +} + +// NewCreateRequest is utility function to create payload for 'create' request. +func NewCreateRequest(info *CreateRequestInfo) ([]byte, error) { + if err := validateCreateRequest(info); err != nil { + return nil, err + } + + patches, err := getPatches(info.OpaqueDocument, info.Patches) + if err != nil { + return nil, err + } + + delta := &model.DeltaModel{ + UpdateCommitment: info.UpdateCommitment, + Patches: patches, + } + + deltaHash, err := hashing.CalculateModelMultihash(delta, info.MultihashCode) + if err != nil { + return nil, err + } + + suffixData := &model.SuffixDataModel{ + DeltaHash: deltaHash, + RecoveryCommitment: info.RecoveryCommitment, + AnchorOrigin: info.AnchorOrigin, + Type: info.Type, + } + + schema := &model.CreateRequest{ + Operation: operation.TypeCreate, + Delta: delta, + SuffixData: suffixData, + } + + return canonicalizer.MarshalCanonical(schema) +} + +func getPatches(opaque string, patches []patch.Patch) ([]patch.Patch, error) { + if opaque != "" { + return patch.PatchesFromDocument(opaque) + } + + return patches, nil +} + +func validateCreateRequest(info *CreateRequestInfo) error { + if info.OpaqueDocument == "" && len(info.Patches) == 0 { + return errors.New("either opaque document or patches have to be supplied") + } + + if info.OpaqueDocument != "" && len(info.Patches) > 0 { + return errors.New("cannot provide both opaque document and patches") + } + + supported := multihash.ValidCode(uint64(info.MultihashCode)) + + if !supported { + return fmt.Errorf("multihash[%d] not supported", info.MultihashCode) + } + + if !hashing.IsComputedUsingMultihashAlgorithms(info.RecoveryCommitment, []uint{info.MultihashCode}) { + return errors.New("next recovery commitment is not computed with the specified hash algorithm") + } + + if !hashing.IsComputedUsingMultihashAlgorithms(info.UpdateCommitment, []uint{info.MultihashCode}) { + return errors.New("next update commitment is not computed with the specified hash algorithm") + } + + if info.RecoveryCommitment == info.UpdateCommitment { + return errors.New("recovery and update commitments cannot be equal, re-using public keys is not allowed") + } + + return nil +} diff --git a/method/sidetreelongform/sidetree-core/versions/1_0/client/create_test.go 
b/method/sidetreelongform/sidetree-core/versions/1_0/client/create_test.go new file mode 100644 index 0000000..c38e141 --- /dev/null +++ b/method/sidetreelongform/sidetree-core/versions/1_0/client/create_test.go @@ -0,0 +1,188 @@ +/* +Copyright SecureKey Technologies Inc. All Rights Reserved. + +SPDX-License-Identifier: Apache-2.0 +*/ + +package client + +import ( + "crypto/ecdsa" + "crypto/elliptic" + "crypto/rand" + "encoding/json" + "testing" + + "github.com/stretchr/testify/require" + + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/commitment" + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/patch" + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/util/pubkey" + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/versions/1_0/model" +) + +const ( + didSuffix = "whatever" + opaqueDoc = "{}" + + signerErr = "signer error" + + sha2_256 = 18 +) + +func TestNewCreateRequest(t *testing.T) { + recoverPrivateKey, err := ecdsa.GenerateKey(elliptic.P256(), rand.Reader) + require.NoError(t, err) + + updatePrivateKey, err := ecdsa.GenerateKey(elliptic.P256(), rand.Reader) + require.NoError(t, err) + + recoverJWK, err := pubkey.GetPublicKeyJWK(&recoverPrivateKey.PublicKey) + require.NoError(t, err) + + updateJWK, err := pubkey.GetPublicKeyJWK(&updatePrivateKey.PublicKey) + require.NoError(t, err) + + recoveryCommitment, err := commitment.GetCommitment(recoverJWK, sha2_256) + require.NoError(t, err) + + updateCommitment, err := commitment.GetCommitment(updateJWK, sha2_256) + require.NoError(t, err) + + t.Run("missing opaque document or patches", func(t *testing.T) { + request, err := NewCreateRequest(&CreateRequestInfo{}) + require.Error(t, err) + require.Empty(t, request) + require.Contains(t, err.Error(), "either opaque document or patches have to be supplied") + }) + t.Run("cannot provide both opaque document and patches", func(t *testing.T) { + request, err := NewCreateRequest(&CreateRequestInfo{OpaqueDocument: "{}", Patches: []patch.Patch{{}}}) + require.Error(t, err) + require.Empty(t, request) + require.Contains(t, err.Error(), "cannot provide both opaque document and patches") + }) + t.Run("recovery commitment error", func(t *testing.T) { + request, err := NewCreateRequest( + &CreateRequestInfo{OpaqueDocument: "{}", RecoveryCommitment: recoveryCommitment}) + require.Error(t, err) + require.Empty(t, request) + require.Contains(t, err.Error(), + "recovery commitment is not computed with the specified hash algorithm") + }) + t.Run("update commitment error", func(t *testing.T) { + info := &CreateRequestInfo{ + OpaqueDocument: "{}", + RecoveryCommitment: recoveryCommitment, + MultihashCode: sha2_256, + } + + request, err := NewCreateRequest(info) + require.Error(t, err) + require.Empty(t, request) + require.Contains(t, err.Error(), "update commitment is not computed with the specified hash algorithm") + }) + t.Run("multihash not supported", func(t *testing.T) { + info := &CreateRequestInfo{ + OpaqueDocument: "{}", + MultihashCode: 55, + } + + request, err := NewCreateRequest(info) + require.Error(t, err) + require.Empty(t, request) + require.Contains(t, err.Error(), "multihash[55] not supported") + }) + t.Run("error - malformed opaque doc", func(t *testing.T) { + info := &CreateRequestInfo{ + OpaqueDocument: `{,}`, + RecoveryCommitment: recoveryCommitment, + UpdateCommitment: updateCommitment, + MultihashCode: sha2_256, + } + + request, err := NewCreateRequest(info) + require.Error(t, err) + require.Empty(t, request) + 
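+		// the malformed opaque document surfaces as a raw JSON decoder error
+		// from patch parsing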
require.Contains(t, err.Error(), "invalid character ','") + }) + + t.Run("error - update and recover commitment equal", func(t *testing.T) { + info := &CreateRequestInfo{ + OpaqueDocument: "{}", + RecoveryCommitment: recoveryCommitment, + UpdateCommitment: recoveryCommitment, + MultihashCode: sha2_256, + } + + request, err := NewCreateRequest(info) + require.Error(t, err) + require.Empty(t, request) + require.Contains(t, err.Error(), + "recovery and update commitments cannot be equal, re-using public keys is not allowed") + }) + + t.Run("success - opaque document", func(t *testing.T) { + info := &CreateRequestInfo{ + OpaqueDocument: "{}", + RecoveryCommitment: recoveryCommitment, + UpdateCommitment: updateCommitment, + MultihashCode: sha2_256, + } + + request, err := NewCreateRequest(info) + require.NoError(t, err) + require.NotEmpty(t, request) + }) + + t.Run("success - patches", func(t *testing.T) { + p, err := patch.NewAddPublicKeysPatch(addKeys) + require.NoError(t, err) + + info := &CreateRequestInfo{ + Patches: []patch.Patch{p}, + RecoveryCommitment: recoveryCommitment, + UpdateCommitment: updateCommitment, + MultihashCode: sha2_256, + } + + request, err := NewCreateRequest(info) + require.NoError(t, err) + require.NotEmpty(t, request) + }) + + t.Run("success - optional params (entity type and anchor origin)", func(t *testing.T) { + p, err := patch.NewAddPublicKeysPatch(addKeys) + require.NoError(t, err) + + info := &CreateRequestInfo{ + Patches: []patch.Patch{p}, + RecoveryCommitment: recoveryCommitment, + UpdateCommitment: updateCommitment, + AnchorOrigin: "anchor-origin", + Type: "did-entity-type", + MultihashCode: sha2_256, + } + + bytes, err := NewCreateRequest(info) + require.NoError(t, err) + require.NotEmpty(t, bytes) + + var request model.CreateRequest + err = json.Unmarshal(bytes, &request) + require.NoError(t, err) + + require.Contains(t, request.SuffixData.AnchorOrigin, "anchor-origin") + require.Contains(t, request.SuffixData.Type, "did-entity-type") + }) +} + +const addKeys = `[{ + "id": "test", + "type": "JsonWebKey2020", + "purposes": ["authentication"], + "publicKeyJwk": { + "kty": "EC", + "crv": "P-256K", + "x": "PUymIqdtF_qxaAqPABSw-C-owT1KYYQbsMKFM-L9fJA" + } +}]` diff --git a/method/sidetreelongform/sidetree-core/versions/1_0/client/deactivate.go b/method/sidetreelongform/sidetree-core/versions/1_0/client/deactivate.go new file mode 100644 index 0000000..0913f62 --- /dev/null +++ b/method/sidetreelongform/sidetree-core/versions/1_0/client/deactivate.go @@ -0,0 +1,122 @@ +/* +Copyright SecureKey Technologies Inc. All Rights Reserved. + +SPDX-License-Identifier: Apache-2.0 +*/ + +package client + +import ( + "errors" + "fmt" + + "github.com/trustbloc/did-go/doc/json/canonicalizer" + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/api/operation" + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/internal/signutil" + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/jws" + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/versions/1_0/model" +) + +// Signer defines JWS Signer interface that will be used to sign required data in Sidetree request. +type Signer interface { + // Sign signs data and returns signature value + Sign(data []byte) ([]byte, error) + + // Headers provides required JWS protected headers. It provides information about signing key and algorithm. + Headers() jws.Headers +} + +// DeactivateRequestInfo is the information required to create deactivate request. 
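+// The deactivate operation is signed with the recovery key and, once anchored, +// permanently deactivates the DID.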
+type DeactivateRequestInfo struct { + + // DidSuffix is the suffix of the document to be deactivated + DidSuffix string + + // RecoveryKey is the recovery key for the current deactivate request + RecoveryKey *jws.JWK + + // Signer that will be used for signing a specific subset of request data + // The signer for a deactivate operation must be the recovery key + Signer Signer + + // RevealValue is the reveal value for the current recovery commitment + RevealValue string + + // AnchorFrom defines earliest time for this operation. + AnchorFrom int64 + + // AnchorUntil defines expiry time for this operation. + AnchorUntil int64 +} + +// NewDeactivateRequest is a utility function that creates the payload for a 'deactivate' request. +func NewDeactivateRequest(info *DeactivateRequestInfo) ([]byte, error) { + if err := validateDeactivateRequest(info); err != nil { + return nil, err + } + + signedDataModel := model.DeactivateSignedDataModel{ + DidSuffix: info.DidSuffix, + RecoveryKey: info.RecoveryKey, + AnchorFrom: info.AnchorFrom, + AnchorUntil: info.AnchorUntil, + } + + signModel, err := signutil.SignModel(signedDataModel, info.Signer) + if err != nil { + return nil, err + } + + schema := &model.DeactivateRequest{ + Operation: operation.TypeDeactivate, + DidSuffix: info.DidSuffix, + RevealValue: info.RevealValue, + SignedData: signModel, + } + + return canonicalizer.MarshalCanonical(schema) +} + +func validateDeactivateRequest(info *DeactivateRequestInfo) error { + if info.DidSuffix == "" { + return errors.New("missing did unique suffix") + } + + if info.RevealValue == "" { + return errors.New("missing reveal value") + } + + return validateSigner(info.Signer) +} + +func validateSigner(signer Signer) error { + if signer == nil { + return errors.New("missing signer") + } + + if signer.Headers() == nil { + return errors.New("missing protected headers") + } + + alg, ok := signer.Headers().Algorithm() + if !ok { + return errors.New("algorithm must be present in the protected header") + } + + if alg == "" { + return errors.New("algorithm cannot be empty in the protected header") + } + + allowedHeaders := map[string]bool{ + jws.HeaderAlgorithm: true, + jws.HeaderKeyID: true, + } + + for h := range signer.Headers() { + if _, ok := allowedHeaders[h]; !ok { + return fmt.Errorf("header '%s' is not allowed in the protected headers", h) + } + } + + return nil +} diff --git a/method/sidetreelongform/sidetree-core/versions/1_0/client/deactivate_test.go b/method/sidetreelongform/sidetree-core/versions/1_0/client/deactivate_test.go new file mode 100644 index 0000000..ac008c7 --- /dev/null +++ b/method/sidetreelongform/sidetree-core/versions/1_0/client/deactivate_test.go @@ -0,0 +1,171 @@ +/* +Copyright SecureKey Technologies Inc. All Rights Reserved.
+ +SPDX-License-Identifier: Apache-2.0 +*/ + +package client + +import ( + "crypto/ecdsa" + "crypto/elliptic" + "crypto/rand" + "errors" + "testing" + + "github.com/stretchr/testify/require" + + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/jws" + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/util/ecsigner" + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/util/pubkey" +) + +func TestNewDeactivateRequest(t *testing.T) { + t.Run("missing unique suffix", func(t *testing.T) { + info := &DeactivateRequestInfo{} + + request, err := NewDeactivateRequest(info) + require.Error(t, err) + require.Empty(t, request) + require.Contains(t, err.Error(), "missing did unique suffix") + }) + t.Run("missing reveal value", func(t *testing.T) { + info := &DeactivateRequestInfo{DidSuffix: "suffix"} + + request, err := NewDeactivateRequest(info) + require.Error(t, err) + require.Empty(t, request) + require.Contains(t, err.Error(), "missing reveal value") + }) + t.Run("signing error", func(t *testing.T) { + info := &DeactivateRequestInfo{ + DidSuffix: "whatever", + Signer: NewMockSigner(errors.New(signerErr)), + RevealValue: "reveal", + } + + request, err := NewDeactivateRequest(info) + require.Error(t, err) + require.Empty(t, request) + require.Contains(t, err.Error(), signerErr) + }) + t.Run("success", func(t *testing.T) { + privateKey, err := ecdsa.GenerateKey(elliptic.P256(), rand.Reader) + require.NoError(t, err) + + jwk, err := pubkey.GetPublicKeyJWK(&privateKey.PublicKey) + require.NoError(t, err) + + signer := ecsigner.New(privateKey, "ES256", "") + + info := &DeactivateRequestInfo{ + DidSuffix: "whatever", + Signer: signer, + RecoveryKey: jwk, + RevealValue: "reveal", + } + + request, err := NewDeactivateRequest(info) + require.NoError(t, err) + require.NotEmpty(t, request) + }) +} + +func TestValidateSigner(t *testing.T) { + privateKey, err := ecdsa.GenerateKey(elliptic.P256(), rand.Reader) + require.NoError(t, err) + + const testKid = "kid" + + t.Run("success - kid can be empty", func(t *testing.T) { + signer := ecsigner.New(privateKey, "alg", "") + + err := validateSigner(signer) + require.NoError(t, err) + }) + t.Run("success - kid can be provided", func(t *testing.T) { + signer := ecsigner.New(privateKey, "alg", testKid) + + err := validateSigner(signer) + require.NoError(t, err) + }) + t.Run("error - missing signer", func(t *testing.T) { + err := validateSigner(nil) + require.Error(t, err) + require.Contains(t, err.Error(), "missing signer") + }) + + t.Run("error - missing protected headers", func(t *testing.T) { + err := validateSigner(&MockSigner{}) + require.Error(t, err) + require.Contains(t, err.Error(), "missing protected headers") + }) + + t.Run("err - algorithm must be present in the protected header", func(t *testing.T) { + headers := make(jws.Headers) + + headers["kid"] = testKid + + signer := &MockSigner{MockHeaders: headers} + + err := validateSigner(signer) + require.Error(t, err) + require.Contains(t, err.Error(), "algorithm must be present in the protected header") + }) + + t.Run("err - algorithm cannot be empty", func(t *testing.T) { + headers := make(jws.Headers) + + headers["kid"] = testKid + headers["alg"] = "" + + signer := &MockSigner{MockHeaders: headers} + + err := validateSigner(signer) + require.Error(t, err) + require.Contains(t, err.Error(), "algorithm cannot be empty in the protected header") + }) + + t.Run("err - invalid protected header value", func(t *testing.T) { + headers := make(jws.Headers) + + 
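// validateSigner permits only the 'alg' and 'kid' protected headers, so the + // extra 'invalid' header below must be rejected +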
headers["kid"] = "kid" + headers["alg"] = "alg" + headers["invalid"] = "value" + + signer := &MockSigner{MockHeaders: headers} + + err := validateSigner(signer) + require.Error(t, err) + require.Contains(t, err.Error(), "header 'invalid' is not allowed in the protected headers") + }) +} + +// MockSigner implements signer interface. +type MockSigner struct { + MockHeaders jws.Headers + Err error +} + +// New creates new mock signer (default to recovery signer). +func NewMockSigner(err error) *MockSigner { + headers := make(jws.Headers) + headers[jws.HeaderAlgorithm] = "alg" + headers[jws.HeaderKeyID] = "kid" + + return &MockSigner{Err: err, MockHeaders: headers} +} + +// Headers provides required JWS protected headers. It provides information about signing key and algorithm. +func (ms *MockSigner) Headers() jws.Headers { + return ms.MockHeaders +} + +// Sign signs msg and returns mock signature value. +func (ms *MockSigner) Sign(msg []byte) ([]byte, error) { + if ms.Err != nil { + return nil, ms.Err + } + + return []byte("signature"), nil +} diff --git a/method/sidetreelongform/sidetree-core/versions/1_0/client/recover.go b/method/sidetreelongform/sidetree-core/versions/1_0/client/recover.go new file mode 100644 index 0000000..aba756c --- /dev/null +++ b/method/sidetreelongform/sidetree-core/versions/1_0/client/recover.go @@ -0,0 +1,160 @@ +/* +Copyright SecureKey Technologies Inc. All Rights Reserved. + +SPDX-License-Identifier: Apache-2.0 +*/ + +package client + +import ( + "errors" + "fmt" + + "github.com/trustbloc/did-go/doc/json/canonicalizer" + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/api/operation" + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/commitment" + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/hashing" + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/internal/signutil" + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/jws" + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/patch" + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/versions/1_0/model" +) + +// RecoverRequestInfo is the information required to create recover request. +type RecoverRequestInfo struct { + + // DidSuffix is the suffix of the document to be recovered + DidSuffix string + + // RecoveryKey is the current recovery public key + RecoveryKey *jws.JWK + + // OpaqueDocument is opaque content + OpaqueDocument string + + // Patches that will be used to create document + // required if opaque document is not specified + Patches []patch.Patch + + // RecoveryCommitment is recovery commitment to be used for the next recovery + RecoveryCommitment string + + // UpdateCommitment is update commitment to be used for the next update + UpdateCommitment string + + // AnchorOrigin signifies the system(s) that know the most recent anchor for this DID (optional) + AnchorOrigin interface{} + + // AnchorFrom defines earliest time for this operation. + AnchorFrom int64 + + // AnchorUntil defines expiry time for this operation. + AnchorUntil int64 + + // MultihashCode is the latest hashing algorithm supported by protocol + MultihashCode uint + + // Signer will be used for signing specific subset of request data + // Signer for recover operation must be recovery key + Signer Signer + + // RevealValue is reveal value + RevealValue string +} + +// NewRecoverRequest is utility function to create payload for 'recovery' request. 
+func NewRecoverRequest(info *RecoverRequestInfo) ([]byte, error) { + err := validateRecoverRequest(info) + if err != nil { + return nil, err + } + + patches, err := getPatches(info.OpaqueDocument, info.Patches) + if err != nil { + return nil, err + } + + delta := &model.DeltaModel{ + UpdateCommitment: info.UpdateCommitment, + Patches: patches, + } + + deltaHash, err := hashing.CalculateModelMultihash(delta, info.MultihashCode) + if err != nil { + return nil, err + } + + signedDataModel := model.RecoverSignedDataModel{ + DeltaHash: deltaHash, + RecoveryKey: info.RecoveryKey, + RecoveryCommitment: info.RecoveryCommitment, + AnchorOrigin: info.AnchorOrigin, + AnchorFrom: info.AnchorFrom, + AnchorUntil: info.AnchorUntil, + } + + err = validateCommitment(info.RecoveryKey, info.MultihashCode, info.RecoveryCommitment) + if err != nil { + return nil, err + } + + signModel, err := signutil.SignModel(signedDataModel, info.Signer) + if err != nil { + return nil, err + } + + schema := &model.RecoverRequest{ + Operation: operation.TypeRecover, + DidSuffix: info.DidSuffix, + RevealValue: info.RevealValue, + Delta: delta, + SignedData: signModel, + } + + return canonicalizer.MarshalCanonical(schema) +} + +func validateRecoverRequest(info *RecoverRequestInfo) error { + if info.DidSuffix == "" { + return errors.New("missing did unique suffix") + } + + if info.RevealValue == "" { + return errors.New("missing reveal value") + } + + if info.OpaqueDocument == "" && len(info.Patches) == 0 { + return errors.New("either opaque document or patches have to be supplied") + } + + if info.OpaqueDocument != "" && len(info.Patches) > 0 { + return errors.New("cannot provide both opaque document and patches") + } + + if err := validateSigner(info.Signer); err != nil { + return err + } + + return validateRecoveryKey(info.RecoveryKey) +} + +func validateRecoveryKey(key *jws.JWK) error { + if key == nil { + return errors.New("missing recovery key") + } + + return key.Validate() +} + +func validateCommitment(jwk *jws.JWK, multihashCode uint, nextCommitment string) error { + currentCommitment, err := commitment.GetCommitment(jwk, multihashCode) + if err != nil { + return fmt.Errorf("calculate current commitment: %s", err.Error()) + } + + if currentCommitment == nextCommitment { + return errors.New("re-using public keys for commitment is not allowed") + } + + return nil +} diff --git a/method/sidetreelongform/sidetree-core/versions/1_0/client/recover_test.go b/method/sidetreelongform/sidetree-core/versions/1_0/client/recover_test.go new file mode 100644 index 0000000..441746e --- /dev/null +++ b/method/sidetreelongform/sidetree-core/versions/1_0/client/recover_test.go @@ -0,0 +1,205 @@ +/* +Copyright SecureKey Technologies Inc. All Rights Reserved. 
+ +SPDX-License-Identifier: Apache-2.0 +*/ + +package client + +import ( + "crypto/ecdsa" + "crypto/elliptic" + "crypto/rand" + "encoding/json" + "errors" + "testing" + + "github.com/stretchr/testify/require" + + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/commitment" + internaljws "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/internal/jws" + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/patch" + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/util/ecsigner" + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/util/pubkey" + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/versions/1_0/model" +) + +func TestNewRecoverRequest(t *testing.T) { + t.Run("missing unique suffix", func(t *testing.T) { + info := getRecoverRequestInfo() + info.DidSuffix = "" + + request, err := NewRecoverRequest(info) + require.Error(t, err) + require.Empty(t, request) + require.Contains(t, err.Error(), "missing did unique suffix") + }) + t.Run("missing reveal value", func(t *testing.T) { + info := getRecoverRequestInfo() + info.RevealValue = "" + + request, err := NewRecoverRequest(info) + require.Error(t, err) + require.Empty(t, request) + require.Contains(t, err.Error(), "missing reveal value") + }) + t.Run("missing opaque document", func(t *testing.T) { + info := getRecoverRequestInfo() + info.OpaqueDocument = "" + + request, err := NewRecoverRequest(info) + require.Error(t, err) + require.Empty(t, request) + require.Contains(t, err.Error(), "either opaque document or patches have to be supplied") + }) + t.Run("cannot provide both opaque document and patches", func(t *testing.T) { + info := getRecoverRequestInfo() + info.Patches = []patch.Patch{{}} + + request, err := NewRecoverRequest(info) + require.Error(t, err) + require.Empty(t, request) + require.Contains(t, err.Error(), "cannot provide both opaque document and patches") + }) + t.Run("missing recovery key", func(t *testing.T) { + info := getRecoverRequestInfo() + info.RecoveryKey = nil + + request, err := NewRecoverRequest(info) + require.Error(t, err) + require.Empty(t, request) + require.Contains(t, err.Error(), "missing recovery key") + }) + t.Run("missing signer", func(t *testing.T) { + info := getRecoverRequestInfo() + info.Signer = nil + + request, err := NewRecoverRequest(info) + require.Error(t, err) + require.Empty(t, request) + require.Contains(t, err.Error(), "missing signer") + }) + t.Run("multihash not supported", func(t *testing.T) { + info := getRecoverRequestInfo() + info.MultihashCode = 55 + + request, err := NewRecoverRequest(info) + require.Error(t, err) + require.Empty(t, request) + require.Contains(t, err.Error(), "algorithm not supported") + }) + t.Run("signing error", func(t *testing.T) { + info := getRecoverRequestInfo() + info.Signer = NewMockSigner(errors.New(signerErr)) + + request, err := NewRecoverRequest(info) + require.Error(t, err) + require.Empty(t, request) + require.Contains(t, err.Error(), signerErr) + }) + t.Run("error - malformed opaque doc", func(t *testing.T) { + info := getRecoverRequestInfo() + info.OpaqueDocument = "{,}" + + request, err := NewRecoverRequest(info) + require.Error(t, err) + require.Empty(t, request) + require.Contains(t, err.Error(), "invalid character ','") + }) + + t.Run("error - re-using public keys for commitment is not allowed", func(t *testing.T) { + info := getRecoverRequestInfo() + + currentCommitment, err := commitment.GetCommitment(info.RecoveryKey, info.MultihashCode) + 
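// the commitment is derived by hashing the canonicalized public key JWK, so + // reusing the current recovery key as the next commitment must be rejected +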
require.NoError(t, err) + + info.RecoveryCommitment = currentCommitment + + request, err := NewRecoverRequest(info) + require.Error(t, err) + require.Empty(t, request) + require.Contains(t, err.Error(), "re-using public keys for commitment is not allowed") + }) + + t.Run("success - opaque document", func(t *testing.T) { + info := getRecoverRequestInfo() + + bytes, err := NewRecoverRequest(info) + require.NoError(t, err) + require.NotEmpty(t, bytes) + + var request map[string]interface{} + err = json.Unmarshal(bytes, &request) + require.NoError(t, err) + + require.Equal(t, "recover", request["type"]) + require.Equal(t, didSuffix, request["didSuffix"]) + }) + + t.Run("success - json patches", func(t *testing.T) { + p, err := patch.NewAddPublicKeysPatch(addKeys) + require.NoError(t, err) + + // default request info is constructed with opaque document; switch to patches + info := getRecoverRequestInfo() + info.OpaqueDocument = "" + info.Patches = []patch.Patch{p} + + bytes, err := NewRecoverRequest(info) + require.NoError(t, err) + require.NotEmpty(t, bytes) + + var request map[string]interface{} + err = json.Unmarshal(bytes, &request) + require.NoError(t, err) + + require.Equal(t, "recover", request["type"]) + require.Equal(t, didSuffix, request["didSuffix"]) + }) + + t.Run("success - optional params (anchor origin)", func(t *testing.T) { + info := getRecoverRequestInfo() + info.AnchorOrigin = "test-anchor-origin" + + bytes, err := NewRecoverRequest(info) + require.NoError(t, err) + require.NotEmpty(t, bytes) + + var request map[string]interface{} + err = json.Unmarshal(bytes, &request) + require.NoError(t, err) + + jws, ok := request["signedData"] + require.True(t, ok) + + signedData, err := internaljws.ParseJWS(jws.(string)) + require.NoError(t, err) + + var signedModel model.RecoverSignedDataModel + err = json.Unmarshal(signedData.Payload, &signedModel) + require.NoError(t, err) + + require.Equal(t, "test-anchor-origin", signedModel.AnchorOrigin) + }) +} + +func getRecoverRequestInfo() *RecoverRequestInfo { + privKey, err := ecdsa.GenerateKey(elliptic.P256(), rand.Reader) + if err != nil { + panic(err) + } + + jwk, err := pubkey.GetPublicKeyJWK(&privKey.PublicKey) + if err != nil { + panic(err) + } + + return &RecoverRequestInfo{ + DidSuffix: didSuffix, + OpaqueDocument: opaqueDoc, + RecoveryKey: jwk, + MultihashCode: sha2_256, + Signer: ecsigner.New(privKey, "ES256", ""), + RevealValue: "reveal", + } +} diff --git a/method/sidetreelongform/sidetree-core/versions/1_0/client/update.go b/method/sidetreelongform/sidetree-core/versions/1_0/client/update.go new file mode 100644 index 0000000..f553a94 --- /dev/null +++ b/method/sidetreelongform/sidetree-core/versions/1_0/client/update.go @@ -0,0 +1,122 @@ +/* +Copyright SecureKey Technologies Inc. All Rights Reserved. + +SPDX-License-Identifier: Apache-2.0 +*/ + +package client + +import ( + "errors" + + "github.com/trustbloc/did-go/doc/json/canonicalizer" + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/api/operation" + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/hashing" + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/internal/signutil" + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/jws" + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/patch" + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/versions/1_0/model" +) + +// UpdateRequestInfo is the information required to create update request. 
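+// The update operation is signed with the update key; reusing the update key +// for the next update commitment is rejected.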
+type UpdateRequestInfo struct { + + // DidSuffix is the suffix of the document to be updated + DidSuffix string + + // Patches is an array of standard patch actions + Patches []patch.Patch + + // UpdateCommitment is the update commitment to be used for the next update + UpdateCommitment string + + // UpdateKey is the update key to be used for this update + UpdateKey *jws.JWK + + // MultihashCode is the latest hashing algorithm supported by the protocol + MultihashCode uint + + // Signer that will be used for signing a request-specific subset of data + Signer Signer + + // RevealValue is the reveal value for the current update commitment + RevealValue string + + // AnchorFrom defines earliest time for this operation. + AnchorFrom int64 + + // AnchorUntil defines expiry time for this operation. + AnchorUntil int64 +} + +// NewUpdateRequest is a utility function that creates the payload for an 'update' request. +func NewUpdateRequest(info *UpdateRequestInfo) ([]byte, error) { + if err := validateUpdateRequest(info); err != nil { + return nil, err + } + + delta := &model.DeltaModel{ + UpdateCommitment: info.UpdateCommitment, + Patches: info.Patches, + } + + deltaHash, err := hashing.CalculateModelMultihash(delta, info.MultihashCode) + if err != nil { + return nil, err + } + + signedDataModel := &model.UpdateSignedDataModel{ + DeltaHash: deltaHash, + UpdateKey: info.UpdateKey, + AnchorFrom: info.AnchorFrom, + AnchorUntil: info.AnchorUntil, + } + + err = validateCommitment(info.UpdateKey, info.MultihashCode, info.UpdateCommitment) + if err != nil { + return nil, err + } + + signModel, err := signutil.SignModel(signedDataModel, info.Signer) + if err != nil { + return nil, err + } + + schema := &model.UpdateRequest{ + Operation: operation.TypeUpdate, + DidSuffix: info.DidSuffix, + RevealValue: info.RevealValue, + Delta: delta, + SignedData: signModel, + } + + return canonicalizer.MarshalCanonical(schema) +} + +func validateUpdateRequest(info *UpdateRequestInfo) error { + if info.DidSuffix == "" { + return errors.New("missing did unique suffix") + } + + if info.RevealValue == "" { + return errors.New("missing reveal value") + } + + if len(info.Patches) == 0 { + return errors.New("missing update information") + } + + if err := validateUpdateKey(info.UpdateKey); err != nil { + return err + } + + return validateSigner(info.Signer) +} + +func validateUpdateKey(key *jws.JWK) error { + if key == nil { + return errors.New("missing update key") + } + + return key.Validate() +} diff --git a/method/sidetreelongform/sidetree-core/versions/1_0/client/update_test.go b/method/sidetreelongform/sidetree-core/versions/1_0/client/update_test.go new file mode 100644 index 0000000..e9e6ae4 --- /dev/null +++ b/method/sidetreelongform/sidetree-core/versions/1_0/client/update_test.go @@ -0,0 +1,178 @@ +/* +Copyright SecureKey Technologies Inc. All Rights Reserved.
+ +SPDX-License-Identifier: Apache-2.0 +*/ + +package client + +import ( + "crypto/ecdsa" + "crypto/elliptic" + "crypto/rand" + "errors" + "testing" + + "github.com/stretchr/testify/require" + + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/commitment" + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/jws" + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/patch" + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/util/ecsigner" +) + +func TestNewUpdateRequest(t *testing.T) { + const didSuffix = "whatever" + + patches, err := getTestPatches() + require.NoError(t, err) + + updateJWK := &jws.JWK{ + Crv: "crv", + Kty: "kty", + X: "x", + } + + signer := NewMockSigner(nil) + + t.Run("missing unique suffix", func(t *testing.T) { + info := &UpdateRequestInfo{} + + request, err := NewUpdateRequest(info) + require.Error(t, err) + require.Empty(t, request) + require.Contains(t, err.Error(), "missing did unique suffix") + }) + t.Run("missing reveal value", func(t *testing.T) { + info := &UpdateRequestInfo{DidSuffix: didSuffix} + + request, err := NewUpdateRequest(info) + require.Error(t, err) + require.Empty(t, request) + require.Contains(t, err.Error(), "missing reveal value") + }) + t.Run("missing json patch", func(t *testing.T) { + info := &UpdateRequestInfo{DidSuffix: didSuffix, RevealValue: "reveal"} + + request, err := NewUpdateRequest(info) + require.Error(t, err) + require.Empty(t, request) + require.Contains(t, err.Error(), "missing update information") + }) + t.Run("multihash not supported", func(t *testing.T) { + info := &UpdateRequestInfo{ + DidSuffix: didSuffix, + Patches: patches, + UpdateKey: updateJWK, + Signer: signer, + RevealValue: "reveal", + } + + request, err := NewUpdateRequest(info) + require.Error(t, err) + require.Empty(t, request) + require.Contains(t, err.Error(), "algorithm not supported") + }) + t.Run("missing update key", func(t *testing.T) { + signer = NewMockSigner(nil) + signer.MockHeaders = make(jws.Headers) + + info := &UpdateRequestInfo{ + DidSuffix: didSuffix, + Patches: patches, + MultihashCode: sha2_256, + Signer: signer, + RevealValue: "reveal", + } + + request, err := NewUpdateRequest(info) + require.Error(t, err) + require.Empty(t, request) + require.Contains(t, err.Error(), "missing update key") + }) + t.Run("algorithm must be present in the protected header", func(t *testing.T) { + signer = NewMockSigner(nil) + signer.MockHeaders = make(jws.Headers) + + info := &UpdateRequestInfo{ + DidSuffix: didSuffix, + Patches: patches, + MultihashCode: sha2_256, + UpdateKey: updateJWK, + Signer: signer, + RevealValue: "reveal", + } + + request, err := NewUpdateRequest(info) + require.Error(t, err) + require.Empty(t, request) + require.Contains(t, err.Error(), "algorithm must be present in the protected header") + }) + t.Run("signing error", func(t *testing.T) { + info := &UpdateRequestInfo{ + DidSuffix: didSuffix, + Patches: patches, + MultihashCode: sha2_256, + UpdateKey: updateJWK, + Signer: NewMockSigner(errors.New(signerErr)), + RevealValue: "reveal", + } + + request, err := NewUpdateRequest(info) + require.Error(t, err) + require.Empty(t, request) + require.Contains(t, err.Error(), signerErr) + }) + t.Run("error - re-using public keys for commitment is not allowed", func(t *testing.T) { + privateKey, err := ecdsa.GenerateKey(elliptic.P256(), rand.Reader) + require.NoError(t, err) + + signer := ecsigner.New(privateKey, "ES256", "key-1") + + currentCommitment, err := 
commitment.GetCommitment(updateJWK, sha2_256) + require.NoError(t, err) + + info := &UpdateRequestInfo{ + DidSuffix: didSuffix, + Patches: patches, + MultihashCode: sha2_256, + UpdateKey: updateJWK, + UpdateCommitment: currentCommitment, + Signer: signer, + RevealValue: "reveal", + } + + request, err := NewUpdateRequest(info) + require.Error(t, err) + require.Empty(t, request) + require.Contains(t, err.Error(), "re-using public keys for commitment is not allowed") + }) + t.Run("success", func(t *testing.T) { + privateKey, err := ecdsa.GenerateKey(elliptic.P256(), rand.Reader) + require.NoError(t, err) + + signer := ecsigner.New(privateKey, "ES256", "key-1") + + info := &UpdateRequestInfo{ + DidSuffix: didSuffix, + Patches: patches, + MultihashCode: sha2_256, + UpdateKey: updateJWK, + Signer: signer, + RevealValue: "reveal", + } + + request, err := NewUpdateRequest(info) + require.NoError(t, err) + require.NotEmpty(t, request) + }) +} + +func getTestPatches() ([]patch.Patch, error) { + p, err := patch.NewJSONPatch(`[{"op": "replace", "path": "/name", "value": "Jane"}]`) + if err != nil { + return nil, err + } + + return []patch.Patch{p}, nil +} diff --git a/method/sidetreelongform/sidetree-core/versions/1_0/doccomposer/composer.go b/method/sidetreelongform/sidetree-core/versions/1_0/doccomposer/composer.go new file mode 100644 index 0000000..8155e32 --- /dev/null +++ b/method/sidetreelongform/sidetree-core/versions/1_0/doccomposer/composer.go @@ -0,0 +1,360 @@ +/* +Copyright SecureKey Technologies Inc. All Rights Reserved. + +SPDX-License-Identifier: Apache-2.0 +*/ + +package doccomposer + +import ( + "encoding/json" + "fmt" + + jsonpatch "github.com/evanphx/json-patch" + + "github.com/trustbloc/logutil-go/pkg/log" + + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/document" + logfields "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/internal/log" + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/patch" +) + +var logger = log.New("sidetree-core-composer") + +// DocumentComposer applies patches to the document. +type DocumentComposer struct { +} + +// New creates new document composer. +func New() *DocumentComposer { + return &DocumentComposer{} +} + +// ApplyPatches applies patches to the document. +func (c *DocumentComposer) ApplyPatches(doc document.Document, patches []patch.Patch) (document.Document, error) { + result, err := deepCopy(doc) + if err != nil { + return nil, err + } + + for _, p := range patches { + result, err = applyPatch(result, p) + if err != nil { + return nil, err + } + } + + return result, nil +} + +// applyPatch applies a patch to the document. 
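+// The patch action selects the handler; an unsupported action results in an error.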
+func applyPatch(doc document.Document, p patch.Patch) (document.Document, error) { //nolint:gocyclo + action, err := p.GetAction() + if err != nil { + return nil, err + } + + value, err := p.GetValue() + if err != nil { + return nil, err + } + + switch action { + case patch.Replace: + return applyRecover(value) + case patch.JSONPatch: + return applyJSON(doc, value) + case patch.AddPublicKeys: + return applyAddPublicKeys(doc, value) + case patch.RemovePublicKeys: + return applyRemovePublicKeys(doc, value) + case patch.AddServiceEndpoints: + return applyAddServiceEndpoints(doc, value) + case patch.RemoveServiceEndpoints: + return applyRemoveServiceEndpoints(doc, value) + case patch.AddAlsoKnownAs: + return applyAddAlsoKnownAs(doc, value) + case patch.RemoveAlsoKnownAs: + return applyRemoveAlsoKnownAs(doc, value) + } + + return nil, fmt.Errorf("action '%s' is not supported", action) +} + +func applyJSON(doc document.Document, entry interface{}) (document.Document, error) { + logger.Debug("Applying JSON patch", logfields.WithPatch(entry)) + + bytes, err := json.Marshal(entry) + if err != nil { + return nil, err + } + + jsonPatches, err := jsonpatch.DecodePatch(bytes) + if err != nil { + return nil, err + } + + docBytes, err := doc.Bytes() + if err != nil { + return nil, err + } + + docBytes, err = jsonPatches.Apply(docBytes) + if err != nil { + return nil, err + } + + return document.FromBytes(docBytes) +} + +func applyRecover(replaceDoc interface{}) (document.Document, error) { + logger.Debug("Applying replace patch", logfields.WithPatch(replaceDoc)) + + docBytes, err := json.Marshal(replaceDoc) + if err != nil { + return nil, err + } + + replace, err := document.ReplaceDocumentFromBytes(docBytes) + if err != nil { + return nil, err + } + + doc := make(document.Document) + doc[document.PublicKeyProperty] = replace[document.ReplacePublicKeyProperty] + doc[document.ServiceProperty] = replace[document.ReplaceServiceProperty] + + return doc, nil +} + +// adds public keys to document. +func applyAddPublicKeys(doc document.Document, entry interface{}) (document.Document, error) { + logger.Debug("Applying add public keys patch", logfields.WithPatch(entry)) + + addPublicKeys := document.ParsePublicKeys(entry) + existingPublicKeysMap := sliceToMapPK(doc.PublicKeys()) + + var newPublicKeys []document.PublicKey + newPublicKeys = append(newPublicKeys, doc.PublicKeys()...) + + for _, key := range addPublicKeys { + _, ok := existingPublicKeysMap[key.ID()] + if ok { + // if a key ID already exists, we will just replace the existing key + updateKey(newPublicKeys, key) + } else { + // new key - append it to existing keys + newPublicKeys = append(newPublicKeys, key) + } + } + + doc[document.PublicKeyProperty] = convertPublicKeys(newPublicKeys) + + return doc, nil +} + +func updateKey(keys []document.PublicKey, key document.PublicKey) { + for index, pk := range keys { + if pk.ID() == key.ID() { + keys[index] = key + } + } +} + +func convertPublicKeys(pubKeys []document.PublicKey) []interface{} { + var values []interface{} + for _, pk := range pubKeys { + values = append(values, pk.JSONLdObject()) + } + + return values +} + +// remove public keys from the document. 
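+// Key IDs that are not present in the document are ignored, so removal is idempotent.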
+func applyRemovePublicKeys(doc document.Document, entry interface{}) (document.Document, error) { + logger.Debug("Applying remove public keys patch", logfields.WithPatch(entry)) + + keysToRemove := sliceToMap(document.StringArray(entry)) + + var newPublicKeys []interface{} + + for _, key := range doc.PublicKeys() { + _, ok := keysToRemove[key.ID()] + if !ok { + // not in remove list so add to resulting public keys + newPublicKeys = append(newPublicKeys, key.JSONLdObject()) + } + } + + doc[document.PublicKeyProperty] = newPublicKeys + + return doc, nil +} + +func sliceToMap(ids []string) map[string]bool { + // convert slice to map + values := make(map[string]bool) + for _, id := range ids { + values[id] = true + } + + return values +} + +func sliceToMapPK(publicKeys []document.PublicKey) map[string]document.PublicKey { + // convert slice to map + values := make(map[string]document.PublicKey) + for _, pk := range publicKeys { + values[pk.ID()] = pk + } + + return values +} + +// adds service endpoints to document. +func applyAddServiceEndpoints(doc document.Document, entry interface{}) (document.Document, error) { + logger.Debug("Applying add service endpoints patch", logfields.WithPatch(entry)) + + didDoc := document.DidDocumentFromJSONLDObject(doc.JSONLdObject()) + + addServices := document.ParseServices(entry) + existingServicesMap := sliceToMapServices(didDoc.Services()) + + var newServices []document.Service + newServices = append(newServices, didDoc.Services()...) + + for _, service := range addServices { + _, ok := existingServicesMap[service.ID()] + if ok { + // if a service ID already exists, we will just replace the existing service + updateService(newServices, service) + } else { + // new service - append it to existing services + newServices = append(newServices, service) + } + } + + doc[document.ServiceProperty] = convertServices(newServices) + + return doc, nil +} + +func updateService(services []document.Service, service document.Service) { + for index, s := range services { + if s.ID() == service.ID() { + services[index] = service + } + } +} + +func convertServices(services []document.Service) []interface{} { + var values []interface{} + for _, service := range services { + values = append(values, service.JSONLdObject()) + } + + return values +} + +func applyRemoveServiceEndpoints(doc document.Document, entry interface{}) (document.Document, error) { + logger.Debug("Applying remove service endpoints patch", logfields.WithPatch(entry)) + + didDoc := document.DidDocumentFromJSONLDObject(doc.JSONLdObject()) + servicesToRemove := sliceToMap(document.StringArray(entry)) + + var newServices []interface{} + + for _, service := range didDoc.Services() { + _, ok := servicesToRemove[service.ID()] + if !ok { + // not in remove list so add to resulting services + newServices = append(newServices, service.JSONLdObject()) + } + } + + doc[document.ServiceProperty] = newServices + + return doc, nil +} + +func sliceToMapServices(services []document.Service) map[string]document.Service { + // convert slice to map + values := make(map[string]document.Service) + for _, svc := range services { + values[svc.ID()] = svc + } + + return values +} + +// adds also-known-as to document. 
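+// Duplicate URIs are skipped, so applying the same patch twice leaves the document unchanged.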
+func applyAddAlsoKnownAs(doc document.Document, entry interface{}) (document.Document, error) { + logger.Debug("applying add also-known-as patch", logfields.WithPatch(entry)) + + didDoc := document.DidDocumentFromJSONLDObject(doc.JSONLdObject()) + + addURIs := document.StringArray(entry) + existingURIs := sliceToMap(didDoc.AlsoKnownAs()) + + var newURIs []string + newURIs = append(newURIs, didDoc.AlsoKnownAs()...) + + for _, uri := range addURIs { + _, ok := existingURIs[uri] + if !ok { + // new URI - append it to existing URIs + newURIs = append(newURIs, uri) + } + } + + doc[document.AlsoKnownAs] = interfaceArray(newURIs) + + return doc, nil +} + +func interfaceArray(values []string) []interface{} { + var iArr []interface{} + for _, v := range values { + iArr = append(iArr, v) + } + + return iArr +} + +func applyRemoveAlsoKnownAs(doc document.Document, entry interface{}) (document.Document, error) { + logger.Debug("Applying remove also-known-as patch", logfields.WithPatch(entry)) + + didDoc := document.DidDocumentFromJSONLDObject(doc.JSONLdObject()) + urisToRemove := sliceToMap(document.StringArray(entry)) + + var newURIs []interface{} + + for _, uri := range didDoc.AlsoKnownAs() { + _, ok := urisToRemove[uri] + if !ok { + // not in remove list so add to resulting services + newURIs = append(newURIs, uri) + } + } + + doc[document.AlsoKnownAs] = newURIs + + return doc, nil +} + +// deepCopy returns deep copy of JSON object. +func deepCopy(doc document.Document) (document.Document, error) { + bytes, err := json.Marshal(doc) + if err != nil { + return nil, err + } + + var result document.Document + + err = json.Unmarshal(bytes, &result) + if err != nil { + return nil, err + } + + return result, nil +} diff --git a/method/sidetreelongform/sidetree-core/versions/1_0/doccomposer/composer_test.go b/method/sidetreelongform/sidetree-core/versions/1_0/doccomposer/composer_test.go new file mode 100644 index 0000000..8833394 --- /dev/null +++ b/method/sidetreelongform/sidetree-core/versions/1_0/doccomposer/composer_test.go @@ -0,0 +1,635 @@ +/* +Copyright SecureKey Technologies Inc. All Rights Reserved. 
+ +SPDX-License-Identifier: Apache-2.0 +*/ + +package doccomposer + +import ( + "testing" + + "github.com/stretchr/testify/require" + + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/document" + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/patch" +) + +const invalid = "invalid" + +func TestApplyPatches(t *testing.T) { + documentComposer := New() + + t.Run("success - add one key to existing doc with two keys", func(t *testing.T) { + original, err := setupDefaultDoc() + require.NoError(t, err) + require.Equal(t, 2, len(original.PublicKeys())) + + addPublicKeys, err := patch.NewAddPublicKeysPatch(addKeys) + require.NoError(t, err) + + doc, err := documentComposer.ApplyPatches(original, []patch.Patch{addPublicKeys}) + require.NoError(t, err) + require.NotNil(t, doc) + + didDoc := document.DidDocumentFromJSONLDObject(doc) + require.Equal(t, 2, len(didDoc.AlsoKnownAs())) + require.Equal(t, 3, len(didDoc.PublicKeys())) + require.Equal(t, "key1", didDoc.PublicKeys()[0].ID()) + require.Equal(t, "key2", didDoc.PublicKeys()[1].ID()) + require.Equal(t, "key3", didDoc.PublicKeys()[2].ID()) + + // make sure that original document is not modified + require.Equal(t, 2, len(original.PublicKeys())) + }) + + t.Run("action not supported", func(t *testing.T) { + p, err := patch.NewAddServiceEndpointsPatch("{}") + require.NoError(t, err) + + p["action"] = invalid + + doc, err := documentComposer.ApplyPatches(make(document.Document), []patch.Patch{p}) + require.Error(t, err) + require.Nil(t, doc) + require.Contains(t, err.Error(), "not supported") + }) + t.Run("error - original document deep copy fails (not json)", func(t *testing.T) { + doc := make(document.Document) + doc["key"] = make(chan int) + + doc, err := documentComposer.ApplyPatches(doc, nil) + require.Error(t, err) + require.Nil(t, doc) + require.Contains(t, err.Error(), "json: unsupported type: chan int") + }) +} + +func TestApplyPatches_PatchesFromOpaqueDoc(t *testing.T) { + documentComposer := New() + + t.Run("success", func(t *testing.T) { + patches, err := patch.PatchesFromDocument(testDoc) + require.NoError(t, err) + + doc, err := documentComposer.ApplyPatches(make(document.Document), patches) + require.NoError(t, err) + require.NotNil(t, doc) + + didDoc := document.DidDocumentFromJSONLDObject(doc.JSONLdObject()) + require.Len(t, didDoc.Services(), 2) + require.Len(t, didDoc.PublicKeys(), 2) + }) +} + +func TestApplyPatches_ReplacePatch(t *testing.T) { + documentComposer := New() + + t.Run("success", func(t *testing.T) { + replace, err := patch.NewReplacePatch(replaceDoc) + require.NoError(t, err) + + doc, err := documentComposer.ApplyPatches(make(document.Document), []patch.Patch{replace}) + require.NoError(t, err) + require.NotNil(t, doc) + + didDoc := document.DidDocumentFromJSONLDObject(doc.JSONLdObject()) + require.Len(t, didDoc.Services(), 1) + require.Len(t, didDoc.PublicKeys(), 1) + }) +} + +func TestApplyPatches_JSON(t *testing.T) { + documentComposer := New() + + t.Run("success", func(t *testing.T) { + doc, err := setupDefaultDoc() + require.NoError(t, err) + + ietf, err := patch.NewJSONPatch(patches) + require.NoError(t, err) + + doc, err = documentComposer.ApplyPatches(doc, []patch.Patch{ietf}) + require.NoError(t, err) + require.NotNil(t, doc) + }) + t.Run("invalid operation", func(t *testing.T) { + doc, err := setupDefaultDoc() + require.NoError(t, err) + + ietf, err := patch.NewJSONPatch(invalidPatches) + require.NoError(t, err) + + doc, err = documentComposer.ApplyPatches(doc, 
[]patch.Patch{ietf}) + require.Error(t, err) + require.Nil(t, doc) + require.Contains(t, err.Error(), "Unexpected kind: invalid") + }) +} + +func TestApplyPatches_AddPublicKeys(t *testing.T) { + documentComposer := New() + + t.Run("success - add one key to existing two keys", func(t *testing.T) { + doc, err := setupDefaultDoc() + require.NoError(t, err) + + addPublicKeys, err := patch.NewAddPublicKeysPatch(addKeys) + require.NoError(t, err) + + doc, err = documentComposer.ApplyPatches(doc, []patch.Patch{addPublicKeys}) + require.NoError(t, err) + require.NotNil(t, doc) + + diddoc := document.DidDocumentFromJSONLDObject(doc) + require.Equal(t, 3, len(diddoc.PublicKeys())) + require.Equal(t, "key1", diddoc.PublicKeys()[0].ID()) + require.Equal(t, "key2", diddoc.PublicKeys()[1].ID()) + require.Equal(t, "key3", diddoc.PublicKeys()[2].ID()) + }) + t.Run("success - add existing public key to document; old one will be replaced", func(t *testing.T) { + doc, err := setupDefaultDoc() + require.NoError(t, err) + + addPublicKeys, err := patch.NewAddPublicKeysPatch(updateExistingKey) + require.NoError(t, err) + + // existing public key 'key2' will be replaced with the new one (same ID, updated purposes) + doc, err = documentComposer.ApplyPatches(doc, []patch.Patch{addPublicKeys}) + require.NoError(t, err) + require.NotNil(t, doc) + + diddoc := document.DidDocumentFromJSONLDObject(doc) + keys := diddoc.PublicKeys() + require.Equal(t, 2, len(keys)) + require.Equal(t, 1, len(keys[1].Purpose())) + }) + t.Run("add same key twice - no error; one key added", func(t *testing.T) { + doc, err := setupDefaultDoc() + require.NoError(t, err) + + addPublicKeys, err := patch.NewAddPublicKeysPatch(addKeys) + require.NoError(t, err) + + doc, err = documentComposer.ApplyPatches(doc, []patch.Patch{addPublicKeys, addPublicKeys}) + require.NoError(t, err) + require.NotNil(t, doc) + + diddoc := document.DidDocumentFromJSONLDObject(doc) + require.Equal(t, 3, len(diddoc.PublicKeys())) + }) +} + +func TestApplyPatches_RemovePublicKeys(t *testing.T) { + documentComposer := New() + + t.Run("success - remove existing key", func(t *testing.T) { + doc, err := setupDefaultDoc() + require.NoError(t, err) + + removePublicKeys, err := patch.NewRemovePublicKeysPatch(`["key1"]`) + require.NoError(t, err) + + doc, err = documentComposer.ApplyPatches(doc, []patch.Patch{removePublicKeys}) + require.NoError(t, err) + require.NotNil(t, doc) + + didDoc := document.DidDocumentFromJSONLDObject(doc) + require.Equal(t, 1, len(didDoc.PublicKeys())) + }) + + t.Run("success - remove existing and non-existing keys", func(t *testing.T) { + doc, err := setupDefaultDoc() + require.NoError(t, err) + + removePublicKeys, err := patch.NewRemovePublicKeysPatch(`["key1", "key3"]`) + require.NoError(t, err) + + doc, err = documentComposer.ApplyPatches(doc, []patch.Patch{removePublicKeys}) + require.NoError(t, err) + require.NotNil(t, doc) + + diddoc := document.DidDocumentFromJSONLDObject(doc) + require.Equal(t, 1, len(diddoc.PublicKeys())) + }) + t.Run("success - add and remove same key; doc stays at two keys", func(t *testing.T) { + doc, err := setupDefaultDoc() + require.NoError(t, err) + + addPublicKeys, err := patch.NewAddPublicKeysPatch(addKeys) + require.NoError(t, err) + + doc, err = documentComposer.ApplyPatches(doc, []patch.Patch{addPublicKeys}) + require.NoError(t, err) + require.NotNil(t, doc) + + removePublicKeys, err := patch.NewRemovePublicKeysPatch(`["key3"]`) + require.NoError(t, err) + + doc, err = documentComposer.ApplyPatches(doc, 
[]patch.Patch{removePublicKeys}) + require.NoError(t, err) + require.NotNil(t, doc) + + diddoc := document.DidDocumentFromJSONLDObject(doc) + require.Equal(t, 2, len(diddoc.PublicKeys())) + }) +} + +func TestApplyPatches_AddServiceEndpoints(t *testing.T) { + documentComposer := New() + + t.Run("success - add new service to existing two services", func(t *testing.T) { + doc, err := setupDefaultDoc() + require.NoError(t, err) + + addServices, err := patch.NewAddServiceEndpointsPatch(addServices) + require.NoError(t, err) + + doc, err = documentComposer.ApplyPatches(doc, []patch.Patch{addServices}) + require.NoError(t, err) + require.NotNil(t, doc) + + diddoc := document.DidDocumentFromJSONLDObject(doc) + require.Equal(t, 3, len(diddoc.Services())) + require.Equal(t, "svc1", diddoc.Services()[0].ID()) + require.Equal(t, "svc2", diddoc.Services()[1].ID()) + require.Equal(t, "svc3", diddoc.Services()[2].ID()) + }) + t.Run("success - add existing service to document ", func(t *testing.T) { + doc, err := setupDefaultDoc() + require.NoError(t, err) + + addServices, err := patch.NewAddServiceEndpointsPatch(updateExistingService) + require.NoError(t, err) + + // existing service will be replaced with new one that has type 'updatedService' + doc, err = documentComposer.ApplyPatches(doc, []patch.Patch{addServices}) + require.NoError(t, err) + require.NotNil(t, doc) + + diddoc := document.DidDocumentFromJSONLDObject(doc) + services := diddoc.Services() + require.Equal(t, 2, len(services)) + require.Equal(t, diddoc.Services()[1].Type(), "updatedServiceType") + }) + t.Run("add same service twice - no error; one service added", func(t *testing.T) { + doc, err := setupDefaultDoc() + require.NoError(t, err) + + addServices, err := patch.NewAddServiceEndpointsPatch(addServices) + require.NoError(t, err) + + doc, err = documentComposer.ApplyPatches(doc, []patch.Patch{addServices, addServices}) + require.NoError(t, err) + require.NotNil(t, doc) + + diddoc := document.DidDocumentFromJSONLDObject(doc) + require.Equal(t, 3, len(diddoc.Services())) + }) +} + +func TestApplyPatches_RemoveServiceEndpoints(t *testing.T) { + documentComposer := New() + + t.Run("success - remove existing service", func(t *testing.T) { + doc, err := setupDefaultDoc() + require.NoError(t, err) + + removeServices, err := patch.NewRemoveServiceEndpointsPatch(`["svc1"]`) + require.NoError(t, err) + + doc, err = documentComposer.ApplyPatches(doc, []patch.Patch{removeServices}) + require.NoError(t, err) + require.NotNil(t, doc) + + diddoc := document.DidDocumentFromJSONLDObject(doc) + require.Equal(t, 1, len(diddoc.Services())) + }) + + t.Run("success - remove existing and non-existing service", func(t *testing.T) { + doc, err := setupDefaultDoc() + require.NoError(t, err) + + removeServices, err := patch.NewRemoveServiceEndpointsPatch(`["svc1", "svc3"]`) + require.NoError(t, err) + + doc, err = documentComposer.ApplyPatches(doc, []patch.Patch{removeServices}) + require.NoError(t, err) + require.NotNil(t, doc) + + diddoc := document.DidDocumentFromJSONLDObject(doc) + require.Equal(t, 1, len(diddoc.Services())) + }) + t.Run("success - add and remove same service; doc stays at two services", func(t *testing.T) { + doc, err := setupDefaultDoc() + require.NoError(t, err) + + addServices, err := patch.NewAddServiceEndpointsPatch(addServices) + require.NoError(t, err) + + doc, err = documentComposer.ApplyPatches(doc, []patch.Patch{addServices}) + require.NoError(t, err) + require.NotNil(t, doc) + + removeServices, err := 
patch.NewRemoveServiceEndpointsPatch(`["svc3"]`) + require.NoError(t, err) + + doc, err = documentComposer.ApplyPatches(doc, []patch.Patch{removeServices}) + require.NoError(t, err) + require.NotNil(t, doc) + + diddoc := document.DidDocumentFromJSONLDObject(doc) + require.Equal(t, 2, len(diddoc.Services())) + }) +} + +func TestApplyPatches_RemoveAlsoKnownAs(t *testing.T) { + documentComposer := New() + + t.Run("success - remove existing URIs (one by one)", func(t *testing.T) { + doc, err := setupDefaultDoc() + require.NoError(t, err) + + didDoc := document.DidDocumentFromJSONLDObject(doc) + require.Equal(t, 2, len(didDoc.AlsoKnownAs())) + + removeAlsoKnownAs, err := patch.NewRemoveAlsoKnownAs(`["https://myblog.example/"]`) + require.NoError(t, err) + + doc, err = documentComposer.ApplyPatches(doc, []patch.Patch{removeAlsoKnownAs}) + require.NoError(t, err) + require.NotNil(t, doc) + + didDoc = document.DidDocumentFromJSONLDObject(doc) + require.Equal(t, 1, len(didDoc.AlsoKnownAs())) + require.Equal(t, "https://second.example/", didDoc.AlsoKnownAs()[0]) + + removeAlsoKnownAs, err = patch.NewRemoveAlsoKnownAs(`["https://second.example/"]`) + require.NoError(t, err) + + doc, err = documentComposer.ApplyPatches(doc, []patch.Patch{removeAlsoKnownAs}) + require.NoError(t, err) + require.NotNil(t, doc) + + didDoc = document.DidDocumentFromJSONLDObject(doc) + require.Equal(t, 0, len(didDoc.AlsoKnownAs())) + }) + + t.Run("success - remove all existing URI", func(t *testing.T) { + doc, err := setupDefaultDoc() + require.NoError(t, err) + + didDoc := document.DidDocumentFromJSONLDObject(doc) + require.Equal(t, 2, len(didDoc.AlsoKnownAs())) + + removeAlsoKnownAs, err := patch.NewRemoveAlsoKnownAs(`["https://myblog.example/","https://second.example/"]`) + require.NoError(t, err) + + doc, err = documentComposer.ApplyPatches(doc, []patch.Patch{removeAlsoKnownAs}) + require.NoError(t, err) + require.NotNil(t, doc) + + didDoc = document.DidDocumentFromJSONLDObject(doc) + require.Equal(t, 0, len(didDoc.AlsoKnownAs())) + }) + + t.Run("success - remove one existing and one non-existing URI", func(t *testing.T) { + doc, err := setupDefaultDoc() + require.NoError(t, err) + + didDoc := document.DidDocumentFromJSONLDObject(doc) + require.Equal(t, 2, len(didDoc.AlsoKnownAs())) + + removeAlsoKnownAs, err := patch.NewRemoveAlsoKnownAs(`["https://myblog.example/","https://non-existing.example/"]`) + require.NoError(t, err) + + doc, err = documentComposer.ApplyPatches(doc, []patch.Patch{removeAlsoKnownAs}) + require.NoError(t, err) + require.NotNil(t, doc) + + didDoc = document.DidDocumentFromJSONLDObject(doc) + require.Equal(t, 1, len(didDoc.AlsoKnownAs())) + require.Equal(t, "https://second.example/", didDoc.AlsoKnownAs()[0]) + }) + + t.Run("success - add and remove same uri; doc stays at two uri", func(t *testing.T) { + doc, err := setupDefaultDoc() + require.NoError(t, err) + + didDoc := document.DidDocumentFromJSONLDObject(doc) + require.Equal(t, 2, len(didDoc.AlsoKnownAs())) + + addAlsoKnowAs, err := patch.NewAddAlsoKnownAs(`["https://third.example/","https://fourth.example/"]`) + require.NoError(t, err) + + doc, err = documentComposer.ApplyPatches(doc, []patch.Patch{addAlsoKnowAs}) + require.NoError(t, err) + require.NotNil(t, doc) + + didDoc = document.DidDocumentFromJSONLDObject(doc) + require.Equal(t, 4, len(didDoc.AlsoKnownAs())) + require.Equal(t, "https://myblog.example/", didDoc.AlsoKnownAs()[0]) + require.Equal(t, "https://second.example/", didDoc.AlsoKnownAs()[1]) + require.Equal(t, 
"https://third.example/", didDoc.AlsoKnownAs()[2]) + require.Equal(t, "https://fourth.example/", didDoc.AlsoKnownAs()[3]) + + removeAlsoKnownAs, err := patch.NewRemoveAlsoKnownAs(`["https://third.example/","https://fourth.example/"]`) + require.NoError(t, err) + + doc, err = documentComposer.ApplyPatches(doc, []patch.Patch{removeAlsoKnownAs}) + require.NoError(t, err) + require.NotNil(t, doc) + + didDoc = document.DidDocumentFromJSONLDObject(doc) + require.Equal(t, 2, len(didDoc.AlsoKnownAs())) + }) + + t.Run("error - uri is not a string", func(t *testing.T) { + doc, err := setupDefaultDoc() + require.NoError(t, err) + + didDoc := document.DidDocumentFromJSONLDObject(doc) + require.Equal(t, 2, len(didDoc.AlsoKnownAs())) + + addAlsoKnowAs, err := patch.NewAddAlsoKnownAs(`[123,"https://another.example/"]`) + require.Error(t, err) + require.Nil(t, addAlsoKnowAs) + require.Contains(t, err.Error(), "also known as uris is not string array") + }) + + t.Run("error - uri is empty", func(t *testing.T) { + doc, err := setupDefaultDoc() + require.NoError(t, err) + + didDoc := document.DidDocumentFromJSONLDObject(doc) + require.Equal(t, 2, len(didDoc.AlsoKnownAs())) + + addAlsoKnowAs, err := patch.NewAddAlsoKnownAs(`[]`) + require.Error(t, err) + require.Nil(t, addAlsoKnowAs) + require.Contains(t, err.Error(), "missing also known as uris") + }) +} + +func TestApplyPatches_AddAlsoKnownAs(t *testing.T) { + documentComposer := New() + + t.Run("success - add multiple URIs, followed by same URIs", func(t *testing.T) { + doc := make(document.Document) + + didDoc := document.DidDocumentFromJSONLDObject(doc) + require.Equal(t, 0, len(didDoc.AlsoKnownAs())) + + addAlsoKnownAs, err := patch.NewAddAlsoKnownAs(`["https://myblog.example/", "https://other.example/"]`) + require.NoError(t, err) + + doc, err = documentComposer.ApplyPatches(doc, []patch.Patch{addAlsoKnownAs}) + require.NoError(t, err) + require.NotNil(t, doc) + + didDoc = document.DidDocumentFromJSONLDObject(doc) + require.Equal(t, 2, len(didDoc.AlsoKnownAs())) + require.Equal(t, "https://myblog.example/", didDoc.AlsoKnownAs()[0]) + require.Equal(t, "https://other.example/", didDoc.AlsoKnownAs()[1]) + + // add again same URIs - they will be ignored during applying patches + addAlsoKnownAs, err = patch.NewAddAlsoKnownAs(`["https://myblog.example/", "https://other.example/"]`) + require.NoError(t, err) + + doc, err = documentComposer.ApplyPatches(doc, []patch.Patch{addAlsoKnownAs}) + require.NoError(t, err) + require.NotNil(t, doc) + + didDoc = document.DidDocumentFromJSONLDObject(doc) + require.Equal(t, 2, len(didDoc.AlsoKnownAs())) + require.Equal(t, "https://myblog.example/", didDoc.AlsoKnownAs()[0]) + require.Equal(t, "https://other.example/", didDoc.AlsoKnownAs()[1]) + }) +} + +func setupDefaultDoc() (document.Document, error) { + documentComposer := New() + + patches, err := patch.PatchesFromDocument(testDoc) + if err != nil { + return nil, err + } + + return documentComposer.ApplyPatches(make(document.Document), patches) +} + +const invalidPatches = `[ + { + "op": "invalid", + "path": "/test", + "value": "new value" + } +]` + +const patches = `[ + { + "op": "replace", + "path": "/test", + "value": "new value" + } +]` + +const testDoc = `{ + "alsoKnownAs": ["https://myblog.example/", "https://second.example/"], + "publicKey": [ + { + "id": "key1", + "type": "JsonWebKey2020", + "purposes": ["assertionMethod"], + "publicKeyJwk": { + "kty": "EC", + "crv": "P-256K", + "x": "PUymIqdtF_qxaAqPABSw-C-owT1KYYQbsMKFM-L9fJA", + "y": 
"nM84jDHCMOTGTh_ZdHq4dBBdo4Z5PkEOW9jA8z8IsGc" + } + }, + { + "id": "key2", + "type": "JsonWebKey2020", + "purposes": ["authentication"], + "publicKeyJwk": { + "kty": "EC", + "crv": "P-256K", + "x": "PUymIqdtF_qxaAqPABSw-C-owT1KYYQbsMKFM-L9fJA", + "y": "nM84jDHCMOTGTh_ZdHq4dBBdo4Z5PkEOW9jA8z8IsGc" + } + } + ], + "service": [ + { + "id": "svc1", + "type": "SecureDataStore", + "serviceEndpoint": "http://hub.my-personal-server.com" + }, + { + "id": "svc2", + "type": "SecureDataStore", + "serviceEndpoint": "http://some-cloud.com/hub" + } + ] +}` + +const addKeys = `[{ + "id": "key3", + "type": "JsonWebKey2020", + "publicKeyJwk": { + "kty": "EC", + "crv": "P-256K", + "x": "PUymIqdtF_qxaAqPABSw-C-owT1KYYQbsMKFM-L9fJA", + "y": "nM84jDHCMOTGTh_ZdHq4dBBdo4Z5PkEOW9jA8z8IsGc" + } + }]` + +const updateExistingKey = `[{ + "id": "key2", + "type": "JsonWebKey2020", + "purposes": ["assertionMethod"], + "publicKeyJwk": { + "kty": "EC", + "crv": "P-256K", + "x": "PUymIqdtF_qxaAqPABSw-C-owT1KYYQbsMKFM-L9fJA", + "y": "nM84jDHCMOTGTh_ZdHq4dBBdo4Z5PkEOW9jA8z8IsGc" + } + }]` + +const addServices = `[ + { + "id": "svc3", + "type": "SecureDataStore", + "serviceEndpoint": "http://hub.my-personal-server.com" + } + ]` + +const updateExistingService = `[ + { + "id": "svc2", + "type": "updatedServiceType", + "serviceEndpoint": "http://hub.my-personal-server.com" + } + ]` + +const replaceDoc = `{ + "publicKeys": [ + { + "id": "key-1", + "purposes": ["authentication"], + "type": "EcdsaSecp256k1VerificationKey2019", + "publicKeyJwk": { + "kty": "EC", + "crv": "P-256K", + "x": "PUymIqdtF_qxaAqPABSw-C-owT1KYYQbsMKFM-L9fJA", + "y": "nM84jDHCMOTGTh_ZdHq4dBBdo4Z5PkEOW9jA8z8IsGc" + } + }], + "services": [ + { + "id": "sds3", + "type": "SecureDataStore", + "serviceEndpoint": "http://hub.my-personal-server.com" + }] +}` diff --git a/method/sidetreelongform/sidetree-core/versions/1_0/doctransformer/didtransformer/testdata/doc.json b/method/sidetreelongform/sidetree-core/versions/1_0/doctransformer/didtransformer/testdata/doc.json new file mode 100644 index 0000000..3c03dc9 --- /dev/null +++ b/method/sidetreelongform/sidetree-core/versions/1_0/doctransformer/didtransformer/testdata/doc.json @@ -0,0 +1,100 @@ +{ + "alsoKnownAs": ["https:\\somebody.com"], + "publicKey": [ + { + "id": "master", + "type": "EcdsaSecp256k1VerificationKey2019", + "purposes": ["authentication", "assertionMethod", "keyAgreement", "capabilityDelegation", "capabilityInvocation"], + "publicKeyJwk": { + "kty": "EC", + "crv": "P-256K", + "x": "PUymIqdtF_qxaAqPABSw-C-owT1KYYQbsMKFM-L9fJA", + "y": "nM84jDHCMOTGTh_ZdHq4dBBdo4Z5PkEOW9jA8z8IsGc" + } + }, + { + "id": "auth", + "type": "JsonWebKey2020", + "purposes": ["authentication"], + "publicKeyJwk": { + "kty": "EC", + "crv": "P-256K", + "x": "PUymIqdtF_qxaAqPABSw-C-owT1KYYQbsMKFM-L9fJA", + "y": "nM84jDHCMOTGTh_ZdHq4dBBdo4Z5PkEOW9jA8z8IsGc" + } + }, + { + "id": "assertion", + "type": "JsonWebKey2020", + "purposes": ["assertionMethod"], + "publicKeyJwk": { + "kty": "EC", + "crv": "P-256K", + "x": "PUymIqdtF_qxaAqPABSw-C-owT1KYYQbsMKFM-L9fJA", + "y": "nM84jDHCMOTGTh_ZdHq4dBBdo4Z5PkEOW9jA8z8IsGc" + } + }, + { + "id": "agreement", + "type": "JsonWebKey2020", + "purposes": ["keyAgreement"], + "publicKeyJwk": { + "kty": "EC", + "crv": "P-256K", + "x": "PUymIqdtF_qxaAqPABSw-C-owT1KYYQbsMKFM-L9fJA", + "y": "nM84jDHCMOTGTh_ZdHq4dBBdo4Z5PkEOW9jA8z8IsGc" + } + }, + { + "id": "invocation", + "type": "JsonWebKey2020", + "purposes": ["capabilityInvocation"], + "publicKeyJwk": { + "kty": "EC", + "crv": "P-256K", + "x": 
"PUymIqdtF_qxaAqPABSw-C-owT1KYYQbsMKFM-L9fJA", + "y": "nM84jDHCMOTGTh_ZdHq4dBBdo4Z5PkEOW9jA8z8IsGc" + } + }, + { + "id": "delegation", + "type": "JsonWebKey2020", + "purposes": ["capabilityDelegation"], + "publicKeyJwk": { + "kty": "EC", + "crv": "P-256K", + "x": "PUymIqdtF_qxaAqPABSw-C-owT1KYYQbsMKFM-L9fJA", + "y": "nM84jDHCMOTGTh_ZdHq4dBBdo4Z5PkEOW9jA8z8IsGc" + } + }, + { + "id": "general", + "type": "JsonWebKey2020", + "purposes": [], + "publicKeyJwk": { + "kty": "EC", + "crv": "P-256K", + "x": "PUymIqdtF_qxaAqPABSw-C-owT1KYYQbsMKFM-L9fJA", + "y": "nM84jDHCMOTGTh_ZdHq4dBBdo4Z5PkEOW9jA8z8IsGc" + } + } + ], + "service": [ + { + "id": "hub", + "type": "IdentityHub", + "routingKeys": "routingKeysValue", + "recipientKeys": "recipientKeysValue", + "serviceEndpoint": "https://example.com/hub/" + }, + { + "id": "hub-object", + "type": "IdentityHub", + "serviceEndpoint": { + "@context": "https://schema.identity.foundation/hub", + "type": "UserHubEndpoint", + "instances": ["did:example:456", "did:example:789"] + } + } + ] +} \ No newline at end of file diff --git a/method/sidetreelongform/sidetree-core/versions/1_0/doctransformer/didtransformer/transformer.go b/method/sidetreelongform/sidetree-core/versions/1_0/doctransformer/didtransformer/transformer.go new file mode 100644 index 0000000..fbb3147 --- /dev/null +++ b/method/sidetreelongform/sidetree-core/versions/1_0/doctransformer/didtransformer/transformer.go @@ -0,0 +1,366 @@ +/* +Copyright SecureKey Technologies Inc. All Rights Reserved. + +SPDX-License-Identifier: Apache-2.0 +*/ + +package didtransformer + +import ( + "errors" + "fmt" + + "github.com/btcsuite/btcutil/base58" + "github.com/multiformats/go-multibase" + + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/api/protocol" + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/document" + internaljws "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/internal/jws" + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/jws" + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/versions/1_0/doctransformer/metadata" +) + +const ( + didContext = "https://www.w3.org/ns/did/v1" + + didResolutionContext = "https://w3id.org/did-resolution/v1" + + // ed25519VerificationKey2018 requires special handling (convert to base58). + ed25519VerificationKey2018 = "Ed25519VerificationKey2018" + + // ed25519VerificationKey202p requires special handling (convert to multibase). 
+ ed25519VerificationKey2020 = "Ed25519VerificationKey2020" + + bls12381G2Key2020 = "Bls12381G2Key2020" + jsonWebKey2020 = "JsonWebKey2020" + ecdsaSecp256k1VerificationKey2019 = "EcdsaSecp256k1VerificationKey2019" + x25519KeyAgreementKey2019 = "X25519KeyAgreementKey2019" + + bls12381G2Key2020Ctx = "https://w3id.org/security/suites/bls12381-2020/v1" + jsonWebKey2020Ctx = "https://w3id.org/security/suites/jws-2020/v1" + ecdsaSecp256k1VerificationKey2019Ctx = "https://w3id.org/security/suites/secp256k1-2019/v1" + ed25519VerificationKey2018Ctx = "https://w3id.org/security/suites/ed25519-2018/v1" + ed25519VerificationKey2020Ctx = "https://w3id.org/security/suites/ed25519-2020/v1" + x25519KeyAgreementKey2019Ctx = "https://w3id.org/security/suites/x25519-2019/v1" +) + +type keyContextMap map[string]string + +//nolint:gochecknoglobals +var defaultKeyContextMap = keyContextMap{ + bls12381G2Key2020: bls12381G2Key2020Ctx, + jsonWebKey2020: jsonWebKey2020Ctx, + ecdsaSecp256k1VerificationKey2019: ecdsaSecp256k1VerificationKey2019Ctx, + ed25519VerificationKey2018: ed25519VerificationKey2018Ctx, + ed25519VerificationKey2020: ed25519VerificationKey2020Ctx, + x25519KeyAgreementKey2019: x25519KeyAgreementKey2019Ctx, +} + +// Option is a registry instance option. +type Option func(opts *Transformer) + +// WithMethodContext sets optional method context(s). +func WithMethodContext(ctx []string) Option { + return func(opts *Transformer) { + opts.methodCtx = ctx + } +} + +// WithKeyContext sets optional key context. +func WithKeyContext(ctx map[string]string) Option { + return func(opts *Transformer) { + opts.keyCtx = ctx + } +} + +// WithBase sets optional @base context. +func WithBase(enabled bool) Option { + return func(opts *Transformer) { + opts.includeBase = enabled + } +} + +// WithIncludePublishedOperations sets optional include published operations flag. +func WithIncludePublishedOperations(enabled bool) Option { + return func(opts *Transformer) { + opts.includePublishedOperations = enabled + } +} + +// WithIncludeUnpublishedOperations sets optional include unpublished operations flag. +func WithIncludeUnpublishedOperations(enabled bool) Option { + return func(opts *Transformer) { + opts.includeUnpublishedOperations = enabled + } +} + +// Transformer is responsible for transforming internal to external document. +type Transformer struct { + keyCtx map[string]string + methodCtx []string // used for setting additional contexts during resolution + includeBase bool + + includePublishedOperations bool + includeUnpublishedOperations bool +} + +// New creates a new DID Transformer. +func New(opts ...Option) *Transformer { + transformer := &Transformer{} + + // apply options + for _, opt := range opts { + opt(transformer) + } + + // if key contexts are not provided via options use default key contexts + if len(transformer.keyCtx) == 0 { + transformer.keyCtx = defaultKeyContextMap + } + + return transformer +} + +// TransformDocument takes internal resolution model and transformation info and creates +// external representation of document (resolution result). +func (t *Transformer) TransformDocument(rm *protocol.ResolutionModel, + info protocol.TransformationInfo) (*document.ResolutionResult, error) { + docMetadata, err := metadata.New( + metadata.WithIncludeUnpublishedOperations(t.includeUnpublishedOperations), + metadata.WithIncludePublishedOperations(t.includePublishedOperations)). 
+        CreateDocumentMetadata(rm, info)
+    if err != nil {
+        return nil, err
+    }
+
+    id, ok := info[document.IDProperty]
+    if !ok {
+        return nil, errors.New("id is required for document transformation")
+    }
+
+    internal := document.DidDocumentFromJSONLDObject(rm.Doc.JSONLdObject())
+
+    // start with empty document
+    external := document.DidDocumentFromJSONLDObject(make(document.DIDDocument))
+
+    // add main context
+    ctx := []interface{}{didContext}
+
+    // add optional method contexts
+    for _, c := range t.methodCtx {
+        ctx = append(ctx, c)
+    }
+
+    if t.includeBase {
+        ctx = append(ctx, getBase(id.(string)))
+    }
+
+    alsoKnownAs := internal.AlsoKnownAs()
+    if len(alsoKnownAs) > 0 {
+        external[document.AlsoKnownAs] = alsoKnownAs
+    }
+
+    external[document.ContextProperty] = ctx
+    external[document.IDProperty] = id
+
+    result := &document.ResolutionResult{
+        Context:          didResolutionContext,
+        Document:         external.JSONLdObject(),
+        DocumentMetadata: docMetadata,
+    }
+
+    // add keys
+    err = t.processKeys(internal, result)
+    if err != nil {
+        return nil, fmt.Errorf("failed to transform public keys for did document: %s", err.Error())
+    }
+
+    // add services
+    t.processServices(internal, result)
+
+    return result, nil
+}
+
+func getBase(id string) interface{} {
+    return &struct {
+        Base string `json:"@base"`
+    }{
+        Base: id,
+    }
+}
+
+// processServices will process services and add them to external document.
+func (t *Transformer) processServices(internal document.DIDDocument, resolutionResult *document.ResolutionResult) {
+    var services []document.Service
+
+    did := resolutionResult.Document.ID()
+
+    // add did to service id
+    for _, sv := range internal.Services() {
+        externalService := make(document.Service)
+        externalService[document.IDProperty] = t.getObjectID(did, sv.ID())
+        externalService[document.TypeProperty] = sv.Type()
+        externalService[document.ServiceEndpointProperty] = sv.ServiceEndpoint()
+
+        for key, value := range sv {
+            _, ok := externalService[key]
+            if !ok {
+                externalService[key] = value
+            }
+        }
+
+        services = append(services, externalService)
+    }
+
+    if len(services) > 0 {
+        resolutionResult.Document[document.ServiceProperty] = services
+    }
+}
+
+// processKeys will process keys according to Sidetree rules below and add them to external document.
+// Every key will be included in the verificationMethod section of the resolved DID Document.
+//
+// -- authentication: the key MUST be included by reference (full id) in the authentication
+// section of the resolved DID Document.
+// -- assertion: the key MUST be included by reference in the assertionMethod section.
+// -- agreement: the key MUST be included by reference in the keyAgreement section.
+// -- delegation: the key MUST be included by reference in the capabilityDelegation section.
+// -- invocation: the key MUST be included by reference in the capabilityInvocation section.
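+// -- (no purpose): a key with an empty purposes array appears in the verificationMethod section only.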
+// nolint:funlen,gocyclo +func (t *Transformer) processKeys(internal document.DIDDocument, + resolutionResult *document.ResolutionResult) error { + purposes := map[string][]interface{}{ + document.AuthenticationProperty: make([]interface{}, 0), + document.AssertionMethodProperty: make([]interface{}, 0), + document.KeyAgreementProperty: make([]interface{}, 0), + document.DelegationKeyProperty: make([]interface{}, 0), + document.InvocationKeyProperty: make([]interface{}, 0), + } + + did := resolutionResult.Document.ID() + + var publicKeys []document.PublicKey + + var keyContexts []string + + for _, pk := range internal.PublicKeys() { + id := t.getObjectID(did, pk.ID()) + + externalPK := make(document.PublicKey) + externalPK[document.IDProperty] = id + externalPK[document.TypeProperty] = pk.Type() + externalPK[document.ControllerProperty] = did + + if pkJwk := pk.PublicKeyJwk(); pkJwk != nil { //nolint:nestif + if pk.Type() == ed25519VerificationKey2018 { + ed25519PubKey, err := getED2519PublicKey(pkJwk) + if err != nil { + return err + } + + externalPK[document.PublicKeyBase58Property] = base58.Encode(ed25519PubKey) + } else if pk.Type() == ed25519VerificationKey2020 { + ed25519PubKey, err := getED2519PublicKey(pkJwk) + if err != nil { + return err + } + + multibaseEncode, err := multibase.Encode(multibase.Base58BTC, ed25519PubKey) + if err != nil { + return err + } + + externalPK[document.PublicKeyMultibaseProperty] = multibaseEncode + } else { + externalPK[document.PublicKeyJwkProperty] = pkJwk + } + } else if pkb58 := pk.PublicKeyBase58(); pkb58 != "" { + externalPK[document.PublicKeyBase58Property] = pkb58 + } else if pkMultibase := pk.PublicKeyMultibase(); pkMultibase != "" { + externalPK[document.PublicKeyMultibaseProperty] = pkMultibase + } else { + externalPK[document.PublicKeyJwkProperty] = nil // if key missing, default to adding nil jwk + } + + keyContext, ok := t.keyCtx[pk.Type()] + if !ok { + return fmt.Errorf("key context not found for key type: %s", pk.Type()) + } + + if !contains(keyContexts, keyContext) { + keyContexts = append(keyContexts, keyContext) + } + + publicKeys = append(publicKeys, externalPK) + + for _, p := range pk.Purpose() { + switch p { + case document.KeyPurposeAuthentication: + purposes[document.AuthenticationProperty] = append(purposes[document.AuthenticationProperty], id) + case document.KeyPurposeAssertionMethod: + purposes[document.AssertionMethodProperty] = append(purposes[document.AssertionMethodProperty], id) + case document.KeyPurposeKeyAgreement: + purposes[document.KeyAgreementProperty] = append(purposes[document.KeyAgreementProperty], id) + case document.KeyPurposeCapabilityDelegation: + purposes[document.DelegationKeyProperty] = append(purposes[document.DelegationKeyProperty], id) + case document.KeyPurposeCapabilityInvocation: + purposes[document.InvocationKeyProperty] = append(purposes[document.InvocationKeyProperty], id) + } + } + } + + if len(publicKeys) > 0 { + resolutionResult.Document[document.VerificationMethodProperty] = publicKeys + + // we need to add key context(s) to original context + ctx := append(resolutionResult.Document.Context(), interfaceArray(keyContexts)...) 
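+        // the resulting order is: DID context, optional method contexts, then one context per key type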
+ resolutionResult.Document[document.ContextProperty] = ctx + } + + for key, value := range purposes { + if len(value) > 0 { + resolutionResult.Document[key] = value + } + } + + return nil +} + +func contains(values []string, value string) bool { + for _, v := range values { + if v == value { + return true + } + } + + return false +} + +func interfaceArray(values []string) []interface{} { + var iArr []interface{} + for _, v := range values { + iArr = append(iArr, v) + } + + return iArr +} + +func (t *Transformer) getObjectID(docID, objectID string) interface{} { + relativeID := "#" + objectID + if t.includeBase { + return relativeID + } + + return docID + relativeID +} + +func getED2519PublicKey(pkJWK document.JWK) ([]byte, error) { + jwk := &jws.JWK{ + Crv: pkJWK.Crv(), + Kty: pkJWK.Kty(), + X: pkJWK.X(), + Y: pkJWK.Y(), + } + + return internaljws.GetED25519PublicKey(jwk) +} diff --git a/method/sidetreelongform/sidetree-core/versions/1_0/doctransformer/didtransformer/transformer_test.go b/method/sidetreelongform/sidetree-core/versions/1_0/doctransformer/didtransformer/transformer_test.go new file mode 100644 index 0000000..f5895ff --- /dev/null +++ b/method/sidetreelongform/sidetree-core/versions/1_0/doctransformer/didtransformer/transformer_test.go @@ -0,0 +1,820 @@ +/* +Copyright SecureKey Technologies Inc. All Rights Reserved. + +SPDX-License-Identifier: Apache-2.0 +*/ + +package didtransformer + +import ( + "crypto/ed25519" + "crypto/rand" + "encoding/json" + "fmt" + "io" + "os" + "testing" + + "github.com/btcsuite/btcutil/base58" + "github.com/multiformats/go-multibase" + "github.com/stretchr/testify/require" + + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/api/operation" + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/api/protocol" + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/document" + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/util/pubkey" + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/versions/1_0/doctransformer/metadata" +) + +const testID = "doc:abc:123" + +func TestNewTransformer(t *testing.T) { + transformer := New() + require.NotNil(t, transformer) + require.Empty(t, transformer.methodCtx) + require.Equal(t, false, transformer.includeBase) + require.Equal(t, false, transformer.includePublishedOperations) + require.Equal(t, false, transformer.includeUnpublishedOperations) + + const ctx1 = "ctx-1" + transformer = New(WithMethodContext([]string{ctx1})) + require.Equal(t, 1, len(transformer.methodCtx)) + require.Equal(t, ctx1, transformer.methodCtx[0]) + + const ctx2 = "ctx-2" + transformer = New(WithMethodContext([]string{ctx1, ctx2})) + require.Equal(t, 2, len(transformer.methodCtx)) + require.Equal(t, ctx2, transformer.methodCtx[1]) + + transformer = New(WithBase(true)) + require.Equal(t, true, transformer.includeBase) + + keyCtx := map[string]string{ + "key-1": "value-1", + "key-2": "value-2", + } + + transformer = New(WithKeyContext(keyCtx)) + require.Equal(t, 2, len(transformer.keyCtx)) + + transformer = New(WithIncludePublishedOperations(true), WithIncludeUnpublishedOperations(true)) + require.Equal(t, true, transformer.includePublishedOperations) + require.Equal(t, true, transformer.includeUnpublishedOperations) +} + +func TestTransformDocument(t *testing.T) { + r := reader(t, "testdata/doc.json") + docBytes, err := io.ReadAll(r) + require.NoError(t, err) + doc, err := document.FromBytes(docBytes) + require.NoError(t, err) + + transformer := New() + + 
internal := &protocol.ResolutionModel{Doc: doc, RecoveryCommitment: "recovery", UpdateCommitment: "update"}
+
+    t.Run("success", func(t *testing.T) {
+        info := make(protocol.TransformationInfo)
+        info[document.IDProperty] = testID
+        info[document.PublishedProperty] = true
+
+        result, err := transformer.TransformDocument(internal, info)
+        require.NoError(t, err)
+        require.NotNil(t, result)
+        require.Equal(t, testID, result.Document[document.IDProperty])
+
+        methodMetadataEntry, ok := result.DocumentMetadata[document.MethodProperty]
+        require.True(t, ok)
+        methodMetadata, ok := methodMetadataEntry.(document.Metadata)
+        require.True(t, ok)
+
+        require.Equal(t, true, methodMetadata[document.PublishedProperty])
+        require.Equal(t, "recovery", methodMetadata[document.RecoveryCommitmentProperty])
+        require.Equal(t, "update", methodMetadata[document.UpdateCommitmentProperty])
+
+        jsonTransformed, err := json.Marshal(result.Document)
+        require.NoError(t, err)
+
+        didDoc, err := document.DidDocumentFromBytes(jsonTransformed)
+        require.NoError(t, err)
+
+        // test document has 7 keys defined, two distinct key types: EcdsaSecp256k1VerificationKey2019, JsonWebKey2020
+        require.Equal(t, 3, len(didDoc.Context()))
+        require.Equal(t, didContext, didDoc.Context()[0])
+        require.NotEmpty(t, didDoc[document.AlsoKnownAs])
+        require.Equal(t, ecdsaSecp256k1VerificationKey2019Ctx, didDoc.Context()[1])
+        require.Equal(t, jsonWebKey2020Ctx, didDoc.Context()[2])
+
+        // validate services
+        service := didDoc.Services()[0]
+        require.Equal(t, service.ID(), testID+"#hub")
+        require.Equal(t, "https://example.com/hub/", service.ServiceEndpoint().(string))
+        require.Equal(t, "recipientKeysValue", service["recipientKeys"])
+        require.Equal(t, "routingKeysValue", service["routingKeys"])
+        require.Equal(t, "IdentityHub", service.Type())
+
+        service = didDoc.Services()[1]
+        require.Equal(t, service.ID(), testID+"#hub-object")
+        require.NotEmpty(t, service.ServiceEndpoint())
+        require.Empty(t, service["recipientKeys"])
+        require.Equal(t, "IdentityHub", service.Type())
+
+        serviceEndpointEntry := service.ServiceEndpoint()
+        serviceEndpoint := serviceEndpointEntry.(map[string]interface{}) //nolint: errcheck
+        require.Equal(t, "https://schema.identity.foundation/hub", serviceEndpoint["@context"])
+        require.Equal(t, "UserHubEndpoint", serviceEndpoint["type"])
+        require.Equal(t, []interface{}{"did:example:456", "did:example:789"}, serviceEndpoint["instances"])
+
+        // validate public keys
+        pk := didDoc.VerificationMethods()[0]
+        require.Contains(t, pk.ID(), testID)
+        require.NotEmpty(t, pk.Type())
+        require.NotEmpty(t, pk.PublicKeyJwk())
+        require.Empty(t, pk.PublicKeyBase58())
+
+        expectedPublicKeys := []string{
+            "master", "general", "authentication", "assertion", "agreement", "delegation", "invocation"}
+        require.Equal(t, len(expectedPublicKeys), len(didDoc.VerificationMethods()))
+
+        expectedAuthenticationKeys := []string{"master", "authentication"}
+        require.Equal(t, len(expectedAuthenticationKeys), len(didDoc.Authentications()))
+
+        expectedAssertionMethodKeys := []string{"master", "assertion"}
+        require.Equal(t, len(expectedAssertionMethodKeys), len(didDoc.AssertionMethods()))
+
+        expectedAgreementKeys := []string{"master", "agreement"}
+        require.Equal(t, len(expectedAgreementKeys), len(didDoc.AgreementKeys()))
+
+        expectedDelegationKeys := []string{"master", "delegation"}
+        require.Equal(t, len(expectedDelegationKeys), len(didDoc.DelegationKeys()))
+
+        expectedInvocationKeys := []string{"master", "invocation"}
+        require.Equal(t,
len(expectedInvocationKeys), len(didDoc.InvocationKeys())) + }) + t.Run("success - with canonical, equivalent ID", func(t *testing.T) { + info := make(protocol.TransformationInfo) + info[document.IDProperty] = "did:abc:123" + info[document.PublishedProperty] = true + info[document.CanonicalIDProperty] = "canonical" + info[document.EquivalentIDProperty] = []string{"equivalent"} + + result, err := transformer.TransformDocument(internal, info) + require.NoError(t, err) + require.Equal(t, "did:abc:123", result.Document[document.IDProperty]) + + methodMetadataEntry, ok := result.DocumentMetadata[document.MethodProperty] + require.True(t, ok) + methodMetadata, ok := methodMetadataEntry.(document.Metadata) + require.True(t, ok) + + require.Equal(t, true, methodMetadata[document.PublishedProperty]) + require.Equal(t, "recovery", methodMetadata[document.RecoveryCommitmentProperty]) + require.Equal(t, "update", methodMetadata[document.UpdateCommitmentProperty]) + + require.Equal(t, "canonical", result.DocumentMetadata[document.CanonicalIDProperty]) + require.NotEmpty(t, result.DocumentMetadata[document.EquivalentIDProperty]) + }) + + t.Run("success - all supported contexts for key type", func(t *testing.T) { + d, err := document.FromBytes([]byte(allKeyTypes)) + require.NoError(t, err) + + trans := New() + + internalDoc := &protocol.ResolutionModel{Doc: d} + + info := make(protocol.TransformationInfo) + info[document.IDProperty] = testID + info[document.PublishedProperty] = true + + result, err := trans.TransformDocument(internalDoc, info) + require.NoError(t, err) + require.NotEmpty(t, result) + + didDoc := result.Document + + require.Equal(t, 7, len(didDoc.Context())) + require.Equal(t, didContext, didDoc.Context()[0]) + require.Equal(t, bls12381G2Key2020Ctx, didDoc.Context()[1]) + require.Equal(t, jsonWebKey2020Ctx, didDoc.Context()[2]) + require.Equal(t, ecdsaSecp256k1VerificationKey2019Ctx, didDoc.Context()[3]) + require.Equal(t, ed25519VerificationKey2018Ctx, didDoc.Context()[4]) + require.Equal(t, x25519KeyAgreementKey2019Ctx, didDoc.Context()[5]) + require.Equal(t, ed25519VerificationKey2020Ctx, didDoc.Context()[6]) + }) + + t.Run("success - override contexts for key type", func(t *testing.T) { + testKeyContexts := map[string]string{ + bls12381G2Key2020: "context-1", + jsonWebKey2020: "context-2", + ecdsaSecp256k1VerificationKey2019: "context-3", + ed25519VerificationKey2018: "context-4", + x25519KeyAgreementKey2019: "context-5", + ed25519VerificationKey2020: "context-6", + } + + d, err := document.FromBytes([]byte(allKeyTypes)) + require.NoError(t, err) + + trans := New(WithKeyContext(testKeyContexts)) + + internalDoc := &protocol.ResolutionModel{Doc: d} + + info := make(protocol.TransformationInfo) + info[document.IDProperty] = testID + info[document.PublishedProperty] = true + + result, err := trans.TransformDocument(internalDoc, info) + require.NoError(t, err) + require.NotEmpty(t, result) + + didDoc := result.Document + + require.Equal(t, 7, len(didDoc.Context())) + require.Equal(t, didContext, didDoc.Context()[0]) + require.Equal(t, "context-1", didDoc.Context()[1]) + require.Equal(t, "context-2", didDoc.Context()[2]) + require.Equal(t, "context-3", didDoc.Context()[3]) + require.Equal(t, "context-4", didDoc.Context()[4]) + require.Equal(t, "context-5", didDoc.Context()[5]) + require.Equal(t, "context-6", didDoc.Context()[6]) + }) + + t.Run("success - include operations (published/unpublished)", func(t *testing.T) { + trans := New( + WithIncludePublishedOperations(true), + 
WithIncludeUnpublishedOperations(true)) + + info := make(protocol.TransformationInfo) + info[document.IDProperty] = testID + info[document.PublishedProperty] = true + + publishedOps := []*operation.AnchoredOperation{ + {Type: "create", UniqueSuffix: "suffix", CanonicalReference: "ref1"}, + {Type: "update", UniqueSuffix: "suffix", CanonicalReference: "ref2"}, + } + + unpublishedOps := []*operation.AnchoredOperation{ + {Type: "update", UniqueSuffix: "suffix"}, + } + + rm := &protocol.ResolutionModel{ + Doc: doc, + RecoveryCommitment: "recovery", + UpdateCommitment: "update", + PublishedOperations: publishedOps, + UnpublishedOperations: unpublishedOps, + } + + result, err := trans.TransformDocument(rm, info) + require.NoError(t, err) + require.NotNil(t, result) + require.Equal(t, testID, result.Document[document.IDProperty]) + + methodMetadataEntry, ok := result.DocumentMetadata[document.MethodProperty] + require.True(t, ok) + methodMetadata, ok := methodMetadataEntry.(document.Metadata) + require.True(t, ok) + + require.Equal(t, true, methodMetadata[document.PublishedProperty]) + require.Equal(t, "recovery", methodMetadata[document.RecoveryCommitmentProperty]) + require.Equal(t, "update", methodMetadata[document.UpdateCommitmentProperty]) + + require.Equal(t, 2, len( + methodMetadata[document.PublishedOperationsProperty].([]*metadata.PublishedOperation))) + require.Equal(t, 1, len( + methodMetadata[document.UnpublishedOperationsProperty].([]*metadata.UnpublishedOperation))) + }) + + t.Run("error - internal document is missing", func(t *testing.T) { + info := make(protocol.TransformationInfo) + info[document.IDProperty] = testID + info[document.PublishedProperty] = true + + result, err := transformer.TransformDocument(nil, info) + require.Error(t, err) + require.Nil(t, result) + require.Contains(t, err.Error(), "resolution model is required for creating document metadata") + }) + + t.Run("error - transformation info is missing", func(t *testing.T) { + result, err := transformer.TransformDocument(internal, nil) + require.Error(t, err) + require.Nil(t, result) + require.Contains(t, err.Error(), "transformation info is required for creating document metadata") + }) + + t.Run("error - transformation info is missing id", func(t *testing.T) { + info := make(protocol.TransformationInfo) + info[document.PublishedProperty] = true + + result, err := transformer.TransformDocument(internal, info) + require.Error(t, err) + require.Nil(t, result) + require.Contains(t, err.Error(), "id is required for document transformation") + }) + + t.Run("error - missing context for key type", func(t *testing.T) { + doc, err := document.FromBytes([]byte(noContextForKeyType)) + require.NoError(t, err) + + transformer := New() + + internal := &protocol.ResolutionModel{Doc: doc} + + info := make(protocol.TransformationInfo) + info[document.IDProperty] = testID + info[document.PublishedProperty] = true + + result, err := transformer.TransformDocument(internal, info) + require.Error(t, err) + require.Nil(t, result) + require.Contains(t, err.Error(), "key context not found for key type: InvalidType") + }) +} + +func TestWithMethodContext(t *testing.T) { + doc := make(document.Document) + + transformer := New(WithMethodContext([]string{"ctx-1", "ctx-2"})) + + internal := &protocol.ResolutionModel{Doc: doc} + + info := make(protocol.TransformationInfo) + info[document.IDProperty] = testID + info[document.PublishedProperty] = true + + result, err := transformer.TransformDocument(internal, info) + require.NoError(t, err) + + 
jsonTransformed, err := json.Marshal(result.Document)
+    require.NoError(t, err)
+
+    didDoc, err := document.DidDocumentFromBytes(jsonTransformed)
+    require.NoError(t, err)
+    require.Equal(t, 3, len(didDoc.Context()))
+    require.Equal(t, "ctx-1", didDoc.Context()[1])
+    require.Equal(t, "ctx-2", didDoc.Context()[2])
+}
+
+func TestWithBase(t *testing.T) {
+    r := reader(t, "testdata/doc.json")
+    docBytes, err := io.ReadAll(r)
+    require.NoError(t, err)
+    doc, err := document.FromBytes(docBytes)
+    require.NoError(t, err)
+
+    transformer := New(WithBase(true))
+
+    internal := &protocol.ResolutionModel{Doc: doc}
+
+    info := make(protocol.TransformationInfo)
+    info[document.IDProperty] = testID
+    info[document.PublishedProperty] = true
+
+    result, err := transformer.TransformDocument(internal, info)
+    require.NoError(t, err)
+
+    jsonTransformed, err := json.Marshal(result.Document)
+    require.NoError(t, err)
+
+    didDoc, err := document.DidDocumentFromBytes(jsonTransformed)
+    require.NoError(t, err)
+
+    // test document has 7 keys defined, two distinct key types: EcdsaSecp256k1VerificationKey2019, JsonWebKey2020
+    // two distinct key contexts + did context + @base context
+    require.Equal(t, 4, len(didDoc.Context()))
+
+    // second context is @base
+    baseMap := didDoc.Context()[1].(map[string]interface{}) //nolint: errcheck
+    require.Equal(t, testID, baseMap["@base"])
+
+    // validate service id doesn't contain document id
+    service := didDoc.Services()[0]
+    require.NotContains(t, service.ID(), testID)
+
+    // validate public key id doesn't contain document id
+    pk := didDoc.VerificationMethods()[0]
+    require.NotContains(t, pk.ID(), testID)
+}
+
+func TestEd25519VerificationKey2018(t *testing.T) {
+    publicKey, _, err := ed25519.GenerateKey(rand.Reader)
+    require.NoError(t, err)
+
+    jwk, err := pubkey.GetPublicKeyJWK(publicKey)
+    require.NoError(t, err)
+
+    publicKeyBytes, err := json.Marshal(jwk)
+    require.NoError(t, err)
+
+    data := fmt.Sprintf(ed25519DocTemplate, string(publicKeyBytes))
+
+    doc, err := document.FromBytes([]byte(data))
+    require.NoError(t, err)
+
+    transformer := New()
+
+    internal := &protocol.ResolutionModel{Doc: doc}
+
+    info := make(protocol.TransformationInfo)
+    info[document.IDProperty] = testID
+    info[document.PublishedProperty] = true
+
+    result, err := transformer.TransformDocument(internal, info)
+    require.NoError(t, err)
+
+    jsonTransformed, err := json.Marshal(result.Document)
+    require.NoError(t, err)
+
+    didDoc, err := document.DidDocumentFromBytes(jsonTransformed)
+    require.NoError(t, err)
+    require.Equal(t, didDoc.VerificationMethods()[0].Controller(), didDoc.ID())
+    require.Equal(t, didContext, didDoc.Context()[0])
+
+    // validate service
+    service := didDoc.Services()[0]
+    require.Contains(t, service.ID(), testID)
+    require.NotEmpty(t, service.ServiceEndpoint())
+    require.Equal(t, "OpenIdConnectVersion1.0Service", service.Type())
+
+    // validate public key
+    pk := didDoc.VerificationMethods()[0]
+    require.Contains(t, pk.ID(), testID)
+    require.Equal(t, "Ed25519VerificationKey2018", pk.Type())
+    require.Empty(t, pk.PublicKeyJwk())
+
+    // test base58 encoding
+    require.Equal(t, base58.Encode(publicKey), pk.PublicKeyBase58())
+
+    // validate length of expected keys
+    expectedPublicKeys := []string{"assertion"}
+    require.Equal(t, len(expectedPublicKeys), len(didDoc.VerificationMethods()))
+
+    expectedAssertionMethodKeys := []string{"assertion"}
+    require.Equal(t, len(expectedAssertionMethodKeys), len(didDoc.AssertionMethods()))
+
+    require.Equal(t, 0, len(didDoc.Authentications()))
+
require.Equal(t, 0, len(didDoc.AgreementKeys())) +} + +func TestEd25519VerificationKey2020(t *testing.T) { + publicKey, _, err := ed25519.GenerateKey(rand.Reader) + require.NoError(t, err) + + jwk, err := pubkey.GetPublicKeyJWK(publicKey) + require.NoError(t, err) + + publicKeyBytes, err := json.Marshal(jwk) + require.NoError(t, err) + + data := fmt.Sprintf(ed25519VerificationKey2020DocTemplate, string(publicKeyBytes)) + + doc, err := document.FromBytes([]byte(data)) + require.NoError(t, err) + + transformer := New() + + internal := &protocol.ResolutionModel{Doc: doc} + + info := make(protocol.TransformationInfo) + info[document.IDProperty] = testID + info[document.PublishedProperty] = true + + result, err := transformer.TransformDocument(internal, info) + require.NoError(t, err) + + jsonTransformed, err := json.Marshal(result.Document) + require.NoError(t, err) + + didDoc, err := document.DidDocumentFromBytes(jsonTransformed) + require.NoError(t, err) + require.Equal(t, didDoc.VerificationMethods()[0].Controller(), didDoc.ID()) + require.Equal(t, didContext, didDoc.Context()[0]) + + // validate service + service := didDoc.Services()[0] + require.Contains(t, service.ID(), testID) + require.NotEmpty(t, service.ServiceEndpoint()) + require.Equal(t, "OpenIdConnectVersion1.0Service", service.Type()) + + // validate public key + pk := didDoc.VerificationMethods()[0] + require.Contains(t, pk.ID(), testID) + require.Equal(t, "Ed25519VerificationKey2020", pk.Type()) + require.Empty(t, pk.PublicKeyJwk()) + + // test base58 encoding + multibaseEncode, err := multibase.Encode(multibase.Base58BTC, publicKey) + require.NoError(t, err) + + require.Equal(t, multibaseEncode, pk.PublicKeyMultibase()) + + // validate length of expected keys + expectedPublicKeys := []string{"assertion"} + require.Equal(t, len(expectedPublicKeys), len(didDoc.VerificationMethods())) + + expectedAssertionMethodKeys := []string{"assertion"} + require.Equal(t, len(expectedAssertionMethodKeys), len(didDoc.AssertionMethods())) + + require.Equal(t, 0, len(didDoc.Authentications())) + require.Equal(t, 0, len(didDoc.AgreementKeys())) +} + +func TestEd25519VerificationKey2018_Error(t *testing.T) { + doc, err := document.FromBytes([]byte(ed25519Invalid)) + require.NoError(t, err) + + transformer := New() + + internal := &protocol.ResolutionModel{Doc: doc} + + info := make(protocol.TransformationInfo) + info[document.IDProperty] = testID + info[document.PublishedProperty] = true + + result, err := transformer.TransformDocument(internal, info) + require.Error(t, err) + require.Nil(t, result) + require.Contains(t, err.Error(), "unknown curve") +} + +func TestEd25519VerificationKey2020_Error(t *testing.T) { + doc, err := document.FromBytes([]byte(ed25519VerificationKey2020DocInvalid)) + require.NoError(t, err) + + transformer := New() + + internal := &protocol.ResolutionModel{Doc: doc} + + info := make(protocol.TransformationInfo) + info[document.IDProperty] = testID + info[document.PublishedProperty] = true + + result, err := transformer.TransformDocument(internal, info) + require.Error(t, err) + require.Nil(t, result) + require.Contains(t, err.Error(), "unknown curve") +} + +func TestPublicKeyBase58(t *testing.T) { + pkB58 := "36d8RkFy2SdabnGzcZ3LcCSDA8NP5T4bsoADwuXtoN3B" + + doc, err := document.FromBytes([]byte(fmt.Sprintf(publicKeyBase58Template, pkB58))) + require.NoError(t, err) + + transformer := New() + + internal := &protocol.ResolutionModel{Doc: doc} + + info := make(protocol.TransformationInfo) + info[document.IDProperty] = 
testID + info[document.PublishedProperty] = true + + result, err := transformer.TransformDocument(internal, info) + require.NoError(t, err) + + jsonTransformed, err := json.Marshal(result.Document) + require.NoError(t, err) + + didDoc, err := document.DidDocumentFromBytes(jsonTransformed) + require.NoError(t, err) + require.Equal(t, didDoc.VerificationMethods()[0].Controller(), didDoc.ID()) + require.Equal(t, didContext, didDoc.Context()[0]) + + pk := didDoc.VerificationMethods()[0] + require.Contains(t, pk.ID(), testID) + require.Equal(t, "Ed25519VerificationKey2018", pk.Type()) + require.Empty(t, pk.PublicKeyJwk()) + + require.Equal(t, pkB58, pk.PublicKeyBase58()) +} + +func TestPublicKeyMultibase(t *testing.T) { + pkMultibase := "z6Mkf5rGMoatrSj1f4CyvuHBeXJELe9RPdzo2PKGNCKVtZxP" + + doc, err := document.FromBytes([]byte(fmt.Sprintf(publicKeyMultibaseTemplate, pkMultibase))) + require.NoError(t, err) + + transformer := New() + + internal := &protocol.ResolutionModel{Doc: doc} + + info := make(protocol.TransformationInfo) + info[document.IDProperty] = testID + info[document.PublishedProperty] = true + + result, err := transformer.TransformDocument(internal, info) + require.NoError(t, err) + + jsonTransformed, err := json.Marshal(result.Document) + require.NoError(t, err) + + didDoc, err := document.DidDocumentFromBytes(jsonTransformed) + require.NoError(t, err) + require.Equal(t, didDoc.VerificationMethods()[0].Controller(), didDoc.ID()) + require.Equal(t, didContext, didDoc.Context()[0]) + + pk := didDoc.VerificationMethods()[0] + require.Contains(t, pk.ID(), testID) + require.Equal(t, "Ed25519VerificationKey2020", pk.Type()) + require.Empty(t, pk.PublicKeyJwk()) + + require.Equal(t, pkMultibase, pk.PublicKeyMultibase()) +} + +func reader(t *testing.T, filename string) io.Reader { + f, err := os.Open(filename) + require.NoError(t, err) + + return f +} + +const ed25519DocTemplate = `{ + "publicKey": [ + { + "id": "assertion", + "type": "Ed25519VerificationKey2018", + "purposes": ["assertionMethod"], + "publicKeyJwk": %s + } + ], + "service": [ + { + "id": "oidc", + "type": "OpenIdConnectVersion1.0Service", + "serviceEndpoint": "https://openid.example.com/" + } + ] +}` + +const ed25519VerificationKey2020DocTemplate = `{ + "publicKey": [ + { + "id": "assertion", + "type": "Ed25519VerificationKey2020", + "purposes": ["assertionMethod"], + "publicKeyJwk": %s + } + ], + "service": [ + { + "id": "oidc", + "type": "OpenIdConnectVersion1.0Service", + "serviceEndpoint": "https://openid.example.com/" + } + ] +}` + +const publicKeyBase58Template = `{ + "publicKey": [ + { + "id": "assertion", + "type": "Ed25519VerificationKey2018", + "purposes": ["assertionMethod"], + "publicKeyBase58": "%s" + } + ], + "service": [ + { + "id": "oidc", + "type": "OpenIdConnectVersion1.0Service", + "serviceEndpoint": "https://openid.example.com/" + } + ] +}` + +const publicKeyMultibaseTemplate = `{ + "publicKey": [ + { + "id": "assertion", + "type": "Ed25519VerificationKey2020", + "purposes": ["assertionMethod"], + "publicKeyMultibase": "%s" + } + ], + "service": [ + { + "id": "oidc", + "type": "OpenIdConnectVersion1.0Service", + "serviceEndpoint": "https://openid.example.com/" + } + ] +}` + +const ed25519Invalid = `{ + "publicKey": [ + { + "id": "assertion", + "type": "Ed25519VerificationKey2018", + "purposes": ["assertionMethod"], + "publicKeyJwk": { + "kty": "OKP", + "crv": "curve", + "x": "PUymIqdtF_qxaAqPABSw-C-owT1KYYQbsMKFM-L9fJA", + "y": "nM84jDHCMOTGTh_ZdHq4dBBdo4Z5PkEOW9jA8z8IsGc" + } + } + ] +}` + +const 
ed25519VerificationKey2020DocInvalid = `{ + "publicKey": [ + { + "id": "assertion", + "type": "Ed25519VerificationKey2020", + "purposes": ["assertionMethod"], + "publicKeyJwk": { + "kty": "OKP", + "crv": "curve", + "x": "PUymIqdtF_qxaAqPABSw-C-owT1KYYQbsMKFM-L9fJA", + "y": "nM84jDHCMOTGTh_ZdHq4dBBdo4Z5PkEOW9jA8z8IsGc" + } + } + ] +}` + +const noContextForKeyType = `{ + "publicKey": [ + { + "id": "assertion", + "type": "InvalidType", + "purposes": ["assertionMethod"], + "publicKeyJwk": { + "kty": "OKP", + "crv": "curve", + "x": "PUymIqdtF_qxaAqPABSw-C-owT1KYYQbsMKFM-L9fJA", + "y": "nM84jDHCMOTGTh_ZdHq4dBBdo4Z5PkEOW9jA8z8IsGc" + } + } + ] +}` + +const allKeyTypes = `{ + "publicKey": [ + { + "id": "key-1", + "type": "Bls12381G2Key2020", + "purposes": ["keyAgreement"], + "publicKeyJwk": { + "kty": "OKP", + "crv": "P-256", + "x": "PUymIqdtF_qxaAqPABSw-C-owT1KYYQbsMKFM-L9fJA", + "y": "nM84jDHCMOTGTh_ZdHq4dBBdo4Z5PkEOW9jA8z8IsGc" + } + }, + { + "id": "key-2", + "type": "JsonWebKey2020", + "purposes": ["authentication"], + "publicKeyJwk": { + "kty": "EC", + "crv": "P-256", + "x": "PUymIqdtF_qxaAqPABSw-C-owT1KYYQbsMKFM-L9fJA", + "y": "nM84jDHCMOTGTh_ZdHq4dBBdo4Z5PkEOW9jA8z8IsGc" + } + }, + { + "id": "key-3", + "type": "EcdsaSecp256k1VerificationKey2019", + "purposes": ["assertionMethod"], + "publicKeyJwk": { + "kty": "EC", + "crv": "P-256K", + "x": "PUymIqdtF_qxaAqPABSw-C-owT1KYYQbsMKFM-L9fJA", + "y": "nM84jDHCMOTGTh_ZdHq4dBBdo4Z5PkEOW9jA8z8IsGc" + } + }, + { + "id": "key-4", + "type": "Ed25519VerificationKey2018", + "purposes": ["assertionMethod"], + "publicKeyJwk": { + "kty":"OKP", + "crv":"Ed25519", + "x":"K24aib_Py_D2ST8F_IiIA2SJo1EiseS0hbaa36tVSAU" + } + }, + { + "id": "key-5", + "type": "X25519KeyAgreementKey2019", + "purposes": ["keyAgreement"], + "publicKeyJwk": { + "kty": "EC", + "crv": "P-256", + "x": "PUymIqdtF_qxaAqPABSw-C-owT1KYYQbsMKFM-L9fJA", + "y": "nM84jDHCMOTGTh_ZdHq4dBBdo4Z5PkEOW9jA8z8IsGc" + } + }, + { + "id": "key-6", + "type": "Ed25519VerificationKey2020", + "purposes": ["assertionMethod"], + "publicKeyJwk": { + "kty":"OKP", + "crv":"Ed25519", + "x":"K24aib_Py_D2ST8F_IiIA2SJo1EiseS0hbaa36tVSAU" + } + } + ] +}` diff --git a/method/sidetreelongform/sidetree-core/versions/1_0/doctransformer/doctransformer/transformer.go b/method/sidetreelongform/sidetree-core/versions/1_0/doctransformer/doctransformer/transformer.go new file mode 100644 index 0000000..def0f94 --- /dev/null +++ b/method/sidetreelongform/sidetree-core/versions/1_0/doctransformer/doctransformer/transformer.go @@ -0,0 +1,77 @@ +/* +Copyright SecureKey Technologies Inc. All Rights Reserved. + +SPDX-License-Identifier: Apache-2.0 +*/ + +package doctransformer + +import ( + "errors" + + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/api/protocol" + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/document" + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/versions/1_0/doctransformer/metadata" +) + +// Transformer is responsible for transforming internal to external document. +type Transformer struct { + includePublishedOperations bool + includeUnpublishedOperations bool +} + +// Option is a registry instance option. +type Option func(opts *Transformer) + +// New creates a new document transformer. +func New(opts ...Option) *Transformer { + transformer := &Transformer{} + + // apply options + for _, opt := range opts { + opt(transformer) + } + + return transformer +} + +// WithIncludePublishedOperations sets optional include published operations flag. 
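+// When enabled, published operations from the resolution model are copied into the method
+// metadata of the resolution result.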
+func WithIncludePublishedOperations(enabled bool) Option { + return func(opts *Transformer) { + opts.includePublishedOperations = enabled + } +} + +// WithIncludeUnpublishedOperations sets optional include unpublished operations flag. +func WithIncludeUnpublishedOperations(enabled bool) Option { + return func(opts *Transformer) { + opts.includeUnpublishedOperations = enabled + } +} + +// TransformDocument takes internal resolution model and transformation info and creates +// external representation of document (resolution result). +func (v *Transformer) TransformDocument(rm *protocol.ResolutionModel, + info protocol.TransformationInfo) (*document.ResolutionResult, error) { + docMetadata, err := metadata.New( + metadata.WithIncludeUnpublishedOperations(v.includeUnpublishedOperations), + metadata.WithIncludePublishedOperations(v.includePublishedOperations)). + CreateDocumentMetadata(rm, info) + if err != nil { + return nil, err + } + + id, ok := info[document.IDProperty] + if !ok { + return nil, errors.New("id is required for document transformation") + } + + rm.Doc[document.IDProperty] = id + + result := &document.ResolutionResult{ + Document: rm.Doc, + DocumentMetadata: docMetadata, + } + + return result, nil +} diff --git a/method/sidetreelongform/sidetree-core/versions/1_0/doctransformer/doctransformer/transformer_test.go b/method/sidetreelongform/sidetree-core/versions/1_0/doctransformer/doctransformer/transformer_test.go new file mode 100644 index 0000000..e47c37e --- /dev/null +++ b/method/sidetreelongform/sidetree-core/versions/1_0/doctransformer/doctransformer/transformer_test.go @@ -0,0 +1,159 @@ +/* +Copyright SecureKey Technologies Inc. All Rights Reserved. + +SPDX-License-Identifier: Apache-2.0 +*/ + +package doctransformer + +import ( + "testing" + + "github.com/stretchr/testify/require" + + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/api/operation" + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/api/protocol" + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/document" + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/versions/1_0/doctransformer/metadata" +) + +const testID = "doc:abc:123" + +func TestNewTransformer(t *testing.T) { + transformer := New() + require.NotNil(t, transformer) + require.Equal(t, false, transformer.includePublishedOperations) + require.Equal(t, false, transformer.includeUnpublishedOperations) + + transformer = New(WithIncludeUnpublishedOperations(true), WithIncludePublishedOperations(true)) + require.NotNil(t, transformer) + require.Equal(t, true, transformer.includePublishedOperations) + require.Equal(t, true, transformer.includeUnpublishedOperations) +} + +func TestTransformDocument(t *testing.T) { + doc, err := document.FromBytes(validDoc) + require.NoError(t, err) + + transformer := New() + + internal := &protocol.ResolutionModel{Doc: doc, RecoveryCommitment: "recovery", UpdateCommitment: "update"} + + t.Run("success", func(t *testing.T) { + info := make(protocol.TransformationInfo) + info[document.IDProperty] = "did:abc:123" + info[document.PublishedProperty] = true + + result, err := transformer.TransformDocument(internal, info) + require.NoError(t, err) + require.Equal(t, "did:abc:123", result.Document[document.IDProperty]) + + methodMetadataEntry, ok := result.DocumentMetadata[document.MethodProperty] + require.True(t, ok) + methodMetadata, ok := methodMetadataEntry.(document.Metadata) + require.True(t, ok) + + require.Equal(t, true, 
methodMetadata[document.PublishedProperty]) + require.Equal(t, "recovery", methodMetadata[document.RecoveryCommitmentProperty]) + require.Equal(t, "update", methodMetadata[document.UpdateCommitmentProperty]) + }) + + t.Run("success - with canonical, equivalent ID", func(t *testing.T) { + info := make(protocol.TransformationInfo) + info[document.IDProperty] = "did:abc:123" + info[document.PublishedProperty] = true + info[document.CanonicalIDProperty] = "canonical" + info[document.EquivalentIDProperty] = []string{"equivalent"} + + result, err := transformer.TransformDocument(internal, info) + require.NoError(t, err) + require.Equal(t, "did:abc:123", result.Document[document.IDProperty]) + + methodMetadataEntry, ok := result.DocumentMetadata[document.MethodProperty] + require.True(t, ok) + methodMetadata, ok := methodMetadataEntry.(document.Metadata) + require.True(t, ok) + + require.Equal(t, true, methodMetadata[document.PublishedProperty]) + require.Equal(t, "recovery", methodMetadata[document.RecoveryCommitmentProperty]) + require.Equal(t, "update", methodMetadata[document.UpdateCommitmentProperty]) + + require.Equal(t, "canonical", result.DocumentMetadata[document.CanonicalIDProperty]) + require.NotEmpty(t, result.DocumentMetadata[document.EquivalentIDProperty]) + }) + + t.Run("success - include operations (published/unpublished)", func(t *testing.T) { + trans := New( + WithIncludePublishedOperations(true), + WithIncludeUnpublishedOperations(true)) + + info := make(protocol.TransformationInfo) + info[document.IDProperty] = testID + info[document.PublishedProperty] = true + + publishedOps := []*operation.AnchoredOperation{ + {Type: "create", UniqueSuffix: "suffix", CanonicalReference: "ref1"}, + {Type: "update", UniqueSuffix: "suffix", CanonicalReference: "ref2"}, + } + + unpublishedOps := []*operation.AnchoredOperation{ + {Type: "update", UniqueSuffix: "suffix"}, + } + + rm := &protocol.ResolutionModel{ + Doc: doc, + RecoveryCommitment: "recovery", + UpdateCommitment: "update", + PublishedOperations: publishedOps, + UnpublishedOperations: unpublishedOps, + } + + result, err := trans.TransformDocument(rm, info) + require.NoError(t, err) + require.NotNil(t, result) + require.Equal(t, testID, result.Document[document.IDProperty]) + + methodMetadataEntry, ok := result.DocumentMetadata[document.MethodProperty] + require.True(t, ok) + methodMetadata, ok := methodMetadataEntry.(document.Metadata) + require.True(t, ok) + + require.Equal(t, true, methodMetadata[document.PublishedProperty]) + require.Equal(t, "recovery", methodMetadata[document.RecoveryCommitmentProperty]) + require.Equal(t, "update", methodMetadata[document.UpdateCommitmentProperty]) + + require.Equal(t, 2, len(methodMetadata[document.PublishedOperationsProperty].([]*metadata.PublishedOperation))) + require.Equal(t, 1, len(methodMetadata[document.UnpublishedOperationsProperty].([]*metadata.UnpublishedOperation))) + }) + + t.Run("error - internal document is missing", func(t *testing.T) { + info := make(protocol.TransformationInfo) + info[document.IDProperty] = "doc:abc:xyz" + info[document.PublishedProperty] = true + + result, err := transformer.TransformDocument(nil, info) + require.Error(t, err) + require.Nil(t, result) + require.Contains(t, err.Error(), "resolution model is required for creating document metadata") + }) + + t.Run("error - transformation info is missing", func(t *testing.T) { + result, err := transformer.TransformDocument(internal, nil) + require.Error(t, err) + require.Nil(t, result) + require.Contains(t, 
err.Error(), "transformation info is required for creating document metadata") + }) + + t.Run("error - transformation info is missing id", func(t *testing.T) { + info := make(protocol.TransformationInfo) + info[document.PublishedProperty] = true + + result, err := transformer.TransformDocument(internal, info) + require.Error(t, err) + require.Nil(t, result) + require.Contains(t, err.Error(), "id is required for document transformation") + }) +} + +//nolint:gochecknoglobals +var validDoc = []byte(`{ "name": "John Smith" }`) diff --git a/method/sidetreelongform/sidetree-core/versions/1_0/doctransformer/metadata/metadata.go b/method/sidetreelongform/sidetree-core/versions/1_0/doctransformer/metadata/metadata.go new file mode 100644 index 0000000..ea02629 --- /dev/null +++ b/method/sidetreelongform/sidetree-core/versions/1_0/doctransformer/metadata/metadata.go @@ -0,0 +1,230 @@ +/* +Copyright SecureKey Technologies Inc. All Rights Reserved. + +SPDX-License-Identifier: Apache-2.0 +*/ + +package metadata + +import ( + "errors" + "sort" + "time" + + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/api/operation" + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/api/protocol" + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/document" +) + +// Metadata is responsible for creating document metadata. +type Metadata struct { + includePublishedOperations bool + includeUnpublishedOperations bool +} + +// Option is a metadata instance option. +type Option func(opts *Metadata) + +// New creates a new metadata transformer. +func New(opts ...Option) *Metadata { + md := &Metadata{} + + // apply options + for _, opt := range opts { + opt(md) + } + + return md +} + +// WithIncludePublishedOperations sets optional include published operations flag. +func WithIncludePublishedOperations(enabled bool) Option { + return func(opts *Metadata) { + opts.includePublishedOperations = enabled + } +} + +// WithIncludeUnpublishedOperations sets optional include unpublished operations flag. +func WithIncludeUnpublishedOperations(enabled bool) Option { + return func(opts *Metadata) { + opts.includeUnpublishedOperations = enabled + } +} + +// CreateDocumentMetadata will create document metadata. 
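+// Method-level metadata (published flag, commitments and, optionally, operation lists) is nested
+// under the method property of the returned document metadata.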
+//
+//nolint:funlen,gocyclo
+func (t *Metadata) CreateDocumentMetadata(rm *protocol.ResolutionModel, info protocol.TransformationInfo,
+) (document.Metadata, error) {
+    if rm == nil || rm.Doc == nil {
+        return nil, errors.New("resolution model is required for creating document metadata")
+    }
+
+    if info == nil {
+        return nil, errors.New("transformation info is required for creating document metadata")
+    }
+
+    published, ok := info[document.PublishedProperty]
+    if !ok {
+        return nil, errors.New("published is required for creating document metadata")
+    }
+
+    methodMetadata := make(document.Metadata)
+    methodMetadata[document.PublishedProperty] = published
+
+    if rm.RecoveryCommitment != "" {
+        methodMetadata[document.RecoveryCommitmentProperty] = rm.RecoveryCommitment
+    }
+
+    if rm.UpdateCommitment != "" {
+        methodMetadata[document.UpdateCommitmentProperty] = rm.UpdateCommitment
+    }
+
+    if rm.AnchorOrigin != nil {
+        methodMetadata[document.AnchorOriginProperty] = rm.AnchorOrigin
+    }
+
+    if t.includeUnpublishedOperations && len(rm.UnpublishedOperations) > 0 {
+        methodMetadata[document.UnpublishedOperationsProperty] = getUnpublishedOperations(rm.UnpublishedOperations)
+    }
+
+    if t.includePublishedOperations && len(rm.PublishedOperations) > 0 {
+        methodMetadata[document.PublishedOperationsProperty] = getPublishedOperations(rm.PublishedOperations)
+    }
+
+    docMetadata := make(document.Metadata)
+    docMetadata[document.MethodProperty] = methodMetadata
+
+    if rm.Deactivated {
+        docMetadata[document.DeactivatedProperty] = rm.Deactivated
+    }
+
+    canonicalID, ok := info[document.CanonicalIDProperty]
+    if ok {
+        docMetadata[document.CanonicalIDProperty] = canonicalID
+    }
+
+    equivalentID, ok := info[document.EquivalentIDProperty]
+    if ok {
+        docMetadata[document.EquivalentIDProperty] = equivalentID
+    }
+
+    if published.(bool) {
+        docMetadata[document.CreatedProperty] = time.Unix(int64(rm.CreatedTime), 0).UTC().Format(time.RFC3339)
+    }
+
+    if rm.VersionID != "" {
+        docMetadata[document.VersionIDProperty] = rm.VersionID
+        if rm.UpdatedTime > 0 {
+            docMetadata[document.UpdatedProperty] = time.Unix(int64(rm.UpdatedTime), 0).UTC().Format(time.RFC3339)
+        }
+    }
+
+    return docMetadata, nil
+}
+
+// sortOperations orders operations by transaction (anchoring) time, then by transaction number.
+func sortOperations(ops []*operation.AnchoredOperation) {
+    sort.Slice(ops, func(i, j int) bool {
+        if ops[i].TransactionTime != ops[j].TransactionTime {
+            return ops[i].TransactionTime < ops[j].TransactionTime
+        }
+
+        return ops[i].TransactionNumber < ops[j].TransactionNumber
+    })
+}
+
+// sort published operations by transaction (anchoring) time, then remove duplicates.
+func getPublishedOperations(ops []*operation.AnchoredOperation) []*PublishedOperation {
+    sortOperations(ops)
+
+    uniqueOps := make(map[string]bool)
+
+    var publishedOps []*PublishedOperation
+
+    for _, op := range ops {
+        _, ok := uniqueOps[op.CanonicalReference]
+        if !ok {
+            publishedOps = append(publishedOps,
+                &PublishedOperation{
+                    Type:                 op.Type,
+                    OperationRequest:     op.OperationRequest,
+                    TransactionTime:      op.TransactionTime,
+                    TransactionNumber:    op.TransactionNumber,
+                    ProtocolVersion:      op.ProtocolVersion,
+                    CanonicalReference:   op.CanonicalReference,
+                    EquivalentReferences: op.EquivalentReferences,
+                    AnchorOrigin:         op.AnchorOrigin,
+                })
+
+            uniqueOps[op.CanonicalReference] = true
+        }
+    }
+
+    return publishedOps
+}
+
+// sort unpublished operations by request time.
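+// Unpublished operations carry no canonical reference, so no de-duplication is performed.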
+func getUnpublishedOperations(ops []*operation.AnchoredOperation) []*UnpublishedOperation {
+    sortOperations(ops)
+
+    unpublishedOps := make([]*UnpublishedOperation, len(ops))
+
+    for i, op := range ops {
+        unpublishedOps[i] = &UnpublishedOperation{
+            Type:             op.Type,
+            OperationRequest: op.OperationRequest,
+            TransactionTime:  op.TransactionTime,
+            ProtocolVersion:  op.ProtocolVersion,
+            AnchorOrigin:     op.AnchorOrigin,
+        }
+    }
+
+    return unpublishedOps
+}
+
+// PublishedOperation defines a published operation for metadata. It is a subset of an anchored operation.
+type PublishedOperation struct {
+
+    // Type defines operation type.
+    Type operation.Type `json:"type"`
+
+    // OperationRequest is the original operation request.
+    OperationRequest []byte `json:"operation"`
+
+    // TransactionTime is the logical anchoring time.
+    TransactionTime uint64 `json:"transactionTime"`
+
+    // TransactionNumber is the transaction number of the transaction this operation was batched within.
+    TransactionNumber uint64 `json:"transactionNumber"`
+
+    // ProtocolVersion is the genesis time of the protocol that was used for this operation.
+    ProtocolVersion uint64 `json:"protocolVersion"`
+
+    // CanonicalReference contains the canonical reference that applies to this operation.
+    CanonicalReference string `json:"canonicalReference,omitempty"`
+
+    // EquivalentReferences contains the equivalent references that apply to this operation.
+    EquivalentReferences []string `json:"equivalentReferences,omitempty"`
+
+    // AnchorOrigin is the anchor origin.
+    AnchorOrigin interface{} `json:"anchorOrigin,omitempty"`
+}
+
+// UnpublishedOperation defines an unpublished operation for metadata. It is a subset of an anchored operation.
+type UnpublishedOperation struct {
+
+    // Type defines operation type.
+    Type operation.Type `json:"type"`
+
+    // OperationRequest is the original operation request.
+    OperationRequest []byte `json:"operation"`
+
+    // TransactionTime is the logical anchoring time.
+    TransactionTime uint64 `json:"transactionTime"`
+
+    // ProtocolVersion is the genesis time of the protocol that was used for this operation.
+    ProtocolVersion uint64 `json:"protocolVersion"`
+
+    // AnchorOrigin is the anchor origin.
+    AnchorOrigin interface{} `json:"anchorOrigin,omitempty"`
+}
diff --git a/method/sidetreelongform/sidetree-core/versions/1_0/doctransformer/metadata/metadata_test.go b/method/sidetreelongform/sidetree-core/versions/1_0/doctransformer/metadata/metadata_test.go
new file mode 100644
index 0000000..c7a0b64
--- /dev/null
+++ b/method/sidetreelongform/sidetree-core/versions/1_0/doctransformer/metadata/metadata_test.go
@@ -0,0 +1,220 @@
+/*
+Copyright SecureKey Technologies Inc. All Rights Reserved.
+ +SPDX-License-Identifier: Apache-2.0 +*/ + +package metadata + +import ( + "testing" + "time" + + "github.com/stretchr/testify/require" + + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/api/operation" + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/api/protocol" + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/document" +) + +const ( + testDID = "did:abc:123" + canonicalID = "canonical" +) + +func TestPopulateDocumentMetadata(t *testing.T) { + doc, err := document.FromBytes(validDoc) + require.NoError(t, err) + + createdTimeStr := "2020-12-20T19:17:47Z" + updatedTimeStr := "2022-12-20T19:17:47Z" + + createdTime, err := time.Parse(time.RFC3339, createdTimeStr) + require.NoError(t, err) + + updatedTime, err := time.Parse(time.RFC3339, updatedTimeStr) + require.NoError(t, err) + + internal := &protocol.ResolutionModel{ + Doc: doc, + RecoveryCommitment: "recovery", + UpdateCommitment: "update", + AnchorOrigin: "origin.com", + VersionID: "version", + CreatedTime: uint64(createdTime.Unix()), + UpdatedTime: uint64(updatedTime.Unix()), + } + + t.Run("success - all info present", func(t *testing.T) { + info := make(protocol.TransformationInfo) + info[document.IDProperty] = testDID + info[document.PublishedProperty] = true + info[document.CanonicalIDProperty] = canonicalID + info[document.EquivalentIDProperty] = []string{"equivalent"} + info[document.AnchorOriginProperty] = "domain.com" + info[document.DeactivatedProperty] = true + + documentMetadata, err := New(WithIncludeUnpublishedOperations(true), + WithIncludePublishedOperations(true)).CreateDocumentMetadata(internal, info) + require.NoError(t, err) + + require.Empty(t, documentMetadata[document.DeactivatedProperty]) + require.Equal(t, canonicalID, documentMetadata[document.CanonicalIDProperty]) + require.NotEmpty(t, documentMetadata[document.EquivalentIDProperty]) + + require.Equal(t, createdTimeStr, documentMetadata[document.CreatedProperty]) + require.Equal(t, updatedTimeStr, documentMetadata[document.UpdatedProperty]) + + methodMetadataEntry, ok := documentMetadata[document.MethodProperty] + require.True(t, ok) + methodMetadata, ok := methodMetadataEntry.(document.Metadata) + require.True(t, ok) + + require.Equal(t, true, methodMetadata[document.PublishedProperty]) + require.Equal(t, "recovery", methodMetadata[document.RecoveryCommitmentProperty]) + require.Equal(t, "update", methodMetadata[document.UpdateCommitmentProperty]) + }) + + t.Run("success - include operations (published/unpublished)", func(t *testing.T) { + info := make(protocol.TransformationInfo) + info[document.IDProperty] = testDID + info[document.PublishedProperty] = true + info[document.CanonicalIDProperty] = canonicalID + info[document.EquivalentIDProperty] = []string{"equivalent"} + info[document.AnchorOriginProperty] = "domain.com" + + publishedOps := []*operation.AnchoredOperation{ + {Type: "create", UniqueSuffix: "suffix", CanonicalReference: "ref1", TransactionTime: 1}, + {Type: "update", UniqueSuffix: "suffix", CanonicalReference: "ref3", TransactionTime: 3}, + {Type: "update", UniqueSuffix: "suffix", CanonicalReference: "ref2", TransactionTime: 2}, + {Type: "update", UniqueSuffix: "suffix", CanonicalReference: "ref2", TransactionTime: 2}, + } + + unpublishedOps := []*operation.AnchoredOperation{ + {Type: "update", UniqueSuffix: "suffix", TransactionTime: 4}, + } + + rm := &protocol.ResolutionModel{ + Doc: doc, + RecoveryCommitment: "recovery", + UpdateCommitment: "update", + PublishedOperations: 
publishedOps, + UnpublishedOperations: unpublishedOps, + } + + documentMetadata, err := New(WithIncludeUnpublishedOperations(true), + WithIncludePublishedOperations(true)).CreateDocumentMetadata(rm, info) + require.NoError(t, err) + + require.Empty(t, documentMetadata[document.DeactivatedProperty]) + require.Equal(t, canonicalID, documentMetadata[document.CanonicalIDProperty]) + require.NotEmpty(t, documentMetadata[document.EquivalentIDProperty]) + + methodMetadataEntry, ok := documentMetadata[document.MethodProperty] + require.True(t, ok) + methodMetadata, ok := methodMetadataEntry.(document.Metadata) + require.True(t, ok) + + require.Equal(t, true, methodMetadata[document.PublishedProperty]) + require.Equal(t, "recovery", methodMetadata[document.RecoveryCommitmentProperty]) + require.Equal(t, "update", methodMetadata[document.UpdateCommitmentProperty]) + + require.Equal(t, 3, len(methodMetadata[document.PublishedOperationsProperty].([]*PublishedOperation))) + require.Equal(t, 1, len(methodMetadata[document.UnpublishedOperationsProperty].([]*UnpublishedOperation))) + }) + + t.Run("success - deactivated, commitments empty", func(t *testing.T) { + internal2 := &protocol.ResolutionModel{ + Doc: doc, + Deactivated: true, + CreatedTime: uint64(time.Now().Unix() - 60), + UpdatedTime: uint64(time.Now().Unix()), + VersionID: "version", + } + + info := make(protocol.TransformationInfo) + info[document.IDProperty] = testDID + info[document.PublishedProperty] = true + info[document.CanonicalIDProperty] = canonicalID + + documentMetadata, err := New().CreateDocumentMetadata(internal2, info) + require.NoError(t, err) + + require.Equal(t, true, documentMetadata[document.DeactivatedProperty]) + require.NotEmpty(t, documentMetadata[document.UpdatedProperty]) + require.NotEmpty(t, documentMetadata[document.CreatedProperty]) + require.Equal(t, canonicalID, documentMetadata[document.CanonicalIDProperty]) + require.Empty(t, documentMetadata[document.EquivalentIDProperty]) + + methodMetadataEntry, ok := documentMetadata[document.MethodProperty] + require.True(t, ok) + methodMetadata, ok := methodMetadataEntry.(document.Metadata) + require.True(t, ok) + + require.Equal(t, true, methodMetadata[document.PublishedProperty]) + require.Empty(t, methodMetadata[document.RecoveryCommitmentProperty]) + require.Empty(t, methodMetadata[document.UpdateCommitmentProperty]) + }) + + t.Run("success - deactivated, no version ID (unpublished)", func(t *testing.T) { + internal2 := &protocol.ResolutionModel{ + Doc: doc, + Deactivated: true, + CreatedTime: uint64(time.Now().Unix() - 60), + UpdatedTime: uint64(time.Now().Unix()), + } + + info := make(protocol.TransformationInfo) + info[document.IDProperty] = testDID + info[document.PublishedProperty] = true + info[document.CanonicalIDProperty] = canonicalID + + documentMetadata, err := New().CreateDocumentMetadata(internal2, info) + require.NoError(t, err) + + require.Equal(t, true, documentMetadata[document.DeactivatedProperty]) + require.Empty(t, documentMetadata[document.UpdatedProperty]) + require.NotEmpty(t, documentMetadata[document.CreatedProperty]) + require.Equal(t, canonicalID, documentMetadata[document.CanonicalIDProperty]) + require.Empty(t, documentMetadata[document.EquivalentIDProperty]) + + methodMetadataEntry, ok := documentMetadata[document.MethodProperty] + require.True(t, ok) + methodMetadata, ok := methodMetadataEntry.(document.Metadata) + require.True(t, ok) + + require.Equal(t, true, methodMetadata[document.PublishedProperty]) + require.Empty(t, 
methodMetadata[document.RecoveryCommitmentProperty]) + require.Empty(t, methodMetadata[document.UpdateCommitmentProperty]) + }) + + t.Run("error - internal document is missing", func(t *testing.T) { + info := make(protocol.TransformationInfo) + info[document.IDProperty] = "doc:abc:xyz" + info[document.PublishedProperty] = true + + result, err := New().CreateDocumentMetadata(nil, info) + require.Error(t, err) + require.Nil(t, result) + require.Contains(t, err.Error(), "resolution model is required for creating document metadata") + }) + + t.Run("error - transformation info is missing", func(t *testing.T) { + result, err := New().CreateDocumentMetadata(internal, nil) + require.Error(t, err) + require.Nil(t, result) + require.Contains(t, err.Error(), "transformation info is required for creating document metadata") + }) + + t.Run("error - transformation info is missing published", func(t *testing.T) { + info := make(protocol.TransformationInfo) + + result, err := New().CreateDocumentMetadata(internal, info) + require.Error(t, err) + require.Nil(t, result) + require.Contains(t, err.Error(), "published is required for creating document metadata") + }) +} + +//nolint:gochecknoglobals +var validDoc = []byte(`{ "name": "John Smith" }`) diff --git a/method/sidetreelongform/sidetree-core/versions/1_0/docvalidator/didvalidator/testdata/doc.json b/method/sidetreelongform/sidetree-core/versions/1_0/docvalidator/didvalidator/testdata/doc.json new file mode 100644 index 0000000..bb21b8b --- /dev/null +++ b/method/sidetreelongform/sidetree-core/versions/1_0/docvalidator/didvalidator/testdata/doc.json @@ -0,0 +1,144 @@ +{ + "publicKey": [ + { + "id": "master", + "type": "EcdsaSecp256k1VerificationKey2019", + "purposes": ["authentication", "assertionMethod", "keyAgreement", "capabilityDelegation", "capabilityInvocation"], + "publicKeyJwk": { + "kty": "EC", + "crv": "P-256K", + "x": "PUymIqdtF_qxaAqPABSw-C-owT1KYYQbsMKFM-L9fJA", + "y": "nM84jDHCMOTGTh_ZdHq4dBBdo4Z5PkEOW9jA8z8IsGc" + } + }, + { + "id": "dual-auth-gen", + "type": "JsonWebKey2020", + "purposes": ["authentication"], + "publicKeyJwk": { + "kty": "EC", + "crv": "P-256K", + "x": "PUymIqdtF_qxaAqPABSw-C-owT1KYYQbsMKFM-L9fJA", + "y": "nM84jDHCMOTGTh_ZdHq4dBBdo4Z5PkEOW9jA8z8IsGc" + } + }, + { + "id": "auth-only", + "type": "JsonWebKey2020", + "purposes": ["authentication"], + "publicKeyJwk": { + "kty": "EC", + "crv": "P-256K", + "x": "PUymIqdtF_qxaAqPABSw-C-owT1KYYQbsMKFM-L9fJA", + "y": "nM84jDHCMOTGTh_ZdHq4dBBdo4Z5PkEOW9jA8z8IsGc" + } + }, + { + "id": "dual-assertion-gen", + "type": "JsonWebKey2020", + "purposes": ["assertionMethod"], + "publicKeyJwk": { + "kty": "EC", + "crv": "P-256K", + "x": "PUymIqdtF_qxaAqPABSw-C-owT1KYYQbsMKFM-L9fJA", + "y": "nM84jDHCMOTGTh_ZdHq4dBBdo4Z5PkEOW9jA8z8IsGc" + } + }, + { + "id": "assertion-only", + "type": "JsonWebKey2020", + "purposes": ["assertionMethod"], + "publicKeyJwk": { + "kty": "EC", + "crv": "P-256K", + "x": "PUymIqdtF_qxaAqPABSw-C-owT1KYYQbsMKFM-L9fJA", + "y": "nM84jDHCMOTGTh_ZdHq4dBBdo4Z5PkEOW9jA8z8IsGc" + } + }, + { + "id": "dual-agreement-gen", + "type": "JsonWebKey2020", + "purposes": ["keyAgreement"], + "publicKeyJwk": { + "kty": "EC", + "crv": "P-256K", + "x": "PUymIqdtF_qxaAqPABSw-C-owT1KYYQbsMKFM-L9fJA", + "y": "nM84jDHCMOTGTh_ZdHq4dBBdo4Z5PkEOW9jA8z8IsGc" + } + }, + { + "id": "agreement-only", + "type": "JsonWebKey2020", + "purposes": ["keyAgreement"], + "publicKeyJwk": { + "kty": "EC", + "crv": "P-256K", + "x": "PUymIqdtF_qxaAqPABSw-C-owT1KYYQbsMKFM-L9fJA", + "y": 
"nM84jDHCMOTGTh_ZdHq4dBBdo4Z5PkEOW9jA8z8IsGc" + } + }, + { + "id": "dual-invocation-gen", + "type": "JsonWebKey2020", + "purposes": ["capabilityInvocation"], + "publicKeyJwk": { + "kty": "EC", + "crv": "P-256K", + "x": "PUymIqdtF_qxaAqPABSw-C-owT1KYYQbsMKFM-L9fJA", + "y": "nM84jDHCMOTGTh_ZdHq4dBBdo4Z5PkEOW9jA8z8IsGc" + } + }, + { + "id": "invocation-only", + "type": "JsonWebKey2020", + "purposes": ["capabilityInvocation"], + "publicKeyJwk": { + "kty": "EC", + "crv": "P-256K", + "x": "PUymIqdtF_qxaAqPABSw-C-owT1KYYQbsMKFM-L9fJA", + "y": "nM84jDHCMOTGTh_ZdHq4dBBdo4Z5PkEOW9jA8z8IsGc" + } + }, + { + "id": "dual-delegation-gen", + "type": "JsonWebKey2020", + "purposes": ["capabilityDelegation"], + "publicKeyJwk": { + "kty": "EC", + "crv": "P-256K", + "x": "PUymIqdtF_qxaAqPABSw-C-owT1KYYQbsMKFM-L9fJA", + "y": "nM84jDHCMOTGTh_ZdHq4dBBdo4Z5PkEOW9jA8z8IsGc" + } + }, + { + "id": "delegation-only", + "type": "JsonWebKey2020", + "purposes": ["capabilityDelegation"], + "publicKeyJwk": { + "kty": "EC", + "crv": "P-256K", + "x": "PUymIqdtF_qxaAqPABSw-C-owT1KYYQbsMKFM-L9fJA", + "y": "nM84jDHCMOTGTh_ZdHq4dBBdo4Z5PkEOW9jA8z8IsGc" + } + }, + { + "id": "general-only", + "type": "JsonWebKey2020", + "publicKeyJwk": { + "kty": "EC", + "crv": "P-256K", + "x": "PUymIqdtF_qxaAqPABSw-C-owT1KYYQbsMKFM-L9fJA", + "y": "nM84jDHCMOTGTh_ZdHq4dBBdo4Z5PkEOW9jA8z8IsGc" + } + } + ], + "service": [ + { + "id": "hub", + "type": "IdentityHub", + "routingKeys": "routingKeysValue", + "recipientKeys": "recipientKeysValue", + "serviceEndpoint": "https://example.com/hub/" + } + ] +} \ No newline at end of file diff --git a/method/sidetreelongform/sidetree-core/versions/1_0/docvalidator/didvalidator/validator.go b/method/sidetreelongform/sidetree-core/versions/1_0/docvalidator/didvalidator/validator.go new file mode 100644 index 0000000..1a0566c --- /dev/null +++ b/method/sidetreelongform/sidetree-core/versions/1_0/docvalidator/didvalidator/validator.go @@ -0,0 +1,63 @@ +/* +Copyright SecureKey Technologies Inc. All Rights Reserved. + +SPDX-License-Identifier: Apache-2.0 +*/ + +package didvalidator + +import ( + "errors" + + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/document" +) + +const didSuffix = "didSuffix" + +// Validator is responsible for validating did operations and sidetree rules. +type Validator struct { +} + +// New creates a new did validator. +func New() *Validator { + return &Validator{} +} + +// IsValidPayload verifies that the given payload is a valid Sidetree specific payload +// that can be accepted by the Sidetree update operations. +func (v *Validator) IsValidPayload(payload []byte) error { + doc, err := document.FromBytes(payload) + if err != nil { + return err + } + + didSuffix := doc.GetStringValue(didSuffix) + if didSuffix == "" { + return errors.New("missing did unique suffix") + } + + // checking for previous operation existence has been pushed to handler + return nil +} + +// IsValidOriginalDocument verifies that the given payload is a valid Sidetree specific did document that +// can be accepted by the Sidetree create operation. 
+func (v *Validator) IsValidOriginalDocument(payload []byte) error { + didDoc, err := document.DidDocumentFromBytes(payload) + if err != nil { + return err + } + + // Sidetree rule: The document must NOT have the id property + if didDoc.ID() != "" { + return errors.New("document must NOT have the id property") + } + + // Sidetree rule: must not have context + ctx := didDoc.Context() + if len(ctx) != 0 { + return errors.New("document must NOT have context") + } + + return nil +} diff --git a/method/sidetreelongform/sidetree-core/versions/1_0/docvalidator/didvalidator/validator_test.go b/method/sidetreelongform/sidetree-core/versions/1_0/docvalidator/didvalidator/validator_test.go new file mode 100644 index 0000000..b8e4c11 --- /dev/null +++ b/method/sidetreelongform/sidetree-core/versions/1_0/docvalidator/didvalidator/validator_test.go @@ -0,0 +1,91 @@ +/* +Copyright SecureKey Technologies Inc. All Rights Reserved. + +SPDX-License-Identifier: Apache-2.0 +*/ + +package didvalidator + +import ( + "io" + "os" + "testing" + + "github.com/stretchr/testify/require" +) + +func TestNew(t *testing.T) { + v := New() + require.NotNil(t, v) +} + +func TestIsValidOriginalDocument(t *testing.T) { + r := reader(t, "testdata/doc.json") + didDoc, err := io.ReadAll(r) + require.Nil(t, err) + + v := New() + + err = v.IsValidOriginalDocument(didDoc) + require.Nil(t, err) +} + +func TestIsValidOriginalDocument_ContextProvidedError(t *testing.T) { + v := New() + + err := v.IsValidOriginalDocument(docWithContext) + require.NotNil(t, err) + require.Contains(t, err.Error(), "document must NOT have context") +} + +func TestIsValidOriginalDocument_MustNotHaveIDError(t *testing.T) { + v := New() + + err := v.IsValidOriginalDocument(docWithID) + require.NotNil(t, err) + require.Contains(t, err.Error(), "document must NOT have the id property") +} + +func TestIsValidPayload(t *testing.T) { + v := New() + + err := v.IsValidPayload(validUpdate) + require.Nil(t, err) +} + +func TestIsValidPayloadError(t *testing.T) { + v := New() + + err := v.IsValidPayload(invalidUpdate) + require.NotNil(t, err) + require.Contains(t, err.Error(), "missing did unique suffix") +} + +func reader(t *testing.T, filename string) io.Reader { + f, err := os.Open(filename) + require.Nil(t, err) + + return f +} + +//nolint:gochecknoglobals +var ( + docWithContext = []byte(`{ + "@context": ["https://w3id.org/did/v1"], + "publicKey": [{ + "id": "key-1", + "type": "JsonWebKey2020", + "publicKeyJwk": { + "kty": "EC", + "crv": "P-256K", + "x": "PUymIqdtF_qxaAqPABSw-C-owT1KYYQbsMKFM-L9fJA", + "y": "nM84jDHCMOTGTh_ZdHq4dBBdo4Z5PkEOW9jA8z8IsGc" + } + }] +}`) + + docWithID = []byte(`{ "id" : "001", "name": "John Smith" }`) + + validUpdate = []byte(`{ "didSuffix": "abc" }`) + invalidUpdate = []byte(`{ "patch": "" }`) +) diff --git a/method/sidetreelongform/sidetree-core/versions/1_0/docvalidator/docvalidator/validator.go b/method/sidetreelongform/sidetree-core/versions/1_0/docvalidator/docvalidator/validator.go new file mode 100644 index 0000000..fa4b930 --- /dev/null +++ b/method/sidetreelongform/sidetree-core/versions/1_0/docvalidator/docvalidator/validator.go @@ -0,0 +1,58 @@ +/* +Copyright SecureKey Technologies Inc. All Rights Reserved. + +SPDX-License-Identifier: Apache-2.0 +*/ + +package docvalidator + +import ( + "errors" + + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/document" +) + +const didSuffix = "didSuffix" + +// Validator is responsible for validating document operations and Sidetree rules. 
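+//
+// A minimal usage sketch (createPayload and updatePayload are placeholders):
+//
+//	v := New()
+//	if err := v.IsValidOriginalDocument(createPayload); err != nil {
+//		// document is not acceptable for a create operation
+//	}
+//	if err := v.IsValidPayload(updatePayload); err != nil {
+//		// payload is not acceptable for an update operation
+//	}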
+type Validator struct {
+}
+
+// New creates a new document validator.
+func New() *Validator {
+	return &Validator{}
+}
+
+// IsValidPayload verifies that the given payload is a valid Sidetree specific payload
+// that can be accepted by the Sidetree update operations.
+func (v *Validator) IsValidPayload(payload []byte) error {
+	doc, err := document.FromBytes(payload)
+	if err != nil {
+		return err
+	}
+
+	uniqueSuffix := doc.GetStringValue(didSuffix)
+	if uniqueSuffix == "" {
+		return errors.New("missing unique suffix")
+	}
+
+	// checking for previous operation existence has been pushed to handler
+
+	return nil
+}
+
+// IsValidOriginalDocument verifies that the given payload is a valid Sidetree specific document that can be accepted by
+// the Sidetree create operation.
+func (v *Validator) IsValidOriginalDocument(payload []byte) error {
+	doc, err := document.FromBytes(payload)
+	if err != nil {
+		return err
+	}
+
+	// The document must NOT have the id property
+	if doc.ID() != "" {
+		return errors.New("document must NOT have the id property")
+	}
+
+	return nil
+}
diff --git a/method/sidetreelongform/sidetree-core/versions/1_0/docvalidator/docvalidator/validator_test.go b/method/sidetreelongform/sidetree-core/versions/1_0/docvalidator/docvalidator/validator_test.go
new file mode 100644
index 0000000..770b991
--- /dev/null
+++ b/method/sidetreelongform/sidetree-core/versions/1_0/docvalidator/docvalidator/validator_test.go
@@ -0,0 +1,68 @@
+/*
+Copyright SecureKey Technologies Inc. All Rights Reserved.
+
+SPDX-License-Identifier: Apache-2.0
+*/
+
+package docvalidator
+
+import (
+	"testing"
+
+	"github.com/stretchr/testify/require"
+)
+
+func TestNew(t *testing.T) {
+	v := New()
+	require.NotNil(t, v)
+}
+
+func TestIsValidOriginalDocument(t *testing.T) {
+	v := New()
+
+	err := v.IsValidOriginalDocument(validDoc)
+	require.Nil(t, err)
+}
+
+func TestValidatorIsValidOriginalDocumentError(t *testing.T) {
+	v := New()
+
+	err := v.IsValidOriginalDocument(invalidDoc)
+	require.NotNil(t, err)
+	require.Contains(t, err.Error(), "document must NOT have the id property")
+}
+
+func TestValidatorIsValidPayload(t *testing.T) {
+	v := New()
+
+	err := v.IsValidPayload(validUpdate)
+	require.NoError(t, err)
+}
+
+func TestInvalidPayloadError(t *testing.T) {
+	v := New()
+
+	// payload is invalid json
+	payload := []byte("[test : 123]")
+
+	err := v.IsValidOriginalDocument(payload)
+	require.Error(t, err)
+	require.Contains(t, err.Error(), "invalid character")
+}
+
+func TestValidatorIsValidPayloadError(t *testing.T) {
+	v := New()
+
+	err := v.IsValidPayload(invalidUpdate)
+	require.NotNil(t, err)
+	require.Contains(t, err.Error(), "missing unique suffix")
+}
+
+//nolint:gochecknoglobals
+var (
+	validDoc   = []byte(`{ "name": "John Smith" }`)
+	invalidDoc = []byte(`{ "id" : "001", "name": "John Smith" }`)
+
+	validUpdate   = []byte(`{ "didSuffix": "abc" }`)
+	invalidUpdate = []byte(`{ "patch": "" }`)
+)
diff --git a/method/sidetreelongform/sidetree-core/versions/1_0/model/operation.go b/method/sidetreelongform/sidetree-core/versions/1_0/model/operation.go
new file mode 100644
index 0000000..5c2642a
--- /dev/null
+++ b/method/sidetreelongform/sidetree-core/versions/1_0/model/operation.go
@@ -0,0 +1,45 @@
+/*
+Copyright SecureKey Technologies Inc. All Rights Reserved.
+
+SPDX-License-Identifier: Apache-2.0
+*/
+
+package model
+
+import (
+	"github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/api/operation"
+)
+
+// Operation is used for parsing an operation request.
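+// Which fields are populated depends on Type: create carries SuffixData and Delta;
+// update and recover carry UniqueSuffix, Delta, SignedData and RevealValue;
+// deactivate carries UniqueSuffix, SignedData and RevealValue
+// (see GetAnchoredOperation in util.go).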
+type Operation struct { + + // Type defines operation type + Type operation.Type + + // Namespace defines document namespace + Namespace string + + // ID is full ID for this document - namespace + unique suffix + ID string + + // UniqueSuffix is unique suffix + UniqueSuffix string + + // OperationRequest is the original operation request + OperationRequest []byte + + // SignedData is signed data for the operation (compact JWS) + SignedData string + + // RevealValue is multihash of JWK + RevealValue string + + // Delta is operation delta model + Delta *DeltaModel + + // SuffixDataModel is suffix data model + SuffixData *SuffixDataModel + + // AnchorOrigin is anchor origin + AnchorOrigin interface{} +} diff --git a/method/sidetreelongform/sidetree-core/versions/1_0/model/request.go b/method/sidetreelongform/sidetree-core/versions/1_0/model/request.go new file mode 100644 index 0000000..317c7a9 --- /dev/null +++ b/method/sidetreelongform/sidetree-core/versions/1_0/model/request.go @@ -0,0 +1,167 @@ +/* +Copyright SecureKey Technologies Inc. All Rights Reserved. + +SPDX-License-Identifier: Apache-2.0 +*/ + +package model + +import ( + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/api/operation" + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/jws" + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/patch" +) + +// CreateRequest is the struct for create payload JCS. +type CreateRequest struct { + // operation + // Required: true + Operation operation.Type `json:"type,omitempty"` + + // Suffix data object + // Required: true + SuffixData *SuffixDataModel `json:"suffixData,omitempty"` + + // Delta object + // Required: true + Delta *DeltaModel `json:"delta,omitempty"` +} + +// SuffixDataModel is part of create request. +type SuffixDataModel struct { + + // Hash of the delta object (required) + DeltaHash string `json:"deltaHash,omitempty"` + + // Commitment hash for the next recovery or deactivate operation (required) + RecoveryCommitment string `json:"recoveryCommitment,omitempty"` + + // AnchorOrigin signifies the system(s) that know the most recent anchor for this DID (optional) + AnchorOrigin interface{} `json:"anchorOrigin,omitempty"` + + // Type signifies the type of entity a DID represents (optional) + Type string `json:"type,omitempty"` +} + +// DeltaModel contains patch data (patches used for create, recover, update). +type DeltaModel struct { + + // Commitment hash for the next update operation + UpdateCommitment string `json:"updateCommitment,omitempty"` + + // Patches defines document patches + Patches []patch.Patch `json:"patches,omitempty"` +} + +// UpdateRequest is the struct for update request. +type UpdateRequest struct { + // Operation defines operation type + Operation operation.Type `json:"type"` + + // DidSuffix is the suffix of the DID + DidSuffix string `json:"didSuffix"` + + // RevealValue is the reveal value + RevealValue string `json:"revealValue"` + + // SignedData is compact JWS - signature information + SignedData string `json:"signedData"` + + // Delta is encoded delta object + Delta *DeltaModel `json:"delta"` +} + +// DeactivateRequest is the struct for deactivating document. 
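+// When canonicalized, a deactivate request has the following shape
+// (values are illustrative, taken from util_test.go):
+//
+//	{"didSuffix":"suffix","revealValue":"rv","signedData":"jws","type":"deactivate"}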
+type DeactivateRequest struct {
+	// Operation
+	// Required: true
+	Operation operation.Type `json:"type"`
+
+	// DidSuffix of the DID
+	// Required: true
+	DidSuffix string `json:"didSuffix"`
+
+	// RevealValue is the reveal value
+	RevealValue string `json:"revealValue"`
+
+	// Compact JWS - signature information
+	SignedData string `json:"signedData"`
+}
+
+// UpdateSignedDataModel defines signed data model for update.
+type UpdateSignedDataModel struct {
+	// UpdateKey is the current update key
+	UpdateKey *jws.JWK `json:"updateKey"`
+
+	// DeltaHash of the unsigned delta object
+	DeltaHash string `json:"deltaHash"`
+
+	// AnchorFrom defines earliest time for this operation.
+	AnchorFrom int64 `json:"anchorFrom,omitempty"`
+
+	// AnchorUntil defines expiry time for this operation.
+	AnchorUntil int64 `json:"anchorUntil,omitempty"`
+}
+
+// RecoverSignedDataModel defines signed data model for recovery.
+type RecoverSignedDataModel struct {
+
+	// DeltaHash of the unsigned delta object
+	DeltaHash string `json:"deltaHash"`
+
+	// RecoveryKey is the current recovery key
+	RecoveryKey *jws.JWK `json:"recoveryKey"`
+
+	// RecoveryCommitment is the commitment used for the next recovery/deactivate
+	RecoveryCommitment string `json:"recoveryCommitment"`
+
+	// AnchorOrigin signifies the system(s) that know the most recent anchor for this DID (optional)
+	AnchorOrigin interface{} `json:"anchorOrigin,omitempty"`
+
+	// AnchorFrom defines earliest time for this operation.
+	AnchorFrom int64 `json:"anchorFrom,omitempty"`
+
+	// AnchorUntil defines expiry time for this operation.
+	AnchorUntil int64 `json:"anchorUntil,omitempty"`
+}
+
+// DeactivateSignedDataModel defines data model for deactivate.
+type DeactivateSignedDataModel struct {
+
+	// DidSuffix is the suffix of the DID
+	// Required: true
+	DidSuffix string `json:"didSuffix"`
+
+	// RevealValue is the reveal value
+	RevealValue string `json:"revealValue"`
+
+	// RecoveryKey is the current recovery key
+	RecoveryKey *jws.JWK `json:"recoveryKey"`
+
+	// AnchorFrom defines earliest time for this operation.
+	AnchorFrom int64 `json:"anchorFrom,omitempty"`
+
+	// AnchorUntil defines expiry time for this operation.
+	AnchorUntil int64 `json:"anchorUntil,omitempty"`
+}
+
+// RecoverRequest is the struct for document recovery payload.
+type RecoverRequest struct {
+	// Operation
+	// Required: true
+	Operation operation.Type `json:"type"`
+
+	// DidSuffix is the suffix of the DID
+	// Required: true
+	DidSuffix string `json:"didSuffix"`
+
+	// RevealValue is the reveal value
+	RevealValue string `json:"revealValue"`
+
+	// Compact JWS - signature information
+	SignedData string `json:"signedData"`
+
+	// Delta object
+	// Required: true
+	Delta *DeltaModel `json:"delta"`
+}
diff --git a/method/sidetreelongform/sidetree-core/versions/1_0/model/util.go b/method/sidetreelongform/sidetree-core/versions/1_0/model/util.go
new file mode 100644
index 0000000..02b7975
--- /dev/null
+++ b/method/sidetreelongform/sidetree-core/versions/1_0/model/util.go
@@ -0,0 +1,89 @@
+/*
+Copyright SecureKey Technologies Inc. All Rights Reserved.
+
+SPDX-License-Identifier: Apache-2.0
+*/
+
+package model
+
+import (
+	"errors"
+	"fmt"
+
+	"github.com/trustbloc/did-go/doc/json/canonicalizer"
+	"github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/api/operation"
+	"github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/hashing"
+)
+
+// GetAnchoredOperation is a utility method for converting an operation model into an anchored operation.
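+//
+// For example (illustrative values; see util_test.go):
+//
+//	op := &Operation{
+//		Type:         operation.TypeDeactivate,
+//		UniqueSuffix: "suffix",
+//		RevealValue:  "rv",
+//		SignedData:   "jws",
+//	}
+//	anchored, err := GetAnchoredOperation(op)
+//	// anchored.OperationRequest holds the canonical JSON:
+//	// {"didSuffix":"suffix","revealValue":"rv","signedData":"jws","type":"deactivate"}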
+func GetAnchoredOperation(op *Operation) (*operation.AnchoredOperation, error) {
+	var request interface{}
+
+	switch op.Type {
+	case operation.TypeCreate:
+		request = CreateRequest{
+			Operation:  op.Type,
+			SuffixData: op.SuffixData,
+			Delta:      op.Delta,
+		}
+
+	case operation.TypeUpdate:
+		request = UpdateRequest{
+			Operation:   op.Type,
+			DidSuffix:   op.UniqueSuffix,
+			Delta:       op.Delta,
+			SignedData:  op.SignedData,
+			RevealValue: op.RevealValue,
+		}
+
+	case operation.TypeDeactivate:
+		request = DeactivateRequest{
+			Operation:   op.Type,
+			DidSuffix:   op.UniqueSuffix,
+			SignedData:  op.SignedData,
+			RevealValue: op.RevealValue,
+		}
+
+	case operation.TypeRecover:
+		request = RecoverRequest{
+			Operation:   op.Type,
+			DidSuffix:   op.UniqueSuffix,
+			Delta:       op.Delta,
+			SignedData:  op.SignedData,
+			RevealValue: op.RevealValue,
+		}
+
+	default:
+		return nil, fmt.Errorf("operation type %s not supported for anchored operation", op.Type)
+	}
+
+	operationBuffer, err := canonicalizer.MarshalCanonical(request)
+	if err != nil {
+		return nil, fmt.Errorf("failed to canonicalize anchored operation[%v]: %s", op, err.Error())
+	}
+
+	return &operation.AnchoredOperation{
+		Type:             op.Type,
+		UniqueSuffix:     op.UniqueSuffix,
+		OperationRequest: operationBuffer,
+		AnchorOrigin:     op.AnchorOrigin,
+	}, nil
+}
+
+// GetUniqueSuffix calculates the unique suffix from the suffix data using the first of the given multihash algorithms.
+func GetUniqueSuffix(model *SuffixDataModel, algs []uint) (string, error) {
+	if len(algs) == 0 {
+		return "", errors.New("failed to calculate unique suffix: algorithm not provided")
+	}
+
+	// Even though the protocol supports a list of multihashing algorithms, in this protocol version (v1)
+	// we can have only one multihashing algorithm. Later versions may have multiple values for backward
+	// compatibility. At that point (version 2) the spec will hopefully better define how to handle these
+	// scenarios: https://github.com/decentralized-identity/sidetree/issues/965
+	encodedComputedMultihash, err := hashing.CalculateModelMultihash(model, algs[0])
+	if err != nil {
+		return "", fmt.Errorf("failed to calculate unique suffix: %s", err.Error())
+	}
+
+	return encodedComputedMultihash, nil
+}
diff --git a/method/sidetreelongform/sidetree-core/versions/1_0/model/util_test.go b/method/sidetreelongform/sidetree-core/versions/1_0/model/util_test.go
new file mode 100644
index 0000000..1fe2480
--- /dev/null
+++ b/method/sidetreelongform/sidetree-core/versions/1_0/model/util_test.go
@@ -0,0 +1,144 @@
+/*
+Copyright SecureKey Technologies Inc. All Rights Reserved.
+ +SPDX-License-Identifier: Apache-2.0 +*/ + +package model + +import ( + "testing" + + "github.com/stretchr/testify/require" + + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/api/operation" +) + +const suffix = "suffix" + +func TestGetAnchoredOperation(t *testing.T) { + t.Run("success - create", func(t *testing.T) { + op := &Operation{ + Type: operation.TypeCreate, + UniqueSuffix: suffix, + SuffixData: &SuffixDataModel{ + RecoveryCommitment: "rc", + DeltaHash: "dh", + }, + Delta: &DeltaModel{ + UpdateCommitment: "uc", + }, + } + + //nolint:lll + opBuffer := `{"delta":{"updateCommitment":"uc"},"suffixData":{"deltaHash":"dh","recoveryCommitment":"rc"},"type":"create"}` + + anchored, err := GetAnchoredOperation(op) + require.NoError(t, err) + require.NotNil(t, anchored) + + require.Equal(t, op.Type, anchored.Type) + require.Equal(t, opBuffer, string(anchored.OperationRequest)) + require.Equal(t, suffix, anchored.UniqueSuffix) + }) + + t.Run("success - deactivate", func(t *testing.T) { + op := &Operation{ + Type: operation.TypeDeactivate, + UniqueSuffix: suffix, + RevealValue: "rv", + SignedData: "jws", + } + + opBuffer := `{"didSuffix":"suffix","revealValue":"rv","signedData":"jws","type":"deactivate"}` + + anchored, err := GetAnchoredOperation(op) + require.NoError(t, err) + require.NotNil(t, anchored) + + require.Equal(t, op.Type, anchored.Type) + require.Equal(t, opBuffer, string(anchored.OperationRequest)) + require.Equal(t, suffix, anchored.UniqueSuffix) + }) + + t.Run("success - recover", func(t *testing.T) { + op := &Operation{ + Type: operation.TypeRecover, + UniqueSuffix: suffix, + RevealValue: "rv", + SignedData: "jws", + Delta: &DeltaModel{ + UpdateCommitment: "uc", + }, + } + + //nolint:lll + opBuffer := `{"delta":{"updateCommitment":"uc"},"didSuffix":"suffix","revealValue":"rv","signedData":"jws","type":"recover"}` + + anchored, err := GetAnchoredOperation(op) + require.NoError(t, err) + require.NotNil(t, anchored) + require.Equal(t, op.Type, anchored.Type) + + require.Equal(t, opBuffer, string(anchored.OperationRequest)) + require.Equal(t, suffix, anchored.UniqueSuffix) + }) + + t.Run("success - update", func(t *testing.T) { + op := &Operation{ + Type: operation.TypeUpdate, + UniqueSuffix: suffix, + RevealValue: "rv", + SignedData: "jws", + Delta: &DeltaModel{ + UpdateCommitment: "uc", + }, + } + + //nolint:lll + opBuffer := `{"delta":{"updateCommitment":"uc"},"didSuffix":"suffix","revealValue":"rv","signedData":"jws","type":"update"}` + anchored, err := GetAnchoredOperation(op) + require.NoError(t, err) + require.NotNil(t, anchored) + require.Equal(t, anchored.Type, op.Type) + + require.Equal(t, opBuffer, string(anchored.OperationRequest)) + require.Equal(t, suffix, anchored.UniqueSuffix) + }) + + t.Run("error - type not supported", func(t *testing.T) { + op := &Operation{Type: "other"} + + anchored, err := GetAnchoredOperation(op) + require.Error(t, err) + require.Nil(t, anchored) + require.Contains(t, err.Error(), "operation type other not supported for anchored operation") + }) +} + +func TestGetUniqueSuffix(t *testing.T) { + s := &SuffixDataModel{ + RecoveryCommitment: "rc", + DeltaHash: "dh", + } + + t.Run("success", func(t *testing.T) { + uniqueSuffix, err := GetUniqueSuffix(s, []uint{18}) + require.NoError(t, err) + require.NotEmpty(t, uniqueSuffix) + }) + + t.Run("error - algorithm not provided", func(t *testing.T) { + uniqueSuffix, err := GetUniqueSuffix(s, []uint{}) + require.Error(t, err) + require.Empty(t, uniqueSuffix) + require.Contains(t, 
err.Error(), "failed to calculate unique suffix: algorithm not provided") + }) + + t.Run("error - algorithm not supported", func(t *testing.T) { + uniqueSuffix, err := GetUniqueSuffix(s, []uint{55}) + require.Error(t, err) + require.Empty(t, uniqueSuffix) + require.Contains(t, err.Error(), "failed to calculate unique suffix: algorithm not supported") + }) +} diff --git a/method/sidetreelongform/sidetree-core/versions/1_0/operationapplier/operationapplier.go b/method/sidetreelongform/sidetree-core/versions/1_0/operationapplier/operationapplier.go new file mode 100644 index 0000000..5fb6a46 --- /dev/null +++ b/method/sidetreelongform/sidetree-core/versions/1_0/operationapplier/operationapplier.go @@ -0,0 +1,409 @@ +/* +Copyright SecureKey Technologies Inc. All Rights Reserved. + +SPDX-License-Identifier: Apache-2.0 +*/ + +package operationapplier + +import ( + "fmt" + + "github.com/pkg/errors" + + "github.com/trustbloc/logutil-go/pkg/log" + + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/api/operation" + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/api/protocol" + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/document" + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/hashing" + internal "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/internal/jws" + logfields "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/internal/log" + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/versions/1_0/model" +) + +//go:generate counterfeiter -o operationparser.gen.go --fake-name MockOperationParser . OperationParser + +var logger = log.New("sidetree-core-applier") + +// Applier is an operation applier. +type Applier struct { + protocol.Protocol + OperationParser + protocol.DocumentComposer +} + +// OperationParser defines the functions for parsing operations. +type OperationParser interface { + ValidateSuffixData(suffixData *model.SuffixDataModel) error + ValidateDelta(delta *model.DeltaModel) error + ParseCreateOperation(request []byte, anchor bool) (*model.Operation, error) + ParseUpdateOperation(request []byte, anchor bool) (*model.Operation, error) + ParseRecoverOperation(request []byte, anchor bool) (*model.Operation, error) + ParseDeactivateOperation(request []byte, anchor bool) (*model.Operation, error) + ParseSignedDataForUpdate(compactJWS string) (*model.UpdateSignedDataModel, error) + ParseSignedDataForDeactivate(compactJWS string) (*model.DeactivateSignedDataModel, error) + ParseSignedDataForRecover(compactJWS string) (*model.RecoverSignedDataModel, error) +} + +// New returns a new operation applier for the given protocol. +// +//nolint:gocritic +func New(p protocol.Protocol, parser OperationParser, dc protocol.DocumentComposer) *Applier { + return &Applier{ + Protocol: p, + OperationParser: parser, + DocumentComposer: dc, + } +} + +// Apply applies the given anchored operation. 
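+// The first operation applied for a suffix must be a create (rm.Doc == nil);
+// update, recover and deactivate require an existing document. A resolution
+// sketch over anchored operations (anchoredOps is a placeholder):
+//
+//	rm := &protocol.ResolutionModel{}
+//	for _, anchoredOp := range anchoredOps {
+//		var err error
+//		if rm, err = applier.Apply(anchoredOp, rm); err != nil {
+//			// handle invalid operation
+//		}
+//	}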
+func (s *Applier) Apply( + op *operation.AnchoredOperation, rm *protocol.ResolutionModel) (*protocol.ResolutionModel, error) { + switch op.Type { + case operation.TypeCreate: + return s.applyCreateOperation(op, rm) + case operation.TypeUpdate: + return s.applyUpdateOperation(op, rm) + case operation.TypeDeactivate: + return s.applyDeactivateOperation(op, rm) + case operation.TypeRecover: + return s.applyRecoverOperation(op, rm) + default: + return nil, fmt.Errorf("operation type not supported for process operation") + } +} + +//nolint:funlen +func (s *Applier) applyCreateOperation(anchoredOp *operation.AnchoredOperation, + rm *protocol.ResolutionModel) (*protocol.ResolutionModel, error) { + logger.Debug("Applying create operation", logfields.WithOperation(anchoredOp)) + + if rm.Doc != nil { + return nil, errors.New("create has to be the first operation") + } + + op, err := s.OperationParser.ParseCreateOperation(anchoredOp.OperationRequest, true) + if err != nil { + return nil, fmt.Errorf("failed to parse create operation in batch mode: %s", err.Error()) + } + + // from this point any error should advance recovery commitment + result := &protocol.ResolutionModel{ + Doc: make(document.Document), + CreatedTime: anchoredOp.TransactionTime, + LastOperationTransactionTime: anchoredOp.TransactionTime, + LastOperationTransactionNumber: anchoredOp.TransactionNumber, + LastOperationProtocolVersion: anchoredOp.ProtocolVersion, + VersionID: anchoredOp.CanonicalReference, + CanonicalReference: anchoredOp.CanonicalReference, + EquivalentReferences: anchoredOp.EquivalentReferences, + RecoveryCommitment: op.SuffixData.RecoveryCommitment, + AnchorOrigin: op.SuffixData.AnchorOrigin, + PublishedOperations: rm.PublishedOperations, + UnpublishedOperations: rm.UnpublishedOperations, + } + + // verify actual delta hash matches expected delta hash + err = hashing.IsValidModelMultihash(op.Delta, op.SuffixData.DeltaHash) + if err != nil { + logger.Info("Delta doesn't match delta hash; set update commitment to nil and advance recovery commitment", + log.WithError(err), + logfields.WithSuffix(anchoredOp.UniqueSuffix), + logfields.WithOperationType(string(anchoredOp.Type)), + logfields.WithTransactionTime(anchoredOp.TransactionTime), + logfields.WithTransactionNumber(anchoredOp.TransactionNumber)) + + return result, nil + } + + err = s.OperationParser.ValidateDelta(op.Delta) + if err != nil { + logger.Info("Parse delta failed; set update commitment to nil and advance recovery commitment", + log.WithError(err), + logfields.WithSuffix(op.UniqueSuffix), + logfields.WithOperationType(string(op.Type)), + logfields.WithTransactionTime(anchoredOp.TransactionTime), + logfields.WithTransactionNumber(anchoredOp.TransactionNumber)) + + return result, nil + } + + result.UpdateCommitment = op.Delta.UpdateCommitment + + doc, err := s.ApplyPatches(make(document.Document), op.Delta.Patches) + if err != nil { + logger.Info("Apply patches failed; advance commitments", + log.WithError(err), logfields.WithSuffix(anchoredOp.UniqueSuffix), + logfields.WithOperationType(string(anchoredOp.Type)), + logfields.WithTransactionTime(anchoredOp.TransactionTime), + logfields.WithTransactionNumber(anchoredOp.TransactionNumber)) + + return result, nil + } + + result.Doc = doc + + return result, nil +} + +//nolint:funlen +func (s *Applier) applyUpdateOperation(anchoredOp *operation.AnchoredOperation, + rm *protocol.ResolutionModel) (*protocol.ResolutionModel, error) { + logger.Debug("Applying update operation", logfields.WithOperation(anchoredOp)) + 
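+	// Flow: parse the request, verify the signed delta hash and the JWS, and
+	// validate the delta. Once the delta is known to be valid, the update
+	// commitment is advanced even if the anchoring time check or patch
+	// application below fails.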
+	if rm.Doc == nil {
+		return nil, errors.New("update cannot be first operation")
+	}
+
+	op, err := s.OperationParser.ParseUpdateOperation(anchoredOp.OperationRequest, true)
+	if err != nil {
+		return nil, fmt.Errorf("failed to parse update operation in batch mode: %s", err.Error())
+	}
+
+	signedDataModel, err := s.ParseSignedDataForUpdate(op.SignedData)
+	if err != nil {
+		return nil, fmt.Errorf("failed to unmarshal signed data model while applying update: %s", err.Error())
+	}
+
+	// verify the delta against the signed delta hash
+	err = hashing.IsValidModelMultihash(op.Delta, signedDataModel.DeltaHash)
+	if err != nil {
+		return nil, fmt.Errorf("update delta doesn't match delta hash: %s", err.Error())
+	}
+
+	// verify signature
+	_, err = internal.VerifyJWS(op.SignedData, signedDataModel.UpdateKey)
+	if err != nil {
+		return nil, fmt.Errorf("failed to check signature: %s", err.Error())
+	}
+
+	err = s.OperationParser.ValidateDelta(op.Delta)
+	if err != nil {
+		return nil, fmt.Errorf("failed to validate delta: %s", err.Error())
+	}
+
+	// delta is valid so advance update commitment
+	result := &protocol.ResolutionModel{
+		Doc:                            rm.Doc,
+		CreatedTime:                    rm.CreatedTime,
+		UpdatedTime:                    anchoredOp.TransactionTime,
+		LastOperationTransactionTime:   anchoredOp.TransactionTime,
+		LastOperationTransactionNumber: anchoredOp.TransactionNumber,
+		LastOperationProtocolVersion:   anchoredOp.ProtocolVersion,
+		VersionID:                      anchoredOp.CanonicalReference,
+		CanonicalReference:             rm.CanonicalReference,
+		EquivalentReferences:           rm.EquivalentReferences,
+		UpdateCommitment:               op.Delta.UpdateCommitment,
+		RecoveryCommitment:             rm.RecoveryCommitment,
+		AnchorOrigin:                   rm.AnchorOrigin,
+		PublishedOperations:            rm.PublishedOperations,
+		UnpublishedOperations:          rm.UnpublishedOperations,
+	}
+
+	// verify anchor from and until time against anchoring time
+	err = s.verifyAnchoringTimeRange(
+		signedDataModel.AnchorFrom, signedDataModel.AnchorUntil, anchoredOp.TransactionTime)
+	if err != nil {
+		logger.Info("Invalid anchoring time range; advance commitments",
+			logfields.WithSuffix(op.UniqueSuffix),
+			logfields.WithOperationType(string(op.Type)),
+			logfields.WithTransactionTime(anchoredOp.TransactionTime),
+			logfields.WithTransactionNumber(anchoredOp.TransactionNumber),
+			log.WithError(err))
+
+		return result, nil
+	}
+
+	doc, err := s.ApplyPatches(rm.Doc, op.Delta.Patches)
+	if err != nil {
+		logger.Info("Apply patches failed; advance update commitment",
+			logfields.WithSuffixes(op.UniqueSuffix), logfields.WithOperationType(string(op.Type)),
+			logfields.WithTransactionTime(anchoredOp.TransactionTime),
+			logfields.WithTransactionNumber(anchoredOp.TransactionNumber),
+			log.WithError(err))
+
+		return result, nil
+	}
+
+	// applying patches succeeded so update document
+	result.Doc = doc
+
+	return result, nil
+}
+
+func (s *Applier) applyDeactivateOperation(anchoredOp *operation.AnchoredOperation,
+	rm *protocol.ResolutionModel) (*protocol.ResolutionModel, error) { //nolint:funlen
+	logger.Debug("Applying deactivate operation", logfields.WithOperation(anchoredOp))
+
+	if rm.Doc == nil {
+		return nil, errors.New("deactivate can only be applied to an existing document")
+	}
+
+	op, err := s.OperationParser.ParseDeactivateOperation(anchoredOp.OperationRequest, true)
+	if err != nil {
+		return nil, fmt.Errorf("failed to parse deactivate operation in batch mode: %s", err.Error())
+	}
+
+	signedDataModel, err := s.ParseSignedDataForDeactivate(op.SignedData)
+	if err != nil {
+		return nil, fmt.Errorf("failed to parse signed data model while
applying deactivate: %s", err.Error()) + } + + // verify signed did suffix against actual did suffix + if op.UniqueSuffix != signedDataModel.DidSuffix { + return nil, errors.New("did suffix doesn't match signed value") + } + + // verify signature + _, err = internal.VerifyJWS(op.SignedData, signedDataModel.RecoveryKey) + if err != nil { + return nil, fmt.Errorf("failed to check signature: %s", err.Error()) + } + + // verify anchor from and until time against anchoring time + err = s.verifyAnchoringTimeRange(signedDataModel.AnchorFrom, signedDataModel.AnchorUntil, anchoredOp.TransactionTime) + if err != nil { + return nil, fmt.Errorf("invalid anchoring time range: %s", err.Error()) + } + + return &protocol.ResolutionModel{ + Doc: make(document.Document), + CreatedTime: rm.CreatedTime, + UpdatedTime: anchoredOp.TransactionTime, + LastOperationTransactionTime: anchoredOp.TransactionTime, + LastOperationTransactionNumber: anchoredOp.TransactionNumber, + LastOperationProtocolVersion: anchoredOp.ProtocolVersion, + VersionID: anchoredOp.CanonicalReference, + CanonicalReference: rm.CanonicalReference, + EquivalentReferences: rm.EquivalentReferences, + UpdateCommitment: "", + RecoveryCommitment: "", + Deactivated: true, + AnchorOrigin: rm.AnchorOrigin, + PublishedOperations: rm.PublishedOperations, + UnpublishedOperations: rm.UnpublishedOperations, + }, nil +} + +//nolint:funlen +func (s *Applier) applyRecoverOperation(anchoredOp *operation.AnchoredOperation, + rm *protocol.ResolutionModel) (*protocol.ResolutionModel, error) { + logger.Debug("Applying recover operation", logfields.WithOperation(anchoredOp)) + + if rm.Doc == nil { + return nil, errors.New("recover can only be applied to an existing document") + } + + op, err := s.OperationParser.ParseRecoverOperation(anchoredOp.OperationRequest, true) + if err != nil { + return nil, fmt.Errorf("failed to parse recover operation in batch mode: %s", err.Error()) + } + + signedDataModel, err := s.ParseSignedDataForRecover(op.SignedData) + if err != nil { + return nil, fmt.Errorf("failed to parse signed data model while applying recover: %s", err.Error()) + } + + // verify signature + _, err = internal.VerifyJWS(op.SignedData, signedDataModel.RecoveryKey) + if err != nil { + return nil, fmt.Errorf("failed to check signature: %s", err.Error()) + } + + // from this point any error should advance recovery commitment + result := &protocol.ResolutionModel{ + Doc: make(document.Document), + CreatedTime: rm.CreatedTime, + UpdatedTime: anchoredOp.TransactionTime, + LastOperationTransactionTime: anchoredOp.TransactionTime, + LastOperationTransactionNumber: anchoredOp.TransactionNumber, + LastOperationProtocolVersion: anchoredOp.ProtocolVersion, + VersionID: anchoredOp.CanonicalReference, + CanonicalReference: anchoredOp.CanonicalReference, + EquivalentReferences: anchoredOp.EquivalentReferences, + RecoveryCommitment: signedDataModel.RecoveryCommitment, + AnchorOrigin: signedDataModel.AnchorOrigin, + PublishedOperations: rm.PublishedOperations, + UnpublishedOperations: rm.UnpublishedOperations, + } + + // verify the delta against the signed delta hash + err = hashing.IsValidModelMultihash(op.Delta, signedDataModel.DeltaHash) + if err != nil { + logger.Info( + "Recover delta doesn't match delta hash; set update commitment to nil and advance recovery commitment", + logfields.WithSuffixes(op.UniqueSuffix), logfields.WithOperationType(string(op.Type)), + logfields.WithTransactionTime(anchoredOp.TransactionTime), + 
logfields.WithTransactionNumber(anchoredOp.TransactionNumber),
+			log.WithError(err))
+
+		return result, nil
+	}
+
+	err = s.OperationParser.ValidateDelta(op.Delta)
+	if err != nil {
+		logger.Info("Parse delta failed; set update commitment to nil and advance recovery commitment",
+			logfields.WithSuffixes(op.UniqueSuffix), logfields.WithOperationType(string(op.Type)),
+			logfields.WithTransactionTime(anchoredOp.TransactionTime),
+			logfields.WithTransactionNumber(anchoredOp.TransactionNumber),
+			log.WithError(err))
+
+		return result, nil
+	}
+
+	result.UpdateCommitment = op.Delta.UpdateCommitment
+
+	// verify anchor from and until time against anchoring time
+	err = s.verifyAnchoringTimeRange(
+		signedDataModel.AnchorFrom, signedDataModel.AnchorUntil, anchoredOp.TransactionTime)
+	if err != nil {
+		logger.Info("Invalid anchoring time range; advance commitments",
+			logfields.WithSuffixes(op.UniqueSuffix), logfields.WithOperationType(string(op.Type)),
+			logfields.WithTransactionTime(anchoredOp.TransactionTime),
+			logfields.WithTransactionNumber(anchoredOp.TransactionNumber),
+			log.WithError(err))
+
+		return result, nil
+	}
+
+	doc, err := s.ApplyPatches(make(document.Document), op.Delta.Patches)
+	if err != nil {
+		logger.Info("Apply patches failed; advance commitments",
+			logfields.WithSuffixes(op.UniqueSuffix), logfields.WithOperationType(string(op.Type)),
+			logfields.WithTransactionTime(anchoredOp.TransactionTime),
+			logfields.WithTransactionNumber(anchoredOp.TransactionNumber),
+			log.WithError(err))
+
+		return result, nil
+	}
+
+	result.Doc = doc
+
+	return result, nil
+}
+
+func (s *Applier) verifyAnchoringTimeRange(from, until int64, anchor uint64) error {
+	if from == 0 && until == 0 {
+		// from and until are not specified - nothing to check
+		return nil
+	}
+
+	if from > int64(anchor) {
+		return fmt.Errorf("anchor from time is greater than anchoring time")
+	}
+
+	if s.getAnchorUntil(from, until) < int64(anchor) {
+		return fmt.Errorf("anchor until time is less than anchoring time")
+	}
+
+	return nil
+}
+
+func (s *Applier) getAnchorUntil(from, until int64) int64 {
+	if from != 0 && until == 0 {
+		// when only anchorFrom is specified, the protocol's MaxOperationTimeDelta
+		// (in seconds) bounds how long the operation remains anchorable
+		return from + int64(s.MaxOperationTimeDelta)
+	}
+
+	return until
+}
diff --git a/method/sidetreelongform/sidetree-core/versions/1_0/operationapplier/operationapplier_test.go b/method/sidetreelongform/sidetree-core/versions/1_0/operationapplier/operationapplier_test.go
new file mode 100644
index 0000000..d7e1c55
--- /dev/null
+++ b/method/sidetreelongform/sidetree-core/versions/1_0/operationapplier/operationapplier_test.go
@@ -0,0 +1,1423 @@
+/*
+Copyright SecureKey Technologies Inc. All Rights Reserved.
+ +SPDX-License-Identifier: Apache-2.0 +*/ + +package operationapplier + +import ( + "crypto/ecdsa" + "crypto/elliptic" + "crypto/rand" + "encoding/json" + "errors" + "strconv" + "testing" + "time" + + "github.com/stretchr/testify/require" + + "github.com/trustbloc/did-go/doc/json/canonicalizer" + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/api/operation" + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/api/protocol" + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/commitment" + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/document" + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/hashing" + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/internal/signutil" + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/mocks" + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/patch" + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/util/ecsigner" + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/util/pubkey" + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/versions/1_0/client" + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/versions/1_0/doccomposer" + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/versions/1_0/model" + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/versions/1_0/operationparser" +) + +const ( + sha2_256 = 18 + dummyUniqueSuffix = "dummy" + + updateKeyID = "update-key" +) + +//nolint:gochecknoglobals +var ( + p = protocol.Protocol{ + GenesisTime: 0, + MultihashAlgorithms: []uint{sha2_256}, + MaxOperationCount: 2, + MaxOperationSize: 2000, + MaxOperationHashLength: 100, + MaxDeltaSize: 1000, + MaxCasURILength: 100, + CompressionAlgorithm: "GZIP", + MaxChunkFileSize: 1024, + MaxProvisionalIndexFileSize: 1024, + MaxCoreIndexFileSize: 1024, + MaxProofFileSize: 1024, + SignatureAlgorithms: []string{"EdDSA", "ES256"}, + KeyAlgorithms: []string{"Ed25519", "P-256"}, + Patches: []string{"add-public-keys", "remove-public-keys", "add-services", "remove-services", "ietf-json-patch"}, //nolint:lll + MaxOperationTimeDelta: 600, + NonceSize: 16, + MaxMemoryDecompressionFactor: 3, + } + + parser = operationparser.New(p) + + dc = doccomposer.New() +) + +func TestApplier_Apply(t *testing.T) { + recoveryKey, err := ecdsa.GenerateKey(elliptic.P256(), rand.Reader) + require.NoError(t, err) + + updateKey, err := ecdsa.GenerateKey(elliptic.P256(), rand.Reader) + require.NoError(t, err) + + t.Run("update is first operation error", func(t *testing.T) { + applier := New(p, parser, dc) + + const uniqueSuffix = "uniqueSuffix" + updateOp, _, err := getAnchoredUpdateOperation(updateKey, uniqueSuffix, 1) + require.NoError(t, err) + + doc, err := applier.Apply(updateOp, &protocol.ResolutionModel{}) + require.Error(t, err) + require.Nil(t, doc) + require.Equal(t, "update cannot be first operation", err.Error()) + }) + + t.Run("create is second operation error", func(t *testing.T) { + applier := New(p, parser, &mockDocComposer{}) + + createOp, err := getAnchoredCreateOperation(recoveryKey, updateKey) + require.NoError(t, err) + + doc, err := applier.Apply(createOp, &protocol.ResolutionModel{ + Doc: make(document.Document), + }) + require.Error(t, err) + require.Nil(t, doc) + require.Equal(t, "create has to be the first operation", err.Error()) + }) + + t.Run("apply recover to non existing document error", func(t *testing.T) { + applier := New(p, 
parser, dc) + + createOp, err := getAnchoredCreateOperation(recoveryKey, updateKey) + require.NoError(t, err) + + recoverOp, _, err := getAnchoredRecoverOperation( + recoveryKey, updateKey, createOp.UniqueSuffix, 2) + require.NoError(t, err) + + doc, err := applier.Apply(recoverOp, &protocol.ResolutionModel{}) + require.Error(t, err) + require.Contains(t, err.Error(), "recover can only be applied to an existing document") + require.Nil(t, doc) + }) + + t.Run("invalid operation type error", func(t *testing.T) { + applier := New(p, parser, dc) + + doc, err := applier.Apply( + &operation.AnchoredOperation{Type: "invalid"}, &protocol.ResolutionModel{Doc: make(document.Document)}) + require.Error(t, err) + require.Equal(t, "operation type not supported for process operation", err.Error()) + require.Nil(t, doc) + }) + + t.Run("create delta hash doesn't match delta error", func(t *testing.T) { + store := mocks.NewMockOperationStore(nil) + + createOp, err := getCreateOperation(recoveryKey, updateKey) + require.NoError(t, err) + + delta, err := getDeltaModel(validDoc, "different") + require.NoError(t, err) + + createOp.Delta = delta + + anchoredOp := getAnchoredOperation(createOp) + err = store.Put(anchoredOp) + require.Nil(t, err) + + applier := New(p, parser, dc) + rm, err := applier.Apply(anchoredOp, &protocol.ResolutionModel{}) + require.NoError(t, err) + require.Equal(t, make(document.Document), rm.Doc) + require.NotEmpty(t, rm.RecoveryCommitment) + require.Empty(t, rm.UpdateCommitment) + }) + + t.Run("error - failed to parse create operation", func(t *testing.T) { + store := mocks.NewMockOperationStore(nil) + + createOp, err := getCreateOperation(recoveryKey, updateKey) + require.NoError(t, err) + + createOp.SuffixData.RecoveryCommitment = "" + + anchoredOp := getAnchoredOperation(createOp) + err = store.Put(anchoredOp) + require.Nil(t, err) + + applier := New(p, parser, dc) + rm, err := applier.Apply(anchoredOp, &protocol.ResolutionModel{}) + require.Error(t, err) + require.Nil(t, rm) + require.Contains(t, err.Error(), "failed to parse create operation in batch mode") + }) + + t.Run("error - apply patches (document composer) error", func(t *testing.T) { + applier := New(p, parser, &mockDocComposer{Err: errors.New("document composer error")}) + + createOp, err := getAnchoredCreateOperation(recoveryKey, updateKey) + require.NoError(t, err) + + rm, err := applier.Apply(createOp, &protocol.ResolutionModel{}) + require.NoError(t, err) + require.Equal(t, make(document.Document), rm.Doc) + require.NotEmpty(t, rm.RecoveryCommitment) + require.NotEmpty(t, rm.UpdateCommitment) + }) +} + +func TestUpdateDocument(t *testing.T) { + recoveryKey, e := ecdsa.GenerateKey(elliptic.P256(), rand.Reader) + require.NoError(t, e) + + updateKey, e := ecdsa.GenerateKey(elliptic.P256(), rand.Reader) + require.NoError(t, e) + + createOp, err := getAnchoredCreateOperation(recoveryKey, updateKey) + require.NoError(t, err) + + uniqueSuffix := createOp.UniqueSuffix + + t.Run("success", func(t *testing.T) { + applier := New(p, parser, dc) + + rm, err := applier.Apply(createOp, &protocol.ResolutionModel{}) + require.NoError(t, err) + + createdTime := rm.CreatedTime + + require.NotZero(t, createdTime) + require.Zero(t, rm.UpdatedTime) + + updateOp, nextUpdateKey, err := getAnchoredUpdateOperation(updateKey, uniqueSuffix, 1) + require.Nil(t, err) + + result, err := applier.Apply(updateOp, rm) + require.Nil(t, err) + + // check if service type value is updated (done via json patch) + didDoc := 
document.DidDocumentFromJSONLDObject(result.Doc) + require.Equal(t, "special1", didDoc["test"]) + + // test consecutive update + updateOp, _, err = getAnchoredUpdateOperation(nextUpdateKey, uniqueSuffix, 2) + require.Nil(t, err) + + result, err = applier.Apply(updateOp, result) + require.Nil(t, err) + + require.Equal(t, createdTime, result.CreatedTime) + + // check if service type value is updated again (done via json patch) + didDoc = document.DidDocumentFromJSONLDObject(result.Doc) + require.Equal(t, "special2", didDoc["test"]) + }) + + t.Run("error - operation with reused next commitment", func(t *testing.T) { + applier := New(p, parser, dc) + + rm, err := applier.Apply(createOp, &protocol.ResolutionModel{}) + require.NoError(t, err) + + // scenario: update 1 followed by update 2 followed by update 3 with reused commitment from 1 + + updateOp, nextUpdateKey, err := getUpdateOperation(updateKey, uniqueSuffix, 1) + require.Nil(t, err) + + delta1 := updateOp.Delta + + rm, err = applier.Apply(getAnchoredOperation(updateOp), rm) + require.Nil(t, err) + + // check if service type value is updated (done via json patch) + didDoc := document.DidDocumentFromJSONLDObject(rm.Doc) + require.Equal(t, "special1", didDoc["test"]) + + // test consecutive update + updateOp, nextUpdateKey, err = getUpdateOperation(nextUpdateKey, uniqueSuffix, 2) + require.Nil(t, err) + + rm, err = applier.Apply(getAnchoredOperation(updateOp), rm) + require.Nil(t, err) + + // service type value is updated since operation is valid + didDoc = document.DidDocumentFromJSONLDObject(rm.Doc) + require.Equal(t, "special2", didDoc["test"]) + + // two successful update operations - next update with reused commitment from op 1 + updateOp, _, err = getUpdateOperation(nextUpdateKey, uniqueSuffix, 1) + require.Nil(t, err) + + delta3 := updateOp.Delta + delta3.UpdateCommitment = delta1.UpdateCommitment + updateOp.Delta = delta3 + + _, err = applier.Apply(getAnchoredOperation(updateOp), rm) + require.EqualError(t, err, + "update delta doesn't match delta hash: supplied hash doesn't match original content") + }) + + t.Run("missing signed data error", func(t *testing.T) { + applier := New(p, parser, dc) + + rm, err := applier.Apply(createOp, &protocol.ResolutionModel{}) + require.NoError(t, err) + + updateOp, _, err := getUpdateOperation(updateKey, uniqueSuffix, 1) + require.NoError(t, err) + + updateOp.SignedData = "" + + rm, err = applier.Apply(getAnchoredOperation(updateOp), rm) + require.Error(t, err) + require.Nil(t, rm) + require.Contains(t, err.Error(), "missing signed data") + }) + + t.Run("unmarshal signed data model error", func(t *testing.T) { + applier := New(p, parser, dc) + + rm, err := applier.Apply(createOp, &protocol.ResolutionModel{}) + require.NoError(t, err) + + updateOp, _, err := getUpdateOperation(updateKey, uniqueSuffix, 1) + require.NoError(t, err) + + signer := ecsigner.New(updateKey, "ES256", "update-kid") + + compactJWS, err := signutil.SignPayload([]byte("payload"), signer) + require.NoError(t, err) + + updateOp.SignedData = compactJWS + + rm, err = applier.Apply(getAnchoredOperation(updateOp), rm) + require.Error(t, err) + require.Nil(t, rm) + require.Contains(t, err.Error(), + "failed to parse update operation in batch mode: failed to unmarshal signed data model for update") + }) + + t.Run("invalid signature error", func(t *testing.T) { + applier := New(p, parser, dc) + + rm, err := applier.Apply(createOp, &protocol.ResolutionModel{}) + require.NoError(t, err) + + // sign update operation with different key 
(than one used in create) + differentKey, err := ecdsa.GenerateKey(elliptic.P256(), rand.Reader) + require.NoError(t, err) + + s := ecsigner.New(differentKey, "ES256", updateKeyID) + updateOp, _, err := getUpdateOperationWithSigner(s, updateKey, uniqueSuffix, 1) + require.NoError(t, err) + + anchoredOp := getAnchoredOperation(updateOp) + + rm, err = applier.Apply(anchoredOp, rm) + require.Error(t, err) + require.Nil(t, rm) + require.Contains(t, err.Error(), "ecdsa: invalid signature") + }) + + t.Run("delta hash doesn't match delta error", func(t *testing.T) { + applier := New(p, parser, dc) + + rm, err := applier.Apply(createOp, &protocol.ResolutionModel{}) + require.NoError(t, err) + + updateOp, _, err := getUpdateOperation(updateKey, uniqueSuffix, 1) + require.NoError(t, err) + + updateOp.Delta = &model.DeltaModel{UpdateCommitment: "different"} + + rm, err = applier.Apply(getAnchoredOperation(updateOp), rm) + require.Error(t, err) + require.Nil(t, rm) + require.Contains(t, err.Error(), "update delta doesn't match delta hash") + }) + + t.Run("invalid anchoring range - anchor until time is less then anchoring time", func(t *testing.T) { + applier := New(p, parser, dc) + + createResult, err := applier.Apply(createOp, &protocol.ResolutionModel{}) + require.NoError(t, err) + + p := map[string]interface{}{ + "op": "replace", + "path": "/test", + "value": "value", + } + + patchBytes, err := canonicalizer.MarshalCanonical([]map[string]interface{}{p}) + require.NoError(t, err) + + jsonPatch, err := patch.NewJSONPatch(string(patchBytes)) + require.NoError(t, err) + + _, updateCommitment, err := generateKeyAndCommitment() + require.NoError(t, err) + + delta := &model.DeltaModel{ + UpdateCommitment: updateCommitment, + Patches: []patch.Patch{jsonPatch}, + } + + deltaHash, err := hashing.CalculateModelMultihash(delta, sha2_256) + require.NoError(t, err) + + updatePubKey, err := pubkey.GetPublicKeyJWK(&updateKey.PublicKey) + require.NoError(t, err) + + now := time.Now().Unix() + + signedData := &model.UpdateSignedDataModel{ + DeltaHash: deltaHash, + UpdateKey: updatePubKey, + AnchorUntil: now - 5*60, + } + + signer := ecsigner.New(updateKey, "ES256", "") + jws, err := signutil.SignModel(signedData, signer) + require.NoError(t, err) + + rv, err := commitment.GetRevealValue(updatePubKey, sha2_256) + require.NoError(t, err) + + updateOp := &model.Operation{ + Namespace: mocks.DefaultNS, + ID: "did:sidetree:" + uniqueSuffix, + UniqueSuffix: uniqueSuffix, + Delta: delta, + Type: operation.TypeUpdate, + SignedData: jws, + RevealValue: rv, + } + + anchoredOp := getAnchoredOperation(updateOp) + anchoredOp.TransactionTime = uint64(now) + + updateResult, err := applier.Apply(anchoredOp, createResult) + require.NoError(t, err) + require.NotNil(t, updateResult) + require.Equal(t, createResult.Doc, updateResult.Doc) + require.NotEqual(t, updateResult.UpdateCommitment, createResult.UpdateCommitment) + }) + + t.Run("error - document composer error", func(t *testing.T) { + applier := New(p, parser, dc) + + createOp, err := getAnchoredCreateOperation(recoveryKey, updateKey) + require.NoError(t, err) + + createResult, err := applier.Apply(createOp, &protocol.ResolutionModel{}) + require.NoError(t, err) + + updateOp, _, err := getAnchoredUpdateOperation(updateKey, uniqueSuffix, 1) + require.NoError(t, err) + + applier = New(p, parser, &mockDocComposer{Err: errors.New("document composer error")}) + + updateResult, err := applier.Apply(updateOp, createResult) + require.NoError(t, err) + require.NotNil(t, updateResult) + 
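// the composer error is not fatal here: the document is left unchanged while the update commitment still advances
+		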
require.Equal(t, createResult.Doc, updateResult.Doc) + require.NotEqual(t, createResult.UpdateCommitment, updateResult.UpdateCommitment) + require.Equal(t, createResult.RecoveryCommitment, updateResult.RecoveryCommitment) + }) +} + +func TestDeactivate(t *testing.T) { + recoveryKey, err := ecdsa.GenerateKey(elliptic.P256(), rand.Reader) + require.NoError(t, err) + + recoveryPubKey, err := pubkey.GetPublicKeyJWK(&recoveryKey.PublicKey) + require.NoError(t, err) + + updateKey, err := ecdsa.GenerateKey(elliptic.P256(), rand.Reader) + require.NoError(t, err) + + createOp, err := getAnchoredCreateOperation(recoveryKey, updateKey) + require.NoError(t, err) + + uniqueSuffix := createOp.UniqueSuffix + + t.Run("success", func(t *testing.T) { + applier := New(p, parser, dc) + + created, err := applier.Apply(createOp, &protocol.ResolutionModel{}) + require.NoError(t, err) + + deactivateOp, err := getAnchoredDeactivateOperation(recoveryKey, uniqueSuffix) + require.NoError(t, err) + + deactivated, err := applier.Apply(deactivateOp, created) + require.NoError(t, err) + require.NotNil(t, deactivated) + + require.Equal(t, created.CreatedTime, deactivated.CreatedTime) + }) + + t.Run("success - anchor until time defaulted based on protocol parameter", func(t *testing.T) { + applier := New(p, parser, dc) + + rm, err := applier.Apply(createOp, &protocol.ResolutionModel{}) + require.NoError(t, err) + + recoverPubKey, err := pubkey.GetPublicKeyJWK(&recoveryKey.PublicKey) + require.NoError(t, err) + + rv, err := commitment.GetRevealValue(recoverPubKey, sha2_256) + require.NoError(t, err) + + now := time.Now().Unix() + + signedDataModel := model.DeactivateSignedDataModel{ + DidSuffix: uniqueSuffix, + RecoveryKey: recoverPubKey, + AnchorFrom: now - 5*60, + } + + signer := ecsigner.New(recoveryKey, "ES256", "") + jws, err := signutil.SignModel(signedDataModel, signer) + require.NoError(t, err) + + deactiveOp := &model.Operation{ + Namespace: mocks.DefaultNS, + ID: "did:sidetree:" + uniqueSuffix, + UniqueSuffix: uniqueSuffix, + Type: operation.TypeDeactivate, + SignedData: jws, + RevealValue: rv, + } + + anchoredOp := getAnchoredOperation(deactiveOp) + anchoredOp.TransactionTime = uint64(now) + + rm, err = applier.Apply(anchoredOp, rm) + require.NoError(t, err) + require.NotNil(t, rm) + }) + + t.Run("deactivate can only be applied to an existing document", func(t *testing.T) { + deactivateOp, err := getAnchoredDeactivateOperation(recoveryKey, uniqueSuffix) + require.NoError(t, err) + + applier := New(p, parser, dc) + doc, err := applier.Apply(deactivateOp, &protocol.ResolutionModel{}) + require.Error(t, err) + require.Contains(t, err.Error(), "deactivate can only be applied to an existing document") + require.Nil(t, doc) + }) + + t.Run("document not found error", func(t *testing.T) { + store, _ := getDefaultStore(recoveryKey, updateKey) + + deactivateOp, err := getAnchoredDeactivateOperation(recoveryKey, dummyUniqueSuffix) + require.NoError(t, err) + err = store.Put(deactivateOp) + require.NoError(t, err) + + applier := New(p, parser, &mockDocComposer{}) + doc, err := applier.Apply(deactivateOp, &protocol.ResolutionModel{}) + require.Error(t, err) + require.Contains(t, err.Error(), "deactivate can only be applied to an existing document") + require.Nil(t, doc) + }) + + t.Run("missing signed data error", func(t *testing.T) { + applier := New(p, parser, dc) + + rm, err := applier.Apply(createOp, &protocol.ResolutionModel{}) + require.NoError(t, err) + + deactivateOp, err := getDeactivateOperation(recoveryKey, 
uniqueSuffix) + require.NoError(t, err) + + deactivateOp.SignedData = "" + + anchoredOp := getAnchoredOperation(deactivateOp) + + rm, err = applier.Apply(anchoredOp, rm) + require.Error(t, err) + require.Nil(t, rm) + require.Contains(t, err.Error(), "missing signed data") + }) + + t.Run("unmarshal signed data model error", func(t *testing.T) { + applier := New(p, parser, dc) + + rm, err := applier.Apply(createOp, &protocol.ResolutionModel{}) + require.NoError(t, err) + + deactivateOp, err := getDeactivateOperation(recoveryKey, uniqueSuffix) + require.NoError(t, err) + + signer := ecsigner.New(recoveryKey, "ES256", "") + + compactJWS, err := signutil.SignPayload([]byte("payload"), signer) + require.NoError(t, err) + + deactivateOp.SignedData = compactJWS + + anchoredOp := getAnchoredOperation(deactivateOp) + + rm, err = applier.Apply(anchoredOp, rm) + require.Error(t, err) + require.Nil(t, rm) + require.Contains(t, err.Error(), + "failed to parse deactive operation in batch mode: failed to unmarshal signed data model for deactivate") + }) + + t.Run("invalid signature error", func(t *testing.T) { + applier := New(p, parser, dc) + + rm, err := applier.Apply(createOp, &protocol.ResolutionModel{}) + require.NoError(t, err) + + // sign recover operation with different recovery key (than one used in create) + differentRecoveryKey, err := ecdsa.GenerateKey(elliptic.P256(), rand.Reader) + require.NoError(t, err) + + signer := ecsigner.New(differentRecoveryKey, "ES256", "") + deactivateOp, err := getDeactivateOperationWithSigner(signer, recoveryKey, uniqueSuffix) + require.NoError(t, err) + + anchoredOp := getAnchoredOperation(deactivateOp) + + rm, err = applier.Apply(anchoredOp, rm) + require.Error(t, err) + require.Contains(t, err.Error(), "ecdsa: invalid signature") + require.Nil(t, rm) + }) + + t.Run("did suffix doesn't match signed value error", func(t *testing.T) { + applier := New(p, parser, dc) + + rm, err := applier.Apply(createOp, &protocol.ResolutionModel{}) + require.NoError(t, err) + + deactivateOp, err := getDeactivateOperation(recoveryKey, uniqueSuffix) + require.NoError(t, err) + + s := ecsigner.New(recoveryKey, "ES256", "") + + jws, err := signutil.SignModel(&model.DeactivateSignedDataModel{ + DidSuffix: "other", + RecoveryKey: recoveryPubKey, + }, s) + require.NoError(t, err) + + deactivateOp.SignedData = jws + + anchoredOp := getAnchoredOperation(deactivateOp) + + rm, err = applier.Apply(anchoredOp, rm) + require.Error(t, err) + require.Nil(t, rm) + require.Contains(t, err.Error(), + "failed to parse deactive operation in batch mode: signed did suffix mismatch for deactivate") + }) + + t.Run("invalid anchoring time range - anchor until time is less then anchoring time", func(t *testing.T) { + applier := New(p, parser, dc) + + rm, err := applier.Apply(createOp, &protocol.ResolutionModel{}) + require.NoError(t, err) + + recoverPubKey, err := pubkey.GetPublicKeyJWK(&recoveryKey.PublicKey) + require.NoError(t, err) + + rv, err := commitment.GetRevealValue(recoverPubKey, sha2_256) + require.NoError(t, err) + + now := time.Now().Unix() + + signedDataModel := model.DeactivateSignedDataModel{ + DidSuffix: uniqueSuffix, + RecoveryKey: recoverPubKey, + AnchorUntil: now - 5*60, + } + + signer := ecsigner.New(recoveryKey, "ES256", "") + jws, err := signutil.SignModel(signedDataModel, signer) + require.NoError(t, err) + + deactiveOp := &model.Operation{ + Namespace: mocks.DefaultNS, + ID: "did:sidetree:" + uniqueSuffix, + UniqueSuffix: uniqueSuffix, + Type: operation.TypeDeactivate, + 
SignedData: jws, + RevealValue: rv, + } + + anchoredOp := getAnchoredOperation(deactiveOp) + anchoredOp.TransactionTime = uint64(now) + + rm, err = applier.Apply(anchoredOp, rm) + require.Error(t, err) + require.Nil(t, rm) + require.Contains(t, err.Error(), + "invalid anchoring time range: anchor until time is less then anchoring time") + }) +} + +func TestRecover(t *testing.T) { + recoveryKey, e := ecdsa.GenerateKey(elliptic.P256(), rand.Reader) + require.NoError(t, e) + + updateKey, e := ecdsa.GenerateKey(elliptic.P256(), rand.Reader) + require.NoError(t, e) + + createOp, err := getAnchoredCreateOperation(recoveryKey, updateKey) + require.NoError(t, err) + + uniqueSuffix := createOp.UniqueSuffix + + t.Run("success", func(t *testing.T) { + applier := New(p, parser, dc) + + created, err := applier.Apply(createOp, &protocol.ResolutionModel{}) + require.NoError(t, err) + + recoverOp, nextRecoveryKey, err := getAnchoredRecoverOperation( + recoveryKey, updateKey, uniqueSuffix, 1) + require.NoError(t, err) + + created, err = applier.Apply(recoverOp, created) + require.NoError(t, err) + + // test for recovered key + docBytes, err := created.Doc.Bytes() + require.NoError(t, err) + require.Contains(t, string(docBytes), "recovered") + + // apply recover again - consecutive recoveries are valid + recoverOp, _, err = getAnchoredRecoverOperation(nextRecoveryKey, updateKey, uniqueSuffix, 2) + require.NoError(t, err) + + recovered, err := applier.Apply(recoverOp, created) + require.NoError(t, err) + require.NotNil(t, recovered) + + require.Equal(t, created.CreatedTime, recovered.CreatedTime) + }) + + t.Run("success - operation with invalid signature rejected", func(t *testing.T) { + applier := New(p, parser, dc) + + rm, err := applier.Apply(createOp, &protocol.ResolutionModel{}) + require.NoError(t, err) + + invalidRecoverOp, _, err := getRecoverOperation(recoveryKey, updateKey, uniqueSuffix) + require.NoError(t, err) + + invalidRecoverOp.SignedData = "" + + invalidAnchoredOp := getAnchoredOperation(invalidRecoverOp) + + result, err := applier.Apply(invalidAnchoredOp, rm) + require.Error(t, err) + require.Contains(t, err.Error(), "missing signed data") + require.Nil(t, result) + + // now generate valid recovery operation with same recoveryKey + recoverOp, _, err := getAnchoredRecoverOperation(recoveryKey, updateKey, uniqueSuffix, 2) + require.NoError(t, err) + + result, err = applier.Apply(recoverOp, rm) + require.NoError(t, err) + + // test for recovered key in resolved document + docBytes, err := result.Doc.Bytes() + require.NoError(t, err) + require.Contains(t, string(docBytes), "recovered") + }) + + t.Run("success - operation with valid signature and invalid delta accepted", func(t *testing.T) { + applier := New(p, parser, dc) + + rm, err := applier.Apply(createOp, &protocol.ResolutionModel{}) + require.NoError(t, err) + + invalidRecoverOp, _, err := getRecoverOperation(recoveryKey, updateKey, uniqueSuffix) + require.NoError(t, err) + + invalidRecoverOp.Delta = nil + + invalidAnchoredOp := getAnchoredOperation(invalidRecoverOp) + + result, err := applier.Apply(invalidAnchoredOp, rm) + require.NoError(t, err) + require.NotNil(t, result) + require.Equal(t, make(document.Document), result.Doc) + }) + + t.Run("missing signed data error", func(t *testing.T) { + applier := New(p, parser, dc) + + rm, err := applier.Apply(createOp, &protocol.ResolutionModel{}) + require.NoError(t, err) + + recoverOp, _, err := getRecoverOperation(recoveryKey, updateKey, uniqueSuffix) + require.NoError(t, err) + + 
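// clear the signed data to trigger the parser's missing-signed-data error
+		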
recoverOp.SignedData = "" + + anchoredOp := getAnchoredOperation(recoverOp) + + rm, err = applier.Apply(anchoredOp, rm) + require.Error(t, err) + require.Nil(t, rm) + require.Contains(t, err.Error(), "missing signed data") + }) + + t.Run("unmarshal signed data model error", func(t *testing.T) { + applier := New(p, parser, dc) + + rm, err := applier.Apply(createOp, &protocol.ResolutionModel{}) + require.NoError(t, err) + + recoverOp, _, err := getRecoverOperation(recoveryKey, updateKey, uniqueSuffix) + require.NoError(t, err) + + signer := ecsigner.New(recoveryKey, "ES256", "") + + compactJWS, err := signutil.SignPayload([]byte("payload"), signer) + require.NoError(t, err) + + recoverOp.SignedData = compactJWS + + anchoredOp := getAnchoredOperation(recoverOp) + + rm, err = applier.Apply(anchoredOp, rm) + require.Error(t, err) + require.Nil(t, rm) + require.Contains(t, err.Error(), + "failed to parse recover operation in batch mode: failed to unmarshal signed data model for recover") + }) + + t.Run("invalid signature error", func(t *testing.T) { + applier := New(p, parser, dc) + + rm, err := applier.Apply(createOp, &protocol.ResolutionModel{}) + require.NoError(t, err) + + // sign recover operation with different recovery key (than one used in create) + differentRecoveryKey, err := ecdsa.GenerateKey(elliptic.P256(), rand.Reader) + require.NoError(t, err) + + signer := ecsigner.New(differentRecoveryKey, "ES256", "") + recoverOp, _, err := getRecoverOperationWithSigner(signer, recoveryKey, updateKey, uniqueSuffix) + require.NoError(t, err) + + anchoredOp := getAnchoredOperation(recoverOp) + + rm, err = applier.Apply(anchoredOp, rm) + require.Error(t, err) + require.Nil(t, rm) + require.Contains(t, err.Error(), "ecdsa: invalid signature") + }) + + t.Run("delta hash doesn't match delta error", func(t *testing.T) { + applier := New(p, parser, dc) + + createResult, err := applier.Apply(createOp, &protocol.ResolutionModel{}) + require.NoError(t, err) + + recoverOp, _, err := getRecoverOperation(recoveryKey, updateKey, uniqueSuffix) + require.NoError(t, err) + + recoverOp.Delta = &model.DeltaModel{} + + anchoredOp := getAnchoredOperation(recoverOp) + + recoverResult, err := applier.Apply(anchoredOp, createResult) + require.NoError(t, err) + require.NotNil(t, recoverResult) + require.Equal(t, recoverResult.Doc, make(document.Document)) + require.NotEqual(t, recoverResult.RecoveryCommitment, createResult.RecoveryCommitment) + }) + + t.Run("invalid anchoring range - anchor until time is less then anchoring time", func(t *testing.T) { + applier := New(p, parser, dc) + + createResult, err := applier.Apply(createOp, &protocol.ResolutionModel{}) + require.NoError(t, err) + + updateCommitment, err := getCommitment(updateKey) + require.NoError(t, err) + + delta, err := getDeltaModel(recoveredDoc, updateCommitment) + require.NoError(t, err) + + deltaHash, err := hashing.CalculateModelMultihash(delta, sha2_256) + require.NoError(t, err) + + recoveryPubKey, err := pubkey.GetPublicKeyJWK(&recoveryKey.PublicKey) + require.NoError(t, err) + + _, recoveryCommitment, err := generateKeyAndCommitment() + require.NoError(t, err) + + now := time.Now().Unix() + + recoverSignedData := &model.RecoverSignedDataModel{ + RecoveryKey: recoveryPubKey, + RecoveryCommitment: recoveryCommitment, + DeltaHash: deltaHash, + AnchorUntil: now - 6*60, + } + + signer := ecsigner.New(recoveryKey, "ES256", "") + recoverRequest, err := getRecoverRequest(signer, delta, recoverSignedData) + require.NoError(t, err) + + operationBuffer, err := 
json.Marshal(recoverRequest)
+		require.NoError(t, err)
+
+		recoverOp := &model.Operation{
+			Namespace:        mocks.DefaultNS,
+			UniqueSuffix:     uniqueSuffix,
+			Type:             operation.TypeRecover,
+			OperationRequest: operationBuffer,
+			Delta:            recoverRequest.Delta,
+			SignedData:       recoverRequest.SignedData,
+			RevealValue:      recoverRequest.RevealValue,
+		}
+
+		anchoredOp := getAnchoredOperation(recoverOp)
+		anchoredOp.TransactionTime = uint64(now)
+
+		recoverResult, err := applier.Apply(anchoredOp, createResult)
+		require.NoError(t, err)
+		require.NotNil(t, recoverResult)
+		require.Equal(t, recoverResult.Doc, make(document.Document))
+		require.NotEqual(t, recoverResult.RecoveryCommitment, createResult.RecoveryCommitment)
+	})
+
+	t.Run("error - document composer error", func(t *testing.T) {
+		applier := New(p, parser, &mockDocComposer{Err: errors.New("doc composer error")})
+
+		createResult, err := applier.Apply(createOp, &protocol.ResolutionModel{})
+		require.NoError(t, err)
+
+		recoverOp, _, err := getRecoverOperation(recoveryKey, updateKey, uniqueSuffix)
+		require.NoError(t, err)
+
+		anchoredOp := getAnchoredOperation(recoverOp)
+
+		recoverResult, err := applier.Apply(anchoredOp, createResult)
+		require.NoError(t, err)
+		require.NotNil(t, recoverResult)
+		require.Equal(t, make(document.Document), recoverResult.Doc)
+		require.NotEqual(t, recoverResult.RecoveryCommitment, createResult.RecoveryCommitment)
+	})
+}
+
+func TestVerifyAnchoringTimeRange(t *testing.T) {
+	applier := New(p, parser, dc)
+
+	now := time.Now().Unix()
+
+	t.Run("success - no anchoring times specified", func(t *testing.T) {
+		err := applier.verifyAnchoringTimeRange(0, 0, uint64(now))
+		require.NoError(t, err)
+	})
+
+	t.Run("success - anchoring times specified", func(t *testing.T) {
+		err := applier.verifyAnchoringTimeRange(now-5*60, now+5*50, uint64(now))
+		require.NoError(t, err)
+	})
+
+	t.Run("error - anchor from time is greater than anchoring time", func(t *testing.T) {
+		err := applier.verifyAnchoringTimeRange(now+55*60, 0, uint64(now))
+		require.Error(t, err)
+		require.Contains(t, err.Error(), "anchor from time is greater then anchoring time")
+	})
+
+	t.Run("error - anchor until time is less than anchoring time", func(t *testing.T) {
+		err := applier.verifyAnchoringTimeRange(now-5*60, now-5*50, uint64(now))
+		require.Error(t, err)
+		require.Contains(t, err.Error(), "anchor until time is less then anchoring time")
+	})
+}
+
+func getUpdateOperation(privateKey *ecdsa.PrivateKey, uniqueSuffix string, operationNumber uint,
+) (*model.Operation, *ecdsa.PrivateKey, error) {
+	s := ecsigner.New(privateKey, "ES256", updateKeyID)
+
+	return getUpdateOperationWithSigner(s, privateKey, uniqueSuffix, operationNumber)
+}
+
+func getAnchoredUpdateOperation(
+	privateKey *ecdsa.PrivateKey, uniqueSuffix string, operationNumber uint,
+) (*operation.AnchoredOperation, *ecdsa.PrivateKey, error) {
+	op, nextUpdateKey, err := getUpdateOperation(privateKey, uniqueSuffix, operationNumber)
+	if err != nil {
+		return nil, nil, err
+	}
+
+	return getAnchoredOperationWithBlockNum(op, uint64(operationNumber)), nextUpdateKey, nil
+}
+
+func getUpdateOperationWithSigner(
+	s client.Signer, privateKey *ecdsa.PrivateKey, uniqueSuffix string,
+	operationNumber uint) (*model.Operation, *ecdsa.PrivateKey, error) {
+	p := map[string]interface{}{
+		"op":    "replace",
+		"path":  "/test",
+		"value": "special" + strconv.Itoa(int(operationNumber)),
+	}
+
+	patchBytes, err := canonicalizer.MarshalCanonical([]map[string]interface{}{p})
+	if err != nil {
+		return nil, nil, 
err
+	}
+
+	jsonPatch, err := patch.NewJSONPatch(string(patchBytes))
+	if err != nil {
+		return nil, nil, err
+	}
+
+	nextUpdateKey, updateCommitment, err := generateKeyAndCommitment()
+	if err != nil {
+		return nil, nil, err
+	}
+
+	delta := &model.DeltaModel{
+		UpdateCommitment: updateCommitment,
+		Patches:          []patch.Patch{jsonPatch},
+	}
+
+	deltaHash, err := hashing.CalculateModelMultihash(delta, sha2_256)
+	if err != nil {
+		return nil, nil, err
+	}
+
+	updatePubKey, err := pubkey.GetPublicKeyJWK(&privateKey.PublicKey)
+	if err != nil {
+		return nil, nil, err
+	}
+
+	signedData := &model.UpdateSignedDataModel{
+		DeltaHash: deltaHash,
+		UpdateKey: updatePubKey,
+	}
+
+	jws, err := signutil.SignModel(signedData, s)
+	if err != nil {
+		return nil, nil, err
+	}
+
+	rv, err := commitment.GetRevealValue(updatePubKey, sha2_256)
+	if err != nil {
+		return nil, nil, err
+	}
+
+	op := &model.Operation{
+		Namespace:    mocks.DefaultNS,
+		ID:           "did:sidetree:" + uniqueSuffix,
+		UniqueSuffix: uniqueSuffix,
+		Delta:        delta,
+		Type:         operation.TypeUpdate,
+		SignedData:   jws,
+		RevealValue:  rv,
+	}
+
+	return op, nextUpdateKey, nil
+}
+
+func generateKeyAndCommitment() (*ecdsa.PrivateKey, string, error) {
+	key, err := ecdsa.GenerateKey(elliptic.P256(), rand.Reader)
+	if err != nil {
+		return nil, "", err
+	}
+
+	pubKey, err := pubkey.GetPublicKeyJWK(&key.PublicKey)
+	if err != nil {
+		return nil, "", err
+	}
+
+	c, err := commitment.GetCommitment(pubKey, sha2_256)
+	if err != nil {
+		return nil, "", err
+	}
+
+	return key, c, nil
+}
+
+func getDeactivateOperation(privateKey *ecdsa.PrivateKey, uniqueSuffix string,
+) (*model.Operation, error) {
+	signer := ecsigner.New(privateKey, "ES256", "")
+
+	return getDeactivateOperationWithSigner(signer, privateKey, uniqueSuffix)
+}
+
+func getAnchoredDeactivateOperation(privateKey *ecdsa.PrivateKey, uniqueSuffix string,
+) (*operation.AnchoredOperation, error) {
+	op, err := getDeactivateOperation(privateKey, uniqueSuffix)
+	if err != nil {
+		return nil, err
+	}
+
+	return getAnchoredOperation(op), nil
+}
+
+func getDeactivateOperationWithSigner(signer client.Signer, privateKey *ecdsa.PrivateKey, uniqueSuffix string,
+) (*model.Operation, error) {
+	recoverPubKey, err := pubkey.GetPublicKeyJWK(&privateKey.PublicKey)
+	if err != nil {
+		return nil, err
+	}
+
+	rv, err := commitment.GetRevealValue(recoverPubKey, sha2_256)
+	if err != nil {
+		return nil, err
+	}
+
+	signedDataModel := model.DeactivateSignedDataModel{
+		DidSuffix:   uniqueSuffix,
+		RecoveryKey: recoverPubKey,
+	}
+
+	jws, err := signutil.SignModel(signedDataModel, signer)
+	if err != nil {
+		return nil, err
+	}
+
+	return &model.Operation{
+		Namespace:    mocks.DefaultNS,
+		ID:           "did:sidetree:" + uniqueSuffix,
+		UniqueSuffix: uniqueSuffix,
+		Type:         operation.TypeDeactivate,
+		SignedData:   jws,
+		RevealValue:  rv,
+	}, nil
+}
+
+func getRecoverOperation(recoveryKey, updateKey *ecdsa.PrivateKey, uniqueSuffix string,
+) (*model.Operation, *ecdsa.PrivateKey, error) {
+	signer := ecsigner.New(recoveryKey, "ES256", "")
+
+	return getRecoverOperationWithSigner(signer, recoveryKey, updateKey, uniqueSuffix)
+}
+
+func getAnchoredRecoverOperation(recoveryKey, updateKey *ecdsa.PrivateKey, uniqueSuffix string, operationNumber uint,
+) (*operation.AnchoredOperation, *ecdsa.PrivateKey, error) {
+	op, nextRecoveryKey, err := getRecoverOperation(recoveryKey, updateKey, uniqueSuffix)
+	if err != nil {
+		return nil, nil, err
+	}
+
+	return getAnchoredOperationWithBlockNum(op, uint64(operationNumber)), nextRecoveryKey, nil
+}
+
+func 
getRecoverOperationWithSigner(signer client.Signer, recoveryKey, updateKey *ecdsa.PrivateKey, uniqueSuffix string, +) (*model.Operation, *ecdsa.PrivateKey, error) { + recoverRequest, nextRecoveryKey, err := getDefaultRecoverRequest(signer, recoveryKey, updateKey) + if err != nil { + return nil, nil, err + } + + operationBuffer, err := json.Marshal(recoverRequest) + if err != nil { + return nil, nil, err + } + + return &model.Operation{ + Namespace: mocks.DefaultNS, + UniqueSuffix: uniqueSuffix, + Type: operation.TypeRecover, + OperationRequest: operationBuffer, + Delta: recoverRequest.Delta, + SignedData: recoverRequest.SignedData, + RevealValue: recoverRequest.RevealValue, + }, nextRecoveryKey, nil +} + +func getRecoverRequest(signer client.Signer, delta *model.DeltaModel, signedDataModel *model.RecoverSignedDataModel, +) (*model.RecoverRequest, error) { + deltaHash, err := hashing.CalculateModelMultihash(delta, sha2_256) + if err != nil { + return nil, err + } + + signedDataModel.DeltaHash = deltaHash + + jws, err := signutil.SignModel(signedDataModel, signer) + if err != nil { + return nil, err + } + + rv, err := commitment.GetRevealValue(signedDataModel.RecoveryKey, sha2_256) + if err != nil { + return nil, err + } + + return &model.RecoverRequest{ + Operation: operation.TypeRecover, + DidSuffix: "suffix", + Delta: delta, + SignedData: jws, + RevealValue: rv, + }, nil +} + +func getDefaultRecoverRequest(signer client.Signer, recoveryKey, updateKey *ecdsa.PrivateKey, +) (*model.RecoverRequest, *ecdsa.PrivateKey, error) { + updateCommitment, err := getCommitment(updateKey) + if err != nil { + return nil, nil, err + } + + delta, err := getDeltaModel(recoveredDoc, updateCommitment) + if err != nil { + return nil, nil, err + } + + deltaHash, err := hashing.CalculateModelMultihash(delta, sha2_256) + if err != nil { + return nil, nil, err + } + + recoveryPubKey, err := pubkey.GetPublicKeyJWK(&recoveryKey.PublicKey) + if err != nil { + return nil, nil, err + } + + nextRecoveryKey, recoveryCommitment, err := generateKeyAndCommitment() + if err != nil { + return nil, nil, err + } + + recoverSignedData := &model.RecoverSignedDataModel{ + RecoveryKey: recoveryPubKey, + RecoveryCommitment: recoveryCommitment, + DeltaHash: deltaHash, + } + + req, err := getRecoverRequest(signer, delta, recoverSignedData) + if err != nil { + return nil, nil, err + } + + return req, nextRecoveryKey, nil +} + +func getDefaultStore(recoveryKey, updateKey *ecdsa.PrivateKey) (*mocks.MockOperationStore, string) { + store := mocks.NewMockOperationStore(nil) + + createOp, err := getAnchoredCreateOperation(recoveryKey, updateKey) + if err != nil { + panic(err) + } + + // store default create operation + err = store.Put(createOp) + if err != nil { + panic(err) + } + + return store, createOp.UniqueSuffix +} + +func getCreateOperationWithDoc(recoveryKey, updateKey *ecdsa.PrivateKey, doc string) (*model.Operation, error) { + createRequest, err := getCreateRequest(recoveryKey, updateKey) + if err != nil { + return nil, err + } + + operationBuffer, err := json.Marshal(createRequest) + if err != nil { + return nil, err + } + + uniqueSuffix, err := hashing.CalculateModelMultihash(createRequest.SuffixData, sha2_256) + if err != nil { + return nil, err + } + + updateCommitment, err := getCommitment(updateKey) + if err != nil { + return nil, err + } + + delta, err := getDeltaModel(doc, updateCommitment) + if err != nil { + return nil, err + } + + suffixData, err := getSuffixData(recoveryKey, delta) + if err != nil { + return nil, err + 
} + + return &model.Operation{ + Namespace: mocks.DefaultNS, + ID: "did:sidetree:" + uniqueSuffix, + UniqueSuffix: uniqueSuffix, + Type: operation.TypeCreate, + OperationRequest: operationBuffer, + Delta: delta, + SuffixData: suffixData, + }, nil +} + +func getCreateOperation(recoveryKey, updateKey *ecdsa.PrivateKey) (*model.Operation, error) { + return getCreateOperationWithDoc(recoveryKey, updateKey, validDoc) +} + +func getAnchoredCreateOperation(recoveryKey, updateKey *ecdsa.PrivateKey) (*operation.AnchoredOperation, error) { + op, err := getCreateOperation(recoveryKey, updateKey) + if err != nil { + return nil, err + } + + return getAnchoredOperation(op), nil +} + +func getAnchoredOperation(op *model.Operation) *operation.AnchoredOperation { + anchoredOp, err := model.GetAnchoredOperation(op) + if err != nil { + panic(err) + } + + anchoredOp.TransactionTime = uint64(time.Now().Unix()) + + return anchoredOp +} + +func getAnchoredOperationWithBlockNum(op *model.Operation, blockNum uint64) *operation.AnchoredOperation { + anchored := getAnchoredOperation(op) + anchored.TransactionTime = blockNum + + return anchored +} + +func getCreateRequest(recoveryKey, updateKey *ecdsa.PrivateKey) (*model.CreateRequest, error) { + updateCommitment, err := getCommitment(updateKey) + if err != nil { + return nil, err + } + + delta, err := getDeltaModel(validDoc, updateCommitment) + if err != nil { + return nil, err + } + + suffixData, err := getSuffixData(recoveryKey, delta) + if err != nil { + return nil, err + } + + return &model.CreateRequest{ + Operation: operation.TypeCreate, + Delta: delta, + SuffixData: suffixData, + }, nil +} + +func getDeltaModel(doc string, updateCommitment string) (*model.DeltaModel, error) { + patches, err := patch.PatchesFromDocument(doc) + if err != nil { + return nil, err + } + + return &model.DeltaModel{ + Patches: patches, + UpdateCommitment: updateCommitment, + }, nil +} + +func getCommitment(key *ecdsa.PrivateKey) (string, error) { + pubKey, err := pubkey.GetPublicKeyJWK(&key.PublicKey) + if err != nil { + return "", err + } + + c, err := commitment.GetCommitment(pubKey, sha2_256) + if err != nil { + return "", err + } + + return c, nil +} + +func getSuffixData(privateKey *ecdsa.PrivateKey, delta *model.DeltaModel) (*model.SuffixDataModel, error) { + recoveryCommitment, err := getCommitment(privateKey) + if err != nil { + return nil, err + } + + deltaHash, err := hashing.CalculateModelMultihash(delta, sha2_256) + if err != nil { + return nil, err + } + + return &model.SuffixDataModel{ + DeltaHash: deltaHash, + RecoveryCommitment: recoveryCommitment, + }, nil +} + +const validDoc = `{ + "publicKey": [{ + "id": "key1", + "type": "JsonWebKey2020", + "purposes": ["assertionMethod"], + "publicKeyJwk": { + "kty": "EC", + "crv": "P-256K", + "x": "PUymIqdtF_qxaAqPABSw-C-owT1KYYQbsMKFM-L9fJA", + "y": "nM84jDHCMOTGTh_ZdHq4dBBdo4Z5PkEOW9jA8z8IsGc" + } + }] +}` + +const recoveredDoc = `{ + "publicKey": [{ + "id": "recovered", + "type": "JsonWebKey2020", + "purposes": ["authentication"], + "publicKeyJwk": { + "kty": "EC", + "crv": "P-256K", + "x": "PUymIqdtF_qxaAqPABSw-C-owT1KYYQbsMKFM-L9fJA", + "y": "nM84jDHCMOTGTh_ZdHq4dBBdo4Z5PkEOW9jA8z8IsGc" + } + }] +}` + +type mockDocComposer struct { + Err error +} + +// ApplyPatches mocks applying patches to the document. 
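+// When Err is set, ApplyPatches fails with that error; otherwise it returns an
+// empty document, letting the tests above exercise the applier's patch-error paths.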
+func (m *mockDocComposer) ApplyPatches(doc document.Document, patches []patch.Patch) (document.Document, error) { + if m.Err != nil { + return nil, m.Err + } + + return make(document.Document), nil +} diff --git a/method/sidetreelongform/sidetree-core/versions/1_0/operationparser/commitment.go b/method/sidetreelongform/sidetree-core/versions/1_0/operationparser/commitment.go new file mode 100644 index 0000000..7c5c70b --- /dev/null +++ b/method/sidetreelongform/sidetree-core/versions/1_0/operationparser/commitment.go @@ -0,0 +1,55 @@ +/* +Copyright Gen Digital Inc. All Rights Reserved. + +SPDX-License-Identifier: Apache-2.0 +*/ + +package operationparser + +import ( + "fmt" + + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/api/operation" +) + +// GetRevealValue returns this operation reveal value. +func (p *Parser) GetRevealValue(opBytes []byte) (string, error) { + // namespace is irrelevant in this case + op, err := p.ParseOperation("", opBytes, true) + if err != nil { + return "", fmt.Errorf("get reveal value - parse operation error: %s", err.Error()) + } + + if op.Type == operation.TypeCreate { + return "", fmt.Errorf("operation type '%s' not supported for getting operation reveal value", op.Type) + } + + return op.RevealValue, nil +} + +// GetCommitment returns next operation commitment. +func (p *Parser) GetCommitment(opBytes []byte) (string, error) { + // namespace is irrelevant in this case + op, err := p.ParseOperation("", opBytes, true) + if err != nil { + return "", fmt.Errorf("get commitment - parse operation error: %s", err.Error()) + } + + switch op.Type { //nolint:exhaustive + case operation.TypeUpdate: + return op.Delta.UpdateCommitment, nil + + case operation.TypeDeactivate: + return "", nil + + case operation.TypeRecover: + signedDataModel, innerErr := p.ParseSignedDataForRecover(op.SignedData) + if innerErr != nil { + return "", fmt.Errorf("failed to parse signed data model for recover: %s", innerErr.Error()) + } + + return signedDataModel.RecoveryCommitment, nil + } + + return "", fmt.Errorf("operation type '%s' not supported for getting next operation commitment", op.Type) +} diff --git a/method/sidetreelongform/sidetree-core/versions/1_0/operationparser/commitment_test.go b/method/sidetreelongform/sidetree-core/versions/1_0/operationparser/commitment_test.go new file mode 100644 index 0000000..ef8ff69 --- /dev/null +++ b/method/sidetreelongform/sidetree-core/versions/1_0/operationparser/commitment_test.go @@ -0,0 +1,296 @@ +/* +Copyright SecureKey Technologies Inc. All Rights Reserved. 
+
+SPDX-License-Identifier: Apache-2.0
+*/
+
+package operationparser
+
+import (
+	"crypto/ecdsa"
+	"crypto/elliptic"
+	"crypto/rand"
+	"testing"
+
+	"github.com/stretchr/testify/require"
+
+	"github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/api/protocol"
+	"github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/commitment"
+	"github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/mocks"
+	"github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/patch"
+	"github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/util/ecsigner"
+	"github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/util/pubkey"
+	"github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/versions/1_0/client"
+)
+
+func TestParser_GetCommitment(t *testing.T) {
+	p := mocks.NewMockProtocolClient()
+
+	parser := New(p.Protocol)
+
+	recoveryKey, _, err := generateKeyAndCommitment(p.Protocol)
+	require.NoError(t, err)
+
+	updateKey, _, err := generateKeyAndCommitment(p.Protocol)
+	require.NoError(t, err)
+
+	_, recoveryCommitment, err := generateKeyAndCommitment(p.Protocol)
+	require.NoError(t, err)
+
+	_, updateCommitment, err := generateKeyAndCommitment(p.Protocol)
+	require.NoError(t, err)
+
+	t.Run("success - recoverRequest", func(t *testing.T) {
+		recoverRequest, err := generateRecoverRequest(recoveryKey, recoveryCommitment, parser.Protocol)
+		require.NoError(t, err)
+
+		c, err := parser.GetCommitment(recoverRequest)
+		require.NoError(t, err)
+		require.NotNil(t, c)
+		require.Equal(t, c, recoveryCommitment)
+	})
+
+	t.Run("success - deactivate", func(t *testing.T) {
+		deactivate, err := generateDeactivateRequest(recoveryKey)
+		require.NoError(t, err)
+
+		c, err := parser.GetCommitment(deactivate)
+		require.NoError(t, err)
+		require.NotNil(t, c)
+		require.Equal(t, c, "")
+	})
+
+	t.Run("success - update", func(t *testing.T) {
+		update, err := generateUpdateRequest(updateKey, updateCommitment, parser.Protocol)
+		require.NoError(t, err)
+
+		c, err := parser.GetCommitment(update)
+		require.NoError(t, err)
+		require.NotNil(t, c)
+		require.Equal(t, c, updateCommitment)
+	})
+
+	t.Run("error - create", func(t *testing.T) {
+		create, err := generateCreateRequest(recoveryCommitment, updateCommitment, parser.Protocol)
+		require.NoError(t, err)
+
+		c, err := parser.GetCommitment(create)
+		require.Error(t, err)
+		require.Empty(t, c)
+		require.Contains(t, err.Error(), "operation type 'create' not supported for getting next operation commitment")
+	})
+
+	t.Run("error - parse operation fails", func(t *testing.T) {
+		c, err := parser.GetCommitment([]byte(`{"type":"other"}`))
+		require.Error(t, err)
+		require.Empty(t, c)
+		require.Contains(t, err.Error(), "get commitment - parse operation error")
+	})
+}
+
+func TestParser_GetRevealValue(t *testing.T) {
+	p := mocks.NewMockProtocolClient()
+
+	parser := New(p.Protocol)
+
+	recoveryKey, _, err := generateKeyAndCommitment(p.Protocol)
+	require.NoError(t, err)
+
+	updateKey, _, err := generateKeyAndCommitment(p.Protocol)
+	require.NoError(t, err)
+
+	_, recoveryCommitment, err := generateKeyAndCommitment(p.Protocol)
+	require.NoError(t, err)
+
+	_, updateCommitment, err := generateKeyAndCommitment(p.Protocol)
+	
require.NoError(t, err) + + t.Run("success - recoverRequest", func(t *testing.T) { + recoverRequest, err := generateRecoverRequest(recoveryKey, recoveryCommitment, parser.Protocol) + require.NoError(t, err) + + rv, err := parser.GetRevealValue(recoverRequest) + require.NoError(t, err) + require.NotEmpty(t, rv) + + pubJWK, err := pubkey.GetPublicKeyJWK(&recoveryKey.PublicKey) + require.NoError(t, err) + + expected, err := commitment.GetRevealValue(pubJWK, parser.Protocol.MultihashAlgorithms[0]) + require.NoError(t, err) + + require.Equal(t, rv, expected) + }) + + t.Run("success - deactivate", func(t *testing.T) { + deactivate, err := generateDeactivateRequest(recoveryKey) + require.NoError(t, err) + + rv, err := parser.GetRevealValue(deactivate) + require.NoError(t, err) + require.NotEmpty(t, rv) + + pubJWK, err := pubkey.GetPublicKeyJWK(&recoveryKey.PublicKey) + require.NoError(t, err) + + expected, err := commitment.GetRevealValue(pubJWK, parser.Protocol.MultihashAlgorithms[0]) + require.NoError(t, err) + + require.Equal(t, rv, expected) + }) + + t.Run("success - update", func(t *testing.T) { + update, err := generateUpdateRequest(updateKey, updateCommitment, parser.Protocol) + require.NoError(t, err) + + rv, err := parser.GetRevealValue(update) + require.NoError(t, err) + require.NotEmpty(t, rv) + + pubJWK, err := pubkey.GetPublicKeyJWK(&updateKey.PublicKey) + require.NoError(t, err) + + expected, err := commitment.GetRevealValue(pubJWK, parser.Protocol.MultihashAlgorithms[0]) + require.NoError(t, err) + + require.Equal(t, rv, expected) + }) + + t.Run("error - create", func(t *testing.T) { + create, err := generateCreateRequest(recoveryCommitment, updateCommitment, parser.Protocol) + require.NoError(t, err) + + c, err := parser.GetRevealValue(create) + require.Error(t, err) + require.Empty(t, c) + require.Contains(t, err.Error(), "operation type 'create' not supported for getting operation reveal value") + }) + + t.Run("error - parse operation fails", func(t *testing.T) { + c, err := parser.GetRevealValue([]byte(`{"type":"other"}`)) + require.Error(t, err) + require.Empty(t, c) + require.Contains(t, err.Error(), "get reveal value - parse operation error") + }) +} + +func generateRecoverRequest( + recoveryKey *ecdsa.PrivateKey, recoveryCommitment string, p protocol.Protocol) ([]byte, error) { + jwk, err := pubkey.GetPublicKeyJWK(&recoveryKey.PublicKey) + if err != nil { + return nil, err + } + + _, updateCommitment, err := generateKeyAndCommitment(p) + if err != nil { + return nil, err + } + + rv, err := commitment.GetRevealValue(jwk, sha2_256) + if err != nil { + return nil, err + } + + info := &client.RecoverRequestInfo{ + DidSuffix: "recoverRequest-suffix", + OpaqueDocument: `{"test":"value"}`, + RecoveryCommitment: recoveryCommitment, + UpdateCommitment: updateCommitment, // not evaluated in operation getting commitment/reveal value + RecoveryKey: jwk, + MultihashCode: p.MultihashAlgorithms[0], + Signer: ecsigner.New(recoveryKey, "ES256", ""), + RevealValue: rv, + } + + return client.NewRecoverRequest(info) +} + +func generateCreateRequest(recoveryCommitment, updateCommitment string, p protocol.Protocol) ([]byte, error) { + info := &client.CreateRequestInfo{ + OpaqueDocument: `{"test":"value"}`, + RecoveryCommitment: recoveryCommitment, + UpdateCommitment: updateCommitment, + MultihashCode: p.MultihashAlgorithms[0], + } + + return client.NewCreateRequest(info) +} + +func generateDeactivateRequest(recoveryKey *ecdsa.PrivateKey) ([]byte, error) { + jwk, err := 
pubkey.GetPublicKeyJWK(&recoveryKey.PublicKey) + if err != nil { + return nil, err + } + + rv, err := commitment.GetRevealValue(jwk, sha2_256) + if err != nil { + return nil, err + } + + info := &client.DeactivateRequestInfo{ + DidSuffix: "deactivate-suffix", + Signer: ecsigner.New(recoveryKey, "ES256", ""), + RecoveryKey: jwk, + RevealValue: rv, + } + + return client.NewDeactivateRequest(info) +} + +func generateUpdateRequest(updateKey *ecdsa.PrivateKey, updateCommitment string, p protocol.Protocol) ([]byte, error) { + jwk, err := pubkey.GetPublicKeyJWK(&updateKey.PublicKey) + if err != nil { + return nil, err + } + + testPatch, err := patch.NewJSONPatch(`[{"op": "replace", "path": "/name", "value": "Jane"}]`) + if err != nil { + return nil, err + } + + rv, err := commitment.GetRevealValue(jwk, sha2_256) + if err != nil { + return nil, err + } + + info := &client.UpdateRequestInfo{ + DidSuffix: "update-suffix", + Signer: ecsigner.New(updateKey, "ES256", ""), + UpdateCommitment: updateCommitment, + UpdateKey: jwk, + Patches: []patch.Patch{testPatch}, + MultihashCode: p.MultihashAlgorithms[0], + RevealValue: rv, + } + + return client.NewUpdateRequest(info) +} + +func generateKeyAndCommitment(p protocol.Protocol) (*ecdsa.PrivateKey, string, error) { + key, err := ecdsa.GenerateKey(elliptic.P256(), rand.Reader) + if err != nil { + return nil, "", err + } + + pubKey, err := pubkey.GetPublicKeyJWK(&key.PublicKey) + if err != nil { + return nil, "", err + } + + c, err := commitment.GetCommitment(pubKey, p.MultihashAlgorithms[0]) + if err != nil { + return nil, "", err + } + + return key, c, nil +} diff --git a/method/sidetreelongform/sidetree-core/versions/1_0/operationparser/create.go b/method/sidetreelongform/sidetree-core/versions/1_0/operationparser/create.go new file mode 100644 index 0000000..86ad2fb --- /dev/null +++ b/method/sidetreelongform/sidetree-core/versions/1_0/operationparser/create.go @@ -0,0 +1,170 @@ +/* +Copyright SecureKey Technologies Inc. All Rights Reserved. + +SPDX-License-Identifier: Apache-2.0 +*/ + +package operationparser + +import ( + "encoding/json" + "errors" + "fmt" + + "github.com/trustbloc/did-go/doc/json/canonicalizer" + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/api/operation" + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/hashing" + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/patch" + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/versions/1_0/model" + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/versions/1_0/operationparser/patchvalidator" +) + +// ParseCreateOperation will parse create operation. 
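+// When batch is true, only the suffix data is validated; anchor-origin and delta
+// validation (including the delta-hash check and the commitment-reuse check) are
+// deferred to resolution time.
+//
+// A minimal usage sketch (requestBytes is illustrative):
+//
+//	op, err := parser.ParseCreateOperation(requestBytes, false)
+//	if err != nil { /* handle */ }
+//	_ = op.UniqueSuffix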
+func (p *Parser) ParseCreateOperation(request []byte, batch bool) (*model.Operation, error) { + schema, err := p.parseCreateRequest(request) + if err != nil { + return nil, err + } + + // create is not valid if suffix data is not valid + err = p.ValidateSuffixData(schema.SuffixData) + if err != nil { + return nil, err + } + + if !batch { + err = p.anchorOriginValidator.Validate(schema.SuffixData.AnchorOrigin) + if err != nil { + return nil, err + } + + err = p.ValidateDelta(schema.Delta) + if err != nil { + return nil, err + } + + // verify actual delta hash matches expected delta hash + err = hashing.IsValidModelMultihash(schema.Delta, schema.SuffixData.DeltaHash) + if err != nil { + return nil, fmt.Errorf("delta doesn't match suffix data delta hash: %s", err.Error()) + } + + if schema.Delta.UpdateCommitment == schema.SuffixData.RecoveryCommitment { + return nil, errors.New("recovery and update commitments cannot be equal, re-using public keys is not allowed") + } + } + + uniqueSuffix, err := model.GetUniqueSuffix(schema.SuffixData, p.MultihashAlgorithms) + if err != nil { + return nil, err + } + + return &model.Operation{ + OperationRequest: request, + Type: operation.TypeCreate, + UniqueSuffix: uniqueSuffix, + Delta: schema.Delta, + SuffixData: schema.SuffixData, + AnchorOrigin: schema.SuffixData.AnchorOrigin, + }, nil +} + +// parseCreateRequest parses a 'create' request. +func (p *Parser) parseCreateRequest(payload []byte) (*model.CreateRequest, error) { + schema := &model.CreateRequest{} + + err := json.Unmarshal(payload, schema) + if err != nil { + return nil, err + } + + return schema, nil +} + +// ValidateDelta validates delta. +func (p *Parser) ValidateDelta(delta *model.DeltaModel) error { + if delta == nil { + return errors.New("missing delta") + } + + if len(delta.Patches) == 0 { + return errors.New("missing patches") + } + + for _, ptch := range delta.Patches { + action, err := ptch.GetAction() + if err != nil { + return err + } + + if !p.isPatchEnabled(action) { + return fmt.Errorf("%s patch action is not enabled", action) + } + + if err := patchvalidator.Validate(ptch); err != nil { + return err + } + } + + if err := p.validateMultihash(delta.UpdateCommitment, "update commitment"); err != nil { + return err + } + + return p.validateDeltaSize(delta) +} + +func (p *Parser) validateMultihash(mh, alias string) error { + if len(mh) > int(p.MaxOperationHashLength) { + return fmt.Errorf("%s length[%d] exceeds maximum hash length[%d]", alias, len(mh), p.MaxOperationHashLength) + } + + if !hashing.IsComputedUsingMultihashAlgorithms(mh, p.MultihashAlgorithms) { + return fmt.Errorf("%s is not computed with the required hash algorithms: %d", alias, p.MultihashAlgorithms) + } + + return nil +} + +func (p *Parser) validateDeltaSize(delta *model.DeltaModel) error { + canonicalDelta, err := canonicalizer.MarshalCanonical(delta) + if err != nil { + return fmt.Errorf("marshal canonical for delta failed: %s", err.Error()) + } + + if len(canonicalDelta) > int(p.MaxDeltaSize) { + return fmt.Errorf("delta size[%d] exceeds maximum delta size[%d]", len(canonicalDelta), p.MaxDeltaSize) + } + + return nil +} + +func (p *Parser) isPatchEnabled(action patch.Action) bool { + for _, allowed := range p.Patches { + if patch.Action(allowed) == action { + return true + } + } + + return false +} + +// ValidateSuffixData validates suffix data. 
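+// Both the recovery commitment and the delta hash must be multihashes computed
+// with one of the protocol's configured algorithms, within the maximum hash length.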
+func (p *Parser) ValidateSuffixData(suffixData *model.SuffixDataModel) error { + if suffixData == nil { + return errors.New("missing suffix data") + } + + if err := p.validateMultihash(suffixData.RecoveryCommitment, "recovery commitment"); err != nil { + return err + } + + return p.validateMultihash(suffixData.DeltaHash, "delta hash") +} + +func (p *Parser) validateCreateRequest(create *model.CreateRequest) error { + if create.SuffixData == nil { + return errors.New("missing suffix data") + } + + return nil +} diff --git a/method/sidetreelongform/sidetree-core/versions/1_0/operationparser/create_test.go b/method/sidetreelongform/sidetree-core/versions/1_0/operationparser/create_test.go new file mode 100644 index 0000000..4107c9b --- /dev/null +++ b/method/sidetreelongform/sidetree-core/versions/1_0/operationparser/create_test.go @@ -0,0 +1,415 @@ +/* +Copyright SecureKey Technologies Inc. All Rights Reserved. + +SPDX-License-Identifier: Apache-2.0 +*/ + +package operationparser + +import ( + "encoding/json" + "testing" + + "github.com/stretchr/testify/require" + + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/api/operation" + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/api/protocol" + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/commitment" + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/encoder" + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/hashing" + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/jws" + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/patch" + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/versions/1_0/model" +) + +const ( + invalid = "invalid" +) + +func TestParseCreateOperation(t *testing.T) { + p := protocol.Protocol{ + MaxOperationHashLength: 100, + MaxDeltaSize: maxDeltaSize, + MultihashAlgorithms: []uint{sha2_256}, + Patches: []string{"replace", "add-public-keys", "remove-public-keys", "add-services", "remove-services", "ietf-json-patch"}, //nolint:lll + } + + parser := New(p) + + t.Run("success", func(t *testing.T) { + request, err := getCreateRequestBytes() + require.NoError(t, err) + + op, err := parser.ParseCreateOperation(request, false) + require.NoError(t, err) + require.Equal(t, operation.TypeCreate, op.Type) + }) + + t.Run("success - JCS", func(t *testing.T) { + op, err := parser.ParseCreateOperation([]byte(jcsRequest), true) + require.NoError(t, err) + require.Equal(t, operation.TypeCreate, op.Type) + }) + + t.Run("parse create request error", func(t *testing.T) { + schema, err := parser.ParseCreateOperation([]byte(""), true) + require.Error(t, err) + require.Nil(t, schema) + require.Contains(t, err.Error(), "unexpected end of JSON input") + }) + t.Run("missing suffix data", func(t *testing.T) { + create, err := getCreateRequest() + require.NoError(t, err) + create.SuffixData = nil + + request, err := json.Marshal(create) + require.NoError(t, err) + + op, err := parser.ParseCreateOperation(request, true) + require.Error(t, err) + require.Nil(t, op) + require.Contains(t, err.Error(), "missing suffix data") + }) + + t.Run("parse suffix data error", func(t *testing.T) { + create, err := getCreateRequest() + require.NoError(t, err) + + create.SuffixData = &model.SuffixDataModel{} + request, err := json.Marshal(create) + require.NoError(t, err) + + op, err := parser.ParseCreateOperation(request, true) + require.Error(t, err) + require.Contains(t, err.Error(), "recovery commitment is not 
computed with the required hash algorithms: [18]") + require.Nil(t, op) + }) + t.Run("missing delta", func(t *testing.T) { + create, err := getCreateRequest() + require.NoError(t, err) + create.Delta = nil + + request, err := json.Marshal(create) + require.NoError(t, err) + + op, err := parser.ParseCreateOperation(request, false) + require.Error(t, err) + require.Nil(t, op) + require.Contains(t, err.Error(), "missing delta") + }) + + t.Run("missing delta is ok in batch mode", func(t *testing.T) { + create, err := getCreateRequest() + require.NoError(t, err) + create.Delta = nil + + request, err := json.Marshal(create) + require.NoError(t, err) + + op, err := parser.ParseCreateOperation(request, true) + require.NoError(t, err) + require.NotNil(t, op) + require.Nil(t, op.Delta) + }) + + t.Run("parse patch data error", func(t *testing.T) { + create, err := getCreateRequest() + require.NoError(t, err) + + create.Delta = &model.DeltaModel{} + request, err := json.Marshal(create) + require.NoError(t, err) + + op, err := parser.ParseCreateOperation(request, false) + require.Error(t, err) + require.Contains(t, err.Error(), "missing patches") + require.Nil(t, op) + }) + + t.Run("delta doesn't match suffix data delta hash", func(t *testing.T) { + create, err := getCreateRequest() + require.NoError(t, err) + + delta, err := getDelta() + require.NoError(t, err) + delta.UpdateCommitment = computeMultihash([]byte("different")) + + create.Delta = delta + request, err := json.Marshal(create) + require.NoError(t, err) + + op, err := parser.ParseCreateOperation(request, false) + require.Error(t, err) + require.Contains(t, err.Error(), "delta doesn't match suffix data delta hash") + require.Nil(t, op) + }) + + t.Run("error - update commitment equals recovery commitment", func(t *testing.T) { + create, err := getCreateRequest() + require.NoError(t, err) + + create.SuffixData.RecoveryCommitment = create.Delta.UpdateCommitment + + request, err := json.Marshal(create) + require.NoError(t, err) + + op, err := parser.ParseCreateOperation(request, false) + require.Error(t, err) + require.Contains(t, err.Error(), + "recovery and update commitments cannot be equal, re-using public keys is not allowed") + require.Nil(t, op) + }) +} + +func TestValidateSuffixData(t *testing.T) { + p := protocol.Protocol{ + MaxOperationHashLength: maxHashLength, + MultihashAlgorithms: []uint{sha2_256}, + } + + parser := New(p) + + t.Run("success", func(t *testing.T) { + suffixData, err := getSuffixData() + require.NoError(t, err) + + err = parser.ValidateSuffixData(suffixData) + require.NoError(t, err) + }) + t.Run("invalid patch data hash", func(t *testing.T) { + suffixData, err := getSuffixData() + require.NoError(t, err) + + suffixData.DeltaHash = "" + err = parser.ValidateSuffixData(suffixData) + require.Error(t, err) + require.Contains(t, err.Error(), "delta hash is not computed with the required hash algorithms: [18]") + }) + t.Run("invalid next recovery commitment hash", func(t *testing.T) { + suffixData, err := getSuffixData() + require.NoError(t, err) + + suffixData.RecoveryCommitment = "" + err = parser.ValidateSuffixData(suffixData) + require.Error(t, err) + require.Contains(t, err.Error(), "recovery commitment is not computed with the required hash algorithms: [18]") + }) + t.Run("recovery commitment exceeds maximum hash length", func(t *testing.T) { + lowHashLength := protocol.Protocol{ + MaxOperationHashLength: 10, + MultihashAlgorithms: []uint{sha2_256}, + } + + suffixData, err := getSuffixData() + require.NoError(t, err) 
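+		// the generated commitment is 46 characters, well over the 10-character cap configured above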
+ + err = New(lowHashLength).ValidateSuffixData(suffixData) + require.Error(t, err) + require.Contains(t, err.Error(), "recovery commitment length[46] exceeds maximum hash length[10]") + }) +} + +func TestValidateDelta(t *testing.T) { + patches := []string{"add-public-keys", "remove-public-keys", "add-services", "remove-services", "ietf-json-patch"} + + p := protocol.Protocol{ + MaxOperationHashLength: maxHashLength, + MaxDeltaSize: maxDeltaSize, + MultihashAlgorithms: []uint{sha2_256}, + Patches: patches, + } + + parser := New(p) + + t.Run("success", func(t *testing.T) { + delta, err := getDelta() + require.NoError(t, err) + + err = parser.ValidateDelta(delta) + require.NoError(t, err) + }) + + t.Run("error - delta exceeds max delta size ", func(t *testing.T) { + parserWithLowMaxDeltaSize := New(protocol.Protocol{ + MaxOperationHashLength: maxHashLength, + MaxDeltaSize: 50, + MultihashAlgorithms: []uint{sha2_256}, + Patches: patches, + }) + + delta, err := getDelta() + require.NoError(t, err) + + err = parserWithLowMaxDeltaSize.ValidateDelta(delta) + require.Error(t, err) + require.Contains(t, err.Error(), "delta size[336] exceeds maximum delta size[50]") + }) + + t.Run("invalid next update commitment hash", func(t *testing.T) { + delta, err := getDelta() + require.NoError(t, err) + + delta.UpdateCommitment = "" + err = parser.ValidateDelta(delta) + require.Error(t, err) + require.Contains(t, err.Error(), + "update commitment is not computed with the required hash algorithms: [18]") + }) + + t.Run("update commitment exceeds maximum hash length", func(t *testing.T) { + lowMaxHashLength := protocol.Protocol{ + MaxOperationHashLength: 10, + MaxDeltaSize: 50, + MultihashAlgorithms: []uint{sha2_256}, + Patches: patches, + } + + delta, err := getDelta() + require.NoError(t, err) + + err = New(lowMaxHashLength).ValidateDelta(delta) + require.Error(t, err) + require.Contains(t, err.Error(), + "update commitment length[46] exceeds maximum hash length[10]") + }) + + t.Run("missing patches", func(t *testing.T) { + delta, err := getDelta() + require.NoError(t, err) + + delta.Patches = []patch.Patch{} + err = parser.ValidateDelta(delta) + require.Error(t, err) + require.Contains(t, err.Error(), + "missing patches") + }) + + t.Run("error - invalid delta", func(t *testing.T) { + err := parser.validateDeltaSize(nil) + require.Error(t, err) + require.Contains(t, err.Error(), "marshal canonical for delta failed") + }) +} + +func TestValidateCreateRequest(t *testing.T) { + p := protocol.Protocol{} + + parser := New(p) + + t.Run("success", func(t *testing.T) { + create, err := getCreateRequest() + require.NoError(t, err) + + err = parser.validateCreateRequest(create) + require.NoError(t, err) + }) + + t.Run("missing suffix data", func(t *testing.T) { + create, err := getCreateRequest() + require.NoError(t, err) + create.SuffixData = nil + + err = parser.validateCreateRequest(create) + require.Error(t, err) + require.Contains(t, err.Error(), "missing suffix data") + }) +} + +func getCreateRequest() (*model.CreateRequest, error) { + delta, err := getDelta() + if err != nil { + return nil, err + } + + suffixData, err := getSuffixData() + if err != nil { + return nil, err + } + + return &model.CreateRequest{ + Operation: operation.TypeCreate, + Delta: delta, + SuffixData: suffixData, + }, nil +} + +func getCreateRequestBytes() ([]byte, error) { + req, err := getCreateRequest() + if err != nil { + return nil, err + } + + return json.Marshal(req) +} + +func getDelta() (*model.DeltaModel, error) { + patches, err := 
patch.PatchesFromDocument(validDoc) + if err != nil { + return nil, err + } + + return &model.DeltaModel{ + Patches: patches, + UpdateCommitment: computeMultihash([]byte("updateReveal")), + }, nil +} + +func getSuffixData() (*model.SuffixDataModel, error) { + jwk := &jws.JWK{ + Kty: "kty", + Crv: "crv", + X: "x", + } + + recoveryCommitment, err := commitment.GetCommitment(jwk, sha2_256) + if err != nil { + return nil, err + } + + delta, err := getDelta() + if err != nil { + return nil, err + } + + deltaHash, err := hashing.CalculateModelMultihash(delta, sha2_256) + if err != nil { + return nil, err + } + + return &model.SuffixDataModel{ + DeltaHash: deltaHash, + RecoveryCommitment: recoveryCommitment, + }, nil +} + +func computeMultihash(data []byte) string { + mh, err := hashing.ComputeMultihash(sha2_256, data) + if err != nil { + panic(err) + } + + return encoder.EncodeToString(mh) +} + +const validDoc = `{ + "publicKey": [ + { + "id": "key1", + "type": "JsonWebKey2020", + "purposes": ["authentication"], + "publicKeyJwk": { + "kty": "EC", + "crv": "P-256K", + "x": "PUymIqdtF_qxaAqPABSw-C-owT1KYYQbsMKFM-L9fJA", + "y": "nM84jDHCMOTGTh_ZdHq4dBBdo4Z5PkEOW9jA8z8IsGc" + } + } + ] +}` + +// samples below are taken from reference implementation tests. + +//nolint:lll +const ( + jcsRequest = `{"delta":{"patches":[{"action":"replace","document":{"publicKeys":[{"id":"anySigningKeyId","publicKeyJwk":{"crv":"secp256k1","kty":"EC","x":"H61vqAm_-TC3OrFSqPrEfSfg422NR8QHPqr0mLx64DM","y":"s0WnWY87JriBjbyoY3FdUmifK7JJRLR65GtPthXeyuc"},"purposes":["authentication"],"type":"EcdsaSecp256k1VerificationKey2019"}],"services":[{"serviceEndpoint":"http://any.endpoint","id":"anyServiceEndpointId","type":"anyType"}]}}],"updateCommitment":"EiBMWE2JFaFipPdthcFiQek-SXTMi5IWIFXAN8hKFCyLJw"},"suffixData":{"deltaHash":"EiBP6gAOxx3YOL8PZPZG3medFgdqWSDayVX3u1W2f-IPEQ","recoveryCommitment":"EiBg8oqvU0Zq_H5BoqmWf0IrhetQ91wXc5fDPpIjB9wW5w"},"type":"create"}` +) diff --git a/method/sidetreelongform/sidetree-core/versions/1_0/operationparser/deactivate.go b/method/sidetreelongform/sidetree-core/versions/1_0/operationparser/deactivate.go new file mode 100644 index 0000000..cccf39d --- /dev/null +++ b/method/sidetreelongform/sidetree-core/versions/1_0/operationparser/deactivate.go @@ -0,0 +1,103 @@ +/* +Copyright SecureKey Technologies Inc. All Rights Reserved. + +SPDX-License-Identifier: Apache-2.0 +*/ + +package operationparser + +import ( + "encoding/json" + "errors" + "fmt" + + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/api/operation" + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/hashing" + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/versions/1_0/model" +) + +// ParseDeactivateOperation parses a deactivate operation.
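+// A deactivate request carries the DID suffix, a reveal value and compact JWS signed data. +// Sketch of the expected request shape (field names assumed from the request model; values illustrative only): +// { "type": "deactivate", "didSuffix": "EiB...", "revealValue": "EiB...", "signedData": "<compact JWS over {didSuffix, recoveryKey}>" }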
+func (p *Parser) ParseDeactivateOperation(request []byte, batch bool) (*model.Operation, error) { + schema, err := p.parseDeactivateRequest(request) + if err != nil { + return nil, err + } + + signedData, err := p.ParseSignedDataForDeactivate(schema.SignedData) + if err != nil { + return nil, err + } + + if signedData.DidSuffix != schema.DidSuffix { + return nil, errors.New("signed did suffix mismatch for deactivate") + } + + err = hashing.IsValidModelMultihash(signedData.RecoveryKey, schema.RevealValue) + if err != nil { + return nil, fmt.Errorf("canonicalized recovery public key hash doesn't match reveal value: %s", err.Error()) + } + + if !batch { + until := p.getAnchorUntil(signedData.AnchorFrom, signedData.AnchorUntil) + + if err := p.anchorTimeValidator.Validate(signedData.AnchorFrom, until); err != nil { + return nil, err + } + } + + return &model.Operation{ + Type: operation.TypeDeactivate, + OperationRequest: request, + UniqueSuffix: schema.DidSuffix, + SignedData: schema.SignedData, + RevealValue: schema.RevealValue, + }, nil +} + +func (p *Parser) parseDeactivateRequest(payload []byte) (*model.DeactivateRequest, error) { + schema := &model.DeactivateRequest{} + + err := json.Unmarshal(payload, schema) + if err != nil { + return nil, fmt.Errorf("failed to unmarshal deactivate request: %s", err.Error()) + } + + if err := p.validateDeactivateRequest(schema); err != nil { + return nil, err + } + + return schema, nil +} + +func (p *Parser) validateDeactivateRequest(req *model.DeactivateRequest) error { + if req.DidSuffix == "" { + return errors.New("missing did suffix") + } + + if req.SignedData == "" { + return errors.New("missing signed data") + } + + return p.validateMultihash(req.RevealValue, "reveal value") +} + +// ParseSignedDataForDeactivate will parse and validate signed data for deactivate. +func (p *Parser) ParseSignedDataForDeactivate(compactJWS string) (*model.DeactivateSignedDataModel, error) { + jws, err := p.parseSignedData(compactJWS) + if err != nil { + return nil, err + } + + signedData := &model.DeactivateSignedDataModel{} + + err = json.Unmarshal(jws.Payload, signedData) + if err != nil { + return nil, fmt.Errorf("failed to unmarshal signed data model for deactivate: %s", err.Error()) + } + + if err := p.validateSigningKey(signedData.RecoveryKey); err != nil { + return nil, fmt.Errorf("validate signed data for deactivate: %s", err.Error()) + } + + return signedData, nil +} diff --git a/method/sidetreelongform/sidetree-core/versions/1_0/operationparser/deactivate_test.go b/method/sidetreelongform/sidetree-core/versions/1_0/operationparser/deactivate_test.go new file mode 100644 index 0000000..8d8b900 --- /dev/null +++ b/method/sidetreelongform/sidetree-core/versions/1_0/operationparser/deactivate_test.go @@ -0,0 +1,246 @@ +/* +Copyright SecureKey Technologies Inc. All Rights Reserved. 
+ +SPDX-License-Identifier: Apache-2.0 +*/ + +package operationparser + +import ( + "encoding/json" + "testing" + "time" + + "github.com/stretchr/testify/require" + + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/api/operation" + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/api/protocol" + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/commitment" + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/hashing" + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/internal/signutil" + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/jws" + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/versions/1_0/model" +) + +const sha2_256 = 18 + +func TestParseDeactivateOperation(t *testing.T) { + p := protocol.Protocol{ + MultihashAlgorithms: []uint{sha2_256}, + MaxOperationHashLength: maxHashLength, + SignatureAlgorithms: []string{"alg"}, + KeyAlgorithms: []string{"crv"}, + MaxOperationTimeDelta: 5 * 60, + } + + parser := New(p) + + t.Run("success", func(t *testing.T) { + payload, err := getDeactivateRequestBytes() + require.NoError(t, err) + + op, err := parser.ParseDeactivateOperation(payload, false) + require.NoError(t, err) + require.Equal(t, operation.TypeDeactivate, op.Type) + + signedData, err := parser.ParseSignedDataForDeactivate(op.SignedData) + require.NoError(t, err) + + expectedRevealValue, err := commitment.GetRevealValue(signedData.RecoveryKey, sha2_256) + require.NoError(t, err) + + require.Equal(t, expectedRevealValue, op.RevealValue) + }) + + t.Run("success - anchor until default to anchor from + max operation time delta protocol param", func(t *testing.T) { + now := time.Now().Unix() + + signedData := &model.DeactivateSignedDataModel{ + DidSuffix: "did", + RecoveryKey: &jws.JWK{ + Kty: "kty", + Crv: "crv", + X: "x", + }, + AnchorFrom: now - 5*60, + } + + deactivateRequest, err := getDeactivateRequest(signedData) + require.NoError(t, err) + + reqBytes, err := json.Marshal(deactivateRequest) + require.NoError(t, err) + + op, err := parser.ParseDeactivateOperation(reqBytes, false) + require.NoError(t, err) + require.NotEmpty(t, op) + }) + + t.Run("missing unique suffix", func(t *testing.T) { + schema, err := parser.ParseDeactivateOperation([]byte("{}"), false) + require.Error(t, err) + require.Nil(t, schema) + require.Contains(t, err.Error(), "missing did suffix") + }) + t.Run("missing signed data", func(t *testing.T) { + op, err := parser.ParseDeactivateOperation([]byte(`{"didSuffix":"abc"}`), false) + require.Error(t, err) + require.Contains(t, err.Error(), "missing signed data") + require.Nil(t, op) + }) + t.Run("parse request", func(t *testing.T) { + request, err := json.Marshal("invalidJSON") + require.NoError(t, err) + + op, err := parser.ParseDeactivateOperation(request, false) + require.Error(t, err) + require.Contains(t, err.Error(), "cannot unmarshal string") + require.Nil(t, op) + }) + t.Run("parse signed data error - decoding failed", func(t *testing.T) { + deactivateRequest, err := getDefaultDeactivateRequest() + require.NoError(t, err) + + deactivateRequest.SignedData = "invalid" + request, err := json.Marshal(deactivateRequest) + require.NoError(t, err) + + op, err := parser.ParseDeactivateOperation(request, false) + require.Error(t, err) + require.Contains(t, err.Error(), "invalid JWS compact format") + require.Nil(t, op) + }) + t.Run("validate signed data error - did suffix mismatch", func(t *testing.T) { + signedData := 
getSignedDataForDeactivate() + signedData.DidSuffix = "different" + + recoverRequest, err := getDeactivateRequest(signedData) + require.NoError(t, err) + + request, err := json.Marshal(recoverRequest) + require.NoError(t, err) + + op, err := parser.ParseDeactivateOperation(request, false) + require.Error(t, err) + require.Contains(t, err.Error(), "signed did suffix mismatch for deactivate") + require.Nil(t, op) + }) + t.Run("parse signed data error - unmarshal signed data failed", func(t *testing.T) { + deactivateRequest, err := getDefaultDeactivateRequest() + require.NoError(t, err) + + compactJWS, err := signutil.SignPayload([]byte("payload"), NewMockSigner()) + require.NoError(t, err) + + deactivateRequest.SignedData = compactJWS + request, err := json.Marshal(deactivateRequest) + require.NoError(t, err) + + op, err := parser.ParseDeactivateOperation(request, false) + require.Error(t, err) + require.Contains(t, err.Error(), "failed to unmarshal signed data model for deactivate") + require.Nil(t, op) + }) + t.Run("error - key algorithm not supported", func(t *testing.T) { + p := protocol.Protocol{ + MultihashAlgorithms: []uint{sha2_256}, + MaxOperationHashLength: maxHashLength, + SignatureAlgorithms: []string{"alg"}, + KeyAlgorithms: []string{"other"}, + } + parser := New(p) + + request, err := getDeactivateRequestBytes() + require.NoError(t, err) + + op, err := parser.ParseDeactivateOperation(request, false) + require.Error(t, err) + require.Contains(t, err.Error(), + "validate signed data for deactivate: key algorithm 'crv' is not in the allowed list [other]") + require.Nil(t, op) + }) +} + +func TestValidateDeactivateRequest(t *testing.T) { + parser := New(protocol.Protocol{MaxOperationHashLength: maxHashLength, MultihashAlgorithms: []uint{sha2_256}}) + + t.Run("success", func(t *testing.T) { + deactivate, err := getDefaultDeactivateRequest() + require.NoError(t, err) + + err = parser.validateDeactivateRequest(deactivate) + require.NoError(t, err) + }) + t.Run("missing signed data", func(t *testing.T) { + deactivate, err := getDefaultDeactivateRequest() + require.NoError(t, err) + deactivate.SignedData = "" + + err = parser.validateDeactivateRequest(deactivate) + require.Error(t, err) + require.Contains(t, err.Error(), "missing signed data") + }) + t.Run("missing did suffix", func(t *testing.T) { + deactivate, err := getDefaultDeactivateRequest() + require.NoError(t, err) + deactivate.DidSuffix = "" + + err = parser.validateDeactivateRequest(deactivate) + require.Error(t, err) + require.Contains(t, err.Error(), "missing did suffix") + }) + + t.Run("invalid reveal value", func(t *testing.T) { + deactivate, err := getDefaultDeactivateRequest() + require.NoError(t, err) + deactivate.RevealValue = "invalid" + + err = parser.validateDeactivateRequest(deactivate) + require.Error(t, err) + require.Contains(t, err.Error(), + "reveal value is not computed with the required hash algorithms: [18]") + }) +} + +func getDeactivateRequest(signedData *model.DeactivateSignedDataModel) (*model.DeactivateRequest, error) { + compactJWS, err := signutil.SignModel(signedData, NewMockSigner()) + if err != nil { + return nil, err + } + + revealValue, err := hashing.CalculateModelMultihash(signedData.RecoveryKey, sha2_256) + if err != nil { + return nil, err + } + + return &model.DeactivateRequest{ + Operation: operation.TypeDeactivate, + DidSuffix: "did", + SignedData: compactJWS, + RevealValue: revealValue, + }, nil +} + +func getDefaultDeactivateRequest() (*model.DeactivateRequest, error) { + return 
getDeactivateRequest(getSignedDataForDeactivate()) +} + +func getSignedDataForDeactivate() *model.DeactivateSignedDataModel { + return &model.DeactivateSignedDataModel{ + DidSuffix: "did", + RecoveryKey: &jws.JWK{ + Kty: "kty", + Crv: "crv", + X: "x", + }, + } +} + +func getDeactivateRequestBytes() ([]byte, error) { + req, err := getDeactivateRequest(getSignedDataForDeactivate()) + if err != nil { + return nil, err + } + + return json.Marshal(req) +} diff --git a/method/sidetreelongform/sidetree-core/versions/1_0/operationparser/method.go b/method/sidetreelongform/sidetree-core/versions/1_0/operationparser/method.go new file mode 100644 index 0000000..7c14e80 --- /dev/null +++ b/method/sidetreelongform/sidetree-core/versions/1_0/operationparser/method.go @@ -0,0 +1,85 @@ +/* +Copyright SecureKey Technologies Inc. All Rights Reserved. + +SPDX-License-Identifier: Apache-2.0 +*/ + +package operationparser + +import ( + "encoding/json" + "errors" + "strings" + + "github.com/trustbloc/did-go/doc/json/canonicalizer" + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/api/operation" + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/encoder" + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/versions/1_0/model" +) + +const ( + longFormSeparator = ":" + didSeparator = ":" +) + +// ParseDID inspects the resolution request and returns: +// - did and create request in case of long form resolution +// - just did in case of short form resolution (common scenario). +func (p *Parser) ParseDID(namespace, shortOrLongFormDID string) (string, []byte, error) { + var err error + + withoutNamespace := strings.ReplaceAll(shortOrLongFormDID, namespace+didSeparator, "") + posLongFormSeparator := strings.Index(withoutNamespace, longFormSeparator) + + if posLongFormSeparator == -1 { + // this is a short form did + return shortOrLongFormDID, nil, nil + } + + // long form format: '<short-form-did>:Base64url(JCS({suffix-data, delta}))' + endOfDIDPos := strings.LastIndex(shortOrLongFormDID, longFormSeparator) + + did := shortOrLongFormDID[0:endOfDIDPos] + longFormDID := shortOrLongFormDID[endOfDIDPos+1:] + + createRequest, err := parseInitialState(longFormDID) + if err != nil { + return "", nil, err + } + + createRequestBytes, err := canonicalizer.MarshalCanonical(createRequest) + if err != nil { + return "", nil, err + } + + // return did and initial state + return did, createRequestBytes, nil +} + +// parseInitialState gets the create request from the encoded initial state value. +func parseInitialState(initialState string) (*model.CreateRequest, error) { + decodedJCS, err := encoder.DecodeString(initialState) + if err != nil { + return nil, err + } + + var createRequest model.CreateRequest + + err = json.Unmarshal(decodedJCS, &createRequest) + if err != nil { + return nil, err + } + + expected, err := canonicalizer.MarshalCanonical(createRequest) + if err != nil { + return nil, err + } + + if encoder.EncodeToString(expected) != initialState { + return nil, errors.New("initial state is not valid") + } + + createRequest.Operation = operation.TypeCreate + + return &createRequest, nil +} diff --git a/method/sidetreelongform/sidetree-core/versions/1_0/operationparser/method_test.go b/method/sidetreelongform/sidetree-core/versions/1_0/operationparser/method_test.go new file mode 100644 index 0000000..b000447 --- /dev/null +++ b/method/sidetreelongform/sidetree-core/versions/1_0/operationparser/method_test.go @@ -0,0 +1,109 @@ +/* +Copyright SecureKey Technologies Inc. All Rights Reserved.
+ +SPDX-License-Identifier: Apache-2.0 +*/ + +package operationparser + +import ( + "fmt" + "testing" + + "github.com/stretchr/testify/require" + + "github.com/trustbloc/did-go/doc/json/canonicalizer" + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/docutil" + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/encoder" + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/mocks" + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/versions/1_0/model" +) + +const ( + docNS = "doc:method" +) + +func TestParser_ParseDID(t *testing.T) { + p := mocks.NewMockProtocolClient() + + parser := New(p.Protocol) + + const testDID = "doc:method:abc" + + req := model.CreateRequest{ + Delta: &model.DeltaModel{}, + SuffixData: &model.SuffixDataModel{}, + } + + reqBytes, err := canonicalizer.MarshalCanonical(req) + require.NoError(t, err) + fmt.Println(string(reqBytes)) + + initialState := encoder.EncodeToString(reqBytes) + + t.Run("success - just did, no initial state value", func(t *testing.T) { + did, initial, err := parser.ParseDID(docNS, testDID) + require.NoError(t, err) + require.Equal(t, testDID, did) + require.Empty(t, initial) + }) + + t.Run("success - did with dot in namespace", func(t *testing.T) { + namespaceWithDot := "did:bloc:trustbloc.dev" + didWithDot := namespaceWithDot + docutil.NamespaceDelimiter + "EiB2gB7F-aDjg8qPsTuZfVqWkJtIWXn4nObHSgtZ1IzMaQ" + + did, initial, err := parser.ParseDID(namespaceWithDot, didWithDot) + require.NoError(t, err) + require.Equal(t, didWithDot, did) + require.Nil(t, initial) + }) + + t.Run("success - did with initial state JCS", func(t *testing.T) { + did, initial, err := parser.ParseDID(docNS, testDID+longFormSeparator+initialState) + + require.NoError(t, err) + require.Equal(t, testDID, did) + require.Equal(t, `{"delta":{},"suffixData":{},"type":"create"}`, string(initial)) + }) + + t.Run("success - did with dot in namespace and initial state", func(t *testing.T) { + namespaceWithDot := "did:bloc:trustbloc.dev" + didWithDot := namespaceWithDot + docutil.NamespaceDelimiter + "EiB2gB7F-aDjg8qPsTuZfVqWkJtIWXn4nObHSgtZ1IzMaQ" + + didWithDotWithInitialState := didWithDot + longFormSeparator + initialState + did, initial, err := parser.ParseDID(namespaceWithDot, didWithDotWithInitialState) + require.NoError(t, err) + require.Equal(t, didWithDot, did) + require.Equal(t, `{"delta":{},"suffixData":{},"type":"create"}`, string(initial)) + }) + + t.Run("error - initial state not encoded", func(t *testing.T) { + notEncoded := "not encoded" + + did, initial, err := parser.ParseDID(namespace, testDID+longFormSeparator+notEncoded) + require.Error(t, err) + require.Empty(t, did) + require.Nil(t, initial) + require.Contains(t, err.Error(), "illegal base64 data") + }) + + t.Run("error - initial state not JSON", func(t *testing.T) { + invalidJCS := encoder.EncodeToString([]byte(`not JSON`)) + + did, initial, err := parser.ParseDID(docNS, testDID+longFormSeparator+invalidJCS) + require.Error(t, err) + require.Empty(t, did) + require.Nil(t, initial) + require.Contains(t, err.Error(), "invalid character") + }) + + t.Run("error - initial state not expected JCS", func(t *testing.T) { + unexpectedJCS := encoder.EncodeToString([]byte(`{"key":"value"}`)) + + did, initial, err := parser.ParseDID(docNS, testDID+longFormSeparator+unexpectedJCS) + require.Error(t, err) + require.Empty(t, did) + require.Nil(t, initial) + require.Contains(t, err.Error(), "initial state is not valid") + }) +} diff --git 
a/method/sidetreelongform/sidetree-core/versions/1_0/operationparser/operation.go b/method/sidetreelongform/sidetree-core/versions/1_0/operationparser/operation.go new file mode 100644 index 0000000..b67d646 --- /dev/null +++ b/method/sidetreelongform/sidetree-core/versions/1_0/operationparser/operation.go @@ -0,0 +1,176 @@ +/* +Copyright SecureKey Technologies Inc. All Rights Reserved. + +SPDX-License-Identifier: Apache-2.0 +*/ + +package operationparser + +import ( + "encoding/json" + "errors" + "fmt" + + "github.com/trustbloc/logutil-go/pkg/log" + + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/api/operation" + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/api/protocol" + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/docutil" + logfields "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/internal/log" + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/versions/1_0/model" +) + +var logger = log.New("sidetree-core-parser") + +// Parser is an operation parser. +type Parser struct { + protocol.Protocol + anchorOriginValidator ObjectValidator + anchorTimeValidator TimeValidator +} + +// New returns a new operation parser. + +//nolint:gocritic +func New(p protocol.Protocol, opts ...Option) *Parser { + parser := &Parser{ + Protocol: p, + } + + // default anchor origin validator + parser.anchorOriginValidator = &objectValidator{} + + // default anchor time validator + parser.anchorTimeValidator = &timeValidator{} + + // apply options + for _, opt := range opts { + opt(parser) + } + + return parser +} + +// ObjectValidator validates an object. It is currently used for anchor origin validation; +// however, it can be used for any object validation. +type ObjectValidator interface { + Validate(obj interface{}) error +} + +// Option is a parser instance option. +type Option func(opts *Parser) + +// WithAnchorOriginValidator sets optional anchor origin validator. +func WithAnchorOriginValidator(v ObjectValidator) Option { + return func(opts *Parser) { + if v != nil { + opts.anchorOriginValidator = v + } + } +} + +// ErrOperationExpired is returned if the anchor until time is less than the reference time (e.g. server time or anchoring time). +var ErrOperationExpired = errors.New("operation expired") + +// ErrOperationEarly is returned if the anchor from time is greater than the reference time (e.g. server time or anchoring time). +var ErrOperationEarly = errors.New("operation early") + +// TimeValidator validates earliest and expiry time for an operation against server time. +type TimeValidator interface { + Validate(from, until int64) error +} + +// WithAnchorTimeValidator sets optional anchor time validator. +func WithAnchorTimeValidator(v TimeValidator) Option { + return func(opts *Parser) { + if v != nil { + opts.anchorTimeValidator = v + } + } +} + +// Parse parses and validates operation. +func (p *Parser) Parse(namespace string, operationBuffer []byte) (*operation.Operation, error) { + // parse and validate the operation buffer using this version's model and validation rules + internal, err := p.ParseOperation(namespace, operationBuffer, false) + if err != nil { + return nil, err + } + + return &operation.Operation{ + Type: internal.Type, + UniqueSuffix: internal.UniqueSuffix, + ID: internal.ID, + OperationRequest: operationBuffer, + }, nil +} + +// ParseOperation parses and validates an operation. The batch mode flag gives hints for the validation of +// the operation object (anticipating future pruning/checkpoint requirements).
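+// A minimal, illustrative call sequence (assuming a configured protocol instance p): +// +// parser := New(p) +// op, err := parser.ParseOperation("did:sidetree", operationBuffer, false)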
+func (p *Parser) ParseOperation(namespace string, operationBuffer []byte, batch bool) (*model.Operation, error) { + // check maximum operation size against protocol before parsing + if len(operationBuffer) > int(p.MaxOperationSize) { + return nil, fmt.Errorf("operation size[%d] exceeds maximum operation size[%d]", + len(operationBuffer), int(p.MaxOperationSize)) + } + + schema := &operationSchema{} + + err := json.Unmarshal(operationBuffer, schema) + if err != nil { + return nil, fmt.Errorf("failed to unmarshal operation buffer into operation schema: %s", err.Error()) + } + + var ( + op *model.Operation + parseErr error + ) + + switch schema.Operation { + case operation.TypeCreate: + op, parseErr = p.ParseCreateOperation(operationBuffer, batch) + case operation.TypeUpdate: + op, parseErr = p.ParseUpdateOperation(operationBuffer, batch) + case operation.TypeDeactivate: + op, parseErr = p.ParseDeactivateOperation(operationBuffer, batch) + case operation.TypeRecover: + op, parseErr = p.ParseRecoverOperation(operationBuffer, batch) + default: + return nil, fmt.Errorf("parse operation: operation type [%s] not supported", schema.Operation) + } + + if parseErr != nil { + logger.Warn("Error parsing operation for batch", logfields.WithOperation(schema.Operation), + logfields.WithIsBatch(batch), log.WithError(parseErr)) + + return nil, parseErr + } + + op.Namespace = namespace + op.ID = namespace + docutil.NamespaceDelimiter + op.UniqueSuffix + + return op, nil +} + +// operationSchema is used to get operation type. +type operationSchema struct { + + // operation + Operation operation.Type `json:"type"` +} + +type objectValidator struct { +} + +func (ov *objectValidator) Validate(_ interface{}) error { + // default validator allows any anchor origin + return nil +} + +type timeValidator struct { +} + +func (tv *timeValidator) Validate(_, _ int64) error { + // default time validator allows any anchor time + return nil +} diff --git a/method/sidetreelongform/sidetree-core/versions/1_0/operationparser/operation_test.go b/method/sidetreelongform/sidetree-core/versions/1_0/operationparser/operation_test.go new file mode 100644 index 0000000..1879738 --- /dev/null +++ b/method/sidetreelongform/sidetree-core/versions/1_0/operationparser/operation_test.go @@ -0,0 +1,255 @@ +/* +Copyright SecureKey Technologies Inc. All Rights Reserved. 
+ +SPDX-License-Identifier: Apache-2.0 +*/ + +package operationparser + +import ( + "encoding/json" + "errors" + "testing" + "time" + + "github.com/stretchr/testify/require" + + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/api/protocol" +) + +const ( + namespace = "did:sidetree" + + maxOperationSize = 2000 + maxHashLength = 100 + maxDeltaSize = 1000 +) + +func TestNewParser(t *testing.T) { + p := protocol.Protocol{} + + parser := New(p) + require.NotNil(t, parser) + require.NotNil(t, parser.anchorOriginValidator) + + // validator cannot be set to nil (default validator will kick in) + parser = New(p, WithAnchorOriginValidator(nil)) + require.NotNil(t, parser) + require.NotNil(t, parser.anchorOriginValidator) + + // supply custom validator + ov := &mockObjectValidator{} + + parser = New(p, WithAnchorOriginValidator(ov)) + require.NotNil(t, parser) + require.Equal(t, ov, parser.anchorOriginValidator) + + // custom anchor time validator + tv := &mockTimeValidator{} + + parser = New(p, WithAnchorTimeValidator(tv)) + require.NotNil(t, parser) + require.Equal(t, tv, parser.anchorTimeValidator) +} + +func TestGetOperation(t *testing.T) { + p := protocol.Protocol{ + MaxOperationSize: maxOperationSize, + MaxOperationHashLength: maxHashLength, + MaxDeltaSize: maxDeltaSize, + MultihashAlgorithms: []uint{sha2_256}, + SignatureAlgorithms: []string{"alg"}, + KeyAlgorithms: []string{"crv"}, + Patches: []string{"add-public-keys", "remove-public-keys", "add-services", "remove-services", "ietf-json-patch"}, //nolint:lll + } + + parser := New(p) + + t.Run("create", func(t *testing.T) { + operation, err := getCreateRequestBytes() + require.NoError(t, err) + + op, err := parser.Parse(namespace, operation) + require.NoError(t, err) + require.NotNil(t, op) + }) + t.Run("update", func(t *testing.T) { + operation, err := getUpdateRequestBytes() + require.NoError(t, err) + + op, err := parser.Parse(namespace, operation) + require.NoError(t, err) + require.NotNil(t, op) + }) + t.Run("deactivate", func(t *testing.T) { + operation, err := getDeactivateRequestBytes() + require.NoError(t, err) + + op, err := parser.Parse(namespace, operation) + require.NoError(t, err) + require.NotNil(t, op) + }) + t.Run("recover", func(t *testing.T) { + operation, err := getRecoverRequestBytes() + require.NoError(t, err) + + op, err := parser.Parse(namespace, operation) + require.NoError(t, err) + require.NotNil(t, op) + }) + t.Run("operation parsing error - anchor origin validator error (create)", func(t *testing.T) { + operation, err := getCreateRequestBytes() + require.NoError(t, err) + + testErr := errors.New("validation error") + parserWithErr := New(p, WithAnchorOriginValidator(&mockObjectValidator{Err: testErr})) + + op, err := parserWithErr.Parse(namespace, operation) + require.Error(t, err) + require.Nil(t, op) + require.Contains(t, err.Error(), testErr.Error()) + }) + t.Run("operation parsing error - anchor origin validator error (recover)", func(t *testing.T) { + operation, err := getRecoverRequestBytes() + require.NoError(t, err) + + testErr := errors.New("validation error") + parserWithErr := New(p, WithAnchorOriginValidator(&mockObjectValidator{Err: testErr})) + + op, err := parserWithErr.Parse(namespace, operation) + require.Error(t, err) + require.Nil(t, op) + require.Contains(t, err.Error(), testErr.Error()) + }) + t.Run("operation parsing error - anchor time validator error (update)", func(t *testing.T) { + operation, err := getUpdateRequestBytes() + require.NoError(t, err) + + testErr := 
errors.New("anchor time validation error") + parserWithErr := New(p, WithAnchorTimeValidator(&mockTimeValidator{Err: testErr})) + + op, err := parserWithErr.Parse(namespace, operation) + require.Error(t, err) + require.Nil(t, op) + require.Contains(t, err.Error(), testErr.Error()) + }) + t.Run("operation parsing error - anchor time validator error (deactivate)", func(t *testing.T) { + operation, err := getDeactivateRequestBytes() + require.NoError(t, err) + + testErr := errors.New("anchor time validation error") + parserWithErr := New(p, WithAnchorTimeValidator(&mockTimeValidator{Err: testErr})) + + op, err := parserWithErr.Parse(namespace, operation) + require.Error(t, err) + require.Nil(t, op) + require.Contains(t, err.Error(), testErr.Error()) + }) + t.Run("operation parsing error - anchor time validator error (recover)", func(t *testing.T) { + operation, err := getRecoverRequestBytes() + require.NoError(t, err) + + testErr := errors.New("anchor time validation error") + parserWithErr := New(p, WithAnchorTimeValidator(&mockTimeValidator{Err: testErr})) + + op, err := parserWithErr.Parse(namespace, operation) + require.Error(t, err) + require.Nil(t, op) + require.Contains(t, err.Error(), testErr.Error()) + }) + + t.Run("operation parsing error - exceeds max operation size", func(t *testing.T) { + // set-up invalid hash algorithm in protocol configuration + invalid := protocol.Protocol{ + MaxOperationSize: 20, + MaxDeltaSize: maxDeltaSize, + } + + operation, err := getRecoverRequestBytes() + require.NoError(t, err) + + op, err := New(invalid).Parse(namespace, operation) + require.Error(t, err) + require.Contains(t, err.Error(), "operation size[761] exceeds maximum operation size[20]") + require.Nil(t, op) + }) + t.Run("operation parsing error", func(t *testing.T) { + // set-up invalid hash algorithm in protocol configuration + invalid := protocol.Protocol{ + SignatureAlgorithms: []string{"not-used"}, + MaxOperationSize: maxOperationSize, + MaxDeltaSize: maxDeltaSize, + MaxOperationHashLength: maxHashLength, + MultihashAlgorithms: []uint{sha2_256}, + } + + operation, err := getRecoverRequestBytes() + require.NoError(t, err) + + op, err := New(invalid).Parse(namespace, operation) + require.Error(t, err) + require.Contains(t, err.Error(), "failed to parse signed data: algorithm 'alg' is not in the allowed list [not-used]") + require.Nil(t, op) + }) + t.Run("unsupported operation type error", func(t *testing.T) { + operation := getUnsupportedRequest() + op, err := parser.Parse(namespace, operation) + require.Error(t, err) + require.Contains(t, err.Error(), "parse operation: operation type [unsupported] not supported") + require.Nil(t, op) + }) + t.Run("unmarshal request error - not JSON", func(t *testing.T) { + op, err := parser.Parse(namespace, []byte("operation")) + require.Error(t, err) + require.Contains(t, err.Error(), "failed to unmarshal operation buffer into operation schema") + require.Nil(t, op) + }) +} + +func getUnsupportedRequest() []byte { + schema := &operationSchema{ + Operation: "unsupported", + } + + payload, err := json.Marshal(schema) + if err != nil { + panic(err) + } + + return payload +} + +type mockObjectValidator struct { + Err error +} + +func (mov *mockObjectValidator) Validate(_ interface{}) error { + return mov.Err +} + +type mockTimeValidator struct { + Err error +} + +func (mtv *mockTimeValidator) Validate(from, until int64) error { + if mtv.Err != nil { + return mtv.Err + } + + if from == 0 && until == 0 { + // from and until are not specified - no error + 
return nil + } + + serverTime := time.Now().Unix() + + if from >= serverTime { + return ErrOperationEarly + } + + if until <= serverTime { + return ErrOperationExpired + } + + return nil +} diff --git a/method/sidetreelongform/sidetree-core/versions/1_0/operationparser/patchvalidator/addkeys.go b/method/sidetreelongform/sidetree-core/versions/1_0/operationparser/patchvalidator/addkeys.go new file mode 100644 index 0000000..e2da8a3 --- /dev/null +++ b/method/sidetreelongform/sidetree-core/versions/1_0/operationparser/patchvalidator/addkeys.go @@ -0,0 +1,40 @@ +/* +Copyright SecureKey Technologies Inc. All Rights Reserved. + +SPDX-License-Identifier: Apache-2.0 +*/ + +package patchvalidator + +import ( + "fmt" + + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/document" + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/patch" +) + +// NewAddPublicKeysValidator creates new validator. +func NewAddPublicKeysValidator() *AddPublicKeysValidator { + return &AddPublicKeysValidator{} +} + +// AddPublicKeysValidator implements validator for "add-public-keys" patch. +type AddPublicKeysValidator struct { +} + +// Validate validates patch. +func (v *AddPublicKeysValidator) Validate(p patch.Patch) error { + value, err := p.GetValue() + if err != nil { + return err + } + + _, err = getRequiredArray(value) + if err != nil { + return fmt.Errorf("invalid add public keys value: %s", err.Error()) + } + + publicKeys := document.ParsePublicKeys(value) + + return validatePublicKeys(publicKeys) +} diff --git a/method/sidetreelongform/sidetree-core/versions/1_0/operationparser/patchvalidator/addkeys_test.go b/method/sidetreelongform/sidetree-core/versions/1_0/operationparser/patchvalidator/addkeys_test.go new file mode 100644 index 0000000..b5c36c5 --- /dev/null +++ b/method/sidetreelongform/sidetree-core/versions/1_0/operationparser/patchvalidator/addkeys_test.go @@ -0,0 +1,58 @@ +/* +Copyright Gen Digital Inc. All Rights Reserved. 
+ +SPDX-License-Identifier: Apache-2.0 +*/ + +package patchvalidator + +import ( + "testing" + + "github.com/stretchr/testify/require" + + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/patch" +) + +func TestAddPublicKeysPatch(t *testing.T) { + t.Run("success", func(t *testing.T) { + p, err := patch.FromBytes([]byte(addPublicKeysPatch)) + require.NoError(t, err) + + err = NewAddPublicKeysValidator().Validate(p) + require.NoError(t, err) + }) + t.Run("error - missing value", func(t *testing.T) { + p, err := patch.FromBytes([]byte(addPublicKeysPatch)) + require.NoError(t, err) + + delete(p, patch.PublicKeys) + err = NewAddPublicKeysValidator().Validate(p) + require.Error(t, err) + require.Contains(t, err.Error(), "add-public-keys patch is missing key: publicKeys") + }) + t.Run("error - invalid value for public keys", func(t *testing.T) { + p, err := patch.FromBytes([]byte(addPublicKeysPatch)) + require.NoError(t, err) + + p[patch.PublicKeys] = "" + err = NewAddPublicKeysValidator().Validate(p) + require.Error(t, err) + require.Contains(t, err.Error(), "invalid add public keys value: expected array of interfaces") + }) +} + +const addPublicKeysPatch = `{ + "action": "add-public-keys", + "publicKeys": [{ + "id": "key1", + "type": "JsonWebKey2020", + "purposes": ["assertionMethod"], + "publicKeyJwk": { + "kty": "EC", + "crv": "P-256K", + "x": "PUymIqdtF_qxaAqPABSw-C-owT1KYYQbsMKFM-L9fJA", + "y": "nM84jDHCMOTGTh_ZdHq4dBBdo4Z5PkEOW9jA8z8IsGc" + } + }] +}` diff --git a/method/sidetreelongform/sidetree-core/versions/1_0/operationparser/patchvalidator/addservices.go b/method/sidetreelongform/sidetree-core/versions/1_0/operationparser/patchvalidator/addservices.go new file mode 100644 index 0000000..994492c --- /dev/null +++ b/method/sidetreelongform/sidetree-core/versions/1_0/operationparser/patchvalidator/addservices.go @@ -0,0 +1,40 @@ +/* +Copyright SecureKey Technologies Inc. All Rights Reserved. + +SPDX-License-Identifier: Apache-2.0 +*/ + +package patchvalidator + +import ( + "fmt" + + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/document" + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/patch" +) + +// NewAddServicesValidator creates new validator. +func NewAddServicesValidator() *AddServicesValidator { + return &AddServicesValidator{} +} + +// AddServicesValidator implements validator for "add-services" patch. +type AddServicesValidator struct { +} + +// Validate validates patch. +func (v *AddServicesValidator) Validate(p patch.Patch) error { + value, err := p.GetValue() + if err != nil { + return err + } + + _, err = getRequiredArray(value) + if err != nil { + return fmt.Errorf("invalid add services value: %s", err.Error()) + } + + services := document.ParseServices(value) + + return validateServices(services) +} diff --git a/method/sidetreelongform/sidetree-core/versions/1_0/operationparser/patchvalidator/addservices_test.go b/method/sidetreelongform/sidetree-core/versions/1_0/operationparser/patchvalidator/addservices_test.go new file mode 100644 index 0000000..f5f49c8 --- /dev/null +++ b/method/sidetreelongform/sidetree-core/versions/1_0/operationparser/patchvalidator/addservices_test.go @@ -0,0 +1,66 @@ +/* +Copyright Gen Digital Inc. All Rights Reserved.
+ +SPDX-License-Identifier: Apache-2.0 +*/ + +package patchvalidator + +import ( + "testing" + + "github.com/stretchr/testify/require" + + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/patch" +) + +func TestAddServiceEndpointsPatch(t *testing.T) { + t.Run("success", func(t *testing.T) { + p, err := patch.FromBytes([]byte(addServiceEndpoints)) + require.NoError(t, err) + + err = NewAddServicesValidator().Validate(p) + require.NoError(t, err) + }) + t.Run("missing service endpoints", func(t *testing.T) { + p, err := patch.FromBytes([]byte(addServiceEndpoints)) + require.NoError(t, err) + + delete(p, patch.ServicesKey) + err = NewAddServicesValidator().Validate(p) + require.Error(t, err) + require.Contains(t, err.Error(), "add-services patch is missing key: services") + }) + t.Run("error - service is missing id", func(t *testing.T) { + p, err := patch.NewAddServiceEndpointsPatch(testAddServiceEndpointsMissingID) + require.NoError(t, err) + + err = NewAddServicesValidator().Validate(p) + require.Error(t, err) + require.Contains(t, err.Error(), "service id is missing") + }) +} + +const addServiceEndpoints = `{ + "action": "add-services", + "services": [ + { + "id": "sds1", + "type": "SecureDataStore", + "serviceEndpoint": "http://hub.my-personal-server.com" + }, + { + "id": "sds2", + "type": "SecureDataStore", + "serviceEndpoint": "http://some-cloud.com/hub" + } + ] +}` + +const testAddServiceEndpointsMissingID = `[ + { + "id": "", + "type": "SecureDataStore", + "serviceEndpoint": "http://some-cloud.com/hub" + } + ]` diff --git a/method/sidetreelongform/sidetree-core/versions/1_0/operationparser/patchvalidator/alsoknownas.go b/method/sidetreelongform/sidetree-core/versions/1_0/operationparser/patchvalidator/alsoknownas.go new file mode 100644 index 0000000..cb80ee1 --- /dev/null +++ b/method/sidetreelongform/sidetree-core/versions/1_0/operationparser/patchvalidator/alsoknownas.go @@ -0,0 +1,71 @@ +/* +Copyright SecureKey Technologies Inc. All Rights Reserved. + +SPDX-License-Identifier: Apache-2.0 +*/ + +package patchvalidator + +import ( + "fmt" + "net/url" + + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/document" + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/patch" +) + +// NewAlsoKnownAsValidator creates new validator. +func NewAlsoKnownAsValidator() *AlsoKnownAsValidator { + return &AlsoKnownAsValidator{} +} + +// AlsoKnownAsValidator implements validator for "add-also-known-as" and "remove-also-known-as" patches. +// Both patches take URIs as values, so the validation for add and remove is the same. +type AlsoKnownAsValidator struct { +} + +// Validate validates patch. +func (v *AlsoKnownAsValidator) Validate(p patch.Patch) error { + action, err := p.GetAction() + if err != nil { + return err + } + + value, err := p.GetValue() + if err != nil { + return fmt.Errorf("%s", err) + } + + _, err = getRequiredArray(value) + if err != nil { + return fmt.Errorf("%s: %w", action, err) + } + + uris := document.StringArray(value) + + if err := validate(uris); err != nil { + return fmt.Errorf("%s: validate URIs: %w", action, err) + } + + return nil +} + +// validate validates URIs, rejecting unparsable and duplicate entries.
+func validate(uris []string) error { + ids := make(map[string]bool) + + for _, uri := range uris { + u, err := url.Parse(uri) + if err != nil { + return fmt.Errorf("failed to parse URI: %w", err) + } + + if _, ok := ids[u.String()]; ok { + return fmt.Errorf("duplicate uri: %s", u.String()) + } + + ids[u.String()] = true + } + + return nil +} diff --git a/method/sidetreelongform/sidetree-core/versions/1_0/operationparser/patchvalidator/alsoknownas_test.go b/method/sidetreelongform/sidetree-core/versions/1_0/operationparser/patchvalidator/alsoknownas_test.go new file mode 100644 index 0000000..ea56910 --- /dev/null +++ b/method/sidetreelongform/sidetree-core/versions/1_0/operationparser/patchvalidator/alsoknownas_test.go @@ -0,0 +1,78 @@ +/* +Copyright Gen Digital Inc. All Rights Reserved. + +SPDX-License-Identifier: Apache-2.0 +*/ + +package patchvalidator + +import ( + "testing" + + "github.com/stretchr/testify/require" + + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/patch" +) + +func TestAddAlsoKnowAsValidator(t *testing.T) { + t.Run("success", func(t *testing.T) { + p, err := patch.FromBytes([]byte(addAlsoKnownAs)) + require.NoError(t, err) + + err = NewAlsoKnownAsValidator().Validate(p) + require.NoError(t, err) + }) + t.Run("error - missing action", func(t *testing.T) { + p, err := patch.FromBytes([]byte(addAlsoKnownAs)) + require.NoError(t, err) + + delete(p, patch.ActionKey) + err = NewAlsoKnownAsValidator().Validate(p) + require.Error(t, err) + require.Contains(t, err.Error(), "patch is missing action key") + }) + t.Run("error - missing uris", func(t *testing.T) { + p, err := patch.FromBytes([]byte(addAlsoKnownAs)) + require.NoError(t, err) + + delete(p, patch.UrisKey) + err = NewAlsoKnownAsValidator().Validate(p) + require.Error(t, err) + require.Contains(t, err.Error(), "add-also-known-as patch is missing key: uris") + }) + t.Run("error - uris value is not expected type", func(t *testing.T) { + p, err := patch.FromBytes([]byte(addAlsoKnownAs)) + require.NoError(t, err) + + p[patch.UrisKey] = []int{123} + err = NewAlsoKnownAsValidator().Validate(p) + require.Error(t, err) + require.Contains(t, err.Error(), "add-also-known-as: expected array of interfaces") + }) + t.Run("error - uri is not valid", func(t *testing.T) { + p, err := patch.NewAddAlsoKnownAs(`[":abc"]`) + require.NoError(t, err) + + err = NewAlsoKnownAsValidator().Validate(p) + require.Error(t, err) + require.Contains(t, err.Error(), "add-also-known-as: validate URIs: failed to parse URI:") + }) + t.Run("error - duplicate URI", func(t *testing.T) { + p, err := patch.NewAddAlsoKnownAs(`["https://abc.com", "https://abc.com"]`) + require.NoError(t, err) + + err = NewAlsoKnownAsValidator().Validate(p) + require.Error(t, err) + require.Contains(t, err.Error(), "add-also-known-as: validate URIs: duplicate uri: https://abc.com") + }) +} + +const addAlsoKnownAs = `{ + "action": "add-also-known-as", + "uris": ["did:abc:123", "https://other.com"] +}` + +const removeAlsoKnownAs = `{ + "action": "remove-also-known-as", + "uris": ["did:abc:123"] +}` diff --git a/method/sidetreelongform/sidetree-core/versions/1_0/operationparser/patchvalidator/document.go b/method/sidetreelongform/sidetree-core/versions/1_0/operationparser/patchvalidator/document.go new file mode 100644 index 0000000..703c83f --- /dev/null +++ b/method/sidetreelongform/sidetree-core/versions/1_0/operationparser/patchvalidator/document.go @@ -0,0 +1,385 @@ +/* +Copyright SecureKey Technologies Inc. All Rights Reserved. 
+ +SPDX-License-Identifier: Apache-2.0 +*/ + +package patchvalidator + +import ( + "errors" + "fmt" + "net/url" + "regexp" + "strings" + + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/document" +) + +//nolint:gochecknoglobals +var ( + asciiRegex = regexp.MustCompile("^[A-Za-z0-9_-]+$") +) + +const ( + bls12381G2Key2020 = "Bls12381G2Key2020" + jsonWebKey2020 = "JsonWebKey2020" + ecdsaSecp256k1VerificationKey2019 = "EcdsaSecp256k1VerificationKey2019" + x25519KeyAgreementKey2019 = "X25519KeyAgreementKey2019" + ed25519VerificationKey2018 = "Ed25519VerificationKey2018" + ed25519VerificationKey2020 = "Ed25519VerificationKey2020" + + // public keys, services id length. + maxIDLength = 50 + + maxServiceTypeLength = 30 +) + +//nolint:gochecknoglobals +var allowedPurposes = map[document.KeyPurpose]bool{ + document.KeyPurposeAuthentication: true, + document.KeyPurposeAssertionMethod: true, + document.KeyPurposeKeyAgreement: true, + document.KeyPurposeCapabilityDelegation: true, + document.KeyPurposeCapabilityInvocation: true, +} + +type existenceMap map[string]string + +//nolint:gochecknoglobals +var allowedKeyTypesGeneral = existenceMap{ + bls12381G2Key2020: bls12381G2Key2020, + jsonWebKey2020: jsonWebKey2020, + ecdsaSecp256k1VerificationKey2019: ecdsaSecp256k1VerificationKey2019, + ed25519VerificationKey2018: ed25519VerificationKey2018, + ed25519VerificationKey2020: ed25519VerificationKey2020, + x25519KeyAgreementKey2019: x25519KeyAgreementKey2019, +} + +//nolint:gochecknoglobals +var allowedKeyTypesVerification = existenceMap{ + bls12381G2Key2020: bls12381G2Key2020, + jsonWebKey2020: jsonWebKey2020, + ecdsaSecp256k1VerificationKey2019: ecdsaSecp256k1VerificationKey2019, + ed25519VerificationKey2018: ed25519VerificationKey2018, + ed25519VerificationKey2020: ed25519VerificationKey2020, +} + +//nolint:gochecknoglobals +var allowedKeyTypesAgreement = existenceMap{ + // TODO: Verify appropriate agreement key types for JWS and Secp256k1 + bls12381G2Key2020: bls12381G2Key2020, + jsonWebKey2020: jsonWebKey2020, + ecdsaSecp256k1VerificationKey2019: ecdsaSecp256k1VerificationKey2019, + x25519KeyAgreementKey2019: x25519KeyAgreementKey2019, +} + +//nolint:gochecknoglobals +var allowedKeyTypes = map[string]existenceMap{ + document.KeyPurposeAuthentication: allowedKeyTypesVerification, + document.KeyPurposeAssertionMethod: allowedKeyTypesVerification, + document.KeyPurposeKeyAgreement: allowedKeyTypesAgreement, + document.KeyPurposeCapabilityDelegation: allowedKeyTypesVerification, + document.KeyPurposeCapabilityInvocation: allowedKeyTypesVerification, +} + +// validatePublicKeys validates public keys. 
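+// It checks required/allowed properties, id length and character set, id uniqueness, +// declared purposes, allowed key type per purpose, and JWK validity.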
+func validatePublicKeys(pubKeys []document.PublicKey) error { + ids := make(map[string]bool) + + for _, pubKey := range pubKeys { + if err := validatePublicKeyProperties(pubKey); err != nil { + return err + } + + kid := pubKey.ID() + if err := validateID(kid); err != nil { + return fmt.Errorf("public key: %s", err.Error()) + } + + if _, ok := ids[kid]; ok { + return fmt.Errorf("duplicate public key id: %s", kid) + } + + ids[kid] = true + + if err := validateKeyPurposes(pubKey); err != nil { + return err + } + + if !validateKeyTypePurpose(pubKey) { + return fmt.Errorf("invalid key type: %s", pubKey.Type()) + } + + if err := validateJWK(pubKey.PublicKeyJwk()); err != nil { + if pubKey.PublicKeyBase58() == "" || pubKey.Type() == jsonWebKey2020 { + return err + } + } + } + + return nil +} + +func validatePublicKeyProperties(pubKey document.PublicKey) error { //nolint:gocyclo + requiredKeys := []string{document.TypeProperty, document.IDProperty} + optionalKeys := []string{document.PurposesProperty} + oneOfNKeys := [][]string{{document.PublicKeyJwkProperty, document.PublicKeyBase58Property}} + + allowedKeys := append(requiredKeys, optionalKeys...) //nolint:gocritic + + for _, keyGroup := range oneOfNKeys { + allowedKeys = append(allowedKeys, keyGroup...) + } + + for _, required := range requiredKeys { + if _, ok := pubKey[required]; !ok { + return fmt.Errorf("key '%s' is required for public key", required) + } + } + + for _, keyGroup := range oneOfNKeys { + var satisfied bool + + for _, key := range keyGroup { + _, ok := pubKey[key] + if ok && satisfied { // at most one element + satisfied = false + + break + } + + satisfied = satisfied || ok + } + + if !satisfied { + return fmt.Errorf("exactly one key required of '%s'", strings.Join(keyGroup, "', '")) + } + } + + for key := range pubKey { + if !contains(allowedKeys, key) { + return fmt.Errorf("key '%s' is not allowed for public key", key) + } + } + + return nil +} + +// validateID validates id. +func validateID(id string) error { + if len(id) > maxIDLength { + return fmt.Errorf("id exceeds maximum length: %d", maxIDLength) + } + + if !asciiRegex.MatchString(id) { + return errors.New("id contains invalid characters") + } + + return nil +} + +// validateServices validates services. 
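+// Each service must pass validateService (id, type and endpoint checks) and service ids must be unique.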
+func validateServices(services []document.Service) error { + ids := make(map[string]bool) + + for _, service := range services { + if err := validateService(service); err != nil { + return err + } + + if _, ok := ids[service.ID()]; ok { + return fmt.Errorf("duplicate service id: %s", service.ID()) + } + + ids[service.ID()] = true + } + + return nil +} + +func validateService(service document.Service) error { + // expected fields are type, id, and serviceEndpoint and some optional fields + if err := validateServiceID(service.ID()); err != nil { + return err + } + + if err := validateServiceType(service.Type()); err != nil { + return err + } + + if err := validateServiceEndpoint(service.ServiceEndpoint()); err != nil { + return err + } + + return nil +} + +func validateServiceID(id string) error { + if id == "" { + return errors.New("service id is missing") + } + + if err := validateID(id); err != nil { + return fmt.Errorf("service: %s", err.Error()) + } + + return nil +} + +func validateServiceType(serviceType string) error { + if serviceType == "" { + return errors.New("service type is missing") + } + + if len(serviceType) > maxServiceTypeLength { + return fmt.Errorf("service type exceeds maximum length: %d", maxServiceTypeLength) + } + + return nil +} + +func validateServiceEndpoint(serviceEndpoint interface{}) error { + if serviceEndpoint == nil { + return errors.New("service endpoint is missing") + } + + uri, ok := serviceEndpoint.(string) + if ok { + return validateURI(uri) + } + + uris, ok := serviceEndpoint.([]string) + if ok { + return validateURIs(uris) + } + + objs, ok := serviceEndpoint.([]interface{}) + if ok { + return validateServiceEndpointObjects(objs) + } + + return nil +} + +func validateServiceEndpointObjects(objs []interface{}) error { + for _, obj := range objs { + uri, ok := obj.(string) + if ok { + return validateURI(uri) + } + } + + return nil +} + +func validateURIs(uris []string) error { + for _, uri := range uris { + if err := validateURI(uri); err != nil { + return err + } + } + + return nil +} + +func validateURI(uri string) error { + if uri == "" { + return errors.New("service endpoint URI is empty") + } + + if _, err := url.ParseRequestURI(uri); err != nil { + return fmt.Errorf("service endpoint '%s' is not a valid URI: %s", uri, err.Error()) + } + + return nil +} + +// validateKeyTypePurpose validates if the public key type is valid for a certain purpose. +func validateKeyTypePurpose(pubKey document.PublicKey) bool { + if len(pubKey.Purpose()) == 0 { + // general key + _, ok := allowedKeyTypesGeneral[pubKey.Type()] + if !ok { + return false + } + } + + for _, purpose := range pubKey.Purpose() { + allowed, ok := allowedKeyTypes[purpose] + if !ok { + return false + } + + _, ok = allowed[pubKey.Type()] + if !ok { + return false + } + } + + return true +} + +// validateJWK validates JWK. +func validateJWK(jwk document.JWK) error { + if jwk == nil { + return errors.New("key has to be in JWK format") + } + + return jwk.Validate() +} + +// The object MAY include a purposes property, and if included, its value MUST be an array of one or more +// of the strings listed in allowed purposes array. 
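+// The allowed purposes are: authentication, assertionMethod, keyAgreement, +// capabilityDelegation and capabilityInvocation (see allowedPurposes above).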
+func validateKeyPurposes(pubKey document.PublicKey) error { + _, exists := pubKey[document.PurposesProperty] + + if exists && len(pubKey.Purpose()) == 0 { + return fmt.Errorf("if '%s' key is specified, it must contain at least one purpose", document.PurposesProperty) + } + + if len(pubKey.Purpose()) > len(allowedPurposes) { + return fmt.Errorf("public key purpose exceeds maximum length: %d", len(allowedPurposes)) + } + + for _, purpose := range pubKey.Purpose() { + if _, ok := allowedPurposes[document.KeyPurpose(purpose)]; !ok { + return fmt.Errorf("invalid purpose: %s", purpose) + } + } + + return nil +} + +func contains(values []string, value string) bool { + for _, v := range values { + if v == value { + return true + } + } + + return false +} + +func validateIds(ids []string) error { + for _, id := range ids { + if err := validateID(id); err != nil { + return err + } + } + + return nil +} + +func getRequiredArray(entry interface{}) ([]interface{}, error) { + arr, ok := entry.([]interface{}) + if !ok { + return nil, errors.New("expected array of interfaces") + } + + if len(arr) == 0 { + return nil, errors.New("required array is empty") + } + + return arr, nil +} diff --git a/method/sidetreelongform/sidetree-core/versions/1_0/operationparser/patchvalidator/document_test.go b/method/sidetreelongform/sidetree-core/versions/1_0/operationparser/patchvalidator/document_test.go new file mode 100644 index 0000000..f4de1ff --- /dev/null +++ b/method/sidetreelongform/sidetree-core/versions/1_0/operationparser/patchvalidator/document_test.go @@ -0,0 +1,759 @@ +/* +Copyright SecureKey Technologies Inc. All Rights Reserved. + +SPDX-License-Identifier: Apache-2.0 +*/ + +package patchvalidator + +import ( + "io" + "os" + "testing" + + "github.com/stretchr/testify/require" + + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/document" +) + +func TestValidatePublicKeys(t *testing.T) { + t.Run("success", func(t *testing.T) { + r := reader(t, "testdata/doc.json") + + data, err := io.ReadAll(r) + require.Nil(t, err) + + doc, err := document.DidDocumentFromBytes(data) + require.Nil(t, err) + + err = validatePublicKeys(doc.PublicKeys()) + require.Nil(t, err) + }) + + t.Run("success - missing purpose", func(t *testing.T) { + doc, err := document.DidDocumentFromBytes([]byte(noPurpose)) + require.Nil(t, err) + + err = validatePublicKeys(doc.PublicKeys()) + require.NoError(t, err) + }) + + t.Run("success - base58 key", func(t *testing.T) { + doc, err := document.DidDocumentFromBytes([]byte(withB58key)) + require.Nil(t, err) + + err = validatePublicKeys(doc.PublicKeys()) + require.NoError(t, err) + }) +} + +func TestValidatePublicKeysErrors(t *testing.T) { + t.Run("error - empty purpose", func(t *testing.T) { + doc, err := document.DidDocumentFromBytes([]byte(emptyPurpose)) + require.Nil(t, err) + + err = validatePublicKeys(doc.PublicKeys()) + require.Error(t, err) + require.Contains(t, err.Error(), + "if 'purposes' key is specified, it must contain at least one purpose") + }) + t.Run("invalid purpose", func(t *testing.T) { + doc, err := document.DidDocumentFromBytes([]byte(wrongPurpose)) + require.Nil(t, err) + + err = validatePublicKeys(doc.PublicKeys()) + require.Error(t, err) + require.Contains(t, err.Error(), "invalid purpose") + }) + t.Run("purpose exceeds maximum", func(t *testing.T) { + doc, err := document.DidDocumentFromBytes([]byte(tooMuchPurpose)) + require.Nil(t, err) + + err = validatePublicKeys(doc.PublicKeys()) + require.Error(t, err) + require.Contains(t, err.Error(), "public 
key purpose exceeds maximum length") + }) + t.Run("invalid key type", func(t *testing.T) { + doc, err := document.DidDocumentFromBytes([]byte(invalidKeyType)) + require.Nil(t, err) + + err = validatePublicKeys(doc.PublicKeys()) + require.Error(t, err) + require.Contains(t, err.Error(), "invalid key type") + }) + t.Run("missing id", func(t *testing.T) { + doc, err := document.DidDocumentFromBytes([]byte(noID)) + require.Nil(t, err) + + err = validatePublicKeys(doc.PublicKeys()) + require.Error(t, err) + require.Contains(t, err.Error(), "key 'id' is required for public key") + }) + t.Run("invalid id - too long", func(t *testing.T) { + doc, err := document.DidDocumentFromBytes([]byte(idLong)) + require.Nil(t, err) + + err = validatePublicKeys(doc.PublicKeys()) + require.Error(t, err) + require.Contains(t, err.Error(), "public key: id exceeds maximum length") + }) + t.Run("duplicate id", func(t *testing.T) { + doc, err := document.DidDocumentFromBytes([]byte(duplicateID)) + require.Nil(t, err) + + err = validatePublicKeys(doc.PublicKeys()) + require.Error(t, err) + require.Contains(t, err.Error(), "duplicate public key id") + }) + + t.Run("unknown property", func(t *testing.T) { + doc, err := document.DidDocumentFromBytes([]byte(moreProperties)) + require.Nil(t, err) + + err = validatePublicKeys(doc.PublicKeys()) + require.Error(t, err) + require.Contains(t, err.Error(), "key 'other' is not allowed for public key") + }) + + t.Run("invalid jwk", func(t *testing.T) { + doc, err := document.DidDocumentFromBytes([]byte(invalidJWK)) + require.Nil(t, err) + + err = validatePublicKeys(doc.PublicKeys()) + require.Error(t, err) + require.Contains(t, err.Error(), "JWK crv is missing") + }) + + t.Run("pkB58 key with jwk type", func(t *testing.T) { + doc, err := document.DidDocumentFromBytes([]byte(jwkTypeWithB58Key)) + require.Nil(t, err) + + err = validatePublicKeys(doc.PublicKeys()) + require.Error(t, err) + require.Contains(t, err.Error(), "key has to be in JWK format") + }) + + t.Run("no public key field", func(t *testing.T) { + doc, err := document.DidDocumentFromBytes([]byte(missingPubKey)) + require.Nil(t, err) + + err = validatePublicKeys(doc.PublicKeys()) + require.Error(t, err) + require.Contains(t, err.Error(), "exactly one key required of") + require.Contains(t, err.Error(), document.PublicKeyJwkProperty) + require.Contains(t, err.Error(), document.PublicKeyBase58Property) + }) + + t.Run("too many public key fields", func(t *testing.T) { + doc, err := document.DidDocumentFromBytes([]byte(multiplePublicKeyFields)) + require.Nil(t, err) + + err = validatePublicKeys(doc.PublicKeys()) + require.Error(t, err) + require.Contains(t, err.Error(), "exactly one key required of") + require.Contains(t, err.Error(), document.PublicKeyJwkProperty) + require.Contains(t, err.Error(), document.PublicKeyBase58Property) + }) +} + +func TestValidateServices(t *testing.T) { + t.Run("success", func(t *testing.T) { + doc, err := document.DidDocumentFromBytes([]byte(serviceDoc)) + require.NoError(t, err) + + err = validateServices(doc.Services()) + require.NoError(t, err) + }) + t.Run("error - duplicate service id", func(t *testing.T) { + doc, err := document.DidDocumentFromBytes([]byte(serviceDocWithDuplicateServices)) + require.NoError(t, err) + + err = validateServices(doc.Services()) + require.Error(t, err) + require.Contains(t, err.Error(), "duplicate service id: sid-123_ABC") + }) + t.Run("success - service can have allowed optional property", func(t *testing.T) { + doc, err := 
document.DidDocumentFromBytes([]byte(serviceDocOptionalProperty)) + require.NoError(t, err) + + err = validateServices(doc.Services()) + require.NoError(t, err) + }) + t.Run("error - missing service id", func(t *testing.T) { + doc, err := document.DidDocumentFromBytes([]byte(serviceDocNoID)) + require.NoError(t, err) + + err = validateServices(doc.Services()) + require.Error(t, err) + require.Contains(t, err.Error(), "service id is missing") + }) + t.Run("error - missing service type", func(t *testing.T) { + doc, err := document.DidDocumentFromBytes([]byte(serviceDocNoType)) + require.NoError(t, err) + + err = validateServices(doc.Services()) + require.Error(t, err) + require.Contains(t, err.Error(), "service type is missing") + }) + t.Run("error - service endpoint missing", func(t *testing.T) { + doc, err := document.DidDocumentFromBytes([]byte(serviceDocEndpointMissing)) + require.NoError(t, err) + + err = validateServices(doc.Services()) + require.Error(t, err) + require.Contains(t, err.Error(), "service endpoint is missing") + }) + t.Run("success - service endpoint is an object", func(t *testing.T) { + doc, err := document.DidDocumentFromBytes([]byte(serviceDocEndpointIsAnObject)) + require.NoError(t, err) + err = validateServices(doc.Services()) + require.NoError(t, err) + }) + t.Run("success - service endpoint is an array of objects", func(t *testing.T) { + doc, err := document.DidDocumentFromBytes([]byte(serviceDocEndpointIsAnArrayOfObjects)) + require.NoError(t, err) + err = validateServices(doc.Services()) + require.NoError(t, err) + }) + t.Run("success - service endpoint is an array of string objects", func(t *testing.T) { + doc, err := document.DidDocumentFromBytes([]byte(serviceDocEndpointIsAnArrayOfURLStrings)) + require.NoError(t, err) + err = validateServices(doc.Services()) + require.NoError(t, err) + }) + t.Run("success - service endpoint is an array of strings", func(t *testing.T) { + servicesMap := make(map[string]interface{}) + servicesMap["id"] = "someID" + servicesMap["type"] = "someType" + servicesMap["serviceEndpoint"] = []string{"https://hello.com", "https://there.com"} + + err := validateServices([]document.Service{document.NewService(servicesMap)}) + require.NoError(t, err) + }) + t.Run("error - service endpoint is an array of invalid strings", func(t *testing.T) { + servicesMap := make(map[string]interface{}) + servicesMap["id"] = "someID" + servicesMap["type"] = "someType" + servicesMap["serviceEndpoint"] = []string{"invalid-1", "invalid-2"} + + err := validateServices([]document.Service{document.NewService(servicesMap)}) + require.Error(t, err) + require.Contains(t, err.Error(), "service endpoint 'invalid-1' is not a valid URI") + }) + t.Run("error - service endpoint is an array of invalid string URL objects", func(t *testing.T) { + doc, err := document.DidDocumentFromBytes([]byte(serviceDocEndpointIsAnArrayOfInvalidURLStrings)) + require.NoError(t, err) + err = validateServices(doc.Services()) + require.Error(t, err) + require.Contains(t, err.Error(), "service endpoint 'hello' is not a valid URI") + }) + t.Run("error - empty service endpoint URI", func(t *testing.T) { + doc, err := document.DidDocumentFromBytes([]byte(serviceDocNoServiceEndpointURI)) + require.NoError(t, err) + + err = validateServices(doc.Services()) + require.Error(t, err) + require.Contains(t, err.Error(), "service endpoint URI is empty") + }) + t.Run("error - service id too long", func(t *testing.T) { + doc, err := document.DidDocumentFromBytes([]byte(serviceDocLongID)) + 
require.NoError(t, err) + + err = validateServices(doc.Services()) + require.Error(t, err) + require.Contains(t, err.Error(), "service: id exceeds maximum length") + }) + t.Run("error - service type too long", func(t *testing.T) { + doc, err := document.DidDocumentFromBytes([]byte(serviceDocLongType)) + require.NoError(t, err) + + err = validateServices(doc.Services()) + require.Error(t, err) + require.Contains(t, err.Error(), "service type exceeds maximum length") + }) + t.Run("error - service endpoint not URI", func(t *testing.T) { + doc, err := document.DidDocumentFromBytes([]byte(serviceDocEndpointNotURI)) + require.NoError(t, err) + + err = validateServices(doc.Services()) + require.Error(t, err) + require.Contains(t, err.Error(), "service endpoint 'hello' is not a valid URI") + }) + t.Run("success - didcomm service", func(t *testing.T) { + doc, err := document.DIDDocumentFromReader(reader(t, "testdata/doc.json")) + require.NoError(t, err) + err = validateServices(doc.Services()) + require.NoError(t, err) + }) +} + +func TestValidateID(t *testing.T) { + t.Run("success", func(t *testing.T) { + err := validateID("recovered") + require.NoError(t, err) + }) + t.Run("error - id not ASCII encoded character", func(t *testing.T) { + err := validateID("a****") + require.Error(t, err) + require.Contains(t, err.Error(), "id contains invalid characters") + }) + t.Run("error - exceeded maximum length", func(t *testing.T) { + err := validateID("1234567890abcdefghijk123456789012345678901234567890") + require.Error(t, err) + require.Contains(t, err.Error(), "id exceeds maximum length: 50") + }) +} + +func TestValidateJWK(t *testing.T) { + t.Run("success", func(t *testing.T) { + jwk := document.JWK{ + "kty": "kty", + "crv": "crv", + "x": "x", + "y": "y", + } + + err := validateJWK(jwk) + require.NoError(t, err) + }) + + t.Run("missing kty", func(t *testing.T) { + jwk := document.JWK{ + "kty": "", + "crv": "crv", + "x": "x", + "y": "y", + } + + err := validateJWK(jwk) + require.Error(t, err) + require.Contains(t, err.Error(), "JWK kty is missing") + }) +} + +func TestGeneralKeyPurpose(t *testing.T) { + for _, pubKeyType := range allowedKeyTypesAgreement { + pk := createMockPublicKeyWithType(pubKeyType) + err := validatePublicKeys([]document.PublicKey{pk}) + require.NoError(t, err, "valid purpose for type") + } + + pk := createMockPublicKeyWithTypeAndPurpose("invalid", []interface{}{document.KeyPurposeAuthentication}) + err := validatePublicKeys([]document.PublicKey{pk}) + require.Error(t, err, "invalid purpose for type") +} + +func TestInvalidKeyPurpose(t *testing.T) { + pk := createMockPublicKeyWithTypeAndPurpose(jsonWebKey2020, []interface{}{"invalidpurpose"}) + err := validatePublicKeys([]document.PublicKey{pk}) + require.Error(t, err, "invalid purpose") +} + +func TestVerificationKeyPurpose(t *testing.T) { + testKeyPurpose(t, allowedKeyTypesVerification, document.KeyPurposeAssertionMethod) + testKeyPurpose(t, allowedKeyTypesVerification, document.KeyPurposeAuthentication) + testKeyPurpose(t, allowedKeyTypesVerification, document.KeyPurposeCapabilityDelegation) + testKeyPurpose(t, allowedKeyTypesVerification, document.KeyPurposeCapabilityInvocation) +} + +func TestAgreementKeyPurpose(t *testing.T) { + testKeyPurpose(t, allowedKeyTypesAgreement, document.KeyPurposeKeyAgreement) +} + +func reader(t *testing.T, filename string) io.Reader { + f, err := os.Open(filename) + require.Nil(t, err) + + return f +} + +func testKeyPurpose(t *testing.T, allowedKeys existenceMap, pubKeyPurpose string) { + for _, 
pubKeyType := range allowedKeys { + pk := createMockPublicKeyWithTypeAndPurpose(pubKeyType, []interface{}{pubKeyPurpose}) + err := validatePublicKeys([]document.PublicKey{pk}) + require.NoError(t, err, "valid purpose for type") + + pk = createMockPublicKeyWithTypeAndPurpose(pubKeyType, []interface{}{pubKeyPurpose}) + err = validatePublicKeys([]document.PublicKey{pk}) + require.NoError(t, err, "valid purpose for type") + } + + for _, pubKeyType := range allowedKeyTypesGeneral { + _, ok := allowedKeys[pubKeyType] + if ok { + continue + } + + pk := createMockPublicKeyWithTypeAndPurpose(pubKeyType, + []interface{}{pubKeyPurpose, document.KeyPurposeKeyAgreement}) + err := validatePublicKeys([]document.PublicKey{pk}) + require.Error(t, err, "invalid purpose for type") + + pk = createMockPublicKeyWithTypeAndPurpose(pubKeyType, + []interface{}{pubKeyPurpose, document.KeyPurposeAssertionMethod}) + err = validatePublicKeys([]document.PublicKey{pk}) + require.Error(t, err, "invalid purpose for type") + + pk = createMockPublicKeyWithTypeAndPurpose(pubKeyType, []interface{}{pubKeyPurpose}) + err = validatePublicKeys([]document.PublicKey{pk}) + require.Error(t, err, "invalid purpose for type") + + pk = createMockPublicKeyWithTypeAndPurpose(pubKeyType, []interface{}{pubKeyPurpose}) + err = validatePublicKeys([]document.PublicKey{pk}) + require.Error(t, err, "invalid purpose for type") + } +} + +func createMockPublicKeyWithTypeAndPurpose(pubKeyType string, purpose []interface{}) document.PublicKey { + pk := map[string]interface{}{ + "id": "key1", + "type": pubKeyType, + "purposes": purpose, + "publicKeyJwk": map[string]interface{}{ + "kty": "kty", + "crv": "crv", + "x": "x", + "y": "y", + }, + } + + return pk +} + +func createMockPublicKeyWithType(pubKeyType string) document.PublicKey { + pk := map[string]interface{}{ + "id": "key1", + "type": pubKeyType, + "publicKeyJwk": map[string]interface{}{ + "kty": "kty", + "crv": "crv", + "x": "x", + "y": "y", + }, + } + + return pk +} + +const moreProperties = `{ + "publicKey": [ + { + "id": "key1", + "other": "unknown", + "type": "JsonWebKey2020", + "publicKeyJwk": { + "kty": "EC", + "crv": "P-256K", + "x": "PUymIqdtF_qxaAqPABSw-C-owT1KYYQbsMKFM-L9fJA", + "y": "nM84jDHCMOTGTh_ZdHq4dBBdo4Z5PkEOW9jA8z8IsGc" + } + } + ] +}` + +const noPurpose = `{ + "publicKey": [ + { + "id": "key1", + "type": "JsonWebKey2020", + "publicKeyJwk": { + "kty": "EC", + "crv": "P-256K", + "x": "PUymIqdtF_qxaAqPABSw-C-owT1KYYQbsMKFM-L9fJA", + "y": "nM84jDHCMOTGTh_ZdHq4dBBdo4Z5PkEOW9jA8z8IsGc" + } + } + ] +}` + +const invalidJWK = `{ + "publicKey": [ + { + "id": "key1", + "type": "JsonWebKey2020", + "publicKeyJwk": { + "x": "PUymIqdtF_qxaAqPABSw-C-owT1KYYQbsMKFM-L9fJA", + "y": "nM84jDHCMOTGTh_ZdHq4dBBdo4Z5PkEOW9jA8z8IsGc" + } + } + ] +}` + +const withB58key = `{ + "publicKey": [ + { + "id": "key1", + "type": "Ed25519VerificationKey2018", + "publicKeyBase58": "36d8RkFy2SdabnGzcZ3LcCSDA8NP5T4bsoADwuXtoN3B" + } + ] +}` + +const jwkTypeWithB58Key = `{ + "publicKey": [ + { + "id": "key1", + "type": "JsonWebKey2020", + "publicKeyBase58": "36d8RkFy2SdabnGzcZ3LcCSDA8NP5T4bsoADwuXtoN3B" + } + ] +}` + +const emptyPurpose = `{ + "publicKey": [ + { + "id": "key1", + "type": "JsonWebKey2020", + "purposes": [], + "publicKeyJwk": { + "kty": "EC", + "crv": "P-256K", + "x": "PUymIqdtF_qxaAqPABSw-C-owT1KYYQbsMKFM-L9fJA", + "y": "nM84jDHCMOTGTh_ZdHq4dBBdo4Z5PkEOW9jA8z8IsGc" + } + } + ] +}` + +const wrongPurpose = `{ + "publicKey": [ + { + "id": "key1", + "type": "JsonWebKey2020", + "purposes": ["invalid"], + 
"publicKeyJwk": { + "kty": "EC", + "crv": "P-256K", + "x": "PUymIqdtF_qxaAqPABSw-C-owT1KYYQbsMKFM-L9fJA", + "y": "nM84jDHCMOTGTh_ZdHq4dBBdo4Z5PkEOW9jA8z8IsGc" + } + } + ] +}` + +//nolint:lll +const tooMuchPurpose = `{ + "publicKey": [ + { + "id": "key1", + "type": "JsonWebKey2020", + "purposes": ["authentication", "assertionMethod", "keyAgreement", "capabilityDelegation", "capabilityInvocation", "other"], + "publicKeyJwk": { + "kty": "EC", + "crv": "P-256K", + "x": "PUymIqdtF_qxaAqPABSw-C-owT1KYYQbsMKFM-L9fJA", + "y": "nM84jDHCMOTGTh_ZdHq4dBBdo4Z5PkEOW9jA8z8IsGc" + } + } + ] +}` + +const idLong = `{ + "publicKey": [ + { + "id": "idwihmorethan50characters123456789012345678901234567890", + "type": "JsonWebKey2020", + "publicKeyJwk": { + "kty": "EC", + "crv": "P-256K", + "x": "PUymIqdtF_qxaAqPABSw-C-owT1KYYQbsMKFM-L9fJA", + "y": "nM84jDHCMOTGTh_ZdHq4dBBdo4Z5PkEOW9jA8z8IsGc" + } + }] +}` + +const noID = `{ + "publicKey": [ + { + "type": "JsonWebKey2020", + "publicKeyJwk": { + "kty": "EC", + "crv": "P-256K", + "x": "PUymIqdtF_qxaAqPABSw-C-owT1KYYQbsMKFM-L9fJA", + "y": "nM84jDHCMOTGTh_ZdHq4dBBdo4Z5PkEOW9jA8z8IsGc" + } + } + ] +}` + +const invalidKeyType = `{ + "publicKey": [ + { + "id": "key1", + "type": "InvalidKeyType", + "publicKeyJwk": { + "kty": "EC", + "crv": "P-256K", + "x": "PUymIqdtF_qxaAqPABSw-C-owT1KYYQbsMKFM-L9fJA", + "y": "nM84jDHCMOTGTh_ZdHq4dBBdo4Z5PkEOW9jA8z8IsGc" + } + } + ] +}` + +const missingPubKey = `{ + "publicKey": [ + { + "id": "key1", + "type": "JsonWebKey2020" + } + ] +}` + +const multiplePublicKeyFields = `{ + "publicKey": [ + { + "id": "key1", + "type": "JsonWebKey2020", + "publicKeyBase58": "36d8RkFy2SdabnGzcZ3LcCSDA8NP5T4bsoADwuXtoN3B", + "publicKeyJwk": { + "kty": "EC", + "crv": "P-256K", + "x": "PUymIqdtF_qxaAqPABSw-C-owT1KYYQbsMKFM-L9fJA", + "y": "nM84jDHCMOTGTh_ZdHq4dBBdo4Z5PkEOW9jA8z8IsGc" + } + } + ] +}` + +const duplicateID = `{ + "publicKey": [ + { + "id": "key1", + "type": "JsonWebKey2020", + "publicKeyJwk": { + "kty": "EC", + "crv": "P-256K", + "x": "PUymIqdtF_qxaAqPABSw-C-owT1KYYQbsMKFM-L9fJA", + "y": "nM84jDHCMOTGTh_ZdHq4dBBdo4Z5PkEOW9jA8z8IsGc" + } + }, + { + "id": "key1", + "type": "JsonWebKey2020", + "publicKeyJwk": { + "kty": "EC", + "crv": "P-256K", + "x": "PUymIqdtF_qxaAqPABSw-C-owT1KYYQbsMKFM-L9fJA", + "y": "nM84jDHCMOTGTh_ZdHq4dBBdo4Z5PkEOW9jA8z8IsGc" + } + } + ] +}` + +const serviceDoc = `{ + "service": [{ + "id": "sid-123_ABC", + "type": "VerifiableCredentialService", + "serviceEndpoint": "https://example.com/vc/" + }] +}` + +const serviceDocWithDuplicateServices = `{ + "service": [{ + "id": "sid-123_ABC", + "type": "VerifiableCredentialService", + "serviceEndpoint": "https://example.com/vc/" + }, + { + "id": "sid-123_ABC", + "type": "VerifiableCredentialService", + "serviceEndpoint": "https://example.com/vc/" + }] +}` + +const serviceDocNoID = `{ + "service": [{ + "id": "", + "type": "VerifiableCredentialService", + "serviceEndpoint": "https://example.com/vc/" + }] +}` + +const serviceDocLongID = `{ + "service": [{ + "id": "thisissomeidthathasmorethan50characters123456789012345678901234567890", + "type": "VerifiableCredentialService", + "serviceEndpoint": "https://example.com/vc/" + }] +}` + +const serviceDocLongType = `{ + "service": [{ + "id": "id", + "type": "VerifiableCredentialServiceVerifiableCredentialServiceVerifiableCredentialService", + "serviceEndpoint": "https://example.com/vc/" + }] +}` + +const serviceDocEndpointMissing = `{ + "service": [{ + "id": "vcs", + "type": "type" + }] +}` + +const serviceDocEndpointNotURI = `{ + 
"service": [{ + "id": "vcs", + "type": "type", + "serviceEndpoint": "hello" + }] +}` + +const serviceDocEndpointIsAnObject = `{ + "service": [{ + "id": "vcs", + "type": "type", + "serviceEndpoint": {"key":"value"} + }] +}` + +const serviceDocEndpointIsAnArrayOfObjects = `{ + "service": [{ + "id": "vcs", + "type": "type", + "serviceEndpoint": [{"key":"value"},{"key2":"value2"}] + }] +}` + +const serviceDocEndpointIsAnArrayOfURLStrings = `{ + "service": [{ + "id": "vcs", + "type": "type", + "serviceEndpoint": ["https://hello.com", "https://there.com"] + }] +}` + +const serviceDocEndpointIsAnArrayOfInvalidURLStrings = `{ + "service": [{ + "id": "vcs", + "type": "type", + "serviceEndpoint": ["hello", "there"] + }] +}` + +const serviceDocNoType = `{ + "service": [{ + "id": "vcs", + "type": "", + "serviceEndpoint": "https://example.com/vc/" + }] +}` + +const serviceDocNoServiceEndpointURI = `{ + "service": [{ + "id": "vcs", + "type": "VerifiableCredentialService", + "serviceEndpoint": "" + }] +}` + +const serviceDocOptionalProperty = `{ + "service": [{ + "id": "vcs", + "routingKeys": "value", + "type": "VerifiableCredentialService", + "serviceEndpoint": "https://example.com/vc/" + }] +}` diff --git a/method/sidetreelongform/sidetree-core/versions/1_0/operationparser/patchvalidator/ietf.go b/method/sidetreelongform/sidetree-core/versions/1_0/operationparser/patchvalidator/ietf.go new file mode 100644 index 0000000..4f77624 --- /dev/null +++ b/method/sidetreelongform/sidetree-core/versions/1_0/operationparser/patchvalidator/ietf.go @@ -0,0 +1,76 @@ +/* +Copyright SecureKey Technologies Inc. All Rights Reserved. + +SPDX-License-Identifier: Apache-2.0 +*/ + +package patchvalidator + +import ( + "encoding/json" + "fmt" + "strings" + + jsonpatch "github.com/evanphx/json-patch" + + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/document" + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/patch" +) + +// NewJSONValidator creates new validator. +func NewJSONValidator() *JSONValidator { + return &JSONValidator{} +} + +// JSONValidator implements validator for "ietf-json-patch" patch. +type JSONValidator struct { +} + +// Validate validates patch. 
+func (v *JSONValidator) Validate(p patch.Patch) error { + value, err := p.GetValue() + if err != nil { + return err + } + + patches, err := getRequiredArray(value) + if err != nil { + return fmt.Errorf("invalid json patch value: %s", err.Error()) + } + + patchesBytes, err := json.Marshal(patches) + if err != nil { + return err + } + + return validateJSONPatches(patchesBytes) +} + +func validateJSONPatches(patches []byte) error { + jsonPatches, err := jsonpatch.DecodePatch(patches) + if err != nil { + return fmt.Errorf("%s: %s", patch.JSONPatch, err.Error()) + } + + for _, p := range jsonPatches { + pathMsg, ok := p["path"] + if !ok { + return fmt.Errorf("%s: path not found", patch.JSONPatch) + } + + var path string + if err := json.Unmarshal(*pathMsg, &path); err != nil { + return fmt.Errorf("%s: invalid path", patch.JSONPatch) + } + + if strings.HasPrefix(path, "/"+document.ServiceProperty) { + return fmt.Errorf("%s: cannot modify services", patch.JSONPatch) + } + + if strings.HasPrefix(path, "/"+document.PublicKeyProperty) { + return fmt.Errorf("%s: cannot modify public keys", patch.JSONPatch) + } + } + + return nil +} diff --git a/method/sidetreelongform/sidetree-core/versions/1_0/operationparser/patchvalidator/ietf_test.go b/method/sidetreelongform/sidetree-core/versions/1_0/operationparser/patchvalidator/ietf_test.go new file mode 100644 index 0000000..49d1a79 --- /dev/null +++ b/method/sidetreelongform/sidetree-core/versions/1_0/operationparser/patchvalidator/ietf_test.go @@ -0,0 +1,92 @@ +/* +Copyright Gen Digital Inc. All Rights Reserved. + +SPDX-License-Identifier: Apache-2.0 +*/ + +package patchvalidator + +import ( + "testing" + + "github.com/stretchr/testify/require" + + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/patch" +) + +func TestIETFPatch(t *testing.T) { + t.Run("success", func(t *testing.T) { + p, err := patch.FromBytes([]byte(ietfPatch)) + require.NoError(t, err) + + err = NewJSONValidator().Validate(p) + require.NoError(t, err) + }) + t.Run("error - path not found", func(t *testing.T) { + p, err := patch.FromBytes([]byte(ietfPatchNoPath)) + require.NoError(t, err) + + err = NewJSONValidator().Validate(p) + require.Error(t, err) + require.Equal(t, err.Error(), "ietf-json-patch: path not found") + }) + t.Run("error - cannot update services", func(t *testing.T) { + p, err := patch.FromBytes([]byte(ietfServicesPatch)) + require.NoError(t, err) + + err = NewJSONValidator().Validate(p) + require.Error(t, err) + require.Equal(t, err.Error(), "ietf-json-patch: cannot modify services") + }) + t.Run("error - cannot update public keys", func(t *testing.T) { + p, err := patch.FromBytes([]byte(ietfPublicKeysPatch)) + require.NoError(t, err) + + err = NewJSONValidator().Validate(p) + require.Error(t, err) + require.Equal(t, err.Error(), "ietf-json-patch: cannot modify public keys") + }) + t.Run("error missing patches", func(t *testing.T) { + p := make(patch.Patch) + p[patch.ActionKey] = patch.JSONPatch + + err := NewJSONValidator().Validate(p) + require.Error(t, err) + require.Contains(t, err.Error(), "ietf-json-patch patch is missing key: patches") + }) +} + +const ietfPatch = `{ + "action": "ietf-json-patch", + "patches": [{ + "op": "replace", + "path": "/name", + "value": "value" + }] +}` + +const ietfPatchNoPath = `{ + "action": "ietf-json-patch", + "patches": [{ + "op": "replace", + "value": "value" + }] +}` + +const ietfServicesPatch = `{ + "action": "ietf-json-patch", + "patches": [{ + "op": "replace", + "path": "/service", + "value": "new value" + }] 
+}` + +const ietfPublicKeysPatch = `{ + "action": "ietf-json-patch", + "patches": [{ + "op": "replace", + "path": "/publicKey/0/type", + "value": "new type" + }] +}` diff --git a/method/sidetreelongform/sidetree-core/versions/1_0/operationparser/patchvalidator/removekeys.go b/method/sidetreelongform/sidetree-core/versions/1_0/operationparser/patchvalidator/removekeys.go new file mode 100644 index 0000000..1b09104 --- /dev/null +++ b/method/sidetreelongform/sidetree-core/versions/1_0/operationparser/patchvalidator/removekeys.go @@ -0,0 +1,38 @@ +/* +Copyright SecureKey Technologies Inc. All Rights Reserved. + +SPDX-License-Identifier: Apache-2.0 +*/ + +package patchvalidator + +import ( + "fmt" + + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/document" + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/patch" +) + +// NewRemovePublicKeysValidator creates validator for "remove-public-keys" patch. +func NewRemovePublicKeysValidator() *RemovePublicKeysValidator { + return &RemovePublicKeysValidator{} +} + +// RemovePublicKeysValidator implements validator for "remove-public-keys" patch. +type RemovePublicKeysValidator struct { +} + +// Validate validates patch. +func (v *RemovePublicKeysValidator) Validate(p patch.Patch) error { + value, err := p.GetValue() + if err != nil { + return err + } + + genericArr, err := getRequiredArray(value) + if err != nil { + return fmt.Errorf("invalid remove public keys value: %s", err.Error()) + } + + return validateIds(document.StringArray(genericArr)) +} diff --git a/method/sidetreelongform/sidetree-core/versions/1_0/operationparser/patchvalidator/removekeys_test.go b/method/sidetreelongform/sidetree-core/versions/1_0/operationparser/patchvalidator/removekeys_test.go new file mode 100644 index 0000000..6b0bd6e --- /dev/null +++ b/method/sidetreelongform/sidetree-core/versions/1_0/operationparser/patchvalidator/removekeys_test.go @@ -0,0 +1,56 @@ +/* +Copyright Gen Digital Inc. All Rights Reserved. 
+
+SPDX-License-Identifier: Apache-2.0
+*/
+
+package patchvalidator
+
+import (
+	"testing"
+
+	"github.com/stretchr/testify/require"
+
+	"github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/patch"
+)
+
+func TestRemovePublicKeysPatch(t *testing.T) {
+	t.Run("success", func(t *testing.T) {
+		p, err := patch.FromBytes([]byte(removePublicKeysPatch))
+		require.NoError(t, err)
+
+		err = NewRemovePublicKeysValidator().Validate(p)
+		require.NoError(t, err)
+	})
+	t.Run("error - missing public key ids", func(t *testing.T) {
+		p := make(patch.Patch)
+		p[patch.ActionKey] = patch.RemovePublicKeys
+
+		err := NewRemovePublicKeysValidator().Validate(p)
+		require.Error(t, err)
+		require.Contains(t, err.Error(), "remove-public-keys patch is missing key: ids")
+	})
+	t.Run("error - invalid remove public keys value", func(t *testing.T) {
+		p := make(patch.Patch)
+		p[patch.ActionKey] = patch.RemovePublicKeys
+		p[patch.IdsKey] = "whatever"
+
+		err := NewRemovePublicKeysValidator().Validate(p)
+		require.Error(t, err)
+		require.Contains(t, err.Error(), "expected array of interfaces")
+	})
+	t.Run("invalid public key ids", func(t *testing.T) {
+		const ids = `["a123*b456"]`
+		p, err := patch.NewRemovePublicKeysPatch(ids)
+		require.NoError(t, err)
+
+		err = NewRemovePublicKeysValidator().Validate(p)
+		require.Error(t, err)
+		require.Contains(t, err.Error(), "id contains invalid characters")
+	})
+}
+
+const removePublicKeysPatch = `{
+  "action": "remove-public-keys",
+  "ids": ["key1", "key2"]
+}`
diff --git a/method/sidetreelongform/sidetree-core/versions/1_0/operationparser/patchvalidator/removeservices.go b/method/sidetreelongform/sidetree-core/versions/1_0/operationparser/patchvalidator/removeservices.go
new file mode 100644
index 0000000..72ed718
--- /dev/null
+++ b/method/sidetreelongform/sidetree-core/versions/1_0/operationparser/patchvalidator/removeservices.go
@@ -0,0 +1,38 @@
+/*
+Copyright SecureKey Technologies Inc. All Rights Reserved.
+
+SPDX-License-Identifier: Apache-2.0
+*/
+
+package patchvalidator
+
+import (
+	"fmt"
+
+	"github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/document"
+	"github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/patch"
+)
+
+// NewRemoveServicesValidator creates new validator.
+func NewRemoveServicesValidator() *RemoveServicesValidator {
+	return &RemoveServicesValidator{}
+}
+
+// RemoveServicesValidator implements validator for "remove-services" patch.
+type RemoveServicesValidator struct {
+}
+
+// Validate validates patch.
+func (v *RemoveServicesValidator) Validate(p patch.Patch) error {
+	value, err := p.GetValue()
+	if err != nil {
+		return err
+	}
+
+	genericArr, err := getRequiredArray(value)
+	if err != nil {
+		return fmt.Errorf("invalid remove services value: %s", err.Error())
+	}
+
+	return validateIds(document.StringArray(genericArr))
+}
diff --git a/method/sidetreelongform/sidetree-core/versions/1_0/operationparser/patchvalidator/removeservices_test.go b/method/sidetreelongform/sidetree-core/versions/1_0/operationparser/patchvalidator/removeservices_test.go
new file mode 100644
index 0000000..14b9fc4
--- /dev/null
+++ b/method/sidetreelongform/sidetree-core/versions/1_0/operationparser/patchvalidator/removeservices_test.go
@@ -0,0 +1,56 @@
+/*
+Copyright Gen Digital Inc. All Rights Reserved.
+
+SPDX-License-Identifier: Apache-2.0
+*/
+
+package patchvalidator
+
+import (
+	"testing"
+
+	"github.com/stretchr/testify/require"
+
+	"github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/patch"
+)
+
+func TestRemoveServiceEndpointsPatch(t *testing.T) {
+	t.Run("success", func(t *testing.T) {
+		p, err := patch.FromBytes([]byte(removeServiceEndpoints))
+		require.NoError(t, err)
+
+		err = NewRemoveServicesValidator().Validate(p)
+		require.NoError(t, err)
+	})
+	t.Run("error - missing service ids", func(t *testing.T) {
+		p := make(patch.Patch)
+		p[patch.ActionKey] = patch.RemoveServiceEndpoints
+
+		err := NewRemoveServicesValidator().Validate(p)
+		require.Error(t, err)
+		require.Contains(t, err.Error(), "remove-services patch is missing key: ids")
+	})
+	t.Run("error - invalid service ids", func(t *testing.T) {
+		p := make(patch.Patch)
+		p[patch.ActionKey] = patch.RemoveServiceEndpoints
+		p[patch.IdsKey] = "invalid"
+
+		err := NewRemoveServicesValidator().Validate(p)
+		require.Error(t, err)
+		require.Contains(t, err.Error(), "expected array of interfaces")
+	})
+	t.Run("invalid service ids", func(t *testing.T) {
+		const ids = `["a123*b456"]`
+		p, err := patch.NewRemoveServiceEndpointsPatch(ids)
+		require.NoError(t, err)
+
+		err = NewRemoveServicesValidator().Validate(p)
+		require.Error(t, err)
+		require.Contains(t, err.Error(), "id contains invalid characters")
+	})
+}
+
+const removeServiceEndpoints = `{
+  "action": "remove-services",
+  "ids": ["sds1", "sds2"]
+}`
diff --git a/method/sidetreelongform/sidetree-core/versions/1_0/operationparser/patchvalidator/replace.go b/method/sidetreelongform/sidetree-core/versions/1_0/operationparser/patchvalidator/replace.go
new file mode 100644
index 0000000..b84dfee
--- /dev/null
+++ b/method/sidetreelongform/sidetree-core/versions/1_0/operationparser/patchvalidator/replace.go
@@ -0,0 +1,66 @@
+/*
+Copyright SecureKey Technologies Inc. All Rights Reserved.
+
+SPDX-License-Identifier: Apache-2.0
+*/
+
+package patchvalidator
+
+import (
+	"errors"
+	"fmt"
+
+	"github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/document"
+	"github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/patch"
+)
+
+// NewReplaceValidator creates new validator.
+func NewReplaceValidator() *ReplaceValidator {
+	return &ReplaceValidator{}
+}
+
+// ReplaceValidator implements validator for "replace" patch.
+type ReplaceValidator struct {
+}
+
+// Validate validates patch.
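A "replace" document may carry only publicKeys and services at its top level; anything else, such as a stray id, must be rejected. A compact, self-contained sketch of that key scan (the document literal here is made up for illustration):

package main

import (
	"encoding/json"
	"fmt"
)

func main() {
	var doc map[string]interface{}

	raw := `{"publicKeys":[],"services":[],"id":"some-id"}`
	if err := json.Unmarshal([]byte(raw), &doc); err != nil {
		panic(err)
	}

	allowed := map[string]bool{"publicKeys": true, "services": true}

	for key := range doc {
		if !allowed[key] {
			fmt.Printf("key '%s' is not allowed in replace document\n", key)
		}
	}
}

The validator below does the same scan against document.ReplacePublicKeyProperty and document.ReplaceServiceProperty, then re-validates the embedded public keys and services.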
+func (v *ReplaceValidator) Validate(p patch.Patch) error { + value, err := p.GetValue() + if err != nil { + return err + } + + entryMap, err := getRequiredMap(value) + if err != nil { + return err + } + + doc := document.ReplaceDocumentFromJSONLDObject(entryMap) + + allowedKeys := []string{document.ReplaceServiceProperty, document.ReplacePublicKeyProperty} + + for key := range doc { + if !contains(allowedKeys, key) { + return fmt.Errorf("key '%s' is not allowed in replace document", key) + } + } + + if err := validatePublicKeys(doc.PublicKeys()); err != nil { + return fmt.Errorf("failed to validate public keys for replace document: %s", err.Error()) + } + + if err := validateServices(doc.Services()); err != nil { + return fmt.Errorf("failed to validate services for replace document: %s", err.Error()) + } + + return nil +} + +func getRequiredMap(entry interface{}) (map[string]interface{}, error) { + required, ok := entry.(map[string]interface{}) + if !ok { + return nil, errors.New("unexpected interface for document") + } + + return required, nil +} diff --git a/method/sidetreelongform/sidetree-core/versions/1_0/operationparser/patchvalidator/replace_test.go b/method/sidetreelongform/sidetree-core/versions/1_0/operationparser/patchvalidator/replace_test.go new file mode 100644 index 0000000..6f0b350 --- /dev/null +++ b/method/sidetreelongform/sidetree-core/versions/1_0/operationparser/patchvalidator/replace_test.go @@ -0,0 +1,125 @@ +/* +Copyright SecureKey Technologies Inc. All Rights Reserved. + +SPDX-License-Identifier: Apache-2.0 +*/ + +package patchvalidator + +import ( + "testing" + + "github.com/stretchr/testify/require" + + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/document" + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/patch" +) + +func TestValidateReplacePatch(t *testing.T) { + t.Run("success", func(t *testing.T) { + p, err := patch.FromBytes([]byte(replacePatch)) + require.NoError(t, err) + + err = NewReplaceValidator().Validate(p) + require.NoError(t, err) + }) + t.Run("missing document", func(t *testing.T) { + p, err := patch.FromBytes([]byte(replacePatch)) + require.NoError(t, err) + require.NotNil(t, p) + + delete(p, patch.DocumentKey) + err = NewReplaceValidator().Validate(p) + require.Contains(t, err.Error(), "replace patch is missing key: document") + }) + t.Run("error - document has invalid property", func(t *testing.T) { + doc, err := document.FromBytes([]byte(replaceDocWithExtraProperties)) + require.NoError(t, err) + + p := make(patch.Patch) + p[patch.ActionKey] = patch.Replace + p[patch.DocumentKey] = doc.JSONLdObject() + + err = NewReplaceValidator().Validate(p) + require.Error(t, err) + require.Contains(t, err.Error(), "key 'id' is not allowed in replace document") + }) + t.Run("error - public keys (missing type)", func(t *testing.T) { + p, err := patch.NewReplacePatch(replaceDocInvalidPublicKey) + require.NoError(t, err) + require.NotNil(t, p) + + err = NewReplaceValidator().Validate(p) + require.Contains(t, err.Error(), "key 'type' is required for public key") + }) + t.Run("error - services (missing endpoint)", func(t *testing.T) { + p, err := patch.NewReplacePatch(replaceDocInvalidServiceEndpoint) + require.NoError(t, err) + require.NotNil(t, p) + + err = NewReplaceValidator().Validate(p) + require.Contains(t, err.Error(), "service endpoint is missing") + }) +} + +const replacePatch = `{ + "action": "replace", + "document": { + "publicKeys": [ + { + "id": "key-1", + "purposes": ["authentication"], + "type": 
"EcdsaSecp256k1VerificationKey2019", + "publicKeyJwk": { + "kty": "EC", + "crv": "P-256K", + "x": "PUymIqdtF_qxaAqPABSw-C-owT1KYYQbsMKFM-L9fJA", + "y": "nM84jDHCMOTGTh_ZdHq4dBBdo4Z5PkEOW9jA8z8IsGc" + } + }], + "services": [ + { + "id": "sds3", + "type": "SecureDataStore", + "serviceEndpoint": "http://hub.my-personal-server.com" + }] + } +}` + +const replaceDocWithExtraProperties = `{ + "id": "some-id", + "publicKeys": [ + { + "id": "key-1", + "purposes": ["authentication"], + "type": "EcdsaSecp256k1VerificationKey2019", + "publicKeyJwk": { + "kty": "EC", + "crv": "P-256K", + "x": "PUymIqdtF_qxaAqPABSw-C-owT1KYYQbsMKFM-L9fJA", + "y": "nM84jDHCMOTGTh_ZdHq4dBBdo4Z5PkEOW9jA8z8IsGc" + } + }] +}` + +const replaceDocInvalidPublicKey = `{ + "publicKeys": [ + { + "id": "key-1", + "purposes": ["authentication"], + "publicKeyJwk": { + "kty": "EC", + "crv": "P-256K", + "x": "PUymIqdtF_qxaAqPABSw-C-owT1KYYQbsMKFM-L9fJA", + "y": "nM84jDHCMOTGTh_ZdHq4dBBdo4Z5PkEOW9jA8z8IsGc" + } + }] +}` + +const replaceDocInvalidServiceEndpoint = `{ + "services": [ + { + "id": "sds3", + "type": "SecureDataStore" + }] +}` diff --git a/method/sidetreelongform/sidetree-core/versions/1_0/operationparser/patchvalidator/testdata/doc.json b/method/sidetreelongform/sidetree-core/versions/1_0/operationparser/patchvalidator/testdata/doc.json new file mode 100644 index 0000000..bb21b8b --- /dev/null +++ b/method/sidetreelongform/sidetree-core/versions/1_0/operationparser/patchvalidator/testdata/doc.json @@ -0,0 +1,144 @@ +{ + "publicKey": [ + { + "id": "master", + "type": "EcdsaSecp256k1VerificationKey2019", + "purposes": ["authentication", "assertionMethod", "keyAgreement", "capabilityDelegation", "capabilityInvocation"], + "publicKeyJwk": { + "kty": "EC", + "crv": "P-256K", + "x": "PUymIqdtF_qxaAqPABSw-C-owT1KYYQbsMKFM-L9fJA", + "y": "nM84jDHCMOTGTh_ZdHq4dBBdo4Z5PkEOW9jA8z8IsGc" + } + }, + { + "id": "dual-auth-gen", + "type": "JsonWebKey2020", + "purposes": ["authentication"], + "publicKeyJwk": { + "kty": "EC", + "crv": "P-256K", + "x": "PUymIqdtF_qxaAqPABSw-C-owT1KYYQbsMKFM-L9fJA", + "y": "nM84jDHCMOTGTh_ZdHq4dBBdo4Z5PkEOW9jA8z8IsGc" + } + }, + { + "id": "auth-only", + "type": "JsonWebKey2020", + "purposes": ["authentication"], + "publicKeyJwk": { + "kty": "EC", + "crv": "P-256K", + "x": "PUymIqdtF_qxaAqPABSw-C-owT1KYYQbsMKFM-L9fJA", + "y": "nM84jDHCMOTGTh_ZdHq4dBBdo4Z5PkEOW9jA8z8IsGc" + } + }, + { + "id": "dual-assertion-gen", + "type": "JsonWebKey2020", + "purposes": ["assertionMethod"], + "publicKeyJwk": { + "kty": "EC", + "crv": "P-256K", + "x": "PUymIqdtF_qxaAqPABSw-C-owT1KYYQbsMKFM-L9fJA", + "y": "nM84jDHCMOTGTh_ZdHq4dBBdo4Z5PkEOW9jA8z8IsGc" + } + }, + { + "id": "assertion-only", + "type": "JsonWebKey2020", + "purposes": ["assertionMethod"], + "publicKeyJwk": { + "kty": "EC", + "crv": "P-256K", + "x": "PUymIqdtF_qxaAqPABSw-C-owT1KYYQbsMKFM-L9fJA", + "y": "nM84jDHCMOTGTh_ZdHq4dBBdo4Z5PkEOW9jA8z8IsGc" + } + }, + { + "id": "dual-agreement-gen", + "type": "JsonWebKey2020", + "purposes": ["keyAgreement"], + "publicKeyJwk": { + "kty": "EC", + "crv": "P-256K", + "x": "PUymIqdtF_qxaAqPABSw-C-owT1KYYQbsMKFM-L9fJA", + "y": "nM84jDHCMOTGTh_ZdHq4dBBdo4Z5PkEOW9jA8z8IsGc" + } + }, + { + "id": "agreement-only", + "type": "JsonWebKey2020", + "purposes": ["keyAgreement"], + "publicKeyJwk": { + "kty": "EC", + "crv": "P-256K", + "x": "PUymIqdtF_qxaAqPABSw-C-owT1KYYQbsMKFM-L9fJA", + "y": "nM84jDHCMOTGTh_ZdHq4dBBdo4Z5PkEOW9jA8z8IsGc" + } + }, + { + "id": "dual-invocation-gen", + "type": "JsonWebKey2020", + "purposes": 
["capabilityInvocation"], + "publicKeyJwk": { + "kty": "EC", + "crv": "P-256K", + "x": "PUymIqdtF_qxaAqPABSw-C-owT1KYYQbsMKFM-L9fJA", + "y": "nM84jDHCMOTGTh_ZdHq4dBBdo4Z5PkEOW9jA8z8IsGc" + } + }, + { + "id": "invocation-only", + "type": "JsonWebKey2020", + "purposes": ["capabilityInvocation"], + "publicKeyJwk": { + "kty": "EC", + "crv": "P-256K", + "x": "PUymIqdtF_qxaAqPABSw-C-owT1KYYQbsMKFM-L9fJA", + "y": "nM84jDHCMOTGTh_ZdHq4dBBdo4Z5PkEOW9jA8z8IsGc" + } + }, + { + "id": "dual-delegation-gen", + "type": "JsonWebKey2020", + "purposes": ["capabilityDelegation"], + "publicKeyJwk": { + "kty": "EC", + "crv": "P-256K", + "x": "PUymIqdtF_qxaAqPABSw-C-owT1KYYQbsMKFM-L9fJA", + "y": "nM84jDHCMOTGTh_ZdHq4dBBdo4Z5PkEOW9jA8z8IsGc" + } + }, + { + "id": "delegation-only", + "type": "JsonWebKey2020", + "purposes": ["capabilityDelegation"], + "publicKeyJwk": { + "kty": "EC", + "crv": "P-256K", + "x": "PUymIqdtF_qxaAqPABSw-C-owT1KYYQbsMKFM-L9fJA", + "y": "nM84jDHCMOTGTh_ZdHq4dBBdo4Z5PkEOW9jA8z8IsGc" + } + }, + { + "id": "general-only", + "type": "JsonWebKey2020", + "publicKeyJwk": { + "kty": "EC", + "crv": "P-256K", + "x": "PUymIqdtF_qxaAqPABSw-C-owT1KYYQbsMKFM-L9fJA", + "y": "nM84jDHCMOTGTh_ZdHq4dBBdo4Z5PkEOW9jA8z8IsGc" + } + } + ], + "service": [ + { + "id": "hub", + "type": "IdentityHub", + "routingKeys": "routingKeysValue", + "recipientKeys": "recipientKeysValue", + "serviceEndpoint": "https://example.com/hub/" + } + ] +} \ No newline at end of file diff --git a/method/sidetreelongform/sidetree-core/versions/1_0/operationparser/patchvalidator/validator.go b/method/sidetreelongform/sidetree-core/versions/1_0/operationparser/patchvalidator/validator.go new file mode 100644 index 0000000..84f6da0 --- /dev/null +++ b/method/sidetreelongform/sidetree-core/versions/1_0/operationparser/patchvalidator/validator.go @@ -0,0 +1,34 @@ +package patchvalidator + +import ( + "fmt" + + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/patch" +) + +// Validate validates patch. +func Validate(p patch.Patch) error { + action, err := p.GetAction() + if err != nil { + return err + } + + switch action { + case patch.Replace: + return NewReplaceValidator().Validate(p) + case patch.JSONPatch: + return NewJSONValidator().Validate(p) + case patch.AddPublicKeys: + return NewAddPublicKeysValidator().Validate(p) + case patch.RemovePublicKeys: + return NewRemovePublicKeysValidator().Validate(p) + case patch.AddServiceEndpoints: + return NewAddServicesValidator().Validate(p) + case patch.RemoveServiceEndpoints: + return NewRemoveServicesValidator().Validate(p) + case patch.AddAlsoKnownAs, patch.RemoveAlsoKnownAs: + return NewAlsoKnownAsValidator().Validate(p) + } + + return fmt.Errorf(" validation for action '%s' is not supported", action) +} diff --git a/method/sidetreelongform/sidetree-core/versions/1_0/operationparser/patchvalidator/validator_test.go b/method/sidetreelongform/sidetree-core/versions/1_0/operationparser/patchvalidator/validator_test.go new file mode 100644 index 0000000..4ed363b --- /dev/null +++ b/method/sidetreelongform/sidetree-core/versions/1_0/operationparser/patchvalidator/validator_test.go @@ -0,0 +1,82 @@ +/* +Copyright SecureKey Technologies Inc. All Rights Reserved. 
+ +SPDX-License-Identifier: Apache-2.0 +*/ + +package patchvalidator + +import ( + "testing" + + "github.com/stretchr/testify/require" + + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/patch" +) + +func TestValidate(t *testing.T) { + t.Run("success - add public keys", func(t *testing.T) { + p, err := patch.FromBytes([]byte(addPublicKeysPatch)) + require.NoError(t, err) + + err = Validate(p) + require.NoError(t, err) + }) + t.Run("success - remove public keys", func(t *testing.T) { + p, err := patch.FromBytes([]byte(removePublicKeysPatch)) + require.NoError(t, err) + + err = Validate(p) + require.NoError(t, err) + }) + t.Run("success - add service endpoints", func(t *testing.T) { + p, err := patch.FromBytes([]byte(addServiceEndpoints)) + require.NoError(t, err) + + err = Validate(p) + require.NoError(t, err) + }) + t.Run("success - remove service endpoints", func(t *testing.T) { + p, err := patch.FromBytes([]byte(removeServiceEndpoints)) + require.NoError(t, err) + + err = Validate(p) + require.NoError(t, err) + }) + t.Run("success - add also known as", func(t *testing.T) { + p, err := patch.FromBytes([]byte(addAlsoKnownAs)) + require.NoError(t, err) + + err = Validate(p) + require.NoError(t, err) + }) + t.Run("success - remove also known as", func(t *testing.T) { + p, err := patch.FromBytes([]byte(removeAlsoKnownAs)) + require.NoError(t, err) + + err = Validate(p) + require.NoError(t, err) + }) + t.Run("success - ietf patch", func(t *testing.T) { + p, err := patch.FromBytes([]byte(ietfPatch)) + require.NoError(t, err) + + err = Validate(p) + require.NoError(t, err) + }) + t.Run("success - replace patch", func(t *testing.T) { + p, err := patch.FromBytes([]byte(replacePatch)) + require.NoError(t, err) + + err = Validate(p) + require.NoError(t, err) + }) + t.Run("error - patch not supported", func(t *testing.T) { + p := make(patch.Patch) + p[patch.ActionKey] = "invalid" + + err := Validate(p) + require.Error(t, err) + require.Contains(t, err.Error(), "action 'invalid' is not supported") + }) +} diff --git a/method/sidetreelongform/sidetree-core/versions/1_0/operationparser/recover.go b/method/sidetreelongform/sidetree-core/versions/1_0/operationparser/recover.go new file mode 100644 index 0000000..e2bf66b --- /dev/null +++ b/method/sidetreelongform/sidetree-core/versions/1_0/operationparser/recover.go @@ -0,0 +1,270 @@ +/* +Copyright SecureKey Technologies Inc. All Rights Reserved. + +SPDX-License-Identifier: Apache-2.0 +*/ + +package operationparser + +import ( + "encoding/json" + "fmt" + + "github.com/pkg/errors" + + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/api/operation" + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/commitment" + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/encoder" + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/hashing" + internal "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/internal/jws" + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/jws" + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/versions/1_0/model" +) + +// ParseRecoverOperation will parse recover operation. 
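One non-obvious step in the function below is the reveal-value check: the reveal value must be the multihash of the canonicalized recovery public key, so possession of the key can be proven against a previously published commitment. A back-of-the-envelope sketch of that encoding for sha2-256 (illustrative only; the production path goes through the hashing and canonicalizer packages):

package main

import (
	"crypto/sha256"
	"encoding/base64"
	"fmt"
)

func main() {
	// canonical (JCS) form of a recovery public key, trimmed for the example
	canonicalKey := []byte(`{"crv":"crv","kty":"kty","x":"x","y":"y"}`)

	digest := sha256.Sum256(canonicalKey)

	// a multihash is code || length || digest; 0x12/0x20 means sha2-256 with a 32-byte digest
	multihash := append([]byte{0x12, 0x20}, digest[:]...)

	fmt.Println(base64.RawURLEncoding.EncodeToString(multihash))
}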
+func (p *Parser) ParseRecoverOperation(request []byte, batch bool) (*model.Operation, error) { + schema, err := p.parseRecoverRequest(request) + if err != nil { + return nil, err + } + + signedData, err := p.ParseSignedDataForRecover(schema.SignedData) + if err != nil { + return nil, err + } + + if !batch { + err = p.anchorOriginValidator.Validate(signedData.AnchorOrigin) + if err != nil { + return nil, err + } + + until := p.getAnchorUntil(signedData.AnchorFrom, signedData.AnchorUntil) + + err = p.anchorTimeValidator.Validate(signedData.AnchorFrom, until) + if err != nil { + return nil, err + } + + err = p.ValidateDelta(schema.Delta) + if err != nil { + return nil, err + } + + if schema.Delta.UpdateCommitment == signedData.RecoveryCommitment { + return nil, errors.New("recovery and update commitments cannot be equal, re-using public keys is not allowed") + } + } + + err = hashing.IsValidModelMultihash(signedData.RecoveryKey, schema.RevealValue) + if err != nil { + return nil, fmt.Errorf("canonicalized recovery public key hash doesn't match reveal value: %s", err.Error()) + } + + return &model.Operation{ + OperationRequest: request, + Type: operation.TypeRecover, + UniqueSuffix: schema.DidSuffix, + Delta: schema.Delta, + SignedData: schema.SignedData, + RevealValue: schema.RevealValue, + AnchorOrigin: signedData.AnchorOrigin, + }, nil +} + +func (p *Parser) parseRecoverRequest(payload []byte) (*model.RecoverRequest, error) { + schema := &model.RecoverRequest{} + + err := json.Unmarshal(payload, schema) + if err != nil { + return nil, fmt.Errorf("failed to unmarshal recover request: %s", err.Error()) + } + + if err := p.validateRecoverRequest(schema); err != nil { + return nil, err + } + + return schema, nil +} + +// ParseSignedDataForRecover will parse and validate signed data for recover. 
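The compact JWS handled here is three base64url segments joined by dots: protected header, payload, signature. A standalone sketch, composed from the same fixture segments the tests further down use, that peels one apart:

package main

import (
	"encoding/base64"
	"fmt"
	"strings"
)

func main() {
	compact := "eyJhbGciOiJhbGciLCJraWQiOiJraWQifQ.cGF5bG9hZA.c2lnbmF0dXJl"

	parts := strings.Split(compact, ".")

	header, _ := base64.RawURLEncoding.DecodeString(parts[0])
	payload, _ := base64.RawURLEncoding.DecodeString(parts[1])

	fmt.Println(string(header))  // {"alg":"alg","kid":"kid"}
	fmt.Println(string(payload)) // payload
}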
+func (p *Parser) ParseSignedDataForRecover(compactJWS string) (*model.RecoverSignedDataModel, error) { + signedData, err := p.parseSignedData(compactJWS) + if err != nil { + return nil, err + } + + schema := &model.RecoverSignedDataModel{} + + err = json.Unmarshal(signedData.Payload, schema) + if err != nil { + return nil, fmt.Errorf("failed to unmarshal signed data model for recover: %s", err.Error()) + } + + if err := p.validateSignedDataForRecovery(schema); err != nil { + return nil, fmt.Errorf("validate signed data for recovery: %s", err.Error()) + } + + return schema, nil +} + +func (p *Parser) validateSignedDataForRecovery(signedData *model.RecoverSignedDataModel) error { + if err := p.validateSigningKey(signedData.RecoveryKey); err != nil { + return err + } + + if err := p.validateMultihash(signedData.RecoveryCommitment, "recovery commitment"); err != nil { + return err + } + + if err := p.validateMultihash(signedData.DeltaHash, "delta hash"); err != nil { + return err + } + + return p.validateCommitment(signedData.RecoveryKey, signedData.RecoveryCommitment) +} + +func (p *Parser) parseSignedData(compactJWS string) (*internal.JSONWebSignature, error) { + if compactJWS == "" { + return nil, errors.New("missing signed data") + } + + sig, err := internal.ParseJWS(compactJWS) + if err != nil { + return nil, fmt.Errorf("failed to parse signed data: %s", err.Error()) + } + + err = p.validateProtectedHeaders(sig.ProtectedHeaders, p.SignatureAlgorithms) + if err != nil { + return nil, fmt.Errorf("failed to parse signed data: %s", err.Error()) + } + + return sig, nil +} + +func (p *Parser) validateProtectedHeaders(headers jws.Headers, allowedAlgorithms []string) error { + if headers == nil { + return errors.New("missing protected headers") + } + + // kid MAY be present in the protected header. + // alg MUST be present in the protected header, its value MUST NOT be none. + // no additional members may be present in the protected header. 
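+	// For example, {"alg":"ES256","kid":"key-1"} satisfies the structural rules
+	// enforced here (subject to the allowed-algorithms list checked below), while
+	// {"alg":""} fails the emptiness check and {"alg":"ES256","typ":"JWT"} fails
+	// the allowed-headers scan, since "typ" is not a permitted member.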
+
+	alg, ok := headers.Algorithm()
+	if !ok {
+		return errors.New("algorithm must be present in the protected header")
+	}
+
+	if alg == "" {
+		return errors.New("algorithm cannot be empty in the protected header")
+	}
+
+	allowedHeaders := map[string]bool{
+		jws.HeaderAlgorithm: true,
+		jws.HeaderKeyID:     true,
+	}
+
+	for k := range headers {
+		if _, ok := allowedHeaders[k]; !ok {
+			return fmt.Errorf("invalid protected header: %s", k)
+		}
+	}
+
+	if !contains(allowedAlgorithms, alg) {
+		return errors.Errorf("algorithm '%s' is not in the allowed list %v", alg, allowedAlgorithms)
+	}
+
+	return nil
+}
+
+func (p *Parser) validateRecoverRequest(req *model.RecoverRequest) error {
+	if req.DidSuffix == "" {
+		return errors.New("missing did suffix")
+	}
+
+	if req.SignedData == "" {
+		return errors.New("missing signed data")
+	}
+
+	return p.validateMultihash(req.RevealValue, "reveal value")
+}
+
+func (p *Parser) validateSigningKey(key *jws.JWK) error {
+	if key == nil {
+		return errors.New("missing signing key")
+	}
+
+	// validate mandatory values
+	err := key.Validate()
+	if err != nil {
+		return fmt.Errorf("signing key validation failed: %s", err.Error())
+	}
+
+	// validate key algorithm
+	if !contains(p.KeyAlgorithms, key.Crv) {
+		return errors.Errorf("key algorithm '%s' is not in the allowed list %v", key.Crv, p.KeyAlgorithms)
+	}
+
+	// validate optional nonce
+	err = p.validateNonce(key.Nonce)
+	if err != nil {
+		return fmt.Errorf("validate signing key nonce: %s", err.Error())
+	}
+
+	return nil
+}
+
+func contains(values []string, value string) bool {
+	for _, v := range values {
+		if v == value {
+			return true
+		}
+	}
+
+	return false
+}
+
+func (p *Parser) validateCommitment(jwk *jws.JWK, nextCommitment string) error {
+	code, err := hashing.GetMultihashCode(nextCommitment)
+	if err != nil {
+		return err
+	}
+
+	currentCommitment, err := commitment.GetCommitment(jwk, uint(code))
+	if err != nil {
+		return fmt.Errorf("calculate current commitment: %s", err.Error())
+	}
+
+	if currentCommitment == nextCommitment {
+		return errors.New("re-using public keys for commitment is not allowed")
+	}
+
+	return nil
+}
+
+func (p *Parser) validateNonce(nonce string) error {
+	// nonce is optional
+	if nonce == "" {
+		return nil
+	}
+
+	nonceBytes, err := encoder.DecodeString(nonce)
+	if err != nil {
+		return fmt.Errorf("failed to decode nonce '%s': %s", nonce, err.Error())
+	}
+
+	if len(nonceBytes) != int(p.NonceSize) {
+		return fmt.Errorf("nonce size '%d' doesn't match configured nonce size '%d'", len(nonceBytes), p.NonceSize)
+	}
+
+	return nil
+}
+
+func (p *Parser) getAnchorUntil(from, until int64) int64 {
+	if from != 0 && until == 0 {
+		// anchor-until defaults to anchor-from plus the configured maximum operation time delta
+		return from + int64(p.MaxOperationTimeDelta)
+	}
+
+	return until
+}
diff --git a/method/sidetreelongform/sidetree-core/versions/1_0/operationparser/recover_test.go b/method/sidetreelongform/sidetree-core/versions/1_0/operationparser/recover_test.go
new file mode 100644
index 0000000..9e419ef
--- /dev/null
+++ b/method/sidetreelongform/sidetree-core/versions/1_0/operationparser/recover_test.go
@@ -0,0 +1,577 @@
+/*
+Copyright SecureKey Technologies Inc. All Rights Reserved.
+ +SPDX-License-Identifier: Apache-2.0 +*/ + +package operationparser + +import ( + "encoding/json" + "testing" + + "github.com/stretchr/testify/require" + + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/api/operation" + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/api/protocol" + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/commitment" + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/encoder" + internal "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/internal/jws" + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/internal/signutil" + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/jws" + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/patch" + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/versions/1_0/model" +) + +const ( + kidKey = "kid" + algKey = "alg" +) + +func TestParseRecoverOperation(t *testing.T) { + p := protocol.Protocol{ + MaxOperationHashLength: maxHashLength, + MaxDeltaSize: maxDeltaSize, + MultihashAlgorithms: []uint{sha2_256}, + SignatureAlgorithms: []string{"alg"}, + KeyAlgorithms: []string{"crv"}, + Patches: []string{"add-public-keys", "remove-public-keys", "add-services", "remove-services", "ietf-json-patch"}, //nolint:lll + } + + parser := New(p) + + t.Run("success", func(t *testing.T) { + request, err := getRecoverRequestBytes() + require.NoError(t, err) + + op, err := parser.ParseRecoverOperation(request, false) + require.NoError(t, err) + require.Equal(t, operation.TypeRecover, op.Type) + + signedData, err := parser.ParseSignedDataForRecover(op.SignedData) + require.NoError(t, err) + + expectedRevealValue, err := commitment.GetRevealValue(signedData.RecoveryKey, sha2_256) + require.NoError(t, err) + + require.Equal(t, expectedRevealValue, op.RevealValue) + }) + t.Run("parse recoverRequest request error", func(t *testing.T) { + schema, err := parser.ParseRecoverOperation([]byte(""), false) + require.Error(t, err) + require.Nil(t, schema) + require.Contains(t, err.Error(), "unexpected end of JSON input") + }) + t.Run("validate recoverRequest request", func(t *testing.T) { + recoverRequest, err := getDefaultRecoverRequest() + require.NoError(t, err) + + recoverRequest.DidSuffix = "" + request, err := json.Marshal(recoverRequest) + require.NoError(t, err) + + op, err := parser.ParseRecoverOperation(request, false) + require.Error(t, err) + require.Nil(t, op) + require.Contains(t, err.Error(), "missing did suffix") + }) + t.Run("parse patch data error", func(t *testing.T) { + recoverRequest, err := getDefaultRecoverRequest() + require.NoError(t, err) + + recoverRequest.Delta = &model.DeltaModel{} + request, err := json.Marshal(recoverRequest) + require.NoError(t, err) + + op, err := parser.ParseRecoverOperation(request, false) + require.Error(t, err) + require.Contains(t, err.Error(), "missing patches") + require.Nil(t, op) + }) + t.Run("validate patch data error", func(t *testing.T) { + delta, err := getDelta() + require.NoError(t, err) + + delta.Patches = []patch.Patch{} + recoverRequest, err := getRecoverRequest(delta, getSignedDataForRecovery()) + require.NoError(t, err) + + request, err := json.Marshal(recoverRequest) + require.NoError(t, err) + + op, err := parser.ParseRecoverOperation(request, false) + require.Error(t, err) + require.Contains(t, err.Error(), "missing patches") + require.Nil(t, op) + }) + t.Run("parse signed data error", func(t *testing.T) { + recoverRequest, err := 
getDefaultRecoverRequest() + require.NoError(t, err) + + recoverRequest.SignedData = invalid + request, err := json.Marshal(recoverRequest) + require.NoError(t, err) + + op, err := parser.ParseRecoverOperation(request, false) + require.Error(t, err) + require.Contains(t, err.Error(), "invalid JWS compact format") + require.Nil(t, op) + }) + t.Run("parse signed data error - unmarshal failed", func(t *testing.T) { + recoverRequest, err := getDefaultRecoverRequest() + require.NoError(t, err) + + compactJWS, err := signutil.SignPayload([]byte("payload"), NewMockSigner()) + require.NoError(t, err) + + recoverRequest.SignedData = compactJWS + request, err := json.Marshal(recoverRequest) + require.NoError(t, err) + + op, err := parser.ParseRecoverOperation(request, false) + require.Error(t, err) + require.Contains(t, err.Error(), "failed to unmarshal signed data model for recover") + require.Nil(t, op) + }) + t.Run("validate signed data error", func(t *testing.T) { + signedData := getSignedDataForRecovery() + signedData.RecoveryKey = &jws.JWK{} + + delta, err := getDelta() + require.NoError(t, err) + + recoverRequest, err := getRecoverRequest(delta, signedData) + require.NoError(t, err) + + request, err := json.Marshal(recoverRequest) + require.NoError(t, err) + + op, err := parser.ParseRecoverOperation(request, false) + require.Error(t, err) + require.Contains(t, err.Error(), + "validate signed data for recovery: signing key validation failed: JWK crv is missing") + require.Nil(t, op) + }) + + t.Run("error - update commitment equals recovery commitment", func(t *testing.T) { + signedData := getSignedDataForRecovery() + + delta, err := getDelta() + require.NoError(t, err) + + delta.UpdateCommitment = signedData.RecoveryCommitment + recoverRequest, err := getRecoverRequest(delta, signedData) + require.NoError(t, err) + + request, err := json.Marshal(recoverRequest) + require.NoError(t, err) + + op, err := parser.ParseRecoverOperation(request, false) + require.Error(t, err) + require.Contains(t, err.Error(), + "recovery and update commitments cannot be equal, re-using public keys is not allowed") + require.Nil(t, op) + }) + + t.Run("error - current commitment cannot equal recovery commitment", func(t *testing.T) { + signedData := getSignedDataForRecovery() + + recoveryCommitment, err := commitment.GetCommitment(signedData.RecoveryKey, sha2_256) + require.NoError(t, err) + + signedData.RecoveryCommitment = recoveryCommitment + + delta, err := getDelta() + require.NoError(t, err) + + recoverRequest, err := getRecoverRequest(delta, signedData) + require.NoError(t, err) + + request, err := json.Marshal(recoverRequest) + require.NoError(t, err) + + op, err := parser.ParseRecoverOperation(request, false) + require.Error(t, err) + require.Contains(t, err.Error(), "re-using public keys for commitment is not allowed") + require.Nil(t, op) + }) +} + +func TestValidateSignedDataForRecovery(t *testing.T) { + p := protocol.Protocol{ + MaxOperationHashLength: maxHashLength, + MultihashAlgorithms: []uint{sha2_256}, + KeyAlgorithms: []string{"crv"}, + } + + parser := New(p) + + t.Run("success", func(t *testing.T) { + signed := getSignedDataForRecovery() + err := parser.validateSignedDataForRecovery(signed) + require.NoError(t, err) + }) + t.Run("invalid patch data hash", func(t *testing.T) { + signed := getSignedDataForRecovery() + signed.DeltaHash = "" + err := parser.validateSignedDataForRecovery(signed) + require.Error(t, err) + require.Contains(t, err.Error(), + "delta hash is not computed with the required 
hash algorithms: [18]") + }) + t.Run("invalid next recovery commitment hash", func(t *testing.T) { + signed := getSignedDataForRecovery() + signed.RecoveryCommitment = "" + err := parser.validateSignedDataForRecovery(signed) + require.Error(t, err) + require.Contains(t, err.Error(), + "recovery commitment is not computed with the required hash algorithms: [18]") + }) + t.Run("recovery commitment exceeds maximum hash length", func(t *testing.T) { + lowMaxHashLength := protocol.Protocol{ + MaxOperationHashLength: 10, + MultihashAlgorithms: []uint{sha2_256}, + KeyAlgorithms: []string{"crv"}, + } + + signed := getSignedDataForRecovery() + + err := New(lowMaxHashLength).validateSignedDataForRecovery(signed) + require.Error(t, err) + require.Contains(t, err.Error(), "recovery commitment length[46] exceeds maximum hash length[10]") + }) +} + +func TestParseSignedData(t *testing.T) { + mockSigner := NewMockSigner() + + p := protocol.Protocol{ + MultihashAlgorithms: []uint{sha2_256}, + SignatureAlgorithms: []string{"alg"}, + } + + parser := New(p) + + t.Run("success", func(t *testing.T) { + jwsSignature, err := internal.NewJWS(nil, nil, []byte("payload"), mockSigner) + require.NoError(t, err) + + compactJWS, err := jwsSignature.SerializeCompact(false) + require.NoError(t, err) + + jws, err := parser.parseSignedData(compactJWS) + require.NoError(t, err) + require.NotNil(t, jws) + }) + t.Run("missing signed data", func(t *testing.T) { + jws, err := parser.parseSignedData("") + require.Error(t, err) + require.Nil(t, jws) + require.Contains(t, err.Error(), "missing signed data") + }) + t.Run("missing protected headers", func(t *testing.T) { + jws, err := parser.parseSignedData(".cGF5bG9hZA.c2lnbmF0dXJl") + require.Error(t, err) + require.Nil(t, jws) + require.Contains(t, err.Error(), "unmarshal JSON headers: unexpected end of JSON input") + }) + t.Run("missing payload", func(t *testing.T) { + jwsSignature, err := internal.NewJWS(nil, nil, nil, mockSigner) + require.NoError(t, err) + + compactJWS, err := jwsSignature.SerializeCompact(false) + require.NoError(t, err) + + jws, err := parser.parseSignedData(compactJWS) + require.Error(t, err) + require.Nil(t, jws) + require.Contains(t, err.Error(), "compact jws payload is empty") + }) + t.Run("missing signature", func(t *testing.T) { + jws, err := parser.parseSignedData("eyJhbGciOiJhbGciLCJraWQiOiJraWQifQ.cGF5bG9hZA.") + require.Error(t, err) + require.Nil(t, jws) + require.Contains(t, err.Error(), "compact jws signature is empty") + }) + t.Run("error - invalid signing algorithm", func(t *testing.T) { + jwsSignature, err := internal.NewJWS(nil, nil, []byte("payload"), mockSigner) + require.NoError(t, err) + + compactJWS, err := jwsSignature.SerializeCompact(false) + require.NoError(t, err) + + parser := New(protocol.Protocol{ + SignatureAlgorithms: []string{"other"}, + }) + + jws, err := parser.parseSignedData(compactJWS) + require.Error(t, err) + require.Nil(t, jws) + require.Contains(t, err.Error(), + "failed to parse signed data: algorithm 'alg' is not in the allowed list [other]") + }) +} + +func TestValidateSigningKey(t *testing.T) { + testJWK := &jws.JWK{ + Kty: "kty", + Crv: "crv", + X: "x", + } + + parser := New(protocol.Protocol{KeyAlgorithms: []string{"crv"}, NonceSize: 16}) + + t.Run("success", func(t *testing.T) { + err := parser.validateSigningKey(testJWK) + require.NoError(t, err) + }) + + t.Run("error - required info is missing (kty)", func(t *testing.T) { + err := parser.validateSigningKey(&jws.JWK{ + Crv: "crv", + X: "x", + }) + 
require.Error(t, err) + require.Contains(t, err.Error(), "signing key validation failed: JWK kty is missing") + }) + + t.Run("error - key algorithm not supported", func(t *testing.T) { + err := New(protocol.Protocol{KeyAlgorithms: []string{"other"}}).validateSigningKey(testJWK) + require.Error(t, err) + require.Contains(t, err.Error(), "key algorithm 'crv' is not in the allowed list [other]") + }) + + t.Run("error - failed to decode signing key nonce", func(t *testing.T) { + nonceJWK := &jws.JWK{ + Kty: "kty", + Crv: "crv", + X: "x", + Nonce: "nonce", + } + + err := parser.validateSigningKey(nonceJWK) + require.Error(t, err) + require.Contains(t, err.Error(), + "validate signing key nonce: failed to decode nonce 'nonce': illegal base64 data") + }) + + t.Run("error - failed to validate nonce size", func(t *testing.T) { + nonceJWK := &jws.JWK{ + Kty: "kty", + Crv: "crv", + X: "x", + Nonce: encoder.EncodeToString([]byte("nonce")), + } + + err := parser.validateSigningKey(nonceJWK) + require.Error(t, err) + require.Contains(t, err.Error(), + "validate signing key nonce: nonce size '5' doesn't match configured nonce size '16'") + }) + + t.Run("success - valid nonce size", func(t *testing.T) { + nonceJWK := &jws.JWK{ + Kty: "kty", + Crv: "crv", + X: "x", + Nonce: encoder.EncodeToString([]byte("nonce")), + } + + parserWithNonceSize := New(protocol.Protocol{ + KeyAlgorithms: []string{"crv"}, + NonceSize: 5, + }) + + err := parserWithNonceSize.validateSigningKey(nonceJWK) + require.NoError(t, err) + }) +} + +func TestValidateRecoverRequest(t *testing.T) { + parser := New(protocol.Protocol{MaxOperationHashLength: maxHashLength, MultihashAlgorithms: []uint{sha2_256}}) + + t.Run("success", func(t *testing.T) { + recoverRequest, err := getDefaultRecoverRequest() + require.NoError(t, err) + + err = parser.validateRecoverRequest(recoverRequest) + require.NoError(t, err) + }) + t.Run("missing signed data", func(t *testing.T) { + recoverRequest, err := getDefaultRecoverRequest() + require.NoError(t, err) + recoverRequest.SignedData = "" + + err = parser.validateRecoverRequest(recoverRequest) + require.Error(t, err) + require.Contains(t, err.Error(), "missing signed data") + }) + t.Run("missing did suffix", func(t *testing.T) { + recoverRequest, err := getDefaultRecoverRequest() + require.NoError(t, err) + recoverRequest.DidSuffix = "" + + err = parser.validateRecoverRequest(recoverRequest) + require.Error(t, err) + require.Contains(t, err.Error(), "missing did suffix") + }) + + t.Run("invalid reveal value", func(t *testing.T) { + recoverRequest, err := getDefaultRecoverRequest() + require.NoError(t, err) + recoverRequest.RevealValue = "reveal" + + err = parser.validateRecoverRequest(recoverRequest) + require.Error(t, err) + require.Contains(t, err.Error(), "reveal value is not computed with the required hash algorithms: [18]") + }) +} + +func TestValidateProtectedHeader(t *testing.T) { + algs := []string{"alg-1", "alg-2"} + + parser := New(protocol.Protocol{}) + + t.Run("success - kid can be empty", func(t *testing.T) { + protected := getHeaders("alg-1", "") + + err := parser.validateProtectedHeaders(protected, algs) + require.NoError(t, err) + }) + t.Run("success - kid can be provided", func(t *testing.T) { + protected := getHeaders("alg-1", "kid-1") + + err := parser.validateProtectedHeaders(protected, algs) + require.NoError(t, err) + }) + t.Run("error - missing header", func(t *testing.T) { + err := parser.validateProtectedHeaders(nil, algs) + require.Error(t, err) + require.Contains(t, err.Error(), 
"missing protected headers") + }) + + t.Run("err - algorithm must be present in the protected header", func(t *testing.T) { + protected := make(jws.Headers) + protected[kidKey] = "kid-1" + + err := parser.validateProtectedHeaders(protected, algs) + require.Error(t, err) + require.Contains(t, err.Error(), "algorithm must be present in the protected header") + }) + + t.Run("err - algorithm cannot be empty", func(t *testing.T) { + protected := getHeaders("", "kid-1") + + err := parser.validateProtectedHeaders(protected, algs) + require.Error(t, err) + require.Contains(t, err.Error(), "algorithm cannot be empty in the protected header") + }) + + t.Run("err - invalid protected header value", func(t *testing.T) { + protected := make(jws.Headers) + + protected["kid"] = "kid" + protected["alg"] = "alg" + protected["other"] = "value" + + err := parser.validateProtectedHeaders(protected, algs) + require.Error(t, err) + require.Contains(t, err.Error(), "invalid protected header: other") + }) + t.Run("error - algorithm not allowed", func(t *testing.T) { + protected := getHeaders("alg-other", "kid") + + err := parser.validateProtectedHeaders(protected, algs) + require.Error(t, err) + require.Equal(t, "algorithm 'alg-other' is not in the allowed list [alg-1 alg-2]", err.Error()) + }) +} + +func getHeaders(alg, kid string) jws.Headers { + header := make(jws.Headers) + header[algKey] = alg + header[kidKey] = kid + + return header +} + +func getRecoverRequest(delta *model.DeltaModel, signedData *model.RecoverSignedDataModel, +) (*model.RecoverRequest, error) { + compactJWS, err := signutil.SignModel(signedData, NewMockSigner()) + if err != nil { + return nil, err + } + + rv, err := commitment.GetRevealValue(signedData.RecoveryKey, sha2_256) + if err != nil { + return nil, err + } + + return &model.RecoverRequest{ + Operation: operation.TypeRecover, + DidSuffix: "suffix", + Delta: delta, + SignedData: compactJWS, + RevealValue: rv, + }, nil +} + +func getDefaultRecoverRequest() (*model.RecoverRequest, error) { + delta, err := getDelta() + if err != nil { + return nil, err + } + + return getRecoverRequest(delta, getSignedDataForRecovery()) +} + +func getSignedDataForRecovery() *model.RecoverSignedDataModel { + return &model.RecoverSignedDataModel{ + RecoveryKey: &jws.JWK{ + Kty: "kty", + Crv: "crv", + X: "x", + }, + RecoveryCommitment: computeMultihash([]byte("recoveryReveal")), + DeltaHash: computeMultihash([]byte("operation")), + } +} + +func getRecoverRequestBytes() ([]byte, error) { + req, err := getDefaultRecoverRequest() + if err != nil { + return nil, err + } + + return json.Marshal(req) +} + +// MockSigner implements signer interface. +type MockSigner struct { + MockSignature []byte + MockHeaders jws.Headers + Err error +} + +// New creates new mock signer (default to recovery signer). +func NewMockSigner() *MockSigner { + headers := make(jws.Headers) + headers[jws.HeaderAlgorithm] = "alg" + headers[jws.HeaderKeyID] = "kid" + + return &MockSigner{MockHeaders: headers, MockSignature: []byte("signature")} +} + +// Headers provides required JWS protected headers. It provides information about signing key and algorithm. +func (ms *MockSigner) Headers() jws.Headers { + return ms.MockHeaders +} + +// Sign signs msg and returns mock signature value. 
+func (ms *MockSigner) Sign(msg []byte) ([]byte, error) { + if ms.Err != nil { + return nil, ms.Err + } + + return ms.MockSignature, nil +} diff --git a/method/sidetreelongform/sidetree-core/versions/1_0/operationparser/update.go b/method/sidetreelongform/sidetree-core/versions/1_0/operationparser/update.go new file mode 100644 index 0000000..cf9e491 --- /dev/null +++ b/method/sidetreelongform/sidetree-core/versions/1_0/operationparser/update.go @@ -0,0 +1,119 @@ +/* +Copyright SecureKey Technologies Inc. All Rights Reserved. + +SPDX-License-Identifier: Apache-2.0 +*/ + +package operationparser + +import ( + "encoding/json" + "errors" + "fmt" + + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/api/operation" + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/hashing" + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/versions/1_0/model" +) + +// ParseUpdateOperation will parse update operation. +func (p *Parser) ParseUpdateOperation(request []byte, batch bool) (*model.Operation, error) { + schema, err := p.parseUpdateRequest(request) + if err != nil { + return nil, err + } + + signedData, err := p.ParseSignedDataForUpdate(schema.SignedData) + if err != nil { + return nil, err + } + + if !batch { + until := p.getAnchorUntil(signedData.AnchorFrom, signedData.AnchorUntil) + + err = p.anchorTimeValidator.Validate(signedData.AnchorFrom, until) + if err != nil { + return nil, err + } + + err = p.ValidateDelta(schema.Delta) + if err != nil { + return nil, err + } + + err = p.validateCommitment(signedData.UpdateKey, schema.Delta.UpdateCommitment) + if err != nil { + return nil, fmt.Errorf("calculate current commitment: %s", err.Error()) + } + } + + err = hashing.IsValidModelMultihash(signedData.UpdateKey, schema.RevealValue) + if err != nil { + return nil, fmt.Errorf("canonicalized update public key hash doesn't match reveal value: %s", err.Error()) + } + + return &model.Operation{ + Type: operation.TypeUpdate, + OperationRequest: request, + UniqueSuffix: schema.DidSuffix, + Delta: schema.Delta, + SignedData: schema.SignedData, + RevealValue: schema.RevealValue, + }, nil +} + +func (p *Parser) parseUpdateRequest(payload []byte) (*model.UpdateRequest, error) { + schema := &model.UpdateRequest{} + + err := json.Unmarshal(payload, schema) + if err != nil { + return nil, fmt.Errorf("failed to unmarshal update request: %s", err.Error()) + } + + if err := p.validateUpdateRequest(schema); err != nil { + return nil, err + } + + return schema, nil +} + +// ParseSignedDataForUpdate will parse and validate signed data for update. 
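+// The input is expected to be a compact JWS whose payload is a JSON-encoded
+// model.UpdateSignedDataModel (the update key plus the delta hash); both the
+// JWS protected headers and the decoded model are validated against the
+// protocol parameters.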
+func (p *Parser) ParseSignedDataForUpdate(compactJWS string) (*model.UpdateSignedDataModel, error) { + jws, err := p.parseSignedData(compactJWS) + if err != nil { + return nil, err + } + + schema := &model.UpdateSignedDataModel{} + + err = json.Unmarshal(jws.Payload, schema) + if err != nil { + return nil, fmt.Errorf("failed to unmarshal signed data model for update: %s", err.Error()) + } + + if err := p.validateSignedDataForUpdate(schema); err != nil { + return nil, fmt.Errorf("validate signed data for update: %s", err.Error()) + } + + return schema, nil +} + +func (p *Parser) validateUpdateRequest(update *model.UpdateRequest) error { + if update.DidSuffix == "" { + return errors.New("missing did suffix") + } + + if update.SignedData == "" { + return errors.New("missing signed data") + } + + return p.validateMultihash(update.RevealValue, "reveal value") +} + +func (p *Parser) validateSignedDataForUpdate(signedData *model.UpdateSignedDataModel) error { + if err := p.validateSigningKey(signedData.UpdateKey); err != nil { + return err + } + + return p.validateMultihash(signedData.DeltaHash, "delta hash") +} diff --git a/method/sidetreelongform/sidetree-core/versions/1_0/operationparser/update_test.go b/method/sidetreelongform/sidetree-core/versions/1_0/operationparser/update_test.go new file mode 100644 index 0000000..79383ce --- /dev/null +++ b/method/sidetreelongform/sidetree-core/versions/1_0/operationparser/update_test.go @@ -0,0 +1,323 @@ +/* +Copyright SecureKey Technologies Inc. All Rights Reserved. + +SPDX-License-Identifier: Apache-2.0 +*/ + +package operationparser + +import ( + "encoding/json" + "testing" + + "github.com/stretchr/testify/require" + + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/api/operation" + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/api/protocol" + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/commitment" + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/hashing" + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/internal/signutil" + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/jws" + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/patch" + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/versions/1_0/model" +) + +func TestParseUpdateOperation(t *testing.T) { + p := protocol.Protocol{ + MaxOperationHashLength: maxHashLength, + MaxDeltaSize: maxDeltaSize, + MultihashAlgorithms: []uint{sha2_256}, + SignatureAlgorithms: []string{"alg"}, + KeyAlgorithms: []string{"crv"}, + Patches: []string{"add-public-keys", "remove-public-keys", "add-services", "remove-services", "ietf-json-patch"}, //nolint:lll + } + + parser := New(p) + + t.Run("success", func(t *testing.T) { + payload, err := getUpdateRequestBytes() + require.NoError(t, err) + + op, err := parser.ParseUpdateOperation(payload, false) + require.NoError(t, err) + require.Equal(t, operation.TypeUpdate, op.Type) + + signedData, err := parser.ParseSignedDataForUpdate(op.SignedData) + require.NoError(t, err) + + expectedRevealValue, err := commitment.GetRevealValue(signedData.UpdateKey, sha2_256) + require.NoError(t, err) + + require.Equal(t, expectedRevealValue, op.RevealValue) + }) + t.Run("invalid json", func(t *testing.T) { + schema, err := parser.ParseUpdateOperation([]byte(""), false) + require.Error(t, err) + require.Nil(t, schema) + require.Contains(t, err.Error(), "unexpected end of JSON input") + }) + t.Run("validate update request error", 
func(t *testing.T) { + req, err := getDefaultUpdateRequest() + require.NoError(t, err) + req.DidSuffix = "" + + payload, err := json.Marshal(req) + require.NoError(t, err) + + schema, err := parser.ParseUpdateOperation(payload, false) + require.Error(t, err) + require.Nil(t, schema) + require.Contains(t, err.Error(), "missing did suffix") + }) + t.Run("invalid next update commitment hash", func(t *testing.T) { + delta, err := getUpdateDelta() + require.NoError(t, err) + delta.UpdateCommitment = "" + + req, err := getUpdateRequest(delta) + require.NoError(t, err) + payload, err := json.Marshal(req) + require.NoError(t, err) + + schema, err := parser.ParseUpdateOperation(payload, false) + require.Error(t, err) + require.Nil(t, schema) + require.Contains(t, err.Error(), + "update commitment is not computed with the required hash algorithms: [18]") + }) + t.Run("invalid signed data", func(t *testing.T) { + delta, err := getUpdateDelta() + require.NoError(t, err) + + req, err := getUpdateRequest(delta) + require.NoError(t, err) + + req.SignedData = "." + payload, err := json.Marshal(req) + require.NoError(t, err) + + schema, err := parser.ParseUpdateOperation(payload, false) + require.Error(t, err) + require.Nil(t, schema) + require.Contains(t, err.Error(), "invalid JWS compact format") + }) + t.Run("parse signed data error - unmarshal failed", func(t *testing.T) { + req, err := getDefaultUpdateRequest() + require.NoError(t, err) + + compactJWS, err := signutil.SignPayload([]byte("payload"), NewMockSigner()) + require.NoError(t, err) + + req.SignedData = compactJWS + request, err := json.Marshal(req) + require.NoError(t, err) + + op, err := parser.ParseUpdateOperation(request, false) + require.Error(t, err) + require.Contains(t, err.Error(), "failed to unmarshal signed data model for update") + require.Nil(t, op) + }) + + t.Run("error - current commitment cannot equal update commitment", func(t *testing.T) { + delta, err := getUpdateDelta() + require.NoError(t, err) + + currentCommitment, err := commitment.GetCommitment(testJWK, sha2_256) + require.NoError(t, err) + + delta.UpdateCommitment = currentCommitment + + req, err := getUpdateRequest(delta) + require.NoError(t, err) + + payload, err := json.Marshal(req) + require.NoError(t, err) + + schema, err := parser.ParseUpdateOperation(payload, false) + require.Error(t, err) + require.Nil(t, schema) + require.Contains(t, err.Error(), "re-using public keys for commitment is not allowed") + }) +} + +func TestParseSignedDataForUpdate(t *testing.T) { + p := protocol.Protocol{ + MaxOperationHashLength: maxHashLength, + MultihashAlgorithms: []uint{sha2_256}, + SignatureAlgorithms: []string{"alg"}, + KeyAlgorithms: []string{"crv"}, + } + + parser := New(p) + + t.Run("success", func(t *testing.T) { + req, err := getDefaultUpdateRequest() + require.NoError(t, err) + + schema, err := parser.ParseSignedDataForUpdate(req.SignedData) + require.NoError(t, err) + require.NotNil(t, schema) + }) + t.Run("invalid JWS compact format", func(t *testing.T) { + schema, err := parser.ParseSignedDataForUpdate("invalid") + require.Error(t, err) + require.Nil(t, schema) + require.Contains(t, err.Error(), "invalid JWS compact format") + }) + t.Run("hash not computed with latest algorithm", func(t *testing.T) { + signedModel := model.UpdateSignedDataModel{ + DeltaHash: "hash", + UpdateKey: testJWK, + } + + payload, err := json.Marshal(signedModel) + require.NoError(t, err) + + compactJWS, err := signutil.SignPayload(payload, NewMockSigner()) + require.NoError(t, err) + + 
schema, err := parser.ParseSignedDataForUpdate(compactJWS) + require.Error(t, err) + require.Nil(t, schema) + require.Contains(t, err.Error(), "delta hash is not computed with the required hash algorithms: [18]") + }) + t.Run("payload not JSON object", func(t *testing.T) { + compactJWS, err := signutil.SignPayload([]byte("test"), NewMockSigner()) + require.NoError(t, err) + + schema, err := parser.ParseSignedDataForUpdate(compactJWS) + require.Error(t, err) + require.Nil(t, schema) + require.Contains(t, err.Error(), "invalid character") + }) +} + +func TestValidateUpdateDelta(t *testing.T) { + t.Run("invalid next update commitment hash", func(t *testing.T) { + p := protocol.Protocol{ + MultihashAlgorithms: []uint{sha2_256}, + Patches: []string{"add-public-keys", "remove-public-keys", "add-services", "remove-services", "ietf-json-patch"}, //nolint:lll + } + + parser := New(p) + + delta, err := getUpdateDelta() + require.NoError(t, err) + + delta.UpdateCommitment = "" + err = parser.ValidateDelta(delta) + require.Error(t, err) + require.Contains(t, err.Error(), + "update commitment is not computed with the required hash algorithms") + }) +} + +func TestValidateUpdateRequest(t *testing.T) { + parser := New(protocol.Protocol{MaxOperationHashLength: maxHashLength, MultihashAlgorithms: []uint{sha2_256}}) + + t.Run("success", func(t *testing.T) { + update, err := getDefaultUpdateRequest() + require.NoError(t, err) + + err = parser.validateUpdateRequest(update) + require.NoError(t, err) + }) + t.Run("missing signed data", func(t *testing.T) { + update, err := getDefaultUpdateRequest() + require.NoError(t, err) + update.SignedData = "" + + err = parser.validateUpdateRequest(update) + require.Error(t, err) + require.Contains(t, err.Error(), "missing signed data") + }) + t.Run("missing did suffix", func(t *testing.T) { + update, err := getDefaultUpdateRequest() + require.NoError(t, err) + update.DidSuffix = "" + + err = parser.validateUpdateRequest(update) + require.Error(t, err) + require.Contains(t, err.Error(), "missing did suffix") + }) + t.Run("invalid reveal value", func(t *testing.T) { + update, err := getDefaultUpdateRequest() + require.NoError(t, err) + update.RevealValue = "reveal" + + err = parser.validateUpdateRequest(update) + require.Error(t, err) + require.Contains(t, err.Error(), "reveal value is not computed with the required hash algorithms: [18]") + }) +} + +func getUpdateRequest(delta *model.DeltaModel) (*model.UpdateRequest, error) { + deltaHash, err := hashing.CalculateModelMultihash(delta, sha2_256) + if err != nil { + return nil, err + } + + signedModel := model.UpdateSignedDataModel{ + DeltaHash: deltaHash, + UpdateKey: testJWK, + } + + rv, err := commitment.GetRevealValue(testJWK, sha2_256) + if err != nil { + return nil, err + } + + compactJWS, err := signutil.SignModel(signedModel, NewMockSigner()) + if err != nil { + return nil, err + } + + return &model.UpdateRequest{ + DidSuffix: "suffix", + SignedData: compactJWS, + Operation: operation.TypeUpdate, + Delta: delta, + RevealValue: rv, + }, nil +} + +func getDefaultUpdateRequest() (*model.UpdateRequest, error) { + delta, err := getUpdateDelta() + if err != nil { + return nil, err + } + + return getUpdateRequest(delta) +} + +func getUpdateRequestBytes() ([]byte, error) { + req, err := getDefaultUpdateRequest() + if err != nil { + return nil, err + } + + return json.Marshal(req) +} + +func getUpdateDelta() (*model.DeltaModel, error) { + jsonPatch, err := patch.NewJSONPatch(getTestPatch()) + if err != nil { + return nil, err 
+ }
+
+ return &model.DeltaModel{
+ UpdateCommitment: computeMultihash([]byte("updateReveal")),
+ Patches: []patch.Patch{jsonPatch},
+ }, nil
+}
+
+func getTestPatch() string {
+ return `[{"op": "replace", "path": "/name", "value": "Jane"}]`
+}
+
+//nolint:gochecknoglobals
+var testJWK = &jws.JWK{
+ Crv: "crv",
+ Kty: "kty",
+ X: "x",
+} diff --git a/method/sidetreelongform/sidetree-core/versions/1_0/txnprocessor/txnprocessor.go b/method/sidetreelongform/sidetree-core/versions/1_0/txnprocessor/txnprocessor.go new file mode 100644 index 0000000..23fbaea --- /dev/null +++ b/method/sidetreelongform/sidetree-core/versions/1_0/txnprocessor/txnprocessor.go @@ -0,0 +1,164 @@ +/*
+Copyright SecureKey Technologies Inc. All Rights Reserved.
+
+SPDX-License-Identifier: Apache-2.0
+*/
+
+package txnprocessor
+
+import (
+ "fmt"
+
+ "github.com/pkg/errors"
+
+ "github.com/trustbloc/logutil-go/pkg/log"
+
+ "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/api/operation"
+ "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/api/protocol"
+ "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/api/txn"
+ logfields "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/internal/log"
+)
+
+var logger = log.New("sidetree-core-observer")
+
+// OperationStore provides access to the operation store.
+type OperationStore interface {
+ Put(ops []*operation.AnchoredOperation) error
+}
+
+type unpublishedOperationStore interface {
+ // DeleteAll deletes unpublished operations.
+ DeleteAll(ops []*operation.AnchoredOperation) error
+}
+
+// Providers contains the providers required by the TxnProcessor.
+type Providers struct {
+ OpStore OperationStore
+ OperationProtocolProvider protocol.OperationProvider
+}
+
+// TxnProcessor processes Sidetree transactions by persisting them to an operation store.
+type TxnProcessor struct {
+ *Providers
+
+ unpublishedOperationStore unpublishedOperationStore
+ unpublishedOperationTypes []operation.Type
+}
+
+// New returns a new Sidetree transaction processor.
+func New(providers *Providers, opts ...Option) *TxnProcessor {
+ tp := &TxnProcessor{
+ Providers: providers,
+
+ unpublishedOperationStore: &noopUnpublishedOpsStore{},
+ unpublishedOperationTypes: []operation.Type{},
+ }
+
+ // apply options
+ for _, opt := range opts {
+ opt(tp)
+ }
+
+ return tp
+}
+
+// Option is an option for the transaction processor.
+type Option func(opts *TxnProcessor)
+
+// WithUnpublishedOperationStore sets the store that holds unpublished operations,
+// along with the operation types to which it applies, so that those operations
+// can be deleted once they have been anchored.
+func WithUnpublishedOperationStore(store unpublishedOperationStore, opTypes []operation.Type) Option {
+ return func(opts *TxnProcessor) {
+ opts.unpublishedOperationStore = store
+ opts.unpublishedOperationTypes = opTypes
+ }
+}
+
+// Process persists all the operations for the given anchor. 
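+// It retrieves the batch operations for the transaction from the operation
+// protocol provider, discards duplicate suffixes within the batch, stamps each
+// operation with its anchoring time, persists the operations to the operation
+// store, deletes the matching unpublished operations, and returns the number
+// of operations that were persisted.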
+
+//
+//nolint:gocritic
+func (p *TxnProcessor) Process(sidetreeTxn txn.SidetreeTxn, suffixes ...string) (int, error) {
+ logger.Debug("Processing sidetree txn for suffixes", logfields.WithSidetreeTxn(sidetreeTxn),
+ logfields.WithSuffixes(suffixes...))
+
+ txnOps, err := p.OperationProtocolProvider.GetTxnOperations(&sidetreeTxn)
+ if err != nil {
+ return 0, fmt.Errorf("failed to retrieve operations for anchor string[%s]: %s", sidetreeTxn.AnchorString, err)
+ }
+
+ return p.processTxnOperations(txnOps, &sidetreeTxn)
+}
+
+func (p *TxnProcessor) processTxnOperations(
+ txnOps []*operation.AnchoredOperation, sidetreeTxn *txn.SidetreeTxn) (int, error) {
+ logger.Debug("Processing transaction operations", logfields.WithTotal(len(txnOps)))
+
+ batchSuffixes := make(map[string]bool)
+
+ var unpublishedOps []*operation.AnchoredOperation
+
+ var ops []*operation.AnchoredOperation
+
+ for _, op := range txnOps {
+ _, ok := batchSuffixes[op.UniqueSuffix]
+ if ok {
+ logger.Warn("Duplicate suffix found in transaction operations: discarding operation",
+ logfields.WithNamespace(sidetreeTxn.Namespace),
+ logfields.WithSuffix(op.UniqueSuffix), logfields.WithOperation(op))
+
+ continue
+ }
+
+ updatedOp := updateAnchoredOperation(op, sidetreeTxn)
+
+ logger.Debug("Updated operation with anchoring time", logfields.WithSuffix(updatedOp.UniqueSuffix))
+
+ ops = append(ops, updatedOp)
+
+ batchSuffixes[op.UniqueSuffix] = true
+
+ if containsOperationType(p.unpublishedOperationTypes, op.Type) {
+ unpublishedOps = append(unpublishedOps, op)
+ }
+ }
+
+ err := p.OpStore.Put(ops)
+ if err != nil {
+ return 0, errors.Wrapf(err, "failed to store operation from anchor string[%s]", sidetreeTxn.AnchorString)
+ }
+
+ err = p.unpublishedOperationStore.DeleteAll(unpublishedOps)
+ if err != nil {
+ return 0, fmt.Errorf(
+ "failed to delete unpublished operations for anchor string[%s]: %w", sidetreeTxn.AnchorString, err)
+ }
+
+ return len(ops), nil
+}
+
+func updateAnchoredOperation(op *operation.AnchoredOperation, sidetreeTxn *txn.SidetreeTxn,
+) *operation.AnchoredOperation {
+ // The logical anchoring time that this operation was anchored on
+ op.TransactionTime = sidetreeTxn.TransactionTime
+ // The transaction number of the transaction this operation was batched within
+ op.TransactionNumber = sidetreeTxn.TransactionNumber
+ // The version of the protocol that was used for this operation
+ op.ProtocolVersion = sidetreeTxn.ProtocolVersion
+
+ return op
+}
+
+func containsOperationType(values []operation.Type, value operation.Type) bool {
+ for _, v := range values {
+ if v == value {
+ return true
+ }
+ }
+
+ return false
+}
+
+type noopUnpublishedOpsStore struct{}
+
+func (noop *noopUnpublishedOpsStore) DeleteAll(_ []*operation.AnchoredOperation) error {
+ return nil
+} diff --git a/method/sidetreelongform/sidetree-core/versions/1_0/txnprocessor/txnprocessor_test.go b/method/sidetreelongform/sidetree-core/versions/1_0/txnprocessor/txnprocessor_test.go new file mode 100644 index 0000000..dc14b56 --- /dev/null +++ b/method/sidetreelongform/sidetree-core/versions/1_0/txnprocessor/txnprocessor_test.go @@ -0,0 +1,179 @@ +/*
+Copyright SecureKey Technologies Inc. All Rights Reserved. 
+ +SPDX-License-Identifier: Apache-2.0 +*/ + +package txnprocessor + +import ( + "fmt" + "testing" + + "github.com/stretchr/testify/require" + + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/api/operation" + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/api/txn" +) + +const anchorString = "1.coreIndexURI" + +func TestTxnProcessor_Process(t *testing.T) { + t.Run("test error from txn operations provider", func(t *testing.T) { + errExpected := fmt.Errorf("txn operations provider error") + + opp := &mockTxnOpsProvider{ + err: errExpected, + } + + providers := &Providers{ + OpStore: &mockOperationStore{}, + OperationProtocolProvider: opp, + } + + p := New(providers) + _, err := p.Process(txn.SidetreeTxn{}) + require.Error(t, err) + require.Contains(t, err.Error(), errExpected.Error()) + }) +} + +func TestProcessTxnOperations(t *testing.T) { + t.Run("test error from operationStore Put", func(t *testing.T) { + providers := &Providers{ + OpStore: &mockOperationStore{putFunc: func(ops []*operation.AnchoredOperation) error { + return fmt.Errorf("put error") + }}, + } + + p := New(providers) + _, err := p.processTxnOperations( + []*operation.AnchoredOperation{{UniqueSuffix: "abc"}}, &txn.SidetreeTxn{AnchorString: anchorString}) + require.Error(t, err) + require.Contains(t, err.Error(), "failed to store operation from anchor string") + }) + + t.Run("test success", func(t *testing.T) { + providers := &Providers{ + OperationProtocolProvider: &mockTxnOpsProvider{}, + OpStore: &mockOperationStore{}, + } + + p := New(providers) + batchOps, err := p.OperationProtocolProvider.GetTxnOperations(&txn.SidetreeTxn{AnchorString: anchorString}) + require.NoError(t, err) + + numProcessed, err := p.processTxnOperations(batchOps, &txn.SidetreeTxn{AnchorString: anchorString}) + require.NoError(t, err) + require.Equal(t, 1, numProcessed) + }) + + t.Run("success - with unpublished operation store option", func(t *testing.T) { + providers := &Providers{ + OperationProtocolProvider: &mockTxnOpsProvider{}, + OpStore: &mockOperationStore{}, + } + + opt := WithUnpublishedOperationStore(&mockUnpublishedOpsStore{}, []operation.Type{operation.TypeUpdate}) + + p := New(providers, opt) + batchOps, err := p.OperationProtocolProvider.GetTxnOperations(&txn.SidetreeTxn{AnchorString: anchorString}) + require.NoError(t, err) + + _, err = p.processTxnOperations(batchOps, &txn.SidetreeTxn{AnchorString: anchorString}) + require.NoError(t, err) + }) + + t.Run("error - unpublished operation store error", func(t *testing.T) { + providers := &Providers{ + OperationProtocolProvider: &mockTxnOpsProvider{}, + OpStore: &mockOperationStore{}, + } + + opt := WithUnpublishedOperationStore( + &mockUnpublishedOpsStore{DeleteAllErr: fmt.Errorf("delete all error")}, + []operation.Type{operation.TypeUpdate}) + + p := New(providers, opt) + batchOps, err := p.OperationProtocolProvider.GetTxnOperations(&txn.SidetreeTxn{AnchorString: anchorString}) + require.NoError(t, err) + + _, err = p.processTxnOperations(batchOps, &txn.SidetreeTxn{AnchorString: anchorString}) + require.Error(t, err) + require.Contains(t, err.Error(), + "failed to delete unpublished operations for anchor string[1.coreIndexURI]: delete all error") + }) + + t.Run("success - multiple operations with same suffix in transaction operations", func(t *testing.T) { + providers := &Providers{ + OperationProtocolProvider: &mockTxnOpsProvider{}, + OpStore: &mockOperationStore{}, + } + + p := New(providers) + batchOps, err := 
p.OperationProtocolProvider.GetTxnOperations(&txn.SidetreeTxn{AnchorString: anchorString}) + require.NoError(t, err) + + // add same operations again to create scenario where batch has multiple operations with same suffix + // only first operation will be processed, subsequent operations will be discarded + batchOps = append(batchOps, batchOps...) + + _, err = p.processTxnOperations(batchOps, &txn.SidetreeTxn{AnchorString: anchorString}) + require.NoError(t, err) + }) +} + +func TestUpdateOperation(t *testing.T) { + t.Run("test success", func(t *testing.T) { + updatedOps := updateAnchoredOperation(&operation.AnchoredOperation{UniqueSuffix: "abc"}, + &txn.SidetreeTxn{TransactionTime: 20, TransactionNumber: 2}) + require.Equal(t, uint64(20), updatedOps.TransactionTime) + require.Equal(t, uint64(2), updatedOps.TransactionNumber) + }) +} + +type mockOperationStore struct { + putFunc func(ops []*operation.AnchoredOperation) error + getFunc func(suffix string) ([]*operation.AnchoredOperation, error) +} + +func (m *mockOperationStore) Put(ops []*operation.AnchoredOperation) error { + if m.putFunc != nil { + return m.putFunc(ops) + } + + return nil +} + +func (m *mockOperationStore) Get(suffix string) ([]*operation.AnchoredOperation, error) { + if m.getFunc != nil { + return m.getFunc(suffix) + } + + return nil, nil +} + +type mockTxnOpsProvider struct { + err error +} + +func (m *mockTxnOpsProvider) GetTxnOperations(txn *txn.SidetreeTxn) ([]*operation.AnchoredOperation, error) { + if m.err != nil { + return nil, m.err + } + + op := &operation.AnchoredOperation{ + UniqueSuffix: "abc", + Type: operation.TypeUpdate, + } + + return []*operation.AnchoredOperation{op}, nil +} + +type mockUnpublishedOpsStore struct { + DeleteAllErr error +} + +func (m *mockUnpublishedOpsStore) DeleteAll(_ []*operation.AnchoredOperation) error { + return m.DeleteAllErr +} diff --git a/method/sidetreelongform/sidetree-core/versions/1_0/txnprovider/anchordata.go b/method/sidetreelongform/sidetree-core/versions/1_0/txnprovider/anchordata.go new file mode 100644 index 0000000..ced0019 --- /dev/null +++ b/method/sidetreelongform/sidetree-core/versions/1_0/txnprovider/anchordata.go @@ -0,0 +1,62 @@ +/* +Copyright SecureKey Technologies Inc. All Rights Reserved. + +SPDX-License-Identifier: Apache-2.0 +*/ + +package txnprovider + +import ( + "fmt" + "regexp" + "strconv" + "strings" +) + +const ( + delimiter = "." + allowedParts = 2 +) + +//nolint:gochecknoglobals +var ( + integerRegex = regexp.MustCompile(`^[1-9]\d*$`) +) + +// AnchorData holds anchored data. +type AnchorData struct { + NumberOfOperations int + CoreIndexFileURI string +} + +// ParseAnchorData will parse anchor string into anchor data model. +func ParseAnchorData(data string) (*AnchorData, error) { + parts := strings.Split(data, delimiter) + + if len(parts) != allowedParts { + return nil, fmt.Errorf( + "parse anchor data[%s] failed: expecting [%d] parts, got [%d] parts", + data, allowedParts, len(parts)) + } + + ok := integerRegex.MatchString(parts[0]) + if !ok { + return nil, fmt.Errorf( + "parse anchor data[%s] failed: number of operations must be positive integer", data) + } + + opsNum, err := strconv.Atoi(parts[0]) + if err != nil { + return nil, fmt.Errorf("parse anchor data[%s] failed: %s", data, err.Error()) + } + + return &AnchorData{ + NumberOfOperations: opsNum, + CoreIndexFileURI: parts[1], + }, nil +} + +// GetAnchorString will create anchor string from anchor data. 
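+// The format is "<number-of-operations><delimiter><core-index-file-URI>";
+// for example, AnchorData{NumberOfOperations: 101, CoreIndexFileURI: "coreIndexURI"}
+// yields "101.coreIndexURI", the same form that ParseAnchorData above accepts.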
+func (ad *AnchorData) GetAnchorString() string {
+ return strconv.Itoa(ad.NumberOfOperations) + delimiter + ad.CoreIndexFileURI
+} diff --git a/method/sidetreelongform/sidetree-core/versions/1_0/txnprovider/anchordata_test.go b/method/sidetreelongform/sidetree-core/versions/1_0/txnprovider/anchordata_test.go new file mode 100644 index 0000000..6464e03 --- /dev/null +++ b/method/sidetreelongform/sidetree-core/versions/1_0/txnprovider/anchordata_test.go @@ -0,0 +1,56 @@ +/*
+Copyright SecureKey Technologies Inc. All Rights Reserved.
+
+SPDX-License-Identifier: Apache-2.0
+*/
+
+package txnprovider
+
+import (
+ "testing"
+
+ "github.com/stretchr/testify/require"
+)
+
+func TestParseAnchorData(t *testing.T) {
+ t.Run("success", func(t *testing.T) {
+ ad, err := ParseAnchorData("101.coreIndexURI")
+ require.NoError(t, err)
+ require.NotNil(t, ad)
+
+ require.Equal(t, ad.NumberOfOperations, 101)
+ require.Equal(t, ad.CoreIndexFileURI, "coreIndexURI")
+ })
+
+ t.Run("error - invalid number of parts", func(t *testing.T) {
+ ad, err := ParseAnchorData("1.coreIndexURI.other")
+ require.Error(t, err)
+ require.Nil(t, ad)
+
+ require.Contains(t, err.Error(), "expecting [2] parts, got [3] parts")
+ })
+
+ t.Run("error - invalid number of operations", func(t *testing.T) {
+ ad, err := ParseAnchorData("abc.coreIndexURI")
+ require.Error(t, err)
+ require.Nil(t, ad)
+
+ require.Contains(t, err.Error(), "number of operations must be positive integer")
+ })
+
+ t.Run("error - invalid number of operations starts with 0", func(t *testing.T) {
+ ad, err := ParseAnchorData("01.coreIndexURI")
+ require.Error(t, err)
+ require.Nil(t, ad)
+
+ require.Contains(t, err.Error(), "number of operations must be positive integer")
+ })
+
+ t.Run("error - number of operations is negative", func(t *testing.T) {
+ ad, err := ParseAnchorData("-1.coreIndexURI")
+ require.Error(t, err)
+ require.Nil(t, ad)
+
+ require.Contains(t, err.Error(), "number of operations must be positive integer")
+ })
+} diff --git a/method/sidetreelongform/sidetree-core/versions/1_0/txnprovider/handler.go b/method/sidetreelongform/sidetree-core/versions/1_0/txnprovider/handler.go new file mode 100644 index 0000000..cd1a268 --- /dev/null +++ b/method/sidetreelongform/sidetree-core/versions/1_0/txnprovider/handler.go @@ -0,0 +1,324 @@ +/*
+Copyright SecureKey Technologies Inc. All Rights Reserved.
+
+SPDX-License-Identifier: Apache-2.0
+*/
+
+package txnprovider
+
+import (
+ "errors"
+ "fmt"
+
+ "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/api/cas"
+ "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/api/operation"
+ "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/api/protocol"
+ "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/docutil"
+ logfields "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/internal/log"
+ "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/versions/1_0/model"
+ "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/versions/1_0/operationparser"
+ "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/versions/1_0/txnprovider/models"
+)
+
+type compressionProvider interface {
+ Compress(alg string, data []byte) ([]byte, error)
+}
+
+type metricsProvider interface {
+ CASWriteSize(dataType string, size int)
+}
+
+// OperationHandler creates the batch files (core index, core proof, provisional
+// index, provisional proof and chunk) from batch operations. 
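+//
+// A minimal usage sketch (the variable names are illustrative only, not part
+// of this package):
+//
+//	handler := NewOperationHandler(p, casClient, compressionProvider, parser, metricsProvider)
+//	anchoringInfo, err := handler.PrepareTxnFiles(queuedOps)
+//	// anchoringInfo.AnchorString (e.g. "3.<core-index-file-URI>") is what gets anchored.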
+type OperationHandler struct {
+ cas cas.Client
+ protocol protocol.Protocol
+ parser OperationParser
+ cp compressionProvider
+ metrics metricsProvider
+}
+
+// NewOperationHandler returns a new operations handler.
+//
+//nolint:gocritic
+func NewOperationHandler(p protocol.Protocol, cas cas.Client, cp compressionProvider, parser OperationParser,
+ metrics metricsProvider) *OperationHandler {
+ return &OperationHandler{
+ cas: cas,
+ protocol: p,
+ parser: parser,
+ cp: cp,
+ metrics: metrics,
+ }
+}
+
+// PrepareTxnFiles will create batch files (core index, core proof, provisional index, provisional proof and chunk)
+// from batch operations and returns the anchor string, batch file information and operations.
+//
+//nolint:funlen
+func (h *OperationHandler) PrepareTxnFiles(ops []*operation.QueuedOperation,
+) (*protocol.AnchoringInfo, error) {
+ parsedOps, info, err := h.parseOperations(ops)
+ if err != nil {
+ return nil, err
+ }
+
+ var artifacts []*protocol.AnchorDocument
+
+ // special case: if all ops are deactivate don't create chunk and provisional files
+ provisionalIndexURI := ""
+
+ if len(parsedOps.Deactivate) != len(ops) {
+ chunkURI, innerErr := h.createChunkFile(parsedOps)
+ if innerErr != nil {
+ return nil, innerErr
+ }
+
+ artifacts = append(artifacts,
+ &protocol.AnchorDocument{
+ ID: chunkURI,
+ Desc: "chunk file",
+ Type: protocol.TypeProvisional,
+ })
+
+ provisionalProofURI, innerErr := h.createProvisionalProofFile(parsedOps.Update)
+ if innerErr != nil {
+ return nil, innerErr
+ }
+
+ if provisionalProofURI != "" {
+ artifacts = append(artifacts,
+ &protocol.AnchorDocument{
+ ID: provisionalProofURI,
+ Desc: "provisional proof file",
+ Type: protocol.TypeProvisional,
+ })
+ }
+
+ provisionalIndexURI, innerErr = h.createProvisionalIndexFile(
+ []string{chunkURI}, provisionalProofURI, parsedOps.Update)
+ if innerErr != nil {
+ return nil, innerErr
+ }
+
+ artifacts = append(artifacts,
+ &protocol.AnchorDocument{
+ ID: provisionalIndexURI,
+ Desc: "provisional index file",
+ Type: protocol.TypeProvisional,
+ })
+ }
+
+ coreProofURI, err := h.createCoreProofFile(parsedOps.Recover, parsedOps.Deactivate)
+ if err != nil {
+ return nil, err
+ }
+
+ if coreProofURI != "" {
+ artifacts = append(artifacts,
+ &protocol.AnchorDocument{
+ ID: coreProofURI,
+ Desc: "core proof file",
+ Type: protocol.TypePermanent,
+ })
+ }
+
+ coreIndexURI, err := h.createCoreIndexFile(coreProofURI, provisionalIndexURI, parsedOps)
+ if err != nil {
+ return nil, err
+ }
+
+ artifacts = append(artifacts,
+ &protocol.AnchorDocument{
+ ID: coreIndexURI,
+ Desc: "core index file",
+ Type: protocol.TypePermanent,
+ })
+
+ ad := AnchorData{
+ NumberOfOperations: parsedOps.Size(),
+ CoreIndexFileURI: coreIndexURI,
+ }
+
+ return &protocol.AnchoringInfo{
+ AnchorString: ad.GetAnchorString(),
+ Artifacts: artifacts,
+ OperationReferences: info.OperationReferences,
+ ExpiredOperations: info.ExpiredOperations,
+ AdditionalOperations: info.AdditionalOperations,
+ }, nil
+}
+
+//nolint:funlen,gocyclo
+func (h *OperationHandler) parseOperations(ops []*operation.QueuedOperation,
+) (*models.SortedOperations, *additionalAnchoringInfo, error) {
+ if len(ops) == 0 {
+ return nil, nil, errors.New("prepare txn operations called without operations, should not happen")
+ }
+
+ batchSuffixes := make(map[string]*operation.Reference)
+
+ var (
+ expiredOperations []*operation.QueuedOperation
+ additionalOperations []*operation.QueuedOperation
+ )
+
+ result := &models.SortedOperations{}
+
+ for _, queuedOperation := range ops { 
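+ // Each queued operation is parsed and sorted by type. Expired operations
+ // are dropped, and an operation whose suffix has already been seen in this
+ // batch is deferred to the next batch via additionalOperations.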
+ op, e := h.parser.ParseOperation(queuedOperation.Namespace, queuedOperation.OperationRequest, false)
+ if e != nil {
+ if errors.Is(e, operationparser.ErrOperationExpired) {
+ // stale operations should not be added to the batch; ignore operation
+ logger.Warn("Stale operation found in batch operations: discarding operation",
+ logfields.WithNamespace(queuedOperation.Namespace))
+
+ expiredOperations = append(expiredOperations, queuedOperation)
+
+ continue
+ }
+
+ // operations are already validated/parsed at REST so any error at this point
+ // will result in rejecting the whole batch
+ return nil, nil, e
+ }
+
+ _, ok := batchSuffixes[op.UniqueSuffix]
+ if ok {
+ logger.Debug("Additional operation found in batch operations - adding operation"+
+ " to additional queue to be processed in the next batch",
+ logfields.WithNamespace(queuedOperation.Namespace), logfields.WithSuffix(op.UniqueSuffix))
+
+ additionalOperations = append(additionalOperations, queuedOperation)
+
+ continue
+ }
+
+ var anchorOrigin interface{}
+
+ switch op.Type {
+ case operation.TypeCreate:
+ result.Create = append(result.Create, op)
+
+ anchorOrigin = op.SuffixData.AnchorOrigin
+
+ case operation.TypeUpdate:
+ result.Update = append(result.Update, op)
+
+ anchorOrigin = queuedOperation.AnchorOrigin
+
+ case operation.TypeRecover:
+ result.Recover = append(result.Recover, op)
+
+ signedData, e := h.parser.ParseSignedDataForRecover(op.SignedData)
+ if e != nil {
+ return nil, nil, e
+ }
+
+ anchorOrigin = signedData.AnchorOrigin
+
+ case operation.TypeDeactivate:
+ result.Deactivate = append(result.Deactivate, op)
+
+ anchorOrigin = queuedOperation.AnchorOrigin
+ }
+
+ opRef := &operation.Reference{
+ UniqueSuffix: op.UniqueSuffix,
+ Type: op.Type,
+ AnchorOrigin: anchorOrigin,
+ }
+
+ batchSuffixes[op.UniqueSuffix] = opRef
+ }
+
+ opRefs := make([]*operation.Reference, 0, len(batchSuffixes))
+ for _, opRef := range batchSuffixes {
+ opRefs = append(opRefs, opRef)
+ }
+
+ return result, &additionalAnchoringInfo{
+ OperationReferences: opRefs,
+ ExpiredOperations: expiredOperations,
+ AdditionalOperations: additionalOperations,
+ }, nil
+}
+
+// createCoreIndexFile will create core index file from operations, proof files and provisional index file and write
+// it to CAS. Returns core index file address.
+func (h *OperationHandler) createCoreIndexFile(
+ coreProofURI, mapURI string, ops *models.SortedOperations) (string, error) {
+ coreIndexFile := models.CreateCoreIndexFile(coreProofURI, mapURI, ops)
+
+ return h.writeModelToCAS(coreIndexFile, "core index")
+}
+
+// createCoreProofFile will create core proof file from recover and deactivate operations and write it to CAS.
+// Returns core proof file address.
+func (h *OperationHandler) createCoreProofFile(recoverOps, deactivateOps []*model.Operation) (string, error) {
+ if len(recoverOps)+len(deactivateOps) == 0 {
+ return "", nil
+ }
+
+ coreProofFile := models.CreateCoreProofFile(recoverOps, deactivateOps)
+
+ return h.writeModelToCAS(coreProofFile, "core proof")
+}
+
+// createProvisionalProofFile will create provisional proof file from update operations and write it to CAS.
+// Returns provisional proof file address. 
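+// An empty URI (and no error) is returned when there are no update operations,
+// since the provisional proof file is not required in that case.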
+func (h *OperationHandler) createProvisionalProofFile(updateOps []*model.Operation) (string, error) {
+ if len(updateOps) == 0 {
+ return "", nil
+ }
+
+ provisionalProofFile := models.CreateProvisionalProofFile(updateOps)
+
+ return h.writeModelToCAS(provisionalProofFile, "provisional proof")
+}
+
+// createChunkFile will create chunk file from operations and write it to CAS.
+// Returns chunk file address.
+func (h *OperationHandler) createChunkFile(ops *models.SortedOperations) (string, error) {
+ chunkFile := models.CreateChunkFile(ops)
+
+ return h.writeModelToCAS(chunkFile, "chunk")
+}
+
+// createProvisionalIndexFile will create provisional index file from operations, provisional proof URI
+// and chunk file URIs. The provisional index file is then written to CAS.
+// Returns the address of the provisional index file in the CAS.
+func (h *OperationHandler) createProvisionalIndexFile(
+ chunks []string, provisionalURI string, ops []*model.Operation) (string, error) {
+ provisionalIndexFile := models.CreateProvisionalIndexFile(chunks, provisionalURI, ops)
+
+ return h.writeModelToCAS(provisionalIndexFile, "provisional index")
+}
+
+func (h *OperationHandler) writeModelToCAS(m interface{}, alias string) (string, error) {
+ bytes, err := docutil.MarshalCanonical(m)
+ if err != nil {
+ return "", fmt.Errorf("failed to marshal %s file: %s", alias, err.Error())
+ }
+
+ logger.Debug("Writing file", logfields.WithAlias(alias), logfields.WithContent(bytes))
+
+ compressedBytes, err := h.cp.Compress(h.protocol.CompressionAlgorithm, bytes)
+ if err != nil {
+ return "", err
+ }
+
+ // make file available in CAS
+ address, err := h.cas.Write(compressedBytes)
+ if err != nil {
+ return "", fmt.Errorf("failed to store %s file: %s", alias, err.Error())
+ }
+
+ h.metrics.CASWriteSize(alias, len(compressedBytes))
+
+ return address, nil
+}
+
+type additionalAnchoringInfo struct {
+ OperationReferences []*operation.Reference
+ ExpiredOperations []*operation.QueuedOperation
+ AdditionalOperations []*operation.QueuedOperation
+} diff --git a/method/sidetreelongform/sidetree-core/versions/1_0/txnprovider/handler_test.go b/method/sidetreelongform/sidetree-core/versions/1_0/txnprovider/handler_test.go new file mode 100644 index 0000000..1786787 --- /dev/null +++ b/method/sidetreelongform/sidetree-core/versions/1_0/txnprovider/handler_test.go @@ -0,0 +1,746 @@ +/*
+Copyright SecureKey Technologies Inc. All Rights Reserved. 
+ +SPDX-License-Identifier: Apache-2.0 +*/ + +package txnprovider + +import ( + "crypto/ecdsa" + "crypto/elliptic" + "crypto/rand" + "encoding/json" + "errors" + "fmt" + "testing" + "time" + + "github.com/stretchr/testify/require" + + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/api/operation" + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/commitment" + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/compression" + internaljws "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/internal/jws" + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/jws" + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/mocks" + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/patch" + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/util/ecsigner" + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/util/pubkey" + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/versions/1_0/client" + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/versions/1_0/model" + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/versions/1_0/operationparser" + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/versions/1_0/txnprovider/models" +) + +//go:generate counterfeiter -o operationparser.gen.go --fake-name MockOperationParser . OperationParser + +const ( + sha2_256 = 18 + defaultNS = "did:sidetree" + + createAnchorOrigin = "create-anchor-origin" + recoverAnchorOrigin = "recover-anchor-origin" + + universalAnchorOrigin = "universal-anchor-origin" +) + +func TestNewOperationHandler(t *testing.T) { + protocol := mocks.NewMockProtocolClient().Protocol + + handler := NewOperationHandler( + protocol, + mocks.NewMockCasClient(nil), + compression.New(compression.WithDefaultAlgorithms()), + operationparser.New(protocol), + &mocks.MetricsProvider{}) + require.NotNil(t, handler) +} + +func TestOperationHandler_PrepareTxnFiles(t *testing.T) { + const ( + createOpsNum = 2 + recoverOpsNum = 1 + deactivateOpsNum = 1 + updateOpsNum = 1 + ) + + compression := compression.New(compression.WithDefaultAlgorithms()) + + protocol := mocks.NewMockProtocolClient().Protocol + + t.Run("success", func(t *testing.T) { + ops := getTestOperations(createOpsNum, updateOpsNum, deactivateOpsNum, recoverOpsNum) + + handler := NewOperationHandler( + protocol, + mocks.NewMockCasClient(nil), + compression, + operationparser.New(protocol), + &mocks.MetricsProvider{}) + + anchoringInfo, err := handler.PrepareTxnFiles(ops) + require.NoError(t, err) + require.NotEmpty(t, anchoringInfo.AnchorString) + require.Equal(t, len(anchoringInfo.OperationReferences), createOpsNum+updateOpsNum+deactivateOpsNum+recoverOpsNum) + require.Len(t, anchoringInfo.Artifacts, 5) + + anchorData, err := ParseAnchorData(anchoringInfo.AnchorString) + require.NoError(t, err) + + bytes, err := handler.cas.Read(anchorData.CoreIndexFileURI) + require.NoError(t, err) + require.NotNil(t, bytes) + + content, err := compression.Decompress(compressionAlgorithm, bytes) + require.NoError(t, err) + + var cif models.CoreIndexFile + err = json.Unmarshal(content, &cif) + require.NoError(t, err) + require.NotNil(t, cif) + require.Equal(t, createOpsNum, len(cif.Operations.Create)) + require.Equal(t, createAnchorOrigin, cif.Operations.Create[0].SuffixData.AnchorOrigin) + require.Equal(t, recoverOpsNum, len(cif.Operations.Recover)) + require.Equal(t, deactivateOpsNum, 
len(cif.Operations.Deactivate))
+
+ bytes, err = handler.cas.Read(cif.ProvisionalIndexFileURI)
+ require.NoError(t, err)
+ require.NotNil(t, bytes)
+
+ content, err = compression.Decompress(compressionAlgorithm, bytes)
+ require.NoError(t, err)
+
+ var mf models.ProvisionalIndexFile
+ err = json.Unmarshal(content, &mf)
+ require.NoError(t, err)
+ require.NotNil(t, mf)
+ require.Equal(t, updateOpsNum, len(mf.Operations.Update))
+
+ bytes, err = handler.cas.Read(mf.Chunks[0].ChunkFileURI)
+ require.NoError(t, err)
+ require.NotNil(t, bytes)
+
+ content, err = compression.Decompress(compressionAlgorithm, bytes)
+ require.NoError(t, err)
+
+ var cf models.ChunkFile
+ err = json.Unmarshal(content, &cf)
+ require.NoError(t, err)
+ require.NotNil(t, cf)
+ require.Equal(t, createOpsNum+recoverOpsNum+updateOpsNum, len(cf.Deltas))
+
+ bytes, err = handler.cas.Read(cif.CoreProofFileURI)
+ require.NoError(t, err)
+ require.NotNil(t, bytes)
+
+ content, err = compression.Decompress(compressionAlgorithm, bytes)
+ require.NoError(t, err)
+
+ var cpf models.CoreProofFile
+ err = json.Unmarshal(content, &cpf)
+ require.NoError(t, err)
+ require.NotNil(t, cpf)
+ require.Equal(t, recoverOpsNum, len(cpf.Operations.Recover))
+ require.Equal(t, deactivateOpsNum, len(cpf.Operations.Deactivate))
+
+ signedData, err := internaljws.ParseJWS(cpf.Operations.Recover[0])
+ require.NoError(t, err)
+
+ var signedModel model.RecoverSignedDataModel
+ err = json.Unmarshal(signedData.Payload, &signedModel)
+ require.NoError(t, err)
+
+ require.Equal(t, recoverAnchorOrigin, signedModel.AnchorOrigin)
+
+ bytes, err = handler.cas.Read(mf.ProvisionalProofFileURI)
+ require.NoError(t, err)
+ require.NotNil(t, bytes)
+
+ content, err = compression.Decompress(compressionAlgorithm, bytes)
+ require.NoError(t, err)
+
+ var ppf models.ProvisionalProofFile
+ err = json.Unmarshal(content, &ppf)
+ require.NoError(t, err)
+ require.NotNil(t, ppf)
+ require.Equal(t, updateOpsNum, len(ppf.Operations.Update))
+ })
+
+ t.Run("success - stale operations not included", func(t *testing.T) {
+ // operations without anchor from/until times will go through
+ ops := getTestOperations(createOpsNum, updateOpsNum, deactivateOpsNum, recoverOpsNum)
+
+ // until = current time - 5 minutes
+ expiry := time.Now().Unix() - 5*60
+
+ // generate stale recover operation
+ op, err := generateQueueOperationWithAnchorTimes(operation.TypeRecover, "stale-recover", 0, expiry)
+ require.NoError(t, err)
+
+ ops = append(ops, op)
+
+ // generate stale deactivate operation
+ op, err = generateQueueOperationWithAnchorTimes(operation.TypeDeactivate, "stale-deactivate", 0, expiry)
+ require.NoError(t, err)
+
+ ops = append(ops, op)
+
+ // generate stale update operation
+ op, err = generateQueueOperationWithAnchorTimes(operation.TypeUpdate, "stale-update", 0, expiry)
+ require.NoError(t, err)
+
+ ops = append(ops, op)
+
+ handler := NewOperationHandler(
+ protocol,
+ mocks.NewMockCasClient(nil),
+ compression,
+ operationparser.New(protocol, operationparser.WithAnchorTimeValidator(&mockTimeValidator{})),
+ &mocks.MetricsProvider{})
+
+ anchoringInfo, err := handler.PrepareTxnFiles(ops)
+ require.NoError(t, err)
+ require.NotEmpty(t, anchoringInfo.AnchorString)
+ require.Equal(t, len(anchoringInfo.OperationReferences), createOpsNum+updateOpsNum+deactivateOpsNum+recoverOpsNum)
+ })
+
+ t.Run("success - no recover, deactivate or update ops", func(t *testing.T) {
+ const zeroUpdateOps = 0
+ const zeroRecoverOps = 0
+ const zeroDeactiveOps = 0
+ ops := getTestOperations(createOpsNum, 
zeroUpdateOps, zeroDeactiveOps, zeroRecoverOps) + + handler := NewOperationHandler( + protocol, + mocks.NewMockCasClient(nil), + compression, + operationparser.New(protocol), + &mocks.MetricsProvider{}) + + anchoringInfo, err := handler.PrepareTxnFiles(ops) + require.NoError(t, err) + require.NotEmpty(t, anchoringInfo.AnchorString) + require.Len(t, anchoringInfo.OperationReferences, createOpsNum) + // additional artifacts: chunk, provisional index, core index + require.Equal(t, 3, len(anchoringInfo.Artifacts)) + + anchorData, err := ParseAnchorData(anchoringInfo.AnchorString) + require.NoError(t, err) + + bytes, err := handler.cas.Read(anchorData.CoreIndexFileURI) + require.NoError(t, err) + require.NotNil(t, bytes) + + content, err := compression.Decompress(compressionAlgorithm, bytes) + require.NoError(t, err) + + var cif models.CoreIndexFile + err = json.Unmarshal(content, &cif) + require.NoError(t, err) + require.NotNil(t, cif) + require.Equal(t, createOpsNum, len(cif.Operations.Create)) + require.Equal(t, zeroRecoverOps, len(cif.Operations.Recover)) + require.Equal(t, zeroDeactiveOps, len(cif.Operations.Deactivate)) + require.Empty(t, cif.CoreProofFileURI) + + bytes, err = handler.cas.Read(cif.ProvisionalIndexFileURI) + require.NoError(t, err) + require.NotNil(t, bytes) + + content, err = compression.Decompress(compressionAlgorithm, bytes) + require.NoError(t, err) + + var pif models.ProvisionalIndexFile + err = json.Unmarshal(content, &pif) + require.NoError(t, err) + require.NotNil(t, pif) + require.Nil(t, pif.Operations) + + bytes, err = handler.cas.Read(pif.Chunks[0].ChunkFileURI) + require.NoError(t, err) + require.NotNil(t, bytes) + + content, err = compression.Decompress(compressionAlgorithm, bytes) + require.NoError(t, err) + + var cf models.ChunkFile + err = json.Unmarshal(content, &cf) + require.NoError(t, err) + require.NotNil(t, cf) + require.Equal(t, createOpsNum+zeroRecoverOps+zeroUpdateOps, len(cf.Deltas)) + }) + + t.Run("error - no operations provided", func(t *testing.T) { + handler := NewOperationHandler( + protocol, + mocks.NewMockCasClient(nil), + compression, + operationparser.New(protocol), + &mocks.MetricsProvider{}) + + anchoringInfo, err := handler.PrepareTxnFiles(nil) + require.Error(t, err) + require.Empty(t, anchoringInfo) + require.Contains(t, err.Error(), "prepare txn operations called without operations, should not happen") + }) + + t.Run("error - parse operation fails", func(t *testing.T) { + handler := NewOperationHandler( + protocol, + mocks.NewMockCasClient(nil), + compression, + operationparser.New(protocol), + &mocks.MetricsProvider{}) + + op := &operation.QueuedOperation{ + OperationRequest: []byte(`{"key":"value"}`), + UniqueSuffix: "suffix", + Namespace: defaultNS, + } + + anchoringInfo, err := handler.PrepareTxnFiles([]*operation.QueuedOperation{op}) + require.Error(t, err) + require.Empty(t, anchoringInfo) + require.Contains(t, err.Error(), "parse operation: operation type [] not supported") + }) + + t.Run("error - write to CAS error for chunk file", func(t *testing.T) { + ops := getTestOperations(createOpsNum, updateOpsNum, deactivateOpsNum, recoverOpsNum) + + handler := NewOperationHandler( + protocol, + mocks.NewMockCasClient(errors.New("CAS error")), + compression, + operationparser.New(protocol), + &mocks.MetricsProvider{}) + + anchoringInfo, err := handler.PrepareTxnFiles(ops) + require.Error(t, err) + require.Empty(t, anchoringInfo) + require.Contains(t, err.Error(), "failed to store chunk file: CAS error") + }) + + t.Run("error - write 
to CAS error for core index file", func(t *testing.T) { + ops := getTestOperations(0, 0, deactivateOpsNum, 0) + + handler := NewOperationHandler( + protocol, + mocks.NewMockCasClient(errors.New("CAS error")), + compression, + operationparser.New(protocol), + &mocks.MetricsProvider{}) + + anchoringInfo, err := handler.PrepareTxnFiles(ops) + require.Error(t, err) + require.Empty(t, anchoringInfo) + require.Contains(t, err.Error(), "failed to store core proof file: CAS error") + }) +} + +func TestWriteModelToCAS(t *testing.T) { + protocol := mocks.NewMockProtocolClient().Protocol + + handler := NewOperationHandler( + protocol, + mocks.NewMockCasClient(nil), + compression.New(compression.WithDefaultAlgorithms()), + operationparser.New(protocol), + &mocks.MetricsProvider{}) + + t.Run("success", func(t *testing.T) { + address, err := handler.writeModelToCAS(&models.CoreIndexFile{}, "alias") + require.NoError(t, err) + require.NotEmpty(t, address) + }) + + t.Run("error - marshal fails", func(t *testing.T) { + address, err := handler.writeModelToCAS("test", "alias") + require.Error(t, err) + require.Empty(t, address) + require.Contains(t, err.Error(), "failed to marshal alias file") + }) + + t.Run("error - CAS error", func(t *testing.T) { + handlerWithCASError := NewOperationHandler( + protocol, + mocks.NewMockCasClient(errors.New("CAS error")), + compression.New(compression.WithDefaultAlgorithms()), + operationparser.New(protocol), + &mocks.MetricsProvider{}) + + address, err := handlerWithCASError.writeModelToCAS(&models.CoreIndexFile{}, "alias") + require.Error(t, err) + require.Empty(t, address) + require.Contains(t, err.Error(), "failed to store alias file: CAS error") + }) + + t.Run("error - compression error", func(t *testing.T) { + pc := mocks.NewMockProtocolClient() + pc.Protocol.CompressionAlgorithm = "invalid" + + handlerWithProtocolError := NewOperationHandler( + pc.Protocol, + mocks.NewMockCasClient(nil), + compression.New(compression.WithDefaultAlgorithms()), + operationparser.New(pc.Protocol), + &mocks.MetricsProvider{}, + ) + + address, err := handlerWithProtocolError.writeModelToCAS(&models.CoreIndexFile{}, "alias") + require.Error(t, err) + require.Empty(t, address) + require.Contains(t, err.Error(), "compression algorithm 'invalid' not supported") + }) +} + +func getTestOperations(createOpsNum, updateOpsNum, deactivateOpsNum, recoverOpsNum int) []*operation.QueuedOperation { + var ops []*operation.QueuedOperation + ops = append(ops, generateOperations(createOpsNum, operation.TypeCreate)...) + ops = append(ops, generateOperations(recoverOpsNum, operation.TypeRecover)...) + ops = append(ops, generateOperations(deactivateOpsNum, operation.TypeDeactivate)...) + ops = append(ops, generateOperations(updateOpsNum, operation.TypeUpdate)...) 
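+ // ops holds the generated operations grouped by type: create, recover, deactivate, update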
+ + return ops +} + +func generateOperations(numOfOperations int, opType operation.Type) (ops []*operation.QueuedOperation) { + for j := 1; j <= numOfOperations; j++ { + op, err := generateOperationInfo(j, opType) + if err != nil { + panic(err) + } + + ops = append(ops, op) + } + + return +} + +func generateOperationInfo(num int, opType operation.Type) (*operation.QueuedOperation, error) { + op, err := generateOperationBuffer(num, opType) + if err != nil { + return nil, err + } + + return &operation.QueuedOperation{ + OperationRequest: op, + UniqueSuffix: fmt.Sprintf("%s-%d", opType, num), + Namespace: defaultNS, + AnchorOrigin: universalAnchorOrigin, + }, nil +} + +func generateOperation(num int, opType operation.Type) (*model.Operation, error) { + op, err := generateOperationBuffer(num, opType) + if err != nil { + return nil, err + } + + cp, err := mocks.NewMockProtocolClient().Current() + if err != nil { + panic(err) + } + + parser := operationparser.New(cp.Protocol()) + + return parser.ParseOperation(defaultNS, op, false) +} + +func generateOperationBuffer(num int, opType operation.Type) ([]byte, error) { + switch opType { + case operation.TypeCreate: + return generateCreateOperation(num) + case operation.TypeRecover: + return generateRecoverOperation(num) + case operation.TypeDeactivate: + return generateDeactivateOperation(num) + case operation.TypeUpdate: + return generateUpdateOperation(num) + default: + return nil, errors.New("operation type not supported") + } +} + +func generateCreateOperation(num int) ([]byte, error) { + recoverJWK := &jws.JWK{ + Crv: "crv", + Kty: "kty", + X: "x", + } + + updateJWK := &jws.JWK{ + Crv: "crv", + Kty: "kty", + X: "x", + Y: "y", + } + + recoverCommitment, err := commitment.GetCommitment(recoverJWK, sha2_256) + if err != nil { + return nil, err + } + + updateCommitment, err := commitment.GetCommitment(updateJWK, sha2_256) + if err != nil { + return nil, err + } + + doc := fmt.Sprintf(`{"test":%d}`, num) + info := &client.CreateRequestInfo{ + OpaqueDocument: doc, + RecoveryCommitment: recoverCommitment, + UpdateCommitment: updateCommitment, + MultihashCode: sha2_256, + AnchorOrigin: createAnchorOrigin, // optional + } + + return client.NewCreateRequest(info) +} + +func generateRecoverRequestInfo(num int) (*client.RecoverRequestInfo, error) { + privKey, err := ecdsa.GenerateKey(elliptic.P256(), rand.Reader) + if err != nil { + return nil, err + } + + jwk, err := pubkey.GetPublicKeyJWK(&privKey.PublicKey) + if err != nil { + return nil, err + } + + recoveryCommitment, err := generateUniqueCommitment() + if err != nil { + return nil, err + } + + updateCommitment, err := generateUniqueCommitment() + if err != nil { + return nil, err + } + + rv, err := commitment.GetRevealValue(jwk, sha2_256) + if err != nil { + return nil, err + } + + return &client.RecoverRequestInfo{ + DidSuffix: fmt.Sprintf("recover-%d", num), + OpaqueDocument: `{"test":"value"}`, + RecoveryCommitment: recoveryCommitment, + UpdateCommitment: updateCommitment, + RecoveryKey: jwk, + AnchorOrigin: recoverAnchorOrigin, + MultihashCode: sha2_256, + Signer: ecsigner.New(privKey, "ES256", ""), + RevealValue: rv, + }, nil +} + +func generateRecoverOperation(num int) ([]byte, error) { + info, err := generateRecoverRequestInfo(num) + if err != nil { + return nil, err + } + + return client.NewRecoverRequest(info) +} + +func generateDeactivateRequestInfo(num int) (*client.DeactivateRequestInfo, error) { + privateKey, err := ecdsa.GenerateKey(elliptic.P256(), rand.Reader) + if err != nil { + 
return nil, err + } + + recoveryPubKey, err := pubkey.GetPublicKeyJWK(&privateKey.PublicKey) + if err != nil { + return nil, err + } + + rv, err := commitment.GetRevealValue(recoveryPubKey, sha2_256) + if err != nil { + return nil, err + } + + return &client.DeactivateRequestInfo{ + DidSuffix: fmt.Sprintf("deactivate-%d", num), + Signer: ecsigner.New(privateKey, "ES256", ""), + RecoveryKey: recoveryPubKey, + RevealValue: rv, + }, nil +} + +func generateDeactivateOperation(num int) ([]byte, error) { + info, err := generateDeactivateRequestInfo(num) + if err != nil { + return nil, err + } + + return client.NewDeactivateRequest(info) +} + +func generateUpdateRequestInfo(num int) (*client.UpdateRequestInfo, error) { + privateKey, err := ecdsa.GenerateKey(elliptic.P256(), rand.Reader) + if err != nil { + return nil, err + } + + testPatch, err := getTestPatch() + if err != nil { + return nil, err + } + + updateCommitment, err := generateUniqueCommitment() + if err != nil { + return nil, err + } + + updatePubKey, err := pubkey.GetPublicKeyJWK(&privateKey.PublicKey) + if err != nil { + return nil, err + } + + rv, err := commitment.GetRevealValue(updatePubKey, sha2_256) + if err != nil { + return nil, err + } + + return &client.UpdateRequestInfo{ + DidSuffix: fmt.Sprintf("update-%d", num), + Signer: ecsigner.New(privateKey, "ES256", ""), + UpdateCommitment: updateCommitment, + UpdateKey: updatePubKey, + Patches: []patch.Patch{testPatch}, + MultihashCode: sha2_256, + RevealValue: rv, + }, nil +} + +func generateUpdateOperation(num int) ([]byte, error) { + info, err := generateUpdateRequestInfo(num) + if err != nil { + return nil, err + } + + return client.NewUpdateRequest(info) +} + +func getTestPatch() (patch.Patch, error) { + return patch.NewJSONPatch(`[{"op": "replace", "path": "/name", "value": "Jane"}]`) +} + +func generateUniqueCommitment() (string, error) { + key, err := ecdsa.GenerateKey(elliptic.P256(), rand.Reader) + if err != nil { + return "", err + } + + pubKey, err := pubkey.GetPublicKeyJWK(&key.PublicKey) + if err != nil { + return "", err + } + + c, err := commitment.GetCommitment(pubKey, sha2_256) + if err != nil { + return "", err + } + + return c, nil +} + +func generateRecoverOperationWithAnchorTimes(suffix string, from, until int64) ([]byte, error) { + op, err := generateRecoverRequestInfo(1) + if err != nil { + return nil, err + } + + op.DidSuffix = suffix + op.AnchorUntil = until + op.AnchorFrom = from + + return client.NewRecoverRequest(op) +} + +func generateDeactivateOperationWithAnchorTimes(suffix string, from, until int64) ([]byte, error) { + op, err := generateDeactivateRequestInfo(1) + if err != nil { + return nil, err + } + + op.DidSuffix = suffix + op.AnchorUntil = until + op.AnchorFrom = from + + return client.NewDeactivateRequest(op) +} + +func generateUpdateOperationWithAnchorTimes(suffix string, from, until int64) ([]byte, error) { + op, err := generateUpdateRequestInfo(1) + if err != nil { + return nil, err + } + + op.DidSuffix = suffix + op.AnchorUntil = until + op.AnchorFrom = from + + return client.NewUpdateRequest(op) +} + +func generateQueueOperationWithAnchorTimes(opType operation.Type, suffix string, from, until int64, +) (*operation.QueuedOperation, error) { + var ( + opBuffer []byte + err error + ) + + switch opType { + case operation.TypeCreate: + return nil, errors.New("create operation is not supported") + case operation.TypeRecover: + opBuffer, err = generateRecoverOperationWithAnchorTimes(suffix, from, until) + case operation.TypeDeactivate: + 
opBuffer, err = generateDeactivateOperationWithAnchorTimes(suffix, from, until) + case operation.TypeUpdate: + opBuffer, err = generateUpdateOperationWithAnchorTimes(suffix, from, until) + default: + return nil, fmt.Errorf("operation type '%s' not supported", opType) + } + + if err != nil { + return nil, err + } + + return &operation.QueuedOperation{ + OperationRequest: opBuffer, + UniqueSuffix: suffix, + Namespace: defaultNS, + }, nil +} + +type mockTimeValidator struct { + Err error +} + +func (mtv *mockTimeValidator) Validate(from, until int64) error { + if mtv.Err != nil { + return mtv.Err + } + + if from == 0 && until == 0 { + // from and until are not specified - no error + return nil + } + + serverTime := time.Now().Unix() + + if from >= serverTime { + return operationparser.ErrOperationEarly + } + + if until <= serverTime { + return operationparser.ErrOperationExpired + } + + return nil +} diff --git a/method/sidetreelongform/sidetree-core/versions/1_0/txnprovider/models/chunk.go b/method/sidetreelongform/sidetree-core/versions/1_0/txnprovider/models/chunk.go new file mode 100644 index 0000000..c0662f8 --- /dev/null +++ b/method/sidetreelongform/sidetree-core/versions/1_0/txnprovider/models/chunk.go @@ -0,0 +1,52 @@ +/* +Copyright SecureKey Technologies Inc. All Rights Reserved. + +SPDX-License-Identifier: Apache-2.0 +*/ + +package models + +import ( + "encoding/json" + + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/versions/1_0/model" +) + +// ChunkFile defines chunk file schema. +type ChunkFile struct { + // Deltas included in this chunk file, each delta is an encoded string + Deltas []*model.DeltaModel `json:"deltas"` +} + +// CreateChunkFile will combine all operation deltas into chunk file. +// returns chunk file model. +func CreateChunkFile(ops *SortedOperations) *ChunkFile { + var deltas []*model.DeltaModel + + deltas = append(deltas, getDeltas(ops.Create)...) + deltas = append(deltas, getDeltas(ops.Recover)...) + deltas = append(deltas, getDeltas(ops.Update)...) + + return &ChunkFile{Deltas: deltas} +} + +// ParseChunkFile will parse chunk file model from content. +func ParseChunkFile(content []byte) (*ChunkFile, error) { + file := &ChunkFile{} + err := json.Unmarshal(content, file) + + if err != nil { + return nil, err + } + + return file, nil +} + +func getDeltas(ops []*model.Operation) []*model.DeltaModel { + var deltas []*model.DeltaModel + for _, op := range ops { + deltas = append(deltas, op.Delta) + } + + return deltas +} diff --git a/method/sidetreelongform/sidetree-core/versions/1_0/txnprovider/models/chunk_test.go b/method/sidetreelongform/sidetree-core/versions/1_0/txnprovider/models/chunk_test.go new file mode 100644 index 0000000..f67a7fe --- /dev/null +++ b/method/sidetreelongform/sidetree-core/versions/1_0/txnprovider/models/chunk_test.go @@ -0,0 +1,49 @@ +/* +Copyright SecureKey Technologies Inc. All Rights Reserved. 
+ +SPDX-License-Identifier: Apache-2.0 +*/ + +package models + +import ( + "encoding/json" + "testing" + + "github.com/stretchr/testify/require" +) + +func TestHandler_CreateChunkFile(t *testing.T) { + const ( + createOpsNum = 5 + updateOpsNum = 4 + deactivateOpsNum = 3 + recoverOpsNum = 1 + ) + + ops := getTestOperations(createOpsNum, updateOpsNum, deactivateOpsNum, recoverOpsNum) + + chunk := CreateChunkFile(ops) + require.NotNil(t, chunk) + require.Equal(t, createOpsNum+updateOpsNum+recoverOpsNum, len(chunk.Deltas)) +} + +func TestParseChunkFile(t *testing.T) { + const ( + createOpsNum = 5 + updateOpsNum = 4 + deactivateOpsNum = 3 + recoverOpsNum = 1 + ) + + ops := getTestOperations(createOpsNum, updateOpsNum, deactivateOpsNum, recoverOpsNum) + + model := CreateChunkFile(ops) + bytes, err := json.Marshal(model) + require.NoError(t, err) + + parsed, err := ParseChunkFile(bytes) + require.NoError(t, err) + + require.Equal(t, createOpsNum+updateOpsNum+recoverOpsNum, len(parsed.Deltas)) +} diff --git a/method/sidetreelongform/sidetree-core/versions/1_0/txnprovider/models/common.go b/method/sidetreelongform/sidetree-core/versions/1_0/txnprovider/models/common.go new file mode 100644 index 0000000..39d9a27 --- /dev/null +++ b/method/sidetreelongform/sidetree-core/versions/1_0/txnprovider/models/common.go @@ -0,0 +1,57 @@ +/* +Copyright SecureKey Technologies Inc. All Rights Reserved. + +SPDX-License-Identifier: Apache-2.0 +*/ + +package models + +import ( + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/versions/1_0/model" +) + +// SortedOperations stores operations per type. +type SortedOperations struct { + Create []*model.Operation + Update []*model.Operation + Recover []*model.Operation + Deactivate []*model.Operation +} + +// Size returns the length of all operations (combined). +func (o *SortedOperations) Size() int { + return len(o.Create) + len(o.Recover) + len(o.Deactivate) + len(o.Update) +} + +// OperationReference contains minimum proving data. +type OperationReference struct { + // DidSuffix is the suffix of the DID + DidSuffix string `json:"didSuffix"` + + // RevealValue is multihash of JWK + RevealValue string `json:"revealValue"` +} + +func getOperationReferences(ops []*model.Operation) []OperationReference { + var result []OperationReference + + for _, op := range ops { + upd := OperationReference{ + DidSuffix: op.UniqueSuffix, + RevealValue: op.RevealValue, + } + + result = append(result, upd) + } + + return result +} + +func getSignedData(ops []*model.Operation) []string { + var result []string + for _, op := range ops { + result = append(result, op.SignedData) + } + + return result +} diff --git a/method/sidetreelongform/sidetree-core/versions/1_0/txnprovider/models/coreindex.go b/method/sidetreelongform/sidetree-core/versions/1_0/txnprovider/models/coreindex.go new file mode 100644 index 0000000..6520a6a --- /dev/null +++ b/method/sidetreelongform/sidetree-core/versions/1_0/txnprovider/models/coreindex.go @@ -0,0 +1,82 @@ +/* +Copyright SecureKey Technologies Inc. All Rights Reserved. + +SPDX-License-Identifier: Apache-2.0 +*/ + +package models + +import ( + "encoding/json" + + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/versions/1_0/model" +) + +// CoreIndexFile defines the schema of a core index file.
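+// It references the related core proof and provisional index files and holds the +// create, recover and deactivate operation entries for a batch.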
+type CoreIndexFile struct { + + // ProvisionalIndexFileURI is provisional index file URI + ProvisionalIndexFileURI string `json:"provisionalIndexFileUri,omitempty"` + + // CoreProofFileURI is core proof file URI + CoreProofFileURI string `json:"coreProofFileUri,omitempty"` + + // CoreOperations contain proving data for create, recover and deactivate operations. + Operations *CoreOperations `json:"operations,omitempty"` +} + +// CreateReference contains create operation reference. +type CreateReference struct { + // SuffixData object + SuffixData *model.SuffixDataModel `json:"suffixData"` +} + +// CoreOperations contains operation references. +type CoreOperations struct { + Create []CreateReference `json:"create,omitempty"` + Recover []OperationReference `json:"recover,omitempty"` + Deactivate []OperationReference `json:"deactivate,omitempty"` +} + +// CreateCoreIndexFile will create core index file from provided operations. +// returns core index file model. +func CreateCoreIndexFile(coreProofURI, provisionalIndexURI string, ops *SortedOperations) *CoreIndexFile { + var coreOps *CoreOperations + + if len(ops.Create)+len(ops.Recover)+len(ops.Deactivate) > 0 { + coreOps = &CoreOperations{} + + coreOps.Create = assembleCreateReferences(ops.Create) + coreOps.Recover = getOperationReferences(ops.Recover) + coreOps.Deactivate = getOperationReferences(ops.Deactivate) + } + + return &CoreIndexFile{ + CoreProofFileURI: coreProofURI, + ProvisionalIndexFileURI: provisionalIndexURI, + Operations: coreOps, + } +} + +func assembleCreateReferences(createOps []*model.Operation) []CreateReference { + var result []CreateReference + + for _, op := range createOps { + create := CreateReference{SuffixData: op.SuffixData} + result = append(result, create) + } + + return result +} + +// ParseCoreIndexFile will parse core index file from content. +func ParseCoreIndexFile(content []byte) (*CoreIndexFile, error) { + file := &CoreIndexFile{} + err := json.Unmarshal(content, file) + + if err != nil { + return nil, err + } + + return file, nil +} diff --git a/method/sidetreelongform/sidetree-core/versions/1_0/txnprovider/models/coreindex_test.go b/method/sidetreelongform/sidetree-core/versions/1_0/txnprovider/models/coreindex_test.go new file mode 100644 index 0000000..7c18dd7 --- /dev/null +++ b/method/sidetreelongform/sidetree-core/versions/1_0/txnprovider/models/coreindex_test.go @@ -0,0 +1,117 @@ +/* +Copyright SecureKey Technologies Inc. All Rights Reserved. 
+ +SPDX-License-Identifier: Apache-2.0 +*/ + +package models + +import ( + "encoding/json" + "fmt" + "testing" + + "github.com/stretchr/testify/require" + + "github.com/trustbloc/did-go/doc/json/canonicalizer" + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/api/operation" + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/versions/1_0/model" +) + +const ( + signedData = "signed-data" + revealValue = "reveal-value" +) + +func TestCreateCoreIndex(t *testing.T) { + const ( + createOpsNum = 2 + updateOpsNum = 2 + deactivateOpsNum = 2 + recoverOpsNum = 2 + ) + + ops := getTestOperations(createOpsNum, updateOpsNum, deactivateOpsNum, recoverOpsNum) + + cif := CreateCoreIndexFile("coreURI", "mapURI", ops) + require.NotNil(t, cif) + require.Equal(t, createOpsNum, len(cif.Operations.Create)) + require.Equal(t, deactivateOpsNum, len(cif.Operations.Deactivate)) + require.Equal(t, recoverOpsNum, len(cif.Operations.Recover)) +} + +func TestParseCoreIndex(t *testing.T) { + const ( + createOpsNum = 5 + updateOpsNum = 4 + deactivateOpsNum = 3 + recoverOpsNum = 1 + ) + + ops := getTestOperations(createOpsNum, updateOpsNum, deactivateOpsNum, recoverOpsNum) + + model := CreateCoreIndexFile("coreURI", "mapURI", ops) + + bytes, err := json.Marshal(model) + require.NoError(t, err) + + parsed, err := ParseCoreIndexFile(bytes) + require.NoError(t, err) + + require.Equal(t, createOpsNum, len(parsed.Operations.Create)) + require.Equal(t, deactivateOpsNum, len(parsed.Operations.Deactivate)) + require.Equal(t, recoverOpsNum, len(parsed.Operations.Recover)) + + require.Equal(t, parsed.Operations.Recover[0].RevealValue, revealValue) + require.Equal(t, parsed.Operations.Deactivate[0].RevealValue, revealValue) +} + +func getTestOperations(createOpsNum, updateOpsNum, deactivateOpsNum, recoverOpsNum int) *SortedOperations { + result := &SortedOperations{} + result.Create = append(result.Create, generateOperations(createOpsNum, operation.TypeCreate)...) + result.Recover = append(result.Recover, generateOperations(recoverOpsNum, operation.TypeRecover)...) + result.Deactivate = append(result.Deactivate, generateOperations(deactivateOpsNum, operation.TypeDeactivate)...) + result.Update = append(result.Update, generateOperations(updateOpsNum, operation.TypeUpdate)...) 
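+ // result holds operations grouped by type, matching the SortedOperations layout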
+ + return result +} + +func generateOperations(numOfOperations int, opType operation.Type) (ops []*model.Operation) { + for j := 1; j <= numOfOperations; j++ { + ops = append(ops, generateOperation(j, opType)) + } + + return +} + +func generateOperation(num int, opType operation.Type) *model.Operation { + return &model.Operation{ + Type: opType, + UniqueSuffix: fmt.Sprintf("%s-%d", opType, num), + Namespace: "did:sidetree", + SuffixData: &model.SuffixDataModel{}, + Delta: &model.DeltaModel{}, + SignedData: signedData, + RevealValue: revealValue, + } +} + +func TestMarshalCoreIndexFile(t *testing.T) { + t.Run("success - check operations tag is omitted if no operations", func(t *testing.T) { + model := CreateCoreIndexFile("", "provisionalIndexURI", &SortedOperations{}) + bytes, err := canonicalizer.MarshalCanonical(model) + require.NoError(t, err) + // core index file can have just references to provisional index file (no operations) + require.Equal(t, `{"provisionalIndexFileUri":"provisionalIndexURI"}`, string(bytes)) + }) + t.Run("success - core index file can have just references to core proof file,"+ + " no provisional index file (deactivate ops only)", func(t *testing.T) { + sortedOperations := getTestOperations(0, 0, 1, 0) + + model := CreateCoreIndexFile("coreProofURI", "", sortedOperations) + bytes, err := canonicalizer.MarshalCanonical(model) + require.NoError(t, err) + //nolint:lll + require.Equal(t, `{"coreProofFileUri":"coreProofURI","operations":{"deactivate":[{"didSuffix":"deactivate-1","revealValue":"reveal-value"}]}}`, string(bytes)) + }) +} diff --git a/method/sidetreelongform/sidetree-core/versions/1_0/txnprovider/models/coreproof.go b/method/sidetreelongform/sidetree-core/versions/1_0/txnprovider/models/coreproof.go new file mode 100644 index 0000000..0881bbd --- /dev/null +++ b/method/sidetreelongform/sidetree-core/versions/1_0/txnprovider/models/coreproof.go @@ -0,0 +1,54 @@ +/* +Copyright SecureKey Technologies Inc. All Rights Reserved. + +SPDX-License-Identifier: Apache-2.0 +*/ + +package models + +import ( + "encoding/json" + + "github.com/pkg/errors" + + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/versions/1_0/model" +) + +// CoreProofFile defines the schema for core proof file. Core proof file contains the cryptographic proofs +// (signatures, hashes, etc.) that form the signature-chained backbone for the state lineages of all DIDs in the system. +// The cryptographic proofs present in core proof file also link a given operation to its verbose state data, +// which resides in a related chunk file. +type CoreProofFile struct { + + // Operations contain proving data for recover and deactivate operations. + Operations CoreProofOperations `json:"operations,omitempty"` +} + +// CoreProofOperations contains proving data for any recover and deactivate operations to be included in a batch. +type CoreProofOperations struct { + Recover []string `json:"recover,omitempty"` + Deactivate []string `json:"deactivate,omitempty"` +} + +// CreateCoreProofFile will create core proof file from provided operations. +// returns core proof file model. +func CreateCoreProofFile(recoverOps, deactivateOps []*model.Operation) *CoreProofFile { + return &CoreProofFile{ + Operations: CoreProofOperations{ + Recover: getSignedData(recoverOps), + Deactivate: getSignedData(deactivateOps), + }, + } +} + +// ParseCoreProofFile will parse core proof model from content.
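+// A minimal usage sketch (hypothetical caller; content holds the decompressed core proof file bytes): +// +// cpf, err := ParseCoreProofFile(content) +// if err != nil { /* handle parse error */ } +// recoverJWS := cpf.Operations.Recover // compact JWS strings, one per recover operation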
+func ParseCoreProofFile(content []byte) (*CoreProofFile, error) { + file := &CoreProofFile{} + err := json.Unmarshal(content, file) + + if err != nil { + return nil, errors.WithMessagef(err, "failed to unmarshal core proof file") + } + + return file, nil +} diff --git a/method/sidetreelongform/sidetree-core/versions/1_0/txnprovider/models/coreproof_test.go b/method/sidetreelongform/sidetree-core/versions/1_0/txnprovider/models/coreproof_test.go new file mode 100644 index 0000000..c9540aa --- /dev/null +++ b/method/sidetreelongform/sidetree-core/versions/1_0/txnprovider/models/coreproof_test.go @@ -0,0 +1,59 @@ +/* +Copyright SecureKey Technologies Inc. All Rights Reserved. + +SPDX-License-Identifier: Apache-2.0 +*/ + +package models + +import ( + "encoding/json" + "testing" + + "github.com/stretchr/testify/require" + + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/api/operation" +) + +func TestCreateCoreProofFile(t *testing.T) { + const ( + deactivateOpsNum = 2 + recoverOpsNum = 2 + ) + + recoverOps := generateOperations(recoverOpsNum, operation.TypeRecover) + deactivateOps := generateOperations(deactivateOpsNum, operation.TypeDeactivate) + + af := CreateCoreProofFile(recoverOps, deactivateOps) + require.NotNil(t, af) + require.Equal(t, deactivateOpsNum, len(af.Operations.Deactivate)) + require.Equal(t, recoverOpsNum, len(af.Operations.Recover)) +} + +func TestParseCoreProofFile(t *testing.T) { + t.Run("success", func(t *testing.T) { + const deactivateOpsNum = 3 + const recoverOpsNum = 1 + + recoverOps := generateOperations(recoverOpsNum, operation.TypeRecover) + deactivateOps := generateOperations(deactivateOpsNum, operation.TypeDeactivate) + + model := CreateCoreProofFile(recoverOps, deactivateOps) + + bytes, err := json.Marshal(model) + require.NoError(t, err) + + parsed, err := ParseCoreProofFile(bytes) + require.NoError(t, err) + + require.Equal(t, deactivateOpsNum, len(parsed.Operations.Deactivate)) + require.Equal(t, recoverOpsNum, len(parsed.Operations.Recover)) + }) + + t.Run("error - unmarshal error", func(t *testing.T) { + parsed, err := ParseCoreProofFile([]byte("not JSON")) + require.Error(t, err) + require.Nil(t, parsed) + require.Contains(t, err.Error(), "failed to unmarshal core proof file") + }) +} diff --git a/method/sidetreelongform/sidetree-core/versions/1_0/txnprovider/models/provisionalindex.go b/method/sidetreelongform/sidetree-core/versions/1_0/txnprovider/models/provisionalindex.go new file mode 100644 index 0000000..36939ae --- /dev/null +++ b/method/sidetreelongform/sidetree-core/versions/1_0/txnprovider/models/provisionalindex.go @@ -0,0 +1,77 @@ +/* +Copyright SecureKey Technologies Inc. All Rights Reserved. + +SPDX-License-Identifier: Apache-2.0 +*/ + +package models + +import ( + "encoding/json" + + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/versions/1_0/model" +) + +// ProvisionalIndexFile defines the schema for provisional index file and its related operations. +type ProvisionalIndexFile struct { + + // ProvisionalProofFileURI is provisional proof file URI + ProvisionalProofFileURI string `json:"provisionalProofFileUri,omitempty"` + + // Chunks are chunk entries for the related delta data for a given chunk of operations in the batch. + Chunks []Chunk `json:"chunks"` + + // Operations will contain provisional (update) operations + Operations *ProvisionalOperations `json:"operations,omitempty"` +} + +// ProvisionalOperations contains minimal operation proving data for provisional (update) operations. 
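+// Each reference carries the DID suffix and its reveal value.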
+type ProvisionalOperations struct { + Update []OperationReference `json:"update,omitempty"` +} + +// Chunk holds chunk file URI. +type Chunk struct { + ChunkFileURI string `json:"chunkFileUri"` +} + +// CreateProvisionalIndexFile will create provisional index file model from operations and chunk file URI. +// returns provisional index file model. +func CreateProvisionalIndexFile( + chunkURIs []string, provisionalProofURI string, updateOps []*model.Operation) *ProvisionalIndexFile { + var provisionalOps *ProvisionalOperations + if len(updateOps) > 0 { + provisionalOps = &ProvisionalOperations{} + + provisionalOps.Update = getOperationReferences(updateOps) + } + + return &ProvisionalIndexFile{ + Chunks: getChunks(chunkURIs), + ProvisionalProofFileURI: provisionalProofURI, + Operations: provisionalOps, + } +} + +// ParseProvisionalIndexFile will parse content into provisional index file model. +func ParseProvisionalIndexFile(content []byte) (*ProvisionalIndexFile, error) { + file := &ProvisionalIndexFile{} + + err := json.Unmarshal(content, file) + if err != nil { + return nil, err + } + + return file, nil +} + +func getChunks(uris []string) []Chunk { + var chunks []Chunk + for _, uri := range uris { + chunks = append(chunks, Chunk{ + ChunkFileURI: uri, + }) + } + + return chunks +} diff --git a/method/sidetreelongform/sidetree-core/versions/1_0/txnprovider/models/provisionalindex_test.go b/method/sidetreelongform/sidetree-core/versions/1_0/txnprovider/models/provisionalindex_test.go new file mode 100644 index 0000000..a726af8 --- /dev/null +++ b/method/sidetreelongform/sidetree-core/versions/1_0/txnprovider/models/provisionalindex_test.go @@ -0,0 +1,64 @@ +/* +Copyright SecureKey Technologies Inc. All Rights Reserved. + +SPDX-License-Identifier: Apache-2.0 +*/ + +package models + +import ( + "encoding/json" + "testing" + + "github.com/stretchr/testify/require" + + "github.com/trustbloc/did-go/doc/json/canonicalizer" + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/api/operation" +) + +func TestHandler_CreateMapFile(t *testing.T) { + const updateOpsNum = 2 + + ops := generateOperations(updateOpsNum, operation.TypeUpdate) + + chunks := []string{"chunk_uri"} + batch := CreateProvisionalIndexFile(chunks, "provisionalURI", ops) + require.NotNil(t, batch) + require.Equal(t, updateOpsNum, len(batch.Operations.Update)) +} + +func TestHandler_ParseMapFile(t *testing.T) { + const updateOpsNum = 5 + + ops := generateOperations(updateOpsNum, operation.TypeUpdate) + + chunks := []string{"chunk_uri"} + model := CreateProvisionalIndexFile(chunks, "provisionalURI", ops) + + bytes, err := json.Marshal(model) + require.NoError(t, err) + + parsed, err := ParseProvisionalIndexFile(bytes) + require.NoError(t, err) + + require.Equal(t, updateOpsNum, len(parsed.Operations.Update)) + + require.Equal(t, parsed.Operations.Update[0].RevealValue, revealValue) +} + +func TestMarshalProvisionalIndexFile(t *testing.T) { + t.Run("success - provisional index with no operations", func(t *testing.T) { + model := CreateProvisionalIndexFile([]string{"chunkURI"}, "", nil) + bytes, err := canonicalizer.MarshalCanonical(model) + require.NoError(t, err) + require.Equal(t, `{"chunks":[{"chunkFileUri":"chunkURI"}]}`, string(bytes)) + }) + t.Run("success - provisional index with operations", func(t *testing.T) { + model := CreateProvisionalIndexFile([]string{"chunkURI"}, + "", generateOperations(1, operation.TypeUpdate)) + bytes, err := canonicalizer.MarshalCanonical(model) + require.NoError(t, err) + //nolint:lll +
require.Equal(t, `{"chunks":[{"chunkFileUri":"chunkURI"}],"operations":{"update":[{"didSuffix":"update-1","revealValue":"reveal-value"}]}}`, string(bytes)) + }) +} diff --git a/method/sidetreelongform/sidetree-core/versions/1_0/txnprovider/models/provisionalproof.go b/method/sidetreelongform/sidetree-core/versions/1_0/txnprovider/models/provisionalproof.go new file mode 100644 index 0000000..6fbcb6b --- /dev/null +++ b/method/sidetreelongform/sidetree-core/versions/1_0/txnprovider/models/provisionalproof.go @@ -0,0 +1,50 @@ +/* +Copyright SecureKey Technologies Inc. All Rights Reserved. + +SPDX-License-Identifier: Apache-2.0 +*/ + +package models + +import ( + "encoding/json" + + "github.com/pkg/errors" + + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/versions/1_0/model" +) + +// ProvisionalProofFile defines the schema for provisional proof file. Provisional proof file contains the cryptographic +// proofs (signatures, hashes, etc.) for all the (eventually) prunable DID operations in the system. The cryptographic +// proofs present in provisional proof file also link a given operation to its verbose state data, which resides +// in a related chunk file. +type ProvisionalProofFile struct { + Operations ProvisionalProofOperations `json:"operations,omitempty"` +} + +// ProvisionalProofOperations contains proving data for any update operation to be included in the batch. +type ProvisionalProofOperations struct { + Update []string `json:"update,omitempty"` +} + +// CreateProvisionalProofFile will create provisional proof file model from operations. +// returns provisional proof file model. +func CreateProvisionalProofFile(updateOps []*model.Operation) *ProvisionalProofFile { + return &ProvisionalProofFile{ + Operations: ProvisionalProofOperations{ + Update: getSignedData(updateOps), + }, + } +} + +// ParseProvisionalProofFile will parse provisional proof file model from content. +func ParseProvisionalProofFile(content []byte) (*ProvisionalProofFile, error) { + file := &ProvisionalProofFile{} + + err := json.Unmarshal(content, file) + if err != nil { + return nil, errors.WithMessagef(err, "failed to unmarshal provisional proof file") + } + + return file, nil +} diff --git a/method/sidetreelongform/sidetree-core/versions/1_0/txnprovider/models/provisionalproof_test.go b/method/sidetreelongform/sidetree-core/versions/1_0/txnprovider/models/provisionalproof_test.go new file mode 100644 index 0000000..a8d4660 --- /dev/null +++ b/method/sidetreelongform/sidetree-core/versions/1_0/txnprovider/models/provisionalproof_test.go @@ -0,0 +1,51 @@ +/* +Copyright SecureKey Technologies Inc. All Rights Reserved. 
+ +SPDX-License-Identifier: Apache-2.0 +*/ + +package models + +import ( + "encoding/json" + "testing" + + "github.com/stretchr/testify/require" + + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/api/operation" +) + +func TestCreateProvisionalProofFile(t *testing.T) { + const updateOpsNum = 2 + + updateOps := generateOperations(updateOpsNum, operation.TypeUpdate) + + batch := CreateProvisionalProofFile(updateOps) + require.NotNil(t, batch) + require.Equal(t, updateOpsNum, len(batch.Operations.Update)) +} + +func TestParseProvisionalProofFile(t *testing.T) { + t.Run("success", func(t *testing.T) { + const updateOpsNum = 2 + + updateOps := generateOperations(updateOpsNum, operation.TypeUpdate) + + model := CreateProvisionalProofFile(updateOps) + + bytes, err := json.Marshal(model) + require.NoError(t, err) + + parsed, err := ParseProvisionalProofFile(bytes) + require.NoError(t, err) + + require.Equal(t, updateOpsNum, len(parsed.Operations.Update)) + }) + + t.Run("error - unmarshal error", func(t *testing.T) { + parsed, err := ParseProvisionalProofFile([]byte("not JSON")) + require.Error(t, err) + require.Nil(t, parsed) + require.Contains(t, err.Error(), "failed to unmarshal provisional proof file") + }) +} diff --git a/method/sidetreelongform/sidetree-core/versions/1_0/txnprovider/provider.go b/method/sidetreelongform/sidetree-core/versions/1_0/txnprovider/provider.go new file mode 100644 index 0000000..ac4330f --- /dev/null +++ b/method/sidetreelongform/sidetree-core/versions/1_0/txnprovider/provider.go @@ -0,0 +1,859 @@ +/* +Copyright SecureKey Technologies Inc. All Rights Reserved. + +SPDX-License-Identifier: Apache-2.0 +*/ + +package txnprovider + +import ( + "fmt" + + "github.com/pkg/errors" + + "github.com/trustbloc/logutil-go/pkg/log" + + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/api/operation" + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/api/protocol" + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/api/txn" + logfields "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/internal/log" + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/versions/1_0/model" + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/versions/1_0/txnprovider/models" +) + +var logger = log.New("sidetree-core-txnhandler") + +// DCAS interface to access content addressable storage. +type DCAS interface { + Read(key string) ([]byte, error) +} + +type decompressionProvider interface { + Decompress(alg string, data []byte) ([]byte, error) +} + +type sourceURIFormatter func(casURI, source string) (string, error) + +type options struct { + formatCASURIForSource sourceURIFormatter +} + +// Opt is an OperationProvider option. +type Opt func(ops *options) + +// WithSourceCASURIFormatter sets the formatter to use when converting an alternate source to a +// CAS URI. +func WithSourceCASURIFormatter(formatter sourceURIFormatter) Opt { + return func(ops *options) { + ops.formatCASURIForSource = formatter + } +} + +// OperationProvider is an operation provider. +type OperationProvider struct { + *options + + protocol.Protocol + parser OperationParser + cas DCAS + dp decompressionProvider +} + +// OperationParser defines the functions for parsing operations.
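+// Implementations parse operations and validate suffix data, deltas and signed data for each operation type.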
+type OperationParser interface { + ParseOperation(namespace string, operationRequest []byte, batch bool) (*model.Operation, error) + ValidateSuffixData(suffixData *model.SuffixDataModel) error + ValidateDelta(delta *model.DeltaModel) error + ParseSignedDataForUpdate(compactJWS string) (*model.UpdateSignedDataModel, error) + ParseSignedDataForDeactivate(compactJWS string) (*model.DeactivateSignedDataModel, error) + ParseSignedDataForRecover(compactJWS string) (*model.RecoverSignedDataModel, error) +} + +// NewOperationProvider returns a new operation provider. + +//nolint:gocritic +func NewOperationProvider(p protocol.Protocol, parser OperationParser, cas DCAS, + dp decompressionProvider, opts ...Opt) *OperationProvider { + o := &options{ + formatCASURIForSource: func(_, _ string) (string, error) { + return "", errors.New("CAS URI formatter not defined") + }, + } + + for _, opt := range opts { + opt(o) + } + + return &OperationProvider{ + options: o, + Protocol: p, + parser: parser, + cas: cas, + dp: dp, + } +} + +// GetTxnOperations will read batch files (core/provisional index, proof files and chunk file) +// and assemble batch operations from those files. +func (h *OperationProvider) GetTxnOperations(t *txn.SidetreeTxn) ([]*operation.AnchoredOperation, error) { + // parse core index file URI and number of operations from anchor string + anchorData, err := ParseAnchorData(t.AnchorString) + if err != nil { + return nil, err + } + + cif, err := h.getCoreIndexFile(anchorData.CoreIndexFileURI, t.AlternateSources...) + if err != nil { + return nil, err + } + + batchFiles, err := h.getBatchFiles(cif, t.AlternateSources...) + if err != nil { + return nil, err + } + + txnOps, err := h.assembleAnchoredOperations(batchFiles, t) + if err != nil { + return nil, err + } + + if len(txnOps) != anchorData.NumberOfOperations { + return nil, fmt.Errorf("number of txn ops[%d] doesn't match anchor string num of ops[%d]", + len(txnOps), anchorData.NumberOfOperations) + } + + return txnOps, nil +} + +// batchFiles contains the content of all batch files that are referenced in core index file. +type batchFiles struct { + CoreIndex *models.CoreIndexFile + CoreProof *models.CoreProofFile + ProvisionalIndex *models.ProvisionalIndexFile + ProvisionalProof *models.ProvisionalProofFile + Chunk *models.ChunkFile +} + +type provisionalFiles struct { + ProvisionalIndex *models.ProvisionalIndexFile + ProvisionalProof *models.ProvisionalProofFile + Chunk *models.ChunkFile +} + +// getBatchFiles retrieves all batch files that are referenced in core index file. +func (h *OperationProvider) getBatchFiles( + cif *models.CoreIndexFile, alternateSources ...string) (*batchFiles, error) { + var err error + + files := &batchFiles{CoreIndex: cif} + + // core proof file will not exist if we have only update operations in the batch + if cif.CoreProofFileURI != "" { + files.CoreProof, err = h.getCoreProofFile(cif.CoreProofFileURI, alternateSources...) + if err != nil { + return nil, err + } + } + + if cif.ProvisionalIndexFileURI != "" { + provisionalFiles, innerErr := h.getProvisionalFiles(cif.ProvisionalIndexFileURI, alternateSources...)
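+ // provisional batch files: provisional index file, optional provisional proof file and chunk file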
+ if innerErr != nil { + return nil, innerErr + } + + files.ProvisionalIndex = provisionalFiles.ProvisionalIndex + files.ProvisionalProof = provisionalFiles.ProvisionalProof + files.Chunk = provisionalFiles.Chunk + } + + // validate batch file counts + err = validateBatchFileCounts(files) + if err != nil { + return nil, err + } + + logger.Debug("Successfully downloaded and validated all batch files") + + return files, nil +} + +func (h *OperationProvider) getProvisionalFiles( + provisionalIndexURI string, alternateSources ...string) (*provisionalFiles, error) { + var err error + + files := &provisionalFiles{} + + files.ProvisionalIndex, err = h.getProvisionalIndexFile(provisionalIndexURI, alternateSources...) + if err != nil { + return nil, err + } + + // provisional proof file will not exist if we don't have any update operations in the batch + if files.ProvisionalIndex.ProvisionalProofFileURI != "" { + files.ProvisionalProof, err = h.getProvisionalProofFile( + files.ProvisionalIndex.ProvisionalProofFileURI, alternateSources...) + if err != nil { + return nil, err + } + } + + if len(files.ProvisionalIndex.Chunks) == 0 { + return nil, errors.Errorf("provisional index file is missing chunk file URI") + } + + chunkURI := files.ProvisionalIndex.Chunks[0].ChunkFileURI + + files.Chunk, err = h.getChunkFile(chunkURI, alternateSources...) + if err != nil { + return nil, err + } + + return files, nil +} + +// validateBatchFileCounts validates that operation numbers match in batch files. +func validateBatchFileCounts(batchFiles *batchFiles) error { + coreCreateNum := 0 + coreRecoverNum := 0 + coreDeactivateNum := 0 + + if batchFiles.CoreIndex.Operations != nil { + coreCreateNum = len(batchFiles.CoreIndex.Operations.Create) + coreRecoverNum = len(batchFiles.CoreIndex.Operations.Recover) + coreDeactivateNum = len(batchFiles.CoreIndex.Operations.Deactivate) + } + + if batchFiles.CoreIndex.CoreProofFileURI != "" { + coreProofRecoverNum := len(batchFiles.CoreProof.Operations.Recover) + coreProofDeactivateNum := len(batchFiles.CoreProof.Operations.Deactivate) + + if coreRecoverNum != coreProofRecoverNum { + return fmt.Errorf( + "number of recover ops[%d] in core index doesn't match number of recover ops[%d] in core proof", + coreRecoverNum, coreProofRecoverNum) + } + + if coreDeactivateNum != coreProofDeactivateNum { + return fmt.Errorf( + "number of deactivate ops[%d] in core index doesn't match number of deactivate ops[%d] in core proof", + coreDeactivateNum, coreProofDeactivateNum) + } + } + + if batchFiles.CoreIndex.ProvisionalIndexFileURI != "" { //nolint:nestif + provisionalUpdateNum := 0 + if batchFiles.ProvisionalIndex.Operations != nil { + provisionalUpdateNum = len(batchFiles.ProvisionalIndex.Operations.Update) + } + + if batchFiles.ProvisionalIndex.ProvisionalProofFileURI != "" { + provisionalProofUpdateNum := len(batchFiles.ProvisionalProof.Operations.Update) + + if provisionalUpdateNum != provisionalProofUpdateNum { + return fmt.Errorf("number of update ops[%d] in provisional index doesn't"+ + " match number of update ops[%d] in provisional proof", + provisionalUpdateNum, provisionalProofUpdateNum) + } + } + + expectedDeltaCount := coreCreateNum + coreRecoverNum + provisionalUpdateNum + + if expectedDeltaCount != len(batchFiles.Chunk.Deltas) { + return fmt.Errorf( + "number of create+recover+update operations[%d] doesn't match number of deltas[%d]", + expectedDeltaCount, len(batchFiles.Chunk.Deltas)) + } + } + + return nil +} + +func createAnchoredOperations(ops []*model.Operation) 
([]*operation.AnchoredOperation, error) { + var anchoredOps []*operation.AnchoredOperation + + for _, op := range ops { + anchoredOp, err := model.GetAnchoredOperation(op) + if err != nil { + return nil, err + } + + anchoredOps = append(anchoredOps, anchoredOp) + } + + return anchoredOps, nil +} + +//nolint:funlen +func (h *OperationProvider) assembleAnchoredOperations( + batchFiles *batchFiles, t *txn.SidetreeTxn) ([]*operation.AnchoredOperation, error) { + cifOps, err := h.parseCoreIndexOperations(batchFiles.CoreIndex, t) + if err != nil { + return nil, fmt.Errorf("parse core index operations: %s", err.Error()) + } + + logger.Debug("Successfully parsed core index operations", + logfields.WithTotalCreateOperations(len(cifOps.Create)), + logfields.WithTotalRecoverOperations(len(cifOps.Recover)), + logfields.WithTotalDeactivateOperations(len(cifOps.Deactivate))) + + // add signed data from core proof file to deactivate operations + for i := range cifOps.Deactivate { + cifOps.Deactivate[i].SignedData = batchFiles.CoreProof.Operations.Deactivate[i] + } + + // deactivate operations only + if batchFiles.CoreIndex.ProvisionalIndexFileURI == "" { + return createAnchoredOperations(cifOps.Deactivate) + } + + pifOps := parseProvisionalIndexOperations(batchFiles.ProvisionalIndex) + + logger.Debug("Successfully parsed provisional index operations", + logfields.WithTotalUpdateOperations(len(pifOps.Update))) + + // check for duplicate suffixes for this combination of core/provisional index files + txnSuffixes := append(cifOps.Suffixes, pifOps.Suffixes...) //nolint:gocritic + + err = checkForDuplicates(txnSuffixes) + if err != nil { + return nil, fmt.Errorf("check for duplicate suffixes in core/provisional index files: %s", err.Error()) + } + + var operations []*model.Operation + operations = append(operations, cifOps.Create...) + + // add signed data from core proof file + for i := range cifOps.Recover { + cifOps.Recover[i].SignedData = batchFiles.CoreProof.Operations.Recover[i] + + // parse signed data to extract anchor origin + signedDataModel, err := h.parser.ParseSignedDataForRecover(cifOps.Recover[i].SignedData) + if err != nil { + return nil, fmt.Errorf("failed to validate signed data for recover[%d]: %s", i, err.Error()) + } + + cifOps.Recover[i].AnchorOrigin = signedDataModel.AnchorOrigin + } + + operations = append(operations, cifOps.Recover...) + + // add signed data from provisional proof file + for i := range pifOps.Update { + pifOps.Update[i].SignedData = batchFiles.ProvisionalProof.Operations.Update[i] + } + + operations = append(operations, pifOps.Update...) + + if len(operations) != len(batchFiles.Chunk.Deltas) { + // this should never happen since we are assembling batch files + return nil, + fmt.Errorf("number of create+recover+update operations[%d] doesn't match number of deltas[%d]", + len(operations), len(batchFiles.Chunk.Deltas)) + } + + for i, delta := range batchFiles.Chunk.Deltas { + operations[i].Delta = delta + } + + operations = append(operations, cifOps.Deactivate...)
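+ // operations now contains create/recover/update ops (with deltas assigned) followed by deactivate ops (no deltas)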
+ + return createAnchoredOperations(operations) +} + +func checkForDuplicates(values []string) error { + var duplicates []string + + valuesMap := make(map[string]bool) + + for _, val := range values { + if _, ok := valuesMap[val]; !ok { + valuesMap[val] = true + } else { + duplicates = append(duplicates, val) + } + } + + if len(duplicates) > 0 { + return fmt.Errorf("duplicate values found %v", duplicates) + } + + return nil +} + +// getCoreIndexFile will download core index file from cas and parse it into core index file model. +func (h *OperationProvider) getCoreIndexFile( + uri string, alternateSources ...string) (*models.CoreIndexFile, error) { //nolint:dupl + content, err := h.readFromCAS(uri, h.MaxCoreIndexFileSize, alternateSources...) + if err != nil { + return nil, errors.Wrapf(err, "error reading core index file") + } + + logger.Debug("Successfully downloaded core index file", logfields.WithURIString(uri), logfields.WithContent(content)) + + cif, err := models.ParseCoreIndexFile(content) + if err != nil { + return nil, errors.Wrapf(err, "failed to parse content for core index file[%s]", uri) + } + + err = h.validateCoreIndexFile(cif) + if err != nil { + return nil, errors.Wrapf(err, "core index file[%s]", uri) + } + + return cif, nil +} + +func (h *OperationProvider) validateCoreIndexFile(cif *models.CoreIndexFile) error { + recoverNum := 0 + deactivateNum := 0 + + if cif.Operations != nil { + recoverNum = len(cif.Operations.Recover) + deactivateNum = len(cif.Operations.Deactivate) + } + + if recoverNum+deactivateNum > 0 && cif.CoreProofFileURI == "" { + return errors.New("missing core proof file URI") + } + + if recoverNum+deactivateNum == 0 && len(cif.CoreProofFileURI) > 0 { + return errors.New("core proof file URI should be empty if there are no recover and/or deactivate operations") + } + + err := h.validateCoreIndexCASReferences(cif) + if err != nil { + return err + } + + return h.validateCoreIndexOperations(cif.Operations) +} + +func (h *OperationProvider) validateCoreIndexCASReferences(cif *models.CoreIndexFile) error { + if err := h.validateURI(cif.CoreProofFileURI); err != nil { + return errors.Wrapf(err, "core proof URI") + } + + if err := h.validateURI(cif.ProvisionalIndexFileURI); err != nil { + return errors.Wrapf(err, "provisional index URI") + } + + return nil +} + +func (h *OperationProvider) validateCoreIndexOperations(ops *models.CoreOperations) error { + if ops == nil { // nothing to do + return nil + } + + for i, op := range ops.Create { + err := h.parser.ValidateSuffixData(op.SuffixData) + if err != nil { + return fmt.Errorf("failed to validate suffix data for create[%d]: %s", i, err.Error()) + } + } + + for i, op := range ops.Recover { + err := h.validateOperationReference(op) + if err != nil { + return fmt.Errorf("failed to validate operation reference for recover[%d]: %s", i, err.Error()) + } + } + + for i, op := range ops.Deactivate { + err := h.validateOperationReference(op) + if err != nil { + return fmt.Errorf("failed to validate operation reference for deactivate[%d]: %s", i, err.Error()) + } + } + + return nil +} + +func (h *OperationProvider) validateOperationReference(op models.OperationReference) error { + if err := h.validateRequiredMultihash(op.DidSuffix, "did suffix"); err != nil { + return err + } + + return h.validateRequiredMultihash(op.RevealValue, "reveal value") +} + +func (h *OperationProvider) validateRequiredMultihash(mh, alias string) error { + if mh == "" { + return fmt.Errorf("missing %s", alias) + } + + if len(mh) > 
int(h.MaxOperationHashLength) { + return fmt.Errorf("%s length[%d] exceeds maximum hash length[%d]", alias, len(mh), h.MaxOperationHashLength) + } + + return nil +} + +// getCoreProofFile will download core proof file from cas and parse it into core proof file model. +func (h *OperationProvider) getCoreProofFile(uri string, alternateSources ...string) (*models.CoreProofFile, error) { //nolint:dupl,lll + content, err := h.readFromCAS(uri, h.MaxProofFileSize, alternateSources...) + if err != nil { + return nil, errors.Wrapf(err, "error reading core proof file") + } + + logger.Debug("Successfully downloaded core proof file", logfields.WithURIString(uri), logfields.WithContent(content)) + + cpf, err := models.ParseCoreProofFile(content) + if err != nil { + return nil, errors.Wrapf(err, "failed to parse content for core proof file[%s]", uri) + } + + err = h.validateCoreProofFile(cpf) + if err != nil { + return nil, errors.Wrapf(err, "core proof file[%s]", uri) + } + + return cpf, nil +} + +func (h *OperationProvider) validateCoreProofFile(cpf *models.CoreProofFile) error { + for i, signedData := range cpf.Operations.Recover { + _, err := h.parser.ParseSignedDataForRecover(signedData) + if err != nil { + return fmt.Errorf("failed to validate signed data for recover[%d]: %s", i, err.Error()) + } + } + + for i, signedData := range cpf.Operations.Deactivate { + _, err := h.parser.ParseSignedDataForDeactivate(signedData) + if err != nil { + return fmt.Errorf("failed to validate signed data for deactivate[%d]: %s", i, err.Error()) + } + } + + return nil +} + +// getProvisionalProofFile will download provisional proof file from cas and parse it into provisional proof file model. +// +//nolint:dupl,lll +func (h *OperationProvider) getProvisionalProofFile(uri string, alternateSources ...string) (*models.ProvisionalProofFile, error) { + content, err := h.readFromCAS(uri, h.MaxProofFileSize, alternateSources...) + if err != nil { + return nil, errors.Wrapf(err, "error reading provisional proof file") + } + + logger.Debug("Successfully downloaded provisional proof file", logfields.WithURIString(uri), logfields.WithContent(content)) + + ppf, err := models.ParseProvisionalProofFile(content) + if err != nil { + return nil, errors.Wrapf(err, "failed to parse content for provisional proof file[%s]", uri) + } + + err = h.validateProvisionalProofFile(ppf) + if err != nil { + return nil, errors.Wrapf(err, "provisional proof file[%s]", uri) + } + + return ppf, nil +} + +func (h *OperationProvider) validateProvisionalProofFile(ppf *models.ProvisionalProofFile) error { + for i, signedData := range ppf.Operations.Update { + _, err := h.parser.ParseSignedDataForUpdate(signedData) + if err != nil { + return fmt.Errorf("failed to validate signed data for update[%d]: %s", i, err.Error()) + } + } + + return nil +} + +// getProvisionalIndexFile will download provisional index file from cas and parse it into provisional index file model. +// +//nolint:dupl,lll +func (h *OperationProvider) getProvisionalIndexFile(uri string, alternateSources ...string) (*models.ProvisionalIndexFile, error) { + content, err := h.readFromCAS(uri, h.MaxProvisionalIndexFileSize, alternateSources...) 
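+ // downloaded file size is capped by the protocol's MaxProvisionalIndexFileSize parameter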
+ if err != nil { + return nil, errors.Wrapf(err, "error reading provisional index file") + } + + logger.Debug("Successfully downloaded provisional index file", logfields.WithURIString(uri), logfields.WithContent(content)) + + pif, err := models.ParseProvisionalIndexFile(content) + if err != nil { + return nil, errors.Wrapf(err, "failed to parse content for provisional index file[%s]", uri) + } + + err = h.validateProvisionalIndexFile(pif) + if err != nil { + return nil, errors.Wrapf(err, "provisional index file[%s]", uri) + } + + return pif, nil +} + +func (h *OperationProvider) validateProvisionalIndexFile(pif *models.ProvisionalIndexFile) error { + updateNum := 0 + + if pif.Operations != nil { + updateNum = len(pif.Operations.Update) + } + + if updateNum > 0 && pif.ProvisionalProofFileURI == "" { + return errors.New("missing provisional proof file URI") + } + + if updateNum == 0 && len(pif.ProvisionalProofFileURI) > 0 { + return errors.New("provisional proof file URI should be empty if there are no update operations") + } + + err := h.validateProvisionalIndexCASReferences(pif) + if err != nil { + return err + } + + return h.validateProvisionalIndexOperations(pif.Operations) +} + +func (h *OperationProvider) validateProvisionalIndexCASReferences(pif *models.ProvisionalIndexFile) error { + if err := h.validateURI(pif.ProvisionalProofFileURI); err != nil { + return errors.Wrapf(err, "provisional proof URI") + } + + if len(pif.Chunks) > 0 { + if err := h.validateURI(pif.Chunks[0].ChunkFileURI); err != nil { + return errors.Wrapf(err, "chunk URI") + } + } + + return nil +} + +func (h *OperationProvider) validateProvisionalIndexOperations(ops *models.ProvisionalOperations) error { + if ops == nil { // nothing to do + return nil + } + + for i, op := range ops.Update { + err := h.validateOperationReference(op) + if err != nil { + return fmt.Errorf("failed to validate operation reference for update[%d]: %s", i, err.Error()) + } + } + + return nil +} + +// getChunkFile will download chunk file from cas and parse it into chunk file model. +func (h *OperationProvider) getChunkFile(uri string, alternateSources ...string, +) (*models.ChunkFile, error) { //nolint:dupl + content, err := h.readFromCAS(uri, h.MaxChunkFileSize, alternateSources...) + if err != nil { + return nil, errors.Wrapf(err, "error reading chunk file") + } + + logger.Debug("Successfully downloaded chunk file", logfields.WithURIString(uri), logfields.WithContent(content)) + + cf, err := models.ParseChunkFile(content) + if err != nil { + return nil, errors.Wrapf(err, "failed to parse content for chunk file[%s]", uri) + } + + err = h.validateChunkFile(cf) + if err != nil { + return nil, errors.Wrapf(err, "chunk file[%s]", uri) + } + + return cf, nil +} + +func (h *OperationProvider) validateChunkFile(cf *models.ChunkFile) error { + for i, delta := range cf.Deltas { + err := h.parser.ValidateDelta(delta) + if err != nil { + return fmt.Errorf("failed to validate delta[%d]: %s", i, err.Error()) + } + } + + return nil +} + +func (h *OperationProvider) readFromCAS(uri string, maxSize uint, alternateSources ...string) ([]byte, error) { + bytes, err := h.cas.Read(uri) + if err != nil { + if len(alternateSources) == 0 { + return nil, fmt.Errorf("retrieve CAS content at uri[%s]: %w", uri, err) + } + + logger.Info("Failed to retrieve CAS content. 
Trying alternate sources.", + logfields.WithURIString(uri), log.WithError(err), logfields.WithSources(alternateSources...)) + + b, e := h.readFromAlternateCASSources(uri, alternateSources) + if e != nil { + logger.Warn("Failed to retrieve CAS content from alternate sources.", + logfields.WithURIString(uri), log.WithError(e), logfields.WithSources(alternateSources...)) + + return nil, fmt.Errorf("retrieve CAS content at uri[%s]: %w", uri, err) + } + + logger.Info("Successfully retrieved CAS content from alternate sources.", + logfields.WithURIString(uri), logfields.WithSources(alternateSources...)) + + bytes = b + } + + if len(bytes) > int(maxSize) { + return nil, fmt.Errorf("uri[%s]: content size %d exceeded maximum size %d", uri, len(bytes), maxSize) + } + + content, err := h.dp.Decompress(h.CompressionAlgorithm, bytes) + if err != nil { + return nil, errors.Wrapf(err, "decompress CAS uri[%s] using '%s'", uri, h.CompressionAlgorithm) + } + + maxDecompressedSize := maxSize * h.MaxMemoryDecompressionFactor + if len(content) > int(maxDecompressedSize) { + return nil, fmt.Errorf("uri[%s]: decompressed content size %d exceeded maximum decompressed content size %d", + uri, len(content), maxDecompressedSize) + } + + return content, nil +} + +// coreOperations contains operations in core index file. +type coreOperations struct { + Create []*model.Operation + Recover []*model.Operation + Deactivate []*model.Operation + Suffixes []string +} + +//nolint:funlen +func (h *OperationProvider) parseCoreIndexOperations(cif *models.CoreIndexFile, t *txn.SidetreeTxn, +) (*coreOperations, error) { + if cif.Operations == nil { + // nothing to do + return &coreOperations{}, nil + } + + logger.Debug("Parsing core index file operations for anchor string", logfields.WithAnchorString(t.AnchorString)) + + var suffixes []string + + var createOps []*model.Operation + + for _, op := range cif.Operations.Create { + suffix, err := model.GetUniqueSuffix(op.SuffixData, h.MultihashAlgorithms) + if err != nil { + return nil, err + } + + create := &model.Operation{ + Type: operation.TypeCreate, + UniqueSuffix: suffix, + SuffixData: op.SuffixData, + AnchorOrigin: op.SuffixData.AnchorOrigin, + } + + suffixes = append(suffixes, suffix) + createOps = append(createOps, create) + } + + var recoverOps []*model.Operation + + for _, op := range cif.Operations.Recover { + recoverOp := &model.Operation{ + Type: operation.TypeRecover, + UniqueSuffix: op.DidSuffix, + RevealValue: op.RevealValue, + } + + suffixes = append(suffixes, op.DidSuffix) + recoverOps = append(recoverOps, recoverOp) + } + + var deactivateOps []*model.Operation + + for _, op := range cif.Operations.Deactivate { + deactivate := &model.Operation{ + Type: operation.TypeDeactivate, + UniqueSuffix: op.DidSuffix, + RevealValue: op.RevealValue, + } + + suffixes = append(suffixes, op.DidSuffix) + deactivateOps = append(deactivateOps, deactivate) + } + + err := checkForDuplicates(suffixes) + if err != nil { + return nil, fmt.Errorf("check for duplicate suffixes in core index files: %s", err.Error()) + } + + return &coreOperations{ + Create: createOps, + Recover: recoverOps, + Deactivate: deactivateOps, + Suffixes: suffixes, + }, nil +} + +// provisionalOperations contains parsed operations from provisional index file. 
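+// Suffixes holds the DID suffix of each update operation, in the order in which the
+// operations appear, and is used to check for duplicate suffixes across batch files.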
+type provisionalOperations struct {
+	Update   []*model.Operation
+	Suffixes []string
+}
+
+func parseProvisionalIndexOperations(pif *models.ProvisionalIndexFile) *provisionalOperations {
+	if pif.Operations == nil { // nothing to do
+		return &provisionalOperations{}
+	}
+
+	var suffixes []string
+
+	var updateOps []*model.Operation
+
+	for _, op := range pif.Operations.Update {
+		update := &model.Operation{
+			Type:         operation.TypeUpdate,
+			UniqueSuffix: op.DidSuffix,
+			RevealValue:  op.RevealValue,
+		}
+
+		suffixes = append(suffixes, op.DidSuffix)
+		updateOps = append(updateOps, update)
+	}
+
+	return &provisionalOperations{Update: updateOps, Suffixes: suffixes}
+}
+
+func (h *OperationProvider) validateURI(uri string) error {
+	if len(uri) > int(h.Protocol.MaxCasURILength) {
+		return fmt.Errorf("CAS URI length[%d] exceeds maximum CAS URI length[%d]", len(uri), h.Protocol.MaxCasURILength)
+	}
+
+	return nil
+}
+
+// readFromAlternateCASSources reads the content at the given URI from alternate CAS sources. The URI for each
+// alternate source is composed using a provided CAS URI formatter, since the format of the URI is implementation-specific.
+func (h *OperationProvider) readFromAlternateCASSources(casURI string, sources []string) ([]byte, error) {
+	for _, source := range sources {
+		casURIForSource, err := h.formatCASURIForSource(casURI, source)
+		if err != nil {
+			logger.Warn("Error formatting CAS reference for alternate source",
+				logfields.WithSource(source), log.WithError(err))
+
+			continue
+		}
+
+		b, err := h.cas.Read(casURIForSource)
+		if err == nil {
+			logger.Debug("Successfully retrieved CAS content from alternate source",
+				logfields.WithSource(casURIForSource))
+
+			return b, nil
+		}
+
+		logger.Warn("Error retrieving CAS content from alternate source",
+			logfields.WithSource(casURIForSource), log.WithError(err))
+	}
+
+	return nil, fmt.Errorf("failed to retrieve CAS content from alternate sources")
+}
diff --git a/method/sidetreelongform/sidetree-core/versions/1_0/txnprovider/provider_test.go b/method/sidetreelongform/sidetree-core/versions/1_0/txnprovider/provider_test.go
new file mode 100644
index 0000000..91f4b35
--- /dev/null
+++ b/method/sidetreelongform/sidetree-core/versions/1_0/txnprovider/provider_test.go
@@ -0,0 +1,1747 @@
+/*
+Copyright SecureKey Technologies Inc. All Rights Reserved.
+ +SPDX-License-Identifier: Apache-2.0 +*/ + +package txnprovider + +import ( + "encoding/json" + "errors" + "fmt" + "testing" + + "github.com/stretchr/testify/require" + + "github.com/trustbloc/did-go/doc/json/canonicalizer" + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/api/cas" + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/api/operation" + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/api/protocol" + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/api/txn" + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/compression" + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/mocks" + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/versions/1_0/doccomposer" + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/versions/1_0/model" + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/versions/1_0/operationparser" + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/versions/1_0/txnprovider/models" +) + +//nolint:lll +const ( + compressionAlgorithm = "GZIP" + maxFileSize = 2000 // in bytes + + sampleCasURI = "bafkreih6ot2yfqcerzp5l2qupc77it2vdmepfhszitmswnpdtk34m4ura4" + longValue = "bafkreih6ot2yfqcerzp5l2qupc77it2vdmepfhszitmswnpdtk34m4ura4bafkreih6ot2yfqcerzp5l2qupc77it2vdmepfhszitmswnpdtk34m4ura4" +) + +func TestNewOperationProvider(t *testing.T) { + pc := mocks.NewMockProtocolClient() + + handler := NewOperationProvider( + pc.Protocol, + operationparser.New(pc.Protocol), + mocks.NewMockCasClient(nil), + compression.New(compression.WithDefaultAlgorithms())) + + require.NotNil(t, handler) +} + +func TestHandler_GetTxnOperations(t *testing.T) { + const ( + createOpsNum = 2 + updateOpsNum = 3 + deactivateOpsNum = 2 + recoverOpsNum = 2 + ) + + pc := mocks.NewMockProtocolClient() + parser := operationparser.New(pc.Protocol) + cp := compression.New(compression.WithDefaultAlgorithms()) + + t.Run("success", func(t *testing.T) { + cas := mocks.NewMockCasClient(nil) + handler := NewOperationHandler(pc.Protocol, cas, cp, operationparser.New(pc.Protocol), + &mocks.MetricsProvider{}) + + ops := getTestOperations(createOpsNum, updateOpsNum, deactivateOpsNum, recoverOpsNum) + + anchoringInfo, err := handler.PrepareTxnFiles(ops) + require.NoError(t, err) + require.NotEmpty(t, anchoringInfo.AnchorString) + require.Len(t, anchoringInfo.OperationReferences, createOpsNum+updateOpsNum+deactivateOpsNum+recoverOpsNum) + require.Len(t, anchoringInfo.Artifacts, 5) + + provider := NewOperationProvider(pc.Protocol, parser, cas, cp) + + txnOps, err := provider.GetTxnOperations(&txn.SidetreeTxn{ + Namespace: defaultNS, + AnchorString: anchoringInfo.AnchorString, + TransactionNumber: 1, + TransactionTime: 1, + }) + + require.NoError(t, err) + require.Equal(t, createOpsNum+updateOpsNum+deactivateOpsNum+recoverOpsNum, len(txnOps)) + }) + + t.Run("error - delta exceeds maximum delta size in chunk file", func(t *testing.T) { + cas := mocks.NewMockCasClient(nil) + handler := NewOperationHandler(pc.Protocol, cas, cp, operationparser.New(pc.Protocol), + &mocks.MetricsProvider{}) + + ops := getTestOperations(createOpsNum, updateOpsNum, deactivateOpsNum, recoverOpsNum) + + anchoringInfo, err := handler.PrepareTxnFiles(ops) + require.NoError(t, err) + require.NotEmpty(t, anchoringInfo.AnchorString) + require.Equal(t, len(anchoringInfo.OperationReferences), + createOpsNum+updateOpsNum+deactivateOpsNum+recoverOpsNum) + + smallDeltaProofSize := 
mocks.GetDefaultProtocolParameters() + smallDeltaProofSize.MaxDeltaSize = 50 + + provider := NewOperationProvider(smallDeltaProofSize, operationparser.New(smallDeltaProofSize), cas, cp) + + txnOps, err := provider.GetTxnOperations(&txn.SidetreeTxn{ + Namespace: defaultNS, + AnchorString: anchoringInfo.AnchorString, + TransactionNumber: 1, + TransactionTime: 1, + }) + + require.Error(t, err) + require.Nil(t, txnOps) + require.Contains(t, err.Error(), + "failed to validate delta[0]: delta size[160] exceeds maximum delta size[50]") + }) + + t.Run("error - number of operations doesn't match", func(t *testing.T) { + cas := mocks.NewMockCasClient(nil) + handler := NewOperationHandler(pc.Protocol, cas, cp, operationparser.New(pc.Protocol), + &mocks.MetricsProvider{}) + + ops := getTestOperations(createOpsNum, updateOpsNum, deactivateOpsNum, recoverOpsNum) + + // anchor string has 9 operations "9.coreIndexURI" + anchoringInfo, err := handler.PrepareTxnFiles(ops) + require.NoError(t, err) + require.NotEmpty(t, anchoringInfo.AnchorString) + + // update number of operations in anchor string from 9 to 7 + ad, err := ParseAnchorData(anchoringInfo.AnchorString) + require.NoError(t, err) + ad.NumberOfOperations = 7 + anchorString := ad.GetAnchorString() + + provider := NewOperationProvider( + mocks.NewMockProtocolClient().Protocol, operationparser.New(pc.Protocol), cas, cp) + + txnOps, err := provider.GetTxnOperations(&txn.SidetreeTxn{ + Namespace: defaultNS, + AnchorString: anchorString, + TransactionNumber: 1, + TransactionTime: 1, + }) + + require.Error(t, err) + require.Nil(t, txnOps) + require.Contains(t, err.Error(), "number of txn ops[9] doesn't match anchor string num of ops[7]") + }) + + t.Run("error - read from CAS error", func(t *testing.T) { + protocolClient := mocks.NewMockProtocolClient() + handler := NewOperationProvider( + protocolClient.Protocol, + operationparser.New(protocolClient.Protocol), + mocks.NewMockCasClient(errors.New("CAS error")), cp) + + txnOps, err := handler.GetTxnOperations(&txn.SidetreeTxn{ + Namespace: defaultNS, + AnchorString: "1" + delimiter + "coreIndexURI", + TransactionNumber: 1, + TransactionTime: 1, + }) + + require.Error(t, err) + require.Nil(t, txnOps) + require.Contains(t, err.Error(), + "error reading core index file: retrieve CAS content at uri[coreIndexURI]: CAS error") + }) + + t.Run("error - parse core index operations error", func(t *testing.T) { + cas := mocks.NewMockCasClient(nil) + handler := NewOperationHandler(pc.Protocol, cas, cp, operationparser.New(pc.Protocol), + &mocks.MetricsProvider{}) + + ops := getTestOperations(createOpsNum, updateOpsNum, deactivateOpsNum, recoverOpsNum) + + anchoringInfo, err := handler.PrepareTxnFiles(ops) + require.NoError(t, err) + require.NotEmpty(t, anchoringInfo.AnchorString) + + invalid := mocks.NewMockProtocolClient().Protocol + invalid.MultihashAlgorithms = []uint{55} + + provider := NewOperationProvider(invalid, operationparser.New(invalid), cas, cp) + + txnOps, err := provider.GetTxnOperations(&txn.SidetreeTxn{ + Namespace: mocks.DefaultNS, + AnchorString: anchoringInfo.AnchorString, + TransactionNumber: 1, + TransactionTime: 1, + }) + + require.Error(t, err) + require.Nil(t, txnOps) + require.Contains(t, err.Error(), "failed to validate suffix data") + }) + + t.Run("error - parse anchor data error", func(t *testing.T) { + p := mocks.NewMockProtocolClient().Protocol + provider := NewOperationProvider(p, operationparser.New(p), mocks.NewMockCasClient(nil), cp) + + txnOps, err := 
provider.GetTxnOperations(&txn.SidetreeTxn{ + AnchorString: "abc.anchor", + TransactionNumber: 1, + TransactionTime: 1, + }) + + require.Error(t, err) + require.Nil(t, txnOps) + require.Contains(t, err.Error(), "parse anchor data[abc.anchor] failed") + }) + + t.Run("success - deactivate only", func(t *testing.T) { + const deactivateOpsNum = 2 + + var ops []*operation.QueuedOperation + ops = append(ops, generateOperations(deactivateOpsNum, operation.TypeDeactivate)...) + + cas := mocks.NewMockCasClient(nil) + handler := NewOperationHandler(pc.Protocol, cas, cp, operationparser.New(pc.Protocol), + &mocks.MetricsProvider{}) + + anchoringInfo, err := handler.PrepareTxnFiles(ops) + require.NoError(t, err) + require.NotEmpty(t, anchoringInfo.AnchorString) + require.Equal(t, len(anchoringInfo.OperationReferences), deactivateOpsNum) + require.Equal(t, anchoringInfo.OperationReferences[0].Type, operation.TypeDeactivate) + // core proof, core index + require.Len(t, anchoringInfo.Artifacts, 2) + + p := mocks.NewMockProtocolClient().Protocol + provider := NewOperationProvider(p, operationparser.New(p), cas, cp) + + txnOps, err := provider.GetTxnOperations(&txn.SidetreeTxn{ + Namespace: defaultNS, + AnchorString: anchoringInfo.AnchorString, + TransactionNumber: 1, + TransactionTime: 1, + }) + + require.NoError(t, err) + require.Equal(t, deactivateOpsNum, len(txnOps)) + }) + + t.Run("success - update only", func(t *testing.T) { + const updateOpsNum = 2 + + var ops []*operation.QueuedOperation + ops = append(ops, generateOperations(updateOpsNum, operation.TypeUpdate)...) + + cas := mocks.NewMockCasClient(nil) + handler := NewOperationHandler(pc.Protocol, cas, cp, operationparser.New(pc.Protocol), + &mocks.MetricsProvider{}) + + anchoringInfo, err := handler.PrepareTxnFiles(ops) + require.NoError(t, err) + require.NotEmpty(t, anchoringInfo.AnchorString) + require.Equal(t, len(anchoringInfo.OperationReferences), updateOpsNum) + require.Equal(t, anchoringInfo.OperationReferences[0].Type, operation.TypeUpdate) + // chunk, provisional proof and provisional index, core index + require.Len(t, anchoringInfo.Artifacts, 4) + + p := mocks.NewMockProtocolClient().Protocol + provider := NewOperationProvider(p, operationparser.New(p), cas, cp) + + txnOps, err := provider.GetTxnOperations(&txn.SidetreeTxn{ + Namespace: defaultNS, + AnchorString: anchoringInfo.AnchorString, + TransactionNumber: 1, + TransactionTime: 1, + }) + + require.NoError(t, err) + require.Equal(t, updateOpsNum, len(txnOps)) + }) + + t.Run("success - create only", func(t *testing.T) { + const createOpsNum = 2 + + var ops []*operation.QueuedOperation + ops = append(ops, generateOperations(createOpsNum, operation.TypeCreate)...) 
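+		// a create-only batch requires no proof files, so fewer artifacts are
+		// expected below than for batches containing update, recover or deactivate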
+ + cas := mocks.NewMockCasClient(nil) + handler := NewOperationHandler(pc.Protocol, cas, cp, operationparser.New(pc.Protocol), + &mocks.MetricsProvider{}) + + anchoringInfo, err := handler.PrepareTxnFiles(ops) + require.NoError(t, err) + require.NotEmpty(t, anchoringInfo.AnchorString) + require.Equal(t, len(anchoringInfo.OperationReferences), createOpsNum) + require.Equal(t, anchoringInfo.OperationReferences[0].Type, operation.TypeCreate) + // chunk, provisional index, and core index + require.Len(t, anchoringInfo.Artifacts, 3) + + p := mocks.NewMockProtocolClient().Protocol + provider := NewOperationProvider(p, operationparser.New(p), cas, cp) + + txnOps, err := provider.GetTxnOperations(&txn.SidetreeTxn{ + Namespace: defaultNS, + AnchorString: anchoringInfo.AnchorString, + TransactionNumber: 1, + TransactionTime: 1, + }) + + require.NoError(t, err) + require.Equal(t, createOpsNum, len(txnOps)) + }) + + t.Run("success - recover only", func(t *testing.T) { + const recoverOpsNum = 2 + + var ops []*operation.QueuedOperation + ops = append(ops, generateOperations(recoverOpsNum, operation.TypeRecover)...) + + cas := mocks.NewMockCasClient(nil) + handler := NewOperationHandler(pc.Protocol, cas, cp, operationparser.New(pc.Protocol), + &mocks.MetricsProvider{}) + + anchoringInfo, err := handler.PrepareTxnFiles(ops) + require.NoError(t, err) + require.NotEmpty(t, anchoringInfo.AnchorString) + require.Equal(t, len(anchoringInfo.OperationReferences), recoverOpsNum) + require.Equal(t, anchoringInfo.OperationReferences[0].Type, operation.TypeRecover) + // chunk, provisional index, core proof, core index + require.Len(t, anchoringInfo.Artifacts, 4) + + p := mocks.NewMockProtocolClient().Protocol + provider := NewOperationProvider(p, operationparser.New(p), cas, cp) + + txnOps, err := provider.GetTxnOperations(&txn.SidetreeTxn{ + Namespace: defaultNS, + AnchorString: anchoringInfo.AnchorString, + TransactionNumber: 1, + TransactionTime: 1, + }) + + require.NoError(t, err) + require.Equal(t, recoverOpsNum, len(txnOps)) + }) +} + +func TestHandler_GetCoreIndexFile(t *testing.T) { + cp := compression.New(compression.WithDefaultAlgorithms()) + p := protocol.Protocol{ + MaxCoreIndexFileSize: maxFileSize, + CompressionAlgorithm: compressionAlgorithm, + MaxCasURILength: 100, + MultihashAlgorithms: []uint{sha2_256}, + MaxMemoryDecompressionFactor: 3, + } + + cas := mocks.NewMockCasClient(nil) + content, err := cp.Compress(compressionAlgorithm, []byte("{}")) + require.NoError(t, err) + address, err := cas.Write(content) + require.NoError(t, err) + + parser := operationparser.New(p) + + t.Run("success", func(t *testing.T) { + provider := NewOperationProvider(p, parser, cas, cp) + + file, err := provider.getCoreIndexFile(address) + require.NoError(t, err) + require.NotNil(t, file) + }) + + t.Run("error - core index file exceeds maximum size", func(t *testing.T) { + provider := NewOperationProvider( + protocol.Protocol{MaxCoreIndexFileSize: 15, CompressionAlgorithm: compressionAlgorithm}, parser, cas, cp) + + file, err := provider.getCoreIndexFile(address) + require.Error(t, err) + require.Nil(t, file) + require.Contains(t, err.Error(), "exceeded maximum size 15") + }) + + t.Run("error - parse core index file error (invalid JSON)", func(t *testing.T) { + cas := mocks.NewMockCasClient(nil) + content, err := cp.Compress(compressionAlgorithm, []byte("invalid")) + require.NoError(t, err) + address, err := cas.Write(content) + require.NoError(t, err) + + provider := NewOperationProvider(p, parser, cas, cp) + file, err 
:= provider.getCoreIndexFile(address)
+		require.Error(t, err)
+		require.Nil(t, file)
+		require.Contains(t, err.Error(), "failed to parse content for core index file")
+	})
+
+	t.Run("error - validate core index file (invalid suffix data)", func(t *testing.T) {
+		batchFiles, err := generateDefaultBatchFiles()
+		require.NoError(t, err)
+
+		// invalidate suffix data for first create
+		batchFiles.CoreIndex.Operations.Create[0].SuffixData = &model.SuffixDataModel{
+			DeltaHash:          "",
+			RecoveryCommitment: "",
+		}
+
+		invalidCif, err := json.Marshal(batchFiles.CoreIndex)
+		require.NoError(t, err)
+
+		cas := mocks.NewMockCasClient(nil)
+		content, err := cp.Compress(compressionAlgorithm, invalidCif)
+		require.NoError(t, err)
+		address, err := cas.Write(content)
+		require.NoError(t, err)
+
+		provider := NewOperationProvider(p, parser, cas, cp)
+		file, err := provider.getCoreIndexFile(address)
+		require.Error(t, err)
+		require.Nil(t, file)
+		require.Contains(t, err.Error(), "failed to validate suffix data for create[0]")
+	})
+
+	t.Run("error - missing core proof URI", func(t *testing.T) {
+		batchFiles, err := generateDefaultBatchFiles()
+		require.NoError(t, err)
+
+		// invalidate core proof URI
+		batchFiles.CoreIndex.CoreProofFileURI = ""
+
+		provider := NewOperationProvider(p, operationparser.New(p), nil, nil)
+		err = provider.validateCoreIndexFile(batchFiles.CoreIndex)
+		require.Error(t, err)
+		require.Contains(t, err.Error(), "missing core proof file URI")
+	})
+}
+
+func TestHandler_ValidateCoreIndexFile(t *testing.T) {
+	p := mocks.NewMockProtocolClient().Protocol
+
+	t.Run("success", func(t *testing.T) {
+		batchFiles, err := generateDefaultBatchFiles()
+		require.NoError(t, err)
+
+		provider := NewOperationProvider(p, operationparser.New(p), nil, nil)
+		err = provider.validateCoreIndexFile(batchFiles.CoreIndex)
+		require.NoError(t, err)
+	})
+
+	t.Run("error - missing core proof URI", func(t *testing.T) {
+		batchFiles, err := generateDefaultBatchFiles()
+		require.NoError(t, err)
+
+		// invalidate core proof URI
+		batchFiles.CoreIndex.CoreProofFileURI = ""
+
+		provider := NewOperationProvider(p, operationparser.New(p), nil, nil)
+		err = provider.validateCoreIndexFile(batchFiles.CoreIndex)
+		require.Error(t, err)
+		require.Contains(t, err.Error(), "missing core proof file URI")
+	})
+
+	t.Run("error - core proof URI present without recover and deactivate ops", func(t *testing.T) {
+		batchFiles, err := generateDefaultBatchFiles()
+		require.NoError(t, err)
+
+		// invalidate deactivate and recover operations
+		batchFiles.CoreIndex.Operations.Deactivate = nil
+		batchFiles.CoreIndex.Operations.Recover = nil
+
+		provider := NewOperationProvider(p, operationparser.New(p), nil, nil)
+		err = provider.validateCoreIndexFile(batchFiles.CoreIndex)
+		require.Error(t, err)
+		require.Contains(t, err.Error(), "core proof file URI should be "+
+			"empty if there are no recover and/or deactivate operations")
+	})
+
+	t.Run("error - invalid suffix data for create", func(t *testing.T) {
+		batchFiles, err := generateDefaultBatchFiles()
+		require.NoError(t, err)
+
+		// invalidate suffix data for first create
+		batchFiles.CoreIndex.Operations.Create[0].SuffixData = &model.SuffixDataModel{}
+
+		provider := NewOperationProvider(p, operationparser.New(p), nil, nil)
+		err = provider.validateCoreIndexFile(batchFiles.CoreIndex)
+		require.Error(t, err)
+		require.Contains(t, err.Error(), "failed to validate suffix data for create[0]")
+	})
+
+	t.Run("error - invalid did suffix for recover", func(t *testing.T) {
+		batchFiles, err := generateDefaultBatchFiles()
+		require.NoError(t, err)
+
+		// invalidate did suffix for first recover
+		batchFiles.CoreIndex.Operations.Recover[0].DidSuffix = ""
+
+		provider := NewOperationProvider(p, operationparser.New(p), nil, nil)
+		err = provider.validateCoreIndexFile(batchFiles.CoreIndex)
+		require.Error(t, err)
+		require.Contains(t, err.Error(), "failed to validate operation "+
+			"reference for recover[0]: missing did suffix")
+	})
+
+	t.Run("error - invalid reveal value for deactivate", func(t *testing.T) {
+		batchFiles, err := generateDefaultBatchFiles()
+		require.NoError(t, err)
+
+		// invalidate reveal value for first deactivate
+		batchFiles.CoreIndex.Operations.Deactivate[0].RevealValue = ""
+
+		provider := NewOperationProvider(p, operationparser.New(p), nil, nil)
+		err = provider.validateCoreIndexFile(batchFiles.CoreIndex)
+		require.Error(t, err)
+		require.Contains(t, err.Error(), "failed to validate operation "+
+			"reference for deactivate[0]: missing reveal value")
+	})
+
+	t.Run("error - reveal value exceeds maximum hash length", func(t *testing.T) {
+		batchFiles, err := generateDefaultBatchFiles()
+		require.NoError(t, err)
+
+		// invalidate reveal value for first deactivate
+		batchFiles.CoreIndex.Operations.Deactivate[0].RevealValue = longValue
+
+		provider := NewOperationProvider(p, operationparser.New(p), nil, nil)
+		err = provider.validateCoreIndexFile(batchFiles.CoreIndex)
+		require.Error(t, err)
+		require.Contains(t, err.Error(),
+			"failed to validate operation reference for deactivate[0]: "+
+				"reveal value length[118] exceeds maximum hash length[100]")
+	})
+
+	t.Run("error - did suffix exceeds maximum hash length", func(t *testing.T) {
+		batchFiles, err := generateDefaultBatchFiles()
+		require.NoError(t, err)
+
+		// invalidate did suffix for first recover
+		batchFiles.CoreIndex.Operations.Recover[0].DidSuffix = longValue
+
+		provider := NewOperationProvider(p, operationparser.New(p), nil, nil)
+		err = provider.validateCoreIndexFile(batchFiles.CoreIndex)
+		require.Error(t, err)
+		require.Contains(t, err.Error(),
+			"failed to validate operation reference for recover[0]:"+
+				" did suffix length[118] exceeds maximum hash length[100]")
+	})
+
+	t.Run("error - recovery commitment length exceeds max hash length", func(t *testing.T) {
+		lowMaxHashLength := mocks.GetDefaultProtocolParameters()
+		lowMaxHashLength.MaxOperationHashLength = 10
+
+		batchFiles, err := generateDefaultBatchFiles()
+		require.NoError(t, err)
+
+		provider := NewOperationProvider(lowMaxHashLength, operationparser.New(lowMaxHashLength), nil, nil)
+		err = provider.validateCoreIndexFile(batchFiles.CoreIndex)
+		require.Error(t, err)
+		require.Contains(t, err.Error(),
+			"failed to validate suffix data for create[0]: "+
+				"recovery commitment length[46] exceeds maximum hash length[10]")
+	})
+}
+
+func TestHandler_GetProvisionalIndexFile(t *testing.T) {
+	cp := compression.New(compression.WithDefaultAlgorithms())
+	p := protocol.Protocol{
+		MaxProvisionalIndexFileSize:  maxFileSize,
+		CompressionAlgorithm:         compressionAlgorithm,
+		MaxMemoryDecompressionFactor: 3,
+	}
+
+	cas := mocks.NewMockCasClient(nil)
+	content, err := cp.Compress(compressionAlgorithm, []byte("{}"))
+	require.NoError(t, err)
+	address, err := cas.Write(content)
+	require.NoError(t, err)
+
+	t.Run("success", func(t *testing.T) {
+		provider := NewOperationProvider(p, operationparser.New(p), cas, cp)
+
+		file, err := provider.getProvisionalIndexFile(address)
+		require.NoError(t, err)
+		require.NotNil(t, file)
+	})
+
+	t.Run("error - provisional index file exceeds maximum size", func(t *testing.T) {
+		lowMaxFileSize := protocol.Protocol{MaxProvisionalIndexFileSize: 5, CompressionAlgorithm: compressionAlgorithm}
+		parser := operationparser.New(lowMaxFileSize)
+		provider := NewOperationProvider(lowMaxFileSize, parser, cas, cp)
+
+		file, err := provider.getProvisionalIndexFile(address)
+		require.Error(t, err)
+		require.Nil(t, file)
+		require.Contains(t, err.Error(), "exceeded maximum size 5")
+	})
+
+	t.Run("error - parse provisional index file error (invalid JSON)", func(t *testing.T) {
+		cas := mocks.NewMockCasClient(nil)
+		content, err := cp.Compress(compressionAlgorithm, []byte("invalid"))
+		require.NoError(t, err)
+
+		address, err := cas.Write(content)
+		require.NoError(t, err)
+
+		parser := operationparser.New(p)
+		provider := NewOperationProvider(p, parser, cas, cp)
+		file, err := provider.getProvisionalIndexFile(address)
+		require.Error(t, err)
+		require.Nil(t, file)
+		require.Contains(t, err.Error(), "failed to parse content for provisional index file")
+	})
+}
+
+func TestHandler_ValidateProvisionalIndexFile(t *testing.T) {
+	p := mocks.NewMockProtocolClient().Protocol
+
+	t.Run("success", func(t *testing.T) {
+		batchFiles, err := generateDefaultBatchFiles()
+		require.NoError(t, err)
+
+		provider := NewOperationProvider(p, operationparser.New(p), nil, nil)
+		err = provider.validateProvisionalIndexFile(batchFiles.ProvisionalIndex)
+		require.NoError(t, err)
+	})
+
+	t.Run("error - missing provisional proof file", func(t *testing.T) {
+		batchFiles, err := generateDefaultBatchFiles()
+		require.NoError(t, err)
+
+		// invalidate provisional proof URI
+		batchFiles.ProvisionalIndex.ProvisionalProofFileURI = ""
+
+		provider := NewOperationProvider(p, operationparser.New(p), nil, nil)
+		err = provider.validateProvisionalIndexFile(batchFiles.ProvisionalIndex)
+		require.Error(t, err)
+		require.Contains(t, err.Error(), "missing provisional proof file URI")
+	})
+
+	t.Run("error - provisional proof file uri present without update ops", func(t *testing.T) {
+		batchFiles, err := generateDefaultBatchFiles()
+		require.NoError(t, err)
+
+		// remove update operations
+		batchFiles.ProvisionalIndex.Operations.Update = nil
+
+		provider := NewOperationProvider(p, operationparser.New(p), nil, nil)
+		err = provider.validateProvisionalIndexFile(batchFiles.ProvisionalIndex)
+		require.Error(t, err)
+		require.Contains(t, err.Error(),
+			"provisional proof file URI should be empty if there are no update operations")
+	})
+
+	t.Run("error - missing did suffix", func(t *testing.T) {
+		batchFiles, err := generateDefaultBatchFiles()
+		require.NoError(t, err)
+
+		// invalidate did suffix
+		batchFiles.ProvisionalIndex.Operations.Update[0].DidSuffix = ""
+
+		provider := NewOperationProvider(p, operationparser.New(p), nil, nil)
+		err = provider.validateProvisionalIndexFile(batchFiles.ProvisionalIndex)
+		require.Error(t, err)
+		require.Contains(t, err.Error(),
+			"failed to validate operation reference for update[0]: missing did suffix")
+	})
+
+	t.Run("error - missing reveal value", func(t *testing.T) {
+		batchFiles, err := generateDefaultBatchFiles()
+		require.NoError(t, err)
+
+		// invalidate reveal value
+		batchFiles.ProvisionalIndex.Operations.Update[0].RevealValue = ""
+
+		provider := NewOperationProvider(p, operationparser.New(p), nil, nil)
+		err = provider.validateProvisionalIndexFile(batchFiles.ProvisionalIndex)
+		require.Error(t, err)
+		require.Contains(t, err.Error(),
+			"failed to validate operation reference for update[0]: 
missing reveal value") + }) + + t.Run("success - validate IPFS CID", func(t *testing.T) { + batchFiles, err := generateDefaultBatchFiles() + require.NoError(t, err) + + // set valid IPFS CID + batchFiles.ProvisionalIndex.ProvisionalProofFileURI = sampleCasURI + + provider := NewOperationProvider(p, operationparser.New(p), nil, nil) + err = provider.validateProvisionalIndexFile(batchFiles.ProvisionalIndex) + require.NoError(t, err) + }) + + t.Run("error - provisional proof URI too long", func(t *testing.T) { + batchFiles, err := generateDefaultBatchFiles() + require.NoError(t, err) + + batchFiles.ProvisionalIndex.ProvisionalProofFileURI = longValue + + provider := NewOperationProvider(p, operationparser.New(p), nil, nil) + err = provider.validateProvisionalIndexFile(batchFiles.ProvisionalIndex) + require.Error(t, err) + require.Contains(t, err.Error(), + "provisional proof URI: CAS URI length[118] exceeds maximum CAS URI length[100]") + }) + + t.Run("error - chunk URI too long", func(t *testing.T) { + batchFiles, err := generateDefaultBatchFiles() + require.NoError(t, err) + + batchFiles.ProvisionalIndex.Chunks[0].ChunkFileURI = longValue + + provider := NewOperationProvider(p, operationparser.New(p), nil, nil) + err = provider.validateProvisionalIndexFile(batchFiles.ProvisionalIndex) + require.Error(t, err) + require.Contains(t, err.Error(), "chunk URI: CAS URI length[118] exceeds maximum CAS URI length[100]") + }) +} + +func TestHandler_GetChunkFile(t *testing.T) { + cp := compression.New(compression.WithDefaultAlgorithms()) + p := protocol.Protocol{ + MaxChunkFileSize: maxFileSize, + CompressionAlgorithm: compressionAlgorithm, + MaxMemoryDecompressionFactor: 3, + } + + cas := mocks.NewMockCasClient(nil) + content, err := cp.Compress(compressionAlgorithm, []byte("{}")) + require.NoError(t, err) + address, err := cas.Write(content) + require.NoError(t, err) + + t.Run("success", func(t *testing.T) { + provider := NewOperationProvider(p, operationparser.New(p), cas, cp) + + file, err := provider.getChunkFile(address) + require.NoError(t, err) + require.NotNil(t, file) + }) + + t.Run("error - chunk file exceeds maximum size", func(t *testing.T) { + lowMaxFileSize := protocol.Protocol{MaxChunkFileSize: 10, CompressionAlgorithm: compressionAlgorithm} + provider := NewOperationProvider(lowMaxFileSize, operationparser.New(p), cas, cp) + + file, err := provider.getChunkFile(address) + require.Error(t, err) + require.Nil(t, file) + require.Contains(t, err.Error(), "exceeded maximum size 10") + }) + + t.Run("error - parse chunk file error (invalid JSON)", func(t *testing.T) { + content, err := cp.Compress(compressionAlgorithm, []byte("invalid")) + require.NoError(t, err) + address, err := cas.Write(content) + require.NoError(t, err) + + provider := NewOperationProvider(p, operationparser.New(p), cas, cp) + file, err := provider.getChunkFile(address) + require.Error(t, err) + require.Nil(t, file) + require.Contains(t, err.Error(), "failed to parse content for chunk file") + }) + + t.Run("error - validate chunk file (invalid delta)", func(t *testing.T) { + batchFiles, err := generateDefaultBatchFiles() + require.NoError(t, err) + + // invalidate first delta + batchFiles.Chunk.Deltas[0] = &model.DeltaModel{} + + invalid, err := json.Marshal(batchFiles.Chunk) + require.NoError(t, err) + + cas := mocks.NewMockCasClient(nil) + content, err := cp.Compress(compressionAlgorithm, invalid) + require.NoError(t, err) + + address, err := cas.Write(content) + require.NoError(t, err) + + provider := 
NewOperationProvider(p, operationparser.New(p), cas, cp) + file, err := provider.getChunkFile(address) + require.Error(t, err) + require.Nil(t, file) + require.Contains(t, err.Error(), "failed to validate delta[0]") + }) +} + +func TestHandler_ValidateChunkFile(t *testing.T) { + p := mocks.NewMockProtocolClient().Protocol + + t.Run("success", func(t *testing.T) { + batchFiles, err := generateDefaultBatchFiles() + require.NoError(t, err) + + provider := NewOperationProvider(p, operationparser.New(p), nil, nil) + err = provider.validateChunkFile(batchFiles.Chunk) + require.NoError(t, err) + }) + + t.Run("error - invalid delta", func(t *testing.T) { + batchFiles, err := generateDefaultBatchFiles() + require.NoError(t, err) + + // invalidate first delta + batchFiles.Chunk.Deltas[0] = &model.DeltaModel{} + + provider := NewOperationProvider(p, operationparser.New(p), nil, nil) + err = provider.validateChunkFile(batchFiles.Chunk) + require.Error(t, err) + require.Contains(t, err.Error(), "failed to validate delta[0]") + }) +} + +func TestHandler_readFromCAS(t *testing.T) { + cp := compression.New(compression.WithDefaultAlgorithms()) + p := protocol.Protocol{ + MaxChunkFileSize: maxFileSize, + CompressionAlgorithm: compressionAlgorithm, + MaxMemoryDecompressionFactor: 3, + } + + cas := mocks.NewMockCasClient(nil) + content, err := cp.Compress(compressionAlgorithm, []byte("{}")) + require.NoError(t, err) + address, err := cas.Write(content) + require.NoError(t, err) + + t.Run("success", func(t *testing.T) { + provider := NewOperationProvider(p, operationparser.New(p), cas, cp) + + file, err := provider.readFromCAS(address, maxFileSize) + require.NoError(t, err) + require.NotNil(t, file) + }) + + t.Run("error - read from CAS error", func(t *testing.T) { + provider := NewOperationProvider(p, + operationparser.New(p), mocks.NewMockCasClient(errors.New("CAS error")), cp) + + file, err := provider.getChunkFile("address") + require.Error(t, err) + require.Nil(t, file) + require.Contains(t, err.Error(), " retrieve CAS content at uri[address]: CAS error") + }) + + t.Run("error - content exceeds maximum size", func(t *testing.T) { + provider := NewOperationProvider(p, operationparser.New(p), cas, cp) + + file, err := provider.readFromCAS(address, 20) + require.Error(t, err) + require.Nil(t, file) + require.Contains(t, err.Error(), "exceeded maximum size 20") + }) + + t.Run("error - content exceeds maximum decompressed size", func(t *testing.T) { + p2 := protocol.Protocol{ + CompressionAlgorithm: compressionAlgorithm, + MaxMemoryDecompressionFactor: 1, + } + + provider := NewOperationProvider(p2, operationparser.New(p2), cas, cp) + + testContent, err := cp.Compress(compressionAlgorithm, []byte(sampleChunkFile)) + require.NoError(t, err) + testAddress, err := cas.Write(testContent) + require.NoError(t, err) + + file, err := provider.readFromCAS(testAddress, 247) + require.Error(t, err) + require.Nil(t, file) + require.Contains(t, err.Error(), + "decompressed content size 267 exceeded maximum decompressed content size 247") + }) + + t.Run("error - decompression error", func(t *testing.T) { + p2 := protocol.Protocol{ + MaxChunkFileSize: maxFileSize, + CompressionAlgorithm: "alg", + MaxMemoryDecompressionFactor: 3, + } + + provider := NewOperationProvider(p2, operationparser.New(p2), cas, cp) + + file, err := provider.readFromCAS(address, maxFileSize) + require.Error(t, err) + require.Nil(t, file) + require.Contains(t, err.Error(), "compression algorithm 'alg' not supported") + }) + + t.Run("alternate sources", 
func(t *testing.T) {
+		provider := NewOperationProvider(p, operationparser.New(p), cas, cp,
+			WithSourceCASURIFormatter(func(uri, domain string) (string, error) {
+				return fmt.Sprintf("%s:%s", domain, uri), nil
+			}),
+		)
+
+		_, err := provider.readFromCAS("address", maxFileSize,
+			"https:orb.domain1.com", "https:orb.domain2.com")
+		require.Error(t, err)
+		require.Contains(t, err.Error(), "not found")
+	})
+
+	t.Run("alternate sources - no formatter", func(t *testing.T) {
+		provider := NewOperationProvider(p, operationparser.New(p), cas, cp)
+
+		_, err := provider.readFromCAS("address", maxFileSize,
+			"https:orb.domain1.com", "https:orb.domain2.com")
+		require.Error(t, err)
+		require.Contains(t, err.Error(), "not found")
+	})
+}
+
+func TestHandler_GetCoreProofFile(t *testing.T) {
+	cp := compression.New(compression.WithDefaultAlgorithms())
+	p := protocol.Protocol{
+		MaxProofFileSize:             maxFileSize,
+		CompressionAlgorithm:         compressionAlgorithm,
+		MaxMemoryDecompressionFactor: 3,
+	}
+
+	cas := mocks.NewMockCasClient(nil)
+	content, err := cp.Compress(compressionAlgorithm, []byte("{}"))
+	require.NoError(t, err)
+	uri, err := cas.Write(content)
+	require.NoError(t, err)
+
+	t.Run("success", func(t *testing.T) {
+		provider := NewOperationProvider(p, operationparser.New(p), cas, cp)
+
+		file, err := provider.getCoreProofFile(uri)
+		require.NoError(t, err)
+		require.NotNil(t, file)
+	})
+
+	t.Run("error - core proof file exceeds maximum size", func(t *testing.T) {
+		lowMaxFileSize := protocol.Protocol{MaxProofFileSize: 10, CompressionAlgorithm: compressionAlgorithm}
+		provider := NewOperationProvider(lowMaxFileSize, operationparser.New(p), cas, cp)
+
+		file, err := provider.getCoreProofFile(uri)
+		require.Error(t, err)
+		require.Nil(t, file)
+		require.Contains(t, err.Error(), "exceeded maximum size 10")
+	})
+
+	t.Run("error - parse core proof file error (invalid JSON)", func(t *testing.T) {
+		content, err := cp.Compress(compressionAlgorithm, []byte("invalid"))
+		require.NoError(t, err)
+		address, err := cas.Write(content)
+		require.NoError(t, err)
+
+		provider := NewOperationProvider(p, operationparser.New(p), cas, cp)
+		file, err := provider.getCoreProofFile(address)
+		require.Error(t, err)
+		require.Nil(t, file)
+		require.Contains(t, err.Error(), "failed to parse content for core proof file")
+	})
+
+	t.Run("error - validate core proof file (invalid signed data)", func(t *testing.T) {
+		batchFiles, err := generateDefaultBatchFiles()
+		require.NoError(t, err)
+
+		// invalidate signed data for first recover
+		batchFiles.CoreProof.Operations.Recover[0] = "invalid-jws"
+
+		invalid, err := json.Marshal(batchFiles.CoreProof)
+		require.NoError(t, err)
+
+		cas := mocks.NewMockCasClient(nil)
+		content, err := cp.Compress(compressionAlgorithm, invalid)
+		require.NoError(t, err)
+		address, err := cas.Write(content)
+		require.NoError(t, err)
+
+		provider := NewOperationProvider(p, operationparser.New(p), cas, cp)
+		file, err := provider.getCoreProofFile(address)
+		require.Error(t, err)
+		require.Nil(t, file)
+		require.Contains(t, err.Error(), "failed to validate signed data for recover[0]")
+	})
+}
+
+func TestHandler_ValidateCoreProofFile(t *testing.T) {
+	p := mocks.NewMockProtocolClient().Protocol
+
+	t.Run("success", func(t *testing.T) {
+		batchFiles, err := generateDefaultBatchFiles()
+		require.NoError(t, err)
+
+		provider := NewOperationProvider(p, operationparser.New(p), nil, nil)
+		err = provider.validateCoreProofFile(batchFiles.CoreProof)
+		require.NoError(t, err)
+	})
+
+	t.Run("error - invalid signed data for recover", func(t *testing.T) {
+		batchFiles, err := generateDefaultBatchFiles()
+		require.NoError(t, err)
+
+		// invalidate signed data for first recover
+		batchFiles.CoreProof.Operations.Recover[0] = "recover-jws"
+
+		provider := NewOperationProvider(p, operationparser.New(p), nil, nil)
+		err = provider.validateCoreProofFile(batchFiles.CoreProof)
+		require.Error(t, err)
+		require.Contains(t, err.Error(), "failed to validate signed data for recover[0]")
+	})
+
+	t.Run("error - invalid signed data for deactivate", func(t *testing.T) {
+		batchFiles, err := generateDefaultBatchFiles()
+		require.NoError(t, err)
+
+		// invalidate signed data for first deactivate
+		batchFiles.CoreProof.Operations.Deactivate[0] = "deactivate-jws"
+
+		provider := NewOperationProvider(p, operationparser.New(p), nil, nil)
+		err = provider.validateCoreProofFile(batchFiles.CoreProof)
+		require.Error(t, err)
+		require.Contains(t, err.Error(), "failed to validate signed data for deactivate[0]")
+	})
+
+	t.Run("success - validate IPFS CID", func(t *testing.T) {
+		batchFiles, err := generateDefaultBatchFiles()
+		require.NoError(t, err)
+
+		// set valid IPFS CID
+		batchFiles.CoreIndex.CoreProofFileURI = sampleCasURI
+
+		provider := NewOperationProvider(p, operationparser.New(p), nil, nil)
+		err = provider.validateCoreIndexFile(batchFiles.CoreIndex)
+		require.NoError(t, err)
+	})
+
+	t.Run("error - core proof URI too long", func(t *testing.T) {
+		batchFiles, err := generateDefaultBatchFiles()
+		require.NoError(t, err)
+
+		batchFiles.CoreIndex.CoreProofFileURI = longValue
+
+		provider := NewOperationProvider(p, operationparser.New(p), nil, nil)
+		err = provider.validateCoreIndexFile(batchFiles.CoreIndex)
+		require.Error(t, err)
+		require.Contains(t, err.Error(),
+			"core proof URI: CAS URI length[118] exceeds maximum CAS URI length[100]")
+	})
+
+	t.Run("error - provisional index URI too long", func(t *testing.T) {
+		batchFiles, err := generateDefaultBatchFiles()
+		require.NoError(t, err)
+
+		batchFiles.CoreIndex.ProvisionalIndexFileURI = longValue
+
+		provider := NewOperationProvider(p, operationparser.New(p), nil, nil)
+		err = provider.validateCoreIndexFile(batchFiles.CoreIndex)
+		require.Error(t, err)
+		require.Contains(t, err.Error(),
+			"provisional index URI: CAS URI length[118] exceeds maximum CAS URI length[100]")
+	})
+}
+
+func TestHandler_GetProvisionalProofFile(t *testing.T) {
+	cp := compression.New(compression.WithDefaultAlgorithms())
+	p := protocol.Protocol{
+		MaxProofFileSize:             maxFileSize,
+		CompressionAlgorithm:         compressionAlgorithm,
+		MaxMemoryDecompressionFactor: 3,
+	}
+
+	cas := mocks.NewMockCasClient(nil)
+	content, err := cp.Compress(compressionAlgorithm, []byte("{}"))
+	require.NoError(t, err)
+	uri, err := cas.Write(content)
+	require.NoError(t, err)
+
+	t.Run("success", func(t *testing.T) {
+		provider := NewOperationProvider(p, operationparser.New(p), cas, cp)
+
+		file, err := provider.getProvisionalProofFile(uri)
+		require.NoError(t, err)
+		require.NotNil(t, file)
+	})
+
+	t.Run("error - provisional proof file exceeds maximum size", func(t *testing.T) {
+		lowMaxFileSize := protocol.Protocol{MaxProofFileSize: 10, CompressionAlgorithm: compressionAlgorithm}
+		provider := NewOperationProvider(lowMaxFileSize, operationparser.New(p), cas, cp)
+
+		file, err := provider.getProvisionalProofFile(uri)
+		require.Error(t, err)
+		require.Nil(t, file)
+		require.Contains(t, err.Error(), "exceeded maximum size 10")
+	})
+
+	t.Run("error - parse provisional proof file error (invalid JSON)", func(t *testing.T) {
+		content, err := cp.Compress(compressionAlgorithm, []byte("invalid"))
+		require.NoError(t, err)
+		address, err := cas.Write(content)
+		require.NoError(t, err)
+
+		provider := NewOperationProvider(p, operationparser.New(p), cas, cp)
+		file, err := provider.getProvisionalProofFile(address)
+		require.Error(t, err)
+		require.Nil(t, file)
+		require.Contains(t, err.Error(), "failed to parse content for provisional proof file")
+	})
+
+	t.Run("error - validate provisional proof file (invalid signed data)", func(t *testing.T) {
+		batchFiles, err := generateDefaultBatchFiles()
+		require.NoError(t, err)
+
+		// invalidate signed data for first update
+		batchFiles.ProvisionalProof.Operations.Update[0] = "invalid-jws"
+
+		invalid, err := json.Marshal(batchFiles.ProvisionalProof)
+		require.NoError(t, err)
+
+		cas := mocks.NewMockCasClient(nil)
+		content, err := cp.Compress(compressionAlgorithm, invalid)
+		require.NoError(t, err)
+		address, err := cas.Write(content)
+		require.NoError(t, err)
+
+		provider := NewOperationProvider(p, operationparser.New(p), cas, cp)
+		file, err := provider.getProvisionalProofFile(address)
+		require.Error(t, err)
+		require.Nil(t, file)
+		require.Contains(t, err.Error(), "failed to validate signed data for update[0]")
+	})
+}
+
+func TestHandler_ValidateProvisionalProofFile(t *testing.T) {
+	p := mocks.NewMockProtocolClient().Protocol
+
+	t.Run("success", func(t *testing.T) {
+		batchFiles, err := generateDefaultBatchFiles()
+		require.NoError(t, err)
+
+		provider := NewOperationProvider(p, operationparser.New(p), nil, nil)
+		err = provider.validateProvisionalProofFile(batchFiles.ProvisionalProof)
+		require.NoError(t, err)
+	})
+
+	t.Run("error - invalid signed data for update", func(t *testing.T) {
+		batchFiles, err := generateDefaultBatchFiles()
+		require.NoError(t, err)
+
+		// invalidate signed data for first update
+		batchFiles.ProvisionalProof.Operations.Update[0] = "jws"
+
+		provider := NewOperationProvider(p, operationparser.New(p), nil, nil)
+		err = provider.validateProvisionalProofFile(batchFiles.ProvisionalProof)
+		require.Error(t, err)
+		require.Contains(t, err.Error(), "failed to validate signed data for update[0]")
+	})
+}
+
+func TestHandler_GetBatchFiles(t *testing.T) {
+	cp := compression.New(compression.WithDefaultAlgorithms())
+	cas := mocks.NewMockCasClient(nil)
+
+	updateOp, err := generateOperation(1, operation.TypeUpdate)
+	require.NoError(t, err)
+
+	recoverOp, err := generateOperation(2, operation.TypeRecover)
+	require.NoError(t, err)
+
+	ppf := &models.ProvisionalProofFile{
+		Operations: models.ProvisionalProofOperations{
+			Update: []string{updateOp.SignedData},
+		},
+	}
+
+	ppfURI, err := writeToCAS(ppf, cas)
+	require.NoError(t, err)
+
+	cf := &models.ChunkFile{Deltas: []*model.DeltaModel{recoverOp.Delta, updateOp.Delta}}
+
+	chunkURI, err := writeToCAS(cf, cas)
+	require.NoError(t, err)
+
+	pif := &models.ProvisionalIndexFile{
+		Chunks:                  []models.Chunk{{ChunkFileURI: chunkURI}},
+		ProvisionalProofFileURI: ppfURI,
+		Operations: &models.ProvisionalOperations{
+			Update: []models.OperationReference{
+				{
+					DidSuffix:   updateOp.UniqueSuffix,
+					RevealValue: updateOp.RevealValue,
+				},
+			},
+		},
+	}
+
+	pifURI, err := writeToCAS(pif, cas)
+	require.NoError(t, err)
+
+	cpf := &models.CoreProofFile{
+		Operations: models.CoreProofOperations{
+			Recover: []string{recoverOp.SignedData},
+		},
+	}
+
+	cpfURI, err := writeToCAS(cpf, cas)
+	require.NoError(t, err)
+
+	af := &models.CoreIndexFile{
+		ProvisionalIndexFileURI: pifURI,
+		CoreProofFileURI:        
cpfURI, + Operations: &models.CoreOperations{ + Recover: []models.OperationReference{ + { + DidSuffix: recoverOp.UniqueSuffix, + RevealValue: recoverOp.RevealValue, + }, + }, + }, + } + + t.Run("success", func(t *testing.T) { + p := newMockProtocolClient().Protocol + provider := NewOperationProvider(p, operationparser.New(p), cas, cp) + + file, err := provider.getBatchFiles(af) + require.NoError(t, err) + require.NotNil(t, file) + }) + + t.Run("error - retrieve provisional index file", func(t *testing.T) { + p := newMockProtocolClient().Protocol + p.MaxProvisionalIndexFileSize = 10 + + provider := NewOperationProvider(p, operationparser.New(p), cas, cp) + + file, err := provider.getBatchFiles(af) + require.Error(t, err) + require.Nil(t, file) + require.Contains(t, err.Error(), "exceeded maximum size 10") + }) + + t.Run("error - retrieve core proof file", func(t *testing.T) { + p := newMockProtocolClient().Protocol + p.MaxProofFileSize = 7 + + provider := NewOperationProvider(p, operationparser.New(p), cas, cp) + + file, err := provider.getBatchFiles(af) + require.Error(t, err) + require.Nil(t, file) + require.Contains(t, err.Error(), "exceeded maximum size 7") + }) + + t.Run("error - retrieve provisional proof file", func(t *testing.T) { + p := newMockProtocolClient().Protocol + + provider := NewOperationProvider(p, operationparser.New(p), cas, cp) + + content, err := cp.Compress(compressionAlgorithm, []byte("invalid")) + require.NoError(t, err) + + invalidContentURI, err := cas.Write(content) + require.NoError(t, err) + + pif2 := &models.ProvisionalIndexFile{ + Chunks: []models.Chunk{{ChunkFileURI: chunkURI}}, + ProvisionalProofFileURI: invalidContentURI, + Operations: &models.ProvisionalOperations{ + Update: []models.OperationReference{ + { + DidSuffix: updateOp.UniqueSuffix, + RevealValue: updateOp.RevealValue, + }, + }, + }, + } + + provisionalIndexURI, err := writeToCAS(pif2, cas) + require.NoError(t, err) + + af2 := &models.CoreIndexFile{ + ProvisionalIndexFileURI: provisionalIndexURI, + CoreProofFileURI: "", + } + + file, err := provider.getBatchFiles(af2) + require.Error(t, err) + require.Nil(t, file) + require.Contains(t, err.Error(), "failed to unmarshal provisional proof file: invalid character") + }) + + t.Run("error - retrieve chunk file", func(t *testing.T) { + p := newMockProtocolClient().Protocol + p.MaxChunkFileSize = 10 + + provider := NewOperationProvider(p, operationparser.New(p), cas, cp) + + file, err := provider.getBatchFiles(af) + require.Error(t, err) + require.Nil(t, file) + require.Contains(t, err.Error(), "exceeded maximum size 10") + }) + + t.Run("error - missing provisional proof URI", func(t *testing.T) { + p := newMockProtocolClient().Protocol + provider := NewOperationProvider(p, operationparser.New(p), cas, cp) + + updateOp, err := generateOperation(1, operation.TypeUpdate) + require.NoError(t, err) + + pif2 := &models.ProvisionalIndexFile{ + ProvisionalProofFileURI: "", + Operations: &models.ProvisionalOperations{ + Update: []models.OperationReference{ + { + DidSuffix: updateOp.UniqueSuffix, + RevealValue: updateOp.RevealValue, + }, + }, + }, + } + + pif2URI, err := writeToCAS(pif2, cas) + require.NoError(t, err) + + cif := &models.CoreIndexFile{ + ProvisionalIndexFileURI: pif2URI, + } + + file, err := provider.getBatchFiles(cif) + require.Error(t, err) + require.Nil(t, file) + require.Contains(t, err.Error(), "missing provisional proof file URI") + }) + + t.Run("error - validate batch counts", func(t *testing.T) { + p := newMockProtocolClient().Protocol 
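+		// build a core index file that references one recover operation while the
+		// core proof file below contains none, so the batch count check must fail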
+ provider := NewOperationProvider(p, operationparser.New(p), cas, cp) + + recoverOp, err := generateOperation(3, operation.TypeRecover) + require.NoError(t, err) + + cpf := &models.CoreProofFile{ + Operations: models.CoreProofOperations{ + Recover: []string{}, + }, + } + + cpfBytes, err := json.Marshal(cpf) + require.NoError(t, err) + + compressed, err := cp.Compress(compressionAlgorithm, cpfBytes) + require.NoError(t, err) + cpfURI, err := cas.Write(compressed) + require.NoError(t, err) + + cif := &models.CoreIndexFile{ + CoreProofFileURI: cpfURI, + Operations: &models.CoreOperations{ + Recover: []models.OperationReference{ + { + DidSuffix: recoverOp.UniqueSuffix, + RevealValue: recoverOp.RevealValue, + }, + }, + }, + } + + file, err := provider.getBatchFiles(cif) + require.Error(t, err) + require.Nil(t, file) + require.Contains(t, err.Error(), + "number of recover ops[1] in core index doesn't match number of recover ops[0] in core proof") + }) + + t.Run("error - provisional index file is missing chunk file URI", func(t *testing.T) { + p := newMockProtocolClient().Protocol + provider := NewOperationProvider(p, operationparser.New(p), cas, cp) + + missingChunkURI, err := writeToCAS(&models.ProvisionalIndexFile{}, cas) + require.NoError(t, err) + + file, err := provider.getBatchFiles(&models.CoreIndexFile{ + ProvisionalIndexFileURI: missingChunkURI, + }) + require.Error(t, err) + require.Nil(t, file) + require.Contains(t, err.Error(), "provisional index file is missing chunk file URI") + }) +} + +func TestHandler_assembleBatchOperations(t *testing.T) { + p := newMockProtocolClient().Protocol + + t.Run("success", func(t *testing.T) { + provider := NewOperationProvider(p, operationparser.New(p), nil, nil) + + batchFiles, err := generateDefaultBatchFiles() + require.NoError(t, err) + + anchoredOps, err := provider.assembleAnchoredOperations(batchFiles, &txn.SidetreeTxn{Namespace: defaultNS}) + require.NoError(t, err) + require.Equal(t, 4, len(anchoredOps)) + }) + + t.Run("error - recover signed data error ", func(t *testing.T) { + provider := NewOperationProvider(p, operationparser.New(p), nil, nil) + + batchFiles, err := generateDefaultBatchFiles() + require.NoError(t, err) + + batchFiles.CoreProof.Operations.Recover[0] = "" + + anchoredOps, err := provider.assembleAnchoredOperations(batchFiles, &txn.SidetreeTxn{Namespace: defaultNS}) + require.Error(t, err) + require.Nil(t, anchoredOps) + require.Contains(t, err.Error(), "failed to validate signed data for recover[0]: missing signed data") + }) + + t.Run("error - core/provisional index, chunk file operation number mismatch", func(t *testing.T) { + provider := NewOperationProvider(p, operationparser.New(p), nil, nil) + + createOp, err := generateOperation(1, operation.TypeCreate) + require.NoError(t, err) + + updateOp, err := generateOperation(2, operation.TypeUpdate) + require.NoError(t, err) + + deactivateOp, err := generateOperation(3, operation.TypeDeactivate) + require.NoError(t, err) + + cif := &models.CoreIndexFile{ + ProvisionalIndexFileURI: "hash", + Operations: &models.CoreOperations{ + Create: []models.CreateReference{{SuffixData: createOp.SuffixData}}, + Deactivate: []models.OperationReference{ + {DidSuffix: deactivateOp.UniqueSuffix, RevealValue: deactivateOp.RevealValue}, + }, + }, + } + + pif := &models.ProvisionalIndexFile{ + Chunks: []models.Chunk{}, + Operations: &models.ProvisionalOperations{ + Update: []models.OperationReference{ + {DidSuffix: updateOp.UniqueSuffix, RevealValue: updateOp.RevealValue}}, + }, + } + + // 
don't add update operation delta to chunk file in order to cause error + cf := &models.ChunkFile{Deltas: []*model.DeltaModel{createOp.Delta}} + + cpf := &models.CoreProofFile{ + Operations: models.CoreProofOperations{ + Deactivate: []string{deactivateOp.SignedData}, + }, + } + + ppf := &models.ProvisionalProofFile{ + Operations: models.ProvisionalProofOperations{ + Update: []string{updateOp.SignedData}, + }, + } + + batchFiles := &batchFiles{ + CoreIndex: cif, + CoreProof: cpf, + ProvisionalIndex: pif, + ProvisionalProof: ppf, + Chunk: cf, + } + + anchoredOps, err := provider.assembleAnchoredOperations(batchFiles, &txn.SidetreeTxn{Namespace: defaultNS}) + require.Error(t, err) + require.Nil(t, anchoredOps) + require.Contains(t, err.Error(), + "number of create+recover+update operations[2] doesn't match number of deltas[1]") + }) + + t.Run("error - duplicate operations found in core/provisional index files", func(t *testing.T) { + provider := NewOperationProvider(p, operationparser.New(p), nil, nil) + + createOp, err := generateOperation(1, operation.TypeCreate) + require.NoError(t, err) + + updateOp, err := generateOperation(2, operation.TypeUpdate) + require.NoError(t, err) + + deactivateOp, err := generateOperation(3, operation.TypeDeactivate) + require.NoError(t, err) + + cif := &models.CoreIndexFile{ + ProvisionalIndexFileURI: "hash", + Operations: &models.CoreOperations{ + Create: []models.CreateReference{{SuffixData: createOp.SuffixData}}, + Deactivate: []models.OperationReference{ + {DidSuffix: "test-suffix", RevealValue: deactivateOp.RevealValue}, + }, + }, + } + + pif := &models.ProvisionalIndexFile{ + Chunks: []models.Chunk{}, + Operations: &models.ProvisionalOperations{ + Update: []models.OperationReference{ + {DidSuffix: "test-suffix", RevealValue: updateOp.RevealValue}, + }, + }, + } + + cf := &models.ChunkFile{Deltas: []*model.DeltaModel{createOp.Delta}} + + cpf := &models.CoreProofFile{ + Operations: models.CoreProofOperations{ + Deactivate: []string{deactivateOp.SignedData, deactivateOp.SignedData}, + }, + } + + batchFiles := &batchFiles{ + CoreIndex: cif, + CoreProof: cpf, + ProvisionalIndex: pif, + Chunk: cf, + } + + anchoredOps, err := provider.assembleAnchoredOperations(batchFiles, &txn.SidetreeTxn{Namespace: defaultNS}) + require.Error(t, err) + require.Nil(t, anchoredOps) + require.Contains(t, err.Error(), + "check for duplicate suffixes in core/provisional index files: duplicate values found [test-suffix]") + }) + + t.Run("error - duplicate operations found in core index file", func(t *testing.T) { + provider := NewOperationProvider(p, operationparser.New(p), nil, nil) + + createOp, err := generateOperation(1, operation.TypeCreate) + require.NoError(t, err) + + updateOp, err := generateOperation(2, operation.TypeUpdate) + require.NoError(t, err) + + deactivateOp, err := generateOperation(3, operation.TypeDeactivate) + require.NoError(t, err) + + cif := &models.CoreIndexFile{ + ProvisionalIndexFileURI: "hash", + Operations: &models.CoreOperations{ + Create: []models.CreateReference{{SuffixData: createOp.SuffixData}}, + Deactivate: []models.OperationReference{ + {DidSuffix: deactivateOp.UniqueSuffix, RevealValue: deactivateOp.RevealValue}, + {DidSuffix: deactivateOp.UniqueSuffix, RevealValue: deactivateOp.RevealValue}, + }, + }, + } + + pif := &models.ProvisionalIndexFile{ + Chunks: []models.Chunk{}, + Operations: &models.ProvisionalOperations{ + Update: []models.OperationReference{ + {DidSuffix: updateOp.UniqueSuffix, RevealValue: updateOp.RevealValue}, + }, + }, + } 
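+		// both deactivate references above share the same suffix ("deactivate-3"),
+		// which the duplicate check on the core index file reports below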
+
+	t.Run("error - duplicate operations found in core index file", func(t *testing.T) {
+		provider := NewOperationProvider(p, operationparser.New(p), nil, nil)
+
+		createOp, err := generateOperation(1, operation.TypeCreate)
+		require.NoError(t, err)
+
+		updateOp, err := generateOperation(2, operation.TypeUpdate)
+		require.NoError(t, err)
+
+		deactivateOp, err := generateOperation(3, operation.TypeDeactivate)
+		require.NoError(t, err)
+
+		cif := &models.CoreIndexFile{
+			ProvisionalIndexFileURI: "hash",
+			Operations: &models.CoreOperations{
+				Create: []models.CreateReference{{SuffixData: createOp.SuffixData}},
+				Deactivate: []models.OperationReference{
+					{DidSuffix: deactivateOp.UniqueSuffix, RevealValue: deactivateOp.RevealValue},
+					{DidSuffix: deactivateOp.UniqueSuffix, RevealValue: deactivateOp.RevealValue},
+				},
+			},
+		}
+
+		pif := &models.ProvisionalIndexFile{
+			Chunks: []models.Chunk{},
+			Operations: &models.ProvisionalOperations{
+				Update: []models.OperationReference{
+					{DidSuffix: updateOp.UniqueSuffix, RevealValue: updateOp.RevealValue},
+				},
+			},
+		}
+
+		cf := &models.ChunkFile{Deltas: []*model.DeltaModel{createOp.Delta}}
+
+		cpf := &models.CoreProofFile{
+			Operations: models.CoreProofOperations{
+				Deactivate: []string{deactivateOp.SignedData, deactivateOp.SignedData},
+			},
+		}
+
+		batchFiles := &batchFiles{
+			CoreIndex:        cif,
+			CoreProof:        cpf,
+			ProvisionalIndex: pif,
+			Chunk:            cf,
+		}
+
+		anchoredOps, err := provider.assembleAnchoredOperations(batchFiles, &txn.SidetreeTxn{Namespace: defaultNS})
+		require.Error(t, err)
+		require.Nil(t, anchoredOps)
+		require.Contains(t, err.Error(),
+			"check for duplicate suffixes in core index files: duplicate values found [deactivate-3]")
+	})
+}
+
+func TestValidateBatchFileCounts(t *testing.T) {
+	t.Run("success", func(t *testing.T) {
+		batchFiles, err := generateDefaultBatchFiles()
+		require.NoError(t, err)
+
+		err = validateBatchFileCounts(batchFiles)
+		require.NoError(t, err)
+	})
+
+	t.Run("error - deactivate ops number mismatch", func(t *testing.T) {
+		batchFiles, err := generateDefaultBatchFiles()
+		require.NoError(t, err)
+
+		batchFiles.CoreProof.Operations.Deactivate = []string{}
+
+		err = validateBatchFileCounts(batchFiles)
+		require.Error(t, err)
+		require.Contains(t, err.Error(),
+			"number of deactivate ops[1] in core index doesn't match number of deactivate ops[0] in core proof")
+	})
+
+	t.Run("error - recover ops number mismatch", func(t *testing.T) {
+		batchFiles, err := generateDefaultBatchFiles()
+		require.NoError(t, err)
+
+		batchFiles.CoreProof.Operations.Recover = []string{}
+
+		err = validateBatchFileCounts(batchFiles)
+		require.Error(t, err)
+		require.Contains(t, err.Error(), "number of recover ops[1] in core index "+
+			"doesn't match number of recover ops[0] in core proof")
+	})
+
+	t.Run("error - update ops number mismatch", func(t *testing.T) {
+		batchFiles, err := generateDefaultBatchFiles()
+		require.NoError(t, err)
+
+		batchFiles.ProvisionalProof.Operations.Update = []string{}
+
+		err = validateBatchFileCounts(batchFiles)
+		require.Error(t, err)
+		require.Contains(t, err.Error(), "number of update ops[1] in provisional "+
+			"index doesn't match number of update ops[0] in provisional proof")
+	})
+
+	t.Run("error - delta mismatch", func(t *testing.T) {
+		batchFiles, err := generateDefaultBatchFiles()
+		require.NoError(t, err)
+
+		batchFiles.Chunk.Deltas = []*model.DeltaModel{}
+
+		err = validateBatchFileCounts(batchFiles)
+		require.Error(t, err)
+		require.Contains(t, err.Error(),
+			"number of create+recover+update operations[3] doesn't match number of deltas[0]")
+	})
+}
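The mismatch errors above pin down the invariants between the batch files: per-type operation references must match their proof entries, and every create/recover/update operation must carry a delta in the chunk file. A hedged sketch of those checks using simplified stand-in types (the real validateBatchFileCounts operates on the models types shown above):

```go
package main

import "fmt"

// fileCounts is a simplified stand-in for the batch file models above.
type fileCounts struct {
	createRefs, recoverRefs, deactivateRefs int // core index file
	updateRefs                              int // provisional index file
	recoverProofs, deactivateProofs         int // core proof file
	updateProofs                            int // provisional proof file
	deltas                                  int // chunk file
}

func validateCounts(c fileCounts) error {
	if c.deactivateRefs != c.deactivateProofs {
		return fmt.Errorf("number of deactivate ops[%d] in core index doesn't match "+
			"number of deactivate ops[%d] in core proof", c.deactivateRefs, c.deactivateProofs)
	}

	if c.recoverRefs != c.recoverProofs {
		return fmt.Errorf("number of recover ops[%d] in core index doesn't match "+
			"number of recover ops[%d] in core proof", c.recoverRefs, c.recoverProofs)
	}

	if c.updateRefs != c.updateProofs {
		return fmt.Errorf("number of update ops[%d] in provisional index doesn't match "+
			"number of update ops[%d] in provisional proof", c.updateRefs, c.updateProofs)
	}

	// Deactivate operations carry no delta, so they are excluded here.
	if ops := c.createRefs + c.recoverRefs + c.updateRefs; ops != c.deltas {
		return fmt.Errorf("number of create+recover+update operations[%d] doesn't match "+
			"number of deltas[%d]", ops, c.deltas)
	}

	return nil
}

func main() {
	// Matches generateDefaultBatchFiles: one op of each type, three deltas.
	fmt.Println(validateCounts(fileCounts{1, 1, 1, 1, 1, 1, 1, 3}))
}
```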
+
+func generateDefaultBatchFiles() (*batchFiles, error) {
+	createOp, err := generateOperation(1, operation.TypeCreate)
+	if err != nil {
+		return nil, err
+	}
+
+	updateOp, err := generateOperation(2, operation.TypeUpdate)
+	if err != nil {
+		return nil, err
+	}
+
+	recoverOp, err := generateOperation(3, operation.TypeRecover)
+	if err != nil {
+		return nil, err
+	}
+
+	deactivateOp, err := generateOperation(4, operation.TypeDeactivate)
+	if err != nil {
+		return nil, err
+	}
+
+	cif := &models.CoreIndexFile{
+		ProvisionalIndexFileURI: "provisionalIndexURI",
+		CoreProofFileURI:        "coreProofURI",
+		Operations: &models.CoreOperations{
+			Create: []models.CreateReference{{SuffixData: createOp.SuffixData}},
+			Recover: []models.OperationReference{
+				{
+					DidSuffix:   recoverOp.UniqueSuffix,
+					RevealValue: recoverOp.RevealValue,
+				},
+			},
+			Deactivate: []models.OperationReference{
+				{
+					DidSuffix:   deactivateOp.UniqueSuffix,
+					RevealValue: deactivateOp.RevealValue,
+				},
+			},
+		},
+	}
+
+	pif := &models.ProvisionalIndexFile{
+		Chunks:                  []models.Chunk{{ChunkFileURI: "chunkURI"}},
+		ProvisionalProofFileURI: "provisionalProofURI",
+		Operations: &models.ProvisionalOperations{
+			Update: []models.OperationReference{
+				{
+					DidSuffix:   updateOp.UniqueSuffix,
+					RevealValue: updateOp.RevealValue,
+				},
+			},
+		},
+	}
+
+	cf := &models.ChunkFile{Deltas: []*model.DeltaModel{createOp.Delta, recoverOp.Delta, updateOp.Delta}}
+
+	cpf := &models.CoreProofFile{
+		Operations: models.CoreProofOperations{
+			Recover:    []string{recoverOp.SignedData},
+			Deactivate: []string{deactivateOp.SignedData},
+		},
+	}
+
+	ppf := &models.ProvisionalProofFile{
+		Operations: models.ProvisionalProofOperations{
+			Update: []string{updateOp.SignedData},
+		},
+	}
+
+	return &batchFiles{
+		CoreIndex:        cif,
+		CoreProof:        cpf,
+		ProvisionalIndex: pif,
+		ProvisionalProof: ppf,
+		Chunk:            cf,
+	}, nil
+}
+
+func writeToCAS(value interface{}, cas cas.Client) (string, error) {
+	bytes, err := canonicalizer.MarshalCanonical(value)
+	if err != nil {
+		return "", err
+	}
+
+	cp := compression.New(compression.WithDefaultAlgorithms())
+
+	compressed, err := cp.Compress(compressionAlgorithm, bytes)
+	if err != nil {
+		return "", err
+	}
+
+	return cas.Write(compressed)
+}
+
+func newMockProtocolClient() *mocks.MockProtocolClient {
+	pc := mocks.NewMockProtocolClient()
+	parser := operationparser.New(pc.Protocol)
+	dc := doccomposer.New()
+
+	pv := pc.CurrentVersion
+	pv.OperationParserReturns(parser)
+	pv.DocumentComposerReturns(dc)
+
+	return pc
+}
+
+//nolint:lll
+const sampleChunkFile = `{"chunks":[{"chunkFileUri":"EiDkiD-FuKC5mcsY4m0pd3OMTP7FAfo690gzN7-6JxcN1g"}],"operations":{"update":[{"didSuffix":"update-1","revealValue":"EiAdqFJ-x5QhwPq62DB9EfenKloqntykHJkZrwI6uxkoVQ"}]},"provisionalProofFileUri":"EiDdEHTL3VmFZO5hXoth8vTKnXgvfvW4lLJXyMjqs7ezUA"}`
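writeToCAS chains three steps: JCS canonicalization (the MarshalCanonical helper introduced earlier in this change), compression, and a write to content-addressable storage that returns the file's address. A self-contained approximation of the first two steps using only the standard library; plain json.Marshal and gzip stand in for the JCS canonicalizer and the pluggable compression provider, which is an assumption of this sketch, not the helper's actual wiring:

```go
package main

import (
	"bytes"
	"compress/gzip"
	"encoding/json"
	"fmt"
)

// marshalAndCompress approximates the first two stages of writeToCAS:
// serialize the value deterministically, then compress the result before
// it would be handed to a CAS client.
func marshalAndCompress(value interface{}) ([]byte, error) {
	b, err := json.Marshal(value)
	if err != nil {
		return nil, err
	}

	var buf bytes.Buffer

	zw := gzip.NewWriter(&buf)
	if _, err := zw.Write(b); err != nil {
		return nil, err
	}

	if err := zw.Close(); err != nil {
		return nil, err
	}

	return buf.Bytes(), nil
}

func main() {
	out, err := marshalAndCompress(map[string]string{"beta": "b", "alpha": "a"})
	fmt.Println(len(out), err)
}
```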
diff --git a/method/sidetreelongform/sidetree/api/api.go b/method/sidetreelongform/sidetree/api/api.go
index fd4beae..0696a8f 100644
--- a/method/sidetreelongform/sidetree/api/api.go
+++ b/method/sidetreelongform/sidetree/api/api.go
@@ -6,7 +6,7 @@ SPDX-License-Identifier: Apache-2.0
 // Package api include interface
 package api
 
-import "github.com/trustbloc/sidetree-core-go/pkg/jws"
+import "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/jws"
 
 // Signer defines JWS Signer interface that will be used to sign required data in Sidetree request.
 type Signer interface {
diff --git a/method/sidetreelongform/sidetree/client.go b/method/sidetreelongform/sidetree/client.go
index ce4cf29..ddda70a 100644
--- a/method/sidetreelongform/sidetree/client.go
+++ b/method/sidetreelongform/sidetree/client.go
@@ -18,11 +18,11 @@ import (
 	"os"
 	"strings"
 
-	"github.com/trustbloc/sidetree-core-go/pkg/commitment"
-	"github.com/trustbloc/sidetree-core-go/pkg/hashing"
-	"github.com/trustbloc/sidetree-core-go/pkg/patch"
-	"github.com/trustbloc/sidetree-core-go/pkg/util/pubkey"
-	"github.com/trustbloc/sidetree-core-go/pkg/versions/1_0/client"
+	"github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/commitment"
+	"github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/hashing"
+	"github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/patch"
+	"github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/util/pubkey"
+	"github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/versions/1_0/client"
 
 	docdid "github.com/trustbloc/did-go/doc/did"
 	"github.com/trustbloc/did-go/method/sidetreelongform/sidetree/doc"
diff --git a/method/sidetreelongform/sidetree/client_test.go b/method/sidetreelongform/sidetree/client_test.go
index 8d2c240..181c58a 100644
--- a/method/sidetreelongform/sidetree/client_test.go
+++ b/method/sidetreelongform/sidetree/client_test.go
@@ -21,12 +21,13 @@ import (
 	gojose "github.com/go-jose/go-jose/v3"
 	"github.com/stretchr/testify/require"
 	"github.com/trustbloc/kms-go/doc/jose/jwk"
-	"github.com/trustbloc/sidetree-core-go/pkg/commitment"
-	"github.com/trustbloc/sidetree-core-go/pkg/jws"
-	"github.com/trustbloc/sidetree-core-go/pkg/util/ecsigner"
-	"github.com/trustbloc/sidetree-core-go/pkg/util/edsigner"
-	"github.com/trustbloc/sidetree-core-go/pkg/util/pubkey"
-	"github.com/trustbloc/sidetree-core-go/pkg/versions/1_0/client"
+
+	"github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/commitment"
+	"github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/jws"
+	"github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/util/ecsigner"
+	"github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/util/edsigner"
+	"github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/util/pubkey"
+	"github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/versions/1_0/client"
 
 	"github.com/trustbloc/did-go/doc/did"
 	model "github.com/trustbloc/did-go/doc/did/endpoint"
diff --git a/method/sidetreelongform/vdr.go b/method/sidetreelongform/vdr.go
index 9d06678..6a42ef1 100644
--- a/method/sidetreelongform/vdr.go
+++ b/method/sidetreelongform/vdr.go
@@ -14,9 +14,10 @@ import (
 	"github.com/btcsuite/btcutil/base58"
 	jsonld "github.com/piprate/json-gold/ld"
+
 	ld "github.com/trustbloc/did-go/doc/ld/documentloader"
 	ldstore "github.com/trustbloc/did-go/doc/ld/store"
-	"github.com/trustbloc/sidetree-core-go/pkg/document"
+	"github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/document"
 
 	docdid "github.com/trustbloc/did-go/doc/did"
 	"github.com/trustbloc/did-go/legacy/mem"
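For context on the rewritten client.go imports: the commitment and pubkey packages are what the client uses to derive update/recovery commitments from keys. A hedged sketch of that flow; the function names and signatures (pubkey.GetPublicKeyJWK, commitment.GetCommitment) and the sha2-256 multihash code 18 are assumptions carried over from the upstream sidetree-core-go packages these files were copied from, not confirmed by this patch:

```go
package main

import (
	"crypto/ecdsa"
	"crypto/elliptic"
	"crypto/rand"
	"fmt"

	"github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/commitment"
	"github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/util/pubkey"
)

func main() {
	priv, err := ecdsa.GenerateKey(elliptic.P256(), rand.Reader)
	if err != nil {
		panic(err)
	}

	// Convert the raw public key into the JWK form sidetree-core works with
	// (assumed signature: GetPublicKeyJWK(pubKey interface{}) (*jws.JWK, error)).
	jwkKey, err := pubkey.GetPublicKeyJWK(&priv.PublicKey)
	if err != nil {
		panic(err)
	}

	// sha2-256 multihash code (illustrative; the protocol config supplies it).
	const sha2_256 = 18

	// Assumed signature: GetCommitment(key *jws.JWK, multihashCode uint) (string, error).
	c, err := commitment.GetCommitment(jwkKey, sha2_256)
	if err != nil {
		panic(err)
	}

	fmt.Println("commitment:", c)
}
```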
mockldstore "github.com/trustbloc/did-go/doc/ld/mock" ldstore "github.com/trustbloc/did-go/doc/ld/store" + "github.com/trustbloc/did-go/method/sidetreelongform/sidetree-core/document" "github.com/trustbloc/did-go/method/sidetreelongform/sidetree/option/create" - "github.com/trustbloc/kms-go/crypto/primitive/bbs12381g2pub" - "github.com/trustbloc/kms-go/doc/jose/jwk" - "github.com/trustbloc/kms-go/doc/jose/jwk/jwksupport" - "github.com/trustbloc/sidetree-core-go/pkg/document" ariesdid "github.com/trustbloc/did-go/doc/did" model "github.com/trustbloc/did-go/doc/did/endpoint" diff --git a/scripts/check_unit.sh b/scripts/check_unit.sh index 8fa68d8..a8680b7 100755 --- a/scripts/check_unit.sh +++ b/scripts/check_unit.sh @@ -21,7 +21,7 @@ fi # Running wallet-sdk unit tests PKGS=`go list github.com/trustbloc/did-go/... 2> /dev/null | \ grep -v /mocks` -go test $PKGS -count=1 -race -coverprofile=profile.out -covermode=atomic -timeout=10m +go test $PKGS -count=1 -race -coverprofile=profile.out -covermode=atomic -timeout=10m -tags testing amend_coverage_file