From 5831d4c2af6d61b7c42cfc67f8c06d5d8f4b0751 Mon Sep 17 00:00:00 2001 From: Bob Stasyszyn Date: Thu, 28 Sep 2023 09:04:06 -0400 Subject: [PATCH] chore: Import common code from sidetree-core-go Import the common code from sidetree-core-go that is used by both Sidetree clients and servers. Signed-off-by: Bob Stasyszyn --- README.md | 2 +- go.mod | 34 + go.sum | 123 ++ pkg/api/operation/models.go | 84 + pkg/api/protocol/protocol.go | 154 ++ pkg/canonicalizer/canonicalizer.go | 29 + pkg/canonicalizer/canonicalizer_test.go | 43 + pkg/commitment/hash.go | 73 + pkg/commitment/hash_test.go | 114 ++ pkg/document/diddocument.go | 193 +++ pkg/document/diddocument_test.go | 115 ++ pkg/document/document.go | 101 ++ pkg/document/document_test.go | 94 ++ pkg/document/jwk.go | 57 + pkg/document/jwk_test.go | 74 + pkg/document/publickey.go | 102 ++ pkg/document/publickey_test.go | 59 + pkg/document/replace.go | 54 + pkg/document/replace_test.go | 55 + pkg/document/resolution.go | 106 ++ pkg/document/resolution_test.go | 27 + pkg/document/service.go | 38 + pkg/document/service_test.go | 29 + pkg/document/testdata/invalid-lists.json | 14 + pkg/document/testdata/pk-doc.json | 27 + pkg/document/testdata/vm-doc.json | 15 + pkg/docutil/doc.go | 40 + pkg/docutil/doc_test.go | 75 + pkg/docutil/docutil.go | 119 ++ pkg/encoder/encoder.go | 19 + pkg/encoder/encoder_test.go | 24 + pkg/hashing/hash.go | 144 ++ pkg/hashing/hash_test.go | 182 +++ pkg/internal/jsoncanonicalizer/README.md | 4 + pkg/internal/jsoncanonicalizer/es6numfmt.go | 92 ++ .../jsoncanonicalizer/jsoncanonicalizer.go | 377 +++++ pkg/internal/log/fields.go | 332 ++++ pkg/internal/log/fields_test.go | 203 +++ pkg/jws/header.go | 98 ++ pkg/jws/header_test.go | 38 + pkg/jws/jwk.go | 35 + pkg/jws/jwk_test.go | 54 + pkg/jwsutil/jwk.go | 293 ++++ pkg/jwsutil/jwk_test.go | 281 ++++ pkg/jwsutil/jws.go | 296 ++++ pkg/jwsutil/jws_test.go | 279 ++++ pkg/jwsutil/signature.go | 168 ++ pkg/jwsutil/signature_test.go | 285 ++++ pkg/mocks/documentcomposer.gen.go | 122 ++ pkg/mocks/documenttransformer.gen.go | 117 ++ pkg/mocks/documentvalidator.gen.go | 191 +++ pkg/mocks/operationapplier.gen.go | 117 ++ pkg/mocks/operationparser.gen.go | 375 +++++ pkg/mocks/protocol.go | 154 ++ pkg/mocks/protocolversion.gen.go | 425 +++++ pkg/patch/patch.go | 450 ++++++ pkg/patch/patch_test.go | 708 +++++++++ pkg/util/ecsigner/signer.go | 99 ++ pkg/util/ecsigner/signer_test.go | 109 ++ pkg/util/edsigner/signer.go | 50 + pkg/util/edsigner/signer_test.go | 74 + pkg/util/json/json.go | 98 ++ pkg/util/json/json_test.go | 168 ++ pkg/util/json/test_exports.go | 53 + pkg/util/pubkey/jwk.go | 65 + pkg/util/pubkey/jwk_test.go | 78 + pkg/util/signutil/signature.go | 50 + pkg/util/signutil/signature_test.go | 107 ++ pkg/versions/1_0/client/create.go | 124 ++ pkg/versions/1_0/client/create_test.go | 185 +++ pkg/versions/1_0/client/deactivate.go | 122 ++ pkg/versions/1_0/client/deactivate_test.go | 171 ++ pkg/versions/1_0/client/recover.go | 160 ++ pkg/versions/1_0/client/recover_test.go | 205 +++ pkg/versions/1_0/client/update.go | 122 ++ pkg/versions/1_0/client/update_test.go | 178 +++ pkg/versions/1_0/doccomposer/composer.go | 359 +++++ pkg/versions/1_0/doccomposer/composer_test.go | 635 ++++++++ .../didtransformer/testdata/doc.json | 100 ++ .../didtransformer/transformer.go | 364 +++++ .../didtransformer/transformer_test.go | 817 ++++++++++ .../doctransformer/transformer.go | 77 + .../doctransformer/transformer_test.go | 158 ++ .../1_0/doctransformer/metadata/metadata.go | 229 +++ 
.../doctransformer/metadata/metadata_test.go | 219 +++ .../didvalidator/testdata/doc.json | 144 ++ .../docvalidator/didvalidator/validator.go | 63 + .../didvalidator/validator_test.go | 90 ++ .../docvalidator/docvalidator/validator.go | 58 + .../docvalidator/validator_test.go | 67 + pkg/versions/1_0/model/operation.go | 45 + pkg/versions/1_0/model/request.go | 167 ++ pkg/versions/1_0/model/util.go | 88 ++ pkg/versions/1_0/model/util_test.go | 141 ++ .../1_0/operationapplier/operationapplier.go | 387 +++++ .../operationapplier/operationapplier_test.go | 1374 +++++++++++++++++ .../1_0/operationparser/commitment.go | 49 + .../1_0/operationparser/commitment_test.go | 295 ++++ pkg/versions/1_0/operationparser/create.go | 169 ++ .../1_0/operationparser/create_test.go | 412 +++++ .../1_0/operationparser/deactivate.go | 101 ++ .../1_0/operationparser/deactivate_test.go | 244 +++ pkg/versions/1_0/operationparser/method.go | 84 + .../1_0/operationparser/method_test.go | 109 ++ pkg/versions/1_0/operationparser/operation.go | 171 ++ .../1_0/operationparser/operation_test.go | 255 +++ .../operationparser/patchvalidator/addkeys.go | 40 + .../patchvalidator/addkeys_test.go | 52 + .../patchvalidator/addservices.go | 40 + .../patchvalidator/addservices_test.go | 60 + .../patchvalidator/alsoknownas.go | 70 + .../patchvalidator/alsoknownas_test.go | 72 + .../patchvalidator/document.go | 378 +++++ .../patchvalidator/document_test.go | 755 +++++++++ .../operationparser/patchvalidator/ietf.go | 76 + .../patchvalidator/ietf_test.go | 86 ++ .../patchvalidator/removekeys.go | 38 + .../patchvalidator/removekeys_test.go | 50 + .../patchvalidator/removeservices.go | 38 + .../patchvalidator/removeservices_test.go | 50 + .../operationparser/patchvalidator/replace.go | 66 + .../patchvalidator/replace_test.go | 125 ++ .../patchvalidator/testdata/doc.json | 144 ++ .../patchvalidator/validator.go | 34 + .../patchvalidator/validator_test.go | 82 + pkg/versions/1_0/operationparser/recover.go | 268 ++++ .../1_0/operationparser/recover_test.go | 569 +++++++ pkg/versions/1_0/operationparser/update.go | 117 ++ .../1_0/operationparser/update_test.go | 322 ++++ 129 files changed, 21133 insertions(+), 1 deletion(-) create mode 100644 pkg/api/operation/models.go create mode 100644 pkg/api/protocol/protocol.go create mode 100644 pkg/canonicalizer/canonicalizer.go create mode 100644 pkg/canonicalizer/canonicalizer_test.go create mode 100644 pkg/commitment/hash.go create mode 100644 pkg/commitment/hash_test.go create mode 100644 pkg/document/diddocument.go create mode 100644 pkg/document/diddocument_test.go create mode 100644 pkg/document/document.go create mode 100644 pkg/document/document_test.go create mode 100644 pkg/document/jwk.go create mode 100644 pkg/document/jwk_test.go create mode 100644 pkg/document/publickey.go create mode 100644 pkg/document/publickey_test.go create mode 100644 pkg/document/replace.go create mode 100644 pkg/document/replace_test.go create mode 100644 pkg/document/resolution.go create mode 100644 pkg/document/resolution_test.go create mode 100644 pkg/document/service.go create mode 100644 pkg/document/service_test.go create mode 100644 pkg/document/testdata/invalid-lists.json create mode 100644 pkg/document/testdata/pk-doc.json create mode 100644 pkg/document/testdata/vm-doc.json create mode 100644 pkg/docutil/doc.go create mode 100644 pkg/docutil/doc_test.go create mode 100644 pkg/docutil/docutil.go create mode 100644 pkg/encoder/encoder.go create mode 100644 pkg/encoder/encoder_test.go create mode 100644 
pkg/hashing/hash.go create mode 100644 pkg/hashing/hash_test.go create mode 100644 pkg/internal/jsoncanonicalizer/README.md create mode 100644 pkg/internal/jsoncanonicalizer/es6numfmt.go create mode 100644 pkg/internal/jsoncanonicalizer/jsoncanonicalizer.go create mode 100644 pkg/internal/log/fields.go create mode 100644 pkg/internal/log/fields_test.go create mode 100644 pkg/jws/header.go create mode 100644 pkg/jws/header_test.go create mode 100644 pkg/jws/jwk.go create mode 100644 pkg/jws/jwk_test.go create mode 100644 pkg/jwsutil/jwk.go create mode 100644 pkg/jwsutil/jwk_test.go create mode 100644 pkg/jwsutil/jws.go create mode 100644 pkg/jwsutil/jws_test.go create mode 100644 pkg/jwsutil/signature.go create mode 100644 pkg/jwsutil/signature_test.go create mode 100644 pkg/mocks/documentcomposer.gen.go create mode 100644 pkg/mocks/documenttransformer.gen.go create mode 100644 pkg/mocks/documentvalidator.gen.go create mode 100644 pkg/mocks/operationapplier.gen.go create mode 100644 pkg/mocks/operationparser.gen.go create mode 100644 pkg/mocks/protocol.go create mode 100644 pkg/mocks/protocolversion.gen.go create mode 100644 pkg/patch/patch.go create mode 100644 pkg/patch/patch_test.go create mode 100644 pkg/util/ecsigner/signer.go create mode 100644 pkg/util/ecsigner/signer_test.go create mode 100644 pkg/util/edsigner/signer.go create mode 100644 pkg/util/edsigner/signer_test.go create mode 100644 pkg/util/json/json.go create mode 100644 pkg/util/json/json_test.go create mode 100644 pkg/util/json/test_exports.go create mode 100644 pkg/util/pubkey/jwk.go create mode 100644 pkg/util/pubkey/jwk_test.go create mode 100644 pkg/util/signutil/signature.go create mode 100644 pkg/util/signutil/signature_test.go create mode 100644 pkg/versions/1_0/client/create.go create mode 100644 pkg/versions/1_0/client/create_test.go create mode 100644 pkg/versions/1_0/client/deactivate.go create mode 100644 pkg/versions/1_0/client/deactivate_test.go create mode 100644 pkg/versions/1_0/client/recover.go create mode 100644 pkg/versions/1_0/client/recover_test.go create mode 100644 pkg/versions/1_0/client/update.go create mode 100644 pkg/versions/1_0/client/update_test.go create mode 100644 pkg/versions/1_0/doccomposer/composer.go create mode 100644 pkg/versions/1_0/doccomposer/composer_test.go create mode 100644 pkg/versions/1_0/doctransformer/didtransformer/testdata/doc.json create mode 100644 pkg/versions/1_0/doctransformer/didtransformer/transformer.go create mode 100644 pkg/versions/1_0/doctransformer/didtransformer/transformer_test.go create mode 100644 pkg/versions/1_0/doctransformer/doctransformer/transformer.go create mode 100644 pkg/versions/1_0/doctransformer/doctransformer/transformer_test.go create mode 100644 pkg/versions/1_0/doctransformer/metadata/metadata.go create mode 100644 pkg/versions/1_0/doctransformer/metadata/metadata_test.go create mode 100644 pkg/versions/1_0/docvalidator/didvalidator/testdata/doc.json create mode 100644 pkg/versions/1_0/docvalidator/didvalidator/validator.go create mode 100644 pkg/versions/1_0/docvalidator/didvalidator/validator_test.go create mode 100644 pkg/versions/1_0/docvalidator/docvalidator/validator.go create mode 100644 pkg/versions/1_0/docvalidator/docvalidator/validator_test.go create mode 100644 pkg/versions/1_0/model/operation.go create mode 100644 pkg/versions/1_0/model/request.go create mode 100644 pkg/versions/1_0/model/util.go create mode 100644 pkg/versions/1_0/model/util_test.go create mode 100644 pkg/versions/1_0/operationapplier/operationapplier.go 
create mode 100644 pkg/versions/1_0/operationapplier/operationapplier_test.go create mode 100644 pkg/versions/1_0/operationparser/commitment.go create mode 100644 pkg/versions/1_0/operationparser/commitment_test.go create mode 100644 pkg/versions/1_0/operationparser/create.go create mode 100644 pkg/versions/1_0/operationparser/create_test.go create mode 100644 pkg/versions/1_0/operationparser/deactivate.go create mode 100644 pkg/versions/1_0/operationparser/deactivate_test.go create mode 100644 pkg/versions/1_0/operationparser/method.go create mode 100644 pkg/versions/1_0/operationparser/method_test.go create mode 100644 pkg/versions/1_0/operationparser/operation.go create mode 100644 pkg/versions/1_0/operationparser/operation_test.go create mode 100644 pkg/versions/1_0/operationparser/patchvalidator/addkeys.go create mode 100644 pkg/versions/1_0/operationparser/patchvalidator/addkeys_test.go create mode 100644 pkg/versions/1_0/operationparser/patchvalidator/addservices.go create mode 100644 pkg/versions/1_0/operationparser/patchvalidator/addservices_test.go create mode 100644 pkg/versions/1_0/operationparser/patchvalidator/alsoknownas.go create mode 100644 pkg/versions/1_0/operationparser/patchvalidator/alsoknownas_test.go create mode 100644 pkg/versions/1_0/operationparser/patchvalidator/document.go create mode 100644 pkg/versions/1_0/operationparser/patchvalidator/document_test.go create mode 100644 pkg/versions/1_0/operationparser/patchvalidator/ietf.go create mode 100644 pkg/versions/1_0/operationparser/patchvalidator/ietf_test.go create mode 100644 pkg/versions/1_0/operationparser/patchvalidator/removekeys.go create mode 100644 pkg/versions/1_0/operationparser/patchvalidator/removekeys_test.go create mode 100644 pkg/versions/1_0/operationparser/patchvalidator/removeservices.go create mode 100644 pkg/versions/1_0/operationparser/patchvalidator/removeservices_test.go create mode 100644 pkg/versions/1_0/operationparser/patchvalidator/replace.go create mode 100644 pkg/versions/1_0/operationparser/patchvalidator/replace_test.go create mode 100644 pkg/versions/1_0/operationparser/patchvalidator/testdata/doc.json create mode 100644 pkg/versions/1_0/operationparser/patchvalidator/validator.go create mode 100644 pkg/versions/1_0/operationparser/patchvalidator/validator_test.go create mode 100644 pkg/versions/1_0/operationparser/recover.go create mode 100644 pkg/versions/1_0/operationparser/recover_test.go create mode 100644 pkg/versions/1_0/operationparser/update.go create mode 100644 pkg/versions/1_0/operationparser/update_test.go diff --git a/README.md b/README.md index 4e3ceaf..cb83364 100644 --- a/README.md +++ b/README.md @@ -1,5 +1,5 @@ # sidetree-go -This library implements core components required to implement `Sidetree Protocol: `_ +This library implements core components required to implement the [Sidetree Protocol](https://github.com/decentralized-identity/sidetree/blob/master/docs/protocol.md). ## Contributing Thank you for your interest in contributing. Please see our [community contribution guidelines](https://github.com/trustbloc/community/blob/main/CONTRIBUTING.md) for more information.
diff --git a/go.mod b/go.mod index 6a15d17..edde14a 100644 --- a/go.mod +++ b/go.mod @@ -4,4 +4,38 @@ module github.com/trustbloc/sidetree-go +require ( + github.com/btcsuite/btcd v0.22.3 + github.com/btcsuite/btcutil v1.0.3-0.20201208143702-a53e38424cce + github.com/evanphx/json-patch v4.1.0+incompatible + github.com/multiformats/go-multibase v0.0.1 + github.com/multiformats/go-multihash v0.0.14 + github.com/pkg/errors v0.9.1 + github.com/square/go-jose/v3 v3.0.0-20200630053402-0a67ce9b0693 + github.com/stretchr/testify v1.8.1 + github.com/trustbloc/logutil-go v1.0.0-rc1 + go.uber.org/zap v1.23.0 + golang.org/x/crypto v0.1.0 +) + +require ( + github.com/davecgh/go-spew v1.1.1 // indirect + github.com/kr/text v0.2.0 // indirect + github.com/minio/blake2b-simd v0.0.0-20160723061019-3f5f724cb5b1 // indirect + github.com/minio/sha256-simd v0.1.1 // indirect + github.com/mr-tron/base58 v1.2.0 // indirect + github.com/multiformats/go-base32 v0.0.3 // indirect + github.com/multiformats/go-varint v0.0.6 // indirect + github.com/niemeyer/pretty v0.0.0-20200227124842-a10e7caefd8e // indirect + github.com/pmezard/go-difflib v1.0.0 // indirect + github.com/spaolacci/murmur3 v1.1.0 // indirect + go.opentelemetry.io/otel v1.12.0 // indirect + go.opentelemetry.io/otel/trace v1.12.0 // indirect + go.uber.org/atomic v1.7.0 // indirect + go.uber.org/multierr v1.6.0 // indirect + golang.org/x/sys v0.1.0 // indirect + gopkg.in/check.v1 v1.0.0-20200227125254-8fa46927fb4f // indirect + gopkg.in/yaml.v3 v3.0.1 // indirect +) + go 1.21 diff --git a/go.sum b/go.sum index e69de29..be1653d 100644 --- a/go.sum +++ b/go.sum @@ -0,0 +1,123 @@ +github.com/aead/siphash v1.0.1/go.mod h1:Nywa3cDsYNNK3gaciGTWPwHt0wlpNV15vwmswBAUSII= +github.com/benbjohnson/clock v1.1.0 h1:Q92kusRqC1XV2MjkWETPvjJVqKetz1OzxZB7mHJLju8= +github.com/benbjohnson/clock v1.1.0/go.mod h1:J11/hYXuz8f4ySSvYwY0FKfm+ezbsZBKZxNJlLklBHA= +github.com/btcsuite/btcd v0.20.1-beta/go.mod h1:wVuoA8VJLEcwgqHBwHmzLRazpKxTv13Px/pDuV7OomQ= +github.com/btcsuite/btcd v0.22.3 h1:kYNaWFvOw6xvqP0vR20RP1Zq1DVMBxEO8QN5d1/EfNg= +github.com/btcsuite/btcd v0.22.3/go.mod h1:wqgTSL29+50LRkmOVknEdmt8ZojIzhuWvgu/iptuN7Y= +github.com/btcsuite/btcd/chaincfg/chainhash v1.0.1 h1:q0rUy8C/TYNBQS1+CGKw68tLOFYSNEs0TFnxxnS9+4U= +github.com/btcsuite/btcd/chaincfg/chainhash v1.0.1/go.mod h1:7SFka0XMvUgj3hfZtydOrQY2mwhPclbT2snogU7SQQc= +github.com/btcsuite/btclog v0.0.0-20170628155309-84c8d2346e9f/go.mod h1:TdznJufoqS23FtqVCzL0ZqgP5MqXbb4fg/WgDys70nA= +github.com/btcsuite/btcutil v0.0.0-20190425235716-9e5f4b9a998d/go.mod h1:+5NJ2+qvTyV9exUAL/rxXi3DcLg2Ts+ymUAY5y4NvMg= +github.com/btcsuite/btcutil v1.0.3-0.20201208143702-a53e38424cce h1:YtWJF7RHm2pYCvA5t0RPmAaLUhREsKuKd+SLhxFbFeQ= +github.com/btcsuite/btcutil v1.0.3-0.20201208143702-a53e38424cce/go.mod h1:0DVlHczLPewLcPGEIeUEzfOJhqGPQ0mJJRDBtD307+o= +github.com/btcsuite/go-socks v0.0.0-20170105172521-4720035b7bfd/go.mod h1:HHNXQzUsZCxOoE+CPiyCTO6x34Zs86zZUiwtpXoGdtg= +github.com/btcsuite/goleveldb v0.0.0-20160330041536-7834afc9e8cd/go.mod h1:F+uVaaLLH7j4eDXPRvw78tMflu7Ie2bzYOH4Y8rRKBY= +github.com/btcsuite/snappy-go v0.0.0-20151229074030-0bdef8d06723/go.mod h1:8woku9dyThutzjeg+3xrA5iCpBRH8XEEg3lh6TiUghc= +github.com/btcsuite/websocket v0.0.0-20150119174127-31079b680792/go.mod h1:ghJtEyQwv5/p4Mg4C0fgbePVuGr935/5ddU9Z3TmDRY= +github.com/btcsuite/winsvc v1.0.0/go.mod h1:jsenWakMcC0zFBFurPLEAyrnc/teJEM1O46fmI40EZs= +github.com/creack/pty v1.1.9/go.mod h1:oKZEueFk5CKHvIhNR5MUki03XCEU+Q6VDXinZuGJ33E= +github.com/davecgh/go-spew 
v0.0.0-20171005155431-ecdeabc65495/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= +github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/evanphx/json-patch v4.1.0+incompatible h1:K1MDoo4AZ4wU0GIU/fPmtZg7VpzLjCxu+UwBD1FvwOc= +github.com/evanphx/json-patch v4.1.0+incompatible/go.mod h1:50XU6AFN0ol/bzJsmQLiYLvXMP4fmwYFNcr97nuDLSk= +github.com/fsnotify/fsnotify v1.4.7/go.mod h1:jwhsz4b93w/PPRr/qN1Yymfu8t87LnFCMoQvtojpjFo= +github.com/go-logr/logr v1.2.3 h1:2DntVwHkVopvECVRSlL5PSo9eG+cAkDCuckLubN+rq0= +github.com/go-logr/logr v1.2.3/go.mod h1:jdQByPbusPIv2/zmleS9BjJVeZ6kBagPoEUsqbVz/1A= +github.com/go-logr/stdr v1.2.2 h1:hSWxHoqTgW2S2qGc0LTAI563KZ5YKYRhT3MFKZMbjag= +github.com/go-logr/stdr v1.2.2/go.mod h1:mMo/vtBO5dYbehREoey6XUKy/eSumjCCveDpRre4VKE= +github.com/golang/protobuf v1.2.0/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= +github.com/google/go-cmp v0.5.0/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/go-cmp v0.5.9 h1:O2Tfq5qg4qc4AmwVlvv0oLiVAGB7enBSJ2x2DqQFi38= +github.com/google/go-cmp v0.5.9/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY= +github.com/hpcloud/tail v1.0.0/go.mod h1:ab1qPbhIpdTxEkNHXyeSf5vhxWSCs/tWer42PpOxQnU= +github.com/jessevdk/go-flags v0.0.0-20141203071132-1679536dcc89/go.mod h1:4FA24M0QyGHXBuZZK/XkWh8h0e1EYbRYJSGM75WSRxI= +github.com/jrick/logrotate v1.0.0/go.mod h1:LNinyqDIJnpAur+b8yyulnQw/wDuN1+BYKlTRt3OuAQ= +github.com/kkdai/bstream v0.0.0-20161212061736-f391b8402d23/go.mod h1:J+Gs4SYgM6CZQHDETBtE9HaSEkGmuNXF86RwHhHUvq4= +github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ= +github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI= +github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY= +github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE= +github.com/minio/blake2b-simd v0.0.0-20160723061019-3f5f724cb5b1 h1:lYpkrQH5ajf0OXOcUbGjvZxxijuBwbbmlSxLiuofa+g= +github.com/minio/blake2b-simd v0.0.0-20160723061019-3f5f724cb5b1/go.mod h1:pD8RvIylQ358TN4wwqatJ8rNavkEINozVn9DtGI3dfQ= +github.com/minio/sha256-simd v0.1.1-0.20190913151208-6de447530771/go.mod h1:B5e1o+1/KgNmWrSQK08Y6Z1Vb5pwIktudl0J58iy0KM= +github.com/minio/sha256-simd v0.1.1 h1:5QHSlgo3nt5yKOJrC7W8w7X+NFl8cMPZm96iu8kKUJU= +github.com/minio/sha256-simd v0.1.1/go.mod h1:B5e1o+1/KgNmWrSQK08Y6Z1Vb5pwIktudl0J58iy0KM= +github.com/mr-tron/base58 v1.1.0/go.mod h1:xcD2VGqlgYjBdcBLw+TuYLr8afG+Hj8g2eTVqeSzSU8= +github.com/mr-tron/base58 v1.1.3/go.mod h1:BinMc/sQntlIE1frQmRFPUoPA1Zkr8VRgBdjWI2mNwc= +github.com/mr-tron/base58 v1.2.0 h1:T/HDJBh4ZCPbU39/+c3rRvE0uKBQlU27+QI8LJ4t64o= +github.com/mr-tron/base58 v1.2.0/go.mod h1:BinMc/sQntlIE1frQmRFPUoPA1Zkr8VRgBdjWI2mNwc= +github.com/multiformats/go-base32 v0.0.3 h1:tw5+NhuwaOjJCC5Pp82QuXbrmLzWg7uxlMFp8Nq/kkI= +github.com/multiformats/go-base32 v0.0.3/go.mod h1:pLiuGC8y0QR3Ue4Zug5UzK9LjgbkL8NSQj0zQ5Nz/AA= +github.com/multiformats/go-multibase v0.0.1 h1:PN9/v21eLywrFWdFNsFKaU04kLJzuYzmrJR+ubhT9qA= +github.com/multiformats/go-multibase v0.0.1/go.mod h1:bja2MqRZ3ggyXtZSEDKpl0uO/gviWFaSteVbWT51qgs= +github.com/multiformats/go-multihash v0.0.14 h1:QoBceQYQQtNUuf6s7wHxnE2c8bhbMqhfGzNI032se/I= +github.com/multiformats/go-multihash v0.0.14/go.mod h1:VdAWLKTwram9oKAatUcLxBNUjdtcVwxObEQBtRfuyjc= 
+github.com/multiformats/go-varint v0.0.5/go.mod h1:3Ls8CIEsrijN6+B7PbrXRPxHRPuXSrVKRY101jdMZYE= +github.com/multiformats/go-varint v0.0.6 h1:gk85QWKxh3TazbLxED/NlDVv8+q+ReFJk7Y2W/KhfNY= +github.com/multiformats/go-varint v0.0.6/go.mod h1:3Ls8CIEsrijN6+B7PbrXRPxHRPuXSrVKRY101jdMZYE= +github.com/niemeyer/pretty v0.0.0-20200227124842-a10e7caefd8e h1:fD57ERR4JtEqsWbfPhv4DMiApHyliiK5xCTNVSPiaAs= +github.com/niemeyer/pretty v0.0.0-20200227124842-a10e7caefd8e/go.mod h1:zD1mROLANZcx1PVRCS0qkT7pwLkGfwJo4zjcN/Tysno= +github.com/onsi/ginkgo v1.6.0/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE= +github.com/onsi/ginkgo v1.7.0/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE= +github.com/onsi/gomega v1.4.3/go.mod h1:ex+gbHU/CVuBBDIJjb2X0qEXbFg53c61hWP/1CpauHY= +github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4= +github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= +github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= +github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= +github.com/spaolacci/murmur3 v1.1.0 h1:7c1g84S4BPRrfL5Xrdp6fOJ206sU9y293DDHaoy0bLI= +github.com/spaolacci/murmur3 v1.1.0/go.mod h1:JwIasOWyU6f++ZhiEuf87xNszmSA2myDM2Kzu9HwQUA= +github.com/square/go-jose/v3 v3.0.0-20200630053402-0a67ce9b0693 h1:wD1IWQwAhdWclCwaf6DdzgCAe9Bfz1M+4AHRd7N786Y= +github.com/square/go-jose/v3 v3.0.0-20200630053402-0a67ce9b0693/go.mod h1:6hSY48PjDm4UObWmGLyJE9DxYVKTgR9kbCspXXJEhcU= +github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= +github.com/stretchr/objx v0.4.0/go.mod h1:YvHI0jy2hoMjB+UWwv71VJQ9isScKT/TqJzVSSt89Yw= +github.com/stretchr/objx v0.5.0/go.mod h1:Yh+to48EsGEfYuaHDzXPcE3xhTkx73EhmCGUpEOglKo= +github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI= +github.com/stretchr/testify v1.6.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= +github.com/stretchr/testify v1.7.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= +github.com/stretchr/testify v1.8.0/go.mod h1:yNjHg4UonilssWZ8iaSj1OCr/vHnekPRkoO+kdMU+MU= +github.com/stretchr/testify v1.8.1 h1:w7B6lhMri9wdJUVmEZPGGhZzrYTPvgJArz7wNPgYKsk= +github.com/stretchr/testify v1.8.1/go.mod h1:w2LPCIKwWwSfY2zedu0+kehJoqGctiVI29o6fzry7u4= +github.com/trustbloc/logutil-go v1.0.0-rc1 h1:rRJbvgQfrlUfyej+mY0nuQJymGqjRW4oZEwKi544F4c= +github.com/trustbloc/logutil-go v1.0.0-rc1/go.mod h1:JlxT0oZfNKgIlSNtgc001WEeDMxlnAvOM43gNm8DQVc= +go.opentelemetry.io/otel v1.12.0 h1:IgfC7kqQrRccIKuB7Cl+SRUmsKbEwSGPr0Eu+/ht1SQ= +go.opentelemetry.io/otel v1.12.0/go.mod h1:geaoz0L0r1BEOR81k7/n9W4TCXYCJ7bPO7K374jQHG0= +go.opentelemetry.io/otel/sdk v1.12.0 h1:8npliVYV7qc0t1FKdpU08eMnOjgPFMnriPhn0HH4q3o= +go.opentelemetry.io/otel/sdk v1.12.0/go.mod h1:WYcvtgquYvgODEvxOry5owO2y9MyciW7JqMz6cpXShE= +go.opentelemetry.io/otel/trace v1.12.0 h1:p28in++7Kd0r2d8gSt931O57fdjUyWxkVbESuILAeUc= +go.opentelemetry.io/otel/trace v1.12.0/go.mod h1:pHlgBynn6s25qJ2szD+Bv+iwKJttjHSI3lUAyf0GNuQ= +go.uber.org/atomic v1.7.0 h1:ADUqmZGgLDDfbSL9ZmPxKTybcoEYHgpYfELNoN+7hsw= +go.uber.org/atomic v1.7.0/go.mod h1:fEN4uk6kAWBTFdckzkM89CLk9XfWZrxpCo0nPH17wJc= +go.uber.org/goleak v1.1.11 h1:wy28qYRKZgnJTxGxvye5/wgWr1EKjmUDGYox5mGlRlI= +go.uber.org/goleak v1.1.11/go.mod h1:cwTWslyiVhfpKIDGSZEM2HlOvcqm+tG4zioyIeLoqMQ= +go.uber.org/multierr v1.6.0 h1:y6IPFStTAIT5Ytl7/XYmHvzXQ7S3g/IeZW9hyZ5thw4= +go.uber.org/multierr v1.6.0/go.mod h1:cdWPpRnG4AhwMwsgIHip0KRBQjJy5kYEpYjJxpXp9iU= +go.uber.org/zap v1.23.0 
h1:OjGQ5KQDEUawVHxNwQgPpiypGHOxo2mNZsOqTak4fFY= +go.uber.org/zap v1.23.0/go.mod h1:D+nX8jyLsMHMYrln8A0rJjFt/T/9/bGgIhAqxv5URuY= +golang.org/x/crypto v0.0.0-20170930174604-9419663f5a44/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4= +golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= +golang.org/x/crypto v0.0.0-20190611184440-5c40567a22f8/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= +golang.org/x/crypto v0.0.0-20190911031432-227b76d455e7/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= +golang.org/x/crypto v0.0.0-20200115085410-6d4e4cb37c7d/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= +golang.org/x/crypto v0.1.0 h1:MDRAIl0xIo9Io2xV565hzXHw3zVseKrJKodhohM5CjU= +golang.org/x/crypto v0.1.0/go.mod h1:RecgLatLF4+eUMCP1PoPZQb+cVrJcOPbHkTkbkB9sbw= +golang.org/x/net v0.0.0-20180906233101-161cd47e91fd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= +golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sys v0.0.0-20180909124046-d0be0721c37e/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.1.0 h1:kunALQeHf1/185U1i0GOB/fy1IPRDDpuoOOqRReG57U= +golang.org/x/sys v0.1.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= +golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= +gopkg.in/check.v1 v1.0.0-20200227125254-8fa46927fb4f h1:BLraFXnmrev5lT+xlilqcH8XK9/i0At2xKjWk4p6zsU= +gopkg.in/check.v1 v1.0.0-20200227125254-8fa46927fb4f/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= +gopkg.in/fsnotify.v1 v1.4.7/go.mod h1:Tz8NjZHkW78fSQdbUxIjBTcgA1z1m8ZHf0WmKUhAMys= +gopkg.in/tomb.v1 v1.0.0-20141024135613-dd632973f1e7/go.mod h1:dt/ZhP58zS4L8KSrWDmTeBkI65Dw0HsyUHuEVlX15mw= +gopkg.in/yaml.v2 v2.2.1/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= +gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= +gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA= +gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= diff --git a/pkg/api/operation/models.go b/pkg/api/operation/models.go new file mode 100644 index 0000000..57ff62f --- /dev/null +++ b/pkg/api/operation/models.go @@ -0,0 +1,84 @@ +/* +Copyright Gen Digital Inc. All Rights Reserved. + +SPDX-License-Identifier: Apache-2.0 +*/ + +package operation + +// Property contains a key-value pair. +type Property struct { + Key string + Value interface{} +} + +// Operation holds minimum information required for parsing/validating client request. +type Operation struct { + + // Type defines operation type. + Type Type + + // UniqueSuffix defines document unique suffix. + UniqueSuffix string + + // ID defines ID + ID string + + // OperationRequest is the original operation request + OperationRequest []byte + + // AnchorOrigin defines anchor origin. 
+ AnchorOrigin interface{} + + // Properties contains an arbitrary set of implementation-specific name-value pairs. + Properties []Property +} + +// AnchoredOperation defines an anchored operation (stored in document operation store). +type AnchoredOperation struct { + + // Type defines operation type. + Type Type `json:"type"` + + // UniqueSuffix defines document unique suffix. + UniqueSuffix string `json:"uniqueSuffix"` + + // OperationRequest is the original operation request + OperationRequest []byte `json:"operation"` + + // TransactionTime is the logical anchoring time (block number in case of blockchain) for this operation in the + // anchoring system (blockchain). + TransactionTime uint64 `json:"transactionTime"` + + // TransactionNumber is the transaction number of the transaction this operation was batched within. + TransactionNumber uint64 `json:"transactionNumber"` + + // ProtocolVersion is the genesis time (version) of the protocol that was used for this operation. + ProtocolVersion uint64 `json:"protocolVersion"` + + // CanonicalReference contains canonical reference that applies to this operation. + CanonicalReference string `json:"canonicalReference,omitempty"` + + // EquivalentReferences contains the equivalent references that apply to this operation. + EquivalentReferences []string `json:"equivalentReferences,omitempty"` + + // AnchorOrigin is anchor origin + AnchorOrigin interface{} `json:"anchorOrigin,omitempty"` +} + +// Type defines valid values for operation type. +type Type string + +const ( + // TypeCreate captures "create" operation type. + TypeCreate Type = "create" + + // TypeUpdate captures "update" operation type. + TypeUpdate Type = "update" + + // TypeDeactivate captures "deactivate" operation type. + TypeDeactivate Type = "deactivate" + + // TypeRecover captures "recover" operation type. + TypeRecover Type = "recover" +) diff --git a/pkg/api/protocol/protocol.go b/pkg/api/protocol/protocol.go new file mode 100644 index 0000000..1c9520d --- /dev/null +++ b/pkg/api/protocol/protocol.go @@ -0,0 +1,154 @@ +/* +Copyright Gen Digital Inc. All Rights Reserved. + +SPDX-License-Identifier: Apache-2.0 +*/ + +package protocol + +import ( + "github.com/trustbloc/sidetree-go/pkg/api/operation" + "github.com/trustbloc/sidetree-go/pkg/document" + "github.com/trustbloc/sidetree-go/pkg/patch" +) + +//go:generate counterfeiter -o ../../mocks/operationparser.gen.go --fake-name OperationParser . OperationParser +//go:generate counterfeiter -o ../../mocks/operationapplier.gen.go --fake-name OperationApplier . OperationApplier +//go:generate counterfeiter -o ../../mocks/protocolversion.gen.go --fake-name ProtocolVersion . Version +//go:generate counterfeiter -o ../../mocks/documentcomposer.gen.go --fake-name DocumentComposer . DocumentComposer +//go:generate counterfeiter -o ../../mocks/documentvalidator.gen.go --fake-name DocumentValidator . DocumentValidator +//go:generate counterfeiter -o ../../mocks/documenttransformer.gen.go --fake-name DocumentTransformer . DocumentTransformer + +// Protocol defines protocol parameters. +type Protocol struct { + // GenesisTime is inclusive starting logical anchoring time that this protocol applies to. + // (e.g. block number in a blockchain) + GenesisTime uint64 `json:"genesisTime"` + + // MultihashAlgorithms are supported multihash algorithm codes + MultihashAlgorithms []uint `json:"multihashAlgorithms"` + + // MaxOperationCount defines maximum number of operations per batch.
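A minimal sketch of how the operation model above serializes, assuming only the types in pkg/api/operation; the suffix and payload values are made up for illustration:

```go
package main

import (
	"encoding/json"
	"fmt"

	"github.com/trustbloc/sidetree-go/pkg/api/operation"
)

func main() {
	// Illustrative values only; a real unique suffix is derived from the
	// operation's suffix data.
	op := &operation.AnchoredOperation{
		Type:              operation.TypeCreate,
		UniqueSuffix:      "EiD...suffix",
		OperationRequest:  []byte(`{"type":"create"}`),
		TransactionTime:   1234,
		TransactionNumber: 1,
		ProtocolVersion:   0,
	}

	data, err := json.Marshal(op)
	if err != nil {
		panic(err)
	}

	// OperationRequest ([]byte) is emitted as base64 under the "operation" key.
	fmt.Println(string(data))
}
```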
+ MaxOperationCount uint `json:"maxOperationCount"` + + // MaxOperationSize is maximum operation size in bytes (used to reject operations before parsing them) + // It has to be greater than max delta size (big) + max proof size (medium) + other small values (operation type, suffix-data) + MaxOperationSize uint `json:"maxOperationSize"` + + // MaxOperationHashLength is maximum operation hash length + MaxOperationHashLength uint `json:"maxOperationHashLength"` + + // MaxDeltaSize is maximum size of operation's delta property. + MaxDeltaSize uint `json:"maxDeltaSize"` + + // MaxCasURILength is maximum length of CAS URI in batch files. + MaxCasURILength uint `json:"maxCasUriLength"` + + // CompressionAlgorithm is file compression algorithm. + CompressionAlgorithm string `json:"compressionAlgorithm"` + + // MaxCoreIndexFileSize is maximum allowed size (in bytes) of core index file stored in CAS. + MaxCoreIndexFileSize uint `json:"maxCoreIndexFileSize"` + + // MaxProofFileSize is maximum allowed size (in bytes) of proof files stored in CAS. + MaxProofFileSize uint `json:"maxProofFileSize"` + + // MaxProvisionalIndexFileSize is maximum allowed size (in bytes) of provisional index file stored in CAS. + MaxProvisionalIndexFileSize uint `json:"maxProvisionalIndexFileSize"` + + // MaxChunkFileSize is maximum allowed size (in bytes) of chunk file stored in CAS. + MaxChunkFileSize uint `json:"maxChunkFileSize"` + + // Patches contains the list of allowed patches. + Patches []string `json:"patches"` + + // SignatureAlgorithms contain supported signature algorithms for signed operations (e.g. EdDSA, ES256, ES384, ES512, ES256K). + SignatureAlgorithms []string `json:"signatureAlgorithms"` + + // KeyAlgorithms contain supported key algorithms for signed operations (e.g. secp256k1, P-256, P-384, P-521, Ed25519). + KeyAlgorithms []string `json:"keyAlgorithms"` + + // MaxOperationTimeDelta is maximum time that operation should be valid before it expires; used with anchor from time + MaxOperationTimeDelta uint64 `json:"maxOperationTimeDelta"` + + // NonceSize is the number of bytes in nonce values + NonceSize uint64 `json:"nonceSize"` + + // MaxMemoryDecompressionFactor is the maximum factor by which a file may expand during decompression (e.g. 3 times maximum file size) + MaxMemoryDecompressionFactor uint `json:"maxMemoryDecompressionFactor"` +} + +// OperationParser defines the functions for parsing operations. +type OperationParser interface { + Parse(namespace string, operation []byte) (*operation.Operation, error) + ParseDID(namespace, shortOrLongFormDID string) (string, []byte, error) + GetRevealValue(operation []byte) (string, error) + GetCommitment(operation []byte) (string, error) +} + +// ResolutionModel contains temporary data during document resolution. +type ResolutionModel struct { + Doc document.Document + CreatedTime uint64 + UpdatedTime uint64 + LastOperationTransactionTime uint64 + LastOperationTransactionNumber uint64 + LastOperationProtocolVersion uint64 + UpdateCommitment string + RecoveryCommitment string + Deactivated bool + AnchorOrigin interface{} + EquivalentReferences []string + CanonicalReference string + VersionID string + PublishedOperations []*operation.AnchoredOperation + UnpublishedOperations []*operation.AnchoredOperation +} + +// OperationApplier applies the given operation to the document. +type OperationApplier interface { + Apply(op *operation.AnchoredOperation, rm *ResolutionModel) (*ResolutionModel, error) +} + +// DocumentComposer applies patches to the given document.
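To make the parameter set above concrete, here is a sketch of a Protocol literal; the numbers are assumptions loosely modeled on the Sidetree v1 reference parameters, not values mandated by this library:

```go
package main

import "github.com/trustbloc/sidetree-go/pkg/api/protocol"

// defaultProtocol returns an illustrative protocol configuration.
func defaultProtocol() protocol.Protocol {
	return protocol.Protocol{
		GenesisTime:                  0,
		MultihashAlgorithms:          []uint{18}, // 18 = SHA2-256 multihash code
		MaxOperationCount:            10000,
		MaxOperationSize:             2500,
		MaxOperationHashLength:       100,
		MaxDeltaSize:                 1700,
		MaxCasURILength:              100,
		CompressionAlgorithm:         "GZIP",
		MaxCoreIndexFileSize:         1000000,
		MaxProofFileSize:             2500000,
		MaxProvisionalIndexFileSize:  1000000,
		MaxChunkFileSize:             10000000,
		Patches:                      []string{"replace", "add-public-keys", "remove-public-keys", "add-services", "remove-services", "ietf-json-patch"},
		SignatureAlgorithms:          []string{"EdDSA", "ES256", "ES256K"},
		KeyAlgorithms:                []string{"Ed25519", "P-256", "secp256k1"},
		MaxOperationTimeDelta:        600, // seconds; assumed
		NonceSize:                    16,  // bytes; assumed
		MaxMemoryDecompressionFactor: 3,
	}
}
```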
+type DocumentComposer interface { + ApplyPatches(doc document.Document, patches []patch.Patch) (document.Document, error) +} + +// DocumentValidator is an interface for validating document operations. +type DocumentValidator interface { + IsValidOriginalDocument(payload []byte) error + IsValidPayload(payload []byte) error +} + +// DocumentTransformer transforms internal resolution model into external document (resolution result). +type DocumentTransformer interface { + TransformDocument(rm *ResolutionModel, info TransformationInfo) (*document.ResolutionResult, error) +} + +// TransformationInfo contains document transformation info. +type TransformationInfo map[string]interface{} + +// Version contains the protocol and corresponding implementations that are compatible with the protocol version. +type Version interface { + Version() string + Protocol() Protocol + OperationParser() OperationParser + OperationApplier() OperationApplier + DocumentTransformer() DocumentTransformer + DocumentValidator() DocumentValidator +} + +// Client defines interface for accessing protocol version/information. +type Client interface { + // Current returns latest version of protocol. + Current() (Version, error) + + // Get returns the version at the given transaction time. + Get(transactionTime uint64) (Version, error) +} + +// ClientProvider returns a protocol client for the given namespace. +type ClientProvider interface { + ForNamespace(namespace string) (Client, error) +} diff --git a/pkg/canonicalizer/canonicalizer.go b/pkg/canonicalizer/canonicalizer.go new file mode 100644 index 0000000..4ea5e52 --- /dev/null +++ b/pkg/canonicalizer/canonicalizer.go @@ -0,0 +1,29 @@ +/* +Copyright Gen Digital Inc. All Rights Reserved. + +SPDX-License-Identifier: Apache-2.0 +*/ + +package canonicalizer + +import ( + "encoding/json" + + "github.com/trustbloc/sidetree-go/pkg/internal/jsoncanonicalizer" +) + +// MarshalCanonical marshals the value into canonical JSON using JCS (RFC 8785) canonicalization. +func MarshalCanonical(value interface{}) ([]byte, error) { + valueBytes, ok := value.([]byte) + + if !ok { + var err error + + valueBytes, err = json.Marshal(value) + if err != nil { + return nil, err + } + } + + return jsoncanonicalizer.Transform(valueBytes) +} diff --git a/pkg/canonicalizer/canonicalizer_test.go b/pkg/canonicalizer/canonicalizer_test.go new file mode 100644 index 0000000..f9fcfd4 --- /dev/null +++ b/pkg/canonicalizer/canonicalizer_test.go @@ -0,0 +1,43 @@ +/* +Copyright Gen Digital Inc. All Rights Reserved.
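A usage sketch for the canonicalizer above: JCS emits object keys in sorted order, so two structurally equal documents always canonicalize to identical bytes, which is what makes hashing them meaningful.

```go
package main

import (
	"fmt"

	"github.com/trustbloc/sidetree-go/pkg/canonicalizer"
)

func main() {
	// Works on any JSON-marshalable value...
	type sample struct {
		Beta  string `json:"beta"`
		Alpha string `json:"alpha"`
	}

	out, err := canonicalizer.MarshalCanonical(sample{Beta: "b", Alpha: "a"})
	if err != nil {
		panic(err)
	}
	fmt.Println(string(out)) // {"alpha":"a","beta":"b"}

	// ...and on raw JSON bytes, which are canonicalized as-is.
	out, err = canonicalizer.MarshalCanonical([]byte(`{"beta":"b","alpha":"a"}`))
	if err != nil {
		panic(err)
	}
	fmt.Println(string(out)) // {"alpha":"a","beta":"b"}
}
```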
+ +SPDX-License-Identifier: Apache-2.0 +*/ + +package canonicalizer + +import ( + "testing" + + "github.com/stretchr/testify/require" +) + +func TestMarshalCanonical(t *testing.T) { + t.Run("success", func(t *testing.T) { + test := struct { + Beta string `json:"beta"` + Alpha string `json:"alpha"` + }{ + Beta: "beta", + Alpha: "alpha", + } + + result, err := MarshalCanonical(test) + require.NoError(t, err) + require.Equal(t, string(result), `{"alpha":"alpha","beta":"beta"}`) + }) + + t.Run("success - accepts bytes", func(t *testing.T) { + result, err := MarshalCanonical([]byte(`{"beta":"beta","alpha":"alpha"}`)) + require.NoError(t, err) + require.Equal(t, string(result), `{"alpha":"alpha","beta":"beta"}`) + }) + + t.Run("marshal error", func(t *testing.T) { + var c chan int + result, err := MarshalCanonical(c) + require.Error(t, err) + require.Empty(t, result) + require.Contains(t, err.Error(), "json: unsupported type: chan int") + }) +} diff --git a/pkg/commitment/hash.go b/pkg/commitment/hash.go new file mode 100644 index 0000000..270e54b --- /dev/null +++ b/pkg/commitment/hash.go @@ -0,0 +1,73 @@ +/* +Copyright Gen Digital Inc. All Rights Reserved. + +SPDX-License-Identifier: Apache-2.0 +*/ + +package commitment + +import ( + "fmt" + + "github.com/trustbloc/logutil-go/pkg/log" + + "github.com/trustbloc/sidetree-go/pkg/canonicalizer" + "github.com/trustbloc/sidetree-go/pkg/encoder" + "github.com/trustbloc/sidetree-go/pkg/hashing" + logfields "github.com/trustbloc/sidetree-go/pkg/internal/log" + "github.com/trustbloc/sidetree-go/pkg/jws" +) + +var logger = log.New("sidetree-core-commitment") + +// GetCommitment will calculate commitment from JWK. +func GetCommitment(jwk *jws.JWK, multihashCode uint) (string, error) { + data, err := canonicalizer.MarshalCanonical(jwk) + if err != nil { + return "", err + } + + logger.Debug("Calculating commitment from JWK", logfields.WithData(data)) + + hash, err := hashing.GetHashFromMultihash(multihashCode) + if err != nil { + return "", err + } + + dataHash, err := hashing.GetHash(hash, data) + if err != nil { + return "", err + } + + multiHash, err := hashing.ComputeMultihash(multihashCode, dataHash) + if err != nil { + return "", err + } + + return encoder.EncodeToString(multiHash), nil +} + +// GetRevealValue will calculate reveal value from JWK. +func GetRevealValue(jwk *jws.JWK, multihashCode uint) (string, error) { + rv, err := hashing.CalculateModelMultihash(jwk, multihashCode) + if err != nil { + return "", fmt.Errorf("failed to get reveal value: %s", err.Error()) + } + + return rv, nil +} + +// GetCommitmentFromRevealValue will calculate commitment from reveal value. +func GetCommitmentFromRevealValue(rv string) (string, error) { + mh, err := hashing.GetMultihash(rv) + if err != nil { + return "", fmt.Errorf("failed to get commitment from reveal value (get multihash): %s", err.Error()) + } + + multiHash, err := hashing.ComputeMultihash(uint(mh.Code), mh.Digest) + if err != nil { + return "", fmt.Errorf("failed to get commitment from reveal value (compute multihash): %s", err.Error()) + } + + return encoder.EncodeToString(multiHash), nil +} diff --git a/pkg/commitment/hash_test.go b/pkg/commitment/hash_test.go new file mode 100644 index 0000000..5d77b1d --- /dev/null +++ b/pkg/commitment/hash_test.go @@ -0,0 +1,114 @@ +/* +Copyright Gen Digital Inc. All Rights Reserved. 
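A sketch of the commitment/reveal relationship implemented above: GetRevealValue multihashes the canonicalized JWK once, GetCommitment hashes it twice, so re-hashing a reveal value with GetCommitmentFromRevealValue must reproduce the commitment. The key material here is illustrative.

```go
package main

import (
	"fmt"

	"github.com/trustbloc/sidetree-go/pkg/commitment"
	"github.com/trustbloc/sidetree-go/pkg/jws"
)

const sha2_256 = 18 // SHA2-256 multihash code

func main() {
	jwk := &jws.JWK{Kty: "EC", Crv: "P-256", X: "x", Y: "y"} // illustrative key material

	c, err := commitment.GetCommitment(jwk, sha2_256)
	if err != nil {
		panic(err)
	}

	rv, err := commitment.GetRevealValue(jwk, sha2_256)
	if err != nil {
		panic(err)
	}

	fromRv, err := commitment.GetCommitmentFromRevealValue(rv)
	if err != nil {
		panic(err)
	}

	fmt.Println(c == fromRv) // true
}
```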
+ +SPDX-License-Identifier: Apache-2.0 +*/ + +package commitment + +import ( + "testing" + + "github.com/stretchr/testify/require" + + "github.com/trustbloc/sidetree-go/pkg/canonicalizer" + "github.com/trustbloc/sidetree-go/pkg/jws" +) + +const ( + sha2_256 uint = 18 // multihash code +) + +func TestGetCommitment(t *testing.T) { + jwk := &jws.JWK{ + Crv: "crv", + Kty: "kty", + X: "x", + Y: "y", + } + + t.Run("success", func(t *testing.T) { + commitment, err := GetCommitment(jwk, sha2_256) + require.NoError(t, err) + require.NotEmpty(t, commitment) + }) + + t.Run(" error - multihash not supported", func(t *testing.T) { + commitment, err := GetCommitment(jwk, 55) + require.Error(t, err) + require.Empty(t, commitment) + require.Contains(t, err.Error(), "algorithm not supported, unable to compute hash") + }) + + t.Run("error - canonicalization failed", func(t *testing.T) { + commitment, err := GetCommitment(nil, sha2_256) + require.Error(t, err) + require.Empty(t, commitment) + require.Contains(t, err.Error(), "Expected '{' but got 'n'") + }) + + t.Run("interop test", func(t *testing.T) { + jwk := &jws.JWK{ + Kty: "EC", + Crv: "secp256k1", + X: "5s3-bKjD1Eu_3NJu8pk7qIdOPl1GBzU_V8aR3xiacoM", + Y: "v0-Q5H3vcfAfQ4zsebJQvMrIg3pcsaJzRvuIYZ3_UOY", + } + + canonicalized, err := canonicalizer.MarshalCanonical(jwk) + require.NoError(t, err) + + expected := `{"crv":"secp256k1","kty":"EC","x":"5s3-bKjD1Eu_3NJu8pk7qIdOPl1GBzU_V8aR3xiacoM","y":"v0-Q5H3vcfAfQ4zsebJQvMrIg3pcsaJzRvuIYZ3_UOY"}` + require.Equal(t, string(canonicalized), expected) + }) +} + +func TestGetRevealValue(t *testing.T) { + jwk := &jws.JWK{ + Crv: "crv", + Kty: "kty", + X: "x", + Y: "y", + } + + t.Run("success", func(t *testing.T) { + rv, err := GetRevealValue(jwk, sha2_256) + require.NoError(t, err) + require.NotEmpty(t, rv) + }) + + t.Run("error - wrong multihash code", func(t *testing.T) { + rv, err := GetRevealValue(jwk, 55) + require.Error(t, err) + require.Empty(t, rv) + require.Contains(t, err.Error(), "failed to get reveal value: algorithm not supported, unable to compute hash") + }) +} + +func TestGetCommitmentFromRevealValue(t *testing.T) { + jwk := &jws.JWK{ + Crv: "crv", + Kty: "kty", + X: "x", + Y: "y", + } + + t.Run("success", func(t *testing.T) { + rv, err := GetRevealValue(jwk, sha2_256) + require.NoError(t, err) + + cFromRv, err := GetCommitmentFromRevealValue(rv) + require.NoError(t, err) + + c, err := GetCommitment(jwk, sha2_256) + require.NoError(t, err) + require.Equal(t, c, cFromRv) + }) + + t.Run("error - reveal value is not a multihash", func(t *testing.T) { + cFromRv, err := GetCommitmentFromRevealValue("reveal") + require.Error(t, err) + require.Empty(t, cFromRv) + require.Contains(t, err.Error(), "failed to get commitment from reveal value") + }) +} diff --git a/pkg/document/diddocument.go b/pkg/document/diddocument.go new file mode 100644 index 0000000..1d19297 --- /dev/null +++ b/pkg/document/diddocument.go @@ -0,0 +1,193 @@ +/* +Copyright Gen Digital Inc. All Rights Reserved. + +SPDX-License-Identifier: Apache-2.0 +*/ + +package document + +import ( + "encoding/json" + "io" +) + +const ( + + // ContextProperty defines key for context property. + ContextProperty = "@context" + + // AlsoKnownAs defines also known as property. + AlsoKnownAs = "alsoKnownAs" + + // ServiceProperty defines key for service property. + ServiceProperty = "service" + + // PublicKeyProperty defines key for public key property. + PublicKeyProperty = "publicKey" + + // VerificationMethodProperty defines key for verification method. 
+ VerificationMethodProperty = "verificationMethod" + + // AuthenticationProperty defines key for authentication property. + AuthenticationProperty = "authentication" + + // AssertionMethodProperty defines key for assertion method property. + AssertionMethodProperty = "assertionMethod" + + // KeyAgreementProperty defines key for key agreement property. + KeyAgreementProperty = "keyAgreement" + + // DelegationKeyProperty defines key for delegation key property. + DelegationKeyProperty = "capabilityDelegation" + + // InvocationKeyProperty defines key for invocation key property. + InvocationKeyProperty = "capabilityInvocation" +) + +// DIDDocument defines DID Document data structure used by Sidetree for basic type safety checks. +type DIDDocument map[string]interface{} + +// ID is identifier for DID subject (what DID Document is about). +func (doc DIDDocument) ID() string { + return stringEntry(doc[IDProperty]) +} + +// Context is the context of did document. +func (doc DIDDocument) Context() []interface{} { + return interfaceArray(doc[ContextProperty]) +} + +// PublicKeys are used for digital signatures, encryption and other cryptographic operations. +func (doc DIDDocument) PublicKeys() []PublicKey { + return ParsePublicKeys(doc[PublicKeyProperty]) +} + +// VerificationMethods (formerly public keys) are used for digital signatures, encryption and other cryptographic operations. +func (doc DIDDocument) VerificationMethods() []PublicKey { + return ParsePublicKeys(doc[VerificationMethodProperty]) +} + +// AlsoKnownAs are alternate identifiers for DID subject. +func (doc DIDDocument) AlsoKnownAs() []string { + return StringArray(doc[AlsoKnownAs]) +} + +// ParsePublicKeys is helper function for parsing public keys. +func ParsePublicKeys(entry interface{}) []PublicKey { + if entry == nil { + return nil + } + + typedEntry, ok := entry.([]interface{}) + if !ok { + return nil + } + + var result []PublicKey + for _, e := range typedEntry { + emap, ok := e.(map[string]interface{}) + if !ok { + continue + } + result = append(result, NewPublicKey(emap)) + } + + return result +} + +// Services is an array of service endpoints. +func (doc DIDDocument) Services() []Service { + return ParseServices(doc[ServiceProperty]) +} + +// ParseServices is utility for parsing array of service endpoints. +func ParseServices(entry interface{}) []Service { + if entry == nil { + return nil + } + + typedEntry, ok := entry.([]interface{}) + if !ok { + return nil + } + + var result []Service + for _, e := range typedEntry { + emap, ok := e.(map[string]interface{}) + if !ok { + continue + } + result = append(result, NewService(emap)) + } + + return result +} + +// JSONLdObject returns map that represents JSON LD Object. +func (doc DIDDocument) JSONLdObject() map[string]interface{} { + return doc +} + +// Authentications returns authentication array (mixture of strings and objects). +func (doc DIDDocument) Authentications() []interface{} { + return interfaceArray(doc[AuthenticationProperty]) +} + +// AssertionMethods returns assertion method array (mixture of strings and objects). +func (doc DIDDocument) AssertionMethods() []interface{} { + return interfaceArray(doc[AssertionMethodProperty]) +} + +// AgreementKeys returns agreement method array (mixture of strings and objects). +func (doc DIDDocument) AgreementKeys() []interface{} { + return interfaceArray(doc[KeyAgreementProperty]) +} + +// DelegationKeys returns delegation method array (mixture of strings and objects).
+func (doc DIDDocument) DelegationKeys() []interface{} { + return interfaceArray(doc[DelegationKeyProperty]) +} + +// InvocationKeys returns invocation method array (mixture of strings and objects). +func (doc DIDDocument) InvocationKeys() []interface{} { + return interfaceArray(doc[InvocationKeyProperty]) +} + +// DIDDocumentFromReader creates an instance of DIDDocument by reading a JSON document from Reader. +func DIDDocumentFromReader(r io.Reader) (DIDDocument, error) { + data, err := io.ReadAll(r) + if err != nil { + return nil, err + } + + return DidDocumentFromBytes(data) +} + +// DidDocumentFromBytes creates an instance of DIDDocument by reading a JSON document from bytes. +func DidDocumentFromBytes(data []byte) (DIDDocument, error) { + doc := make(DIDDocument) + err := json.Unmarshal(data, &doc) + if err != nil { + return nil, err + } + + return doc, nil +} + +// DidDocumentFromJSONLDObject creates an instance of DIDDocument from json ld object. +func DidDocumentFromJSONLDObject(jsonldObject map[string]interface{}) DIDDocument { + return jsonldObject +} + +func interfaceArray(entry interface{}) []interface{} { + if entry == nil { + return nil + } + + entries, ok := entry.([]interface{}) + if !ok { + return nil + } + + return entries +} diff --git a/pkg/document/diddocument_test.go b/pkg/document/diddocument_test.go new file mode 100644 index 0000000..9b44da1 --- /dev/null +++ b/pkg/document/diddocument_test.go @@ -0,0 +1,115 @@ +/* +Copyright Gen Digital Inc. All Rights Reserved. + +SPDX-License-Identifier: Apache-2.0 +*/ + +package document + +import ( + "testing" + + "github.com/stretchr/testify/require" +) + +func TestValid(t *testing.T) { + r := reader(t, "testdata/pk-doc.json") + + doc, err := DIDDocumentFromReader(r) + require.Nil(t, err) + require.NotNil(t, doc) + require.Equal(t, "", doc.ID()) + + publicKeys := doc.PublicKeys() + require.Equal(t, []PublicKey{ + { + "id": "key1", + "type": "JsonWebKey2020", + "purposes": []interface{}{"authentication"}, + "publicKeyJwk": map[string]interface{}{ + "kty": "EC", + "crv": "P-256K", + "x": "PUymIqdtF_qxaAqPABSw-C-owT1KYYQbsMKFM-L9fJA", + "y": "nM84jDHCMOTGTh_ZdHq4dBBdo4Z5PkEOW9jA8z8IsGc", + }, + }, + }, publicKeys) + + services := doc.Services() + require.Equal(t, []Service{ + { + "id": "hub", + "type": "IdentityHub", + "serviceEndpoint": "https://example.com/hub/", + "routingKeys": "routingKeysValue", + "recipientKeys": "recipientKeysValue", + "priority": float64(0), + }, + }, services) + + jsonld := doc.JSONLdObject() + require.NotNil(t, jsonld) + + require.Empty(t, doc.Context()) + require.Equal(t, "whatever", doc.Authentications()[0]) + + require.Equal(t, 1, len(doc.AlsoKnownAs())) + require.Equal(t, "identityURI", doc.AlsoKnownAs()[0]) + + newDoc := DidDocumentFromJSONLDObject(doc.JSONLdObject()) + require.Equal(t, newDoc, doc) +} + +func TestValidWithVerificationMethods(t *testing.T) { + r := reader(t, "testdata/vm-doc.json") + + doc, err := DIDDocumentFromReader(r) + require.Nil(t, err) + require.NotNil(t, doc) + require.Equal(t, "", doc.ID()) + + publicKeys := doc.VerificationMethods() + require.Equal(t, []PublicKey{ + { + "id": "key1", + "type": "JsonWebKey2020", + "purposes": []interface{}{"authentication"}, + "publicKeyJwk": map[string]interface{}{ + "kty": "EC", + "crv": "P-256K", + "x": "PUymIqdtF_qxaAqPABSw-C-owT1KYYQbsMKFM-L9fJA", + "y": "nM84jDHCMOTGTh_ZdHq4dBBdo4Z5PkEOW9jA8z8IsGc", + }, + }, + }, publicKeys) +} + +func TestEmptyDoc(t *testing.T) { + var bytes = []byte(`{"@context":"https://w3id.org/did/v1"}`) 
//nolint:gofumpt + + doc, err := DidDocumentFromBytes(bytes) + require.Nil(t, err) + require.NotNil(t, doc) + + require.Equal(t, 0, len(doc.PublicKeys())) + require.Equal(t, 0, len(doc.Services())) + require.Equal(t, 0, len(doc.Authentications())) + require.Equal(t, 0, len(doc.AssertionMethods())) + require.Equal(t, 0, len(doc.AgreementKeys())) + require.Equal(t, 0, len(doc.DelegationKeys())) + require.Equal(t, 0, len(doc.InvocationKeys())) +} + +func TestInvalidLists(t *testing.T) { + r := reader(t, "testdata/invalid-lists.json") + + doc, err := DIDDocumentFromReader(r) + require.Nil(t, err) + require.NotNil(t, doc) + + services := doc.Services() + require.Equal(t, 0, len(services)) + + pubKeys := doc.PublicKeys() + require.Equal(t, 0, len(pubKeys)) +} diff --git a/pkg/document/document.go b/pkg/document/document.go new file mode 100644 index 0000000..2a31477 --- /dev/null +++ b/pkg/document/document.go @@ -0,0 +1,101 @@ +/* +Copyright Gen Digital Inc. All Rights Reserved. + +SPDX-License-Identifier: Apache-2.0 +*/ + +package document + +import ( + "encoding/json" + + jsonutil "github.com/trustbloc/sidetree-go/pkg/util/json" +) + +// IDProperty describes id key. +const IDProperty = "id" + +// Document defines generic document data structure. +type Document map[string]interface{} + +// FromBytes creates an instance of Document by reading a JSON document from bytes. +func FromBytes(data []byte) (Document, error) { + doc := make(Document) + err := json.Unmarshal(data, &doc) + if err != nil { + return nil, err + } + + return doc, nil +} + +// FromJSONLDObject creates an instance of Document from json ld object. +func FromJSONLDObject(jsonldObject map[string]interface{}) Document { + return jsonldObject +} + +// ID is document identifier. +func (doc Document) ID() string { + return stringEntry(doc[IDProperty]) +} + +// Context is the context of document. +func (doc Document) Context() []interface{} { + return interfaceArray(doc[ContextProperty]) +} + +// PublicKeys in generic document are used for managing operation keys. +func (doc Document) PublicKeys() []PublicKey { + return ParsePublicKeys(doc[PublicKeyProperty]) +} + +// GetStringValue returns string value for specified key or "" if not found or wrong type. +func (doc Document) GetStringValue(key string) string { + return stringEntry(doc[key]) +} + +// Bytes returns byte representation of did document. +func (doc Document) Bytes() ([]byte, error) { + return jsonutil.MarshalCanonical(doc) +} + +// JSONLdObject returns map that represents JSON LD Object. +func (doc Document) JSONLdObject() map[string]interface{} { + return doc +} + +func stringEntry(entry interface{}) string { + if entry == nil { + return "" + } + id, ok := entry.(string) + if !ok { + return "" + } + + return id +} + +// StringArray is utility function to return string array from interface. +func StringArray(entry interface{}) []string { + if entry == nil { + return nil + } + + entries, ok := entry.([]interface{}) + if !ok { + return nil + } + + var result []string + for _, e := range entries { + val, ok := e.(string) + if !ok { + continue + } + + result = append(result, val) + } + + return result +} diff --git a/pkg/document/document_test.go b/pkg/document/document_test.go new file mode 100644 index 0000000..54a5f13 --- /dev/null +++ b/pkg/document/document_test.go @@ -0,0 +1,94 @@ +/* +Copyright Gen Digital Inc. All Rights Reserved. 
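A sketch of reading a document through the accessors above; the payload is illustrative and mirrors the shape of testdata/pk-doc.json:

```go
package main

import (
	"fmt"

	"github.com/trustbloc/sidetree-go/pkg/document"
)

func main() {
	docBytes := []byte(`{
	  "id": "did:example:123",
	  "publicKey": [{
	    "id": "key1",
	    "type": "JsonWebKey2020",
	    "purposes": ["authentication"],
	    "publicKeyJwk": {"kty": "EC", "crv": "P-256", "x": "x", "y": "y"}
	  }],
	  "service": [{"id": "hub", "type": "IdentityHub", "serviceEndpoint": "https://example.com/hub/"}]
	}`)

	doc, err := document.DidDocumentFromBytes(docBytes)
	if err != nil {
		panic(err)
	}

	pk := doc.PublicKeys()[0]
	fmt.Println(doc.ID())            // did:example:123
	fmt.Println(pk.ID(), pk.Type())  // key1 JsonWebKey2020
	fmt.Println(len(doc.Services())) // 1
}
```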
+ +SPDX-License-Identifier: Apache-2.0 +*/ + +package document + +import ( + "io" + "os" + "testing" + + "github.com/stretchr/testify/require" +) + +func TestFromBytes(t *testing.T) { + r := reader(t, "testdata/pk-doc.json") + + data, err := io.ReadAll(r) + require.Nil(t, err) + + doc, err := FromBytes(data) + require.Nil(t, err) + require.NotNil(t, doc) + require.Equal(t, "", doc.ID()) + require.Equal(t, 1, len(doc.PublicKeys())) + require.Equal(t, 0, len(doc.Context())) + + bytes, err := doc.Bytes() + require.Nil(t, err) + require.NotEmpty(t, bytes) + + jsonld := doc.JSONLdObject() + require.NotNil(t, jsonld) + + newDoc := FromJSONLDObject(jsonld) + require.Equal(t, doc.ID(), newDoc.ID()) +} + +func TestFromBytesError(t *testing.T) { + doc, err := FromBytes([]byte("[test : 123]")) + require.NotNil(t, err) + require.Nil(t, doc) + require.Contains(t, err.Error(), "invalid character") +} + +func TestMarshalError(t *testing.T) { + doc := Document{} + doc["test"] = make(chan int) + + bytes, err := doc.Bytes() + require.NotNil(t, err) + require.Nil(t, bytes) + require.Contains(t, err.Error(), "json: unsupported type: chan int") +} + +func TestGetStringValue(t *testing.T) { + const key = "key" + const value = "value" + + doc := Document{} + doc[key] = value + + require.Equal(t, value, doc.GetStringValue(key)) + + doc[key] = []string{"hello"} + require.Equal(t, "", doc.GetStringValue(key)) +} + +func TestStringEntry(t *testing.T) { + // not a string + str := stringEntry([]string{"hello"}) + require.Empty(t, str) + + str = stringEntry("hello") + require.Equal(t, "hello", str) +} + +func TestArrayStringEntry(t *testing.T) { + arr := StringArray(nil) + require.Nil(t, arr) + + // not an array + arr = StringArray("hello") + require.Nil(t, arr) +} + +func reader(t *testing.T, filename string) io.Reader { + f, err := os.Open(filename) + require.Nil(t, err) + + return f +} diff --git a/pkg/document/jwk.go b/pkg/document/jwk.go new file mode 100644 index 0000000..ca335f8 --- /dev/null +++ b/pkg/document/jwk.go @@ -0,0 +1,57 @@ +/* +Copyright Gen Digital Inc. All Rights Reserved. + +SPDX-License-Identifier: Apache-2.0 +*/ + +package document + +import "errors" + +// JWK represents public key in JWK format. +type JWK map[string]interface{} + +// NewJWK creates new JWK. +func NewJWK(jwk map[string]interface{}) JWK { + return jwk +} + +// Kty is key type. +func (jwk JWK) Kty() string { + return stringEntry(jwk["kty"]) +} + +// Crv is curve. +func (jwk JWK) Crv() string { + return stringEntry(jwk["crv"]) +} + +// X is x. +func (jwk JWK) X() string { + return stringEntry(jwk["x"]) +} + +// Y is y. +func (jwk JWK) Y() string { + return stringEntry(jwk["y"]) +} + +// Validate will validate JWK properties. +func (jwk JWK) Validate() error { + // TODO: validation of the JWK fields depends on the algorithm (issue-409) + // For now check required fields for currently supported algorithms secp256k1, P-256, P-384, P-521 and Ed25519 + + if jwk.Crv() == "" { + return errors.New("JWK crv is missing") + } + + if jwk.Kty() == "" { + return errors.New("JWK kty is missing") + } + + if jwk.X() == "" { + return errors.New("JWK x is missing") + } + + return nil +} diff --git a/pkg/document/jwk_test.go b/pkg/document/jwk_test.go new file mode 100644 index 0000000..7f78de2 --- /dev/null +++ b/pkg/document/jwk_test.go @@ -0,0 +1,74 @@ +/* +Copyright Gen Digital Inc. All Rights Reserved.
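A sketch of the JWK validation above: Validate currently requires crv, kty and x, while y stays optional (e.g. for Ed25519 keys). The key values here are placeholders.

```go
package main

import (
	"fmt"

	"github.com/trustbloc/sidetree-go/pkg/document"
)

func main() {
	// All required fields present; illustrative values.
	ok := document.NewJWK(map[string]interface{}{
		"kty": "OKP", "crv": "Ed25519", "x": "base64url-encoded-key",
	})
	fmt.Println(ok.Validate()) // <nil>

	// Missing "x" fails validation.
	bad := document.NewJWK(map[string]interface{}{"kty": "EC", "crv": "P-256"})
	fmt.Println(bad.Validate()) // JWK x is missing
}
```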
+
+SPDX-License-Identifier: Apache-2.0
+*/
+
+package document
+
+import (
+	"testing"
+
+	"github.com/stretchr/testify/require"
+)
+
+func TestJWK(t *testing.T) {
+	jwk := NewJWK(map[string]interface{}{})
+	require.Empty(t, jwk.Kty())
+	require.Empty(t, jwk.Crv())
+	require.Empty(t, jwk.X())
+	require.Empty(t, jwk.Y())
+
+	jwk = NewJWK(map[string]interface{}{
+		"kty": "kty",
+		"crv": "crv",
+		"x":   "x",
+		"y":   "y",
+	})
+
+	require.Equal(t, "kty", jwk.Kty())
+	require.Equal(t, "crv", jwk.Crv())
+	require.Equal(t, "x", jwk.X())
+	require.Equal(t, "y", jwk.Y())
+}
+
+func TestValidate(t *testing.T) {
+	t.Run("missing kty", func(t *testing.T) {
+		jwk := JWK{
+			"kty": "",
+			"crv": "crv",
+			"x":   "x",
+			"y":   "y",
+		}
+
+		err := jwk.Validate()
+		require.Error(t, err)
+		require.Contains(t, err.Error(), "JWK kty is missing")
+	})
+
+	t.Run("missing crv", func(t *testing.T) {
+		jwk := JWK{
+			"kty": "kty",
+			"crv": "",
+			"x":   "x",
+			"y":   "y",
+		}
+
+		err := jwk.Validate()
+		require.Error(t, err)
+		require.Contains(t, err.Error(), "JWK crv is missing")
+	})
+
+	t.Run("missing x", func(t *testing.T) {
+		jwk := JWK{
+			"kty": "kty",
+			"crv": "crv",
+			"x":   "",
+			"y":   "y",
+		}
+
+		err := jwk.Validate()
+		require.Error(t, err)
+		require.Contains(t, err.Error(), "JWK x is missing")
+	})
+}
diff --git a/pkg/document/publickey.go b/pkg/document/publickey.go
new file mode 100644
index 0000000..d89fdd3
--- /dev/null
+++ b/pkg/document/publickey.go
@@ -0,0 +1,102 @@
+/*
+Copyright Gen Digital Inc. All Rights Reserved.
+
+SPDX-License-Identifier: Apache-2.0
+*/
+
+package document
+
+const (
+
+	// ControllerProperty defines key for controller.
+	ControllerProperty = "controller"
+
+	// PurposesProperty describes key purposes property.
+	PurposesProperty = "purposes"
+
+	// PublicKeyJwkProperty describes external public key JWK.
+	PublicKeyJwkProperty = "publicKeyJwk"
+
+	// TypeProperty describes type.
+	TypeProperty = "type"
+
+	// PublicKeyBase58Property defines base58 encoding for public key.
+	PublicKeyBase58Property = "publicKeyBase58"
+
+	// PublicKeyMultibaseProperty defines multibase encoding for public key.
+	PublicKeyMultibaseProperty = "publicKeyMultibase"
+)
+
+// KeyPurpose defines key purpose.
+type KeyPurpose string
+
+const (
+	// KeyPurposeAuthentication defines key purpose as authentication key.
+	KeyPurposeAuthentication = "authentication"
+	// KeyPurposeAssertionMethod defines key purpose as assertion key.
+	KeyPurposeAssertionMethod = "assertionMethod"
+	// KeyPurposeKeyAgreement defines key purpose as agreement key.
+	KeyPurposeKeyAgreement = "keyAgreement"
+	// KeyPurposeCapabilityDelegation defines key purpose as delegation key.
+	KeyPurposeCapabilityDelegation = "capabilityDelegation"
+	// KeyPurposeCapabilityInvocation defines key purpose as invocation key.
+	KeyPurposeCapabilityInvocation = "capabilityInvocation"
+)
+
+// PublicKey must include id and type properties, and exactly one value property.
+type PublicKey map[string]interface{}
+
+// NewPublicKey creates new public key.
+func NewPublicKey(pk map[string]interface{}) PublicKey {
+	return pk
+}
+
+// ID is public key ID.
+func (pk PublicKey) ID() string {
+	return stringEntry(pk[IDProperty])
+}
+
+// Type is public key type.
+func (pk PublicKey) Type() string {
+	return stringEntry(pk[TypeProperty])
+}
+
+// Controller identifies the entity that controls the corresponding private key.
+func (pk PublicKey) Controller() string {
+	return stringEntry(pk[ControllerProperty])
+}
+
+// PublicKeyJwk is the value property for a key in JWK format.
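+// It returns nil when the property is absent or is not a JSON object.
+//
+// Illustrative sketch only (not part of the imported code; key values are
+// placeholders):
+//
+//	pk := NewPublicKey(map[string]interface{}{
+//		"publicKeyJwk": map[string]interface{}{
+//			"kty": "EC", "crv": "P-256", "x": "<x>", "y": "<y>",
+//		},
+//	})
+//	jwk := pk.PublicKeyJwk() // jwk.Crv() == "P-256"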
+func (pk PublicKey) PublicKeyJwk() JWK { + entry, ok := pk[PublicKeyJwkProperty] + if !ok { + return nil + } + + json, ok := entry.(map[string]interface{}) + if !ok { + return nil + } + + return NewJWK(json) +} + +// PublicKeyBase58 is base58 encoded public key. +func (pk PublicKey) PublicKeyBase58() string { + return stringEntry(pk[PublicKeyBase58Property]) +} + +// PublicKeyMultibase is multibase public key. +func (pk PublicKey) PublicKeyMultibase() string { + return stringEntry(pk[PublicKeyMultibaseProperty]) +} + +// Purpose describes key purpose. +func (pk PublicKey) Purpose() []string { + return StringArray(pk[PurposesProperty]) +} + +// JSONLdObject returns map that represents JSON LD Object. +func (pk PublicKey) JSONLdObject() map[string]interface{} { + return pk +} diff --git a/pkg/document/publickey_test.go b/pkg/document/publickey_test.go new file mode 100644 index 0000000..8055992 --- /dev/null +++ b/pkg/document/publickey_test.go @@ -0,0 +1,59 @@ +/* +Copyright Gen Digital Inc. All Rights Reserved. + +SPDX-License-Identifier: Apache-2.0 +*/ + +package document + +import ( + "testing" + + "github.com/stretchr/testify/require" +) + +func TestPublicKey(t *testing.T) { + pk := NewPublicKey(map[string]interface{}{}) + require.Empty(t, pk.ID()) + require.Empty(t, pk.Type()) + require.Empty(t, pk.Controller()) + + pk = NewPublicKey(map[string]interface{}{ + "id": "did:example:123456789abcdefghi#keys-1", + "type": "JsonWebKey2020", + "controller": "did:example:123456789abcdefghi", + }) + require.Equal(t, "did:example:123456789abcdefghi#keys-1", pk.ID()) + require.Equal(t, "JsonWebKey2020", pk.Type()) + require.Equal(t, "did:example:123456789abcdefghi", pk.Controller()) + require.Empty(t, pk.Purpose()) + require.Empty(t, pk.PublicKeyJwk()) + require.Empty(t, pk.PublicKeyBase58()) + require.Empty(t, pk.PublicKeyMultibase()) + + require.NotEmpty(t, pk.JSONLdObject()) +} + +func TestPublicKeyJWK(t *testing.T) { + pk := NewPublicKey(map[string]interface{}{ + "publicKeyJwk": map[string]interface{}{ + "kty": "kty", + "crv": "crv", + "x": "x", + "y": "y", + }, + }) + + jwk := pk.PublicKeyJwk() + require.Equal(t, "kty", jwk.Kty()) + require.Equal(t, "crv", jwk.Crv()) + require.Equal(t, "x", jwk.X()) + require.Equal(t, "y", jwk.Y()) + + pk = NewPublicKey(map[string]interface{}{ + "publicKeyJwk": "invalid", + }) + + jwk = pk.PublicKeyJwk() + require.Nil(t, jwk) +} diff --git a/pkg/document/replace.go b/pkg/document/replace.go new file mode 100644 index 0000000..97f36fc --- /dev/null +++ b/pkg/document/replace.go @@ -0,0 +1,54 @@ +/* +Copyright Gen Digital Inc. All Rights Reserved. + +SPDX-License-Identifier: Apache-2.0 +*/ + +package document + +import ( + "encoding/json" +) + +const ( + + // ReplaceServiceProperty defines key for service property. + ReplaceServiceProperty = "services" + + // ReplacePublicKeyProperty defines key for public key property. + ReplacePublicKeyProperty = "publicKeys" +) + +// ReplaceDocument defines replace document data structure. +type ReplaceDocument map[string]interface{} + +// ReplaceDocumentFromBytes creates an instance of replace document (for 'replace' patch, may be used for replace action). +func ReplaceDocumentFromBytes(data []byte) (ReplaceDocument, error) { + doc := make(ReplaceDocument) + err := json.Unmarshal(data, &doc) + if err != nil { + return nil, err + } + + return doc, nil +} + +// ReplaceDocumentFromJSONLDObject creates an instance of ReplaceDocument from json ld object. 
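+// The returned ReplaceDocument is a view over the same underlying map as the input.
+//
+// Round-trip sketch (illustrative only, assuming previously parsed data):
+//
+//	doc, _ := ReplaceDocumentFromBytes(data)
+//	doc2 := ReplaceDocumentFromJSONLDObject(doc.JSONLdObject())
+//	// doc2 and doc share the same underlying map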
+func ReplaceDocumentFromJSONLDObject(jsonldObject map[string]interface{}) ReplaceDocument {
+	return jsonldObject
+}
+
+// PublicKeys returns public keys for replace document.
+func (doc ReplaceDocument) PublicKeys() []PublicKey {
+	return ParsePublicKeys(doc[ReplacePublicKeyProperty])
+}
+
+// Services returns services for replace document.
+func (doc ReplaceDocument) Services() []Service {
+	return ParseServices(doc[ReplaceServiceProperty])
+}
+
+// JSONLdObject returns map that represents JSON LD Object.
+func (doc ReplaceDocument) JSONLdObject() map[string]interface{} {
+	return doc
+}
diff --git a/pkg/document/replace_test.go b/pkg/document/replace_test.go
new file mode 100644
index 0000000..2f9ce93
--- /dev/null
+++ b/pkg/document/replace_test.go
@@ -0,0 +1,55 @@
+/*
+Copyright Gen Digital Inc. All Rights Reserved.
+
+SPDX-License-Identifier: Apache-2.0
+*/
+
+package document
+
+import (
+	"testing"
+
+	"github.com/stretchr/testify/require"
+)
+
+func TestReplaceDocumentFromBytes(t *testing.T) {
+	doc, err := ReplaceDocumentFromBytes([]byte(replaceDoc))
+	require.Nil(t, err)
+	require.NotNil(t, doc)
+	require.Equal(t, 1, len(doc.PublicKeys()))
+	require.Equal(t, 1, len(doc.Services()))
+
+	jsonld := doc.JSONLdObject()
+	require.NotNil(t, jsonld)
+
+	newDoc := ReplaceDocumentFromJSONLDObject(jsonld)
+	require.Equal(t, doc.PublicKeys()[0], newDoc.PublicKeys()[0])
+}
+
+func TestReplaceDocumentFromBytesError(t *testing.T) {
+	doc, err := ReplaceDocumentFromBytes([]byte("[test : 123]"))
+	require.NotNil(t, err)
+	require.Nil(t, doc)
+	require.Contains(t, err.Error(), "invalid character")
+}
+
+const replaceDoc = `{
+  "publicKeys": [
+    {
+      "id": "key-1",
+      "purposes": ["authentication"],
+      "type": "EcdsaSecp256k1VerificationKey2019",
+      "publicKeyJwk": {
+        "kty": "EC",
+        "crv": "P-256K",
+        "x": "PUymIqdtF_qxaAqPABSw-C-owT1KYYQbsMKFM-L9fJA",
+        "y": "nM84jDHCMOTGTh_ZdHq4dBBdo4Z5PkEOW9jA8z8IsGc"
+      }
+    }],
+  "services": [
+    {
+      "id": "sds3",
+      "type": "SecureDataStore",
+      "serviceEndpoint": "http://hub.my-personal-server.com"
+    }]
+}`
diff --git a/pkg/document/resolution.go b/pkg/document/resolution.go
new file mode 100644
index 0000000..6a4bdbe
--- /dev/null
+++ b/pkg/document/resolution.go
@@ -0,0 +1,106 @@
+/*
+Copyright Gen Digital Inc. All Rights Reserved.
+
+SPDX-License-Identifier: Apache-2.0
+*/
+
+package document
+
+import "github.com/trustbloc/sidetree-go/pkg/api/operation"
+
+// ResolutionResult describes resolution result.
+type ResolutionResult struct {
+	Context          interface{} `json:"@context"`
+	Document         Document    `json:"didDocument"`
+	DocumentMetadata Metadata    `json:"didDocumentMetadata,omitempty"`
+}
+
+// Metadata can contain various types of metadata, such as document metadata and method metadata.
+type Metadata map[string]interface{}
+
+const (
+	// UpdateCommitmentProperty is update commitment key.
+	UpdateCommitmentProperty = "updateCommitment"
+
+	// RecoveryCommitmentProperty is recovery commitment key.
+	RecoveryCommitmentProperty = "recoveryCommitment"
+
+	// PublishedProperty is published key.
+	PublishedProperty = "published"
+
+	// DeactivatedProperty is deactivated flag key.
+	DeactivatedProperty = "deactivated"
+
+	// AnchorOriginProperty is anchor origin key.
+	AnchorOriginProperty = "anchorOrigin"
+
+	// CreatedProperty is the time that the document was created - anchoring time of the first successful create operation.
+	CreatedProperty = "created"
+
+	// UpdatedProperty is the time of the last document update - anchoring time of update/recover operations.
+ UpdatedProperty = "updated" + + // VersionIDProperty is version ID key. + VersionIDProperty = "versionId" + + // CanonicalIDProperty is canonical ID key. + CanonicalIDProperty = "canonicalId" + + // EquivalentIDProperty is equivalent ID array. + EquivalentIDProperty = "equivalentId" + + // MethodProperty is used for method metadata within did document metadata. + MethodProperty = "method" + + // UnpublishedOperationsProperty holds unpublished did operations. + UnpublishedOperationsProperty = "unpublishedOperations" + + // PublishedOperationsProperty holds published did operations. + PublishedOperationsProperty = "publishedOperations" +) + +// ResolutionOption is an option for specifying the resolution options for various resolvers. +type ResolutionOption func(opts *ResolutionOptions) + +// ResolutionOptions represent resolution options. +type ResolutionOptions struct { + AdditionalOperations []*operation.AnchoredOperation + VersionID string + VersionTime string +} + +// WithAdditionalOperations sets the additional operations to be used in a Resolve call. +func WithAdditionalOperations(additionalOperations []*operation.AnchoredOperation) ResolutionOption { + return func(opts *ResolutionOptions) { + if len(additionalOperations) > 0 { + opts.AdditionalOperations = additionalOperations + } + } +} + +// WithVersionID sets the version ID to be used in a Resolve call. +func WithVersionID(versionID string) ResolutionOption { + return func(opts *ResolutionOptions) { + opts.VersionID = versionID + } +} + +// WithVersionTime sets the version time to be used in a Resolve call. +func WithVersionTime(versionTime string) ResolutionOption { + return func(opts *ResolutionOptions) { + opts.VersionTime = versionTime + } +} + +// GetResolutionOptions returns resolution options. +func GetResolutionOptions(opts ...ResolutionOption) (ResolutionOptions, error) { + options := ResolutionOptions{} + + for _, option := range opts { + if option != nil { + option(&options) + } + } + + return options, nil +} diff --git a/pkg/document/resolution_test.go b/pkg/document/resolution_test.go new file mode 100644 index 0000000..6754a63 --- /dev/null +++ b/pkg/document/resolution_test.go @@ -0,0 +1,27 @@ +/* +Copyright Gen Digital Inc. All Rights Reserved. + +SPDX-License-Identifier: Apache-2.0 +*/ + +package document + +import ( + "testing" + + "github.com/stretchr/testify/require" + + "github.com/trustbloc/sidetree-go/pkg/api/operation" +) + +func TestGetOptions(t *testing.T) { + const verTime = "2021-05-10T17:00:00Z" + const verID = "ver" + + opts, err := GetResolutionOptions(WithAdditionalOperations([]*operation.AnchoredOperation{{Type: "create"}}), + WithVersionID(verID), WithVersionTime(verTime)) + require.NoError(t, err) + require.Equal(t, 1, len(opts.AdditionalOperations)) + require.Equal(t, verID, opts.VersionID) + require.Equal(t, verTime, opts.VersionTime) +} diff --git a/pkg/document/service.go b/pkg/document/service.go new file mode 100644 index 0000000..9c3da69 --- /dev/null +++ b/pkg/document/service.go @@ -0,0 +1,38 @@ +/* +Copyright Gen Digital Inc. All Rights Reserved. + +SPDX-License-Identifier: Apache-2.0 +*/ + +package document + +// ServiceEndpointProperty describes external service endpoint property. +const ServiceEndpointProperty = "serviceEndpoint" + +// Service represents any type of service the entity wishes to advertise. +type Service map[string]interface{} + +// NewService creates new service. +func NewService(m map[string]interface{}) Service { + return m +} + +// ID is service ID. 
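+// Illustrative usage (a sketch, not part of the imported code):
+//
+//	svc := NewService(map[string]interface{}{
+//		"id": "hub", "type": "IdentityHub", "serviceEndpoint": "https://example.com/hub/",
+//	})
+//	_ = svc.ID() // "hub"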
+func (s Service) ID() string { + return stringEntry(s[IDProperty]) +} + +// Type is service type. +func (s Service) Type() string { + return stringEntry(s[TypeProperty]) +} + +// ServiceEndpoint is service endpoint. +func (s Service) ServiceEndpoint() interface{} { + return s[ServiceEndpointProperty] +} + +// JSONLdObject returns map that represents JSON LD Object. +func (s Service) JSONLdObject() map[string]interface{} { + return s +} diff --git a/pkg/document/service_test.go b/pkg/document/service_test.go new file mode 100644 index 0000000..ac91649 --- /dev/null +++ b/pkg/document/service_test.go @@ -0,0 +1,29 @@ +/* +Copyright Gen Digital Inc. All Rights Reserved. + +SPDX-License-Identifier: Apache-2.0 +*/ + +package document + +import ( + "testing" + + "github.com/stretchr/testify/require" +) + +func TestService(t *testing.T) { + svc := NewService(map[string]interface{}{}) + require.Empty(t, svc.Type()) + + svc = NewService(map[string]interface{}{ + "id": "did:example:123456789abcdefghi;openid", + "type": "OpenIdConnectVersion3.1Service", + "serviceEndpoint": "https://openid.example.com/", + }) + require.Equal(t, "did:example:123456789abcdefghi;openid", svc.ID()) + require.Equal(t, "OpenIdConnectVersion3.1Service", svc.Type()) + require.Equal(t, "https://openid.example.com/", svc.ServiceEndpoint()) + + require.NotEmpty(t, svc.JSONLdObject()) +} diff --git a/pkg/document/testdata/invalid-lists.json b/pkg/document/testdata/invalid-lists.json new file mode 100644 index 0000000..9611b0b --- /dev/null +++ b/pkg/document/testdata/invalid-lists.json @@ -0,0 +1,14 @@ +{ + "publicKey": + { + "id": "key2", + "type": "RsaVerificationKey2018", + "publicKeyPem": "-----BEGIN PUBLIC KEY.2.END PUBLIC KEY-----" + }, + "service": + { + "id": "IdentityHub", + "type": "IdentityHub", + "serviceEndpoint": "" + } +} \ No newline at end of file diff --git a/pkg/document/testdata/pk-doc.json b/pkg/document/testdata/pk-doc.json new file mode 100644 index 0000000..35c6f7a --- /dev/null +++ b/pkg/document/testdata/pk-doc.json @@ -0,0 +1,27 @@ +{ + "publicKey": [ + { + "id": "key1", + "type": "JsonWebKey2020", + "purposes": ["authentication"], + "publicKeyJwk": { + "kty": "EC", + "crv": "P-256K", + "x": "PUymIqdtF_qxaAqPABSw-C-owT1KYYQbsMKFM-L9fJA", + "y": "nM84jDHCMOTGTh_ZdHq4dBBdo4Z5PkEOW9jA8z8IsGc" + } + } + ], + "service": [ + { + "id": "hub", + "type": "IdentityHub", + "serviceEndpoint": "https://example.com/hub/", + "routingKeys": "routingKeysValue", + "recipientKeys": "recipientKeysValue", + "priority": 0 + } + ], + "authentication": ["whatever"], + "alsoKnownAs": ["identityURI"] +} diff --git a/pkg/document/testdata/vm-doc.json b/pkg/document/testdata/vm-doc.json new file mode 100644 index 0000000..635cfce --- /dev/null +++ b/pkg/document/testdata/vm-doc.json @@ -0,0 +1,15 @@ +{ + "verificationMethod": [ + { + "id": "key1", + "type": "JsonWebKey2020", + "purposes": ["authentication"], + "publicKeyJwk": { + "kty": "EC", + "crv": "P-256K", + "x": "PUymIqdtF_qxaAqPABSw-C-owT1KYYQbsMKFM-L9fJA", + "y": "nM84jDHCMOTGTh_ZdHq4dBBdo4Z5PkEOW9jA8z8IsGc" + } + } + ] +} diff --git a/pkg/docutil/doc.go b/pkg/docutil/doc.go new file mode 100644 index 0000000..96e0f08 --- /dev/null +++ b/pkg/docutil/doc.go @@ -0,0 +1,40 @@ +/* +Copyright Gen Digital Inc. All Rights Reserved. 
+
+SPDX-License-Identifier: Apache-2.0
+*/
+
+package docutil
+
+import (
+	"strings"
+
+	"github.com/pkg/errors"
+
+	"github.com/trustbloc/sidetree-go/pkg/hashing"
+)
+
+// NamespaceDelimiter is the delimiter that separates the namespace from the unique suffix.
+const NamespaceDelimiter = ":"
+
+// CalculateID calculates the ID from model and namespace.
+func CalculateID(namespace string, value interface{}, hashAlgorithmAsMultihashCode uint) (string, error) {
+	uniqueSuffix, err := hashing.CalculateModelMultihash(value, hashAlgorithmAsMultihashCode)
+	if err != nil {
+		return "", err
+	}
+
+	didID := namespace + NamespaceDelimiter + uniqueSuffix
+
+	return didID, nil
+}
+
+// GetNamespaceFromID returns namespace from ID.
+func GetNamespaceFromID(id string) (string, error) {
+	pos := strings.LastIndex(id, ":")
+	if pos == -1 {
+		return "", errors.Errorf("invalid ID [%s]", id)
+	}
+
+	return id[0:pos], nil
+}
diff --git a/pkg/docutil/doc_test.go b/pkg/docutil/doc_test.go
new file mode 100644
index 0000000..71f33d0
--- /dev/null
+++ b/pkg/docutil/doc_test.go
@@ -0,0 +1,75 @@
+/*
+Copyright Gen Digital Inc. All Rights Reserved.
+
+SPDX-License-Identifier: Apache-2.0
+*/
+
+package docutil
+
+import (
+	"testing"
+
+	"github.com/stretchr/testify/require"
+)
+
+const (
+	sha2_256  uint = 18
+	namespace      = "did:sidetree"
+)
+
+func TestCalculateID(t *testing.T) {
+	t.Run("success", func(t *testing.T) {
+		id, err := CalculateID(namespace, suffixDataObject, sha2_256)
+		require.Nil(t, err)
+		require.Equal(t, namespace+NamespaceDelimiter+expectedSuffixForSuffixObject, id)
+	})
+
+	t.Run("error - multihash algorithm not supported", func(t *testing.T) {
+		id, err := CalculateID(namespace, suffixDataObject, 55)
+		require.NotNil(t, err)
+		require.Empty(t, id)
+		require.Contains(t, err.Error(), "algorithm not supported, unable to compute hash")
+	})
+}
+
+func TestDidCalculationError(t *testing.T) {
+	// a non-supported multihash code will cause an error
+	id, err := CalculateID(namespace, suffixDataObject, 55)
+	require.NotNil(t, err)
+	require.Empty(t, id)
+	require.Contains(t, err.Error(), "algorithm not supported, unable to compute hash")
+
+	// the payload has to be a JSON object in order to canonicalize
+	id, err = CalculateID(namespace, "!!!", sha2_256)
+	require.NotNil(t, err)
+	require.Empty(t, id)
+	require.Contains(t, err.Error(), "Expected '{'")
+}
+
+func TestNamespaceFromID(t *testing.T) {
+	const namespace = "did:sidetree"
+	const suffix = "123456"
+
+	t.Run("Valid ID", func(t *testing.T) {
+		ns, err := GetNamespaceFromID(namespace + NamespaceDelimiter + suffix)
+		require.NoError(t, err)
+		require.Equal(t, namespace, ns)
+	})
+
+	t.Run("Invalid ID", func(t *testing.T) {
+		ns, err := GetNamespaceFromID(suffix)
+		require.Error(t, err)
+		require.Contains(t, err.Error(), "invalid ID")
+		require.Empty(t, ns)
+	})
+}
+
+var suffixDataObject = &struct {
+	DeltaHash          string `json:"deltaHash,omitempty"`
+	RecoveryCommitment string `json:"recoveryCommitment,omitempty"`
+}{
+	DeltaHash:          "EiBOmkP6kn7yjt0VocmcPu9OQOsZi199Evh-xB48ebubQA",
+	RecoveryCommitment: "EiAAZJYry29vICkwmso8FL92WAISMAhsL8xkCm8dYVnq_w",
+}
+
+const expectedSuffixForSuffixObject = "EiA5vyaRzJIxbkuZbvwEXiC__u8ieFx50TAAo98tBzCuyA"
diff --git a/pkg/docutil/docutil.go b/pkg/docutil/docutil.go
new file mode 100644
index 0000000..457efdf
--- /dev/null
+++ b/pkg/docutil/docutil.go
@@ -0,0 +1,119 @@
+/*
+ Copyright SecureKey Technologies Inc.
+
+ This file contains software code that is the intellectual property of SecureKey.
+ SecureKey reserves all rights in the code and you may not use it without
+ written permission from SecureKey.
+*/
+
+package docutil
+
+import (
+	"errors"
+	"fmt"
+	"strings"
+	"time"
+
+	"github.com/trustbloc/sidetree-go/pkg/api/operation"
+	"github.com/trustbloc/sidetree-go/pkg/api/protocol"
+	"github.com/trustbloc/sidetree-go/pkg/document"
+)
+
+// GetTransformationInfoForPublished will create transformation info object for published document.
+func GetTransformationInfoForPublished(namespace, id, suffix string,
+	internalResult *protocol.ResolutionModel) protocol.TransformationInfo {
+	ti := make(protocol.TransformationInfo)
+	ti[document.IDProperty] = id
+	ti[document.PublishedProperty] = true
+
+	canonicalRef := ""
+	if internalResult.CanonicalReference != "" {
+		canonicalRef = NamespaceDelimiter + internalResult.CanonicalReference
+	}
+
+	canonicalID := namespace + canonicalRef + NamespaceDelimiter + suffix
+
+	// we should always set canonical id if document has been published
+	ti[document.CanonicalIDProperty] = canonicalID
+
+	equivalentIDs := []string{canonicalID}
+
+	if len(internalResult.EquivalentReferences) > 0 {
+		for _, eqRef := range internalResult.EquivalentReferences {
+			equivalentID := namespace + NamespaceDelimiter + eqRef + NamespaceDelimiter + suffix
+			equivalentIDs = append(equivalentIDs, equivalentID)
+		}
+	}
+
+	// equivalent ids should always include canonical id (if specified)
+	ti[document.EquivalentIDProperty] = equivalentIDs
+
+	return ti
+}
+
+// GetTransformationInfoForUnpublished will create transformation info object for unpublished document.
+func GetTransformationInfoForUnpublished(namespace, domain, label, suffix, createRequestJCS string) protocol.TransformationInfo {
+	ti := make(protocol.TransformationInfo)
+	ti[document.PublishedProperty] = false
+
+	id := fmt.Sprintf("%s:%s", namespace, suffix)
+
+	// For interim/unpublished documents we should set optional label if specified.
+	if label != "" {
+		id = fmt.Sprintf("%s:%s:%s", namespace, label, suffix)
+	}
+
+	var equivalentIDs []string
+
+	if createRequestJCS != "" {
+		// we should always set short form equivalent id for long form resolution
+		equivalentIDs = append(equivalentIDs, id)
+	}
+
+	// Also, if optional domain is specified, we should set equivalent id with domain hint
+	if label != "" && domain != "" {
+		equivalentID := id
+		if !strings.Contains(label, domain) {
+			equivalentID = fmt.Sprintf("%s:%s:%s:%s", namespace, domain, label, suffix)
+		}
+
+		equivalentIDs = append(equivalentIDs, equivalentID)
+	}
+
+	if len(equivalentIDs) > 0 {
+		ti[document.EquivalentIDProperty] = equivalentIDs
+	}
+
+	if createRequestJCS != "" {
+		id = fmt.Sprintf("%s:%s", id, createRequestJCS)
+	}
+
+	ti[document.IDProperty] = id
+
+	return ti
+}
+
+// GetCreateResult applies the given (not yet anchored) create operation and returns the resulting resolution model.
+func GetCreateResult(op *operation.Operation, pv protocol.Version) (*protocol.ResolutionModel, error) {
+	// we can use operation applier to generate create response even though operation is not anchored yet
+	anchored := &operation.AnchoredOperation{
+		Type:             op.Type,
+		UniqueSuffix:     op.UniqueSuffix,
+		OperationRequest: op.OperationRequest,
+		TransactionTime:  uint64(time.Now().Unix()),
+		ProtocolVersion:  pv.Protocol().GenesisTime,
+		AnchorOrigin:     op.AnchorOrigin,
+	}
+
+	rm := &protocol.ResolutionModel{UnpublishedOperations: []*operation.AnchoredOperation{anchored}}
+	rm, err := pv.OperationApplier().Apply(anchored, rm)
+	if err != nil {
+		return nil, err
+	}
+
+	// if the returned document is empty (e.g. applying patches failed) we can reject this request at the API level
+	if len(rm.Doc.JSONLdObject()) == 0 {
+		return nil, errors.New("applying delta resulted in an empty document (most likely due to an invalid patch)")
+	}
+
+	return rm, nil
+}
diff --git a/pkg/encoder/encoder.go b/pkg/encoder/encoder.go
new file mode 100644
index 0000000..3b86d9f
--- /dev/null
+++ b/pkg/encoder/encoder.go
@@ -0,0 +1,19 @@
+/*
+Copyright Gen Digital Inc. All Rights Reserved.
+
+SPDX-License-Identifier: Apache-2.0
+*/
+
+package encoder
+
+import "encoding/base64"
+
+// EncodeToString encodes the bytes to string.
+func EncodeToString(data []byte) string {
+	return base64.RawURLEncoding.EncodeToString(data)
+}
+
+// DecodeString decodes the encoded content to bytes.
+func DecodeString(encodedContent string) ([]byte, error) {
+	return base64.RawURLEncoding.DecodeString(encodedContent)
+}
diff --git a/pkg/encoder/encoder_test.go b/pkg/encoder/encoder_test.go
new file mode 100644
index 0000000..df1cf3a
--- /dev/null
+++ b/pkg/encoder/encoder_test.go
@@ -0,0 +1,24 @@
+/*
+Copyright Gen Digital Inc. All Rights Reserved.
+
+SPDX-License-Identifier: Apache-2.0
+*/
+
+package encoder
+
+import (
+	"testing"
+
+	"github.com/stretchr/testify/require"
+)
+
+func TestEncodeAndDecodeAsString(t *testing.T) {
+	data := "Hello World"
+	encoded := EncodeToString([]byte(data))
+	require.NotNil(t, encoded)
+
+	decodedBytes, err := DecodeString(encoded)
+	require.Nil(t, err)
+	require.NotNil(t, decodedBytes)
+	require.EqualValues(t, "Hello World", decodedBytes)
+}
diff --git a/pkg/hashing/hash.go b/pkg/hashing/hash.go
new file mode 100644
index 0000000..73ab1c2
--- /dev/null
+++ b/pkg/hashing/hash.go
@@ -0,0 +1,144 @@
+/*
+Copyright Gen Digital Inc. All Rights Reserved.
+
+SPDX-License-Identifier: Apache-2.0
+*/
+
+package hashing
+
+import (
+	"crypto"
+	"errors"
+	"fmt"
+
+	"github.com/multiformats/go-multihash"
+
+	"github.com/trustbloc/sidetree-go/pkg/canonicalizer"
+	"github.com/trustbloc/sidetree-go/pkg/encoder"
+)
+
+// ComputeMultihash will compute the hash for the supplied bytes using multihash code.
+func ComputeMultihash(multihashCode uint, bytes []byte) ([]byte, error) {
+	hash, err := GetHashFromMultihash(multihashCode)
+	if err != nil {
+		return nil, err
+	}
+
+	hashedBytes, err := GetHash(hash, bytes)
+	if err != nil {
+		return nil, err
+	}
+
+	return multihash.Encode(hashedBytes, uint64(multihashCode))
+}
+
+// GetHashFromMultihash will return hash based on specified multihash code.
+func GetHashFromMultihash(multihashCode uint) (h crypto.Hash, err error) {
+	switch multihashCode {
+	case multihash.SHA2_256:
+		h = crypto.SHA256
+	case multihash.SHA2_512:
+		h = crypto.SHA512
+	default:
+		err = fmt.Errorf("algorithm not supported, unable to compute hash")
+	}
+
+	return h, err
+}
+
+// IsSupportedMultihash checks to see if the given encoded hash has been hashed using a valid multihash code.
+func IsSupportedMultihash(encodedMultihash string) bool {
+	code, err := GetMultihashCode(encodedMultihash)
+	if err != nil {
+		return false
+	}
+
+	return multihash.ValidCode(code)
+}
+
+// IsComputedUsingMultihashAlgorithms checks to see if the given encoded hash has been hashed using one of the supplied codes.
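+//
+// For example (a sketch; 18 is the registered multihash code for sha2-256):
+//
+//	mh, _ := ComputeMultihash(18, []byte("data"))
+//	ok := IsComputedUsingMultihashAlgorithms(encoder.EncodeToString(mh), []uint{18})
+//	// ok == true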
+func IsComputedUsingMultihashAlgorithms(encodedMultihash string, codes []uint) bool { + mhCode, err := GetMultihashCode(encodedMultihash) + if err != nil { + return false + } + + for _, supported := range codes { + if mhCode == uint64(supported) { + return true + } + } + + return false +} + +// GetMultihashCode returns multihash code from encoded multihash. +func GetMultihashCode(encodedMultihash string) (uint64, error) { + mh, err := GetMultihash(encodedMultihash) + if err != nil { + return 0, fmt.Errorf("failed to get decoded multihash: %s", err.Error()) + } + + return mh.Code, nil +} + +// GetMultihash returns decoded multihash from encoded multihash. +func GetMultihash(encodedMultihash string) (*multihash.DecodedMultihash, error) { + multihashBytes, err := encoder.DecodeString(encodedMultihash) + if err != nil { + return nil, err + } + + return multihash.Decode(multihashBytes) +} + +// IsValidModelMultihash compares model with provided model multihash. +func IsValidModelMultihash(model interface{}, modelMultihash string) error { + code, err := GetMultihashCode(modelMultihash) + if err != nil { + return err + } + + encodedComputedMultihash, err := CalculateModelMultihash(model, uint(code)) + if err != nil { + return err + } + + if encodedComputedMultihash != modelMultihash { + return errors.New("supplied hash doesn't match original content") + } + + return nil +} + +// CalculateModelMultihash calculates model multihash. +func CalculateModelMultihash(value interface{}, alg uint) (string, error) { + bytes, err := canonicalizer.MarshalCanonical(value) + if err != nil { + return "", err + } + + multiHashBytes, err := ComputeMultihash(alg, bytes) + if err != nil { + return "", err + } + + return encoder.EncodeToString(multiHashBytes), nil +} + +// GetHash calculates hash of data using hash function identified by hash. +func GetHash(hash crypto.Hash, data []byte) ([]byte, error) { + if !hash.Available() { + return nil, fmt.Errorf("hash function not available for: %d", hash) + } + + h := hash.New() + + if _, hashErr := h.Write(data); hashErr != nil { + return nil, hashErr + } + + result := h.Sum(nil) + + return result, nil +} diff --git a/pkg/hashing/hash_test.go b/pkg/hashing/hash_test.go new file mode 100644 index 0000000..90a9ffb --- /dev/null +++ b/pkg/hashing/hash_test.go @@ -0,0 +1,182 @@ +/* +Copyright Gen Digital Inc. All Rights Reserved. 
+ +SPDX-License-Identifier: Apache-2.0 +*/ + +package hashing + +import ( + "crypto" + "crypto/sha256" + "testing" + + "github.com/stretchr/testify/require" + + "github.com/trustbloc/sidetree-go/pkg/encoder" +) + +const ( + algSHA256 = 5 + + sha2_256 = 18 + sha2_512 = 19 +) + +var sample = []byte("test") + +func TestGetHashFromMultihash(t *testing.T) { + hash, err := GetHashFromMultihash(100) + require.NotNil(t, err) + require.Contains(t, err.Error(), "algorithm not supported") + require.Equal(t, crypto.Hash(0), hash) + + hash, err = GetHashFromMultihash(sha2_256) + require.Nil(t, err) + require.NotNil(t, hash) +} + +func TestComputeHash(t *testing.T) { + hash, err := ComputeMultihash(100, sample) + require.NotNil(t, err) + require.Contains(t, err.Error(), "algorithm not supported") + require.Nil(t, hash) + + hash, err = ComputeMultihash(sha2_256, sample) + require.Nil(t, err) + require.NotNil(t, hash) +} + +func TestIsSupportedMultihash(t *testing.T) { + // scenario: not base64 encoded (corrupted input) + supported := IsSupportedMultihash("XXXXXaGVsbG8=") + require.False(t, supported) + + // scenario: base64 encoded, however not multihash + supported = IsSupportedMultihash(encoder.EncodeToString(sample)) + require.False(t, supported) + + // scenario: valid encoded multihash + hash, err := ComputeMultihash(sha2_256, sample) + require.Nil(t, err) + require.NotNil(t, hash) + + key := encoder.EncodeToString(hash) + supported = IsSupportedMultihash(key) + require.True(t, supported) +} + +func TestIsComputedUsingHashAlgorithm(t *testing.T) { + hash, err := ComputeMultihash(sha2_256, sample) + require.Nil(t, err) + require.NotNil(t, hash) + + key := encoder.EncodeToString(hash) + ok := IsComputedUsingMultihashAlgorithms(key, []uint{sha2_256}) + require.True(t, ok) + + // use random code to fail + ok = IsComputedUsingMultihashAlgorithms(key, []uint{55}) + require.False(t, ok) + + ok = IsComputedUsingMultihashAlgorithms("invalid", []uint{sha2_256}) + require.False(t, ok) +} + +func TestIsValidModelMultihash(t *testing.T) { + t.Run("success", func(t *testing.T) { + suffix, err := CalculateModelMultihash(suffixDataObject, sha2_256) + require.Nil(t, err) + require.Equal(t, expectedSuffixForSuffixObject, suffix) + + err = IsValidModelMultihash(suffixDataObject, suffix) + require.NoError(t, err) + }) + + t.Run("error - model multihash is not matching provided multihash", func(t *testing.T) { + differentMultihash, err := ComputeMultihash(sha2_256, []byte("test")) + require.NoError(t, err) + + err = IsValidModelMultihash(suffixDataObject, encoder.EncodeToString(differentMultihash)) + require.Error(t, err) + require.Contains(t, err.Error(), "supplied hash doesn't match original content") + }) + + t.Run("error - multihash is not encoded", func(t *testing.T) { + differentMultihash, err := ComputeMultihash(sha2_256, []byte("test")) + require.NoError(t, err) + + err = IsValidModelMultihash(suffixDataObject, string(differentMultihash)) + require.Error(t, err) + require.Contains(t, err.Error(), "illegal base64 data") + }) + + t.Run("error - invalid model", func(t *testing.T) { + differentMultihash, err := ComputeMultihash(sha2_256, []byte("test")) + require.NoError(t, err) + + var c chan int + err = IsValidModelMultihash(c, encoder.EncodeToString(differentMultihash)) + require.Error(t, err) + require.Contains(t, err.Error(), "json: unsupported type: chan int") + }) +} + +func TestCalculateModelMultihash(t *testing.T) { + t.Run("success", func(t *testing.T) { + suffix, err := 
CalculateModelMultihash(suffixDataObject, sha2_256)
+		require.Nil(t, err)
+		require.Equal(t, expectedSuffixForSuffixObject, suffix)
+	})
+
+	t.Run("success - sha2_512", func(t *testing.T) {
+		_, err := CalculateModelMultihash(suffixDataObject, sha2_512)
+		require.Nil(t, err)
+	})
+
+	t.Run("error - multihash algorithm not supported", func(t *testing.T) {
+		id, err := CalculateModelMultihash(suffixDataObject, 55)
+		require.NotNil(t, err)
+		require.Empty(t, id)
+		require.Contains(t, err.Error(), "algorithm not supported, unable to compute hash")
+	})
+
+	t.Run("error - marshal canonical", func(t *testing.T) {
+		var c chan int
+		result, err := CalculateModelMultihash(c, sha2_256)
+		require.Error(t, err)
+		require.Empty(t, result)
+		require.Contains(t, err.Error(), "json: unsupported type: chan int")
+	})
+}
+
+func TestHash(t *testing.T) {
+	t.Run("success", func(t *testing.T) {
+		test := []byte("hello world")
+
+		h, err := GetHash(algSHA256, test)
+		require.NoError(t, err)
+		require.NotEmpty(t, h)
+
+		expected := sha256.Sum256(test)
+		require.Equal(t, expected[:], h)
+	})
+
+	t.Run("error - hash code not supported", func(t *testing.T) {
+		test := []byte("test data")
+		h, err := GetHash(55, test)
+		require.Error(t, err)
+		require.Empty(t, h)
+		require.Contains(t, err.Error(), "hash function not available for: 55")
+	})
+}
+
+var suffixDataObject = &struct {
+	DeltaHash          string `json:"deltaHash,omitempty"`
+	RecoveryCommitment string `json:"recoveryCommitment,omitempty"`
+}{
+	DeltaHash:          "EiBOmkP6kn7yjt0VocmcPu9OQOsZi199Evh-xB48ebubQA",
+	RecoveryCommitment: "EiAAZJYry29vICkwmso8FL92WAISMAhsL8xkCm8dYVnq_w",
+}
+
+const expectedSuffixForSuffixObject = "EiA5vyaRzJIxbkuZbvwEXiC__u8ieFx50TAAo98tBzCuyA"
diff --git a/pkg/internal/jsoncanonicalizer/README.md b/pkg/internal/jsoncanonicalizer/README.md
new file mode 100644
index 0000000..5c91be7
--- /dev/null
+++ b/pkg/internal/jsoncanonicalizer/README.md
@@ -0,0 +1,4 @@
+## JSON Canonicalizer
+
+The files in this folder are copied AS-IS from [Cyberphone JSON Canonicalization Go Library](https://github.com/cyberphone/json-canonicalization/tree/master/go/src/webpki.org/jsoncanonicalizer).
+The license details are available at [LICENSE](https://github.com/cyberphone/json-canonicalization/blob/master/LICENSE).
diff --git a/pkg/internal/jsoncanonicalizer/es6numfmt.go b/pkg/internal/jsoncanonicalizer/es6numfmt.go
new file mode 100644
index 0000000..939dda1
--- /dev/null
+++ b/pkg/internal/jsoncanonicalizer/es6numfmt.go
@@ -0,0 +1,92 @@
+//
+// Copyright 2006-2019 WebPKI.org (http://webpki.org).
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// https://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+//
+
+// This package converts numbers in IEEE-754 double precision into the
+// format specified for JSON in EcmaScript Version 6 and forward.
+// The core application for this is canonicalization: +// https://tools.ietf.org/html/draft-rundgren-json-canonicalization-scheme-02 + +package jsoncanonicalizer + +import ( + "errors" + "math" + "strconv" + "strings" +) + +const invalidPattern uint64 = 0x7ff0000000000000 + +func NumberToJSON(ieeeF64 float64) (res string, err error) { + ieeeU64 := math.Float64bits(ieeeF64) + + // Special case: NaN and Infinity are invalid in JSON + if (ieeeU64 & invalidPattern) == invalidPattern { + return "null", errors.New("Invalid JSON number: " + strconv.FormatUint(ieeeU64, 16)) + } + + // Special case: eliminate "-0" as mandated by the ES6-JSON/JCS specifications + if ieeeF64 == 0 { // Right, this line takes both -0 and 0 + return "0", nil + } + + // Deal with the sign separately + var sign string = "" + if ieeeF64 < 0 { + ieeeF64 = -ieeeF64 + sign = "-" + } + + // ES6 has a unique "g" format + var format byte = 'e' + if ieeeF64 < 1e+21 && ieeeF64 >= 1e-6 { + format = 'f' + } + + // The following should (in "theory") do the trick: + es6Formatted := strconv.FormatFloat(ieeeF64, format, -1, 64) + + // Unfortunately Go version 1.11.4 is a bit buggy with respect to + // rounding for -1 precision which is dealt with below. + // https://github.com/golang/go/issues/29491 + exponent := strings.IndexByte(es6Formatted, 'e') + if exponent > 0 { + gform := strconv.FormatFloat(ieeeF64, 'g', 17, 64) + if len(gform) == len(es6Formatted) { + // "g" occasionally produces another result which also is the correct one + es6Formatted = gform + } + // Go outputs "1e+09" which must be rewritten as "1e+9" + if es6Formatted[exponent+2] == '0' { + es6Formatted = es6Formatted[:exponent+2] + es6Formatted[exponent+3:] + } + } else if strings.IndexByte(es6Formatted, '.') < 0 && len(es6Formatted) >= 12 { + i := len(es6Formatted) + for es6Formatted[i-1] == '0' { + i-- + } + if i != len(es6Formatted) { + fix := strconv.FormatFloat(ieeeF64, 'f', 0, 64) + if fix[i] >= '5' { + // "f" with precision 0 occasionally produces another result which also is + // the correct one although it must be rounded to match the -1 precision + // (which fortunately seems to be correct with respect to trailing zeroes) + es6Formatted = fix[:i-1] + string(fix[i-1]+1) + es6Formatted[i:] + } + } + } + return sign + es6Formatted, nil +} diff --git a/pkg/internal/jsoncanonicalizer/jsoncanonicalizer.go b/pkg/internal/jsoncanonicalizer/jsoncanonicalizer.go new file mode 100644 index 0000000..d2510a7 --- /dev/null +++ b/pkg/internal/jsoncanonicalizer/jsoncanonicalizer.go @@ -0,0 +1,377 @@ +// +// Copyright 2006-2019 WebPKI.org (http://webpki.org). +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+// + +// This package transforms JSON data in UTF-8 according to: +// https://tools.ietf.org/html/draft-rundgren-json-canonicalization-scheme-02 + +package jsoncanonicalizer + +import ( + "container/list" + "errors" + "fmt" + "strconv" + "strings" + "unicode/utf16" +) + +type nameValueType struct { + name string + sortKey []uint16 + value string +} + +// JSON standard escapes (modulo \u) +var asciiEscapes = []byte{'\\', '"', 'b', 'f', 'n', 'r', 't'} +var binaryEscapes = []byte{'\\', '"', '\b', '\f', '\n', '\r', '\t'} + +// JSON literals +var literals = []string{"true", "false", "null"} + +func Transform(jsonData []byte) (result []byte, e error) { + // JSON data MUST be UTF-8 encoded + var jsonDataLength int = len(jsonData) + + // Current pointer in jsonData + var index int = 0 + + // "Forward" declarations are needed for closures referring each other + var parseElement func() string + var parseSimpleType func() string + var parseQuotedString func() string + var parseObject func() string + var parseArray func() string + + var globalError error = nil + + checkError := func(e error) { + // We only honor the first reported error + if globalError == nil { + globalError = e + } + } + + setError := func(msg string) { + checkError(errors.New(msg)) + } + + isWhiteSpace := func(c byte) bool { + return c == 0x20 || c == 0x0a || c == 0x0d || c == 0x09 + } + + nextChar := func() byte { + if index < jsonDataLength { + c := jsonData[index] + if c > 0x7f { + setError("Unexpected non-ASCII character") + } + index++ + return c + } + setError("Unexpected EOF reached") + return '"' + } + + scan := func() byte { + for { + c := nextChar() + if isWhiteSpace(c) { + continue + } + return c + } + } + + scanFor := func(expected byte) { + c := scan() + if c != expected { + setError("Expected '" + string(expected) + "' but got '" + string(c) + "'") + } + } + + getUEscape := func() rune { + start := index + nextChar() + nextChar() + nextChar() + nextChar() + if globalError != nil { + return 0 + } + u16, err := strconv.ParseUint(string(jsonData[start:index]), 16, 64) + checkError(err) + return rune(u16) + } + + testNextNonWhiteSpaceChar := func() byte { + save := index + c := scan() + index = save + return c + } + + decorateString := func(rawUTF8 string) string { + var quotedString strings.Builder + quotedString.WriteByte('"') + CoreLoop: + for _, c := range []byte(rawUTF8) { + // Is this within the JSON standard escapes? 
+ for i, esc := range binaryEscapes { + if esc == c { + quotedString.WriteByte('\\') + quotedString.WriteByte(asciiEscapes[i]) + continue CoreLoop + } + } + if c < 0x20 { + // Other ASCII control characters must be escaped with \uhhhh + quotedString.WriteString(fmt.Sprintf("\\u%04x", c)) + } else { + quotedString.WriteByte(c) + } + } + quotedString.WriteByte('"') + return quotedString.String() + } + + parseQuotedString = func() string { + var rawString strings.Builder + CoreLoop: + for globalError == nil { + var c byte + if index < jsonDataLength { + c = jsonData[index] + index++ + } else { + nextChar() + break + } + if c == '"' { + break + } + if c < ' ' { + setError("Unterminated string literal") + } else if c == '\\' { + // Escape sequence + c = nextChar() + if c == 'u' { + // The \u escape + firstUTF16 := getUEscape() + if utf16.IsSurrogate(firstUTF16) { + // If the first UTF-16 code unit has a certain value there must be + // another succeeding UTF-16 code unit as well + if nextChar() != '\\' || nextChar() != 'u' { + setError("Missing surrogate") + } else { + // Output the UTF-32 code point as UTF-8 + rawString.WriteRune(utf16.DecodeRune(firstUTF16, getUEscape())) + } + } else { + // Single UTF-16 code identical to UTF-32. Output as UTF-8 + rawString.WriteRune(firstUTF16) + } + } else if c == '/' { + // Benign but useless escape + rawString.WriteByte('/') + } else { + // The JSON standard escapes + for i, esc := range asciiEscapes { + if esc == c { + rawString.WriteByte(binaryEscapes[i]) + continue CoreLoop + } + } + setError("Unexpected escape: \\" + string(c)) + } + } else { + // Just an ordinary ASCII character alternatively a UTF-8 byte + // outside of ASCII. + // Note that properly formatted UTF-8 never clashes with ASCII + // making byte per byte search for ASCII break characters work + // as expected. + rawString.WriteByte(c) + } + } + return rawString.String() + } + + parseSimpleType = func() string { + var token strings.Builder + index-- + for globalError == nil { + c := testNextNonWhiteSpaceChar() + if c == ',' || c == ']' || c == '}' { + break + } + c = nextChar() + if isWhiteSpace(c) { + break + } + token.WriteByte(c) + } + if token.Len() == 0 { + setError("Missing argument") + } + value := token.String() + // Is it a JSON literal? 
+ for _, literal := range literals { + if literal == value { + return literal + } + } + // Apparently not so we assume that it is a I-JSON number + ieeeF64, err := strconv.ParseFloat(value, 64) + checkError(err) + value, err = NumberToJSON(ieeeF64) + checkError(err) + return value + } + + parseElement = func() string { + switch scan() { + case '{': + return parseObject() + case '"': + return decorateString(parseQuotedString()) + case '[': + return parseArray() + default: + return parseSimpleType() + } + } + + parseArray = func() string { + var arrayData strings.Builder + arrayData.WriteByte('[') + var next bool = false + for globalError == nil && testNextNonWhiteSpaceChar() != ']' { + if next { + scanFor(',') + arrayData.WriteByte(',') + } else { + next = true + } + arrayData.WriteString(parseElement()) + } + scan() + arrayData.WriteByte(']') + return arrayData.String() + } + + lexicographicallyPrecedes := func(sortKey []uint16, e *list.Element) bool { + // Find the minimum length of the sortKeys + oldSortKey := e.Value.(nameValueType).sortKey + minLength := len(oldSortKey) + if minLength > len(sortKey) { + minLength = len(sortKey) + } + for q := 0; q < minLength; q++ { + diff := int(sortKey[q]) - int(oldSortKey[q]) + if diff < 0 { + // Smaller => Precedes + return true + } else if diff > 0 { + // Bigger => No match + return false + } + // Still equal => Continue + } + // The sortKeys compared equal up to minLength + if len(sortKey) < len(oldSortKey) { + // Shorter => Precedes + return true + } + if len(sortKey) == len(oldSortKey) { + setError("Duplicate key: " + e.Value.(nameValueType).name) + } + // Longer => No match + return false + } + + parseObject = func() string { + nameValueList := list.New() + var next bool = false + CoreLoop: + for globalError == nil && testNextNonWhiteSpaceChar() != '}' { + if next { + scanFor(',') + } + next = true + scanFor('"') + rawUTF8 := parseQuotedString() + if globalError != nil { + break + } + // Sort keys on UTF-16 code units + // Since UTF-8 doesn't have endianess this is just a value transformation + // In the Go case the transformation is UTF-8 => UTF-32 => UTF-16 + sortKey := utf16.Encode([]rune(rawUTF8)) + scanFor(':') + nameValue := nameValueType{rawUTF8, sortKey, parseElement()} + for e := nameValueList.Front(); e != nil; e = e.Next() { + // Check if the key is smaller than a previous key + if lexicographicallyPrecedes(sortKey, e) { + // Precedes => Insert before and exit sorting + nameValueList.InsertBefore(nameValue, e) + continue CoreLoop + } + // Continue searching for a possibly succeeding sortKey + // (which is straightforward since the list is ordered) + } + // The sortKey is either the first or is succeeding all previous sortKeys + nameValueList.PushBack(nameValue) + } + // Scan away '}' + scan() + // Now everything is sorted so we can properly serialize the object + var objectData strings.Builder + objectData.WriteByte('{') + next = false + for e := nameValueList.Front(); e != nil; e = e.Next() { + if next { + objectData.WriteByte(',') + } + next = true + nameValue := e.Value.(nameValueType) + objectData.WriteString(decorateString(nameValue.name)) + objectData.WriteByte(':') + objectData.WriteString(nameValue.value) + } + objectData.WriteByte('}') + return objectData.String() + } + + // /////////////////////////////////////////////// + // This is where Transform actually begins... 
// + // /////////////////////////////////////////////// + var transformed string + + if testNextNonWhiteSpaceChar() == '[' { + scan() + transformed = parseArray() + } else { + scanFor('{') + transformed = parseObject() + } + for index < jsonDataLength { + if !isWhiteSpace(jsonData[index]) { + setError("Improperly terminated JSON object") + break + } + index++ + } + return []byte(transformed), globalError +} diff --git a/pkg/internal/log/fields.go b/pkg/internal/log/fields.go new file mode 100644 index 0000000..f8cf66e --- /dev/null +++ b/pkg/internal/log/fields.go @@ -0,0 +1,332 @@ +/* +Copyright Gen Digital Inc. All Rights Reserved. + +SPDX-License-Identifier: Apache-2.0 +*/ + +package log + +import ( + "encoding/json" + "fmt" + + "go.uber.org/zap" + "go.uber.org/zap/zapcore" +) + +// Log Fields. +const ( + FieldURI = "uri" + FieldServiceName = "service" + FieldData = "data" + FieldRequestBody = "requestBody" + FieldSize = "size" + FieldMaxSize = "maxSize" + FieldParameter = "parameter" + FieldTotal = "total" + FieldSuffix = "suffix" + FieldSuffixes = "suffixes" + FieldOperationType = "operationType" + FieldOperation = "operation" + FieldOperations = "operations" + FieldOperationID = "operationID" + FieldGenesisTime = "genesisTime" + FieldOperationGenesisTime = "opGenesisTime" + FieldSidetreeTxn = "sidetreeTxn" + FieldID = "id" + FieldResolutionModel = "resolutionModel" + FieldVersion = "version" + FieldNamespace = "namespace" + FieldAnchorString = "anchorString" + FieldSource = "source" + FieldTotalPending = "totalPending" + FieldTransactionTime = "transactionTime" + FieldTransactionNumber = "transactionNumber" + FieldCommitment = "commitment" + FieldRecoveryCommitment = "recoveryCommitment" + FieldUpdateCommitment = "updateCommitment" + FieldTotalCommitments = "totalCommitments" + FieldTotalOperations = "totalOperations" + FieldTotalCreateOperations = "totalCreateOperations" + FieldTotalUpdateOperations = "totalUpdateOperations" + FieldTotalRecoverOperations = "totalRecoverOperations" + FieldTotalDeactivateOperations = "totalDeactivateOperations" + FieldDocument = "document" + FieldDeactivated = "deactivated" + FieldVersionTime = "versionTime" + FieldPatch = "patch" + FieldIsBatch = "isBatch" + FieldContent = "content" + FieldSources = "sources" + FieldAlias = "alias" +) + +// WithURIString sets the uri field. +func WithURIString(value string) zap.Field { + return zap.String(FieldURI, value) +} + +// WithData sets the data field. +func WithData(value []byte) zap.Field { + return zap.String(FieldData, string(value)) +} + +// WithRequestBody sets the request-body field. +func WithRequestBody(value []byte) zap.Field { + return zap.String(FieldRequestBody, string(value)) +} + +// WithServiceName sets the service field. +func WithServiceName(value string) zap.Field { + return zap.String(FieldServiceName, value) +} + +// WithSize sets the size field. +func WithSize(value int) zap.Field { + return zap.Int(FieldSize, value) +} + +// WithMaxSize sets the max-size field. +func WithMaxSize(value int) zap.Field { + return zap.Int(FieldMaxSize, value) +} + +// WithParameter sets the parameter field. +func WithParameter(value string) zap.Field { + return zap.String(FieldParameter, value) +} + +// WithTotal sets the total field. +func WithTotal(value int) zap.Field { + return zap.Int(FieldTotal, value) +} + +// WithSuffix sets the suffix field. +func WithSuffix(value string) zap.Field { + return zap.String(FieldSuffix, value) +} + +// WithSuffixes sets the suffixes field. 
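+// Usage sketch (assumes a logger that accepts zap fields, as in the package tests):
+//
+//	logger.Info("Processing operations", WithSuffixes("suffix1", "suffix2"))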
+func WithSuffixes(value ...string) zap.Field { + return zap.Array(FieldSuffixes, NewStringArrayMarshaller(value)) +} + +// WithOperationType sets the operation-type field. +func WithOperationType(value string) zap.Field { + return zap.Any(FieldOperationType, value) +} + +// WithOperation sets the operation field. +func WithOperation(value interface{}) zap.Field { + return zap.Inline(NewObjectMarshaller(FieldOperation, value)) +} + +// WithOperationID sets the operation-id field. +func WithOperationID(value string) zap.Field { + return zap.String(FieldOperationID, value) +} + +// WithGenesisTime sets the genesis-time field. +func WithGenesisTime(value uint64) zap.Field { + return zap.Uint64(FieldGenesisTime, value) +} + +// WithOperationGenesisTime sets the op-genesis-time field. +func WithOperationGenesisTime(value uint64) zap.Field { + return zap.Uint64(FieldOperationGenesisTime, value) +} + +// WithSidetreeTxn sets the sidetree-txn field. +func WithSidetreeTxn(value interface{}) zap.Field { + return zap.Inline(NewObjectMarshaller(FieldSidetreeTxn, value)) +} + +// WithID sets the id field. +func WithID(value string) zap.Field { + return zap.String(FieldID, value) +} + +// WithResolutionModel sets the resolution-model field. +func WithResolutionModel(value interface{}) zap.Field { + return zap.Inline(NewObjectMarshaller(FieldResolutionModel, value)) +} + +// WithVersion sets the version field. +func WithVersion(value string) zap.Field { + return zap.String(FieldVersion, value) +} + +// WithNamespace sets the namespace field. +func WithNamespace(value string) zap.Field { + return zap.String(FieldNamespace, value) +} + +// WithAnchorString sets the anchor-string field. +func WithAnchorString(value string) zap.Field { + return zap.String(FieldAnchorString, value) +} + +// WithSource sets the source field. +func WithSource(value string) zap.Field { + return zap.String(FieldSource, value) +} + +// WithTotalPending sets the total-pending field. +func WithTotalPending(value uint) zap.Field { + return zap.Uint(FieldTotalPending, value) +} + +// WithTransactionTime sets the transaction-time field. +func WithTransactionTime(value uint64) zap.Field { + return zap.Uint64(FieldTransactionTime, value) +} + +// WithTransactionNumber sets the transaction-number field. +func WithTransactionNumber(value uint64) zap.Field { + return zap.Uint64(FieldTransactionNumber, value) +} + +// WithCommitment sets the commitment field. +func WithCommitment(value string) zap.Field { + return zap.String(FieldCommitment, value) +} + +// WithRecoveryCommitment sets the recovery-commitment field. +func WithRecoveryCommitment(value string) zap.Field { + return zap.String(FieldRecoveryCommitment, value) +} + +// WithUpdateCommitment sets the update-commitment field. +func WithUpdateCommitment(value string) zap.Field { + return zap.String(FieldUpdateCommitment, value) +} + +// WithTotalCommitments sets the total-commitments field. +func WithTotalCommitments(value int) zap.Field { + return zap.Int(FieldTotalCommitments, value) +} + +// WithTotalOperations sets the total-operations field. +func WithTotalOperations(value int) zap.Field { + return zap.Int(FieldTotalOperations, value) +} + +// WithTotalCreateOperations sets the total-create-operations field. +func WithTotalCreateOperations(value int) zap.Field { + return zap.Int(FieldTotalCreateOperations, value) +} + +// WithTotalUpdateOperations sets the total-update-operations field. 
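+// Usage sketch (illustrative counter values):
+//
+//	logger.Info("Batch stats", WithTotalOperations(54), WithTotalUpdateOperations(87))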
+func WithTotalUpdateOperations(value int) zap.Field { + return zap.Int(FieldTotalUpdateOperations, value) +} + +// WithTotalRecoverOperations sets the total-recover-operations field. +func WithTotalRecoverOperations(value int) zap.Field { + return zap.Int(FieldTotalRecoverOperations, value) +} + +// WithTotalDeactivateOperations sets the total-deactivate-operations field. +func WithTotalDeactivateOperations(value int) zap.Field { + return zap.Int(FieldTotalDeactivateOperations, value) +} + +// WithDocument sets the document field. +func WithDocument(value map[string]interface{}) zap.Field { + return zap.Inline(newJSONMarshaller(FieldDocument, value)) +} + +// WithDeactivated sets the deactivated field. +func WithDeactivated(value bool) zap.Field { + return zap.Bool(FieldDeactivated, value) +} + +// WithOperations sets the operation field. +func WithOperations(value interface{}) zap.Field { + return zap.Inline(NewObjectMarshaller(FieldOperations, value)) +} + +// WithVersionTime sets the version-time field. +func WithVersionTime(value string) zap.Field { + return zap.String(FieldVersionTime, value) +} + +// WithPatch sets the patch field. +func WithPatch(value interface{}) zap.Field { + return zap.Inline(NewObjectMarshaller(FieldPatch, value)) +} + +// WithIsBatch sets the is-batch field. +func WithIsBatch(value bool) zap.Field { + return zap.Bool(FieldIsBatch, value) +} + +// WithContent sets the content field. +func WithContent(value []byte) zap.Field { + return zap.String(FieldContent, string(value)) +} + +// WithSources sets the sources field. +func WithSources(value ...string) zap.Field { + return zap.Array(FieldSources, NewStringArrayMarshaller(value)) +} + +// WithAlias sets the alias field. +func WithAlias(value string) zap.Field { + return zap.String(FieldAlias, value) +} + +type jsonMarshaller struct { + key string + obj interface{} +} + +func newJSONMarshaller(key string, value interface{}) *jsonMarshaller { + return &jsonMarshaller{key: key, obj: value} +} + +func (m *jsonMarshaller) MarshalLogObject(e zapcore.ObjectEncoder) error { + b, err := json.Marshal(m.obj) + if err != nil { + return fmt.Errorf("marshal json: %w", err) + } + + e.AddString(m.key, string(b)) + + return nil +} + +// ObjectMarshaller uses reflection to marshal an object's fields. +type ObjectMarshaller struct { + key string + obj interface{} +} + +// NewObjectMarshaller returns a new ObjectMarshaller. +func NewObjectMarshaller(key string, obj interface{}) *ObjectMarshaller { + return &ObjectMarshaller{key: key, obj: obj} +} + +// MarshalLogObject marshals the object's fields. +func (m *ObjectMarshaller) MarshalLogObject(e zapcore.ObjectEncoder) error { + return e.AddReflected(m.key, m.obj) +} + +// StringArrayMarshaller marshals an array of strings into a log field. +type StringArrayMarshaller struct { + values []string +} + +// NewStringArrayMarshaller returns a new StringArrayMarshaller. +func NewStringArrayMarshaller(values []string) *StringArrayMarshaller { + return &StringArrayMarshaller{values: values} +} + +// MarshalLogArray marshals the array. +func (m *StringArrayMarshaller) MarshalLogArray(e zapcore.ArrayEncoder) error { + for _, v := range m.values { + e.AppendString(v) + } + + return nil +} diff --git a/pkg/internal/log/fields_test.go b/pkg/internal/log/fields_test.go new file mode 100644 index 0000000..7430746 --- /dev/null +++ b/pkg/internal/log/fields_test.go @@ -0,0 +1,203 @@ +/* +Copyright Gen Digital Inc. All Rights Reserved. 
+
+SPDX-License-Identifier: Apache-2.0
+*/
+
+package log
+
+import (
+	"bytes"
+	"encoding/json"
+	"net/url"
+	"testing"
+
+	"github.com/stretchr/testify/require"
+	"github.com/trustbloc/logutil-go/pkg/log"
+)
+
+func TestStandardFields(t *testing.T) {
+	const module = "test_module"
+
+	u1 := parseURL(t, "https://example1.com")
+
+	t.Run("json fields 1", func(t *testing.T) {
+		stdOut := newMockWriter()
+
+		logger := log.New(module, log.WithStdOut(stdOut), log.WithEncoding(log.JSON))
+
+		rm := &mockObject{Field1: "value33", Field2: 888}
+
+		logger.Info("Some message",
+			WithData([]byte(`{"field":"value"}`)), WithServiceName("service1"), WithSize(1234),
+			WithParameter("param1"), WithRequestBody([]byte(`request body`)),
+			WithTotal(12), WithSuffix("1234"), WithOperationType("Create"),
+			WithURIString(u1.String()), WithOperationID("op1"), WithGenesisTime(1233),
+			WithOperationGenesisTime(3321), WithID("id1"), WithResolutionModel(rm),
+		)
+
+		t.Log(stdOut.String())
+		l := unmarshalLogData(t, stdOut.Bytes())
+
+		require.Equal(t, `Some message`, l.Msg)
+		require.Equal(t, `{"field":"value"}`, l.Data)
+		require.Equal(t, `service1`, l.Service)
+		require.Equal(t, 1234, l.Size)
+		require.Equal(t, `param1`, l.Parameter)
+		require.Equal(t, `request body`, l.RequestBody)
+		require.Equal(t, 12, l.Total)
+		require.Equal(t, "1234", l.Suffix)
+		require.Equal(t, "Create", l.OperationType)
+		require.Equal(t, u1.String(), l.URI)
+		require.Equal(t, "op1", l.OperationID)
+		require.Equal(t, 1233, l.GenesisTime)
+		require.Equal(t, 3321, l.OperationGenesisTime)
+		require.Equal(t, "id1", l.ID)
+		require.Equal(t, rm, l.ResolutionModel)
+	})
+
+	t.Run("json fields 2", func(t *testing.T) {
+		stdOut := newMockWriter()
+
+		logger := log.New(module, log.WithStdOut(stdOut), log.WithEncoding(log.JSON))
+
+		op := &mockObject{Field1: "op1", Field2: 9486}
+		txn := &mockObject{Field1: "txn1", Field2: 5967}
+		patch := &mockObject{Field1: "patch1", Field2: 3265}
+
+		logger.Info("Some message",
+			WithSuffixes("suffix1", "suffix2"), WithVersion("v1"), WithMaxSize(20),
+			WithOperation(op), WithSidetreeTxn(txn), WithNamespace("ns1"), WithAnchorString("anchor1"),
+			WithSource("inbox"), WithTotalPending(36), WithTransactionTime(989), WithTransactionNumber(778),
+			WithCommitment("commit1"), WithRecoveryCommitment("recommit1"), WithUpdateCommitment("upcommit1"),
+			WithTotalCommitments(32), WithTotalOperations(54), WithTotalCreateOperations(12),
+			WithTotalUpdateOperations(87), WithTotalRecoverOperations(12), WithTotalDeactivateOperations(3),
+			WithDocument(map[string]interface{}{"field1": 1234}), WithDeactivated(true), WithOperations([]*mockObject{op}),
+			WithVersionTime("12"), WithPatch(patch), WithIsBatch(true), WithContent([]byte("content1")),
+			WithSources("source1", "source2"), WithAlias("alias1"),
+		)
+
+		l := unmarshalLogData(t, stdOut.Bytes())
+
+		require.Equal(t, []string{"suffix1", "suffix2"}, l.Suffixes)
+		require.Equal(t, "v1", l.Version)
+		require.Equal(t, 20, l.MaxSize)
+		require.Equal(t, op, l.Operation)
+		require.Equal(t, txn, l.SidetreeTxn)
+		require.Equal(t, "ns1", l.Namespace)
+		require.Equal(t, "anchor1", l.AnchorString)
+		require.Equal(t, "inbox", l.Source)
+		require.Equal(t, 36, l.TotalPending)
+		require.Equal(t, 989, l.TransactionTime)
+		require.Equal(t, 778, l.TransactionNumber)
+		require.Equal(t, "commit1", l.Commitment)
+		require.Equal(t, "recommit1", l.RecoveryCommitment)
+		require.Equal(t, "upcommit1", l.UpdateCommitment)
+
require.Equal(t, 32, l.TotalCommitments) + require.Equal(t, 54, l.TotalOperations) + require.Equal(t, 12, l.TotalCreateOperations) + require.Equal(t, 87, l.TotalUpdateOperations) + require.Equal(t, 12, l.TotalRecoverOperations) + require.Equal(t, 3, l.TotalDeactivateOperations) + require.Equal(t, `{"field1":1234}`, l.Document) + require.Equal(t, true, l.Deactivated) + require.Equal(t, []*mockObject{op}, l.Operations) + require.Equal(t, "12", l.VersionTime) + require.Equal(t, patch, l.Patch) + require.Equal(t, true, l.IsBatch) + require.Equal(t, "content1", l.Content) + require.Equal(t, []string{"source1", "source2"}, l.Sources) + require.Equal(t, "alias1", l.Alias) + }) +} + +type mockObject struct { + Field1 string + Field2 int +} + +type logData struct { + Level string `json:"level"` + Time string `json:"time"` + Logger string `json:"logger"` + Caller string `json:"caller"` + Msg string `json:"msg"` + Error string `json:"error"` + + Data string `json:"data"` + Service string `json:"service"` + Size int `json:"size"` + Parameter string `json:"parameter"` + URI string `json:"uri"` + RequestBody string `json:"requestBody"` + Total int `json:"total"` + Suffix string `json:"suffix"` + OperationType string `json:"operationType"` + OperationID string `json:"operationID"` + GenesisTime int `json:"genesisTime"` + ID string `json:"id"` + ResolutionModel *mockObject `json:"resolutionModel"` + Suffixes []string `json:"suffixes"` + Version string `json:"version"` + MaxSize int `json:"maxSize"` + Operation *mockObject `json:"operation"` + SidetreeTxn *mockObject `json:"sidetreeTxn"` + Namespace string `json:"namespace"` + AnchorString string `json:"anchorString"` + Source string `json:"source"` + OperationGenesisTime int `json:"opGenesisTime"` + TotalPending int `json:"totalPending"` + TransactionTime int `json:"transactionTime"` + TransactionNumber int `json:"transactionNumber"` + Commitment string `json:"commitment"` + RecoveryCommitment string `json:"recoveryCommitment"` + UpdateCommitment string `json:"updateCommitment"` + TotalCommitments int `json:"totalCommitments"` + TotalOperations int `json:"totalOperations"` + TotalCreateOperations int `json:"totalCreateOperations"` + TotalUpdateOperations int `json:"totalUpdateOperations"` + TotalRecoverOperations int `json:"totalRecoverOperations"` + TotalDeactivateOperations int `json:"totalDeactivateOperations"` + Document string `json:"document"` + Deactivated bool `json:"deactivated"` + Operations []*mockObject `json:"operations"` + VersionTime string `json:"versionTime"` + Patch *mockObject `json:"patch"` + IsBatch bool `json:"isBatch"` + Content string `json:"content"` + Sources []string `json:"sources"` + Alias string `json:"alias"` +} + +func unmarshalLogData(t *testing.T, b []byte) *logData { + t.Helper() + + l := &logData{} + + require.NoError(t, json.Unmarshal(b, l)) + + return l +} + +func parseURL(t *testing.T, raw string) *url.URL { + t.Helper() + + u, err := url.Parse(raw) + require.NoError(t, err) + + return u +} + +type mockWriter struct { + *bytes.Buffer +} + +func (m *mockWriter) Sync() error { + return nil +} + +func newMockWriter() *mockWriter { + return &mockWriter{Buffer: bytes.NewBuffer(nil)} +} diff --git a/pkg/jws/header.go b/pkg/jws/header.go new file mode 100644 index 0000000..f7f7e31 --- /dev/null +++ b/pkg/jws/header.go @@ -0,0 +1,98 @@ +/* +Copyright Gen Digital Inc. All Rights Reserved. 
+
+SPDX-License-Identifier: Apache-2.0
+*/
+
+package jws
+
+// IANA registered JOSE headers (https://tools.ietf.org/html/rfc7515#section-4.1)
+const (
+	// HeaderAlgorithm identifies:
+	// For JWS: the cryptographic algorithm used to secure the JWS.
+	// For JWE: the cryptographic algorithm used to encrypt or determine the value of the CEK.
+	HeaderAlgorithm = "alg" // string
+
+	// HeaderJWKSetURL is a URI that refers to a resource for a set of JSON-encoded public keys, one of which:
+	// For JWS: corresponds to the key used to digitally sign the JWS.
+	// For JWE: corresponds to the public key to which the JWE was encrypted.
+	HeaderJWKSetURL = "jku" // string
+
+	// HeaderJSONWebKey is:
+	// For JWS: the public key that corresponds to the key used to digitally sign the JWS.
+	// For JWE: the public key to which the JWE was encrypted.
+	HeaderJSONWebKey = "publicKeyJwk" // JSON
+
+	// HeaderKeyID is a hint:
+	// For JWS: indicating which key was used to secure the JWS.
+	// For JWE: which references the public key to which the JWE was encrypted.
+	HeaderKeyID = "kid" // string
+
+	// HeaderX509URL is a URI that refers to a resource for the X.509 public key certificate or certificate chain:
+	// For JWS: corresponding to the key used to digitally sign the JWS.
+	// For JWE: corresponding to the public key to which the JWE was encrypted.
+	HeaderX509URL = "x5u"
+
+	// HeaderX509CertificateChain contains the X.509 public key certificate or certificate chain:
+	// For JWS: corresponding to the key used to digitally sign the JWS.
+	// For JWE: corresponding to the public key to which the JWE was encrypted.
+	HeaderX509CertificateChain = "x5c"
+
+	// HeaderX509CertificateDigestSha1 (X.509 certificate SHA-1 thumbprint) is a base64url-encoded
+	// SHA-1 thumbprint (a.k.a. digest) of the DER encoding of the X.509 certificate:
+	// For JWS: corresponding to the key used to digitally sign the JWS.
+	// For JWE: corresponding to the public key to which the JWE was encrypted.
+	HeaderX509CertificateDigestSha1 = "x5t"
+
+	// HeaderX509CertificateDigestSha256 (X.509 certificate SHA-256 thumbprint) is a base64url-encoded SHA-256
+	// thumbprint (a.k.a. digest) of the DER encoding of the X.509 certificate:
+	// For JWS: corresponding to the key used to digitally sign the JWS.
+	// For JWE: corresponding to the public key to which the JWE was encrypted.
+	HeaderX509CertificateDigestSha256 = "x5t#S256" // string
+
+	// HeaderType is:
+	// For JWS: used by JWS applications to declare the media type of this complete JWS.
+	// For JWE: used by JWE applications to declare the media type of this complete JWE.
+	HeaderType = "typ" // string
+
+	// HeaderContentType is used by JWS applications to declare the media type of:
+	// For JWS: the secured content (the payload).
+	// For JWE: the secured content (the plaintext).
+	HeaderContentType = "cty" // string
+
+	// HeaderCritical indicates that extensions to:
+	// For JWS: this JWS header specification and/or JWA are being used that MUST be understood and processed.
+	// For JWE: this JWE header specification and/or JWA are being used that MUST be understood and processed.
+	HeaderCritical = "crit" // array
+)
+
+// Header defined in https://tools.ietf.org/html/rfc7797
+const (
+	// HeaderB64Payload determines whether the payload is represented in the JWS and the JWS Signing
+	// Input as ASCII(BASE64URL(JWS Payload)) or as the JWS Payload value itself with no encoding performed.
+	HeaderB64Payload = "b64" // bool
+)
+
+// Headers represents JOSE headers.
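+//
+// Example (illustrative only; values are placeholders):
+//
+//	h := Headers{"alg": "ES256", "kid": "key-1"}
+//	alg, ok := h.Algorithm() // "ES256", true
+//	kid, ok := h.KeyID()     // "key-1", true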
+type Headers map[string]interface{}
+
+// KeyID gets the Key ID from JOSE headers.
+func (h Headers) KeyID() (string, bool) {
+	return h.stringValue(HeaderKeyID)
+}
+
+// Algorithm gets the algorithm from JOSE headers.
+func (h Headers) Algorithm() (string, bool) {
+	return h.stringValue(HeaderAlgorithm)
+}
+
+func (h Headers) stringValue(key string) (string, bool) {
+	kRaw, ok := h[key]
+	if !ok {
+		return "", false
+	}
+
+	kStr, ok := kRaw.(string)
+
+	return kStr, ok
+}
diff --git a/pkg/jws/header_test.go b/pkg/jws/header_test.go
new file mode 100644
index 0000000..52aca8b
--- /dev/null
+++ b/pkg/jws/header_test.go
@@ -0,0 +1,38 @@
+/*
+Copyright Gen Digital Inc. All Rights Reserved.
+
+SPDX-License-Identifier: Apache-2.0
+*/
+
+package jws
+
+import (
+	"testing"
+
+	"github.com/stretchr/testify/require"
+)
+
+func TestHeader(t *testing.T) {
+	headers := make(Headers)
+
+	alg, ok := headers.Algorithm()
+	require.False(t, ok)
+	require.Empty(t, alg)
+
+	kid, ok := headers.KeyID()
+	require.False(t, ok)
+	require.Empty(t, kid)
+
+	headers = Headers(map[string]interface{}{
+		"alg": "alg",
+		"kid": "kid",
+	})
+
+	alg, ok = headers.Algorithm()
+	require.True(t, ok)
+	require.Equal(t, "alg", alg)
+
+	kid, ok = headers.KeyID()
+	require.True(t, ok)
+	require.Equal(t, "kid", kid)
+}
diff --git a/pkg/jws/jwk.go b/pkg/jws/jwk.go
new file mode 100644
index 0000000..ce9d45e
--- /dev/null
+++ b/pkg/jws/jwk.go
@@ -0,0 +1,35 @@
+/*
+Copyright Gen Digital Inc. All Rights Reserved.
+
+SPDX-License-Identifier: Apache-2.0
+*/
+
+package jws
+
+import "errors"
+
+// JWK contains a public key in JWK format.
+type JWK struct {
+	Kty   string `json:"kty"`
+	Crv   string `json:"crv"`
+	X     string `json:"x"`
+	Y     string `json:"y"`
+	Nonce string `json:"nonce,omitempty"`
+}
+
+// Validate validates the JWK.
+func (jwk *JWK) Validate() error {
+	if jwk.Crv == "" {
+		return errors.New("JWK crv is missing")
+	}
+
+	if jwk.Kty == "" {
+		return errors.New("JWK kty is missing")
+	}
+
+	if jwk.X == "" {
+		return errors.New("JWK x is missing")
+	}
+
+	return nil
+}
diff --git a/pkg/jws/jwk_test.go b/pkg/jws/jwk_test.go
new file mode 100644
index 0000000..551b27b
--- /dev/null
+++ b/pkg/jws/jwk_test.go
@@ -0,0 +1,54 @@
+package jws
+
+import (
+	"testing"
+
+	"github.com/stretchr/testify/require"
+)
+
+func TestValidate(t *testing.T) {
+	t.Run("success", func(t *testing.T) {
+		jwk := JWK{
+			Kty: "kty",
+			Crv: "crv",
+			X:   "x",
+		}
+
+		err := jwk.Validate()
+		require.NoError(t, err)
+	})
+
+	t.Run("missing kty", func(t *testing.T) {
+		jwk := JWK{
+			Kty: "",
+			Crv: "crv",
+			X:   "x",
+		}
+
+		err := jwk.Validate()
+		require.Error(t, err)
+		require.Contains(t, err.Error(), "kty is missing")
+	})
+
+	t.Run("missing crv", func(t *testing.T) {
+		jwk := JWK{
+			Kty: "kty",
+			X:   "x",
+		}
+
+		err := jwk.Validate()
+		require.Error(t, err)
+		require.Contains(t, err.Error(), "crv is missing")
+	})
+
+	t.Run("missing x", func(t *testing.T) {
+		jwk := JWK{
+			Kty: "kty",
+			Crv: "crv",
+		}
+
+		err := jwk.Validate()
+		require.Error(t, err)
+		require.Contains(t, err.Error(), "x is missing")
+	})
+}
diff --git a/pkg/jwsutil/jwk.go b/pkg/jwsutil/jwk.go
new file mode 100644
index 0000000..4a2f114
--- /dev/null
+++ b/pkg/jwsutil/jwk.go
@@ -0,0 +1,293 @@
+/*
+Copyright Gen Digital Inc. All Rights Reserved.
+ +SPDX-License-Identifier: Apache-2.0 +*/ + +package jwsutil + +import ( + "crypto/ecdsa" + "crypto/elliptic" + "crypto/x509" + "encoding/base64" + "errors" + "fmt" + "math/big" + "strings" + + "github.com/btcsuite/btcd/btcec" + "github.com/square/go-jose/v3" + "github.com/square/go-jose/v3/json" + "golang.org/x/crypto/ed25519" +) + +const ( + secp256k1Crv = "secp256k1" + secp256k1Kty = "EC" + secp256k1Size = 32 + bitsPerByte = 8 +) + +// JWK (JSON Web Key) is a JSON data structure that represents a cryptographic key. +type JWK struct { + jose.JSONWebKey + + Kty string + Crv string +} + +// PublicKeyBytes converts a public key to bytes. +func (j *JWK) PublicKeyBytes() ([]byte, error) { + if isSecp256k1(j.Kty, j.Crv) { + var ecPubKey *ecdsa.PublicKey + + ecPubKey, ok := j.Key.(*ecdsa.PublicKey) + if !ok { + ecPubKey = &j.Key.(*ecdsa.PrivateKey).PublicKey + } + + pubKey := &btcec.PublicKey{ + Curve: btcec.S256(), + X: ecPubKey.X, + Y: ecPubKey.Y, + } + + return pubKey.SerializeCompressed(), nil + } + + switch pubKey := j.Public().Key.(type) { + case *ecdsa.PublicKey, ed25519.PublicKey: + pubKBytes, err := x509.MarshalPKIXPublicKey(pubKey) + if err != nil { + return nil, errors.New("failed to read public key bytes") + } + + return pubKBytes, nil + default: + return nil, fmt.Errorf("unsupported public key type in kid '%s'", j.KeyID) + } +} + +// UnmarshalJSON reads a key from its JSON representation. +func (j *JWK) UnmarshalJSON(jwkBytes []byte) error { + var key jsonWebKey + + marshalErr := json.Unmarshal(jwkBytes, &key) + if marshalErr != nil { + return fmt.Errorf("unable to read JWK: %w", marshalErr) + } + + if isSecp256k1(key.Kty, key.Crv) { + jwk, err := unmarshalSecp256k1(&key) + if err != nil { + return fmt.Errorf("unable to read JWK: %w", err) + } + + *j = *jwk + } else { + var joseJWK jose.JSONWebKey + + err := json.Unmarshal(jwkBytes, &joseJWK) + if err != nil { + return fmt.Errorf("unable to read jose JWK, %w", err) + } + + j.JSONWebKey = joseJWK + } + + j.Kty = key.Kty + j.Crv = key.Crv + + return nil +} + +// MarshalJSON serializes the given key to its JSON representation. 
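+//
+// Illustrative round trip (raw is assumed to hold a JWK JSON document):
+// secp256k1 keys are handled by the local secp256k1 (un)marshaller, while all
+// other key types are delegated to go-jose.
+//
+//	var jwk JWK
+//	_ = json.Unmarshal(raw, &jwk) // dispatches to unmarshalSecp256k1 for secp256k1 keys
+//	b, _ := jwk.MarshalJSON()     // dispatches to marshalSecp256k1 on the way out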
+func (j *JWK) MarshalJSON() ([]byte, error) { + if isSecp256k1(j.Kty, j.Crv) { + return marshalSecp256k1(j) + } + + return (&j.JSONWebKey).MarshalJSON() +} + +func isSecp256k1(kty, crv string) bool { + return strings.EqualFold(kty, secp256k1Kty) && strings.EqualFold(crv, secp256k1Crv) +} + +func unmarshalSecp256k1(jwk *jsonWebKey) (*JWK, error) { + if jwk.X == nil { + return nil, ErrInvalidKey + } + + if jwk.Y == nil { + return nil, ErrInvalidKey + } + + curve := btcec.S256() + + if curveSize(curve) != len(jwk.X.data) { + return nil, ErrInvalidKey + } + + if curveSize(curve) != len(jwk.Y.data) { + return nil, ErrInvalidKey + } + + if jwk.D != nil && dSize(curve) != len(jwk.D.data) { + return nil, ErrInvalidKey + } + + x := jwk.X.bigInt() + y := jwk.Y.bigInt() + + if !curve.IsOnCurve(x, y) { + return nil, ErrInvalidKey + } + + var key interface{} + + if jwk.D != nil { + key = &ecdsa.PrivateKey{ + PublicKey: ecdsa.PublicKey{ + Curve: curve, + X: x, + Y: y, + }, + D: jwk.D.bigInt(), + } + } else { + key = &ecdsa.PublicKey{ + Curve: curve, + X: x, + Y: y, + } + } + + return &JWK{ + JSONWebKey: jose.JSONWebKey{ + Key: key, KeyID: jwk.Kid, Algorithm: jwk.Alg, Use: jwk.Use, + }, + }, nil +} + +func marshalSecp256k1(jwk *JWK) ([]byte, error) { + var raw jsonWebKey + + switch ecdsaKey := jwk.Key.(type) { + case *ecdsa.PublicKey: + raw = jsonWebKey{ + Kty: secp256k1Kty, + Crv: secp256k1Crv, + X: newFixedSizeBuffer(ecdsaKey.X.Bytes(), secp256k1Size), + Y: newFixedSizeBuffer(ecdsaKey.Y.Bytes(), secp256k1Size), + } + + case *ecdsa.PrivateKey: + raw = jsonWebKey{ + Kty: secp256k1Kty, + Crv: secp256k1Crv, + X: newFixedSizeBuffer(ecdsaKey.X.Bytes(), secp256k1Size), + Y: newFixedSizeBuffer(ecdsaKey.Y.Bytes(), secp256k1Size), + D: newFixedSizeBuffer(ecdsaKey.D.Bytes(), dSize(ecdsaKey.Curve)), + } + } + + raw.Kid = jwk.KeyID + raw.Alg = jwk.Algorithm + raw.Use = jwk.Use + + return json.Marshal(raw) +} + +// jsonWebKey contains subset of json web key json properties. +type jsonWebKey struct { + Use string `json:"use,omitempty"` + Kty string `json:"kty,omitempty"` + Kid string `json:"kid,omitempty"` + Crv string `json:"crv,omitempty"` + Alg string `json:"alg,omitempty"` + + X *byteBuffer `json:"x,omitempty"` + Y *byteBuffer `json:"y,omitempty"` + + D *byteBuffer `json:"d,omitempty"` +} + +// Get size of curve in bytes. +func curveSize(crv elliptic.Curve) int { + bits := crv.Params().BitSize + + div := bits / bitsPerByte + mod := bits % bitsPerByte + + if mod == 0 { + return div + } + + return div + 1 +} + +func dSize(curve elliptic.Curve) int { + order := curve.Params().P + bitLen := order.BitLen() + size := bitLen / bitsPerByte + + if bitLen%bitsPerByte != 0 { + size++ + } + + return size +} + +// byteBuffer represents a slice of bytes that can be serialized to url-safe base64. 
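+//
+// For example, the bytes {0x01, 0x02} marshal to the JSON string "AQI"
+// (unpadded base64url) and unmarshal back to the same two bytes.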
+type byteBuffer struct { + data []byte +} + +func (b *byteBuffer) UnmarshalJSON(data []byte) error { + var encoded string + + err := json.Unmarshal(data, &encoded) + if err != nil { + return err + } + + if encoded == "" { + return nil + } + + decoded, err := base64.RawURLEncoding.DecodeString(encoded) + if err != nil { + return err + } + + *b = byteBuffer{ + data: decoded, + } + + return nil +} + +func (b *byteBuffer) MarshalJSON() ([]byte, error) { + return json.Marshal(b.base64()) +} + +func (b *byteBuffer) base64() string { + return base64.RawURLEncoding.EncodeToString(b.data) +} + +func (b byteBuffer) bigInt() *big.Int { + return new(big.Int).SetBytes(b.data) +} + +func newFixedSizeBuffer(data []byte, length int) *byteBuffer { + paddedData := make([]byte, length-len(data)) + + return &byteBuffer{ + data: append(paddedData, data...), + } +} + +// ErrInvalidKey is returned when passed JWK is invalid. +var ErrInvalidKey = errors.New("invalid JWK") diff --git a/pkg/jwsutil/jwk_test.go b/pkg/jwsutil/jwk_test.go new file mode 100644 index 0000000..dc23861 --- /dev/null +++ b/pkg/jwsutil/jwk_test.go @@ -0,0 +1,281 @@ +/* +Copyright Gen Digital Inc. All Rights Reserved. + +SPDX-License-Identifier: Apache-2.0 +*/ + +package jwsutil + +import ( + "crypto/ecdsa" + "crypto/elliptic" + "crypto/rand" + "testing" + + "github.com/btcsuite/btcd/btcec" + "github.com/square/go-jose/v3" + "github.com/square/go-jose/v3/json" + "github.com/stretchr/testify/require" +) + +func TestDecodePublicKey(t *testing.T) { + t.Run("Test decode public key success", func(t *testing.T) { + tests := []struct { + name string + jwkJSON string + }{ + { + name: "get public key bytes Ed25519 JWK", + jwkJSON: `{ + "kty": "OKP", + "use": "enc", + "crv": "Ed25519", + "kid": "sample@sample.id", + "x": "sEHL6KXs8bUz9Ss2qSWWjhhRMHVjrog0lzFENM132R8", + "alg": "EdDSA" + }`, + }, + { + name: "get public key bytes EC P-526 JWK", + jwkJSON: `{ + "kty": "EC", + "use": "enc", + "crv": "P-256", + "kid": "sample@sample.id", + "x": "JR7nhI47w7bxrNkp7Xt1nbmozNn-RB2Q-PWi7KHT8J0", + "y": "iXmKtH0caOgB1vV0CQwinwK999qdDvrssKhdbiAz9OI", + "alg": "ES256" + }`, + }, + { + name: "get public key bytes EC SECP256K1 JWK", + jwkJSON: `{ + "kty": "EC", + "use": "enc", + "crv": "secp256k1", + "kid": "sample@sample.id", + "x": "YRrvJocKf39GpdTnd-zBFE0msGDqawR-Cmtc6yKoFsM", + "y": "kE-dMH9S3mxnTXo0JFEhraCU_tVYFDfpu9tpP1LfVKQ", + "alg": "ES256K" + }`, + }, + { + name: "get private key bytes EC SECP256K1 JWK", + jwkJSON: `{ + "kty": "EC", + "d": "Lg5xrN8Usd_T-MfqBIs3bUWQCNsXY8hGU-Ru3Joom8E", + "use": "sig", + "crv": "secp256k1", + "kid": "sample@sample.id", + "x": "dv6X5DheBaFWR2H_yv9pUI2dcmL2XX8m7zgFc9Coaqg", + "y": "AUVSmytVWP350kV1RHhQ6AcCWaJj8AFt4aNLlDws7C4", + "alg": "ES256K" + }`, + }, + } + + t.Parallel() + + for _, test := range tests { + tc := test + t.Run(tc.name, func(t *testing.T) { + var jwk JWK + + err := json.Unmarshal([]byte(tc.jwkJSON), &jwk) + require.NoError(t, err) + + pkBytes, err := jwk.PublicKeyBytes() + require.NoError(t, err) + require.NotEmpty(t, pkBytes) + + jwkBytes, err := json.Marshal(&jwk) + require.NoError(t, err) + require.NotEmpty(t, jwkBytes) + }) + } + }) + + t.Run("Test decode public key failure", func(t *testing.T) { + tests := []struct { + name string + jwkJSON string + err string + }{ + { + name: "attempt public key bytes from invalid JSON bytes", + jwkJSON: `}`, + err: "invalid character", + }, + { + name: "attempt public key bytes from invalid curve", + jwkJSON: `{ + "kty": "EC", + "use": "enc", + "crv": "sec12341", + 
"kid": "sample@sample.id", + "x": "wQehEGTVCu32yp8IwTaBCqPUIYslyd-WoFRsfDKE9II", + "y": "rIJO8RmkExUecJ5i15L9OC7rl7pwmYFR8QQgdM1ERWI", + "alg": "ES256" + }`, + err: "unsupported elliptic curve 'sec12341'", + }, + { + name: "attempt public key bytes from invalid JSON bytes", + jwkJSON: `{ + "kty": "EC", + "use": "enc", + "crv": "secp256k1", + "kid": "sample@sample.id", + "x": "", + "y": "", + "alg": "ES256" + }`, + err: "unable to read JWK: invalid JWK", + }, + { + name: "attempt public key bytes from invalid JSON bytes", + jwkJSON: `{ + "kty": "EC", + "use": "enc", + "crv": "secp256k1", + "kid": "sample@sample.id", + "x": "wQehEGTVCu32yp8IwTaBCqPUIYslyd-WoFRsfDKE9II", + "y": "", + "alg": "ES256" + }`, + err: "unable to read JWK: invalid JWK", + }, + { + name: "attempt public key bytes from invalid JSON bytes", + jwkJSON: `{ + "kty": "EC", + "use": "enc", + "crv": "secp256k1", + "kid": "sample@sample.id", + "x": "x", + "y": "y", + "alg": "ES256" + }`, + err: "unable to read JWK", + }, + { + name: "X is not defined", + jwkJSON: `{ + "kty": "EC", + "use": "enc", + "crv": "secp256k1", + "kid": "sample@sample.id", + "y": "rIJO8RmkExUecJ5i15L9OC7rl7pwmYFR8QQgdM1ERWI", + "alg": "ES256" + }`, + err: "invalid JWK", + }, + { + name: "Y is not defined", + jwkJSON: `{ + "kty": "EC", + "use": "enc", + "crv": "secp256k1", + "kid": "sample@sample.id", + "x": "wQehEGTVCu32yp8IwTaBCqPUIYslyd-WoFRsfDKE9II", + "alg": "ES256" + }`, + err: "invalid JWK", + }, + { + name: "Y is not defined", + jwkJSON: `{ + "kty": "EC", + "use": "enc", + "crv": "secp256k1", + "kid": "sample@sample.id", + "x": "wQehEGTVCu32yp8IwTaBCqPUIYslyd-WoFRsfDKE9II", + "y": "rIJO8RmkExUecJ5i15L9OC7rl7pwmYFR8QQgdM1ERWI", + "d": "", + "alg": "ES256" + }`, + err: "invalid JWK", + }, + { + name: "Y is not defined", + jwkJSON: `{ + "kty": "EC", + "use": "enc", + "crv": "secp256k1", + "kid": "sample@sample.id", + "x": "wQehEGTVCu32yp8IwTaBCqPUIYslyd-WoFRsfDKE9II", + "y": "rIJO8RmkExUecJ5i15L9OC7rl7pwmYFR8QQgdM1ERWO", + "alg": "ES256" + }`, + err: "unable to read JWK: invalid JWK", + }, + { + name: "attempt public key bytes from invalid JSON bytes", + jwkJSON: `{ + "kty": "EC", + "use": "enc", + "crv": "secp256k1", + "kid": "sample@sample.id", + "x": "{", + "y": "y", + "alg": "ES256" + }`, + err: "unable to read JWK", + }, + } + + t.Parallel() + + for _, test := range tests { + tc := test + t.Run(tc.name, func(t *testing.T) { + var jwk JWK + err := json.Unmarshal([]byte(tc.jwkJSON), &jwk) + require.Error(t, err) + require.Contains(t, err.Error(), tc.err) + }) + } + }) +} + +func TestByteBufferUnmarshalFailure(t *testing.T) { + bb := &byteBuffer{} + err := bb.UnmarshalJSON([]byte("{")) + require.Error(t, err) +} + +func TestCurveSize(t *testing.T) { + require.Equal(t, 32, curveSize(btcec.S256())) + require.Equal(t, 32, curveSize(elliptic.P256())) + require.Equal(t, 28, curveSize(elliptic.P224())) + require.Equal(t, 48, curveSize(elliptic.P384())) + require.Equal(t, 66, curveSize(elliptic.P521())) +} + +func TestJWK_PublicKeyBytesValidation(t *testing.T) { + // invalid public key + privKey, err := ecdsa.GenerateKey(btcec.S256(), rand.Reader) + require.NoError(t, err) + + jwk := &JWK{ + JSONWebKey: jose.JSONWebKey{ + Key: &privKey.PublicKey, + Algorithm: "ES256", + KeyID: "pubkey#123", + }, + Crv: "P-256", + Kty: "EC", + } + + pkBytes, err := jwk.PublicKeyBytes() + require.Error(t, err) + require.Contains(t, err.Error(), "failed to read public key bytes") + require.Empty(t, pkBytes) + + // unsupported public key type + jwk.Key = "key of invalid 
type" + pkBytes, err = jwk.PublicKeyBytes() + require.Error(t, err) + require.Contains(t, err.Error(), "unsupported public key type in kid 'pubkey#123'") + require.Empty(t, pkBytes) +} diff --git a/pkg/jwsutil/jws.go b/pkg/jwsutil/jws.go new file mode 100644 index 0000000..08c5412 --- /dev/null +++ b/pkg/jwsutil/jws.go @@ -0,0 +1,296 @@ +/* +Copyright Gen Digital Inc. All Rights Reserved. + +SPDX-License-Identifier: Apache-2.0 +*/ + +package jwsutil + +import ( + "encoding/base64" + "errors" + "fmt" + "strings" + + "github.com/square/go-jose/v3/json" + + "github.com/trustbloc/sidetree-go/pkg/jws" +) + +const ( + jwsPartsCount = 3 + jwsHeaderPart = 0 + jwsPayloadPart = 1 + jwsSignaturePart = 2 +) + +// JSONWebSignature defines JSON Web Signature (https://tools.ietf.org/html/rfc7515) +type JSONWebSignature struct { + ProtectedHeaders jws.Headers + UnprotectedHeaders jws.Headers + Payload []byte + + signature []byte + joseHeaders jws.Headers +} + +// Signer defines JWS Signer interface. It makes signing of data and provides custom JWS headers relevant to the signer. +type Signer interface { + // Sign signs. + Sign(data []byte) ([]byte, error) + + // Headers provides JWS headers. "alg" header must be provided (see https://tools.ietf.org/html/rfc7515#section-4.1) + Headers() jws.Headers +} + +// NewJWS creates JSON Web Signature. +func NewJWS(protectedHeaders, unprotectedHeaders jws.Headers, payload []byte, signer Signer) (*JSONWebSignature, error) { + headers := mergeHeaders(protectedHeaders, signer.Headers()) + s := &JSONWebSignature{ + ProtectedHeaders: headers, + UnprotectedHeaders: unprotectedHeaders, + Payload: payload, + joseHeaders: headers, + } + + signature, err := sign(s.joseHeaders, payload, signer) + if err != nil { + return nil, fmt.Errorf("sign JWS: %w", err) + } + + s.signature = signature + + return s, nil +} + +// SerializeCompact makes JWS Compact Serialization (https://tools.ietf.org/html/rfc7515#section-7.1) +func (s JSONWebSignature) SerializeCompact(detached bool) (string, error) { + byteHeaders, err := json.Marshal(s.joseHeaders) + if err != nil { + return "", fmt.Errorf("marshal JWS JOSE Headers: %w", err) + } + + b64Headers := base64.RawURLEncoding.EncodeToString(byteHeaders) + + b64Payload := "" + if !detached { + b64Payload = base64.RawURLEncoding.EncodeToString(s.Payload) + } + + b64Signature := base64.RawURLEncoding.EncodeToString(s.signature) + + return fmt.Sprintf("%s.%s.%s", + b64Headers, + b64Payload, + b64Signature), nil +} + +// Signature returns a copy of JWS signature. +func (s JSONWebSignature) Signature() []byte { + if s.signature == nil { + return nil + } + + sCopy := make([]byte, len(s.signature)) + copy(sCopy, s.signature) + + return sCopy +} + +func mergeHeaders(h1, h2 jws.Headers) jws.Headers { + h := make(jws.Headers, len(h1)+len(h2)) + + for k, v := range h2 { + h[k] = v + } + + for k, v := range h1 { + h[k] = v + } + + return h +} + +func sign(joseHeaders jws.Headers, payload []byte, signer Signer) ([]byte, error) { + err := checkJWSHeaders(joseHeaders) + if err != nil { + return nil, fmt.Errorf("check JOSE headers: %w", err) + } + + sigInput, err := signingInput(joseHeaders, payload) + if err != nil { + return nil, fmt.Errorf("prepare JWS verification data: %w", err) + } + + signature, err := signer.Sign(sigInput) + if err != nil { + return nil, fmt.Errorf("sign JWS verification data: %w", err) + } + + return signature, nil +} + +// jwsParseOpts holds options for the JWS Parsing. 
+type jwsParseOpts struct {
+	detachedPayload []byte
+}
+
+// ParseOpt is a JWS parser option.
+type ParseOpt func(opts *jwsParseOpts)
+
+// WithJWSDetachedPayload sets the detached payload to be used when parsing a JWS.
+func WithJWSDetachedPayload(payload []byte) ParseOpt {
+	return func(opts *jwsParseOpts) {
+		opts.detachedPayload = payload
+	}
+}
+
+// ParseJWS parses a serialized JWS. Currently only JWS Compact Serialization parsing is supported.
+func ParseJWS(jwsStr string, opts ...ParseOpt) (*JSONWebSignature, error) {
+	pOpts := &jwsParseOpts{}
+
+	for _, opt := range opts {
+		opt(pOpts)
+	}
+
+	if strings.HasPrefix(jwsStr, "{") {
+		// TODO support JWS JSON serialization format
+		// https://github.com/hyperledger/aries-framework-go/issues/1331
+		return nil, errors.New("JWS JSON serialization is not supported")
+	}
+
+	return parseCompacted(jwsStr, pOpts)
+}
+
+// VerifyJWS parses and validates a serialized JWS. Currently only JWS Compact Serialization parsing is supported.
+func VerifyJWS(jwsStr string, jwk *jws.JWK, opts ...ParseOpt) (*JSONWebSignature, error) {
+	parsedJWS, err := ParseJWS(jwsStr, opts...)
+	if err != nil {
+		return nil, err
+	}
+
+	sInput, err := signingInput(parsedJWS.ProtectedHeaders, parsedJWS.Payload)
+	if err != nil {
+		return nil, fmt.Errorf("build signing input: %w", err)
+	}
+
+	err = VerifySignature(jwk, parsedJWS.signature, sInput)
+	if err != nil {
+		return nil, err
+	}
+
+	return parsedJWS, nil
+}
+
+// IsCompactJWS checks whether the input is a compact JWS (based on https://tools.ietf.org/html/rfc7516#section-9)
+func IsCompactJWS(s string) bool {
+	parts := strings.Split(s, ".")
+
+	return len(parts) == jwsPartsCount
+}
+
+func parseCompacted(jwsCompact string, opts *jwsParseOpts) (*JSONWebSignature, error) {
+	parts := strings.Split(jwsCompact, ".")
+	if len(parts) != jwsPartsCount {
+		return nil, errors.New("invalid JWS compact format")
+	}
+
+	joseHeaders, err := parseCompactedHeaders(parts)
+	if err != nil {
+		return nil, err
+	}
+
+	payload, err := parseCompactedPayload(parts[jwsPayloadPart], opts)
+	if err != nil {
+		return nil, err
+	}
+
+	signature, err := base64.RawURLEncoding.DecodeString(parts[jwsSignaturePart])
+	if err != nil {
+		return nil, fmt.Errorf("decode base64 signature: %w", err)
+	}
+
+	if len(signature) == 0 {
+		return nil, errors.New("compact jws signature is empty")
+	}
+
+	return &JSONWebSignature{
+		ProtectedHeaders: joseHeaders,
+		Payload:          payload,
+		signature:        signature,
+		joseHeaders:      joseHeaders,
+	}, nil
+}
+
+func parseCompactedPayload(jwsPayload string, opts *jwsParseOpts) ([]byte, error) {
+	if len(opts.detachedPayload) > 0 {
+		return opts.detachedPayload, nil
+	}
+
+	payload, err := base64.RawURLEncoding.DecodeString(jwsPayload)
+	if err != nil {
+		return nil, fmt.Errorf("decode base64 payload: %w", err)
+	}
+
+	if len(payload) == 0 {
+		return nil, errors.New("compact jws payload is empty")
+	}
+
+	return payload, nil
+}
+
+func parseCompactedHeaders(parts []string) (jws.Headers, error) {
+	headersBytes, err := base64.RawURLEncoding.DecodeString(parts[jwsHeaderPart])
+	if err != nil {
+		return nil, fmt.Errorf("decode base64 header: %w", err)
+	}
+
+	var joseHeaders jws.Headers
+
+	err = json.Unmarshal(headersBytes, &joseHeaders)
+	if err != nil {
+		return nil, fmt.Errorf("unmarshal JSON headers: %w", err)
+	}
+
+	err = checkJWSHeaders(joseHeaders)
+	if err != nil {
+		return nil, err
+	}
+
+	return joseHeaders, nil
+}
+
+func signingInput(headers jws.Headers, payload []byte) ([]byte, error) {
+	headersBytes, err :=
json.Marshal(headers) + if err != nil { + return nil, fmt.Errorf("serialize JWS headers: %w", err) + } + + hBase64 := true + + if b64, ok := headers[jws.HeaderB64Payload]; ok { + if hBase64, ok = b64.(bool); !ok { + return nil, errors.New("invalid b64 header") + } + } + + headersStr := base64.RawURLEncoding.EncodeToString(headersBytes) + + var payloadStr string + + if hBase64 { + payloadStr = base64.RawURLEncoding.EncodeToString(payload) + } else { + payloadStr = string(payload) + } + + return []byte(fmt.Sprintf("%s.%s", headersStr, payloadStr)), nil +} + +func checkJWSHeaders(headers jws.Headers) error { + if _, ok := headers[jws.HeaderAlgorithm]; !ok { + return fmt.Errorf("%s JWS header is not defined", jws.HeaderAlgorithm) + } + + return nil +} diff --git a/pkg/jwsutil/jws_test.go b/pkg/jwsutil/jws_test.go new file mode 100644 index 0000000..c2bede0 --- /dev/null +++ b/pkg/jwsutil/jws_test.go @@ -0,0 +1,279 @@ +/* +Copyright Gen Digital Inc. All Rights Reserved. + +SPDX-License-Identifier: Apache-2.0 +*/ + +package jwsutil + +import ( + "crypto/ecdsa" + "crypto/ed25519" + "crypto/elliptic" + "crypto/rand" + "encoding/base64" + "errors" + "fmt" + "strings" + "testing" + + "github.com/stretchr/testify/require" + + "github.com/trustbloc/sidetree-go/pkg/jws" + "github.com/trustbloc/sidetree-go/pkg/util/ecsigner" + "github.com/trustbloc/sidetree-go/pkg/util/edsigner" +) + +func TestHeaders_GetKeyID(t *testing.T) { + kid, ok := jws.Headers{"kid": "key id"}.KeyID() + require.True(t, ok) + require.Equal(t, "key id", kid) + + kid, ok = jws.Headers{"kid": 777}.KeyID() + require.False(t, ok) + require.Empty(t, kid) + + kid, ok = jws.Headers{}.KeyID() + require.False(t, ok) + require.Empty(t, kid) +} + +func TestHeaders_GetAlgorithm(t *testing.T) { + kid, ok := jws.Headers{"alg": "EdDSA"}.Algorithm() + require.True(t, ok) + require.Equal(t, "EdDSA", kid) + + kid, ok = jws.Headers{"alg": 777}.Algorithm() + require.False(t, ok) + require.Empty(t, kid) + + kid, ok = jws.Headers{}.Algorithm() + require.False(t, ok) + require.Empty(t, kid) +} + +func TestJSONWebSignature_SerializeCompact(t *testing.T) { + headers := jws.Headers{"alg": "EdSDA", "typ": "JWT"} + payload := []byte("payload") + + newJWS, err := NewJWS(headers, nil, payload, + &testSigner{ + headers: jws.Headers{"alg": "dummy"}, + signature: []byte("signature"), + }) + require.NoError(t, err) + + jwsCompact, err := newJWS.SerializeCompact(false) + require.NoError(t, err) + require.NotEmpty(t, jwsCompact) + + // b64=false + newJWS, err = NewJWS(headers, nil, payload, + &testSigner{ + headers: jws.Headers{"alg": "dummy", "b64": false}, + signature: []byte("signature"), + }) + require.NoError(t, err) + + jwsCompact, err = newJWS.SerializeCompact(false) + require.NoError(t, err) + require.NotEmpty(t, jwsCompact) + + // signer error + newJWS, err = NewJWS(headers, nil, payload, + &testSigner{ + headers: jws.Headers{"alg": "dummy"}, + err: errors.New("signer error"), + }) + require.Error(t, err) + require.Contains(t, err.Error(), "sign JWS verification data") + require.Nil(t, newJWS) + + // no alg defined + newJWS, err = NewJWS(jws.Headers{}, nil, payload, + &testSigner{ + headers: jws.Headers{}, + }) + require.Error(t, err) + require.Contains(t, err.Error(), "alg JWS header is not defined") + require.Nil(t, newJWS) + + // jose headers marshalling error + newJWS, err = NewJWS(jws.Headers{}, nil, payload, + &testSigner{ + headers: getUnmarshallableMap(), + }) + require.Error(t, err) + require.Contains(t, err.Error(), "serialize JWS headers") + 
require.Nil(t, newJWS) + + // invalid b64 + newJWS, err = NewJWS(jws.Headers{}, nil, payload, + &testSigner{ + headers: jws.Headers{"alg": "dummy", "b64": "invalid"}, + signature: []byte("signature"), + }) + require.Error(t, err) + require.Contains(t, err.Error(), "invalid b64 header") + require.Nil(t, newJWS) +} + +func TestJSONWebSignature_Signature(t *testing.T) { + jws := &JSONWebSignature{ + signature: []byte("signature"), + } + require.NotEmpty(t, jws.Signature()) + + jws.signature = nil + require.Empty(t, jws.Signature()) +} + +func TestParseJWS(t *testing.T) { + privateKey, err := ecdsa.GenerateKey(elliptic.P256(), rand.Reader) + require.NoError(t, err) + + jwk, err := getPublicKeyJWK(&privateKey.PublicKey) + require.NoError(t, err) + + corruptedBased64 := "XXXXXaGVsbG8=" + + signer := ecsigner.New(privateKey, "ES256", "key-1") + jws, err := NewJWS(signer.Headers(), nil, []byte("payload"), + signer) + require.NoError(t, err) + + jwsCompact, err := jws.SerializeCompact(false) + require.NoError(t, err) + require.NotEmpty(t, jwsCompact) + + validJWSParts := strings.Split(jwsCompact, ".") + + parsedJWS, err := VerifyJWS(jwsCompact, jwk) + require.NoError(t, err) + require.NotNil(t, parsedJWS) + require.Equal(t, jws, parsedJWS) + + jwsDetached := fmt.Sprintf("%s.%s.%s", validJWSParts[0], "", validJWSParts[2]) + + detachedPayload, err := base64.RawURLEncoding.DecodeString(validJWSParts[1]) + require.NoError(t, err) + + parsedJWS, err = VerifyJWS(jwsDetached, jwk, WithJWSDetachedPayload(detachedPayload)) + require.NoError(t, err) + require.NotNil(t, parsedJWS) + require.Equal(t, jws, parsedJWS) + + // Parse not compact JWS format + parsedJWS, err = VerifyJWS(`{"some": "JSON"}`, jwk) + require.Error(t, err) + require.EqualError(t, err, "JWS JSON serialization is not supported") + require.Nil(t, parsedJWS) + + // Parse invalid compact JWS format + parsedJWS, err = VerifyJWS("two_parts.only", jwk) + require.Error(t, err) + require.EqualError(t, err, "invalid JWS compact format") + require.Nil(t, parsedJWS) + + // invalid headers + jwsWithInvalidHeaders := fmt.Sprintf("%s.%s.%s", "invalid", validJWSParts[1], validJWSParts[2]) + parsedJWS, err = VerifyJWS(jwsWithInvalidHeaders, jwk) + require.Error(t, err) + require.Contains(t, err.Error(), "unmarshal JSON headers") + require.Nil(t, parsedJWS) + + jwsWithInvalidHeaders = fmt.Sprintf("%s.%s.%s", corruptedBased64, validJWSParts[1], validJWSParts[2]) + parsedJWS, err = VerifyJWS(jwsWithInvalidHeaders, jwk) + require.Error(t, err) + require.Contains(t, err.Error(), "decode base64 header") + require.Nil(t, parsedJWS) + + emptyHeaders := base64.RawURLEncoding.EncodeToString([]byte("{}")) + + jwsWithInvalidHeaders = fmt.Sprintf("%s.%s.%s", emptyHeaders, validJWSParts[1], validJWSParts[2]) + parsedJWS, err = VerifyJWS(jwsWithInvalidHeaders, jwk) + require.Error(t, err) + require.Contains(t, err.Error(), "alg JWS header is not defined") + require.Nil(t, parsedJWS) + + // invalid payload + jwsWithInvalidPayload := fmt.Sprintf("%s.%s.%s", validJWSParts[0], corruptedBased64, validJWSParts[2]) + parsedJWS, err = VerifyJWS(jwsWithInvalidPayload, jwk) + require.Error(t, err) + require.Contains(t, err.Error(), "decode base64 payload") + require.Nil(t, parsedJWS) + + // invalid signature + jwsWithInvalidSignature := fmt.Sprintf("%s.%s.%s", validJWSParts[0], validJWSParts[1], corruptedBased64) + parsedJWS, err = VerifyJWS(jwsWithInvalidSignature, jwk) + require.Error(t, err) + require.Contains(t, err.Error(), "decode base64 signature") + require.Nil(t, 
parsedJWS)
+
+	// missing signature
+	jwsMissingSignature := fmt.Sprintf("%s.%s.%s", validJWSParts[0], validJWSParts[1], "")
+	parsedJWS, err = VerifyJWS(jwsMissingSignature, jwk)
+	require.Error(t, err)
+	require.Contains(t, err.Error(), "compact jws signature is empty")
+	require.Nil(t, parsedJWS)
+
+	// missing payload
+	jwsMissingPayload := fmt.Sprintf("%s.%s.%s", validJWSParts[0], "", validJWSParts[2])
+	parsedJWS, err = VerifyJWS(jwsMissingPayload, jwk)
+	require.Error(t, err)
+	require.Contains(t, err.Error(), "compact jws payload is empty")
+	require.Nil(t, parsedJWS)
+
+	// signature verification error
+	jwk.Kty = "type"
+	parsedJWS, err = VerifyJWS(jwsCompact, jwk)
+	require.Error(t, err)
+	require.Contains(t, err.Error(), "key type is not supported for verifying signature")
+	require.Nil(t, parsedJWS)
+}
+
+func TestParseJWS_ED25519(t *testing.T) {
+	publicKey, privateKey, err := ed25519.GenerateKey(rand.Reader)
+	require.NoError(t, err)
+
+	jwk, err := getPublicKeyJWK(publicKey)
+	require.NoError(t, err)
+
+	signer := edsigner.New(privateKey, "EdDSA", "key-1")
+	jws, err := NewJWS(signer.Headers(), nil, []byte("payload"), signer)
+	require.NoError(t, err)
+
+	jwsCompact, err := jws.SerializeCompact(false)
+	require.NoError(t, err)
+	require.NotEmpty(t, jwsCompact)
+
+	parsedJWS, err := VerifyJWS(jwsCompact, jwk)
+	require.NoError(t, err)
+	require.NotNil(t, parsedJWS)
+	require.Equal(t, jws, parsedJWS)
+}
+
+func TestIsCompactJWS(t *testing.T) {
+	require.True(t, IsCompactJWS("a.b.c"))
+	require.False(t, IsCompactJWS("a.b"))
+	require.False(t, IsCompactJWS(`{"some": "JSON"}`))
+	require.False(t, IsCompactJWS(""))
+}
+
+type testSigner struct {
+	headers   jws.Headers
+	signature []byte
+	err       error
+}
+
+func (s testSigner) Sign(_ []byte) ([]byte, error) {
+	return s.signature, s.err
+}
+
+func (s testSigner) Headers() jws.Headers {
+	return s.headers
+}
+
+func getUnmarshallableMap() map[string]interface{} {
+	return map[string]interface{}{"alg": "JWS", "error": map[chan int]interface{}{make(chan int): 6}}
+}
diff --git a/pkg/jwsutil/signature.go b/pkg/jwsutil/signature.go
new file mode 100644
index 0000000..6a3b6a9
--- /dev/null
+++ b/pkg/jwsutil/signature.go
@@ -0,0 +1,168 @@
+/*
+Copyright Gen Digital Inc. All Rights Reserved.
+
+SPDX-License-Identifier: Apache-2.0
+*/
+
+package jwsutil
+
+import (
+	"crypto"
+	"crypto/ecdsa"
+	"crypto/ed25519"
+	"crypto/elliptic"
+	"encoding/json"
+	"errors"
+	"fmt"
+	"math/big"
+
+	"github.com/btcsuite/btcd/btcec"
+
+	"github.com/trustbloc/sidetree-go/pkg/jws"
+)
+
+const (
+	p256KeySize      = 32
+	p384KeySize      = 48
+	p521KeySize      = 66
+	secp256k1KeySize = 32
+)
+
+// VerifySignature verifies the signature against the public key in JWK format.
+func VerifySignature(jwk *jws.JWK, signature, msg []byte) error {
+	switch jwk.Kty {
+	case "EC":
+		return verifyECSignature(jwk, signature, msg)
+	case "OKP":
+		return verifyEd25519Signature(jwk, signature, msg)
+	default:
+		return fmt.Errorf("'%s' key type is not supported for verifying signature", jwk.Kty)
+	}
+}
+
+func verifyEd25519Signature(jwk *jws.JWK, signature, msg []byte) error {
+	pubKey, err := GetED25519PublicKey(jwk)
+	if err != nil {
+		return err
+	}
+
+	verified := ed25519.Verify(pubKey, msg, signature)
+	if !verified {
+		return errors.New("ed25519: invalid signature")
+	}
+
+	return nil
+}
+
+// GetED25519PublicKey returns the ed25519 public key.
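+//
+// Illustrative call (the x coordinate is a placeholder for a base64url-encoded
+// 32-byte value):
+//
+//	pub, err := GetED25519PublicKey(&jws.JWK{Kty: "OKP", Crv: "Ed25519", X: "<x>"})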
+func GetED25519PublicKey(jwk *jws.JWK) (ed25519.PublicKey, error) { + jsonBytes, err := json.Marshal(jwk) + if err != nil { + return nil, err + } + + var internalJWK JWK + err = internalJWK.UnmarshalJSON(jsonBytes) + if err != nil { + return nil, err + } + + pubKey, ok := internalJWK.Key.(ed25519.PublicKey) + if !ok { + return nil, errors.New("unexpected public key type for ed25519") + } + + // ed25519 panics if key size is wrong + if len(pubKey) != ed25519.PublicKeySize { + return nil, errors.New("ed25519: invalid key") + } + + return pubKey, nil +} + +func verifyECSignature(jwk *jws.JWK, signature, msg []byte) error { + ec := parseEllipticCurve(jwk.Crv) + if ec == nil { + return fmt.Errorf("ecdsa: unsupported elliptic curve '%s'", jwk.Crv) + } + + jwkBytes, err := json.Marshal(jwk) + if err != nil { + return err + } + + internalJWK := JWK{ + Kty: jwk.Kty, + Crv: jwk.Crv, + } + + err = internalJWK.UnmarshalJSON(jwkBytes) + if err != nil { + return err + } + + ecdsaPubKey, ok := internalJWK.JSONWebKey.Key.(*ecdsa.PublicKey) + if !ok { + return errors.New("not an EC public key") + } + + if len(signature) != 2*ec.keySize { + return errors.New("ecdsa: invalid signature size") + } + + hasher := ec.hash.New() + + _, err = hasher.Write(msg) + if err != nil { + return errors.New("ecdsa: hash error") + } + + hash := hasher.Sum(nil) + + r := big.NewInt(0).SetBytes(signature[:ec.keySize]) + s := big.NewInt(0).SetBytes(signature[ec.keySize:]) + + verified := ecdsa.Verify(ecdsaPubKey, hash, r, s) + if !verified { + return errors.New("ecdsa: invalid signature") + } + + return nil +} + +type ellipticCurve struct { + curve elliptic.Curve + keySize int + hash crypto.Hash +} + +func parseEllipticCurve(curve string) *ellipticCurve { + switch curve { + case "P-256": + return &ellipticCurve{ + curve: elliptic.P256(), + keySize: p256KeySize, + hash: crypto.SHA256, + } + case "P-384": + return &ellipticCurve{ + curve: elliptic.P384(), + keySize: p384KeySize, + hash: crypto.SHA384, + } + case "P-521": + return &ellipticCurve{ + curve: elliptic.P521(), + keySize: p521KeySize, + hash: crypto.SHA512, + } + case "secp256k1": + return &ellipticCurve{ + curve: btcec.S256(), + keySize: secp256k1KeySize, + hash: crypto.SHA256, + } + default: + return nil + } +} diff --git a/pkg/jwsutil/signature_test.go b/pkg/jwsutil/signature_test.go new file mode 100644 index 0000000..5677da5 --- /dev/null +++ b/pkg/jwsutil/signature_test.go @@ -0,0 +1,285 @@ +/* +Copyright Gen Digital Inc. All Rights Reserved. 
+ +SPDX-License-Identifier: Apache-2.0 +*/ + +package jwsutil + +import ( + "crypto" + "crypto/ecdsa" + "crypto/ed25519" + "crypto/elliptic" + "crypto/rand" + "fmt" + "reflect" + "testing" + + "github.com/btcsuite/btcd/btcec" + gojose "github.com/square/go-jose/v3" + "github.com/square/go-jose/v3/json" + "github.com/stretchr/testify/require" + + "github.com/trustbloc/sidetree-go/pkg/jws" +) + +func TestVerifySignature(t *testing.T) { + t.Run("success EC P-256", func(t *testing.T) { + privateKey, err := ecdsa.GenerateKey(elliptic.P256(), rand.Reader) + require.NoError(t, err) + + jwk, err := getPublicKeyJWK(&privateKey.PublicKey) + require.NoError(t, err) + + payload := []byte("test") + + signature := getECSignature(privateKey, payload, crypto.SHA256) + err = VerifySignature(jwk, signature, payload) + require.NoError(t, err) + }) + + t.Run("success EC P-384", func(t *testing.T) { + privateKey, err := ecdsa.GenerateKey(elliptic.P384(), rand.Reader) + require.NoError(t, err) + + jwk, err := getPublicKeyJWK(&privateKey.PublicKey) + require.NoError(t, err) + + payload := []byte("test") + + signature := getECSignature(privateKey, payload, crypto.SHA384) + err = VerifySignature(jwk, signature, payload) + require.NoError(t, err) + }) + + t.Run("success EC P-521", func(t *testing.T) { + privateKey, err := ecdsa.GenerateKey(elliptic.P521(), rand.Reader) + require.NoError(t, err) + + jwk, err := getPublicKeyJWK(&privateKey.PublicKey) + require.NoError(t, err) + + payload := []byte("test") + + signature := getECSignature(privateKey, payload, crypto.SHA512) + err = VerifySignature(jwk, signature, payload) + require.NoError(t, err) + }) + + t.Run("success EC secp256k1", func(t *testing.T) { + privateKey, err := ecdsa.GenerateKey(btcec.S256(), rand.Reader) + require.NoError(t, err) + + jwk, err := getPublicKeyJWK(&privateKey.PublicKey) + require.NoError(t, err) + + payload := []byte("test") + + signature := getECSignature(privateKey, payload, crypto.SHA256) + err = VerifySignature(jwk, signature, payload) + require.NoError(t, err) + }) + + t.Run("success ED25519", func(t *testing.T) { + publicKey, privateKey, err := ed25519.GenerateKey(rand.Reader) + require.NoError(t, err) + + payload := []byte("test message") + signature := ed25519.Sign(privateKey, payload) + + jwk, err := getPublicKeyJWK(publicKey) + require.NoError(t, err) + + err = VerifySignature(jwk, signature, payload) + require.NoError(t, err) + }) + + t.Run("unsupported key type", func(t *testing.T) { + privateKey, err := ecdsa.GenerateKey(elliptic.P256(), rand.Reader) + require.NoError(t, err) + + jwk, err := getPublicKeyJWK(&privateKey.PublicKey) + require.NoError(t, err) + + payload := []byte("test") + signature := getECSignatureSHA256(privateKey, payload) + + jwk.Kty = "not-supported" + err = VerifySignature(jwk, signature, payload) + require.Error(t, err) + require.Contains(t, err.Error(), "key type is not supported for verifying signature") + }) +} + +func TestVerifyECSignature(t *testing.T) { + privateKey, err := ecdsa.GenerateKey(elliptic.P256(), rand.Reader) + require.NoError(t, err) + + t.Run("success", func(t *testing.T) { + jwk, err := getPublicKeyJWK(&privateKey.PublicKey) + require.NoError(t, err) + + payload := []byte("test") + + signature := getECSignatureSHA256(privateKey, payload) + err = verifyECSignature(jwk, signature, payload) + require.NoError(t, err) + }) + t.Run("unsupported elliptic curve", func(t *testing.T) { + jwk, err := getPublicKeyJWK(&privateKey.PublicKey) + require.NoError(t, err) + + payload := []byte("test") 
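+		// Produce a valid signature with the real curve first; only the curve
+		// name on the JWK is corrupted below, so verification must fail at
+		// curve lookup rather than at signing.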
+ signature := getECSignatureSHA256(privateKey, payload) + + jwk.Crv = "invalid" + err = verifyECSignature(jwk, signature, payload) + require.Error(t, err) + require.Contains(t, err.Error(), "unsupported elliptic curve") + }) + t.Run("invalid signature size", func(t *testing.T) { + jwk, err := getPublicKeyJWK(&privateKey.PublicKey) + require.NoError(t, err) + + err = verifyECSignature(jwk, []byte("signature"), []byte("test")) + require.Error(t, err) + require.Contains(t, err.Error(), "invalid signature size") + }) + t.Run("invalid signature", func(t *testing.T) { + jwk, err := getPublicKeyJWK(&privateKey.PublicKey) + require.NoError(t, err) + + signature := getECSignatureSHA256(privateKey, []byte("test")) + + err = verifyECSignature(jwk, signature, []byte("different")) + require.Error(t, err) + require.Contains(t, err.Error(), "ecdsa: invalid signature") + }) +} + +func TestVerifyED25519Signature(t *testing.T) { + publicKey, privateKey, err := ed25519.GenerateKey(rand.Reader) + require.NoError(t, err) + + payload := []byte("test message") + signature := ed25519.Sign(privateKey, payload) + + t.Run("success", func(t *testing.T) { + jwk, err := getPublicKeyJWK(publicKey) + require.NoError(t, err) + + err = verifyEd25519Signature(jwk, signature, payload) + require.NoError(t, err) + }) + + t.Run("invalid payload", func(t *testing.T) { + jwk, err := getPublicKeyJWK(publicKey) + require.NoError(t, err) + + err = verifyEd25519Signature(jwk, signature, []byte("different payload")) + require.Error(t, err) + require.Contains(t, err.Error(), "ed25519: invalid signature") + }) + + t.Run("invalid signature", func(t *testing.T) { + jwk, err := getPublicKeyJWK(publicKey) + require.NoError(t, err) + + err = verifyEd25519Signature(jwk, []byte("signature"), payload) + require.Error(t, err) + require.Contains(t, err.Error(), "ed25519: invalid signature") + }) + + t.Run("invalid curve", func(t *testing.T) { + jwk, err := getPublicKeyJWK(publicKey) + require.NoError(t, err) + jwk.Crv = "invalid" + + err = verifyEd25519Signature(jwk, signature, payload) + require.Error(t, err) + require.Contains(t, err.Error(), "unknown curve") + }) + + t.Run("wrong key type - EC key", func(t *testing.T) { + ecPrivateKey, err := ecdsa.GenerateKey(elliptic.P256(), rand.Reader) + require.NoError(t, err) + + jwk, err := getPublicKeyJWK(&ecPrivateKey.PublicKey) + require.NoError(t, err) + + err = verifyEd25519Signature(jwk, signature, payload) + require.Error(t, err) + require.Contains(t, err.Error(), "unexpected public key type for ed25519") + }) +} + +func getECSignatureSHA256(privateKey *ecdsa.PrivateKey, payload []byte) []byte { + return getECSignature(privateKey, payload, crypto.SHA256) +} + +func getECSignature(privKey *ecdsa.PrivateKey, payload []byte, hash crypto.Hash) []byte { + hasher := hash.New() + + _, err := hasher.Write(payload) + if err != nil { + panic(err) + } + + hashed := hasher.Sum(nil) + + r, s, err := ecdsa.Sign(rand.Reader, privKey, hashed) + if err != nil { + panic(err) + } + + curveBits := privKey.Curve.Params().BitSize + + keyBytes := curveBits / 8 + if curveBits%8 > 0 { + keyBytes++ + } + + copyPadded := func(source []byte, size int) []byte { + dest := make([]byte, size) + copy(dest[size-len(source):], source) + + return dest + } + + return append(copyPadded(r.Bytes(), keyBytes), copyPadded(s.Bytes(), keyBytes)...) +} + +// getPublicKeyJWK returns public key in JWK format. 
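+//
+// Illustrative use with a freshly generated ECDSA key:
+//
+//	priv, _ := ecdsa.GenerateKey(elliptic.P256(), rand.Reader)
+//	jwk, err := getPublicKeyJWK(&priv.PublicKey)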
+func getPublicKeyJWK(pubKey interface{}) (*jws.JWK, error) { + internalJWK := JWK{ + JSONWebKey: gojose.JSONWebKey{Key: pubKey}, + } + + switch key := pubKey.(type) { + case ed25519.PublicKey: + // handled automatically by gojose + case *ecdsa.PublicKey: + ecdsaPubKey := pubKey.(*ecdsa.PublicKey) + // using internal jwk wrapper marshall feature since gojose doesn't handle secp256k1 curve + if ecdsaPubKey.Curve == btcec.S256() { + internalJWK.Kty = secp256k1Kty + internalJWK.Crv = secp256k1Crv + } + default: + return nil, fmt.Errorf("unknown key type '%s'", reflect.TypeOf(key)) + } + + jsonJWK, err := internalJWK.MarshalJSON() + if err != nil { + return nil, err + } + + var jwk jws.JWK + err = json.Unmarshal(jsonJWK, &jwk) + if err != nil { + return nil, err + } + + return &jwk, nil +} diff --git a/pkg/mocks/documentcomposer.gen.go b/pkg/mocks/documentcomposer.gen.go new file mode 100644 index 0000000..0e0e449 --- /dev/null +++ b/pkg/mocks/documentcomposer.gen.go @@ -0,0 +1,122 @@ +// Code generated by counterfeiter. DO NOT EDIT. +package mocks + +import ( + "sync" + + "github.com/trustbloc/sidetree-go/pkg/document" + "github.com/trustbloc/sidetree-go/pkg/patch" +) + +type DocumentComposer struct { + ApplyPatchesStub func(document.Document, []patch.Patch) (document.Document, error) + applyPatchesMutex sync.RWMutex + applyPatchesArgsForCall []struct { + arg1 document.Document + arg2 []patch.Patch + } + applyPatchesReturns struct { + result1 document.Document + result2 error + } + applyPatchesReturnsOnCall map[int]struct { + result1 document.Document + result2 error + } + invocations map[string][][]interface{} + invocationsMutex sync.RWMutex +} + +func (fake *DocumentComposer) ApplyPatches(arg1 document.Document, arg2 []patch.Patch) (document.Document, error) { + var arg2Copy []patch.Patch + if arg2 != nil { + arg2Copy = make([]patch.Patch, len(arg2)) + copy(arg2Copy, arg2) + } + fake.applyPatchesMutex.Lock() + ret, specificReturn := fake.applyPatchesReturnsOnCall[len(fake.applyPatchesArgsForCall)] + fake.applyPatchesArgsForCall = append(fake.applyPatchesArgsForCall, struct { + arg1 document.Document + arg2 []patch.Patch + }{arg1, arg2Copy}) + stub := fake.ApplyPatchesStub + fakeReturns := fake.applyPatchesReturns + fake.recordInvocation("ApplyPatches", []interface{}{arg1, arg2Copy}) + fake.applyPatchesMutex.Unlock() + if stub != nil { + return stub(arg1, arg2) + } + if specificReturn { + return ret.result1, ret.result2 + } + return fakeReturns.result1, fakeReturns.result2 +} + +func (fake *DocumentComposer) ApplyPatchesCallCount() int { + fake.applyPatchesMutex.RLock() + defer fake.applyPatchesMutex.RUnlock() + return len(fake.applyPatchesArgsForCall) +} + +func (fake *DocumentComposer) ApplyPatchesCalls(stub func(document.Document, []patch.Patch) (document.Document, error)) { + fake.applyPatchesMutex.Lock() + defer fake.applyPatchesMutex.Unlock() + fake.ApplyPatchesStub = stub +} + +func (fake *DocumentComposer) ApplyPatchesArgsForCall(i int) (document.Document, []patch.Patch) { + fake.applyPatchesMutex.RLock() + defer fake.applyPatchesMutex.RUnlock() + argsForCall := fake.applyPatchesArgsForCall[i] + return argsForCall.arg1, argsForCall.arg2 +} + +func (fake *DocumentComposer) ApplyPatchesReturns(result1 document.Document, result2 error) { + fake.applyPatchesMutex.Lock() + defer fake.applyPatchesMutex.Unlock() + fake.ApplyPatchesStub = nil + fake.applyPatchesReturns = struct { + result1 document.Document + result2 error + }{result1, result2} +} + +func (fake *DocumentComposer) 
ApplyPatchesReturnsOnCall(i int, result1 document.Document, result2 error) { + fake.applyPatchesMutex.Lock() + defer fake.applyPatchesMutex.Unlock() + fake.ApplyPatchesStub = nil + if fake.applyPatchesReturnsOnCall == nil { + fake.applyPatchesReturnsOnCall = make(map[int]struct { + result1 document.Document + result2 error + }) + } + fake.applyPatchesReturnsOnCall[i] = struct { + result1 document.Document + result2 error + }{result1, result2} +} + +func (fake *DocumentComposer) Invocations() map[string][][]interface{} { + fake.invocationsMutex.RLock() + defer fake.invocationsMutex.RUnlock() + fake.applyPatchesMutex.RLock() + defer fake.applyPatchesMutex.RUnlock() + copiedInvocations := map[string][][]interface{}{} + for key, value := range fake.invocations { + copiedInvocations[key] = value + } + return copiedInvocations +} + +func (fake *DocumentComposer) recordInvocation(key string, args []interface{}) { + fake.invocationsMutex.Lock() + defer fake.invocationsMutex.Unlock() + if fake.invocations == nil { + fake.invocations = map[string][][]interface{}{} + } + if fake.invocations[key] == nil { + fake.invocations[key] = [][]interface{}{} + } + fake.invocations[key] = append(fake.invocations[key], args) +} diff --git a/pkg/mocks/documenttransformer.gen.go b/pkg/mocks/documenttransformer.gen.go new file mode 100644 index 0000000..0285f38 --- /dev/null +++ b/pkg/mocks/documenttransformer.gen.go @@ -0,0 +1,117 @@ +// Code generated by counterfeiter. DO NOT EDIT. +package mocks + +import ( + "sync" + + "github.com/trustbloc/sidetree-go/pkg/api/protocol" + "github.com/trustbloc/sidetree-go/pkg/document" +) + +type DocumentTransformer struct { + TransformDocumentStub func(*protocol.ResolutionModel, protocol.TransformationInfo) (*document.ResolutionResult, error) + transformDocumentMutex sync.RWMutex + transformDocumentArgsForCall []struct { + arg1 *protocol.ResolutionModel + arg2 protocol.TransformationInfo + } + transformDocumentReturns struct { + result1 *document.ResolutionResult + result2 error + } + transformDocumentReturnsOnCall map[int]struct { + result1 *document.ResolutionResult + result2 error + } + invocations map[string][][]interface{} + invocationsMutex sync.RWMutex +} + +func (fake *DocumentTransformer) TransformDocument(arg1 *protocol.ResolutionModel, arg2 protocol.TransformationInfo) (*document.ResolutionResult, error) { + fake.transformDocumentMutex.Lock() + ret, specificReturn := fake.transformDocumentReturnsOnCall[len(fake.transformDocumentArgsForCall)] + fake.transformDocumentArgsForCall = append(fake.transformDocumentArgsForCall, struct { + arg1 *protocol.ResolutionModel + arg2 protocol.TransformationInfo + }{arg1, arg2}) + stub := fake.TransformDocumentStub + fakeReturns := fake.transformDocumentReturns + fake.recordInvocation("TransformDocument", []interface{}{arg1, arg2}) + fake.transformDocumentMutex.Unlock() + if stub != nil { + return stub(arg1, arg2) + } + if specificReturn { + return ret.result1, ret.result2 + } + return fakeReturns.result1, fakeReturns.result2 +} + +func (fake *DocumentTransformer) TransformDocumentCallCount() int { + fake.transformDocumentMutex.RLock() + defer fake.transformDocumentMutex.RUnlock() + return len(fake.transformDocumentArgsForCall) +} + +func (fake *DocumentTransformer) TransformDocumentCalls(stub func(*protocol.ResolutionModel, protocol.TransformationInfo) (*document.ResolutionResult, error)) { + fake.transformDocumentMutex.Lock() + defer fake.transformDocumentMutex.Unlock() + fake.TransformDocumentStub = stub +} + +func (fake 
*DocumentTransformer) TransformDocumentArgsForCall(i int) (*protocol.ResolutionModel, protocol.TransformationInfo) { + fake.transformDocumentMutex.RLock() + defer fake.transformDocumentMutex.RUnlock() + argsForCall := fake.transformDocumentArgsForCall[i] + return argsForCall.arg1, argsForCall.arg2 +} + +func (fake *DocumentTransformer) TransformDocumentReturns(result1 *document.ResolutionResult, result2 error) { + fake.transformDocumentMutex.Lock() + defer fake.transformDocumentMutex.Unlock() + fake.TransformDocumentStub = nil + fake.transformDocumentReturns = struct { + result1 *document.ResolutionResult + result2 error + }{result1, result2} +} + +func (fake *DocumentTransformer) TransformDocumentReturnsOnCall(i int, result1 *document.ResolutionResult, result2 error) { + fake.transformDocumentMutex.Lock() + defer fake.transformDocumentMutex.Unlock() + fake.TransformDocumentStub = nil + if fake.transformDocumentReturnsOnCall == nil { + fake.transformDocumentReturnsOnCall = make(map[int]struct { + result1 *document.ResolutionResult + result2 error + }) + } + fake.transformDocumentReturnsOnCall[i] = struct { + result1 *document.ResolutionResult + result2 error + }{result1, result2} +} + +func (fake *DocumentTransformer) Invocations() map[string][][]interface{} { + fake.invocationsMutex.RLock() + defer fake.invocationsMutex.RUnlock() + fake.transformDocumentMutex.RLock() + defer fake.transformDocumentMutex.RUnlock() + copiedInvocations := map[string][][]interface{}{} + for key, value := range fake.invocations { + copiedInvocations[key] = value + } + return copiedInvocations +} + +func (fake *DocumentTransformer) recordInvocation(key string, args []interface{}) { + fake.invocationsMutex.Lock() + defer fake.invocationsMutex.Unlock() + if fake.invocations == nil { + fake.invocations = map[string][][]interface{}{} + } + if fake.invocations[key] == nil { + fake.invocations[key] = [][]interface{}{} + } + fake.invocations[key] = append(fake.invocations[key], args) +} diff --git a/pkg/mocks/documentvalidator.gen.go b/pkg/mocks/documentvalidator.gen.go new file mode 100644 index 0000000..13c0adc --- /dev/null +++ b/pkg/mocks/documentvalidator.gen.go @@ -0,0 +1,191 @@ +// Code generated by counterfeiter. DO NOT EDIT. 
+package mocks + +import ( + "sync" +) + +type DocumentValidator struct { + IsValidOriginalDocumentStub func([]byte) error + isValidOriginalDocumentMutex sync.RWMutex + isValidOriginalDocumentArgsForCall []struct { + arg1 []byte + } + isValidOriginalDocumentReturns struct { + result1 error + } + isValidOriginalDocumentReturnsOnCall map[int]struct { + result1 error + } + IsValidPayloadStub func([]byte) error + isValidPayloadMutex sync.RWMutex + isValidPayloadArgsForCall []struct { + arg1 []byte + } + isValidPayloadReturns struct { + result1 error + } + isValidPayloadReturnsOnCall map[int]struct { + result1 error + } + invocations map[string][][]interface{} + invocationsMutex sync.RWMutex +} + +func (fake *DocumentValidator) IsValidOriginalDocument(arg1 []byte) error { + var arg1Copy []byte + if arg1 != nil { + arg1Copy = make([]byte, len(arg1)) + copy(arg1Copy, arg1) + } + fake.isValidOriginalDocumentMutex.Lock() + ret, specificReturn := fake.isValidOriginalDocumentReturnsOnCall[len(fake.isValidOriginalDocumentArgsForCall)] + fake.isValidOriginalDocumentArgsForCall = append(fake.isValidOriginalDocumentArgsForCall, struct { + arg1 []byte + }{arg1Copy}) + stub := fake.IsValidOriginalDocumentStub + fakeReturns := fake.isValidOriginalDocumentReturns + fake.recordInvocation("IsValidOriginalDocument", []interface{}{arg1Copy}) + fake.isValidOriginalDocumentMutex.Unlock() + if stub != nil { + return stub(arg1) + } + if specificReturn { + return ret.result1 + } + return fakeReturns.result1 +} + +func (fake *DocumentValidator) IsValidOriginalDocumentCallCount() int { + fake.isValidOriginalDocumentMutex.RLock() + defer fake.isValidOriginalDocumentMutex.RUnlock() + return len(fake.isValidOriginalDocumentArgsForCall) +} + +func (fake *DocumentValidator) IsValidOriginalDocumentCalls(stub func([]byte) error) { + fake.isValidOriginalDocumentMutex.Lock() + defer fake.isValidOriginalDocumentMutex.Unlock() + fake.IsValidOriginalDocumentStub = stub +} + +func (fake *DocumentValidator) IsValidOriginalDocumentArgsForCall(i int) []byte { + fake.isValidOriginalDocumentMutex.RLock() + defer fake.isValidOriginalDocumentMutex.RUnlock() + argsForCall := fake.isValidOriginalDocumentArgsForCall[i] + return argsForCall.arg1 +} + +func (fake *DocumentValidator) IsValidOriginalDocumentReturns(result1 error) { + fake.isValidOriginalDocumentMutex.Lock() + defer fake.isValidOriginalDocumentMutex.Unlock() + fake.IsValidOriginalDocumentStub = nil + fake.isValidOriginalDocumentReturns = struct { + result1 error + }{result1} +} + +func (fake *DocumentValidator) IsValidOriginalDocumentReturnsOnCall(i int, result1 error) { + fake.isValidOriginalDocumentMutex.Lock() + defer fake.isValidOriginalDocumentMutex.Unlock() + fake.IsValidOriginalDocumentStub = nil + if fake.isValidOriginalDocumentReturnsOnCall == nil { + fake.isValidOriginalDocumentReturnsOnCall = make(map[int]struct { + result1 error + }) + } + fake.isValidOriginalDocumentReturnsOnCall[i] = struct { + result1 error + }{result1} +} + +func (fake *DocumentValidator) IsValidPayload(arg1 []byte) error { + var arg1Copy []byte + if arg1 != nil { + arg1Copy = make([]byte, len(arg1)) + copy(arg1Copy, arg1) + } + fake.isValidPayloadMutex.Lock() + ret, specificReturn := fake.isValidPayloadReturnsOnCall[len(fake.isValidPayloadArgsForCall)] + fake.isValidPayloadArgsForCall = append(fake.isValidPayloadArgsForCall, struct { + arg1 []byte + }{arg1Copy}) + stub := fake.IsValidPayloadStub + fakeReturns := fake.isValidPayloadReturns + fake.recordInvocation("IsValidPayload", 
[]interface{}{arg1Copy}) + fake.isValidPayloadMutex.Unlock() + if stub != nil { + return stub(arg1) + } + if specificReturn { + return ret.result1 + } + return fakeReturns.result1 +} + +func (fake *DocumentValidator) IsValidPayloadCallCount() int { + fake.isValidPayloadMutex.RLock() + defer fake.isValidPayloadMutex.RUnlock() + return len(fake.isValidPayloadArgsForCall) +} + +func (fake *DocumentValidator) IsValidPayloadCalls(stub func([]byte) error) { + fake.isValidPayloadMutex.Lock() + defer fake.isValidPayloadMutex.Unlock() + fake.IsValidPayloadStub = stub +} + +func (fake *DocumentValidator) IsValidPayloadArgsForCall(i int) []byte { + fake.isValidPayloadMutex.RLock() + defer fake.isValidPayloadMutex.RUnlock() + argsForCall := fake.isValidPayloadArgsForCall[i] + return argsForCall.arg1 +} + +func (fake *DocumentValidator) IsValidPayloadReturns(result1 error) { + fake.isValidPayloadMutex.Lock() + defer fake.isValidPayloadMutex.Unlock() + fake.IsValidPayloadStub = nil + fake.isValidPayloadReturns = struct { + result1 error + }{result1} +} + +func (fake *DocumentValidator) IsValidPayloadReturnsOnCall(i int, result1 error) { + fake.isValidPayloadMutex.Lock() + defer fake.isValidPayloadMutex.Unlock() + fake.IsValidPayloadStub = nil + if fake.isValidPayloadReturnsOnCall == nil { + fake.isValidPayloadReturnsOnCall = make(map[int]struct { + result1 error + }) + } + fake.isValidPayloadReturnsOnCall[i] = struct { + result1 error + }{result1} +} + +func (fake *DocumentValidator) Invocations() map[string][][]interface{} { + fake.invocationsMutex.RLock() + defer fake.invocationsMutex.RUnlock() + fake.isValidOriginalDocumentMutex.RLock() + defer fake.isValidOriginalDocumentMutex.RUnlock() + fake.isValidPayloadMutex.RLock() + defer fake.isValidPayloadMutex.RUnlock() + copiedInvocations := map[string][][]interface{}{} + for key, value := range fake.invocations { + copiedInvocations[key] = value + } + return copiedInvocations +} + +func (fake *DocumentValidator) recordInvocation(key string, args []interface{}) { + fake.invocationsMutex.Lock() + defer fake.invocationsMutex.Unlock() + if fake.invocations == nil { + fake.invocations = map[string][][]interface{}{} + } + if fake.invocations[key] == nil { + fake.invocations[key] = [][]interface{}{} + } + fake.invocations[key] = append(fake.invocations[key], args) +} diff --git a/pkg/mocks/operationapplier.gen.go b/pkg/mocks/operationapplier.gen.go new file mode 100644 index 0000000..de5f359 --- /dev/null +++ b/pkg/mocks/operationapplier.gen.go @@ -0,0 +1,117 @@ +// Code generated by counterfeiter. DO NOT EDIT. 
+package mocks + +import ( + "sync" + + "github.com/trustbloc/sidetree-go/pkg/api/operation" + "github.com/trustbloc/sidetree-go/pkg/api/protocol" +) + +type OperationApplier struct { + ApplyStub func(*operation.AnchoredOperation, *protocol.ResolutionModel) (*protocol.ResolutionModel, error) + applyMutex sync.RWMutex + applyArgsForCall []struct { + arg1 *operation.AnchoredOperation + arg2 *protocol.ResolutionModel + } + applyReturns struct { + result1 *protocol.ResolutionModel + result2 error + } + applyReturnsOnCall map[int]struct { + result1 *protocol.ResolutionModel + result2 error + } + invocations map[string][][]interface{} + invocationsMutex sync.RWMutex +} + +func (fake *OperationApplier) Apply(arg1 *operation.AnchoredOperation, arg2 *protocol.ResolutionModel) (*protocol.ResolutionModel, error) { + fake.applyMutex.Lock() + ret, specificReturn := fake.applyReturnsOnCall[len(fake.applyArgsForCall)] + fake.applyArgsForCall = append(fake.applyArgsForCall, struct { + arg1 *operation.AnchoredOperation + arg2 *protocol.ResolutionModel + }{arg1, arg2}) + stub := fake.ApplyStub + fakeReturns := fake.applyReturns + fake.recordInvocation("Apply", []interface{}{arg1, arg2}) + fake.applyMutex.Unlock() + if stub != nil { + return stub(arg1, arg2) + } + if specificReturn { + return ret.result1, ret.result2 + } + return fakeReturns.result1, fakeReturns.result2 +} + +func (fake *OperationApplier) ApplyCallCount() int { + fake.applyMutex.RLock() + defer fake.applyMutex.RUnlock() + return len(fake.applyArgsForCall) +} + +func (fake *OperationApplier) ApplyCalls(stub func(*operation.AnchoredOperation, *protocol.ResolutionModel) (*protocol.ResolutionModel, error)) { + fake.applyMutex.Lock() + defer fake.applyMutex.Unlock() + fake.ApplyStub = stub +} + +func (fake *OperationApplier) ApplyArgsForCall(i int) (*operation.AnchoredOperation, *protocol.ResolutionModel) { + fake.applyMutex.RLock() + defer fake.applyMutex.RUnlock() + argsForCall := fake.applyArgsForCall[i] + return argsForCall.arg1, argsForCall.arg2 +} + +func (fake *OperationApplier) ApplyReturns(result1 *protocol.ResolutionModel, result2 error) { + fake.applyMutex.Lock() + defer fake.applyMutex.Unlock() + fake.ApplyStub = nil + fake.applyReturns = struct { + result1 *protocol.ResolutionModel + result2 error + }{result1, result2} +} + +func (fake *OperationApplier) ApplyReturnsOnCall(i int, result1 *protocol.ResolutionModel, result2 error) { + fake.applyMutex.Lock() + defer fake.applyMutex.Unlock() + fake.ApplyStub = nil + if fake.applyReturnsOnCall == nil { + fake.applyReturnsOnCall = make(map[int]struct { + result1 *protocol.ResolutionModel + result2 error + }) + } + fake.applyReturnsOnCall[i] = struct { + result1 *protocol.ResolutionModel + result2 error + }{result1, result2} +} + +func (fake *OperationApplier) Invocations() map[string][][]interface{} { + fake.invocationsMutex.RLock() + defer fake.invocationsMutex.RUnlock() + fake.applyMutex.RLock() + defer fake.applyMutex.RUnlock() + copiedInvocations := map[string][][]interface{}{} + for key, value := range fake.invocations { + copiedInvocations[key] = value + } + return copiedInvocations +} + +func (fake *OperationApplier) recordInvocation(key string, args []interface{}) { + fake.invocationsMutex.Lock() + defer fake.invocationsMutex.Unlock() + if fake.invocations == nil { + fake.invocations = map[string][][]interface{}{} + } + if fake.invocations[key] == nil { + fake.invocations[key] = [][]interface{}{} + } + fake.invocations[key] = append(fake.invocations[key], args) +} diff --git 
a/pkg/mocks/operationparser.gen.go b/pkg/mocks/operationparser.gen.go new file mode 100644 index 0000000..01ceba9 --- /dev/null +++ b/pkg/mocks/operationparser.gen.go @@ -0,0 +1,375 @@ +// Code generated by counterfeiter. DO NOT EDIT. +package mocks + +import ( + "sync" + + "github.com/trustbloc/sidetree-go/pkg/api/operation" +) + +type OperationParser struct { + GetCommitmentStub func([]byte) (string, error) + getCommitmentMutex sync.RWMutex + getCommitmentArgsForCall []struct { + arg1 []byte + } + getCommitmentReturns struct { + result1 string + result2 error + } + getCommitmentReturnsOnCall map[int]struct { + result1 string + result2 error + } + GetRevealValueStub func([]byte) (string, error) + getRevealValueMutex sync.RWMutex + getRevealValueArgsForCall []struct { + arg1 []byte + } + getRevealValueReturns struct { + result1 string + result2 error + } + getRevealValueReturnsOnCall map[int]struct { + result1 string + result2 error + } + ParseStub func(string, []byte) (*operation.Operation, error) + parseMutex sync.RWMutex + parseArgsForCall []struct { + arg1 string + arg2 []byte + } + parseReturns struct { + result1 *operation.Operation + result2 error + } + parseReturnsOnCall map[int]struct { + result1 *operation.Operation + result2 error + } + ParseDIDStub func(string, string) (string, []byte, error) + parseDIDMutex sync.RWMutex + parseDIDArgsForCall []struct { + arg1 string + arg2 string + } + parseDIDReturns struct { + result1 string + result2 []byte + result3 error + } + parseDIDReturnsOnCall map[int]struct { + result1 string + result2 []byte + result3 error + } + invocations map[string][][]interface{} + invocationsMutex sync.RWMutex +} + +func (fake *OperationParser) GetCommitment(arg1 []byte) (string, error) { + var arg1Copy []byte + if arg1 != nil { + arg1Copy = make([]byte, len(arg1)) + copy(arg1Copy, arg1) + } + fake.getCommitmentMutex.Lock() + ret, specificReturn := fake.getCommitmentReturnsOnCall[len(fake.getCommitmentArgsForCall)] + fake.getCommitmentArgsForCall = append(fake.getCommitmentArgsForCall, struct { + arg1 []byte + }{arg1Copy}) + stub := fake.GetCommitmentStub + fakeReturns := fake.getCommitmentReturns + fake.recordInvocation("GetCommitment", []interface{}{arg1Copy}) + fake.getCommitmentMutex.Unlock() + if stub != nil { + return stub(arg1) + } + if specificReturn { + return ret.result1, ret.result2 + } + return fakeReturns.result1, fakeReturns.result2 +} + +func (fake *OperationParser) GetCommitmentCallCount() int { + fake.getCommitmentMutex.RLock() + defer fake.getCommitmentMutex.RUnlock() + return len(fake.getCommitmentArgsForCall) +} + +func (fake *OperationParser) GetCommitmentCalls(stub func([]byte) (string, error)) { + fake.getCommitmentMutex.Lock() + defer fake.getCommitmentMutex.Unlock() + fake.GetCommitmentStub = stub +} + +func (fake *OperationParser) GetCommitmentArgsForCall(i int) []byte { + fake.getCommitmentMutex.RLock() + defer fake.getCommitmentMutex.RUnlock() + argsForCall := fake.getCommitmentArgsForCall[i] + return argsForCall.arg1 +} + +func (fake *OperationParser) GetCommitmentReturns(result1 string, result2 error) { + fake.getCommitmentMutex.Lock() + defer fake.getCommitmentMutex.Unlock() + fake.GetCommitmentStub = nil + fake.getCommitmentReturns = struct { + result1 string + result2 error + }{result1, result2} +} + +func (fake *OperationParser) GetCommitmentReturnsOnCall(i int, result1 string, result2 error) { + fake.getCommitmentMutex.Lock() + defer fake.getCommitmentMutex.Unlock() + fake.GetCommitmentStub = nil + if 
fake.getCommitmentReturnsOnCall == nil { + fake.getCommitmentReturnsOnCall = make(map[int]struct { + result1 string + result2 error + }) + } + fake.getCommitmentReturnsOnCall[i] = struct { + result1 string + result2 error + }{result1, result2} +} + +func (fake *OperationParser) GetRevealValue(arg1 []byte) (string, error) { + var arg1Copy []byte + if arg1 != nil { + arg1Copy = make([]byte, len(arg1)) + copy(arg1Copy, arg1) + } + fake.getRevealValueMutex.Lock() + ret, specificReturn := fake.getRevealValueReturnsOnCall[len(fake.getRevealValueArgsForCall)] + fake.getRevealValueArgsForCall = append(fake.getRevealValueArgsForCall, struct { + arg1 []byte + }{arg1Copy}) + stub := fake.GetRevealValueStub + fakeReturns := fake.getRevealValueReturns + fake.recordInvocation("GetRevealValue", []interface{}{arg1Copy}) + fake.getRevealValueMutex.Unlock() + if stub != nil { + return stub(arg1) + } + if specificReturn { + return ret.result1, ret.result2 + } + return fakeReturns.result1, fakeReturns.result2 +} + +func (fake *OperationParser) GetRevealValueCallCount() int { + fake.getRevealValueMutex.RLock() + defer fake.getRevealValueMutex.RUnlock() + return len(fake.getRevealValueArgsForCall) +} + +func (fake *OperationParser) GetRevealValueCalls(stub func([]byte) (string, error)) { + fake.getRevealValueMutex.Lock() + defer fake.getRevealValueMutex.Unlock() + fake.GetRevealValueStub = stub +} + +func (fake *OperationParser) GetRevealValueArgsForCall(i int) []byte { + fake.getRevealValueMutex.RLock() + defer fake.getRevealValueMutex.RUnlock() + argsForCall := fake.getRevealValueArgsForCall[i] + return argsForCall.arg1 +} + +func (fake *OperationParser) GetRevealValueReturns(result1 string, result2 error) { + fake.getRevealValueMutex.Lock() + defer fake.getRevealValueMutex.Unlock() + fake.GetRevealValueStub = nil + fake.getRevealValueReturns = struct { + result1 string + result2 error + }{result1, result2} +} + +func (fake *OperationParser) GetRevealValueReturnsOnCall(i int, result1 string, result2 error) { + fake.getRevealValueMutex.Lock() + defer fake.getRevealValueMutex.Unlock() + fake.GetRevealValueStub = nil + if fake.getRevealValueReturnsOnCall == nil { + fake.getRevealValueReturnsOnCall = make(map[int]struct { + result1 string + result2 error + }) + } + fake.getRevealValueReturnsOnCall[i] = struct { + result1 string + result2 error + }{result1, result2} +} + +func (fake *OperationParser) Parse(arg1 string, arg2 []byte) (*operation.Operation, error) { + var arg2Copy []byte + if arg2 != nil { + arg2Copy = make([]byte, len(arg2)) + copy(arg2Copy, arg2) + } + fake.parseMutex.Lock() + ret, specificReturn := fake.parseReturnsOnCall[len(fake.parseArgsForCall)] + fake.parseArgsForCall = append(fake.parseArgsForCall, struct { + arg1 string + arg2 []byte + }{arg1, arg2Copy}) + stub := fake.ParseStub + fakeReturns := fake.parseReturns + fake.recordInvocation("Parse", []interface{}{arg1, arg2Copy}) + fake.parseMutex.Unlock() + if stub != nil { + return stub(arg1, arg2) + } + if specificReturn { + return ret.result1, ret.result2 + } + return fakeReturns.result1, fakeReturns.result2 +} + +func (fake *OperationParser) ParseCallCount() int { + fake.parseMutex.RLock() + defer fake.parseMutex.RUnlock() + return len(fake.parseArgsForCall) +} + +func (fake *OperationParser) ParseCalls(stub func(string, []byte) (*operation.Operation, error)) { + fake.parseMutex.Lock() + defer fake.parseMutex.Unlock() + fake.ParseStub = stub +} + +func (fake *OperationParser) ParseArgsForCall(i int) (string, []byte) { + 
fake.parseMutex.RLock() + defer fake.parseMutex.RUnlock() + argsForCall := fake.parseArgsForCall[i] + return argsForCall.arg1, argsForCall.arg2 +} + +func (fake *OperationParser) ParseReturns(result1 *operation.Operation, result2 error) { + fake.parseMutex.Lock() + defer fake.parseMutex.Unlock() + fake.ParseStub = nil + fake.parseReturns = struct { + result1 *operation.Operation + result2 error + }{result1, result2} +} + +func (fake *OperationParser) ParseReturnsOnCall(i int, result1 *operation.Operation, result2 error) { + fake.parseMutex.Lock() + defer fake.parseMutex.Unlock() + fake.ParseStub = nil + if fake.parseReturnsOnCall == nil { + fake.parseReturnsOnCall = make(map[int]struct { + result1 *operation.Operation + result2 error + }) + } + fake.parseReturnsOnCall[i] = struct { + result1 *operation.Operation + result2 error + }{result1, result2} +} + +func (fake *OperationParser) ParseDID(arg1 string, arg2 string) (string, []byte, error) { + fake.parseDIDMutex.Lock() + ret, specificReturn := fake.parseDIDReturnsOnCall[len(fake.parseDIDArgsForCall)] + fake.parseDIDArgsForCall = append(fake.parseDIDArgsForCall, struct { + arg1 string + arg2 string + }{arg1, arg2}) + stub := fake.ParseDIDStub + fakeReturns := fake.parseDIDReturns + fake.recordInvocation("ParseDID", []interface{}{arg1, arg2}) + fake.parseDIDMutex.Unlock() + if stub != nil { + return stub(arg1, arg2) + } + if specificReturn { + return ret.result1, ret.result2, ret.result3 + } + return fakeReturns.result1, fakeReturns.result2, fakeReturns.result3 +} + +func (fake *OperationParser) ParseDIDCallCount() int { + fake.parseDIDMutex.RLock() + defer fake.parseDIDMutex.RUnlock() + return len(fake.parseDIDArgsForCall) +} + +func (fake *OperationParser) ParseDIDCalls(stub func(string, string) (string, []byte, error)) { + fake.parseDIDMutex.Lock() + defer fake.parseDIDMutex.Unlock() + fake.ParseDIDStub = stub +} + +func (fake *OperationParser) ParseDIDArgsForCall(i int) (string, string) { + fake.parseDIDMutex.RLock() + defer fake.parseDIDMutex.RUnlock() + argsForCall := fake.parseDIDArgsForCall[i] + return argsForCall.arg1, argsForCall.arg2 +} + +func (fake *OperationParser) ParseDIDReturns(result1 string, result2 []byte, result3 error) { + fake.parseDIDMutex.Lock() + defer fake.parseDIDMutex.Unlock() + fake.ParseDIDStub = nil + fake.parseDIDReturns = struct { + result1 string + result2 []byte + result3 error + }{result1, result2, result3} +} + +func (fake *OperationParser) ParseDIDReturnsOnCall(i int, result1 string, result2 []byte, result3 error) { + fake.parseDIDMutex.Lock() + defer fake.parseDIDMutex.Unlock() + fake.ParseDIDStub = nil + if fake.parseDIDReturnsOnCall == nil { + fake.parseDIDReturnsOnCall = make(map[int]struct { + result1 string + result2 []byte + result3 error + }) + } + fake.parseDIDReturnsOnCall[i] = struct { + result1 string + result2 []byte + result3 error + }{result1, result2, result3} +} + +func (fake *OperationParser) Invocations() map[string][][]interface{} { + fake.invocationsMutex.RLock() + defer fake.invocationsMutex.RUnlock() + fake.getCommitmentMutex.RLock() + defer fake.getCommitmentMutex.RUnlock() + fake.getRevealValueMutex.RLock() + defer fake.getRevealValueMutex.RUnlock() + fake.parseMutex.RLock() + defer fake.parseMutex.RUnlock() + fake.parseDIDMutex.RLock() + defer fake.parseDIDMutex.RUnlock() + copiedInvocations := map[string][][]interface{}{} + for key, value := range fake.invocations { + copiedInvocations[key] = value + } + return copiedInvocations +} + +func (fake *OperationParser) 
recordInvocation(key string, args []interface{}) {
+	fake.invocationsMutex.Lock()
+	defer fake.invocationsMutex.Unlock()
+	if fake.invocations == nil {
+		fake.invocations = map[string][][]interface{}{}
+	}
+	if fake.invocations[key] == nil {
+		fake.invocations[key] = [][]interface{}{}
+	}
+	fake.invocations[key] = append(fake.invocations[key], args)
+}
diff --git a/pkg/mocks/protocol.go b/pkg/mocks/protocol.go
new file mode 100644
index 0000000..76eb7a1
--- /dev/null
+++ b/pkg/mocks/protocol.go
@@ -0,0 +1,154 @@
+/*
+Copyright Gen Digital Inc. All Rights Reserved.
+
+SPDX-License-Identifier: Apache-2.0
+*/
+
+package mocks
+
+import (
+	"fmt"
+
+	"github.com/pkg/errors"
+
+	"github.com/trustbloc/sidetree-go/pkg/api/protocol"
+)
+
+const (
+	// DefaultNS is the default namespace used in mocks.
+	DefaultNS = "did:sidetree"
+
+	// MaxBatchFileSize is the maximum batch file size in bytes.
+	MaxBatchFileSize = 20000
+
+	// MaxOperationByteSize is the maximum operation size in bytes.
+	MaxOperationByteSize = 2000
+
+	// MaxDeltaByteSize is the maximum delta size in bytes.
+	MaxDeltaByteSize = 1000
+
+	// CurrentVersion is the current protocol version.
+	CurrentVersion = "1.0"
+
+	sha2_256 = 18
+)
+
+// MockProtocolClient mocks the protocol client for testing purposes.
+type MockProtocolClient struct {
+	Protocol       protocol.Protocol // current version (separated for easier testing)
+	CurrentVersion *ProtocolVersion
+	Versions       []*ProtocolVersion
+	Err            error
+}
+
+// NewMockProtocolClient creates a mock protocol client.
+func NewMockProtocolClient() *MockProtocolClient {
+	latest := GetDefaultProtocolParameters()
+
+	latestVersion := GetProtocolVersion(latest)
+
+	// has to be sorted for the mock client to work
+	versions := []*ProtocolVersion{latestVersion}
+
+	return &MockProtocolClient{
+		Protocol:       latest,
+		CurrentVersion: latestVersion,
+		Versions:       versions,
+	}
+}
+
+// Current mocks getting the latest protocol version.
+func (m *MockProtocolClient) Current() (protocol.Version, error) {
+	if m.Err != nil {
+		return nil, m.Err
+	}
+
+	return m.CurrentVersion, nil
+}
+
+// Get mocks getting the protocol version based on anchoring (transaction) time.
+func (m *MockProtocolClient) Get(transactionTime uint64) (protocol.Version, error) {
+	if m.Err != nil {
+		return nil, m.Err
+	}
+
+	for i := len(m.Versions) - 1; i >= 0; i-- {
+		if transactionTime >= m.Versions[i].Protocol().GenesisTime {
+			return m.Versions[i], nil
+		}
+	}
+
+	return nil, fmt.Errorf("protocol parameters are not defined for anchoring time: %d", transactionTime)
+}
+
+// NewMockProtocolClientProvider creates a new mock protocol client provider.
+func NewMockProtocolClientProvider() *MockProtocolClientProvider {
+	m := make(map[string]protocol.Client)
+
+	m[DefaultNS] = NewMockProtocolClient()
+
+	return &MockProtocolClientProvider{
+		ProtocolClients: m,
+	}
+}
+
+// MockProtocolClientProvider implements a mock protocol client provider.
+type MockProtocolClientProvider struct {
+	ProtocolClients map[string]protocol.Client
+}
+
+// WithProtocolClient sets the protocol client for the given namespace.
+func (m *MockProtocolClientProvider) WithProtocolClient(ns string, pc protocol.Client) *MockProtocolClientProvider {
+	m.ProtocolClients[ns] = pc
+
+	return m
+}
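
A minimal sketch of how the mocks above are typically wired together in a test. This is an illustration only (the test name is invented and the snippet is not part of the imported sources); it assumes the provider and client behave as defined in this file:

package mocks_test

import (
	"testing"

	"github.com/stretchr/testify/require"

	"github.com/trustbloc/sidetree-go/pkg/mocks"
)

func TestMockProtocolClientProviderSketch(t *testing.T) {
	pcp := mocks.NewMockProtocolClientProvider()

	// The default provider is pre-wired with a client for the did:sidetree namespace.
	pc, err := pcp.ForNamespace(mocks.DefaultNS)
	require.NoError(t, err)

	// Current returns the latest (and, here, only) protocol version.
	v, err := pc.Current()
	require.NoError(t, err)
	require.Equal(t, mocks.CurrentVersion, v.Version())

	// Namespaces without a registered client produce an error.
	_, err = pcp.ForNamespace("did:unknown")
	require.Error(t, err)
}

+
+// ForNamespace returns the protocol client for the given namespace.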
+func (m *MockProtocolClientProvider) ForNamespace(namespace string) (protocol.Client, error) { + pc, ok := m.ProtocolClients[namespace] + if !ok { + return nil, errors.Errorf("protocol client not found for namespace [%s]", namespace) + } + + return pc, nil +} + +// GetProtocolVersion returns mock protocol version. +// +//nolint:gocritic +func GetProtocolVersion(p protocol.Protocol) *ProtocolVersion { + v := &ProtocolVersion{} + v.VersionReturns(CurrentVersion) + v.OperationApplierReturns(&OperationApplier{}) + v.OperationParserReturns(&OperationParser{}) + v.DocumentTransformerReturns(&DocumentTransformer{}) + + v.ProtocolReturns(p) + + return v +} + +// GetDefaultProtocolParameters returns mock protocol parameters. +func GetDefaultProtocolParameters() protocol.Protocol { + return protocol.Protocol{ + GenesisTime: 0, + MultihashAlgorithms: []uint{sha2_256}, + MaxOperationCount: 2, + MaxOperationSize: MaxOperationByteSize, + MaxOperationHashLength: 100, + MaxDeltaSize: MaxDeltaByteSize, + MaxCasURILength: 100, + CompressionAlgorithm: "GZIP", + MaxChunkFileSize: MaxBatchFileSize, + MaxProvisionalIndexFileSize: MaxBatchFileSize, + MaxCoreIndexFileSize: MaxBatchFileSize, + MaxProofFileSize: MaxBatchFileSize, + SignatureAlgorithms: []string{"EdDSA", "ES256"}, + KeyAlgorithms: []string{"Ed25519", "P-256"}, + Patches: []string{"add-public-keys", "remove-public-keys", "add-services", "remove-services", "ietf-json-patch"}, + MaxOperationTimeDelta: 2 * 60 * 60, + NonceSize: 16, // 16 bytes = 128 bits + MaxMemoryDecompressionFactor: 3, + } +} diff --git a/pkg/mocks/protocolversion.gen.go b/pkg/mocks/protocolversion.gen.go new file mode 100644 index 0000000..d94da0c --- /dev/null +++ b/pkg/mocks/protocolversion.gen.go @@ -0,0 +1,425 @@ +// Code generated by counterfeiter. DO NOT EDIT. 
+package mocks + +import ( + "sync" + + "github.com/trustbloc/sidetree-go/pkg/api/protocol" +) + +type ProtocolVersion struct { + DocumentTransformerStub func() protocol.DocumentTransformer + documentTransformerMutex sync.RWMutex + documentTransformerArgsForCall []struct { + } + documentTransformerReturns struct { + result1 protocol.DocumentTransformer + } + documentTransformerReturnsOnCall map[int]struct { + result1 protocol.DocumentTransformer + } + DocumentValidatorStub func() protocol.DocumentValidator + documentValidatorMutex sync.RWMutex + documentValidatorArgsForCall []struct { + } + documentValidatorReturns struct { + result1 protocol.DocumentValidator + } + documentValidatorReturnsOnCall map[int]struct { + result1 protocol.DocumentValidator + } + OperationApplierStub func() protocol.OperationApplier + operationApplierMutex sync.RWMutex + operationApplierArgsForCall []struct { + } + operationApplierReturns struct { + result1 protocol.OperationApplier + } + operationApplierReturnsOnCall map[int]struct { + result1 protocol.OperationApplier + } + OperationParserStub func() protocol.OperationParser + operationParserMutex sync.RWMutex + operationParserArgsForCall []struct { + } + operationParserReturns struct { + result1 protocol.OperationParser + } + operationParserReturnsOnCall map[int]struct { + result1 protocol.OperationParser + } + ProtocolStub func() protocol.Protocol + protocolMutex sync.RWMutex + protocolArgsForCall []struct { + } + protocolReturns struct { + result1 protocol.Protocol + } + protocolReturnsOnCall map[int]struct { + result1 protocol.Protocol + } + VersionStub func() string + versionMutex sync.RWMutex + versionArgsForCall []struct { + } + versionReturns struct { + result1 string + } + versionReturnsOnCall map[int]struct { + result1 string + } + invocations map[string][][]interface{} + invocationsMutex sync.RWMutex +} + +func (fake *ProtocolVersion) DocumentTransformer() protocol.DocumentTransformer { + fake.documentTransformerMutex.Lock() + ret, specificReturn := fake.documentTransformerReturnsOnCall[len(fake.documentTransformerArgsForCall)] + fake.documentTransformerArgsForCall = append(fake.documentTransformerArgsForCall, struct { + }{}) + stub := fake.DocumentTransformerStub + fakeReturns := fake.documentTransformerReturns + fake.recordInvocation("DocumentTransformer", []interface{}{}) + fake.documentTransformerMutex.Unlock() + if stub != nil { + return stub() + } + if specificReturn { + return ret.result1 + } + return fakeReturns.result1 +} + +func (fake *ProtocolVersion) DocumentTransformerCallCount() int { + fake.documentTransformerMutex.RLock() + defer fake.documentTransformerMutex.RUnlock() + return len(fake.documentTransformerArgsForCall) +} + +func (fake *ProtocolVersion) DocumentTransformerCalls(stub func() protocol.DocumentTransformer) { + fake.documentTransformerMutex.Lock() + defer fake.documentTransformerMutex.Unlock() + fake.DocumentTransformerStub = stub +} + +func (fake *ProtocolVersion) DocumentTransformerReturns(result1 protocol.DocumentTransformer) { + fake.documentTransformerMutex.Lock() + defer fake.documentTransformerMutex.Unlock() + fake.DocumentTransformerStub = nil + fake.documentTransformerReturns = struct { + result1 protocol.DocumentTransformer + }{result1} +} + +func (fake *ProtocolVersion) DocumentTransformerReturnsOnCall(i int, result1 protocol.DocumentTransformer) { + fake.documentTransformerMutex.Lock() + defer fake.documentTransformerMutex.Unlock() + fake.DocumentTransformerStub = nil + if fake.documentTransformerReturnsOnCall == 
nil { + fake.documentTransformerReturnsOnCall = make(map[int]struct { + result1 protocol.DocumentTransformer + }) + } + fake.documentTransformerReturnsOnCall[i] = struct { + result1 protocol.DocumentTransformer + }{result1} +} + +func (fake *ProtocolVersion) DocumentValidator() protocol.DocumentValidator { + fake.documentValidatorMutex.Lock() + ret, specificReturn := fake.documentValidatorReturnsOnCall[len(fake.documentValidatorArgsForCall)] + fake.documentValidatorArgsForCall = append(fake.documentValidatorArgsForCall, struct { + }{}) + stub := fake.DocumentValidatorStub + fakeReturns := fake.documentValidatorReturns + fake.recordInvocation("DocumentValidator", []interface{}{}) + fake.documentValidatorMutex.Unlock() + if stub != nil { + return stub() + } + if specificReturn { + return ret.result1 + } + return fakeReturns.result1 +} + +func (fake *ProtocolVersion) DocumentValidatorCallCount() int { + fake.documentValidatorMutex.RLock() + defer fake.documentValidatorMutex.RUnlock() + return len(fake.documentValidatorArgsForCall) +} + +func (fake *ProtocolVersion) DocumentValidatorCalls(stub func() protocol.DocumentValidator) { + fake.documentValidatorMutex.Lock() + defer fake.documentValidatorMutex.Unlock() + fake.DocumentValidatorStub = stub +} + +func (fake *ProtocolVersion) DocumentValidatorReturns(result1 protocol.DocumentValidator) { + fake.documentValidatorMutex.Lock() + defer fake.documentValidatorMutex.Unlock() + fake.DocumentValidatorStub = nil + fake.documentValidatorReturns = struct { + result1 protocol.DocumentValidator + }{result1} +} + +func (fake *ProtocolVersion) DocumentValidatorReturnsOnCall(i int, result1 protocol.DocumentValidator) { + fake.documentValidatorMutex.Lock() + defer fake.documentValidatorMutex.Unlock() + fake.DocumentValidatorStub = nil + if fake.documentValidatorReturnsOnCall == nil { + fake.documentValidatorReturnsOnCall = make(map[int]struct { + result1 protocol.DocumentValidator + }) + } + fake.documentValidatorReturnsOnCall[i] = struct { + result1 protocol.DocumentValidator + }{result1} +} + +func (fake *ProtocolVersion) OperationApplier() protocol.OperationApplier { + fake.operationApplierMutex.Lock() + ret, specificReturn := fake.operationApplierReturnsOnCall[len(fake.operationApplierArgsForCall)] + fake.operationApplierArgsForCall = append(fake.operationApplierArgsForCall, struct { + }{}) + stub := fake.OperationApplierStub + fakeReturns := fake.operationApplierReturns + fake.recordInvocation("OperationApplier", []interface{}{}) + fake.operationApplierMutex.Unlock() + if stub != nil { + return stub() + } + if specificReturn { + return ret.result1 + } + return fakeReturns.result1 +} + +func (fake *ProtocolVersion) OperationApplierCallCount() int { + fake.operationApplierMutex.RLock() + defer fake.operationApplierMutex.RUnlock() + return len(fake.operationApplierArgsForCall) +} + +func (fake *ProtocolVersion) OperationApplierCalls(stub func() protocol.OperationApplier) { + fake.operationApplierMutex.Lock() + defer fake.operationApplierMutex.Unlock() + fake.OperationApplierStub = stub +} + +func (fake *ProtocolVersion) OperationApplierReturns(result1 protocol.OperationApplier) { + fake.operationApplierMutex.Lock() + defer fake.operationApplierMutex.Unlock() + fake.OperationApplierStub = nil + fake.operationApplierReturns = struct { + result1 protocol.OperationApplier + }{result1} +} + +func (fake *ProtocolVersion) OperationApplierReturnsOnCall(i int, result1 protocol.OperationApplier) { + fake.operationApplierMutex.Lock() + defer 
fake.operationApplierMutex.Unlock() + fake.OperationApplierStub = nil + if fake.operationApplierReturnsOnCall == nil { + fake.operationApplierReturnsOnCall = make(map[int]struct { + result1 protocol.OperationApplier + }) + } + fake.operationApplierReturnsOnCall[i] = struct { + result1 protocol.OperationApplier + }{result1} +} + +func (fake *ProtocolVersion) OperationParser() protocol.OperationParser { + fake.operationParserMutex.Lock() + ret, specificReturn := fake.operationParserReturnsOnCall[len(fake.operationParserArgsForCall)] + fake.operationParserArgsForCall = append(fake.operationParserArgsForCall, struct { + }{}) + stub := fake.OperationParserStub + fakeReturns := fake.operationParserReturns + fake.recordInvocation("OperationParser", []interface{}{}) + fake.operationParserMutex.Unlock() + if stub != nil { + return stub() + } + if specificReturn { + return ret.result1 + } + return fakeReturns.result1 +} + +func (fake *ProtocolVersion) OperationParserCallCount() int { + fake.operationParserMutex.RLock() + defer fake.operationParserMutex.RUnlock() + return len(fake.operationParserArgsForCall) +} + +func (fake *ProtocolVersion) OperationParserCalls(stub func() protocol.OperationParser) { + fake.operationParserMutex.Lock() + defer fake.operationParserMutex.Unlock() + fake.OperationParserStub = stub +} + +func (fake *ProtocolVersion) OperationParserReturns(result1 protocol.OperationParser) { + fake.operationParserMutex.Lock() + defer fake.operationParserMutex.Unlock() + fake.OperationParserStub = nil + fake.operationParserReturns = struct { + result1 protocol.OperationParser + }{result1} +} + +func (fake *ProtocolVersion) OperationParserReturnsOnCall(i int, result1 protocol.OperationParser) { + fake.operationParserMutex.Lock() + defer fake.operationParserMutex.Unlock() + fake.OperationParserStub = nil + if fake.operationParserReturnsOnCall == nil { + fake.operationParserReturnsOnCall = make(map[int]struct { + result1 protocol.OperationParser + }) + } + fake.operationParserReturnsOnCall[i] = struct { + result1 protocol.OperationParser + }{result1} +} + +func (fake *ProtocolVersion) Protocol() protocol.Protocol { + fake.protocolMutex.Lock() + ret, specificReturn := fake.protocolReturnsOnCall[len(fake.protocolArgsForCall)] + fake.protocolArgsForCall = append(fake.protocolArgsForCall, struct { + }{}) + stub := fake.ProtocolStub + fakeReturns := fake.protocolReturns + fake.recordInvocation("Protocol", []interface{}{}) + fake.protocolMutex.Unlock() + if stub != nil { + return stub() + } + if specificReturn { + return ret.result1 + } + return fakeReturns.result1 +} + +func (fake *ProtocolVersion) ProtocolCallCount() int { + fake.protocolMutex.RLock() + defer fake.protocolMutex.RUnlock() + return len(fake.protocolArgsForCall) +} + +func (fake *ProtocolVersion) ProtocolCalls(stub func() protocol.Protocol) { + fake.protocolMutex.Lock() + defer fake.protocolMutex.Unlock() + fake.ProtocolStub = stub +} + +func (fake *ProtocolVersion) ProtocolReturns(result1 protocol.Protocol) { + fake.protocolMutex.Lock() + defer fake.protocolMutex.Unlock() + fake.ProtocolStub = nil + fake.protocolReturns = struct { + result1 protocol.Protocol + }{result1} +} + +func (fake *ProtocolVersion) ProtocolReturnsOnCall(i int, result1 protocol.Protocol) { + fake.protocolMutex.Lock() + defer fake.protocolMutex.Unlock() + fake.ProtocolStub = nil + if fake.protocolReturnsOnCall == nil { + fake.protocolReturnsOnCall = make(map[int]struct { + result1 protocol.Protocol + }) + } + fake.protocolReturnsOnCall[i] = struct { + result1 
protocol.Protocol + }{result1} +} + +func (fake *ProtocolVersion) Version() string { + fake.versionMutex.Lock() + ret, specificReturn := fake.versionReturnsOnCall[len(fake.versionArgsForCall)] + fake.versionArgsForCall = append(fake.versionArgsForCall, struct { + }{}) + stub := fake.VersionStub + fakeReturns := fake.versionReturns + fake.recordInvocation("Version", []interface{}{}) + fake.versionMutex.Unlock() + if stub != nil { + return stub() + } + if specificReturn { + return ret.result1 + } + return fakeReturns.result1 +} + +func (fake *ProtocolVersion) VersionCallCount() int { + fake.versionMutex.RLock() + defer fake.versionMutex.RUnlock() + return len(fake.versionArgsForCall) +} + +func (fake *ProtocolVersion) VersionCalls(stub func() string) { + fake.versionMutex.Lock() + defer fake.versionMutex.Unlock() + fake.VersionStub = stub +} + +func (fake *ProtocolVersion) VersionReturns(result1 string) { + fake.versionMutex.Lock() + defer fake.versionMutex.Unlock() + fake.VersionStub = nil + fake.versionReturns = struct { + result1 string + }{result1} +} + +func (fake *ProtocolVersion) VersionReturnsOnCall(i int, result1 string) { + fake.versionMutex.Lock() + defer fake.versionMutex.Unlock() + fake.VersionStub = nil + if fake.versionReturnsOnCall == nil { + fake.versionReturnsOnCall = make(map[int]struct { + result1 string + }) + } + fake.versionReturnsOnCall[i] = struct { + result1 string + }{result1} +} + +func (fake *ProtocolVersion) Invocations() map[string][][]interface{} { + fake.invocationsMutex.RLock() + defer fake.invocationsMutex.RUnlock() + fake.documentTransformerMutex.RLock() + defer fake.documentTransformerMutex.RUnlock() + fake.documentValidatorMutex.RLock() + defer fake.documentValidatorMutex.RUnlock() + fake.operationApplierMutex.RLock() + defer fake.operationApplierMutex.RUnlock() + fake.operationParserMutex.RLock() + defer fake.operationParserMutex.RUnlock() + fake.protocolMutex.RLock() + defer fake.protocolMutex.RUnlock() + fake.versionMutex.RLock() + defer fake.versionMutex.RUnlock() + copiedInvocations := map[string][][]interface{}{} + for key, value := range fake.invocations { + copiedInvocations[key] = value + } + return copiedInvocations +} + +func (fake *ProtocolVersion) recordInvocation(key string, args []interface{}) { + fake.invocationsMutex.Lock() + defer fake.invocationsMutex.Unlock() + if fake.invocations == nil { + fake.invocations = map[string][][]interface{}{} + } + if fake.invocations[key] == nil { + fake.invocations[key] = [][]interface{}{} + } + fake.invocations[key] = append(fake.invocations[key], args) +} diff --git a/pkg/patch/patch.go b/pkg/patch/patch.go new file mode 100644 index 0000000..40a6a67 --- /dev/null +++ b/pkg/patch/patch.go @@ -0,0 +1,450 @@ +/* +Copyright Gen Digital Inc. All Rights Reserved. + +SPDX-License-Identifier: Apache-2.0 +*/ + +package patch + +import ( + "encoding/json" + "errors" + "fmt" + "sort" + "strings" + + "github.com/trustbloc/sidetree-go/pkg/document" + json2 "github.com/trustbloc/sidetree-go/pkg/util/json" +) + +const jsonPatchAddTemplate = `{ "op": "add", "path": "/%s", "value": %s }` + +// Action defines action of document patch. +type Action string + +const ( + + // Replace captures enum value "replace". + Replace Action = "replace" + + // AddPublicKeys captures enum value "add-public-keys". + AddPublicKeys Action = "add-public-keys" + + // RemovePublicKeys captures enum value "remove-public-keys". + RemovePublicKeys Action = "remove-public-keys" + + // AddServiceEndpoints captures "add-services". 
+	AddServiceEndpoints Action = "add-services"
+
+	// RemoveServiceEndpoints captures "remove-services".
+	RemoveServiceEndpoints Action = "remove-services"
+
+	// JSONPatch captures enum value "ietf-json-patch".
+	JSONPatch Action = "ietf-json-patch"
+
+	// AddAlsoKnownAs captures "add-also-known-as".
+	AddAlsoKnownAs Action = "add-also-known-as"
+
+	// RemoveAlsoKnownAs captures "remove-also-known-as".
+	RemoveAlsoKnownAs Action = "remove-also-known-as"
+)
+
+// Key defines a key that will be used to get document patch information.
+type Key string
+
+const (
+
+	// DocumentKey captures "document" key.
+	DocumentKey Key = "document"
+
+	// PatchesKey captures "patches" key.
+	PatchesKey Key = "patches"
+
+	// PublicKeys captures "publicKeys" key.
+	PublicKeys Key = "publicKeys"
+
+	// ServicesKey captures "services" key.
+	ServicesKey Key = "services"
+
+	// IdsKey captures "ids" key.
+	IdsKey Key = "ids"
+
+	// ActionKey captures "action" key.
+	ActionKey Key = "action"
+
+	// UrisKey captures "uris" key.
+	UrisKey Key = "uris"
+)
+
+var actionConfig = map[Action]Key{
+	AddPublicKeys:          PublicKeys,
+	RemovePublicKeys:       IdsKey,
+	AddServiceEndpoints:    ServicesKey,
+	RemoveServiceEndpoints: IdsKey,
+	JSONPatch:              PatchesKey,
+	Replace:                DocumentKey,
+	AddAlsoKnownAs:         UrisKey,
+	RemoveAlsoKnownAs:      UrisKey,
+}
+
+// Patch defines a generic patch structure.
+type Patch map[Key]interface{}
+
+// PatchesFromDocument creates patches from an opaque document.
+func PatchesFromDocument(doc string) ([]Patch, error) {
+	parsed, err := document.FromBytes([]byte(doc))
+	if err != nil {
+		return nil, err
+	}
+
+	if err := validateDocument(parsed); err != nil {
+		return nil, err
+	}
+
+	var docPatches []Patch
+	var jsonPatches []string
+
+	for _, key := range sortedKeys(parsed) {
+		jsonBytes, err := json.Marshal(parsed[key])
+		if err != nil {
+			return nil, err
+		}
+
+		var docPatch Patch
+		switch key {
+		case document.PublicKeyProperty:
+			docPatch, err = NewAddPublicKeysPatch(string(jsonBytes))
+		case document.ServiceProperty:
+			docPatch, err = NewAddServiceEndpointsPatch(string(jsonBytes))
+		case document.AlsoKnownAs:
+			docPatch, err = NewAddAlsoKnownAs(string(jsonBytes))
+		default:
+			jsonPatches = append(jsonPatches, fmt.Sprintf(jsonPatchAddTemplate, key, string(jsonBytes)))
+		}
+
+		if err != nil {
+			return nil, err
+		}
+
+		if docPatch != nil {
+			docPatches = append(docPatches, docPatch)
+		}
+	}
+
+	if len(jsonPatches) > 0 {
+		combinedJSONPatch, err := NewJSONPatch(fmt.Sprintf("[%s]", strings.Join(jsonPatches, ",")))
+		if err != nil {
+			return nil, err
+		}
+
+		docPatches = append(docPatches, combinedJSONPatch)
+	}
+
+	return docPatches, nil
+}
+
+// NewReplacePatch creates a new replace patch.
+func NewReplacePatch(doc string) (Patch, error) {
+	parsed, err := document.ReplaceDocumentFromBytes([]byte(doc))
+	if err != nil {
+		return nil, err
+	}
+
+	if err := validateReplaceDocument(parsed); err != nil {
+		return nil, err
+	}
+
+	patch := make(Patch)
+	patch[ActionKey] = Replace
+	patch[DocumentKey] = parsed.JSONLdObject()
+
+	return patch, nil
+}
+
+// NewJSONPatch creates a new generic update patch (used for generic updates).
+func NewJSONPatch(patches string) (Patch, error) {
+	var generic []interface{}
+	err := json.Unmarshal([]byte(patches), &generic)
+	if err != nil {
+		return nil, err
+	}
+
+	patch := make(Patch)
+	patch[ActionKey] = JSONPatch
+	patch[PatchesKey] = generic
+
+	return patch, nil
+}
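
To make the flow above concrete, here is a hypothetical usage sketch (the document literal is invented and the snippet is not part of the imported sources): PatchesFromDocument turns the publicKey and service entries into dedicated patches and folds all remaining top-level keys into a single ietf-json-patch. Since keys are visited in sorted order and the combined JSON patch is appended last, the result is deterministic:

package patch_test

import (
	"fmt"

	"github.com/trustbloc/sidetree-go/pkg/patch"
)

func ExamplePatchesFromDocument() {
	doc := `{
	  "publicKey": [{"id": "key1", "type": "JsonWebKey2020", "publicKeyJwk": {}}],
	  "service": [{"id": "svc1", "type": "LinkedDomains", "serviceEndpoint": "https://example.com"}],
	  "extra": "value"
	}`

	patches, err := patch.PatchesFromDocument(doc)
	if err != nil {
		panic(err)
	}

	for _, p := range patches {
		action, _ := p.GetAction()
		fmt.Println(action)
	}
	// Output:
	// add-public-keys
	// add-services
	// ietf-json-patch
}

+
+// NewAddPublicKeysPatch creates a new patch for adding public keys.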
+func NewAddPublicKeysPatch(publicKeys string) (Patch, error) {
+	pubKeys, err := getPublicKeys(publicKeys)
+	if err != nil {
+		return nil, err
+	}
+
+	patch := make(Patch)
+	patch[ActionKey] = AddPublicKeys
+	patch[PublicKeys] = pubKeys
+
+	return patch, nil
+}
+
+// NewRemovePublicKeysPatch creates a new patch for removing public keys.
+func NewRemovePublicKeysPatch(publicKeyIds string) (Patch, error) {
+	ids, err := getStringArray(publicKeyIds)
+	if err != nil {
+		return nil, fmt.Errorf("public key ids not string array: %s", err.Error())
+	}
+
+	if len(ids) == 0 {
+		return nil, errors.New("missing public key ids")
+	}
+
+	patch := make(Patch)
+	patch[ActionKey] = RemovePublicKeys
+	patch[IdsKey] = getGenericArray(ids)
+
+	return patch, nil
+}
+
+// NewAddServiceEndpointsPatch creates a new patch for adding service endpoints.
+func NewAddServiceEndpointsPatch(serviceEndpoints string) (Patch, error) {
+	services, err := getServices(serviceEndpoints)
+	if err != nil {
+		return nil, err
+	}
+
+	patch := make(Patch)
+	patch[ActionKey] = AddServiceEndpoints
+	patch[ServicesKey] = services
+
+	return patch, nil
+}
+
+// NewRemoveServiceEndpointsPatch creates a new patch for removing service endpoints.
+func NewRemoveServiceEndpointsPatch(serviceEndpointIds string) (Patch, error) {
+	ids, err := getStringArray(serviceEndpointIds)
+	if err != nil {
+		return nil, fmt.Errorf("service ids not string array: %s", err.Error())
+	}
+
+	if len(ids) == 0 {
+		return nil, errors.New("missing service ids")
+	}
+
+	patch := make(Patch)
+	patch[ActionKey] = RemoveServiceEndpoints
+	patch[IdsKey] = getGenericArray(ids)
+
+	return patch, nil
+}
+
+// NewAddAlsoKnownAs creates a new patch for adding the also-known-as property.
+func NewAddAlsoKnownAs(uris string) (Patch, error) {
+	urisToAdd, err := getStringArray(uris)
+	if err != nil {
+		return nil, fmt.Errorf("also known as uris is not string array: %s", err.Error())
+	}
+
+	if len(urisToAdd) == 0 {
+		return nil, errors.New("missing also known as uris")
+	}
+
+	patch := make(Patch)
+	patch[ActionKey] = AddAlsoKnownAs
+	patch[UrisKey] = getGenericArray(urisToAdd)
+
+	return patch, nil
+}
+
+// NewRemoveAlsoKnownAs creates a new patch for removing an also-known-as URI.
+func NewRemoveAlsoKnownAs(uris string) (Patch, error) {
+	urisToRemove, err := getStringArray(uris)
+	if err != nil {
+		return nil, fmt.Errorf("also known as uris is not string array: %s", err.Error())
+	}
+
+	if len(urisToRemove) == 0 {
+		return nil, errors.New("missing also known as uris")
+	}
+
+	patch := make(Patch)
+	patch[ActionKey] = RemoveAlsoKnownAs
+	patch[UrisKey] = getGenericArray(urisToRemove)
+
+	return patch, nil
+}
+
+// GetValue returns the patch value.
+func (p Patch) GetValue() (interface{}, error) {
+	action, err := p.GetAction()
+	if err != nil {
+		return nil, err
+	}
+
+	valueKey, ok := actionConfig[action]
+	if !ok {
+		return nil, fmt.Errorf("action '%s' is not supported", action)
+	}
+
+	entry, ok := p[valueKey]
+	if !ok {
+		return nil, fmt.Errorf("%s patch is missing key: %s", action, valueKey)
+	}
+
+	return entry, nil
+}
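
For orientation: GetValue (above) consults actionConfig to map an action to the key holding its payload. A minimal sketch (the test name is invented and the snippet is not part of the imported sources):

package patch_test

import (
	"testing"

	"github.com/stretchr/testify/require"

	"github.com/trustbloc/sidetree-go/pkg/patch"
)

func TestGetValueDispatchSketch(t *testing.T) {
	p, err := patch.NewRemovePublicKeysPatch(`["key1", "key2"]`)
	require.NoError(t, err)

	// remove-public-keys stores its payload under "ids", so GetValue returns that entry.
	value, err := p.GetValue()
	require.NoError(t, err)
	require.Equal(t, []interface{}{"key1", "key2"}, value)
}

+
+// GetAction returns the patch action, or an error if the action is missing or not supported.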
+func (p Patch) GetAction() (Action, error) { + entry, ok := p[ActionKey] + if !ok { + return "", fmt.Errorf("patch is missing %s key", ActionKey) + } + + var action Action + switch v := entry.(type) { + case Action: + action = v + case string: + action = Action(v) + default: + return "", fmt.Errorf("action type not supported: %s", v) + } + + _, ok = actionConfig[action] + if !ok { + return "", fmt.Errorf("action '%s' is not supported", action) + } + + return action, nil +} + +// Bytes returns byte representation of patch. +func (p Patch) Bytes() ([]byte, error) { + return json2.MarshalCanonical(p) +} + +// JSONLdObject returns map that represents JSON LD Object. +func (p Patch) JSONLdObject() map[Key]interface{} { + return p +} + +// FromBytes parses provided data into document patch. +func FromBytes(data []byte) (Patch, error) { + patch := make(Patch) + err := json.Unmarshal(data, &patch) + if err != nil { + return nil, err + } + + _, err = patch.GetAction() + if err != nil { + return nil, err + } + + _, err = patch.GetValue() + if err != nil { + return nil, err + } + + return patch, nil +} + +func stringEntry(entry interface{}) string { + if entry == nil { + return "" + } + id, ok := entry.(string) + if !ok { + return "" + } + + return id +} + +func validateReplaceDocument(doc document.ReplaceDocument) error { + allowedKeys := []string{document.ReplaceServiceProperty, document.ReplacePublicKeyProperty} + + for key := range doc { + if !contains(allowedKeys, key) { + return fmt.Errorf("key '%s' is not allowed in replace document", key) + } + } + + return nil +} + +func contains(keys []string, key string) bool { + for _, k := range keys { + if k == key { + return true + } + } + + return false +} + +func validateDocument(doc document.Document) error { + if doc.ID() != "" { + return errors.New("document must NOT have the id property") + } + + return nil +} + +func getPublicKeys(publicKeys string) (interface{}, error) { + // create an empty did document with public keys + pkDoc, err := document.DidDocumentFromBytes([]byte(fmt.Sprintf(`{%q:%s}`, document.PublicKeyProperty, publicKeys))) + if err != nil { + return nil, fmt.Errorf("public keys invalid: %s", err.Error()) + } + + return pkDoc[document.PublicKeyProperty], nil +} + +func getServices(serviceEndpoints string) (interface{}, error) { + // create an empty did document with service endpoints + svcDocStr := fmt.Sprintf(`{%q:%s}`, document.ServiceProperty, serviceEndpoints) + svcDoc, err := document.DidDocumentFromBytes([]byte(svcDocStr)) + if err != nil { + return nil, fmt.Errorf("services invalid: %s", err.Error()) + } + + return svcDoc[document.ServiceProperty], nil +} + +func getStringArray(arr string) ([]string, error) { + var values []string + err := json.Unmarshal([]byte(arr), &values) + if err != nil { + return nil, err + } + + return values, nil +} + +func getGenericArray(arr []string) []interface{} { + var values []interface{} + for _, v := range arr { + values = append(values, v) + } + + return values +} + +func sortedKeys(m map[string]interface{}) []string { + keys := make([]string, len(m)) + + i := 0 + + for k := range m { + keys[i] = k + i++ + } + + sort.Strings(keys) + + return keys +} diff --git a/pkg/patch/patch_test.go b/pkg/patch/patch_test.go new file mode 100644 index 0000000..140d5b9 --- /dev/null +++ b/pkg/patch/patch_test.go @@ -0,0 +1,708 @@ +/* +Copyright Gen Digital Inc. All Rights Reserved. 
+
+SPDX-License-Identifier: Apache-2.0
+*/
+
+package patch
+
+import (
+	"testing"
+
+	"github.com/stretchr/testify/require"
+
+	"github.com/trustbloc/sidetree-go/pkg/document"
+)
+
+func TestFromBytes(t *testing.T) {
+	t.Run("success", func(t *testing.T) {
+		patch, err := FromBytes([]byte(addPublicKeysPatch))
+		require.NoError(t, err)
+		require.NotNil(t, patch)
+
+		action, err := patch.GetAction()
+		require.NoError(t, err)
+		require.Equal(t, action, AddPublicKeys)
+
+		value, err := patch.GetValue()
+		require.NoError(t, err)
+		require.NotEmpty(t, value)
+		require.Equal(t, value, patch[PublicKeys])
+
+		bytes, err := patch.Bytes()
+		require.NoError(t, err)
+		require.NotEmpty(t, bytes)
+
+		jsonld := patch.JSONLdObject()
+		require.NotNil(t, jsonld)
+	})
+	t.Run("parse error - invalid character", func(t *testing.T) {
+		patch, err := FromBytes([]byte("[test : 123]"))
+		require.Error(t, err)
+		require.Nil(t, patch)
+		require.Contains(t, err.Error(), "invalid character")
+	})
+}
+
+func TestActionValidation(t *testing.T) {
+	t.Run("error - missing action", func(t *testing.T) {
+		patch, err := FromBytes([]byte(`{}`))
+		require.Error(t, err)
+		require.Nil(t, patch)
+		require.Contains(t, err.Error(), "patch is missing action key")
+	})
+	t.Run("error - action not supported", func(t *testing.T) {
+		patch, err := FromBytes([]byte(`{"action": "invalid"}`))
+		require.Error(t, err)
+		require.Nil(t, patch)
+		require.Equal(t, err.Error(), "action 'invalid' is not supported")
+	})
+	t.Run("error - action type not supported", func(t *testing.T) {
+		patch, err := FromBytes([]byte(`{"action": 0}`))
+		require.Error(t, err)
+		require.Nil(t, patch)
+		require.Contains(t, err.Error(), "action type not supported")
+	})
+}
+
+func TestPatchesFromDocument(t *testing.T) {
+	t.Run("success from new", func(t *testing.T) {
+		patches, err := PatchesFromDocument(testDoc)
+		require.NoError(t, err)
+		require.Equal(t, 3, len(patches))
+	})
+	t.Run("success from new with also known as", func(t *testing.T) {
+		patches, err := PatchesFromDocument(testDocWithAlsoKnownAs)
+		require.NoError(t, err)
+		require.Equal(t, 2, len(patches))
+	})
+	t.Run("error from new due to invalid uris format", func(t *testing.T) {
+		patches, err := PatchesFromDocument(testDocWithInvalidAlsoKnownAs)
+		require.Error(t, err)
+		require.Nil(t, patches)
+		require.Contains(t, err.Error(), "also known as uris is not string array")
+	})
+	t.Run("error - invalid json", func(t *testing.T) {
+		p, err := PatchesFromDocument(`invalid`)
+		require.Error(t, err)
+		require.Nil(t, p)
+		require.Contains(t, err.Error(), "invalid character")
+	})
+	t.Run("error - document has id", func(t *testing.T) {
+		p, err := PatchesFromDocument(`{"id": "abc"}`)
+		require.Error(t, err)
+		require.Nil(t, p)
+		require.Contains(t, err.Error(), "document must NOT have the id property")
+	})
+	t.Run("patches array is always in the same order", func(t *testing.T) {
+		var prev []Patch
+
+		for i := 1; i <= 100; i++ {
+			patches, err := PatchesFromDocument(testDoc)
+			require.NoError(t, err)
+
+			if prev != nil {
+				require.Equalf(t, prev, patches, "expecting the patches array to be in the same order")
+			}
+
+			prev = patches
+		}
+	})
+}
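
One property worth illustrating before the remaining tests: Bytes() serializes through MarshalCanonical, so two equivalent patches serialize identically regardless of the key order in the input JSON. A minimal sketch that would live alongside the tests in this file (the test name is invented and the snippet is not part of the imported sources; it assumes MarshalCanonical emits key-sorted canonical JSON, as its name suggests):

func TestBytesCanonicalSketch(t *testing.T) {
	p1, err := FromBytes([]byte(`{"action": "remove-public-keys", "ids": ["key1"]}`))
	require.NoError(t, err)

	p2, err := FromBytes([]byte(`{"ids": ["key1"], "action": "remove-public-keys"}`))
	require.NoError(t, err)

	b1, err := p1.Bytes()
	require.NoError(t, err)

	b2, err := p2.Bytes()
	require.NoError(t, err)

	// Canonical marshalling sorts object keys, so both byte slices match.
	require.Equal(t, b1, b2)
}

+
+func TestReplacePatch(t *testing.T) {
+	t.Run("success from bytes", func(t *testing.T) {
+		patch, err := FromBytes([]byte(replacePatch))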
require.NoError(t, err) + require.NotNil(t, patch) + + action, err := patch.GetAction() + require.NoError(t, err) + require.Equal(t, action, Replace) + + value, err := patch.GetValue() + require.NoError(t, err) + require.NotEmpty(t, value) + require.Equal(t, value, patch[DocumentKey]) + }) + t.Run("missing document", func(t *testing.T) { + patch, err := FromBytes([]byte(`{"action": "replace"}`)) + require.Error(t, err) + require.Nil(t, patch) + require.Contains(t, err.Error(), "replace patch is missing key: document") + }) + t.Run("success from new", func(t *testing.T) { + doc, err := document.FromBytes([]byte(replaceDoc)) + require.NoError(t, err) + + p, err := NewReplacePatch(replaceDoc) + require.NoError(t, err) + require.NotNil(t, p) + + action, err := p.GetAction() + require.NoError(t, err) + require.Equal(t, action, Replace) + + value, err := p.GetValue() + require.NoError(t, err) + require.Equal(t, value, doc.JSONLdObject()) + }) + t.Run("error - invalid json", func(t *testing.T) { + p, err := NewReplacePatch(`invalid`) + require.Error(t, err) + require.Nil(t, p) + require.Contains(t, err.Error(), "invalid character") + }) + t.Run("error - document has invalid property", func(t *testing.T) { + p, err := NewReplacePatch(`{"id": "abc"}`) + require.Error(t, err) + require.Nil(t, p) + require.Contains(t, err.Error(), "key 'id' is not allowed in replace document") + }) +} + +func TestIETFPatch(t *testing.T) { + t.Run("success", func(t *testing.T) { + patch, err := FromBytes([]byte(ietfPatch)) + require.NoError(t, err) + require.NotNil(t, patch) + + action, err := patch.GetAction() + require.NoError(t, err) + require.Equal(t, action, JSONPatch) + + value, err := patch.GetValue() + require.NoError(t, err) + require.NotEmpty(t, value) + require.Equal(t, value, patch[PatchesKey]) + }) + t.Run("missing patches", func(t *testing.T) { + patch, err := FromBytes([]byte(`{"action": "ietf-json-patch"}`)) + require.Error(t, err) + require.Nil(t, patch) + require.Contains(t, err.Error(), "ietf-json-patch patch is missing key: patches") + }) + t.Run("success from new", func(t *testing.T) { + p, err := NewJSONPatch(patches) + require.NoError(t, err) + require.NotNil(t, p) + + action, err := p.GetAction() + require.NoError(t, err) + require.Equal(t, action, JSONPatch) + + value, err := p.GetValue() + require.NoError(t, err) + require.NotEmpty(t, value) + require.Equal(t, value, p[PatchesKey]) + }) +} + +func TestAddPublicKeysPatch(t *testing.T) { + t.Run("success", func(t *testing.T) { + patch, err := FromBytes([]byte(addPublicKeysPatch)) + require.NoError(t, err) + require.NotNil(t, patch) + + action, err := patch.GetAction() + require.NoError(t, err) + require.Equal(t, action, AddPublicKeys) + + value, err := patch.GetValue() + require.NoError(t, err) + require.NotEmpty(t, value) + require.Equal(t, value, patch[PublicKeys]) + }) + t.Run("missing public keys", func(t *testing.T) { + patch, err := FromBytes([]byte(`{"action": "add-public-keys"}`)) + require.Error(t, err) + require.Nil(t, patch) + require.Contains(t, err.Error(), "add-public-keys patch is missing key: publicKeys") + }) + t.Run("success from new", func(t *testing.T) { + p, err := NewAddPublicKeysPatch(testAddPublicKeys) + require.NoError(t, err) + require.NotNil(t, p) + + action, err := p.GetAction() + require.NoError(t, err) + require.Equal(t, action, AddPublicKeys) + + value, err := p.GetValue() + require.NoError(t, err) + require.NotEmpty(t, value) + require.Equal(t, value, p[PublicKeys]) + }) + t.Run("error - invalid string", func(t 
*testing.T) { + p, err := NewAddPublicKeysPatch("invalid-json") + require.Error(t, err) + require.Nil(t, p) + require.Contains(t, err.Error(), "public keys invalid: invalid character") + }) +} + +func TestRemovePublicKeysPatch(t *testing.T) { + t.Run("success", func(t *testing.T) { + patch, err := FromBytes([]byte(removePublicKeysPatch)) + require.NoError(t, err) + require.NotNil(t, patch) + + action, err := patch.GetAction() + require.NoError(t, err) + require.Equal(t, action, RemovePublicKeys) + + value, err := patch.GetValue() + require.NoError(t, err) + require.NotEmpty(t, value) + require.Equal(t, value, patch[IdsKey]) + }) + t.Run("missing public key ids", func(t *testing.T) { + patch, err := FromBytes([]byte(`{"action": "remove-public-keys"}`)) + require.Error(t, err) + require.Nil(t, patch) + require.Contains(t, err.Error(), "remove-public-keys patch is missing key: ids") + }) + t.Run("success from new", func(t *testing.T) { + const ids = `["key1", "key2"]` + p, err := NewRemovePublicKeysPatch(ids) + require.NoError(t, err) + require.NotNil(t, p) + + action, err := p.GetAction() + require.NoError(t, err) + require.Equal(t, action, RemovePublicKeys) + + value, err := p.GetValue() + require.NoError(t, err) + require.NotEmpty(t, value) + require.Equal(t, value, p[IdsKey]) + }) + t.Run("empty public key ids", func(t *testing.T) { + const ids = `[]` + p, err := NewRemovePublicKeysPatch(ids) + require.Error(t, err) + require.Nil(t, p) + require.Contains(t, err.Error(), "missing public key ids") + }) + t.Run("error - ids not string array", func(t *testing.T) { + const ids = `[0, 1]` + p, err := NewRemovePublicKeysPatch(ids) + require.Error(t, err) + require.Nil(t, p) + require.Contains(t, err.Error(), "cannot unmarshal") + }) +} + +func TestAddServiceEndpointsPatch(t *testing.T) { + t.Run("success", func(t *testing.T) { + patch, err := FromBytes([]byte(addServiceEndpoints)) + require.NoError(t, err) + require.NotNil(t, patch) + + action, err := patch.GetAction() + require.NoError(t, err) + require.Equal(t, action, AddServiceEndpoints) + + value, err := patch.GetValue() + require.NoError(t, err) + require.NotEmpty(t, value) + require.Equal(t, value, patch[ServicesKey]) + }) + t.Run("missing service endpoints", func(t *testing.T) { + patch, err := FromBytes([]byte(`{"action": "add-services"}`)) + require.Error(t, err) + require.Nil(t, patch) + require.Contains(t, err.Error(), "add-services patch is missing key: services") + }) + t.Run("success from new", func(t *testing.T) { + p, err := NewAddServiceEndpointsPatch(testAddServiceEndpoints) + require.NoError(t, err) + require.NotNil(t, p) + + action, err := p.GetAction() + require.NoError(t, err) + require.Equal(t, action, AddServiceEndpoints) + + value, err := p.GetValue() + require.NoError(t, err) + require.NotEmpty(t, value) + require.Equal(t, value, p[ServicesKey]) + }) + t.Run("error - not json", func(t *testing.T) { + p, err := NewAddServiceEndpointsPatch("not-json") + require.Error(t, err) + require.Nil(t, p) + require.Contains(t, err.Error(), "services invalid: invalid character") + }) +} + +func TestRemoveServiceEndpointsPatch(t *testing.T) { + t.Run("success", func(t *testing.T) { + p, err := FromBytes([]byte(removeServiceEndpoints)) + require.NoError(t, err) + require.NotNil(t, p) + + action, err := p.GetAction() + require.NoError(t, err) + require.Equal(t, action, RemoveServiceEndpoints) + + value, err := p.GetValue() + require.NoError(t, err) + require.NotEmpty(t, value) + require.Equal(t, value, p[IdsKey]) + }) + t.Run("missing 
public key ids", func(t *testing.T) { + patch, err := FromBytes([]byte(`{"action": "remove-services"}`)) + require.Error(t, err) + require.Nil(t, patch) + require.Contains(t, err.Error(), "remove-services patch is missing key: ids") + }) + t.Run("success from new", func(t *testing.T) { + const ids = `["svc1", "svc2"]` + p, err := NewRemoveServiceEndpointsPatch(ids) + require.NoError(t, err) + require.NotNil(t, p) + + action, err := p.GetAction() + require.NoError(t, err) + require.Equal(t, action, RemoveServiceEndpoints) + + value, err := p.GetValue() + require.NoError(t, err) + require.NotEmpty(t, value) + require.Equal(t, value, p[IdsKey]) + }) + t.Run("empty service ids", func(t *testing.T) { + const ids = `[]` + p, err := NewRemoveServiceEndpointsPatch(ids) + require.Error(t, err) + require.Nil(t, p) + require.Contains(t, err.Error(), "missing service ids") + }) + t.Run("error - ids not string array", func(t *testing.T) { + const ids = `[0, 1]` + p, err := NewRemoveServiceEndpointsPatch(ids) + require.Error(t, err) + require.Nil(t, p) + require.Contains(t, err.Error(), "cannot unmarshal") + }) +} + +func TestAddAlsoKnownAsPatch(t *testing.T) { + t.Run("success", func(t *testing.T) { + patch, err := FromBytes([]byte(addAlsoKnownAs)) + require.NoError(t, err) + require.NotNil(t, patch) + + action, err := patch.GetAction() + require.NoError(t, err) + require.Equal(t, action, AddAlsoKnownAs) + + value, err := patch.GetValue() + require.NoError(t, err) + require.NotEmpty(t, value) + require.Equal(t, value, patch[UrisKey]) + }) + t.Run("missing URIs", func(t *testing.T) { + patch, err := FromBytes([]byte(`{"action": "add-also-known-as"}`)) + require.Error(t, err) + require.Nil(t, patch) + require.Contains(t, err.Error(), "add-also-known-as patch is missing key: uris") + }) + t.Run("success from new", func(t *testing.T) { + p, err := NewAddAlsoKnownAs(`["testURI"]`) + require.NoError(t, err) + require.NotNil(t, p) + + action, err := p.GetAction() + require.NoError(t, err) + require.Equal(t, action, AddAlsoKnownAs) + + value, err := p.GetValue() + require.NoError(t, err) + require.NotEmpty(t, value) + require.Equal(t, value, p[UrisKey]) + }) + t.Run("error - empty", func(t *testing.T) { + p, err := NewAddAlsoKnownAs("[]") + require.Error(t, err) + require.Nil(t, p) + require.Contains(t, err.Error(), "missing also known as uris") + }) + t.Run("error - not json", func(t *testing.T) { + p, err := NewAddAlsoKnownAs("not-json") + require.Error(t, err) + require.Nil(t, p) + require.Contains(t, err.Error(), "also known as uris is not string array") + }) +} + +func TestRemoveAlsoKnownAsPatch(t *testing.T) { + t.Run("success", func(t *testing.T) { + p, err := FromBytes([]byte(removeAlsoKnownAs)) + require.NoError(t, err) + require.NotNil(t, p) + + action, err := p.GetAction() + require.NoError(t, err) + require.Equal(t, action, RemoveAlsoKnownAs) + + value, err := p.GetValue() + require.NoError(t, err) + require.NotEmpty(t, value) + require.Equal(t, value, p[UrisKey]) + }) + t.Run("missing public key ids", func(t *testing.T) { + patch, err := FromBytes([]byte(`{"action": "remove-also-known-as"}`)) + require.Error(t, err) + require.Nil(t, patch) + require.Contains(t, err.Error(), "remove-also-known-as patch is missing key: uris") + }) + t.Run("success from new", func(t *testing.T) { + const uris = `["identity1", "identity2"]` + p, err := NewRemoveAlsoKnownAs(uris) + require.NoError(t, err) + require.NotNil(t, p) + + action, err := p.GetAction() + require.NoError(t, err) + require.Equal(t, action, 
RemoveAlsoKnownAs) + + value, err := p.GetValue() + require.NoError(t, err) + require.NotEmpty(t, value) + require.Equal(t, value, p[UrisKey]) + }) + t.Run("empty uris", func(t *testing.T) { + const uris = `[]` + p, err := NewRemoveAlsoKnownAs(uris) + require.Error(t, err) + require.Nil(t, p) + require.Contains(t, err.Error(), "missing also known as uris") + }) + t.Run("error - uris not string array", func(t *testing.T) { + const uris = `[0, 1]` + p, err := NewRemoveAlsoKnownAs(uris) + require.Error(t, err) + require.Nil(t, p) + require.Contains(t, err.Error(), "cannot unmarshal") + }) +} + +func TestBytes(t *testing.T) { + t.Run("success", func(t *testing.T) { + original, err := FromBytes([]byte(addPublicKeysPatch)) + require.NoError(t, err) + require.NotNil(t, original) + + bytes, err := original.Bytes() + require.NoError(t, err) + require.NotEmpty(t, bytes) + + patch, err := FromBytes(bytes) + require.NoError(t, err) + require.Equal(t, original, patch) + }) + t.Run("error from bytes", func(t *testing.T) { + patch := Patch{} + patch["test"] = make(chan int) + + bytes, err := patch.Bytes() + require.NotNil(t, err) + require.Nil(t, bytes) + require.Contains(t, err.Error(), "json: unsupported type: chan int") + }) +} + +func TestStringEntry(t *testing.T) { + t.Run("success", func(t *testing.T) { + str := stringEntry([]string{"hello"}) + require.Empty(t, str) + + str = stringEntry("hello") + require.Equal(t, "hello", str) + }) +} + +const ietfPatch = `{ + "action": "ietf-json-patch", + "patches": [{ + "op": "replace", + "path": "/name", + "value": "value" + }] +}` + +const patches = `[ + { + "op": "replace", + "path": "/some/object/0", + "value": "value" + } +]` + +const addPublicKeysPatch = `{ + "action": "add-public-keys", + "publicKeys": [{ + "id": "key1", + "type": "JsonWebKey2020", + "purposes": ["assertionMethod"], + "publicKeyJwk": { + "kty": "EC", + "crv": "P-256K", + "x": "PUymIqdtF_qxaAqPABSw-C-owT1KYYQbsMKFM-L9fJA", + "y": "nM84jDHCMOTGTh_ZdHq4dBBdo4Z5PkEOW9jA8z8IsGc" + } + }] +}` + +const testAddPublicKeys = `[{ + "id": "key1", + "type": "JsonWebKey2020", + "publicKeyJwk": { + "kty": "EC", + "crv": "P-256K", + "x": "PUymIqdtF_qxaAqPABSw-C-owT1KYYQbsMKFM-L9fJA", + "y": "nM84jDHCMOTGTh_ZdHq4dBBdo4Z5PkEOW9jA8z8IsGc" + } + }]` + +const removePublicKeysPatch = `{ + "action": "remove-public-keys", + "ids": ["key1", "key2"] +}` + +const addServiceEndpoints = `{ + "action": "add-services", + "services": [ + { + "id": "sds1", + "type": "SecureDataStore", + "serviceEndpoint": "http://hub.my-personal-server.com" + }, + { + "id": "sds2", + "type": "SecureDataStore", + "serviceEndpoint": "http://some-cloud.com/hub" + } + ] +}` + +const testAddServiceEndpoints = `[ + { + "id": "sds1", + "type": "SecureDataStore", + "serviceEndpoint": "http://hub.my-personal-server.com" + }, + { + "id": "sds2", + "type": "SecureDataStore", + "serviceEndpoint": "http://some-cloud.com/hub" + } + ]` + +const removeServiceEndpoints = `{ + "action": "remove-services", + "ids": ["sds1", "sds2"] +}` + +const testDoc = `{ + "publicKey": [{ + "id": "key1", + "type": "JsonWebKey2020", + "purposes": ["authentication"], + "publicKeyJwk": { + "kty": "EC", + "crv": "P-256K", + "x": "PUymIqdtF_qxaAqPABSw-C-owT1KYYQbsMKFM-L9fJA", + "y": "nM84jDHCMOTGTh_ZdHq4dBBdo4Z5PkEOW9jA8z8IsGc" + } + }], + "service": [{ + "id":"vcs", + "type": "VerifiableCredentialService", + "serviceEndpoint": "https://example.com/vc/" + }], + "test": "test", + "other": "value" +}` + +const replacePatch = `{ + "action": "replace", + "document": { + 
"publicKeys": [ + { + "id": "key-1", + "purposes": ["authentication"], + "type": "EcdsaSecp256k1VerificationKey2019", + "publicKeyJwk": { + "kty": "EC", + "crv": "P-256K", + "x": "PUymIqdtF_qxaAqPABSw-C-owT1KYYQbsMKFM-L9fJA", + "y": "nM84jDHCMOTGTh_ZdHq4dBBdo4Z5PkEOW9jA8z8IsGc" + } + }], + "services": [ + { + "id": "sds3", + "type": "SecureDataStore", + "serviceEndpoint": "http://hub.my-personal-server.com" + }] + } +}` + +const replaceDoc = `{ + "publicKeys": [ + { + "id": "key-1", + "purposes": ["authentication"], + "type": "EcdsaSecp256k1VerificationKey2019", + "publicKeyJwk": { + "kty": "EC", + "crv": "P-256K", + "x": "PUymIqdtF_qxaAqPABSw-C-owT1KYYQbsMKFM-L9fJA", + "y": "nM84jDHCMOTGTh_ZdHq4dBBdo4Z5PkEOW9jA8z8IsGc" + } + }], + "services": [ + { + "id": "sds3", + "type": "SecureDataStore", + "serviceEndpoint": "http://hub.my-personal-server.com" + }] +}` + +const addAlsoKnownAs = `{ + "action": "add-also-known-as", + "uris": ["testURI"] +}` + +const removeAlsoKnownAs = `{ + "action": "remove-also-known-as", + "uris": ["testURI", "nonExistentURI"] +}` + +const testDocWithAlsoKnownAs = `{ + "alsoKnownAs": ["authentication"], + "publicKey": [{ + "id": "key1", + "type": "JsonWebKey2020", + "purposes": ["authentication"], + "publicKeyJwk": { + "kty": "EC", + "crv": "P-256K", + "x": "PUymIqdtF_qxaAqPABSw-C-owT1KYYQbsMKFM-L9fJA", + "y": "nM84jDHCMOTGTh_ZdHq4dBBdo4Z5PkEOW9jA8z8IsGc" + } + }] +}` + +const testDocWithInvalidAlsoKnownAs = `{ + "alsoKnownAs": [123] +}` diff --git a/pkg/util/ecsigner/signer.go b/pkg/util/ecsigner/signer.go new file mode 100644 index 0000000..7de90b5 --- /dev/null +++ b/pkg/util/ecsigner/signer.go @@ -0,0 +1,99 @@ +/* +Copyright Gen Digital Inc. All Rights Reserved. + +SPDX-License-Identifier: Apache-2.0 +*/ + +package ecsigner + +import ( + "crypto" + "crypto/ecdsa" + "crypto/elliptic" + "crypto/rand" + "errors" + + "github.com/btcsuite/btcd/btcec" + + "github.com/trustbloc/sidetree-go/pkg/jws" +) + +// Signer implements signer interface. +type Signer struct { + alg string + kid string + privateKey *ecdsa.PrivateKey +} + +// New creates new ECDSA signer. +func New(privKey *ecdsa.PrivateKey, alg, kid string) *Signer { + return &Signer{privateKey: privKey, kid: kid, alg: alg} +} + +// Headers provides required JWS protected headers. It provides information about signing key and algorithm. +func (signer *Signer) Headers() jws.Headers { + headers := make(jws.Headers) + + if signer.alg != "" { + headers[jws.HeaderAlgorithm] = signer.alg + } + + if signer.kid != "" { + headers[jws.HeaderKeyID] = signer.kid + } + + return headers +} + +// Sign signs msg and returns signature value. 
+func (signer *Signer) Sign(msg []byte) ([]byte, error) {
+    if signer.privateKey == nil {
+        return nil, errors.New("private key not provided")
+    }
+
+    hasher := getHasher(signer.privateKey.Curve).New()
+
+    _, err := hasher.Write(msg)
+    if err != nil {
+        return nil, err
+    }
+
+    hashed := hasher.Sum(nil)
+
+    r, s, err := ecdsa.Sign(rand.Reader, signer.privateKey, hashed)
+    if err != nil {
+        return nil, err
+    }
+
+    curveBits := signer.privateKey.Curve.Params().BitSize
+
+    const bitsInByte = 8
+    keyBytes := curveBits / bitsInByte
+    if curveBits%bitsInByte > 0 {
+        keyBytes++
+    }
+
+    return append(copyPadded(r.Bytes(), keyBytes), copyPadded(s.Bytes(), keyBytes)...), nil
+}
+
+func copyPadded(source []byte, size int) []byte {
+    dest := make([]byte, size)
+    copy(dest[size-len(source):], source)
+
+    return dest
+}
+
+func getHasher(curve elliptic.Curve) crypto.Hash {
+    switch curve {
+    case elliptic.P256():
+        return crypto.SHA256
+    case elliptic.P384():
+        return crypto.SHA384
+    case elliptic.P521():
+        return crypto.SHA512
+    case btcec.S256():
+        return crypto.SHA256
+    default:
+        return crypto.SHA256
+    }
+}
diff --git a/pkg/util/ecsigner/signer_test.go b/pkg/util/ecsigner/signer_test.go
new file mode 100644
index 0000000..3cab753
--- /dev/null
+++ b/pkg/util/ecsigner/signer_test.go
@@ -0,0 +1,109 @@
+/*
+Copyright Gen Digital Inc. All Rights Reserved.
+
+SPDX-License-Identifier: Apache-2.0
+*/
+
+package ecsigner
+
+import (
+    "crypto/ecdsa"
+    "crypto/elliptic"
+    "crypto/rand"
+    "testing"
+
+    "github.com/btcsuite/btcd/btcec"
+    "github.com/stretchr/testify/require"
+)
+
+func TestSign(t *testing.T) {
+    msg := []byte("test message")
+
+    t.Run("success EC P-256", func(t *testing.T) {
+        privateKey, err := ecdsa.GenerateKey(elliptic.P256(), rand.Reader)
+        require.NoError(t, err)
+
+        signer := New(privateKey, "ES256", "key-1")
+
+        signature, err := signer.Sign(msg)
+        require.NoError(t, err)
+        require.NotEmpty(t, signature)
+    })
+
+    t.Run("success EC P-384", func(t *testing.T) {
+        privateKey, err := ecdsa.GenerateKey(elliptic.P384(), rand.Reader)
+        require.NoError(t, err)
+
+        signer := New(privateKey, "ES384", "key-1")
+
+        signature, err := signer.Sign(msg)
+        require.NoError(t, err)
+        require.NotEmpty(t, signature)
+    })
+
+    t.Run("success EC P-521", func(t *testing.T) {
+        privateKey, err := ecdsa.GenerateKey(elliptic.P521(), rand.Reader)
+        require.NoError(t, err)
+
+        signer := New(privateKey, "ES512", "key-1")
+
+        signature, err := signer.Sign(msg)
+        require.NoError(t, err)
+        require.NotEmpty(t, signature)
+    })
+
+    t.Run("success EC secp256k1", func(t *testing.T) {
+        privateKey, err := ecdsa.GenerateKey(btcec.S256(), rand.Reader)
+        require.NoError(t, err)
+
+        signer := New(privateKey, "ES256K", "key-1")
+
+        signature, err := signer.Sign(msg)
+        require.NoError(t, err)
+        require.NotEmpty(t, signature)
+    })
+
+    t.Run("private key not provided", func(t *testing.T) {
+        privateKey, err := ecdsa.GenerateKey(btcec.S256(), rand.Reader)
+        require.NoError(t, err)
+
+        signer := New(privateKey, "ES256K", "key-1")
+        signer.privateKey = nil
+
+        signature, err := signer.Sign(msg)
+        require.Error(t, err)
+        require.Nil(t, signature)
+        require.Contains(t, err.Error(), "private key not provided")
+    })
+}
+
+func TestHeaders(t *testing.T) {
+    privateKey, err := ecdsa.GenerateKey(elliptic.P256(), rand.Reader)
+    require.NoError(t, err)
+
+    t.Run("success - kid, alg provided", func(t *testing.T) {
+        signer := New(privateKey, "ES256", "key-1")
+
+        // verify headers
+        kid, ok := signer.Headers().KeyID()
+        require.Equal(t, true, ok)
+ require.Equal(t, "key-1", kid) + + alg, ok := signer.Headers().Algorithm() + require.Equal(t, true, ok) + require.Equal(t, "ES256", alg) + }) + + t.Run("success - kid, alg not provided", func(t *testing.T) { + signer := New(privateKey, "", "") + + // verify headers + kid, ok := signer.Headers().KeyID() + require.Equal(t, false, ok) + require.Empty(t, kid) + + alg, ok := signer.Headers().Algorithm() + require.Equal(t, false, ok) + require.Empty(t, alg) + }) +} diff --git a/pkg/util/edsigner/signer.go b/pkg/util/edsigner/signer.go new file mode 100644 index 0000000..c645200 --- /dev/null +++ b/pkg/util/edsigner/signer.go @@ -0,0 +1,50 @@ +/* +Copyright Gen Digital Inc. All Rights Reserved. + +SPDX-License-Identifier: Apache-2.0 +*/ + +package edsigner + +import ( + "crypto/ed25519" + "errors" + + "github.com/trustbloc/sidetree-go/pkg/jws" +) + +// Signer implements signer interface. +type Signer struct { + alg string + kid string + privateKey ed25519.PrivateKey +} + +// New returns ED25519 signer. +func New(privKey ed25519.PrivateKey, alg, kid string) *Signer { + return &Signer{privateKey: privKey, kid: kid, alg: alg} +} + +// Headers provides required JWS protected headers. It provides information about signing key and algorithm. +func (signer *Signer) Headers() jws.Headers { + headers := make(jws.Headers) + + if signer.alg != "" { + headers[jws.HeaderAlgorithm] = signer.alg + } + + if signer.kid != "" { + headers[jws.HeaderKeyID] = signer.kid + } + + return headers +} + +// Sign signs msg and returns signature value. +func (signer *Signer) Sign(msg []byte) ([]byte, error) { + if l := len(signer.privateKey); l != ed25519.PrivateKeySize { + return nil, errors.New("invalid private key size") + } + + return ed25519.Sign(signer.privateKey, msg), nil +} diff --git a/pkg/util/edsigner/signer_test.go b/pkg/util/edsigner/signer_test.go new file mode 100644 index 0000000..eeb672c --- /dev/null +++ b/pkg/util/edsigner/signer_test.go @@ -0,0 +1,74 @@ +/* +Copyright Gen Digital Inc. All Rights Reserved. 
+ +SPDX-License-Identifier: Apache-2.0 +*/ + +package edsigner + +import ( + "crypto/ed25519" + "crypto/rand" + "testing" + + "github.com/stretchr/testify/require" +) + +func TestSign(t *testing.T) { + _, privateKey, err := ed25519.GenerateKey(rand.Reader) + require.NoError(t, err) + + msg := []byte("test message") + + t.Run("success", func(t *testing.T) { + signer := New(privateKey, "EdDSA", "key-1") + + signature, err := signer.Sign(msg) + require.NoError(t, err) + require.NotEmpty(t, signature) + }) + + t.Run("invalid key size", func(t *testing.T) { + signer := New(privateKey, "EdDSA", "key-1") + signer.privateKey = nil + + signature, err := signer.Sign(msg) + require.Error(t, err) + require.Nil(t, signature) + require.Contains(t, err.Error(), "invalid private key size") + }) +} + +func TestHeaders(t *testing.T) { + t.Run("success - kid, alg provided", func(t *testing.T) { + _, privateKey, err := ed25519.GenerateKey(rand.Reader) + require.NoError(t, err) + + signer := New(privateKey, "EdDSA", "key-1") + + // verify headers + kid, ok := signer.Headers().KeyID() + require.Equal(t, true, ok) + require.Equal(t, "key-1", kid) + + alg, ok := signer.Headers().Algorithm() + require.Equal(t, true, ok) + require.Equal(t, "EdDSA", alg) + }) + + t.Run("success - kid, alg not provided", func(t *testing.T) { + _, privateKey, err := ed25519.GenerateKey(rand.Reader) + require.NoError(t, err) + + signer := New(privateKey, "", "") + + // verify headers + kid, ok := signer.Headers().KeyID() + require.Equal(t, false, ok) + require.Empty(t, kid) + + alg, ok := signer.Headers().Algorithm() + require.Equal(t, false, ok) + require.Empty(t, alg) + }) +} diff --git a/pkg/util/json/json.go b/pkg/util/json/json.go new file mode 100644 index 0000000..a119fd0 --- /dev/null +++ b/pkg/util/json/json.go @@ -0,0 +1,98 @@ +/* +Copyright Gen Digital Inc. All Rights Reserved. + +SPDX-License-Identifier: Apache-2.0 +*/ + +package json + +import ( + "bytes" + "encoding/json" +) + +// MarshalCanonical marshals the object into a canonical JSON format. +func MarshalCanonical(v interface{}) ([]byte, error) { + b, err := json.Marshal(v) + if err != nil { + return nil, err + } + + return getCanonicalContent(b) +} + +// MarshalIndentCanonical is like MarshalCanonical but applies Indent to format the output. +// Each JSON element in the output will begin on a new line beginning with prefix +// followed by one or more copies of indent according to the indentation nesting. +func MarshalIndentCanonical(v interface{}, prefix, indent string) ([]byte, error) { + b, err := MarshalCanonical(v) + if err != nil { + return nil, err + } + var buf bytes.Buffer + err = json.Indent(&buf, b, prefix, indent) + if err != nil { + return nil, err + } + + return buf.Bytes(), nil +} + +// getCanonicalContent ensures that fields in the JSON doc are marshaled in a deterministic order. +func getCanonicalContent(content []byte) ([]byte, error) { + m, err := unmarshalJSONMap(content) + if err != nil { + a, e := unmarshalJSONArray(content) + if e != nil { + return nil, e + } + + // Re-marshal it in order to ensure that the JSON fields are marshaled in a deterministic order. + aBytes, e := marshalJSONArray(a) + if e != nil { + return nil, e + } + + return aBytes, nil + } + + // Re-marshal it in order to ensure that the JSON fields are marshaled in a deterministic order. + mBytes, err := marshalJSONMap(m) + if err != nil { + return nil, err + } + + return mBytes, nil +} + +// marshalJSONMap marshals a JSON map. This variable may be overridden by unit tests. 
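A short sketch of the guarantee this package provides: because maps are re-marshaled through encoding/json, which sorts map keys, the key order of the input JSON never affects the canonical output. The canonicaljson import alias is illustrative:

package main

import (
    "encoding/json"
    "fmt"

    canonicaljson "github.com/trustbloc/sidetree-go/pkg/util/json"
)

func main() {
    var m map[string]interface{}

    // Field order in the source document is not significant.
    _ = json.Unmarshal([]byte(`{"b": 1, "a": 2}`), &m)

    out, err := canonicaljson.MarshalCanonical(m)
    if err != nil {
        panic(err)
    }

    fmt.Println(string(out)) // {"a":2,"b":1}
}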
+var marshalJSONMap = func(m map[string]interface{}) ([]byte, error) { + return json.Marshal(&m) +} + +// unmarshalJSONMap unmarshals a JSON map from the given bytes. This variable may be overridden by unit tests. +var unmarshalJSONMap = func(bytes []byte) (map[string]interface{}, error) { + m := make(map[string]interface{}) + err := json.Unmarshal(bytes, &m) + if err != nil { + return nil, err + } + + return m, nil +} + +// unmarshalJSONArray unmarshals an array of JSON maps from the given bytes. This variable may be overridden by unit tests. +var unmarshalJSONArray = func(bytes []byte) ([]map[string]interface{}, error) { + var a []map[string]interface{} + err := json.Unmarshal(bytes, &a) + if err != nil { + return nil, err + } + + return a, nil +} + +// marshalJSONArray marshals an array of JSON maps. This variable may be overridden by unit tests. +var marshalJSONArray = func(a []map[string]interface{}) ([]byte, error) { + return json.Marshal(&a) +} diff --git a/pkg/util/json/json_test.go b/pkg/util/json/json_test.go new file mode 100644 index 0000000..9d93dd9 --- /dev/null +++ b/pkg/util/json/json_test.go @@ -0,0 +1,168 @@ +/* +Copyright Gen Digital Inc. All Rights Reserved. + +SPDX-License-Identifier: Apache-2.0 +*/ + +package json + +import ( + "encoding/json" + "errors" + "fmt" + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +type testData struct { + FieldC string + FieldB int + FieldA string +} + +func TestMarshalCanonical(t *testing.T) { + value1 := &testData{ + FieldC: "valueC_1", + FieldB: 100, + FieldA: "valueA_1", + } + value2 := &testData{ + FieldC: "valueC_2", + FieldB: 200, + FieldA: "valueA_2", + } + + t.Run("Struct", func(t *testing.T) { + v1, err := MarshalCanonical(value1) + require.NoError(t, err) + assert.NotNil(t, v1) + fmt.Printf("%s\n", v1) + + v := &testData{} + err = json.Unmarshal(v1, v) + require.NoError(t, err) + + require.Equal(t, value1, v) + }) + + t.Run("Array", func(t *testing.T) { + arr := []*testData{value1, value2} + v1, err := MarshalCanonical(arr) + require.NoError(t, err) + assert.NotNil(t, v1) + fmt.Printf("%s\n", v1) + + var v []*testData + err = json.Unmarshal(v1, &v) + require.NoError(t, err) + + require.Equal(t, arr, v) + }) + + t.Run("Marshal struct error", func(t *testing.T) { + reset := SetJSONMarshaler(func(map[string]interface{}) (bytes []byte, e error) { + return nil, errors.New("injected marshal error") + }) + defer reset() + + _, err := MarshalCanonical(value1) + require.Error(t, err) + }) + + t.Run("Unmarshal struct error", func(t *testing.T) { + reset := SetJSONUnmarshaler(func(bytes []byte) (map[string]interface{}, error) { + return nil, errors.New("injected marshal error") + }) + defer reset() + + _, err := MarshalCanonical(value1) + require.Error(t, err) + }) + + t.Run("Marshal array error", func(t *testing.T) { + reset := SetJSONArrayMarshaler(func([]map[string]interface{}) (bytes []byte, e error) { + return nil, errors.New("injected marshal error") + }) + defer reset() + + _, err := MarshalCanonical([]*testData{value1, value2}) + require.Error(t, err) + }) + + t.Run("Unmarshal array error", func(t *testing.T) { + reset := SetJSONArrayUnmarshaler(func(bytes []byte) ([]map[string]interface{}, error) { + return nil, errors.New("injected marshal error") + }) + defer reset() + + _, err := MarshalCanonical([]*testData{value1, value2}) + require.Error(t, err) + }) +} + +func TestMarshalIndentCanonical(t *testing.T) { + value1 := &testData{ + FieldC: "valueC_1", + FieldB: 100, + FieldA: 
"valueA_1", + } + + t.Run("Success", func(t *testing.T) { + v1, err := MarshalIndentCanonical(value1, "", " ") + require.NoError(t, err) + assert.NotNil(t, v1) + fmt.Printf("%s\n", v1) + }) + + t.Run("Marshal error", func(t *testing.T) { + reset := SetJSONMarshaler(func(m map[string]interface{}) (bytes []byte, e error) { + return nil, errors.New("injected marshal error") + }) + defer reset() + + _, err := MarshalIndentCanonical(value1, "", " ") + require.Error(t, err) + }) +} + +func TestGetCanonicalContent(t *testing.T) { + t.Run("Struct", func(t *testing.T) { + value1 := []byte(`{"field1":"value1","field2":"value2"}`) + value2 := []byte(`{"field2":"value2","field1":"value1"}`) + + v1, err := getCanonicalContent(value1) + require.NoError(t, err) + assert.NotNil(t, v1) + + v2, err := getCanonicalContent(value2) + require.NoError(t, err) + assert.Equal(t, v1, v2) + }) + + t.Run("Array", func(t *testing.T) { + value1 := []byte(`[{"field1":"value1_1","field2":"value2_1"},{"field1":"value1_2","field2":"value2_2"}]`) + value2 := []byte(`[{"field2":"value2_1","field1":"value1_1"},{"field2":"value2_2","field1":"value1_2"}]`) + + v1, err := getCanonicalContent(value1) + require.NoError(t, err) + assert.NotNil(t, v1) + + v2, err := getCanonicalContent(value2) + require.NoError(t, err) + assert.Equal(t, v1, v2) + }) + + t.Run("Marshal error", func(t *testing.T) { + value1 := []byte(`{"field1":"value1","field2":"value2"}`) + + reset := SetJSONMarshaler(func(m map[string]interface{}) (bytes []byte, e error) { + return nil, errors.New("injected marshal error") + }) + defer reset() + + _, err := getCanonicalContent(value1) + require.Error(t, err) + }) +} diff --git a/pkg/util/json/test_exports.go b/pkg/util/json/test_exports.go new file mode 100644 index 0000000..d168815 --- /dev/null +++ b/pkg/util/json/test_exports.go @@ -0,0 +1,53 @@ +//go:build testing + +/* +Copyright Gen Digital Inc. All Rights Reserved. + +SPDX-License-Identifier: Apache-2.0 +*/ + +package json + +// SetJSONMarshaler sets the JSON map marshaler for unit tests. +// Returns a function that resets the marshaler to the previous value. +func SetJSONMarshaler(marshaler func(m map[string]interface{}) ([]byte, error)) func() { + prevMarshaler := marshalJSONMap + marshalJSONMap = marshaler + + return func() { + marshalJSONMap = prevMarshaler + } +} + +// SetJSONUnmarshaler sets the JSON map unmarshaler for unit tests. +// Returns a function that resets the unmarshaler to the previous value. +func SetJSONUnmarshaler(unmarshaler func(bytes []byte) (map[string]interface{}, error)) func() { + prevUnmarshaler := unmarshalJSONMap + unmarshalJSONMap = unmarshaler + + return func() { + unmarshalJSONMap = prevUnmarshaler + } +} + +// SetJSONArrayMarshaler sets the JSON array marshaler for unit tests. +// Returns a function that resets the marshaler to the previous value. +func SetJSONArrayMarshaler(marshaler func(m []map[string]interface{}) ([]byte, error)) func() { + prevMarshaler := marshalJSONArray + marshalJSONArray = marshaler + + return func() { + marshalJSONArray = prevMarshaler + } +} + +// SetJSONArrayUnmarshaler sets the JSON array unmarshaler for unit tests. +// Returns a function that resets the unmarshaler to the previous value. 
+func SetJSONArrayUnmarshaler(unmarshaler func(bytes []byte) ([]map[string]interface{}, error)) func() {
+    prevUnmarshaler := unmarshalJSONArray
+    unmarshalJSONArray = unmarshaler
+
+    return func() {
+        unmarshalJSONArray = prevUnmarshaler
+    }
+}
diff --git a/pkg/util/pubkey/jwk.go b/pkg/util/pubkey/jwk.go
new file mode 100644
index 0000000..9a1231c
--- /dev/null
+++ b/pkg/util/pubkey/jwk.go
@@ -0,0 +1,65 @@
+/*
+Copyright Gen Digital Inc. All Rights Reserved.
+
+SPDX-License-Identifier: Apache-2.0
+*/
+
+package pubkey
+
+import (
+    "crypto/ecdsa"
+    "crypto/ed25519"
+    "errors"
+    "fmt"
+    "reflect"
+
+    "github.com/btcsuite/btcd/btcec"
+    gojose "github.com/square/go-jose/v3"
+    "github.com/square/go-jose/v3/json"
+
+    "github.com/trustbloc/sidetree-go/pkg/jws"
+    internal "github.com/trustbloc/sidetree-go/pkg/jwsutil"
+)
+
+const (
+    secp256k1Crv = "secp256k1"
+    secp256k1Kty = "EC"
+)
+
+// GetPublicKeyJWK returns the public key in JWK format.
+func GetPublicKeyJWK(pubKey interface{}) (*jws.JWK, error) {
+    internalJWK := internal.JWK{
+        JSONWebKey: gojose.JSONWebKey{Key: pubKey},
+    }
+
+    switch key := pubKey.(type) {
+    case ed25519.PublicKey:
+        // handled automatically by gojose
+    case *ecdsa.PublicKey:
+        ecdsaPubKey, ok := pubKey.(*ecdsa.PublicKey)
+        if !ok {
+            // check because linter complains; should never happen
+            return nil, errors.New("unexpected interface")
+        }
+        // use the internal JWK wrapper's marshal support since gojose doesn't handle the secp256k1 curve
+        if ecdsaPubKey.Curve == btcec.S256() {
+            internalJWK.Kty = secp256k1Kty
+            internalJWK.Crv = secp256k1Crv
+        }
+    default:
+        return nil, fmt.Errorf("unknown key type '%s'", reflect.TypeOf(key))
+    }
+
+    jsonJWK, err := internalJWK.MarshalJSON()
+    if err != nil {
+        return nil, err
+    }
+
+    var jwk jws.JWK
+    err = json.Unmarshal(jsonJWK, &jwk)
+    if err != nil {
+        return nil, err
+    }
+
+    return &jwk, nil
+}
diff --git a/pkg/util/pubkey/jwk_test.go b/pkg/util/pubkey/jwk_test.go
new file mode 100644
index 0000000..8846402
--- /dev/null
+++ b/pkg/util/pubkey/jwk_test.go
@@ -0,0 +1,78 @@
+/*
+Copyright Gen Digital Inc. All Rights Reserved.
+
+SPDX-License-Identifier: Apache-2.0
+*/
+
+package pubkey
+
+import (
+    "crypto/ecdsa"
+    "crypto/ed25519"
+    "crypto/elliptic"
+    "crypto/rand"
+    "testing"
+
+    "github.com/btcsuite/btcd/btcec"
+    "github.com/stretchr/testify/require"
+)
+
+func TestGetPublicKeyJWK(t *testing.T) {
+    t.Run("success EC P-256", func(t *testing.T) {
+        privateKey, err := ecdsa.GenerateKey(elliptic.P256(), rand.Reader)
+        require.NoError(t, err)
+
+        jwk, err := GetPublicKeyJWK(&privateKey.PublicKey)
+        require.NoError(t, err)
+        require.NotEmpty(t, jwk)
+        require.Equal(t, "P-256", jwk.Crv)
+        require.Equal(t, "EC", jwk.Kty)
+    })
+
+    t.Run("success EC secp256k1", func(t *testing.T) {
+        privateKey, err := ecdsa.GenerateKey(btcec.S256(), rand.Reader)
+        require.NoError(t, err)
+
+        jwk, err := GetPublicKeyJWK(&privateKey.PublicKey)
+        require.NoError(t, err)
+        require.NotEmpty(t, jwk)
+        require.Equal(t, "secp256k1", jwk.Crv)
+        require.Equal(t, "EC", jwk.Kty)
+    })
+
+    t.Run("success ED25519", func(t *testing.T) {
+        publicKey, _, err := ed25519.GenerateKey(rand.Reader)
+        require.NoError(t, err)
+
+        jwk, err := GetPublicKeyJWK(publicKey)
+        require.NoError(t, err)
+        require.NotEmpty(t, jwk)
+        require.Equal(t, "Ed25519", jwk.Crv)
+        require.Equal(t, "OKP", jwk.Kty)
+    })
+
+    t.Run("unknown key type", func(t *testing.T) {
+        _, privateKey, err := ed25519.GenerateKey(rand.Reader)
+        require.NoError(t, err)
+
+        jwk, err := GetPublicKeyJWK(privateKey)
+        require.Error(t, err)
+        require.Nil(t, jwk)
+        require.Contains(t, err.Error(), "unknown key type")
+    })
+    t.Run("marshal error", func(t *testing.T) {
+        privateKey, err := ecdsa.GenerateKey(elliptic.P256(), rand.Reader)
+        require.NoError(t, err)
+
+        privateKey.PublicKey = ecdsa.PublicKey{
+            Curve: nil,
+            X:     nil,
+            Y:     nil,
+        }
+
+        jwk, err := GetPublicKeyJWK(&privateKey.PublicKey)
+        require.Error(t, err)
+        require.Nil(t, jwk)
+        require.Contains(t, err.Error(), "invalid EC key")
+    })
+}
diff --git a/pkg/util/signutil/signature.go b/pkg/util/signutil/signature.go
new file mode 100644
index 0000000..5266abf
--- /dev/null
+++ b/pkg/util/signutil/signature.go
@@ -0,0 +1,50 @@
+/*
+Copyright Gen Digital Inc. All Rights Reserved.
+
+SPDX-License-Identifier: Apache-2.0
+*/
+
+package signutil
+
+import (
+    "errors"
+
+    "github.com/trustbloc/sidetree-go/pkg/canonicalizer"
+    "github.com/trustbloc/sidetree-go/pkg/jws"
+    internaljws "github.com/trustbloc/sidetree-go/pkg/jwsutil"
+)
+
+// Signer defines the JWS Signer interface that will be used to sign required data in Sidetree requests.
+type Signer interface {
+    // Sign signs data and returns the signature value.
+    Sign(data []byte) ([]byte, error)
+
+    // Headers provides required JWS protected headers. It provides information about signing key and algorithm.
+    Headers() jws.Headers
+}
+
+// SignModel signs the given model.
+func SignModel(model interface{}, signer Signer) (string, error) {
+    // canonicalize the model first so the signing input is deterministic
+    signedDataBytes, err := canonicalizer.MarshalCanonical(model)
+    if err != nil {
+        return "", err
+    }
+
+    return SignPayload(signedDataBytes, signer)
+}
+
+// SignPayload signs the given payload.
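A minimal sketch of signing a model end to end with the ecsigner from this patch; the model struct and its didSuffix field are illustrative:

package main

import (
    "crypto/ecdsa"
    "crypto/elliptic"
    "crypto/rand"
    "fmt"

    "github.com/trustbloc/sidetree-go/pkg/util/ecsigner"
    "github.com/trustbloc/sidetree-go/pkg/util/signutil"
)

func main() {
    privateKey, err := ecdsa.GenerateKey(elliptic.P256(), rand.Reader)
    if err != nil {
        panic(err)
    }

    signer := ecsigner.New(privateKey, "ES256", "key-1")

    // The model is canonicalized before signing, so field order
    // cannot change the signature input.
    model := struct {
        DidSuffix string `json:"didSuffix"`
    }{DidSuffix: "abc"}

    compactJWS, err := signutil.SignModel(model, signer)
    if err != nil {
        panic(err)
    }

    fmt.Println(compactJWS) // <protected>.<payload>.<signature>
}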
+func SignPayload(payload []byte, signer Signer) (string, error) { + alg, ok := signer.Headers().Algorithm() + if !ok || alg == "" { + return "", errors.New("signing algorithm is required") + } + + jwsSignature, err := internaljws.NewJWS(signer.Headers(), nil, payload, signer) + if err != nil { + return "", err + } + + return jwsSignature.SerializeCompact(false) +} diff --git a/pkg/util/signutil/signature_test.go b/pkg/util/signutil/signature_test.go new file mode 100644 index 0000000..df8860a --- /dev/null +++ b/pkg/util/signutil/signature_test.go @@ -0,0 +1,107 @@ +package signutil + +import ( + "crypto/ecdsa" + "crypto/elliptic" + "crypto/rand" + "errors" + "testing" + + "github.com/stretchr/testify/require" + + "github.com/trustbloc/sidetree-go/pkg/jws" + internal "github.com/trustbloc/sidetree-go/pkg/jwsutil" + "github.com/trustbloc/sidetree-go/pkg/util/ecsigner" + "github.com/trustbloc/sidetree-go/pkg/util/pubkey" +) + +func TestSignModel(t *testing.T) { + t.Run("marshal error", func(t *testing.T) { + ch := make(chan int) + request, err := SignModel(ch, nil) + require.Error(t, err) + require.Empty(t, request) + require.Contains(t, err.Error(), "unsupported type: chan int") + }) + t.Run("success", func(t *testing.T) { + privateKey, err := ecdsa.GenerateKey(elliptic.P256(), rand.Reader) + require.NoError(t, err) + + signer := ecsigner.New(privateKey, "ES256", "key-1") + + test := struct { + message string + }{ + message: "test", + } + + request, err := SignModel(test, signer) + require.NoError(t, err) + require.NotEmpty(t, request) + }) +} + +func TestSignPayload(t *testing.T) { + privateKey, err := ecdsa.GenerateKey(elliptic.P256(), rand.Reader) + require.NoError(t, err) + + jwk, err := pubkey.GetPublicKeyJWK(&privateKey.PublicKey) + require.NoError(t, err) + + t.Run("success", func(t *testing.T) { + signer := ecsigner.New(privateKey, "ES256", "key-1") + + message := []byte("test") + jwsSignature, err := SignPayload(message, signer) + require.NoError(t, err) + require.NotEmpty(t, jwsSignature) + + _, err = internal.VerifyJWS(jwsSignature, jwk) + require.NoError(t, err) + }) + t.Run("signing algorithm required", func(t *testing.T) { + signer := ecsigner.New(privateKey, "", "kid") + + jws, err := SignPayload([]byte("test"), signer) + require.Error(t, err) + require.Empty(t, jws) + require.Contains(t, err.Error(), "signing algorithm is required") + }) + t.Run("kid is required", func(t *testing.T) { + jws, err := SignPayload([]byte(""), NewMockSigner(errors.New("test error"), true)) + require.Error(t, err) + require.Empty(t, jws) + require.Contains(t, err.Error(), "test error") + }) +} + +// MockSigner implements signer interface. +type MockSigner struct { + Recovery bool + Err error +} + +// NewMockSigner creates new mock signer (default to recovery signer). +func NewMockSigner(err error, recovery bool) *MockSigner { + return &MockSigner{Err: err, Recovery: recovery} +} + +// Headers provides required JWS protected headers. It provides information about signing key and algorithm. +func (ms *MockSigner) Headers() jws.Headers { + headers := make(jws.Headers) + headers[jws.HeaderAlgorithm] = "alg" + if !ms.Recovery { + headers[jws.HeaderKeyID] = "kid" + } + + return headers +} + +// Sign signs msg and returns mock signature value. 
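The same flow sketched as a round trip, mirroring the success case in the test above: sign a payload, then verify it with jwsutil.VerifyJWS using the public key in JWK form:

package signutil

import (
    "crypto/ecdsa"
    "crypto/elliptic"
    "crypto/rand"
    "testing"

    "github.com/stretchr/testify/require"

    "github.com/trustbloc/sidetree-go/pkg/jwsutil"
    "github.com/trustbloc/sidetree-go/pkg/util/ecsigner"
    "github.com/trustbloc/sidetree-go/pkg/util/pubkey"
)

func TestSignAndVerifyRoundTrip(t *testing.T) {
    privateKey, err := ecdsa.GenerateKey(elliptic.P256(), rand.Reader)
    require.NoError(t, err)

    signer := ecsigner.New(privateKey, "ES256", "key-1")

    compactJWS, err := SignPayload([]byte("test"), signer)
    require.NoError(t, err)

    // Verification takes the public key in JWK form.
    jwk, err := pubkey.GetPublicKeyJWK(&privateKey.PublicKey)
    require.NoError(t, err)

    _, err = jwsutil.VerifyJWS(compactJWS, jwk)
    require.NoError(t, err)
}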
+func (ms *MockSigner) Sign(msg []byte) ([]byte, error) { + if ms.Err != nil { + return nil, ms.Err + } + + return []byte("signature"), nil +} diff --git a/pkg/versions/1_0/client/create.go b/pkg/versions/1_0/client/create.go new file mode 100644 index 0000000..f2fa4f7 --- /dev/null +++ b/pkg/versions/1_0/client/create.go @@ -0,0 +1,124 @@ +/* +Copyright Gen Digital Inc. All Rights Reserved. + +SPDX-License-Identifier: Apache-2.0 +*/ + +package client + +import ( + "errors" + "fmt" + + "github.com/multiformats/go-multihash" + + "github.com/trustbloc/sidetree-go/pkg/api/operation" + "github.com/trustbloc/sidetree-go/pkg/canonicalizer" + "github.com/trustbloc/sidetree-go/pkg/hashing" + "github.com/trustbloc/sidetree-go/pkg/patch" + "github.com/trustbloc/sidetree-go/pkg/versions/1_0/model" +) + +// CreateRequestInfo contains data for creating create payload. +type CreateRequestInfo struct { + + // opaque document content + // required + OpaqueDocument string + + // patches that will be used to create document + // required if opaque document is not specified + Patches []patch.Patch + + // the recovery commitment + // required + RecoveryCommitment string + + // the update commitment + // required + UpdateCommitment string + + // AnchorOrigin signifies the system(s) that know the most recent anchor for this DID (optional) + AnchorOrigin interface{} + + // Type signifies the type of entity a DID represents (optional) + Type string + + // latest hashing algorithm supported by protocol + MultihashCode uint +} + +// NewCreateRequest is utility function to create payload for 'create' request. +func NewCreateRequest(info *CreateRequestInfo) ([]byte, error) { + if err := validateCreateRequest(info); err != nil { + return nil, err + } + + patches, err := getPatches(info.OpaqueDocument, info.Patches) + if err != nil { + return nil, err + } + + delta := &model.DeltaModel{ + UpdateCommitment: info.UpdateCommitment, + Patches: patches, + } + + deltaHash, err := hashing.CalculateModelMultihash(delta, info.MultihashCode) + if err != nil { + return nil, err + } + + suffixData := &model.SuffixDataModel{ + DeltaHash: deltaHash, + RecoveryCommitment: info.RecoveryCommitment, + AnchorOrigin: info.AnchorOrigin, + Type: info.Type, + } + + schema := &model.CreateRequest{ + Operation: operation.TypeCreate, + Delta: delta, + SuffixData: suffixData, + } + + return canonicalizer.MarshalCanonical(schema) +} + +func getPatches(opaque string, patches []patch.Patch) ([]patch.Patch, error) { + if opaque != "" { + return patch.PatchesFromDocument(opaque) + } + + return patches, nil +} + +func validateCreateRequest(info *CreateRequestInfo) error { + if info.OpaqueDocument == "" && len(info.Patches) == 0 { + return errors.New("either opaque document or patches have to be supplied") + } + + if info.OpaqueDocument != "" && len(info.Patches) > 0 { + return errors.New("cannot provide both opaque document and patches") + } + + supported := multihash.ValidCode(uint64(info.MultihashCode)) + + if !supported { + return fmt.Errorf("multihash[%d] not supported", info.MultihashCode) + } + + if !hashing.IsComputedUsingMultihashAlgorithms(info.RecoveryCommitment, []uint{info.MultihashCode}) { + return errors.New("next recovery commitment is not computed with the specified hash algorithm") + } + + if !hashing.IsComputedUsingMultihashAlgorithms(info.UpdateCommitment, []uint{info.MultihashCode}) { + return errors.New("next update commitment is not computed with the specified hash algorithm") + } + + if info.RecoveryCommitment == 
info.UpdateCommitment { + return errors.New("recovery and update commitments cannot be equal, re-using public keys is not allowed") + } + + return nil +} diff --git a/pkg/versions/1_0/client/create_test.go b/pkg/versions/1_0/client/create_test.go new file mode 100644 index 0000000..23b8d51 --- /dev/null +++ b/pkg/versions/1_0/client/create_test.go @@ -0,0 +1,185 @@ +/* +Copyright Gen Digital Inc. All Rights Reserved. + +SPDX-License-Identifier: Apache-2.0 +*/ + +package client + +import ( + "crypto/ecdsa" + "crypto/elliptic" + "crypto/rand" + "encoding/json" + "testing" + + "github.com/stretchr/testify/require" + + "github.com/trustbloc/sidetree-go/pkg/commitment" + "github.com/trustbloc/sidetree-go/pkg/patch" + "github.com/trustbloc/sidetree-go/pkg/util/pubkey" + "github.com/trustbloc/sidetree-go/pkg/versions/1_0/model" +) + +const ( + didSuffix = "whatever" + opaqueDoc = "{}" + + signerErr = "signer error" + + sha2_256 = 18 +) + +func TestNewCreateRequest(t *testing.T) { + recoverPrivateKey, err := ecdsa.GenerateKey(elliptic.P256(), rand.Reader) + require.NoError(t, err) + + updatePrivateKey, err := ecdsa.GenerateKey(elliptic.P256(), rand.Reader) + require.NoError(t, err) + + recoverJWK, err := pubkey.GetPublicKeyJWK(&recoverPrivateKey.PublicKey) + require.NoError(t, err) + + updateJWK, err := pubkey.GetPublicKeyJWK(&updatePrivateKey.PublicKey) + require.NoError(t, err) + + recoveryCommitment, err := commitment.GetCommitment(recoverJWK, sha2_256) + require.NoError(t, err) + + updateCommitment, err := commitment.GetCommitment(updateJWK, sha2_256) + require.NoError(t, err) + + t.Run("missing opaque document or patches", func(t *testing.T) { + request, err := NewCreateRequest(&CreateRequestInfo{}) + require.Error(t, err) + require.Empty(t, request) + require.Contains(t, err.Error(), "either opaque document or patches have to be supplied") + }) + t.Run("cannot provide both opaque document and patches", func(t *testing.T) { + request, err := NewCreateRequest(&CreateRequestInfo{OpaqueDocument: "{}", Patches: []patch.Patch{{}}}) + require.Error(t, err) + require.Empty(t, request) + require.Contains(t, err.Error(), "cannot provide both opaque document and patches") + }) + t.Run("recovery commitment error", func(t *testing.T) { + request, err := NewCreateRequest(&CreateRequestInfo{OpaqueDocument: "{}", RecoveryCommitment: recoveryCommitment}) + require.Error(t, err) + require.Empty(t, request) + require.Contains(t, err.Error(), "recovery commitment is not computed with the specified hash algorithm") + }) + t.Run("update commitment error", func(t *testing.T) { + info := &CreateRequestInfo{ + OpaqueDocument: "{}", + RecoveryCommitment: recoveryCommitment, + MultihashCode: sha2_256, + } + + request, err := NewCreateRequest(info) + require.Error(t, err) + require.Empty(t, request) + require.Contains(t, err.Error(), "update commitment is not computed with the specified hash algorithm") + }) + t.Run("multihash not supported", func(t *testing.T) { + info := &CreateRequestInfo{ + OpaqueDocument: "{}", + MultihashCode: 55, + } + + request, err := NewCreateRequest(info) + require.Error(t, err) + require.Empty(t, request) + require.Contains(t, err.Error(), "multihash[55] not supported") + }) + t.Run("error - malformed opaque doc", func(t *testing.T) { + info := &CreateRequestInfo{ + OpaqueDocument: `{,}`, + RecoveryCommitment: recoveryCommitment, + UpdateCommitment: updateCommitment, + MultihashCode: sha2_256, + } + + request, err := NewCreateRequest(info) + require.Error(t, err) + require.Empty(t, request) 
+ require.Contains(t, err.Error(), "invalid character ','") + }) + + t.Run("error - update and recover commitment equal", func(t *testing.T) { + info := &CreateRequestInfo{ + OpaqueDocument: "{}", + RecoveryCommitment: recoveryCommitment, + UpdateCommitment: recoveryCommitment, + MultihashCode: sha2_256, + } + + request, err := NewCreateRequest(info) + require.Error(t, err) + require.Empty(t, request) + require.Contains(t, err.Error(), "recovery and update commitments cannot be equal, re-using public keys is not allowed") + }) + + t.Run("success - opaque document", func(t *testing.T) { + info := &CreateRequestInfo{ + OpaqueDocument: "{}", + RecoveryCommitment: recoveryCommitment, + UpdateCommitment: updateCommitment, + MultihashCode: sha2_256, + } + + request, err := NewCreateRequest(info) + require.NoError(t, err) + require.NotEmpty(t, request) + }) + + t.Run("success - patches", func(t *testing.T) { + p, err := patch.NewAddPublicKeysPatch(addKeys) + require.NoError(t, err) + + info := &CreateRequestInfo{ + Patches: []patch.Patch{p}, + RecoveryCommitment: recoveryCommitment, + UpdateCommitment: updateCommitment, + MultihashCode: sha2_256, + } + + request, err := NewCreateRequest(info) + require.NoError(t, err) + require.NotEmpty(t, request) + }) + + t.Run("success - optional params (entity type and anchor origin)", func(t *testing.T) { + p, err := patch.NewAddPublicKeysPatch(addKeys) + require.NoError(t, err) + + info := &CreateRequestInfo{ + Patches: []patch.Patch{p}, + RecoveryCommitment: recoveryCommitment, + UpdateCommitment: updateCommitment, + AnchorOrigin: "anchor-origin", + Type: "did-entity-type", + MultihashCode: sha2_256, + } + + bytes, err := NewCreateRequest(info) + require.NoError(t, err) + require.NotEmpty(t, bytes) + + var request model.CreateRequest + err = json.Unmarshal(bytes, &request) + require.NoError(t, err) + + require.Contains(t, request.SuffixData.AnchorOrigin, "anchor-origin") + require.Contains(t, request.SuffixData.Type, "did-entity-type") + }) +} + +const addKeys = `[{ + "id": "test", + "type": "JsonWebKey2020", + "purposes": ["authentication"], + "publicKeyJwk": { + "kty": "EC", + "crv": "P-256K", + "x": "PUymIqdtF_qxaAqPABSw-C-owT1KYYQbsMKFM-L9fJA" + } +}]` diff --git a/pkg/versions/1_0/client/deactivate.go b/pkg/versions/1_0/client/deactivate.go new file mode 100644 index 0000000..b20214f --- /dev/null +++ b/pkg/versions/1_0/client/deactivate.go @@ -0,0 +1,122 @@ +/* +Copyright Gen Digital Inc. All Rights Reserved. + +SPDX-License-Identifier: Apache-2.0 +*/ + +package client + +import ( + "errors" + "fmt" + + "github.com/trustbloc/sidetree-go/pkg/api/operation" + "github.com/trustbloc/sidetree-go/pkg/canonicalizer" + "github.com/trustbloc/sidetree-go/pkg/jws" + "github.com/trustbloc/sidetree-go/pkg/util/signutil" + "github.com/trustbloc/sidetree-go/pkg/versions/1_0/model" +) + +// Signer defines JWS Signer interface that will be used to sign required data in Sidetree request. +type Signer interface { + // Sign signs data and returns signature value + Sign(data []byte) ([]byte, error) + + // Headers provides required JWS protected headers. It provides information about signing key and algorithm. + Headers() jws.Headers +} + +// DeactivateRequestInfo is the information required to create deactivate request. 
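Putting the pieces together, a sketch of building a create request with this client package; the keys and opaque document are illustrative, 18 is the SHA2-256 multihash code used throughout the tests, and error handling is elided for brevity:

package main

import (
    "crypto/ecdsa"
    "crypto/elliptic"
    "crypto/rand"
    "fmt"

    "github.com/trustbloc/sidetree-go/pkg/commitment"
    "github.com/trustbloc/sidetree-go/pkg/util/pubkey"
    "github.com/trustbloc/sidetree-go/pkg/versions/1_0/client"
)

const sha2_256 = 18 // multihash code for SHA2-256

func main() {
    recoveryKey, _ := ecdsa.GenerateKey(elliptic.P256(), rand.Reader)
    updateKey, _ := ecdsa.GenerateKey(elliptic.P256(), rand.Reader)

    recoveryJWK, _ := pubkey.GetPublicKeyJWK(&recoveryKey.PublicKey)
    updateJWK, _ := pubkey.GetPublicKeyJWK(&updateKey.PublicKey)

    // Commitments must come from two distinct keys; equal commitments are rejected.
    recoveryCommitment, _ := commitment.GetCommitment(recoveryJWK, sha2_256)
    updateCommitment, _ := commitment.GetCommitment(updateJWK, sha2_256)

    request, err := client.NewCreateRequest(&client.CreateRequestInfo{
        OpaqueDocument:     "{}",
        RecoveryCommitment: recoveryCommitment,
        UpdateCommitment:   updateCommitment,
        MultihashCode:      sha2_256,
    })
    if err != nil {
        panic(err)
    }

    fmt.Printf("create request: %s\n", request)
}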
+type DeactivateRequestInfo struct { + + // DidSuffix is the suffix of the document to be deactivated + DidSuffix string + + // RecoveryKey is recovery key for current deactivate request + RecoveryKey *jws.JWK + + // Signer that will be used for signing specific subset of request data + // Signer for recover operation must be recovery key + Signer Signer + + // RevealValue is reveal value + RevealValue string + + // AnchorFrom defines earliest time for this operation. + AnchorFrom int64 + + // AnchorUntil defines expiry time for this operation. + AnchorUntil int64 +} + +// NewDeactivateRequest is utility function to create payload for 'deactivate' request. +func NewDeactivateRequest(info *DeactivateRequestInfo) ([]byte, error) { + if err := validateDeactivateRequest(info); err != nil { + return nil, err + } + + signedDataModel := model.DeactivateSignedDataModel{ + DidSuffix: info.DidSuffix, + RecoveryKey: info.RecoveryKey, + AnchorFrom: info.AnchorFrom, + AnchorUntil: info.AnchorUntil, + } + + signModel, err := signutil.SignModel(signedDataModel, info.Signer) + if err != nil { + return nil, err + } + + schema := &model.DeactivateRequest{ + Operation: operation.TypeDeactivate, + DidSuffix: info.DidSuffix, + RevealValue: info.RevealValue, + SignedData: signModel, + } + + return canonicalizer.MarshalCanonical(schema) +} + +func validateDeactivateRequest(info *DeactivateRequestInfo) error { + if info.DidSuffix == "" { + return errors.New("missing did unique suffix") + } + + if info.RevealValue == "" { + return errors.New("missing reveal value") + } + + return validateSigner(info.Signer) +} + +func validateSigner(signer Signer) error { + if signer == nil { + return errors.New("missing signer") + } + + if signer.Headers() == nil { + return errors.New("missing protected headers") + } + + alg, ok := signer.Headers().Algorithm() + if !ok { + return errors.New("algorithm must be present in the protected header") + } + + if alg == "" { + return errors.New("algorithm cannot be empty in the protected header") + } + + allowedHeaders := map[string]bool{ + jws.HeaderAlgorithm: true, + jws.HeaderKeyID: true, + } + + for h := range signer.Headers() { + if _, ok := allowedHeaders[h]; !ok { + return fmt.Errorf("header '%s' is not allowed in the protected headers", h) + } + } + + return nil +} diff --git a/pkg/versions/1_0/client/deactivate_test.go b/pkg/versions/1_0/client/deactivate_test.go new file mode 100644 index 0000000..bb1e6df --- /dev/null +++ b/pkg/versions/1_0/client/deactivate_test.go @@ -0,0 +1,171 @@ +/* +Copyright Gen Digital Inc. All Rights Reserved. 
+ +SPDX-License-Identifier: Apache-2.0 +*/ + +package client + +import ( + "crypto/ecdsa" + "crypto/elliptic" + "crypto/rand" + "errors" + "testing" + + "github.com/stretchr/testify/require" + + "github.com/trustbloc/sidetree-go/pkg/jws" + "github.com/trustbloc/sidetree-go/pkg/util/ecsigner" + "github.com/trustbloc/sidetree-go/pkg/util/pubkey" +) + +func TestNewDeactivateRequest(t *testing.T) { + t.Run("missing unique suffix", func(t *testing.T) { + info := &DeactivateRequestInfo{} + + request, err := NewDeactivateRequest(info) + require.Error(t, err) + require.Empty(t, request) + require.Contains(t, err.Error(), "missing did unique suffix") + }) + t.Run("missing reveal value", func(t *testing.T) { + info := &DeactivateRequestInfo{DidSuffix: "suffix"} + + request, err := NewDeactivateRequest(info) + require.Error(t, err) + require.Empty(t, request) + require.Contains(t, err.Error(), "missing reveal value") + }) + t.Run("signing error", func(t *testing.T) { + info := &DeactivateRequestInfo{ + DidSuffix: "whatever", + Signer: NewMockSigner(errors.New(signerErr)), + RevealValue: "reveal", + } + + request, err := NewDeactivateRequest(info) + require.Error(t, err) + require.Empty(t, request) + require.Contains(t, err.Error(), signerErr) + }) + t.Run("success", func(t *testing.T) { + privateKey, err := ecdsa.GenerateKey(elliptic.P256(), rand.Reader) + require.NoError(t, err) + + jwk, err := pubkey.GetPublicKeyJWK(&privateKey.PublicKey) + require.NoError(t, err) + + signer := ecsigner.New(privateKey, "ES256", "") + + info := &DeactivateRequestInfo{ + DidSuffix: "whatever", + Signer: signer, + RecoveryKey: jwk, + RevealValue: "reveal", + } + + request, err := NewDeactivateRequest(info) + require.NoError(t, err) + require.NotEmpty(t, request) + }) +} + +func TestValidateSigner(t *testing.T) { + privateKey, err := ecdsa.GenerateKey(elliptic.P256(), rand.Reader) + require.NoError(t, err) + + const testKid = "kid" + + t.Run("success - kid can be empty", func(t *testing.T) { + signer := ecsigner.New(privateKey, "alg", "") + + err := validateSigner(signer) + require.NoError(t, err) + }) + t.Run("success - kid can be provided", func(t *testing.T) { + signer := ecsigner.New(privateKey, "alg", testKid) + + err := validateSigner(signer) + require.NoError(t, err) + }) + t.Run("error - missing signer", func(t *testing.T) { + err := validateSigner(nil) + require.Error(t, err) + require.Contains(t, err.Error(), "missing signer") + }) + + t.Run("error - missing protected headers", func(t *testing.T) { + err := validateSigner(&MockSigner{}) + require.Error(t, err) + require.Contains(t, err.Error(), "missing protected headers") + }) + + t.Run("err - algorithm must be present in the protected header", func(t *testing.T) { + headers := make(jws.Headers) + + headers["kid"] = testKid + + signer := &MockSigner{MockHeaders: headers} + + err := validateSigner(signer) + require.Error(t, err) + require.Contains(t, err.Error(), "algorithm must be present in the protected header") + }) + + t.Run("err - algorithm cannot be empty", func(t *testing.T) { + headers := make(jws.Headers) + + headers["kid"] = testKid + headers["alg"] = "" + + signer := &MockSigner{MockHeaders: headers} + + err := validateSigner(signer) + require.Error(t, err) + require.Contains(t, err.Error(), "algorithm cannot be empty in the protected header") + }) + + t.Run("err - invalid protected header value", func(t *testing.T) { + headers := make(jws.Headers) + + headers["kid"] = "kid" + headers["alg"] = "alg" + headers["invalid"] = "value" + + signer := 
&MockSigner{MockHeaders: headers}
+
+        err := validateSigner(signer)
+        require.Error(t, err)
+        require.Contains(t, err.Error(), "header 'invalid' is not allowed in the protected headers")
+    })
+}
+
+// MockSigner implements signer interface.
+type MockSigner struct {
+    MockHeaders jws.Headers
+    Err         error
+}
+
+// NewMockSigner creates a new mock signer with default protected headers.
+func NewMockSigner(err error) *MockSigner {
+    headers := make(jws.Headers)
+    headers[jws.HeaderAlgorithm] = "alg"
+    headers[jws.HeaderKeyID] = "kid"
+
+    return &MockSigner{Err: err, MockHeaders: headers}
+}
+
+// Headers provides required JWS protected headers. It provides information about signing key and algorithm.
+func (ms *MockSigner) Headers() jws.Headers {
+    return ms.MockHeaders
+}
+
+// Sign signs msg and returns mock signature value.
+func (ms *MockSigner) Sign(msg []byte) ([]byte, error) {
+    if ms.Err != nil {
+        return nil, ms.Err
+    }
+
+    return []byte("signature"), nil
+}
diff --git a/pkg/versions/1_0/client/recover.go b/pkg/versions/1_0/client/recover.go
new file mode 100644
index 0000000..21d6457
--- /dev/null
+++ b/pkg/versions/1_0/client/recover.go
@@ -0,0 +1,160 @@
+/*
+Copyright Gen Digital Inc. All Rights Reserved.
+
+SPDX-License-Identifier: Apache-2.0
+*/
+
+package client
+
+import (
+    "errors"
+    "fmt"
+
+    "github.com/trustbloc/sidetree-go/pkg/api/operation"
+    "github.com/trustbloc/sidetree-go/pkg/canonicalizer"
+    "github.com/trustbloc/sidetree-go/pkg/commitment"
+    "github.com/trustbloc/sidetree-go/pkg/hashing"
+    "github.com/trustbloc/sidetree-go/pkg/jws"
+    "github.com/trustbloc/sidetree-go/pkg/patch"
+    "github.com/trustbloc/sidetree-go/pkg/util/signutil"
+    "github.com/trustbloc/sidetree-go/pkg/versions/1_0/model"
+)
+
+// RecoverRequestInfo is the information required to create a recover request.
+type RecoverRequestInfo struct {
+
+    // DidSuffix is the suffix of the document to be recovered
+    DidSuffix string
+
+    // RecoveryKey is the current recovery public key
+    RecoveryKey *jws.JWK
+
+    // OpaqueDocument is opaque content
+    OpaqueDocument string
+
+    // Patches that will be used to create the document
+    // required if opaque document is not specified
+    Patches []patch.Patch
+
+    // RecoveryCommitment is the recovery commitment to be used for the next recovery
+    RecoveryCommitment string
+
+    // UpdateCommitment is the update commitment to be used for the next update
+    UpdateCommitment string
+
+    // AnchorOrigin signifies the system(s) that know the most recent anchor for this DID (optional)
+    AnchorOrigin interface{}
+
+    // AnchorFrom defines the earliest time for this operation.
+    AnchorFrom int64
+
+    // AnchorUntil defines the expiry time for this operation.
+    AnchorUntil int64
+
+    // MultihashCode is the latest hashing algorithm supported by the protocol
+    MultihashCode uint
+
+    // Signer will be used for signing a specific subset of request data.
+    // The signer for a recover operation must use the recovery key.
+    Signer Signer
+
+    // RevealValue is the reveal value for this operation
+    RevealValue string
+}
+
+// NewRecoverRequest is a utility function to create the payload for a 'recover' request.
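A sketch of the corresponding recover request, continuing from the create sketch above; it rotates to new recovery/update keys, and it assumes the commitment package's GetRevealValue helper from pkg/commitment/hash.go (not shown in this excerpt) for computing the reveal value:

// Continuing the create sketch: recover must commit to NEW keys,
// since re-using the current recovery key's commitment is rejected.
newRecoveryKey, _ := ecdsa.GenerateKey(elliptic.P256(), rand.Reader)
newUpdateKey, _ := ecdsa.GenerateKey(elliptic.P256(), rand.Reader)

newRecoveryJWK, _ := pubkey.GetPublicKeyJWK(&newRecoveryKey.PublicKey)
newUpdateJWK, _ := pubkey.GetPublicKeyJWK(&newUpdateKey.PublicKey)

nextRecoveryCommitment, _ := commitment.GetCommitment(newRecoveryJWK, sha2_256)
nextUpdateCommitment, _ := commitment.GetCommitment(newUpdateJWK, sha2_256)

// The reveal value is derived from the CURRENT recovery key
// (assumed helper; see pkg/commitment/hash.go).
revealValue, _ := commitment.GetRevealValue(recoveryJWK, sha2_256)

// The request must be signed with the current recovery key.
signer := ecsigner.New(recoveryKey, "ES256", "")

request, err := client.NewRecoverRequest(&client.RecoverRequestInfo{
    DidSuffix:          "someSuffix", // placeholder suffix
    RevealValue:        revealValue,
    OpaqueDocument:     "{}",
    RecoveryKey:        recoveryJWK,
    RecoveryCommitment: nextRecoveryCommitment,
    UpdateCommitment:   nextUpdateCommitment,
    MultihashCode:      sha2_256,
    Signer:             signer,
})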
+func NewRecoverRequest(info *RecoverRequestInfo) ([]byte, error) { + err := validateRecoverRequest(info) + if err != nil { + return nil, err + } + + patches, err := getPatches(info.OpaqueDocument, info.Patches) + if err != nil { + return nil, err + } + + delta := &model.DeltaModel{ + UpdateCommitment: info.UpdateCommitment, + Patches: patches, + } + + deltaHash, err := hashing.CalculateModelMultihash(delta, info.MultihashCode) + if err != nil { + return nil, err + } + + signedDataModel := model.RecoverSignedDataModel{ + DeltaHash: deltaHash, + RecoveryKey: info.RecoveryKey, + RecoveryCommitment: info.RecoveryCommitment, + AnchorOrigin: info.AnchorOrigin, + AnchorFrom: info.AnchorFrom, + AnchorUntil: info.AnchorUntil, + } + + err = validateCommitment(info.RecoveryKey, info.MultihashCode, info.RecoveryCommitment) + if err != nil { + return nil, err + } + + signModel, err := signutil.SignModel(signedDataModel, info.Signer) + if err != nil { + return nil, err + } + + schema := &model.RecoverRequest{ + Operation: operation.TypeRecover, + DidSuffix: info.DidSuffix, + RevealValue: info.RevealValue, + Delta: delta, + SignedData: signModel, + } + + return canonicalizer.MarshalCanonical(schema) +} + +func validateRecoverRequest(info *RecoverRequestInfo) error { + if info.DidSuffix == "" { + return errors.New("missing did unique suffix") + } + + if info.RevealValue == "" { + return errors.New("missing reveal value") + } + + if info.OpaqueDocument == "" && len(info.Patches) == 0 { + return errors.New("either opaque document or patches have to be supplied") + } + + if info.OpaqueDocument != "" && len(info.Patches) > 0 { + return errors.New("cannot provide both opaque document and patches") + } + + if err := validateSigner(info.Signer); err != nil { + return err + } + + return validateRecoveryKey(info.RecoveryKey) +} + +func validateRecoveryKey(key *jws.JWK) error { + if key == nil { + return errors.New("missing recovery key") + } + + return key.Validate() +} + +func validateCommitment(jwk *jws.JWK, multihashCode uint, nextCommitment string) error { + currentCommitment, err := commitment.GetCommitment(jwk, multihashCode) + if err != nil { + return fmt.Errorf("calculate current commitment: %s", err.Error()) + } + + if currentCommitment == nextCommitment { + return errors.New("re-using public keys for commitment is not allowed") + } + + return nil +} diff --git a/pkg/versions/1_0/client/recover_test.go b/pkg/versions/1_0/client/recover_test.go new file mode 100644 index 0000000..770139d --- /dev/null +++ b/pkg/versions/1_0/client/recover_test.go @@ -0,0 +1,205 @@ +/* +Copyright Gen Digital Inc. All Rights Reserved. 
+ +SPDX-License-Identifier: Apache-2.0 +*/ + +package client + +import ( + "crypto/ecdsa" + "crypto/elliptic" + "crypto/rand" + "encoding/json" + "errors" + "testing" + + "github.com/stretchr/testify/require" + + "github.com/trustbloc/sidetree-go/pkg/commitment" + internaljws "github.com/trustbloc/sidetree-go/pkg/jwsutil" + "github.com/trustbloc/sidetree-go/pkg/patch" + "github.com/trustbloc/sidetree-go/pkg/util/ecsigner" + "github.com/trustbloc/sidetree-go/pkg/util/pubkey" + "github.com/trustbloc/sidetree-go/pkg/versions/1_0/model" +) + +func TestNewRecoverRequest(t *testing.T) { + t.Run("missing unique suffix", func(t *testing.T) { + info := getRecoverRequestInfo() + info.DidSuffix = "" + + request, err := NewRecoverRequest(info) + require.Error(t, err) + require.Empty(t, request) + require.Contains(t, err.Error(), "missing did unique suffix") + }) + t.Run("missing reveal value", func(t *testing.T) { + info := getRecoverRequestInfo() + info.RevealValue = "" + + request, err := NewRecoverRequest(info) + require.Error(t, err) + require.Empty(t, request) + require.Contains(t, err.Error(), "missing reveal value") + }) + t.Run("missing opaque document", func(t *testing.T) { + info := getRecoverRequestInfo() + info.OpaqueDocument = "" + + request, err := NewRecoverRequest(info) + require.Error(t, err) + require.Empty(t, request) + require.Contains(t, err.Error(), "either opaque document or patches have to be supplied") + }) + t.Run("cannot provide both opaque document and patches", func(t *testing.T) { + info := getRecoverRequestInfo() + info.Patches = []patch.Patch{{}} + + request, err := NewRecoverRequest(info) + require.Error(t, err) + require.Empty(t, request) + require.Contains(t, err.Error(), "cannot provide both opaque document and patches") + }) + t.Run("missing recovery key", func(t *testing.T) { + info := getRecoverRequestInfo() + info.RecoveryKey = nil + + request, err := NewRecoverRequest(info) + require.Error(t, err) + require.Empty(t, request) + require.Contains(t, err.Error(), "missing recovery key") + }) + t.Run("missing signer", func(t *testing.T) { + info := getRecoverRequestInfo() + info.Signer = nil + + request, err := NewRecoverRequest(info) + require.Error(t, err) + require.Empty(t, request) + require.Contains(t, err.Error(), "missing signer") + }) + t.Run("multihash not supported", func(t *testing.T) { + info := getRecoverRequestInfo() + info.MultihashCode = 55 + + request, err := NewRecoverRequest(info) + require.Error(t, err) + require.Empty(t, request) + require.Contains(t, err.Error(), "algorithm not supported") + }) + t.Run("signing error", func(t *testing.T) { + info := getRecoverRequestInfo() + info.Signer = NewMockSigner(errors.New(signerErr)) + + request, err := NewRecoverRequest(info) + require.Error(t, err) + require.Empty(t, request) + require.Contains(t, err.Error(), signerErr) + }) + t.Run("error - malformed opaque doc", func(t *testing.T) { + info := getRecoverRequestInfo() + info.OpaqueDocument = "{,}" + + request, err := NewRecoverRequest(info) + require.Error(t, err) + require.Empty(t, request) + require.Contains(t, err.Error(), "invalid character ','") + }) + + t.Run("error - re-using public keys for commitment is not allowed", func(t *testing.T) { + info := getRecoverRequestInfo() + + currentCommitment, err := commitment.GetCommitment(info.RecoveryKey, info.MultihashCode) + require.NoError(t, err) + + info.RecoveryCommitment = currentCommitment + + request, err := NewRecoverRequest(info) + require.Error(t, err) + require.Empty(t, request) + 
require.Contains(t, err.Error(), "re-using public keys for commitment is not allowed") + }) + + t.Run("success - opaque document", func(t *testing.T) { + info := getRecoverRequestInfo() + + bytes, err := NewRecoverRequest(info) + require.NoError(t, err) + require.NotEmpty(t, bytes) + + var request map[string]interface{} + err = json.Unmarshal(bytes, &request) + require.NoError(t, err) + + require.Equal(t, "recover", request["type"]) + require.Equal(t, didSuffix, request["didSuffix"]) + }) + + t.Run("success - json patches", func(t *testing.T) { + p, err := patch.NewAddPublicKeysPatch(addKeys) + require.NoError(t, err) + + // default request info is constructed with opaque document; switch to patches + info := getRecoverRequestInfo() + info.OpaqueDocument = "" + info.Patches = []patch.Patch{p} + + bytes, err := NewRecoverRequest(info) + require.NoError(t, err) + require.NotEmpty(t, bytes) + + var request map[string]interface{} + err = json.Unmarshal(bytes, &request) + require.NoError(t, err) + + require.Equal(t, "recover", request["type"]) + require.Equal(t, didSuffix, request["didSuffix"]) + }) + + t.Run("success - optional params (anchor origin)", func(t *testing.T) { + info := getRecoverRequestInfo() + info.AnchorOrigin = "test-anchor-origin" + + bytes, err := NewRecoverRequest(info) + require.NoError(t, err) + require.NotEmpty(t, bytes) + + var request map[string]interface{} + err = json.Unmarshal(bytes, &request) + require.NoError(t, err) + + jws, ok := request["signedData"] + require.True(t, ok) + + signedData, err := internaljws.ParseJWS(jws.(string)) + require.NoError(t, err) + + var signedModel model.RecoverSignedDataModel + err = json.Unmarshal(signedData.Payload, &signedModel) + require.NoError(t, err) + + require.Equal(t, "test-anchor-origin", signedModel.AnchorOrigin) + }) +} + +func getRecoverRequestInfo() *RecoverRequestInfo { + privKey, err := ecdsa.GenerateKey(elliptic.P256(), rand.Reader) + if err != nil { + panic(err) + } + + jwk, err := pubkey.GetPublicKeyJWK(&privKey.PublicKey) + if err != nil { + panic(err) + } + + return &RecoverRequestInfo{ + DidSuffix: didSuffix, + OpaqueDocument: opaqueDoc, + RecoveryKey: jwk, + MultihashCode: sha2_256, + Signer: ecsigner.New(privKey, "ES256", ""), + RevealValue: "reveal", + } +} diff --git a/pkg/versions/1_0/client/update.go b/pkg/versions/1_0/client/update.go new file mode 100644 index 0000000..f4137ed --- /dev/null +++ b/pkg/versions/1_0/client/update.go @@ -0,0 +1,122 @@ +/* +Copyright Gen Digital Inc. All Rights Reserved. + +SPDX-License-Identifier: Apache-2.0 +*/ + +package client + +import ( + "errors" + + "github.com/trustbloc/sidetree-go/pkg/api/operation" + "github.com/trustbloc/sidetree-go/pkg/canonicalizer" + "github.com/trustbloc/sidetree-go/pkg/hashing" + "github.com/trustbloc/sidetree-go/pkg/jws" + "github.com/trustbloc/sidetree-go/pkg/patch" + "github.com/trustbloc/sidetree-go/pkg/util/signutil" + "github.com/trustbloc/sidetree-go/pkg/versions/1_0/model" +) + +// UpdateRequestInfo is the information required to create update request. 
+type UpdateRequestInfo struct {
+
+    // DidSuffix is the suffix of the document to be updated.
+    DidSuffix string
+
+    // Patches is an array of standard patch actions.
+    Patches []patch.Patch
+
+    // UpdateCommitment is the update commitment to be used for the next update.
+    UpdateCommitment string
+
+    // UpdateKey is the update key to be used for this update.
+    UpdateKey *jws.JWK
+
+    // MultihashCode is the latest hashing algorithm supported by the protocol.
+    MultihashCode uint
+
+    // Signer will be used for signing a request-specific subset of data.
+    Signer Signer
+
+    // RevealValue is the reveal value.
+    RevealValue string
+
+    // AnchorFrom defines the earliest time for this operation.
+    AnchorFrom int64
+
+    // AnchorUntil defines the expiry time for this operation.
+    AnchorUntil int64
+}
+
+// NewUpdateRequest is a utility function to create the payload for an 'update' request.
+func NewUpdateRequest(info *UpdateRequestInfo) ([]byte, error) {
+    if err := validateUpdateRequest(info); err != nil {
+        return nil, err
+    }
+
+    delta := &model.DeltaModel{
+        UpdateCommitment: info.UpdateCommitment,
+        Patches:          info.Patches,
+    }
+
+    deltaHash, err := hashing.CalculateModelMultihash(delta, info.MultihashCode)
+    if err != nil {
+        return nil, err
+    }
+
+    signedDataModel := &model.UpdateSignedDataModel{
+        DeltaHash:   deltaHash,
+        UpdateKey:   info.UpdateKey,
+        AnchorFrom:  info.AnchorFrom,
+        AnchorUntil: info.AnchorUntil,
+    }
+
+    err = validateCommitment(info.UpdateKey, info.MultihashCode, info.UpdateCommitment)
+    if err != nil {
+        return nil, err
+    }
+
+    signModel, err := signutil.SignModel(signedDataModel, info.Signer)
+    if err != nil {
+        return nil, err
+    }
+
+    schema := &model.UpdateRequest{
+        Operation:   operation.TypeUpdate,
+        DidSuffix:   info.DidSuffix,
+        RevealValue: info.RevealValue,
+        Delta:       delta,
+        SignedData:  signModel,
+    }
+
+    return canonicalizer.MarshalCanonical(schema)
+}
+
+func validateUpdateRequest(info *UpdateRequestInfo) error {
+    if info.DidSuffix == "" {
+        return errors.New("missing did unique suffix")
+    }
+
+    if info.RevealValue == "" {
+        return errors.New("missing reveal value")
+    }
+
+    if len(info.Patches) == 0 {
+        return errors.New("missing update information")
+    }
+
+    if err := validateUpdateKey(info.UpdateKey); err != nil {
+        return err
+    }
+
+    return validateSigner(info.Signer)
+}
+
+func validateUpdateKey(key *jws.JWK) error {
+    if key == nil {
+        return errors.New("missing update key")
+    }
+
+    return key.Validate()
+}
diff --git a/pkg/versions/1_0/client/update_test.go b/pkg/versions/1_0/client/update_test.go
new file mode 100644
index 0000000..bab9aa3
--- /dev/null
+++ b/pkg/versions/1_0/client/update_test.go
@@ -0,0 +1,178 @@
+/*
+Copyright Gen Digital Inc. All Rights Reserved.
+ +SPDX-License-Identifier: Apache-2.0 +*/ + +package client + +import ( + "crypto/ecdsa" + "crypto/elliptic" + "crypto/rand" + "errors" + "testing" + + "github.com/stretchr/testify/require" + + "github.com/trustbloc/sidetree-go/pkg/commitment" + "github.com/trustbloc/sidetree-go/pkg/jws" + "github.com/trustbloc/sidetree-go/pkg/patch" + "github.com/trustbloc/sidetree-go/pkg/util/ecsigner" +) + +func TestNewUpdateRequest(t *testing.T) { + const didSuffix = "whatever" + + patches, err := getTestPatches() + require.NoError(t, err) + + updateJWK := &jws.JWK{ + Crv: "crv", + Kty: "kty", + X: "x", + } + + signer := NewMockSigner(nil) + + t.Run("missing unique suffix", func(t *testing.T) { + info := &UpdateRequestInfo{} + + request, err := NewUpdateRequest(info) + require.Error(t, err) + require.Empty(t, request) + require.Contains(t, err.Error(), "missing did unique suffix") + }) + t.Run("missing reveal value", func(t *testing.T) { + info := &UpdateRequestInfo{DidSuffix: didSuffix} + + request, err := NewUpdateRequest(info) + require.Error(t, err) + require.Empty(t, request) + require.Contains(t, err.Error(), "missing reveal value") + }) + t.Run("missing json patch", func(t *testing.T) { + info := &UpdateRequestInfo{DidSuffix: didSuffix, RevealValue: "reveal"} + + request, err := NewUpdateRequest(info) + require.Error(t, err) + require.Empty(t, request) + require.Contains(t, err.Error(), "missing update information") + }) + t.Run("multihash not supported", func(t *testing.T) { + info := &UpdateRequestInfo{ + DidSuffix: didSuffix, + Patches: patches, + UpdateKey: updateJWK, + Signer: signer, + RevealValue: "reveal", + } + + request, err := NewUpdateRequest(info) + require.Error(t, err) + require.Empty(t, request) + require.Contains(t, err.Error(), "algorithm not supported") + }) + t.Run("missing update key", func(t *testing.T) { + signer = NewMockSigner(nil) + signer.MockHeaders = make(jws.Headers) + + info := &UpdateRequestInfo{ + DidSuffix: didSuffix, + Patches: patches, + MultihashCode: sha2_256, + Signer: signer, + RevealValue: "reveal", + } + + request, err := NewUpdateRequest(info) + require.Error(t, err) + require.Empty(t, request) + require.Contains(t, err.Error(), "missing update key") + }) + t.Run("algorithm must be present in the protected header", func(t *testing.T) { + signer = NewMockSigner(nil) + signer.MockHeaders = make(jws.Headers) + + info := &UpdateRequestInfo{ + DidSuffix: didSuffix, + Patches: patches, + MultihashCode: sha2_256, + UpdateKey: updateJWK, + Signer: signer, + RevealValue: "reveal", + } + + request, err := NewUpdateRequest(info) + require.Error(t, err) + require.Empty(t, request) + require.Contains(t, err.Error(), "algorithm must be present in the protected header") + }) + t.Run("signing error", func(t *testing.T) { + info := &UpdateRequestInfo{ + DidSuffix: didSuffix, + Patches: patches, + MultihashCode: sha2_256, + UpdateKey: updateJWK, + Signer: NewMockSigner(errors.New(signerErr)), + RevealValue: "reveal", + } + + request, err := NewUpdateRequest(info) + require.Error(t, err) + require.Empty(t, request) + require.Contains(t, err.Error(), signerErr) + }) + t.Run("error - re-using public keys for commitment is not allowed", func(t *testing.T) { + privateKey, err := ecdsa.GenerateKey(elliptic.P256(), rand.Reader) + require.NoError(t, err) + + signer := ecsigner.New(privateKey, "ES256", "key-1") + + currentCommitment, err := commitment.GetCommitment(updateJWK, sha2_256) + require.NoError(t, err) + + info := &UpdateRequestInfo{ + DidSuffix: didSuffix, + Patches: 
patches, + MultihashCode: sha2_256, + UpdateKey: updateJWK, + UpdateCommitment: currentCommitment, + Signer: signer, + RevealValue: "reveal", + } + + request, err := NewUpdateRequest(info) + require.Error(t, err) + require.Empty(t, request) + require.Contains(t, err.Error(), "re-using public keys for commitment is not allowed") + }) + t.Run("success", func(t *testing.T) { + privateKey, err := ecdsa.GenerateKey(elliptic.P256(), rand.Reader) + require.NoError(t, err) + + signer := ecsigner.New(privateKey, "ES256", "key-1") + + info := &UpdateRequestInfo{ + DidSuffix: didSuffix, + Patches: patches, + MultihashCode: sha2_256, + UpdateKey: updateJWK, + Signer: signer, + RevealValue: "reveal", + } + + request, err := NewUpdateRequest(info) + require.NoError(t, err) + require.NotEmpty(t, request) + }) +} + +func getTestPatches() ([]patch.Patch, error) { + p, err := patch.NewJSONPatch(`[{"op": "replace", "path": "/name", "value": "Jane"}]`) + if err != nil { + return nil, err + } + + return []patch.Patch{p}, nil +} diff --git a/pkg/versions/1_0/doccomposer/composer.go b/pkg/versions/1_0/doccomposer/composer.go new file mode 100644 index 0000000..589daee --- /dev/null +++ b/pkg/versions/1_0/doccomposer/composer.go @@ -0,0 +1,359 @@ +/* +Copyright Gen Digital Inc. All Rights Reserved. + +SPDX-License-Identifier: Apache-2.0 +*/ + +package doccomposer + +import ( + "encoding/json" + "fmt" + + jsonpatch "github.com/evanphx/json-patch" + + "github.com/trustbloc/logutil-go/pkg/log" + + "github.com/trustbloc/sidetree-go/pkg/document" + logfields "github.com/trustbloc/sidetree-go/pkg/internal/log" + "github.com/trustbloc/sidetree-go/pkg/patch" +) + +var logger = log.New("sidetree-core-composer") + +// DocumentComposer applies patches to the document. +type DocumentComposer struct { +} + +// New creates new document composer. +func New() *DocumentComposer { + return &DocumentComposer{} +} + +// ApplyPatches applies patches to the document. +func (c *DocumentComposer) ApplyPatches(doc document.Document, patches []patch.Patch) (document.Document, error) { + result, err := deepCopy(doc) + if err != nil { + return nil, err + } + + for _, p := range patches { + result, err = applyPatch(result, p) + if err != nil { + return nil, err + } + } + + return result, nil +} + +// applyPatch applies a patch to the document. 
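+// It dispatches on the patch action. An end-to-end sketch via the exported
+// entry point (illustrative):
+//
+//	p, _ := patch.NewJSONPatch(`[{"op": "replace", "path": "/name", "value": "Jane"}]`)
+//	updated, err := New().ApplyPatches(doc, []patch.Patch{p})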
+func applyPatch(doc document.Document, p patch.Patch) (document.Document, error) { + action, err := p.GetAction() + if err != nil { + return nil, err + } + + value, err := p.GetValue() + if err != nil { + return nil, err + } + + switch action { + case patch.Replace: + return applyRecover(value) + case patch.JSONPatch: + return applyJSON(doc, value) + case patch.AddPublicKeys: + return applyAddPublicKeys(doc, value) + case patch.RemovePublicKeys: + return applyRemovePublicKeys(doc, value) + case patch.AddServiceEndpoints: + return applyAddServiceEndpoints(doc, value) + case patch.RemoveServiceEndpoints: + return applyRemoveServiceEndpoints(doc, value) + case patch.AddAlsoKnownAs: + return applyAddAlsoKnownAs(doc, value) + case patch.RemoveAlsoKnownAs: + return applyRemoveAlsoKnownAs(doc, value) + } + + return nil, fmt.Errorf("action '%s' is not supported", action) +} + +func applyJSON(doc document.Document, entry interface{}) (document.Document, error) { + logger.Debug("Applying JSON patch", logfields.WithPatch(entry)) + + bytes, err := json.Marshal(entry) + if err != nil { + return nil, err + } + + jsonPatches, err := jsonpatch.DecodePatch(bytes) + if err != nil { + return nil, err + } + + docBytes, err := doc.Bytes() + if err != nil { + return nil, err + } + + docBytes, err = jsonPatches.Apply(docBytes) + if err != nil { + return nil, err + } + + return document.FromBytes(docBytes) +} + +func applyRecover(replaceDoc interface{}) (document.Document, error) { + logger.Debug("Applying replace patch", logfields.WithPatch(replaceDoc)) + + docBytes, err := json.Marshal(replaceDoc) + if err != nil { + return nil, err + } + + replace, err := document.ReplaceDocumentFromBytes(docBytes) + if err != nil { + return nil, err + } + + doc := make(document.Document) + doc[document.PublicKeyProperty] = replace[document.ReplacePublicKeyProperty] + doc[document.ServiceProperty] = replace[document.ReplaceServiceProperty] + + return doc, nil +} + +// adds public keys to document. +func applyAddPublicKeys(doc document.Document, entry interface{}) (document.Document, error) { + logger.Debug("Applying add public keys patch", logfields.WithPatch(entry)) + + addPublicKeys := document.ParsePublicKeys(entry) + existingPublicKeysMap := sliceToMapPK(doc.PublicKeys()) + + var newPublicKeys []document.PublicKey + newPublicKeys = append(newPublicKeys, doc.PublicKeys()...) + + for _, key := range addPublicKeys { + _, ok := existingPublicKeysMap[key.ID()] + if ok { + // if a key ID already exists, we will just replace the existing key + updateKey(newPublicKeys, key) + } else { + // new key - append it to existing keys + newPublicKeys = append(newPublicKeys, key) + } + } + + doc[document.PublicKeyProperty] = convertPublicKeys(newPublicKeys) + + return doc, nil +} + +func updateKey(keys []document.PublicKey, key document.PublicKey) { + for index, pk := range keys { + if pk.ID() == key.ID() { + keys[index] = key + } + } +} + +func convertPublicKeys(pubKeys []document.PublicKey) []interface{} { + var values []interface{} + for _, pk := range pubKeys { + values = append(values, pk.JSONLdObject()) + } + + return values +} + +// remove public keys from the document. 
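+// The patch value is expected to be an array of key IDs, e.g. (illustrative):
+//
+//	["key1", "key2"]
+//
+// IDs that are not present in the document are ignored.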
+func applyRemovePublicKeys(doc document.Document, entry interface{}) (document.Document, error) { + logger.Debug("Applying remove public keys patch", logfields.WithPatch(entry)) + + keysToRemove := sliceToMap(document.StringArray(entry)) + + var newPublicKeys []interface{} + + for _, key := range doc.PublicKeys() { + _, ok := keysToRemove[key.ID()] + if !ok { + // not in remove list so add to resulting public keys + newPublicKeys = append(newPublicKeys, key.JSONLdObject()) + } + } + + doc[document.PublicKeyProperty] = newPublicKeys + + return doc, nil +} + +func sliceToMap(ids []string) map[string]bool { + // convert slice to map + values := make(map[string]bool) + for _, id := range ids { + values[id] = true + } + + return values +} + +func sliceToMapPK(publicKeys []document.PublicKey) map[string]document.PublicKey { + // convert slice to map + values := make(map[string]document.PublicKey) + for _, pk := range publicKeys { + values[pk.ID()] = pk + } + + return values +} + +// adds service endpoints to document. +func applyAddServiceEndpoints(doc document.Document, entry interface{}) (document.Document, error) { + logger.Debug("Applying add service endpoints patch", logfields.WithPatch(entry)) + + didDoc := document.DidDocumentFromJSONLDObject(doc.JSONLdObject()) + + addServices := document.ParseServices(entry) + existingServicesMap := sliceToMapServices(didDoc.Services()) + + var newServices []document.Service + newServices = append(newServices, didDoc.Services()...) + + for _, service := range addServices { + _, ok := existingServicesMap[service.ID()] + if ok { + // if a service ID already exists, we will just replace the existing service + updateService(newServices, service) + } else { + // new service - append it to existing services + newServices = append(newServices, service) + } + } + + doc[document.ServiceProperty] = convertServices(newServices) + + return doc, nil +} + +func updateService(services []document.Service, service document.Service) { + for index, s := range services { + if s.ID() == service.ID() { + services[index] = service + } + } +} + +func convertServices(services []document.Service) []interface{} { + var values []interface{} + for _, service := range services { + values = append(values, service.JSONLdObject()) + } + + return values +} + +func applyRemoveServiceEndpoints(doc document.Document, entry interface{}) (document.Document, error) { + logger.Debug("Applying remove service endpoints patch", logfields.WithPatch(entry)) + + didDoc := document.DidDocumentFromJSONLDObject(doc.JSONLdObject()) + servicesToRemove := sliceToMap(document.StringArray(entry)) + + var newServices []interface{} + + for _, service := range didDoc.Services() { + _, ok := servicesToRemove[service.ID()] + if !ok { + // not in remove list so add to resulting services + newServices = append(newServices, service.JSONLdObject()) + } + } + + doc[document.ServiceProperty] = newServices + + return doc, nil +} + +func sliceToMapServices(services []document.Service) map[string]document.Service { + // convert slice to map + values := make(map[string]document.Service) + for _, svc := range services { + values[svc.ID()] = svc + } + + return values +} + +// adds also-known-as to document. 
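+// The patch value is expected to be an array of URI strings, e.g. (illustrative):
+//
+//	["https://myblog.example/"]
+//
+// URIs already present in the document are skipped, so applying the same
+// patch twice leaves alsoKnownAs unchanged.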
+func applyAddAlsoKnownAs(doc document.Document, entry interface{}) (document.Document, error) {
+    logger.Debug("Applying add also-known-as patch", logfields.WithPatch(entry))
+
+    didDoc := document.DidDocumentFromJSONLDObject(doc.JSONLdObject())
+
+    addURIs := document.StringArray(entry)
+    existingURIs := sliceToMap(didDoc.AlsoKnownAs())
+
+    var newURIs []string
+    newURIs = append(newURIs, didDoc.AlsoKnownAs()...)
+
+    for _, uri := range addURIs {
+        _, ok := existingURIs[uri]
+        if !ok {
+            // new URI - append it to existing URIs
+            newURIs = append(newURIs, uri)
+        }
+    }
+
+    doc[document.AlsoKnownAs] = interfaceArray(newURIs)
+
+    return doc, nil
+}
+
+func interfaceArray(values []string) []interface{} {
+    var iArr []interface{}
+    for _, v := range values {
+        iArr = append(iArr, v)
+    }
+
+    return iArr
+}
+
+func applyRemoveAlsoKnownAs(doc document.Document, entry interface{}) (document.Document, error) {
+    logger.Debug("Applying remove also-known-as patch", logfields.WithPatch(entry))
+
+    didDoc := document.DidDocumentFromJSONLDObject(doc.JSONLdObject())
+    urisToRemove := sliceToMap(document.StringArray(entry))
+
+    var newURIs []interface{}
+
+    for _, uri := range didDoc.AlsoKnownAs() {
+        _, ok := urisToRemove[uri]
+        if !ok {
+            // not in remove list so add to resulting URIs
+            newURIs = append(newURIs, uri)
+        }
+    }
+
+    doc[document.AlsoKnownAs] = newURIs
+
+    return doc, nil
+}
+
+// deepCopy returns a deep copy of the JSON object.
+func deepCopy(doc document.Document) (document.Document, error) {
+    bytes, err := json.Marshal(doc)
+    if err != nil {
+        return nil, err
+    }
+
+    var result document.Document
+    err = json.Unmarshal(bytes, &result)
+    if err != nil {
+        return nil, err
+    }
+
+    return result, nil
+}
diff --git a/pkg/versions/1_0/doccomposer/composer_test.go b/pkg/versions/1_0/doccomposer/composer_test.go
new file mode 100644
index 0000000..c2731d1
--- /dev/null
+++ b/pkg/versions/1_0/doccomposer/composer_test.go
@@ -0,0 +1,635 @@
+/*
+Copyright Gen Digital Inc. All Rights Reserved.
+ +SPDX-License-Identifier: Apache-2.0 +*/ + +package doccomposer + +import ( + "testing" + + "github.com/stretchr/testify/require" + + "github.com/trustbloc/sidetree-go/pkg/document" + "github.com/trustbloc/sidetree-go/pkg/patch" +) + +const invalid = "invalid" + +func TestApplyPatches(t *testing.T) { + documentComposer := New() + + t.Run("success - add one key to existing doc with two keys", func(t *testing.T) { + original, err := setupDefaultDoc() + require.NoError(t, err) + require.Equal(t, 2, len(original.PublicKeys())) + + addPublicKeys, err := patch.NewAddPublicKeysPatch(addKeys) + require.NoError(t, err) + + doc, err := documentComposer.ApplyPatches(original, []patch.Patch{addPublicKeys}) + require.NoError(t, err) + require.NotNil(t, doc) + + didDoc := document.DidDocumentFromJSONLDObject(doc) + require.Equal(t, 2, len(didDoc.AlsoKnownAs())) + require.Equal(t, 3, len(didDoc.PublicKeys())) + require.Equal(t, "key1", didDoc.PublicKeys()[0].ID()) + require.Equal(t, "key2", didDoc.PublicKeys()[1].ID()) + require.Equal(t, "key3", didDoc.PublicKeys()[2].ID()) + + // make sure that original document is not modified + require.Equal(t, 2, len(original.PublicKeys())) + }) + + t.Run("action not supported", func(t *testing.T) { + p, err := patch.NewAddServiceEndpointsPatch("{}") + require.NoError(t, err) + + p["action"] = invalid + + doc, err := documentComposer.ApplyPatches(make(document.Document), []patch.Patch{p}) + require.Error(t, err) + require.Nil(t, doc) + require.Contains(t, err.Error(), "not supported") + }) + t.Run("error - original document deep copy fails (not json)", func(t *testing.T) { + doc := make(document.Document) + doc["key"] = make(chan int) + + doc, err := documentComposer.ApplyPatches(doc, nil) + require.Error(t, err) + require.Nil(t, doc) + require.Contains(t, err.Error(), "json: unsupported type: chan int") + }) +} + +func TestApplyPatches_PatchesFromOpaqueDoc(t *testing.T) { + documentComposer := New() + + t.Run("success", func(t *testing.T) { + patches, err := patch.PatchesFromDocument(testDoc) + require.NoError(t, err) + + doc, err := documentComposer.ApplyPatches(make(document.Document), patches) + require.NoError(t, err) + require.NotNil(t, doc) + + didDoc := document.DidDocumentFromJSONLDObject(doc.JSONLdObject()) + require.Len(t, didDoc.Services(), 2) + require.Len(t, didDoc.PublicKeys(), 2) + }) +} + +func TestApplyPatches_ReplacePatch(t *testing.T) { + documentComposer := New() + + t.Run("success", func(t *testing.T) { + replace, err := patch.NewReplacePatch(replaceDoc) + require.NoError(t, err) + + doc, err := documentComposer.ApplyPatches(make(document.Document), []patch.Patch{replace}) + require.NoError(t, err) + require.NotNil(t, doc) + + didDoc := document.DidDocumentFromJSONLDObject(doc.JSONLdObject()) + require.Len(t, didDoc.Services(), 1) + require.Len(t, didDoc.PublicKeys(), 1) + }) +} + +func TestApplyPatches_JSON(t *testing.T) { + documentComposer := New() + + t.Run("success", func(t *testing.T) { + doc, err := setupDefaultDoc() + require.NoError(t, err) + + ietf, err := patch.NewJSONPatch(patches) + require.NoError(t, err) + + doc, err = documentComposer.ApplyPatches(doc, []patch.Patch{ietf}) + require.NoError(t, err) + require.NotNil(t, doc) + }) + t.Run("invalid operation", func(t *testing.T) { + doc, err := setupDefaultDoc() + require.NoError(t, err) + + ietf, err := patch.NewJSONPatch(invalidPatches) + require.NoError(t, err) + + doc, err = documentComposer.ApplyPatches(doc, []patch.Patch{ietf}) + require.Error(t, err) + require.Nil(t, doc) + 
require.Contains(t, err.Error(), "Unexpected kind: invalid") + }) +} + +func TestApplyPatches_AddPublicKeys(t *testing.T) { + documentComposer := New() + + t.Run("succes - add one key to existing two keys", func(t *testing.T) { + doc, err := setupDefaultDoc() + require.NoError(t, err) + + addPublicKeys, err := patch.NewAddPublicKeysPatch(addKeys) + require.NoError(t, err) + + doc, err = documentComposer.ApplyPatches(doc, []patch.Patch{addPublicKeys}) + require.NoError(t, err) + require.NotNil(t, doc) + + diddoc := document.DidDocumentFromJSONLDObject(doc) + require.Equal(t, 3, len(diddoc.PublicKeys())) + require.Equal(t, "key1", diddoc.PublicKeys()[0].ID()) + require.Equal(t, "key2", diddoc.PublicKeys()[1].ID()) + require.Equal(t, "key3", diddoc.PublicKeys()[2].ID()) + }) + t.Run("success - add existing public key to document; old one will be replaced", func(t *testing.T) { + doc, err := setupDefaultDoc() + require.NoError(t, err) + + addPublicKeys, err := patch.NewAddPublicKeysPatch(updateExistingKey) + require.NoError(t, err) + + // existing public key will be replaced with new one that has type 'updatedKeyType' + doc, err = documentComposer.ApplyPatches(doc, []patch.Patch{addPublicKeys}) + require.NoError(t, err) + require.NotNil(t, doc) + + diddoc := document.DidDocumentFromJSONLDObject(doc) + keys := diddoc.PublicKeys() + require.Equal(t, 2, len(keys)) + require.Equal(t, 1, len(keys[1].Purpose())) + }) + t.Run("add same key twice - no error; one key added", func(t *testing.T) { + doc, err := setupDefaultDoc() + require.NoError(t, err) + + addPublicKeys, err := patch.NewAddPublicKeysPatch(addKeys) + require.NoError(t, err) + + doc, err = documentComposer.ApplyPatches(doc, []patch.Patch{addPublicKeys, addPublicKeys}) + require.NoError(t, err) + require.NotNil(t, doc) + + diddoc := document.DidDocumentFromJSONLDObject(doc) + require.Equal(t, 3, len(diddoc.PublicKeys())) + }) +} + +func TestApplyPatches_RemovePublicKeys(t *testing.T) { + documentComposer := New() + + t.Run("success - remove existing key", func(t *testing.T) { + doc, err := setupDefaultDoc() + require.NoError(t, err) + + removePublicKeys, err := patch.NewRemovePublicKeysPatch(`["key1"]`) + require.NoError(t, err) + + doc, err = documentComposer.ApplyPatches(doc, []patch.Patch{removePublicKeys}) + require.NoError(t, err) + require.NotNil(t, doc) + + didDoc := document.DidDocumentFromJSONLDObject(doc) + require.Equal(t, 1, len(didDoc.PublicKeys())) + }) + + t.Run("success - remove existing and non-existing keys", func(t *testing.T) { + doc, err := setupDefaultDoc() + require.NoError(t, err) + + removePublicKeys, err := patch.NewRemovePublicKeysPatch(`["key1", "key3"]`) + require.NoError(t, err) + + doc, err = documentComposer.ApplyPatches(doc, []patch.Patch{removePublicKeys}) + require.NoError(t, err) + require.NotNil(t, doc) + + diddoc := document.DidDocumentFromJSONLDObject(doc) + require.Equal(t, 1, len(diddoc.PublicKeys())) + }) + t.Run("success - add and remove same key; doc stays at two keys", func(t *testing.T) { + doc, err := setupDefaultDoc() + require.NoError(t, err) + + addPublicKeys, err := patch.NewAddPublicKeysPatch(addKeys) + require.NoError(t, err) + + doc, err = documentComposer.ApplyPatches(doc, []patch.Patch{addPublicKeys}) + require.NoError(t, err) + require.NotNil(t, doc) + + removePublicKeys, err := patch.NewRemovePublicKeysPatch(`["key3"]`) + require.NoError(t, err) + + doc, err = documentComposer.ApplyPatches(doc, []patch.Patch{removePublicKeys}) + require.NoError(t, err) + require.NotNil(t, doc) + + 
diddoc := document.DidDocumentFromJSONLDObject(doc) + require.Equal(t, 2, len(diddoc.PublicKeys())) + }) +} + +func TestApplyPatches_AddServiceEndpoints(t *testing.T) { + documentComposer := New() + + t.Run("success - add new service to existing two services", func(t *testing.T) { + doc, err := setupDefaultDoc() + require.NoError(t, err) + + addServices, err := patch.NewAddServiceEndpointsPatch(addServices) + require.NoError(t, err) + + doc, err = documentComposer.ApplyPatches(doc, []patch.Patch{addServices}) + require.NoError(t, err) + require.NotNil(t, doc) + + diddoc := document.DidDocumentFromJSONLDObject(doc) + require.Equal(t, 3, len(diddoc.Services())) + require.Equal(t, "svc1", diddoc.Services()[0].ID()) + require.Equal(t, "svc2", diddoc.Services()[1].ID()) + require.Equal(t, "svc3", diddoc.Services()[2].ID()) + }) + t.Run("success - add existing service to document ", func(t *testing.T) { + doc, err := setupDefaultDoc() + require.NoError(t, err) + + addServices, err := patch.NewAddServiceEndpointsPatch(updateExistingService) + require.NoError(t, err) + + // existing service will be replaced with new one that has type 'updatedService' + doc, err = documentComposer.ApplyPatches(doc, []patch.Patch{addServices}) + require.NoError(t, err) + require.NotNil(t, doc) + + diddoc := document.DidDocumentFromJSONLDObject(doc) + services := diddoc.Services() + require.Equal(t, 2, len(services)) + require.Equal(t, diddoc.Services()[1].Type(), "updatedServiceType") + }) + t.Run("add same service twice - no error; one service added", func(t *testing.T) { + doc, err := setupDefaultDoc() + require.NoError(t, err) + + addServices, err := patch.NewAddServiceEndpointsPatch(addServices) + require.NoError(t, err) + + doc, err = documentComposer.ApplyPatches(doc, []patch.Patch{addServices, addServices}) + require.NoError(t, err) + require.NotNil(t, doc) + + diddoc := document.DidDocumentFromJSONLDObject(doc) + require.Equal(t, 3, len(diddoc.Services())) + }) +} + +func TestApplyPatches_RemoveServiceEndpoints(t *testing.T) { + documentComposer := New() + + t.Run("success - remove existing service", func(t *testing.T) { + doc, err := setupDefaultDoc() + require.NoError(t, err) + + removeServices, err := patch.NewRemoveServiceEndpointsPatch(`["svc1"]`) + require.NoError(t, err) + + doc, err = documentComposer.ApplyPatches(doc, []patch.Patch{removeServices}) + require.NoError(t, err) + require.NotNil(t, doc) + + diddoc := document.DidDocumentFromJSONLDObject(doc) + require.Equal(t, 1, len(diddoc.Services())) + }) + + t.Run("success - remove existing and non-existing service", func(t *testing.T) { + doc, err := setupDefaultDoc() + require.NoError(t, err) + + removeServices, err := patch.NewRemoveServiceEndpointsPatch(`["svc1", "svc3"]`) + require.NoError(t, err) + + doc, err = documentComposer.ApplyPatches(doc, []patch.Patch{removeServices}) + require.NoError(t, err) + require.NotNil(t, doc) + + diddoc := document.DidDocumentFromJSONLDObject(doc) + require.Equal(t, 1, len(diddoc.Services())) + }) + t.Run("success - add and remove same service; doc stays at two services", func(t *testing.T) { + doc, err := setupDefaultDoc() + require.NoError(t, err) + + addServices, err := patch.NewAddServiceEndpointsPatch(addServices) + require.NoError(t, err) + + doc, err = documentComposer.ApplyPatches(doc, []patch.Patch{addServices}) + require.NoError(t, err) + require.NotNil(t, doc) + + removeServices, err := patch.NewRemoveServiceEndpointsPatch(`["svc3"]`) + require.NoError(t, err) + + doc, err = 
documentComposer.ApplyPatches(doc, []patch.Patch{removeServices}) + require.NoError(t, err) + require.NotNil(t, doc) + + diddoc := document.DidDocumentFromJSONLDObject(doc) + require.Equal(t, 2, len(diddoc.Services())) + }) +} + +func TestApplyPatches_RemoveAlsoKnownAs(t *testing.T) { + documentComposer := New() + + t.Run("success - remove existing URIs (one by one)", func(t *testing.T) { + doc, err := setupDefaultDoc() + require.NoError(t, err) + + didDoc := document.DidDocumentFromJSONLDObject(doc) + require.Equal(t, 2, len(didDoc.AlsoKnownAs())) + + removeAlsoKnownAs, err := patch.NewRemoveAlsoKnownAs(`["https://myblog.example/"]`) + require.NoError(t, err) + + doc, err = documentComposer.ApplyPatches(doc, []patch.Patch{removeAlsoKnownAs}) + require.NoError(t, err) + require.NotNil(t, doc) + + didDoc = document.DidDocumentFromJSONLDObject(doc) + require.Equal(t, 1, len(didDoc.AlsoKnownAs())) + require.Equal(t, "https://second.example/", didDoc.AlsoKnownAs()[0]) + + removeAlsoKnownAs, err = patch.NewRemoveAlsoKnownAs(`["https://second.example/"]`) + require.NoError(t, err) + + doc, err = documentComposer.ApplyPatches(doc, []patch.Patch{removeAlsoKnownAs}) + require.NoError(t, err) + require.NotNil(t, doc) + + didDoc = document.DidDocumentFromJSONLDObject(doc) + require.Equal(t, 0, len(didDoc.AlsoKnownAs())) + }) + + t.Run("success - remove all existing URI", func(t *testing.T) { + doc, err := setupDefaultDoc() + require.NoError(t, err) + + didDoc := document.DidDocumentFromJSONLDObject(doc) + require.Equal(t, 2, len(didDoc.AlsoKnownAs())) + + removeAlsoKnownAs, err := patch.NewRemoveAlsoKnownAs(`["https://myblog.example/","https://second.example/"]`) + require.NoError(t, err) + + doc, err = documentComposer.ApplyPatches(doc, []patch.Patch{removeAlsoKnownAs}) + require.NoError(t, err) + require.NotNil(t, doc) + + didDoc = document.DidDocumentFromJSONLDObject(doc) + require.Equal(t, 0, len(didDoc.AlsoKnownAs())) + }) + + t.Run("success - remove one existing and one non-existing URI", func(t *testing.T) { + doc, err := setupDefaultDoc() + require.NoError(t, err) + + didDoc := document.DidDocumentFromJSONLDObject(doc) + require.Equal(t, 2, len(didDoc.AlsoKnownAs())) + + removeAlsoKnownAs, err := patch.NewRemoveAlsoKnownAs(`["https://myblog.example/","https://non-existing.example/"]`) + require.NoError(t, err) + + doc, err = documentComposer.ApplyPatches(doc, []patch.Patch{removeAlsoKnownAs}) + require.NoError(t, err) + require.NotNil(t, doc) + + didDoc = document.DidDocumentFromJSONLDObject(doc) + require.Equal(t, 1, len(didDoc.AlsoKnownAs())) + require.Equal(t, "https://second.example/", didDoc.AlsoKnownAs()[0]) + }) + + t.Run("success - add and remove same uri; doc stays at two uri", func(t *testing.T) { + doc, err := setupDefaultDoc() + require.NoError(t, err) + + didDoc := document.DidDocumentFromJSONLDObject(doc) + require.Equal(t, 2, len(didDoc.AlsoKnownAs())) + + addAlsoKnowAs, err := patch.NewAddAlsoKnownAs(`["https://third.example/","https://fourth.example/"]`) + require.NoError(t, err) + + doc, err = documentComposer.ApplyPatches(doc, []patch.Patch{addAlsoKnowAs}) + require.NoError(t, err) + require.NotNil(t, doc) + + didDoc = document.DidDocumentFromJSONLDObject(doc) + require.Equal(t, 4, len(didDoc.AlsoKnownAs())) + require.Equal(t, "https://myblog.example/", didDoc.AlsoKnownAs()[0]) + require.Equal(t, "https://second.example/", didDoc.AlsoKnownAs()[1]) + require.Equal(t, "https://third.example/", didDoc.AlsoKnownAs()[2]) + require.Equal(t, "https://fourth.example/", 
didDoc.AlsoKnownAs()[3]) + + removeAlsoKnownAs, err := patch.NewRemoveAlsoKnownAs(`["https://third.example/","https://fourth.example/"]`) + require.NoError(t, err) + + doc, err = documentComposer.ApplyPatches(doc, []patch.Patch{removeAlsoKnownAs}) + require.NoError(t, err) + require.NotNil(t, doc) + + didDoc = document.DidDocumentFromJSONLDObject(doc) + require.Equal(t, 2, len(didDoc.AlsoKnownAs())) + }) + + t.Run("error - uri is not a string", func(t *testing.T) { + doc, err := setupDefaultDoc() + require.NoError(t, err) + + didDoc := document.DidDocumentFromJSONLDObject(doc) + require.Equal(t, 2, len(didDoc.AlsoKnownAs())) + + addAlsoKnowAs, err := patch.NewAddAlsoKnownAs(`[123,"https://another.example/"]`) + require.Error(t, err) + require.Nil(t, addAlsoKnowAs) + require.Contains(t, err.Error(), "also known as uris is not string array") + }) + + t.Run("error - uri is empty", func(t *testing.T) { + doc, err := setupDefaultDoc() + require.NoError(t, err) + + didDoc := document.DidDocumentFromJSONLDObject(doc) + require.Equal(t, 2, len(didDoc.AlsoKnownAs())) + + addAlsoKnowAs, err := patch.NewAddAlsoKnownAs(`[]`) + require.Error(t, err) + require.Nil(t, addAlsoKnowAs) + require.Contains(t, err.Error(), "missing also known as uris") + }) +} + +func TestApplyPatches_AddAlsoKnownAs(t *testing.T) { + documentComposer := New() + + t.Run("success - add multiple URIs, followed by same URIs", func(t *testing.T) { + doc := make(document.Document) + + didDoc := document.DidDocumentFromJSONLDObject(doc) + require.Equal(t, 0, len(didDoc.AlsoKnownAs())) + + addAlsoKnownAs, err := patch.NewAddAlsoKnownAs(`["https://myblog.example/", "https://other.example/"]`) + require.NoError(t, err) + + doc, err = documentComposer.ApplyPatches(doc, []patch.Patch{addAlsoKnownAs}) + require.NoError(t, err) + require.NotNil(t, doc) + + didDoc = document.DidDocumentFromJSONLDObject(doc) + require.Equal(t, 2, len(didDoc.AlsoKnownAs())) + require.Equal(t, "https://myblog.example/", didDoc.AlsoKnownAs()[0]) + require.Equal(t, "https://other.example/", didDoc.AlsoKnownAs()[1]) + + // add again same URIs - they will be ignored during applying patches + addAlsoKnownAs, err = patch.NewAddAlsoKnownAs(`["https://myblog.example/", "https://other.example/"]`) + require.NoError(t, err) + + doc, err = documentComposer.ApplyPatches(doc, []patch.Patch{addAlsoKnownAs}) + require.NoError(t, err) + require.NotNil(t, doc) + + didDoc = document.DidDocumentFromJSONLDObject(doc) + require.Equal(t, 2, len(didDoc.AlsoKnownAs())) + require.Equal(t, "https://myblog.example/", didDoc.AlsoKnownAs()[0]) + require.Equal(t, "https://other.example/", didDoc.AlsoKnownAs()[1]) + }) +} + +func setupDefaultDoc() (document.Document, error) { + documentComposer := New() + + patches, err := patch.PatchesFromDocument(testDoc) + if err != nil { + return nil, err + } + + return documentComposer.ApplyPatches(make(document.Document), patches) +} + +const invalidPatches = `[ + { + "op": "invalid", + "path": "/test", + "value": "new value" + } +]` + +const patches = `[ + { + "op": "replace", + "path": "/test", + "value": "new value" + } +]` + +const testDoc = `{ + "alsoKnownAs": ["https://myblog.example/", "https://second.example/"], + "publicKey": [ + { + "id": "key1", + "type": "JsonWebKey2020", + "purposes": ["assertionMethod"], + "publicKeyJwk": { + "kty": "EC", + "crv": "P-256K", + "x": "PUymIqdtF_qxaAqPABSw-C-owT1KYYQbsMKFM-L9fJA", + "y": "nM84jDHCMOTGTh_ZdHq4dBBdo4Z5PkEOW9jA8z8IsGc" + } + }, + { + "id": "key2", + "type": "JsonWebKey2020", + "purposes": 
["authentication"], + "publicKeyJwk": { + "kty": "EC", + "crv": "P-256K", + "x": "PUymIqdtF_qxaAqPABSw-C-owT1KYYQbsMKFM-L9fJA", + "y": "nM84jDHCMOTGTh_ZdHq4dBBdo4Z5PkEOW9jA8z8IsGc" + } + } + ], + "service": [ + { + "id": "svc1", + "type": "SecureDataStore", + "serviceEndpoint": "http://hub.my-personal-server.com" + }, + { + "id": "svc2", + "type": "SecureDataStore", + "serviceEndpoint": "http://some-cloud.com/hub" + } + ] +}` + +const addKeys = `[{ + "id": "key3", + "type": "JsonWebKey2020", + "publicKeyJwk": { + "kty": "EC", + "crv": "P-256K", + "x": "PUymIqdtF_qxaAqPABSw-C-owT1KYYQbsMKFM-L9fJA", + "y": "nM84jDHCMOTGTh_ZdHq4dBBdo4Z5PkEOW9jA8z8IsGc" + } + }]` + +const updateExistingKey = `[{ + "id": "key2", + "type": "JsonWebKey2020", + "purposes": ["assertionMethod"], + "publicKeyJwk": { + "kty": "EC", + "crv": "P-256K", + "x": "PUymIqdtF_qxaAqPABSw-C-owT1KYYQbsMKFM-L9fJA", + "y": "nM84jDHCMOTGTh_ZdHq4dBBdo4Z5PkEOW9jA8z8IsGc" + } + }]` + +const addServices = `[ + { + "id": "svc3", + "type": "SecureDataStore", + "serviceEndpoint": "http://hub.my-personal-server.com" + } + ]` + +const updateExistingService = `[ + { + "id": "svc2", + "type": "updatedServiceType", + "serviceEndpoint": "http://hub.my-personal-server.com" + } + ]` + +const replaceDoc = `{ + "publicKeys": [ + { + "id": "key-1", + "purposes": ["authentication"], + "type": "EcdsaSecp256k1VerificationKey2019", + "publicKeyJwk": { + "kty": "EC", + "crv": "P-256K", + "x": "PUymIqdtF_qxaAqPABSw-C-owT1KYYQbsMKFM-L9fJA", + "y": "nM84jDHCMOTGTh_ZdHq4dBBdo4Z5PkEOW9jA8z8IsGc" + } + }], + "services": [ + { + "id": "sds3", + "type": "SecureDataStore", + "serviceEndpoint": "http://hub.my-personal-server.com" + }] +}` diff --git a/pkg/versions/1_0/doctransformer/didtransformer/testdata/doc.json b/pkg/versions/1_0/doctransformer/didtransformer/testdata/doc.json new file mode 100644 index 0000000..3c03dc9 --- /dev/null +++ b/pkg/versions/1_0/doctransformer/didtransformer/testdata/doc.json @@ -0,0 +1,100 @@ +{ + "alsoKnownAs": ["https:\\somebody.com"], + "publicKey": [ + { + "id": "master", + "type": "EcdsaSecp256k1VerificationKey2019", + "purposes": ["authentication", "assertionMethod", "keyAgreement", "capabilityDelegation", "capabilityInvocation"], + "publicKeyJwk": { + "kty": "EC", + "crv": "P-256K", + "x": "PUymIqdtF_qxaAqPABSw-C-owT1KYYQbsMKFM-L9fJA", + "y": "nM84jDHCMOTGTh_ZdHq4dBBdo4Z5PkEOW9jA8z8IsGc" + } + }, + { + "id": "auth", + "type": "JsonWebKey2020", + "purposes": ["authentication"], + "publicKeyJwk": { + "kty": "EC", + "crv": "P-256K", + "x": "PUymIqdtF_qxaAqPABSw-C-owT1KYYQbsMKFM-L9fJA", + "y": "nM84jDHCMOTGTh_ZdHq4dBBdo4Z5PkEOW9jA8z8IsGc" + } + }, + { + "id": "assertion", + "type": "JsonWebKey2020", + "purposes": ["assertionMethod"], + "publicKeyJwk": { + "kty": "EC", + "crv": "P-256K", + "x": "PUymIqdtF_qxaAqPABSw-C-owT1KYYQbsMKFM-L9fJA", + "y": "nM84jDHCMOTGTh_ZdHq4dBBdo4Z5PkEOW9jA8z8IsGc" + } + }, + { + "id": "agreement", + "type": "JsonWebKey2020", + "purposes": ["keyAgreement"], + "publicKeyJwk": { + "kty": "EC", + "crv": "P-256K", + "x": "PUymIqdtF_qxaAqPABSw-C-owT1KYYQbsMKFM-L9fJA", + "y": "nM84jDHCMOTGTh_ZdHq4dBBdo4Z5PkEOW9jA8z8IsGc" + } + }, + { + "id": "invocation", + "type": "JsonWebKey2020", + "purposes": ["capabilityInvocation"], + "publicKeyJwk": { + "kty": "EC", + "crv": "P-256K", + "x": "PUymIqdtF_qxaAqPABSw-C-owT1KYYQbsMKFM-L9fJA", + "y": "nM84jDHCMOTGTh_ZdHq4dBBdo4Z5PkEOW9jA8z8IsGc" + } + }, + { + "id": "delegation", + "type": "JsonWebKey2020", + "purposes": ["capabilityDelegation"], + "publicKeyJwk": { + 
"kty": "EC", + "crv": "P-256K", + "x": "PUymIqdtF_qxaAqPABSw-C-owT1KYYQbsMKFM-L9fJA", + "y": "nM84jDHCMOTGTh_ZdHq4dBBdo4Z5PkEOW9jA8z8IsGc" + } + }, + { + "id": "general", + "type": "JsonWebKey2020", + "purposes": [], + "publicKeyJwk": { + "kty": "EC", + "crv": "P-256K", + "x": "PUymIqdtF_qxaAqPABSw-C-owT1KYYQbsMKFM-L9fJA", + "y": "nM84jDHCMOTGTh_ZdHq4dBBdo4Z5PkEOW9jA8z8IsGc" + } + } + ], + "service": [ + { + "id": "hub", + "type": "IdentityHub", + "routingKeys": "routingKeysValue", + "recipientKeys": "recipientKeysValue", + "serviceEndpoint": "https://example.com/hub/" + }, + { + "id": "hub-object", + "type": "IdentityHub", + "serviceEndpoint": { + "@context": "https://schema.identity.foundation/hub", + "type": "UserHubEndpoint", + "instances": ["did:example:456", "did:example:789"] + } + } + ] +} \ No newline at end of file diff --git a/pkg/versions/1_0/doctransformer/didtransformer/transformer.go b/pkg/versions/1_0/doctransformer/didtransformer/transformer.go new file mode 100644 index 0000000..89af896 --- /dev/null +++ b/pkg/versions/1_0/doctransformer/didtransformer/transformer.go @@ -0,0 +1,364 @@ +/* +Copyright Gen Digital Inc. All Rights Reserved. + +SPDX-License-Identifier: Apache-2.0 +*/ + +package didtransformer + +import ( + "errors" + "fmt" + + "github.com/btcsuite/btcutil/base58" + "github.com/multiformats/go-multibase" + + "github.com/trustbloc/sidetree-go/pkg/api/protocol" + "github.com/trustbloc/sidetree-go/pkg/document" + "github.com/trustbloc/sidetree-go/pkg/jws" + internaljws "github.com/trustbloc/sidetree-go/pkg/jwsutil" + "github.com/trustbloc/sidetree-go/pkg/versions/1_0/doctransformer/metadata" +) + +const ( + didContext = "https://www.w3.org/ns/did/v1" + + didResolutionContext = "https://w3id.org/did-resolution/v1" + + // ed25519VerificationKey2018 requires special handling (convert to base58). + ed25519VerificationKey2018 = "Ed25519VerificationKey2018" + + // ed25519VerificationKey202p requires special handling (convert to multibase). + ed25519VerificationKey2020 = "Ed25519VerificationKey2020" + + bls12381G2Key2020 = "Bls12381G2Key2020" + jsonWebKey2020 = "JsonWebKey2020" + ecdsaSecp256k1VerificationKey2019 = "EcdsaSecp256k1VerificationKey2019" + x25519KeyAgreementKey2019 = "X25519KeyAgreementKey2019" + + bls12381G2Key2020Ctx = "https://w3id.org/security/suites/bls12381-2020/v1" + jsonWebKey2020Ctx = "https://w3id.org/security/suites/jws-2020/v1" + ecdsaSecp256k1VerificationKey2019Ctx = "https://w3id.org/security/suites/secp256k1-2019/v1" + ed25519VerificationKey2018Ctx = "https://w3id.org/security/suites/ed25519-2018/v1" + ed25519VerificationKey2020Ctx = "https://w3id.org/security/suites/ed25519-2020/v1" + x25519KeyAgreementKey2019Ctx = "https://w3id.org/security/suites/x25519-2019/v1" +) + +type keyContextMap map[string]string + +var defaultKeyContextMap = keyContextMap{ + bls12381G2Key2020: bls12381G2Key2020Ctx, + jsonWebKey2020: jsonWebKey2020Ctx, + ecdsaSecp256k1VerificationKey2019: ecdsaSecp256k1VerificationKey2019Ctx, + ed25519VerificationKey2018: ed25519VerificationKey2018Ctx, + ed25519VerificationKey2020: ed25519VerificationKey2020Ctx, + x25519KeyAgreementKey2019: x25519KeyAgreementKey2019Ctx, +} + +// Option is a registry instance option. +type Option func(opts *Transformer) + +// WithMethodContext sets optional method context(s). +func WithMethodContext(ctx []string) Option { + return func(opts *Transformer) { + opts.methodCtx = ctx + } +} + +// WithKeyContext sets optional key context. 
+func WithKeyContext(ctx map[string]string) Option { + return func(opts *Transformer) { + opts.keyCtx = ctx + } +} + +// WithBase sets optional @base context. +func WithBase(enabled bool) Option { + return func(opts *Transformer) { + opts.includeBase = enabled + } +} + +// WithIncludePublishedOperations sets optional include published operations flag. +func WithIncludePublishedOperations(enabled bool) Option { + return func(opts *Transformer) { + opts.includePublishedOperations = enabled + } +} + +// WithIncludeUnpublishedOperations sets optional include unpublished operations flag. +func WithIncludeUnpublishedOperations(enabled bool) Option { + return func(opts *Transformer) { + opts.includeUnpublishedOperations = enabled + } +} + +// Transformer is responsible for transforming internal to external document. +type Transformer struct { + keyCtx map[string]string + methodCtx []string // used for setting additional contexts during resolution + includeBase bool + + includePublishedOperations bool + includeUnpublishedOperations bool +} + +// New creates a new DID Transformer. +func New(opts ...Option) *Transformer { + transformer := &Transformer{} + + // apply options + for _, opt := range opts { + opt(transformer) + } + + // if key contexts are not provided via options use default key contexts + if len(transformer.keyCtx) == 0 { + transformer.keyCtx = defaultKeyContextMap + } + + return transformer +} + +// TransformDocument takes internal resolution model and transformation info and creates +// external representation of document (resolution result). +func (t *Transformer) TransformDocument(rm *protocol.ResolutionModel, + info protocol.TransformationInfo) (*document.ResolutionResult, error) { + docMetadata, err := metadata.New( + metadata.WithIncludeUnpublishedOperations(t.includeUnpublishedOperations), + metadata.WithIncludePublishedOperations(t.includePublishedOperations)). + CreateDocumentMetadata(rm, info) + if err != nil { + return nil, err + } + + id, ok := info[document.IDProperty] + if !ok { + return nil, errors.New("id is required for document transformation") + } + + internal := document.DidDocumentFromJSONLDObject(rm.Doc.JSONLdObject()) + + // start with empty document + external := document.DidDocumentFromJSONLDObject(make(document.DIDDocument)) + + // add main context + ctx := []interface{}{didContext} + + // add optional method contexts + for _, c := range t.methodCtx { + ctx = append(ctx, c) + } + + if t.includeBase { + ctx = append(ctx, getBase(id.(string))) + } + + alsoKnownAs := internal.AlsoKnownAs() + if len(alsoKnownAs) > 0 { + external[document.AlsoKnownAs] = alsoKnownAs + } + + external[document.ContextProperty] = ctx + external[document.IDProperty] = id + + result := &document.ResolutionResult{ + Context: didResolutionContext, + Document: external.JSONLdObject(), + DocumentMetadata: docMetadata, + } + + // add keys + err = t.processKeys(internal, result) + if err != nil { + return nil, fmt.Errorf("failed to transform public keys for did document: %s", err.Error()) + } + + // add services + t.processServices(internal, result) + + return result, nil +} + +func getBase(id string) interface{} { + return &struct { + Base string `json:"@base"` + }{ + Base: id, + } +} + +// processServices will process services and add them to external document. 
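+// Service IDs are made absolute by prefixing the DID; for example
+// (illustrative), service "hub" in a document resolved as "did:method:abc"
+// is emitted as:
+//
+//	"id": "did:method:abc#hub"
+//
+// (or as the relative "#hub" when WithBase is enabled).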
+func (t *Transformer) processServices(internal document.DIDDocument, resolutionResult *document.ResolutionResult) {
+    var services []document.Service
+
+    did := resolutionResult.Document.ID()
+
+    // add DID prefix to each service ID
+    for _, sv := range internal.Services() {
+        externalService := make(document.Service)
+        externalService[document.IDProperty] = t.getObjectID(did, sv.ID())
+        externalService[document.TypeProperty] = sv.Type()
+        externalService[document.ServiceEndpointProperty] = sv.ServiceEndpoint()
+
+        for key, value := range sv {
+            _, ok := externalService[key]
+            if !ok {
+                externalService[key] = value
+            }
+        }
+
+        services = append(services, externalService)
+    }
+
+    if len(services) > 0 {
+        resolutionResult.Document[document.ServiceProperty] = services
+    }
+}
+
+// processKeys will process keys according to the Sidetree rules below and add them to the external document.
+// Every key will be included in the verificationMethod section of the resolved DID Document.
+//
+// -- authentication: the key MUST be included by reference (full id) in the authentication section of the resolved DID Document.
+// -- assertion: the key MUST be included by reference in the assertionMethod section.
+// -- agreement: the key MUST be included by reference in the keyAgreement section.
+// -- delegation: the key MUST be included by reference in the capabilityDelegation section.
+// -- invocation: the key MUST be included by reference in the capabilityInvocation section.
+//
+//nolint:gocyclo
+func (t *Transformer) processKeys(internal document.DIDDocument,
+    resolutionResult *document.ResolutionResult) error { //nolint:gocognit
+    purposes := map[string][]interface{}{
+        document.AuthenticationProperty:  make([]interface{}, 0),
+        document.AssertionMethodProperty: make([]interface{}, 0),
+        document.KeyAgreementProperty:    make([]interface{}, 0),
+        document.DelegationKeyProperty:   make([]interface{}, 0),
+        document.InvocationKeyProperty:   make([]interface{}, 0),
+    }
+
+    did := resolutionResult.Document.ID()
+
+    var publicKeys []document.PublicKey
+
+    var keyContexts []string
+
+    for _, pk := range internal.PublicKeys() {
+        id := t.getObjectID(did, pk.ID())
+
+        externalPK := make(document.PublicKey)
+        externalPK[document.IDProperty] = id
+        externalPK[document.TypeProperty] = pk.Type()
+        externalPK[document.ControllerProperty] = did
+
+        if pkJwk := pk.PublicKeyJwk(); pkJwk != nil { //nolint:nestif
+            if pk.Type() == ed25519VerificationKey2018 {
+                ed25519PubKey, err := getED2519PublicKey(pkJwk)
+                if err != nil {
+                    return err
+                }
+                externalPK[document.PublicKeyBase58Property] = base58.Encode(ed25519PubKey)
+            } else if pk.Type() == ed25519VerificationKey2020 {
+                ed25519PubKey, err := getED2519PublicKey(pkJwk)
+                if err != nil {
+                    return err
+                }
+
+                multibaseEncode, err := multibase.Encode(multibase.Base58BTC, ed25519PubKey)
+                if err != nil {
+                    return err
+                }
+
+                externalPK[document.PublicKeyMultibaseProperty] = multibaseEncode
+            } else {
+                externalPK[document.PublicKeyJwkProperty] = pkJwk
+            }
+        } else if pkb58 := pk.PublicKeyBase58(); pkb58 != "" {
+            externalPK[document.PublicKeyBase58Property] = pkb58
+        } else if pkMultibase := pk.PublicKeyMultibase(); pkMultibase != "" {
+            externalPK[document.PublicKeyMultibaseProperty] = pkMultibase
+        } else {
+            externalPK[document.PublicKeyJwkProperty] = nil // if key missing, default to adding nil jwk
+        }
+
+        keyContext, ok := t.keyCtx[pk.Type()]
+        if !ok {
+            return fmt.Errorf("key context not found for key type: %s", pk.Type())
+        }
+
+        if !contains(keyContexts, keyContext) {
+            keyContexts =
+ + publicKeys = append(publicKeys, externalPK) + + for _, p := range pk.Purpose() { + switch p { + case document.KeyPurposeAuthentication: + purposes[document.AuthenticationProperty] = append(purposes[document.AuthenticationProperty], id) + case document.KeyPurposeAssertionMethod: + purposes[document.AssertionMethodProperty] = append(purposes[document.AssertionMethodProperty], id) + case document.KeyPurposeKeyAgreement: + purposes[document.KeyAgreementProperty] = append(purposes[document.KeyAgreementProperty], id) + case document.KeyPurposeCapabilityDelegation: + purposes[document.DelegationKeyProperty] = append(purposes[document.DelegationKeyProperty], id) + case document.KeyPurposeCapabilityInvocation: + purposes[document.InvocationKeyProperty] = append(purposes[document.InvocationKeyProperty], id) + } + } + } + + if len(publicKeys) > 0 { + resolutionResult.Document[document.VerificationMethodProperty] = publicKeys + + // we need to add key context(s) to the original context + ctx := append(resolutionResult.Document.Context(), interfaceArray(keyContexts)...) + resolutionResult.Document[document.ContextProperty] = ctx + } + + for key, value := range purposes { + if len(value) > 0 { + resolutionResult.Document[key] = value + } + } + + return nil +} + +func contains(values []string, value string) bool { + for _, v := range values { + if v == value { + return true + } + } + + return false +} + +func interfaceArray(values []string) []interface{} { + var iArr []interface{} + for _, v := range values { + iArr = append(iArr, v) + } + + return iArr +} + +func (t *Transformer) getObjectID(docID, objectID string) interface{} { + relativeID := "#" + objectID + if t.includeBase { + return relativeID + } + + return docID + relativeID +} + +func getED25519PublicKey(pkJWK document.JWK) ([]byte, error) { + jwk := &jws.JWK{ + Crv: pkJWK.Crv(), + Kty: pkJWK.Kty(), + X: pkJWK.X(), + Y: pkJWK.Y(), + } + + return internaljws.GetED25519PublicKey(jwk) +} diff --git a/pkg/versions/1_0/doctransformer/didtransformer/transformer_test.go b/pkg/versions/1_0/doctransformer/didtransformer/transformer_test.go new file mode 100644 index 0000000..55a13df --- /dev/null +++ b/pkg/versions/1_0/doctransformer/didtransformer/transformer_test.go @@ -0,0 +1,817 @@ +/* +Copyright Gen Digital Inc. All Rights Reserved.
+ +SPDX-License-Identifier: Apache-2.0 +*/ + +package didtransformer + +import ( + "crypto/ed25519" + "crypto/rand" + "encoding/json" + "fmt" + "io" + "os" + "testing" + + "github.com/btcsuite/btcutil/base58" + "github.com/multiformats/go-multibase" + "github.com/stretchr/testify/require" + + "github.com/trustbloc/sidetree-go/pkg/api/operation" + "github.com/trustbloc/sidetree-go/pkg/api/protocol" + "github.com/trustbloc/sidetree-go/pkg/document" + "github.com/trustbloc/sidetree-go/pkg/util/pubkey" + "github.com/trustbloc/sidetree-go/pkg/versions/1_0/doctransformer/metadata" +) + +const testID = "doc:abc:123" + +func TestNewTransformer(t *testing.T) { + transformer := New() + require.NotNil(t, transformer) + require.Empty(t, transformer.methodCtx) + require.Equal(t, false, transformer.includeBase) + require.Equal(t, false, transformer.includePublishedOperations) + require.Equal(t, false, transformer.includeUnpublishedOperations) + + const ctx1 = "ctx-1" + transformer = New(WithMethodContext([]string{ctx1})) + require.Equal(t, 1, len(transformer.methodCtx)) + require.Equal(t, ctx1, transformer.methodCtx[0]) + + const ctx2 = "ctx-2" + transformer = New(WithMethodContext([]string{ctx1, ctx2})) + require.Equal(t, 2, len(transformer.methodCtx)) + require.Equal(t, ctx2, transformer.methodCtx[1]) + + transformer = New(WithBase(true)) + require.Equal(t, true, transformer.includeBase) + + keyCtx := map[string]string{ + "key-1": "value-1", + "key-2": "value-2", + } + + transformer = New(WithKeyContext(keyCtx)) + require.Equal(t, 2, len(transformer.keyCtx)) + + transformer = New(WithIncludePublishedOperations(true), WithIncludeUnpublishedOperations(true)) + require.Equal(t, true, transformer.includePublishedOperations) + require.Equal(t, true, transformer.includeUnpublishedOperations) +} + +func TestTransformDocument(t *testing.T) { + r := reader(t, "testdata/doc.json") + docBytes, err := io.ReadAll(r) + require.NoError(t, err) + doc, err := document.FromBytes(docBytes) + require.NoError(t, err) + + transformer := New() + + internal := &protocol.ResolutionModel{Doc: doc, RecoveryCommitment: "recovery", UpdateCommitment: "update"} + + t.Run("success", func(t *testing.T) { + info := make(protocol.TransformationInfo) + info[document.IDProperty] = testID + info[document.PublishedProperty] = true + + result, err := transformer.TransformDocument(internal, info) + require.NoError(t, err) + require.NotNil(t, result) + require.Equal(t, testID, result.Document[document.IDProperty]) + + methodMetadataEntry, ok := result.DocumentMetadata[document.MethodProperty] + require.True(t, ok) + methodMetadata, ok := methodMetadataEntry.(document.Metadata) + require.True(t, ok) + + require.Equal(t, true, methodMetadata[document.PublishedProperty]) + require.Equal(t, "recovery", methodMetadata[document.RecoveryCommitmentProperty]) + require.Equal(t, "update", methodMetadata[document.UpdateCommitmentProperty]) + + jsonTransformed, err := json.Marshal(result.Document) + require.NoError(t, err) + + didDoc, err := document.DidDocumentFromBytes(jsonTransformed) + require.NoError(t, err) + + // test document has 7 keys defined, two distinct key types: EcdsaSecp256k1VerificationKey2019, JsonWebKey2020 + require.Equal(t, 3, len(didDoc.Context())) + require.Equal(t, didContext, didDoc.Context()[0]) + require.NotEmpty(t, didDoc[document.AlsoKnownAs]) + require.Equal(t, ecdsaSecp256k1VerificationKey2019Ctx, didDoc.Context()[1]) + require.Equal(t, jsonWebKey2020Ctx, didDoc.Context()[2]) + + // validate services + service := 
didDoc.Services()[0] + require.Equal(t, service.ID(), testID+"#hub") + require.Equal(t, "https://example.com/hub/", service.ServiceEndpoint().(string)) + require.Equal(t, "recipientKeysValue", service["recipientKeys"]) + require.Equal(t, "routingKeysValue", service["routingKeys"]) + require.Equal(t, "IdentityHub", service.Type()) + + service = didDoc.Services()[1] + require.Equal(t, service.ID(), testID+"#hub-object") + require.NotEmpty(t, service.ServiceEndpoint()) + require.Empty(t, service["recipientKeys"]) + require.Equal(t, "IdentityHub", service.Type()) + + serviceEndpointEntry := service.ServiceEndpoint() + serviceEndpoint := serviceEndpointEntry.(map[string]interface{}) + require.Equal(t, "https://schema.identity.foundation/hub", serviceEndpoint["@context"]) + require.Equal(t, "UserHubEndpoint", serviceEndpoint["type"]) + require.Equal(t, []interface{}{"did:example:456", "did:example:789"}, serviceEndpoint["instances"]) + + // validate public keys + pk := didDoc.VerificationMethods()[0] + require.Contains(t, pk.ID(), testID) + require.NotEmpty(t, pk.Type()) + require.NotEmpty(t, pk.PublicKeyJwk()) + require.Empty(t, pk.PublicKeyBase58()) + + expectedPublicKeys := []string{"master", "general", "authentication", "assertion", "agreement", "delegation", "invocation"} + require.Equal(t, len(expectedPublicKeys), len(didDoc.VerificationMethods())) + + expectedAuthenticationKeys := []string{"master", "authentication"} + require.Equal(t, len(expectedAuthenticationKeys), len(didDoc.Authentications())) + + expectedAssertionMethodKeys := []string{"master", "assertion"} + require.Equal(t, len(expectedAssertionMethodKeys), len(didDoc.AssertionMethods())) + + expectedAgreementKeys := []string{"master", "agreement"} + require.Equal(t, len(expectedAgreementKeys), len(didDoc.AgreementKeys())) + + expectedDelegationKeys := []string{"master", "delegation"} + require.Equal(t, len(expectedDelegationKeys), len(didDoc.DelegationKeys())) + + expectedInvocationKeys := []string{"master", "invocation"} + require.Equal(t, len(expectedInvocationKeys), len(didDoc.InvocationKeys())) + }) + t.Run("success - with canonical, equivalent ID", func(t *testing.T) { + info := make(protocol.TransformationInfo) + info[document.IDProperty] = "did:abc:123" + info[document.PublishedProperty] = true + info[document.CanonicalIDProperty] = "canonical" + info[document.EquivalentIDProperty] = []string{"equivalent"} + + result, err := transformer.TransformDocument(internal, info) + require.NoError(t, err) + require.Equal(t, "did:abc:123", result.Document[document.IDProperty]) + + methodMetadataEntry, ok := result.DocumentMetadata[document.MethodProperty] + require.True(t, ok) + methodMetadata, ok := methodMetadataEntry.(document.Metadata) + require.True(t, ok) + + require.Equal(t, true, methodMetadata[document.PublishedProperty]) + require.Equal(t, "recovery", methodMetadata[document.RecoveryCommitmentProperty]) + require.Equal(t, "update", methodMetadata[document.UpdateCommitmentProperty]) + + require.Equal(t, "canonical", result.DocumentMetadata[document.CanonicalIDProperty]) + require.NotEmpty(t, result.DocumentMetadata[document.EquivalentIDProperty]) + }) + + t.Run("success - all supported contexts for key type", func(t *testing.T) { + d, err := document.FromBytes([]byte(allKeyTypes)) + require.NoError(t, err) + + trans := New() + + internalDoc := &protocol.ResolutionModel{Doc: d} + + info := make(protocol.TransformationInfo) + info[document.IDProperty] = testID + info[document.PublishedProperty] = true + + result, err := 
trans.TransformDocument(internalDoc, info) + require.NoError(t, err) + require.NotEmpty(t, result) + + didDoc := result.Document + + require.Equal(t, 7, len(didDoc.Context())) + require.Equal(t, didContext, didDoc.Context()[0]) + require.Equal(t, bls12381G2Key2020Ctx, didDoc.Context()[1]) + require.Equal(t, jsonWebKey2020Ctx, didDoc.Context()[2]) + require.Equal(t, ecdsaSecp256k1VerificationKey2019Ctx, didDoc.Context()[3]) + require.Equal(t, ed25519VerificationKey2018Ctx, didDoc.Context()[4]) + require.Equal(t, x25519KeyAgreementKey2019Ctx, didDoc.Context()[5]) + require.Equal(t, ed25519VerificationKey2020Ctx, didDoc.Context()[6]) + }) + + t.Run("success - override contexts for key type", func(t *testing.T) { + testKeyContexts := map[string]string{ + bls12381G2Key2020: "context-1", + jsonWebKey2020: "context-2", + ecdsaSecp256k1VerificationKey2019: "context-3", + ed25519VerificationKey2018: "context-4", + x25519KeyAgreementKey2019: "context-5", + ed25519VerificationKey2020: "context-6", + } + + d, err := document.FromBytes([]byte(allKeyTypes)) + require.NoError(t, err) + + trans := New(WithKeyContext(testKeyContexts)) + + internalDoc := &protocol.ResolutionModel{Doc: d} + + info := make(protocol.TransformationInfo) + info[document.IDProperty] = testID + info[document.PublishedProperty] = true + + result, err := trans.TransformDocument(internalDoc, info) + require.NoError(t, err) + require.NotEmpty(t, result) + + didDoc := result.Document + + require.Equal(t, 7, len(didDoc.Context())) + require.Equal(t, didContext, didDoc.Context()[0]) + require.Equal(t, "context-1", didDoc.Context()[1]) + require.Equal(t, "context-2", didDoc.Context()[2]) + require.Equal(t, "context-3", didDoc.Context()[3]) + require.Equal(t, "context-4", didDoc.Context()[4]) + require.Equal(t, "context-5", didDoc.Context()[5]) + require.Equal(t, "context-6", didDoc.Context()[6]) + }) + + t.Run("success - include operations (published/unpublished)", func(t *testing.T) { + trans := New( + WithIncludePublishedOperations(true), + WithIncludeUnpublishedOperations(true)) + + info := make(protocol.TransformationInfo) + info[document.IDProperty] = testID + info[document.PublishedProperty] = true + + publishedOps := []*operation.AnchoredOperation{ + {Type: "create", UniqueSuffix: "suffix", CanonicalReference: "ref1"}, + {Type: "update", UniqueSuffix: "suffix", CanonicalReference: "ref2"}, + } + + unpublishedOps := []*operation.AnchoredOperation{ + {Type: "update", UniqueSuffix: "suffix"}, + } + + rm := &protocol.ResolutionModel{ + Doc: doc, + RecoveryCommitment: "recovery", + UpdateCommitment: "update", + PublishedOperations: publishedOps, + UnpublishedOperations: unpublishedOps, + } + + result, err := trans.TransformDocument(rm, info) + require.NoError(t, err) + require.NotNil(t, result) + require.Equal(t, testID, result.Document[document.IDProperty]) + + methodMetadataEntry, ok := result.DocumentMetadata[document.MethodProperty] + require.True(t, ok) + methodMetadata, ok := methodMetadataEntry.(document.Metadata) + require.True(t, ok) + + require.Equal(t, true, methodMetadata[document.PublishedProperty]) + require.Equal(t, "recovery", methodMetadata[document.RecoveryCommitmentProperty]) + require.Equal(t, "update", methodMetadata[document.UpdateCommitmentProperty]) + + require.Equal(t, 2, len(methodMetadata[document.PublishedOperationsProperty].([]*metadata.PublishedOperation))) + require.Equal(t, 1, len(methodMetadata[document.UnpublishedOperationsProperty].([]*metadata.UnpublishedOperation))) + }) + + t.Run("error - internal 
document is missing", func(t *testing.T) { + info := make(protocol.TransformationInfo) + info[document.IDProperty] = testID + info[document.PublishedProperty] = true + + result, err := transformer.TransformDocument(nil, info) + require.Error(t, err) + require.Nil(t, result) + require.Contains(t, err.Error(), "resolution model is required for creating document metadata") + }) + + t.Run("error - transformation info is missing", func(t *testing.T) { + result, err := transformer.TransformDocument(internal, nil) + require.Error(t, err) + require.Nil(t, result) + require.Contains(t, err.Error(), "transformation info is required for creating document metadata") + }) + + t.Run("error - transformation info is missing id", func(t *testing.T) { + info := make(protocol.TransformationInfo) + info[document.PublishedProperty] = true + + result, err := transformer.TransformDocument(internal, info) + require.Error(t, err) + require.Nil(t, result) + require.Contains(t, err.Error(), "id is required for document transformation") + }) + + t.Run("error - missing context for key type", func(t *testing.T) { + doc, err := document.FromBytes([]byte(noContextForKeyType)) + require.NoError(t, err) + + transformer := New() + + internal := &protocol.ResolutionModel{Doc: doc} + + info := make(protocol.TransformationInfo) + info[document.IDProperty] = testID + info[document.PublishedProperty] = true + + result, err := transformer.TransformDocument(internal, info) + require.Error(t, err) + require.Nil(t, result) + require.Contains(t, err.Error(), "key context not found for key type: InvalidType") + }) +} + +func TestWithMethodContext(t *testing.T) { + doc := make(document.Document) + + transformer := New(WithMethodContext([]string{"ctx-1", "ctx-2"})) + + internal := &protocol.ResolutionModel{Doc: doc} + + info := make(protocol.TransformationInfo) + info[document.IDProperty] = testID + info[document.PublishedProperty] = true + + result, err := transformer.TransformDocument(internal, info) + require.NoError(t, err) + + jsonTransformed, err := json.Marshal(result.Document) + require.NoError(t, err) + + didDoc, err := document.DidDocumentFromBytes(jsonTransformed) + require.NoError(t, err) + require.Equal(t, 3, len(didDoc.Context())) + require.Equal(t, "ctx-1", didDoc.Context()[1]) + require.Equal(t, "ctx-2", didDoc.Context()[2]) +} + +func TestWithBase(t *testing.T) { + r := reader(t, "testdata/doc.json") + docBytes, err := io.ReadAll(r) + require.NoError(t, err) + doc, err := document.FromBytes(docBytes) + require.NoError(t, err) + + transformer := New(WithBase(true)) + + internal := &protocol.ResolutionModel{Doc: doc} + + info := make(protocol.TransformationInfo) + info[document.IDProperty] = testID + info[document.PublishedProperty] = true + + result, err := transformer.TransformDocument(internal, info) + require.NoError(t, err) + + jsonTransformed, err := json.Marshal(result.Document) + require.NoError(t, err) + + didDoc, err := document.DidDocumentFromBytes(jsonTransformed) + require.NoError(t, err) + + // test document has 7 keys defined, two distinct key types: EcdsaSecp256k1VerificationKey2019, JsonWebKey2020 + // two distinct key contexts + did context + @base context + require.Equal(t, 4, len(didDoc.Context())) + + // second context is @base + baseMap := didDoc.Context()[1].(map[string]interface{}) + require.Equal(t, testID, baseMap["@base"]) + + // validate service id doesn't contain document id + service := didDoc.Services()[0] + require.NotContains(t, service.ID(), testID) + + // validate public key id doesn't contain document id 
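// For illustration (ids are hypothetical): with WithBase(true) the transformer injects a
// context entry of the form {"@base": "doc:abc:123"} and emits relative ids, which a
// JSON-LD processor resolves against the document id. The resolved document then looks
// roughly like:
//
//	{
//	  "@context": ["https://www.w3.org/ns/did/v1", ..., {"@base": "doc:abc:123"}],
//	  "service": [{"id": "#hub", ...}],
//	  "verificationMethod": [{"id": "#master", ...}]
//	}
//
// which is what the relative-id assertions below verify: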
+ pk := didDoc.VerificationMethods()[0] + require.NotContains(t, pk.ID(), testID) +} + +func TestEd25519VerificationKey2018(t *testing.T) { + publicKey, _, err := ed25519.GenerateKey(rand.Reader) + require.NoError(t, err) + + jwk, err := pubkey.GetPublicKeyJWK(publicKey) + require.NoError(t, err) + + publicKeyBytes, err := json.Marshal(jwk) + require.NoError(t, err) + + data := fmt.Sprintf(ed25519DocTemplate, string(publicKeyBytes)) + + doc, err := document.FromBytes([]byte(data)) + require.NoError(t, err) + + transformer := New() + + internal := &protocol.ResolutionModel{Doc: doc} + + info := make(protocol.TransformationInfo) + info[document.IDProperty] = testID + info[document.PublishedProperty] = true + + result, err := transformer.TransformDocument(internal, info) + require.NoError(t, err) + + jsonTransformed, err := json.Marshal(result.Document) + require.NoError(t, err) + + didDoc, err := document.DidDocumentFromBytes(jsonTransformed) + require.NoError(t, err) + require.Equal(t, didDoc.VerificationMethods()[0].Controller(), didDoc.ID()) + require.Equal(t, didContext, didDoc.Context()[0]) + + // validate service + service := didDoc.Services()[0] + require.Contains(t, service.ID(), testID) + require.NotEmpty(t, service.ServiceEndpoint()) + require.Equal(t, "OpenIdConnectVersion1.0Service", service.Type()) + + // validate public key + pk := didDoc.VerificationMethods()[0] + require.Contains(t, pk.ID(), testID) + require.Equal(t, "Ed25519VerificationKey2018", pk.Type()) + require.Empty(t, pk.PublicKeyJwk()) + + // test base58 encoding + require.Equal(t, base58.Encode(publicKey), pk.PublicKeyBase58()) + + // validate length of expected keys + expectedPublicKeys := []string{"assertion"} + require.Equal(t, len(expectedPublicKeys), len(didDoc.VerificationMethods())) + + expectedAssertionMethodKeys := []string{"assertion"} + require.Equal(t, len(expectedAssertionMethodKeys), len(didDoc.AssertionMethods())) + + require.Equal(t, 0, len(didDoc.Authentications())) + require.Equal(t, 0, len(didDoc.AgreementKeys())) +} + +func TestEd25519VerificationKey2020(t *testing.T) { + publicKey, _, err := ed25519.GenerateKey(rand.Reader) + require.NoError(t, err) + + jwk, err := pubkey.GetPublicKeyJWK(publicKey) + require.NoError(t, err) + + publicKeyBytes, err := json.Marshal(jwk) + require.NoError(t, err) + + data := fmt.Sprintf(ed25519VerificationKey2020DocTemplate, string(publicKeyBytes)) + + doc, err := document.FromBytes([]byte(data)) + require.NoError(t, err) + + transformer := New() + + internal := &protocol.ResolutionModel{Doc: doc} + + info := make(protocol.TransformationInfo) + info[document.IDProperty] = testID + info[document.PublishedProperty] = true + + result, err := transformer.TransformDocument(internal, info) + require.NoError(t, err) + + jsonTransformed, err := json.Marshal(result.Document) + require.NoError(t, err) + + didDoc, err := document.DidDocumentFromBytes(jsonTransformed) + require.NoError(t, err) + require.Equal(t, didDoc.VerificationMethods()[0].Controller(), didDoc.ID()) + require.Equal(t, didContext, didDoc.Context()[0]) + + // validate service + service := didDoc.Services()[0] + require.Contains(t, service.ID(), testID) + require.NotEmpty(t, service.ServiceEndpoint()) + require.Equal(t, "OpenIdConnectVersion1.0Service", service.Type()) + + // validate public key + pk := didDoc.VerificationMethods()[0] + require.Contains(t, pk.ID(), testID) + require.Equal(t, "Ed25519VerificationKey2020", pk.Type()) + require.Empty(t, pk.PublicKeyJwk()) + + // test multibase encoding + 
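// (For Ed25519VerificationKey2018 the transformer emits the raw key as plain base58btc,
// as checked in the previous test; for Ed25519VerificationKey2020 it emits a multibase
// string, i.e. the same base58btc text with a leading 'z' multibase prefix. The
// round-trip below recomputes that value with multibase.Encode and compares:)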
multibaseEncode, err := multibase.Encode(multibase.Base58BTC, publicKey) + require.NoError(t, err) + + require.Equal(t, multibaseEncode, pk.PublicKeyMultibase()) + + // validate length of expected keys + expectedPublicKeys := []string{"assertion"} + require.Equal(t, len(expectedPublicKeys), len(didDoc.VerificationMethods())) + + expectedAssertionMethodKeys := []string{"assertion"} + require.Equal(t, len(expectedAssertionMethodKeys), len(didDoc.AssertionMethods())) + + require.Equal(t, 0, len(didDoc.Authentications())) + require.Equal(t, 0, len(didDoc.AgreementKeys())) +} + +func TestEd25519VerificationKey2018_Error(t *testing.T) { + doc, err := document.FromBytes([]byte(ed25519Invalid)) + require.NoError(t, err) + + transformer := New() + + internal := &protocol.ResolutionModel{Doc: doc} + + info := make(protocol.TransformationInfo) + info[document.IDProperty] = testID + info[document.PublishedProperty] = true + + result, err := transformer.TransformDocument(internal, info) + require.Error(t, err) + require.Nil(t, result) + require.Contains(t, err.Error(), "unknown curve") +} + +func TestEd25519VerificationKey2020_Error(t *testing.T) { + doc, err := document.FromBytes([]byte(ed25519VerificationKey2020DocInvalid)) + require.NoError(t, err) + + transformer := New() + + internal := &protocol.ResolutionModel{Doc: doc} + + info := make(protocol.TransformationInfo) + info[document.IDProperty] = testID + info[document.PublishedProperty] = true + + result, err := transformer.TransformDocument(internal, info) + require.Error(t, err) + require.Nil(t, result) + require.Contains(t, err.Error(), "unknown curve") +} + +func TestPublicKeyBase58(t *testing.T) { + pkB58 := "36d8RkFy2SdabnGzcZ3LcCSDA8NP5T4bsoADwuXtoN3B" + + doc, err := document.FromBytes([]byte(fmt.Sprintf(publicKeyBase58Template, pkB58))) + require.NoError(t, err) + + transformer := New() + + internal := &protocol.ResolutionModel{Doc: doc} + + info := make(protocol.TransformationInfo) + info[document.IDProperty] = testID + info[document.PublishedProperty] = true + + result, err := transformer.TransformDocument(internal, info) + require.NoError(t, err) + + jsonTransformed, err := json.Marshal(result.Document) + require.NoError(t, err) + + didDoc, err := document.DidDocumentFromBytes(jsonTransformed) + require.NoError(t, err) + require.Equal(t, didDoc.VerificationMethods()[0].Controller(), didDoc.ID()) + require.Equal(t, didContext, didDoc.Context()[0]) + + pk := didDoc.VerificationMethods()[0] + require.Contains(t, pk.ID(), testID) + require.Equal(t, "Ed25519VerificationKey2018", pk.Type()) + require.Empty(t, pk.PublicKeyJwk()) + + require.Equal(t, pkB58, pk.PublicKeyBase58()) +} + +func TestPublicKeyMultibase(t *testing.T) { + pkMultibase := "z6Mkf5rGMoatrSj1f4CyvuHBeXJELe9RPdzo2PKGNCKVtZxP" + + doc, err := document.FromBytes([]byte(fmt.Sprintf(publicKeyMultibaseTemplate, pkMultibase))) + require.NoError(t, err) + + transformer := New() + + internal := &protocol.ResolutionModel{Doc: doc} + + info := make(protocol.TransformationInfo) + info[document.IDProperty] = testID + info[document.PublishedProperty] = true + + result, err := transformer.TransformDocument(internal, info) + require.NoError(t, err) + + jsonTransformed, err := json.Marshal(result.Document) + require.NoError(t, err) + + didDoc, err := document.DidDocumentFromBytes(jsonTransformed) + require.NoError(t, err) + require.Equal(t, didDoc.VerificationMethods()[0].Controller(), didDoc.ID()) + require.Equal(t, didContext, didDoc.Context()[0]) + + pk := 
didDoc.VerificationMethods()[0] + require.Contains(t, pk.ID(), testID) + require.Equal(t, "Ed25519VerificationKey2020", pk.Type()) + require.Empty(t, pk.PublicKeyJwk()) + + require.Equal(t, pkMultibase, pk.PublicKeyMultibase()) +} + +func reader(t *testing.T, filename string) io.Reader { + f, err := os.Open(filename) + require.NoError(t, err) + + return f +} + +const ed25519DocTemplate = `{ + "publicKey": [ + { + "id": "assertion", + "type": "Ed25519VerificationKey2018", + "purposes": ["assertionMethod"], + "publicKeyJwk": %s + } + ], + "service": [ + { + "id": "oidc", + "type": "OpenIdConnectVersion1.0Service", + "serviceEndpoint": "https://openid.example.com/" + } + ] +}` + +const ed25519VerificationKey2020DocTemplate = `{ + "publicKey": [ + { + "id": "assertion", + "type": "Ed25519VerificationKey2020", + "purposes": ["assertionMethod"], + "publicKeyJwk": %s + } + ], + "service": [ + { + "id": "oidc", + "type": "OpenIdConnectVersion1.0Service", + "serviceEndpoint": "https://openid.example.com/" + } + ] +}` + +const publicKeyBase58Template = `{ + "publicKey": [ + { + "id": "assertion", + "type": "Ed25519VerificationKey2018", + "purposes": ["assertionMethod"], + "publicKeyBase58": "%s" + } + ], + "service": [ + { + "id": "oidc", + "type": "OpenIdConnectVersion1.0Service", + "serviceEndpoint": "https://openid.example.com/" + } + ] +}` + +const publicKeyMultibaseTemplate = `{ + "publicKey": [ + { + "id": "assertion", + "type": "Ed25519VerificationKey2020", + "purposes": ["assertionMethod"], + "publicKeyMultibase": "%s" + } + ], + "service": [ + { + "id": "oidc", + "type": "OpenIdConnectVersion1.0Service", + "serviceEndpoint": "https://openid.example.com/" + } + ] +}` + +const ed25519Invalid = `{ + "publicKey": [ + { + "id": "assertion", + "type": "Ed25519VerificationKey2018", + "purposes": ["assertionMethod"], + "publicKeyJwk": { + "kty": "OKP", + "crv": "curve", + "x": "PUymIqdtF_qxaAqPABSw-C-owT1KYYQbsMKFM-L9fJA", + "y": "nM84jDHCMOTGTh_ZdHq4dBBdo4Z5PkEOW9jA8z8IsGc" + } + } + ] +}` + +const ed25519VerificationKey2020DocInvalid = `{ + "publicKey": [ + { + "id": "assertion", + "type": "Ed25519VerificationKey2020", + "purposes": ["assertionMethod"], + "publicKeyJwk": { + "kty": "OKP", + "crv": "curve", + "x": "PUymIqdtF_qxaAqPABSw-C-owT1KYYQbsMKFM-L9fJA", + "y": "nM84jDHCMOTGTh_ZdHq4dBBdo4Z5PkEOW9jA8z8IsGc" + } + } + ] +}` + +const noContextForKeyType = `{ + "publicKey": [ + { + "id": "assertion", + "type": "InvalidType", + "purposes": ["assertionMethod"], + "publicKeyJwk": { + "kty": "OKP", + "crv": "curve", + "x": "PUymIqdtF_qxaAqPABSw-C-owT1KYYQbsMKFM-L9fJA", + "y": "nM84jDHCMOTGTh_ZdHq4dBBdo4Z5PkEOW9jA8z8IsGc" + } + } + ] +}` + +const allKeyTypes = `{ + "publicKey": [ + { + "id": "key-1", + "type": "Bls12381G2Key2020", + "purposes": ["keyAgreement"], + "publicKeyJwk": { + "kty": "OKP", + "crv": "P-256", + "x": "PUymIqdtF_qxaAqPABSw-C-owT1KYYQbsMKFM-L9fJA", + "y": "nM84jDHCMOTGTh_ZdHq4dBBdo4Z5PkEOW9jA8z8IsGc" + } + }, + { + "id": "key-2", + "type": "JsonWebKey2020", + "purposes": ["authentication"], + "publicKeyJwk": { + "kty": "EC", + "crv": "P-256", + "x": "PUymIqdtF_qxaAqPABSw-C-owT1KYYQbsMKFM-L9fJA", + "y": "nM84jDHCMOTGTh_ZdHq4dBBdo4Z5PkEOW9jA8z8IsGc" + } + }, + { + "id": "key-3", + "type": "EcdsaSecp256k1VerificationKey2019", + "purposes": ["assertionMethod"], + "publicKeyJwk": { + "kty": "EC", + "crv": "P-256K", + "x": "PUymIqdtF_qxaAqPABSw-C-owT1KYYQbsMKFM-L9fJA", + "y": "nM84jDHCMOTGTh_ZdHq4dBBdo4Z5PkEOW9jA8z8IsGc" + } + }, + { + "id": "key-4", + "type": "Ed25519VerificationKey2018", + "purposes": ["assertionMethod"], + "publicKeyJwk": { + "kty":"OKP", + "crv":"Ed25519", + "x":"K24aib_Py_D2ST8F_IiIA2SJo1EiseS0hbaa36tVSAU" + } + }, + { + "id": "key-5", + "type": "X25519KeyAgreementKey2019", + "purposes": ["keyAgreement"], + "publicKeyJwk": { + "kty": "EC", + "crv": "P-256", + "x": "PUymIqdtF_qxaAqPABSw-C-owT1KYYQbsMKFM-L9fJA", + "y": "nM84jDHCMOTGTh_ZdHq4dBBdo4Z5PkEOW9jA8z8IsGc" + } + }, + { + "id": "key-6", + "type": "Ed25519VerificationKey2020", + "purposes": ["assertionMethod"], + "publicKeyJwk": { + "kty":"OKP", + "crv":"Ed25519", + "x":"K24aib_Py_D2ST8F_IiIA2SJo1EiseS0hbaa36tVSAU" + } + } + ] +}` diff --git a/pkg/versions/1_0/doctransformer/doctransformer/transformer.go b/pkg/versions/1_0/doctransformer/doctransformer/transformer.go new file mode 100644 index 0000000..fa6fde6 --- /dev/null +++ b/pkg/versions/1_0/doctransformer/doctransformer/transformer.go @@ -0,0 +1,77 @@ +/* +Copyright Gen Digital Inc. All Rights Reserved. + +SPDX-License-Identifier: Apache-2.0 +*/ + +package doctransformer + +import ( + "errors" + + "github.com/trustbloc/sidetree-go/pkg/api/protocol" + "github.com/trustbloc/sidetree-go/pkg/document" + "github.com/trustbloc/sidetree-go/pkg/versions/1_0/doctransformer/metadata" +) + +// Transformer is responsible for transforming the internal document to its external representation. +type Transformer struct { + includePublishedOperations bool + includeUnpublishedOperations bool +} + +// Option is a transformer instance option. +type Option func(opts *Transformer) + +// New creates a new document transformer. +func New(opts ...Option) *Transformer { + transformer := &Transformer{} + + // apply options + for _, opt := range opts { + opt(transformer) + } + + return transformer +} + +// WithIncludePublishedOperations sets optional include published operations flag. +func WithIncludePublishedOperations(enabled bool) Option { + return func(opts *Transformer) { + opts.includePublishedOperations = enabled + } +} + +// WithIncludeUnpublishedOperations sets optional include unpublished operations flag. +func WithIncludeUnpublishedOperations(enabled bool) Option { + return func(opts *Transformer) { + opts.includeUnpublishedOperations = enabled + } +} + +// TransformDocument takes an internal resolution model and transformation info and creates +// an external representation of the document (resolution result). +func (v *Transformer) TransformDocument(rm *protocol.ResolutionModel, + info protocol.TransformationInfo) (*document.ResolutionResult, error) { + docMetadata, err := metadata.New( + metadata.WithIncludeUnpublishedOperations(v.includeUnpublishedOperations), + metadata.WithIncludePublishedOperations(v.includePublishedOperations)). + CreateDocumentMetadata(rm, info) + if err != nil { + return nil, err + } + + id, ok := info[document.IDProperty] + if !ok { + return nil, errors.New("id is required for document transformation") + } + + rm.Doc[document.IDProperty] = id + + result := &document.ResolutionResult{ + Document: rm.Doc, + DocumentMetadata: docMetadata, + } + + return result, nil +}
diff --git a/pkg/versions/1_0/doctransformer/doctransformer/transformer_test.go b/pkg/versions/1_0/doctransformer/doctransformer/transformer_test.go new file mode 100644 index 0000000..0d2712f --- /dev/null +++ b/pkg/versions/1_0/doctransformer/doctransformer/transformer_test.go @@ -0,0 +1,158 @@ +/* +Copyright Gen Digital Inc. All Rights Reserved. + +SPDX-License-Identifier: Apache-2.0 +*/ + +package doctransformer + +import ( + "testing" + + "github.com/stretchr/testify/require" + + "github.com/trustbloc/sidetree-go/pkg/api/operation" + "github.com/trustbloc/sidetree-go/pkg/api/protocol" + "github.com/trustbloc/sidetree-go/pkg/document" + "github.com/trustbloc/sidetree-go/pkg/versions/1_0/doctransformer/metadata" +) + +const testID = "doc:abc:123" + +func TestNewTransformer(t *testing.T) { + transformer := New() + require.NotNil(t, transformer) + require.Equal(t, false, transformer.includePublishedOperations) + require.Equal(t, false, transformer.includeUnpublishedOperations) + + transformer = New(WithIncludeUnpublishedOperations(true), WithIncludePublishedOperations(true)) + require.NotNil(t, transformer) + require.Equal(t, true, transformer.includePublishedOperations) + require.Equal(t, true, transformer.includeUnpublishedOperations) +} + +func TestTransformDocument(t *testing.T) { + doc, err := document.FromBytes(validDoc) + require.NoError(t, err) + + transformer := New() + + internal := &protocol.ResolutionModel{Doc: doc, RecoveryCommitment: "recovery", UpdateCommitment: "update"} + + t.Run("success", func(t *testing.T) { + info := make(protocol.TransformationInfo) + info[document.IDProperty] = "did:abc:123" + info[document.PublishedProperty] = true + + result, err := transformer.TransformDocument(internal, info) + require.NoError(t, err) + require.Equal(t, "did:abc:123", result.Document[document.IDProperty]) + + methodMetadataEntry, ok := result.DocumentMetadata[document.MethodProperty] + require.True(t, ok) + methodMetadata, ok := methodMetadataEntry.(document.Metadata) + require.True(t, ok) + + require.Equal(t, true, methodMetadata[document.PublishedProperty]) + require.Equal(t, "recovery", methodMetadata[document.RecoveryCommitmentProperty]) + require.Equal(t, "update", methodMetadata[document.UpdateCommitmentProperty]) + }) + + t.Run("success - with canonical, equivalent ID", func(t *testing.T) { + info := make(protocol.TransformationInfo) + info[document.IDProperty] = "did:abc:123" + info[document.PublishedProperty] = true + info[document.CanonicalIDProperty] = "canonical" + info[document.EquivalentIDProperty] = []string{"equivalent"} + + result, err := transformer.TransformDocument(internal, info) + require.NoError(t, err) + require.Equal(t, "did:abc:123", result.Document[document.IDProperty]) + + methodMetadataEntry, ok := result.DocumentMetadata[document.MethodProperty] + require.True(t, ok) + methodMetadata, ok := methodMetadataEntry.(document.Metadata) + require.True(t, ok) + + require.Equal(t, true, methodMetadata[document.PublishedProperty]) + require.Equal(t, "recovery", methodMetadata[document.RecoveryCommitmentProperty]) + require.Equal(t, "update", methodMetadata[document.UpdateCommitmentProperty]) + + require.Equal(t, "canonical", result.DocumentMetadata[document.CanonicalIDProperty]) + require.NotEmpty(t, result.DocumentMetadata[document.EquivalentIDProperty]) + }) + + t.Run("success - include operations (published/unpublished)", func(t *testing.T) { + trans := New( + WithIncludePublishedOperations(true), + WithIncludeUnpublishedOperations(true)) + + info := make(protocol.TransformationInfo) + info[document.IDProperty] = testID + info[document.PublishedProperty] = true + + publishedOps := []*operation.AnchoredOperation{ + {Type: "create", UniqueSuffix: "suffix", CanonicalReference: "ref1"}, + {Type: "update", UniqueSuffix: "suffix", CanonicalReference: "ref2"}, + } + + unpublishedOps := 
[]*operation.AnchoredOperation{ + {Type: "update", UniqueSuffix: "suffix"}, + } + + rm := &protocol.ResolutionModel{ + Doc: doc, + RecoveryCommitment: "recovery", + UpdateCommitment: "update", + PublishedOperations: publishedOps, + UnpublishedOperations: unpublishedOps, + } + + result, err := trans.TransformDocument(rm, info) + require.NoError(t, err) + require.NotNil(t, result) + require.Equal(t, testID, result.Document[document.IDProperty]) + + methodMetadataEntry, ok := result.DocumentMetadata[document.MethodProperty] + require.True(t, ok) + methodMetadata, ok := methodMetadataEntry.(document.Metadata) + require.True(t, ok) + + require.Equal(t, true, methodMetadata[document.PublishedProperty]) + require.Equal(t, "recovery", methodMetadata[document.RecoveryCommitmentProperty]) + require.Equal(t, "update", methodMetadata[document.UpdateCommitmentProperty]) + + require.Equal(t, 2, len(methodMetadata[document.PublishedOperationsProperty].([]*metadata.PublishedOperation))) + require.Equal(t, 1, len(methodMetadata[document.UnpublishedOperationsProperty].([]*metadata.UnpublishedOperation))) + }) + + t.Run("error - internal document is missing", func(t *testing.T) { + info := make(protocol.TransformationInfo) + info[document.IDProperty] = "doc:abc:xyz" + info[document.PublishedProperty] = true + + result, err := transformer.TransformDocument(nil, info) + require.Error(t, err) + require.Nil(t, result) + require.Contains(t, err.Error(), "resolution model is required for creating document metadata") + }) + + t.Run("error - transformation info is missing", func(t *testing.T) { + result, err := transformer.TransformDocument(internal, nil) + require.Error(t, err) + require.Nil(t, result) + require.Contains(t, err.Error(), "transformation info is required for creating document metadata") + }) + + t.Run("error - transformation info is missing id", func(t *testing.T) { + info := make(protocol.TransformationInfo) + info[document.PublishedProperty] = true + + result, err := transformer.TransformDocument(internal, info) + require.Error(t, err) + require.Nil(t, result) + require.Contains(t, err.Error(), "id is required for document transformation") + }) +} + +var validDoc = []byte(`{ "name": "John Smith" }`) diff --git a/pkg/versions/1_0/doctransformer/metadata/metadata.go b/pkg/versions/1_0/doctransformer/metadata/metadata.go new file mode 100644 index 0000000..de119d2 --- /dev/null +++ b/pkg/versions/1_0/doctransformer/metadata/metadata.go @@ -0,0 +1,229 @@ +/* +Copyright Gen Digital Inc. All Rights Reserved. + +SPDX-License-Identifier: Apache-2.0 +*/ + +package metadata + +import ( + "errors" + "sort" + "time" + + "github.com/trustbloc/sidetree-go/pkg/api/operation" + "github.com/trustbloc/sidetree-go/pkg/api/protocol" + "github.com/trustbloc/sidetree-go/pkg/document" +) + +// Metadata is responsible for creating document metadata. +type Metadata struct { + includePublishedOperations bool + includeUnpublishedOperations bool +} + +// Option is a metadata instance option. +type Option func(opts *Metadata) + +// New creates a new metadata transformer. +func New(opts ...Option) *Metadata { + md := &Metadata{} + + // apply options + for _, opt := range opts { + opt(md) + } + + return md +} + +// WithIncludePublishedOperations sets optional include published operations flag. +func WithIncludePublishedOperations(enabled bool) Option { + return func(opts *Metadata) { + opts.includePublishedOperations = enabled + } +} + +// WithIncludeUnpublishedOperations sets optional include unpublished operations flag. 
+func WithIncludeUnpublishedOperations(enabled bool) Option { + return func(opts *Metadata) { + opts.includeUnpublishedOperations = enabled + } +} + +// CreateDocumentMetadata will create document metadata. +// +//nolint:gocyclo +func (t *Metadata) CreateDocumentMetadata(rm *protocol.ResolutionModel, info protocol.TransformationInfo) (document.Metadata, error) { + if rm == nil || rm.Doc == nil { + return nil, errors.New("resolution model is required for creating document metadata") + } + + if info == nil { + return nil, errors.New("transformation info is required for creating document metadata") + } + + published, ok := info[document.PublishedProperty] + if !ok { + return nil, errors.New("published is required for creating document metadata") + } + + methodMetadata := make(document.Metadata) + methodMetadata[document.PublishedProperty] = published + + if rm.RecoveryCommitment != "" { + methodMetadata[document.RecoveryCommitmentProperty] = rm.RecoveryCommitment + } + + if rm.UpdateCommitment != "" { + methodMetadata[document.UpdateCommitmentProperty] = rm.UpdateCommitment + } + + if rm.AnchorOrigin != nil { + methodMetadata[document.AnchorOriginProperty] = rm.AnchorOrigin + } + + if t.includeUnpublishedOperations && len(rm.UnpublishedOperations) > 0 { + methodMetadata[document.UnpublishedOperationsProperty] = getUnpublishedOperations(rm.UnpublishedOperations) + } + + if t.includePublishedOperations && len(rm.PublishedOperations) > 0 { + methodMetadata[document.PublishedOperationsProperty] = getPublishedOperations(rm.PublishedOperations) + } + + docMetadata := make(document.Metadata) + docMetadata[document.MethodProperty] = methodMetadata + + if rm.Deactivated { + docMetadata[document.DeactivatedProperty] = rm.Deactivated + } + + canonicalID, ok := info[document.CanonicalIDProperty] + if ok { + docMetadata[document.CanonicalIDProperty] = canonicalID + } + + equivalentID, ok := info[document.EquivalentIDProperty] + if ok { + docMetadata[document.EquivalentIDProperty] = equivalentID + } + + if published.(bool) { + docMetadata[document.CreatedProperty] = time.Unix(int64(rm.CreatedTime), 0).UTC().Format(time.RFC3339) + } + + if rm.VersionID != "" { + docMetadata[document.VersionIDProperty] = rm.VersionID + if rm.UpdatedTime > 0 { + docMetadata[document.UpdatedProperty] = time.Unix(int64(rm.UpdatedTime), 0).UTC().Format(time.RFC3339) + } + } + + return docMetadata, nil +} + +func sortOperations(ops []*operation.AnchoredOperation) { + sort.Slice(ops, func(i, j int) bool { + if ops[i].TransactionTime != ops[j].TransactionTime { + return ops[i].TransactionTime < ops[j].TransactionTime + } + + return ops[i].TransactionNumber < ops[j].TransactionNumber + }) +} + +// sort published operations by transaction (anchoring) time and then remove duplicates. +func getPublishedOperations(ops []*operation.AnchoredOperation) []*PublishedOperation { + sortOperations(ops) + + uniqueOps := make(map[string]bool) + + var publishedOps []*PublishedOperation + + for _, op := range ops { + _, ok := uniqueOps[op.CanonicalReference] + if !ok { + publishedOps = append(publishedOps, + &PublishedOperation{ + Type: op.Type, + OperationRequest: op.OperationRequest, + TransactionTime: op.TransactionTime, + TransactionNumber: op.TransactionNumber, + ProtocolVersion: op.ProtocolVersion, + CanonicalReference: op.CanonicalReference, + EquivalentReferences: op.EquivalentReferences, + AnchorOrigin: op.AnchorOrigin, + }) + + uniqueOps[op.CanonicalReference] = true + } + } + + return publishedOps +}
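// For illustration, using the fixture from the metadata tests below: given published
// operations (type, canonicalReference, transactionTime) of (create, ref1, 1),
// (update, ref3, 3), (update, ref2, 2) and (update, ref2, 2), sortOperations orders
// them as ref1, ref2, ref2, ref3, and the uniqueOps map then drops the duplicate ref2,
// so getPublishedOperations returns three entries: ref1, ref2, ref3.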
+// sort unpublished operations by request time. +func getUnpublishedOperations(ops []*operation.AnchoredOperation) []*UnpublishedOperation { + sortOperations(ops) + + unpublishedOps := make([]*UnpublishedOperation, len(ops)) + + for i, op := range ops { + unpublishedOps[i] = &UnpublishedOperation{ + Type: op.Type, + OperationRequest: op.OperationRequest, + TransactionTime: op.TransactionTime, + ProtocolVersion: op.ProtocolVersion, + AnchorOrigin: op.AnchorOrigin, + } + } + + return unpublishedOps +} + +// PublishedOperation defines a published operation for metadata. It is a subset of an anchored operation. +type PublishedOperation struct { + + // Type defines operation type. + Type operation.Type `json:"type"` + + // OperationRequest is the original operation request. + OperationRequest []byte `json:"operation"` + + // TransactionTime is the logical anchoring time. + TransactionTime uint64 `json:"transactionTime"` + + // TransactionNumber is the transaction number of the transaction this operation was batched within. + TransactionNumber uint64 `json:"transactionNumber"` + + // ProtocolVersion is the genesis time of the protocol that was used for this operation. + ProtocolVersion uint64 `json:"protocolVersion"` + + // CanonicalReference contains the canonical reference that applies to this operation. + CanonicalReference string `json:"canonicalReference,omitempty"` + + // EquivalentReferences contains the equivalent references that apply to this operation. + EquivalentReferences []string `json:"equivalentReferences,omitempty"` + + // AnchorOrigin is the anchor origin. + AnchorOrigin interface{} `json:"anchorOrigin,omitempty"` +} + +// UnpublishedOperation defines an unpublished operation for metadata. It is a subset of an anchored operation. +type UnpublishedOperation struct { + + // Type defines operation type. + Type operation.Type `json:"type"` + + // OperationRequest is the original operation request. + OperationRequest []byte `json:"operation"` + + // TransactionTime is the logical anchoring time. + TransactionTime uint64 `json:"transactionTime"` + + // ProtocolVersion is the genesis time of the protocol that was used for this operation. + ProtocolVersion uint64 `json:"protocolVersion"` + + // AnchorOrigin is the anchor origin. + AnchorOrigin interface{} `json:"anchorOrigin,omitempty"` +} diff --git a/pkg/versions/1_0/doctransformer/metadata/metadata_test.go b/pkg/versions/1_0/doctransformer/metadata/metadata_test.go new file mode 100644 index 0000000..f966659 --- /dev/null +++ b/pkg/versions/1_0/doctransformer/metadata/metadata_test.go @@ -0,0 +1,219 @@ +/* +Copyright Gen Digital Inc. All Rights Reserved.
+ +SPDX-License-Identifier: Apache-2.0 +*/ + +package metadata + +import ( + "testing" + "time" + + "github.com/stretchr/testify/require" + + "github.com/trustbloc/sidetree-go/pkg/api/operation" + "github.com/trustbloc/sidetree-go/pkg/api/protocol" + "github.com/trustbloc/sidetree-go/pkg/document" +) + +const ( + testDID = "did:abc:123" + canonicalID = "canonical" +) + +func TestPopulateDocumentMetadata(t *testing.T) { + doc, err := document.FromBytes(validDoc) + require.NoError(t, err) + + createdTimeStr := "2020-12-20T19:17:47Z" + updatedTimeStr := "2022-12-20T19:17:47Z" + + createdTime, err := time.Parse(time.RFC3339, createdTimeStr) + require.NoError(t, err) + + updatedTime, err := time.Parse(time.RFC3339, updatedTimeStr) + require.NoError(t, err) + + internal := &protocol.ResolutionModel{ + Doc: doc, + RecoveryCommitment: "recovery", + UpdateCommitment: "update", + AnchorOrigin: "origin.com", + VersionID: "version", + CreatedTime: uint64(createdTime.Unix()), + UpdatedTime: uint64(updatedTime.Unix()), + } + + t.Run("success - all info present", func(t *testing.T) { + info := make(protocol.TransformationInfo) + info[document.IDProperty] = testDID + info[document.PublishedProperty] = true + info[document.CanonicalIDProperty] = canonicalID + info[document.EquivalentIDProperty] = []string{"equivalent"} + info[document.AnchorOriginProperty] = "domain.com" + info[document.DeactivatedProperty] = true + + documentMetadata, err := New(WithIncludeUnpublishedOperations(true), + WithIncludePublishedOperations(true)).CreateDocumentMetadata(internal, info) + require.NoError(t, err) + + require.Empty(t, documentMetadata[document.DeactivatedProperty]) + require.Equal(t, canonicalID, documentMetadata[document.CanonicalIDProperty]) + require.NotEmpty(t, documentMetadata[document.EquivalentIDProperty]) + + require.Equal(t, createdTimeStr, documentMetadata[document.CreatedProperty]) + require.Equal(t, updatedTimeStr, documentMetadata[document.UpdatedProperty]) + + methodMetadataEntry, ok := documentMetadata[document.MethodProperty] + require.True(t, ok) + methodMetadata, ok := methodMetadataEntry.(document.Metadata) + require.True(t, ok) + + require.Equal(t, true, methodMetadata[document.PublishedProperty]) + require.Equal(t, "recovery", methodMetadata[document.RecoveryCommitmentProperty]) + require.Equal(t, "update", methodMetadata[document.UpdateCommitmentProperty]) + }) + + t.Run("success - include operations (published/unpublished)", func(t *testing.T) { + info := make(protocol.TransformationInfo) + info[document.IDProperty] = testDID + info[document.PublishedProperty] = true + info[document.CanonicalIDProperty] = canonicalID + info[document.EquivalentIDProperty] = []string{"equivalent"} + info[document.AnchorOriginProperty] = "domain.com" + + publishedOps := []*operation.AnchoredOperation{ + {Type: "create", UniqueSuffix: "suffix", CanonicalReference: "ref1", TransactionTime: 1}, + {Type: "update", UniqueSuffix: "suffix", CanonicalReference: "ref3", TransactionTime: 3}, + {Type: "update", UniqueSuffix: "suffix", CanonicalReference: "ref2", TransactionTime: 2}, + {Type: "update", UniqueSuffix: "suffix", CanonicalReference: "ref2", TransactionTime: 2}, + } + + unpublishedOps := []*operation.AnchoredOperation{ + {Type: "update", UniqueSuffix: "suffix", TransactionTime: 4}, + } + + rm := &protocol.ResolutionModel{ + Doc: doc, + RecoveryCommitment: "recovery", + UpdateCommitment: "update", + PublishedOperations: publishedOps, + UnpublishedOperations: unpublishedOps, + } + + documentMetadata, err := 
New(WithIncludeUnpublishedOperations(true), + WithIncludePublishedOperations(true)).CreateDocumentMetadata(rm, info) + require.NoError(t, err) + + require.Empty(t, documentMetadata[document.DeactivatedProperty]) + require.Equal(t, canonicalID, documentMetadata[document.CanonicalIDProperty]) + require.NotEmpty(t, documentMetadata[document.EquivalentIDProperty]) + + methodMetadataEntry, ok := documentMetadata[document.MethodProperty] + require.True(t, ok) + methodMetadata, ok := methodMetadataEntry.(document.Metadata) + require.True(t, ok) + + require.Equal(t, true, methodMetadata[document.PublishedProperty]) + require.Equal(t, "recovery", methodMetadata[document.RecoveryCommitmentProperty]) + require.Equal(t, "update", methodMetadata[document.UpdateCommitmentProperty]) + + require.Equal(t, 3, len(methodMetadata[document.PublishedOperationsProperty].([]*PublishedOperation))) + require.Equal(t, 1, len(methodMetadata[document.UnpublishedOperationsProperty].([]*UnpublishedOperation))) + }) + + t.Run("success - deactivated, commitments empty", func(t *testing.T) { + internal2 := &protocol.ResolutionModel{ + Doc: doc, + Deactivated: true, + CreatedTime: uint64(time.Now().Unix() - 60), + UpdatedTime: uint64(time.Now().Unix()), + VersionID: "version", + } + + info := make(protocol.TransformationInfo) + info[document.IDProperty] = testDID + info[document.PublishedProperty] = true + info[document.CanonicalIDProperty] = canonicalID + + documentMetadata, err := New().CreateDocumentMetadata(internal2, info) + require.NoError(t, err) + + require.Equal(t, true, documentMetadata[document.DeactivatedProperty]) + require.NotEmpty(t, documentMetadata[document.UpdatedProperty]) + require.NotEmpty(t, documentMetadata[document.CreatedProperty]) + require.Equal(t, canonicalID, documentMetadata[document.CanonicalIDProperty]) + require.Empty(t, documentMetadata[document.EquivalentIDProperty]) + + methodMetadataEntry, ok := documentMetadata[document.MethodProperty] + require.True(t, ok) + methodMetadata, ok := methodMetadataEntry.(document.Metadata) + require.True(t, ok) + + require.Equal(t, true, methodMetadata[document.PublishedProperty]) + require.Empty(t, methodMetadata[document.RecoveryCommitmentProperty]) + require.Empty(t, methodMetadata[document.UpdateCommitmentProperty]) + }) + + t.Run("success - deactivated, no version ID (unpublished)", func(t *testing.T) { + internal2 := &protocol.ResolutionModel{ + Doc: doc, + Deactivated: true, + CreatedTime: uint64(time.Now().Unix() - 60), + UpdatedTime: uint64(time.Now().Unix()), + } + + info := make(protocol.TransformationInfo) + info[document.IDProperty] = testDID + info[document.PublishedProperty] = true + info[document.CanonicalIDProperty] = canonicalID + + documentMetadata, err := New().CreateDocumentMetadata(internal2, info) + require.NoError(t, err) + + require.Equal(t, true, documentMetadata[document.DeactivatedProperty]) + require.Empty(t, documentMetadata[document.UpdatedProperty]) + require.NotEmpty(t, documentMetadata[document.CreatedProperty]) + require.Equal(t, canonicalID, documentMetadata[document.CanonicalIDProperty]) + require.Empty(t, documentMetadata[document.EquivalentIDProperty]) + + methodMetadataEntry, ok := documentMetadata[document.MethodProperty] + require.True(t, ok) + methodMetadata, ok := methodMetadataEntry.(document.Metadata) + require.True(t, ok) + + require.Equal(t, true, methodMetadata[document.PublishedProperty]) + require.Empty(t, methodMetadata[document.RecoveryCommitmentProperty]) + require.Empty(t, 
methodMetadata[document.UpdateCommitmentProperty]) + }) + + t.Run("error - internal document is missing", func(t *testing.T) { + info := make(protocol.TransformationInfo) + info[document.IDProperty] = "doc:abc:xyz" + info[document.PublishedProperty] = true + + result, err := New().CreateDocumentMetadata(nil, info) + require.Error(t, err) + require.Nil(t, result) + require.Contains(t, err.Error(), "resolution model is required for creating document metadata") + }) + + t.Run("error - transformation info is missing", func(t *testing.T) { + result, err := New().CreateDocumentMetadata(internal, nil) + require.Error(t, err) + require.Nil(t, result) + require.Contains(t, err.Error(), "transformation info is required for creating document metadata") + }) + + t.Run("error - transformation info is missing published", func(t *testing.T) { + info := make(protocol.TransformationInfo) + + result, err := New().CreateDocumentMetadata(internal, info) + require.Error(t, err) + require.Nil(t, result) + require.Contains(t, err.Error(), "published is required for creating document metadata") + }) +} + +var validDoc = []byte(`{ "name": "John Smith" }`) diff --git a/pkg/versions/1_0/docvalidator/didvalidator/testdata/doc.json b/pkg/versions/1_0/docvalidator/didvalidator/testdata/doc.json new file mode 100644 index 0000000..bb21b8b --- /dev/null +++ b/pkg/versions/1_0/docvalidator/didvalidator/testdata/doc.json @@ -0,0 +1,144 @@ +{ + "publicKey": [ + { + "id": "master", + "type": "EcdsaSecp256k1VerificationKey2019", + "purposes": ["authentication", "assertionMethod", "keyAgreement", "capabilityDelegation", "capabilityInvocation"], + "publicKeyJwk": { + "kty": "EC", + "crv": "P-256K", + "x": "PUymIqdtF_qxaAqPABSw-C-owT1KYYQbsMKFM-L9fJA", + "y": "nM84jDHCMOTGTh_ZdHq4dBBdo4Z5PkEOW9jA8z8IsGc" + } + }, + { + "id": "dual-auth-gen", + "type": "JsonWebKey2020", + "purposes": ["authentication"], + "publicKeyJwk": { + "kty": "EC", + "crv": "P-256K", + "x": "PUymIqdtF_qxaAqPABSw-C-owT1KYYQbsMKFM-L9fJA", + "y": "nM84jDHCMOTGTh_ZdHq4dBBdo4Z5PkEOW9jA8z8IsGc" + } + }, + { + "id": "auth-only", + "type": "JsonWebKey2020", + "purposes": ["authentication"], + "publicKeyJwk": { + "kty": "EC", + "crv": "P-256K", + "x": "PUymIqdtF_qxaAqPABSw-C-owT1KYYQbsMKFM-L9fJA", + "y": "nM84jDHCMOTGTh_ZdHq4dBBdo4Z5PkEOW9jA8z8IsGc" + } + }, + { + "id": "dual-assertion-gen", + "type": "JsonWebKey2020", + "purposes": ["assertionMethod"], + "publicKeyJwk": { + "kty": "EC", + "crv": "P-256K", + "x": "PUymIqdtF_qxaAqPABSw-C-owT1KYYQbsMKFM-L9fJA", + "y": "nM84jDHCMOTGTh_ZdHq4dBBdo4Z5PkEOW9jA8z8IsGc" + } + }, + { + "id": "assertion-only", + "type": "JsonWebKey2020", + "purposes": ["assertionMethod"], + "publicKeyJwk": { + "kty": "EC", + "crv": "P-256K", + "x": "PUymIqdtF_qxaAqPABSw-C-owT1KYYQbsMKFM-L9fJA", + "y": "nM84jDHCMOTGTh_ZdHq4dBBdo4Z5PkEOW9jA8z8IsGc" + } + }, + { + "id": "dual-agreement-gen", + "type": "JsonWebKey2020", + "purposes": ["keyAgreement"], + "publicKeyJwk": { + "kty": "EC", + "crv": "P-256K", + "x": "PUymIqdtF_qxaAqPABSw-C-owT1KYYQbsMKFM-L9fJA", + "y": "nM84jDHCMOTGTh_ZdHq4dBBdo4Z5PkEOW9jA8z8IsGc" + } + }, + { + "id": "agreement-only", + "type": "JsonWebKey2020", + "purposes": ["keyAgreement"], + "publicKeyJwk": { + "kty": "EC", + "crv": "P-256K", + "x": "PUymIqdtF_qxaAqPABSw-C-owT1KYYQbsMKFM-L9fJA", + "y": "nM84jDHCMOTGTh_ZdHq4dBBdo4Z5PkEOW9jA8z8IsGc" + } + }, + { + "id": "dual-invocation-gen", + "type": "JsonWebKey2020", + "purposes": ["capabilityInvocation"], + "publicKeyJwk": { + "kty": "EC", + "crv": "P-256K", + "x": 
"PUymIqdtF_qxaAqPABSw-C-owT1KYYQbsMKFM-L9fJA", + "y": "nM84jDHCMOTGTh_ZdHq4dBBdo4Z5PkEOW9jA8z8IsGc" + } + }, + { + "id": "invocation-only", + "type": "JsonWebKey2020", + "purposes": ["capabilityInvocation"], + "publicKeyJwk": { + "kty": "EC", + "crv": "P-256K", + "x": "PUymIqdtF_qxaAqPABSw-C-owT1KYYQbsMKFM-L9fJA", + "y": "nM84jDHCMOTGTh_ZdHq4dBBdo4Z5PkEOW9jA8z8IsGc" + } + }, + { + "id": "dual-delegation-gen", + "type": "JsonWebKey2020", + "purposes": ["capabilityDelegation"], + "publicKeyJwk": { + "kty": "EC", + "crv": "P-256K", + "x": "PUymIqdtF_qxaAqPABSw-C-owT1KYYQbsMKFM-L9fJA", + "y": "nM84jDHCMOTGTh_ZdHq4dBBdo4Z5PkEOW9jA8z8IsGc" + } + }, + { + "id": "delegation-only", + "type": "JsonWebKey2020", + "purposes": ["capabilityDelegation"], + "publicKeyJwk": { + "kty": "EC", + "crv": "P-256K", + "x": "PUymIqdtF_qxaAqPABSw-C-owT1KYYQbsMKFM-L9fJA", + "y": "nM84jDHCMOTGTh_ZdHq4dBBdo4Z5PkEOW9jA8z8IsGc" + } + }, + { + "id": "general-only", + "type": "JsonWebKey2020", + "publicKeyJwk": { + "kty": "EC", + "crv": "P-256K", + "x": "PUymIqdtF_qxaAqPABSw-C-owT1KYYQbsMKFM-L9fJA", + "y": "nM84jDHCMOTGTh_ZdHq4dBBdo4Z5PkEOW9jA8z8IsGc" + } + } + ], + "service": [ + { + "id": "hub", + "type": "IdentityHub", + "routingKeys": "routingKeysValue", + "recipientKeys": "recipientKeysValue", + "serviceEndpoint": "https://example.com/hub/" + } + ] +} \ No newline at end of file diff --git a/pkg/versions/1_0/docvalidator/didvalidator/validator.go b/pkg/versions/1_0/docvalidator/didvalidator/validator.go new file mode 100644 index 0000000..dafbd5c --- /dev/null +++ b/pkg/versions/1_0/docvalidator/didvalidator/validator.go @@ -0,0 +1,63 @@ +/* +Copyright Gen Digital Inc. All Rights Reserved. + +SPDX-License-Identifier: Apache-2.0 +*/ + +package didvalidator + +import ( + "errors" + + "github.com/trustbloc/sidetree-go/pkg/document" +) + +const didSuffix = "didSuffix" + +// Validator is responsible for validating did operations and sidetree rules. +type Validator struct { +} + +// New creates a new did validator. +func New() *Validator { + return &Validator{} +} + +// IsValidPayload verifies that the given payload is a valid Sidetree specific payload +// that can be accepted by the Sidetree update operations. +func (v *Validator) IsValidPayload(payload []byte) error { + doc, err := document.FromBytes(payload) + if err != nil { + return err + } + + didSuffix := doc.GetStringValue(didSuffix) + if didSuffix == "" { + return errors.New("missing did unique suffix") + } + + // checking for previous operation existence has been pushed to handler + return nil +} + +// IsValidOriginalDocument verifies that the given payload is a valid Sidetree specific did document that +// can be accepted by the Sidetree create operation. +func (v *Validator) IsValidOriginalDocument(payload []byte) error { + didDoc, err := document.DidDocumentFromBytes(payload) + if err != nil { + return err + } + + // Sidetree rule: The document must NOT have the id property + if didDoc.ID() != "" { + return errors.New("document must NOT have the id property") + } + + // Sidetree rule: must not have context + ctx := didDoc.Context() + if len(ctx) != 0 { + return errors.New("document must NOT have context") + } + + return nil +} diff --git a/pkg/versions/1_0/docvalidator/didvalidator/validator_test.go b/pkg/versions/1_0/docvalidator/didvalidator/validator_test.go new file mode 100644 index 0000000..51a4304 --- /dev/null +++ b/pkg/versions/1_0/docvalidator/didvalidator/validator_test.go @@ -0,0 +1,90 @@ +/* +Copyright Gen Digital Inc. All Rights Reserved. 
+ +SPDX-License-Identifier: Apache-2.0 +*/ + +package didvalidator + +import ( + "io" + "os" + "testing" + + "github.com/stretchr/testify/require" +) + +func TestNew(t *testing.T) { + v := New() + require.NotNil(t, v) +} + +func TestIsValidOriginalDocument(t *testing.T) { + r := reader(t, "testdata/doc.json") + didDoc, err := io.ReadAll(r) + require.Nil(t, err) + + v := New() + + err = v.IsValidOriginalDocument(didDoc) + require.Nil(t, err) +} + +func TestIsValidOriginalDocument_ContextProvidedError(t *testing.T) { + v := New() + + err := v.IsValidOriginalDocument(docWithContext) + require.NotNil(t, err) + require.Contains(t, err.Error(), "document must NOT have context") +} + +func TestIsValidOriginalDocument_MustNotHaveIDError(t *testing.T) { + v := New() + + err := v.IsValidOriginalDocument(docWithID) + require.NotNil(t, err) + require.Contains(t, err.Error(), "document must NOT have the id property") +} + +func TestIsValidPayload(t *testing.T) { + v := New() + + err := v.IsValidPayload(validUpdate) + require.Nil(t, err) +} + +func TestIsValidPayloadError(t *testing.T) { + v := New() + + err := v.IsValidPayload(invalidUpdate) + require.NotNil(t, err) + require.Contains(t, err.Error(), "missing did unique suffix") +} + +func reader(t *testing.T, filename string) io.Reader { + f, err := os.Open(filename) + require.Nil(t, err) + + return f +} + +var ( + docWithContext = []byte(`{ + "@context": ["https://w3id.org/did/v1"], + "publicKey": [{ + "id": "key-1", + "type": "JsonWebKey2020", + "publicKeyJwk": { + "kty": "EC", + "crv": "P-256K", + "x": "PUymIqdtF_qxaAqPABSw-C-owT1KYYQbsMKFM-L9fJA", + "y": "nM84jDHCMOTGTh_ZdHq4dBBdo4Z5PkEOW9jA8z8IsGc" + } + }] +}`) + + docWithID = []byte(`{ "id" : "001", "name": "John Smith" }`) + + validUpdate = []byte(`{ "didSuffix": "abc" }`) + invalidUpdate = []byte(`{ "patch": "" }`) +) diff --git a/pkg/versions/1_0/docvalidator/docvalidator/validator.go b/pkg/versions/1_0/docvalidator/docvalidator/validator.go new file mode 100644 index 0000000..a7017a1 --- /dev/null +++ b/pkg/versions/1_0/docvalidator/docvalidator/validator.go @@ -0,0 +1,58 @@ +/* +Copyright Gen Digital Inc. All Rights Reserved. + +SPDX-License-Identifier: Apache-2.0 +*/ + +package docvalidator + +import ( + "errors" + + "github.com/trustbloc/sidetree-go/pkg/document" +) + +const didSuffix = "didSuffix" + +// Validator is responsible for validating document operations and Sidetree rules. +type Validator struct { +} + +// New creates a new document validator. +func New() *Validator { + return &Validator{} +} + +// IsValidPayload verifies that the given payload is a valid Sidetree specific payload +// that can be accepted by the Sidetree update operations. +func (v *Validator) IsValidPayload(payload []byte) error { + doc, err := document.FromBytes(payload) + if err != nil { + return err + } + + uniqueSuffix := doc.GetStringValue(didSuffix) + if uniqueSuffix == "" { + return errors.New("missing unique suffix") + } + + // checking for previous operation existence has been pushed to handler + + return nil +} + +// IsValidOriginalDocument verifies that the given payload is a valid Sidetree specific document that can be accepted by +// the Sidetree create operation. 
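+// A minimal usage sketch (editorial illustration, not from the upstream source; the payloads mirror the fixtures in validator_test.go below): +// +// v := docvalidator.New() +// if err := v.IsValidOriginalDocument([]byte(`{ "name": "John Smith" }`)); err != nil { +// // a non-nil error means the document carried an id property or was not valid JSON +// } +// if err := v.IsValidPayload([]byte(`{ "didSuffix": "abc" }`)); err != nil { +// // a non-nil error means the payload was missing the didSuffix property +// }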
+func (v *Validator) IsValidOriginalDocument(payload []byte) error { + doc, err := document.FromBytes(payload) + if err != nil { + return err + } + + // The document must NOT have the id property + if doc.ID() != "" { + return errors.New("document must NOT have the id property") + } + + return nil +} diff --git a/pkg/versions/1_0/docvalidator/docvalidator/validator_test.go b/pkg/versions/1_0/docvalidator/docvalidator/validator_test.go new file mode 100644 index 0000000..c045092 --- /dev/null +++ b/pkg/versions/1_0/docvalidator/docvalidator/validator_test.go @@ -0,0 +1,67 @@ +/* +Copyright Gen Digital Inc. All Rights Reserved. + +SPDX-License-Identifier: Apache-2.0 +*/ + +package docvalidator + +import ( + "testing" + + "github.com/stretchr/testify/require" +) + +func TestNew(t *testing.T) { + v := New() + require.NotNil(t, v) +} + +func TestIsValidOriginalDocument(t *testing.T) { + v := New() + + err := v.IsValidOriginalDocument(validDoc) + require.Nil(t, err) +} + +func TestValidatorIsValidOriginalDocumentError(t *testing.T) { + v := New() + + err := v.IsValidOriginalDocument(invalidDoc) + require.NotNil(t, err) + require.Contains(t, err.Error(), "document must NOT have the id property") +} + +func TestValidatorIsValidPayload(t *testing.T) { + v := New() + + err := v.IsValidPayload(validUpdate) + require.NoError(t, err) +} + +func TestInvalidPayloadError(t *testing.T) { + v := New() + + // payload is invalid json + payload := []byte("[test : 123]") + + err := v.IsValidOriginalDocument(payload) + require.Error(t, err) + require.Contains(t, err.Error(), "invalid character") +} + +func TestValidatorIsValidPayloadError(t *testing.T) { + v := New() + + err := v.IsValidPayload(invalidUpdate) + require.NotNil(t, err) + require.Contains(t, err.Error(), "missing unique suffix") +} + +var ( + validDoc = []byte(`{ "name": "John Smith" }`) + invalidDoc = []byte(`{ "id" : "001", "name": "John Smith" }`) + + validUpdate = []byte(`{ "didSuffix": "abc" }`) + invalidUpdate = []byte(`{ "patch": "" }`) +) diff --git a/pkg/versions/1_0/model/operation.go b/pkg/versions/1_0/model/operation.go new file mode 100644 index 0000000..c7bae07 --- /dev/null +++ b/pkg/versions/1_0/model/operation.go @@ -0,0 +1,45 @@ +/* +Copyright Gen Digital Inc. All Rights Reserved. + +SPDX-License-Identifier: Apache-2.0 +*/ + +package model + +import ( + "github.com/trustbloc/sidetree-go/pkg/api/operation" +) + +// Operation is used for parsing an operation request. +type Operation struct { + + // Type defines operation type + Type operation.Type + + // Namespace defines document namespace + Namespace string + + // ID is the full ID for this document - namespace + unique suffix + ID string + + // UniqueSuffix is the unique suffix + UniqueSuffix string + + // OperationRequest is the original operation request + OperationRequest []byte + + // SignedData is the signed data for the operation (compact JWS) + SignedData string + + // RevealValue is the multihash of the JWK + RevealValue string + + // Delta is the operation delta model + Delta *DeltaModel + + // SuffixData is the suffix data model + SuffixData *SuffixDataModel + + // AnchorOrigin is the anchor origin + AnchorOrigin interface{} +} diff --git a/pkg/versions/1_0/model/request.go b/pkg/versions/1_0/model/request.go new file mode 100644 index 0000000..431c54f --- /dev/null +++ b/pkg/versions/1_0/model/request.go @@ -0,0 +1,167 @@ +/* +Copyright Gen Digital Inc. All Rights Reserved.
+ +SPDX-License-Identifier: Apache-2.0 +*/ + +package model + +import ( + "github.com/trustbloc/sidetree-go/pkg/api/operation" + "github.com/trustbloc/sidetree-go/pkg/jws" + "github.com/trustbloc/sidetree-go/pkg/patch" +) + +// CreateRequest is the struct for the create payload (JCS). +type CreateRequest struct { + // operation + // Required: true + Operation operation.Type `json:"type,omitempty"` + + // Suffix data object + // Required: true + SuffixData *SuffixDataModel `json:"suffixData,omitempty"` + + // Delta object + // Required: true + Delta *DeltaModel `json:"delta,omitempty"` +} + +// SuffixDataModel is part of the create request. +type SuffixDataModel struct { + + // Hash of the delta object (required) + DeltaHash string `json:"deltaHash,omitempty"` + + // Commitment hash for the next recovery or deactivate operation (required) + RecoveryCommitment string `json:"recoveryCommitment,omitempty"` + + // AnchorOrigin signifies the system(s) that know the most recent anchor for this DID (optional) + AnchorOrigin interface{} `json:"anchorOrigin,omitempty"` + + // Type signifies the type of entity a DID represents (optional) + Type string `json:"type,omitempty"` +} + +// DeltaModel contains patch data (patches used for create, recover, update). +type DeltaModel struct { + + // Commitment hash for the next update operation + UpdateCommitment string `json:"updateCommitment,omitempty"` + + // Patches defines document patches + Patches []patch.Patch `json:"patches,omitempty"` +} + +// UpdateRequest is the struct for the update request. +type UpdateRequest struct { + // Operation defines operation type + Operation operation.Type `json:"type"` + + // DidSuffix is the suffix of the DID + DidSuffix string `json:"didSuffix"` + + // RevealValue is the reveal value + RevealValue string `json:"revealValue"` + + // SignedData is compact JWS - signature information + SignedData string `json:"signedData"` + + // Delta is encoded delta object + Delta *DeltaModel `json:"delta"` +} + +// DeactivateRequest is the struct for deactivating a document. +type DeactivateRequest struct { + // Operation + // Required: true + Operation operation.Type `json:"type"` + + // DidSuffix of the DID + // Required: true + DidSuffix string `json:"didSuffix"` + + // RevealValue is the reveal value + RevealValue string `json:"revealValue"` + + // Compact JWS - signature information + SignedData string `json:"signedData"` +} + +// UpdateSignedDataModel defines the signed data model for update. +type UpdateSignedDataModel struct { + // UpdateKey is the current update key + UpdateKey *jws.JWK `json:"updateKey"` + + // DeltaHash of the unsigned delta object + DeltaHash string `json:"deltaHash"` + + // AnchorFrom defines the earliest time for this operation. + AnchorFrom int64 `json:"anchorFrom,omitempty"` + + // AnchorUntil defines the expiry time for this operation. + AnchorUntil int64 `json:"anchorUntil,omitempty"` +} + +// RecoverSignedDataModel defines the signed data model for recovery. +type RecoverSignedDataModel struct { + + // DeltaHash of the unsigned delta object + DeltaHash string `json:"deltaHash"` + + // RecoveryKey is the current recovery key + RecoveryKey *jws.JWK `json:"recoveryKey"` + + // RecoveryCommitment is the commitment used for the next recovery/deactivate + RecoveryCommitment string `json:"recoveryCommitment"` + + // AnchorOrigin signifies the system(s) that know the most recent anchor for this DID (optional) + AnchorOrigin interface{} `json:"anchorOrigin,omitempty"` + + // AnchorFrom defines the earliest time for this operation.
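+ // Editorial note: a zero AnchorFrom/AnchorUntil pair means no time restriction; + // when only AnchorFrom is set, the operation applier (later in this patch) derives + // AnchorUntil as AnchorFrom plus the protocol's MaxOperationTimeDelta.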
+ AnchorFrom int64 `json:"anchorFrom,omitempty"` + + // AnchorUntil defines the expiry time for this operation. + AnchorUntil int64 `json:"anchorUntil,omitempty"` +} + +// DeactivateSignedDataModel defines the data model for deactivate. +type DeactivateSignedDataModel struct { + + // DidSuffix is the suffix of the DID + // Required: true + DidSuffix string `json:"didSuffix"` + + // RevealValue is the reveal value + RevealValue string `json:"revealValue"` + + // RecoveryKey is the current recovery key + RecoveryKey *jws.JWK `json:"recoveryKey"` + + // AnchorFrom defines the earliest time for this operation. + AnchorFrom int64 `json:"anchorFrom,omitempty"` + + // AnchorUntil defines the expiry time for this operation. + AnchorUntil int64 `json:"anchorUntil,omitempty"` +} + +// RecoverRequest is the struct for the document recovery payload. +type RecoverRequest struct { + // operation + // Required: true + Operation operation.Type `json:"type"` + + // DidSuffix is the suffix of the DID + // Required: true + DidSuffix string `json:"didSuffix"` + + // RevealValue is the reveal value + RevealValue string `json:"revealValue"` + + // Compact JWS - signature information + SignedData string `json:"signedData"` + + // Delta object + // Required: true + Delta *DeltaModel `json:"delta"` +} diff --git a/pkg/versions/1_0/model/util.go b/pkg/versions/1_0/model/util.go new file mode 100644 index 0000000..48ad9b6 --- /dev/null +++ b/pkg/versions/1_0/model/util.go @@ -0,0 +1,88 @@ +/* +Copyright Gen Digital Inc. All Rights Reserved. + +SPDX-License-Identifier: Apache-2.0 +*/ + +package model + +import ( + "errors" + "fmt" + + "github.com/trustbloc/sidetree-go/pkg/api/operation" + "github.com/trustbloc/sidetree-go/pkg/canonicalizer" + "github.com/trustbloc/sidetree-go/pkg/hashing" +) + +// GetAnchoredOperation is a utility method for converting an operation model into an anchored operation. +func GetAnchoredOperation(op *Operation) (*operation.AnchoredOperation, error) { + var request interface{} + switch op.Type { + case operation.TypeCreate: + request = CreateRequest{ + Operation: op.Type, + SuffixData: op.SuffixData, + Delta: op.Delta, + } + + case operation.TypeUpdate: + request = UpdateRequest{ + Operation: op.Type, + DidSuffix: op.UniqueSuffix, + Delta: op.Delta, + SignedData: op.SignedData, + RevealValue: op.RevealValue, + } + + case operation.TypeDeactivate: + request = DeactivateRequest{ + Operation: op.Type, + DidSuffix: op.UniqueSuffix, + SignedData: op.SignedData, + RevealValue: op.RevealValue, + } + + case operation.TypeRecover: + request = RecoverRequest{ + Operation: op.Type, + DidSuffix: op.UniqueSuffix, + Delta: op.Delta, + SignedData: op.SignedData, + RevealValue: op.RevealValue, + } + + default: + return nil, fmt.Errorf("operation type %s not supported for anchored operation", op.Type) + } + + operationBuffer, err := canonicalizer.MarshalCanonical(request) + if err != nil { + return nil, fmt.Errorf("failed to canonicalize anchored operation[%v]: %s", op, err.Error()) + } + + return &operation.AnchoredOperation{ + Type: op.Type, + UniqueSuffix: op.UniqueSuffix, + OperationRequest: operationBuffer, + AnchorOrigin: op.AnchorOrigin, + }, nil +} + +// GetUniqueSuffix calculates the unique suffix from the suffix data and the supplied multihash algorithms.
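+// A hedged example of the expected call (values borrowed from util_test.go below; multihash code 18 is SHA2-256): +// +// suffixData := &SuffixDataModel{RecoveryCommitment: "rc", DeltaHash: "dh"} +// uniqueSuffix, err := GetUniqueSuffix(suffixData, []uint{18}) +// +// The returned uniqueSuffix is the encoded multihash of the canonicalized suffix data and becomes the unique-suffix component of the full document ID (namespace + unique suffix).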
+func GetUniqueSuffix(model *SuffixDataModel, algs []uint) (string, error) { + if len(algs) == 0 { + return "", errors.New("failed to calculate unique suffix: algorithm not provided") + } + + // Even though the protocol supports a list of multihashing algorithms, in this protocol version (v1) we can have + // only one multihashing algorithm. Later versions may have multiple values for backward compatibility. + // At that point (version 2) the spec will hopefully better define how to handle these scenarios: + // https://github.com/decentralized-identity/sidetree/issues/965 + encodedComputedMultihash, err := hashing.CalculateModelMultihash(model, algs[0]) + if err != nil { + return "", fmt.Errorf("failed to calculate unique suffix: %s", err.Error()) + } + + return encodedComputedMultihash, nil +} diff --git a/pkg/versions/1_0/model/util_test.go b/pkg/versions/1_0/model/util_test.go new file mode 100644 index 0000000..a12b3c1 --- /dev/null +++ b/pkg/versions/1_0/model/util_test.go @@ -0,0 +1,141 @@ +/* +Copyright Gen Digital Inc. All Rights Reserved. + +SPDX-License-Identifier: Apache-2.0 +*/ + +package model + +import ( + "testing" + + "github.com/stretchr/testify/require" + + "github.com/trustbloc/sidetree-go/pkg/api/operation" +) + +const suffix = "suffix" + +func TestGetAnchoredOperation(t *testing.T) { + t.Run("success - create", func(t *testing.T) { + op := &Operation{ + Type: operation.TypeCreate, + UniqueSuffix: suffix, + SuffixData: &SuffixDataModel{ + RecoveryCommitment: "rc", + DeltaHash: "dh", + }, + Delta: &DeltaModel{ + UpdateCommitment: "uc", + }, + } + + opBuffer := `{"delta":{"updateCommitment":"uc"},"suffixData":{"deltaHash":"dh","recoveryCommitment":"rc"},"type":"create"}` + + anchored, err := GetAnchoredOperation(op) + require.NoError(t, err) + require.NotNil(t, anchored) + + require.Equal(t, op.Type, anchored.Type) + require.Equal(t, opBuffer, string(anchored.OperationRequest)) + require.Equal(t, suffix, anchored.UniqueSuffix) + }) + + t.Run("success - deactivate", func(t *testing.T) { + op := &Operation{ + Type: operation.TypeDeactivate, + UniqueSuffix: suffix, + RevealValue: "rv", + SignedData: "jws", + } + + opBuffer := `{"didSuffix":"suffix","revealValue":"rv","signedData":"jws","type":"deactivate"}` + + anchored, err := GetAnchoredOperation(op) + require.NoError(t, err) + require.NotNil(t, anchored) + + require.Equal(t, op.Type, anchored.Type) + require.Equal(t, opBuffer, string(anchored.OperationRequest)) + require.Equal(t, suffix, anchored.UniqueSuffix) + }) + + t.Run("success - recover", func(t *testing.T) { + op := &Operation{ + Type: operation.TypeRecover, + UniqueSuffix: suffix, + RevealValue: "rv", + SignedData: "jws", + Delta: &DeltaModel{ + UpdateCommitment: "uc", + }, + } + + opBuffer := `{"delta":{"updateCommitment":"uc"},"didSuffix":"suffix","revealValue":"rv","signedData":"jws","type":"recover"}` + + anchored, err := GetAnchoredOperation(op) + require.NoError(t, err) + require.NotNil(t, anchored) + require.Equal(t, op.Type, anchored.Type) + + require.Equal(t, opBuffer, string(anchored.OperationRequest)) + require.Equal(t, suffix, anchored.UniqueSuffix) + }) + + t.Run("success - update", func(t *testing.T) { + op := &Operation{ + Type: operation.TypeUpdate, + UniqueSuffix: suffix, + RevealValue: "rv", + SignedData: "jws", + Delta: &DeltaModel{ + UpdateCommitment: "uc", + }, + } + + opBuffer := `{"delta":{"updateCommitment":"uc"},"didSuffix":"suffix","revealValue":"rv","signedData":"jws","type":"update"}` + anchored, err := GetAnchoredOperation(op) + 
require.NoError(t, err) + require.NotNil(t, anchored) + require.Equal(t, anchored.Type, op.Type) + + require.Equal(t, opBuffer, string(anchored.OperationRequest)) + require.Equal(t, suffix, anchored.UniqueSuffix) + }) + + t.Run("error - type not supported", func(t *testing.T) { + op := &Operation{Type: "other"} + + anchored, err := GetAnchoredOperation(op) + require.Error(t, err) + require.Nil(t, anchored) + require.Contains(t, err.Error(), "operation type other not supported for anchored operation") + }) +} + +func TestGetUniqueSuffix(t *testing.T) { + s := &SuffixDataModel{ + RecoveryCommitment: "rc", + DeltaHash: "dh", + } + + t.Run("success", func(t *testing.T) { + uniqueSuffix, err := GetUniqueSuffix(s, []uint{18}) + require.NoError(t, err) + require.NotEmpty(t, uniqueSuffix) + }) + + t.Run("error - algorithm not provided", func(t *testing.T) { + uniqueSuffix, err := GetUniqueSuffix(s, []uint{}) + require.Error(t, err) + require.Empty(t, uniqueSuffix) + require.Contains(t, err.Error(), "failed to calculate unique suffix: algorithm not provided") + }) + + t.Run("error - algorithm not supported", func(t *testing.T) { + uniqueSuffix, err := GetUniqueSuffix(s, []uint{55}) + require.Error(t, err) + require.Empty(t, uniqueSuffix) + require.Contains(t, err.Error(), "failed to calculate unique suffix: algorithm not supported") + }) +} diff --git a/pkg/versions/1_0/operationapplier/operationapplier.go b/pkg/versions/1_0/operationapplier/operationapplier.go new file mode 100644 index 0000000..400f401 --- /dev/null +++ b/pkg/versions/1_0/operationapplier/operationapplier.go @@ -0,0 +1,387 @@ +/* +Copyright Gen Digital Inc. All Rights Reserved. + +SPDX-License-Identifier: Apache-2.0 +*/ + +package operationapplier + +import ( + "fmt" + + "github.com/pkg/errors" + + "github.com/trustbloc/logutil-go/pkg/log" + + "github.com/trustbloc/sidetree-go/pkg/api/operation" + "github.com/trustbloc/sidetree-go/pkg/api/protocol" + "github.com/trustbloc/sidetree-go/pkg/document" + "github.com/trustbloc/sidetree-go/pkg/hashing" + logfields "github.com/trustbloc/sidetree-go/pkg/internal/log" + internal "github.com/trustbloc/sidetree-go/pkg/jwsutil" + "github.com/trustbloc/sidetree-go/pkg/versions/1_0/model" +) + +//go:generate counterfeiter -o operationparser.gen.go --fake-name MockOperationParser . OperationParser + +var logger = log.New("sidetree-core-applier") + +// Applier is an operation applier. +type Applier struct { + protocol.Protocol + OperationParser + protocol.DocumentComposer +} + +// OperationParser defines the functions for parsing operations. +type OperationParser interface { + ValidateSuffixData(suffixData *model.SuffixDataModel) error + ValidateDelta(delta *model.DeltaModel) error + ParseCreateOperation(request []byte, anchor bool) (*model.Operation, error) + ParseUpdateOperation(request []byte, anchor bool) (*model.Operation, error) + ParseRecoverOperation(request []byte, anchor bool) (*model.Operation, error) + ParseDeactivateOperation(request []byte, anchor bool) (*model.Operation, error) + ParseSignedDataForUpdate(compactJWS string) (*model.UpdateSignedDataModel, error) + ParseSignedDataForDeactivate(compactJWS string) (*model.DeactivateSignedDataModel, error) + ParseSignedDataForRecover(compactJWS string) (*model.RecoverSignedDataModel, error) +} + +// New returns a new operation applier for the given protocol. 
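+// A wiring sketch mirroring the tests in this package (p is a protocol.Protocol value; operationparser and doccomposer are the sibling v1 implementations): +// +// applier := New(p, operationparser.New(p), doccomposer.New()) +// rm, err := applier.Apply(anchoredCreateOp, &protocol.ResolutionModel{}) +// +// The returned resolution model carries the composed document plus the next update/recovery commitments.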
+// +//nolint:gocritic +func New(p protocol.Protocol, parser OperationParser, dc protocol.DocumentComposer) *Applier { + return &Applier{ + Protocol: p, + OperationParser: parser, + DocumentComposer: dc, + } +} + +// Apply applies the given anchored operation. +func (s *Applier) Apply(op *operation.AnchoredOperation, rm *protocol.ResolutionModel) (*protocol.ResolutionModel, error) { + switch op.Type { + case operation.TypeCreate: + return s.applyCreateOperation(op, rm) + case operation.TypeUpdate: + return s.applyUpdateOperation(op, rm) + case operation.TypeDeactivate: + return s.applyDeactivateOperation(op, rm) + case operation.TypeRecover: + return s.applyRecoverOperation(op, rm) + default: + return nil, fmt.Errorf("operation type not supported for process operation") + } +} + +func (s *Applier) applyCreateOperation(anchoredOp *operation.AnchoredOperation, + rm *protocol.ResolutionModel) (*protocol.ResolutionModel, error) { + logger.Debug("Applying create operation", logfields.WithOperation(anchoredOp)) + + if rm.Doc != nil { + return nil, errors.New("create has to be the first operation") + } + + op, err := s.OperationParser.ParseCreateOperation(anchoredOp.OperationRequest, true) + if err != nil { + return nil, fmt.Errorf("failed to parse create operation in batch mode: %s", err.Error()) + } + + // from this point any error should advance recovery commitment + result := &protocol.ResolutionModel{ + Doc: make(document.Document), + CreatedTime: anchoredOp.TransactionTime, + LastOperationTransactionTime: anchoredOp.TransactionTime, + LastOperationTransactionNumber: anchoredOp.TransactionNumber, + LastOperationProtocolVersion: anchoredOp.ProtocolVersion, + VersionID: anchoredOp.CanonicalReference, + CanonicalReference: anchoredOp.CanonicalReference, + EquivalentReferences: anchoredOp.EquivalentReferences, + RecoveryCommitment: op.SuffixData.RecoveryCommitment, + AnchorOrigin: op.SuffixData.AnchorOrigin, + PublishedOperations: rm.PublishedOperations, + UnpublishedOperations: rm.UnpublishedOperations, + } + + // verify actual delta hash matches expected delta hash + err = hashing.IsValidModelMultihash(op.Delta, op.SuffixData.DeltaHash) + if err != nil { + logger.Info("Delta doesn't match delta hash; set update commitment to nil and advance recovery commitment", + log.WithError(err), logfields.WithSuffix(anchoredOp.UniqueSuffix), logfields.WithOperationType(string(anchoredOp.Type)), + logfields.WithTransactionTime(anchoredOp.TransactionTime), logfields.WithTransactionNumber(anchoredOp.TransactionNumber)) + + return result, nil + } + + err = s.OperationParser.ValidateDelta(op.Delta) + if err != nil { + logger.Info("Parse delta failed; set update commitment to nil and advance recovery commitment", + log.WithError(err), logfields.WithSuffix(op.UniqueSuffix), logfields.WithOperationType(string(op.Type)), + logfields.WithTransactionTime(anchoredOp.TransactionTime), logfields.WithTransactionNumber(anchoredOp.TransactionNumber)) + + return result, nil + } + + result.UpdateCommitment = op.Delta.UpdateCommitment + + doc, err := s.ApplyPatches(make(document.Document), op.Delta.Patches) + if err != nil { + logger.Info("Apply patches failed; advance commitments", + log.WithError(err), logfields.WithSuffix(anchoredOp.UniqueSuffix), logfields.WithOperationType(string(anchoredOp.Type)), + logfields.WithTransactionTime(anchoredOp.TransactionTime), logfields.WithTransactionNumber(anchoredOp.TransactionNumber)) + + return result, nil + } + + result.Doc = doc + + return result, nil +} + +func (s *Applier) 
applyUpdateOperation(anchoredOp *operation.AnchoredOperation, + rm *protocol.ResolutionModel) (*protocol.ResolutionModel, error) { + logger.Debug("Applying update operation", logfields.WithOperation(anchoredOp)) + + if rm.Doc == nil { + return nil, errors.New("update cannot be first operation") + } + + op, err := s.OperationParser.ParseUpdateOperation(anchoredOp.OperationRequest, true) + if err != nil { + return nil, fmt.Errorf("failed to parse update operation in batch mode: %s", err.Error()) + } + + signedDataModel, err := s.ParseSignedDataForUpdate(op.SignedData) + if err != nil { + return nil, fmt.Errorf("failed to unmarshal signed data model while applying update: %s", err.Error()) + } + + // verify the delta against the signed delta hash + err = hashing.IsValidModelMultihash(op.Delta, signedDataModel.DeltaHash) + if err != nil { + return nil, fmt.Errorf("update delta doesn't match delta hash: %s", err.Error()) + } + + // verify signature + _, err = internal.VerifyJWS(op.SignedData, signedDataModel.UpdateKey) + if err != nil { + return nil, fmt.Errorf("failed to check signature: %s", err.Error()) + } + + err = s.OperationParser.ValidateDelta(op.Delta) + if err != nil { + return nil, fmt.Errorf("failed to validate delta: %s", err.Error()) + } + + // delta is valid so advance update commitment + result := &protocol.ResolutionModel{ + Doc: rm.Doc, + CreatedTime: rm.CreatedTime, + UpdatedTime: anchoredOp.TransactionTime, + LastOperationTransactionTime: anchoredOp.TransactionTime, + LastOperationTransactionNumber: anchoredOp.TransactionNumber, + LastOperationProtocolVersion: anchoredOp.ProtocolVersion, + VersionID: anchoredOp.CanonicalReference, + CanonicalReference: rm.CanonicalReference, + EquivalentReferences: rm.EquivalentReferences, + UpdateCommitment: op.Delta.UpdateCommitment, + RecoveryCommitment: rm.RecoveryCommitment, + AnchorOrigin: rm.AnchorOrigin, + PublishedOperations: rm.PublishedOperations, + UnpublishedOperations: rm.UnpublishedOperations, + } + + // verify anchor from and until time against anchoring time + err = s.verifyAnchoringTimeRange(signedDataModel.AnchorFrom, signedDataModel.AnchorUntil, anchoredOp.TransactionTime) + if err != nil { + logger.Info("Invalid anchoring time range; advance commitments", + logfields.WithSuffix(op.UniqueSuffix), logfields.WithOperationType(string(op.Type)), + logfields.WithTransactionTime(anchoredOp.TransactionTime), logfields.WithTransactionNumber(anchoredOp.TransactionNumber), + log.WithError(err)) + + return result, nil + } + + doc, err := s.ApplyPatches(rm.Doc, op.Delta.Patches) + if err != nil { + logger.Info("Apply patches failed; advance update commitment", + logfields.WithSuffixes(op.UniqueSuffix), logfields.WithOperationType(string(op.Type)), + logfields.WithTransactionTime(anchoredOp.TransactionTime), logfields.WithTransactionNumber(anchoredOp.TransactionNumber), + log.WithError(err)) + + return result, nil + } + + // applying patches succeeded so update document + result.Doc = doc + + return result, nil +} + +func (s *Applier) applyDeactivateOperation(anchoredOp *operation.AnchoredOperation, + rm *protocol.ResolutionModel) (*protocol.ResolutionModel, error) { + logger.Debug("Applying deactivate operation", logfields.WithOperation(anchoredOp)) + + if rm.Doc == nil { + return nil, errors.New("deactivate can only be applied to an existing document") + } + + op, err := s.OperationParser.ParseDeactivateOperation(anchoredOp.OperationRequest, true) + if err != nil { + return nil, fmt.Errorf("failed to parse deactivate operation in 
batch mode: %s", err.Error()) + } + + signedDataModel, err := s.ParseSignedDataForDeactivate(op.SignedData) + if err != nil { + return nil, fmt.Errorf("failed to parse signed data model while applying deactivate: %s", err.Error()) + } + + // verify signed did suffix against actual did suffix + if op.UniqueSuffix != signedDataModel.DidSuffix { + return nil, errors.New("did suffix doesn't match signed value") + } + + // verify signature + _, err = internal.VerifyJWS(op.SignedData, signedDataModel.RecoveryKey) + if err != nil { + return nil, fmt.Errorf("failed to check signature: %s", err.Error()) + } + + // verify anchor from and until time against anchoring time + err = s.verifyAnchoringTimeRange(signedDataModel.AnchorFrom, signedDataModel.AnchorUntil, anchoredOp.TransactionTime) + if err != nil { + return nil, fmt.Errorf("invalid anchoring time range: %s", err.Error()) + } + + return &protocol.ResolutionModel{ + Doc: make(document.Document), + CreatedTime: rm.CreatedTime, + UpdatedTime: anchoredOp.TransactionTime, + LastOperationTransactionTime: anchoredOp.TransactionTime, + LastOperationTransactionNumber: anchoredOp.TransactionNumber, + LastOperationProtocolVersion: anchoredOp.ProtocolVersion, + VersionID: anchoredOp.CanonicalReference, + CanonicalReference: rm.CanonicalReference, + EquivalentReferences: rm.EquivalentReferences, + UpdateCommitment: "", + RecoveryCommitment: "", + Deactivated: true, + AnchorOrigin: rm.AnchorOrigin, + PublishedOperations: rm.PublishedOperations, + UnpublishedOperations: rm.UnpublishedOperations, + }, nil +} + +func (s *Applier) applyRecoverOperation(anchoredOp *operation.AnchoredOperation, + rm *protocol.ResolutionModel) (*protocol.ResolutionModel, error) { + logger.Debug("Applying recover operation", logfields.WithOperation(anchoredOp)) + + if rm.Doc == nil { + return nil, errors.New("recover can only be applied to an existing document") + } + + op, err := s.OperationParser.ParseRecoverOperation(anchoredOp.OperationRequest, true) + if err != nil { + return nil, fmt.Errorf("failed to parse recover operation in batch mode: %s", err.Error()) + } + + signedDataModel, err := s.ParseSignedDataForRecover(op.SignedData) + if err != nil { + return nil, fmt.Errorf("failed to parse signed data model while applying recover: %s", err.Error()) + } + + // verify signature + _, err = internal.VerifyJWS(op.SignedData, signedDataModel.RecoveryKey) + if err != nil { + return nil, fmt.Errorf("failed to check signature: %s", err.Error()) + } + + // from this point any error should advance recovery commitment + result := &protocol.ResolutionModel{ + Doc: make(document.Document), + CreatedTime: rm.CreatedTime, + UpdatedTime: anchoredOp.TransactionTime, + LastOperationTransactionTime: anchoredOp.TransactionTime, + LastOperationTransactionNumber: anchoredOp.TransactionNumber, + LastOperationProtocolVersion: anchoredOp.ProtocolVersion, + VersionID: anchoredOp.CanonicalReference, + CanonicalReference: anchoredOp.CanonicalReference, + EquivalentReferences: anchoredOp.EquivalentReferences, + RecoveryCommitment: signedDataModel.RecoveryCommitment, + AnchorOrigin: signedDataModel.AnchorOrigin, + PublishedOperations: rm.PublishedOperations, + UnpublishedOperations: rm.UnpublishedOperations, + } + + // verify the delta against the signed delta hash + err = hashing.IsValidModelMultihash(op.Delta, signedDataModel.DeltaHash) + if err != nil { + logger.Info("Recover delta doesn't match delta hash; set update commitment to nil and advance recovery commitment", + 
logfields.WithSuffixes(op.UniqueSuffix), logfields.WithOperationType(string(op.Type)), + logfields.WithTransactionTime(anchoredOp.TransactionTime), logfields.WithTransactionNumber(anchoredOp.TransactionNumber), + log.WithError(err)) + + return result, nil + } + + err = s.OperationParser.ValidateDelta(op.Delta) + if err != nil { + logger.Info("Parse delta failed; set update commitment to nil and advance recovery commitment", + logfields.WithSuffixes(op.UniqueSuffix), logfields.WithOperationType(string(op.Type)), + logfields.WithTransactionTime(anchoredOp.TransactionTime), logfields.WithTransactionNumber(anchoredOp.TransactionNumber), + log.WithError(err)) + + return result, nil + } + + result.UpdateCommitment = op.Delta.UpdateCommitment + + // verify anchor from and until time against anchoring time + err = s.verifyAnchoringTimeRange(signedDataModel.AnchorFrom, signedDataModel.AnchorUntil, anchoredOp.TransactionTime) + if err != nil { + logger.Info("Invalid anchoring time range; advance commitments", + logfields.WithSuffixes(op.UniqueSuffix), logfields.WithOperationType(string(op.Type)), + logfields.WithTransactionTime(anchoredOp.TransactionTime), logfields.WithTransactionNumber(anchoredOp.TransactionNumber), + log.WithError(err)) + + return result, nil + } + + doc, err := s.ApplyPatches(make(document.Document), op.Delta.Patches) + if err != nil { + logger.Info("Apply patches failed; advance commitments", + logfields.WithSuffixes(op.UniqueSuffix), logfields.WithOperationType(string(op.Type)), + logfields.WithTransactionTime(anchoredOp.TransactionTime), logfields.WithTransactionNumber(anchoredOp.TransactionNumber), + log.WithError(err)) + + return result, nil + } + + result.Doc = doc + + return result, nil +} + +func (s *Applier) verifyAnchoringTimeRange(from, until int64, anchor uint64) error { + if from == 0 && until == 0 { + // from and until are not specified - nothing to check + return nil + } + + if from > int64(anchor) { + return fmt.Errorf("anchor from time is greater than anchoring time") + } + + if s.getAnchorUntil(from, until) < int64(anchor) { + return fmt.Errorf("anchor until time is less than anchoring time") + } + + return nil +} + +func (s *Applier) getAnchorUntil(from, until int64) int64 { + if from != 0 && until == 0 { + return from + int64(s.MaxOperationTimeDelta) + } + + return until +} diff --git a/pkg/versions/1_0/operationapplier/operationapplier_test.go b/pkg/versions/1_0/operationapplier/operationapplier_test.go new file mode 100644 index 0000000..04b9cbb --- /dev/null +++ b/pkg/versions/1_0/operationapplier/operationapplier_test.go @@ -0,0 +1,1374 @@ +/* +Copyright Gen Digital Inc. All Rights Reserved.
+ +SPDX-License-Identifier: Apache-2.0 +*/ + +package operationapplier + +import ( + "crypto/ecdsa" + "crypto/elliptic" + "crypto/rand" + "encoding/json" + "errors" + "strconv" + "testing" + "time" + + "github.com/stretchr/testify/require" + + "github.com/trustbloc/sidetree-go/pkg/api/operation" + "github.com/trustbloc/sidetree-go/pkg/api/protocol" + "github.com/trustbloc/sidetree-go/pkg/canonicalizer" + "github.com/trustbloc/sidetree-go/pkg/commitment" + "github.com/trustbloc/sidetree-go/pkg/document" + "github.com/trustbloc/sidetree-go/pkg/hashing" + "github.com/trustbloc/sidetree-go/pkg/mocks" + "github.com/trustbloc/sidetree-go/pkg/patch" + "github.com/trustbloc/sidetree-go/pkg/util/ecsigner" + "github.com/trustbloc/sidetree-go/pkg/util/pubkey" + "github.com/trustbloc/sidetree-go/pkg/util/signutil" + "github.com/trustbloc/sidetree-go/pkg/versions/1_0/client" + "github.com/trustbloc/sidetree-go/pkg/versions/1_0/doccomposer" + "github.com/trustbloc/sidetree-go/pkg/versions/1_0/model" + "github.com/trustbloc/sidetree-go/pkg/versions/1_0/operationparser" +) + +const ( + sha2_256 = 18 + dummyUniqueSuffix = "dummy" + + updateKeyID = "update-key" +) + +var ( + p = protocol.Protocol{ + GenesisTime: 0, + MultihashAlgorithms: []uint{sha2_256}, + MaxOperationCount: 2, + MaxOperationSize: 2000, + MaxOperationHashLength: 100, + MaxDeltaSize: 1000, + MaxCasURILength: 100, + CompressionAlgorithm: "GZIP", + MaxChunkFileSize: 1024, + MaxProvisionalIndexFileSize: 1024, + MaxCoreIndexFileSize: 1024, + MaxProofFileSize: 1024, + SignatureAlgorithms: []string{"EdDSA", "ES256"}, + KeyAlgorithms: []string{"Ed25519", "P-256"}, + Patches: []string{"add-public-keys", "remove-public-keys", "add-services", "remove-services", "ietf-json-patch"}, + MaxOperationTimeDelta: 600, + NonceSize: 16, + MaxMemoryDecompressionFactor: 3, + } + + parser = operationparser.New(p) + + dc = doccomposer.New() +) + +func TestApplier_Apply(t *testing.T) { + recoveryKey, err := ecdsa.GenerateKey(elliptic.P256(), rand.Reader) + require.NoError(t, err) + + updateKey, err := ecdsa.GenerateKey(elliptic.P256(), rand.Reader) + require.NoError(t, err) + + t.Run("update is first operation error", func(t *testing.T) { + applier := New(p, parser, dc) + + const uniqueSuffix = "uniqueSuffix" + updateOp, _, err := getAnchoredUpdateOperation(updateKey, uniqueSuffix, 1) + require.NoError(t, err) + + doc, err := applier.Apply(updateOp, &protocol.ResolutionModel{}) + require.Error(t, err) + require.Nil(t, doc) + require.Equal(t, "update cannot be first operation", err.Error()) + }) + + t.Run("create is second operation error", func(t *testing.T) { + applier := New(p, parser, &mockDocComposer{}) + + createOp, err := getAnchoredCreateOperation(recoveryKey, updateKey) + require.NoError(t, err) + + doc, err := applier.Apply(createOp, &protocol.ResolutionModel{ + Doc: make(document.Document), + }) + require.Error(t, err) + require.Nil(t, doc) + require.Equal(t, "create has to be the first operation", err.Error()) + }) + + t.Run("apply recover to non existing document error", func(t *testing.T) { + applier := New(p, parser, dc) + + createOp, err := getAnchoredCreateOperation(recoveryKey, updateKey) + require.NoError(t, err) + + recoverOp, _, err := getAnchoredRecoverOperation(recoveryKey, updateKey, createOp.UniqueSuffix, 2) + require.NoError(t, err) + + doc, err := applier.Apply(recoverOp, &protocol.ResolutionModel{}) + require.Error(t, err) + require.Contains(t, err.Error(), "recover can only be applied to an existing document") + require.Nil(t, doc) + }) 
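+ + // Editorial note: the subtests above pin down Apply's ordering rules - create must be the first operation for a suffix, while update, recover and deactivate all require an existing document in the resolution model.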
+ + t.Run("invalid operation type error", func(t *testing.T) { + applier := New(p, parser, dc) + + doc, err := applier.Apply(&operation.AnchoredOperation{Type: "invalid"}, &protocol.ResolutionModel{Doc: make(document.Document)}) + require.Error(t, err) + require.Equal(t, "operation type not supported for process operation", err.Error()) + require.Nil(t, doc) + }) + + t.Run("create delta hash doesn't match delta error", func(t *testing.T) { + createOp, err := getCreateOperation(recoveryKey, updateKey) + require.NoError(t, err) + + delta, err := getDeltaModel(validDoc, "different") + require.NoError(t, err) + + createOp.Delta = delta + + anchoredOp := getAnchoredOperation(createOp) + require.Nil(t, err) + + applier := New(p, parser, dc) + rm, err := applier.Apply(anchoredOp, &protocol.ResolutionModel{}) + require.NoError(t, err) + require.Equal(t, make(document.Document), rm.Doc) + require.NotEmpty(t, rm.RecoveryCommitment) + require.Empty(t, rm.UpdateCommitment) + }) + + t.Run("error - failed to parse create operation", func(t *testing.T) { + createOp, err := getCreateOperation(recoveryKey, updateKey) + require.NoError(t, err) + + createOp.SuffixData.RecoveryCommitment = "" + + anchoredOp := getAnchoredOperation(createOp) + require.Nil(t, err) + + applier := New(p, parser, dc) + rm, err := applier.Apply(anchoredOp, &protocol.ResolutionModel{}) + require.Error(t, err) + require.Nil(t, rm) + require.Contains(t, err.Error(), "failed to parse create operation in batch mode") + }) + + t.Run("error - apply patches (document composer) error", func(t *testing.T) { + applier := New(p, parser, &mockDocComposer{Err: errors.New("document composer error")}) + + createOp, err := getAnchoredCreateOperation(recoveryKey, updateKey) + require.NoError(t, err) + + rm, err := applier.Apply(createOp, &protocol.ResolutionModel{}) + require.NoError(t, err) + require.Equal(t, make(document.Document), rm.Doc) + require.NotEmpty(t, rm.RecoveryCommitment) + require.NotEmpty(t, rm.UpdateCommitment) + }) +} + +func TestUpdateDocument(t *testing.T) { + recoveryKey, e := ecdsa.GenerateKey(elliptic.P256(), rand.Reader) + require.NoError(t, e) + + updateKey, e := ecdsa.GenerateKey(elliptic.P256(), rand.Reader) + require.NoError(t, e) + + createOp, err := getAnchoredCreateOperation(recoveryKey, updateKey) + require.NoError(t, err) + + uniqueSuffix := createOp.UniqueSuffix + + t.Run("success", func(t *testing.T) { + applier := New(p, parser, dc) + + rm, err := applier.Apply(createOp, &protocol.ResolutionModel{}) + require.NoError(t, err) + + createdTime := rm.CreatedTime + + require.NotZero(t, createdTime) + require.Zero(t, rm.UpdatedTime) + + updateOp, nextUpdateKey, err := getAnchoredUpdateOperation(updateKey, uniqueSuffix, 1) + require.Nil(t, err) + + result, err := applier.Apply(updateOp, rm) + require.Nil(t, err) + + // check if service type value is updated (done via json patch) + didDoc := document.DidDocumentFromJSONLDObject(result.Doc) + require.Equal(t, "special1", didDoc["test"]) + + // test consecutive update + updateOp, _, err = getAnchoredUpdateOperation(nextUpdateKey, uniqueSuffix, 2) + require.Nil(t, err) + + result, err = applier.Apply(updateOp, result) + require.Nil(t, err) + + require.Equal(t, createdTime, result.CreatedTime) + + // check if service type value is updated again (done via json patch) + didDoc = document.DidDocumentFromJSONLDObject(result.Doc) + require.Equal(t, "special2", didDoc["test"]) + }) + + t.Run("error - operation with reused next commitment", func(t *testing.T) { + applier := New(p, 
parser, dc) + + rm, err := applier.Apply(createOp, &protocol.ResolutionModel{}) + require.NoError(t, err) + + // scenario: update 1 followed by update 2 followed by update 3 with reused commitment from 1 + + updateOp, nextUpdateKey, err := getUpdateOperation(updateKey, uniqueSuffix, 1) + require.Nil(t, err) + + delta1 := updateOp.Delta + + rm, err = applier.Apply(getAnchoredOperation(updateOp), rm) + require.Nil(t, err) + + // check if service type value is updated (done via json patch) + didDoc := document.DidDocumentFromJSONLDObject(rm.Doc) + require.Equal(t, "special1", didDoc["test"]) + + // test consecutive update + updateOp, nextUpdateKey, err = getUpdateOperation(nextUpdateKey, uniqueSuffix, 2) + require.Nil(t, err) + + rm, err = applier.Apply(getAnchoredOperation(updateOp), rm) + require.Nil(t, err) + + // service type value is updated since operation is valid + didDoc = document.DidDocumentFromJSONLDObject(rm.Doc) + require.Equal(t, "special2", didDoc["test"]) + + // two successful update operations - next update with reused commitment from op 1 + updateOp, _, err = getUpdateOperation(nextUpdateKey, uniqueSuffix, 1) + require.Nil(t, err) + + delta3 := updateOp.Delta + delta3.UpdateCommitment = delta1.UpdateCommitment + updateOp.Delta = delta3 + + _, err = applier.Apply(getAnchoredOperation(updateOp), rm) + require.EqualError(t, err, "update delta doesn't match delta hash: supplied hash doesn't match original content") + }) + + t.Run("missing signed data error", func(t *testing.T) { + applier := New(p, parser, dc) + + rm, err := applier.Apply(createOp, &protocol.ResolutionModel{}) + require.NoError(t, err) + + updateOp, _, err := getUpdateOperation(updateKey, uniqueSuffix, 1) + require.NoError(t, err) + + updateOp.SignedData = "" + + rm, err = applier.Apply(getAnchoredOperation(updateOp), rm) + require.Error(t, err) + require.Nil(t, rm) + require.Contains(t, err.Error(), "missing signed data") + }) + + t.Run("unmarshal signed data model error", func(t *testing.T) { + applier := New(p, parser, dc) + + rm, err := applier.Apply(createOp, &protocol.ResolutionModel{}) + require.NoError(t, err) + + updateOp, _, err := getUpdateOperation(updateKey, uniqueSuffix, 1) + require.NoError(t, err) + + signer := ecsigner.New(updateKey, "ES256", "update-kid") + + compactJWS, err := signutil.SignPayload([]byte("payload"), signer) + require.NoError(t, err) + + updateOp.SignedData = compactJWS + + rm, err = applier.Apply(getAnchoredOperation(updateOp), rm) + require.Error(t, err) + require.Nil(t, rm) + require.Contains(t, err.Error(), "failed to parse update operation in batch mode: failed to unmarshal signed data model for update") + }) + + t.Run("invalid signature error", func(t *testing.T) { + applier := New(p, parser, dc) + + rm, err := applier.Apply(createOp, &protocol.ResolutionModel{}) + require.NoError(t, err) + + // sign update operation with different key (than one used in create) + differentKey, err := ecdsa.GenerateKey(elliptic.P256(), rand.Reader) + require.NoError(t, err) + + s := ecsigner.New(differentKey, "ES256", updateKeyID) + updateOp, _, err := getUpdateOperationWithSigner(s, updateKey, uniqueSuffix, 1) + require.NoError(t, err) + + anchoredOp := getAnchoredOperation(updateOp) + + rm, err = applier.Apply(anchoredOp, rm) + require.Error(t, err) + require.Nil(t, rm) + require.Contains(t, err.Error(), "ecdsa: invalid signature") + }) + + t.Run("delta hash doesn't match delta error", func(t *testing.T) { + applier := New(p, parser, dc) + + rm, err := applier.Apply(createOp, 
&protocol.ResolutionModel{}) + require.NoError(t, err) + + updateOp, _, err := getUpdateOperation(updateKey, uniqueSuffix, 1) + require.NoError(t, err) + + updateOp.Delta = &model.DeltaModel{UpdateCommitment: "different"} + + rm, err = applier.Apply(getAnchoredOperation(updateOp), rm) + require.Error(t, err) + require.Nil(t, rm) + require.Contains(t, err.Error(), "update delta doesn't match delta hash") + }) + + t.Run("invalid anchoring range - anchor until time is less than anchoring time", func(t *testing.T) { + applier := New(p, parser, dc) + + createResult, err := applier.Apply(createOp, &protocol.ResolutionModel{}) + require.NoError(t, err) + + p := map[string]interface{}{ + "op": "replace", + "path": "/test", + "value": "value", + } + + patchBytes, err := canonicalizer.MarshalCanonical([]map[string]interface{}{p}) + require.NoError(t, err) + + jsonPatch, err := patch.NewJSONPatch(string(patchBytes)) + require.NoError(t, err) + + _, updateCommitment, err := generateKeyAndCommitment() + require.NoError(t, err) + + delta := &model.DeltaModel{ + UpdateCommitment: updateCommitment, + Patches: []patch.Patch{jsonPatch}, + } + + deltaHash, err := hashing.CalculateModelMultihash(delta, sha2_256) + require.NoError(t, err) + + updatePubKey, err := pubkey.GetPublicKeyJWK(&updateKey.PublicKey) + require.NoError(t, err) + + now := time.Now().Unix() + + signedData := &model.UpdateSignedDataModel{ + DeltaHash: deltaHash, + UpdateKey: updatePubKey, + AnchorUntil: now - 5*60, + } + + signer := ecsigner.New(updateKey, "ES256", "") + jws, err := signutil.SignModel(signedData, signer) + require.NoError(t, err) + + rv, err := commitment.GetRevealValue(updatePubKey, sha2_256) + require.NoError(t, err) + + updateOp := &model.Operation{ + Namespace: mocks.DefaultNS, + ID: "did:sidetree:" + uniqueSuffix, + UniqueSuffix: uniqueSuffix, + Delta: delta, + Type: operation.TypeUpdate, + SignedData: jws, + RevealValue: rv, + } + + anchoredOp := getAnchoredOperation(updateOp) + anchoredOp.TransactionTime = uint64(now) + + updateResult, err := applier.Apply(anchoredOp, createResult) + require.NoError(t, err) + require.NotNil(t, updateResult) + require.Equal(t, createResult.Doc, updateResult.Doc) + require.NotEqual(t, updateResult.UpdateCommitment, createResult.UpdateCommitment) + }) + + t.Run("error - document composer error", func(t *testing.T) { + applier := New(p, parser, dc) + + createOp, err := getAnchoredCreateOperation(recoveryKey, updateKey) + require.NoError(t, err) + + createResult, err := applier.Apply(createOp, &protocol.ResolutionModel{}) + require.NoError(t, err) + + updateOp, _, err := getAnchoredUpdateOperation(updateKey, uniqueSuffix, 1) + require.NoError(t, err) + + applier = New(p, parser, &mockDocComposer{Err: errors.New("document composer error")}) + + updateResult, err := applier.Apply(updateOp, createResult) + require.NoError(t, err) + require.NotNil(t, updateResult) + require.Equal(t, createResult.Doc, updateResult.Doc) + require.NotEqual(t, createResult.UpdateCommitment, updateResult.UpdateCommitment) + require.Equal(t, createResult.RecoveryCommitment, updateResult.RecoveryCommitment) + }) +} + +func TestDeactivate(t *testing.T) { + recoveryKey, err := ecdsa.GenerateKey(elliptic.P256(), rand.Reader) + require.NoError(t, err) + + recoveryPubKey, err := pubkey.GetPublicKeyJWK(&recoveryKey.PublicKey) + require.NoError(t, err) + + updateKey, err := ecdsa.GenerateKey(elliptic.P256(), rand.Reader) + require.NoError(t, err) + + createOp, err := getAnchoredCreateOperation(recoveryKey, updateKey) + 
require.NoError(t, err) + + uniqueSuffix := createOp.UniqueSuffix + + t.Run("success", func(t *testing.T) { + applier := New(p, parser, dc) + + created, err := applier.Apply(createOp, &protocol.ResolutionModel{}) + require.NoError(t, err) + + deactivateOp, err := getAnchoredDeactivateOperation(recoveryKey, uniqueSuffix) + require.NoError(t, err) + + deactivated, err := applier.Apply(deactivateOp, created) + require.NoError(t, err) + require.NotNil(t, deactivated) + + require.Equal(t, created.CreatedTime, deactivated.CreatedTime) + }) + + t.Run("success - anchor until time defaulted based on protocol parameter", func(t *testing.T) { + applier := New(p, parser, dc) + + rm, err := applier.Apply(createOp, &protocol.ResolutionModel{}) + require.NoError(t, err) + + recoverPubKey, err := pubkey.GetPublicKeyJWK(&recoveryKey.PublicKey) + require.NoError(t, err) + + rv, err := commitment.GetRevealValue(recoverPubKey, sha2_256) + require.NoError(t, err) + + now := time.Now().Unix() + + signedDataModel := model.DeactivateSignedDataModel{ + DidSuffix: uniqueSuffix, + RecoveryKey: recoverPubKey, + AnchorFrom: now - 5*60, + } + + signer := ecsigner.New(recoveryKey, "ES256", "") + jws, err := signutil.SignModel(signedDataModel, signer) + require.NoError(t, err) + + deactivateOp := &model.Operation{ + Namespace: mocks.DefaultNS, + ID: "did:sidetree:" + uniqueSuffix, + UniqueSuffix: uniqueSuffix, + Type: operation.TypeDeactivate, + SignedData: jws, + RevealValue: rv, + } + + anchoredOp := getAnchoredOperation(deactivateOp) + anchoredOp.TransactionTime = uint64(now) + + rm, err = applier.Apply(anchoredOp, rm) + require.NoError(t, err) + require.NotNil(t, rm) + }) + + t.Run("deactivate can only be applied to an existing document", func(t *testing.T) { + deactivateOp, err := getAnchoredDeactivateOperation(recoveryKey, uniqueSuffix) + require.NoError(t, err) + + applier := New(p, parser, dc) + doc, err := applier.Apply(deactivateOp, &protocol.ResolutionModel{}) + require.Error(t, err) + require.Contains(t, err.Error(), "deactivate can only be applied to an existing document") + require.Nil(t, doc) + }) + + t.Run("document not found error", func(t *testing.T) { + deactivateOp, err := getAnchoredDeactivateOperation(recoveryKey, dummyUniqueSuffix) + require.NoError(t, err) + + applier := New(p, parser, &mockDocComposer{}) + doc, err := applier.Apply(deactivateOp, &protocol.ResolutionModel{}) + require.Error(t, err) + require.Contains(t, err.Error(), "deactivate can only be applied to an existing document") + require.Nil(t, doc) + }) + + t.Run("missing signed data error", func(t *testing.T) { + applier := New(p, parser, dc) + + rm, err := applier.Apply(createOp, &protocol.ResolutionModel{}) + require.NoError(t, err) + + deactivateOp, err := getDeactivateOperation(recoveryKey, uniqueSuffix) + require.NoError(t, err) + + deactivateOp.SignedData = "" + + anchoredOp := getAnchoredOperation(deactivateOp) + + rm, err = applier.Apply(anchoredOp, rm) + require.Error(t, err) + require.Nil(t, rm) + require.Contains(t, err.Error(), "missing signed data") + }) + + t.Run("unmarshal signed data model error", func(t *testing.T) { + applier := New(p, parser, dc) + + rm, err := applier.Apply(createOp, &protocol.ResolutionModel{}) + require.NoError(t, err) + + deactivateOp, err := getDeactivateOperation(recoveryKey, uniqueSuffix) + require.NoError(t, err) + + signer := ecsigner.New(recoveryKey, "ES256", "") + + compactJWS, err := signutil.SignPayload([]byte("payload"), signer) + require.NoError(t, err) + + 
deactivateOp.SignedData = compactJWS + + anchoredOp := getAnchoredOperation(deactivateOp) + + rm, err = applier.Apply(anchoredOp, rm) + require.Error(t, err) + require.Nil(t, rm) + require.Contains(t, err.Error(), "failed to parse deactivate operation in batch mode: failed to unmarshal signed data model for deactivate") + }) + + t.Run("invalid signature error", func(t *testing.T) { + applier := New(p, parser, dc) + + rm, err := applier.Apply(createOp, &protocol.ResolutionModel{}) + require.NoError(t, err) + + // sign recover operation with different recovery key (than one used in create) + differentRecoveryKey, err := ecdsa.GenerateKey(elliptic.P256(), rand.Reader) + require.NoError(t, err) + + signer := ecsigner.New(differentRecoveryKey, "ES256", "") + deactivateOp, err := getDeactivateOperationWithSigner(signer, recoveryKey, uniqueSuffix) + require.NoError(t, err) + + anchoredOp := getAnchoredOperation(deactivateOp) + + rm, err = applier.Apply(anchoredOp, rm) + require.Error(t, err) + require.Contains(t, err.Error(), "ecdsa: invalid signature") + require.Nil(t, rm) + }) + + t.Run("did suffix doesn't match signed value error", func(t *testing.T) { + applier := New(p, parser, dc) + + rm, err := applier.Apply(createOp, &protocol.ResolutionModel{}) + require.NoError(t, err) + + deactivateOp, err := getDeactivateOperation(recoveryKey, uniqueSuffix) + require.NoError(t, err) + + s := ecsigner.New(recoveryKey, "ES256", "") + + jws, err := signutil.SignModel(&model.DeactivateSignedDataModel{ + DidSuffix: "other", + RecoveryKey: recoveryPubKey, + }, s) + require.NoError(t, err) + + deactivateOp.SignedData = jws + + anchoredOp := getAnchoredOperation(deactivateOp) + + rm, err = applier.Apply(anchoredOp, rm) + require.Error(t, err) + require.Nil(t, rm) + require.Contains(t, err.Error(), "failed to parse deactivate operation in batch mode: signed did suffix mismatch for deactivate") + }) + + t.Run("invalid anchoring time range - anchor until time is less than anchoring time", func(t *testing.T) { + applier := New(p, parser, dc) + + rm, err := applier.Apply(createOp, &protocol.ResolutionModel{}) + require.NoError(t, err) + + recoverPubKey, err := pubkey.GetPublicKeyJWK(&recoveryKey.PublicKey) + require.NoError(t, err) + + rv, err := commitment.GetRevealValue(recoverPubKey, sha2_256) + require.NoError(t, err) + + now := time.Now().Unix() + + signedDataModel := model.DeactivateSignedDataModel{ + DidSuffix: uniqueSuffix, + RecoveryKey: recoverPubKey, + AnchorUntil: now - 5*60, + } + + signer := ecsigner.New(recoveryKey, "ES256", "") + jws, err := signutil.SignModel(signedDataModel, signer) + require.NoError(t, err) + + deactivateOp := &model.Operation{ + Namespace: mocks.DefaultNS, + ID: "did:sidetree:" + uniqueSuffix, + UniqueSuffix: uniqueSuffix, + Type: operation.TypeDeactivate, + SignedData: jws, + RevealValue: rv, + } + + anchoredOp := getAnchoredOperation(deactivateOp) + anchoredOp.TransactionTime = uint64(now) + + rm, err = applier.Apply(anchoredOp, rm) + require.Error(t, err) + require.Nil(t, rm) + require.Contains(t, err.Error(), "invalid anchoring time range: anchor until time is less than anchoring time") + }) +} + +func TestRecover(t *testing.T) { + recoveryKey, e := ecdsa.GenerateKey(elliptic.P256(), rand.Reader) + require.NoError(t, e) + + updateKey, e := ecdsa.GenerateKey(elliptic.P256(), rand.Reader) + require.NoError(t, e) + + createOp, err := getAnchoredCreateOperation(recoveryKey, updateKey) + require.NoError(t, err) + + uniqueSuffix := createOp.UniqueSuffix + + t.Run("success", func(t 
*testing.T) { + applier := New(p, parser, dc) + + created, err := applier.Apply(createOp, &protocol.ResolutionModel{}) + require.NoError(t, err) + + recoverOp, nextRecoveryKey, err := getAnchoredRecoverOperation(recoveryKey, updateKey, uniqueSuffix, 1) + require.NoError(t, err) + + created, err = applier.Apply(recoverOp, created) + require.NoError(t, err) + + // test for recovered key + docBytes, err := created.Doc.Bytes() + require.NoError(t, err) + require.Contains(t, string(docBytes), "recovered") + + // apply recover again - consecutive recoveries are valid + recoverOp, _, err = getAnchoredRecoverOperation(nextRecoveryKey, updateKey, uniqueSuffix, 2) + require.NoError(t, err) + + recovered, err := applier.Apply(recoverOp, created) + require.NoError(t, err) + require.NotNil(t, recovered) + + require.Equal(t, created.CreatedTime, recovered.CreatedTime) + }) + + t.Run("success - operation with invalid signature rejected", func(t *testing.T) { + applier := New(p, parser, dc) + + rm, err := applier.Apply(createOp, &protocol.ResolutionModel{}) + require.NoError(t, err) + + invalidRecoverOp, _, err := getRecoverOperation(recoveryKey, updateKey, uniqueSuffix) + require.NoError(t, err) + + invalidRecoverOp.SignedData = "" + + invalidAnchoredOp := getAnchoredOperation(invalidRecoverOp) + + result, err := applier.Apply(invalidAnchoredOp, rm) + require.Error(t, err) + require.Contains(t, err.Error(), "missing signed data") + require.Nil(t, result) + + // now generate valid recovery operation with same recoveryKey + recoverOp, _, err := getAnchoredRecoverOperation(recoveryKey, updateKey, uniqueSuffix, 2) + require.NoError(t, err) + + result, err = applier.Apply(recoverOp, rm) + require.NoError(t, err) + + // test for recovered key in resolved document + docBytes, err := result.Doc.Bytes() + require.NoError(t, err) + require.Contains(t, string(docBytes), "recovered") + }) + + t.Run("success - operation with valid signature and invalid delta accepted", func(t *testing.T) { + applier := New(p, parser, dc) + + rm, err := applier.Apply(createOp, &protocol.ResolutionModel{}) + require.NoError(t, err) + + invalidRecoverOp, _, err := getRecoverOperation(recoveryKey, updateKey, uniqueSuffix) + require.NoError(t, err) + + invalidRecoverOp.Delta = nil + + invalidAnchoredOp := getAnchoredOperation(invalidRecoverOp) + + result, err := applier.Apply(invalidAnchoredOp, rm) + require.NoError(t, err) + require.NotNil(t, result) + require.Equal(t, make(document.Document), result.Doc) + }) + + t.Run("missing signed data error", func(t *testing.T) { + applier := New(p, parser, dc) + + rm, err := applier.Apply(createOp, &protocol.ResolutionModel{}) + require.NoError(t, err) + + recoverOp, _, err := getRecoverOperation(recoveryKey, updateKey, uniqueSuffix) + require.NoError(t, err) + + recoverOp.SignedData = "" + + anchoredOp := getAnchoredOperation(recoverOp) + + rm, err = applier.Apply(anchoredOp, rm) + require.Error(t, err) + require.Nil(t, rm) + require.Contains(t, err.Error(), "missing signed data") + }) + + t.Run("unmarshal signed data model error", func(t *testing.T) { + applier := New(p, parser, dc) + + rm, err := applier.Apply(createOp, &protocol.ResolutionModel{}) + require.NoError(t, err) + + recoverOp, _, err := getRecoverOperation(recoveryKey, updateKey, uniqueSuffix) + require.NoError(t, err) + + signer := ecsigner.New(recoveryKey, "ES256", "") + + compactJWS, err := signutil.SignPayload([]byte("payload"), signer) + require.NoError(t, err) + + recoverOp.SignedData = compactJWS + + anchoredOp := 
getAnchoredOperation(recoverOp) + + rm, err = applier.Apply(anchoredOp, rm) + require.Error(t, err) + require.Nil(t, rm) + require.Contains(t, err.Error(), "failed to parse recover operation in batch mode: failed to unmarshal signed data model for recover") + }) + + t.Run("invalid signature error", func(t *testing.T) { + applier := New(p, parser, dc) + + rm, err := applier.Apply(createOp, &protocol.ResolutionModel{}) + require.NoError(t, err) + + // sign recover operation with different recovery key (than one used in create) + differentRecoveryKey, err := ecdsa.GenerateKey(elliptic.P256(), rand.Reader) + require.NoError(t, err) + + signer := ecsigner.New(differentRecoveryKey, "ES256", "") + recoverOp, _, err := getRecoverOperationWithSigner(signer, recoveryKey, updateKey, uniqueSuffix) + require.NoError(t, err) + + anchoredOp := getAnchoredOperation(recoverOp) + + rm, err = applier.Apply(anchoredOp, rm) + require.Error(t, err) + require.Nil(t, rm) + require.Contains(t, err.Error(), "ecdsa: invalid signature") + }) + + t.Run("delta hash doesn't match delta error", func(t *testing.T) { + applier := New(p, parser, dc) + + createResult, err := applier.Apply(createOp, &protocol.ResolutionModel{}) + require.NoError(t, err) + + recoverOp, _, err := getRecoverOperation(recoveryKey, updateKey, uniqueSuffix) + require.NoError(t, err) + + recoverOp.Delta = &model.DeltaModel{} + + anchoredOp := getAnchoredOperation(recoverOp) + + recoverResult, err := applier.Apply(anchoredOp, createResult) + require.NoError(t, err) + require.NotNil(t, recoverResult) + require.Equal(t, recoverResult.Doc, make(document.Document)) + require.NotEqual(t, recoverResult.RecoveryCommitment, createResult.RecoveryCommitment) + }) + + t.Run("invalid anchoring range - anchor until time is less then anchoring time", func(t *testing.T) { + applier := New(p, parser, dc) + + createResult, err := applier.Apply(createOp, &protocol.ResolutionModel{}) + require.NoError(t, err) + + updateCommitment, err := getCommitment(updateKey) + require.NoError(t, err) + + delta, err := getDeltaModel(recoveredDoc, updateCommitment) + require.NoError(t, err) + + deltaHash, err := hashing.CalculateModelMultihash(delta, sha2_256) + require.NoError(t, err) + + recoveryPubKey, err := pubkey.GetPublicKeyJWK(&recoveryKey.PublicKey) + require.NoError(t, err) + + _, recoveryCommitment, err := generateKeyAndCommitment() + require.NoError(t, err) + + now := time.Now().Unix() + + recoverSignedData := &model.RecoverSignedDataModel{ + RecoveryKey: recoveryPubKey, + RecoveryCommitment: recoveryCommitment, + DeltaHash: deltaHash, + AnchorUntil: now - 6*60, + } + + signer := ecsigner.New(recoveryKey, "ES256", "") + recoverRequest, err := getRecoverRequest(signer, delta, recoverSignedData) + require.NoError(t, err) + + operationBuffer, err := json.Marshal(recoverRequest) + require.NoError(t, err) + + recoverOp := &model.Operation{ + Namespace: mocks.DefaultNS, + UniqueSuffix: uniqueSuffix, + Type: operation.TypeRecover, + OperationRequest: operationBuffer, + Delta: recoverRequest.Delta, + SignedData: recoverRequest.SignedData, + RevealValue: recoverRequest.RevealValue, + } + + anchoredOp := getAnchoredOperation(recoverOp) + anchoredOp.TransactionTime = uint64(now) + + recoverResult, err := applier.Apply(anchoredOp, createResult) + require.NoError(t, err) + require.NotNil(t, recoverResult) + require.Equal(t, recoverResult.Doc, make(document.Document)) + require.NotEqual(t, recoverResult.RecoveryCommitment, createResult.RecoveryCommitment) + }) + + t.Run("error - 
document composer error", func(t *testing.T) { + applier := New(p, parser, &mockDocComposer{Err: errors.New("doc composer error")}) + + createResult, err := applier.Apply(createOp, &protocol.ResolutionModel{}) + require.NoError(t, err) + + recoverOp, _, err := getRecoverOperation(recoveryKey, updateKey, uniqueSuffix) + require.NoError(t, err) + + anchoredOp := getAnchoredOperation(recoverOp) + + recoverResult, err := applier.Apply(anchoredOp, createResult) + require.NoError(t, err) + require.NotNil(t, recoverResult) + require.Equal(t, make(document.Document), recoverResult.Doc) + require.NotEqual(t, recoverResult.RecoveryCommitment, createResult.RecoveryCommitment) + }) +} + +func TestVerifyAnchoringTimeRange(t *testing.T) { + applier := New(p, parser, dc) + + now := time.Now().Unix() + + t.Run("success - no anchoring times specified", func(t *testing.T) { + err := applier.verifyAnchoringTimeRange(0, 0, uint64(now)) + require.NoError(t, err) + }) + + t.Run("success - anchoring times specified", func(t *testing.T) { + err := applier.verifyAnchoringTimeRange(now-5*60, now+5*50, uint64(now)) + require.NoError(t, err) + }) + + t.Run("error - anchor from time is greater then anchoring time", func(t *testing.T) { + err := applier.verifyAnchoringTimeRange(now+55*60, 0, uint64(now)) + require.Error(t, err) + require.Contains(t, err.Error(), "anchor from time is greater then anchoring time") + }) + + t.Run("error - anchor until time is less then anchoring time", func(t *testing.T) { + err := applier.verifyAnchoringTimeRange(now-5*60, now-5*50, uint64(now)) + require.Error(t, err) + require.Contains(t, err.Error(), "anchor until time is less then anchoring time") + }) +} + +func getUpdateOperation(privateKey *ecdsa.PrivateKey, uniqueSuffix string, operationNumber uint) (*model.Operation, *ecdsa.PrivateKey, error) { + s := ecsigner.New(privateKey, "ES256", updateKeyID) + + return getUpdateOperationWithSigner(s, privateKey, uniqueSuffix, operationNumber) +} + +func getAnchoredUpdateOperation(privateKey *ecdsa.PrivateKey, uniqueSuffix string, operationNumber uint) (*operation.AnchoredOperation, *ecdsa.PrivateKey, error) { + op, nextUpdateKey, err := getUpdateOperation(privateKey, uniqueSuffix, operationNumber) + if err != nil { + return nil, nil, err + } + + return getAnchoredOperationWithBlockNum(op, uint64(operationNumber)), nextUpdateKey, nil +} + +func getUpdateOperationWithSigner(s client.Signer, privateKey *ecdsa.PrivateKey, uniqueSuffix string, operationNumber uint) (*model.Operation, *ecdsa.PrivateKey, error) { + p := map[string]interface{}{ + "op": "replace", + "path": "/test", + "value": "special" + strconv.Itoa(int(operationNumber)), + } + + patchBytes, err := canonicalizer.MarshalCanonical([]map[string]interface{}{p}) + if err != nil { + return nil, nil, err + } + + jsonPatch, err := patch.NewJSONPatch(string(patchBytes)) + if err != nil { + return nil, nil, err + } + + nextUpdateKey, updateCommitment, err := generateKeyAndCommitment() + if err != nil { + return nil, nil, err + } + + delta := &model.DeltaModel{ + UpdateCommitment: updateCommitment, + Patches: []patch.Patch{jsonPatch}, + } + + deltaHash, err := hashing.CalculateModelMultihash(delta, sha2_256) + if err != nil { + return nil, nil, err + } + + updatePubKey, err := pubkey.GetPublicKeyJWK(&privateKey.PublicKey) + if err != nil { + return nil, nil, err + } + + signedData := &model.UpdateSignedDataModel{ + DeltaHash: deltaHash, + UpdateKey: updatePubKey, + } + + jws, err := signutil.SignModel(signedData, s) + if err != nil { + 
return nil, nil, err + } + + rv, err := commitment.GetRevealValue(updatePubKey, sha2_256) + if err != nil { + return nil, nil, err + } + + op := &model.Operation{ + Namespace: mocks.DefaultNS, + ID: "did:sidetree:" + uniqueSuffix, + UniqueSuffix: uniqueSuffix, + Delta: delta, + Type: operation.TypeUpdate, + SignedData: jws, + RevealValue: rv, + } + + return op, nextUpdateKey, nil +} + +func generateKeyAndCommitment() (*ecdsa.PrivateKey, string, error) { + key, err := ecdsa.GenerateKey(elliptic.P256(), rand.Reader) + if err != nil { + return nil, "", err + } + + pubKey, err := pubkey.GetPublicKeyJWK(&key.PublicKey) + if err != nil { + return nil, "", err + } + + c, err := commitment.GetCommitment(pubKey, sha2_256) + if err != nil { + return nil, "", err + } + + return key, c, nil +} + +func getDeactivateOperation(privateKey *ecdsa.PrivateKey, uniqueSuffix string) (*model.Operation, error) { + signer := ecsigner.New(privateKey, "ES256", "") + + return getDeactivateOperationWithSigner(signer, privateKey, uniqueSuffix) +} + +func getAnchoredDeactivateOperation(privateKey *ecdsa.PrivateKey, uniqueSuffix string) (*operation.AnchoredOperation, error) { + op, err := getDeactivateOperation(privateKey, uniqueSuffix) + if err != nil { + return nil, err + } + + return getAnchoredOperation(op), nil +} + +func getDeactivateOperationWithSigner(signer client.Signer, privateKey *ecdsa.PrivateKey, uniqueSuffix string) (*model.Operation, error) { + recoverPubKey, err := pubkey.GetPublicKeyJWK(&privateKey.PublicKey) + if err != nil { + return nil, err + } + + rv, err := commitment.GetRevealValue(recoverPubKey, sha2_256) + if err != nil { + return nil, err + } + + signedDataModel := model.DeactivateSignedDataModel{ + DidSuffix: uniqueSuffix, + RecoveryKey: recoverPubKey, + } + + jws, err := signutil.SignModel(signedDataModel, signer) + if err != nil { + return nil, err + } + + return &model.Operation{ + Namespace: mocks.DefaultNS, + ID: "did:sidetree:" + uniqueSuffix, + UniqueSuffix: uniqueSuffix, + Type: operation.TypeDeactivate, + SignedData: jws, + RevealValue: rv, + }, nil +} + +func getRecoverOperation(recoveryKey, updateKey *ecdsa.PrivateKey, uniqueSuffix string) (*model.Operation, *ecdsa.PrivateKey, error) { + signer := ecsigner.New(recoveryKey, "ES256", "") + + return getRecoverOperationWithSigner(signer, recoveryKey, updateKey, uniqueSuffix) +} + +func getAnchoredRecoverOperation(recoveryKey, updateKey *ecdsa.PrivateKey, uniqueSuffix string, operationNumber uint) (*operation.AnchoredOperation, *ecdsa.PrivateKey, error) { + op, nextRecoveryKey, err := getRecoverOperation(recoveryKey, updateKey, uniqueSuffix) + if err != nil { + return nil, nil, err + } + + return getAnchoredOperationWithBlockNum(op, uint64(operationNumber)), nextRecoveryKey, nil +} + +func getRecoverOperationWithSigner(signer client.Signer, recoveryKey, updateKey *ecdsa.PrivateKey, uniqueSuffix string) (*model.Operation, *ecdsa.PrivateKey, error) { + recoverRequest, nextRecoveryKey, err := getDefaultRecoverRequest(signer, recoveryKey, updateKey) + if err != nil { + return nil, nil, err + } + + operationBuffer, err := json.Marshal(recoverRequest) + if err != nil { + return nil, nil, err + } + + return &model.Operation{ + Namespace: mocks.DefaultNS, + UniqueSuffix: uniqueSuffix, + Type: operation.TypeRecover, + OperationRequest: operationBuffer, + Delta: recoverRequest.Delta, + SignedData: recoverRequest.SignedData, + RevealValue: recoverRequest.RevealValue, + }, nextRecoveryKey, nil +} + +func getRecoverRequest(signer client.Signer, 
delta *model.DeltaModel, signedDataModel *model.RecoverSignedDataModel) (*model.RecoverRequest, error) { + deltaHash, err := hashing.CalculateModelMultihash(delta, sha2_256) + if err != nil { + return nil, err + } + + signedDataModel.DeltaHash = deltaHash + + jws, err := signutil.SignModel(signedDataModel, signer) + if err != nil { + return nil, err + } + + rv, err := commitment.GetRevealValue(signedDataModel.RecoveryKey, sha2_256) + if err != nil { + return nil, err + } + + return &model.RecoverRequest{ + Operation: operation.TypeRecover, + DidSuffix: "suffix", + Delta: delta, + SignedData: jws, + RevealValue: rv, + }, nil +} + +func getDefaultRecoverRequest(signer client.Signer, recoveryKey, updateKey *ecdsa.PrivateKey) (*model.RecoverRequest, *ecdsa.PrivateKey, error) { + updateCommitment, err := getCommitment(updateKey) + if err != nil { + return nil, nil, err + } + + delta, err := getDeltaModel(recoveredDoc, updateCommitment) + if err != nil { + return nil, nil, err + } + + deltaHash, err := hashing.CalculateModelMultihash(delta, sha2_256) + if err != nil { + return nil, nil, err + } + + recoveryPubKey, err := pubkey.GetPublicKeyJWK(&recoveryKey.PublicKey) + if err != nil { + return nil, nil, err + } + + nextRecoveryKey, recoveryCommitment, err := generateKeyAndCommitment() + if err != nil { + return nil, nil, err + } + + recoverSignedData := &model.RecoverSignedDataModel{ + RecoveryKey: recoveryPubKey, + RecoveryCommitment: recoveryCommitment, + DeltaHash: deltaHash, + } + + req, err := getRecoverRequest(signer, delta, recoverSignedData) + if err != nil { + return nil, nil, err + } + + return req, nextRecoveryKey, nil +} + +func getCreateOperationWithDoc(recoveryKey, updateKey *ecdsa.PrivateKey, doc string) (*model.Operation, error) { + createRequest, err := getCreateRequest(recoveryKey, updateKey) + if err != nil { + return nil, err + } + + operationBuffer, err := json.Marshal(createRequest) + if err != nil { + return nil, err + } + + uniqueSuffix, err := hashing.CalculateModelMultihash(createRequest.SuffixData, sha2_256) + if err != nil { + return nil, err + } + + updateCommitment, err := getCommitment(updateKey) + if err != nil { + return nil, err + } + + delta, err := getDeltaModel(doc, updateCommitment) + if err != nil { + return nil, err + } + + suffixData, err := getSuffixData(recoveryKey, delta) + if err != nil { + return nil, err + } + + return &model.Operation{ + Namespace: mocks.DefaultNS, + ID: "did:sidetree:" + uniqueSuffix, + UniqueSuffix: uniqueSuffix, + Type: operation.TypeCreate, + OperationRequest: operationBuffer, + Delta: delta, + SuffixData: suffixData, + }, nil +} + +func getCreateOperation(recoveryKey, updateKey *ecdsa.PrivateKey) (*model.Operation, error) { + return getCreateOperationWithDoc(recoveryKey, updateKey, validDoc) +} + +func getAnchoredCreateOperation(recoveryKey, updateKey *ecdsa.PrivateKey) (*operation.AnchoredOperation, error) { + op, err := getCreateOperation(recoveryKey, updateKey) + if err != nil { + return nil, err + } + + return getAnchoredOperation(op), nil +} + +func getAnchoredOperation(op *model.Operation) *operation.AnchoredOperation { + anchoredOp, err := model.GetAnchoredOperation(op) + if err != nil { + panic(err) + } + + anchoredOp.TransactionTime = uint64(time.Now().Unix()) + + return anchoredOp +} + +func getAnchoredOperationWithBlockNum(op *model.Operation, blockNum uint64) *operation.AnchoredOperation { + anchored := getAnchoredOperation(op) + anchored.TransactionTime = blockNum + + return anchored +} + +func 
getCreateRequest(recoveryKey, updateKey *ecdsa.PrivateKey) (*model.CreateRequest, error) { + updateCommitment, err := getCommitment(updateKey) + if err != nil { + return nil, err + } + + delta, err := getDeltaModel(validDoc, updateCommitment) + if err != nil { + return nil, err + } + + suffixData, err := getSuffixData(recoveryKey, delta) + if err != nil { + return nil, err + } + + return &model.CreateRequest{ + Operation: operation.TypeCreate, + Delta: delta, + SuffixData: suffixData, + }, nil +} + +func getDeltaModel(doc string, updateCommitment string) (*model.DeltaModel, error) { + patches, err := patch.PatchesFromDocument(doc) + if err != nil { + return nil, err + } + + return &model.DeltaModel{ + Patches: patches, + UpdateCommitment: updateCommitment, + }, nil +} + +func getCommitment(key *ecdsa.PrivateKey) (string, error) { + pubKey, err := pubkey.GetPublicKeyJWK(&key.PublicKey) + if err != nil { + return "", err + } + + c, err := commitment.GetCommitment(pubKey, sha2_256) + if err != nil { + return "", err + } + + return c, nil +} + +func getSuffixData(privateKey *ecdsa.PrivateKey, delta *model.DeltaModel) (*model.SuffixDataModel, error) { + recoveryCommitment, err := getCommitment(privateKey) + if err != nil { + return nil, err + } + + deltaHash, err := hashing.CalculateModelMultihash(delta, sha2_256) + if err != nil { + return nil, err + } + + return &model.SuffixDataModel{ + DeltaHash: deltaHash, + RecoveryCommitment: recoveryCommitment, + }, nil +} + +const validDoc = `{ + "publicKey": [{ + "id": "key1", + "type": "JsonWebKey2020", + "purposes": ["assertionMethod"], + "publicKeyJwk": { + "kty": "EC", + "crv": "P-256K", + "x": "PUymIqdtF_qxaAqPABSw-C-owT1KYYQbsMKFM-L9fJA", + "y": "nM84jDHCMOTGTh_ZdHq4dBBdo4Z5PkEOW9jA8z8IsGc" + } + }] +}` + +const recoveredDoc = `{ + "publicKey": [{ + "id": "recovered", + "type": "JsonWebKey2020", + "purposes": ["authentication"], + "publicKeyJwk": { + "kty": "EC", + "crv": "P-256K", + "x": "PUymIqdtF_qxaAqPABSw-C-owT1KYYQbsMKFM-L9fJA", + "y": "nM84jDHCMOTGTh_ZdHq4dBBdo4Z5PkEOW9jA8z8IsGc" + } + }] +}` + +type mockDocComposer struct { + Err error +} + +// ApplyPatches mocks applying patches to the document. +func (m *mockDocComposer) ApplyPatches(doc document.Document, patches []patch.Patch) (document.Document, error) { + if m.Err != nil { + return nil, m.Err + } + + return make(document.Document), nil +} diff --git a/pkg/versions/1_0/operationparser/commitment.go b/pkg/versions/1_0/operationparser/commitment.go new file mode 100644 index 0000000..b206c88 --- /dev/null +++ b/pkg/versions/1_0/operationparser/commitment.go @@ -0,0 +1,49 @@ +package operationparser + +import ( + "fmt" + + "github.com/trustbloc/sidetree-go/pkg/api/operation" +) + +// GetRevealValue returns this operation's reveal value. +func (p *Parser) GetRevealValue(opBytes []byte) (string, error) { + // namespace is irrelevant in this case + op, err := p.ParseOperation("", opBytes, true) + if err != nil { + return "", fmt.Errorf("get reveal value - parse operation error: %s", err.Error()) + } + + if op.Type == operation.TypeCreate { + return "", fmt.Errorf("operation type '%s' not supported for getting operation reveal value", op.Type) + } + + return op.RevealValue, nil +} + +// GetCommitment returns the next operation commitment. 
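+// For an update operation this is the delta's update commitment, for a recover
+// operation it is the recovery commitment from the signed data model, and for a
+// deactivate operation it is the empty string (a deactivated DID has no next
+// operation). A minimal usage sketch, assuming updateRequestBytes holds a
+// parseable update request (illustrative only, not part of the imported code):
+//
+//	next, err := parser.GetCommitment(updateRequestBytes)
+//	// next is the commitment that the next operation's reveal value must satisfy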
+func (p *Parser) GetCommitment(opBytes []byte) (string, error) { + // namespace is irrelevant in this case + op, err := p.ParseOperation("", opBytes, true) + if err != nil { + return "", fmt.Errorf("get commitment - parse operation error: %s", err.Error()) + } + + switch op.Type { //nolint:exhaustive + case operation.TypeUpdate: + return op.Delta.UpdateCommitment, nil + + case operation.TypeDeactivate: + return "", nil + + case operation.TypeRecover: + signedDataModel, innerErr := p.ParseSignedDataForRecover(op.SignedData) + if innerErr != nil { + return "", fmt.Errorf("failed to parse signed data model for recover: %s", innerErr.Error()) + } + + return signedDataModel.RecoveryCommitment, nil + } + + return "", fmt.Errorf("operation type '%s' not supported for getting next operation commitment", op.Type) +} diff --git a/pkg/versions/1_0/operationparser/commitment_test.go b/pkg/versions/1_0/operationparser/commitment_test.go new file mode 100644 index 0000000..d8e8741 --- /dev/null +++ b/pkg/versions/1_0/operationparser/commitment_test.go @@ -0,0 +1,295 @@ +/* +Copyright Gen Digital Inc. All Rights Reserved. + +SPDX-License-Identifier: Apache-2.0 +*/ + +package operationparser + +import ( + "crypto/ecdsa" + "crypto/elliptic" + "crypto/rand" + "testing" + + "github.com/stretchr/testify/require" + + "github.com/trustbloc/sidetree-go/pkg/api/protocol" + "github.com/trustbloc/sidetree-go/pkg/commitment" + "github.com/trustbloc/sidetree-go/pkg/mocks" + "github.com/trustbloc/sidetree-go/pkg/patch" + "github.com/trustbloc/sidetree-go/pkg/util/ecsigner" + "github.com/trustbloc/sidetree-go/pkg/util/pubkey" + "github.com/trustbloc/sidetree-go/pkg/versions/1_0/client" +) + +func TestParser_GetCommitment(t *testing.T) { + p := mocks.NewMockProtocolClient() + + parser := New(p.Protocol) + + recoveryKey, _, err := generateKeyAndCommitment(p.Protocol) + require.NoError(t, err) + + updateKey, _, err := generateKeyAndCommitment(p.Protocol) + require.NoError(t, err) + + _, recoveryCommitment, err := generateKeyAndCommitment(p.Protocol) + require.NoError(t, err) + + _, updateCommitment, err := generateKeyAndCommitment(p.Protocol) + require.NoError(t, err) + + t.Run("success - recover", func(t *testing.T) { + recover, err := generateRecoverRequest(recoveryKey, recoveryCommitment, parser.Protocol) + require.NoError(t, err) + + c, err := parser.GetCommitment(recover) + require.NoError(t, err) + require.NotNil(t, c) + require.Equal(t, c, recoveryCommitment) + }) + + t.Run("success - deactivate", func(t *testing.T) { + deactivate, err := generateDeactivateRequest(recoveryKey) + require.NoError(t, err) + + c, err := parser.GetCommitment(deactivate) + require.NoError(t, err) + require.NotNil(t, c) + require.Equal(t, c, "") + }) + + t.Run("success - update", func(t *testing.T) { + update, err := generateUpdateRequest(updateKey, updateCommitment, parser.Protocol) + require.NoError(t, err) + + c, err := parser.GetCommitment(update) + require.NoError(t, err) + require.NotNil(t, c) + require.Equal(t, c, updateCommitment) + }) + + t.Run("success - update", func(t *testing.T) { + update, err := generateUpdateRequest(updateKey, updateCommitment, parser.Protocol) + require.NoError(t, err) + + c, err := parser.GetCommitment(update) + require.NoError(t, err) + require.NotNil(t, c) + require.Equal(t, c, updateCommitment) + }) + + t.Run("error - create", func(t *testing.T) { + create, err := generateCreateRequest(recoveryCommitment, updateCommitment, parser.Protocol) + require.NoError(t, err) + + c, err := 
parser.GetCommitment(create) + require.Error(t, err) + require.Empty(t, c) + require.Contains(t, err.Error(), "operation type 'create' not supported for getting next operation commitment") + }) + + t.Run("error - parse operation fails", func(t *testing.T) { + c, err := parser.GetCommitment([]byte(`{"type":"other"}`)) + require.Error(t, err) + require.Empty(t, c) + require.Contains(t, err.Error(), "get commitment - parse operation error") + }) +} + +func TestParser_GetRevealValue(t *testing.T) { + p := mocks.NewMockProtocolClient() + + parser := New(p.Protocol) + + recoveryKey, _, err := generateKeyAndCommitment(p.Protocol) + require.NoError(t, err) + + updateKey, _, err := generateKeyAndCommitment(p.Protocol) + require.NoError(t, err) + + _, recoveryCommitment, err := generateKeyAndCommitment(p.Protocol) + require.NoError(t, err) + + _, updateCommitment, err := generateKeyAndCommitment(p.Protocol) + require.NoError(t, err) + + t.Run("success - recover", func(t *testing.T) { + recover, err := generateRecoverRequest(recoveryKey, recoveryCommitment, parser.Protocol) + require.NoError(t, err) + + rv, err := parser.GetRevealValue(recover) + require.NoError(t, err) + require.NotEmpty(t, rv) + + pubJWK, err := pubkey.GetPublicKeyJWK(&recoveryKey.PublicKey) + require.NoError(t, err) + + expected, err := commitment.GetRevealValue(pubJWK, parser.Protocol.MultihashAlgorithms[0]) + require.NoError(t, err) + + require.Equal(t, rv, expected) + }) + + t.Run("success - deactivate", func(t *testing.T) { + deactivate, err := generateDeactivateRequest(recoveryKey) + require.NoError(t, err) + + rv, err := parser.GetRevealValue(deactivate) + require.NoError(t, err) + require.NotEmpty(t, rv) + + pubJWK, err := pubkey.GetPublicKeyJWK(&recoveryKey.PublicKey) + require.NoError(t, err) + + expected, err := commitment.GetRevealValue(pubJWK, parser.Protocol.MultihashAlgorithms[0]) + require.NoError(t, err) + + require.Equal(t, rv, expected) + }) + + t.Run("success - update", func(t *testing.T) { + update, err := generateUpdateRequest(updateKey, updateCommitment, parser.Protocol) + require.NoError(t, err) + + rv, err := parser.GetRevealValue(update) + require.NoError(t, err) + require.NotEmpty(t, rv) + + pubJWK, err := pubkey.GetPublicKeyJWK(&updateKey.PublicKey) + require.NoError(t, err) + + expected, err := commitment.GetRevealValue(pubJWK, parser.Protocol.MultihashAlgorithms[0]) + require.NoError(t, err) + + require.Equal(t, rv, expected) + }) + + t.Run("error - create", func(t *testing.T) { + create, err := generateCreateRequest(recoveryCommitment, updateCommitment, parser.Protocol) + require.NoError(t, err) + + c, err := parser.GetRevealValue(create) + require.Error(t, err) + require.Empty(t, c) + require.Contains(t, err.Error(), "operation type 'create' not supported for getting operation reveal value") + }) + + t.Run("error - parse operation fails", func(t *testing.T) { + c, err := parser.GetRevealValue([]byte(`{"type":"other"}`)) + require.Error(t, err) + require.Empty(t, c) + require.Contains(t, err.Error(), "get reveal value - parse operation error") + }) +} + +func generateRecoverRequest(recoveryKey *ecdsa.PrivateKey, recoveryCommitment string, p protocol.Protocol) ([]byte, error) { + jwk, err := pubkey.GetPublicKeyJWK(&recoveryKey.PublicKey) + if err != nil { + return nil, err + } + + _, updateCommitment, err := generateKeyAndCommitment(p) + if err != nil { + return nil, err + } + + rv, err := commitment.GetRevealValue(jwk, sha2_256) + if err != nil { + return nil, err + } + + info := 
&client.RecoverRequestInfo{ + DidSuffix: "recover-suffix", + OpaqueDocument: `{"test":"value"}`, + RecoveryCommitment: recoveryCommitment, + UpdateCommitment: updateCommitment, // not evaluated in operation getting commitment/reveal value + RecoveryKey: jwk, + MultihashCode: p.MultihashAlgorithms[0], + Signer: ecsigner.New(recoveryKey, "ES256", ""), + RevealValue: rv, + } + + return client.NewRecoverRequest(info) +} + +func generateCreateRequest(recoveryCommitment, updateCommitment string, p protocol.Protocol) ([]byte, error) { + info := &client.CreateRequestInfo{ + OpaqueDocument: `{"test":"value"}`, + RecoveryCommitment: recoveryCommitment, + UpdateCommitment: updateCommitment, + MultihashCode: p.MultihashAlgorithms[0], + } + + return client.NewCreateRequest(info) +} + +func generateDeactivateRequest(recoveryKey *ecdsa.PrivateKey) ([]byte, error) { + jwk, err := pubkey.GetPublicKeyJWK(&recoveryKey.PublicKey) + if err != nil { + return nil, err + } + + rv, err := commitment.GetRevealValue(jwk, sha2_256) + if err != nil { + return nil, err + } + + info := &client.DeactivateRequestInfo{ + DidSuffix: "deactivate-suffix", + Signer: ecsigner.New(recoveryKey, "ES256", ""), + RecoveryKey: jwk, + RevealValue: rv, + } + + return client.NewDeactivateRequest(info) +} + +func generateUpdateRequest(updateKey *ecdsa.PrivateKey, updateCommitment string, p protocol.Protocol) ([]byte, error) { + jwk, err := pubkey.GetPublicKeyJWK(&updateKey.PublicKey) + if err != nil { + return nil, err + } + + testPatch, err := patch.NewJSONPatch(`[{"op": "replace", "path": "/name", "value": "Jane"}]`) + if err != nil { + return nil, err + } + + rv, err := commitment.GetRevealValue(jwk, sha2_256) + if err != nil { + return nil, err + } + + info := &client.UpdateRequestInfo{ + DidSuffix: "update-suffix", + Signer: ecsigner.New(updateKey, "ES256", ""), + UpdateCommitment: updateCommitment, + UpdateKey: jwk, + Patches: []patch.Patch{testPatch}, + MultihashCode: p.MultihashAlgorithms[0], + RevealValue: rv, + } + + return client.NewUpdateRequest(info) +} + +func generateKeyAndCommitment(p protocol.Protocol) (*ecdsa.PrivateKey, string, error) { + key, err := ecdsa.GenerateKey(elliptic.P256(), rand.Reader) + if err != nil { + return nil, "", err + } + + pubKey, err := pubkey.GetPublicKeyJWK(&key.PublicKey) + if err != nil { + return nil, "", err + } + + c, err := commitment.GetCommitment(pubKey, p.MultihashAlgorithms[0]) + if err != nil { + return nil, "", err + } + + return key, c, nil +} diff --git a/pkg/versions/1_0/operationparser/create.go b/pkg/versions/1_0/operationparser/create.go new file mode 100644 index 0000000..cc77b6d --- /dev/null +++ b/pkg/versions/1_0/operationparser/create.go @@ -0,0 +1,169 @@ +/* +Copyright Gen Digital Inc. All Rights Reserved. + +SPDX-License-Identifier: Apache-2.0 +*/ + +package operationparser + +import ( + "encoding/json" + "errors" + "fmt" + + "github.com/trustbloc/sidetree-go/pkg/api/operation" + "github.com/trustbloc/sidetree-go/pkg/canonicalizer" + "github.com/trustbloc/sidetree-go/pkg/hashing" + "github.com/trustbloc/sidetree-go/pkg/patch" + "github.com/trustbloc/sidetree-go/pkg/versions/1_0/model" + "github.com/trustbloc/sidetree-go/pkg/versions/1_0/operationparser/patchvalidator" +) + +// ParseCreateOperation will parse create operation. 
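+// The request is the canonical JSON produced by the client package, of the
+// form shown in the jcsRequest test fixture (abbreviated here):
+//
+//	{"type":"create","suffixData":{...},"delta":{...}}
+//
+// Suffix data is always validated; the anchor origin, the delta, its hash
+// binding to the suffix data, and the commitment uniqueness rule are only
+// checked outside batch mode.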
+func (p *Parser) ParseCreateOperation(request []byte, batch bool) (*model.Operation, error) { + schema, err := p.parseCreateRequest(request) + if err != nil { + return nil, err + } + + // create is not valid if suffix data is not valid + err = p.ValidateSuffixData(schema.SuffixData) + if err != nil { + return nil, err + } + + if !batch { + err = p.anchorOriginValidator.Validate(schema.SuffixData.AnchorOrigin) + if err != nil { + return nil, err + } + + err = p.ValidateDelta(schema.Delta) + if err != nil { + return nil, err + } + + // verify actual delta hash matches expected delta hash + err = hashing.IsValidModelMultihash(schema.Delta, schema.SuffixData.DeltaHash) + if err != nil { + return nil, fmt.Errorf("delta doesn't match suffix data delta hash: %s", err.Error()) + } + + if schema.Delta.UpdateCommitment == schema.SuffixData.RecoveryCommitment { + return nil, errors.New("recovery and update commitments cannot be equal, re-using public keys is not allowed") + } + } + + uniqueSuffix, err := model.GetUniqueSuffix(schema.SuffixData, p.MultihashAlgorithms) + if err != nil { + return nil, err + } + + return &model.Operation{ + OperationRequest: request, + Type: operation.TypeCreate, + UniqueSuffix: uniqueSuffix, + Delta: schema.Delta, + SuffixData: schema.SuffixData, + AnchorOrigin: schema.SuffixData.AnchorOrigin, + }, nil +} + +// parseCreateRequest parses a 'create' request. +func (p *Parser) parseCreateRequest(payload []byte) (*model.CreateRequest, error) { + schema := &model.CreateRequest{} + err := json.Unmarshal(payload, schema) + if err != nil { + return nil, err + } + + return schema, nil +} + +// ValidateDelta validates delta. +func (p *Parser) ValidateDelta(delta *model.DeltaModel) error { + if delta == nil { + return errors.New("missing delta") + } + + if len(delta.Patches) == 0 { + return errors.New("missing patches") + } + + for _, ptch := range delta.Patches { + action, err := ptch.GetAction() + if err != nil { + return err + } + + if !p.isPatchEnabled(action) { + return fmt.Errorf("%s patch action is not enabled", action) + } + + if err := patchvalidator.Validate(ptch); err != nil { + return err + } + } + + if err := p.validateMultihash(delta.UpdateCommitment, "update commitment"); err != nil { + return err + } + + return p.validateDeltaSize(delta) +} + +func (p *Parser) validateMultihash(mh, alias string) error { + if len(mh) > int(p.MaxOperationHashLength) { + return fmt.Errorf("%s length[%d] exceeds maximum hash length[%d]", alias, len(mh), p.MaxOperationHashLength) + } + + if !hashing.IsComputedUsingMultihashAlgorithms(mh, p.MultihashAlgorithms) { + return fmt.Errorf("%s is not computed with the required hash algorithms: %d", alias, p.MultihashAlgorithms) + } + + return nil +} + +func (p *Parser) validateDeltaSize(delta *model.DeltaModel) error { + canonicalDelta, err := canonicalizer.MarshalCanonical(delta) + if err != nil { + return fmt.Errorf("marshal canonical for delta failed: %s", err.Error()) + } + + if len(canonicalDelta) > int(p.MaxDeltaSize) { + return fmt.Errorf("delta size[%d] exceeds maximum delta size[%d]", len(canonicalDelta), p.MaxDeltaSize) + } + + return nil +} + +func (p *Parser) isPatchEnabled(action patch.Action) bool { + for _, allowed := range p.Patches { + if patch.Action(allowed) == action { + return true + } + } + + return false +} + +// ValidateSuffixData validates suffix data. 
+func (p *Parser) ValidateSuffixData(suffixData *model.SuffixDataModel) error { + if suffixData == nil { + return errors.New("missing suffix data") + } + + if err := p.validateMultihash(suffixData.RecoveryCommitment, "recovery commitment"); err != nil { + return err + } + + return p.validateMultihash(suffixData.DeltaHash, "delta hash") +} + +func (p *Parser) validateCreateRequest(create *model.CreateRequest) error { + if create.SuffixData == nil { + return errors.New("missing suffix data") + } + + return nil +} diff --git a/pkg/versions/1_0/operationparser/create_test.go b/pkg/versions/1_0/operationparser/create_test.go new file mode 100644 index 0000000..e5bc73d --- /dev/null +++ b/pkg/versions/1_0/operationparser/create_test.go @@ -0,0 +1,412 @@ +/* +Copyright Gen Digital Inc. All Rights Reserved. + +SPDX-License-Identifier: Apache-2.0 +*/ + +package operationparser + +import ( + "encoding/json" + "testing" + + "github.com/stretchr/testify/require" + + "github.com/trustbloc/sidetree-go/pkg/api/operation" + "github.com/trustbloc/sidetree-go/pkg/api/protocol" + "github.com/trustbloc/sidetree-go/pkg/commitment" + "github.com/trustbloc/sidetree-go/pkg/encoder" + "github.com/trustbloc/sidetree-go/pkg/hashing" + "github.com/trustbloc/sidetree-go/pkg/jws" + "github.com/trustbloc/sidetree-go/pkg/patch" + "github.com/trustbloc/sidetree-go/pkg/versions/1_0/model" +) + +const ( + invalid = "invalid" +) + +func TestParseCreateOperation(t *testing.T) { + p := protocol.Protocol{ + MaxOperationHashLength: 100, + MaxDeltaSize: maxDeltaSize, + MultihashAlgorithms: []uint{sha2_256}, + Patches: []string{"replace", "add-public-keys", "remove-public-keys", "add-services", "remove-services", "ietf-json-patch"}, + } + + parser := New(p) + + t.Run("success", func(t *testing.T) { + request, err := getCreateRequestBytes() + require.NoError(t, err) + + op, err := parser.ParseCreateOperation(request, false) + require.NoError(t, err) + require.Equal(t, operation.TypeCreate, op.Type) + }) + + t.Run("success - JCS", func(t *testing.T) { + op, err := parser.ParseCreateOperation([]byte(jcsRequest), true) + require.NoError(t, err) + require.Equal(t, operation.TypeCreate, op.Type) + }) + + t.Run("parse create request error", func(t *testing.T) { + schema, err := parser.ParseCreateOperation([]byte(""), true) + require.Error(t, err) + require.Nil(t, schema) + require.Contains(t, err.Error(), "unexpected end of JSON input") + }) + t.Run("missing suffix data", func(t *testing.T) { + create, err := getCreateRequest() + require.NoError(t, err) + create.SuffixData = nil + + request, err := json.Marshal(create) + require.NoError(t, err) + + op, err := parser.ParseCreateOperation(request, true) + require.Error(t, err) + require.Nil(t, op) + require.Contains(t, err.Error(), "missing suffix data") + }) + + t.Run("parse suffix data error", func(t *testing.T) { + create, err := getCreateRequest() + require.NoError(t, err) + + create.SuffixData = &model.SuffixDataModel{} + request, err := json.Marshal(create) + require.NoError(t, err) + + op, err := parser.ParseCreateOperation(request, true) + require.Error(t, err) + require.Contains(t, err.Error(), "recovery commitment is not computed with the required hash algorithms: [18]") + require.Nil(t, op) + }) + t.Run("missing delta", func(t *testing.T) { + create, err := getCreateRequest() + require.NoError(t, err) + create.Delta = nil + + request, err := json.Marshal(create) + require.NoError(t, err) + + op, err := parser.ParseCreateOperation(request, false) + require.Error(t, err) + 
require.Nil(t, op) + require.Contains(t, err.Error(), "missing delta") + }) + + t.Run("missing delta is ok in batch mode", func(t *testing.T) { + create, err := getCreateRequest() + require.NoError(t, err) + create.Delta = nil + + request, err := json.Marshal(create) + require.NoError(t, err) + + op, err := parser.ParseCreateOperation(request, true) + require.NoError(t, err) + require.NotNil(t, op) + require.Nil(t, op.Delta) + }) + + t.Run("parse patch data error", func(t *testing.T) { + create, err := getCreateRequest() + require.NoError(t, err) + + create.Delta = &model.DeltaModel{} + request, err := json.Marshal(create) + require.NoError(t, err) + + op, err := parser.ParseCreateOperation(request, false) + require.Error(t, err) + require.Contains(t, err.Error(), "missing patches") + require.Nil(t, op) + }) + + t.Run("delta doesn't match suffix data delta hash", func(t *testing.T) { + create, err := getCreateRequest() + require.NoError(t, err) + + delta, err := getDelta() + require.NoError(t, err) + delta.UpdateCommitment = computeMultihash([]byte("different")) + + create.Delta = delta + request, err := json.Marshal(create) + require.NoError(t, err) + + op, err := parser.ParseCreateOperation(request, false) + require.Error(t, err) + require.Contains(t, err.Error(), "delta doesn't match suffix data delta hash") + require.Nil(t, op) + }) + + t.Run("error - update commitment equals recovery commitment", func(t *testing.T) { + create, err := getCreateRequest() + require.NoError(t, err) + + create.SuffixData.RecoveryCommitment = create.Delta.UpdateCommitment + + request, err := json.Marshal(create) + require.NoError(t, err) + + op, err := parser.ParseCreateOperation(request, false) + require.Error(t, err) + require.Contains(t, err.Error(), "recovery and update commitments cannot be equal, re-using public keys is not allowed") + require.Nil(t, op) + }) +} + +func TestValidateSuffixData(t *testing.T) { + p := protocol.Protocol{ + MaxOperationHashLength: maxHashLength, + MultihashAlgorithms: []uint{sha2_256}, + } + + parser := New(p) + + t.Run("success", func(t *testing.T) { + suffixData, err := getSuffixData() + require.NoError(t, err) + + err = parser.ValidateSuffixData(suffixData) + require.NoError(t, err) + }) + t.Run("invalid patch data hash", func(t *testing.T) { + suffixData, err := getSuffixData() + require.NoError(t, err) + + suffixData.DeltaHash = "" + err = parser.ValidateSuffixData(suffixData) + require.Error(t, err) + require.Contains(t, err.Error(), "delta hash is not computed with the required hash algorithms: [18]") + }) + t.Run("invalid next recovery commitment hash", func(t *testing.T) { + suffixData, err := getSuffixData() + require.NoError(t, err) + + suffixData.RecoveryCommitment = "" + err = parser.ValidateSuffixData(suffixData) + require.Error(t, err) + require.Contains(t, err.Error(), "recovery commitment is not computed with the required hash algorithms: [18]") + }) + t.Run("recovery commitment exceeds maximum hash length", func(t *testing.T) { + lowHashLength := protocol.Protocol{ + MaxOperationHashLength: 10, + MultihashAlgorithms: []uint{sha2_256}, + } + + suffixData, err := getSuffixData() + require.NoError(t, err) + + err = New(lowHashLength).ValidateSuffixData(suffixData) + require.Error(t, err) + require.Contains(t, err.Error(), "recovery commitment length[46] exceeds maximum hash length[10]") + }) +} + +func TestValidateDelta(t *testing.T) { + patches := []string{"add-public-keys", "remove-public-keys", "add-services", "remove-services", "ietf-json-patch"} + + p 
:= protocol.Protocol{ + MaxOperationHashLength: maxHashLength, + MaxDeltaSize: maxDeltaSize, + MultihashAlgorithms: []uint{sha2_256}, + Patches: patches, + } + + parser := New(p) + + t.Run("success", func(t *testing.T) { + delta, err := getDelta() + require.NoError(t, err) + + err = parser.ValidateDelta(delta) + require.NoError(t, err) + }) + + t.Run("error - delta exceeds max delta size ", func(t *testing.T) { + parserWithLowMaxDeltaSize := New(protocol.Protocol{ + MaxOperationHashLength: maxHashLength, + MaxDeltaSize: 50, + MultihashAlgorithms: []uint{sha2_256}, + Patches: patches, + }) + + delta, err := getDelta() + require.NoError(t, err) + + err = parserWithLowMaxDeltaSize.ValidateDelta(delta) + require.Error(t, err) + require.Contains(t, err.Error(), "delta size[336] exceeds maximum delta size[50]") + }) + + t.Run("invalid next update commitment hash", func(t *testing.T) { + delta, err := getDelta() + require.NoError(t, err) + + delta.UpdateCommitment = "" + err = parser.ValidateDelta(delta) + require.Error(t, err) + require.Contains(t, err.Error(), + "update commitment is not computed with the required hash algorithms: [18]") + }) + + t.Run("update commitment exceeds maximum hash length", func(t *testing.T) { + lowMaxHashLength := protocol.Protocol{ + MaxOperationHashLength: 10, + MaxDeltaSize: 50, + MultihashAlgorithms: []uint{sha2_256}, + Patches: patches, + } + + delta, err := getDelta() + require.NoError(t, err) + + err = New(lowMaxHashLength).ValidateDelta(delta) + require.Error(t, err) + require.Contains(t, err.Error(), + "update commitment length[46] exceeds maximum hash length[10]") + }) + + t.Run("missing patches", func(t *testing.T) { + delta, err := getDelta() + require.NoError(t, err) + + delta.Patches = []patch.Patch{} + err = parser.ValidateDelta(delta) + require.Error(t, err) + require.Contains(t, err.Error(), + "missing patches") + }) + + t.Run("error - invalid delta", func(t *testing.T) { + err := parser.validateDeltaSize(nil) + require.Error(t, err) + require.Contains(t, err.Error(), "marshal canonical for delta failed") + }) +} + +func TestValidateCreateRequest(t *testing.T) { + p := protocol.Protocol{} + + parser := New(p) + + t.Run("success", func(t *testing.T) { + create, err := getCreateRequest() + require.NoError(t, err) + + err = parser.validateCreateRequest(create) + require.NoError(t, err) + }) + + t.Run("missing suffix data", func(t *testing.T) { + create, err := getCreateRequest() + require.NoError(t, err) + create.SuffixData = nil + + err = parser.validateCreateRequest(create) + require.Error(t, err) + require.Contains(t, err.Error(), "missing suffix data") + }) +} + +func getCreateRequest() (*model.CreateRequest, error) { + delta, err := getDelta() + if err != nil { + return nil, err + } + + suffixData, err := getSuffixData() + if err != nil { + return nil, err + } + + return &model.CreateRequest{ + Operation: operation.TypeCreate, + Delta: delta, + SuffixData: suffixData, + }, nil +} + +func getCreateRequestBytes() ([]byte, error) { + req, err := getCreateRequest() + if err != nil { + return nil, err + } + + return json.Marshal(req) +} + +func getDelta() (*model.DeltaModel, error) { + patches, err := patch.PatchesFromDocument(validDoc) + if err != nil { + return nil, err + } + + return &model.DeltaModel{ + Patches: patches, + UpdateCommitment: computeMultihash([]byte("updateReveal")), + }, nil +} + +func getSuffixData() (*model.SuffixDataModel, error) { + jwk := &jws.JWK{ + Kty: "kty", + Crv: "crv", + X: "x", + } + + recoveryCommitment, err := 
commitment.GetCommitment(jwk, sha2_256) + if err != nil { + return nil, err + } + + delta, err := getDelta() + if err != nil { + return nil, err + } + + deltaHash, err := hashing.CalculateModelMultihash(delta, sha2_256) + if err != nil { + return nil, err + } + + return &model.SuffixDataModel{ + DeltaHash: deltaHash, + RecoveryCommitment: recoveryCommitment, + }, nil +} + +func computeMultihash(data []byte) string { + mh, err := hashing.ComputeMultihash(sha2_256, data) + if err != nil { + panic(err) + } + + return encoder.EncodeToString(mh) +} + +const validDoc = `{ + "publicKey": [ + { + "id": "key1", + "type": "JsonWebKey2020", + "purposes": ["authentication"], + "publicKeyJwk": { + "kty": "EC", + "crv": "P-256K", + "x": "PUymIqdtF_qxaAqPABSw-C-owT1KYYQbsMKFM-L9fJA", + "y": "nM84jDHCMOTGTh_ZdHq4dBBdo4Z5PkEOW9jA8z8IsGc" + } + } + ] +}` + +// samples below are taken from reference implementation tests. +const ( + jcsRequest = `{"delta":{"patches":[{"action":"replace","document":{"publicKeys":[{"id":"anySigningKeyId","publicKeyJwk":{"crv":"secp256k1","kty":"EC","x":"H61vqAm_-TC3OrFSqPrEfSfg422NR8QHPqr0mLx64DM","y":"s0WnWY87JriBjbyoY3FdUmifK7JJRLR65GtPthXeyuc"},"purposes":["authentication"],"type":"EcdsaSecp256k1VerificationKey2019"}],"services":[{"serviceEndpoint":"http://any.endpoint","id":"anyServiceEndpointId","type":"anyType"}]}}],"updateCommitment":"EiBMWE2JFaFipPdthcFiQek-SXTMi5IWIFXAN8hKFCyLJw"},"suffixData":{"deltaHash":"EiBP6gAOxx3YOL8PZPZG3medFgdqWSDayVX3u1W2f-IPEQ","recoveryCommitment":"EiBg8oqvU0Zq_H5BoqmWf0IrhetQ91wXc5fDPpIjB9wW5w"},"type":"create"}` +) diff --git a/pkg/versions/1_0/operationparser/deactivate.go b/pkg/versions/1_0/operationparser/deactivate.go new file mode 100644 index 0000000..e933a49 --- /dev/null +++ b/pkg/versions/1_0/operationparser/deactivate.go @@ -0,0 +1,101 @@ +/* +Copyright Gen Digital Inc. All Rights Reserved. + +SPDX-License-Identifier: Apache-2.0 +*/ + +package operationparser + +import ( + "encoding/json" + "errors" + "fmt" + + "github.com/trustbloc/sidetree-go/pkg/api/operation" + "github.com/trustbloc/sidetree-go/pkg/hashing" + "github.com/trustbloc/sidetree-go/pkg/versions/1_0/model" +) + +// ParseDeactivateOperation will parse deactivate operation. 
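+// The request carries the DID suffix, a compact JWS over the deactivate signed
+// data model, and a reveal value. Parsing verifies that the signed DID suffix
+// matches the request and that the hash of the signed recovery key matches the
+// reveal value; the anchoring time window is only validated outside batch mode.
+// Illustrative call, assuming requestBytes holds a deactivate request:
+//
+//	op, err := parser.ParseDeactivateOperation(requestBytes, false)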
+func (p *Parser) ParseDeactivateOperation(request []byte, batch bool) (*model.Operation, error) { + schema, err := p.parseDeactivateRequest(request) + if err != nil { + return nil, err + } + + signedData, err := p.ParseSignedDataForDeactivate(schema.SignedData) + if err != nil { + return nil, err + } + + if signedData.DidSuffix != schema.DidSuffix { + return nil, errors.New("signed did suffix mismatch for deactivate") + } + + err = hashing.IsValidModelMultihash(signedData.RecoveryKey, schema.RevealValue) + if err != nil { + return nil, fmt.Errorf("canonicalized recovery public key hash doesn't match reveal value: %s", err.Error()) + } + + if !batch { + until := p.getAnchorUntil(signedData.AnchorFrom, signedData.AnchorUntil) + + if err := p.anchorTimeValidator.Validate(signedData.AnchorFrom, until); err != nil { + return nil, err + } + } + + return &model.Operation{ + Type: operation.TypeDeactivate, + OperationRequest: request, + UniqueSuffix: schema.DidSuffix, + SignedData: schema.SignedData, + RevealValue: schema.RevealValue, + }, nil +} + +func (p *Parser) parseDeactivateRequest(payload []byte) (*model.DeactivateRequest, error) { + schema := &model.DeactivateRequest{} + err := json.Unmarshal(payload, schema) + if err != nil { + return nil, fmt.Errorf("failed to unmarshal deactivate request: %s", err.Error()) + } + + if err := p.validateDeactivateRequest(schema); err != nil { + return nil, err + } + + return schema, nil +} + +func (p *Parser) validateDeactivateRequest(req *model.DeactivateRequest) error { + if req.DidSuffix == "" { + return errors.New("missing did suffix") + } + + if req.SignedData == "" { + return errors.New("missing signed data") + } + + return p.validateMultihash(req.RevealValue, "reveal value") +} + +// ParseSignedDataForDeactivate will parse and validate signed data for deactivate. +func (p *Parser) ParseSignedDataForDeactivate(compactJWS string) (*model.DeactivateSignedDataModel, error) { + jws, err := p.parseSignedData(compactJWS) + if err != nil { + return nil, err + } + + signedData := &model.DeactivateSignedDataModel{} + err = json.Unmarshal(jws.Payload, signedData) + if err != nil { + return nil, fmt.Errorf("failed to unmarshal signed data model for deactivate: %s", err.Error()) + } + + if err := p.validateSigningKey(signedData.RecoveryKey); err != nil { + return nil, fmt.Errorf("validate signed data for deactivate: %s", err.Error()) + } + + return signedData, nil +} diff --git a/pkg/versions/1_0/operationparser/deactivate_test.go b/pkg/versions/1_0/operationparser/deactivate_test.go new file mode 100644 index 0000000..0c8f40d --- /dev/null +++ b/pkg/versions/1_0/operationparser/deactivate_test.go @@ -0,0 +1,244 @@ +/* +Copyright Gen Digital Inc. All Rights Reserved. 
+ +SPDX-License-Identifier: Apache-2.0 +*/ + +package operationparser + +import ( + "encoding/json" + "testing" + "time" + + "github.com/stretchr/testify/require" + + "github.com/trustbloc/sidetree-go/pkg/api/operation" + "github.com/trustbloc/sidetree-go/pkg/api/protocol" + "github.com/trustbloc/sidetree-go/pkg/commitment" + "github.com/trustbloc/sidetree-go/pkg/hashing" + "github.com/trustbloc/sidetree-go/pkg/jws" + "github.com/trustbloc/sidetree-go/pkg/util/signutil" + "github.com/trustbloc/sidetree-go/pkg/versions/1_0/model" +) + +const sha2_256 = 18 + +func TestParseDeactivateOperation(t *testing.T) { + p := protocol.Protocol{ + MultihashAlgorithms: []uint{sha2_256}, + MaxOperationHashLength: maxHashLength, + SignatureAlgorithms: []string{"alg"}, + KeyAlgorithms: []string{"crv"}, + MaxOperationTimeDelta: 5 * 60, + } + + parser := New(p) + + t.Run("success", func(t *testing.T) { + payload, err := getDeactivateRequestBytes() + require.NoError(t, err) + + op, err := parser.ParseDeactivateOperation(payload, false) + require.NoError(t, err) + require.Equal(t, operation.TypeDeactivate, op.Type) + + signedData, err := parser.ParseSignedDataForDeactivate(op.SignedData) + require.NoError(t, err) + + expectedRevealValue, err := commitment.GetRevealValue(signedData.RecoveryKey, sha2_256) + require.NoError(t, err) + + require.Equal(t, expectedRevealValue, op.RevealValue) + }) + + t.Run("success - anchor until default to anchor from + max operation time delta protocol param", func(t *testing.T) { + now := time.Now().Unix() + + signedData := &model.DeactivateSignedDataModel{ + DidSuffix: "did", + RecoveryKey: &jws.JWK{ + Kty: "kty", + Crv: "crv", + X: "x", + }, + AnchorFrom: now - 5*60, + } + + deactivateRequest, err := getDeactivateRequest(signedData) + require.NoError(t, err) + + reqBytes, err := json.Marshal(deactivateRequest) + require.NoError(t, err) + + op, err := parser.ParseDeactivateOperation(reqBytes, false) + require.NoError(t, err) + require.NotEmpty(t, op) + }) + + t.Run("missing unique suffix", func(t *testing.T) { + schema, err := parser.ParseDeactivateOperation([]byte("{}"), false) + require.Error(t, err) + require.Nil(t, schema) + require.Contains(t, err.Error(), "missing did suffix") + }) + t.Run("missing signed data", func(t *testing.T) { + op, err := parser.ParseDeactivateOperation([]byte(`{"didSuffix":"abc"}`), false) + require.Error(t, err) + require.Contains(t, err.Error(), "missing signed data") + require.Nil(t, op) + }) + t.Run("parse request", func(t *testing.T) { + request, err := json.Marshal("invalidJSON") + require.NoError(t, err) + + op, err := parser.ParseDeactivateOperation(request, false) + require.Error(t, err) + require.Contains(t, err.Error(), "cannot unmarshal string") + require.Nil(t, op) + }) + t.Run("parse signed data error - decoding failed", func(t *testing.T) { + deactivateRequest, err := getDefaultDeactivateRequest() + require.NoError(t, err) + + deactivateRequest.SignedData = "invalid" + request, err := json.Marshal(deactivateRequest) + require.NoError(t, err) + + op, err := parser.ParseDeactivateOperation(request, false) + require.Error(t, err) + require.Contains(t, err.Error(), "invalid JWS compact format") + require.Nil(t, op) + }) + t.Run("validate signed data error - did suffix mismatch", func(t *testing.T) { + signedData := getSignedDataForDeactivate() + signedData.DidSuffix = "different" + + recoverRequest, err := getDeactivateRequest(signedData) + require.NoError(t, err) + + request, err := json.Marshal(recoverRequest) + require.NoError(t, err) 
+ + op, err := parser.ParseDeactivateOperation(request, false) + require.Error(t, err) + require.Contains(t, err.Error(), "signed did suffix mismatch for deactivate") + require.Nil(t, op) + }) + t.Run("parse signed data error - unmarshal signed data failed", func(t *testing.T) { + deactivateRequest, err := getDefaultDeactivateRequest() + require.NoError(t, err) + + compactJWS, err := signutil.SignPayload([]byte("payload"), NewMockSigner()) + require.NoError(t, err) + + deactivateRequest.SignedData = compactJWS + request, err := json.Marshal(deactivateRequest) + require.NoError(t, err) + + op, err := parser.ParseDeactivateOperation(request, false) + require.Error(t, err) + require.Contains(t, err.Error(), "failed to unmarshal signed data model for deactivate") + require.Nil(t, op) + }) + t.Run("error - key algorithm not supported", func(t *testing.T) { + p := protocol.Protocol{ + MultihashAlgorithms: []uint{sha2_256}, + MaxOperationHashLength: maxHashLength, + SignatureAlgorithms: []string{"alg"}, + KeyAlgorithms: []string{"other"}, + } + parser := New(p) + + request, err := getDeactivateRequestBytes() + require.NoError(t, err) + + op, err := parser.ParseDeactivateOperation(request, false) + require.Error(t, err) + require.Contains(t, err.Error(), "validate signed data for deactivate: key algorithm 'crv' is not in the allowed list [other]") + require.Nil(t, op) + }) +} + +func TestValidateDeactivateRequest(t *testing.T) { + parser := New(protocol.Protocol{MaxOperationHashLength: maxHashLength, MultihashAlgorithms: []uint{sha2_256}}) + + t.Run("success", func(t *testing.T) { + deactivate, err := getDefaultDeactivateRequest() + require.NoError(t, err) + + err = parser.validateDeactivateRequest(deactivate) + require.NoError(t, err) + }) + t.Run("missing signed data", func(t *testing.T) { + deactivate, err := getDefaultDeactivateRequest() + require.NoError(t, err) + deactivate.SignedData = "" + + err = parser.validateDeactivateRequest(deactivate) + require.Error(t, err) + require.Contains(t, err.Error(), "missing signed data") + }) + t.Run("missing did suffix", func(t *testing.T) { + deactivate, err := getDefaultDeactivateRequest() + require.NoError(t, err) + deactivate.DidSuffix = "" + + err = parser.validateDeactivateRequest(deactivate) + require.Error(t, err) + require.Contains(t, err.Error(), "missing did suffix") + }) + + t.Run("invalid reveal value", func(t *testing.T) { + deactivate, err := getDefaultDeactivateRequest() + require.NoError(t, err) + deactivate.RevealValue = "invalid" + + err = parser.validateDeactivateRequest(deactivate) + require.Error(t, err) + require.Contains(t, err.Error(), "reveal value is not computed with the required hash algorithms: [18]") + }) +} + +func getDeactivateRequest(signedData *model.DeactivateSignedDataModel) (*model.DeactivateRequest, error) { + compactJWS, err := signutil.SignModel(signedData, NewMockSigner()) + if err != nil { + return nil, err + } + + revealValue, err := hashing.CalculateModelMultihash(signedData.RecoveryKey, sha2_256) + if err != nil { + return nil, err + } + + return &model.DeactivateRequest{ + Operation: operation.TypeDeactivate, + DidSuffix: "did", + SignedData: compactJWS, + RevealValue: revealValue, + }, nil +} + +func getDefaultDeactivateRequest() (*model.DeactivateRequest, error) { + return getDeactivateRequest(getSignedDataForDeactivate()) +} + +func getSignedDataForDeactivate() *model.DeactivateSignedDataModel { + return &model.DeactivateSignedDataModel{ + DidSuffix: "did", + RecoveryKey: &jws.JWK{ + Kty: "kty", + Crv: 
"crv", + X: "x", + }, + } +} + +func getDeactivateRequestBytes() ([]byte, error) { + req, err := getDeactivateRequest(getSignedDataForDeactivate()) + if err != nil { + return nil, err + } + + return json.Marshal(req) +} diff --git a/pkg/versions/1_0/operationparser/method.go b/pkg/versions/1_0/operationparser/method.go new file mode 100644 index 0000000..118926a --- /dev/null +++ b/pkg/versions/1_0/operationparser/method.go @@ -0,0 +1,84 @@ +/* +Copyright Gen Digital Inc. All Rights Reserved. + +SPDX-License-Identifier: Apache-2.0 +*/ + +package operationparser + +import ( + "encoding/json" + "errors" + "strings" + + "github.com/trustbloc/sidetree-go/pkg/api/operation" + "github.com/trustbloc/sidetree-go/pkg/canonicalizer" + "github.com/trustbloc/sidetree-go/pkg/encoder" + "github.com/trustbloc/sidetree-go/pkg/versions/1_0/model" +) + +const ( + longFormSeparator = ":" + didSeparator = ":" +) + +// ParseDID inspects resolution request and returns: +// - did and create request in case of long form resolution +// - just did in case of short form resolution (common scenario). +func (p *Parser) ParseDID(namespace, shortOrLongFormDID string) (string, []byte, error) { + var err error + + withoutNamespace := strings.ReplaceAll(shortOrLongFormDID, namespace+didSeparator, "") + posLongFormSeparator := strings.Index(withoutNamespace, longFormSeparator) + + if posLongFormSeparator == -1 { + // there is short form did + return shortOrLongFormDID, nil, nil + } + + // long form format: '::Base64url(JCS({suffix-data, delta}))' + endOfDIDPos := strings.LastIndex(shortOrLongFormDID, longFormSeparator) + + did := shortOrLongFormDID[0:endOfDIDPos] + longFormDID := shortOrLongFormDID[endOfDIDPos+1:] + + createRequest, err := parseInitialState(longFormDID) + if err != nil { + return "", nil, err + } + + createRequestBytes, err := canonicalizer.MarshalCanonical(createRequest) + if err != nil { + return "", nil, err + } + + // return did and initial state + return did, createRequestBytes, nil +} + +// parse initial state will get create request from encoded initial value. +func parseInitialState(initialState string) (*model.CreateRequest, error) { + decodedJCS, err := encoder.DecodeString(initialState) + if err != nil { + return nil, err + } + + var createRequest model.CreateRequest + err = json.Unmarshal(decodedJCS, &createRequest) + if err != nil { + return nil, err + } + + expected, err := canonicalizer.MarshalCanonical(createRequest) + if err != nil { + return nil, err + } + + if encoder.EncodeToString(expected) != initialState { + return nil, errors.New("initial state is not valid") + } + + createRequest.Operation = operation.TypeCreate + + return &createRequest, nil +} diff --git a/pkg/versions/1_0/operationparser/method_test.go b/pkg/versions/1_0/operationparser/method_test.go new file mode 100644 index 0000000..dc5f0fe --- /dev/null +++ b/pkg/versions/1_0/operationparser/method_test.go @@ -0,0 +1,109 @@ +/* +Copyright Gen Digital Inc. All Rights Reserved. 
+ +SPDX-License-Identifier: Apache-2.0 +*/ + +package operationparser + +import ( + "fmt" + "testing" + + "github.com/stretchr/testify/require" + + "github.com/trustbloc/sidetree-go/pkg/canonicalizer" + "github.com/trustbloc/sidetree-go/pkg/docutil" + "github.com/trustbloc/sidetree-go/pkg/encoder" + "github.com/trustbloc/sidetree-go/pkg/mocks" + "github.com/trustbloc/sidetree-go/pkg/versions/1_0/model" +) + +const ( + docNS = "doc:method" +) + +func TestParser_ParseDID(t *testing.T) { + p := mocks.NewMockProtocolClient() + + parser := New(p.Protocol) + + const testDID = "doc:method:abc" + + req := model.CreateRequest{ + Delta: &model.DeltaModel{}, + SuffixData: &model.SuffixDataModel{}, + } + + reqBytes, err := canonicalizer.MarshalCanonical(req) + require.NoError(t, err) + fmt.Println(string(reqBytes)) + + initialState := encoder.EncodeToString(reqBytes) + + t.Run("success - just did, no initial state value", func(t *testing.T) { + did, initial, err := parser.ParseDID(docNS, testDID) + require.NoError(t, err) + require.Equal(t, testDID, did) + require.Empty(t, initial) + }) + + t.Run("success - did with dot in namespace", func(t *testing.T) { + namespaceWithDot := "did:bloc:trustbloc.dev" + didWithDot := namespaceWithDot + docutil.NamespaceDelimiter + "EiB2gB7F-aDjg8qPsTuZfVqWkJtIWXn4nObHSgtZ1IzMaQ" + + did, initial, err := parser.ParseDID(namespaceWithDot, didWithDot) + require.NoError(t, err) + require.Equal(t, didWithDot, did) + require.Nil(t, initial) + }) + + t.Run("success - did with initial state JCS", func(t *testing.T) { + did, initial, err := parser.ParseDID(docNS, testDID+longFormSeparator+initialState) + + require.NoError(t, err) + require.Equal(t, testDID, did) + require.Equal(t, `{"delta":{},"suffixData":{},"type":"create"}`, string(initial)) + }) + + t.Run("success - did with dot in namespace and initial state", func(t *testing.T) { + namespaceWithDot := "did:bloc:trustbloc.dev" + didWithDot := namespaceWithDot + docutil.NamespaceDelimiter + "EiB2gB7F-aDjg8qPsTuZfVqWkJtIWXn4nObHSgtZ1IzMaQ" + + didWithDotWithInitialState := didWithDot + longFormSeparator + initialState + did, initial, err := parser.ParseDID(namespaceWithDot, didWithDotWithInitialState) + require.NoError(t, err) + require.Equal(t, didWithDot, did) + require.Equal(t, `{"delta":{},"suffixData":{},"type":"create"}`, string(initial)) + }) + + t.Run("error - initial state not encoded", func(t *testing.T) { + notEncoded := "not encoded" + + did, initial, err := parser.ParseDID(namespace, testDID+longFormSeparator+notEncoded) + require.Error(t, err) + require.Empty(t, did) + require.Nil(t, initial) + require.Contains(t, err.Error(), "illegal base64 data") + }) + + t.Run("error - initial state not JSON", func(t *testing.T) { + invalidJCS := encoder.EncodeToString([]byte(`not JSON`)) + + did, initial, err := parser.ParseDID(docNS, testDID+longFormSeparator+invalidJCS) + require.Error(t, err) + require.Empty(t, did) + require.Nil(t, initial) + require.Contains(t, err.Error(), "invalid character") + }) + + t.Run("error - initial state not expected JCS", func(t *testing.T) { + unexpectedJCS := encoder.EncodeToString([]byte(`{"key":"value"}`)) + + did, initial, err := parser.ParseDID(docNS, testDID+longFormSeparator+unexpectedJCS) + require.Error(t, err) + require.Empty(t, did) + require.Nil(t, initial) + require.Contains(t, err.Error(), "initial state is not valid") + }) +} diff --git a/pkg/versions/1_0/operationparser/operation.go b/pkg/versions/1_0/operationparser/operation.go new file mode 100644 index 
0000000..5a16f6f --- /dev/null +++ b/pkg/versions/1_0/operationparser/operation.go @@ -0,0 +1,171 @@ +/* +Copyright Gen Digital Inc. All Rights Reserved. + +SPDX-License-Identifier: Apache-2.0 +*/ + +package operationparser + +import ( + "encoding/json" + "errors" + "fmt" + + "github.com/trustbloc/logutil-go/pkg/log" + + "github.com/trustbloc/sidetree-go/pkg/api/operation" + "github.com/trustbloc/sidetree-go/pkg/api/protocol" + "github.com/trustbloc/sidetree-go/pkg/docutil" + logfields "github.com/trustbloc/sidetree-go/pkg/internal/log" + "github.com/trustbloc/sidetree-go/pkg/versions/1_0/model" +) + +var logger = log.New("sidetree-core-parser") + +// Parser is an operation parser. +type Parser struct { + protocol.Protocol + anchorOriginValidator ObjectValidator + anchorTimeValidator TimeValidator +} + +// New returns a new operation parser. + +//nolint:gocritic +func New(p protocol.Protocol, opts ...Option) *Parser { + parser := &Parser{ + Protocol: p, + } + + // default anchor origin validator + parser.anchorOriginValidator = &objectValidator{} + + // default anchor time validator + parser.anchorTimeValidator = &timeValidator{} + + // apply options + for _, opt := range opts { + opt(parser) + } + + return parser +} + +// ObjectValidator validates an object. It is currently used for anchor origin validation; +// however, it can be used for any object validation. +type ObjectValidator interface { + Validate(obj interface{}) error +} + +// Option is a parser instance option. +type Option func(opts *Parser) + +// WithAnchorOriginValidator sets optional anchor origin validator. +func WithAnchorOriginValidator(v ObjectValidator) Option { + return func(opts *Parser) { + if v != nil { + opts.anchorOriginValidator = v + } + } +} + +// ErrOperationExpired is returned if the anchor-until time is less than the reference time (e.g. server time or anchoring time). +var ErrOperationExpired = errors.New("operation expired") + +// ErrOperationEarly is returned if the anchor-from time is greater than the reference time (e.g. server time or anchoring time). +var ErrOperationEarly = errors.New("operation early") + +// TimeValidator validates the earliest and expiry times for an operation against the server time. +type TimeValidator interface { + Validate(from, until int64) error +} + +// WithAnchorTimeValidator sets optional anchor time validator. +func WithAnchorTimeValidator(v TimeValidator) Option { + return func(opts *Parser) { + if v != nil { + opts.anchorTimeValidator = v + } + } +} + +// Parse parses and validates an operation. +func (p *Parser) Parse(namespace string, operationBuffer []byte) (*operation.Operation, error) { + // parse and validate the operation buffer using this version's model and validation rules + internal, err := p.ParseOperation(namespace, operationBuffer, false) + if err != nil { + return nil, err + } + + return &operation.Operation{ + Type: internal.Type, + UniqueSuffix: internal.UniqueSuffix, + ID: internal.ID, + OperationRequest: operationBuffer, + }, nil +} + +// ParseOperation parses and validates an operation. The batch mode flag gives hints for the validation of +// the operation object (anticipating future pruning/checkpoint requirements).
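+// +// A minimal usage sketch (hypothetical request bytes; assumes a populated protocol.Protocol): +// +// parser := New(p) +// op, err := parser.ParseOperation("did:sidetree", requestBytes, false) +// if err != nil { +// // handle parsing/validation error +// }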
+func (p *Parser) ParseOperation(namespace string, operationBuffer []byte, batch bool) (*model.Operation, error) { + // check maximum operation size against protocol before parsing + if len(operationBuffer) > int(p.MaxOperationSize) { + return nil, fmt.Errorf("operation size[%d] exceeds maximum operation size[%d]", len(operationBuffer), int(p.MaxOperationSize)) + } + + schema := &operationSchema{} + err := json.Unmarshal(operationBuffer, schema) + if err != nil { + return nil, fmt.Errorf("failed to unmarshal operation buffer into operation schema: %s", err.Error()) + } + + var op *model.Operation + var parseErr error + switch schema.Operation { + case operation.TypeCreate: + op, parseErr = p.ParseCreateOperation(operationBuffer, batch) + case operation.TypeUpdate: + op, parseErr = p.ParseUpdateOperation(operationBuffer, batch) + case operation.TypeDeactivate: + op, parseErr = p.ParseDeactivateOperation(operationBuffer, batch) + case operation.TypeRecover: + op, parseErr = p.ParseRecoverOperation(operationBuffer, batch) + default: + return nil, fmt.Errorf("parse operation: operation type [%s] not supported", schema.Operation) + } + + if parseErr != nil { + logger.Warn("Error parsing operation for batch", logfields.WithOperation(schema.Operation), + logfields.WithIsBatch(batch), log.WithError(parseErr)) + + return nil, parseErr + } + + op.Namespace = namespace + op.ID = namespace + docutil.NamespaceDelimiter + op.UniqueSuffix + + return op, nil +} + +// operationSchema is used to get operation type. +type operationSchema struct { + + // operation + Operation operation.Type `json:"type"` +} + +type objectValidator struct { +} + +func (ov *objectValidator) Validate(_ interface{}) error { + // default validator allows any anchor origin + return nil +} + +type timeValidator struct { +} + +func (tv *timeValidator) Validate(_, _ int64) error { + // default time validator allows any anchor time + return nil +} diff --git a/pkg/versions/1_0/operationparser/operation_test.go b/pkg/versions/1_0/operationparser/operation_test.go new file mode 100644 index 0000000..cc1958c --- /dev/null +++ b/pkg/versions/1_0/operationparser/operation_test.go @@ -0,0 +1,255 @@ +/* +Copyright Gen Digital Inc. All Rights Reserved. 
+ +SPDX-License-Identifier: Apache-2.0 +*/ + +package operationparser + +import ( + "encoding/json" + "errors" + "testing" + "time" + + "github.com/stretchr/testify/require" + + "github.com/trustbloc/sidetree-go/pkg/api/protocol" +) + +const ( + namespace = "did:sidetree" + + maxOperationSize = 2000 + maxHashLength = 100 + maxDeltaSize = 1000 +) + +func TestNewParser(t *testing.T) { + p := protocol.Protocol{} + + parser := New(p) + require.NotNil(t, parser) + require.NotNil(t, parser.anchorOriginValidator) + + // validator cannot be set to nil (default validator will kick in) + parser = New(p, WithAnchorOriginValidator(nil)) + require.NotNil(t, parser) + require.NotNil(t, parser.anchorOriginValidator) + + // supply custom validator + ov := &mockObjectValidator{} + + parser = New(p, WithAnchorOriginValidator(ov)) + require.NotNil(t, parser) + require.Equal(t, ov, parser.anchorOriginValidator) + + // custom anchor time validator + tv := &mockTimeValidator{} + + parser = New(p, WithAnchorTimeValidator(tv)) + require.NotNil(t, parser) + require.Equal(t, tv, parser.anchorTimeValidator) +} + +func TestGetOperation(t *testing.T) { + p := protocol.Protocol{ + MaxOperationSize: maxOperationSize, + MaxOperationHashLength: maxHashLength, + MaxDeltaSize: maxDeltaSize, + MultihashAlgorithms: []uint{sha2_256}, + SignatureAlgorithms: []string{"alg"}, + KeyAlgorithms: []string{"crv"}, + Patches: []string{"add-public-keys", "remove-public-keys", "add-services", "remove-services", "ietf-json-patch"}, + } + + parser := New(p) + + t.Run("create", func(t *testing.T) { + operation, err := getCreateRequestBytes() + require.NoError(t, err) + + op, err := parser.Parse(namespace, operation) + require.NoError(t, err) + require.NotNil(t, op) + }) + t.Run("update", func(t *testing.T) { + operation, err := getUpdateRequestBytes() + require.NoError(t, err) + + op, err := parser.Parse(namespace, operation) + require.NoError(t, err) + require.NotNil(t, op) + }) + t.Run("deactivate", func(t *testing.T) { + operation, err := getDeactivateRequestBytes() + require.NoError(t, err) + + op, err := parser.Parse(namespace, operation) + require.NoError(t, err) + require.NotNil(t, op) + }) + t.Run("recover", func(t *testing.T) { + operation, err := getRecoverRequestBytes() + require.NoError(t, err) + + op, err := parser.Parse(namespace, operation) + require.NoError(t, err) + require.NotNil(t, op) + }) + t.Run("operation parsing error - anchor origin validator error (create)", func(t *testing.T) { + operation, err := getCreateRequestBytes() + require.NoError(t, err) + + testErr := errors.New("validation error") + parserWithErr := New(p, WithAnchorOriginValidator(&mockObjectValidator{Err: testErr})) + + op, err := parserWithErr.Parse(namespace, operation) + require.Error(t, err) + require.Nil(t, op) + require.Contains(t, err.Error(), testErr.Error()) + }) + t.Run("operation parsing error - anchor origin validator error (recover)", func(t *testing.T) { + operation, err := getRecoverRequestBytes() + require.NoError(t, err) + + testErr := errors.New("validation error") + parserWithErr := New(p, WithAnchorOriginValidator(&mockObjectValidator{Err: testErr})) + + op, err := parserWithErr.Parse(namespace, operation) + require.Error(t, err) + require.Nil(t, op) + require.Contains(t, err.Error(), testErr.Error()) + }) + t.Run("operation parsing error - anchor time validator error (update)", func(t *testing.T) { + operation, err := getUpdateRequestBytes() + require.NoError(t, err) + + testErr := errors.New("anchor time validation error") + 
parserWithErr := New(p, WithAnchorTimeValidator(&mockTimeValidator{Err: testErr})) + + op, err := parserWithErr.Parse(namespace, operation) + require.Error(t, err) + require.Nil(t, op) + require.Contains(t, err.Error(), testErr.Error()) + }) + t.Run("operation parsing error - anchor time validator error (deactivate)", func(t *testing.T) { + operation, err := getDeactivateRequestBytes() + require.NoError(t, err) + + testErr := errors.New("anchor time validation error") + parserWithErr := New(p, WithAnchorTimeValidator(&mockTimeValidator{Err: testErr})) + + op, err := parserWithErr.Parse(namespace, operation) + require.Error(t, err) + require.Nil(t, op) + require.Contains(t, err.Error(), testErr.Error()) + }) + t.Run("operation parsing error - anchor time validator error (recover)", func(t *testing.T) { + operation, err := getRecoverRequestBytes() + require.NoError(t, err) + + testErr := errors.New("anchor time validation error") + parserWithErr := New(p, WithAnchorTimeValidator(&mockTimeValidator{Err: testErr})) + + op, err := parserWithErr.Parse(namespace, operation) + require.Error(t, err) + require.Nil(t, op) + require.Contains(t, err.Error(), testErr.Error()) + }) + + t.Run("operation parsing error - exceeds max operation size", func(t *testing.T) { + // set up a protocol configuration with a small maximum operation size + invalid := protocol.Protocol{ + MaxOperationSize: 20, + MaxDeltaSize: maxDeltaSize, + } + + operation, err := getRecoverRequestBytes() + require.NoError(t, err) + + op, err := New(invalid).Parse(namespace, operation) + require.Error(t, err) + require.Contains(t, err.Error(), "operation size[761] exceeds maximum operation size[20]") + require.Nil(t, op) + }) + t.Run("operation parsing error", func(t *testing.T) { + // set up a protocol configuration with an unsupported signature algorithm + invalid := protocol.Protocol{ + SignatureAlgorithms: []string{"not-used"}, + MaxOperationSize: maxOperationSize, + MaxDeltaSize: maxDeltaSize, + MaxOperationHashLength: maxHashLength, + MultihashAlgorithms: []uint{sha2_256}, + } + + operation, err := getRecoverRequestBytes() + require.NoError(t, err) + + op, err := New(invalid).Parse(namespace, operation) + require.Error(t, err) + require.Contains(t, err.Error(), "failed to parse signed data: algorithm 'alg' is not in the allowed list [not-used]") + require.Nil(t, op) + }) + t.Run("unsupported operation type error", func(t *testing.T) { + operation := getUnsupportedRequest() + op, err := parser.Parse(namespace, operation) + require.Error(t, err) + require.Contains(t, err.Error(), "parse operation: operation type [unsupported] not supported") + require.Nil(t, op) + }) + t.Run("unmarshal request error - not JSON", func(t *testing.T) { + op, err := parser.Parse(namespace, []byte("operation")) + require.Error(t, err) + require.Contains(t, err.Error(), "failed to unmarshal operation buffer into operation schema") + require.Nil(t, op) + }) +} + +func getUnsupportedRequest() []byte { + schema := &operationSchema{ + Operation: "unsupported", + } + + payload, err := json.Marshal(schema) + if err != nil { + panic(err) + } + + return payload +} + +type mockObjectValidator struct { + Err error +} + +func (mov *mockObjectValidator) Validate(_ interface{}) error { + return mov.Err +} + +type mockTimeValidator struct { + Err error +} + +func (mtv *mockTimeValidator) Validate(from, until int64) error { + if mtv.Err != nil { + return mtv.Err + } + + if from == 0 && until == 0 { + // from and until are not specified - no error + return nil + } + + serverTime :=
time.Now().Unix() + + if from >= serverTime { + return ErrOperationEarly + } + + if until <= serverTime { + return ErrOperationExpired + } + + return nil +} diff --git a/pkg/versions/1_0/operationparser/patchvalidator/addkeys.go b/pkg/versions/1_0/operationparser/patchvalidator/addkeys.go new file mode 100644 index 0000000..9b94620 --- /dev/null +++ b/pkg/versions/1_0/operationparser/patchvalidator/addkeys.go @@ -0,0 +1,40 @@ +/* +Copyright Gen Digital Inc. All Rights Reserved. + +SPDX-License-Identifier: Apache-2.0 +*/ + +package patchvalidator + +import ( + "fmt" + + "github.com/trustbloc/sidetree-go/pkg/document" + "github.com/trustbloc/sidetree-go/pkg/patch" +) + +// NewAddPublicKeysValidator creates new validator. +func NewAddPublicKeysValidator() *AddPublicKeysValidator { + return &AddPublicKeysValidator{} +} + +// AddPublicKeysValidator implements validator for "add-public-keys" patch. +type AddPublicKeysValidator struct { +} + +// Validate validates patch. +func (v *AddPublicKeysValidator) Validate(p patch.Patch) error { + value, err := p.GetValue() + if err != nil { + return err + } + + _, err = getRequiredArray(value) + if err != nil { + return fmt.Errorf("invalid add public keys value: %s", err.Error()) + } + + publicKeys := document.ParsePublicKeys(value) + + return validatePublicKeys(publicKeys) +} diff --git a/pkg/versions/1_0/operationparser/patchvalidator/addkeys_test.go b/pkg/versions/1_0/operationparser/patchvalidator/addkeys_test.go new file mode 100644 index 0000000..c73eeea --- /dev/null +++ b/pkg/versions/1_0/operationparser/patchvalidator/addkeys_test.go @@ -0,0 +1,52 @@ +package patchvalidator + +import ( + "testing" + + "github.com/stretchr/testify/require" + + "github.com/trustbloc/sidetree-go/pkg/patch" +) + +func TestAddPublicKeysPatch(t *testing.T) { + t.Run("success", func(t *testing.T) { + p, err := patch.FromBytes([]byte(addPublicKeysPatch)) + require.NoError(t, err) + + err = NewAddPublicKeysValidator().Validate(p) + require.NoError(t, err) + }) + t.Run("error - missing value", func(t *testing.T) { + p, err := patch.FromBytes([]byte(addPublicKeysPatch)) + require.NoError(t, err) + + delete(p, patch.PublicKeys) + err = NewAddPublicKeysValidator().Validate(p) + require.Error(t, err) + require.Contains(t, err.Error(), "add-public-keys patch is missing key: publicKeys") + }) + t.Run("error - invalid value for public keys", func(t *testing.T) { + p, err := patch.FromBytes([]byte(addPublicKeysPatch)) + require.NoError(t, err) + + p[patch.PublicKeys] = "" + err = NewAddPublicKeysValidator().Validate(p) + require.Error(t, err) + require.Contains(t, err.Error(), "invalid add public keys value: expected array of interfaces") + }) +} + +const addPublicKeysPatch = `{ + "action": "add-public-keys", + "publicKeys": [{ + "id": "key1", + "type": "JsonWebKey2020", + "purposes": ["assertionMethod"], + "publicKeyJwk": { + "kty": "EC", + "crv": "P-256K", + "x": "PUymIqdtF_qxaAqPABSw-C-owT1KYYQbsMKFM-L9fJA", + "y": "nM84jDHCMOTGTh_ZdHq4dBBdo4Z5PkEOW9jA8z8IsGc" + } + }] +}` diff --git a/pkg/versions/1_0/operationparser/patchvalidator/addservices.go b/pkg/versions/1_0/operationparser/patchvalidator/addservices.go new file mode 100644 index 0000000..0201d5c --- /dev/null +++ b/pkg/versions/1_0/operationparser/patchvalidator/addservices.go @@ -0,0 +1,40 @@ +/* +Copyright Gen Digital Inc. All Rights Reserved. 
+ +SPDX-License-Identifier: Apache-2.0 +*/ + +package patchvalidator + +import ( + "fmt" + + "github.com/trustbloc/sidetree-go/pkg/document" + "github.com/trustbloc/sidetree-go/pkg/patch" +) + +// NewAddServicesValidator creates new validator. +func NewAddServicesValidator() *AddServicesValidator { + return &AddServicesValidator{} +} + +// AddServicesValidator implements validator for "add-services" patch. +type AddServicesValidator struct { +} + +// Validate validates patch. +func (v *AddServicesValidator) Validate(p patch.Patch) error { + value, err := p.GetValue() + if err != nil { + return err + } + + _, err = getRequiredArray(value) + if err != nil { + return fmt.Errorf("invalid add services value: %s", err.Error()) + } + + services := document.ParseServices(value) + + return validateServices(services) +} diff --git a/pkg/versions/1_0/operationparser/patchvalidator/addservices_test.go b/pkg/versions/1_0/operationparser/patchvalidator/addservices_test.go new file mode 100644 index 0000000..94419ce --- /dev/null +++ b/pkg/versions/1_0/operationparser/patchvalidator/addservices_test.go @@ -0,0 +1,60 @@ +package patchvalidator + +import ( + "testing" + + "github.com/stretchr/testify/require" + + "github.com/trustbloc/sidetree-go/pkg/patch" +) + +func TestAddServiceEndpointsPatch(t *testing.T) { + t.Run("success", func(t *testing.T) { + p, err := patch.FromBytes([]byte(addServiceEndpoints)) + require.NoError(t, err) + + err = NewAddServicesValidator().Validate(p) + require.NoError(t, err) + }) + t.Run("missing service endpoints", func(t *testing.T) { + p, err := patch.FromBytes([]byte(addServiceEndpoints)) + require.NoError(t, err) + + delete(p, patch.ServicesKey) + err = NewAddServicesValidator().Validate(p) + require.Error(t, err) + require.Contains(t, err.Error(), "add-services patch is missing key: services") + }) + t.Run("error - service is missing id", func(t *testing.T) { + p, err := patch.NewAddServiceEndpointsPatch(testAddServiceEndpointsMissingID) + require.NoError(t, err) + + err = NewAddServicesValidator().Validate(p) + require.Error(t, err) + require.Contains(t, err.Error(), "service id is missing") + }) +} + +const addServiceEndpoints = `{ + "action": "add-services", + "services": [ + { + "id": "sds1", + "type": "SecureDataStore", + "serviceEndpoint": "http://hub.my-personal-server.com" + }, + { + "id": "sds2", + "type": "SecureDataStore", + "serviceEndpoint": "http://some-cloud.com/hub" + } + ] +}` + +const testAddServiceEndpointsMissingID = `[ + { + "id": "", + "type": "SecureDataStore", + "serviceEndpoint": "http://some-cloud.com/hub" + } + ]` diff --git a/pkg/versions/1_0/operationparser/patchvalidator/alsoknownas.go b/pkg/versions/1_0/operationparser/patchvalidator/alsoknownas.go new file mode 100644 index 0000000..c489aab --- /dev/null +++ b/pkg/versions/1_0/operationparser/patchvalidator/alsoknownas.go @@ -0,0 +1,70 @@ +/* +Copyright Gen Digital Inc. All Rights Reserved. + +SPDX-License-Identifier: Apache-2.0 +*/ + +package patchvalidator + +import ( + "fmt" + "net/url" + + "github.com/trustbloc/sidetree-go/pkg/document" + "github.com/trustbloc/sidetree-go/pkg/patch" +) + +// NewAlsoKnownAsValidator creates new validator. +func NewAlsoKnownAsValidator() *AlsoKnownAsValidator { + return &AlsoKnownAsValidator{} +} + +// AlsoKnownAsValidator implements validator for "add-also-known-as" and "remove-also-known-as" patches. +// Both patches take URIs as their value, so the validation for add and remove is the same.
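+// +// For example, a hypothetical patch value such as +// {"action": "add-also-known-as", "uris": ["did:abc:123", "https://other.com"]} +// passes validation, while unparsable or duplicate URIs are rejected.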
+type AlsoKnownAsValidator struct { +} + +// Validate validates patch. +func (v *AlsoKnownAsValidator) Validate(p patch.Patch) error { + action, err := p.GetAction() + if err != nil { + return err + } + + value, err := p.GetValue() + if err != nil { + return fmt.Errorf("%s", err) + } + + _, err = getRequiredArray(value) + if err != nil { + return fmt.Errorf("%s: %w", action, err) + } + + uris := document.StringArray(value) + + if err := validate(uris); err != nil { + return fmt.Errorf("%s: validate URIs: %w", action, err) + } + + return nil +} + +// validate validates the given URIs and fails on duplicates. +func validate(uris []string) error { + ids := make(map[string]bool) + for _, uri := range uris { + u, err := url.Parse(uri) + if err != nil { + return fmt.Errorf("failed to parse URI: %w", err) + } + + if _, ok := ids[u.String()]; ok { + return fmt.Errorf("duplicate uri: %s", u.String()) + } + + ids[u.String()] = true + } + + return nil +} diff --git a/pkg/versions/1_0/operationparser/patchvalidator/alsoknownas_test.go b/pkg/versions/1_0/operationparser/patchvalidator/alsoknownas_test.go new file mode 100644 index 0000000..7cb2c47 --- /dev/null +++ b/pkg/versions/1_0/operationparser/patchvalidator/alsoknownas_test.go @@ -0,0 +1,72 @@ +package patchvalidator + +import ( + "testing" + + "github.com/stretchr/testify/require" + + "github.com/trustbloc/sidetree-go/pkg/patch" +) + +func TestAddAlsoKnowAsValidator(t *testing.T) { + t.Run("success", func(t *testing.T) { + p, err := patch.FromBytes([]byte(addAlsoKnownAs)) + require.NoError(t, err) + + err = NewAlsoKnownAsValidator().Validate(p) + require.NoError(t, err) + }) + t.Run("error - missing action", func(t *testing.T) { + p, err := patch.FromBytes([]byte(addAlsoKnownAs)) + require.NoError(t, err) + + delete(p, patch.ActionKey) + err = NewAlsoKnownAsValidator().Validate(p) + require.Error(t, err) + require.Contains(t, err.Error(), "patch is missing action key") + }) + t.Run("error - missing uris", func(t *testing.T) { + p, err := patch.FromBytes([]byte(addAlsoKnownAs)) + require.NoError(t, err) + + delete(p, patch.UrisKey) + err = NewAlsoKnownAsValidator().Validate(p) + require.Error(t, err) + require.Contains(t, err.Error(), "add-also-known-as patch is missing key: uris") + }) + t.Run("error - uris value is not expected type", func(t *testing.T) { + p, err := patch.FromBytes([]byte(addAlsoKnownAs)) + require.NoError(t, err) + + p[patch.UrisKey] = []int{123} + err = NewAlsoKnownAsValidator().Validate(p) + require.Error(t, err) + require.Contains(t, err.Error(), "add-also-known-as: expected array of interfaces") + }) + t.Run("error - uri is not valid", func(t *testing.T) { + p, err := patch.NewAddAlsoKnownAs(`[":abc"]`) + require.NoError(t, err) + + err = NewAlsoKnownAsValidator().Validate(p) + require.Error(t, err) + require.Contains(t, err.Error(), "add-also-known-as: validate URIs: failed to parse URI:") + }) + t.Run("error - duplicate URI", func(t *testing.T) { + p, err := patch.NewAddAlsoKnownAs(`["https://abc.com", "https://abc.com"]`) + require.NoError(t, err) + + err = NewAlsoKnownAsValidator().Validate(p) + require.Error(t, err) + require.Contains(t, err.Error(), "add-also-known-as: validate URIs: duplicate uri: https://abc.com") + }) +} + +const addAlsoKnownAs = `{ + "action": "add-also-known-as", + "uris": ["did:abc:123", "https://other.com"] +}` + +const removeAlsoKnownAs = `{ + "action": "remove-also-known-as", + "uris": ["did:abc:123"] +}` diff --git a/pkg/versions/1_0/operationparser/patchvalidator/document.go
b/pkg/versions/1_0/operationparser/patchvalidator/document.go new file mode 100644 index 0000000..dea4ce1 --- /dev/null +++ b/pkg/versions/1_0/operationparser/patchvalidator/document.go @@ -0,0 +1,378 @@ +/* +Copyright Gen Digital Inc. All Rights Reserved. + +SPDX-License-Identifier: Apache-2.0 +*/ + +package patchvalidator + +import ( + "errors" + "fmt" + "net/url" + "regexp" + "strings" + + "github.com/trustbloc/sidetree-go/pkg/document" +) + +//nolint:gochecknoglobals +var ( + asciiRegex = regexp.MustCompile("^[A-Za-z0-9_-]+$") +) + +const ( + bls12381G2Key2020 = "Bls12381G2Key2020" + jsonWebKey2020 = "JsonWebKey2020" + ecdsaSecp256k1VerificationKey2019 = "EcdsaSecp256k1VerificationKey2019" + x25519KeyAgreementKey2019 = "X25519KeyAgreementKey2019" + ed25519VerificationKey2018 = "Ed25519VerificationKey2018" + ed25519VerificationKey2020 = "Ed25519VerificationKey2020" + + // maximum length for public key and service IDs. + maxIDLength = 50 + + maxServiceTypeLength = 30 +) + +var allowedPurposes = map[document.KeyPurpose]bool{ + document.KeyPurposeAuthentication: true, + document.KeyPurposeAssertionMethod: true, + document.KeyPurposeKeyAgreement: true, + document.KeyPurposeCapabilityDelegation: true, + document.KeyPurposeCapabilityInvocation: true, +} + +type existenceMap map[string]string + +var allowedKeyTypesGeneral = existenceMap{ + bls12381G2Key2020: bls12381G2Key2020, + jsonWebKey2020: jsonWebKey2020, + ecdsaSecp256k1VerificationKey2019: ecdsaSecp256k1VerificationKey2019, + ed25519VerificationKey2018: ed25519VerificationKey2018, + ed25519VerificationKey2020: ed25519VerificationKey2020, + x25519KeyAgreementKey2019: x25519KeyAgreementKey2019, +} + +var allowedKeyTypesVerification = existenceMap{ + bls12381G2Key2020: bls12381G2Key2020, + jsonWebKey2020: jsonWebKey2020, + ecdsaSecp256k1VerificationKey2019: ecdsaSecp256k1VerificationKey2019, + ed25519VerificationKey2018: ed25519VerificationKey2018, + ed25519VerificationKey2020: ed25519VerificationKey2020, +} + +var allowedKeyTypesAgreement = existenceMap{ + // TODO: Verify appropriate agreement key types for JWS and Secp256k1 + bls12381G2Key2020: bls12381G2Key2020, + jsonWebKey2020: jsonWebKey2020, + ecdsaSecp256k1VerificationKey2019: ecdsaSecp256k1VerificationKey2019, + x25519KeyAgreementKey2019: x25519KeyAgreementKey2019, +} + +var allowedKeyTypes = map[string]existenceMap{ + document.KeyPurposeAuthentication: allowedKeyTypesVerification, + document.KeyPurposeAssertionMethod: allowedKeyTypesVerification, + document.KeyPurposeKeyAgreement: allowedKeyTypesAgreement, + document.KeyPurposeCapabilityDelegation: allowedKeyTypesVerification, + document.KeyPurposeCapabilityInvocation: allowedKeyTypesVerification, +} + +// validatePublicKeys validates public keys.
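+// It checks the required and allowed properties, ID length and character set, duplicate IDs, +// key purposes, key-type/purpose compatibility and, where applicable, the JWK itself.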
+func validatePublicKeys(pubKeys []document.PublicKey) error { + ids := make(map[string]bool) + + for _, pubKey := range pubKeys { + if err := validatePublicKeyProperties(pubKey); err != nil { + return err + } + + kid := pubKey.ID() + if err := validateID(kid); err != nil { + return fmt.Errorf("public key: %s", err.Error()) + } + + if _, ok := ids[kid]; ok { + return fmt.Errorf("duplicate public key id: %s", kid) + } + ids[kid] = true + + if err := validateKeyPurposes(pubKey); err != nil { + return err + } + + if !validateKeyTypePurpose(pubKey) { + return fmt.Errorf("invalid key type: %s", pubKey.Type()) + } + + if err := validateJWK(pubKey.PublicKeyJwk()); err != nil { + if pubKey.PublicKeyBase58() == "" || pubKey.Type() == jsonWebKey2020 { + return err + } + } + } + + return nil +} + +func validatePublicKeyProperties(pubKey document.PublicKey) error { + requiredKeys := []string{document.TypeProperty, document.IDProperty} + optionalKeys := []string{document.PurposesProperty} + oneOfNKeys := [][]string{{document.PublicKeyJwkProperty, document.PublicKeyBase58Property}} + allowedKeys := append(requiredKeys, optionalKeys...) //nolint:gocritic + + for _, keyGroup := range oneOfNKeys { + allowedKeys = append(allowedKeys, keyGroup...) + } + + for _, required := range requiredKeys { + if _, ok := pubKey[required]; !ok { + return fmt.Errorf("key '%s' is required for public key", required) + } + } + + for _, keyGroup := range oneOfNKeys { + var satisfied bool + + for _, key := range keyGroup { + _, ok := pubKey[key] + if ok && satisfied { // at most one element + satisfied = false + + break + } + + satisfied = satisfied || ok + } + + if !satisfied { + return fmt.Errorf("exactly one key required of '%s'", strings.Join(keyGroup, "', '")) + } + } + + for key := range pubKey { + if !contains(allowedKeys, key) { + return fmt.Errorf("key '%s' is not allowed for public key", key) + } + } + + return nil +} + +// validateID validates id. +func validateID(id string) error { + if len(id) > maxIDLength { + return fmt.Errorf("id exceeds maximum length: %d", maxIDLength) + } + + if !asciiRegex.MatchString(id) { + return errors.New("id contains invalid characters") + } + + return nil +} + +// validateServices validates services. 
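+// Each service must have a unique, well-formed ID, a non-empty type within the maximum length, +// and a service endpoint that is a valid URI (string, string-array and object forms are accepted).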
+func validateServices(services []document.Service) error { + ids := make(map[string]bool) + for _, service := range services { + if err := validateService(service); err != nil { + return err + } + + if _, ok := ids[service.ID()]; ok { + return fmt.Errorf("duplicate service id: %s", service.ID()) + } + + ids[service.ID()] = true + } + + return nil +} + +func validateService(service document.Service) error { + // the expected fields are id, type, and serviceEndpoint, plus some optional fields + + if err := validateServiceID(service.ID()); err != nil { + return err + } + + if err := validateServiceType(service.Type()); err != nil { + return err + } + + if err := validateServiceEndpoint(service.ServiceEndpoint()); err != nil { + return err + } + + return nil +} + +func validateServiceID(id string) error { + if id == "" { + return errors.New("service id is missing") + } + + if err := validateID(id); err != nil { + return fmt.Errorf("service: %s", err.Error()) + } + + return nil +} + +func validateServiceType(serviceType string) error { + if serviceType == "" { + return errors.New("service type is missing") + } + + if len(serviceType) > maxServiceTypeLength { + return fmt.Errorf("service type exceeds maximum length: %d", maxServiceTypeLength) + } + + return nil +} + +func validateServiceEndpoint(serviceEndpoint interface{}) error { + if serviceEndpoint == nil { + return errors.New("service endpoint is missing") + } + + uri, ok := serviceEndpoint.(string) + if ok { + return validateURI(uri) + } + + uris, ok := serviceEndpoint.([]string) + if ok { + return validateURIs(uris) + } + + objs, ok := serviceEndpoint.([]interface{}) + if ok { + return validateServiceEndpointObjects(objs) + } + + return nil +} + +func validateServiceEndpointObjects(objs []interface{}) error { + for _, obj := range objs { + uri, ok := obj.(string) + if ok { + return validateURI(uri) + } + } + + return nil +} + +func validateURIs(uris []string) error { + for _, uri := range uris { + if err := validateURI(uri); err != nil { + return err + } + } + + return nil +} + +func validateURI(uri string) error { + if uri == "" { + return errors.New("service endpoint URI is empty") + } + + if _, err := url.ParseRequestURI(uri); err != nil { + return fmt.Errorf("service endpoint '%s' is not a valid URI: %s", uri, err.Error()) + } + + return nil +} + +// validateKeyTypePurpose validates if the public key type is valid for a certain purpose. +func validateKeyTypePurpose(pubKey document.PublicKey) bool { + if len(pubKey.Purpose()) == 0 { + // general key + _, ok := allowedKeyTypesGeneral[pubKey.Type()] + if !ok { + return false + } + } + + for _, purpose := range pubKey.Purpose() { + allowed, ok := allowedKeyTypes[purpose] + if !ok { + return false + } + + _, ok = allowed[pubKey.Type()] + if !ok { + return false + } + } + + return true +} + +// validateJWK validates JWK. +func validateJWK(jwk document.JWK) error { + if jwk == nil { + return errors.New("key has to be in JWK format") + } + + return jwk.Validate() +} + +// The object MAY include a purposes property, and if included, its value MUST be an array of one or more +// of the strings listed in the allowed purposes array.
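+// For example, a hypothetical "purposes": ["authentication", "keyAgreement"] is accepted, while an +// empty array or an unknown purpose value is rejected.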
+func validateKeyPurposes(pubKey document.PublicKey) error { + _, exists := pubKey[document.PurposesProperty] + + if exists && len(pubKey.Purpose()) == 0 { + return fmt.Errorf("if '%s' key is specified, it must contain at least one purpose", document.PurposesProperty) + } + + if len(pubKey.Purpose()) > len(allowedPurposes) { + return fmt.Errorf("public key purpose exceeds maximum length: %d", len(allowedPurposes)) + } + + for _, purpose := range pubKey.Purpose() { + if _, ok := allowedPurposes[document.KeyPurpose(purpose)]; !ok { + return fmt.Errorf("invalid purpose: %s", purpose) + } + } + + return nil +} + +func contains(values []string, value string) bool { + for _, v := range values { + if v == value { + return true + } + } + + return false +} + +func validateIds(ids []string) error { + for _, id := range ids { + if err := validateID(id); err != nil { + return err + } + } + + return nil +} + +func getRequiredArray(entry interface{}) ([]interface{}, error) { + arr, ok := entry.([]interface{}) + if !ok { + return nil, errors.New("expected array of interfaces") + } + + if len(arr) == 0 { + return nil, errors.New("required array is empty") + } + + return arr, nil +} diff --git a/pkg/versions/1_0/operationparser/patchvalidator/document_test.go b/pkg/versions/1_0/operationparser/patchvalidator/document_test.go new file mode 100644 index 0000000..aa818e4 --- /dev/null +++ b/pkg/versions/1_0/operationparser/patchvalidator/document_test.go @@ -0,0 +1,755 @@ +/* +Copyright Gen Digital Inc. All Rights Reserved. + +SPDX-License-Identifier: Apache-2.0 +*/ + +package patchvalidator + +import ( + "io" + "os" + "testing" + + "github.com/stretchr/testify/require" + + "github.com/trustbloc/sidetree-go/pkg/document" +) + +func TestValidatePublicKeys(t *testing.T) { + t.Run("success", func(t *testing.T) { + r := reader(t, "testdata/doc.json") + + data, err := io.ReadAll(r) + require.Nil(t, err) + + doc, err := document.DidDocumentFromBytes(data) + require.Nil(t, err) + + err = validatePublicKeys(doc.PublicKeys()) + require.Nil(t, err) + }) + + t.Run("success - missing purpose", func(t *testing.T) { + doc, err := document.DidDocumentFromBytes([]byte(noPurpose)) + require.Nil(t, err) + + err = validatePublicKeys(doc.PublicKeys()) + require.NoError(t, err) + }) + + t.Run("success - base58 key", func(t *testing.T) { + doc, err := document.DidDocumentFromBytes([]byte(withB58key)) + require.Nil(t, err) + + err = validatePublicKeys(doc.PublicKeys()) + require.NoError(t, err) + }) +} + +func TestValidatePublicKeysErrors(t *testing.T) { + t.Run("error - empty purpose", func(t *testing.T) { + doc, err := document.DidDocumentFromBytes([]byte(emptyPurpose)) + require.Nil(t, err) + + err = validatePublicKeys(doc.PublicKeys()) + require.Error(t, err) + require.Contains(t, err.Error(), "if 'purposes' key is specified, it must contain at least one purpose") + }) + t.Run("invalid purpose", func(t *testing.T) { + doc, err := document.DidDocumentFromBytes([]byte(wrongPurpose)) + require.Nil(t, err) + + err = validatePublicKeys(doc.PublicKeys()) + require.Error(t, err) + require.Contains(t, err.Error(), "invalid purpose") + }) + t.Run("purpose exceeds maximum", func(t *testing.T) { + doc, err := document.DidDocumentFromBytes([]byte(tooMuchPurpose)) + require.Nil(t, err) + + err = validatePublicKeys(doc.PublicKeys()) + require.Error(t, err) + require.Contains(t, err.Error(), "public key purpose exceeds maximum length") + }) + t.Run("invalid key type", func(t *testing.T) { + doc, err := 
document.DidDocumentFromBytes([]byte(invalidKeyType)) + require.Nil(t, err) + + err = validatePublicKeys(doc.PublicKeys()) + require.Error(t, err) + require.Contains(t, err.Error(), "invalid key type") + }) + t.Run("missing id", func(t *testing.T) { + doc, err := document.DidDocumentFromBytes([]byte(noID)) + require.Nil(t, err) + + err = validatePublicKeys(doc.PublicKeys()) + require.Error(t, err) + require.Contains(t, err.Error(), "key 'id' is required for public key") + }) + t.Run("invalid id - too long", func(t *testing.T) { + doc, err := document.DidDocumentFromBytes([]byte(idLong)) + require.Nil(t, err) + + err = validatePublicKeys(doc.PublicKeys()) + require.Error(t, err) + require.Contains(t, err.Error(), "public key: id exceeds maximum length") + }) + t.Run("duplicate id", func(t *testing.T) { + doc, err := document.DidDocumentFromBytes([]byte(duplicateID)) + require.Nil(t, err) + + err = validatePublicKeys(doc.PublicKeys()) + require.Error(t, err) + require.Contains(t, err.Error(), "duplicate public key id") + }) + + t.Run("unknown property", func(t *testing.T) { + doc, err := document.DidDocumentFromBytes([]byte(moreProperties)) + require.Nil(t, err) + + err = validatePublicKeys(doc.PublicKeys()) + require.Error(t, err) + require.Contains(t, err.Error(), "key 'other' is not allowed for public key") + }) + + t.Run("invalid jwk", func(t *testing.T) { + doc, err := document.DidDocumentFromBytes([]byte(invalidJWK)) + require.Nil(t, err) + + err = validatePublicKeys(doc.PublicKeys()) + require.Error(t, err) + require.Contains(t, err.Error(), "JWK crv is missing") + }) + + t.Run("pkB58 key with jwk type", func(t *testing.T) { + doc, err := document.DidDocumentFromBytes([]byte(jwkTypeWithB58Key)) + require.Nil(t, err) + + err = validatePublicKeys(doc.PublicKeys()) + require.Error(t, err) + require.Contains(t, err.Error(), "key has to be in JWK format") + }) + + t.Run("no public key field", func(t *testing.T) { + doc, err := document.DidDocumentFromBytes([]byte(missingPubKey)) + require.Nil(t, err) + + err = validatePublicKeys(doc.PublicKeys()) + require.Error(t, err) + require.Contains(t, err.Error(), "exactly one key required of") + require.Contains(t, err.Error(), document.PublicKeyJwkProperty) + require.Contains(t, err.Error(), document.PublicKeyBase58Property) + }) + + t.Run("too many public key fields", func(t *testing.T) { + doc, err := document.DidDocumentFromBytes([]byte(multiplePublicKeyFields)) + require.Nil(t, err) + + err = validatePublicKeys(doc.PublicKeys()) + require.Error(t, err) + require.Contains(t, err.Error(), "exactly one key required of") + require.Contains(t, err.Error(), document.PublicKeyJwkProperty) + require.Contains(t, err.Error(), document.PublicKeyBase58Property) + }) +} + +func TestValidateServices(t *testing.T) { + t.Run("success", func(t *testing.T) { + doc, err := document.DidDocumentFromBytes([]byte(serviceDoc)) + require.NoError(t, err) + + err = validateServices(doc.Services()) + require.NoError(t, err) + }) + t.Run("error - duplicate service id", func(t *testing.T) { + doc, err := document.DidDocumentFromBytes([]byte(serviceDocWithDuplicateServices)) + require.NoError(t, err) + + err = validateServices(doc.Services()) + require.Error(t, err) + require.Contains(t, err.Error(), "duplicate service id: sid-123_ABC") + }) + t.Run("success - service can have allowed optional property", func(t *testing.T) { + doc, err := document.DidDocumentFromBytes([]byte(serviceDocOptionalProperty)) + require.NoError(t, err) + + err = validateServices(doc.Services()) + 
require.NoError(t, err) + }) + t.Run("error - missing service id", func(t *testing.T) { + doc, err := document.DidDocumentFromBytes([]byte(serviceDocNoID)) + require.NoError(t, err) + + err = validateServices(doc.Services()) + require.Error(t, err) + require.Contains(t, err.Error(), "service id is missing") + }) + t.Run("error - missing service type", func(t *testing.T) { + doc, err := document.DidDocumentFromBytes([]byte(serviceDocNoType)) + require.NoError(t, err) + + err = validateServices(doc.Services()) + require.Error(t, err) + require.Contains(t, err.Error(), "service type is missing") + }) + t.Run("error - service endpoint missing", func(t *testing.T) { + doc, err := document.DidDocumentFromBytes([]byte(serviceDocEndpointMissing)) + require.NoError(t, err) + + err = validateServices(doc.Services()) + require.Error(t, err) + require.Contains(t, err.Error(), "service endpoint is missing") + }) + t.Run("success - service endpoint is an object", func(t *testing.T) { + doc, err := document.DidDocumentFromBytes([]byte(serviceDocEndpointIsAnObject)) + require.NoError(t, err) + err = validateServices(doc.Services()) + require.NoError(t, err) + }) + t.Run("success - service endpoint is an array of objects", func(t *testing.T) { + doc, err := document.DidDocumentFromBytes([]byte(serviceDocEndpointIsAnArrayOfObjects)) + require.NoError(t, err) + err = validateServices(doc.Services()) + require.NoError(t, err) + }) + t.Run("success - service endpoint is an array of string objects", func(t *testing.T) { + doc, err := document.DidDocumentFromBytes([]byte(serviceDocEndpointIsAnArrayOfURLStrings)) + require.NoError(t, err) + err = validateServices(doc.Services()) + require.NoError(t, err) + }) + t.Run("success - service endpoint is an array of strings", func(t *testing.T) { + servicesMap := make(map[string]interface{}) + servicesMap["id"] = "someID" + servicesMap["type"] = "someType" + servicesMap["serviceEndpoint"] = []string{"https://hello.com", "https://there.com"} + + err := validateServices([]document.Service{document.NewService(servicesMap)}) + require.NoError(t, err) + }) + t.Run("error - service endpoint is an array of invalid strings", func(t *testing.T) { + servicesMap := make(map[string]interface{}) + servicesMap["id"] = "someID" + servicesMap["type"] = "someType" + servicesMap["serviceEndpoint"] = []string{"invalid-1", "invalid-2"} + + err := validateServices([]document.Service{document.NewService(servicesMap)}) + require.Error(t, err) + require.Contains(t, err.Error(), "service endpoint 'invalid-1' is not a valid URI") + }) + t.Run("error - service endpoint is an array of invalid string URL objects", func(t *testing.T) { + doc, err := document.DidDocumentFromBytes([]byte(serviceDocEndpointIsAnArrayOfInvalidURLStrings)) + require.NoError(t, err) + err = validateServices(doc.Services()) + require.Error(t, err) + require.Contains(t, err.Error(), "service endpoint 'hello' is not a valid URI") + }) + t.Run("error - empty service endpoint URI", func(t *testing.T) { + doc, err := document.DidDocumentFromBytes([]byte(serviceDocNoServiceEndpointURI)) + require.NoError(t, err) + + err = validateServices(doc.Services()) + require.Error(t, err) + require.Contains(t, err.Error(), "service endpoint URI is empty") + }) + t.Run("error - service id too long", func(t *testing.T) { + doc, err := document.DidDocumentFromBytes([]byte(serviceDocLongID)) + require.NoError(t, err) + + err = validateServices(doc.Services()) + require.Error(t, err) + require.Contains(t, err.Error(), "service: id exceeds 
maximum length") + }) + t.Run("error - service type too long", func(t *testing.T) { + doc, err := document.DidDocumentFromBytes([]byte(serviceDocLongType)) + require.NoError(t, err) + + err = validateServices(doc.Services()) + require.Error(t, err) + require.Contains(t, err.Error(), "service type exceeds maximum length") + }) + t.Run("error - service endpoint not URI", func(t *testing.T) { + doc, err := document.DidDocumentFromBytes([]byte(serviceDocEndpointNotURI)) + require.NoError(t, err) + + err = validateServices(doc.Services()) + require.Error(t, err) + require.Contains(t, err.Error(), "service endpoint 'hello' is not a valid URI") + }) + t.Run("success - didcomm service", func(t *testing.T) { + doc, err := document.DIDDocumentFromReader(reader(t, "testdata/doc.json")) + require.NoError(t, err) + err = validateServices(doc.Services()) + require.NoError(t, err) + }) +} + +func TestValidateID(t *testing.T) { + t.Run("success", func(t *testing.T) { + err := validateID("recovered") + require.NoError(t, err) + }) + t.Run("error - id not ASCII encoded character", func(t *testing.T) { + err := validateID("a****") + require.Error(t, err) + require.Contains(t, err.Error(), "id contains invalid characters") + }) + t.Run("error - exceeded maximum length", func(t *testing.T) { + err := validateID("1234567890abcdefghijk123456789012345678901234567890") + require.Error(t, err) + require.Contains(t, err.Error(), "id exceeds maximum length: 50") + }) +} + +func TestValidateJWK(t *testing.T) { + t.Run("success", func(t *testing.T) { + jwk := document.JWK{ + "kty": "kty", + "crv": "crv", + "x": "x", + "y": "y", + } + + err := validateJWK(jwk) + require.NoError(t, err) + }) + + t.Run("missing kty", func(t *testing.T) { + jwk := document.JWK{ + "kty": "", + "crv": "crv", + "x": "x", + "y": "y", + } + + err := validateJWK(jwk) + require.Error(t, err) + require.Contains(t, err.Error(), "JWK kty is missing") + }) +} + +func TestGeneralKeyPurpose(t *testing.T) { + for _, pubKeyType := range allowedKeyTypesAgreement { + pk := createMockPublicKeyWithType(pubKeyType) + err := validatePublicKeys([]document.PublicKey{pk}) + require.NoError(t, err, "valid purpose for type") + } + + pk := createMockPublicKeyWithTypeAndPurpose("invalid", []interface{}{document.KeyPurposeAuthentication}) + err := validatePublicKeys([]document.PublicKey{pk}) + require.Error(t, err, "invalid purpose for type") +} + +func TestInvalidKeyPurpose(t *testing.T) { + pk := createMockPublicKeyWithTypeAndPurpose(jsonWebKey2020, []interface{}{"invalidpurpose"}) + err := validatePublicKeys([]document.PublicKey{pk}) + require.Error(t, err, "invalid purpose") +} + +func TestVerificationKeyPurpose(t *testing.T) { + testKeyPurpose(t, allowedKeyTypesVerification, document.KeyPurposeAssertionMethod) + testKeyPurpose(t, allowedKeyTypesVerification, document.KeyPurposeAuthentication) + testKeyPurpose(t, allowedKeyTypesVerification, document.KeyPurposeCapabilityDelegation) + testKeyPurpose(t, allowedKeyTypesVerification, document.KeyPurposeCapabilityInvocation) +} + +func TestAgreementKeyPurpose(t *testing.T) { + testKeyPurpose(t, allowedKeyTypesAgreement, document.KeyPurposeKeyAgreement) +} + +func reader(t *testing.T, filename string) io.Reader { + f, err := os.Open(filename) + require.Nil(t, err) + + return f +} + +func testKeyPurpose(t *testing.T, allowedKeys existenceMap, pubKeyPurpose string) { + for _, pubKeyType := range allowedKeys { + pk := createMockPublicKeyWithTypeAndPurpose(pubKeyType, []interface{}{pubKeyPurpose}) + err := 
validatePublicKeys([]document.PublicKey{pk}) + require.NoError(t, err, "valid purpose for type") + + pk = createMockPublicKeyWithTypeAndPurpose(pubKeyType, []interface{}{pubKeyPurpose}) + err = validatePublicKeys([]document.PublicKey{pk}) + require.NoError(t, err, "valid purpose for type") + } + + for _, pubKeyType := range allowedKeyTypesGeneral { + _, ok := allowedKeys[pubKeyType] + if ok { + continue + } + + pk := createMockPublicKeyWithTypeAndPurpose(pubKeyType, []interface{}{pubKeyPurpose, document.KeyPurposeKeyAgreement}) + err := validatePublicKeys([]document.PublicKey{pk}) + require.Error(t, err, "invalid purpose for type") + + pk = createMockPublicKeyWithTypeAndPurpose(pubKeyType, []interface{}{pubKeyPurpose, document.KeyPurposeAssertionMethod}) + err = validatePublicKeys([]document.PublicKey{pk}) + require.Error(t, err, "invalid purpose for type") + + pk = createMockPublicKeyWithTypeAndPurpose(pubKeyType, []interface{}{pubKeyPurpose}) + err = validatePublicKeys([]document.PublicKey{pk}) + require.Error(t, err, "invalid purpose for type") + + pk = createMockPublicKeyWithTypeAndPurpose(pubKeyType, []interface{}{pubKeyPurpose}) + err = validatePublicKeys([]document.PublicKey{pk}) + require.Error(t, err, "invalid purpose for type") + } +} + +func createMockPublicKeyWithTypeAndPurpose(pubKeyType string, purpose []interface{}) document.PublicKey { + pk := map[string]interface{}{ + "id": "key1", + "type": pubKeyType, + "purposes": purpose, + "publicKeyJwk": map[string]interface{}{ + "kty": "kty", + "crv": "crv", + "x": "x", + "y": "y", + }, + } + + return pk +} + +func createMockPublicKeyWithType(pubKeyType string) document.PublicKey { + pk := map[string]interface{}{ + "id": "key1", + "type": pubKeyType, + "publicKeyJwk": map[string]interface{}{ + "kty": "kty", + "crv": "crv", + "x": "x", + "y": "y", + }, + } + + return pk +} + +const moreProperties = `{ + "publicKey": [ + { + "id": "key1", + "other": "unknown", + "type": "JsonWebKey2020", + "publicKeyJwk": { + "kty": "EC", + "crv": "P-256K", + "x": "PUymIqdtF_qxaAqPABSw-C-owT1KYYQbsMKFM-L9fJA", + "y": "nM84jDHCMOTGTh_ZdHq4dBBdo4Z5PkEOW9jA8z8IsGc" + } + } + ] +}` + +const noPurpose = `{ + "publicKey": [ + { + "id": "key1", + "type": "JsonWebKey2020", + "publicKeyJwk": { + "kty": "EC", + "crv": "P-256K", + "x": "PUymIqdtF_qxaAqPABSw-C-owT1KYYQbsMKFM-L9fJA", + "y": "nM84jDHCMOTGTh_ZdHq4dBBdo4Z5PkEOW9jA8z8IsGc" + } + } + ] +}` + +const invalidJWK = `{ + "publicKey": [ + { + "id": "key1", + "type": "JsonWebKey2020", + "publicKeyJwk": { + "x": "PUymIqdtF_qxaAqPABSw-C-owT1KYYQbsMKFM-L9fJA", + "y": "nM84jDHCMOTGTh_ZdHq4dBBdo4Z5PkEOW9jA8z8IsGc" + } + } + ] +}` + +const withB58key = `{ + "publicKey": [ + { + "id": "key1", + "type": "Ed25519VerificationKey2018", + "publicKeyBase58": "36d8RkFy2SdabnGzcZ3LcCSDA8NP5T4bsoADwuXtoN3B" + } + ] +}` + +const jwkTypeWithB58Key = `{ + "publicKey": [ + { + "id": "key1", + "type": "JsonWebKey2020", + "publicKeyBase58": "36d8RkFy2SdabnGzcZ3LcCSDA8NP5T4bsoADwuXtoN3B" + } + ] +}` + +const emptyPurpose = `{ + "publicKey": [ + { + "id": "key1", + "type": "JsonWebKey2020", + "purposes": [], + "publicKeyJwk": { + "kty": "EC", + "crv": "P-256K", + "x": "PUymIqdtF_qxaAqPABSw-C-owT1KYYQbsMKFM-L9fJA", + "y": "nM84jDHCMOTGTh_ZdHq4dBBdo4Z5PkEOW9jA8z8IsGc" + } + } + ] +}` + +const wrongPurpose = `{ + "publicKey": [ + { + "id": "key1", + "type": "JsonWebKey2020", + "purposes": ["invalid"], + "publicKeyJwk": { + "kty": "EC", + "crv": "P-256K", + "x": "PUymIqdtF_qxaAqPABSw-C-owT1KYYQbsMKFM-L9fJA", + "y": 
"nM84jDHCMOTGTh_ZdHq4dBBdo4Z5PkEOW9jA8z8IsGc" + } + } + ] +}` + +const tooMuchPurpose = `{ + "publicKey": [ + { + "id": "key1", + "type": "JsonWebKey2020", + "purposes": ["authentication", "assertionMethod", "keyAgreement", "capabilityDelegation", "capabilityInvocation", "other"], + "publicKeyJwk": { + "kty": "EC", + "crv": "P-256K", + "x": "PUymIqdtF_qxaAqPABSw-C-owT1KYYQbsMKFM-L9fJA", + "y": "nM84jDHCMOTGTh_ZdHq4dBBdo4Z5PkEOW9jA8z8IsGc" + } + } + ] +}` + +const idLong = `{ + "publicKey": [ + { + "id": "idwihmorethan50characters123456789012345678901234567890", + "type": "JsonWebKey2020", + "publicKeyJwk": { + "kty": "EC", + "crv": "P-256K", + "x": "PUymIqdtF_qxaAqPABSw-C-owT1KYYQbsMKFM-L9fJA", + "y": "nM84jDHCMOTGTh_ZdHq4dBBdo4Z5PkEOW9jA8z8IsGc" + } + }] +}` + +const noID = `{ + "publicKey": [ + { + "type": "JsonWebKey2020", + "publicKeyJwk": { + "kty": "EC", + "crv": "P-256K", + "x": "PUymIqdtF_qxaAqPABSw-C-owT1KYYQbsMKFM-L9fJA", + "y": "nM84jDHCMOTGTh_ZdHq4dBBdo4Z5PkEOW9jA8z8IsGc" + } + } + ] +}` + +const invalidKeyType = `{ + "publicKey": [ + { + "id": "key1", + "type": "InvalidKeyType", + "publicKeyJwk": { + "kty": "EC", + "crv": "P-256K", + "x": "PUymIqdtF_qxaAqPABSw-C-owT1KYYQbsMKFM-L9fJA", + "y": "nM84jDHCMOTGTh_ZdHq4dBBdo4Z5PkEOW9jA8z8IsGc" + } + } + ] +}` + +const missingPubKey = `{ + "publicKey": [ + { + "id": "key1", + "type": "JsonWebKey2020" + } + ] +}` + +const multiplePublicKeyFields = `{ + "publicKey": [ + { + "id": "key1", + "type": "JsonWebKey2020", + "publicKeyBase58": "36d8RkFy2SdabnGzcZ3LcCSDA8NP5T4bsoADwuXtoN3B", + "publicKeyJwk": { + "kty": "EC", + "crv": "P-256K", + "x": "PUymIqdtF_qxaAqPABSw-C-owT1KYYQbsMKFM-L9fJA", + "y": "nM84jDHCMOTGTh_ZdHq4dBBdo4Z5PkEOW9jA8z8IsGc" + } + } + ] +}` + +const duplicateID = `{ + "publicKey": [ + { + "id": "key1", + "type": "JsonWebKey2020", + "publicKeyJwk": { + "kty": "EC", + "crv": "P-256K", + "x": "PUymIqdtF_qxaAqPABSw-C-owT1KYYQbsMKFM-L9fJA", + "y": "nM84jDHCMOTGTh_ZdHq4dBBdo4Z5PkEOW9jA8z8IsGc" + } + }, + { + "id": "key1", + "type": "JsonWebKey2020", + "publicKeyJwk": { + "kty": "EC", + "crv": "P-256K", + "x": "PUymIqdtF_qxaAqPABSw-C-owT1KYYQbsMKFM-L9fJA", + "y": "nM84jDHCMOTGTh_ZdHq4dBBdo4Z5PkEOW9jA8z8IsGc" + } + } + ] +}` + +const serviceDoc = `{ + "service": [{ + "id": "sid-123_ABC", + "type": "VerifiableCredentialService", + "serviceEndpoint": "https://example.com/vc/" + }] +}` + +const serviceDocWithDuplicateServices = `{ + "service": [{ + "id": "sid-123_ABC", + "type": "VerifiableCredentialService", + "serviceEndpoint": "https://example.com/vc/" + }, + { + "id": "sid-123_ABC", + "type": "VerifiableCredentialService", + "serviceEndpoint": "https://example.com/vc/" + }] +}` + +const serviceDocNoID = `{ + "service": [{ + "id": "", + "type": "VerifiableCredentialService", + "serviceEndpoint": "https://example.com/vc/" + }] +}` + +const serviceDocLongID = `{ + "service": [{ + "id": "thisissomeidthathasmorethan50characters123456789012345678901234567890", + "type": "VerifiableCredentialService", + "serviceEndpoint": "https://example.com/vc/" + }] +}` + +const serviceDocLongType = `{ + "service": [{ + "id": "id", + "type": "VerifiableCredentialServiceVerifiableCredentialServiceVerifiableCredentialService", + "serviceEndpoint": "https://example.com/vc/" + }] +}` + +const serviceDocEndpointMissing = `{ + "service": [{ + "id": "vcs", + "type": "type" + }] +}` + +const serviceDocEndpointNotURI = `{ + "service": [{ + "id": "vcs", + "type": "type", + "serviceEndpoint": "hello" + }] +}` + +const serviceDocEndpointIsAnObject = `{ + 
"service": [{ + "id": "vcs", + "type": "type", + "serviceEndpoint": {"key":"value"} + }] +}` + +const serviceDocEndpointIsAnArrayOfObjects = `{ + "service": [{ + "id": "vcs", + "type": "type", + "serviceEndpoint": [{"key":"value"},{"key2":"value2"}] + }] +}` + +const serviceDocEndpointIsAnArrayOfURLStrings = `{ + "service": [{ + "id": "vcs", + "type": "type", + "serviceEndpoint": ["https://hello.com", "https://there.com"] + }] +}` + +const serviceDocEndpointIsAnArrayOfInvalidURLStrings = `{ + "service": [{ + "id": "vcs", + "type": "type", + "serviceEndpoint": ["hello", "there"] + }] +}` + +const serviceDocNoType = `{ + "service": [{ + "id": "vcs", + "type": "", + "serviceEndpoint": "https://example.com/vc/" + }] +}` + +const serviceDocNoServiceEndpointURI = `{ + "service": [{ + "id": "vcs", + "type": "VerifiableCredentialService", + "serviceEndpoint": "" + }] +}` + +const serviceDocOptionalProperty = `{ + "service": [{ + "id": "vcs", + "routingKeys": "value", + "type": "VerifiableCredentialService", + "serviceEndpoint": "https://example.com/vc/" + }] +}` diff --git a/pkg/versions/1_0/operationparser/patchvalidator/ietf.go b/pkg/versions/1_0/operationparser/patchvalidator/ietf.go new file mode 100644 index 0000000..d56e856 --- /dev/null +++ b/pkg/versions/1_0/operationparser/patchvalidator/ietf.go @@ -0,0 +1,76 @@ +/* +Copyright Gen Digital Inc. All Rights Reserved. + +SPDX-License-Identifier: Apache-2.0 +*/ + +package patchvalidator + +import ( + "encoding/json" + "fmt" + "strings" + + jsonpatch "github.com/evanphx/json-patch" + + "github.com/trustbloc/sidetree-go/pkg/document" + "github.com/trustbloc/sidetree-go/pkg/patch" +) + +// NewJSONValidator creates new validator. +func NewJSONValidator() *JSONValidator { + return &JSONValidator{} +} + +// JSONValidator implements validator for "ietf-json-patch" patch. +type JSONValidator struct { +} + +// Validate validates patch. 
+func (v *JSONValidator) Validate(p patch.Patch) error { + value, err := p.GetValue() + if err != nil { + return err + } + + patches, err := getRequiredArray(value) + if err != nil { + return fmt.Errorf("invalid json patch value: %s", err.Error()) + } + + patchesBytes, err := json.Marshal(patches) + if err != nil { + return err + } + + return validateJSONPatches(patchesBytes) +} + +func validateJSONPatches(patches []byte) error { + jsonPatches, err := jsonpatch.DecodePatch(patches) + if err != nil { + return fmt.Errorf("%s: %s", patch.JSONPatch, err.Error()) + } + + for _, p := range jsonPatches { + pathMsg, ok := p["path"] + if !ok { + return fmt.Errorf("%s: path not found", patch.JSONPatch) + } + + var path string + if err := json.Unmarshal(*pathMsg, &path); err != nil { + return fmt.Errorf("%s: invalid path", patch.JSONPatch) + } + + if strings.HasPrefix(path, "/"+document.ServiceProperty) { + return fmt.Errorf("%s: cannot modify services", patch.JSONPatch) + } + + if strings.HasPrefix(path, "/"+document.PublicKeyProperty) { + return fmt.Errorf("%s: cannot modify public keys", patch.JSONPatch) + } + } + + return nil +} diff --git a/pkg/versions/1_0/operationparser/patchvalidator/ietf_test.go b/pkg/versions/1_0/operationparser/patchvalidator/ietf_test.go new file mode 100644 index 0000000..85792ef --- /dev/null +++ b/pkg/versions/1_0/operationparser/patchvalidator/ietf_test.go @@ -0,0 +1,86 @@ +package patchvalidator + +import ( + "testing" + + "github.com/stretchr/testify/require" + + "github.com/trustbloc/sidetree-go/pkg/patch" +) + +func TestIETFPatch(t *testing.T) { + t.Run("success", func(t *testing.T) { + p, err := patch.FromBytes([]byte(ietfPatch)) + require.NoError(t, err) + + err = NewJSONValidator().Validate(p) + require.NoError(t, err) + }) + t.Run("error - path not found", func(t *testing.T) { + p, err := patch.FromBytes([]byte(ietfPatchNoPath)) + require.NoError(t, err) + + err = NewJSONValidator().Validate(p) + require.Error(t, err) + require.Equal(t, err.Error(), "ietf-json-patch: path not found") + }) + t.Run("error - cannot update services", func(t *testing.T) { + p, err := patch.FromBytes([]byte(ietfServicesPatch)) + require.NoError(t, err) + + err = NewJSONValidator().Validate(p) + require.Error(t, err) + require.Equal(t, err.Error(), "ietf-json-patch: cannot modify services") + }) + t.Run("error - cannot update public keys", func(t *testing.T) { + p, err := patch.FromBytes([]byte(ietfPublicKeysPatch)) + require.NoError(t, err) + + err = NewJSONValidator().Validate(p) + require.Error(t, err) + require.Equal(t, err.Error(), "ietf-json-patch: cannot modify public keys") + }) + t.Run("error missing patches", func(t *testing.T) { + p := make(patch.Patch) + p[patch.ActionKey] = patch.JSONPatch + + err := NewJSONValidator().Validate(p) + require.Error(t, err) + require.Contains(t, err.Error(), "ietf-json-patch patch is missing key: patches") + }) +} + +const ietfPatch = `{ + "action": "ietf-json-patch", + "patches": [{ + "op": "replace", + "path": "/name", + "value": "value" + }] +}` + +const ietfPatchNoPath = `{ + "action": "ietf-json-patch", + "patches": [{ + "op": "replace", + "value": "value" + }] +}` + +const ietfServicesPatch = `{ + "action": "ietf-json-patch", + "patches": [{ + "op": "replace", + "path": "/service", + "value": "new value" + }] +}` + +const ietfPublicKeysPatch = `{ + "action": "ietf-json-patch", + "patches": [{ + "op": "replace", + "path": "/publicKey/0/type", + "value": "new type" + }] +}` diff --git 
a/pkg/versions/1_0/operationparser/patchvalidator/removekeys.go b/pkg/versions/1_0/operationparser/patchvalidator/removekeys.go new file mode 100644 index 0000000..bacae12 --- /dev/null +++ b/pkg/versions/1_0/operationparser/patchvalidator/removekeys.go @@ -0,0 +1,38 @@ +/* +Copyright Gen Digital Inc. All Rights Reserved. + +SPDX-License-Identifier: Apache-2.0 +*/ + +package patchvalidator + +import ( + "fmt" + + "github.com/trustbloc/sidetree-go/pkg/document" + "github.com/trustbloc/sidetree-go/pkg/patch" +) + +// NewRemovePublicKeysValidator creates validator for "remove-public-keys" patch. +func NewRemovePublicKeysValidator() *RemovePublicKeysValidator { + return &RemovePublicKeysValidator{} +} + +// RemovePublicKeysValidator implements validator for "remove-public-keys" patch. +type RemovePublicKeysValidator struct { +} + +// Validate validates patch. +func (v *RemovePublicKeysValidator) Validate(p patch.Patch) error { + value, err := p.GetValue() + if err != nil { + return err + } + + genericArr, err := getRequiredArray(value) + if err != nil { + return fmt.Errorf("invalid remove public keys value: %s", err.Error()) + } + + return validateIds(document.StringArray(genericArr)) +} diff --git a/pkg/versions/1_0/operationparser/patchvalidator/removekeys_test.go b/pkg/versions/1_0/operationparser/patchvalidator/removekeys_test.go new file mode 100644 index 0000000..c3a5cae --- /dev/null +++ b/pkg/versions/1_0/operationparser/patchvalidator/removekeys_test.go @@ -0,0 +1,50 @@ +package patchvalidator + +import ( + "testing" + + "github.com/stretchr/testify/require" + + "github.com/trustbloc/sidetree-go/pkg/patch" +) + +func TestRemovePublicKeysPatch(t *testing.T) { + t.Run("success", func(t *testing.T) { + p, err := patch.FromBytes([]byte(removePublicKeysPatch)) + require.NoError(t, err) + + err = NewRemovePublicKeysValidator().Validate(p) + require.NoError(t, err) + }) + t.Run("error - missing public key ids", func(t *testing.T) { + p := make(patch.Patch) + p[patch.ActionKey] = patch.RemovePublicKeys + + err := NewRemovePublicKeysValidator().Validate(p) + require.Error(t, err) + require.Contains(t, err.Error(), "remove-public-keys patch is missing key: ids") + }) + t.Run("error - invalid add public keys value", func(t *testing.T) { + p := make(patch.Patch) + p[patch.ActionKey] = patch.RemovePublicKeys + p[patch.IdsKey] = "whatever" + + err := NewRemovePublicKeysValidator().Validate(p) + require.Error(t, err) + require.Contains(t, err.Error(), "expected array of interfaces") + }) + t.Run("invalid public key ids", func(t *testing.T) { + const ids = `["a123*b456"]` + p, err := patch.NewRemovePublicKeysPatch(ids) + require.NoError(t, err) + + err = NewRemovePublicKeysValidator().Validate(p) + require.Error(t, err) + require.Contains(t, err.Error(), "id contains invalid characters") + }) +} + +const removePublicKeysPatch = `{ + "action": "remove-public-keys", + "ids": ["key1", "key2"] +}` diff --git a/pkg/versions/1_0/operationparser/patchvalidator/removeservices.go b/pkg/versions/1_0/operationparser/patchvalidator/removeservices.go new file mode 100644 index 0000000..9571d2e --- /dev/null +++ b/pkg/versions/1_0/operationparser/patchvalidator/removeservices.go @@ -0,0 +1,38 @@ +/* +Copyright Gen Digital Inc. All Rights Reserved. + +SPDX-License-Identifier: Apache-2.0 +*/ + +package patchvalidator + +import ( + "fmt" + + "github.com/trustbloc/sidetree-go/pkg/document" + "github.com/trustbloc/sidetree-go/pkg/patch" +) + +// NewRemoveServicesValidator creates new validator. 
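+// The returned validator expects the patch value to be an array of service IDs
+// and applies the same ID validation rules as the remove-public-keys patch.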
+func NewRemoveServicesValidator() *RemoveServicesValidator { + return &RemoveServicesValidator{} +} + +// RemoveServicesValidator implements validator for "remove-services" patch. +type RemoveServicesValidator struct { +} + +// Validate validates patch. +func (v *RemoveServicesValidator) Validate(p patch.Patch) error { + value, err := p.GetValue() + if err != nil { + return err + } + + genericArr, err := getRequiredArray(value) + if err != nil { + return fmt.Errorf("invalid remove services value: %s", err.Error()) + } + + return validateIds(document.StringArray(genericArr)) +} diff --git a/pkg/versions/1_0/operationparser/patchvalidator/removeservices_test.go b/pkg/versions/1_0/operationparser/patchvalidator/removeservices_test.go new file mode 100644 index 0000000..19e390a --- /dev/null +++ b/pkg/versions/1_0/operationparser/patchvalidator/removeservices_test.go @@ -0,0 +1,50 @@ +package patchvalidator + +import ( + "testing" + + "github.com/stretchr/testify/require" + + "github.com/trustbloc/sidetree-go/pkg/patch" +) + +func TestRemoveServiceEndpointsPatch(t *testing.T) { + t.Run("success", func(t *testing.T) { + p, err := patch.FromBytes([]byte(removeServiceEndpoints)) + require.NoError(t, err) + + err = NewRemoveServicesValidator().Validate(p) + require.NoError(t, err) + }) + t.Run("error - missing public key ids", func(t *testing.T) { + p := make(patch.Patch) + p[patch.ActionKey] = patch.RemoveServiceEndpoints + + err := NewRemoveServicesValidator().Validate(p) + require.Error(t, err) + require.Contains(t, err.Error(), "remove-services patch is missing key: ids") + }) + t.Run("error - invalid service ids", func(t *testing.T) { + p := make(patch.Patch) + p[patch.ActionKey] = patch.RemoveServiceEndpoints + p[patch.IdsKey] = "invalid" + + err := NewRemoveServicesValidator().Validate(p) + require.Error(t, err) + require.Contains(t, err.Error(), "expected array of interfaces") + }) + t.Run("invalid service ids", func(t *testing.T) { + const ids = `["a123*b456"]` + p, err := patch.NewRemoveServiceEndpointsPatch(ids) + require.NoError(t, err) + + err = NewRemoveServicesValidator().Validate(p) + require.Error(t, err) + require.Contains(t, err.Error(), "id contains invalid characters") + }) +} + +const removeServiceEndpoints = `{ + "action": "remove-services", + "ids": ["sds1", "sds2"] +}` diff --git a/pkg/versions/1_0/operationparser/patchvalidator/replace.go b/pkg/versions/1_0/operationparser/patchvalidator/replace.go new file mode 100644 index 0000000..29f9190 --- /dev/null +++ b/pkg/versions/1_0/operationparser/patchvalidator/replace.go @@ -0,0 +1,66 @@ +/* +Copyright Gen Digital Inc. All Rights Reserved. + +SPDX-License-Identifier: Apache-2.0 +*/ + +package patchvalidator + +import ( + "errors" + "fmt" + + "github.com/trustbloc/sidetree-go/pkg/document" + "github.com/trustbloc/sidetree-go/pkg/patch" +) + +// NewReplaceValidator creates new validator. +func NewReplaceValidator() *ReplaceValidator { + return &ReplaceValidator{} +} + +// ReplaceValidator implements validator for "replace" patch. +type ReplaceValidator struct { +} + +// Validate validates patch. 
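+// A replace document may contain only the "publicKeys" and "services"
+// properties; any other key is rejected, and both collections must pass
+// the standard public key and service validation.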
+func (v *ReplaceValidator) Validate(p patch.Patch) error { + value, err := p.GetValue() + if err != nil { + return err + } + + entryMap, err := getRequiredMap(value) + if err != nil { + return err + } + + doc := document.ReplaceDocumentFromJSONLDObject(entryMap) + + allowedKeys := []string{document.ReplaceServiceProperty, document.ReplacePublicKeyProperty} + + for key := range doc { + if !contains(allowedKeys, key) { + return fmt.Errorf("key '%s' is not allowed in replace document", key) + } + } + + if err := validatePublicKeys(doc.PublicKeys()); err != nil { + return fmt.Errorf("failed to validate public keys for replace document: %s", err.Error()) + } + + if err := validateServices(doc.Services()); err != nil { + return fmt.Errorf("failed to validate services for replace document: %s", err.Error()) + } + + return nil +} + +func getRequiredMap(entry interface{}) (map[string]interface{}, error) { + required, ok := entry.(map[string]interface{}) + if !ok { + return nil, errors.New("unexpected interface for document") + } + + return required, nil +} diff --git a/pkg/versions/1_0/operationparser/patchvalidator/replace_test.go b/pkg/versions/1_0/operationparser/patchvalidator/replace_test.go new file mode 100644 index 0000000..ec03256 --- /dev/null +++ b/pkg/versions/1_0/operationparser/patchvalidator/replace_test.go @@ -0,0 +1,125 @@ +/* +Copyright Gen Digital Inc. All Rights Reserved. + +SPDX-License-Identifier: Apache-2.0 +*/ + +package patchvalidator + +import ( + "testing" + + "github.com/stretchr/testify/require" + + "github.com/trustbloc/sidetree-go/pkg/document" + "github.com/trustbloc/sidetree-go/pkg/patch" +) + +func TestValidateReplacePatch(t *testing.T) { + t.Run("success", func(t *testing.T) { + p, err := patch.FromBytes([]byte(replacePatch)) + require.NoError(t, err) + + err = NewReplaceValidator().Validate(p) + require.NoError(t, err) + }) + t.Run("missing document", func(t *testing.T) { + p, err := patch.FromBytes([]byte(replacePatch)) + require.NoError(t, err) + require.NotNil(t, p) + + delete(p, patch.DocumentKey) + err = NewReplaceValidator().Validate(p) + require.Contains(t, err.Error(), "replace patch is missing key: document") + }) + t.Run("error - document has invalid property", func(t *testing.T) { + doc, err := document.FromBytes([]byte(replaceDocWithExtraProperties)) + require.NoError(t, err) + + p := make(patch.Patch) + p[patch.ActionKey] = patch.Replace + p[patch.DocumentKey] = doc.JSONLdObject() + + err = NewReplaceValidator().Validate(p) + require.Error(t, err) + require.Contains(t, err.Error(), "key 'id' is not allowed in replace document") + }) + t.Run("error - public keys (missing type)", func(t *testing.T) { + p, err := patch.NewReplacePatch(replaceDocInvalidPublicKey) + require.NoError(t, err) + require.NotNil(t, p) + + err = NewReplaceValidator().Validate(p) + require.Contains(t, err.Error(), "key 'type' is required for public key") + }) + t.Run("error - services (missing endpoint)", func(t *testing.T) { + p, err := patch.NewReplacePatch(replaceDocInvalidServiceEndpoint) + require.NoError(t, err) + require.NotNil(t, p) + + err = NewReplaceValidator().Validate(p) + require.Contains(t, err.Error(), "service endpoint is missing") + }) +} + +const replacePatch = `{ + "action": "replace", + "document": { + "publicKeys": [ + { + "id": "key-1", + "purposes": ["authentication"], + "type": "EcdsaSecp256k1VerificationKey2019", + "publicKeyJwk": { + "kty": "EC", + "crv": "P-256K", + "x": "PUymIqdtF_qxaAqPABSw-C-owT1KYYQbsMKFM-L9fJA", + "y": 
"nM84jDHCMOTGTh_ZdHq4dBBdo4Z5PkEOW9jA8z8IsGc" + } + }], + "services": [ + { + "id": "sds3", + "type": "SecureDataStore", + "serviceEndpoint": "http://hub.my-personal-server.com" + }] + } +}` + +const replaceDocWithExtraProperties = `{ + "id": "some-id", + "publicKeys": [ + { + "id": "key-1", + "purposes": ["authentication"], + "type": "EcdsaSecp256k1VerificationKey2019", + "publicKeyJwk": { + "kty": "EC", + "crv": "P-256K", + "x": "PUymIqdtF_qxaAqPABSw-C-owT1KYYQbsMKFM-L9fJA", + "y": "nM84jDHCMOTGTh_ZdHq4dBBdo4Z5PkEOW9jA8z8IsGc" + } + }] +}` + +const replaceDocInvalidPublicKey = `{ + "publicKeys": [ + { + "id": "key-1", + "purposes": ["authentication"], + "publicKeyJwk": { + "kty": "EC", + "crv": "P-256K", + "x": "PUymIqdtF_qxaAqPABSw-C-owT1KYYQbsMKFM-L9fJA", + "y": "nM84jDHCMOTGTh_ZdHq4dBBdo4Z5PkEOW9jA8z8IsGc" + } + }] +}` + +const replaceDocInvalidServiceEndpoint = `{ + "services": [ + { + "id": "sds3", + "type": "SecureDataStore" + }] +}` diff --git a/pkg/versions/1_0/operationparser/patchvalidator/testdata/doc.json b/pkg/versions/1_0/operationparser/patchvalidator/testdata/doc.json new file mode 100644 index 0000000..bb21b8b --- /dev/null +++ b/pkg/versions/1_0/operationparser/patchvalidator/testdata/doc.json @@ -0,0 +1,144 @@ +{ + "publicKey": [ + { + "id": "master", + "type": "EcdsaSecp256k1VerificationKey2019", + "purposes": ["authentication", "assertionMethod", "keyAgreement", "capabilityDelegation", "capabilityInvocation"], + "publicKeyJwk": { + "kty": "EC", + "crv": "P-256K", + "x": "PUymIqdtF_qxaAqPABSw-C-owT1KYYQbsMKFM-L9fJA", + "y": "nM84jDHCMOTGTh_ZdHq4dBBdo4Z5PkEOW9jA8z8IsGc" + } + }, + { + "id": "dual-auth-gen", + "type": "JsonWebKey2020", + "purposes": ["authentication"], + "publicKeyJwk": { + "kty": "EC", + "crv": "P-256K", + "x": "PUymIqdtF_qxaAqPABSw-C-owT1KYYQbsMKFM-L9fJA", + "y": "nM84jDHCMOTGTh_ZdHq4dBBdo4Z5PkEOW9jA8z8IsGc" + } + }, + { + "id": "auth-only", + "type": "JsonWebKey2020", + "purposes": ["authentication"], + "publicKeyJwk": { + "kty": "EC", + "crv": "P-256K", + "x": "PUymIqdtF_qxaAqPABSw-C-owT1KYYQbsMKFM-L9fJA", + "y": "nM84jDHCMOTGTh_ZdHq4dBBdo4Z5PkEOW9jA8z8IsGc" + } + }, + { + "id": "dual-assertion-gen", + "type": "JsonWebKey2020", + "purposes": ["assertionMethod"], + "publicKeyJwk": { + "kty": "EC", + "crv": "P-256K", + "x": "PUymIqdtF_qxaAqPABSw-C-owT1KYYQbsMKFM-L9fJA", + "y": "nM84jDHCMOTGTh_ZdHq4dBBdo4Z5PkEOW9jA8z8IsGc" + } + }, + { + "id": "assertion-only", + "type": "JsonWebKey2020", + "purposes": ["assertionMethod"], + "publicKeyJwk": { + "kty": "EC", + "crv": "P-256K", + "x": "PUymIqdtF_qxaAqPABSw-C-owT1KYYQbsMKFM-L9fJA", + "y": "nM84jDHCMOTGTh_ZdHq4dBBdo4Z5PkEOW9jA8z8IsGc" + } + }, + { + "id": "dual-agreement-gen", + "type": "JsonWebKey2020", + "purposes": ["keyAgreement"], + "publicKeyJwk": { + "kty": "EC", + "crv": "P-256K", + "x": "PUymIqdtF_qxaAqPABSw-C-owT1KYYQbsMKFM-L9fJA", + "y": "nM84jDHCMOTGTh_ZdHq4dBBdo4Z5PkEOW9jA8z8IsGc" + } + }, + { + "id": "agreement-only", + "type": "JsonWebKey2020", + "purposes": ["keyAgreement"], + "publicKeyJwk": { + "kty": "EC", + "crv": "P-256K", + "x": "PUymIqdtF_qxaAqPABSw-C-owT1KYYQbsMKFM-L9fJA", + "y": "nM84jDHCMOTGTh_ZdHq4dBBdo4Z5PkEOW9jA8z8IsGc" + } + }, + { + "id": "dual-invocation-gen", + "type": "JsonWebKey2020", + "purposes": ["capabilityInvocation"], + "publicKeyJwk": { + "kty": "EC", + "crv": "P-256K", + "x": "PUymIqdtF_qxaAqPABSw-C-owT1KYYQbsMKFM-L9fJA", + "y": "nM84jDHCMOTGTh_ZdHq4dBBdo4Z5PkEOW9jA8z8IsGc" + } + }, + { + "id": "invocation-only", + "type": "JsonWebKey2020", + "purposes": 
["capabilityInvocation"], + "publicKeyJwk": { + "kty": "EC", + "crv": "P-256K", + "x": "PUymIqdtF_qxaAqPABSw-C-owT1KYYQbsMKFM-L9fJA", + "y": "nM84jDHCMOTGTh_ZdHq4dBBdo4Z5PkEOW9jA8z8IsGc" + } + }, + { + "id": "dual-delegation-gen", + "type": "JsonWebKey2020", + "purposes": ["capabilityDelegation"], + "publicKeyJwk": { + "kty": "EC", + "crv": "P-256K", + "x": "PUymIqdtF_qxaAqPABSw-C-owT1KYYQbsMKFM-L9fJA", + "y": "nM84jDHCMOTGTh_ZdHq4dBBdo4Z5PkEOW9jA8z8IsGc" + } + }, + { + "id": "delegation-only", + "type": "JsonWebKey2020", + "purposes": ["capabilityDelegation"], + "publicKeyJwk": { + "kty": "EC", + "crv": "P-256K", + "x": "PUymIqdtF_qxaAqPABSw-C-owT1KYYQbsMKFM-L9fJA", + "y": "nM84jDHCMOTGTh_ZdHq4dBBdo4Z5PkEOW9jA8z8IsGc" + } + }, + { + "id": "general-only", + "type": "JsonWebKey2020", + "publicKeyJwk": { + "kty": "EC", + "crv": "P-256K", + "x": "PUymIqdtF_qxaAqPABSw-C-owT1KYYQbsMKFM-L9fJA", + "y": "nM84jDHCMOTGTh_ZdHq4dBBdo4Z5PkEOW9jA8z8IsGc" + } + } + ], + "service": [ + { + "id": "hub", + "type": "IdentityHub", + "routingKeys": "routingKeysValue", + "recipientKeys": "recipientKeysValue", + "serviceEndpoint": "https://example.com/hub/" + } + ] +} \ No newline at end of file diff --git a/pkg/versions/1_0/operationparser/patchvalidator/validator.go b/pkg/versions/1_0/operationparser/patchvalidator/validator.go new file mode 100644 index 0000000..4521e8c --- /dev/null +++ b/pkg/versions/1_0/operationparser/patchvalidator/validator.go @@ -0,0 +1,34 @@ +package patchvalidator + +import ( + "fmt" + + "github.com/trustbloc/sidetree-go/pkg/patch" +) + +// Validate validates patch. +func Validate(p patch.Patch) error { + action, err := p.GetAction() + if err != nil { + return err + } + + switch action { + case patch.Replace: + return NewReplaceValidator().Validate(p) + case patch.JSONPatch: + return NewJSONValidator().Validate(p) + case patch.AddPublicKeys: + return NewAddPublicKeysValidator().Validate(p) + case patch.RemovePublicKeys: + return NewRemovePublicKeysValidator().Validate(p) + case patch.AddServiceEndpoints: + return NewAddServicesValidator().Validate(p) + case patch.RemoveServiceEndpoints: + return NewRemoveServicesValidator().Validate(p) + case patch.AddAlsoKnownAs, patch.RemoveAlsoKnownAs: + return NewAlsoKnownAsValidator().Validate(p) + } + + return fmt.Errorf(" validation for action '%s' is not supported", action) +} diff --git a/pkg/versions/1_0/operationparser/patchvalidator/validator_test.go b/pkg/versions/1_0/operationparser/patchvalidator/validator_test.go new file mode 100644 index 0000000..9392fb3 --- /dev/null +++ b/pkg/versions/1_0/operationparser/patchvalidator/validator_test.go @@ -0,0 +1,82 @@ +/* +Copyright Gen Digital Inc. All Rights Reserved. 
+ +SPDX-License-Identifier: Apache-2.0 +*/ + +package patchvalidator + +import ( + "testing" + + "github.com/stretchr/testify/require" + + "github.com/trustbloc/sidetree-go/pkg/patch" +) + +func TestValidate(t *testing.T) { + t.Run("success - add public keys", func(t *testing.T) { + p, err := patch.FromBytes([]byte(addPublicKeysPatch)) + require.NoError(t, err) + + err = Validate(p) + require.NoError(t, err) + }) + t.Run("success - remove public keys", func(t *testing.T) { + p, err := patch.FromBytes([]byte(removePublicKeysPatch)) + require.NoError(t, err) + + err = Validate(p) + require.NoError(t, err) + }) + t.Run("success - add service endpoints", func(t *testing.T) { + p, err := patch.FromBytes([]byte(addServiceEndpoints)) + require.NoError(t, err) + + err = Validate(p) + require.NoError(t, err) + }) + t.Run("success - remove service endpoints", func(t *testing.T) { + p, err := patch.FromBytes([]byte(removeServiceEndpoints)) + require.NoError(t, err) + + err = Validate(p) + require.NoError(t, err) + }) + t.Run("success - add also known as", func(t *testing.T) { + p, err := patch.FromBytes([]byte(addAlsoKnownAs)) + require.NoError(t, err) + + err = Validate(p) + require.NoError(t, err) + }) + t.Run("success - remove also known as", func(t *testing.T) { + p, err := patch.FromBytes([]byte(removeAlsoKnownAs)) + require.NoError(t, err) + + err = Validate(p) + require.NoError(t, err) + }) + t.Run("success - ietf patch", func(t *testing.T) { + p, err := patch.FromBytes([]byte(ietfPatch)) + require.NoError(t, err) + + err = Validate(p) + require.NoError(t, err) + }) + t.Run("success - replace patch", func(t *testing.T) { + p, err := patch.FromBytes([]byte(replacePatch)) + require.NoError(t, err) + + err = Validate(p) + require.NoError(t, err) + }) + t.Run("error - patch not supported", func(t *testing.T) { + p := make(patch.Patch) + p[patch.ActionKey] = "invalid" + + err := Validate(p) + require.Error(t, err) + require.Contains(t, err.Error(), "action 'invalid' is not supported") + }) +} diff --git a/pkg/versions/1_0/operationparser/recover.go b/pkg/versions/1_0/operationparser/recover.go new file mode 100644 index 0000000..814b07f --- /dev/null +++ b/pkg/versions/1_0/operationparser/recover.go @@ -0,0 +1,268 @@ +/* +Copyright Gen Digital Inc. All Rights Reserved. + +SPDX-License-Identifier: Apache-2.0 +*/ + +package operationparser + +import ( + "encoding/json" + "fmt" + + "github.com/pkg/errors" + + "github.com/trustbloc/sidetree-go/pkg/api/operation" + "github.com/trustbloc/sidetree-go/pkg/commitment" + "github.com/trustbloc/sidetree-go/pkg/encoder" + "github.com/trustbloc/sidetree-go/pkg/hashing" + "github.com/trustbloc/sidetree-go/pkg/jws" + internal "github.com/trustbloc/sidetree-go/pkg/jwsutil" + "github.com/trustbloc/sidetree-go/pkg/versions/1_0/model" +) + +// ParseRecoverOperation will parse recover operation. 
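+// When batch is false the anchor origin, anchor time window, delta and
+// commitment uniqueness are validated as well; in batch mode those checks
+// are skipped and only the request, its signed data and the reveal value
+// are verified.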
+func (p *Parser) ParseRecoverOperation(request []byte, batch bool) (*model.Operation, error) { + schema, err := p.parseRecoverRequest(request) + if err != nil { + return nil, err + } + + signedData, err := p.ParseSignedDataForRecover(schema.SignedData) + if err != nil { + return nil, err + } + + if !batch { + err = p.anchorOriginValidator.Validate(signedData.AnchorOrigin) + if err != nil { + return nil, err + } + + until := p.getAnchorUntil(signedData.AnchorFrom, signedData.AnchorUntil) + + err = p.anchorTimeValidator.Validate(signedData.AnchorFrom, until) + if err != nil { + return nil, err + } + + err = p.ValidateDelta(schema.Delta) + if err != nil { + return nil, err + } + + if schema.Delta.UpdateCommitment == signedData.RecoveryCommitment { + return nil, errors.New("recovery and update commitments cannot be equal, re-using public keys is not allowed") + } + } + + err = hashing.IsValidModelMultihash(signedData.RecoveryKey, schema.RevealValue) + if err != nil { + return nil, fmt.Errorf("canonicalized recovery public key hash doesn't match reveal value: %s", err.Error()) + } + + return &model.Operation{ + OperationRequest: request, + Type: operation.TypeRecover, + UniqueSuffix: schema.DidSuffix, + Delta: schema.Delta, + SignedData: schema.SignedData, + RevealValue: schema.RevealValue, + AnchorOrigin: signedData.AnchorOrigin, + }, nil +} + +func (p *Parser) parseRecoverRequest(payload []byte) (*model.RecoverRequest, error) { + schema := &model.RecoverRequest{} + err := json.Unmarshal(payload, schema) + if err != nil { + return nil, fmt.Errorf("failed to unmarshal recover request: %s", err.Error()) + } + + if err := p.validateRecoverRequest(schema); err != nil { + return nil, err + } + + return schema, nil +} + +// ParseSignedDataForRecover will parse and validate signed data for recover. 
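+// The compact JWS is parsed and its protected headers are checked against
+// the allowed signature algorithms before the payload is unmarshalled into
+// the recover signed data model and validated.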
+func (p *Parser) ParseSignedDataForRecover(compactJWS string) (*model.RecoverSignedDataModel, error) { + signedData, err := p.parseSignedData(compactJWS) + if err != nil { + return nil, err + } + + schema := &model.RecoverSignedDataModel{} + err = json.Unmarshal(signedData.Payload, schema) + if err != nil { + return nil, fmt.Errorf("failed to unmarshal signed data model for recover: %s", err.Error()) + } + + if err := p.validateSignedDataForRecovery(schema); err != nil { + return nil, fmt.Errorf("validate signed data for recovery: %s", err.Error()) + } + + return schema, nil +} + +func (p *Parser) validateSignedDataForRecovery(signedData *model.RecoverSignedDataModel) error { + if err := p.validateSigningKey(signedData.RecoveryKey); err != nil { + return err + } + + if err := p.validateMultihash(signedData.RecoveryCommitment, "recovery commitment"); err != nil { + return err + } + + if err := p.validateMultihash(signedData.DeltaHash, "delta hash"); err != nil { + return err + } + + return p.validateCommitment(signedData.RecoveryKey, signedData.RecoveryCommitment) +} + +func (p *Parser) parseSignedData(compactJWS string) (*internal.JSONWebSignature, error) { + if compactJWS == "" { + return nil, errors.New("missing signed data") + } + + sig, err := internal.ParseJWS(compactJWS) + if err != nil { + return nil, fmt.Errorf("failed to parse signed data: %s", err.Error()) + } + + err = p.validateProtectedHeaders(sig.ProtectedHeaders, p.SignatureAlgorithms) + if err != nil { + return nil, fmt.Errorf("failed to parse signed data: %s", err.Error()) + } + + return sig, nil +} + +func (p *Parser) validateProtectedHeaders(headers jws.Headers, allowedAlgorithms []string) error { + if headers == nil { + return errors.New("missing protected headers") + } + + // kid MAY be present in the protected header. + // alg MUST be present in the protected header, its value MUST NOT be none. + // no additional members may be present in the protected header. 
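+	//
+	// For example, a protected header of {"alg":"ES256","kid":"key-1"} is
+	// accepted (provided "ES256" is in the allowed algorithm list), while a
+	// header that omits "alg", leaves it empty, or carries an extra member
+	// such as "typ" is rejected by the checks below.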
+
+	alg, ok := headers.Algorithm()
+	if !ok {
+		return errors.New("algorithm must be present in the protected header")
+	}
+
+	if alg == "" {
+		return errors.New("algorithm cannot be empty in the protected header")
+	}
+
+	allowedHeaders := map[string]bool{
+		jws.HeaderAlgorithm: true,
+		jws.HeaderKeyID:     true,
+	}
+
+	for k := range headers {
+		if _, ok := allowedHeaders[k]; !ok {
+			return fmt.Errorf("invalid protected header: %s", k)
+		}
+	}
+
+	if !contains(allowedAlgorithms, alg) {
+		return errors.Errorf("algorithm '%s' is not in the allowed list %v", alg, allowedAlgorithms)
+	}
+
+	return nil
+}
+
+func (p *Parser) validateRecoverRequest(req *model.RecoverRequest) error {
+	if req.DidSuffix == "" {
+		return errors.New("missing did suffix")
+	}
+
+	if req.SignedData == "" {
+		return errors.New("missing signed data")
+	}
+
+	return p.validateMultihash(req.RevealValue, "reveal value")
+}
+
+func (p *Parser) validateSigningKey(key *jws.JWK) error {
+	if key == nil {
+		return errors.New("missing signing key")
+	}
+
+	// validate mandatory values
+	err := key.Validate()
+	if err != nil {
+		return fmt.Errorf("signing key validation failed: %s", err.Error())
+	}
+
+	// validate key algorithm
+	if !contains(p.KeyAlgorithms, key.Crv) {
+		return errors.Errorf("key algorithm '%s' is not in the allowed list %v", key.Crv, p.KeyAlgorithms)
+	}
+
+	// validate optional nonce
+	err = p.validateNonce(key.Nonce)
+	if err != nil {
+		return fmt.Errorf("validate signing key nonce: %s", err.Error())
+	}
+
+	return nil
+}
+
+func contains(values []string, value string) bool {
+	for _, v := range values {
+		if v == value {
+			return true
+		}
+	}
+
+	return false
+}
+
+func (p *Parser) validateCommitment(jwk *jws.JWK, nextCommitment string) error {
+	code, err := hashing.GetMultihashCode(nextCommitment)
+	if err != nil {
+		return err
+	}
+
+	currentCommitment, err := commitment.GetCommitment(jwk, uint(code))
+	if err != nil {
+		return fmt.Errorf("calculate current commitment: %s", err.Error())
+	}
+
+	if currentCommitment == nextCommitment {
+		return errors.New("re-using public keys for commitment is not allowed")
+	}
+
+	return nil
+}
+
+func (p *Parser) validateNonce(nonce string) error {
+	// nonce is optional
+	if nonce == "" {
+		return nil
+	}
+
+	nonceBytes, err := encoder.DecodeString(nonce)
+	if err != nil {
+		return fmt.Errorf("failed to decode nonce '%s': %s", nonce, err.Error())
+	}
+
+	if len(nonceBytes) != int(p.NonceSize) {
+		return fmt.Errorf("nonce size '%d' doesn't match configured nonce size '%d'", len(nonceBytes), p.NonceSize)
+	}
+
+	return nil
+}
+
+// getAnchorUntil returns the effective anchor-until time; when only
+// anchor-from is supplied, it defaults to anchor-from plus the protocol's
+// maximum operation time delta.
+func (p *Parser) getAnchorUntil(from, until int64) int64 {
+	if from != 0 && until == 0 {
+		return from + int64(p.MaxOperationTimeDelta)
+	}
+
+	return until
+}
diff --git a/pkg/versions/1_0/operationparser/recover_test.go b/pkg/versions/1_0/operationparser/recover_test.go
new file mode 100644
index 0000000..89693e1
--- /dev/null
+++ b/pkg/versions/1_0/operationparser/recover_test.go
@@ -0,0 +1,569 @@
+/*
+Copyright Gen Digital Inc. All Rights Reserved.
+ +SPDX-License-Identifier: Apache-2.0 +*/ + +package operationparser + +import ( + "encoding/json" + "testing" + + "github.com/stretchr/testify/require" + + "github.com/trustbloc/sidetree-go/pkg/api/operation" + "github.com/trustbloc/sidetree-go/pkg/api/protocol" + "github.com/trustbloc/sidetree-go/pkg/commitment" + "github.com/trustbloc/sidetree-go/pkg/encoder" + "github.com/trustbloc/sidetree-go/pkg/jws" + internal "github.com/trustbloc/sidetree-go/pkg/jwsutil" + "github.com/trustbloc/sidetree-go/pkg/patch" + "github.com/trustbloc/sidetree-go/pkg/util/signutil" + "github.com/trustbloc/sidetree-go/pkg/versions/1_0/model" +) + +const ( + kidKey = "kid" + algKey = "alg" +) + +func TestParseRecoverOperation(t *testing.T) { + p := protocol.Protocol{ + MaxOperationHashLength: maxHashLength, + MaxDeltaSize: maxDeltaSize, + MultihashAlgorithms: []uint{sha2_256}, + SignatureAlgorithms: []string{"alg"}, + KeyAlgorithms: []string{"crv"}, + Patches: []string{"add-public-keys", "remove-public-keys", "add-services", "remove-services", "ietf-json-patch"}, + } + + parser := New(p) + + t.Run("success", func(t *testing.T) { + request, err := getRecoverRequestBytes() + require.NoError(t, err) + + op, err := parser.ParseRecoverOperation(request, false) + require.NoError(t, err) + require.Equal(t, operation.TypeRecover, op.Type) + + signedData, err := parser.ParseSignedDataForRecover(op.SignedData) + require.NoError(t, err) + + expectedRevealValue, err := commitment.GetRevealValue(signedData.RecoveryKey, sha2_256) + require.NoError(t, err) + + require.Equal(t, expectedRevealValue, op.RevealValue) + }) + t.Run("parse recover request error", func(t *testing.T) { + schema, err := parser.ParseRecoverOperation([]byte(""), false) + require.Error(t, err) + require.Nil(t, schema) + require.Contains(t, err.Error(), "unexpected end of JSON input") + }) + t.Run("validate recover request", func(t *testing.T) { + recoverRequest, err := getDefaultRecoverRequest() + require.NoError(t, err) + + recoverRequest.DidSuffix = "" + request, err := json.Marshal(recoverRequest) + require.NoError(t, err) + + op, err := parser.ParseRecoverOperation(request, false) + require.Error(t, err) + require.Nil(t, op) + require.Contains(t, err.Error(), "missing did suffix") + }) + t.Run("parse patch data error", func(t *testing.T) { + recoverRequest, err := getDefaultRecoverRequest() + require.NoError(t, err) + + recoverRequest.Delta = &model.DeltaModel{} + request, err := json.Marshal(recoverRequest) + require.NoError(t, err) + + op, err := parser.ParseRecoverOperation(request, false) + require.Error(t, err) + require.Contains(t, err.Error(), "missing patches") + require.Nil(t, op) + }) + t.Run("validate patch data error", func(t *testing.T) { + delta, err := getDelta() + require.NoError(t, err) + + delta.Patches = []patch.Patch{} + recoverRequest, err := getRecoverRequest(delta, getSignedDataForRecovery()) + require.NoError(t, err) + + request, err := json.Marshal(recoverRequest) + require.NoError(t, err) + + op, err := parser.ParseRecoverOperation(request, false) + require.Error(t, err) + require.Contains(t, err.Error(), "missing patches") + require.Nil(t, op) + }) + t.Run("parse signed data error", func(t *testing.T) { + recoverRequest, err := getDefaultRecoverRequest() + require.NoError(t, err) + + recoverRequest.SignedData = invalid + request, err := json.Marshal(recoverRequest) + require.NoError(t, err) + + op, err := parser.ParseRecoverOperation(request, false) + require.Error(t, err) + require.Contains(t, err.Error(), "invalid JWS 
compact format") + require.Nil(t, op) + }) + t.Run("parse signed data error - unmarshal failed", func(t *testing.T) { + recoverRequest, err := getDefaultRecoverRequest() + require.NoError(t, err) + + compactJWS, err := signutil.SignPayload([]byte("payload"), NewMockSigner()) + require.NoError(t, err) + + recoverRequest.SignedData = compactJWS + request, err := json.Marshal(recoverRequest) + require.NoError(t, err) + + op, err := parser.ParseRecoverOperation(request, false) + require.Error(t, err) + require.Contains(t, err.Error(), "failed to unmarshal signed data model for recover") + require.Nil(t, op) + }) + t.Run("validate signed data error", func(t *testing.T) { + signedData := getSignedDataForRecovery() + signedData.RecoveryKey = &jws.JWK{} + + delta, err := getDelta() + require.NoError(t, err) + + recoverRequest, err := getRecoverRequest(delta, signedData) + require.NoError(t, err) + + request, err := json.Marshal(recoverRequest) + require.NoError(t, err) + + op, err := parser.ParseRecoverOperation(request, false) + require.Error(t, err) + require.Contains(t, err.Error(), "validate signed data for recovery: signing key validation failed: JWK crv is missing") + require.Nil(t, op) + }) + + t.Run("error - update commitment equals recovery commitment", func(t *testing.T) { + signedData := getSignedDataForRecovery() + + delta, err := getDelta() + require.NoError(t, err) + + delta.UpdateCommitment = signedData.RecoveryCommitment + recoverRequest, err := getRecoverRequest(delta, signedData) + require.NoError(t, err) + + request, err := json.Marshal(recoverRequest) + require.NoError(t, err) + + op, err := parser.ParseRecoverOperation(request, false) + require.Error(t, err) + require.Contains(t, err.Error(), "recovery and update commitments cannot be equal, re-using public keys is not allowed") + require.Nil(t, op) + }) + + t.Run("error - current commitment cannot equal recovery commitment", func(t *testing.T) { + signedData := getSignedDataForRecovery() + + recoveryCommitment, err := commitment.GetCommitment(signedData.RecoveryKey, sha2_256) + require.NoError(t, err) + + signedData.RecoveryCommitment = recoveryCommitment + + delta, err := getDelta() + require.NoError(t, err) + + recoverRequest, err := getRecoverRequest(delta, signedData) + require.NoError(t, err) + + request, err := json.Marshal(recoverRequest) + require.NoError(t, err) + + op, err := parser.ParseRecoverOperation(request, false) + require.Error(t, err) + require.Contains(t, err.Error(), "re-using public keys for commitment is not allowed") + require.Nil(t, op) + }) +} + +func TestValidateSignedDataForRecovery(t *testing.T) { + p := protocol.Protocol{ + MaxOperationHashLength: maxHashLength, + MultihashAlgorithms: []uint{sha2_256}, + KeyAlgorithms: []string{"crv"}, + } + + parser := New(p) + + t.Run("success", func(t *testing.T) { + signed := getSignedDataForRecovery() + err := parser.validateSignedDataForRecovery(signed) + require.NoError(t, err) + }) + t.Run("invalid patch data hash", func(t *testing.T) { + signed := getSignedDataForRecovery() + signed.DeltaHash = "" + err := parser.validateSignedDataForRecovery(signed) + require.Error(t, err) + require.Contains(t, err.Error(), "delta hash is not computed with the required hash algorithms: [18]") + }) + t.Run("invalid next recovery commitment hash", func(t *testing.T) { + signed := getSignedDataForRecovery() + signed.RecoveryCommitment = "" + err := parser.validateSignedDataForRecovery(signed) + require.Error(t, err) + require.Contains(t, err.Error(), "recovery commitment 
is not computed with the required hash algorithms: [18]") + }) + t.Run("recovery commitment exceeds maximum hash length", func(t *testing.T) { + lowMaxHashLength := protocol.Protocol{ + MaxOperationHashLength: 10, + MultihashAlgorithms: []uint{sha2_256}, + KeyAlgorithms: []string{"crv"}, + } + + signed := getSignedDataForRecovery() + + err := New(lowMaxHashLength).validateSignedDataForRecovery(signed) + require.Error(t, err) + require.Contains(t, err.Error(), "recovery commitment length[46] exceeds maximum hash length[10]") + }) +} + +func TestParseSignedData(t *testing.T) { + mockSigner := NewMockSigner() + + p := protocol.Protocol{ + MultihashAlgorithms: []uint{sha2_256}, + SignatureAlgorithms: []string{"alg"}, + } + + parser := New(p) + + t.Run("success", func(t *testing.T) { + jwsSignature, err := internal.NewJWS(nil, nil, []byte("payload"), mockSigner) + require.NoError(t, err) + + compactJWS, err := jwsSignature.SerializeCompact(false) + require.NoError(t, err) + + jws, err := parser.parseSignedData(compactJWS) + require.NoError(t, err) + require.NotNil(t, jws) + }) + t.Run("missing signed data", func(t *testing.T) { + jws, err := parser.parseSignedData("") + require.Error(t, err) + require.Nil(t, jws) + require.Contains(t, err.Error(), "missing signed data") + }) + t.Run("missing protected headers", func(t *testing.T) { + jws, err := parser.parseSignedData(".cGF5bG9hZA.c2lnbmF0dXJl") + require.Error(t, err) + require.Nil(t, jws) + require.Contains(t, err.Error(), "unmarshal JSON headers: unexpected end of JSON input") + }) + t.Run("missing payload", func(t *testing.T) { + jwsSignature, err := internal.NewJWS(nil, nil, nil, mockSigner) + require.NoError(t, err) + + compactJWS, err := jwsSignature.SerializeCompact(false) + require.NoError(t, err) + + jws, err := parser.parseSignedData(compactJWS) + require.Error(t, err) + require.Nil(t, jws) + require.Contains(t, err.Error(), "compact jws payload is empty") + }) + t.Run("missing signature", func(t *testing.T) { + jws, err := parser.parseSignedData("eyJhbGciOiJhbGciLCJraWQiOiJraWQifQ.cGF5bG9hZA.") + require.Error(t, err) + require.Nil(t, jws) + require.Contains(t, err.Error(), "compact jws signature is empty") + }) + t.Run("error - invalid signing algorithm", func(t *testing.T) { + jwsSignature, err := internal.NewJWS(nil, nil, []byte("payload"), mockSigner) + require.NoError(t, err) + + compactJWS, err := jwsSignature.SerializeCompact(false) + require.NoError(t, err) + + parser := New(protocol.Protocol{ + SignatureAlgorithms: []string{"other"}, + }) + + jws, err := parser.parseSignedData(compactJWS) + require.Error(t, err) + require.Nil(t, jws) + require.Contains(t, err.Error(), "failed to parse signed data: algorithm 'alg' is not in the allowed list [other]") + }) +} + +func TestValidateSigningKey(t *testing.T) { + testJWK := &jws.JWK{ + Kty: "kty", + Crv: "crv", + X: "x", + } + + parser := New(protocol.Protocol{KeyAlgorithms: []string{"crv"}, NonceSize: 16}) + + t.Run("success", func(t *testing.T) { + err := parser.validateSigningKey(testJWK) + require.NoError(t, err) + }) + + t.Run("error - required info is missing (kty)", func(t *testing.T) { + err := parser.validateSigningKey(&jws.JWK{ + Crv: "crv", + X: "x", + }) + require.Error(t, err) + require.Contains(t, err.Error(), "signing key validation failed: JWK kty is missing") + }) + + t.Run("error - key algorithm not supported", func(t *testing.T) { + err := New(protocol.Protocol{KeyAlgorithms: []string{"other"}}).validateSigningKey(testJWK) + require.Error(t, err) + 
require.Contains(t, err.Error(), "key algorithm 'crv' is not in the allowed list [other]") + }) + + t.Run("error - failed to decode signing key nonce", func(t *testing.T) { + nonceJWK := &jws.JWK{ + Kty: "kty", + Crv: "crv", + X: "x", + Nonce: "nonce", + } + + err := parser.validateSigningKey(nonceJWK) + require.Error(t, err) + require.Contains(t, err.Error(), "validate signing key nonce: failed to decode nonce 'nonce': illegal base64 data") + }) + + t.Run("error - failed to validate nonce size", func(t *testing.T) { + nonceJWK := &jws.JWK{ + Kty: "kty", + Crv: "crv", + X: "x", + Nonce: encoder.EncodeToString([]byte("nonce")), + } + + err := parser.validateSigningKey(nonceJWK) + require.Error(t, err) + require.Contains(t, err.Error(), "validate signing key nonce: nonce size '5' doesn't match configured nonce size '16'") + }) + + t.Run("success - valid nonce size", func(t *testing.T) { + nonceJWK := &jws.JWK{ + Kty: "kty", + Crv: "crv", + X: "x", + Nonce: encoder.EncodeToString([]byte("nonce")), + } + + parserWithNonceSize := New(protocol.Protocol{ + KeyAlgorithms: []string{"crv"}, + NonceSize: 5, + }) + + err := parserWithNonceSize.validateSigningKey(nonceJWK) + require.NoError(t, err) + }) +} + +func TestValidateRecoverRequest(t *testing.T) { + parser := New(protocol.Protocol{MaxOperationHashLength: maxHashLength, MultihashAlgorithms: []uint{sha2_256}}) + + t.Run("success", func(t *testing.T) { + recover, err := getDefaultRecoverRequest() + require.NoError(t, err) + + err = parser.validateRecoverRequest(recover) + require.NoError(t, err) + }) + t.Run("missing signed data", func(t *testing.T) { + recover, err := getDefaultRecoverRequest() + require.NoError(t, err) + recover.SignedData = "" + + err = parser.validateRecoverRequest(recover) + require.Error(t, err) + require.Contains(t, err.Error(), "missing signed data") + }) + t.Run("missing did suffix", func(t *testing.T) { + recover, err := getDefaultRecoverRequest() + require.NoError(t, err) + recover.DidSuffix = "" + + err = parser.validateRecoverRequest(recover) + require.Error(t, err) + require.Contains(t, err.Error(), "missing did suffix") + }) + + t.Run("invalid reveal value", func(t *testing.T) { + recover, err := getDefaultRecoverRequest() + require.NoError(t, err) + recover.RevealValue = "reveal" + + err = parser.validateRecoverRequest(recover) + require.Error(t, err) + require.Contains(t, err.Error(), "reveal value is not computed with the required hash algorithms: [18]") + }) +} + +func TestValidateProtectedHeader(t *testing.T) { + algs := []string{"alg-1", "alg-2"} + + parser := New(protocol.Protocol{}) + + t.Run("success - kid can be empty", func(t *testing.T) { + protected := getHeaders("alg-1", "") + + err := parser.validateProtectedHeaders(protected, algs) + require.NoError(t, err) + }) + t.Run("success - kid can be provided", func(t *testing.T) { + protected := getHeaders("alg-1", "kid-1") + + err := parser.validateProtectedHeaders(protected, algs) + require.NoError(t, err) + }) + t.Run("error - missing header", func(t *testing.T) { + err := parser.validateProtectedHeaders(nil, algs) + require.Error(t, err) + require.Contains(t, err.Error(), "missing protected headers") + }) + + t.Run("err - algorithm must be present in the protected header", func(t *testing.T) { + protected := make(jws.Headers) + protected[kidKey] = "kid-1" + + err := parser.validateProtectedHeaders(protected, algs) + require.Error(t, err) + require.Contains(t, err.Error(), "algorithm must be present in the protected header") + }) + + t.Run("err - 
algorithm cannot be empty", func(t *testing.T) { + protected := getHeaders("", "kid-1") + + err := parser.validateProtectedHeaders(protected, algs) + require.Error(t, err) + require.Contains(t, err.Error(), "algorithm cannot be empty in the protected header") + }) + + t.Run("err - invalid protected header value", func(t *testing.T) { + protected := make(jws.Headers) + + protected["kid"] = "kid" + protected["alg"] = "alg" + protected["other"] = "value" + + err := parser.validateProtectedHeaders(protected, algs) + require.Error(t, err) + require.Contains(t, err.Error(), "invalid protected header: other") + }) + t.Run("error - algorithm not allowed", func(t *testing.T) { + protected := getHeaders("alg-other", "kid") + + err := parser.validateProtectedHeaders(protected, algs) + require.Error(t, err) + require.Equal(t, "algorithm 'alg-other' is not in the allowed list [alg-1 alg-2]", err.Error()) + }) +} + +func getHeaders(alg, kid string) jws.Headers { + header := make(jws.Headers) + header[algKey] = alg + header[kidKey] = kid + + return header +} + +func getRecoverRequest(delta *model.DeltaModel, signedData *model.RecoverSignedDataModel) (*model.RecoverRequest, error) { + compactJWS, err := signutil.SignModel(signedData, NewMockSigner()) + if err != nil { + return nil, err + } + + rv, err := commitment.GetRevealValue(signedData.RecoveryKey, sha2_256) + if err != nil { + return nil, err + } + + return &model.RecoverRequest{ + Operation: operation.TypeRecover, + DidSuffix: "suffix", + Delta: delta, + SignedData: compactJWS, + RevealValue: rv, + }, nil +} + +func getDefaultRecoverRequest() (*model.RecoverRequest, error) { + delta, err := getDelta() + if err != nil { + return nil, err + } + + return getRecoverRequest(delta, getSignedDataForRecovery()) +} + +func getSignedDataForRecovery() *model.RecoverSignedDataModel { + return &model.RecoverSignedDataModel{ + RecoveryKey: &jws.JWK{ + Kty: "kty", + Crv: "crv", + X: "x", + }, + RecoveryCommitment: computeMultihash([]byte("recoveryReveal")), + DeltaHash: computeMultihash([]byte("operation")), + } +} + +func getRecoverRequestBytes() ([]byte, error) { + req, err := getDefaultRecoverRequest() + if err != nil { + return nil, err + } + + return json.Marshal(req) +} + +// MockSigner implements signer interface. +type MockSigner struct { + MockSignature []byte + MockHeaders jws.Headers + Err error +} + +// New creates new mock signer (default to recovery signer). +func NewMockSigner() *MockSigner { + headers := make(jws.Headers) + headers[jws.HeaderAlgorithm] = "alg" + headers[jws.HeaderKeyID] = "kid" + + return &MockSigner{MockHeaders: headers, MockSignature: []byte("signature")} +} + +// Headers provides required JWS protected headers. It provides information about signing key and algorithm. +func (ms *MockSigner) Headers() jws.Headers { + return ms.MockHeaders +} + +// Sign signs msg and returns mock signature value. +func (ms *MockSigner) Sign(msg []byte) ([]byte, error) { + if ms.Err != nil { + return nil, ms.Err + } + + return ms.MockSignature, nil +} diff --git a/pkg/versions/1_0/operationparser/update.go b/pkg/versions/1_0/operationparser/update.go new file mode 100644 index 0000000..04c76bb --- /dev/null +++ b/pkg/versions/1_0/operationparser/update.go @@ -0,0 +1,117 @@ +/* +Copyright Gen Digital Inc. All Rights Reserved. 
+ +SPDX-License-Identifier: Apache-2.0 +*/ + +package operationparser + +import ( + "encoding/json" + "errors" + "fmt" + + "github.com/trustbloc/sidetree-go/pkg/api/operation" + "github.com/trustbloc/sidetree-go/pkg/hashing" + "github.com/trustbloc/sidetree-go/pkg/versions/1_0/model" +) + +// ParseUpdateOperation will parse update operation. +func (p *Parser) ParseUpdateOperation(request []byte, batch bool) (*model.Operation, error) { + schema, err := p.parseUpdateRequest(request) + if err != nil { + return nil, err + } + + signedData, err := p.ParseSignedDataForUpdate(schema.SignedData) + if err != nil { + return nil, err + } + + if !batch { + until := p.getAnchorUntil(signedData.AnchorFrom, signedData.AnchorUntil) + + err = p.anchorTimeValidator.Validate(signedData.AnchorFrom, until) + if err != nil { + return nil, err + } + + err = p.ValidateDelta(schema.Delta) + if err != nil { + return nil, err + } + + err = p.validateCommitment(signedData.UpdateKey, schema.Delta.UpdateCommitment) + if err != nil { + return nil, fmt.Errorf("calculate current commitment: %s", err.Error()) + } + } + + err = hashing.IsValidModelMultihash(signedData.UpdateKey, schema.RevealValue) + if err != nil { + return nil, fmt.Errorf("canonicalized update public key hash doesn't match reveal value: %s", err.Error()) + } + + return &model.Operation{ + Type: operation.TypeUpdate, + OperationRequest: request, + UniqueSuffix: schema.DidSuffix, + Delta: schema.Delta, + SignedData: schema.SignedData, + RevealValue: schema.RevealValue, + }, nil +} + +func (p *Parser) parseUpdateRequest(payload []byte) (*model.UpdateRequest, error) { + schema := &model.UpdateRequest{} + err := json.Unmarshal(payload, schema) + if err != nil { + return nil, fmt.Errorf("failed to unmarshal update request: %s", err.Error()) + } + + if err := p.validateUpdateRequest(schema); err != nil { + return nil, err + } + + return schema, nil +} + +// ParseSignedDataForUpdate will parse and validate signed data for update. +func (p *Parser) ParseSignedDataForUpdate(compactJWS string) (*model.UpdateSignedDataModel, error) { + jws, err := p.parseSignedData(compactJWS) + if err != nil { + return nil, err + } + + schema := &model.UpdateSignedDataModel{} + err = json.Unmarshal(jws.Payload, schema) + if err != nil { + return nil, fmt.Errorf("failed to unmarshal signed data model for update: %s", err.Error()) + } + + if err := p.validateSignedDataForUpdate(schema); err != nil { + return nil, fmt.Errorf("validate signed data for update: %s", err.Error()) + } + + return schema, nil +} + +func (p *Parser) validateUpdateRequest(update *model.UpdateRequest) error { + if update.DidSuffix == "" { + return errors.New("missing did suffix") + } + + if update.SignedData == "" { + return errors.New("missing signed data") + } + + return p.validateMultihash(update.RevealValue, "reveal value") +} + +func (p *Parser) validateSignedDataForUpdate(signedData *model.UpdateSignedDataModel) error { + if err := p.validateSigningKey(signedData.UpdateKey); err != nil { + return err + } + + return p.validateMultihash(signedData.DeltaHash, "delta hash") +} diff --git a/pkg/versions/1_0/operationparser/update_test.go b/pkg/versions/1_0/operationparser/update_test.go new file mode 100644 index 0000000..f75da72 --- /dev/null +++ b/pkg/versions/1_0/operationparser/update_test.go @@ -0,0 +1,322 @@ +/* +Copyright Gen Digital Inc. All Rights Reserved. 
+ +SPDX-License-Identifier: Apache-2.0 +*/ + +package operationparser + +import ( + "encoding/json" + "testing" + + "github.com/stretchr/testify/require" + + "github.com/trustbloc/sidetree-go/pkg/api/operation" + "github.com/trustbloc/sidetree-go/pkg/api/protocol" + "github.com/trustbloc/sidetree-go/pkg/commitment" + "github.com/trustbloc/sidetree-go/pkg/hashing" + "github.com/trustbloc/sidetree-go/pkg/jws" + "github.com/trustbloc/sidetree-go/pkg/patch" + "github.com/trustbloc/sidetree-go/pkg/util/signutil" + "github.com/trustbloc/sidetree-go/pkg/versions/1_0/model" +) + +func TestParseUpdateOperation(t *testing.T) { + p := protocol.Protocol{ + MaxOperationHashLength: maxHashLength, + MaxDeltaSize: maxDeltaSize, + MultihashAlgorithms: []uint{sha2_256}, + SignatureAlgorithms: []string{"alg"}, + KeyAlgorithms: []string{"crv"}, + Patches: []string{"add-public-keys", "remove-public-keys", "add-services", "remove-services", "ietf-json-patch"}, + } + + parser := New(p) + + t.Run("success", func(t *testing.T) { + payload, err := getUpdateRequestBytes() + require.NoError(t, err) + + op, err := parser.ParseUpdateOperation(payload, false) + require.NoError(t, err) + require.Equal(t, operation.TypeUpdate, op.Type) + + signedData, err := parser.ParseSignedDataForUpdate(op.SignedData) + require.NoError(t, err) + + expectedRevealValue, err := commitment.GetRevealValue(signedData.UpdateKey, sha2_256) + require.NoError(t, err) + + require.Equal(t, expectedRevealValue, op.RevealValue) + }) + t.Run("invalid json", func(t *testing.T) { + schema, err := parser.ParseUpdateOperation([]byte(""), false) + require.Error(t, err) + require.Nil(t, schema) + require.Contains(t, err.Error(), "unexpected end of JSON input") + }) + t.Run("validate update request error", func(t *testing.T) { + req, err := getDefaultUpdateRequest() + require.NoError(t, err) + req.DidSuffix = "" + + payload, err := json.Marshal(req) + require.NoError(t, err) + + schema, err := parser.ParseUpdateOperation(payload, false) + require.Error(t, err) + require.Nil(t, schema) + require.Contains(t, err.Error(), "missing did suffix") + }) + t.Run("invalid next update commitment hash", func(t *testing.T) { + delta, err := getUpdateDelta() + require.NoError(t, err) + delta.UpdateCommitment = "" + + req, err := getUpdateRequest(delta) + require.NoError(t, err) + payload, err := json.Marshal(req) + require.NoError(t, err) + + schema, err := parser.ParseUpdateOperation(payload, false) + require.Error(t, err) + require.Nil(t, schema) + require.Contains(t, err.Error(), + "update commitment is not computed with the required hash algorithms: [18]") + }) + t.Run("invalid signed data", func(t *testing.T) { + delta, err := getUpdateDelta() + require.NoError(t, err) + + req, err := getUpdateRequest(delta) + require.NoError(t, err) + + req.SignedData = "." 
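+		// "." contains a single dot (two segments) rather than the three
+		// segments required by JWS compact serialization, so parsing fails.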
+ payload, err := json.Marshal(req) + require.NoError(t, err) + + schema, err := parser.ParseUpdateOperation(payload, false) + require.Error(t, err) + require.Nil(t, schema) + require.Contains(t, err.Error(), "invalid JWS compact format") + }) + t.Run("parse signed data error - unmarshal failed", func(t *testing.T) { + req, err := getDefaultUpdateRequest() + require.NoError(t, err) + + compactJWS, err := signutil.SignPayload([]byte("payload"), NewMockSigner()) + require.NoError(t, err) + + req.SignedData = compactJWS + request, err := json.Marshal(req) + require.NoError(t, err) + + op, err := parser.ParseUpdateOperation(request, false) + require.Error(t, err) + require.Contains(t, err.Error(), "failed to unmarshal signed data model for update") + require.Nil(t, op) + }) + + t.Run("error - current commitment cannot equal update commitment", func(t *testing.T) { + delta, err := getUpdateDelta() + require.NoError(t, err) + + currentCommitment, err := commitment.GetCommitment(testJWK, sha2_256) + require.NoError(t, err) + + delta.UpdateCommitment = currentCommitment + + req, err := getUpdateRequest(delta) + require.NoError(t, err) + + payload, err := json.Marshal(req) + require.NoError(t, err) + + schema, err := parser.ParseUpdateOperation(payload, false) + require.Error(t, err) + require.Nil(t, schema) + require.Contains(t, err.Error(), "re-using public keys for commitment is not allowed") + }) +} + +func TestParseSignedDataForUpdate(t *testing.T) { + p := protocol.Protocol{ + MaxOperationHashLength: maxHashLength, + MultihashAlgorithms: []uint{sha2_256}, + SignatureAlgorithms: []string{"alg"}, + KeyAlgorithms: []string{"crv"}, + } + + parser := New(p) + + t.Run("success", func(t *testing.T) { + req, err := getDefaultUpdateRequest() + require.NoError(t, err) + + schema, err := parser.ParseSignedDataForUpdate(req.SignedData) + require.NoError(t, err) + require.NotNil(t, schema) + }) + t.Run("invalid JWS compact format", func(t *testing.T) { + schema, err := parser.ParseSignedDataForUpdate("invalid") + require.Error(t, err) + require.Nil(t, schema) + require.Contains(t, err.Error(), "invalid JWS compact format") + }) + t.Run("hash not computed with latest algorithm", func(t *testing.T) { + signedModel := model.UpdateSignedDataModel{ + DeltaHash: "hash", + UpdateKey: testJWK, + } + + payload, err := json.Marshal(signedModel) + require.NoError(t, err) + + compactJWS, err := signutil.SignPayload(payload, NewMockSigner()) + require.NoError(t, err) + + schema, err := parser.ParseSignedDataForUpdate(compactJWS) + require.Error(t, err) + require.Nil(t, schema) + require.Contains(t, err.Error(), "delta hash is not computed with the required hash algorithms: [18]") + }) + t.Run("payload not JSON object", func(t *testing.T) { + compactJWS, err := signutil.SignPayload([]byte("test"), NewMockSigner()) + require.NoError(t, err) + + schema, err := parser.ParseSignedDataForUpdate(compactJWS) + require.Error(t, err) + require.Nil(t, schema) + require.Contains(t, err.Error(), "invalid character") + }) +} + +func TestValidateUpdateDelta(t *testing.T) { + t.Run("invalid next update commitment hash", func(t *testing.T) { + p := protocol.Protocol{ + MultihashAlgorithms: []uint{sha2_256}, + Patches: []string{"add-public-keys", "remove-public-keys", "add-services", "remove-services", "ietf-json-patch"}, + } + + parser := New(p) + + delta, err := getUpdateDelta() + require.NoError(t, err) + + delta.UpdateCommitment = "" + err = parser.ValidateDelta(delta) + require.Error(t, err) + require.Contains(t, err.Error(), + 
"update commitment is not computed with the required hash algorithms") + }) +} + +func TestValidateUpdateRequest(t *testing.T) { + parser := New(protocol.Protocol{MaxOperationHashLength: maxHashLength, MultihashAlgorithms: []uint{sha2_256}}) + + t.Run("success", func(t *testing.T) { + update, err := getDefaultUpdateRequest() + require.NoError(t, err) + + err = parser.validateUpdateRequest(update) + require.NoError(t, err) + }) + t.Run("missing signed data", func(t *testing.T) { + update, err := getDefaultUpdateRequest() + require.NoError(t, err) + update.SignedData = "" + + err = parser.validateUpdateRequest(update) + require.Error(t, err) + require.Contains(t, err.Error(), "missing signed data") + }) + t.Run("missing did suffix", func(t *testing.T) { + update, err := getDefaultUpdateRequest() + require.NoError(t, err) + update.DidSuffix = "" + + err = parser.validateUpdateRequest(update) + require.Error(t, err) + require.Contains(t, err.Error(), "missing did suffix") + }) + t.Run("invalid reveal value", func(t *testing.T) { + update, err := getDefaultUpdateRequest() + require.NoError(t, err) + update.RevealValue = "reveal" + + err = parser.validateUpdateRequest(update) + require.Error(t, err) + require.Contains(t, err.Error(), "reveal value is not computed with the required hash algorithms: [18]") + }) +} + +func getUpdateRequest(delta *model.DeltaModel) (*model.UpdateRequest, error) { + deltaHash, err := hashing.CalculateModelMultihash(delta, sha2_256) + if err != nil { + return nil, err + } + + signedModel := model.UpdateSignedDataModel{ + DeltaHash: deltaHash, + UpdateKey: testJWK, + } + + rv, err := commitment.GetRevealValue(testJWK, sha2_256) + if err != nil { + return nil, err + } + + compactJWS, err := signutil.SignModel(signedModel, NewMockSigner()) + if err != nil { + return nil, err + } + + return &model.UpdateRequest{ + DidSuffix: "suffix", + SignedData: compactJWS, + Operation: operation.TypeUpdate, + Delta: delta, + RevealValue: rv, + }, nil +} + +func getDefaultUpdateRequest() (*model.UpdateRequest, error) { + delta, err := getUpdateDelta() + if err != nil { + return nil, err + } + + return getUpdateRequest(delta) +} + +func getUpdateRequestBytes() ([]byte, error) { + req, err := getDefaultUpdateRequest() + if err != nil { + return nil, err + } + + return json.Marshal(req) +} + +func getUpdateDelta() (*model.DeltaModel, error) { + jsonPatch, err := patch.NewJSONPatch(getTestPatch()) + if err != nil { + return nil, err + } + + return &model.DeltaModel{ + UpdateCommitment: computeMultihash([]byte("updateReveal")), + Patches: []patch.Patch{jsonPatch}, + }, nil +} + +func getTestPatch() string { + return `[{"op": "replace", "path": "/name", "value": "Jane"}]` +} + +var testJWK = &jws.JWK{ + Crv: "crv", + Kty: "kty", + X: "x", +}