diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS new file mode 100644 index 00000000..bdd21873 --- /dev/null +++ b/.github/CODEOWNERS @@ -0,0 +1,2 @@ +haskell-src/data/* @mightybyte @sirlensalot @buckie +haskell-src/exec/Chainweb/Coins.hs @mightybyte @sirlensalot @buckie diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index a7634b8e..d35ebec5 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -9,9 +9,9 @@ jobs: strategy: fail-fast: false matrix: - ghc: ['8.8.4'] + ghc: ['8.10.7'] cabal: ['3.4'] - os: ['ubuntu-18.04', 'ubuntu-20.04', 'macOS-latest'] + os: ['ubuntu-20.04', 'ubuntu-22.04', 'macOS-latest'] steps: - name: 'GitHub actions env workaround' @@ -23,7 +23,7 @@ jobs: # Haskell Setup - name: Install GHC and Cabal - uses: haskell/actions/setup@v1.2.3 + uses: haskell/actions/setup@v2.3.3 with: ghc-version: ${{ matrix.ghc }} cabal-version: ${{ matrix.cabal }} @@ -45,15 +45,23 @@ jobs: - name: Update package database run: cabal update - name: Display outdated packages - run: cabal outdated + run: | + cd haskell-src + cabal outdated - name: Install build dependencies - run: cabal build --only-dependencies + run: | + cd haskell-src + cabal build --only-dependencies - name: Build - run: cabal build + run: | + cd haskell-src + cabal build # Upload artifacts - name: Copy build artifact - run: cp `cabal list-bin exe:chainweb-data` . + run: | + cd haskell-src + cp `cabal list-bin exe:chainweb-data` .. - name: Stripping binary run: strip chainweb-data - uses: actions/upload-artifact@v2 @@ -63,4 +71,6 @@ jobs: # Test - name: Test - run: cabal v2-test + run: | + cd haskell-src + cabal v2-test diff --git a/.gitignore b/.gitignore index 3c4fd552..1412c8b4 100644 --- a/.gitignore +++ b/.gitignore @@ -1,11 +1,31 @@ -.stack-work/ -dist* *.swp -*.dump-hi .stylish-haskell.yaml *.db result .nd cwdb-pgdata -/TAGS -/stack.yaml.lock +TAGS +dist +dist-* +cabal-dev +*.o +*.hi +*.hie +*.chi +*.chs.h +*.dyn_o +*.dyn_hi +.hpc +.hsenv +.cabal-sandbox/ +cabal.sandbox.config +*.prof +*.aux +*.hp +*.eventlog +.stack-work/ +cabal.project.local +cabal.project.local~ +.HTF/ +.ghc.environment.* + diff --git a/ChangeLog.md b/ChangeLog.md index 472ed022..73221c29 100644 --- a/ChangeLog.md +++ b/ChangeLog.md @@ -1,6 +1,39 @@ # Changelog -## 2.0.0 (pending) +## 2.1.1 (2023-01-23) + +This is a quick release after 2.1.0 to fix an oversight in the new `/txs/accounts` endpoint. +* Rename `chainid` -> `chain`, `name` -> `token` fields of `/txs/accounts` for consistency (#126) +* A new `minheight` parameter for `/txs/accounts` (#127) +* A `docker.nix` definition for building Ubuntu-based `chainweb-data` docker images using Nix (#84) + +## 2.1.0 (2023-01-17) + +_**IMPORTANT NOTICE**: Please skip this chainweb-data release and go straight to 2.1.1. Shortly after the release, we noticed an oversight in the new `/txs/account` endpoint and decided to correct it with a quick breaking change instead of unnecessarily complicating the API. See [PR #126](https://github.com/kadena-io/chainweb-data/pull/126)._ + +This release drops official support for Ubuntu 18.04 and adds support for Ubuntu 22.04 (see #100). + +This is the last version that uses `beam-automigrate` for managing the database schema; from this version on, we'll switch to managing the schema using incremental migration scripts (see #101, #102, #104). 
When future versions of `chainweb-data` need to migrate the database from a version earlier than 2.1.0, they will ask the user to first run 2.1.0 to prepare their database for incremental migrations. + +- A new `/txs/account` endpoint for fetching the incoming and outgoing transfers of a Kadena or non-Kadena account. #76 (also #83, #96, #103, #110, #114, #117, #124, #125) +- All search endpoints (`/txs/{account,events,search}`) now support an optional (at the discretion of the HTTP gateway) "bounded execution" workflow (#109, also #118) +- The event search endpoint `/txs/event` now accepts 2 new arguments to narrow down the search results (#74): + - `modulename`: Narrows down the search to events whose module names match this value **exactly** + - `minheight`: The minimum block height of the search window +- A _hidden_ new `--serve-swagger-ui` CLI argument that can be passed to `chainweb-data` to make it serve a Swagger UI for an auto-generated OpenAPI 3 spec for the `chainweb-data` HTTP API. The CLI argument is hidden because this spec is rudimentary and unofficial at the time of this release. Future releases will improve it. +- A new `--ignore-schema-diff` CLI argument to `chainweb-data` to make it ignore any unexpected database schema changes. This can be used by `chainweb-data` operators to make schema changes to their database and keep running `chainweb-data`, but such ad-hoc database schema changes are not officially supported since they can cause a wide variety of errors under unpredictable conditions. +- A new `migrate` command for the `chainweb-data` CLI that can be used to run the database migrations and exit. +- A new `/txs/txs` endpoint similar to `/txs/tx`, but it returns a list of `TxDetail` objects, which can contain more than one entry when a transaction is introduced multiple times into the blockchain on independent branches. #71 #72 +- Code search and event search query optimization (#67) +- Add requestkey indexes on `events` and `transactions` tables (#98) +- Refactor richlist generation (#89) +- Load-based throttling for search endpoints (#116) +- Optimize the recent transactions query at server startup (#119) +- Coin circulation calculation fix #97 +- Set random_page_cost to 0 for CW-D connections #122 + + ## 2.0.0 (2021-08-18) This is a major backwards-incompatible update. All chainweb-data users need to delete their DB and rebuild from scratch. Major changes include: diff --git a/README.org b/README.org index 6f36da80..ffc16795 100644 --- a/README.org +++ b/README.org @@ -20,6 +20,7 @@ - [[#endpoints][endpoints]] - [[#fill][fill]] - [[#backfill][backfill]] + - [[#backfill-transfers][backfill-transfers]] - [[#gaps][gaps]] - [[#single][single]] @@ -31,7 +32,9 @@ can easily determine mining statistics and confirm transaction contents. * Requirements -~chainweb-data~ requires [[https://www.postgresql.org/][Postgres]]. +~chainweb-data~ requires [[https://www.postgresql.org/][Postgres]]. If you plan to host a chainweb-data instance +on a cloud machine (e.g. Amazon EC2), we recommend that you run the Postgres +instance on instance-attached storage. * Building @@ -110,6 +113,9 @@ chainweb: global: 1000 #+end_example +You can find an example node config in this repository in +[[file:node-config-for-chainweb-data.yaml][node-config-for-chainweb-data.yaml]]. + ** How to run chainweb-data When running chainweb-data for the first time you should run ~chainweb-data @@ -143,7 +149,7 @@ As a new block comes in, its chain number is printed as a single digit. 
~server~ is just like ~listen~ but also runs an HTTP server that serves a few endpoints for doing common queries. -**** endpoints +**** Endpoints - ~/txs/recent~ gets a list of recent transactions - ~/txs/search?search=foo&limit=20&offset=40~ searches for transactions containing the string ~foo~ - ~/txs/tx?requestkey=foo~ gets the details of a transaction with the given request key - ~/txs/events?search=foo&limit=20&offset=40~ searches for events containing the string ~foo~ - ~/stats~ returns a few stats such as transaction count and coins in circulation - ~/coins~ returns just the coins in circulation +- ~/txs/account/?token=&chainid=&fromheight=&limit=&offset=~ + returns account information given some ~account-identifier~, ~token~ and + ~chainid~. The optional parameter ~minheight~ forces the results to only have + block heights larger than or equal to it. If ~token~ is omitted, the token ~coin~ + is assumed. If ~chainid~ is omitted, all chains are searched. For more detailed information, see the API definition [[https://github.com/kadena-io/chainweb-api/blob/master/lib/ChainwebData/Api.hs#L24][here]]. +**** Note about partial search results + +All of ~chainweb-data~'s search endpoints (~/txs/{events,search,account}~) support a common workflow +for efficiently retrieving the results of a given search in non-overlapping batches. + +A request to any one of these endpoints that matches more rows than the number asked for with the ~limit~ +query parameter will respond with a ~Chainweb-Next~ response header containing a token. That token +can be used to call the same endpoint with the same query parameters plus the token passed in via +the ~next~ query parameter in order to retrieve the next batch of results. (A minimal client sketch of this loop is shown further below.) + +~chainweb-data~ supports a ~Chainweb-Execution-Strategy~ request header that can be used (typically by +~chainweb-data~ operators, by setting it in the API gateway) to impose +an upper bound on the amount of time the server will spend searching for results. Normally, the +search endpoints will produce the given ~limit~-many results if the search matches at least that many +entries. However, if ~Chainweb-Execution-Strategy: Bounded~ is passed in, the response can contain +fewer than ~limit~ rows even though there are potentially more matches, if those matches aren't found +quickly enough. In such a case, the returned ~Chainweb-Next~ token will act as a cursor for the search, +so it's possible to keep searching by making successive calls with subsequent ~Chainweb-Next~ tokens. + *** fill ~fill~ fills in missing blocks. This command used to be called ~gaps~ but it has @@ -185,6 +215,17 @@ empty chains, it won't proceed. ~backfill~ will stop when it reaches height 0. +*** backfill-transfers + +~backfill-transfers~ fills entries in the transfers table, for each chain, from the highest block +height it can find down to the height at which events for coinbase +transfers first exist. + +*Note:* If the transfers table is empty, you must fetch at least one row for each +chain first via ~listen~ before doing ~backfill-transfers~! If ~backfill-transfers~ detects any +empty chains, it won't proceed. 
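**** Example: following ~Chainweb-Next~ tokens

To make the batched-retrieval workflow from the note about partial search results concrete, here is a minimal client sketch in Haskell, using the ~http-client~ and ~http-client-tls~ packages (both already in this project's dependency set). The host name and query are placeholders, and the sketch assumes the final batch simply arrives without a ~Chainweb-Next~ header; none of this code is part of ~chainweb-data~ itself.

#+begin_src haskell
{-# LANGUAGE OverloadedStrings #-}

import qualified Data.ByteString.Char8 as B
import qualified Data.ByteString.Lazy.Char8 as LB
import Network.HTTP.Client
import Network.HTTP.Client.TLS (tlsManagerSettings)

-- Hypothetical chainweb-data server and query; replace with your own.
searchUrl :: String
searchUrl = "https://cwd.example.com/txs/search?search=foo&limit=20"

-- Fetch one batch, returning the body and the Chainweb-Next token, if any.
-- NB: a production client should URL-encode the token before splicing it in.
fetchBatch :: Manager -> Maybe B.ByteString -> IO (LB.ByteString, Maybe B.ByteString)
fetchBatch mgr mNext = do
  req <- parseRequest $
    maybe searchUrl (\tok -> searchUrl <> "&next=" <> B.unpack tok) mNext
  resp <- httpLbs req mgr
  pure (responseBody resp, lookup "Chainweb-Next" (responseHeaders resp))

-- Keep calling the endpoint with the previous response's token until the
-- server stops handing one back.
main :: IO ()
main = do
  mgr <- newManager tlsManagerSettings
  let go mNext = do
        (body, next) <- fetchBatch mgr mNext
        LB.putStrLn body
        maybe (pure ()) (go . Just) next
  go Nothing
#+end_src

The same loop works for the other search endpoints such as ~/txs/events~ and ~/txs/account~, and it also covers the bounded case: when a gateway sets ~Chainweb-Execution-Strategy: Bounded~, a partial response still carries a ~Chainweb-Next~ cursor whenever the search is incomplete.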
+ + *** gaps *Deprecated:* The backfill command is deprecated and will be removed in future diff --git a/cabal.project b/cabal.project index 5d672dbb..26f2e485 100644 --- a/cabal.project +++ b/cabal.project @@ -1,4 +1,4 @@ -packages: chainweb-data.cabal +packages: haskell-src/chainweb-data.cabal package aeson flags: +cffi @@ -7,21 +7,25 @@ source-repository-package type: git location: https://github.com/mightybyte/beam-automigrate.git tag: 112c39953c432b05ec6ae2354b0150c61ee30157 + --sha256: sha256-0x8L1moEyayqdxHUVpYvvdvMcBCo6qMXku2zDzpMNq8= source-repository-package type: git location: https://github.com/kadena-io/pact.git - tag: 8681a6d6e72eccefe00100f86202d616ab5d1621 + tag: 957b8bd7644cebc60d043b33f0d7ffc53c65d783 + --sha256: sha256-ERyHLWEoV83mzjSlMwY8WmHsEaq3JrbxFwQjW0tFXLE= source-repository-package type: git location: https://github.com/kadena-io/chainweb-api.git - tag: d6ad27ed16c060e18bd86213d04d15a360f88d35 + tag: 8a4731c2875753617ccd2b573cf726fa100c6053 + --sha256: sha256-MY470zn+BveL7X7gFVSgGqvWjD0jsk6VKRF/gzal9Bc= source-repository-package type: git location: https://github.com/tathougies/beam.git tag: 596981a1ea6765b9f311d48a2ec4d8460ebc4b7e + --sha256: sha256-4jfhHJTUKu09ULNFFACh3v9r3S0sJkUjTyjXq6gJY2A= subdir: beam-core beam-migrate @@ -31,6 +35,7 @@ source-repository-package type: git location: https://github.com/obsidiansystems/gargoyle tag: df0068f9572c1371bed7aa416af84d462c3574c0 + --sha256: sha256-1NWmQf0zgi4RtQISXvk0JbsQ1OkpYJ789pkglE3fVaU= subdir: gargoyle gargoyle-postgresql @@ -39,13 +44,11 @@ source-repository-package type: git location: https://github.com/kadena-io/thyme.git tag: 6ee9fcb026ebdb49b810802a981d166680d867c9 + --sha256: sha256-DqGIoPGBg8py044Xa7l/Y09K+cKGFwlxONHoZRJyzCU= package vault documentation: false write-ghc-environment-files: never -constraints: - sbv == 8.8 - allow-newer: gargoyle:base diff --git a/cabal.project.freeze b/cabal.project.freeze new file mode 100644 index 00000000..1e8fa15e --- /dev/null +++ b/cabal.project.freeze @@ -0,0 +1,407 @@ +active-repositories: hackage.haskell.org:merge +constraints: any.Boolean ==0.2.4, + any.Cabal ==3.2.1.0, + any.Decimal ==0.5.2, + any.Glob ==0.10.2, + any.HUnit ==1.6.2.0, + any.MemoTrie ==0.6.10, + MemoTrie -examples, + any.NumInstances ==1.4, + any.OneTuple ==0.3, + any.Only ==0.1, + any.QuickCheck ==2.14, + QuickCheck +templatehaskell, + any.StateVar ==1.2.2, + any.abstract-deque ==0.3, + abstract-deque -usecas, + any.abstract-par ==0.3.3, + any.adjunctions ==4.4.2, + any.aeson ==1.4.7.1, + aeson -bytestring-builder +cffi -developer -fast, + any.aeson-pretty ==0.8.9, + aeson-pretty -lib-only, + any.algebraic-graphs ==0.6.1, + any.ansi-terminal ==0.11.4, + ansi-terminal -example +win32-2-13-1, + any.ansi-wl-pprint ==0.6.9, + ansi-wl-pprint -example, + any.appar ==0.1.8, + any.array ==0.5.4.0, + any.asn1-encoding ==0.9.6, + any.asn1-parse ==0.9.5, + any.asn1-types ==0.3.4, + any.assoc ==1.0.2, + any.async ==2.2.4, + async -bench, + any.atomic-primops ==0.8.4, + atomic-primops -debug, + any.attoparsec ==0.13.2.5, + attoparsec -developer, + any.attoparsec-iso8601 ==1.0.2.0, + attoparsec-iso8601 -developer -fast, + any.auto-update ==0.1.6, + any.base ==4.14.3.0, + any.base-compat ==0.11.2, + any.base-compat-batteries ==0.11.2, + any.base-orphans ==0.8.7, + any.base-prelude ==1.6.1, + any.base-unicode-symbols ==0.2.4.2, + base-unicode-symbols +base-4-8 -old-base, + any.base16-bytestring ==0.1.1.7, + any.base64-bytestring ==1.0.0.3, + any.basement ==0.0.15, + any.beam-automigrate ==0.1.2.0, + 
beam-automigrate -build-readme -ghcipretty -integration-tests -werror, + any.beam-core ==0.9.0.0, + beam-core -werror, + any.beam-migrate ==0.5.0.0, + beam-migrate -werror, + any.beam-postgres ==0.5.0.0, + beam-postgres -werror, + any.bifunctors ==5.5.7, + bifunctors +semigroups +tagged, + any.binary ==0.8.8.0, + any.binary-orphans ==1.0.3, + any.bitvec ==1.1.3.0, + bitvec -libgmp, + any.blake2 ==0.3.0, + blake2 +hlint -llvm -support_blake2_sse, + any.blaze-builder ==0.4.2.2, + any.blaze-html ==0.9.1.2, + any.blaze-markup ==0.8.2.8, + any.boring ==0.2, + boring +tagged, + any.bound ==2.0.1, + bound +template-haskell, + any.bsb-http-chunked ==0.0.0.4, + any.byteable ==0.1.1, + any.byteorder ==1.0.4, + any.bytes ==0.17.2, + any.bytestring ==0.10.12.0, + any.bytestring-builder ==0.10.8.2.0, + bytestring-builder +bytestring_has_builder, + any.cabal-doctest ==1.0.9, + any.cache ==0.1.3.0, + any.call-stack ==0.4.0, + any.case-insensitive ==1.2.1.0, + any.cassava ==0.5.3.0, + cassava -bytestring--lt-0_10_4, + any.cereal ==0.5.8.3, + cereal -bytestring-builder, + any.chainweb-api ==1.2.1, + chainweb-data -ghc-flags, + any.charset ==0.3.9, + any.clock ==0.8.3, + clock -llvm, + any.cmdargs ==0.10.21, + cmdargs +quotation -testprog, + any.code-page ==0.2.1, + any.colour ==2.3.6, + any.comonad ==5.0.8, + comonad +containers +distributive +indexed-traversable, + any.conduit ==1.3.4.3, + any.configuration-tools ==0.6.1, + configuration-tools -remote-configs, + any.connection ==0.3.1, + any.constraints ==0.13.4, + any.constraints-extras ==0.4.0.0, + constraints-extras +build-readme, + any.containers ==0.6.5.1, + any.contravariant ==1.5.5, + contravariant +semigroups +statevar +tagged, + any.cookie ==0.4.5, + any.criterion ==1.5.13.0, + criterion -embed-data-files -fast, + any.criterion-measurement ==0.1.4.0, + criterion-measurement -fast, + any.crypto-api ==0.13.3, + crypto-api -all_cpolys, + any.cryptohash ==0.11.9, + any.cryptohash-md5 ==0.11.101.0, + any.cryptohash-sha1 ==0.11.101.0, + any.cryptonite ==0.30, + cryptonite -check_alignment +integer-gmp -old_toolchain_inliner +support_aesni +support_deepseq -support_pclmuldq +support_rdrand -support_sse +use_target_attributes, + any.data-default ==0.7.1.1, + any.data-default-class ==0.1.2.0, + any.data-default-instances-containers ==0.0.1, + any.data-default-instances-dlist ==0.0.1, + any.data-default-instances-old-locale ==0.0.1, + any.dec ==0.0.5, + any.deepseq ==1.4.4.0, + any.dense-linear-algebra ==0.1.0.0, + any.dependent-map ==0.4.0.0, + any.dependent-sum ==0.7.2.0, + any.deriving-compat ==0.5.9, + deriving-compat +base-4-9 +new-functor-classes +template-haskell-2-11, + any.direct-sqlite ==2.3.27, + direct-sqlite +fulltextsearch +haveusleep +json1 -systemlib +urifilenames, + any.directory ==1.3.6.0, + any.distributive ==0.6.2.1, + distributive +semigroups +tagged, + any.dlist ==0.8.0.8, + any.easy-file ==0.2.2, + any.ed25519-donna ==0.1.1, + any.enclosed-exceptions ==1.0.3, + any.entropy ==0.4.1.10, + entropy -donotgetentropy, + any.errors ==2.3.0, + any.exceptions ==0.10.4, + any.fast-logger ==3.1.1, + any.file-embed ==0.0.15.0, + any.file-embed-lzma ==0.0.1, + any.filelock ==0.1.1.5, + any.filepath ==1.4.2.1, + any.fingertree ==0.1.5.0, + any.finite-typelits ==0.1.6.0, + any.free ==5.1.3, + any.gargoyle ==0.1.1.0, + any.gargoyle-postgresql ==0.2.0.1, + gargoyle-postgresql -enable-psql-test, + any.generics-sop ==0.5.1.0, + any.ghc-boot-th ==8.10.7, + any.ghc-prim ==0.6.1, + any.happy ==1.20.0, + any.hashable ==1.3.2.0, + hashable +integer-gmp 
-random-initial-seed, + any.haskeline ==0.8.2, + any.haskell-lexer ==1.1.1, + any.haskell-src-exts ==1.23.1, + any.hourglass ==0.2.12, + any.hsc2hs ==0.68.8, + hsc2hs -in-ghc-tree, + any.hspec ==2.10.8, + any.hspec-core ==2.10.8, + any.hspec-discover ==2.10.8, + any.hspec-expectations ==0.8.2, + any.http-api-data ==0.4.3, + http-api-data -use-text-show, + any.http-client ==0.6.4.1, + http-client +network-uri, + any.http-client-tls ==0.3.5.3, + any.http-date ==0.0.11, + any.http-media ==0.8.0.0, + any.http-types ==0.12.3, + any.http2 ==3.0.3, + http2 -devel -doc -h2spec, + any.indexed-list-literals ==0.2.1.3, + any.indexed-profunctors ==0.1.1, + any.indexed-traversable ==0.1.2, + any.indexed-traversable-instances ==0.1.1.1, + any.insert-ordered-containers ==0.2.5.1, + any.integer-gmp ==1.0.3.0, + any.integer-logarithms ==1.0.3.1, + integer-logarithms -check-bounds +integer-gmp, + any.invariant ==0.5.3, + any.iproute ==1.7.12, + any.js-chart ==2.9.4.1, + any.kan-extensions ==5.2.5, + any.lens ==4.19.2, + lens -benchmark-uniplate -dump-splices +inlining -j -old-inline-pragmas -safe +test-doctests +test-hunit +test-properties +test-templates +trustworthy, + any.lens-aeson ==1.1.3, + any.libBF ==0.6.5.1, + libBF -system-libbf, + any.libyaml ==0.1.2, + libyaml -no-unicode -system-libyaml, + any.lifted-base ==0.2.3.12, + any.lzma ==0.0.0.4, + any.managed ==1.0.9, + any.math-functions ==0.3.4.2, + math-functions +system-erf +system-expm1, + any.megaparsec ==9.2.1, + megaparsec -dev, + any.memory ==0.18.0, + memory +support_bytestring +support_deepseq, + any.microlens ==0.4.13.1, + any.microstache ==1.0.2.2, + any.mime-types ==0.1.1.0, + any.mmorph ==1.1.5, + any.monad-control ==1.0.3.1, + any.monad-par ==0.3.5, + monad-par -chaselev -newgeneric, + any.monad-par-extras ==0.3.3, + any.mono-traversable ==1.0.15.3, + any.mtl ==2.2.2, + any.mwc-random ==0.14.0.0, + any.neat-interpolation ==0.5.1.3, + any.network ==3.1.2.7, + network -devel, + any.network-byte-order ==0.1.6, + any.network-info ==0.2.1, + any.network-uri ==2.6.4.2, + any.newtype-generics ==0.6.2, + any.old-locale ==1.0.0.7, + any.old-time ==1.1.0.3, + any.openapi3 ==3.2.3, + any.optics-core ==0.3.0.1, + any.optics-extra ==0.3, + any.optics-th ==0.3.0.2, + any.optparse-applicative ==0.16.1.0, + optparse-applicative +process, + any.pact ==4.4, + pact +build-tool -cryptonite-ed25519 -no-advice -with-integer-gmp, + any.pact-time ==0.2.0.1, + pact-time -with-time, + any.parallel ==3.2.2.0, + any.parsec ==3.1.14.0, + any.parser-combinators ==1.3.0, + parser-combinators -dev, + any.parsers ==0.12.11, + parsers +attoparsec +binary +parsec, + any.pem ==0.2.4, + any.posix-escape ==0.1, + any.postgresql-libpq ==0.9.4.3, + postgresql-libpq -use-pkg-config, + any.postgresql-simple ==0.6.5, + any.postgresql-simple-migration ==0.1.15.0, + any.pqueue ==1.4.3.0, + any.pretty ==1.1.3.6, + any.pretty-simple ==3.2.3.0, + pretty-simple -buildexample -buildexe, + any.prettyprinter ==1.6.0, + prettyprinter -buildreadme, + any.prettyprinter-ansi-terminal ==1.1.2, + any.primitive ==0.7.4.0, + any.process ==1.6.13.2, + any.profunctors ==5.6, + any.psqueues ==0.2.7.3, + any.pvar ==1.0.0.0, + any.quickcheck-instances ==0.3.23, + quickcheck-instances -bytestring-builder, + any.quickcheck-io ==0.2.0, + any.random ==1.1, + any.readable ==0.3.1, + any.recv ==0.0.0, + any.reducers ==3.12.4, + any.reflection ==2.1.6, + reflection -slow +template-haskell, + any.resource-pool ==0.3.1.0, + any.resourcet ==1.2.6, + any.retry ==0.8.1.2, + retry -lib-werror, + any.rts ==1.0.1, 
+ any.safe ==0.3.19, + any.safe-exceptions ==0.1.7.3, + any.sbv ==9.0, + any.scheduler ==2.0.0.1, + any.scientific ==0.3.7.0, + scientific -bytestring-builder -integer-simple, + any.semigroupoids ==5.3.4, + semigroupoids +comonad +containers +contravariant +distributive +doctests +tagged +unordered-containers, + any.semigroups ==0.20, + semigroups +binary +bytestring -bytestring-builder +containers +deepseq +hashable +tagged +template-haskell +text +transformers +unordered-containers, + any.servant ==0.18.3, + any.servant-blaze ==0.9.1, + any.servant-client ==0.18.3, + any.servant-client-core ==0.18.3, + any.servant-openapi3 ==2.0.1.6, + any.servant-server ==0.18.3, + any.servant-swagger-ui ==0.3.5.4.5.0, + any.servant-swagger-ui-core ==0.3.5, + any.setenv ==0.1.1.3, + any.simple-sendfile ==0.2.30, + simple-sendfile +allow-bsd, + any.singleton-bool ==0.1.6, + any.socks ==0.6.1, + any.some ==1.0.4.1, + some +newtype-unsafe, + any.sop-core ==0.5.0.2, + any.split ==0.2.3.5, + any.splitmix ==0.0.5, + splitmix -optimised-mixer +random, + any.statistics ==0.15.2.0, + any.stm ==2.5.0.1, + any.stm-chans ==3.0.0.6, + any.streaming ==0.2.3.1, + any.streaming-attoparsec ==1.0.0.1, + any.streaming-bytestring ==0.1.7, + any.streaming-commons ==0.2.2.5, + streaming-commons -use-bytestring-builder, + any.streaming-events ==1.0.1, + any.strict-tuple ==0.1.5.2, + any.string-conv ==0.2.0, + string-conv -lib-werror, + any.string-conversions ==0.4.0.1, + any.stringsearch ==0.3.6.6, + stringsearch -base3 +base4, + any.syb ==0.7.2.2, + any.tagged ==0.8.6.1, + tagged +deepseq +transformers, + any.tasty ==1.4.3, + tasty +unix, + any.tasty-hunit ==0.10.0.3, + any.template-haskell ==2.16.0.0, + any.terminfo ==0.4.1.4, + any.text ==1.2.4.1, + any.text-short ==0.1.5, + text-short -asserts, + any.tf-random ==0.5, + any.th-abstraction ==0.3.2.0, + any.th-compat ==0.1.4, + any.these ==1.1.1.1, + these +assoc, + any.thyme ==0.3.6.0, + thyme +bug-for-bug -ghcjs -hlint -lens -show-internal -werror, + any.time ==1.9.3, + any.time-compat ==1.9.6.1, + time-compat -old-locale, + any.time-manager ==0.0.0, + any.tls ==1.6.0, + tls +compat -hans +network, + any.tls-session-manager ==0.0.4, + any.token-bucket ==0.1.0.1, + token-bucket +use-cbits, + any.transformers ==0.5.6.2, + any.transformers-base ==0.4.6, + transformers-base +orphaninstances, + any.transformers-compat ==0.6.6, + transformers-compat -five +five-three -four +generic-deriving +mtl -three -two, + any.trifecta ==2.1.1, + any.type-equality ==1, + any.uniplate ==1.6.13, + any.unix ==2.7.2.2, + any.unix-compat ==0.6, + unix-compat -old-time, + any.unix-time ==0.4.8, + any.unliftio ==0.2.23.0, + any.unliftio-core ==0.2.0.1, + any.unordered-containers ==0.2.15.0, + unordered-containers -debug, + any.utf8-string ==1.0.2, + any.uuid ==1.3.15, + any.uuid-types ==1.0.5, + any.vault ==0.3.1.5, + vault +useghc, + any.vector ==0.12.3.1, + vector +boundschecks -internalchecks -unsafechecks -wall, + any.vector-algorithms ==0.9.0.1, + vector-algorithms +bench +boundschecks -internalchecks -llvm +properties -unsafechecks, + any.vector-binary-instances ==0.2.5.2, + any.vector-sized ==1.4.4, + any.vector-space ==0.16, + any.vector-th-unbox ==0.2.2, + any.void ==0.7.3, + void -safe, + any.wai ==3.2.3, + any.wai-app-static ==3.1.7.4, + wai-app-static +cryptonite -print, + any.wai-cors ==0.2.7, + any.wai-extra ==3.0.32, + wai-extra -build-example, + any.wai-logger ==2.4.0, + any.wai-middleware-throttle ==0.3.0.1, + any.warp ==3.3.23, + warp +allow-sendfilefd -network-bytestring 
-warp-debug +x509, + any.warp-tls ==3.3.4, + any.witherable ==0.4.2, + any.word8 ==0.1.3, + any.x509 ==1.7.7, + any.x509-store ==1.6.9, + any.x509-system ==1.6.7, + any.x509-validation ==1.6.12, + any.yaml ==0.11.8.0, + yaml +no-examples +no-exe, + any.yet-another-logger ==0.4.1, + yet-another-logger -tbmqueue, + any.zlib ==0.6.3.0, + zlib -bundled-c-zlib -non-blocking-ffi -pkg-config +index-state: hackage.haskell.org 2022-12-31T20:54:02Z diff --git a/default.nix b/default.nix index 8901271f..9c90da7e 100644 --- a/default.nix +++ b/default.nix @@ -1,34 +1,65 @@ -{ kpkgs ? import ./deps/kpkgs {} +let inputs = (import ( + fetchTarball { + url = "https://github.com/edolstra/flake-compat/archive/35bb57c0c8d8b62bbfd284272c928ceb64ddbde9.tar.gz"; + sha256 = "1prd9b1xx8c0sfwnyzkspplh30m613j42l1k789s521f4kv4c2z2"; } + ) { + src = ./.; + }).defaultNix.inputs; + pkgsDef = import inputs.nixpkgs (import inputs.haskellNix {}).nixpkgsArgs; +in +{ pkgs ? pkgsDef +, dontStrip ? false +, threaded ? true +, enableProfiling ? false +, dockerTag ? "latest" +, baseImageDef ? { + imageName = "ubuntu"; + imageDigest = "sha256:965fbcae990b0467ed5657caceaec165018ef44a4d2d46c7cdea80a9dff0d1ea"; + sha256 = "10wlr8rhiwxmz1hk95s9vhkrrjkzyvrv6nshg23j86rw08ckrqnz"; + finalImageTag = "22.04"; + finalImageName = "ubuntu"; + } +, ... }: -let pkgs = kpkgs.pkgs; - haskellPackages = kpkgs.rp.ghc8_6; - nix-thunk = import ./deps/nix-thunk {}; -in haskellPackages.developPackage { - name = builtins.baseNameOf ./.; - root = kpkgs.gitignoreSource ./.; - overrides = self: super: with pkgs.haskell.lib; - let gargoylePkgs = import ./deps/gargoyle { haskellPackages = self; }; - in - { - inherit (gargoylePkgs) gargoyle gargoyle-postgresql; +let profilingModule = { + enableLibraryProfiling = enableProfiling; + enableProfiling = enableProfiling; + }; + chainweb-data = pkgs.haskell-nix.project' { + src = ./.; + index-state = "2023-02-01T00:00:00Z"; + compiler-nix-name = "ghc8107"; + projectFileName = "cabal.project"; + shell.tools = { + cabal = {}; + }; + modules = if enableProfiling then [ profilingModule ] else []; + }; + flake = chainweb-data.flake {}; + default = flake.packages."chainweb-data:exe:chainweb-data".override (old: { + inherit dontStrip; + flags = old.flags // { + inherit threaded; + }; + }); + baseImage = pkgs.dockerTools.pullImage baseImageDef; + dockerImage = pkgs.dockerTools.buildImage { + name = "chainweb-data"; + tag = dockerTag; - #beam-automigrate = self.callHackageDirect { - # pkg = "beam-automigrate"; - # ver = "0.1.2.0"; - # sha256 = "1a70da15hb4nlpxhnsy1g89frbpf3kg3mwb4g9carj5izw1w1r1k"; - #} {}; + fromImage = baseImage; - pact = dontCheck super.pact; - }; - source-overrides = { - beam-automigrate = nix-thunk.thunkSource ./deps/beam-automigrate; - chainweb-api = nix-thunk.thunkSource ./deps/chainweb-api; - pact = nix-thunk.thunkSource ./deps/pact; - }; - modifier = drv: pkgs.haskell.lib.overrideCabal drv (attrs: { - buildTools = (attrs.buildTools or []) ++ [ - haskellPackages.cabal-install - haskellPackages.ghcid - ]; - }); + runAsRoot = '' + ln -s "${default}/bin/chainweb-data" /usr/local/bin/ + mkdir -p /chainweb-data + ''; + + config = { + WorkingDir = "/chainweb-data"; + Volumes = { "/chainweb-data" = {}; }; + Entrypoint = [ "chainweb-data" ]; + }; + }; +in { + inherit flake default dockerImage; } diff --git a/deps/chainweb-api/github.json b/deps/chainweb-api/github.json index 7a7ae758..3894e73b 100644 --- a/deps/chainweb-api/github.json +++ b/deps/chainweb-api/github.json @@ -3,6 +3,6 @@ "repo": 
"chainweb-api", "branch": "master", "private": false, - "rev": "d6ad27ed16c060e18bd86213d04d15a360f88d35", - "sha256": "13hl6p2vr15wx7w1bsvl86hwj8zqcl8835pggbcxnfr66zacyrhf" + "rev": "dedd563432d53b39fa6553a3e6c551b471e7483f", + "sha256": "03jkzkq89haihbim193ywys2w2dh01vvjg3ssnxf12s2fhidvpm9" } diff --git a/deps/kpkgs/github.json b/deps/kpkgs/github.json index 8e4f554e..427b1c3f 100644 --- a/deps/kpkgs/github.json +++ b/deps/kpkgs/github.json @@ -3,6 +3,6 @@ "repo": "kpkgs", "branch": "master", "private": false, - "rev": "bcb86a8827c0582f4148ddd47e53a6e5687fc39f", - "sha256": "0rkw6qy113d9y1sfb6345k486wyx11y1l5xbijndhy9b7ij1zj1y" + "rev": "905ac27a05db959db3ce27c4f258310746a9f12b", + "sha256": "0mw6zsxkc93pqravvjmjiaq3bhydfabkvh12wbndj2jgv3i4z3qx" } diff --git a/deps/pact/github.json b/deps/pact/github.json index bf464b7c..5f824694 100644 --- a/deps/pact/github.json +++ b/deps/pact/github.json @@ -1,7 +1,8 @@ { "owner": "kadena-io", "repo": "pact", + "branch": "master", "private": false, - "rev": "4971ab6078b75eb612d83d56f1e7cd139a5a2ba8", - "sha256": "1jx1hd596r5rrx96r2v2xds6pjjmi4lfk7xm14f3gkx2gmavgyr3" + "rev": "957b8bd7644cebc60d043b33f0d7ffc53c65d783", + "sha256": "1caw8m5mn8q42zqvc9mpm88yqqas7h33799lrvkcsmr8c4nqf70i" } diff --git a/exec/Chainweb/RichList.hs b/exec/Chainweb/RichList.hs deleted file mode 100644 index d7b3968d..00000000 --- a/exec/Chainweb/RichList.hs +++ /dev/null @@ -1,105 +0,0 @@ -{-# LANGUAGE DeriveAnyClass #-} -{-# LANGUAGE DeriveGeneric #-} -{-# LANGUAGE LambdaCase #-} -{-# LANGUAGE OverloadedStrings #-} -{-# LANGUAGE ScopedTypeVariables #-} -{-# LANGUAGE TypeApplications #-} -module Chainweb.RichList -( richList -) where - - -import Control.Monad - -import qualified Data.ByteString.Lazy as LBS -import qualified Data.Csv as Csv -import Data.Foldable (traverse_) -import Data.List (sortOn, isPrefixOf, sort) -import qualified Data.Map.Strict as M -import Data.Text (Text) -import Data.Ord (Down(..)) -import qualified Data.Vector as V - -import System.Directory -import System.FilePath -import System.Process -import System.Logger.Types - - -richList :: LogFunctionIO Text -> FilePath -> IO () -richList logger fp = do - -- - -- Steps: - -- 1. Check whether specified top-level db path is reachable - -- 2. We assume the node data db is up to date, and for chains 0..19, - -- Check that the sqlite db paths exist. If yes, copy them to current - -- working dir. - -- 3. Execute richlist generation, outputing `richlist.csv` - -- 4. Aggregate richest accounts and prune to top 100 - -- - chains <- doesPathExist fp >>= \case - True -> copyTables - False -> ioError $ userError - $ "Chainweb-node top-level db directory does not exist: " - <> fp - - logger Info $ "Aggregating richlist.csv..." - let cmd = proc "/bin/sh" ["scripts/richlist.sh", show chains] - void $! readCreateProcess cmd [] - - logger Info $ "Filtering top 100 richest accounts..." - void $! pruneRichList - - logger Info $ "Finished." - where - pruneRichList = do - csv <- LBS.readFile "richlist.csv" - case Csv.decode Csv.HasHeader csv of - Left e -> ioError $ userError $ "Could not decode rich list .csv file: " <> e - Right (rs :: V.Vector (String,String,String)) -> do - let go acc (acct,_,bal) - | bal == "balance" = acc - | otherwise = M.insertWith (+) acct (read @Double bal) acc - - let acc = Csv.encode - $ take 100 - $ sortOn (Down . snd) - $ M.toList - $ V.foldl' go M.empty rs - - void $! 
LBS.writeFile "richlist.csv" acc - - copyTables :: IO Int - copyTables = do - let sqlitePath = fp </> "chainweb-node/mainnet01/0/sqlite" - - doesPathExist sqlitePath >>= \case - False -> ioError $ userError $ "Cannot find sqlite data. Is your node synced?" - True -> do - dir <- filter ((==) ".sqlite" . takeExtension) <$> listDirectory sqlitePath - - -- count the number of sqlite files and aggregate associated file paths - -- - let f (ns,acc) p - | "pact-v1-chain-" `isPrefixOf` p = - -- this is not a magical 14 - this is the number of chars in "pact-v1-chain-" - case splitAt 14 (fst $ splitExtension p) of - (_, "") -> ioError $ userError $ "Found corrupt sqlite path: " <> p - (_, cid) -> return ((read @Int cid):ns,p:acc) - | otherwise = return (ns,acc) - - (chains, files) <- foldM f mempty dir - - let isConsecutive = all (\(x,y) -> succ x == y) - . (zip <*> tail) - . sort - - unless (isConsecutive chains) - $ ioError $ userError - $ "Missing tables for some chain ids. Is your node synced?" - - -- copy all files to current working dir - traverse_ (\p -> copyFile (sqlitePath </> p) p) files - - -- return # of chains for bash - return $ length chains diff --git a/exec/Chainweb/Server.hs b/exec/Chainweb/Server.hs deleted file mode 100644 index a437c440..00000000 --- a/exec/Chainweb/Server.hs +++ /dev/null @@ -1,483 +0,0 @@ -{-# OPTIONS_GHC -fno-warn-orphans #-} - -{-# LANGUAGE DataKinds #-} -{-# LANGUAGE DerivingStrategies #-} -{-# LANGUAGE FlexibleContexts #-} -{-# LANGUAGE FlexibleInstances #-} -{-# LANGUAGE GeneralizedNewtypeDeriving #-} -{-# LANGUAGE LambdaCase #-} -{-# LANGUAGE MultiParamTypeClasses #-} -{-# LANGUAGE NumericUnderscores #-} -{-# LANGUAGE ScopedTypeVariables #-} -{-# LANGUAGE TypeApplications #-} -{-# LANGUAGE TypeFamilies #-} -{-# LANGUAGE TypeOperators #-} - -module Chainweb.Server where - ------------------------------------------------------------------------------- -import Chainweb.Api.BlockHeader (BlockHeader(..)) -import Chainweb.Api.ChainId -import Chainweb.Api.Hash -import Control.Applicative -import Control.Concurrent -import Control.Error -import Control.Monad.Except -import Control.Retry -import Data.Aeson hiding (Error) -import Data.ByteString.Lazy (ByteString) -import Data.Coerce -import Data.Decimal -import Data.Foldable -import Data.Int -import Data.IORef -import qualified Data.Pool as P -import Data.Proxy -import Data.Sequence (Seq) -import qualified Data.Sequence as S -import Data.String -import Data.Text (Text) -import qualified Data.Text as T -import qualified Data.Text.IO as T -import Data.Time -import Data.Tuple.Strict (T2(..)) -import Database.Beam hiding (insert) -import Database.Beam.Backend.SQL -import Database.Beam.Postgres -import Control.Lens -import Network.Wai -import Network.Wai.Handler.Warp -import Network.Wai.Middleware.Cors -import Servant.API -import Servant.Server -import System.Directory -import System.FilePath -import System.Logger.Types hiding (logg) -import Text.Printf ------------------------------------------------------------------------------- -import Chainweb.Api.BlockPayloadWithOutputs -import Chainweb.Api.Common (BlockHeight) -import Chainweb.Coins -import Chainweb.Database -import Chainweb.Env -import Chainweb.Gaps -import Chainweb.Listen -import Chainweb.Lookups -import Chainweb.RichList -import ChainwebData.Types -import ChainwebData.Api -import ChainwebData.EventDetail -import ChainwebData.Pagination -import ChainwebData.TxDetail -import ChainwebData.TxSummary -import ChainwebDb.Types.Block -import ChainwebDb.Types.DbHash -import 
ChainwebDb.Types.Transaction -import ChainwebDb.Types.Event ------------------------------------------------------------------------------- - -setCors :: Middleware -setCors = cors . const . Just $ simpleCorsResourcePolicy - { corsRequestHeaders = simpleHeaders - } - -data ServerState = ServerState - { _ssRecentTxs :: RecentTxs - , _ssHighestBlockHeight :: BlockHeight - , _ssTransactionCount :: Maybe Int64 - , _ssCirculatingCoins :: Decimal - } deriving (Eq,Show) - -ssRecentTxs - :: Functor f - => (RecentTxs -> f RecentTxs) - -> ServerState -> f ServerState -ssRecentTxs = lens _ssRecentTxs setter - where - setter sc v = sc { _ssRecentTxs = v } - -ssHighestBlockHeight - :: Functor f - => (BlockHeight -> f BlockHeight) - -> ServerState -> f ServerState -ssHighestBlockHeight = lens _ssHighestBlockHeight setter - where - setter sc v = sc { _ssHighestBlockHeight = v } - -ssTransactionCount - :: Functor f - => (Maybe Int64 -> f (Maybe Int64)) - -> ServerState -> f ServerState -ssTransactionCount = lens _ssTransactionCount setter - where - setter sc v = sc { _ssTransactionCount = v } - -ssCirculatingCoins - :: Functor f - => (Decimal -> f Decimal) - -> ServerState -> f ServerState -ssCirculatingCoins = lens _ssCirculatingCoins setter - where - setter sc v = sc { _ssCirculatingCoins = v } - -type RichlistEndpoint = "richlist.csv" :> Get '[PlainText] Text - -type TxEndpoint = "tx" :> QueryParam "requestkey" Text :> Get '[JSON] TxDetail - -type TheApi = - ChainwebDataApi - :<|> RichlistEndpoint -theApi :: Proxy TheApi -theApi = Proxy - -apiServer :: Env -> ServerEnv -> IO () -apiServer env senv = do - ecut <- queryCut env - let logg = _env_logger env - case ecut of - Left e -> do - logg Error "Error querying cut" - logg Info $ fromString $ show e - Right cutBS -> apiServerCut env senv cutBS - -apiServerCut :: Env -> ServerEnv -> ByteString -> IO () -apiServerCut env senv cutBS = do - let curHeight = cutMaxHeight cutBS - logg = _env_logger env - t <- getCurrentTime - let circulatingCoins = getCirculatingCoins (fromIntegral curHeight) t - logg Info $ fromString $ "Total coins in circulation: " <> show circulatingCoins - let pool = _env_dbConnPool env - recentTxs <- RecentTxs . 
S.fromList <$> queryRecentTxs logg pool - numTxs <- getTransactionCount logg pool - ssRef <- newIORef $ ServerState recentTxs 0 numTxs circulatingCoins - logg Info $ fromString $ "Total number of transactions: " <> show numTxs - _ <- forkIO $ scheduledUpdates env pool ssRef (_serverEnv_runFill senv) (_serverEnv_fillDelay senv) - _ <- forkIO $ retryingListener env ssRef - logg Info $ fromString "Starting chainweb-data server" - let serverApp req = - ( ( recentTxsHandler ssRef - :<|> searchTxs logg pool req - :<|> evHandler logg pool - :<|> txHandler logg pool - ) - :<|> statsHandler ssRef - :<|> coinsHandler ssRef - ) - :<|> richlistHandler - Network.Wai.Handler.Warp.run (_serverEnv_port senv) $ setCors $ \req f -> - serve theApi (serverApp req) req f - -retryingListener :: Env -> IORef ServerState -> IO () -retryingListener env ssRef = do - let logg = _env_logger env - delay = 10_000_000 - policy = constantDelay delay - check _ _ = do - logg Warn $ fromString $ printf "Retrying node listener in %.1f seconds" - (fromIntegral delay / 1_000_000 :: Double) - return True - retrying policy check $ \_ -> do - logg Info "Starting node listener" - listenWithHandler env $ serverHeaderHandler env ssRef - -scheduledUpdates - :: Env - -> P.Pool Connection - -> IORef ServerState - -> Bool - -> Maybe Int - -> IO () -scheduledUpdates env pool ssRef runFill fillDelay = forever $ do - threadDelay (60 * 60 * 24 * micros) - - now <- getCurrentTime - logg Info $ fromString $ show now - logg Info "Recalculating coins in circulation:" - height <- _ssHighestBlockHeight <$> readIORef ssRef - let circulatingCoins = getCirculatingCoins (fromIntegral height) now - logg Info $ fromString $ show circulatingCoins - let f ss = (ss & ssCirculatingCoins .~ circulatingCoins, ()) - atomicModifyIORef' ssRef f - - numTxs <- getTransactionCount logg pool - logg Info $ fromString $ "Updated number of transactions: " <> show numTxs - let g ss = (ss & ssTransactionCount %~ (numTxs <|>), ()) - atomicModifyIORef' ssRef g - - h <- getHomeDirectory - richList logg (h </> ".local/share") - logg Info "Updated rich list" - - when runFill $ do - logg Info "Filling missing blocks" - gaps env (FillArgs fillDelay False) - logg Info "Fill finished" - where - micros = 1000000 - logg = _env_logger env - -richlistHandler :: Handler Text -richlistHandler = do - let f = "richlist.csv" - exists <- liftIO $ doesFileExist f - if exists - then liftIO $ T.readFile f - else throwError err404 - -coinsHandler :: IORef ServerState -> Handler Text -coinsHandler ssRef = liftIO $ fmap mkStats $ readIORef ssRef - where - mkStats ss = T.pack $ show $ _ssCirculatingCoins ss - -statsHandler :: IORef ServerState -> Handler ChainwebDataStats -statsHandler ssRef = liftIO $ do - fmap mkStats $ readIORef ssRef - where - mkStats ss = ChainwebDataStats (fromIntegral <$> _ssTransactionCount ss) - (Just $ realToFrac $ _ssCirculatingCoins ss) - -recentTxsHandler :: IORef ServerState -> Handler [TxSummary] -recentTxsHandler ss = liftIO $ fmap (toList . _recentTxs_txs . 
_ssRecentTxs) $ readIORef ss - -serverHeaderHandler :: Env -> IORef ServerState -> PowHeader -> IO () -serverHeaderHandler env ssRef ph@(PowHeader h _) = do - let pool = _env_dbConnPool env - let chain = _blockHeader_chainId h - let height = _blockHeader_height h - let pair = T2 (_blockHeader_chainId h) (hashToDbHash $ _blockHeader_payloadHash h) - let logg = _env_logger env - payloadWithOutputs env pair >>= \case - Left e -> do - logg Error $ fromString $ printf "Couldn't fetch parent for: %s" - (hashB64U $ _blockHeader_hash h) - logg Info $ fromString $ show e - Right pl -> do - let hash = _blockHeader_hash h - tos = _blockPayloadWithOutputs_transactionsWithOutputs pl - ts = S.fromList $ map (\(t,tout) -> mkTxSummary chain height hash t tout) tos - f ss = (ss & ssRecentTxs %~ addNewTransactions ts - & ssHighestBlockHeight %~ max height - & (ssTransactionCount . _Just) +~ (fromIntegral $ S.length ts), ()) - - let msg = printf "Got new header on chain %d height %d" (unChainId chain) height - addendum = if S.length ts == 0 - then "" - else printf " with %d transactions" (S.length ts) - - logg Debug (fromString $ msg <> addendum) - mapM_ (logg Debug . fromString . show) tos - - atomicModifyIORef' ssRef f - insertNewHeader pool ph pl - - -instance BeamSqlBackendIsString Postgres (Maybe Text) -instance BeamSqlBackendIsString Postgres (Maybe String) - -searchTxs - :: LogFunctionIO Text - -> P.Pool Connection - -> Request - -> Maybe Limit - -> Maybe Offset - -> Maybe Text - -> Handler [TxSummary] -searchTxs _ _ _ _ _ Nothing = throw404 "You must specify a search string" -searchTxs logger pool req limit offset (Just search) = do - liftIO $ logger Info $ fromString $ printf "Transaction search from %s: %s" (show $ remoteHost req) (T.unpack search) - liftIO $ P.withResource pool $ \c -> - fmap (fmap mkSummary) - $ runBeamPostgresDebug (logger Debug . fromString) c - $ runSelectReturningList - $ select - $ limit_ lim - $ offset_ off - $ orderBy_ (desc_ . _block_height . view _2) - $ do - tx <- all_ (_cddb_transactions database) - tx2 <- leftJoin_ - (all_ $ _cddb_transactions database) - (\tx2 -> (_tx_pactId tx2 ==. just_ (_tx_requestKey tx)) &&. - (_tx_code tx2 `like_` val_ (Just searchString))) - blk <- all_ (_cddb_blocks database) - guard_ (_tx_block tx `references_` blk) - guard_ (_tx_code tx `like_` val_ (Just searchString)) - return (tx,blk,tx2) - where - lim = maybe 10 (min 100 . unLimit) limit - off = maybe 0 unOffset offset - mkSummary (tx,blk,tx2) = TxSummary - { _txSummary_chain = fromIntegral (_tx_chainId tx) - , _txSummary_height = fromIntegral (_block_height blk) - , _txSummary_blockHash = unDbHash (unBlockId $ _tx_block tx) - , _txSummary_creationTime = (_tx_creationTime tx) - , _txSummary_requestKey = unDbHash (_tx_requestKey tx) - , _txSummary_sender = (_tx_sender tx) - , _txSummary_code = maybe (maybe Nothing _tx_code tx2) Just $ _tx_code tx - , _txSummary_continuation = unPgJsonb <$> (_tx_continuation tx) - , _txSummary_result = maybe TxFailed (const TxSucceeded) (_tx_goodResult tx) - } - searchString = "%" <> search <> "%" - -throw404 :: MonadError ServerError m => ByteString -> m a -throw404 msg = throwError $ err404 { errBody = msg } - -txHandler - :: LogFunctionIO Text - -> P.Pool Connection - -> Maybe RequestKey - -> Handler TxDetail -txHandler _ _ Nothing = throw404 "You must specify a search string" -txHandler logger pool (Just (RequestKey rk)) = - may404 $ liftIO $ P.withResource pool $ \c -> - runBeamPostgresDebug (logger Debug . 
T.pack) c $ do - r <- runSelectReturningOne $ select $ do - tx <- all_ (_cddb_transactions database) - blk <- all_ (_cddb_blocks database) - guard_ (_tx_block tx `references_` blk) - guard_ (_tx_requestKey tx ==. val_ (DbHash rk)) - return (tx,blk) - evs <- runSelectReturningList $ select $ do - ev <- all_ (_cddb_events database) - guard_ (_ev_requestkey ev ==. val_ (RKCB_RequestKey $ DbHash rk)) - return ev - return $ (`fmap` r) $ \(tx,blk) -> TxDetail - { _txDetail_ttl = fromIntegral $ _tx_ttl tx - , _txDetail_gasLimit = fromIntegral $ _tx_gasLimit tx - , _txDetail_gasPrice = _tx_gasPrice tx - , _txDetail_nonce = _tx_nonce tx - , _txDetail_pactId = unDbHash <$> _tx_pactId tx - , _txDetail_rollback = _tx_rollback tx - , _txDetail_step = fromIntegral <$> _tx_step tx - , _txDetail_data = unMaybeValue $ _tx_data tx - , _txDetail_proof = _tx_proof tx - , _txDetail_gas = fromIntegral $ _tx_gas tx - , _txDetail_result = - maybe (unMaybeValue $ _tx_badResult tx) unPgJsonb $ - _tx_goodResult tx - , _txDetail_logs = fromMaybe "" $ _tx_logs tx - , _txDetail_metadata = unMaybeValue $ _tx_metadata tx - , _txDetail_continuation = unPgJsonb <$> _tx_continuation tx - , _txDetail_txid = maybe 0 fromIntegral $ _tx_txid tx - , _txDetail_chain = fromIntegral $ _tx_chainId tx - , _txDetail_height = fromIntegral $ _block_height blk - , _txDetail_blockTime = _block_creationTime blk - , _txDetail_blockHash = unDbHash $ unBlockId $ _tx_block tx - , _txDetail_creationTime = _tx_creationTime tx - , _txDetail_requestKey = unDbHash $ _tx_requestKey tx - , _txDetail_sender = _tx_sender tx - , _txDetail_code = _tx_code tx - , _txDetail_success = - maybe False (const True) $ _tx_goodResult tx - , _txDetail_events = map toTxEvent evs - } - - where - unMaybeValue = maybe Null unPgJsonb - toTxEvent ev = - TxEvent (_ev_qualName ev) (unPgJsonb $ _ev_params ev) - may404 a = a >>= maybe (throw404 "Tx not found") return - -evHandler - :: LogFunctionIO Text - -> P.Pool Connection - -> Maybe Limit - -> Maybe Offset - -> Maybe Text -- ^ fulltext search - -> Maybe EventParam - -> Maybe EventName - -> Handler [EventDetail] -evHandler logger pool limit offset qSearch qParam qName = - liftIO $ P.withResource pool $ \c -> do - r <- runBeamPostgresDebug (logger Debug . T.pack) c $ - runSelectReturningList $ select $ - limit_ lim $ offset_ off $ orderBy_ getOrder $ do - tx <- all_ (_cddb_transactions database) - blk <- all_ (_cddb_blocks database) - ev <- all_ (_cddb_events database) - guard_ (_tx_block tx `references_` blk) - guard_ (TransactionId (coerce $ _ev_requestkey ev) (_tx_block tx) `references_` tx) - whenArg qSearch $ \s -> guard_ - ((_ev_qualName ev `like_` val_ (searchString s)) ||. - (_ev_paramText ev `like_` val_ (searchString s)) - ) - whenArg qName $ \(EventName n) -> guard_ (_ev_qualName ev `like_` val_ (searchString n)) - whenArg qParam $ \(EventParam p) -> guard_ (_ev_paramText ev `like_` val_ (searchString p)) - return (tx,blk,ev) - return $ (`map` r) $ \(tx,blk,ev) -> EventDetail - { _evDetail_name = _ev_qualName ev - , _evDetail_params = unPgJsonb $ _ev_params ev - , _evDetail_moduleHash = _ev_moduleHash ev - , _evDetail_chain = fromIntegral $ _tx_chainId tx - , _evDetail_height = fromIntegral $ _block_height blk - , _evDetail_blockTime = _block_creationTime blk - , _evDetail_blockHash = unDbHash $ unBlockId $ _tx_block tx - , _evDetail_requestKey = unDbHash $ _tx_requestKey tx - , _evDetail_idx = fromIntegral $ _ev_idx ev - } - where - whenArg p a = maybe (return ()) a p - lim = maybe 10 (min 100 . 
unLimit) limit - off = maybe 0 unOffset offset - getOrder (tx,blk,ev) = - (desc_ $ _block_height blk - ,asc_ $ _tx_chainId tx - ,desc_ $ _tx_txid tx - ,asc_ $ _ev_idx ev) - searchString search = "%" <> search <> "%" - -data h :. t = h :. t deriving (Eq,Ord,Show,Read,Typeable) -infixr 3 :. - -type instance QExprToIdentity (a :. b) = (QExprToIdentity a) :. (QExprToIdentity b) -type instance QExprToField (a :. b) = (QExprToField a) :. (QExprToField b) - - -queryRecentTxs :: LogFunctionIO Text -> P.Pool Connection -> IO [TxSummary] -queryRecentTxs logger pool = do - liftIO $ logger Info "Getting recent transactions" - P.withResource pool $ \c -> do - res <- runBeamPostgresDebug (logger Debug . T.pack) c $ - runSelectReturningList $ select $ do - limit_ 20 $ orderBy_ (desc_ . getHeight) $ do - tx <- all_ (_cddb_transactions database) - blk <- all_ (_cddb_blocks database) - guard_ (_tx_block tx `references_` blk) - return - ( (_tx_chainId tx) - , (_block_height blk) - , (unBlockId $ _tx_block tx) - , (_tx_creationTime tx) - , (_tx_requestKey tx) - , (_tx_sender tx) - , ((_tx_code tx) - , (_tx_continuation tx) - , (_tx_goodResult tx) - )) - return $ mkSummary <$> res - where - getHeight (_,a,_,_,_,_,_) = a - mkSummary (a,b,c,d,e,f,(g,h,i)) = TxSummary (fromIntegral a) (fromIntegral b) (unDbHash c) d (unDbHash e) f g (unPgJsonb <$> h) (maybe TxFailed (const TxSucceeded) i) - -getTransactionCount :: LogFunctionIO Text -> P.Pool Connection -> IO (Maybe Int64) -getTransactionCount logger pool = do - P.withResource pool $ \c -> do - runBeamPostgresDebug (logger Debug . T.pack) c $ runSelectReturningOne $ select $ - aggregate_ (\_ -> as_ @Int64 countAll_) (all_ (_cddb_transactions database)) - -data RecentTxs = RecentTxs - { _recentTxs_txs :: Seq TxSummary - } deriving (Eq,Show) - -getSummaries :: RecentTxs -> [TxSummary] -getSummaries (RecentTxs s) = toList s - -addNewTransactions :: Seq TxSummary -> RecentTxs -> RecentTxs -addNewTransactions txs (RecentTxs s1) = RecentTxs s2 - where - maxTransactions = 10 - s2 = S.take maxTransactions $ txs <> s1 - -unPgJsonb :: PgJSONB a -> a -unPgJsonb (PgJSONB v) = v diff --git a/exec/Main.hs b/exec/Main.hs deleted file mode 100644 index 093d056e..00000000 --- a/exec/Main.hs +++ /dev/null @@ -1,97 +0,0 @@ -{-# LANGUAGE BangPatterns #-} -{-# LANGUAGE LambdaCase #-} -{-# LANGUAGE NumericUnderscores #-} -{-# LANGUAGE OverloadedStrings #-} - -module Main where - -import Chainweb.Api.ChainId (ChainId(..)) -import Chainweb.Api.NodeInfo -import Chainweb.Backfill (backfill) -import Chainweb.Database (initializeTables) -import Chainweb.Env -import Chainweb.FillEvents (fillEvents) -import Chainweb.Gaps -import Chainweb.Listen (listen) -import Chainweb.Lookups (getNodeInfo) -import Chainweb.RichList (richList) -import Chainweb.Server (apiServer) -import Chainweb.Single (single) -import Control.Lens -import Control.Monad (unless) -import Data.Bifunctor -import qualified Data.Pool as P -import Data.String -import Network.Connection -import Network.HTTP.Client hiding (withConnection) -import Network.HTTP.Client.TLS -import Options.Applicative -import System.Directory -import System.Exit -import System.Logger hiding (logg) -import System.FilePath -import Text.Printf - ---- - -main :: IO () -main = do - args <- execParser opts - withHandleBackend backendConfig $ \backend -> - withLogger (config (getLevel args)) backend $ \logger -> do - let logg = loggerFunIO logger - case args of - RichListArgs (NodeDbPath mfp) _ -> do - fp <- case mfp of - Nothing -> do - h <- getHomeDirectory 
- let h' = h </> ".local/share" - logg Info $ "Constructing rich list using default db-path: " <> fromString h' - return h' - Just fp -> do - logg Info $ "Constructing rich list using given db-path: " <> fromString fp - return fp - richList logg fp - Args c pgc us u _ ms -> do - logg Info $ "Using database: " <> fromString (show pgc) - logg Info $ "Service API: " <> fromString (showUrlScheme us) - logg Info $ "P2P API: " <> fromString (showUrlScheme (UrlScheme Https u)) - withPool pgc $ \pool -> do - P.withResource pool (unless (isIndexedDisabled c) . initializeTables logg ms) - logg Info "DB Tables Initialized" - let mgrSettings = mkManagerSettings (TLSSettingsSimple True False False) Nothing - m <- newManager mgrSettings - getNodeInfo m us >>= \case - Left e -> logg Error (fromString $ printf "Unable to connect to %s /info endpoint%s" (showUrlScheme us) e) >> exitFailure - Right ni -> do - let !mcids = map (second (map (ChainId . fst))) <$> _nodeInfo_graphs ni - case mcids of - Nothing -> logg Error "Node did not have graph information" >> exitFailure - Just cids -> do - let !env = Env m pool us u ni cids logg - case c of - Listen -> listen env - Backfill as -> backfill env as - Fill as -> gaps env as - Single cid h -> single env cid h - FillEvents as et -> fillEvents env as et - Server serverEnv -> apiServer env serverEnv - where - opts = info ((richListP <|> envP) <**> helper) - (fullDesc <> header "chainweb-data - Processing and analysis of Chainweb data") - config level = defaultLoggerConfig - & loggerConfigThreshold .~ level - backendConfig = defaultHandleBackendConfig - isIndexedDisabled = \case - Fill (FillArgs _ p) -> p - _ -> False - getLevel = \case - Args _ _ _ _ level _ -> level - RichListArgs _ level -> level - - -{- -λ> :main single --chain 2 --height 1487570 --service-host api.chainweb.com --p2p-host us-e3.chainweb.com --dbname chainweb-data --service-port 443 --service-https -λ> :main single --chain 0 --height 1494311 --service-host api.chainweb.com --p2p-host us-e3.chainweb.com --dbname chainweb-data --service-port 443 --service-https -λ> :main server --port 9999 --service-host api.chainweb.com --p2p-host us-e3.chainweb.com --dbname chainweb-data --service-port 443 --service-https --} diff --git a/flake.lock b/flake.lock new file mode 100644 index 00000000..49e2b2a6 --- /dev/null +++ b/flake.lock @@ -0,0 +1,902 @@ +{ + "nodes": { + "HTTP": { + "flake": false, + "locked": { + "lastModified": 1451647621, + "narHash": "sha256-oHIyw3x0iKBexEo49YeUDV1k74ZtyYKGR2gNJXXRxts=", + "owner": "phadej", + "repo": "HTTP", + "rev": "9bc0996d412fef1787449d841277ef663ad9a915", + "type": "github" + }, + "original": { + "owner": "phadej", + "repo": "HTTP", + "type": "github" + } + }, + "blank": { + "locked": { + "lastModified": 1625557891, + "narHash": "sha256-O8/MWsPBGhhyPoPLHZAuoZiiHo9q6FLlEeIDEXuj6T4=", + "owner": "divnix", + "repo": "blank", + "rev": "5a5d2684073d9f563072ed07c871d577a6c614a8", + "type": "github" + }, + "original": { + "owner": "divnix", + "repo": "blank", + "type": "github" + } + }, + "cabal-32": { + "flake": false, + "locked": { + "lastModified": 1603716527, + "narHash": "sha256-X0TFfdD4KZpwl0Zr6x+PLxUt/VyKQfX7ylXHdmZIL+w=", + "owner": "haskell", + "repo": "cabal", + "rev": "48bf10787e27364730dd37a42b603cee8d6af7ee", + "type": "github" + }, + "original": { + "owner": "haskell", + "ref": "3.2", + "repo": "cabal", + "type": "github" + } + }, + "cabal-34": { + "flake": false, + "locked": { + "lastModified": 1640353650, + "narHash": 
"sha256-N1t6M3/wqj90AEdRkeC8i923gQYUpzSr8b40qVOZ1Rk=", + "owner": "haskell", + "repo": "cabal", + "rev": "942639c18c0cd8ec53e0a6f8d120091af35312cd", + "type": "github" + }, + "original": { + "owner": "haskell", + "ref": "3.4", + "repo": "cabal", + "type": "github" + } + }, + "cabal-36": { + "flake": false, + "locked": { + "lastModified": 1641652457, + "narHash": "sha256-BlFPKP4C4HRUJeAbdembX1Rms1LD380q9s0qVDeoAak=", + "owner": "haskell", + "repo": "cabal", + "rev": "f27667f8ec360c475027dcaee0138c937477b070", + "type": "github" + }, + "original": { + "owner": "haskell", + "ref": "3.6", + "repo": "cabal", + "type": "github" + } + }, + "cardano-shell": { + "flake": false, + "locked": { + "lastModified": 1608537748, + "narHash": "sha256-PulY1GfiMgKVnBci3ex4ptk2UNYMXqGjJOxcPy2KYT4=", + "owner": "input-output-hk", + "repo": "cardano-shell", + "rev": "9392c75087cb9a3d453998f4230930dea3a95725", + "type": "github" + }, + "original": { + "owner": "input-output-hk", + "repo": "cardano-shell", + "type": "github" + } + }, + "devshell": { + "inputs": { + "flake-utils": [ + "haskellNix", + "tullia", + "std", + "flake-utils" + ], + "nixpkgs": [ + "haskellNix", + "tullia", + "std", + "nixpkgs" + ] + }, + "locked": { + "lastModified": 1663445644, + "narHash": "sha256-+xVlcK60x7VY1vRJbNUEAHi17ZuoQxAIH4S4iUFUGBA=", + "owner": "numtide", + "repo": "devshell", + "rev": "e3dc3e21594fe07bdb24bdf1c8657acaa4cb8f66", + "type": "github" + }, + "original": { + "owner": "numtide", + "repo": "devshell", + "type": "github" + } + }, + "dmerge": { + "inputs": { + "nixlib": [ + "haskellNix", + "tullia", + "std", + "nixpkgs" + ], + "yants": [ + "haskellNix", + "tullia", + "std", + "yants" + ] + }, + "locked": { + "lastModified": 1659548052, + "narHash": "sha256-fzI2gp1skGA8mQo/FBFrUAtY0GQkAIAaV/V127TJPyY=", + "owner": "divnix", + "repo": "data-merge", + "rev": "d160d18ce7b1a45b88344aa3f13ed1163954b497", + "type": "github" + }, + "original": { + "owner": "divnix", + "repo": "data-merge", + "type": "github" + } + }, + "flake-compat": { + "flake": false, + "locked": { + "lastModified": 1672831974, + "narHash": "sha256-z9k3MfslLjWQfnjBtEtJZdq3H7kyi2kQtUThfTgdRk0=", + "owner": "input-output-hk", + "repo": "flake-compat", + "rev": "45f2638735f8cdc40fe302742b79f248d23eb368", + "type": "github" + }, + "original": { + "owner": "input-output-hk", + "ref": "hkm/gitlab-fix", + "repo": "flake-compat", + "type": "github" + } + }, + "flake-compat_2": { + "flake": false, + "locked": { + "lastModified": 1650374568, + "narHash": "sha256-Z+s0J8/r907g149rllvwhb4pKi8Wam5ij0st8PwAh+E=", + "owner": "edolstra", + "repo": "flake-compat", + "rev": "b4a34015c698c7793d592d66adbab377907a2be8", + "type": "github" + }, + "original": { + "owner": "edolstra", + "repo": "flake-compat", + "type": "github" + } + }, + "flake-utils": { + "locked": { + "lastModified": 1667395993, + "narHash": "sha256-nuEHfE/LcWyuSWnS8t12N1wc105Qtau+/OdUAjtQ0rA=", + "owner": "numtide", + "repo": "flake-utils", + "rev": "5aed5285a952e0b949eb3ba02c12fa4fcfef535f", + "type": "github" + }, + "original": { + "owner": "numtide", + "repo": "flake-utils", + "type": "github" + } + }, + "flake-utils_2": { + "locked": { + "lastModified": 1644229661, + "narHash": "sha256-1YdnJAsNy69bpcjuoKdOYQX0YxZBiCYZo4Twxerqv7k=", + "owner": "numtide", + "repo": "flake-utils", + "rev": "3cecb5b042f7f209c56ffd8371b2711a290ec797", + "type": "github" + }, + "original": { + "owner": "numtide", + "repo": "flake-utils", + "type": "github" + } + }, + "flake-utils_3": { + "locked": { + "lastModified": 1653893745, 
+ "narHash": "sha256-0jntwV3Z8//YwuOjzhV2sgJJPt+HY6KhU7VZUL0fKZQ=", + "owner": "numtide", + "repo": "flake-utils", + "rev": "1ed9fb1935d260de5fe1c2f7ee0ebaae17ed2fa1", + "type": "github" + }, + "original": { + "owner": "numtide", + "repo": "flake-utils", + "type": "github" + } + }, + "flake-utils_4": { + "locked": { + "lastModified": 1659877975, + "narHash": "sha256-zllb8aq3YO3h8B/U0/J1WBgAL8EX5yWf5pMj3G0NAmc=", + "owner": "numtide", + "repo": "flake-utils", + "rev": "c0e246b9b83f637f4681389ecabcb2681b4f3af0", + "type": "github" + }, + "original": { + "owner": "numtide", + "repo": "flake-utils", + "type": "github" + } + }, + "flake-utils_5": { + "locked": { + "lastModified": 1653893745, + "narHash": "sha256-0jntwV3Z8//YwuOjzhV2sgJJPt+HY6KhU7VZUL0fKZQ=", + "owner": "numtide", + "repo": "flake-utils", + "rev": "1ed9fb1935d260de5fe1c2f7ee0ebaae17ed2fa1", + "type": "github" + }, + "original": { + "owner": "numtide", + "repo": "flake-utils", + "type": "github" + } + }, + "ghc-8.6.5-iohk": { + "flake": false, + "locked": { + "lastModified": 1600920045, + "narHash": "sha256-DO6kxJz248djebZLpSzTGD6s8WRpNI9BTwUeOf5RwY8=", + "owner": "input-output-hk", + "repo": "ghc", + "rev": "95713a6ecce4551240da7c96b6176f980af75cae", + "type": "github" + }, + "original": { + "owner": "input-output-hk", + "ref": "release/8.6.5-iohk", + "repo": "ghc", + "type": "github" + } + }, + "gomod2nix": { + "inputs": { + "nixpkgs": "nixpkgs_2", + "utils": "utils" + }, + "locked": { + "lastModified": 1655245309, + "narHash": "sha256-d/YPoQ/vFn1+GTmSdvbSBSTOai61FONxB4+Lt6w/IVI=", + "owner": "tweag", + "repo": "gomod2nix", + "rev": "40d32f82fc60d66402eb0972e6e368aeab3faf58", + "type": "github" + }, + "original": { + "owner": "tweag", + "repo": "gomod2nix", + "type": "github" + } + }, + "hackage": { + "flake": false, + "locked": { + "lastModified": 1675211313, + "narHash": "sha256-+VJq1EUbeXrd9ph/vN+s5+bdCk2kRbcxpVo4MjbIiSc=", + "owner": "input-output-hk", + "repo": "hackage.nix", + "rev": "030f56486a8d0f38f36e0bfbc5aab00db08245e4", + "type": "github" + }, + "original": { + "owner": "input-output-hk", + "repo": "hackage.nix", + "type": "github" + } + }, + "haskellNix": { + "inputs": { + "HTTP": "HTTP", + "cabal-32": "cabal-32", + "cabal-34": "cabal-34", + "cabal-36": "cabal-36", + "cardano-shell": "cardano-shell", + "flake-compat": "flake-compat", + "flake-utils": "flake-utils_2", + "ghc-8.6.5-iohk": "ghc-8.6.5-iohk", + "hackage": "hackage", + "hpc-coveralls": "hpc-coveralls", + "hydra": "hydra", + "iserv-proxy": "iserv-proxy", + "nixpkgs": [ + "haskellNix", + "nixpkgs-unstable" + ], + "nixpkgs-2003": "nixpkgs-2003", + "nixpkgs-2105": "nixpkgs-2105", + "nixpkgs-2111": "nixpkgs-2111", + "nixpkgs-2205": "nixpkgs-2205", + "nixpkgs-2211": "nixpkgs-2211", + "nixpkgs-unstable": "nixpkgs-unstable", + "old-ghc-nix": "old-ghc-nix", + "stackage": "stackage", + "tullia": "tullia" + }, + "locked": { + "lastModified": 1675212661, + "narHash": "sha256-WgxnuJqjIyXG8qbIXD1WiuYrE7kVPhsa/+yPtLRPyjk=", + "owner": "input-output-hk", + "repo": "haskell.nix", + "rev": "491800d10993761e50f1d9b1f0de02e73f13ad23", + "type": "github" + }, + "original": { + "owner": "input-output-hk", + "repo": "haskell.nix", + "type": "github" + } + }, + "hpc-coveralls": { + "flake": false, + "locked": { + "lastModified": 1607498076, + "narHash": "sha256-8uqsEtivphgZWYeUo5RDUhp6bO9j2vaaProQxHBltQk=", + "owner": "sevanspowell", + "repo": "hpc-coveralls", + "rev": "14df0f7d229f4cd2e79f8eabb1a740097fdfa430", + "type": "github" + }, + "original": { + "owner": 
"sevanspowell", + "repo": "hpc-coveralls", + "type": "github" + } + }, + "hydra": { + "inputs": { + "nix": "nix", + "nixpkgs": [ + "haskellNix", + "hydra", + "nix", + "nixpkgs" + ] + }, + "locked": { + "lastModified": 1646878427, + "narHash": "sha256-KtbrofMtN8GlM7D+n90kixr7QpSlVmdN+vK5CA/aRzc=", + "owner": "NixOS", + "repo": "hydra", + "rev": "28b682b85b7efc5cf7974065792a1f22203a5927", + "type": "github" + }, + "original": { + "id": "hydra", + "type": "indirect" + } + }, + "iserv-proxy": { + "flake": false, + "locked": { + "lastModified": 1670983692, + "narHash": "sha256-avLo34JnI9HNyOuauK5R69usJm+GfW3MlyGlYxZhTgY=", + "ref": "hkm/remote-iserv", + "rev": "50d0abb3317ac439a4e7495b185a64af9b7b9300", + "revCount": 10, + "type": "git", + "url": "https://gitlab.haskell.org/hamishmack/iserv-proxy.git" + }, + "original": { + "ref": "hkm/remote-iserv", + "type": "git", + "url": "https://gitlab.haskell.org/hamishmack/iserv-proxy.git" + } + }, + "lowdown-src": { + "flake": false, + "locked": { + "lastModified": 1633514407, + "narHash": "sha256-Dw32tiMjdK9t3ETl5fzGrutQTzh2rufgZV4A/BbxuD4=", + "owner": "kristapsdz", + "repo": "lowdown", + "rev": "d2c2b44ff6c27b936ec27358a2653caaef8f73b8", + "type": "github" + }, + "original": { + "owner": "kristapsdz", + "repo": "lowdown", + "type": "github" + } + }, + "mdbook-kroki-preprocessor": { + "flake": false, + "locked": { + "lastModified": 1661755005, + "narHash": "sha256-1TJuUzfyMycWlOQH67LR63/ll2GDZz25I3JfScy/Jnw=", + "owner": "JoelCourtney", + "repo": "mdbook-kroki-preprocessor", + "rev": "93adb5716d035829efed27f65f2f0833a7d3e76f", + "type": "github" + }, + "original": { + "owner": "JoelCourtney", + "repo": "mdbook-kroki-preprocessor", + "type": "github" + } + }, + "n2c": { + "inputs": { + "flake-utils": "flake-utils_5", + "nixpkgs": [ + "haskellNix", + "tullia", + "std", + "nixpkgs" + ] + }, + "locked": { + "lastModified": 1665039323, + "narHash": "sha256-SAh3ZjFGsaCI8FRzXQyp56qcGdAqgKEfJWPCQ0Sr7tQ=", + "owner": "nlewo", + "repo": "nix2container", + "rev": "b008fe329ffb59b67bf9e7b08ede6ee792f2741a", + "type": "github" + }, + "original": { + "owner": "nlewo", + "repo": "nix2container", + "type": "github" + } + }, + "nix": { + "inputs": { + "lowdown-src": "lowdown-src", + "nixpkgs": "nixpkgs", + "nixpkgs-regression": "nixpkgs-regression" + }, + "locked": { + "lastModified": 1643066034, + "narHash": "sha256-xEPeMcNJVOeZtoN+d+aRwolpW8mFSEQx76HTRdlhPhg=", + "owner": "NixOS", + "repo": "nix", + "rev": "a1cd7e58606a41fcf62bf8637804cf8306f17f62", + "type": "github" + }, + "original": { + "owner": "NixOS", + "ref": "2.6.0", + "repo": "nix", + "type": "github" + } + }, + "nix-nomad": { + "inputs": { + "flake-compat": "flake-compat_2", + "flake-utils": [ + "haskellNix", + "tullia", + "nix2container", + "flake-utils" + ], + "gomod2nix": "gomod2nix", + "nixpkgs": [ + "haskellNix", + "tullia", + "nixpkgs" + ], + "nixpkgs-lib": [ + "haskellNix", + "tullia", + "nixpkgs" + ] + }, + "locked": { + "lastModified": 1658277770, + "narHash": "sha256-T/PgG3wUn8Z2rnzfxf2VqlR1CBjInPE0l1yVzXxPnt0=", + "owner": "tristanpemble", + "repo": "nix-nomad", + "rev": "054adcbdd0a836ae1c20951b67ed549131fd2d70", + "type": "github" + }, + "original": { + "owner": "tristanpemble", + "repo": "nix-nomad", + "type": "github" + } + }, + "nix2container": { + "inputs": { + "flake-utils": "flake-utils_3", + "nixpkgs": "nixpkgs_3" + }, + "locked": { + "lastModified": 1658567952, + "narHash": "sha256-XZ4ETYAMU7XcpEeAFP3NOl9yDXNuZAen/aIJ84G+VgA=", + "owner": "nlewo", + "repo": "nix2container", + "rev": 
"60bb43d405991c1378baf15a40b5811a53e32ffa", + "type": "github" + }, + "original": { + "owner": "nlewo", + "repo": "nix2container", + "type": "github" + } + }, + "nixago": { + "inputs": { + "flake-utils": [ + "haskellNix", + "tullia", + "std", + "flake-utils" + ], + "nixago-exts": [ + "haskellNix", + "tullia", + "std", + "blank" + ], + "nixpkgs": [ + "haskellNix", + "tullia", + "std", + "nixpkgs" + ] + }, + "locked": { + "lastModified": 1661824785, + "narHash": "sha256-/PnwdWoO/JugJZHtDUioQp3uRiWeXHUdgvoyNbXesz8=", + "owner": "nix-community", + "repo": "nixago", + "rev": "8c1f9e5f1578d4b2ea989f618588d62a335083c3", + "type": "github" + }, + "original": { + "owner": "nix-community", + "repo": "nixago", + "type": "github" + } + }, + "nixpkgs": { + "locked": { + "lastModified": 1632864508, + "narHash": "sha256-d127FIvGR41XbVRDPVvozUPQ/uRHbHwvfyKHwEt5xFM=", + "owner": "NixOS", + "repo": "nixpkgs", + "rev": "82891b5e2c2359d7e58d08849e4c89511ab94234", + "type": "github" + }, + "original": { + "id": "nixpkgs", + "ref": "nixos-21.05-small", + "type": "indirect" + } + }, + "nixpkgs-2003": { + "locked": { + "lastModified": 1620055814, + "narHash": "sha256-8LEHoYSJiL901bTMVatq+rf8y7QtWuZhwwpKE2fyaRY=", + "owner": "NixOS", + "repo": "nixpkgs", + "rev": "1db42b7fe3878f3f5f7a4f2dc210772fd080e205", + "type": "github" + }, + "original": { + "owner": "NixOS", + "ref": "nixpkgs-20.03-darwin", + "repo": "nixpkgs", + "type": "github" + } + }, + "nixpkgs-2105": { + "locked": { + "lastModified": 1659914493, + "narHash": "sha256-lkA5X3VNMKirvA+SUzvEhfA7XquWLci+CGi505YFAIs=", + "owner": "NixOS", + "repo": "nixpkgs", + "rev": "022caabb5f2265ad4006c1fa5b1ebe69fb0c3faf", + "type": "github" + }, + "original": { + "owner": "NixOS", + "ref": "nixpkgs-21.05-darwin", + "repo": "nixpkgs", + "type": "github" + } + }, + "nixpkgs-2111": { + "locked": { + "lastModified": 1659446231, + "narHash": "sha256-hekabNdTdgR/iLsgce5TGWmfIDZ86qjPhxDg/8TlzhE=", + "owner": "NixOS", + "repo": "nixpkgs", + "rev": "eabc38219184cc3e04a974fe31857d8e0eac098d", + "type": "github" + }, + "original": { + "owner": "NixOS", + "ref": "nixpkgs-21.11-darwin", + "repo": "nixpkgs", + "type": "github" + } + }, + "nixpkgs-2205": { + "locked": { + "lastModified": 1663981975, + "narHash": "sha256-TKaxWAVJR+a5JJauKZqibmaM5e/Pi5tBDx9s8fl/kSE=", + "owner": "NixOS", + "repo": "nixpkgs", + "rev": "309faedb8338d3ae8ad8f1043b3ccf48c9cc2970", + "type": "github" + }, + "original": { + "owner": "NixOS", + "ref": "nixpkgs-22.05-darwin", + "repo": "nixpkgs", + "type": "github" + } + }, + "nixpkgs-2211": { + "locked": { + "lastModified": 1669997163, + "narHash": "sha256-vhjC0kZMFoN6jzK0GR+tBzKi5KgBXgehadfidW8+Va4=", + "owner": "NixOS", + "repo": "nixpkgs", + "rev": "6f87491a54d8d64d30af6663cb3bf5d2ee7db958", + "type": "github" + }, + "original": { + "owner": "NixOS", + "ref": "nixpkgs-22.11-darwin", + "repo": "nixpkgs", + "type": "github" + } + }, + "nixpkgs-regression": { + "locked": { + "lastModified": 1643052045, + "narHash": "sha256-uGJ0VXIhWKGXxkeNnq4TvV3CIOkUJ3PAoLZ3HMzNVMw=", + "owner": "NixOS", + "repo": "nixpkgs", + "rev": "215d4d0fd80ca5163643b03a33fde804a29cc1e2", + "type": "github" + }, + "original": { + "id": "nixpkgs", + "rev": "215d4d0fd80ca5163643b03a33fde804a29cc1e2", + "type": "indirect" + } + }, + "nixpkgs-unstable": { + "locked": { + "lastModified": 1663905476, + "narHash": "sha256-0CSwRKaYravh9v6qSlBpM0gNg0UhKT2lL7Yn6Zbx7UM=", + "owner": "NixOS", + "repo": "nixpkgs", + "rev": "e14f9fb57315f0d4abde222364f19f88c77d2b79", + "type": "github" + }, + 
"original": { + "owner": "NixOS", + "ref": "nixpkgs-unstable", + "repo": "nixpkgs", + "type": "github" + } + }, + "nixpkgs_2": { + "locked": { + "lastModified": 1653581809, + "narHash": "sha256-Uvka0V5MTGbeOfWte25+tfRL3moECDh1VwokWSZUdoY=", + "owner": "NixOS", + "repo": "nixpkgs", + "rev": "83658b28fe638a170a19b8933aa008b30640fbd1", + "type": "github" + }, + "original": { + "owner": "NixOS", + "ref": "nixos-unstable", + "repo": "nixpkgs", + "type": "github" + } + }, + "nixpkgs_3": { + "locked": { + "lastModified": 1654807842, + "narHash": "sha256-ADymZpr6LuTEBXcy6RtFHcUZdjKTBRTMYwu19WOx17E=", + "owner": "NixOS", + "repo": "nixpkgs", + "rev": "fc909087cc3386955f21b4665731dbdaceefb1d8", + "type": "github" + }, + "original": { + "owner": "NixOS", + "repo": "nixpkgs", + "type": "github" + } + }, + "nixpkgs_4": { + "locked": { + "lastModified": 1665087388, + "narHash": "sha256-FZFPuW9NWHJteATOf79rZfwfRn5fE0wi9kRzvGfDHPA=", + "owner": "nixos", + "repo": "nixpkgs", + "rev": "95fda953f6db2e9496d2682c4fc7b82f959878f7", + "type": "github" + }, + "original": { + "owner": "nixos", + "ref": "nixpkgs-unstable", + "repo": "nixpkgs", + "type": "github" + } + }, + "old-ghc-nix": { + "flake": false, + "locked": { + "lastModified": 1631092763, + "narHash": "sha256-sIKgO+z7tj4lw3u6oBZxqIhDrzSkvpHtv0Kki+lh9Fg=", + "owner": "angerman", + "repo": "old-ghc-nix", + "rev": "af48a7a7353e418119b6dfe3cd1463a657f342b8", + "type": "github" + }, + "original": { + "owner": "angerman", + "ref": "master", + "repo": "old-ghc-nix", + "type": "github" + } + }, + "root": { + "inputs": { + "flake-utils": "flake-utils", + "haskellNix": "haskellNix", + "nixpkgs": [ + "haskellNix", + "nixpkgs-unstable" + ] + } + }, + "stackage": { + "flake": false, + "locked": { + "lastModified": 1675210232, + "narHash": "sha256-sRYTcYZYJ6s1AriIbndhPV/TtOr/LtMDGuSA1sn8Om0=", + "owner": "input-output-hk", + "repo": "stackage.nix", + "rev": "eeb1f9bd9db2cbbcae9982d5e92c426f49db6328", + "type": "github" + }, + "original": { + "owner": "input-output-hk", + "repo": "stackage.nix", + "type": "github" + } + }, + "std": { + "inputs": { + "blank": "blank", + "devshell": "devshell", + "dmerge": "dmerge", + "flake-utils": "flake-utils_4", + "makes": [ + "haskellNix", + "tullia", + "std", + "blank" + ], + "mdbook-kroki-preprocessor": "mdbook-kroki-preprocessor", + "microvm": [ + "haskellNix", + "tullia", + "std", + "blank" + ], + "n2c": "n2c", + "nixago": "nixago", + "nixpkgs": "nixpkgs_4", + "yants": "yants" + }, + "locked": { + "lastModified": 1665513321, + "narHash": "sha256-D6Pacw9yf/HMs84KYuCxHXnNDL7v43gtcka5URagFqE=", + "owner": "divnix", + "repo": "std", + "rev": "94a90eedb9cfc115b12ae8f6622d9904788559e4", + "type": "github" + }, + "original": { + "owner": "divnix", + "repo": "std", + "type": "github" + } + }, + "tullia": { + "inputs": { + "nix-nomad": "nix-nomad", + "nix2container": "nix2container", + "nixpkgs": [ + "haskellNix", + "nixpkgs" + ], + "std": "std" + }, + "locked": { + "lastModified": 1668711738, + "narHash": "sha256-CBjky16o9pqsGE1bWu6nRlRajgSXMEk+yaFQLibqXcE=", + "owner": "input-output-hk", + "repo": "tullia", + "rev": "ead1f515c251f0e060060ef0e2356a51d3dfe4b0", + "type": "github" + }, + "original": { + "owner": "input-output-hk", + "repo": "tullia", + "type": "github" + } + }, + "utils": { + "locked": { + "lastModified": 1653893745, + "narHash": "sha256-0jntwV3Z8//YwuOjzhV2sgJJPt+HY6KhU7VZUL0fKZQ=", + "owner": "numtide", + "repo": "flake-utils", + "rev": "1ed9fb1935d260de5fe1c2f7ee0ebaae17ed2fa1", + "type": "github" + }, + "original": { 
+ "owner": "numtide", + "repo": "flake-utils", + "type": "github" + } + }, + "yants": { + "inputs": { + "nixpkgs": [ + "haskellNix", + "tullia", + "std", + "nixpkgs" + ] + }, + "locked": { + "lastModified": 1660507851, + "narHash": "sha256-BKjq7JnVuUR/xDtcv6Vm9GYGKAblisXrAgybor9hT/s=", + "owner": "divnix", + "repo": "yants", + "rev": "0b895ca02a8fa72bad50b454cb3e7d8a66407c96", + "type": "github" + }, + "original": { + "owner": "divnix", + "repo": "yants", + "type": "github" + } + } + }, + "root": "root", + "version": 7 +} diff --git a/flake.nix b/flake.nix new file mode 100644 index 00000000..2652655f --- /dev/null +++ b/flake.nix @@ -0,0 +1,36 @@ +{ + description = "Data ingestion for Chainweb"; + + inputs = { + nixpkgs.follows = "haskellNix/nixpkgs-unstable"; + haskellNix.url = "github:input-output-hk/haskell.nix"; + flake-utils.url = "github:numtide/flake-utils"; + }; + + nixConfig = { + # This sets the flake to use the IOG nix cache. + # Nix should ask for permission before using it, + # but remove it here if you do not want it to. + extra-substituters = ["https://cache.iog.io" "https://nixcache.chainweb.com"]; + extra-trusted-public-keys = ["hydra.iohk.io:f/Ea+s+dFdN+3Y/G+FDgSq+a5NEWhJGzdjvKNGv0/EQ=" "nixcache.chainweb.com:FVN503ABX9F8x8K0ptnc99XEz5SaA4Sks6kNcZn2pBY="]; + allow-import-from-derivation = "true"; + }; + + outputs = { self, nixpkgs, flake-utils, haskellNix }: + flake-utils.lib.eachSystem + [ "x86_64-linux" "x86_64-darwin" + "aarch64-linux" "aarch64-darwin" ] (system: + let + pkgs = import nixpkgs { + inherit system; + inherit (haskellNix) config; + overlays = [ haskellNix.overlay ]; + }; + defaultNix = import ./default.nix { inherit pkgs; }; + flake = defaultNix.flake; + executable = defaultNix.default; + in flake // { + packages.default = executable; + packages.chainweb-data-docker = defaultNix.dockerImage; + }); +} diff --git a/LICENSE b/haskell-src/LICENSE similarity index 100% rename from LICENSE rename to haskell-src/LICENSE diff --git a/haskell-src/bench/Bench.hs b/haskell-src/bench/Bench.hs new file mode 100644 index 00000000..7a18c7b2 --- /dev/null +++ b/haskell-src/bench/Bench.hs @@ -0,0 +1,208 @@ +{-# LANGUAGE LambdaCase #-} +{-# LANGUAGE OverloadedStrings #-} +{-# LANGUAGE RecordWildCards #-} +{-# LANGUAGE TypeApplications #-} +{-# LANGUAGE TypeFamilies #-} +module Main where + +import Control.Concurrent +import Control.Exception +import Data.Char +import Data.ByteString (ByteString) +import qualified Data.ByteString.Char8 as BC +import qualified Data.ByteString.Lazy as BL (ByteString) +import Data.Pool +import Data.String.Conv +import Data.Text (Text) +import qualified Data.Text as T +import Data.Vector (Vector) +import qualified Data.Vector as V +import Database.PostgreSQL.Simple +import Database.PostgreSQL.Simple.Types +import Database.Beam.Backend.SQL +import Database.Beam +-- import qualified Database.Beam.AutoMigrate as BA +import Database.Beam.Postgres +import Database.Beam.Postgres.Syntax +import Options.Applicative +import Text.Printf +import System.Exit +-------------------------------------------------------------------------------- +import ChainwebData.Api +import ChainwebData.Pagination +import ChainwebDb.Database +import ChainwebDb.Queries + +main :: IO () +main = do + execParser opts >>= \(Args pgc ms rb pr) -> do + infoPrint "Running query benchmarks" + withPool pgc $ \pool -> do + withResource pool (bench_initializeTables ms (infoPrint . T.unpack) (errorPrint . 
T.unpack)) >>= \case
+        False -> do
+          errorPrint "Cannot run benchmarks on mangled database schemas"
+          exitFailure
+        True -> do
+          infoPrint "Table check done"
+          case rb of
+            OnlyEvent es -> do
+              infoPrint "Running benchmarks for event search queries"
+              eventsBench pool es >>= V.mapM_ (`printReport` pr)
+            OnlyCode cs -> do
+              infoPrint "Running benchmarks for code search queries"
+              searchTxsBench pool cs >>= V.mapM_ (`printReport` pr)
+            Both es cs -> do
+              infoPrint "Running benchmarks for code search queries"
+              searchTxsBench pool cs >>= V.mapM_ (`printReport` pr)
+              infoPrint "Running benchmarks for event search queries"
+              eventsBench pool es >>= V.mapM_ (`printReport` pr)
+  where
+    opts = info (argsP <**> helper)
+      (fullDesc <> header "chainweb-data benchmarks")
+
+printReport :: BenchResult -> ReportFormat -> IO ()
+printReport br@(BenchResult {..}) = \case
+  Raw -> print br
+  Simple -> do
+    BC.putStrLn "----------RESULT----------"
+    BC.putStrLn $ "Query: " <> bench_query
+    BC.putStrLn bench_execution_time
+    BC.putStrLn bench_planning_time
+    BC.putStrLn "----------RESULT----------"
+
+data Args = Args
+  {
+    args_connectInfo :: ConnectInfo
+  , args_migrate :: Bool
+  , args_run_benches :: RunBenches
+  , args_print_report :: ReportFormat
+  }
+
+data RunBenches = OnlyEvent [Text] | OnlyCode [Text] | Both [Text] [Text]
+
+data ReportFormat = Simple | Raw
+
+infoPrint :: String -> IO ()
+infoPrint = printf "[INFO] %s\n"
+
+debugPrint :: String -> IO ()
+debugPrint = printf "[DEBUG] %s\n"
+
+errorPrint :: String -> IO ()
+errorPrint = printf "[ERROR] %s\n"
+
+argsP :: Parser Args
+argsP = Args <$> connectInfoParser <*> migrationP <*> runBenchesP <*> printReportP
+
+runBenchesP :: Parser RunBenches
+runBenchesP = go <$> many eventBenchP <*> many codeBenchP
+  where
+    go xs [] = OnlyEvent xs
+    go [] ys = OnlyCode ys
+    go xs ys = Both xs ys
+
+eventBenchP :: Parser Text
+eventBenchP = strOption (short 'e' <> long "event-search-query" <> metavar "STRING" <> help "event search query")
+
+codeBenchP :: Parser Text
+codeBenchP = strOption (short 'c' <> long "code-search-query" <> metavar "STRING" <> help "code search query")
+
+printReportP :: Parser ReportFormat
+printReportP = option go (short 'q' <> long "query-report-format" <> metavar "(raw|simple)" <> value Simple <> help "format of the query report")
+  where
+    go = eitherReader $ \s -> case toLower <$> s of
+      "raw" -> Right Raw
+      "simple" -> Right Simple
+      _ -> Left $ printf "not a valid option (%s)" s
+
+migrationP :: Parser Bool
+migrationP =
+  flag True False (short 'm' <> long "migrate" <> help "Run DB migration")
+
+connectInfoParser :: Parser ConnectInfo
+connectInfoParser = ConnectInfo
+  <$> strOption (long "host" <> metavar "HOST" <> help "host for the chainweb-data postgres instance")
+  <*> option auto (long "port" <> metavar "PORT" <> help "port for the chainweb-data postgres instance")
+  <*> strOption (long "user" <> metavar "USER" <> value "postgres" <> help "user for the chainweb-data postgres instance")
+  <*> strOption (long "password" <> metavar "PASSWORD" <> value "" <> help "password for the chainweb-data postgres instance")
+  <*> strOption (long "database" <> metavar "DATABASE" <> help "database for the chainweb-data postgres instance")
+
+-- | A bracket for `Pool` interaction.
+withPool :: ConnectInfo -> (Pool Connection -> IO a) -> IO a
+withPool ci = bracket (getPool (connect ci)) destroyAllResources
+
+-- | Create a `Pool` based on `Connect` settings designated on the command line.
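+--
+-- A hypothetical usage sketch (host, port, and database names below are
+-- placeholders, not values taken from this repository):
+--
+-- > withPool (ConnectInfo "localhost" 5432 "postgres" "" "chainweb-data") $ \pool ->
+-- >   withResource pool $ \conn -> query_ conn "SELECT 1"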
+getPool :: IO Connection -> IO (Pool Connection)
+getPool getConn = do
+  caps <- getNumCapabilities
+  createPool getConn close 1 5 caps
+
+searchTxsBench :: Pool Connection -> [Text] -> IO (Vector BenchResult)
+searchTxsBench pool qs =
+  withResource pool $ \conn -> do
+    V.forM benchParams $ \(l,o,s) -> do
+      let stmt' = prependExplainAnalyze (stmt l o s)
+      res <- query_ @(Only ByteString) conn stmt'
+      return $ getBenchResult (Just s) "Code search" stmt' res
+  where
+    stmt l o s = Query $ toS $ selectStmtString $ searchTxsQueryStmt l o s
+    prependExplainAnalyze = ("EXPLAIN (ANALYZE) " <>)
+    benchParams =
+      V.fromList [ (l,o,s) | l <- (Just . Limit) <$> [40] , o <- [Nothing], s <- qs `onNull` (take 2 searchExamples) ]
+
+onNull :: [a] -> [a] -> [a]
+onNull xs ys = case xs of
+  [] -> ys
+  _ -> xs
+
+eventsBench :: Pool Connection -> [Text] -> IO (Vector BenchResult)
+eventsBench pool qs =
+  withResource pool $ \conn ->
+    V.forM benchParams $ \(l,o,s) -> do
+      let stmt' = prependExplainAnalyze (stmt l o s)
+      res <- query_ @(Only ByteString) conn stmt'
+      return $ getBenchResult s "Event search" stmt' res
+  where
+    stmt l o s = Query $ toS $ selectStmtString $ eventsQueryStmt l o s Nothing (Just $ EventName "coin.TRANSFER")
+    prependExplainAnalyze = ("EXPLAIN (ANALYZE) " <>)
+    benchParams =
+      V.fromList [ (l,o,s) | l <- (Just . Limit) <$> [40] , o <- [Nothing], s <- Just <$> qs `onNull` drop 2 searchExamples ]
+
+getBenchResult :: Maybe Text -> ByteString -> Query -> [Only ByteString] -> BenchResult
+getBenchResult simple_param name q = go . fmap fromOnly . reverse
+  where
+    -- EXPLAIN (ANALYZE) prints the plan followed by the "Planning Time"
+    -- and "Execution Time" lines, so after reversing the output rows the
+    -- execution time comes first.
+    go (ex : pl : report) =
+      BenchResult
+        {
+          bench_query = name
+        , bench_raw_query = q
+        , bench_simplified_query = simple_param
+        , bench_explain_analyze_report = BC.unlines $ reverse report
+        , bench_planning_time = pl
+        , bench_execution_time = ex
+        }
+    go _ = error "getBenchResult: impossible"
+
+selectStmtString :: (Sql92SelectSyntax (BeamSqlBackendSyntax be) ~ PgSelectSyntax) => SqlSelect be a -> BL.ByteString
+selectStmtString s = case s of
+  SqlSelect ss -> pgRenderSyntaxScript $ fromPgSelect $ ss
+
+data BenchResult = BenchResult
+  {
+    bench_query :: ByteString
+  , bench_raw_query :: Query
+  , bench_simplified_query :: Maybe Text
+  , bench_explain_analyze_report :: ByteString
+  , bench_planning_time :: ByteString
+  , bench_execution_time :: ByteString
+  } deriving Show
+
+searchExamples :: [Text]
+searchExamples = [ "receiver-guard"
+                 , "transfer-crosschain"
+                 , "module"
+                 , "hat"
+                 , "99cb7008d7d70c94f138cc366a825f0d9c83a8a2f4ba82c86c666e0ab6fecf3a"
+                 , "40ab110e52d0221ec8237d16f4b415fa52b8df97b26e6ae5d3518854a4a8d30f"]
diff --git a/chainweb-data.cabal b/haskell-src/chainweb-data.cabal
similarity index 64%
rename from chainweb-data.cabal
rename to haskell-src/chainweb-data.cabal
index 2a63bb51..5fd77e64 100644
--- a/chainweb-data.cabal
+++ b/haskell-src/chainweb-data.cabal
@@ -1,19 +1,16 @@
 cabal-version: 2.2
 name: chainweb-data
-version: 2.0.0
+version: 2.1.1
 description: Data ingestion for Chainweb.
homepage: https://github.com/kadena-io/chainweb-data author: Colin Woodbury maintainer: - Douglas Beardsley , Emily Pillmore + Douglas Beardsley , Emily Pillmore , Enis Bayramoğlu , Emmanuel Denloye-Ito copyright: 2020 Kadena LLC license: BSD-3-Clause license-file: LICENSE build-type: Simple -extra-doc-files: - README.org - ChangeLog.md common commons default-language: Haskell2010 @@ -24,9 +21,9 @@ common commons -fmax-relevant-binds=0 build-depends: - , aeson >=0.11.3.0 && <1.5 + , aeson >= 1.4.3 , base >=4.7 && <5 - , base-prelude ^>=1.3 + , base-prelude , base16-bytestring ^>=0.1 , beam-automigrate , beam-core >=0.8 && <0.10 @@ -40,8 +37,12 @@ common commons , directory , exceptions , filepath + , hashable <=1.3.3.0 , lens , lens-aeson + , postgresql-simple + , resource-pool >= 0.3 && <0.4 + , safe , scientific ^>=0.3 , servant , streaming-events ^>=1.0.1 @@ -61,25 +62,48 @@ library ChainwebData.Backfill ChainwebData.Genesis ChainwebData.Types + ChainwebData.Env + ChainwebData.Spec + ChainwebDb.BoundedScan + ChainwebDb.Database + ChainwebDb.Queries ChainwebDb.Types.Block + ChainwebDb.Types.Common ChainwebDb.Types.DbHash ChainwebDb.Types.Event ChainwebDb.Types.MinerKey ChainwebDb.Types.Signer ChainwebDb.Types.Transaction + ChainwebDb.Types.Transfer build-depends: base64-bytestring ^>=1.0 + , Decimal + , gargoyle + , gargoyle-postgresql + , http-client ^>=0.6 + , http-client-tls ^>=0.3 + , http-types + , openapi3 + , optparse-applicative >=0.14 && <0.17 + , servant-client + , servant-openapi3 + , yet-another-logger if flag(ghc-flags) build-tool-depends: hsinspect:hsinspect -any ghc-options: -fplugin GhcFlags.Plugin build-depends: ghcflags +flag threaded + description: Build chainweb-data with threading support + default: True + executable chainweb-data import: commons main-is: Main.hs - hs-source-dirs: exec - ghc-options: -threaded -rtsopts -with-rtsopts=-N + hs-source-dirs: exec data + if flag(threaded) + ghc-options: -threaded -rtsopts -with-rtsopts=-N build-depends: , async ^>=2.2 , base16-bytestring @@ -91,6 +115,7 @@ executable chainweb-data , connection , containers ^>=0.6 , Decimal + , direct-sqlite , errors ^>=2.3 , file-embed , gargoyle @@ -98,22 +123,27 @@ executable chainweb-data , http-client ^>=0.6 , http-client-tls ^>=0.3 , http-types + , lens-aeson + , managed , mtl , optparse-applicative >=0.14 && <0.17 - , pact + , pact >=4.3.1 , postgresql-simple + , postgresql-simple-migration , process ^>=1.6 - , resource-pool ^>=0.2 + , resource-pool >= 0.3 , retry ^>=0.8 - , scheduler ^>=1.4 + , scheduler >=1.4 , servant-client , servant-client-core , servant-server + , servant-swagger-ui , stm , stm-chans , streaming ^>=0.2 , streaming-events , strict-tuple ^>=0.1 + , string-conv , vector , wai , wai-app-static @@ -122,15 +152,14 @@ executable chainweb-data , wai-middleware-throttle , warp , warp-tls - , witherable-class ^>=0 + , witherable , yet-another-logger -- , gargoyle-postgresql-nix other-modules: Chainweb.Backfill + Chainweb.BackfillTransfers Chainweb.Coins - Chainweb.Database - Chainweb.Env Chainweb.FillEvents Chainweb.Gaps Chainweb.Listen @@ -157,9 +186,40 @@ test-suite testsuite , chainweb-data , containers ^>=0.6 , neat-interpolation >=0.5 && < 0.6 - , tasty ^>=1.2 - , tasty-hunit ^>=0.10 + , tasty >=1.2 + , tasty-hunit >=0.10 , text +benchmark bench + import: commons + default-language: Haskell2010 + ghc-options: -threaded -rtsopts -with-rtsopts=-N + hs-source-dirs: bench + main-is: Bench.hs + type: exitcode-stdio-1.0 + build-depends: + , aeson + , base + , 
beam-automigrate + , beam-core >=0.8 && <0.10 + , beam-migrate >=0.4 && <0.6 + , beam-postgres >=0.5 && <0.6 + , bytestring + , chainweb-api + , chainweb-data + , connection + , containers ^>=0.6 + , deepseq + , exceptions + , http-client ^>=0.6 + , optparse-applicative >=0.14 && <0.17 + , postgresql-simple + , resource-pool >= 0.3 && <0.4 + , string-conv + , text + , time >=1.8 && <1.11 + , unordered-containers + , vector + -- internal -- external diff --git a/data/miner_rewards.csv b/haskell-src/data/miner_rewards.csv similarity index 100% rename from data/miner_rewards.csv rename to haskell-src/data/miner_rewards.csv diff --git a/data/token_payments.csv b/haskell-src/data/token_payments.csv similarity index 97% rename from data/token_payments.csv rename to haskell-src/data/token_payments.csv index 811e54e2..30e594c1 100644 --- a/data/token_payments.csv +++ b/haskell-src/data/token_payments.csv @@ -957,11 +957,6 @@ PS_C1_1060,2022-11-01T00:00:00Z,PS_C1,400000,1 PS_C2_1061,2022-11-01T00:00:00Z,PS_C2,400000,2 PS_C3_1062,2022-11-01T00:00:00Z,PS_C3,400000,3 PS_C4_1063,2022-11-01T00:00:00Z,PS_C4,400000,4 -PS_C0_1069,2022-12-01T00:00:00Z,PS_C0,400000,0 -PS_C1_1070,2022-12-01T00:00:00Z,PS_C1,400000,1 -PS_C2_1071,2022-12-01T00:00:00Z,PS_C2,400000,2 -PS_C3_1072,2022-12-01T00:00:00Z,PS_C3,400000,3 -PS_C4_1073,2022-12-01T00:00:00Z,PS_C4,400000,4 PS_C0_1079,2023-01-01T00:00:00Z,PS_C0,400000,0 PS_C1_1080,2023-01-01T00:00:00Z,PS_C1,400000,1 PS_C2_1081,2023-01-01T00:00:00Z,PS_C2,400000,2 @@ -1017,11 +1012,6 @@ PS_C1_1180,2023-11-01T00:00:00Z,PS_C1,400000,1 PS_C2_1181,2023-11-01T00:00:00Z,PS_C2,400000,2 PS_C3_1182,2023-11-01T00:00:00Z,PS_C3,400000,3 PS_C4_1183,2023-11-01T00:00:00Z,PS_C4,400000,4 -PS_C0_1189,2023-12-01T00:00:00Z,PS_C0,400000,0 -PS_C1_1190,2023-12-01T00:00:00Z,PS_C1,400000,1 -PS_C2_1191,2023-12-01T00:00:00Z,PS_C2,400000,2 -PS_C3_1192,2023-12-01T00:00:00Z,PS_C3,400000,3 -PS_C4_1193,2023-12-01T00:00:00Z,PS_C4,400000,4 PS_C0_1199,2024-01-01T00:00:00Z,PS_C0,400000,0 PS_C1_1200,2024-01-01T00:00:00Z,PS_C1,400000,1 PS_C2_1201,2024-01-01T00:00:00Z,PS_C2,400000,2 @@ -1077,66 +1067,66 @@ PS_C1_1300,2024-11-01T00:00:00Z,PS_C1,400000,1 PS_C2_1301,2024-11-01T00:00:00Z,PS_C2,400000,2 PS_C3_1302,2024-11-01T00:00:00Z,PS_C3,400000,3 PS_C4_1303,2024-11-01T00:00:00Z,PS_C4,400000,4 -PS_C0_1309,2024-12-01T00:00:00Z,PS_C0,1200000,0 -PS_C1_1310,2024-12-01T00:00:00Z,PS_C1,1200000,1 -PS_C2_1311,2024-12-01T00:00:00Z,PS_C2,1200000,2 -PS_C3_1312,2024-12-01T00:00:00Z,PS_C3,1200000,3 -PS_C4_1313,2024-12-01T00:00:00Z,PS_C4,1200000,4 +PS_C0_1069,2025-01-01T00:00:00Z,PS_C0,400000,0 +PS_C1_1070,2025-01-01T00:00:00Z,PS_C1,400000,1 +PS_C2_1071,2025-01-01T00:00:00Z,PS_C2,400000,2 +PS_C3_1072,2025-01-01T00:00:00Z,PS_C3,400000,3 +PS_C4_1073,2025-01-01T00:00:00Z,PS_C4,400000,4 PS_C5_833,2025-02-01T00:00:00Z,PS_C5,400000,5 PS_C6_834,2025-02-01T00:00:00Z,PS_C6,400000,6 PS_C7_835,2025-02-01T00:00:00Z,PS_C7,400000,7 PS_C8_836,2025-02-01T00:00:00Z,PS_C8,400000,8 -PS_C9_837,2025-02-01T00:00:00Z,PS_C9,184000,9 +PS_C0_1189,2025-02-01T00:00:00Z,PS_C0,400000,0 PS_C5_845,2025-03-01T00:00:00Z,PS_C5,400000,5 PS_C6_846,2025-03-01T00:00:00Z,PS_C6,400000,6 PS_C7_847,2025-03-01T00:00:00Z,PS_C7,400000,7 PS_C8_848,2025-03-01T00:00:00Z,PS_C8,400000,8 -PS_C9_849,2025-03-01T00:00:00Z,PS_C9,184000,9 +PS_C1_1190,2025-03-01T00:00:00Z,PS_C1,400000,1 PS_C5_857,2025-04-01T00:00:00Z,PS_C5,400000,5 PS_C6_858,2025-04-01T00:00:00Z,PS_C6,400000,6 PS_C7_859,2025-04-01T00:00:00Z,PS_C7,400000,7 PS_C8_860,2025-04-01T00:00:00Z,PS_C8,400000,8 
-PS_C9_861,2025-04-01T00:00:00Z,PS_C9,184000,9 +PS_C2_1191,2025-04-01T00:00:00Z,PS_C2,400000,2 PS_C5_870,2025-05-01T00:00:00Z,PS_C5,400000,5 PS_C6_871,2025-05-01T00:00:00Z,PS_C6,400000,6 PS_C7_872,2025-05-01T00:00:00Z,PS_C7,400000,7 PS_C8_873,2025-05-01T00:00:00Z,PS_C8,400000,8 -PS_C9_874,2025-05-01T00:00:00Z,PS_C9,184000,9 +PS_C3_1192,2025-05-01T00:00:00Z,PS_C3,400000,3 PS_C5_881,2025-06-01T00:00:00Z,PS_C5,400000,5 PS_C6_882,2025-06-01T00:00:00Z,PS_C6,400000,6 PS_C7_883,2025-06-01T00:00:00Z,PS_C7,400000,7 PS_C8_884,2025-06-01T00:00:00Z,PS_C8,400000,8 -PS_C9_885,2025-06-01T00:00:00Z,PS_C9,184000,9 +PS_C4_1193,2025-06-01T00:00:00Z,PS_C4,400000,4 PS_C5_892,2025-07-01T00:00:00Z,PS_C5,400000,5 PS_C6_893,2025-07-01T00:00:00Z,PS_C6,400000,6 PS_C7_894,2025-07-01T00:00:00Z,PS_C7,400000,7 PS_C8_895,2025-07-01T00:00:00Z,PS_C8,400000,8 PS_C9_896,2025-07-01T00:00:00Z,PS_C9,184000,9 +PS_C9_837,2025-07-01T00:00:00Z,PS_C9,184000,9 PS_C5_904,2025-08-01T00:00:00Z,PS_C5,400000,5 PS_C6_905,2025-08-01T00:00:00Z,PS_C6,400000,6 PS_C7_906,2025-08-01T00:00:00Z,PS_C7,400000,7 PS_C8_907,2025-08-01T00:00:00Z,PS_C8,400000,8 PS_C9_908,2025-08-01T00:00:00Z,PS_C9,184000,9 +PS_C9_849,2025-08-01T00:00:00Z,PS_C9,184000,9 PS_C5_915,2025-09-01T00:00:00Z,PS_C5,400000,5 PS_C6_916,2025-09-01T00:00:00Z,PS_C6,400000,6 PS_C7_917,2025-09-01T00:00:00Z,PS_C7,400000,7 PS_C8_918,2025-09-01T00:00:00Z,PS_C8,400000,8 PS_C9_919,2025-09-01T00:00:00Z,PS_C9,184000,9 +PS_C9_861,2025-09-01T00:00:00Z,PS_C9,184000,9 PS_C5_926,2025-10-01T00:00:00Z,PS_C5,400000,5 PS_C6_927,2025-10-01T00:00:00Z,PS_C6,400000,6 PS_C7_928,2025-10-01T00:00:00Z,PS_C7,400000,7 PS_C8_929,2025-10-01T00:00:00Z,PS_C8,400000,8 PS_C9_930,2025-10-01T00:00:00Z,PS_C9,184000,9 +PS_C9_874,2025-10-01T00:00:00Z,PS_C9,184000,9 PS_C5_938,2025-11-01T00:00:00Z,PS_C5,400000,5 PS_C6_939,2025-11-01T00:00:00Z,PS_C6,400000,6 PS_C7_940,2025-11-01T00:00:00Z,PS_C7,400000,7 PS_C8_941,2025-11-01T00:00:00Z,PS_C8,400000,8 PS_C9_942,2025-11-01T00:00:00Z,PS_C9,184000,9 -PS_C5_949,2025-12-01T00:00:00Z,PS_C5,400000,5 -PS_C6_950,2025-12-01T00:00:00Z,PS_C6,400000,6 -PS_C7_951,2025-12-01T00:00:00Z,PS_C7,400000,7 -PS_C8_952,2025-12-01T00:00:00Z,PS_C8,400000,8 -PS_C9_953,2025-12-01T00:00:00Z,PS_C9,176000,9 +PS_C9_885,2025-11-01T00:00:00Z,PS_C9,184000,9 PS_C5_960,2026-01-01T00:00:00Z,PS_C5,400000,5 PS_C6_961,2026-01-01T00:00:00Z,PS_C6,400000,6 PS_C7_962,2026-01-01T00:00:00Z,PS_C7,400000,7 @@ -1192,11 +1182,6 @@ PS_C6_1065,2026-11-01T00:00:00Z,PS_C6,400000,6 PS_C7_1066,2026-11-01T00:00:00Z,PS_C7,400000,7 PS_C8_1067,2026-11-01T00:00:00Z,PS_C8,400000,8 PS_C9_1068,2026-11-01T00:00:00Z,PS_C9,400000,9 -PS_C5_1074,2026-12-01T00:00:00Z,PS_C5,400000,5 -PS_C6_1075,2026-12-01T00:00:00Z,PS_C6,400000,6 -PS_C7_1076,2026-12-01T00:00:00Z,PS_C7,400000,7 -PS_C8_1077,2026-12-01T00:00:00Z,PS_C8,400000,8 -PS_C9_1078,2026-12-01T00:00:00Z,PS_C9,400000,9 PS_C5_1084,2027-01-01T00:00:00Z,PS_C5,400000,5 PS_C6_1085,2027-01-01T00:00:00Z,PS_C6,400000,6 PS_C7_1086,2027-01-01T00:00:00Z,PS_C7,400000,7 @@ -1252,11 +1237,6 @@ PS_C6_1185,2027-11-01T00:00:00Z,PS_C6,400000,6 PS_C7_1186,2027-11-01T00:00:00Z,PS_C7,400000,7 PS_C8_1187,2027-11-01T00:00:00Z,PS_C8,400000,8 PS_C9_1188,2027-11-01T00:00:00Z,PS_C9,400000,9 -PS_C5_1194,2027-12-01T00:00:00Z,PS_C5,400000,5 -PS_C6_1195,2027-12-01T00:00:00Z,PS_C6,400000,6 -PS_C7_1196,2027-12-01T00:00:00Z,PS_C7,400000,7 -PS_C8_1197,2027-12-01T00:00:00Z,PS_C8,400000,8 -PS_C9_1198,2027-12-01T00:00:00Z,PS_C9,400000,9 PS_C5_1204,2028-01-01T00:00:00Z,PS_C5,400000,5 PS_C6_1205,2028-01-01T00:00:00Z,PS_C6,400000,6 
PS_C7_1206,2028-01-01T00:00:00Z,PS_C7,400000,7 @@ -1312,8 +1292,28 @@ PS_C6_1305,2028-11-01T00:00:00Z,PS_C6,400000,6 PS_C7_1306,2028-11-01T00:00:00Z,PS_C7,400000,7 PS_C8_1307,2028-11-01T00:00:00Z,PS_C8,400000,8 PS_C9_1308,2028-11-01T00:00:00Z,PS_C9,400000,9 -PS_C5_1314,2028-12-01T00:00:00Z,PS_C5,1200000,5 -PS_C6_1315,2028-12-01T00:00:00Z,PS_C6,1200000,6 -PS_C7_1316,2028-12-01T00:00:00Z,PS_C7,1200000,7 -PS_C8_1317,2028-12-01T00:00:00Z,PS_C8,1200000,8 -PS_C9_1318,2028-12-01T00:00:00Z,PS_C9,1200000,9 +PS_C0_1309,2029-01-01T00:00:00Z,PS_C0,1200000,0 +PS_C5_949,2029-01-01T00:00:00Z,PS_C5,400000,5 +PS_C1_1310,2029-02-01T00:00:00Z,PS_C1,1200000,1 +PS_C6_950,2029-02-01T00:00:00Z,PS_C6,400000,6 +PS_C2_1311,2029-03-01T00:00:00Z,PS_C2,1200000,2 +PS_C7_951,2029-03-01T00:00:00Z,PS_C7,400000,7 +PS_C3_1312,2029-04-01T00:00:00Z,PS_C3,1200000,3 +PS_C8_952,2029-04-01T00:00:00Z,PS_C8,400000,8 +PS_C4_1313,2029-05-01T00:00:00Z,PS_C4,1200000,4 +PS_C9_953,2029-05-01T00:00:00Z,PS_C9,176000,9 +PS_C5_1314,2029-06-01T00:00:00Z,PS_C5,1200000,5 +PS_C5_1074,2029-06-01T00:00:00Z,PS_C5,400000,5 +PS_C6_1315,2029-07-01T00:00:00Z,PS_C6,1200000,6 +PS_C6_1075,2029-07-01T00:00:00Z,PS_C6,400000,6 +PS_C7_1316,2029-08-01T00:00:00Z,PS_C7,1200000,7 +PS_C7_1076,2029-08-01T00:00:00Z,PS_C7,400000,7 +PS_C8_1317,2029-09-01T00:00:00Z,PS_C8,1200000,8 +PS_C8_1077,2029-09-01T00:00:00Z,PS_C8,400000,8 +PS_C9_1318,2029-10-01T00:00:00Z,PS_C9,1200000,9 +PS_C9_1078,2029-10-01T00:00:00Z,PS_C9,400000,9 +PS_C5_1194,2029-11-01T00:00:00Z,PS_C5,400000,5 +PS_C6_1195,2029-11-01T00:00:00Z,PS_C6,400000,6 +PS_C7_1196,2029-11-01T00:00:00Z,PS_C7,400000,7 +PS_C8_1197,2029-11-01T00:00:00Z,PS_C8,400000,8 +PS_C9_1198,2029-11-01T00:00:00Z,PS_C9,400000,9 diff --git a/exec/Chainweb/Allocations.hs b/haskell-src/exec/Chainweb/Allocations.hs similarity index 100% rename from exec/Chainweb/Allocations.hs rename to haskell-src/exec/Chainweb/Allocations.hs diff --git a/exec/Chainweb/Backfill.hs b/haskell-src/exec/Chainweb/Backfill.hs similarity index 95% rename from exec/Chainweb/Backfill.hs rename to haskell-src/exec/Chainweb/Backfill.hs index dcc0a8db..63f643d0 100644 --- a/exec/Chainweb/Backfill.hs +++ b/haskell-src/exec/Chainweb/Backfill.hs @@ -15,8 +15,8 @@ import BasePrelude hiding (insert, range, second) import Chainweb.Api.ChainId (ChainId(..)) import Chainweb.Api.NodeInfo -import Chainweb.Database -import Chainweb.Env +import ChainwebDb.Database +import ChainwebData.Env import Chainweb.Lookups import Chainweb.Worker import ChainwebData.Backfill @@ -24,7 +24,6 @@ import ChainwebData.Genesis import ChainwebData.Types import ChainwebDb.Types.Block -import Control.Concurrent (threadDelay) import Control.Concurrent.Async (race_) import Control.Scheduler hiding (traverse_) @@ -32,8 +31,8 @@ import Data.ByteString.Lazy (ByteString) import Data.Map.Strict (Map) import qualified Data.Map.Strict as M import qualified Data.Pool as P -import Data.Witherable.Class (wither) +import Witherable import System.Logger.Types hiding (logg) diff --git a/haskell-src/exec/Chainweb/BackfillTransfers.hs b/haskell-src/exec/Chainweb/BackfillTransfers.hs new file mode 100644 index 00000000..9f15d7ea --- /dev/null +++ b/haskell-src/exec/Chainweb/BackfillTransfers.hs @@ -0,0 +1,152 @@ +{-# LANGUAGE BangPatterns #-} +{-# LANGUAGE FlexibleContexts #-} +{-# LANGUAGE LambdaCase #-} +{-# LANGUAGE OverloadedStrings #-} +{-# LANGUAGE NoImplicitPrelude #-} +{-# LANGUAGE NumericUnderscores #-} +{-# LANGUAGE ScopedTypeVariables #-} +{-# LANGUAGE TupleSections #-} +{-# LANGUAGE TypeApplications #-} +{-# 
LANGUAGE TypeFamilies #-}
+
+module Chainweb.BackfillTransfers where
+
+import BasePrelude hiding (insert, range, second)
+
+import Chainweb.Api.NodeInfo
+import ChainwebDb.Database
+import ChainwebData.Env
+import ChainwebData.Types
+import ChainwebDb.Types.Event
+import ChainwebDb.Types.Transfer
+
+import Control.Concurrent.Async (race_)
+import Control.Lens hiding ((<.), reuse)
+
+import qualified Data.Aeson as A
+import Data.Aeson.Lens
+import qualified Data.Pool as P
+import qualified Data.Text.Read as TR
+
+import Database.Beam hiding (insert)
+import Database.Beam.Postgres
+import Database.Beam.Postgres.Full
+import Database.PostgreSQL.Simple
+
+import System.Logger.Types hiding (logg)
+
+-- Backfill an empty transfers table in three steps:
+-- 1. Check whether the transfers table is empty. If so, wait until the server
+--    fills some rows near the top before starting the backfill.
+-- 2. Check whether the events table has any coinbase gaps. If so, tell the
+--    user to fill those gaps first.
+-- 3. Fill from the last known max height on each chain all the way back to
+--    the events activation height(s).
+backfillTransfersCut :: Env -> Bool -> BackfillArgs -> IO ()
+backfillTransfersCut env _disableIndexesPred args = do
+
+  withDb env (runSelectReturningOne $ select $ pure $ exists_ (all_ (_cddb_transfers database) $> as_ @Int32 (val_ 1))) >>= \case
+    Just False -> do
+      logg Error "The transfers table is empty. Run the server command to start filling it with some entries!"
+      exitSuccess
+    Just True -> logg Info "The transfers table already has rows. Great!"
+    Nothing -> die "IMPOSSIBLE: The row-existence query on the transfers table failed somehow."
+
+  withDb env (runSelectReturningOne $ select $ pure $ exists_ (all_ (_cddb_events database) $> as_ @Int32 (val_ 1))) >>= \case
+    Just True -> logg Info "The events table already has rows. Great!"
+    Just False -> do
+      logg Error "The events table has no rows."
+      exitFailure
+    Nothing -> die "IMPOSSIBLE: The row-existence query on the events table failed somehow."
+
+  let version = _nodeInfo_chainwebVer $ _env_nodeInfo env
+      err = printf "backfillTransfersCut failed because we don't know how to work with chainweb version %s" version
+
+  withEventsMinHeight version err $ \eventsActivationHeight -> do
+    minHeights <- withDbDebug env Debug chainMinHeights
+    -- Every chain must already have a transfer row before we can backfill.
+    let checkMinHeights xs = getSum (foldMap (maybe mempty (const $ Sum 1) . snd) xs) == _nodeInfo_numChains (_env_nodeInfo env)
+    unless (checkMinHeights minHeights) $ do
+      logg Error "Make sure the transfers table has an entry for every chain id!"
+      exitFailure
+    let maxMinHeights = maximum $ mapMaybe snd minHeights
+    -- Get the maximum possible number of entries to fill.
+    effectiveTotal <- withDbDebug env Debug $ runSelectReturningOne $ select $ bigEventCount maxMinHeights eventsActivationHeight
+    unless (isJust effectiveTotal) $ die "Cannot get the number of entries needed to fill the transfers table"
+    mapM_ (\(cid, h) -> logg Info $ fromString $ printf "Filling transfers table on chain %d from height %d to height %d." cid eventsActivationHeight (fromJust h)) minHeights
+    ref <- newIORef 0
+    catch
+      (race_ (progress logg ref (fromIntegral $ fromJust effectiveTotal))
+        (forM_ (rangeToDescGroupsOf chunkSize (fromIntegral eventsActivationHeight) (fromIntegral maxMinHeights))
+          $ \(Low endingHeight, High startingHeight) ->
+            transferInserter ref (fromIntegral startingHeight) (fromIntegral endingHeight)))
+      (\(e :: SomeException) -> do
+        printf "\nDepending on the error, you may need to run the events backfill first\n%s\n" (show e)
+        exitFailure)
+  where
+    logg = _env_logger env
+    pool = _env_dbConnPool env
+    chunkSize = fromMaybe 200 $ _backfillArgs_chunkSize args
+    getValidTransfer :: Event -> (Sum Int, [Transfer] -> [Transfer])
+    getValidTransfer ev = maybe mempty ((Sum 1, ) . (:)) $ createTransfer ev
+    transferInserter :: IORef Int -> Int64 -> Int64 -> IO ()
+    transferInserter count startingHeight endingHeight = do
+      P.withResource pool $ \c -> withTransaction c $ runBeamPostgres c $ do
+        evs <- runSelectReturningList $ select $ eventSelector startingHeight endingHeight
+        let (Sum !cnt, tfs) = foldMap getValidTransfer evs
+        runInsert $
+          insert (_cddb_transfers database) (insertValues (tfs []))
+          $ onConflict (conflictingFields primaryKey) onConflictDoNothing
+        liftIO $ atomicModifyIORef' count (\cnt' -> (cnt' + cnt, ()))
+
+chainMinHeights :: Pg [(Int64, Maybe Int64)]
+chainMinHeights = runSelectReturningList $ select $ aggregate_ (\t -> (group_ (_tr_chainid t), min_ (_tr_height t))) (all_ (_cddb_transfers database))
+
+-- A TRANSFER event carries exactly three parameters: sender, receiver, amount.
+createTransfer :: Event -> Maybe Transfer
+createTransfer ev = do
+  guard $ lengthThree $ unwrap $ _ev_params ev
+  Transfer
+    <$> pure (_ev_block ev)
+    <*> pure (_ev_requestkey ev)
+    <*> pure (_ev_chainid ev)
+    <*> pure (_ev_height ev)
+    <*> pure (_ev_idx ev)
+    <*> pure (_ev_module ev)
+    <*> pure (_ev_moduleHash ev)
+    <*> from_acct
+    <*> to_acct
+    <*> getAmount (unwrap $ _ev_params ev)
+  where
+    from_acct = _ev_params ev ^? to unwrap . ix 0 . _String
+    to_acct = _ev_params ev ^? to unwrap . ix 1 . _String
+    unwrap (PgJSONB a) = a
+    lengthThree = \case
+      [_,_,_] -> True
+      _ -> False
+
+-- The amount may be encoded as a Pact {"decimal": ...} or {"int": ...}
+-- object with a string payload, or directly as a JSON number; try each
+-- encoding in turn.
+getAmount :: [A.Value] -> Maybe KDAScientific
+getAmount params = fmap KDAScientific $
+  (params ^? ix 2 . key "decimal" . _String . to TR.rational . _Right . _1)
+  <|>
+  (params ^? ix 2 . key "int" . _String . to TR.rational . _Right . _1)
+  <|>
+  (params ^? ix 2 . _Number)
+  <|>
+  -- These cases shouldn't ever be reached, but they are here just in case.
+  (params ^? ix 2 . key "int" . _Number)
+  <|>
+  (params ^? ix 2 . key "decimal" . _Number)
+  <|>
+  (params ^? ix 2 . _String . to TR.rational . _Right . _1)
+
+eventSelector' :: Int64 -> Int64 -> Q Postgres ChainwebDataDb s (EventT (QExpr Postgres s))
+eventSelector' startingHeight endingHeight = do
+  ev <- all_ (_cddb_events database)
+  guard_ $ _ev_height ev <=. val_ startingHeight
+  guard_ $ _ev_height ev >=. val_ endingHeight
+  guard_ $ _ev_name ev ==.
val_ "TRANSFER" + return ev + +eventSelector :: Int64 -> Int64 -> Q Postgres ChainwebDataDb s (EventT (QExpr Postgres s)) +eventSelector startingHeight endingHeight = orderBy_ getOrder $ eventSelector' startingHeight endingHeight + where + getOrder ev = (desc_ $ _ev_height ev, asc_ $ _ev_chainid ev) + +bigEventCount :: Int64 -> Int64 -> Q Postgres ChainwebDataDb s (QGenExpr QValueContext Postgres s Int64) +bigEventCount startingHeight endingHeight = aggregate_ (\_ -> as_ @Int64 countAll_) $ eventSelector' startingHeight endingHeight diff --git a/exec/Chainweb/Coins.hs b/haskell-src/exec/Chainweb/Coins.hs similarity index 78% rename from exec/Chainweb/Coins.hs rename to haskell-src/exec/Chainweb/Coins.hs index babcfe83..d57bf3c1 100644 --- a/exec/Chainweb/Coins.hs +++ b/haskell-src/exec/Chainweb/Coins.hs @@ -18,6 +18,9 @@ import qualified Data.ByteString.Lazy as BL import qualified Data.Csv as CSV import Data.Decimal import Data.FileEmbed +import Data.Function +import Data.List (groupBy, sortBy) +import Data.Ord import Data.Map (Map) import qualified Data.Map as M import Data.Text (Text) @@ -40,6 +43,20 @@ rawMinerRewards = $(embedFile "data/miner_rewards.csv") rawAllocations :: ByteString rawAllocations = $(embedFile "data/token_payments.csv") +allocations :: [AllocationEntry] +allocations = V.toList $ decodeAllocations rawAllocations + +sortedAllocations :: [AllocationEntry] +sortedAllocations = sortBy (comparing $ _csvTime . _allocationTime) allocations + +allocationGroups :: [[AllocationEntry]] +allocationGroups = groupBy ((==) `on` utcMonth . _csvTime . _allocationTime) sortedAllocations + +utcMonth :: UTCTime -> (Integer, Int) +utcMonth t = (y,m) + where + (y,m,_) = toGregorian $ utctDay t + newtype CsvDecimal = CsvDecimal { _csvDecimal :: Decimal } deriving newtype (Eq, Ord, Show, Read) @@ -139,3 +156,32 @@ cumulativeRewards = M.fromList $ go 0 0 $ M.toList $ _minerRewards minerRewardMa go lastHeight total ((height,reward):rs) = (lastHeight, (total, reward)) : go height t2 rs where t2 = total + fromIntegral (height - lastHeight) * reward + +-- Helper functions + +genesisDate :: UTCTime +genesisDate = UTCTime (fromGregorian 2019 10 30) 0 + +dateToHeight :: UTCTime -> Word64 +dateToHeight t = blockHeight + where + genesis = fromGregorian 2019 10 30 + diff = diffUTCTime t (UTCTime genesis 0) + blockHeight = round $ diff / 30 + +heightToDate :: Word64 -> UTCTime +heightToDate height = addUTCTime (fromIntegral height * 30) genesisDate + +getCirculatingCoinsByDate :: UTCTime -> Decimal +getCirculatingCoinsByDate t = getCirculatingCoins (dateToHeight t) t + +everyMonth :: [UTCTime] +everyMonth = filter (> genesisDate) $ do + y <- [2019..2050] + m <- [1..12] + return $ UTCTime (fromGregorian y m 1) 0 + +everyYear :: [UTCTime] +everyYear = filter (> genesisDate) $ do + y <- [2020..2050] + return $ UTCTime (fromGregorian y 1 1) 0 diff --git a/exec/Chainweb/FillEvents.hs b/haskell-src/exec/Chainweb/FillEvents.hs similarity index 69% rename from exec/Chainweb/FillEvents.hs rename to haskell-src/exec/Chainweb/FillEvents.hs index 155dc63a..4795c30d 100644 --- a/exec/Chainweb/FillEvents.hs +++ b/haskell-src/exec/Chainweb/FillEvents.hs @@ -16,8 +16,8 @@ import BasePrelude hiding (insert, range, second) import Chainweb.Api.BlockHeader import Chainweb.Api.ChainId (ChainId(..)) import Chainweb.Api.NodeInfo -import Chainweb.Database -import Chainweb.Env +import ChainwebDb.Database +import ChainwebData.Env import Chainweb.Lookups import Chainweb.Worker import ChainwebData.Types @@ -26,12 +26,12 @@ import 
ChainwebDb.Types.Event import ChainwebDb.Types.DbHash import ChainwebDb.Types.Transaction -import Control.Concurrent (threadDelay) import Control.Concurrent.Async (race_) import Data.ByteString.Lazy (ByteString) import qualified Data.Map.Strict as M import qualified Data.Pool as P +import Data.Time.Clock.POSIX (posixSecondsToUTCTime) import Database.Beam hiding (insert) import Database.Beam.Postgres @@ -67,53 +67,52 @@ fillEventsCut env args et cutBS = do counter <- newIORef 0 when (et == CoinbaseAndTx) $ do - let startingHeight = case _nodeInfo_chainwebVer $ _env_nodeInfo env of - "mainnet01" -> 1722501 - "testnet04" -> 1261001 - _ -> error "Chainweb version: Unknown" - gaps <- getCoinbaseGaps env startingHeight - mapM_ (logg Debug . fromString . show) gaps - let numMissingCoinbase = sum $ map (\(_,a,b) -> b - a - 1) gaps - logg Info $ fromString $ printf "Got %d gaps" (length gaps) - - if null gaps - then do - logg Info "There are no missing coinbase events on any of the chains!" - exitSuccess - else do - logg Info $ fromString $ printf "Filling coinbase transactions of %d blocks." numMissingCoinbase - race_ (progress logg counter $ fromIntegral numMissingCoinbase) $ do - forM gaps $ \(chain, low, high) -> do - -- TODO Maybe make the chunk size configurable - forM (rangeToDescGroupsOf 100 (Low $ fromIntegral low) (High $ fromIntegral high)) $ \(chunkLow, chunkHigh) -> do - headersBetween env (ChainId $ fromIntegral chain, chunkLow, chunkHigh) >>= \case - Left e -> logg Error $ fromString $ printf "ApiError for range %s: %s" (show (chunkLow, chunkHigh)) (show e) - Right [] -> logg Error $ fromString $ printf "headersBetween: %s" $ show (chunkLow, chunkHigh) - Right headers -> do - let payloadHashes = M.fromList $ map (\header -> (hashToDbHash $ _blockHeader_payloadHash header, header)) headers - payloadWithOutputsBatch env (ChainId $ fromIntegral chain) payloadHashes >>= \case - Left e -> do - -- TODO Possibly also check for "key not found" message - if (apiError_type e == ClientError) - then do - forM_ (filter (\header -> curHeight - (fromIntegral $ _blockHeader_height header) > 120) headers) $ \header -> do - logg Debug $ fromString $ printf "Setting numEvents to 0 for all transactions with block hash %s" (unDbHash $ hashToDbHash $ _blockHeader_hash header) - P.withResource pool $ \c -> - withTransaction c $ runBeamPostgres c $ - runUpdate - $ update (_cddb_transactions database) - (\tx -> _tx_numEvents tx <-. val_ (Just 0)) - (\tx -> _tx_block tx ==. val_ (BlockId (hashToDbHash $ _blockHeader_hash header))) - logg Debug $ fromString $ show e - else logg Error $ fromString $ printf "no payloads for header range (%d, %d) on chain %d" (coerce chunkLow :: Int) (coerce chunkHigh :: Int) chain - Right bpwos -> do - let write header bpwo = do - let curHash = hashToDbHash $ _blockHeader_hash header - height = fromIntegral $ _blockHeader_height header - writePayload pool (ChainId $ fromIntegral chain) curHash height bpwo - atomicModifyIORef' counter (\n -> (n+1, ())) - forM_ bpwos (uncurry write) - forM_ delay threadDelay + let version = _nodeInfo_chainwebVer $ _env_nodeInfo env + err = printf "fillEventsCut failed because we don't know how to work this version %s" version + withEventsMinHeight version err $ \(startingHeight :: Integer) -> do + gaps <- getCoinbaseGaps env (fromIntegral startingHeight) + mapM_ (logg Debug . fromString . 
show) gaps + let numMissingCoinbase = sum $ map (\(_,a,b) -> b - a - 1) gaps + logg Info $ fromString $ printf "Got %d gaps" (length gaps) + + if null gaps + then do + logg Info "There are no missing coinbase events on any of the chains!" + exitSuccess + else do + logg Info $ fromString $ printf "Filling coinbase transactions of %d blocks." numMissingCoinbase + race_ (progress logg counter $ fromIntegral numMissingCoinbase) $ do + forM gaps $ \(chain, low, high) -> do + -- TODO Maybe make the chunk size configurable + forM (rangeToDescGroupsOf 100 (Low $ fromIntegral low) (High $ fromIntegral high)) $ \(chunkLow, chunkHigh) -> do + headersBetween env (ChainId $ fromIntegral chain, chunkLow, chunkHigh) >>= \case + Left e -> logg Error $ fromString $ printf "ApiError for range %s: %s" (show (chunkLow, chunkHigh)) (show e) + Right [] -> logg Error $ fromString $ printf "headersBetween: %s" $ show (chunkLow, chunkHigh) + Right headers -> do + let payloadHashes = M.fromList $ map (\header -> (hashToDbHash $ _blockHeader_payloadHash header, header)) headers + payloadWithOutputsBatch env (ChainId $ fromIntegral chain) payloadHashes _blockHeader_hash >>= \case + Left e -> do + -- TODO Possibly also check for "key not found" message + if (apiError_type e == ClientError) + then do + forM_ (filter (\header -> curHeight - (fromIntegral $ _blockHeader_height header) > 120) headers) $ \header -> do + logg Debug $ fromString $ printf "Setting numEvents to 0 for all transactions with block hash %s" (unDbHash $ hashToDbHash $ _blockHeader_hash header) + P.withResource pool $ \c -> + withTransaction c $ runBeamPostgres c $ + runUpdate + $ update (_cddb_transactions database) + (\tx -> _tx_numEvents tx <-. val_ (Just 0)) + (\tx -> _tx_block tx ==. val_ (BlockId (hashToDbHash $ _blockHeader_hash header))) + logg Debug $ fromString $ show e + else logg Error $ fromString $ printf "no payloads for header range (%d, %d) on chain %d" (coerce chunkLow :: Int) (coerce chunkHigh :: Int) chain + Right bpwos -> do + let write header bpwo = do + let curHash = hashToDbHash $ _blockHeader_hash header + height = fromIntegral $ _blockHeader_height header + writePayload pool (ChainId $ fromIntegral chain) curHash height (_nodeInfo_chainwebVer $ _env_nodeInfo env) (posixSecondsToUTCTime $ _blockHeader_creationTime header) bpwo + atomicModifyIORef' counter (\n -> (n+1, ())) + forM_ bpwos (uncurry write) + forM_ delay threadDelay where logg = _env_logger env diff --git a/exec/Chainweb/Gaps.hs b/haskell-src/exec/Chainweb/Gaps.hs similarity index 95% rename from exec/Chainweb/Gaps.hs rename to haskell-src/exec/Chainweb/Gaps.hs index f9ce4f6f..753118a0 100644 --- a/exec/Chainweb/Gaps.hs +++ b/haskell-src/exec/Chainweb/Gaps.hs @@ -7,8 +7,8 @@ module Chainweb.Gaps ( gaps ) where import Chainweb.Api.ChainId (ChainId(..)) import Chainweb.Api.NodeInfo -import Chainweb.Database -import Chainweb.Env +import ChainwebDb.Database +import ChainwebData.Env import Chainweb.Lookups import Chainweb.Worker (writeBlocks) import ChainwebDb.Types.Block @@ -110,6 +110,13 @@ dropIndexes :: P.Pool Connection -> [(String, String)] -> IO () dropIndexes pool indexinfos = forM_ indexinfos $ \(tablename, indexname) -> P.withResource pool $ \conn -> execute_ conn $ Query $ fromString $ printf "ALTER TABLE %s DROP CONSTRAINT %s CASCADE;" tablename indexname +dropExtensions :: P.Pool Connection -> IO () +dropExtensions pool = P.withResource pool $ \conn -> + mapM_ (execute_ conn . 
Query) stmts + where + stmts = map ("DROP EXTENSION " <>) ["btree_gin;"] + + dedupeMinerKeysTable :: P.Pool Connection -> LogFunctionIO Text -> IO () dedupeMinerKeysTable pool logger = do logger Info "Deduping minerkeys table" @@ -144,7 +151,7 @@ dedupeTables pool logger = do withDroppedIndexes :: P.Pool Connection -> LogFunctionIO Text -> IO a -> IO a withDroppedIndexes pool logger action = do indexInfos <- listIndexes pool logger - fmap fst $ generalBracket (dropIndexes pool indexInfos) release (const action) + fmap fst $ generalBracket (dropIndexes pool indexInfos >> dropExtensions pool) release (const action) where release _ = \case ExitCaseSuccess _ -> dedupeTables pool logger diff --git a/exec/Chainweb/Listen.hs b/haskell-src/exec/Chainweb/Listen.hs similarity index 82% rename from exec/Chainweb/Listen.hs rename to haskell-src/exec/Chainweb/Listen.hs index 0bebad61..28eb4356 100644 --- a/exec/Chainweb/Listen.hs +++ b/haskell-src/exec/Chainweb/Listen.hs @@ -14,7 +14,7 @@ import Chainweb.Api.BlockPayloadWithOutputs import Chainweb.Api.ChainId (unChainId) import Chainweb.Api.Hash import Chainweb.Api.NodeInfo -import Chainweb.Env +import ChainwebData.Env import Chainweb.Lookups import Chainweb.Worker import ChainwebData.Types @@ -25,6 +25,7 @@ import qualified Data.Pool as P import Data.String import qualified Data.Text as T import Data.Text.Encoding +import Data.Time.Clock.POSIX (posixSecondsToUTCTime) import Data.Tuple.Strict (T2(..)) import Database.Beam.Postgres (Connection) import Network.HTTP.Client @@ -66,18 +67,22 @@ getOutputsAndInsert env ph@(PowHeader h _) = do (hashB64U $ _blockHeader_hash h) logg Info $ fromString $ show e Right pl -> do - insertNewHeader (_env_dbConnPool env) ph pl + insertNewHeader (_nodeInfo_chainwebVer $ _env_nodeInfo env) (_env_dbConnPool env) ph pl logg Info (fromString $ printf "%d" (unChainId $ _blockHeader_chainId h)) >> hFlush stdout -insertNewHeader :: P.Pool Connection -> PowHeader -> BlockPayloadWithOutputs -> IO () -insertNewHeader pool ph pl = do +insertNewHeader :: T.Text -> P.Pool Connection -> PowHeader -> BlockPayloadWithOutputs -> IO () +insertNewHeader version pool ph pl = do let !m = _blockPayloadWithOutputs_minerData pl !b = asBlock ph m !t = mkBlockTransactions b pl !es = mkBlockEvents (fromIntegral $ _blockHeader_height $ _hwp_header ph) (_blockHeader_chainId $ _hwp_header ph) (DbHash $ hashB64U $ _blockHeader_hash $ _hwp_header ph) pl !ss = concat $ map (mkTransactionSigners . 
fst) (_blockPayloadWithOutputs_transactionsWithOutputs pl) + !k = bpwoMinerKeys pl - writes pool b k t es ss + err = printf "insertNewHeader failed because we don't know how to work this version %s" version + withEventsMinHeight version err $ \minHeight -> do + let !tf = mkTransferRows (fromIntegral $ _blockHeader_height $ _hwp_header ph) (_blockHeader_chainId $ _hwp_header ph) (DbHash $ hashB64U $ _blockHeader_hash $ _hwp_header ph) (posixSecondsToUTCTime $ _blockHeader_creationTime $ _hwp_header ph) pl minHeight + writes pool b k t es ss tf mkRequest :: UrlScheme -> ChainwebVersion -> Request mkRequest us (ChainwebVersion cv) = defaultRequest diff --git a/exec/Chainweb/Lookups.hs b/haskell-src/exec/Chainweb/Lookups.hs similarity index 79% rename from exec/Chainweb/Lookups.hs rename to haskell-src/exec/Chainweb/Lookups.hs index 063d7d8d..1261eaab 100644 --- a/exec/Chainweb/Lookups.hs +++ b/haskell-src/exec/Chainweb/Lookups.hs @@ -1,7 +1,10 @@ {-# LANGUAGE GeneralizedNewtypeDeriving #-} +{-# LANGUAGE LambdaCase #-} {-# LANGUAGE OverloadedStrings #-} +{-# LANGUAGE NumericUnderscores #-} {-# LANGUAGE ScopedTypeVariables #-} {-# LANGUAGE TypeFamilies #-} +{-# LANGUAGE TypeApplications #-} module Chainweb.Lookups ( -- * Endpoints @@ -15,8 +18,10 @@ module Chainweb.Lookups , mkBlockTransactions , mkBlockEvents , mkBlockEvents' + , mkBlockEventsWithCreationTime , mkCoinbaseEvents , mkTransactionSigners + , mkTransferRows , bpwoMinerKeys , ErrorType(..) @@ -36,13 +41,15 @@ import Chainweb.Api.Payload import Chainweb.Api.Sig import qualified Chainweb.Api.Signer as CW import qualified Chainweb.Api.Transaction as CW -import Chainweb.Env +import ChainwebData.Env import ChainwebData.Types import ChainwebDb.Types.Block +import ChainwebDb.Types.Common import ChainwebDb.Types.DbHash import ChainwebDb.Types.Event import ChainwebDb.Types.Signer import ChainwebDb.Types.Transaction +import ChainwebDb.Types.Transfer import Control.Applicative import Control.Error.Util (hush) import Control.Lens @@ -55,10 +62,13 @@ import qualified Data.HashMap.Strict as HM import qualified Data.Map.Strict as M import Data.Foldable import Data.Int +import qualified Data.List as L (intercalate) import Data.Maybe import Data.Serialize.Get (runGet) import qualified Data.Text as T import qualified Data.Text.Encoding as T +import qualified Data.Text.Read as TR +import Data.Time.Clock (UTCTime) import Data.Time.Clock.POSIX (posixSecondsToUTCTime) import Data.Tuple.Strict (T2(..)) import qualified Data.Vector as V @@ -116,19 +126,21 @@ payloadWithOutputsBatch :: Env -> ChainId -> M.Map (DbHash PayloadHash) a + -> (a -> Hash) -> IO (Either ApiError [(a, BlockPayloadWithOutputs)]) -payloadWithOutputsBatch env (ChainId cid) m = do +payloadWithOutputsBatch env (ChainId cid) m _f = do initReq <- parseRequest url let req = initReq { method = "POST" , requestBody = RequestBodyLBS $ encode requestObject, requestHeaders = encoding} eresp <- handleRequest req (_env_httpManager env) let res = do resp <- eresp case eitherDecode' (responseBody resp) of - Left e -> Left $ ApiError (OtherError $ "Decoding error in payloadWithOutputsBatch: " <> T.pack e) + Left e -> Left $ ApiError (OtherError $ "Decoding error in payloadWithOutputsBatch: " <> T.pack e <> rest) (responseStatus resp) (responseBody resp) Right (as :: [BlockPayloadWithOutputs]) -> Right $ foldr go [] as pure res where + rest = T.pack $ "\nHashes: ( " ++ (L.intercalate " " $ M.elems (show . hashB64U . 
_f <$> m)) ++ " )" url = showUrlScheme (UrlScheme Https $ _env_p2pUrl env) <> T.unpack query v = _nodeInfo_chainwebVer $ _env_nodeInfo env query = "/chainweb/0.0/" <> v <> "/chain/" <> T.pack (show cid) <> "/payload/outputs/batch" @@ -204,11 +216,67 @@ mkBlockEvents' height cid blockhash pl = mkPair p = ( DbHash $ hashB64U $ CW._transaction_hash $ fst p , mkTxEvents height cid blockhash p) +mkBlockEventsWithCreationTime :: Int64 -> ChainId -> DbHash BlockHash -> BlockPayloadWithOutputs -> ([Event], [(DbHash TxHash, UTCTime, [Event])]) +mkBlockEventsWithCreationTime height cid blockhash pl = (mkCoinbaseEvents height cid blockhash pl, map mkTriple tos) + where + tos = _blockPayloadWithOutputs_transactionsWithOutputs pl + mkTriple p = (DbHash $ hashB64U $ CW._transaction_hash $ fst p + , posixSecondsToUTCTime $ _chainwebMeta_creationTime $ _pactCommand_meta $ CW._transaction_cmd $ fst p + , mkTxEvents height cid blockhash p) + mkBlockEvents :: Int64 -> ChainId -> DbHash BlockHash -> BlockPayloadWithOutputs -> [Event] mkBlockEvents height cid blockhash pl = cbes ++ concatMap snd txes where (cbes, txes) = mkBlockEvents' height cid blockhash pl +mkTransferRows :: Int64 -> ChainId -> DbHash BlockHash -> UTCTime -> BlockPayloadWithOutputs -> Int -> [Transfer] +mkTransferRows height cid@(ChainId cid') blockhash _creationTime pl eventMinHeight = + let (coinbaseEvs, evs) = mkBlockEventsWithCreationTime height cid blockhash pl + in if height >= fromIntegral eventMinHeight + then createNonCoinBaseTransfers evs ++ createCoinBaseTransfers coinbaseEvs + else [] + where + unwrap (PgJSONB a) = a + mkTransfer mReqKey ev = do + let (PgJSONB params) = _ev_params ev + amount <- getAmount params + fromAccount <- params ^? ix 0 . _String + toAccount <- params ^? ix 1 . _String + return Transfer + { + _tr_block = BlockId blockhash + , _tr_requestkey = maybe RKCB_Coinbase RKCB_RequestKey mReqKey + , _tr_chainid = fromIntegral cid' + , _tr_height = height + , _tr_idx = _ev_idx ev + , _tr_modulename = _ev_module ev + , _tr_moduleHash = _ev_moduleHash ev + , _tr_from_acct = fromAccount + , _tr_to_acct = toAccount + , _tr_amount = amount + } + getAmount :: [Value] -> Maybe KDAScientific + getAmount params = fmap KDAScientific $ + (params ^? ix 2 . key "decimal" . _Number) + <|> + (params ^? ix 2 . key "decimal" . _String . to TR.rational . _Right . _1) + <|> + (params ^? ix 2 . key "int" . _Number) + <|> + (params ^? ix 2 . key "int" . _String . to TR.rational . _Right . _1) + <|> + (params ^? ix 2 . _Number) + <|> + (params ^? ix 2 . _String . to TR.rational . _Right . _1) + createCoinBaseTransfers = fmap (fromMaybe (error "") . mkTransfer Nothing) + createNonCoinBaseTransfers xs = [ transfer + | (txhash,_,evs) <- xs + , ev <- evs + , T.takeEnd 8 (_ev_qualName ev) == "TRANSFER" + , length (unwrap (_ev_params ev)) == 3 + , transfer <- maybeToList $ mkTransfer (Just txhash) ev + ] + mkTransactionSigners :: CW.Transaction -> [Signer] mkTransactionSigners t = zipWith3 mkSigner signers sigs [0..] 
where @@ -244,7 +312,7 @@ mkTransaction b (tx,txo) = Transaction , _tx_creationTime = posixSecondsToUTCTime $ _chainwebMeta_creationTime mta , _tx_ttl = fromIntegral $ _chainwebMeta_ttl mta , _tx_gasLimit = fromIntegral $ _chainwebMeta_gasLimit mta - , _tx_gasPrice = _chainwebMeta_gasPrice mta + , _tx_gasPrice = realToFrac $ _chainwebMeta_gasPrice mta , _tx_sender = _chainwebMeta_sender mta , _tx_nonce = _pactCommand_nonce cmd , _tx_code = _exec_code <$> exc diff --git a/haskell-src/exec/Chainweb/RichList.hs b/haskell-src/exec/Chainweb/RichList.hs new file mode 100644 index 00000000..f76a4f2c --- /dev/null +++ b/haskell-src/exec/Chainweb/RichList.hs @@ -0,0 +1,131 @@ +{-# LANGUAGE LambdaCase #-} +{-# LANGUAGE TupleSections #-} +{-# LANGUAGE TypeApplications #-} +module Chainweb.RichList ( richList ) where + +import Control.Applicative ((<|>)) +import Control.Exception +import Control.Monad +import Control.Lens +import Data.Aeson (eitherDecodeStrict, Value(..)) +import Data.Aeson.Lens +import qualified Data.ByteString.Lazy as LBS +import Data.ByteString (ByteString) +import qualified Data.Csv as Csv +import Data.Foldable (asum) +import Data.Int (Int64) +import Data.List (isPrefixOf, sort,sortOn) +import qualified Data.Map.Strict as M +import Data.Ord (Down(..)) +import qualified Data.Text as T +import Data.Text (Text) +import Data.Text.Read (double) +import Data.String.Conv + +import System.Directory +import System.FilePath +import System.Logger.Types + +import Text.Printf (printf) +import Text.Read + +import Database.SQLite3 +import Database.SQLite3.Direct (Utf8(..)) + +import ChainwebData.Env (ChainwebVersion(..)) + +import Pact.Types.SQLite + + +richList :: LogFunctionIO Text -> FilePath -> ChainwebVersion -> IO () +richList logger fp (ChainwebVersion version) = do + + files <- doesPathExist fp >>= \case + True -> checkChains + False -> ioError $ userError + $ "Chainweb-node top-level db directory does not exist: " + <> fp + logger Info "Aggregating richlist ..." + results <- fmap mconcat $ forM files $ \(cid, file) -> fmap (fmap (\(acct,txid, bal) -> (cid,acct,txid,bal))) $ withSQLiteConnection file richListQuery + logger Info $ "Filtering top 100 richest accounts..." + pruneRichList (either error id . parseResult <$> results) + where + parseResult (cid, a,txid, b) = do + validJSON <- eitherDecodeStrict b + let msg = "Unable to get balance\n invalid JSON " <> show validJSON + maybe (Left msg) (Right . (cid,a,txid,)) $ getBalance validJSON + checkChains :: IO [(Int, FilePath)] + checkChains = do + let sqlitePath = appendSlash fp <> "chainweb-node/" <> T.unpack version <> "/0/sqlite" + appendSlash str = if last str == '/' then str else str <> "/" + + doesPathExist sqlitePath >>= \case + False -> ioError $ userError $ printf "Cannot find sqlite data (at \"%s\"). Is your node synced?" sqlitePath + True -> do + files <- filter ((==) ".sqlite" . takeExtension) <$> listDirectory sqlitePath + + let go p + | "pact-v1-chain-" `isPrefixOf` p = + case splitAt 14 (fst $ splitExtension p) of + (_, "") -> error $ "Found corrupt sqlite path: " <> p + (_, cid) -> case readMaybe @Int cid of + Just c -> (((sqlitePath <> "/" <> p) :), (c :)) + Nothing -> error "Couldn't read chain id" + | otherwise = mempty + (fdl, cdl) = foldMap go files + chains = cdl [] + isConsecutive = all (\(x,y) -> succ x == y) + . (zip <*> tail) + . sort + unless (isConsecutive chains) + $ ioError $ userError + $ "Missing tables for some chain ids. Is your node synced?" 
+      return $ zip (cdl []) $ fdl []
+
+getBalance :: Value -> Maybe Double
+getBalance bytes = asum $ basecase : (fmap getBalance $ bytes ^.. members)
+  where
+    fromSci = fromRational . toRational
+    basecase =
+      bytes ^? key "balance" . _Number . to fromSci
+      <|>
+      bytes ^? key "balance" . key "decimal" . _Number . to fromSci
+      <|>
+      bytes ^? key "balance" . key "int" . _Number . to fromSci
+      <|>
+      bytes ^? key "balance" . key "decimal" . _String . to double . _Right . _1
+      <|>
+      bytes ^? key "balance" . key "int" . _String . to double . _Right . _1
+
+pruneRichList :: [(Int, Text, Int64, Double)] -> IO ()
+pruneRichList = LBS.writeFile "richlist.csv"
+    . Csv.encode
+    . take 100
+    . map (\((cid,acct,txid),bal) -> (cid,acct,bal,txid))
+    . sortOn (Down . snd)
+    . M.toList
+    . M.fromListWith (+)
+    . map (\(cid,acct,txid, balance) -> ((cid,acct,txid), balance))
+
+-- WARNING: This function can throw exceptions if anything goes wrong; errors
+-- are not caught and returned in an Either type
+withSQLiteConnection :: FilePath -> (Database -> IO a) -> IO a
+withSQLiteConnection fp action = bracket (open (T.pack fp)) close action
+
+richListQuery :: Database -> IO [(Text, Int64, ByteString)]
+richListQuery db = do
+  rows <- qry_ db richListQueryStmt [RText, RInt, RBlob]
+  return $ rows <&> \case
+    [SText (Utf8 account), SInt txid, SBlob jsonvalue] -> (toS account, txid, jsonvalue)
+    _ -> error "impossible?" -- TODO: Make this use throwError/throwM instead of error
+
+richListQueryStmt :: Utf8
+richListQueryStmt =
+    "select rowkey as acct_id, txid, rowdata \
+    \ from [coin_coin-table] as coin\
+    \ INNER JOIN (\
+    \  select\
+    \   rowkey as acct_id,\
+    \   max(txid) as last_txid\
+    \  from 'coin_coin-table'\
+    \  group by acct_id\
+    \ ) latest ON coin.rowkey = latest.acct_id AND coin.txid = latest.last_txid;"
diff --git a/haskell-src/exec/Chainweb/Server.hs b/haskell-src/exec/Chainweb/Server.hs
new file mode 100644
index 00000000..aa6349bb
--- /dev/null
+++ b/haskell-src/exec/Chainweb/Server.hs
@@ -0,0 +1,741 @@
+{-# OPTIONS_GHC -fno-warn-orphans #-}
+
+{-# LANGUAGE DataKinds #-}
+{-# LANGUAGE DerivingStrategies #-}
+{-# LANGUAGE FlexibleContexts #-}
+{-# LANGUAGE FlexibleInstances #-}
+{-# LANGUAGE GeneralizedNewtypeDeriving #-}
+{-# LANGUAGE LambdaCase #-}
+{-# LANGUAGE MultiParamTypeClasses #-}
+{-# LANGUAGE NumericUnderscores #-}
+{-# LANGUAGE ScopedTypeVariables #-}
+{-# LANGUAGE TypeApplications #-}
+{-# LANGUAGE TypeFamilies #-}
+{-# LANGUAGE TypeOperators #-}
+
+module Chainweb.Server where
+
+------------------------------------------------------------------------------
+import Chainweb.Api.BlockHeader (BlockHeader(..))
+import Chainweb.Api.ChainId
+import Chainweb.Api.Hash
+import Chainweb.Api.NodeInfo
+import Control.Applicative
+import Control.Concurrent
+import Control.Error
+import Control.Exception (bracket_)
+import Control.Monad.Except
+import qualified Control.Monad.Managed as M
+import Control.Retry
+import Data.Aeson hiding (Error)
+import qualified Data.ByteString as BS
+import qualified Data.ByteString.Base64.URL as B64
+import Data.ByteString.Lazy (ByteString)
+import Data.Decimal
+import Data.Foldable
+import Data.Int
+import Data.IORef
+import qualified Data.Pool as P
+import Data.Proxy
+import Data.Sequence (Seq)
+import qualified Data.Sequence as S
+import Data.String
+import Data.String.Conv (toS)
+import Data.Text (Text)
+import qualified Data.Text as T
+import qualified Data.Text.IO as T
+import Data.Time
+import Data.Tuple.Strict (T2(..))
+import Database.Beam hiding (insert)
+import Database.Beam.Backend.SQL
+import Database.Beam.Postgres +import qualified Database.PostgreSQL.Simple.Transaction as PG +import Control.Lens +import Network.Wai +import Network.Wai.Handler.Warp +import Network.Wai.Middleware.Cors +import Servant.API +import Servant.Server +import Servant.Swagger.UI +import System.Directory +import System.FilePath +import System.Logger.Types hiding (logg) +import Text.Printf +------------------------------------------------------------------------------ +import Chainweb.Api.BlockPayloadWithOutputs +import Chainweb.Api.Common (BlockHeight) +import Chainweb.Api.StringEncoded (StringEncoded(..)) +import Chainweb.Coins +import ChainwebDb.Database +import ChainwebDb.Queries +import ChainwebData.Env +import Chainweb.Gaps +import Chainweb.Listen +import Chainweb.Lookups +import Chainweb.RichList +import ChainwebData.Types +import ChainwebData.Api +import ChainwebData.TransferDetail +import ChainwebData.EventDetail +import qualified ChainwebData.Spec as Spec +import ChainwebData.Pagination +import ChainwebData.TxDetail +import ChainwebData.TxSummary +import ChainwebDb.Types.Block +import ChainwebDb.Types.Common +import ChainwebDb.Types.DbHash +import ChainwebDb.Types.Transfer +import ChainwebDb.Types.Transaction +import ChainwebDb.Types.Event +import ChainwebDb.BoundedScan +------------------------------------------------------------------------------ + +setCors :: Middleware +setCors = cors . const . Just $ simpleCorsResourcePolicy + { corsRequestHeaders = simpleHeaders + , corsExposedHeaders = Just ["Chainweb-Next"] + } + +data ServerState = ServerState + { _ssRecentTxs :: RecentTxs + , _ssHighestBlockHeight :: BlockHeight + , _ssTransactionCount :: Maybe Int64 + , _ssCirculatingCoins :: Decimal + } deriving (Eq,Show) + +ssRecentTxs + :: Functor f + => (RecentTxs -> f RecentTxs) + -> ServerState -> f ServerState +ssRecentTxs = lens _ssRecentTxs setter + where + setter sc v = sc { _ssRecentTxs = v } + +ssHighestBlockHeight + :: Functor f + => (BlockHeight -> f BlockHeight) + -> ServerState -> f ServerState +ssHighestBlockHeight = lens _ssHighestBlockHeight setter + where + setter sc v = sc { _ssHighestBlockHeight = v } + +ssTransactionCount + :: Functor f + => (Maybe Int64 -> f (Maybe Int64)) + -> ServerState -> f ServerState +ssTransactionCount = lens _ssTransactionCount setter + where + setter sc v = sc { _ssTransactionCount = v } + +ssCirculatingCoins + :: Functor f + => (Decimal -> f Decimal) + -> ServerState -> f ServerState +ssCirculatingCoins = lens _ssCirculatingCoins setter + where + setter sc v = sc { _ssCirculatingCoins = v } + +type RichlistEndpoint = "richlist.csv" :> Get '[PlainText] Text + +type TxEndpoint = "tx" :> QueryParam "requestkey" Text :> Get '[JSON] TxDetail + +type TheApi = + ChainwebDataApi + :<|> RichlistEndpoint + +type ApiWithSwaggerUI + = TheApi + :<|> SwaggerSchemaUI "cwd-spec" "cwd-spec.json" + +type ApiWithNoSwaggerUI + = TheApi + :<|> "cwd-spec" :> Get '[PlainText] Text -- Respond with 404 + +apiServer :: Env -> ServerEnv -> IO () +apiServer env senv = do + ecut <- queryCut env + let logg = _env_logger env + case ecut of + Left e -> do + logg Error "Error querying cut" + logg Info $ fromString $ show e + Right cutBS -> apiServerCut env senv cutBS + +type ConnectionWithThrottling = (Connection, Double) + +-- | Given the amount of contention on connections, calculate a factor between +-- 0 and 1 that should be used to scale down the amount of work done by request +-- handlers +throttlingFactor :: Integer -> Double +throttlingFactor load = if loadPerCap <= 1 
then 1 else 1 / loadPerCap where
+  -- We're arbitrarily assuming that Postgres will handle 3 concurrent requests
+  -- without any slowdown
+  loadPerCap = fromInteger load / 3
+
+apiServerCut :: Env -> ServerEnv -> ByteString -> IO ()
+apiServerCut env senv cutBS = do
+  let curHeight = cutMaxHeight cutBS
+      logg = _env_logger env
+  t <- getCurrentTime
+  let circulatingCoins = getCirculatingCoins (fromIntegral curHeight) t
+  logg Info $ fromString $ "Total coins in circulation: " <> show circulatingCoins
+  let pool = _env_dbConnPool env
+  recentTxs <- RecentTxs . S.fromList <$> queryRecentTxs logg pool
+  numTxs <- getTransactionCount logg pool
+  ssRef <- newIORef $ ServerState recentTxs 0 numTxs circulatingCoins
+  logg Info $ fromString $ "Total number of transactions: " <> show numTxs
+  _ <- forkIO $ scheduledUpdates env pool ssRef (_serverEnv_runFill senv) (_serverEnv_fillDelay senv)
+  _ <- forkIO $ retryingListener env ssRef
+  logg Info $ fromString "Starting chainweb-data server"
+  throttledPool <- do
+    loadedSrc <- mkLoadedSource $ M.managed $ P.withResource pool
+    return $ do
+      loadedRes <- loadedSrc
+      load <- M.liftIO (lrLoadRef loadedRes)
+      return (lrResource loadedRes, throttlingFactor load)
+
+  let unThrottledPool = fst <$> throttledPool
+  let serverApp req =
+        ( ( recentTxsHandler ssRef
+            :<|> searchTxs logg throttledPool req
+            :<|> evHandler logg throttledPool req
+            :<|> txHandler logg unThrottledPool
+            :<|> txsHandler logg unThrottledPool
+            :<|> accountHandler logg throttledPool req
+          )
+          :<|> statsHandler ssRef
+          :<|> coinsHandler ssRef
+        )
+        :<|> richlistHandler
+  let swaggerServer = swaggerSchemaUIServer Spec.spec
+      noSwaggerServer = throw404 "Swagger UI server is not enabled on this instance"
+  Network.Wai.Handler.Warp.run (_serverEnv_port senv) $ setCors $ \req f ->
+    if _serverEnv_serveSwaggerUi senv
+      then serve (Proxy @ApiWithSwaggerUI) (serverApp req :<|> swaggerServer) req f
+      else serve (Proxy @ApiWithNoSwaggerUI) (serverApp req :<|> noSwaggerServer) req f
+
+retryingListener :: Env -> IORef ServerState -> IO ()
+retryingListener env ssRef = do
+  let logg = _env_logger env
+      delay = 10_000_000
+      policy = constantDelay delay
+      check _ _ = do
+        logg Warn $ fromString $ printf "Retrying node listener in %.1f seconds"
+          (fromIntegral delay / 1_000_000 :: Double)
+        return True
+  retrying policy check $ \_ -> do
+    logg Info "Starting node listener"
+    listenWithHandler env $ serverHeaderHandler env ssRef
+
+scheduledUpdates
+  :: Env
+  -> P.Pool Connection
+  -> IORef ServerState
+  -> Bool
+  -> Maybe Int
+  -> IO ()
+scheduledUpdates env pool ssRef runFill fillDelay = forever $ do
+  threadDelay (60 * 60 * 24 * micros)
+
+  now <- getCurrentTime
+  logg Info $ fromString $ show now
+  logg Info "Recalculating coins in circulation:"
+  height <- _ssHighestBlockHeight <$> readIORef ssRef
+  let circulatingCoins = getCirculatingCoins (fromIntegral height) now
+  logg Info $ fromString $ show circulatingCoins
+  let f ss = (ss & ssCirculatingCoins .~ circulatingCoins, ())
+  atomicModifyIORef' ssRef f
+
+  numTxs <- getTransactionCount logg pool
+  logg Info $ fromString $ "Updated number of transactions: " <> show numTxs
+  let g ss = (ss & ssTransactionCount %~ (numTxs <|>), ())
+  atomicModifyIORef' ssRef g
+
+  h <- getHomeDirectory
+  richList logg (h </> ".local/share") (ChainwebVersion $ _nodeInfo_chainwebVer $ _env_nodeInfo env)
+  logg Info "Updated rich list"
+
+  when runFill $ do
+    logg Info "Filling missing blocks"
+    gaps env (FillArgs fillDelay False)
+    logg Info "Fill finished"
+  where
micros = 1000000 + logg = _env_logger env + +richlistHandler :: Handler Text +richlistHandler = do + let f = "richlist.csv" + exists <- liftIO $ doesFileExist f + if exists + then liftIO $ T.readFile f + else throwError err404 + +coinsHandler :: IORef ServerState -> Handler Text +coinsHandler ssRef = liftIO $ fmap mkStats $ readIORef ssRef + where + mkStats ss = T.pack $ show $ _ssCirculatingCoins ss + +statsHandler :: IORef ServerState -> Handler ChainwebDataStats +statsHandler ssRef = liftIO $ do + fmap mkStats $ readIORef ssRef + where + mkStats ss = ChainwebDataStats (fromIntegral <$> _ssTransactionCount ss) + (Just $ realToFrac $ _ssCirculatingCoins ss) + +recentTxsHandler :: IORef ServerState -> Handler [TxSummary] +recentTxsHandler ss = liftIO $ fmap (toList . _recentTxs_txs . _ssRecentTxs) $ readIORef ss + +serverHeaderHandler :: Env -> IORef ServerState -> PowHeader -> IO () +serverHeaderHandler env ssRef ph@(PowHeader h _) = do + let pool = _env_dbConnPool env + let ni = _env_nodeInfo env + let chain = _blockHeader_chainId h + let height = _blockHeader_height h + let pair = T2 (_blockHeader_chainId h) (hashToDbHash $ _blockHeader_payloadHash h) + let logg = _env_logger env + payloadWithOutputs env pair >>= \case + Left e -> do + logg Error $ fromString $ printf "Couldn't fetch parent for: %s" + (hashB64U $ _blockHeader_hash h) + logg Info $ fromString $ show e + Right pl -> do + let hash = _blockHeader_hash h + tos = _blockPayloadWithOutputs_transactionsWithOutputs pl + ts = S.fromList $ map (\(t,tout) -> mkTxSummary chain height hash t tout) tos + f ss = (ss & ssRecentTxs %~ addNewTransactions ts + & ssHighestBlockHeight %~ max height + & (ssTransactionCount . _Just) +~ (fromIntegral $ S.length ts), ()) + + let msg = printf "Got new header on chain %d height %d" (unChainId chain) height + addendum = if S.length ts == 0 + then "" + else printf " with %d transactions" (S.length ts) + + logg Debug (fromString $ msg <> addendum) + mapM_ (logg Debug . fromString . show) tos + + atomicModifyIORef' ssRef f + insertNewHeader (_nodeInfo_chainwebVer ni) pool ph pl + + +instance BeamSqlBackendIsString Postgres (Maybe Text) +instance BeamSqlBackendIsString Postgres (Maybe String) + +type TxSearchToken = BSContinuation TxCursor + +readTxToken :: NextToken -> Maybe TxSearchToken +readTxToken tok = readBSToken tok <&> \mbBSC -> mbBSC <&> \(hgt, reqkey) -> + TxCursor hgt (DbHash reqkey) + +mkTxToken :: TxSearchToken -> NextToken +mkTxToken txt = mkBSToken $ txt <&> \c -> (txcHeight c, unDbHash $ txcReqKey c) + +-- We're looking up the execution strategy directly inside the 'Request' +-- instead of properly adding it as a RequestHeader to the servant endpoint +-- definition, because we don't actually expect the clients to set this +-- header. This header is meant for the application gateway to set for +-- tuning purposes. 
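+--
+-- As a sketch (hypothetical gateway setup), a reverse proxy could mark
+-- expensive public search traffic as preemptible by setting
+--
+-- >  Chainweb-Execution-Strategy: Bounded
+--
+-- on the requests it forwards; any value other than "Bounded" or "Unbounded"
+-- is rejected with a 400 by 'isBoundedStrategyM' below.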
+isBoundedStrategy :: Request -> Either ByteString Bool +isBoundedStrategy req = + case lookup (fromString headerName) $ requestHeaders req of + Nothing -> Right False + Just header -> case header of + "Bounded" -> Right True + "Unbounded" -> Right False + other -> Left $ toS $ "Unknown " <> fromString headerName <> ": " <> other + where headerName = "Chainweb-Execution-Strategy" + +isBoundedStrategyM :: Request -> Handler Bool +isBoundedStrategyM req = do + either throw400 return $ isBoundedStrategy req + +mkContinuation :: MonadError ServerError m => + (NextToken -> Maybe b) -> + Maybe Offset -> + Maybe NextToken -> + m (Either (Maybe Integer) b) +mkContinuation readTkn mbOffset mbNext = case (mbNext, mbOffset) of + (Just nextToken, Nothing) -> case readTkn nextToken of + Nothing -> throw400 $ toS $ "Invalid next token: " <> unNextToken nextToken + Just cont -> return $ Right cont + (Just _, Just _) -> throw400 "next token query parameter not allowed with offset" + (Nothing, Just (Offset offset)) -> return $ Left $ offset <$ guard (offset > 0) + (Nothing, Nothing) -> return $ Left Nothing + +searchTxs + :: LogFunctionIO Text + -> M.Managed ConnectionWithThrottling + -> Request + -> Maybe Limit + -> Maybe Offset + -> Maybe Text + -> Maybe BlockHeight -- ^ minimum block height + -> Maybe BlockHeight -- ^ maximum block height + -> Maybe NextToken + -> Handler (NextHeaders [TxSummary]) +searchTxs _ _ _ _ _ Nothing _ _ _ = throw404 "You must specify a search string" +searchTxs logger pool req givenMbLim mbOffset (Just search) minheight maxheight mbNext = do + liftIO $ logger Info $ fromString $ printf + "Transaction search from %s: %s" (show $ remoteHost req) (T.unpack search) + continuation <- mkContinuation readTxToken mbOffset mbNext + + isBounded <- isBoundedStrategyM req + + liftIO $ M.with pool $ \(c, throttling) -> do + let + scanLimit = ceiling $ 50000 * throttling + maxResultLimit = ceiling $ 250 * throttling + resultLimit = min maxResultLimit $ maybe 10 unLimit givenMbLim + strategy = if isBounded then Bounded scanLimit else Unbounded + + PG.withTransactionLevel PG.RepeatableRead c $ do + (mbCont, results) <- performBoundedScan strategy + (runBeamPostgresDebug (logger Debug . T.pack) c) + toTxSearchCursor + (txSearchSource search $ HeightRangeParams minheight maxheight) + noDecoration + continuation + resultLimit + return $ maybe noHeader (addHeader . mkTxToken) mbCont $ + results <&> \(s,_) -> TxSummary + { _txSummary_chain = fromIntegral $ dtsChainId s + , _txSummary_height = fromIntegral $ dtsHeight s + , _txSummary_blockHash = unDbHash $ dtsBlock s + , _txSummary_creationTime = dtsCreationTime s + , _txSummary_requestKey = unDbHash $ dtsReqKey s + , _txSummary_sender = dtsSender s + , _txSummary_code = dtsCode s + , _txSummary_continuation = unPgJsonb <$> dtsContinuation s + , _txSummary_result = maybe TxFailed (const TxSucceeded) $ dtsGoodResult s + } + +throw404 :: MonadError ServerError m => ByteString -> m a +throw404 msg = throwError $ err404 { errBody = msg } + +throw400 :: MonadError ServerError m => ByteString -> m a +throw400 msg = throwError $ err400 { errBody = msg } + +txHandler + :: LogFunctionIO Text + -> M.Managed Connection + -> Maybe RequestKey + -> Handler TxDetail +txHandler _ _ Nothing = throw404 "You must specify a search string" +txHandler logger pool (Just (RequestKey rk)) = + may404 $ liftIO $ M.with pool $ \c -> + runBeamPostgresDebug (logger Debug . 
T.pack) c $ do + r <- runSelectReturningOne $ select $ do + tx <- all_ (_cddb_transactions database) + blk <- all_ (_cddb_blocks database) + guard_ (_tx_block tx `references_` blk) + guard_ (_tx_requestKey tx ==. val_ (DbHash rk)) + return (tx,blk) + evs <- runSelectReturningList $ select $ do + ev <- all_ (_cddb_events database) + guard_ (_ev_requestkey ev ==. val_ (RKCB_RequestKey $ DbHash rk)) + return ev + return $ (`fmap` r) $ \(tx,blk) -> TxDetail + { _txDetail_ttl = fromIntegral $ _tx_ttl tx + , _txDetail_gasLimit = fromIntegral $ _tx_gasLimit tx + , _txDetail_gasPrice = _tx_gasPrice tx + , _txDetail_nonce = _tx_nonce tx + , _txDetail_pactId = unDbHash <$> _tx_pactId tx + , _txDetail_rollback = _tx_rollback tx + , _txDetail_step = fromIntegral <$> _tx_step tx + , _txDetail_data = unMaybeValue $ _tx_data tx + , _txDetail_proof = _tx_proof tx + , _txDetail_gas = fromIntegral $ _tx_gas tx + , _txDetail_result = + maybe (unMaybeValue $ _tx_badResult tx) unPgJsonb $ + _tx_goodResult tx + , _txDetail_logs = fromMaybe "" $ _tx_logs tx + , _txDetail_metadata = unMaybeValue $ _tx_metadata tx + , _txDetail_continuation = unPgJsonb <$> _tx_continuation tx + , _txDetail_txid = maybe 0 fromIntegral $ _tx_txid tx + , _txDetail_chain = fromIntegral $ _tx_chainId tx + , _txDetail_height = fromIntegral $ _block_height blk + , _txDetail_blockTime = _block_creationTime blk + , _txDetail_blockHash = unDbHash $ unBlockId $ _tx_block tx + , _txDetail_creationTime = _tx_creationTime tx + , _txDetail_requestKey = unDbHash $ _tx_requestKey tx + , _txDetail_sender = _tx_sender tx + , _txDetail_code = _tx_code tx + , _txDetail_success = + maybe False (const True) $ _tx_goodResult tx + , _txDetail_events = map toTxEvent evs + } + + where + unMaybeValue = maybe Null unPgJsonb + toTxEvent ev = + TxEvent (_ev_qualName ev) (unPgJsonb $ _ev_params ev) + may404 a = a >>= maybe (throw404 "Tx not found") return + +txsHandler + :: LogFunctionIO Text + -> M.Managed Connection + -> Maybe RequestKey + -> Handler [TxDetail] +txsHandler _ _ Nothing = throw404 "You must specify a search string" +txsHandler logger pool (Just (RequestKey rk)) = + emptyList404 $ liftIO $ M.with pool $ \c -> + runBeamPostgresDebug (logger Debug . T.pack) c $ do + r <- runSelectReturningList $ select $ do + tx <- all_ (_cddb_transactions database) + blk <- all_ (_cddb_blocks database) + guard_ (_tx_block tx `references_` blk) + guard_ (_tx_requestKey tx ==. val_ (DbHash rk)) + return (tx,blk) + evs <- runSelectReturningList $ select $ do + ev <- all_ (_cddb_events database) + guard_ (_ev_requestkey ev ==. 
val_ (RKCB_RequestKey $ DbHash rk))
+        return ev
+      return $ (`fmap` r) $ \(tx,blk) -> TxDetail
+        { _txDetail_ttl = fromIntegral $ _tx_ttl tx
+        , _txDetail_gasLimit = fromIntegral $ _tx_gasLimit tx
+        , _txDetail_gasPrice = _tx_gasPrice tx
+        , _txDetail_nonce = _tx_nonce tx
+        , _txDetail_pactId = unDbHash <$> _tx_pactId tx
+        , _txDetail_rollback = _tx_rollback tx
+        , _txDetail_step = fromIntegral <$> _tx_step tx
+        , _txDetail_data = unMaybeValue $ _tx_data tx
+        , _txDetail_proof = _tx_proof tx
+        , _txDetail_gas = fromIntegral $ _tx_gas tx
+        , _txDetail_result =
+          maybe (unMaybeValue $ _tx_badResult tx) unPgJsonb $
+          _tx_goodResult tx
+        , _txDetail_logs = fromMaybe "" $ _tx_logs tx
+        , _txDetail_metadata = unMaybeValue $ _tx_metadata tx
+        , _txDetail_continuation = unPgJsonb <$> _tx_continuation tx
+        , _txDetail_txid = maybe 0 fromIntegral $ _tx_txid tx
+        , _txDetail_chain = fromIntegral $ _tx_chainId tx
+        , _txDetail_height = fromIntegral $ _block_height blk
+        , _txDetail_blockTime = _block_creationTime blk
+        , _txDetail_blockHash = unDbHash $ unBlockId $ _tx_block tx
+        , _txDetail_creationTime = _tx_creationTime tx
+        , _txDetail_requestKey = unDbHash $ _tx_requestKey tx
+        , _txDetail_sender = _tx_sender tx
+        , _txDetail_code = _tx_code tx
+        , _txDetail_success =
+          maybe False (const True) $ _tx_goodResult tx
+        , _txDetail_events = map toTxEvent evs
+        }
+
+  where
+    emptyList404 xs = xs >>= \case
+      [] -> throw404 "no txs found"
+      ys -> return ys
+    unMaybeValue = maybe Null unPgJsonb
+    toTxEvent ev =
+      TxEvent (_ev_qualName ev) (unPgJsonb $ _ev_params ev)
+
+type AccountNextToken = (Int64, T.Text, Int64)
+
+readToken :: Read a => NextToken -> Maybe a
+readToken (NextToken nextToken) = readMay $ toS $ B64.decodeLenient $ toS nextToken
+
+mkToken :: Show a => a -> NextToken
+mkToken contents = NextToken $ T.pack $
+  toS $ BS.filter (/= 0x3d) $ B64.encode $ toS $ show contents
+
+accountHandler
+  :: LogFunctionIO Text
+  -> M.Managed ConnectionWithThrottling
+  -> Request
+  -> Text -- ^ account identifier
+  -> Maybe Text -- ^ token type
+  -> Maybe ChainId -- ^ chain identifier
+  -> Maybe BlockHeight -- ^ minimum block height
+  -> Maybe BlockHeight -- ^ maximum block height
+  -> Maybe Limit
+  -> Maybe Offset
+  -> Maybe NextToken
+  -> Handler (NextHeaders [TransferDetail])
+accountHandler logger pool req account token chain minheight maxheight limit mbOffset mbNext = do
+  let usedCoinType = fromMaybe "coin" token
+  liftIO $ logger Info $
+    fromString $ printf "Account search from %s for: %s %s %s" (show $ remoteHost req) (T.unpack account) (T.unpack usedCoinType) (maybe "" show chain)
+
+  continuation <- mkContinuation readEventToken mbOffset mbNext
+  isBounded <- isBoundedStrategyM req
+  let searchParams = TransferSearchParams
+        { tspToken = usedCoinType
+        , tspChainId = chain
+        , tspHeightRange = HeightRangeParams minheight maxheight
+        , tspAccount = account
+        }
+  liftIO $ M.with pool $ \(c, throttling) -> do
+    let
+      scanLimit = ceiling $ 50000 * throttling
+      maxResultLimit = ceiling $ 250 * throttling
+      resultLimit = min maxResultLimit $ maybe 10 unLimit limit
+      strategy = if isBounded then Bounded scanLimit else Unbounded
+    PG.withTransactionLevel PG.RepeatableRead c $ do
+      (mbCont, results) <- performBoundedScan strategy
+        (runBeamPostgresDebug (logger Debug . T.pack) c)
+        toAccountsSearchCursor
+        (transfersSearchSource searchParams)
+        transferSearchExtras
+        continuation
+        resultLimit
+      return $ maybe noHeader (addHeader . 
mkEventToken) mbCont $ results <&> \(tr, extras) -> TransferDetail + { _trDetail_token = _tr_modulename tr + , _trDetail_chain = fromIntegral $ _tr_chainid tr + , _trDetail_height = fromIntegral $ _tr_height tr + , _trDetail_blockHash = unDbHash $ unBlockId $ _tr_block tr + , _trDetail_requestKey = getTxHash $ _tr_requestkey tr + , _trDetail_idx = fromIntegral $ _tr_idx tr + , _trDetail_amount = StringEncoded $ getKDAScientific $ _tr_amount tr + , _trDetail_fromAccount = _tr_from_acct tr + , _trDetail_toAccount = _tr_to_acct tr + , _trDetail_blockTime = tseBlockTime extras + } + +type EventSearchToken = BSContinuation EventCursor + +readBSToken :: Read cursor => NextToken -> Maybe (BSContinuation cursor) +readBSToken tok = readToken tok <&> \(cursor, offNum) -> + BSContinuation cursor $ if offNum <= 0 then Nothing else Just offNum + +mkBSToken :: Show cursor => BSContinuation cursor -> NextToken +mkBSToken (BSContinuation cur mbOff) = mkToken (cur, fromMaybe 0 mbOff) + +readEventToken :: NextToken -> Maybe EventSearchToken +readEventToken tok = readBSToken tok <&> \mbBSC -> mbBSC <&> \(hgt,reqkey,idx) -> + EventCursor hgt (rkcbFromText reqkey) idx + +mkEventToken :: EventSearchToken -> NextToken +mkEventToken est = mkBSToken $ est <&> \c -> + ( ecHeight c + , show $ ecReqKey c + , ecIdx c + ) + +evHandler + :: LogFunctionIO Text + -> M.Managed ConnectionWithThrottling + -> Request + -> Maybe Limit + -> Maybe Offset + -> Maybe Text -- ^ fulltext search + -> Maybe EventParam + -> Maybe EventName + -> Maybe EventModuleName + -> Maybe BlockHeight -- ^ minimum block height + -> Maybe BlockHeight -- ^ maximum block height + -> Maybe NextToken + -> Handler (NextHeaders [EventDetail]) +evHandler logger pool req limit mbOffset qSearch qParam qName qModuleName minheight maxheight mbNext = do + liftIO $ logger Info $ fromString $ printf "Event search from %s: %s" (show $ remoteHost req) (maybe "\"\"" T.unpack qSearch) + continuation <- mkContinuation readEventToken mbOffset mbNext + let searchParams = EventSearchParams + { espSearch = qSearch + , espParam = qParam + , espName = qName + , espModuleName = qModuleName + } + + isBounded <- isBoundedStrategyM req + + liftIO $ M.with pool $ \(c, throttling) -> do + let + scanLimit = ceiling $ 50000 * throttling + maxResultLimit = ceiling $ 250 * throttling + resultLimit = min maxResultLimit $ maybe 10 unLimit limit + strategy = if isBounded then Bounded scanLimit else Unbounded + PG.withTransactionLevel PG.RepeatableRead c $ do + (mbCont, results) <- performBoundedScan strategy + (runBeamPostgresDebug (logger Debug . T.pack) c) + toEventsSearchCursor + (eventsSearchSource searchParams $ HeightRangeParams minheight maxheight) + eventSearchExtras + continuation + resultLimit + return $ maybe noHeader (addHeader . mkEventToken) mbCont $ + results <&> \(ev,extras) -> EventDetail + { _evDetail_name = _ev_qualName ev + , _evDetail_params = unPgJsonb $ _ev_params ev + , _evDetail_moduleHash = _ev_moduleHash ev + , _evDetail_chain = fromIntegral $ _ev_chainid ev + , _evDetail_height = fromIntegral $ _ev_height ev + , _evDetail_blockTime = eseBlockTime extras + , _evDetail_blockHash = unDbHash $ unBlockId $ _ev_block ev + , _evDetail_requestKey = getTxHash $ _ev_requestkey ev + , _evDetail_idx = fromIntegral $ _ev_idx ev + } + +data h :. t = h :. t deriving (Eq,Ord,Show,Read,Typeable) +infixr 3 :. + +type instance QExprToIdentity (a :. b) = (QExprToIdentity a) :. (QExprToIdentity b) +type instance QExprToField (a :. b) = (QExprToField a) :. 
(QExprToField b) + + +queryRecentTxs :: LogFunctionIO Text -> P.Pool Connection -> IO [TxSummary] +queryRecentTxs logger pool = do + liftIO $ logger Info "Getting recent transactions" + P.withResource pool $ \c -> do + res <- runBeamPostgresDebug (logger Debug . T.pack) c $ + runSelectReturningList $ select $ do + limit_ 20 $ orderBy_ (desc_ . getHeight) $ do + tx <- all_ (_cddb_transactions database) + return + ( (_tx_chainId tx) + , (_tx_height tx) + , (unBlockId $ _tx_block tx) + , (_tx_creationTime tx) + , (_tx_requestKey tx) + , (_tx_sender tx) + , ((_tx_code tx) + , (_tx_continuation tx) + , (_tx_goodResult tx) + )) + return $ mkSummary <$> res + where + getHeight (_,a,_,_,_,_,_) = a + mkSummary (a,b,c,d,e,f,(g,h,i)) = TxSummary (fromIntegral a) (fromIntegral b) (unDbHash c) d (unDbHash e) f g (unPgJsonb <$> h) (maybe TxFailed (const TxSucceeded) i) + +getTransactionCount :: LogFunctionIO Text -> P.Pool Connection -> IO (Maybe Int64) +getTransactionCount logger pool = do + P.withResource pool $ \c -> do + runBeamPostgresDebug (logger Debug . T.pack) c $ runSelectReturningOne $ select $ + aggregate_ (\_ -> as_ @Int64 countAll_) (all_ (_cddb_transactions database)) + +data RecentTxs = RecentTxs + { _recentTxs_txs :: Seq TxSummary + } deriving (Eq,Show) + +getSummaries :: RecentTxs -> [TxSummary] +getSummaries (RecentTxs s) = toList s + +addNewTransactions :: Seq TxSummary -> RecentTxs -> RecentTxs +addNewTransactions txs (RecentTxs s1) = RecentTxs s2 + where + maxTransactions = 10 + s2 = S.take maxTransactions $ txs <> s1 + +unPgJsonb :: PgJSONB a -> a +unPgJsonb (PgJSONB v) = v + +-- | A "LoadedResource" is a resource along with a way to read an integer +-- quantity representing how much load there is on the resource currently +data LoadedResource resource = LoadedResource + { lrResource :: resource + , lrLoadRef :: IO Integer + } + +type LoadedSource resource = M.Managed (LoadedResource resource) + +-- | Wrap a given "Managed" with a layer that keeps track of how many other +-- consumers there currently are using or waiting on the inner "Managed". +-- At any given moment, this number can be read through the "lrLoadRef" of the +-- provided "LoadedResource". 
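+--
+-- A minimal usage sketch (this mirrors how 'apiServerCut' builds its
+-- throttled pool above; 'useConn' is a hypothetical stand-in for any
+-- consumer of the resource):
+--
+-- > loadedSrc <- mkLoadedSource $ M.managed $ P.withResource pool
+-- > M.with loadedSrc $ \lr -> do
+-- >   load <- lrLoadRef lr  -- current number of users and waiters
+-- >   useConn (lrResource lr) (throttlingFactor load)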
+mkLoadedSource :: M.Managed resource -> IO (M.Managed (LoadedResource resource)) +mkLoadedSource innerSource = do + loadRef <- newIORef 0 + let modifyLoad f = atomicModifyIORef' loadRef $ \load -> (f load, ()) + return $ M.managed $ \outerBorrower -> + bracket_ (modifyLoad succ) (modifyLoad pred) $ + M.with innerSource $ \resource -> outerBorrower $ + LoadedResource resource (readIORef loadRef) diff --git a/exec/Chainweb/Single.hs b/haskell-src/exec/Chainweb/Single.hs similarity index 97% rename from exec/Chainweb/Single.hs rename to haskell-src/exec/Chainweb/Single.hs index 7807cecd..d6f08255 100644 --- a/exec/Chainweb/Single.hs +++ b/haskell-src/exec/Chainweb/Single.hs @@ -3,7 +3,7 @@ module Chainweb.Single ( single ) where import Chainweb.Api.ChainId (ChainId(..)) import Chainweb.Api.Common (BlockHeight) -import Chainweb.Env +import ChainwebData.Env import Chainweb.Lookups import Chainweb.Worker (writeBlock) import ChainwebData.Types diff --git a/exec/Chainweb/Worker.hs b/haskell-src/exec/Chainweb/Worker.hs similarity index 64% rename from exec/Chainweb/Worker.hs rename to haskell-src/exec/Chainweb/Worker.hs index 3df83d4b..50a7b2a2 100644 --- a/exec/Chainweb/Worker.hs +++ b/haskell-src/exec/Chainweb/Worker.hs @@ -15,8 +15,9 @@ import Chainweb.Api.BlockHeader import Chainweb.Api.BlockPayloadWithOutputs import Chainweb.Api.ChainId (ChainId(..)) import Chainweb.Api.Hash -import Chainweb.Database -import Chainweb.Env +import Chainweb.Api.NodeInfo +import ChainwebDb.Database +import ChainwebData.Env import Chainweb.Lookups import ChainwebData.Types import ChainwebDb.Types.Block @@ -25,6 +26,7 @@ import ChainwebDb.Types.Event import ChainwebDb.Types.MinerKey import ChainwebDb.Types.Signer import ChainwebDb.Types.Transaction +import ChainwebDb.Types.Transfer import Control.Lens (iforM_) import Control.Retry import qualified Data.ByteString as B @@ -33,6 +35,8 @@ import qualified Data.Map as M import qualified Data.Pool as P import qualified Data.Text as T import qualified Data.Text.Encoding as T +import Data.Time.Clock (UTCTime) +import Data.Time.Clock.POSIX (posixSecondsToUTCTime) import Data.Tuple.Strict (T2(..)) import Database.Beam hiding (insert) import Database.Beam.Backend.SQL.BeamExtensions @@ -44,8 +48,8 @@ import System.Logger hiding (logg) -- | Write a Block and its Transactions to the database. Also writes the Miner -- if it hasn't already been via some other block. 
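+-- The 'Transfer' rows derived from TRANSFER events are inserted in the same
+-- 'withTransaction' block as the rest of the block data, so a block and its
+-- transfers land atomically.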
-writes :: P.Pool Connection -> Block -> [T.Text] -> [Transaction] -> [Event] -> [Signer] -> IO () -writes pool b ks ts es ss = P.withResource pool $ \c -> withTransaction c $ do +writes :: P.Pool Connection -> Block -> [T.Text] -> [Transaction] -> [Event] -> [Signer] -> [Transfer] -> IO () +writes pool b ks ts es ss tf = P.withResource pool $ \c -> withTransaction c $ do runBeamPostgres c $ do -- Write the Block if unique -- runInsert @@ -69,14 +73,17 @@ writes pool b ks ts es ss = P.withResource pool $ \c -> withTransaction c $ do runInsert $ insert (_cddb_signers database) (insertValues ss) $ onConflict (conflictingFields primaryKey) onConflictDoNothing + runInsert + $ insert (_cddb_transfers database) (insertValues tf) + $ onConflict (conflictingFields primaryKey) onConflictDoNothing -- liftIO $ printf "[OKAY] Chain %d: %d: %s %s\n" -- (_block_chainId b) -- (_block_height b) -- (unDbHash $ _block_hash b) -- (map (const '.') ts) -batchWrites :: P.Pool Connection -> Bool -> [Block] -> [[T.Text]] -> [[Transaction]] -> [[Event]] -> [[Signer]] -> IO () -batchWrites pool indexesDisabled bs kss tss ess sss = P.withResource pool $ \c -> withTransaction c $ do +batchWrites :: P.Pool Connection -> Bool -> [Block] -> [[T.Text]] -> [[Transaction]] -> [[Event]] -> [[Signer]] -> [[Transfer]] -> IO () +batchWrites pool indexesDisabled bs kss tss ess sss tfs = P.withResource pool $ \c -> withTransaction c $ do runBeamPostgres c $ do -- Write the Blocks if unique runInsert @@ -87,18 +94,22 @@ batchWrites pool indexesDisabled bs kss tss ess sss = P.withResource pool $ \c - $ insert (_cddb_minerkeys database) (insertValues $ concat $ zipWith (\b ks -> map (MinerKey (pk b)) ks) bs kss) $ actionOnConflict $ onConflict (conflictingFields primaryKey) onConflictDoNothing - withSavepoint c $ runBeamPostgres c $ do - -- Write the TXs if unique - runInsert - $ insert (_cddb_transactions database) (insertValues $ concat tss) - $ actionOnConflict $ onConflict (conflictingFields primaryKey) onConflictDoNothing + withSavepoint c $ do + runBeamPostgres c $ do + -- Write the TXs if unique + runInsert + $ insert (_cddb_transactions database) (insertValues $ concat tss) + $ actionOnConflict $ onConflict (conflictingFields primaryKey) onConflictDoNothing - runInsert - $ insert (_cddb_events database) (insertValues $ concat ess) - $ actionOnConflict $ onConflict (conflictingFields primaryKey) onConflictDoNothing - runInsert - $ insert (_cddb_signers database) (insertValues $ concat sss) - $ actionOnConflict $ onConflict (conflictingFields primaryKey) onConflictDoNothing + runInsert + $ insert (_cddb_events database) (insertValues $ concat ess) + $ actionOnConflict $ onConflict (conflictingFields primaryKey) onConflictDoNothing + runInsert + $ insert (_cddb_signers database) (insertValues $ concat sss) + $ actionOnConflict $ onConflict (conflictingFields primaryKey) onConflictDoNothing + runInsert + $ insert (_cddb_transfers database) (insertValues $ concat tfs) + $ actionOnConflict $ onConflict (conflictingFields primaryKey) onConflictDoNothing where {- the type system won't allow me to simply inline the "other" expression -} actionOnConflict other = if indexesDisabled @@ -126,9 +137,13 @@ writeBlock env pool count bh = do !t = mkBlockTransactions b pl !es = mkBlockEvents (fromIntegral $ _blockHeader_height bh) (_blockHeader_chainId bh) (DbHash $ hashB64U $ _blockHeader_hash bh) pl !ss = concat $ map (mkTransactionSigners . 
fst) (_blockPayloadWithOutputs_transactionsWithOutputs pl) + version = _nodeInfo_chainwebVer $ _env_nodeInfo env !k = bpwoMinerKeys pl - atomicModifyIORef' count (\n -> (n+1, ())) - writes pool b k t es ss + err = printf "writeBlock failed because we don't know how to work this version %s" version + withEventsMinHeight version err $ \evMinHeight -> do + let !tf = mkTransferRows (fromIntegral $ _blockHeader_height bh) (_blockHeader_chainId bh) (DbHash $ hashB64U $ _blockHeader_hash bh) (posixSecondsToUTCTime $ _blockHeader_creationTime bh) pl evMinHeight + atomicModifyIORef' count (\n -> (n+1, ())) + writes pool b k t es ss tf where policy :: RetryPolicyM IO policy = exponentialBackoff 250_000 <> limitRetries 3 @@ -137,23 +152,27 @@ writeBlocks :: Env -> P.Pool Connection -> Bool -> IORef Int -> [BlockHeader] -> writeBlocks env pool disableIndexesPred count bhs = do iforM_ blocksByChainId $ \chain (Sum numWrites, bhs') -> do let ff bh = (hashToDbHash $ _blockHeader_payloadHash bh, _blockHeader_hash bh) - retrying policy check (const $ payloadWithOutputsBatch env chain (M.fromList (ff <$> bhs'))) >>= \case + retrying policy check (const $ payloadWithOutputsBatch env chain (M.fromList (ff <$> bhs')) id) >>= \case Left e -> do logger Error $ fromString $ printf "Couldn't fetch payload batch for chain: %d" (unChainId chain) - print e + logger Error $ fromString $ show e Right pls' -> do let !pls = M.fromList pls' !ms = _blockPayloadWithOutputs_minerData <$> pls !bs = M.intersectionWith (\m bh -> asBlock (asPow bh) m) ms (makeBlockMap bhs') !tss = M.intersectionWith (flip mkBlockTransactions) pls bs + version = _nodeInfo_chainwebVer $ _env_nodeInfo env !ess = M.intersectionWith (\pl bh -> mkBlockEvents (fromIntegral $ _blockHeader_height bh) (_blockHeader_chainId bh) (DbHash $ hashB64U $ _blockHeader_hash bh) pl) pls (makeBlockMap bhs') !sss = M.intersectionWith (\pl _ -> concat $ mkTransactionSigners . fst <$> _blockPayloadWithOutputs_transactionsWithOutputs pl) pls (makeBlockMap bhs') !kss = M.intersectionWith (\p _ -> bpwoMinerKeys p) pls (makeBlockMap bhs') - batchWrites pool disableIndexesPred (M.elems bs) (M.elems kss) (M.elems tss) (M.elems ess) (M.elems sss) - atomicModifyIORef' count (\n -> (n + numWrites, ())) + err = printf "writeBlocks failed because we don't know how to work this version %s" version + withEventsMinHeight version err $ \evMinHeight -> do + let !tfs = M.intersectionWith (\pl bh -> mkTransferRows (fromIntegral $ _blockHeader_height bh) (_blockHeader_chainId bh) (DbHash $ hashB64U $ _blockHeader_hash bh) (posixSecondsToUTCTime $ _blockHeader_creationTime bh) pl evMinHeight) pls (makeBlockMap bhs') + batchWrites pool disableIndexesPred (M.elems bs) (M.elems kss) (M.elems tss) (M.elems ess) (M.elems sss) (M.elems tfs) + atomicModifyIORef' count (\n -> (n + numWrites, ())) where makeBlockMap = M.fromList . 
fmap (\bh -> (_blockHeader_hash bh, bh)) @@ -180,20 +199,27 @@ writePayload -> ChainId -> DbHash BlockHash -> Int64 + -> T.Text + -> UTCTime -> BlockPayloadWithOutputs -> IO () -writePayload pool chain blockHash blockHeight bpwo = do +writePayload pool chain blockHash blockHeight version creationTime bpwo = do let (cbEvents, txEvents) = mkBlockEvents' blockHeight chain blockHash bpwo - - P.withResource pool $ \c -> - withTransaction c $ do - runBeamPostgres c $ - runInsert - $ insert (_cddb_events database) (insertValues $ cbEvents ++ concatMap snd txEvents) - $ onConflict (conflictingFields primaryKey) onConflictDoNothing - withSavepoint c $ runBeamPostgres c $ - forM_ txEvents $ \(reqKey, events) -> - runUpdate - $ update (_cddb_transactions database) - (\tx -> _tx_numEvents tx <-. val_ (Just $ fromIntegral $ length events)) - (\tx -> _tx_requestKey tx ==. val_ reqKey) + err = printf "writePayload failed because we don't know how to work this version %s" version + withEventsMinHeight version err $ \evMinHeight -> do + let !tfs = mkTransferRows blockHeight chain blockHash creationTime bpwo evMinHeight + P.withResource pool $ \c -> + withTransaction c $ do + runBeamPostgres c $ do + runInsert + $ insert (_cddb_events database) (insertValues $ cbEvents ++ concatMap snd txEvents) + $ onConflict (conflictingFields primaryKey) onConflictDoNothing + runInsert + $ insert (_cddb_transfers database) (insertValues tfs) + $ onConflict (conflictingFields primaryKey) onConflictDoNothing + withSavepoint c $ runBeamPostgres c $ + forM_ txEvents $ \(reqKey, events) -> + runUpdate + $ update (_cddb_transactions database) + (\tx -> _tx_numEvents tx <-. val_ (Just $ fromIntegral $ length events)) + (\tx -> _tx_requestKey tx ==. val_ reqKey) diff --git a/haskell-src/exec/Main.hs b/haskell-src/exec/Main.hs new file mode 100644 index 00000000..28d1360b --- /dev/null +++ b/haskell-src/exec/Main.hs @@ -0,0 +1,217 @@ +{-# LANGUAGE BangPatterns #-} +{-# LANGUAGE LambdaCase #-} +{-# LANGUAGE NumericUnderscores #-} +{-# LANGUAGE OverloadedStrings #-} +{-# LANGUAGE TypeApplications #-} + +module Main where + +import Chainweb.Api.ChainId (ChainId(..)) +import Chainweb.Api.NodeInfo +import Chainweb.Backfill (backfill) +import Chainweb.BackfillTransfers (backfillTransfersCut) +import ChainwebDb.Database (initializeTables) +import ChainwebData.Env +import Chainweb.FillEvents (fillEvents) +import Chainweb.Gaps +import Chainweb.Listen (listen) +import Chainweb.Lookups (getNodeInfo) +import Chainweb.RichList (richList) +import Chainweb.Server (apiServer) +import Chainweb.Single (single) +import Control.Lens +import Control.Monad (unless,void) +import Data.Bifunctor +import qualified Data.Pool as P +import Data.String +import Data.Text (Text) +import Database.PostgreSQL.Simple +import qualified Database.PostgreSQL.Simple.Migration as Mg +import Network.Connection hiding (Connection) +import Network.HTTP.Client hiding (withConnection) +import Network.HTTP.Client.TLS +import Options.Applicative +import System.Directory +import System.Exit +import System.Logger hiding (logg) +import System.FilePath +import Text.Printf + +--- + +main :: IO () +main = do + args <- execParser opts + withHandleBackend backendConfig $ \backend -> + withLogger (config (getLevel args)) backend $ \logger -> do + let logg = loggerFunIO logger + case args of + MigrateOnly pgc _ -> withPool pgc $ \pool -> + runMigrations pool logg RunMigration False + RichListArgs (NodeDbPath mfp) _ version -> do + fp <- case mfp of + Nothing -> do + h <- getHomeDirectory 
+          let h' = h </> ".local/share"
+          logg Info $ "Constructing rich list using default db-path: " <> fromString h'
+          return h'
+        Just fp -> do
+          logg Info $ "Constructing rich list using given db-path: " <> fromString fp
+          return fp
+      richList logg fp version
+    Args c pgc us u _ ms -> do
+      logg Info $ "Using database: " <> fromString (show pgc)
+      logg Info $ "Service API: " <> fromString (showUrlScheme us)
+      logg Info $ "P2P API: " <> fromString (showUrlScheme (UrlScheme Https u))
+      withCWDPool pgc $ \pool -> do
+        runMigrations pool logg ms (isIndexedDisabled c)
+        let mgrSettings = mkManagerSettings (TLSSettingsSimple True False False) Nothing
+        m <- newManager mgrSettings
+        getNodeInfo m us >>= \case
+          Left e -> logg Error (fromString $ printf "Unable to connect to %s /info endpoint%s" (showUrlScheme us) e) >> exitFailure
+          Right ni -> do
+            let !mcids = map (second (map (ChainId . fst))) <$> _nodeInfo_graphs ni
+            case mcids of
+              Nothing -> logg Error "Node did not have graph information" >> exitFailure
+              Just cids -> do
+                let !env = Env m pool us u ni cids logg
+                case c of
+                  Listen -> listen env
+                  Backfill as -> backfill env as
+                  BackFillTransfers indexP as -> backfillTransfersCut env indexP as
+                  Fill as -> gaps env as
+                  Single cid h -> single env cid h
+                  FillEvents as et -> fillEvents env as et
+                  Server serverEnv -> apiServer env serverEnv
+  where
+    opts = info ((richListP <|> migrateOnlyP <|> envP) <**> helper)
+      (fullDesc <> header "chainweb-data - Processing and analysis of Chainweb data")
+    config level = defaultLoggerConfig
+      & loggerConfigThreshold .~ level
+    backendConfig = defaultHandleBackendConfig
+    isIndexedDisabled = \case
+      Fill (FillArgs _ p) -> p
+      _ -> False
+    getLevel = \case
+      Args _ _ _ _ level _ -> level
+      RichListArgs _ level _ -> level
+      MigrateOnly _ level -> level
+
+runMigrations ::
+  P.Pool Connection -> LogFunctionIO Text -> MigrateStatus -> Bool -> IO ()
+runMigrations pool logg ms indexesDisabled = do
+  P.withResource pool $ \conn ->
+    unless indexesDisabled $ do
+      initializeTables logg ms conn
+      addTransactionsHeightIndex logg conn
+      addEventsHeightChainIdIdxIndex logg conn
+      addEventsHeightNameParamsIndex logg conn
+      addFromAccountsIndex logg conn
+      addToAccountsIndex logg conn
+      addTransactionsRequestKeyIndex logg conn
+      addEventsRequestKeyIndex logg conn
+      initializePGSimpleMigrations logg conn
+      logg Info "DB Tables Initialized"
+
+
+data IndexCreationInfo = IndexCreationInfo
+  {
+    message :: Text
+  , statement :: Query
+  }
+
+addIndex :: IndexCreationInfo -> LogFunctionIO Text -> Connection -> IO ()
+addIndex (IndexCreationInfo m s) logg conn = do
+  logg Info m
+  void $ execute_ conn s
+
+addTransactionsHeightIndex :: LogFunctionIO Text -> Connection -> IO ()
+addTransactionsHeightIndex =
+  addIndex $
+    IndexCreationInfo
+    {
+      message = "Adding height index on transactions table"
+    , statement = "CREATE INDEX IF NOT EXISTS transactions_height_idx ON transactions(height);"
+    }
+
+addEventsHeightChainIdIdxIndex :: LogFunctionIO Text -> Connection -> IO ()
+addEventsHeightChainIdIdxIndex =
+  addIndex $
+    IndexCreationInfo
+    {
+      message = "Adding (height, chainid, idx) index on events table"
+    , statement = "CREATE INDEX IF NOT EXISTS events_height_chainid_idx ON events(height DESC, chainid ASC, idx ASC);"
+    }
+
+-- This is roughly "events_height_name_expr_expr1_idx" btree (height, name,
+-- (params ->> 0), (params ->> 1)) WHERE name::text = 'TRANSFER'::text
+
+addEventsHeightNameParamsIndex :: LogFunctionIO Text -> Connection -> IO ()
+addEventsHeightNameParamsIndex = + addIndex $ + IndexCreationInfo + { + message = "Adding \"(height,name,(params ->> 0),(params ->> 1)) WHERE name = 'TRANSFER'\" index" + , statement = "CREATE INDEX IF NOT EXISTS events_height_name_expr_expr1_idx ON events (height desc, name, (params ->> 0), (params ->> 1)) WHERE name = 'TRANSFER';" + } + +addEventsModuleNameIndex :: LogFunctionIO Text -> Connection -> IO () +addEventsModuleNameIndex = + addIndex $ + IndexCreationInfo + { + message = "Adding \"(height desc, chainid, module)\" index" + , statement = "CREATE INDEX IF NOT EXISTS events_height_chainid_module ON events (height DESC, chainid, module);" + } + +addFromAccountsIndex :: LogFunctionIO Text -> Connection -> IO () +addFromAccountsIndex = + addIndex + IndexCreationInfo + { + message = "Adding \"(from_acct, height desc, idx)\" index on transfers table" + , statement = "CREATE INDEX IF NOT EXISTS transfers_from_acct_height_idx ON transfers (from_acct, height desc, idx);" + } + +addToAccountsIndex :: LogFunctionIO Text -> Connection -> IO () +addToAccountsIndex = + addIndex + IndexCreationInfo + { + message = "Adding \"(to_acct, height desc,idx)\" index on transfers table" + , statement = "CREATE INDEX IF NOT EXISTS transfers_to_acct_height_idx_idx ON transfers (to_acct, height desc, idx);" + } + +addTransactionsRequestKeyIndex :: LogFunctionIO Text -> Connection -> IO () +addTransactionsRequestKeyIndex = + addIndex + IndexCreationInfo + { + message = "Adding \"(requestkey)\" index on transactions table" + , statement = "CREATE INDEX IF NOT EXISTS transactions_requestkey_idx ON transactions (requestkey);" + } + +addEventsRequestKeyIndex :: LogFunctionIO Text -> Connection -> IO () +addEventsRequestKeyIndex = + addIndex + IndexCreationInfo + { + message = "Adding \"(requestkey)\" index on events table" + , statement = "CREATE INDEX IF NOT EXISTS events_requestkey_idx ON events (requestkey);" + } + +initializePGSimpleMigrations :: LogFunctionIO Text -> Connection -> IO () +initializePGSimpleMigrations logg conn = do + logg Info "Initializing the incremental migrations table" + Mg.runMigration (Mg.MigrationContext Mg.MigrationInitialization False conn) >>= \case + Mg.MigrationError err -> do + let msg = "Error initializing migrations: " ++ err + logg Error $ fromString msg + die msg + Mg.MigrationSuccess -> logg Info "Initialized migrations" + +{- +λ> :main single --chain 2 --height 1487570 --service-host api.chainweb.com --p2p-host us-e3.chainweb.com --dbname chainweb-data --service-port 443 --service-https +λ> :main single --chain 0 --height 1494311 --service-host api.chainweb.com --p2p-host us-e3.chainweb.com --dbname chainweb-data --service-port 443 --service-https +λ> :main server --port 9999 --service-host api.chainweb.com --p2p-host us-e3.chainweb.com --dbname chainweb-data --service-port 443 --service-https +-} diff --git a/lib/ChainwebData/Backfill.hs b/haskell-src/lib/ChainwebData/Backfill.hs similarity index 100% rename from lib/ChainwebData/Backfill.hs rename to haskell-src/lib/ChainwebData/Backfill.hs diff --git a/exec/Chainweb/Env.hs b/haskell-src/lib/ChainwebData/Env.hs similarity index 72% rename from exec/Chainweb/Env.hs rename to haskell-src/lib/ChainwebData/Env.hs index 7569baaf..bee36658 100644 --- a/exec/Chainweb/Env.hs +++ b/haskell-src/lib/ChainwebData/Env.hs @@ -2,13 +2,13 @@ {-# LANGUAGE GeneralizedNewtypeDeriving #-} {-# LANGUAGE LambdaCase #-} {-# LANGUAGE NumericUnderscores #-} -module Chainweb.Env +module ChainwebData.Env ( MigrateStatus(..) , Args(..) , Env(..) 
, chainStartHeights , ServerEnv(..) - , Connect(..), withPool + , Connect(..), withPoolInit, withPool, withCWDPool , Scheme(..) , toServantScheme , Url(..) @@ -20,6 +20,7 @@ module Chainweb.Env , BackfillArgs(..) , FillArgs(..) , envP + , migrateOnlyP , richListP , NodeDbPath(..) , progress @@ -31,16 +32,18 @@ import Chainweb.Api.Common (BlockHeight) import Chainweb.Api.NodeInfo import Control.Concurrent import Control.Exception +import Control.Monad (void) import Data.ByteString (ByteString) +import Data.Char (toLower) import Data.IORef import Data.Map.Strict (Map) import qualified Data.Map.Strict as M -import Data.Maybe import Data.String import Data.Pool import Data.Text (pack, Text) import Data.Time.Clock.POSIX import Database.Beam.Postgres +import Database.PostgreSQL.Simple (execute_) import Gargoyle import Gargoyle.PostgreSQL -- To get gargoyle to give you postgres automatically without having to install @@ -57,14 +60,15 @@ import Text.Printf --- -data MigrateStatus = RunMigration | DontMigrate +data MigrateStatus = RunMigration | DontMigrate Bool deriving (Eq,Ord,Show,Read) data Args = Args Command Connect UrlScheme Url LogLevel MigrateStatus -- ^ arguments for all but the richlist command - | RichListArgs NodeDbPath LogLevel + | RichListArgs NodeDbPath LogLevel ChainwebVersion -- ^ arguments for the Richlist command + | MigrateOnly Connect LogLevel deriving (Show) data Env = Env @@ -87,26 +91,63 @@ data Connect = PGInfo ConnectInfo | PGString ByteString | PGGargoyle String deriving (Eq,Show) -- | Equivalent to withPool but uses a Postgres DB started by Gargoyle -withGargoyleDb :: FilePath -> (Pool Connection -> IO a) -> IO a -withGargoyleDb dbPath func = do +withGargoyleDbInit :: + (Connection -> IO ()) -> + FilePath -> + (Pool Connection -> IO a) -> IO a +withGargoyleDbInit initConn dbPath func = do --pg <- postgresNix let pg = defaultPostgres withGargoyle pg dbPath $ \dbUri -> do caps <- getNumCapabilities - pool <- createPool (connectPostgreSQL dbUri) close 1 5 caps + let poolConfig = + PoolConfig + { + createResource = connectPostgreSQL dbUri >>= \c -> c <$ initConn c + , freeResource = close + , poolCacheTTL = 5 + , poolMaxResources = caps + } + pool <- newPool poolConfig func pool -- | Create a `Pool` based on `Connect` settings designated on the command line. getPool :: IO Connection -> IO (Pool Connection) getPool getConn = do caps <- getNumCapabilities - createPool getConn close 1 5 caps + let poolConfig = + PoolConfig + { + createResource = getConn + , freeResource = close + , poolCacheTTL = 5 + , poolMaxResources = caps + } + newPool poolConfig + +-- | A bracket for `Pool` interaction. +withPoolInit :: (Connection -> IO ()) -> Connect -> (Pool Connection -> IO a) -> IO a +withPoolInit initC = \case + PGGargoyle dbPath -> withGargoyleDbInit initC dbPath + PGInfo ci -> bracket (getPool $ withInit (connect ci)) destroyAllResources + PGString s -> bracket (getPool $ withInit (connectPostgreSQL s)) destroyAllResources + where withInit mkConn = mkConn >>= \c -> c <$ initC c + -- | A bracket for `Pool` interaction. 
withPool :: Connect -> (Pool Connection -> IO a) -> IO a
-withPool (PGGargoyle dbPath) = withGargoyleDb dbPath
-withPool (PGInfo ci) = bracket (getPool (connect ci)) destroyAllResources
-withPool (PGString s) = bracket (getPool (connectPostgreSQL s)) destroyAllResources
+withPool = withPoolInit mempty
+
+withCWDPool :: Connect -> (Pool Connection -> IO a) -> IO a
+withCWDPool = withPoolInit $ \conn -> do
+  -- The following tells Postgres to assume that accessing random disk pages
+  -- is as expensive as accessing sequential pages. This is generally a good
+  -- setting for a database that stores its data on an SSD, but our intention
+  -- here is to encourage Postgres to prefer index scans over table scans. The
+  -- justification is that we carefully design CW-D indexes and queries in
+  -- tandem to make sure all requests are serviced with predictable performance
+  -- characteristics.
+  void $ execute_ conn "SET random_page_cost = 1.0"

 data Scheme = Http | Https
   deriving (Eq,Ord,Show,Enum,Bounded)
@@ -169,11 +210,12 @@ data Command
   | Fill FillArgs
   | Single ChainId BlockHeight
   | FillEvents BackfillArgs EventType
+  | BackFillTransfers Bool BackfillArgs
   deriving (Show)

 data BackfillArgs = BackfillArgs
   { _backfillArgs_delayMicros :: Maybe Int
-  , _backfillArgs_eventChunkSize :: Maybe Integer
+  , _backfillArgs_chunkSize :: Maybe Int
   } deriving (Eq,Ord,Show)

 data FillArgs = FillArgs
@@ -185,20 +227,26 @@ data ServerEnv = ServerEnv
   { _serverEnv_port :: Int
   , _serverEnv_runFill :: Bool
   , _serverEnv_fillDelay :: Maybe Int
+  , _serverEnv_serveSwaggerUi :: Bool
   } deriving (Eq,Ord,Show)

 envP :: Parser Args
 envP = Args
   <$> commands
-  <*> (fromMaybe (PGGargoyle "cwdb-pgdata") <$> optional connectP)
+  <*> connectP
   <*> urlSchemeParser "service" 1848
   <*> urlParser "p2p" 443
   <*> logLevelParser
   <*> migrationP

 migrationP :: Parser MigrateStatus
-migrationP =
-  flag DontMigrate RunMigration (short 'm' <> long "migrate" <> help "Run DB migration")
+migrationP
+  = flag' RunMigration (short 'm' <> long "migrate" <> help "Run DB migration")
+  <|> flag' (DontMigrate True)
+      ( long "ignore-schema-diff"
+      <> help "Ignore any unexpected differences in the database schema"
+      )
+  <|> pure (DontMigrate False)

 logLevelParser :: Parser LogLevel
 logLevelParser =
@@ -207,6 +255,18 @@ logLevelParser =
     <> value Info
     <> help "Initial log threshold"

+migrateOnlyP :: Parser Args
+migrateOnlyP = hsubparser
+  ( command "migrate"
+    ( info opts $ progDesc
+      "Run the database migrations only"
+    )
+  )
+  where
+    opts = MigrateOnly
+      <$> connectP
+      <*> logLevelParser
+
 richListP :: Parser Args
 richListP = hsubparser
   ( command "richlist"
@@ -223,9 +283,26 @@ richListP = hsubparser
       <> help "Chainweb node db filepath"
       )
     <*> logLevelParser
+    <*> simpleVersionParser
+
+versionReader :: ReadM ChainwebVersion
+versionReader = eitherReader $ \case
+  txt | map toLower txt == "mainnet01" || map toLower txt == "mainnet" -> Right "mainnet01"
+  txt | map toLower txt == "testnet04" || map toLower txt == "testnet" -> Right "testnet04"
+  txt -> Left $ printf "Can't read chainweb version: got %s" txt
+
+simpleVersionParser :: Parser ChainwebVersion
+simpleVersionParser =
+  option versionReader $
+    long "chainweb-version"
+    <> value "mainnet01"
+    <> help "Chainweb node version"

 connectP :: Parser Connect
-connectP = (PGString <$> pgstringP) <|> (PGInfo <$> connectInfoP) <|> (PGGargoyle <$> dbdirP)
+connectP = (PGString <$> pgstringP)
+  <|> (PGInfo <$> connectInfoP)
+  <|> (PGGargoyle <$> dbdirP)
+  <|> pure (PGGargoyle "cwdb-pgdata")

 dbdirP :: Parser
FilePath dbdirP = strOption (long "dbdir" <> help "Directory for self-run postgres") @@ -252,6 +329,8 @@ serverP = ServerEnv <$> option auto (long "port" <> metavar "INT" <> help "Port the server will listen on") <*> flag False True (long "run-fill" <> short 'f' <> help "Run fill operation once a day to fill gaps") <*> delayP + -- The OpenAPI spec is currently rudimentary and not official so we're hiding this option + <*> flag False True (long "serve-swagger-ui" <> internal) delayP :: Parser (Maybe Int) delayP = optional $ option auto (long "delay" <> metavar "DELAY_MICROS" <> help "Number of microseconds to delay between queries to the node") @@ -289,6 +368,8 @@ commands = hsubparser (progDesc "Serve the chainweb-data REST API (also does listen)")) <> command "fill-events" (info (FillEvents <$> bfArgsP <*> eventTypeP) (progDesc "Event Worker - Fills missing events")) + <> command "backfill-transfers" (info (BackFillTransfers <$> flag False True (long "disable-indexes" <> help "Delete indexes on transfers table") <*> bfArgsP) + (progDesc "Backfill transfer table entries")) ) progress :: LogFunctionIO Text -> IORef Int -> Int -> IO a diff --git a/lib/ChainwebData/Genesis.hs b/haskell-src/lib/ChainwebData/Genesis.hs similarity index 90% rename from lib/ChainwebData/Genesis.hs rename to haskell-src/lib/ChainwebData/Genesis.hs index 4055b0da..65fa053f 100644 --- a/lib/ChainwebData/Genesis.hs +++ b/haskell-src/lib/ChainwebData/Genesis.hs @@ -8,7 +8,7 @@ module ChainwebData.Genesis ) where -import Data.List +import qualified Data.List as L import qualified Data.Map as M import Data.Maybe (fromMaybe) @@ -32,7 +32,7 @@ genesisHeight (ChainId c) (GenesisInfo gi) = gi M.! c -- mkGenesisInfo :: NodeInfo -> GenesisInfo mkGenesisInfo = GenesisInfo - . foldl' go mempty + . L.foldl' go mempty . fromMaybe [] . 
_nodeInfo_graphs where @@ -41,4 +41,4 @@ mkGenesisInfo = GenesisInfo f (Just bh') | bh > bh' = Just bh' | otherwise = Just bh - in foldl' (\m' (c', _) -> M.alter f c' m') m adjs + in L.foldl' (\m' (c', _) -> M.alter f c' m') m adjs diff --git a/haskell-src/lib/ChainwebData/Spec.hs b/haskell-src/lib/ChainwebData/Spec.hs new file mode 100644 index 00000000..07e96d33 --- /dev/null +++ b/haskell-src/lib/ChainwebData/Spec.hs @@ -0,0 +1,82 @@ +{-# OPTIONS_GHC -fno-warn-orphans #-} + +{-# LANGUAGE DataKinds #-} +{-# LANGUAGE DeriveAnyClass #-} +{-# LANGUAGE DerivingStrategies #-} +{-# LANGUAGE FlexibleContexts #-} +{-# LANGUAGE FlexibleInstances #-} +{-# LANGUAGE StandaloneDeriving #-} +{-# LANGUAGE GeneralisedNewtypeDeriving #-} +{-# LANGUAGE OverloadedStrings #-} + +module ChainwebData.Spec where + + +import Control.Lens +import ChainwebData.Api + +import Data.Proxy + +import Data.OpenApi.ParamSchema +import Data.OpenApi.Schema +import Servant.OpenApi +import ChainwebData.Pagination +import Chainweb.Api.ChainId +import ChainwebData.TxSummary +import Data.OpenApi + +import ChainwebData.EventDetail (EventDetail) +import ChainwebData.Util +import qualified Data.Aeson as A +import ChainwebData.TxDetail +import ChainwebData.TransferDetail (TransferDetail) +import Chainweb.Api.StringEncoded (StringEncoded) +import Data.Scientific (Scientific) + +instance ToSchema A.Value where + declareNamedSchema _ = pure $ NamedSchema (Just "AnyValue") mempty + +deriving newtype instance ToParamSchema Limit +deriving newtype instance ToParamSchema Offset +deriving newtype instance ToParamSchema EventParam +deriving newtype instance ToParamSchema EventName +deriving newtype instance ToParamSchema EventModuleName +deriving newtype instance ToParamSchema RequestKey +deriving newtype instance ToParamSchema ChainId +deriving newtype instance ToParamSchema NextToken + +instance ToSchema TxSummary where + declareNamedSchema = genericDeclareNamedSchema + defaultSchemaOptions{ fieldLabelModifier = drop 11 } + +deriving anyclass instance ToSchema TxResult + +instance ToSchema EventDetail where + declareNamedSchema = genericDeclareNamedSchema + defaultSchemaOptions{ fieldLabelModifier = lensyConstructorToNiceJson 10 } + +instance ToSchema TxDetail where + declareNamedSchema = genericDeclareNamedSchema + defaultSchemaOptions{ fieldLabelModifier = lensyConstructorToNiceJson 10 } + +instance ToSchema TxEvent where + declareNamedSchema = genericDeclareNamedSchema + defaultSchemaOptions{ fieldLabelModifier = lensyConstructorToNiceJson 9 } + +instance ToSchema TransferDetail where + declareNamedSchema = genericDeclareNamedSchema + defaultSchemaOptions{ fieldLabelModifier = lensyConstructorToNiceJson 10 } + +instance ToSchema ChainwebDataStats where + declareNamedSchema = genericDeclareNamedSchema + defaultSchemaOptions{ fieldLabelModifier = drop 5 } + +instance ToSchema (StringEncoded Scientific) where + declareNamedSchema _ = pure $ NamedSchema (Just "StringEncodedNumber") $ mempty + & type_ ?~ OpenApiString + & example ?~ A.String "-1234.5e6" + & pattern ?~ "[-+]?[0-9]*\\.?[0-9]+([eE][-+]?[0-9]+)?" 
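The `fieldLabelModifier` arguments in the instances above have to mirror the corresponding Aeson options (e.g. `lensyToJSON 10` for `TxDetail`); otherwise the generated OpenAPI schema would advertise field names that the JSON encoding never produces. Here is a minimal, self-contained sketch of that pattern; `Demo` and its fields are illustrative stand-ins, not types from this changeset:

```haskell
{-# LANGUAGE DeriveGeneric #-}

module DemoSchema where

import qualified Data.Aeson.Types as A
import Data.OpenApi
import Data.OpenApi.Schema
import GHC.Generics (Generic)

-- Hypothetical payload: the "_demo_" prefix (6 characters) is dropped by
-- both the JSON encoding and the OpenAPI schema, so the spec and the
-- actual wire format agree on the field names "name" and "size".
data Demo = Demo
  { _demo_name :: String
  , _demo_size :: Int
  } deriving (Generic)

instance A.ToJSON Demo where
  toJSON = A.genericToJSON A.defaultOptions { A.fieldLabelModifier = drop 6 }

instance ToSchema Demo where
  declareNamedSchema = genericDeclareNamedSchema
    defaultSchemaOptions { fieldLabelModifier = drop 6 }
```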
+ +spec :: OpenApi +spec = toOpenApi (Proxy :: Proxy ChainwebDataApi) + diff --git a/lib/ChainwebData/Types.hs b/haskell-src/lib/ChainwebData/Types.hs similarity index 86% rename from lib/ChainwebData/Types.hs rename to haskell-src/lib/ChainwebData/Types.hs index 6886db3a..8bc3fbbe 100644 --- a/lib/ChainwebData/Types.hs +++ b/haskell-src/lib/ChainwebData/Types.hs @@ -1,5 +1,7 @@ {-# LANGUAGE DerivingStrategies #-} {-# LANGUAGE GeneralizedNewtypeDeriving #-} +{-# LANGUAGE LambdaCase #-} +{-# LANGUAGE NumericUnderscores #-} {-# LANGUAGE NoImplicitPrelude #-} {-# LANGUAGE TypeApplications #-} {-# OPTIONS_GHC -fno-warn-orphans #-} @@ -15,6 +17,7 @@ module ChainwebData.Types , groupsOf , rangeToDescGroupsOf , blockHeaderRequestSize + , withEventsMinHeight ) where import BasePrelude @@ -104,3 +107,16 @@ instance ToJSON BlockHeader where -- This constant defines the maximum number of blockheaders we can retrieve from a node at a time. blockHeaderRequestSize :: Int blockHeaderRequestSize = 360 + +withVersion :: T.Text -> (T.Text -> a) -> (a -> b) -> b +withVersion version onVersion action = action $ onVersion version + +withEventsMinHeight :: Num a => MonadIO m => T.Text -> String -> (a -> m b) -> m b +withEventsMinHeight version errorMessage action = withVersion version onVersion $ \case + Just height -> action height + Nothing -> liftIO $ die errorMessage + where + onVersion = \case + "mainnet01" -> Just 1_722_500 + "testnet04" -> Just 1_261_000 + _ -> Nothing diff --git a/haskell-src/lib/ChainwebDb/BoundedScan.hs b/haskell-src/lib/ChainwebDb/BoundedScan.hs new file mode 100644 index 00000000..4f583646 --- /dev/null +++ b/haskell-src/lib/ChainwebDb/BoundedScan.hs @@ -0,0 +1,360 @@ +{-# LANGUAGE DeriveAnyClass #-} +{-# LANGUAGE DeriveGeneric #-} +{-# LANGUAGE DeriveTraversable #-} +{-# LANGUAGE ExistentialQuantification #-} +{-# LANGUAGE FlexibleContexts #-} +{-# LANGUAGE RankNTypes #-} +{-# LANGUAGE ScopedTypeVariables #-} +{-# LANGUAGE TypeApplications #-} +{-# LANGUAGE TypeFamilies #-} +{-# LANGUAGE TupleSections #-} + +module ChainwebDb.BoundedScan ( + FilterMarked(..), + applyFilterMark, + BSContinuation(..), + ExecutionStrategy(..), + performBoundedScan, + boundedScanOffset, + boundedScanLimit, + Directional, asc, desc, + cursorCmp, + CompPair(..), tupleCmp, + noDecoration, +) where + +import Control.Applicative + +import Data.Coerce (coerce) +import Data.Functor +import Data.Functor.Identity(Identity(..)) +import Data.Kind (Type) +import Data.Maybe +import Data.Proxy (Proxy(..)) + +import Database.Beam +import Database.Beam.Backend +import Database.Beam.Query.Internal (QOrd) +import Database.Beam.Schema.Tables (Beamable(..), Columnar' (..)) +import Database.Beam.Postgres + +import Safe + +asc, desc :: Columnar f a -> Columnar (Directional f) a +asc = Directional Asc . Columnar' +desc = Directional Desc . Columnar' + +data FilterMarked rowT f = FilterMarked + { fmMatch :: C f Bool + , fmRow :: rowT f + } deriving (Generic, Beamable) + +applyFilterMark :: + QPg db s (FilterMarked rowT (Exp s)) -> + QPg db s (rowT (Exp s)) +applyFilterMark source = do + row <- source + guard_ $ fmMatch row + return $ fmRow row + +type Offset = Integer +type ResultLimit = Integer +type ScanLimit = Integer + +-- | Build the offset segment of a bounded scan query. 
+--
+-- This function will produce a SQL query with the following shape:
+--
+-- > SELECT <cursor>, found_num, scan_num
+-- > FROM (
+-- >   SELECT $(toCursor rest)
+-- >        , COUNT(*) OVER (ORDER BY $(orderOverCursor rest)) AS scan_num
+-- >        , COUNT(*) FILTER (WHERE match)
+-- >            OVER (ORDER BY $(orderOverCursor rest)) AS found_num
+-- >   FROM $source AS t(match,rest...)
+-- >   ORDER BY $(orderOverCursor rest)
+-- >   LIMIT $scanLimit
+-- > ) AS t
+-- > WHERE scan_num = $scanLimit
+-- >    OR found_num = $offset
+-- > ORDER BY $(orderOverCursor <cursor>)
+-- > LIMIT 1
+--
+-- This query will either find the `offset`th row of `source` with `match = TRUE` and
+-- yield a `cursor` that points at it (so that the `LIMIT` segment of the query
+-- can start), or it will stop after scanning `scanLimit` rows from `source`,
+-- yielding the cursor of the row at which it stopped.
+--
+-- Note that this function expects the passed-in `offset` to be greater than zero,
+-- since there's no meaningful cursor it could return in the offset=0 case.
+boundedScanOffset :: forall s db rowT cursorT.
+  (Beamable rowT, Beamable cursorT) =>
+  (forall s2. QPg db s2 (FilterMarked rowT (Exp s2))) ->
+  (forall f. rowT f -> cursorT (Directional f)) ->
+  Offset ->
+  ScanLimit ->
+  QPg db s (cursorT (Exp s), (Exp s ResultLimit, Exp s ScanLimit))
+boundedScanOffset source toCursor offset scanLimit =
+  limit_ 1 $ orderBy_ (\(cur,_) -> orderCursor cur) $ do
+    (cursor, scan_num, found_num) <-
+      subselect_ $ limit_ scanLimit $ orderBy_ (\(cur,_,_) -> orderCursor cur) $ withWindow_
+        (\row ->
+          ( frame_ (noPartition_ @Int) (Just $ orderRow $ fmRow row) noBounds_
+          , frame_ (noPartition_ @Int) (Just $ orderRow $ fmRow row) (fromBound_ unbounded_)
+          )
+        )
+        (\row (wNoBounds, wTrailing) ->
+          ( unDirectional $ toCursor $ fmRow row
+          , rowNumber_ `over_` wNoBounds
+          , countAll_ `filterWhere_` fmMatch row `over_` wTrailing
+          )
+        )
+        source
+    guard_ $ scan_num ==. val_ scanLimit
+         ||. found_num ==. val_ offset
+    return (cursor, (found_num, scan_num))
+  where orderRow = directionalOrd . toCursor
+        orderCursor :: forall s3. cursorT (Exp s3) -> [AnyOrd Postgres s3]
+        orderCursor cur = directionalOrdZip (toCursor tblSkeleton) cur
+
+-- | Build the limit segment of a bounded scan query
+--
+-- This function will produce a SQL query with the following shape:
+--
+-- > SELECT rest, scan_num, matching_row
+-- > FROM (
+-- >   SELECT t.*, COUNT(*) OVER (ORDER BY $(orderOverCursor rest))
+-- >   FROM $source AS t(match,rest...)
+-- > ) AS t
+-- > WHERE scan_num = $scanLimit OR match
+--
+-- This query will return up to `limit` rows of the `source` table
+-- with `match = True` ordered by `order`, but it won't scan any more than
+-- `scanLimit` rows. If it hits the `scanLimit`th row, the last row
+-- returned will have `matchingRow = FALSE` so that it can be discarded from
+-- the result set but still used as a cursor to resume the search later.
+boundedScanLimit ::
+  (Beamable rowT) =>
+  (forall s2. QPg db s2 (FilterMarked rowT (Exp s2))) ->
+  (forall s2. rowT (Exp s2) -> [AnyOrd Postgres s2]) ->
+  ScanLimit ->
+  QPg db s (rowT (Exp s), (Exp s ScanLimit, Exp s Bool))
+boundedScanLimit source order scanLimit = do
+  (row, scan_num) <- subselect_ $ limit_ scanLimit $ withWindow_
+    (\row -> frame_ (noPartition_ @Int) (Just $ order $ fmRow row) noBounds_)
+    (\row window ->
+      ( row
+      , rowNumber_ `over_` window
+      )
+    )
+    source
+  let scan_end = scan_num ==. val_ scanLimit
+      matchingRow = fmMatch row
+  guard_ $ scan_end ||.
matchingRow
+  return (fmRow row, (scan_num, matchingRow))
+
+data BSContinuation cursor = BSContinuation
+  { bscCursor :: cursor
+  , bscOffset :: Maybe Offset
+  }
+  deriving (Functor, Foldable, Traversable)
+
+data ExecutionStrategy = Bounded ScanLimit | Unbounded
+
+-- | Execute a bounded scan over a relation of `rowT` rows using a bounded
+-- or unbounded scanning strategy.
+--
+-- Depending on the provided 'ExecutionStrategy', 'performBoundedScan' will
+-- either use 'boundedScanOffset' and 'boundedScanLimit' to search through
+-- `source`, or it will do a naive `OFFSET ... LIMIT ...` search.
+--
+-- In both cases, if there are potentially more results to find in subsequent
+-- searches, 'performBoundedScan' will return (along with any results found so
+-- far) a continuation that can be used to resume the search efficiently.
+performBoundedScan :: forall db rowT extrasT cursorT m.
+  FromBackendRow Postgres (rowT Identity, extrasT Identity) =>
+  (Beamable rowT, Beamable extrasT) =>
+  (FromBackendRow Postgres (cursorT Identity), SqlValableTable Postgres cursorT) =>
+  Monad m =>
+  ExecutionStrategy ->
+  (forall a. Pg a -> m a) ->
+  -- | Convert a row to a cursor with order direction annotations
+  (forall f. rowT f -> cursorT (Directional f)) ->
+  -- | A relation of rows with annotations indicating whether each row should be kept
+  (forall s. QPg db s (FilterMarked rowT (Exp s))) ->
+  (forall s. rowT (Exp s) -> QPg db s (extrasT (Exp s))) ->
+  -- | The start of this execution, indicating whether this is a new query or the
+  -- continuation of a previous execution.
+  Either (Maybe Offset) (BSContinuation (cursorT Identity)) ->
+  -- | The maximum number of rows to return
+  ResultLimit ->
+  m ( Maybe (BSContinuation (cursorT Identity))
+    , [(rowT Identity, extrasT Identity)]
+    )
+performBoundedScan stg runPg toCursor source decorate contination resultLimit = do
+  let
+    runOffset mbStart offset scanLimit = do
+      mbCursor <- runPg $ runSelectReturningOne $ select $ boundedScanOffset
+        (resumeSource toCursor source mbStart)
+        toCursor
+        offset
+        scanLimit
+      case mbCursor of
+        Nothing -> return (Nothing, [])
+        Just (cursor, (found_cnt, scan_cnt)) -> if found_cnt < offset
+          then do
+            let remainingOffset = offset - found_cnt
+                cont = BSContinuation cursor $ Just remainingOffset
+            return (Just cont, [])
+          else runLimit (Just cursor) (scanLimit - scan_cnt)
+
+    runLimit mbStart scanLim = do
+      rows <- runPg $ runSelectReturningList $ select $
+        limit_ resultLimit $
+        orderBy_ (\((row,_),_) -> directionalOrd $ toCursor row) $ do
+          (row, bsBookKeeping) <- boundedScanLimit
+            (resumeSource toCursor source mbStart)
+            (directionalOrd . toCursor)
+            scanLim
+          extras <- decorate row
+          return ((row, extras), bsBookKeeping)
+      let
+        scanned = fromMaybe 0 $ lastMay rows <&> \(_,(scanNum,_)) -> scanNum
+        mbNextCursor = (lastMay rows <&> \((row,_),_) -> unDirectional $ toCursor row)
+          <|> mbStart
+        mbContinuation = mbNextCursor <&> \cur -> BSContinuation cur Nothing
+        results = [row | (row,(_,found)) <- rows, found]
+      return $ if scanned < scanLim && fromIntegral (length rows) < resultLimit
+        then (Nothing, results)
+        else (mbContinuation, results)
+
+    runUnbounded mbStart mbOffset = do
+      let sourceCont = resumeSource toCursor source mbStart
+          offset = fromMaybe 0 mbOffset
+      rows <- runPg $ runSelectReturningList $ select $
+        limit_ resultLimit $
+        offset_ offset $
+        orderBy_ (directionalOrd . toCursor .
fst) $ do + row <- applyFilterMark sourceCont + extras <- decorate row + return (row, extras) + return $ (,rows) $ if fromIntegral (length rows) < resultLimit + then Nothing + else lastMay rows <&> \row -> + BSContinuation (unDirectional $ toCursor $ fst row) Nothing + + (mbStartTop, mbOffsetTop) = case contination of + Left mbO -> (Nothing, mbO) + Right (BSContinuation cursor mbO) -> (Just cursor, mbO) + case stg of + Bounded scanLimit -> case mbOffsetTop of + Just offset | offset > 0 -> runOffset mbStartTop offset scanLimit + _ -> runLimit mbStartTop scanLimit + Unbounded -> runUnbounded mbStartTop mbOffsetTop + +resumeSource :: (SqlValableTable Postgres cursorT) => + (rowT (Exp s) -> cursorT (Directional (Exp s))) -> + QPg db s (FilterMarked rowT (Exp s)) -> + Maybe (cursorT Identity) -> + QPg db s (FilterMarked rowT (Exp s)) +resumeSource toCursor source mbResume = case mbResume of + Nothing -> source + Just cursor -> do + row <- source + guard_ $ cursorCmp (>.) (toCursor $ fmRow row) cursor + return row + +------------------------------------------------------------------------------- +-- Utilities for constructing SQL expressions comparing two tuples + +data CompPair be s = forall t. (:<>) (QExpr be s t ) (QExpr be s t) + +tupleCmp + :: IsSql92ExpressionSyntax (BeamSqlBackendExpressionSyntax be) + => (forall t. QExpr be s t -> QExpr be s t -> QExpr be s Bool) + -> [CompPair be s] + -> QExpr be s Bool +tupleCmp cmp cps = QExpr lExp `cmp` QExpr rExp where + lExp = rowE <$> sequence [e | QExpr e :<> _ <- cps] + rExp = rowE <$> sequence [e | _ :<> QExpr e <- cps] + +------------------------------------------------------------------------------- +-- Utilities for working on cursors with fields that have direction annotations + +-- | A Dir indicates a SQL ORDER BY direction +data Dir = Asc | Desc + +-- | (Directional f) is meant to be used as an argument to a 'Beamable' table, +-- annotating each field with a Dir +data Directional f a = Directional Dir (Columnar' f a) + +-- | Given a 'Beamable' row containing Dir-annotated SQL expressions, return +-- a value that can be passed to an 'orderBy_' that orders the results in +-- "cursor order", i.e. in ascending cursor values. +directionalOrd :: forall t backend s. (BeamSqlBackend backend, Beamable t) => + t (Directional (QExpr backend s)) -> [AnyOrd backend s] +directionalOrd t = directionalOrdZip t $ unDirectional t + +-- | Given a 'Beamable' row containing Dir-annotations and another row of the +-- same shape containing SQL expressions, return a value that can be passed to +-- an 'orderBy_' that orders the results in "cursor order", i.e. in ascending +-- cursor values. +directionalOrdZip :: forall t backend s ignored. 
(BeamSqlBackend backend, Beamable t) =>
+  t (Directional ignored) -> t (QExpr backend s) -> [AnyOrd backend s]
+directionalOrdZip tDirs tExps = fst $ zipBeamFieldsM mkOrd tDirs tExps where
+  mkOrd ::
+    Columnar' (Directional ignored) a ->
+    Columnar' (QExpr backend s) a ->
+    ([AnyOrd backend s], Columnar' Proxy a)
+  mkOrd (Columnar' (Directional dir _)) (Columnar' q) = ([ord],Columnar' Proxy)
+    where ord = anyOrd $ case dir of
+            Asc -> asc_ q
+            Desc -> desc_ q
+
+-- | This function strips the field type of a QOrd so that QOrds over
+-- different fields can be gathered in a list
+anyOrd :: QOrd be s a -> AnyOrd be s
+anyOrd = coerce
+
+type AnyOrd be s = QOrd be s ()
+
+unDirectional :: Beamable t =>
+  t (Directional f) -> t f
+unDirectional t = runIdentity $ zipBeamFieldsM mkOrd t tblSkeleton where
+  mkOrd (Columnar' (Directional _ (Columnar' q))) _ = Identity $ Columnar' q
+
+-- | Compare a cursor expression with direction annotations against a cursor
+-- value in "cursor order". Cursor order means that whichever cursor is ahead
+-- as viewed as a moving cursor is considered greater.
+cursorCmp :: (SqlValableTable Postgres cursorT) =>
+  (forall t. Exp s t -> Exp s t -> Exp s Bool) ->
+  cursorT (Directional (Exp s)) ->
+  cursorT Identity -> Exp s Bool
+cursorCmp cmpOp cursorExp cursorVal = tupleCmp cmpOp cmpPairs where
+  cmpPairs = fst $ zipBeamFieldsM mkPair cursorExp (val_ cursorVal)
+  mkPair ::
+    Columnar' (Directional (Exp s)) a ->
+    Columnar' (Exp s) a ->
+    ([CompPair Postgres s], Columnar' Proxy a)
+  mkPair (Columnar' (Directional dir (Columnar' lExp))) (Columnar' rExp) =
+    ([withDirOrder dir lExp rExp],Columnar' Proxy)
+  withDirOrder Asc lExp rExp = lExp :<> rExp
+  withDirOrder Desc lExp rExp = rExp :<> lExp
+
+-------------------------------------------------------------------------------
+-- | UnitF is a beam record with no fields, so returning it doesn't actually
+-- cause new columns to be defined in a SELECT query.
+data UnitF (f :: Type -> Type) = UnitF deriving (Generic, Beamable, Show)
+
+-- | noDecoration can be used as the decoration of a performBoundedScan call
+-- without causing any overhead on the database side.
+noDecoration :: Monad m => a -> m (UnitF (QGenExpr ctx be s))
+noDecoration _ = return UnitF
+
+-------------------------------------------------------------------------------
+-- Some internal utility definitions used to shorten the types in this module
+
+type QPg = Q Postgres
+
+type Exp = QGenExpr QValueContext Postgres
diff --git a/exec/Chainweb/Database.hs b/haskell-src/lib/ChainwebDb/Database.hs
similarity index 61%
rename from exec/Chainweb/Database.hs
rename to haskell-src/lib/ChainwebDb/Database.hs
index a370cde8..72dc77de 100644
--- a/exec/Chainweb/Database.hs
+++ b/haskell-src/lib/ChainwebDb/Database.hs
@@ -8,24 +8,28 @@
 {-# LANGUAGE ScopedTypeVariables #-}
 {-# LANGUAGE TypeApplications #-}
 {-# LANGUAGE TypeFamilies #-}
+{-# LANGUAGE ViewPatterns #-}

-module Chainweb.Database
+module ChainwebDb.Database
   ( ChainwebDataDb(..)
, database , initializeTables + , bench_initializeTables , withDb , withDbDebug ) where -import Chainweb.Env +import ChainwebData.Env import ChainwebDb.Types.Block import ChainwebDb.Types.Event import ChainwebDb.Types.MinerKey import ChainwebDb.Types.Signer import ChainwebDb.Types.Transaction +import ChainwebDb.Types.Transfer +import Control.Monad (unless) +import Data.Functor ((<&>)) import qualified Data.Pool as P -import Data.Proxy import Data.Text (Text) import qualified Data.Text as T import Data.String @@ -43,6 +47,7 @@ data ChainwebDataDb f = ChainwebDataDb , _cddb_minerkeys :: f (TableEntity MinerKeyT) , _cddb_events :: f (TableEntity EventT) , _cddb_signers :: f (TableEntity SignerT) + , _cddb_transfers :: f (TableEntity TransferT) } deriving stock (Generic) deriving anyclass (Database be) @@ -126,38 +131,83 @@ database = defaultDbSettings `withDbModification` dbModification , _signer_caps = "caps" , _signer_sig = "sig" } + , _cddb_transfers = modifyEntityName modTableName <> + modifyTableFields tableModification + {_tr_requestkey = "requestkey" + , _tr_chainid = "chainid" + , _tr_height = "height" + , _tr_idx = "idx" + , _tr_modulename = "modulename" + , _tr_moduleHash = "modulehash" + , _tr_from_acct = "from_acct" + , _tr_to_acct = "to_acct" + , _tr_amount = "amount" + , _tr_block = BlockId "block" + } } annotatedDb :: BA.AnnotatedDatabaseSettings be ChainwebDataDb annotatedDb = BA.defaultAnnotatedDbSettings database -hsSchema :: BA.Schema -hsSchema = BA.fromAnnotatedDbSettings annotatedDb (Proxy @'[]) +calcCWMigrationSteps :: Connection -> IO BA.Diff +calcCWMigrationSteps conn = BA.calcMigrationSteps annotatedDb conn <&> \eiSteps -> + flip fmap eiSteps $ filter $ \step -> case BA._editAction $ fst $ BA.unPriority step of + BA.TableRemoved _ -> False + _ -> True showMigration :: Connection -> IO () -showMigration conn = +showMigration conn = do + diff <- calcCWMigrationSteps conn runBeamPostgres conn $ - BA.printMigration $ BA.migrate conn hsSchema + BA.printMigration $ BA.createMigration diff -- | Create the DB tables if necessary. initializeTables :: LogFunctionIO Text -> MigrateStatus -> Connection -> IO () initializeTables logg migrateStatus conn = do - diff <- BA.calcMigrationSteps annotatedDb conn - case diff of - Left err -> do - logg Error "Error detecting database migration requirements: " - logg Error $ fromString $ show err - Right [] -> logg Info "No database migration needed. Continuing..." - Right _ -> do - logg Info "Database migration needed." - case migrateStatus of - RunMigration -> do - BA.tryRunMigrationsWithEditUpdate annotatedDb conn - logg Info "Done with database migration." - DontMigrate -> do - logg Info "Database needs to be migrated. Re-run with the -m option or you can migrate by hand with the following query:" - showMigration conn - exitFailure + diffA <- calcCWMigrationSteps conn + case diffA of + Left err -> do + logg Error "Error detecting database migration requirements: " + logg Error $ fromString $ show err + Right [] -> logg Info "No database migration needed. Continuing..." + Right _ -> do + logg Info "Database migration needed." + case migrateStatus of + RunMigration -> do + BA.tryRunMigrationsWithEditUpdate annotatedDb conn + logg Info "Done with database migration." + DontMigrate ignoreDiffs -> do + logg Info "Database needs to be migrated." 
+ showMigration conn + unless ignoreDiffs $ do + logg Error "Re-run with the -m option or you can run the queries above manually" + exitFailure + + + +bench_initializeTables :: Bool -> (Text -> IO ()) -> (Text -> IO ()) -> Connection -> IO Bool +bench_initializeTables migrate loggInfo loggError conn = do + diffA <- calcCWMigrationSteps conn + case diffA of + Left err -> do + loggError "Error detecting database migration requirements: " + loggError $ fromString $ show err + return False + Right [] -> do + loggInfo "No database migration needed. Continuing..." + return True + Right _ -> do + loggInfo "Database migration needed." + case migrate of + True -> do + BA.tryRunMigrationsWithEditUpdate annotatedDb conn + loggInfo "Done with database migration." + return True + False -> do + loggInfo "Database needs to be migrated. Re-run with the -m option or you can migrate by hand with the following query:" + showMigration conn + return False + withDb :: Env -> Pg b -> IO b withDb env qry = P.withResource (_env_dbConnPool env) $ \c -> runBeamPostgres c qry diff --git a/haskell-src/lib/ChainwebDb/Queries.hs b/haskell-src/lib/ChainwebDb/Queries.hs new file mode 100644 index 00000000..ed687450 --- /dev/null +++ b/haskell-src/lib/ChainwebDb/Queries.hs @@ -0,0 +1,247 @@ +{-# LANGUAGE DeriveAnyClass #-} +{-# LANGUAGE DeriveGeneric #-} +{-# LANGUAGE ExistentialQuantification #-} +{-# LANGUAGE FlexibleContexts #-} +{-# LANGUAGE FlexibleInstances #-} +{-# LANGUAGE LambdaCase #-} +{-# LANGUAGE OverloadedStrings #-} +{-# LANGUAGE RankNTypes #-} +{-# LANGUAGE RecordWildCards #-} +{-# LANGUAGE ScopedTypeVariables #-} +{-# LANGUAGE StandaloneDeriving #-} +{-# LANGUAGE TypeFamilies #-} +-- | + +module ChainwebDb.Queries where + +------------------------------------------------------------------------------ +import Data.Aeson hiding (Error) +import Data.ByteString.Lazy (ByteString) +import Data.Int +import Data.Functor ((<&>)) +import Data.Maybe (maybeToList) +import Data.Text (Text) +import Data.Time +import Database.Beam hiding (insert) +import Database.Beam.Postgres +import Database.Beam.Postgres.Syntax +import Database.Beam.Backend.SQL.SQL92 +import Database.Beam.Backend.SQL +------------------------------------------------------------------------------ +import Chainweb.Api.ChainId +import Chainweb.Api.Common (BlockHeight) +import ChainwebData.Api +import ChainwebData.Pagination +import ChainwebDb.BoundedScan +import ChainwebDb.Database +import ChainwebDb.Types.Block +import ChainwebDb.Types.DbHash +import ChainwebDb.Types.Event +import ChainwebDb.Types.Transaction +import ChainwebDb.Types.Transfer +import ChainwebDb.Types.Common (ReqKeyOrCoinbase) +------------------------------------------------------------------------------ + +type PgSelect a = SqlSelect Postgres (QExprToIdentity a) +type PgExpr s = QGenExpr QValueContext Postgres s +type PgBaseExpr = PgExpr QBaseScope + +data HeightRangeParams = HeightRangeParams + { hrpMinHeight :: Maybe BlockHeight + , hrpMaxHeight :: Maybe BlockHeight + } + +guardInRange :: HeightRangeParams -> PgExpr s Int64 -> Q Postgres db s () +guardInRange HeightRangeParams{..} hgt = do + whenArg hrpMinHeight $ \h -> guard_ $ hgt >=. val_ (fromIntegral h) + whenArg hrpMaxHeight $ \h -> guard_ $ hgt <=. 
val_ (fromIntegral h) + +-- | A subset of the TransactionT record, used with beam for querying the +-- /txs/search endpoint response payload +data DbTxSummaryT f = DbTxSummary + { dtsChainId :: C f Int64 + , dtsHeight :: C f Int64 + , dtsBlock :: C f (DbHash BlockHash) + , dtsCreationTime :: C f UTCTime + , dtsReqKey :: C f (DbHash TxHash) + , dtsSender :: C f Text + , dtsCode :: C f (Maybe Text) + , dtsContinuation :: C f (Maybe (PgJSONB Value)) + , dtsGoodResult :: C f (Maybe (PgJSONB Value)) + } deriving (Generic, Beamable) + +type DbTxSummary = DbTxSummaryT Identity + +data TxCursorT f = TxCursor + { txcHeight :: C f Int64 + , txcReqKey :: C f (DbHash TxHash) + } deriving (Generic, Beamable) + +type TxCursor = TxCursorT Identity + +toTxSearchCursor :: DbTxSummaryT f -> TxCursorT (Directional f) +toTxSearchCursor DbTxSummary{..} = TxCursor + (desc dtsHeight) + (desc dtsReqKey) + +toDbTxSummary :: TransactionT f -> DbTxSummaryT f +toDbTxSummary Transaction{..} = DbTxSummary + { dtsChainId = _tx_chainId + , dtsHeight = _tx_height + , dtsBlock = unBlockId _tx_block + , dtsCreationTime = _tx_creationTime + , dtsReqKey = _tx_requestKey + , dtsSender = _tx_sender + , dtsCode = _tx_code + , dtsContinuation = _tx_continuation + , dtsGoodResult = _tx_goodResult + } + +txSearchSource :: + Text -> + HeightRangeParams -> + Q Postgres ChainwebDataDb s (FilterMarked DbTxSummaryT (PgExpr s)) +txSearchSource search hgtRange = do + txMerged <- do + tx <- all_ $ _cddb_transactions database + initTx <- leftJoin_' (all_ $ _cddb_transactions database) $ \tx2 -> + just_ (_tx_requestKey tx2) ==?. _tx_pactId tx + let codeMerged = coalesce_ + [ just_ (_tx_code tx) + , _tx_code initTx + ] + nothing_ + return $ tx { _tx_code = codeMerged } + guardInRange hgtRange (_tx_height txMerged) + let searchExp = val_ ("%" <> search <> "%") + isMatch = fromMaybe_ (val_ "") (_tx_code txMerged) `like_` searchExp + return $ FilterMarked isMatch $ toDbTxSummary txMerged + +data EventSearchParams = EventSearchParams + { espSearch :: Maybe Text + , espParam :: Maybe EventParam + , espName :: Maybe EventName + , espModuleName :: Maybe EventModuleName + } + +eventSearchCond :: + EventSearchParams -> + EventT (PgExpr s) -> + PgExpr s Bool +eventSearchCond EventSearchParams{..} ev = and_ $ + concat [searchCond, qualNameCond, paramCond, moduleCond] + where + searchString search = "%" <> search <> "%" + searchCond = fromMaybeArg espSearch $ \s -> + (_ev_qualName ev `like_` val_ (searchString s)) ||. + (_ev_paramText ev `like_` val_ (searchString s)) + qualNameCond = fromMaybeArg espName $ \(EventName n) -> + _ev_qualName ev `like_` val_ (searchString n) + paramCond = fromMaybeArg espParam $ \(EventParam p) -> + _ev_paramText ev `like_` val_ (searchString p) + moduleCond = fromMaybeArg espModuleName $ \(EventModuleName m) -> + _ev_module ev ==. 
val_ m + fromMaybeArg mbA f = f <$> maybeToList mbA + +data EventCursorT f = EventCursor + { ecHeight :: C f Int64 + , ecReqKey :: C f ReqKeyOrCoinbase + , ecIdx :: C f Int64 + } deriving (Generic, Beamable) + +type EventCursor = EventCursorT Identity +deriving instance Show EventCursor + +type EventQueryStart = Maybe BlockHeight + +toEventsSearchCursor :: EventT f -> EventCursorT (Directional f) +toEventsSearchCursor Event{..} = EventCursor + (desc _ev_height) + (desc _ev_requestkey) + (asc _ev_idx) + +eventsSearchSource :: + EventSearchParams -> + HeightRangeParams -> + Q Postgres ChainwebDataDb s (FilterMarked EventT (PgExpr s)) +eventsSearchSource esp hgtRange = do + ev <- all_ $ _cddb_events database + guardInRange hgtRange (_ev_height ev) + return $ FilterMarked (eventSearchCond esp ev) ev + +newtype EventSearchExtrasT f = EventSearchExtras + { eseBlockTime :: C f UTCTime + } deriving (Generic, Beamable) + +eventSearchExtras :: + EventT (PgExpr s) -> + Q Postgres ChainwebDataDb s (EventSearchExtrasT (PgExpr s)) +eventSearchExtras ev = do + blk <- all_ $ _cddb_blocks database + guard_ $ _ev_block ev `references_` blk + return $ EventSearchExtras + { eseBlockTime = _block_creationTime blk + } + +_bytequery :: Sql92SelectSyntax (BeamSqlBackendSyntax be) ~ PgSelectSyntax => SqlSelect be a -> ByteString +_bytequery = \case + SqlSelect s -> pgRenderSyntaxScript $ fromPgSelect s + +data AccountQueryStart + = AQSNewQuery Offset + | AQSContinue BlockHeight ReqKeyOrCoinbase Int + +toAccountsSearchCursor :: TransferT f -> EventCursorT (Directional f) +toAccountsSearchCursor Transfer{..} = EventCursor + (desc _tr_height) + (desc _tr_requestkey) + (asc _tr_idx) + +data TransferSearchParams = TransferSearchParams + { tspToken :: Text + , tspChainId :: Maybe ChainId + , tspHeightRange :: HeightRangeParams + , tspAccount :: Text + } + +transfersSearchSource :: + TransferSearchParams -> + Q Postgres ChainwebDataDb s (FilterMarked TransferT (PgExpr s)) +transfersSearchSource tsp = do + tr <- sourceTransfersScan + return $ FilterMarked (searchCond tr) tr + where + tokenCond tr = _tr_modulename tr ==. val_ (tspToken tsp) + chainCond tr = tspChainId tsp <&> \(ChainId c) -> _tr_chainid tr ==. fromIntegral c + searchCond tr = and_ $ tokenCond tr : maybeToList (chainCond tr) + getOrder tr = + ( desc_ $ _tr_height tr + , desc_ $ _tr_requestkey tr + , asc_ $ _tr_idx tr) + accountQuery accountField = orderBy_ getOrder $ do + tr <- all_ (_cddb_transfers database) + guard_ $ accountField tr ==. val_ (tspAccount tsp) + guardInRange (tspHeightRange tsp) (_tr_height tr) + return tr + sourceTransfersScan = unionAll_ (accountQuery _tr_from_acct) (accountQuery _tr_to_acct) + +newtype TransferSearchExtrasT f = TransferSearchExtras + { tseBlockTime :: C f UTCTime + } deriving (Generic, Beamable) + +transferSearchExtras :: + TransferT (PgExpr s) -> + Q Postgres ChainwebDataDb s (TransferSearchExtrasT (PgExpr s)) +transferSearchExtras tr = do + blk <- all_ $ _cddb_blocks database + guard_ $ _tr_block tr `references_` blk + return $ TransferSearchExtras + { tseBlockTime = _block_creationTime blk + } + +whenArg :: Monad m => Maybe a -> (a -> m ()) -> m () +whenArg p a = maybe (return ()) a p + +and_ :: BeamSqlBackend be => [QExpr be s Bool] -> QExpr be s Bool +and_ [] = val_ True +and_ (cond:conds) = foldr (&&.) 
cond conds \ No newline at end of file diff --git a/lib/ChainwebDb/Types/Block.hs b/haskell-src/lib/ChainwebDb/Types/Block.hs similarity index 97% rename from lib/ChainwebDb/Types/Block.hs rename to haskell-src/lib/ChainwebDb/Types/Block.hs index 93e6d7d4..450e6abc 100644 --- a/lib/ChainwebDb/Types/Block.hs +++ b/haskell-src/lib/ChainwebDb/Types/Block.hs @@ -23,11 +23,11 @@ import Data.Time.Clock (UTCTime) import Database.Beam import Database.Beam.AutoMigrate hiding (Table) import Database.Beam.Backend.SQL hiding (tableName) -import Database.Beam.Backend.SQL.Row (FromBackendRow) +import Database.Beam.Backend.SQL.Row () import Database.Beam.Migrate import Database.Beam.Postgres (Postgres) import Database.Beam.Postgres.Syntax (PgValueSyntax) -import Database.Beam.Query (HasSqlEqualityCheck) +import Database.Beam.Query () ------------------------------------------------------------------------------ import ChainwebDb.Types.DbHash ------------------------------------------------------------------------------ diff --git a/haskell-src/lib/ChainwebDb/Types/Common.hs b/haskell-src/lib/ChainwebDb/Types/Common.hs new file mode 100644 index 00000000..d3e84987 --- /dev/null +++ b/haskell-src/lib/ChainwebDb/Types/Common.hs @@ -0,0 +1,61 @@ +{-# LANGUAGE DeriveAnyClass #-} +{-# LANGUAGE DeriveGeneric #-} +{-# LANGUAGE DerivingStrategies #-} +{-# LANGUAGE FlexibleContexts #-} +{-# LANGUAGE FlexibleInstances #-} +{-# LANGUAGE GeneralizedNewtypeDeriving #-} +{-# LANGUAGE ImpredicativeTypes #-} +{-# LANGUAGE LambdaCase #-} +{-# LANGUAGE MultiParamTypeClasses #-} +{-# LANGUAGE TypeFamilies #-} +{-# LANGUAGE UndecidableInstances #-} + +{-# OPTIONS_GHC -fno-warn-missing-signatures #-} +{-# OPTIONS_GHC -fno-warn-orphans #-} + +module ChainwebDb.Types.Common where + +------------------------------------------------------------------------------ +import Data.Text (Text) +import qualified Data.Text as T +import Database.Beam +import Database.Beam.AutoMigrate hiding (Table) +import Database.Beam.Backend.SQL hiding (tableName) +import Database.Beam.Backend.SQL.Row () +import Database.Beam.Backend.SQL.SQL92 () +import Database.Beam.Backend.Types +import Database.Beam.Migrate +------------------------------------------------------------------------------ +import ChainwebDb.Types.DbHash +------------------------------------------------------------------------------ + +data ReqKeyOrCoinbase = RKCB_RequestKey (DbHash TxHash) | RKCB_Coinbase + deriving (Eq, Ord, Generic) + +instance Show ReqKeyOrCoinbase where + show RKCB_Coinbase = "cb" + show (RKCB_RequestKey rk) = T.unpack $ unDbHash rk + +getTxHash :: ReqKeyOrCoinbase -> T.Text +getTxHash = \case + RKCB_RequestKey txhash -> unDbHash txhash + RKCB_Coinbase -> "" + +rkcbFromText :: Text -> ReqKeyOrCoinbase +rkcbFromText "cb" = RKCB_Coinbase +rkcbFromText rk = RKCB_RequestKey $ DbHash rk + +instance BeamMigrateSqlBackend be => HasSqlEqualityCheck be ReqKeyOrCoinbase + +instance BeamMigrateSqlBackend be => HasDefaultSqlDataType be ReqKeyOrCoinbase where + defaultSqlDataType _ _ _ = varCharType Nothing Nothing + +instance HasSqlValueSyntax be String => HasSqlValueSyntax be ReqKeyOrCoinbase where + sqlValueSyntax = autoSqlValueSyntax + +instance (BeamBackend be, FromBackendRow be Text) => FromBackendRow be ReqKeyOrCoinbase where + fromBackendRow = rkcbFromText <$> fromBackendRow + +instance HasColumnType ReqKeyOrCoinbase where + defaultColumnType _ = SqlStdType $ varCharType Nothing Nothing + defaultTypeCast _ = Just "character varying" diff --git 
a/lib/ChainwebDb/Types/DbHash.hs b/haskell-src/lib/ChainwebDb/Types/DbHash.hs similarity index 92% rename from lib/ChainwebDb/Types/DbHash.hs rename to haskell-src/lib/ChainwebDb/Types/DbHash.hs index 032fdc6f..d7c60acb 100644 --- a/lib/ChainwebDb/Types/DbHash.hs +++ b/haskell-src/lib/ChainwebDb/Types/DbHash.hs @@ -10,8 +10,8 @@ import Data.Aeson import Data.Text (Text) import Database.Beam.AutoMigrate import Database.Beam.Backend.SQL hiding (tableName) -import Database.Beam.Backend.SQL.Row (FromBackendRow) -import Database.Beam.Backend.SQL.SQL92 (HasSqlValueSyntax) +import Database.Beam.Backend.SQL.Row () +import Database.Beam.Backend.SQL.SQL92 () import Database.Beam.Migrate (HasDefaultSqlDataType) import Database.Beam.Postgres (Postgres) import Database.Beam.Postgres.Syntax (PgValueSyntax) diff --git a/lib/ChainwebDb/Types/Event.hs b/haskell-src/lib/ChainwebDb/Types/Event.hs similarity index 65% rename from lib/ChainwebDb/Types/Event.hs rename to haskell-src/lib/ChainwebDb/Types/Event.hs index e3e7580e..dc54ba9b 100644 --- a/lib/ChainwebDb/Types/Event.hs +++ b/haskell-src/lib/ChainwebDb/Types/Event.hs @@ -19,46 +19,13 @@ module ChainwebDb.Types.Event where import Data.Aeson import Data.Int import Data.Text (Text) -import qualified Data.Text as T import Database.Beam -import Database.Beam.AutoMigrate hiding (Table) -import Database.Beam.Backend.SQL hiding (tableName) -import Database.Beam.Backend.SQL.Row (FromBackendRow) -import Database.Beam.Backend.SQL.SQL92 (HasSqlValueSyntax) -import Database.Beam.Backend.Types -import Database.Beam.Migrate import Database.Beam.Postgres ------------------------------------------------------------------------------ import ChainwebDb.Types.Block -import ChainwebDb.Types.DbHash +import ChainwebDb.Types.Common ------------------------------------------------------------------------------ -data ReqKeyOrCoinbase = RKCB_RequestKey (DbHash TxHash) | RKCB_Coinbase - deriving (Eq, Ord, Generic) - -instance Show ReqKeyOrCoinbase where - show RKCB_Coinbase = "cb" - show (RKCB_RequestKey rk) = T.unpack $ unDbHash rk - -rkcbFromText :: Text -> ReqKeyOrCoinbase -rkcbFromText "cb" = RKCB_Coinbase -rkcbFromText rk = RKCB_RequestKey $ DbHash rk - -instance BeamMigrateSqlBackend be => HasSqlEqualityCheck be ReqKeyOrCoinbase - -instance BeamMigrateSqlBackend be => HasDefaultSqlDataType be ReqKeyOrCoinbase where - defaultSqlDataType _ _ _ = varCharType Nothing Nothing - -instance HasSqlValueSyntax be String => HasSqlValueSyntax be ReqKeyOrCoinbase where - sqlValueSyntax = autoSqlValueSyntax - -instance (BeamBackend be, FromBackendRow be Text) => FromBackendRow be ReqKeyOrCoinbase where - fromBackendRow = rkcbFromText <$> fromBackendRow - -instance HasColumnType ReqKeyOrCoinbase where - defaultColumnType _ = SqlStdType $ varCharType Nothing Nothing - defaultTypeCast _ = Just "character varying" - data EventT f = Event { _ev_requestkey :: C f ReqKeyOrCoinbase , _ev_block :: PrimaryKey BlockT f diff --git a/lib/ChainwebDb/Types/MinerKey.hs b/haskell-src/lib/ChainwebDb/Types/MinerKey.hs similarity index 100% rename from lib/ChainwebDb/Types/MinerKey.hs rename to haskell-src/lib/ChainwebDb/Types/MinerKey.hs diff --git a/lib/ChainwebDb/Types/Signer.hs b/haskell-src/lib/ChainwebDb/Types/Signer.hs similarity index 95% rename from lib/ChainwebDb/Types/Signer.hs rename to haskell-src/lib/ChainwebDb/Types/Signer.hs index cbace42d..112c525f 100644 --- a/lib/ChainwebDb/Types/Signer.hs +++ b/haskell-src/lib/ChainwebDb/Types/Signer.hs @@ -22,8 +22,8 @@ import Data.Text (Text) import 
Database.Beam import Database.Beam.AutoMigrate hiding (Table) import Database.Beam.Backend.SQL hiding (tableName) -import Database.Beam.Backend.SQL.Row (FromBackendRow) -import Database.Beam.Backend.SQL.SQL92 (HasSqlValueSyntax) +import Database.Beam.Backend.SQL.Row () +import Database.Beam.Backend.SQL.SQL92 () import Database.Beam.Migrate import Database.Beam.Postgres import Database.Beam.Postgres.Syntax (PgValueSyntax) diff --git a/lib/ChainwebDb/Types/Transaction.hs b/haskell-src/lib/ChainwebDb/Types/Transaction.hs similarity index 99% rename from lib/ChainwebDb/Types/Transaction.hs rename to haskell-src/lib/ChainwebDb/Types/Transaction.hs index b9270120..493306f5 100644 --- a/lib/ChainwebDb/Types/Transaction.hs +++ b/haskell-src/lib/ChainwebDb/Types/Transaction.hs @@ -79,7 +79,7 @@ Transaction (LensFor tx_badResult) (LensFor tx_goodResult) (LensFor tx_logs) - (LensFor tx_metadat) + (LensFor tx_metadata) (LensFor tx_continuation) (LensFor tx_txid) (LensFor tx_numEvents) diff --git a/haskell-src/lib/ChainwebDb/Types/Transfer.hs b/haskell-src/lib/ChainwebDb/Types/Transfer.hs new file mode 100644 index 00000000..f6f6e0e0 --- /dev/null +++ b/haskell-src/lib/ChainwebDb/Types/Transfer.hs @@ -0,0 +1,75 @@ +{-# LANGUAGE DeriveAnyClass #-} +{-# LANGUAGE DeriveGeneric #-} +{-# LANGUAGE DerivingStrategies #-} +{-# LANGUAGE FlexibleContexts #-} +{-# LANGUAGE FlexibleInstances #-} +{-# LANGUAGE GeneralizedNewtypeDeriving #-} +{-# LANGUAGE ImpredicativeTypes #-} +{-# LANGUAGE MultiParamTypeClasses #-} +{-# LANGUAGE NoImplicitPrelude #-} +{-# LANGUAGE TypeFamilies #-} +{-# LANGUAGE UndecidableInstances #-} + +{-# OPTIONS_GHC -fno-warn-missing-signatures #-} +{-# OPTIONS_GHC -fno-warn-orphans #-} + +module ChainwebDb.Types.Transfer where + +---------------------------------------------------------------------------- +import BasePrelude +import Data.Scientific +import Data.Text (Text) +import Database.Beam +import Database.Beam.Backend.SQL.SQL92 +import Database.Beam.Postgres +import Database.Beam.Postgres.Syntax +import qualified Database.Beam.AutoMigrate as BA +import Database.PostgreSQL.Simple.ToField +import Database.PostgreSQL.Simple.FromField +------------------------------------------------------------------------------ +import ChainwebDb.Types.Block +import ChainwebDb.Types.Common +------------------------------------------------------------------------------ +data TransferT f = Transfer + { _tr_block :: PrimaryKey BlockT f + , _tr_requestkey :: C f ReqKeyOrCoinbase + , _tr_chainid :: C f Int64 + , _tr_height :: C f Int64 + , _tr_idx :: C f Int64 + , _tr_modulename :: C f Text + , _tr_moduleHash :: C f Text + , _tr_from_acct :: C f Text + , _tr_to_acct :: C f Text + , _tr_amount :: C f KDAScientific + } + deriving stock (Generic) + deriving anyclass (Beamable) + +Transfer + (BlockId (LensFor tr_block)) + (LensFor tr_requestkey) + (LensFor tr_chainid) + (LensFor tr_height) + (LensFor tr_idx) + (LensFor tr_modulename) + (LensFor tr_moduleHash) + (LensFor tr_from_acct) + (LensFor tr_to_acct) + (LensFor tr_amount) + = tableLenses + +type Transfer = TransferT Identity +type TransferId = PrimaryKey TransferT Identity + +instance Table TransferT where + data PrimaryKey TransferT f = TransferId (PrimaryKey BlockT f) (C f ReqKeyOrCoinbase) (C f Int64) (C f Int64) (C f Text) + deriving stock (Generic) + deriving anyclass (Beamable) + primaryKey = TransferId <$> _tr_block <*> _tr_requestkey <*> _tr_chainid <*> _tr_idx <*> _tr_moduleHash + +newtype KDAScientific = KDAScientific { getKDAScientific :: 
Scientific } + deriving newtype (Eq, Show, HasSqlValueSyntax PgValueSyntax, ToField, FromField, FromBackendRow Postgres) + +instance BA.HasColumnType KDAScientific where + defaultColumnType _ = BA.SqlStdType (numericType Nothing) + defaultTypeCast _ = Just "numeric" diff --git a/test/Chainweb/Data/Test/Backfill.hs b/haskell-src/test/Chainweb/Data/Test/Backfill.hs similarity index 100% rename from test/Chainweb/Data/Test/Backfill.hs rename to haskell-src/test/Chainweb/Data/Test/Backfill.hs diff --git a/test/Chainweb/Data/Test/Parser.hs b/haskell-src/test/Chainweb/Data/Test/Parser.hs similarity index 100% rename from test/Chainweb/Data/Test/Parser.hs rename to haskell-src/test/Chainweb/Data/Test/Parser.hs diff --git a/test/Main.hs b/haskell-src/test/Main.hs similarity index 100% rename from test/Main.hs rename to haskell-src/test/Main.hs diff --git a/lib/ChainwebData/TxDetail.hs b/lib/ChainwebData/TxDetail.hs deleted file mode 100644 index c83ad3c4..00000000 --- a/lib/ChainwebData/TxDetail.hs +++ /dev/null @@ -1,52 +0,0 @@ -{-# LANGUAGE DeriveGeneric #-} - -module ChainwebData.TxDetail where - -import ChainwebData.Util -import Data.Aeson -import Data.Text (Text) -import Data.Time -import GHC.Generics - -data TxEvent = TxEvent - { _txEvent_name :: Text - , _txEvent_params :: [Value] - } deriving (Eq,Show,Generic) - -instance ToJSON TxEvent where toJSON = lensyToJSON 9 -instance FromJSON TxEvent where parseJSON = lensyParseJSON 9 - - -data TxDetail = TxDetail - { _txDetail_ttl :: Int - , _txDetail_gasLimit :: Int - , _txDetail_gasPrice :: Double - , _txDetail_nonce :: Text - , _txDetail_pactId :: Maybe Text - , _txDetail_rollback :: Maybe Bool - , _txDetail_step :: Maybe Int - , _txDetail_data :: Value - , _txDetail_proof :: (Maybe Text) - , _txDetail_gas :: Int - , _txDetail_result :: Value - , _txDetail_logs :: Text - , _txDetail_metadata :: Value - , _txDetail_continuation :: Maybe Value - , _txDetail_txid :: Int - , _txDetail_chain :: Int - , _txDetail_height :: Int - , _txDetail_blockTime :: UTCTime - , _txDetail_blockHash :: Text - , _txDetail_creationTime :: UTCTime - , _txDetail_requestKey :: Text - , _txDetail_sender :: Text - , _txDetail_code :: Maybe Text - , _txDetail_success :: Bool - , _txDetail_events :: [TxEvent] - } deriving (Eq,Show,Generic) - -instance ToJSON TxDetail where - toJSON = lensyToJSON 10 - -instance FromJSON TxDetail where - parseJSON = lensyParseJSON 10 diff --git a/node-config-for-chainweb-data.yaml b/node-config-for-chainweb-data.yaml new file mode 100644 index 00000000..79857cff --- /dev/null +++ b/node-config-for-chainweb-data.yaml @@ -0,0 +1,41 @@ +chainweb: + allowReadsInLocal: true + headerStream: true + throttling: + global: 1000 + +# Less chatty logging +logging: + telemetryBackend: + enabled: true + configuration: + handle: stdout + color: auto + format: text + + backend: + handle: stdout + color: auto + format: text + + logger: + log_level: info + + filter: + rules: + - key: component + value: cut-monitor + level: info + - key: component + value: pact-tx-replay + level: info + - key: component + value: connection-manager + level: info + - key: component + value: miner + level: info + - key: component + value: local-handler + level: info + default: error diff --git a/scripts/richlist.sh b/scripts/richlist.sh deleted file mode 100755 index 2db9c8cd..00000000 --- a/scripts/richlist.sh +++ /dev/null @@ -1,20 +0,0 @@ -#!/bin/sh - - -for i in $(seq 0 $1); -do - sqlite3 -header -csv pact-v1-chain-$i.sqlite "select rowkey as acct_id, txid, 
cast(ifnull(json_extract(rowdata, '$.balance.decimal'), json_extract(rowdata, '$.balance')) as REAL) as 'balance' - from [coin_coin-table] as coin - INNER JOIN ( - select - rowkey as acct_id, - max(txid) as last_txid - from 'coin_coin-table' - group by acct_id - ) latest ON coin.rowkey = latest.acct_id AND coin.txid = latest.last_txid - order by balance desc;" > richlist-chain-$i.csv -done - -cat richlist-chain-*.csv > richlist.csv -rm richlist-chain-*.csv -rm pact-v1-chain-*.sqlite* diff --git a/shell.nix b/shell.nix new file mode 100644 index 00000000..29524ca6 --- /dev/null +++ b/shell.nix @@ -0,0 +1,8 @@ +(import ( + fetchTarball { + url = "https://github.com/edolstra/flake-compat/archive/35bb57c0c8d8b62bbfd284272c928ceb64ddbde9.tar.gz"; + sha256 = "1prd9b1xx8c0sfwnyzkspplh30m613j42l1k789s521f4kv4c2z2"; } +) { + src = ./.; +}).shellNix + diff --git a/stack.yaml b/stack.yaml index d074e618..5962bf0b 100644 --- a/stack.yaml +++ b/stack.yaml @@ -2,6 +2,9 @@ resolver: lts-14.27 ghc-options: {"$locals": -ddump-to-file -ddump-hi} +packages: + - haskell-src + extra-deps: - ed25519-donna-0.1.1 - megaparsec-9.0.0
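As a closing note on the new `transfers` table: `_tr_amount` is stored through the `KDAScientific` newtype above, which maps to a Postgres `numeric` column, so token amounts survive the database round trip without binary floating-point drift. A small standalone sketch (not part of this diff) of the exactness that `Scientific` provides:

```haskell
import Data.Scientific (Scientific, scientific)

main :: IO ()
main = do
  -- 123.456 KDA as an exact decimal: coefficient 123456, base-10 exponent -3.
  let amount = scientific 123456 (-3) :: Scientific
  print amount            -- prints 123.456
  -- Arithmetic stays exact; no 0.1 + 0.2 /= 0.3 style surprises.
  print (amount + amount) -- prints 246.912
```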