diff --git a/codegenerator/cli/README.md b/codegenerator/cli/README.md index fb9063e78..b5b52b632 100644 --- a/codegenerator/cli/README.md +++ b/codegenerator/cli/README.md @@ -19,7 +19,7 @@ HyperIndex is a fast, developer-friendly multichain indexer, optimized for both - **[Reorg support](https://docs.envio.dev/docs/HyperIndex/reorgs-support)** – Graceful handling of blockchain reorganizations - **GraphQL API** – Easy-to-query indexed data - **Flexible language support** – JavaScript, TypeScript, and ReScript -- **Factory contract support** – Index data from 100,000+ factory contracts seamlessly +- **Factory contract support** – Index data from 1M+ dynamically registered contracts seamlessly - **On-chain & off-chain data integration** – Easily combine multiple data sources - **[Self-hosted & managed options](https://docs.envio.dev/docs/HyperIndex/hosted-service)** – Run your own setup or use HyperIndex hosted services - **Detailed logging & error reporting** – Debug and optimize with clarity diff --git a/codegenerator/cli/npm/envio/package.json b/codegenerator/cli/npm/envio/package.json index fd9ab44a9..9b3ea89f2 100644 --- a/codegenerator/cli/npm/envio/package.json +++ b/codegenerator/cli/npm/envio/package.json @@ -23,6 +23,11 @@ "@envio-dev/hypersync-client": "0.6.5", "rescript": "11.1.3", "rescript-schema": "9.3.0", + "@rescript/react": "0.12.1", + "ink": "3.2.0", + "ink-big-text": "1.2.0", + "ink-spinner": "4.0.3", + "ink-use-stdout-dimensions": "1.0.5", "viem": "2.21.0", "bignumber.js": "9.1.2", "pino": "8.16.1", diff --git a/codegenerator/cli/npm/envio/package.json.tmpl b/codegenerator/cli/npm/envio/package.json.tmpl index 0042d481d..69d677128 100644 --- a/codegenerator/cli/npm/envio/package.json.tmpl +++ b/codegenerator/cli/npm/envio/package.json.tmpl @@ -34,6 +34,11 @@ "@envio-dev/hypersync-client": "0.6.5", "rescript": "11.1.3", "rescript-schema": "9.3.0", + "@rescript/react": "0.12.1", + "ink": "3.2.0", + "ink-big-text": "1.2.0", + "ink-spinner": "4.0.3", + "ink-use-stdout-dimensions": "1.0.5", "viem": "2.21.0", "bignumber.js": "9.1.2", "pino": "8.16.1", diff --git a/codegenerator/cli/npm/envio/pnpm-lock.yaml b/codegenerator/cli/npm/envio/pnpm-lock.yaml index d7649d102..3353b52cf 100644 --- a/codegenerator/cli/npm/envio/pnpm-lock.yaml +++ b/codegenerator/cli/npm/envio/pnpm-lock.yaml @@ -11,9 +11,24 @@ importers: '@envio-dev/hypersync-client': specifier: 0.6.5 version: 0.6.5 + '@rescript/react': + specifier: 0.12.1 + version: 0.12.1(react-dom@19.1.1(react@19.1.1))(react@19.1.1) bignumber.js: specifier: 9.1.2 version: 9.1.2 + ink: + specifier: 3.2.0 + version: 3.2.0(react@19.1.1) + ink-big-text: + specifier: 1.2.0 + version: 1.2.0(ink@3.2.0(react@19.1.1))(react@19.1.1) + ink-spinner: + specifier: 4.0.3 + version: 4.0.3(ink@3.2.0(react@19.1.1))(react@19.1.1) + ink-use-stdout-dimensions: + specifier: 1.0.5 + version: 1.0.5(ink@3.2.0(react@19.1.1))(react@19.1.1) pino: specifier: 8.16.1 version: 8.16.1 @@ -89,6 +104,12 @@ packages: resolution: {integrity: sha512-3giAOQvZiH5F9bMlMiv8+GSPMeqg0dbaeo58/0SlA9sxSqZhnUtxzX9/2FzyhS9sWQf5S0GJE0AKBrFqjpeYcg==} engines: {node: '>=8.0.0'} + '@rescript/react@0.12.1': + resolution: {integrity: sha512-ZD7nhDr5FZgLYqRH9s4CNM+LRz/3IMuTb+LH12fd2Akk0xYkYUP+DZveB2VQUC2UohJnTf/c8yPSNsiFihVCCg==} + peerDependencies: + react: '>=18.0.0' + react-dom: '>=18.0.0' + '@scure/base@1.1.9': resolution: {integrity: sha512-8YKhl8GHiNI/pU2VMaofa2Tor7PJRAjwQLBBuilkJ9L5+13yVbC7JO/wS7piioAvPSwR3JKM1IJ/u4xQzbcXKg==} @@ -98,6 +119,9 @@ packages: '@scure/bip39@1.3.0': 
resolution: {integrity: sha512-disdg7gHuTDZtY+ZdkmLpPCk7fxZSu3gBiEGuoC1XYxv9cGx3Z6cpTggCgW6odSOOIXCiDjuGejW+aJKCY/pIQ==} + '@types/yoga-layout@1.9.2': + resolution: {integrity: sha512-S9q47ByT2pPvD65IvrWp7qppVMpk9WGMbVq9wbWZOHg6tnXSD4vyhao6nOSBwwfDdV2p3Kx9evA9vI+XWTfDvw==} + abitype@1.0.5: resolution: {integrity: sha512-YzDhti7cjlfaBhHutMaboYB21Ha3rXR9QTkNJFzYC4kC8YclaiwPBBBJY8ejFdu2wnJeZCVZSMlQJ7fi8S6hsw==} peerDependencies: @@ -113,10 +137,30 @@ packages: resolution: {integrity: sha512-h8lQ8tacZYnR3vNQTgibj+tODHI5/+l06Au2Pcriv/Gmet0eaj4TwWH41sO9wnHDiQsEj19q0drzdWdeAHtweg==} engines: {node: '>=6.5'} + ansi-escapes@4.3.2: + resolution: {integrity: sha512-gKXj5ALrKWQLsYG9jlTRmR/xKluxHV+Z9QEwNIgCfM1/uwPMCuzVVnh5mwTd+OuBZcwSIMbqssNWRm1lE51QaQ==} + engines: {node: '>=8'} + + ansi-regex@5.0.1: + resolution: {integrity: sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==} + engines: {node: '>=8'} + + ansi-styles@4.3.0: + resolution: {integrity: sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==} + engines: {node: '>=8'} + + astral-regex@2.0.0: + resolution: {integrity: sha512-Z7tMw1ytTXt5jqMcOP+OQteU1VuNK9Y02uuJtKQ1Sv69jXQKKg5cibLwGJow8yzZP+eAc18EmLGPal0bp36rvQ==} + engines: {node: '>=8'} + atomic-sleep@1.0.0: resolution: {integrity: sha512-kNOjDqAh7px0XWNI+4QbzoiR/nTkHAWNud2uvnJquD1/x5a7EQZMJT0AczqK0Qn67oY/TTQ1LbUKajZpp3I9tQ==} engines: {node: '>=8.0.0'} + auto-bind@4.0.0: + resolution: {integrity: sha512-Hdw8qdNiqdJ8LqT0iK0sVzkFbzg6fhnQqqfWhBDxcHZvU75+B+ayzTy8x+k5Ix0Y92XOhOUlx74ps+bA6BeYMQ==} + engines: {node: '>=8'} + balanced-match@1.0.2: resolution: {integrity: sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==} @@ -135,15 +179,69 @@ packages: buffer@6.0.3: resolution: {integrity: sha512-FTiCpNxtwiZZHEZbcbTIcZjERVICn9yq/pDFkTl95/AxzD1naBctN7YO68riM/gLSDY7sdrMby8hofADYuuqOA==} + cfonts@2.10.1: + resolution: {integrity: sha512-l5IcLv4SaOdL/EGR6BpOF5SEro88VcGJJ6+xbvJb+wXi19YC6UeHE/brv7a4vIcLZopnt3Ys3zWeNnyfB04UPg==} + engines: {node: '>=10'} + hasBin: true + + chalk@4.1.2: + resolution: {integrity: sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==} + engines: {node: '>=10'} + + ci-info@2.0.0: + resolution: {integrity: sha512-5tK7EtrZ0N+OLFMthtqOj4fI2Jeb88C4CAZPu25LDVUgXJ0A3Js4PMGqrn0JU1W0Mh1/Z8wZzYPxqUrXeBboCQ==} + + cli-boxes@2.2.1: + resolution: {integrity: sha512-y4coMcylgSCdVinjiDBuR8PCC2bLjyGTwEmPb9NHR/QaNU6EUOXcTY/s6VjGMD6ENSEaeQYHCY0GNGS5jfMwPw==} + engines: {node: '>=6'} + + cli-cursor@3.1.0: + resolution: {integrity: sha512-I/zHAwsKf9FqGoXM4WWRACob9+SNukZTd94DWF57E4toouRulbCxcUh6RKUEOQlYTHJnzkPMySvPNaaSLNfLZw==} + engines: {node: '>=8'} + + cli-spinners@2.9.2: + resolution: {integrity: sha512-ywqV+5MmyL4E7ybXgKys4DugZbX0FC6LnwrhjuykIjnK9k8OQacQ7axGKnjDXWNhns0xot3bZI5h55H8yo9cJg==} + engines: {node: '>=6'} + + cli-truncate@2.1.0: + resolution: {integrity: sha512-n8fOixwDD6b/ObinzTrp1ZKFzbgvKZvuz/TvejnLn1aQfC6r52XEx85FmuC+3HI+JM7coBRXUvNqEU2PHVrHpg==} + engines: {node: '>=8'} + + code-excerpt@3.0.0: + resolution: {integrity: sha512-VHNTVhd7KsLGOqfX3SyeO8RyYPMp1GJOg194VITk04WMYCv4plV68YWe6TJZxd9MhobjtpMRnVky01gqZsalaw==} + engines: {node: '>=10'} + + color-convert@2.0.1: + resolution: {integrity: sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==} + engines: {node: '>=7.0.0'} + + color-name@1.1.4: + resolution: {integrity: 
sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==} + colorette@2.0.20: resolution: {integrity: sha512-IfEDxwoWIjkeXL1eXcDiow4UbKjhLdq6/EuSVR9GMN7KVH3r9gQ83e73hsz1Nd1T3ijd5xv1wcWRYO+D6kCI2w==} + convert-to-spaces@1.0.2: + resolution: {integrity: sha512-cj09EBuObp9gZNQCzc7hByQyrs6jVGE+o9kSJmeUoj+GiPiJvi5LYqEH/Hmme4+MTLHM+Ejtq+FChpjjEnsPdQ==} + engines: {node: '>= 4'} + dateformat@4.6.3: resolution: {integrity: sha512-2P0p0pFGzHS5EMnhdxQi7aJN+iMheud0UhG4dlE1DLAlvL8JHjJJTX/CSm4JXwV0Ka5nGk3zC5mcb5bUQUxxMA==} + define-property@1.0.0: + resolution: {integrity: sha512-cZTYKFWspt9jZsMscWo8sc/5lbPC9Q0N5nBLgb+Yd915iL3udB1uFgS3B8YCx66UVHq018DAVFoee7x+gxggeA==} + engines: {node: '>=0.10.0'} + + emoji-regex@8.0.0: + resolution: {integrity: sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==} + end-of-stream@1.4.4: resolution: {integrity: sha512-+uw1inIHVPQoaVuHzRyXd21icM+cnt4CzD5rW+NC1wjOUSTOs+Te7FOv7AhN7vS9x/oIyhLP5PR1H+phQAHu5Q==} + escape-string-regexp@2.0.0: + resolution: {integrity: sha512-UpzcLCXolUWcNu5HtVMHYdXJjArjsF9C0aNnquZYY4uW/Vu0miy5YoWvbV345HauVvcAUnpRuhMMcqTcGOY2+w==} + engines: {node: '>=8'} + event-target-shim@5.0.1: resolution: {integrity: sha512-i/2XbnSz/uxRCU6+NdVJgKWDTM427+MqYbkQzD321DuCQJUqOuJKIA0IM2+W2xtYHdKOmZ4dR6fExsd4SXL+WQ==} engines: {node: '>=6'} @@ -165,17 +263,32 @@ packages: fs.realpath@1.0.0: resolution: {integrity: sha512-OO0pH2lK6a0hZnAdau5ItzHPI6pUlvI7jMVnxUQRtw4owF2wk8lOSabtGDCTP4Ggrg2MbGnWO9X8K1t4+fGMDw==} + function-bind@1.1.2: + resolution: {integrity: sha512-7XHNxH7qX9xG5mIwxkhumTox/MIRNcOgDrxWsMt2pAr23WHp6MrRlN7FBSFpCpr+oVO0F744iUgR82nJMfG2SA==} + glob@8.1.0: resolution: {integrity: sha512-r8hpEjiQEYlF2QU0df3dS+nxxSIreXQS1qRhMJM0Q5NDdR386C7jb7Hwwod8Fgiuex+k0GFjgft18yvxm5XoCQ==} engines: {node: '>=12'} deprecated: Glob versions prior to v9 are no longer supported + has-flag@4.0.0: + resolution: {integrity: sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==} + engines: {node: '>=8'} + + hasown@2.0.2: + resolution: {integrity: sha512-0hJU9SCPvmMzIBdZFqNPXWa6dqh7WdH0cII9y+CyS8rG3nL48Bclra9HmKhVVUHyPWNH5Y7xDwAB7bfgSjkUMQ==} + engines: {node: '>= 0.4'} + help-me@4.2.0: resolution: {integrity: sha512-TAOnTB8Tz5Dw8penUuzHVrKNKlCIbwwbHnXraNJxPwf8LRtE2HlM84RYuezMFcwOJmoYOCWVDyJ8TQGxn9PgxA==} ieee754@1.2.1: resolution: {integrity: sha512-dcyqhDvX1C46lXZcVqCpK+FtMRQVdIMN6/Df5js2zouUsqG7I6sFxitIC+7KYK29KdXOLHdu9zL4sFnoVQnqaA==} + indent-string@4.0.0: + resolution: {integrity: sha512-EdDDZu4A2OyIK7Lr/2zG+w5jmbuk1DVBnEwREQvBzspBJkCEbRa8GxU1lghYcaGJCnRWibjDXlq779X1/y5xwg==} + engines: {node: '>=8'} + inflight@1.0.6: resolution: {integrity: sha512-k92I/b08q4wvFscXCLvqfsHCrjrF7yiXsQuIVvVE7N82W3+aqpzuUdBbfhWcy/FZR3/4IgflMgKLOsvPDrGCJA==} deprecated: This module is not supported, and leaks memory. Do not use it. Check out lru-cache if you want a good and tested way to coalesce async requests by a key value, which is much more comprehensive and powerful. 
@@ -183,6 +296,63 @@ packages: inherits@2.0.4: resolution: {integrity: sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==} + ink-big-text@1.2.0: + resolution: {integrity: sha512-xDfn8oOhiji9c4wojTKSaBnEfgpTTd3KL7jsMYVht4SbpfLdSKvVZiMi3U5v45eSjLm1ycMmeMWAP1G99lWL5Q==} + engines: {node: '>=8'} + peerDependencies: + ink: '>=2.0.0' + react: '>=16.8.0' + + ink-spinner@4.0.3: + resolution: {integrity: sha512-uJ4nbH00MM9fjTJ5xdw0zzvtXMkeGb0WV6dzSWvFv2/+ks6FIhpkt+Ge/eLdh0Ah6Vjw5pLMyNfoHQpRDRVFbQ==} + engines: {node: '>=10'} + peerDependencies: + ink: '>=3.0.5' + react: '>=16.8.2' + + ink-use-stdout-dimensions@1.0.5: + resolution: {integrity: sha512-rVsqnw4tQEAJUoknU09+zHdDf30GJdkumkHr0iz/TOYMYEZJkYqziQSGJAM+Z+M603EDfO89+Nxyn/Ko2Zknfw==} + peerDependencies: + ink: '>=2.0.0' + react: '>=16.0.0' + + ink@3.2.0: + resolution: {integrity: sha512-firNp1q3xxTzoItj/eOOSZQnYSlyrWks5llCTVX37nJ59K3eXbQ8PtzCguqo8YI19EELo5QxaKnJd4VxzhU8tg==} + engines: {node: '>=10'} + peerDependencies: + '@types/react': '>=16.8.0' + react: '>=16.8.0' + peerDependenciesMeta: + '@types/react': + optional: true + + is-accessor-descriptor@1.0.1: + resolution: {integrity: sha512-YBUanLI8Yoihw923YeFUS5fs0fF2f5TSFTNiYAAzhhDscDa3lEqYuz1pDOEP5KvX94I9ey3vsqjJcLVFVU+3QA==} + engines: {node: '>= 0.10'} + + is-buffer@1.1.6: + resolution: {integrity: sha512-NcdALwpXkTm5Zvvbk7owOUSvVvBKDgKP5/ewfXEznmQFfs4ZRmanOeKBTjRVjka3QFoN6XJ+9F3USqfHqTaU5w==} + + is-ci@2.0.0: + resolution: {integrity: sha512-YfJT7rkpQB0updsdHLGWrvhBJfcfzNNawYDNIyQXJz0IViGf75O8EBPKSdvw2rF+LGCsX4FZ8tcr3b19LcZq4w==} + hasBin: true + + is-data-descriptor@1.0.1: + resolution: {integrity: sha512-bc4NlCDiCr28U4aEsQ3Qs2491gVq4V8G7MQyws968ImqjKuYtTJXrl7Vq7jsN7Ly/C3xj5KWFrY7sHNeDkAzXw==} + engines: {node: '>= 0.4'} + + is-descriptor@1.0.3: + resolution: {integrity: sha512-JCNNGbwWZEVaSPtS45mdtrneRWJFp07LLmykxeFV5F6oBvNF8vHSfJuJgoT472pSfk+Mf8VnlrspaFBHWM8JAw==} + engines: {node: '>= 0.4'} + + is-fullwidth-code-point@3.0.0: + resolution: {integrity: sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==} + engines: {node: '>=8'} + + is-number@3.0.0: + resolution: {integrity: sha512-4cboCqIpliH+mAvFNegjZQ4kgKc3ZUhQVr3HvWbSh5q3WH2v82ct+T2Y1hdU5Gdtorx/cLifQjqCbL7bpznLTg==} + engines: {node: '>=0.10.0'} + isows@1.0.4: resolution: {integrity: sha512-hEzjY+x9u9hPmBom9IIAqdJCwNLax+xrPb51vEPpERoFlIxgmZcHzsT5jKG06nvInKOBGvReAVz80Umed5CczQ==} peerDependencies: @@ -192,6 +362,24 @@ packages: resolution: {integrity: sha512-34wB/Y7MW7bzjKRjUKTa46I2Z7eV62Rkhva+KkopW7Qvv/OSWBqvkSY7vusOPrNuZcUG3tApvdVgNB8POj3SPw==} engines: {node: '>=10'} + js-tokens@4.0.0: + resolution: {integrity: sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ==} + + kind-of@3.2.2: + resolution: {integrity: sha512-NOW9QQXMoZGg/oqnVNoNTTIFEIid1627WCffUBJEdMxYApq7mNE7CpzucIPc+ZQg25Phej7IJSmX3hO+oblOtQ==} + engines: {node: '>=0.10.0'} + + lodash@4.17.21: + resolution: {integrity: sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg==} + + loose-envify@1.4.0: + resolution: {integrity: sha512-lyuxPGr/Wfhrlem2CL/UcnUc1zcqKAImBDzukY7Y5F/yQiNdko6+fRLevlw1HgMySw7f611UIY408EtxRSoK3Q==} + hasBin: true + + mimic-fn@2.1.0: + resolution: {integrity: sha512-OqbOk5oEQeAZ8WXWydlu9HJjz9WVdEIvamMCcXmuqUYjTknH/sqsWvhQ3vgwKFRR1HpjvNBKQ37nbJgYzGqGcg==} + engines: {node: '>=6'} + minimatch@5.1.6: resolution: {integrity: 
sha512-lKwV/1brpG6mBUFHtb7NUmtABCb2WZZmm2wNiOA5hAb8VdCS4B3dtMWyvcoViccwAW/COERjXLt0zP1zXUN26g==} engines: {node: '>=10'} @@ -199,6 +387,10 @@ packages: minimist@1.2.8: resolution: {integrity: sha512-2yyAR8qBkN3YuheJanUpWC5U3bb5osDywNB8RzDVlDwDHbocAJveqqj1u8+SVD7jkWT4yvsHCpWqqWqAxb0zCA==} + object-assign@4.1.1: + resolution: {integrity: sha512-rJgTQnkUnH1sFw8yT6VSU3zD3sWmu6sZhIseY8VX+GRu3P6F7Fu+JNDoXfklElbLJSnc3FUQHVe4cU5hj+BcUg==} + engines: {node: '>=0.10.0'} + on-exit-leak-free@2.1.2: resolution: {integrity: sha512-0eJJY6hXLGf1udHwfNftBqH+g73EU4B504nZeKpz1sYRKafAghwxEJunB2O7rDZkL4PGfsMVnTXZ2EjibbqcsA==} engines: {node: '>=14.0.0'} @@ -206,6 +398,14 @@ packages: once@1.4.0: resolution: {integrity: sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w==} + onetime@5.1.2: + resolution: {integrity: sha512-kbpaSSGJTWdAY5KPVeMOKXSrPtr8C8C7wodJbcsd51jRnmD+GZu8Y0VoU6Dm5Z4vWr0Ig/1NKuWRKf7j5aaYSg==} + engines: {node: '>=6'} + + patch-console@1.0.0: + resolution: {integrity: sha512-nxl9nrnLQmh64iTzMfyylSlRozL7kAXIaxw1fVcLYdyhNkJCRUzirRZTikXGJsg+hc4fqpneTK6iU2H1Q8THSA==} + engines: {node: '>=10'} + pino-abstract-transport@1.1.0: resolution: {integrity: sha512-lsleG3/2a/JIWUtf9Q5gUNErBqwIu1tUKTT3dUzaf5DySw9ra1wcqKjJjLX1VTY64Wk1eEOYsVGSaGfCK85ekA==} @@ -234,12 +434,36 @@ packages: resolution: {integrity: sha512-UocpgIrKyA2TKLVZDSfm8rGkL13C19YrQBAiG3xo3aDFWcHedxRxI3z+cIcucoxpSO0h5lff5iv/SXoxyeopeA==} engines: {node: ^16 || ^18 || >=20} + prop-types@15.8.1: + resolution: {integrity: sha512-oj87CgZICdulUohogVAR7AjlC0327U4el4L6eAvOqCeudMDVU0NThNaV+b9Df4dXgSP1gXMTnPdhfe/2qDH5cg==} + pump@3.0.2: resolution: {integrity: sha512-tUPXtzlGM8FE3P0ZL6DVs/3P58k9nk8/jZeQCurTJylQA8qFYzHFfhBJkuqyE0FifOsQ0uKWekiZ5g8wtr28cw==} quick-format-unescaped@4.0.4: resolution: {integrity: sha512-tYC1Q1hgyRuHgloV/YXs2w15unPVh8qfu/qCTfhTYamaw7fyhumKa2yGpdSo87vY32rIclj+4fWYQXUMs9EHvg==} + react-devtools-core@4.28.5: + resolution: {integrity: sha512-cq/o30z9W2Wb4rzBefjv5fBalHU0rJGZCHAkf/RHSBWSSYwh8PlQTqqOJmgIIbBtpj27T6FIPXeomIjZtCNVqA==} + + react-dom@19.1.1: + resolution: {integrity: sha512-Dlq/5LAZgF0Gaz6yiqZCf6VCcZs1ghAJyrsu84Q/GT0gV+mCxbfmKNoGRKBYMJ8IEdGPqu49YWXD02GCknEDkw==} + peerDependencies: + react: ^19.1.1 + + react-is@16.13.1: + resolution: {integrity: sha512-24e6ynE2H+OKt4kqsOvNd8kBpV65zoxbA4BVsEOB3ARVWQki/DHzaUoC5KuON/BiccDaCCTZBuOcfZs70kR8bQ==} + + react-reconciler@0.26.2: + resolution: {integrity: sha512-nK6kgY28HwrMNwDnMui3dvm3rCFjZrcGiuwLc5COUipBK5hWHLOxMJhSnSomirqWwjPBJKV1QcbkI0VJr7Gl1Q==} + engines: {node: '>=0.10.0'} + peerDependencies: + react: ^17.0.2 + + react@19.1.1: + resolution: {integrity: sha512-w8nqGImo45dmMIfljjMwOGtbmC/mk4CMYhWIicdSflH91J9TyCyczcPFXJzrZ/ZXcgGRFeP6BU0BEJTw6tZdfQ==} + engines: {node: '>=0.10.0'} + readable-stream@3.6.2: resolution: {integrity: sha512-9u/sniCrY3D5WdsERHzHE4G2YCXqoG5FTHUiCC4SIbr6XcLZBY05ya9EKjYek9O5xOAwjGq+1JdGBAS7Q9ScoA==} engines: {node: '>= 6'} @@ -265,6 +489,10 @@ packages: engines: {node: '>=10'} hasBin: true + restore-cursor@3.1.0: + resolution: {integrity: sha512-l+sSefzHpj5qimhFSE5a8nufZYAM3sBSVMAPtYkmC+4EH2anSGaEMXSD0izRQbu9nfyQ9y5JrVmp7E8oZrUjvA==} + engines: {node: '>=8'} + safe-buffer@5.2.1: resolution: {integrity: sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==} @@ -272,9 +500,26 @@ packages: resolution: {integrity: sha512-b3rppTKm9T+PsVCBEOUR46GWI7fdOs00VKZ1+9c1EWDaDMvjQc6tUwuFyIprgGgTcWoVHSKrU8H31ZHA2e0RHA==} engines: {node: '>=10'} + scheduler@0.20.2: + resolution: 
{integrity: sha512-2eWfGgAqqWFGqtdMmcL5zCMK1U8KlXv8SQFGglL3CEtd0aDVDWgeF/YoCmvln55m5zSk3J/20hTaSBeSObsQDQ==} + + scheduler@0.26.0: + resolution: {integrity: sha512-NlHwttCI/l5gCPR3D1nNXtWABUmBwvZpEQiD4IXSbIDq8BzLIK/7Ir5gTFSGZDUu37K5cMNp0hFtzO38sC7gWA==} + secure-json-parse@2.7.0: resolution: {integrity: sha512-6aU+Rwsezw7VR8/nyvKTx8QpWH9FrcYiXXlqC4z5d5XQBDRqtbfsRjnwGyqbi3gddNtWHuEk9OANUotL26qKUw==} + shell-quote@1.8.3: + resolution: {integrity: sha512-ObmnIF4hXNg1BqhnHmgbDETF8dLPCggZWBjkQfhZpbszZnYur5DUljTcCHii5LC3J5E0yeO/1LIMyH+UvHQgyw==} + engines: {node: '>= 0.4'} + + signal-exit@3.0.7: + resolution: {integrity: sha512-wnD2ZE+l+SPC/uoS0vXeE9L1+0wuaMqKlfz9AMUo38JsyLSBWSFcHR1Rri62LZc12vLr1gb3jl7iwQhgwpAbGQ==} + + slice-ansi@3.0.0: + resolution: {integrity: sha512-pSyv7bSTC7ig9Dcgbw9AuRNUb5k5V6oDudjZoMBSr13qpLBG7tB+zgCkARjq7xIUgdz5P1Qe8u+rSGdouOOIyQ==} + engines: {node: '>=8'} + sonic-boom@3.8.1: resolution: {integrity: sha512-y4Z8LCDBuum+PBP3lSV7RHrXscqksve/bi0as7mhwVnBW+/wUqKT/2Kb7um8yqcFy0duYbbPxzt89Zy2nOCaxg==} @@ -282,19 +527,43 @@ packages: resolution: {integrity: sha512-UcjcJOWknrNkF6PLX83qcHM6KHgVKNkV62Y8a5uYDVv9ydGQVwAHMKqHdJje1VTWpljG0WYpCDhrCdAOYH4TWg==} engines: {node: '>= 10.x'} + stack-utils@2.0.6: + resolution: {integrity: sha512-XlkWvfIm6RmsWtNJx+uqtKLS8eqFbxUg0ZzLXqY0caEy9l7hruX8IpiDnjsLavoBgqCCR71TqWO8MaXYheJ3RQ==} + engines: {node: '>=10'} + + string-width@4.2.3: + resolution: {integrity: sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==} + engines: {node: '>=8'} + string_decoder@1.3.0: resolution: {integrity: sha512-hkRX8U1WjJFd8LsDJ2yQ/wWWxaopEsABU1XfkM8A+j0+85JAGppt16cr1Whg6KIbb4okU6Mql6BOj+uup/wKeA==} + strip-ansi@6.0.1: + resolution: {integrity: sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==} + engines: {node: '>=8'} + strip-json-comments@3.1.1: resolution: {integrity: sha512-6fPc+R4ihwqP6N/aIv2f1gMH8lOVtWQHoqC4yK6oSDVVocumAsfCqjkXnqiYMhmMwS/mEHLp7Vehlt3ql6lEig==} engines: {node: '>=8'} + supports-color@7.2.0: + resolution: {integrity: sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==} + engines: {node: '>=8'} + tdigest@0.1.2: resolution: {integrity: sha512-+G0LLgjjo9BZX2MfdvPfH+MKLCrxlXSYec5DaPYP1fe6Iyhf0/fSmJ0bFiZ1F8BT6cGXl2LpltQptzjXKWEkKA==} thread-stream@2.7.0: resolution: {integrity: sha512-qQiRWsU/wvNolI6tbbCKd9iKaTnCXsTwVxhhKM6nctPdujTyztjlbUkUTUymidWcMnZ5pWR0ej4a0tjsW021vw==} + type-fest@0.12.0: + resolution: {integrity: sha512-53RyidyjvkGpnWPMF9bQgFtWp+Sl8O2Rp13VavmJgfAP9WWG6q6TkrKU8iyJdnwnfgHI6k2hTlgqH4aSdjoTbg==} + engines: {node: '>=10'} + + type-fest@0.21.3: + resolution: {integrity: sha512-t0rzBq87m3fVcduHDUFhKmyyX+9eo6WQjZvf51Ea/M0Q7+T374Jp1aUiyUl0GKxp8M/OETVHSDvmkyPgvX+X2w==} + engines: {node: '>=10'} + util-deprecate@1.0.2: resolution: {integrity: sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw==} @@ -309,9 +578,34 @@ packages: webauthn-p256@0.0.5: resolution: {integrity: sha512-drMGNWKdaixZNobeORVIqq7k5DsRC9FnG201K2QjeOoQLmtSDaSsVZdkg6n5jUALJKcAG++zBPJXmv6hy0nWFg==} + widest-line@3.1.0: + resolution: {integrity: sha512-NsmoXalsWVDMGupxZ5R08ka9flZjjiLvHVAWYOKtiKM8ujtZWr9cRffak+uSE48+Ob8ObalXpwyeUiyDD6QFgg==} + engines: {node: '>=8'} + + window-size@1.1.1: + resolution: {integrity: sha512-5D/9vujkmVQ7pSmc0SCBmHXbkv6eaHwXEx65MywhmUMsI8sGqJ972APq1lotfcwMKPFLuCFfL8xGHLIp7jaBmA==} + engines: {node: '>= 0.10.0'} + hasBin: true + + wrap-ansi@6.2.0: + resolution: 
{integrity: sha512-r6lPcBGxZXlIcymEu7InxDMhdW0KDxpLgoFLcguasxCaJ/SOIZwINatK9KY/tf+ZrlywOKU0UDj3ATXUBfxJXA==} + engines: {node: '>=8'} + wrappy@1.0.2: resolution: {integrity: sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ==} + ws@7.5.10: + resolution: {integrity: sha512-+dbF1tHwZpXcbOJdVOkzLDxZP1ailvSxM6ZweXTegylPny803bFhA+vqBYw4s31NSAk4S2Qz+AKXK9a4wkdjcQ==} + engines: {node: '>=8.3.0'} + peerDependencies: + bufferutil: ^4.0.1 + utf-8-validate: ^5.0.2 + peerDependenciesMeta: + bufferutil: + optional: true + utf-8-validate: + optional: true + ws@8.17.1: resolution: {integrity: sha512-6XQFvXTkbfUOZOKKILFG1PDK2NDQs4azKQl26T0YS5CxqWLgXajbPZ+h4gZekJyRqFU8pvnbAbbs/3TgRPy+GQ==} engines: {node: '>=10.0.0'} @@ -324,6 +618,10 @@ packages: utf-8-validate: optional: true + yoga-layout-prebuilt@1.10.0: + resolution: {integrity: sha512-YnOmtSbv4MTf7RGJMK0FvZ+KD8OEe/J5BNnR0GHhD8J/XcG/Qvxgszm0Un6FTHWW4uHlTgP0IztiXQnGyIR45g==} + engines: {node: '>=8'} + snapshots: '@adraffy/ens-normalize@1.10.0': {} @@ -363,6 +661,11 @@ snapshots: '@opentelemetry/api@1.9.0': {} + '@rescript/react@0.12.1(react-dom@19.1.1(react@19.1.1))(react@19.1.1)': + dependencies: + react: 19.1.1 + react-dom: 19.1.1(react@19.1.1) + '@scure/base@1.1.9': {} '@scure/bip32@1.4.0': @@ -376,14 +679,30 @@ snapshots: '@noble/hashes': 1.4.0 '@scure/base': 1.1.9 + '@types/yoga-layout@1.9.2': {} + abitype@1.0.5: {} abort-controller@3.0.0: dependencies: event-target-shim: 5.0.1 + ansi-escapes@4.3.2: + dependencies: + type-fest: 0.21.3 + + ansi-regex@5.0.1: {} + + ansi-styles@4.3.0: + dependencies: + color-convert: 2.0.1 + + astral-regex@2.0.0: {} + atomic-sleep@1.0.0: {} + auto-bind@4.0.0: {} + balanced-match@1.0.2: {} base64-js@1.5.1: {} @@ -401,14 +720,59 @@ snapshots: base64-js: 1.5.1 ieee754: 1.2.1 + cfonts@2.10.1: + dependencies: + chalk: 4.1.2 + window-size: 1.1.1 + + chalk@4.1.2: + dependencies: + ansi-styles: 4.3.0 + supports-color: 7.2.0 + + ci-info@2.0.0: {} + + cli-boxes@2.2.1: {} + + cli-cursor@3.1.0: + dependencies: + restore-cursor: 3.1.0 + + cli-spinners@2.9.2: {} + + cli-truncate@2.1.0: + dependencies: + slice-ansi: 3.0.0 + string-width: 4.2.3 + + code-excerpt@3.0.0: + dependencies: + convert-to-spaces: 1.0.2 + + color-convert@2.0.1: + dependencies: + color-name: 1.1.4 + + color-name@1.1.4: {} + colorette@2.0.20: {} + convert-to-spaces@1.0.2: {} + dateformat@4.6.3: {} + define-property@1.0.0: + dependencies: + is-descriptor: 1.0.3 + + emoji-regex@8.0.0: {} + end-of-stream@1.4.4: dependencies: once: 1.4.0 + escape-string-regexp@2.0.0: {} + event-target-shim@5.0.1: {} events@3.3.0: {} @@ -421,6 +785,8 @@ snapshots: fs.realpath@1.0.0: {} + function-bind@1.1.2: {} + glob@8.1.0: dependencies: fs.realpath: 1.0.0 @@ -429,6 +795,12 @@ snapshots: minimatch: 5.1.6 once: 1.4.0 + has-flag@4.0.0: {} + + hasown@2.0.2: + dependencies: + function-bind: 1.1.2 + help-me@4.2.0: dependencies: glob: 8.1.0 @@ -436,6 +808,8 @@ snapshots: ieee754@1.2.1: {} + indent-string@4.0.0: {} + inflight@1.0.6: dependencies: once: 1.4.0 @@ -443,24 +817,119 @@ snapshots: inherits@2.0.4: {} + ink-big-text@1.2.0(ink@3.2.0(react@19.1.1))(react@19.1.1): + dependencies: + cfonts: 2.10.1 + ink: 3.2.0(react@19.1.1) + prop-types: 15.8.1 + react: 19.1.1 + + ink-spinner@4.0.3(ink@3.2.0(react@19.1.1))(react@19.1.1): + dependencies: + cli-spinners: 2.9.2 + ink: 3.2.0(react@19.1.1) + react: 19.1.1 + + ink-use-stdout-dimensions@1.0.5(ink@3.2.0(react@19.1.1))(react@19.1.1): + dependencies: + ink: 3.2.0(react@19.1.1) + react: 19.1.1 + + 
ink@3.2.0(react@19.1.1): + dependencies: + ansi-escapes: 4.3.2 + auto-bind: 4.0.0 + chalk: 4.1.2 + cli-boxes: 2.2.1 + cli-cursor: 3.1.0 + cli-truncate: 2.1.0 + code-excerpt: 3.0.0 + indent-string: 4.0.0 + is-ci: 2.0.0 + lodash: 4.17.21 + patch-console: 1.0.0 + react: 19.1.1 + react-devtools-core: 4.28.5 + react-reconciler: 0.26.2(react@19.1.1) + scheduler: 0.20.2 + signal-exit: 3.0.7 + slice-ansi: 3.0.0 + stack-utils: 2.0.6 + string-width: 4.2.3 + type-fest: 0.12.0 + widest-line: 3.1.0 + wrap-ansi: 6.2.0 + ws: 7.5.10 + yoga-layout-prebuilt: 1.10.0 + transitivePeerDependencies: + - bufferutil + - utf-8-validate + + is-accessor-descriptor@1.0.1: + dependencies: + hasown: 2.0.2 + + is-buffer@1.1.6: {} + + is-ci@2.0.0: + dependencies: + ci-info: 2.0.0 + + is-data-descriptor@1.0.1: + dependencies: + hasown: 2.0.2 + + is-descriptor@1.0.3: + dependencies: + is-accessor-descriptor: 1.0.1 + is-data-descriptor: 1.0.1 + + is-fullwidth-code-point@3.0.0: {} + + is-number@3.0.0: + dependencies: + kind-of: 3.2.2 + isows@1.0.4(ws@8.17.1): dependencies: ws: 8.17.1 joycon@3.1.1: {} + js-tokens@4.0.0: {} + + kind-of@3.2.2: + dependencies: + is-buffer: 1.1.6 + + lodash@4.17.21: {} + + loose-envify@1.4.0: + dependencies: + js-tokens: 4.0.0 + + mimic-fn@2.1.0: {} + minimatch@5.1.6: dependencies: brace-expansion: 2.0.2 minimist@1.2.8: {} + object-assign@4.1.1: {} + on-exit-leak-free@2.1.2: {} once@1.4.0: dependencies: wrappy: 1.0.2 + onetime@5.1.2: + dependencies: + mimic-fn: 2.1.0 + + patch-console@1.0.0: {} + pino-abstract-transport@1.1.0: dependencies: readable-stream: 4.7.0 @@ -513,6 +982,12 @@ snapshots: '@opentelemetry/api': 1.9.0 tdigest: 0.1.2 + prop-types@15.8.1: + dependencies: + loose-envify: 1.4.0 + object-assign: 4.1.1 + react-is: 16.13.1 + pump@3.0.2: dependencies: end-of-stream: 1.4.4 @@ -520,6 +995,30 @@ snapshots: quick-format-unescaped@4.0.4: {} + react-devtools-core@4.28.5: + dependencies: + shell-quote: 1.8.3 + ws: 7.5.10 + transitivePeerDependencies: + - bufferutil + - utf-8-validate + + react-dom@19.1.1(react@19.1.1): + dependencies: + react: 19.1.1 + scheduler: 0.26.0 + + react-is@16.13.1: {} + + react-reconciler@0.26.2(react@19.1.1): + dependencies: + loose-envify: 1.4.0 + object-assign: 4.1.1 + react: 19.1.1 + scheduler: 0.20.2 + + react@19.1.1: {} + readable-stream@3.6.2: dependencies: inherits: 2.0.4 @@ -542,24 +1041,64 @@ snapshots: rescript@11.1.3: {} + restore-cursor@3.1.0: + dependencies: + onetime: 5.1.2 + signal-exit: 3.0.7 + safe-buffer@5.2.1: {} safe-stable-stringify@2.5.0: {} + scheduler@0.20.2: + dependencies: + loose-envify: 1.4.0 + object-assign: 4.1.1 + + scheduler@0.26.0: {} + secure-json-parse@2.7.0: {} + shell-quote@1.8.3: {} + + signal-exit@3.0.7: {} + + slice-ansi@3.0.0: + dependencies: + ansi-styles: 4.3.0 + astral-regex: 2.0.0 + is-fullwidth-code-point: 3.0.0 + sonic-boom@3.8.1: dependencies: atomic-sleep: 1.0.0 split2@4.2.0: {} + stack-utils@2.0.6: + dependencies: + escape-string-regexp: 2.0.0 + + string-width@4.2.3: + dependencies: + emoji-regex: 8.0.0 + is-fullwidth-code-point: 3.0.0 + strip-ansi: 6.0.1 + string_decoder@1.3.0: dependencies: safe-buffer: 5.2.1 + strip-ansi@6.0.1: + dependencies: + ansi-regex: 5.0.1 + strip-json-comments@3.1.1: {} + supports-color@7.2.0: + dependencies: + has-flag: 4.0.0 + tdigest@0.1.2: dependencies: bintrees: 1.0.2 @@ -568,6 +1107,10 @@ snapshots: dependencies: real-require: 0.2.0 + type-fest@0.12.0: {} + + type-fest@0.21.3: {} + util-deprecate@1.0.2: {} viem@2.21.0: @@ -591,6 +1134,27 @@ snapshots: '@noble/curves': 1.4.0 
'@noble/hashes': 1.4.0 + widest-line@3.1.0: + dependencies: + string-width: 4.2.3 + + window-size@1.1.1: + dependencies: + define-property: 1.0.0 + is-number: 3.0.0 + + wrap-ansi@6.2.0: + dependencies: + ansi-styles: 4.3.0 + string-width: 4.2.3 + strip-ansi: 6.0.1 + wrappy@1.0.2: {} + ws@7.5.10: {} + ws@8.17.1: {} + + yoga-layout-prebuilt@1.10.0: + dependencies: + '@types/yoga-layout': 1.9.2 diff --git a/codegenerator/cli/npm/envio/rescript.json b/codegenerator/cli/npm/envio/rescript.json index 8570882bb..9442d80c1 100644 --- a/codegenerator/cli/npm/envio/rescript.json +++ b/codegenerator/cli/npm/envio/rescript.json @@ -13,8 +13,14 @@ "in-source": true }, "gentypeconfig": { + "shims": { + "Js": "Js" + }, "generatedFileExtension": ".gen.ts" }, - "bs-dependencies": ["rescript-schema"], + "jsx": { + "version": 4 + }, + "bs-dependencies": ["rescript-schema", "@rescript/react"], "bsc-flags": ["-open RescriptSchema"] } diff --git a/codegenerator/cli/npm/envio/src/FetchState.res b/codegenerator/cli/npm/envio/src/FetchState.res index 8a4952dca..d5ef3b308 100644 --- a/codegenerator/cli/npm/envio/src/FetchState.res +++ b/codegenerator/cli/npm/envio/src/FetchState.res @@ -818,20 +818,27 @@ let getNextQuery = ( if ( p->checkIsFetchingPartition->not && p.latestFetchedBlock.blockNumber < maxQueryBlockNumber ) { - switch p->makePartitionQuery( - ~indexingContracts, - ~endBlock=switch blockLag { - | 0 => endBlock - | _ => - switch endBlock { - | Some(endBlock) => Some(Pervasives.min(headBlock, endBlock)) - // Force head block as an endBlock when blockLag is set - // because otherwise HyperSync might return bigger range - | None => Some(headBlock) - } - }, - ~mergeTarget, - ) { + let endBlock = switch blockLag { + | 0 => endBlock + | _ => + switch endBlock { + | Some(endBlock) => Some(Pervasives.min(headBlock, endBlock)) + // Force head block as an endBlock when blockLag is set + // because otherwise HyperSync might return bigger range + | None => Some(headBlock) + } + } + // Enforce the response range up until the target block + // Otherwise for indexers with 100+ partitions + // we might blow up the buffer size to more than 600k events + // simply because of HyperSync returning extra blocks + let endBlock = switch (endBlock, maxQueryBlockNumber < currentBlockHeight) { + | (Some(endBlock), true) => Some(Pervasives.min(maxQueryBlockNumber, endBlock)) + | (None, true) => Some(maxQueryBlockNumber) + | (_, false) => endBlock + } + + switch p->makePartitionQuery(~indexingContracts, ~endBlock, ~mergeTarget) { | Some(q) => queries->Array.push(q) | None => () } @@ -1061,10 +1068,6 @@ let make = ( Prometheus.IndexingPartitions.set(~partitionsCount=partitions->Array.length, ~chainId) Prometheus.IndexingBufferSize.set(~bufferSize=0, ~chainId) Prometheus.IndexingBufferBlockNumber.set(~blockNumber=latestFetchedBlock.blockNumber, ~chainId) - switch endBlock { - | Some(endBlock) => Prometheus.IndexingEndBlock.set(~endBlock, ~chainId) - | None => () - } { partitions, diff --git a/codegenerator/cli/npm/envio/src/Hasura.res b/codegenerator/cli/npm/envio/src/Hasura.res index d6eff6af9..909984c07 100644 --- a/codegenerator/cli/npm/envio/src/Hasura.res +++ b/codegenerator/cli/npm/envio/src/Hasura.res @@ -222,24 +222,43 @@ let trackDatabase = async ( ~endpoint, ~auth, ~pgSchema, - ~allStaticTables, - ~allEntityTables, + ~userEntities: array<Internal.entityConfig>, ~aggregateEntities, ~responseLimit, ~schema, ) => { + let trackOnlyInternalTableNames = [ + InternalTable.Chains.table.tableName, + InternalTable.EventSyncState.table.tableName, 
InternalTable.PersistedState.table.tableName, + InternalTable.EndOfBlockRangeScannedData.table.tableName, + InternalTable.DynamicContractRegistry.table.tableName, + ] + let exposedInternalTableNames = [ + InternalTable.RawEvents.table.tableName, + InternalTable.Views.metaViewName, + InternalTable.Views.chainMetadataViewName, + ] + let userTableNames = userEntities->Js.Array2.map(entity => entity.table.tableName) + Logging.info("Tracking tables in Hasura") let _ = await clearHasuraMetadata(~endpoint, ~auth) - let tableNames = - [allStaticTables, allEntityTables] - ->Belt.Array.concatMany - ->Js.Array2.map(({tableName}: Table.table) => tableName) - await trackTables(~endpoint, ~auth, ~pgSchema, ~tableNames) + await trackTables( + ~endpoint, + ~auth, + ~pgSchema, + ~tableNames=[ + exposedInternalTableNames, + trackOnlyInternalTableNames, + userTableNames, + ]->Belt.Array.concatMany, + ) let _ = - await tableNames + await [exposedInternalTableNames, userTableNames] + ->Belt.Array.concatMany ->Js.Array2.map(tableName => createSelectPermissions( ~endpoint, ~auth, ~pgSchema, ~tableName, ~responseLimit, ~aggregateEntities, ) ) ->Js.Array2.concatMany( - allEntityTables->Js.Array2.map(table => { - let {tableName} = table + userEntities->Js.Array2.map(entityConfig => { + let {tableName} = entityConfig.table [ //Set array relationships - table + entityConfig.table ->Table.getDerivedFromFields ->Js.Array2.map(derivedFromField => { //determines the actual name of the underlying relational field (if it's an entity mapping then suffixes _id for eg.) let relationalFieldName = ) }), //Set object relationships - table + entityConfig.table ->Table.getLinkedEntityFields ->Js.Array2.map(((field, linkedEntityName)) => { createEntityRelationship( diff --git a/codegenerator/cli/npm/envio/src/Internal.res b/codegenerator/cli/npm/envio/src/Internal.res index 158c0a5e5..7501c3d6a 100644 --- a/codegenerator/cli/npm/envio/src/Internal.res +++ b/codegenerator/cli/npm/envio/src/Internal.res @@ -176,13 +176,16 @@ let fuelTransferParamsSchema = S.schema(s => { }) type entity = private {id: string} -type entityConfig = { +type genericEntityConfig<'entity> = { name: string, - schema: S.t<entity>, - rowsSchema: S.t<array<entity>>, + schema: S.t<'entity>, + rowsSchema: S.t<array<'entity>>, table: Table.table, - entityHistory: EntityHistory.t<entity>, + entityHistory: EntityHistory.t<'entity>, } +type entityConfig = genericEntityConfig<entity> +external fromGenericEntityConfig: genericEntityConfig<'entity> => entityConfig = "%identity" + type enum type enumConfig<'enum> = { name: string, diff --git a/codegenerator/cli/npm/envio/src/InternalConfig.res b/codegenerator/cli/npm/envio/src/InternalConfig.res new file mode 100644 index 000000000..28e5ffb5c --- /dev/null +++ b/codegenerator/cli/npm/envio/src/InternalConfig.res @@ -0,0 +1,22 @@ +// TODO: rename the file to Config.res after finishing the migration from codegen +// And turn it into PublicConfig instead +// For internal use we should create Indexer.res with a stateful type + +type contract = { + name: string, + abi: EvmTypes.Abi.t, + addresses: array<Address.t>, + events: array<Internal.eventConfig>, + startBlock: option<int>, +} + +type chain = { + id: int, + startBlock: int, + endBlock?: int, + confirmedBlockThreshold: int, + contracts: array<contract>, + sources: array<Source.t>, +} + +type ecosystem = | @as("evm") Evm | @as("fuel") Fuel diff --git a/codegenerator/cli/npm/envio/src/Js.shim.ts b/codegenerator/cli/npm/envio/src/Js.shim.ts new file mode 100644 index 000000000..51f44c5fd --- /dev/null +++ b/codegenerator/cli/npm/envio/src/Js.shim.ts @@ -0,0 +1,11 @@ +export 
type Json_t = + | string + | boolean + | number + | null + | { [key: string]: Json_t } + | Json_t[]; + +export type t = unknown; + +export type Exn_t = Error; diff --git a/codegenerator/cli/npm/envio/src/LoadManager.res b/codegenerator/cli/npm/envio/src/LoadManager.res index 734d7faa7..17ef7be48 100644 --- a/codegenerator/cli/npm/envio/src/LoadManager.res +++ b/codegenerator/cli/npm/envio/src/LoadManager.res @@ -66,9 +66,10 @@ let schedule = async loadManager => { } }) - if inputsToLoad->Utils.Array.isEmpty->not { + let isSuccess = if inputsToLoad->Utils.Array.isEmpty->not { try { await group.load(inputsToLoad) + true } catch { | exn => { let exn = exn->Utils.prettifyExn @@ -76,16 +77,21 @@ let call = calls->Js.Dict.unsafeGet(inputKey) call.reject(exn) }) + false } } + } else { + true } if currentInputKeys->Utils.Array.isEmpty->not { - currentInputKeys->Js.Array2.forEach(inputKey => { - let call = calls->Js.Dict.unsafeGet(inputKey) - calls->Utils.Dict.deleteInPlace(inputKey) - call.resolve(group.getUnsafeInMemory(inputKey)) - }) + if isSuccess { + currentInputKeys->Js.Array2.forEach(inputKey => { + let call = calls->Js.Dict.unsafeGet(inputKey) + calls->Utils.Dict.deleteInPlace(inputKey) + call.resolve(group.getUnsafeInMemory(inputKey)) + }) + } // Clean up executed batch to reset // provided load function which diff --git a/codegenerator/cli/npm/envio/src/Persistence.res b/codegenerator/cli/npm/envio/src/Persistence.res index 6a563d6cb..d943d6e97 100644 --- a/codegenerator/cli/npm/envio/src/Persistence.res +++ b/codegenerator/cli/npm/envio/src/Persistence.res @@ -13,6 +13,12 @@ type effectCacheRecord = { mutable count: int, } +type initialState = { + cleanRun: bool, + cache: dict<effectCacheRecord>, + chains: array<InternalTable.Chains.t>, +} + type operator = [#">" | #"="] type storage = { @@ -22,10 +28,11 @@ // Should initialize the storage so we can start interacting with it // Eg create connection, schema, tables, etc. initialize: ( + ~chainConfigs: array<InternalConfig.chain>=?, ~entities: array<Internal.entityConfig>=?, - ~generalTables: array<Table.table>=?, ~enums: array<Internal.enumConfig<Internal.enum>>=?, - ) => promise<unit>, + ) => promise<initialState>, + loadInitialState: unit => promise<initialState>, @raises("StorageError") loadByIdsOrThrow: 'item. 
( ~ids: array<string>, ~table: Table.table, ~rowsSchema: S.t<array<'item>>, ) => promise<array<'item>>, @raises("StorageError") loadByFieldOrThrow: ( ~fieldName: string, ~fieldSchema: S.t<'value>, ~fieldValue: 'value, ~operator: operator, ~table: Table.table, ~rowsSchema: S.t<array<'item>>, ) => promise<array<'item>>, @raises("StorageError") setOrThrow: 'item. ( ~items: array<'item>, ~table: Table.table, ~itemSchema: S.t<'item>, ) => promise<unit>, // This is to download cache from the database to .envio/cache dumpEffectCache: unit => promise<unit>, - // This is not good, but the function does two things: - // - Gets info about existing cache tables - // - if withUpload is true, it also populates the cache from .envio/cache to the database - restoreEffectCache: (~withUpload: bool) => promise<array<effectCacheRecord>>, } exception StorageError({message: string, reason: exn}) type storageStatus = | Unknown | Initializing(promise<unit>) - | Ready({cleanRun: bool, cache: dict<effectCacheRecord>}) + | Ready(initialState) type t = { userEntities: array<Internal.entityConfig>, - staticTables: array<Table.table>, allEntities: array<Internal.entityConfig>, allEnums: array<Internal.enumConfig<Internal.enum>>, mutable storageStatus: storageStatus, @@ -86,18 +88,15 @@ let entityHistoryActionEnumConfig: Internal.enumConfig { - let allEntities = userEntities->Js.Array2.concat([dcRegistryEntityConfig]) + let allEntities = userEntities->Js.Array2.concat([InternalTable.DynamicContractRegistry.config]) let allEnums = allEnums->Js.Array2.concat([entityHistoryActionEnumConfig->Internal.fromGenericEnumConfig]) { userEntities, - staticTables, allEntities, allEnums, storageStatus: Unknown, @@ -106,17 +105,7 @@ } let init = { - let loadInitialCache = async (persistence, ~withUpload) => { - let effectCacheRecords = await persistence.storage.restoreEffectCache(~withUpload) - let cache = Js.Dict.empty() - effectCacheRecords->Js.Array2.forEach(record => { - Prometheus.EffectCacheCount.set(~count=record.count, ~effectName=record.effectName) - cache->Js.Dict.set(record.effectName, record) - }) - cache - } - - async (persistence, ~reset=false) => { + async (persistence, ~chainConfigs, ~reset=false) => { try { let shouldRun = switch persistence.storageStatus { | Unknown => true @@ -135,17 +124,14 @@ if reset || !(await persistence.storage.isInitialized()) { Logging.info(`Initializing the indexer storage...`) - await persistence.storage.initialize( + let initialState = await persistence.storage.initialize( ~entities=persistence.allEntities, - ~generalTables=persistence.staticTables, ~enums=persistence.allEnums, + ~chainConfigs, ) Logging.info(`The indexer storage is ready. Uploading cache...`) - persistence.storageStatus = Ready({ - cleanRun: true, - cache: await loadInitialCache(persistence, ~withUpload=true), - }) + persistence.storageStatus = Ready(initialState) } else if ( // In case of a race condition, // we want to set the initial status to Ready only once. @@ -155,10 +141,7 @@ } ) { Logging.info(`The indexer storage is ready.`) - persistence.storageStatus = Ready({ - cleanRun: false, - cache: await loadInitialCache(persistence, ~withUpload=false), - }) + persistence.storageStatus = Ready(await persistence.storage.loadInitialState()) } resolveRef.contents() } @@ -178,6 +161,15 @@ } } +let getInitializedState = persistence => { + switch persistence.storageStatus { + | Unknown + | Initializing(_) => + Js.Exn.raiseError(`Failed to access the initial state. 
The Persistence layer is not initialized.`) + | Ready(initialState) => initialState + } +} + let setEffectCacheOrThrow = async (persistence, ~effect: Internal.effect, ~items) => { switch persistence.storageStatus { | Unknown diff --git a/codegenerator/cli/npm/envio/src/PgStorage.res b/codegenerator/cli/npm/envio/src/PgStorage.res index 7f8a36184..6cef24482 100644 --- a/codegenerator/cli/npm/envio/src/PgStorage.res +++ b/codegenerator/cli/npm/envio/src/PgStorage.res @@ -57,11 +57,19 @@ let makeCreateTableQuery = (table: Table.table, ~pgSchema) => { let makeInitializeTransaction = ( ~pgSchema, ~pgUser, - ~generalTables=[], + ~chainConfigs=[], ~entities=[], ~enums=[], ~isEmptyPgSchema=false, ) => { + let generalTables = [ + InternalTable.EventSyncState.table, + InternalTable.Chains.table, + InternalTable.PersistedState.table, + InternalTable.EndOfBlockRangeScannedData.table, + InternalTable.RawEvents.table, + ] + let allTables = generalTables->Array.copy let allEntityTables = [] entities->Js.Array2.forEach((entity: Internal.entityConfig) => { @@ -113,7 +121,8 @@ GRANT ALL ON SCHEMA "${pgSchema}" TO public;`, // Add derived indices entities->Js.Array2.forEach((entity: Internal.entityConfig) => { - functionsQuery := functionsQuery.contents ++ "\n" ++ entity.entityHistory.createInsertFnQuery + functionsQuery := + functionsQuery.contents ++ "\n" ++ entity.entityHistory.makeInsertFnQuery(~pgSchema) entity.table ->Table.getDerivedFromFields @@ -131,6 +140,16 @@ GRANT ALL ON SCHEMA "${pgSchema}" TO public;`, }) }) + // Create views for Hasura integration + query := query.contents ++ "\n" ++ InternalTable.Views.makeMetaViewQuery(~pgSchema) + query := query.contents ++ "\n" ++ InternalTable.Views.makeChainMetadataViewQuery(~pgSchema) + + // Populate initial chain data + switch InternalTable.Chains.makeInitialValuesQuery(~pgSchema, ~chainConfigs) { + | Some(initialChainsValuesQuery) => query := query.contents ++ "\n" ++ initialChainsValuesQuery + | None => () + } + // Add cache row count function functionsQuery := functionsQuery.contents ++ @@ -162,6 +181,10 @@ let makeLoadByIdsQuery = (~pgSchema, ~tableName) => { `SELECT * FROM "${pgSchema}"."${tableName}" WHERE id = ANY($1::text[]);` } +let makeLoadAllQuery = (~pgSchema, ~tableName) => { + `SELECT * FROM "${pgSchema}"."${tableName}";` +} + let makeInsertUnnestSetQuery = (~pgSchema, ~table: Table.table, ~itemSchema, ~isRawEvents) => { let {quotedFieldNames, quotedNonPrimaryFieldNames, arrayFieldTypes} = table->Table.toSqlParams(~schema=itemSchema, ~pgSchema) @@ -234,21 +257,19 @@ VALUES${placeholders.contents}` ++ } ++ ";" } -// Should move this to a better place -// We need it for the isRawEvents check in makeTableBatchSet -// to always apply the unnest optimization. -// This is needed, because even though it has JSON fields, -// they are always guaranteed to be an object. -// FIXME what about Fuel params? -let rawEventsTableName = "raw_events" -let eventSyncStateTableName = "event_sync_state" - // Constants for chunking let maxItemsPerQuery = 500 let makeTableBatchSetQuery = (~pgSchema, ~table: Table.table, ~itemSchema: S.t<'item>) => { let {dbSchema, hasArrayField} = table->Table.toSqlParams(~schema=itemSchema, ~pgSchema) - let isRawEvents = table.tableName === rawEventsTableName + + // Should move this to a better place + // We need it for the isRawEvents check in makeTableBatchSet + // to always apply the unnest optimization. + // This is needed, because even though it has JSON fields, + // they are always guaranteed to be an object. 
+ // FIXME what about Fuel params? + let isRawEvents = table.tableName === InternalTable.RawEvents.table.tableName // Should experiment how much it'll affect performance // Although, it should be fine not to perform the validation check, @@ -401,8 +422,7 @@ let setEntityHistoryOrThrow = ( ~shouldCopyCurrentEntity=?, ~shouldRemoveInvalidUtf8=false, ) => { - rows - ->Belt.Array.map(historyRow => { + rows->Belt.Array.map(historyRow => { let row = historyRow->S.reverseConvertToJsonOrThrow(entityHistory.schema) if shouldRemoveInvalidUtf8 { [row]->removeInvalidUtf8InPlace @@ -418,10 +438,19 @@ let setEntityHistoryOrThrow = ( !containsRollbackDiffChange } }, - ) + )->Promise.catch(exn => { + let reason = exn->Utils.prettifyExn + let detail = %raw(`reason?.detail || ""`) + raise( + Persistence.StorageError({ + message: `Failed to insert history item into table "${entityHistory.table.tableName}".${detail !== "" + ? ` Details: ${detail}` + : ""}`, + reason, + }), + ) + }) }) - ->Promise.all - ->(Utils.magic: promise> => promise) } type schemaTableName = { @@ -539,12 +568,95 @@ let make = ( let isInitialized = async () => { let envioTables = await sql->Postgres.unsafe( - `SELECT table_schema FROM information_schema.tables WHERE table_schema = '${pgSchema}' AND table_name = '${eventSyncStateTableName}';`, + `SELECT table_schema FROM information_schema.tables WHERE table_schema = '${pgSchema}' AND table_name = '${InternalTable.EventSyncState.table.tableName}' OR table_name = '${InternalTable.Chains.table.tableName}';`, ) envioTables->Utils.Array.notEmpty } - let initialize = async (~entities=[], ~generalTables=[], ~enums=[]) => { + let restoreEffectCache = async (~withUpload) => { + if withUpload { + // Try to restore cache tables from binary files + let nothingToUploadErrorMessage = "Nothing to upload." + + switch await Promise.all2(( + NodeJs.Fs.Promises.readdir(cacheDirPath) + ->Promise.thenResolve(e => Ok(e)) + ->Promise.catch(_ => Promise.resolve(Error(nothingToUploadErrorMessage))), + getConnectedPsqlExec(~pgUser, ~pgHost, ~pgDatabase, ~pgPort), + )) { + | (Ok(entries), Ok(psqlExec)) => { + let cacheFiles = entries->Js.Array2.filter(entry => { + entry->Js.String2.endsWith(".tsv") + }) + + let _ = + await cacheFiles + ->Js.Array2.map(entry => { + let effectName = entry->Js.String2.slice(~from=0, ~to_=-4) // Remove .tsv extension + let table = Internal.makeCacheTable(~effectName) + + sql + ->Postgres.unsafe(makeCreateTableQuery(table, ~pgSchema)) + ->Promise.then(() => { + let inputFile = NodeJs.Path.join(cacheDirPath, entry)->NodeJs.Path.toString + + let command = `${psqlExec} -c 'COPY "${pgSchema}"."${table.tableName}" FROM STDIN WITH (FORMAT text, HEADER);' < ${inputFile}` + + Promise.make( + (resolve, reject) => { + NodeJs.ChildProcess.execWithOptions( + command, + psqlExecOptions, + (~error, ~stdout, ~stderr as _) => { + switch error { + | Value(error) => reject(error) + | Null => resolve(stdout) + } + }, + ) + }, + ) + }) + }) + ->Promise.all + + Logging.info("Successfully uploaded cache.") + } + | (Error(message), _) + | (_, Error(message)) => + if message === nothingToUploadErrorMessage { + Logging.info("No cache found to upload.") + } else { + Logging.error(`Failed to upload cache, continuing without it. 
${message}`) + } + } + } + + let cacheTableInfo: array = + await sql->Postgres.unsafe(makeSchemaCacheTableInfoQuery(~pgSchema)) + + if withUpload && cacheTableInfo->Utils.Array.notEmpty { + // Integration with other tools like Hasura + switch onNewTables { + | Some(onNewTables) => + await onNewTables( + ~tableNames=cacheTableInfo->Js.Array2.map(info => { + info.tableName + }), + ) + | None => () + } + } + + let cache = Js.Dict.empty() + cacheTableInfo->Js.Array2.forEach(({tableName, count}) => { + let effectName = tableName->Js.String2.sliceToEnd(~from=cacheTablePrefixLength) + cache->Js.Dict.set(effectName, ({effectName, count}: Persistence.effectCacheRecord)) + }) + cache + } + + let initialize = async (~chainConfigs=[], ~entities=[], ~enums=[]): Persistence.initialState => { let schemaTableNames: array<schemaTableName> = await sql->Postgres.unsafe(makeSchemaTableNamesQuery(~pgSchema)) @@ -557,7 +669,11 @@ schemaTableNames->Utils.Array.notEmpty && // Otherwise should throw if there's a table, but no envio specific one // This means that the schema is used for something else than envio. - !(schemaTableNames->Js.Array2.some(table => table.tableName === eventSyncStateTableName)) + !( + schemaTableNames->Js.Array2.some(table => + table.tableName === InternalTable.EventSyncState.table.tableName + ) + ) ) { Js.Exn.raiseError( `Cannot run Envio migrations on PostgreSQL schema "${pgSchema}" because it contains non-Envio tables. Running migrations would delete all data in this schema.\n\nTo resolve this:\n1. If you want to use this schema, first backup any important data, then drop it with: "pnpm envio local db-migrate down"\n2. Or specify a different schema name by setting the "ENVIO_PG_PUBLIC_SCHEMA" environment variable\n3. Or manually drop the schema in your database if you're certain the data is not needed.`, ) } let queries = makeInitializeTransaction( ~pgSchema, ~pgUser, - ~generalTables, ~entities, ~enums, + ~chainConfigs, ~isEmptyPgSchema=schemaTableNames->Utils.Array.isEmpty, ) // Execute all queries within a single transaction for integrity let _ = await sql->Postgres.beginSql(sql => { - queries->Js.Array2.map(query => sql->Postgres.unsafe(query)) + // Promise.all might not be safe to use here, + // but it's just how it worked before. + Promise.all(queries->Js.Array2.map(query => sql->Postgres.unsafe(query))) }) + let cache = await restoreEffectCache(~withUpload=true) + // Integration with other tools like Hasura switch onInitialize { | Some(onInitialize) => await onInitialize() | None => () } + + { + cleanRun: true, + cache, + chains: chainConfigs->Js.Array2.map(InternalTable.Chains.initialFromConfig), + } } let loadByIdsOrThrow = async (~ids, ~table: Table.table, ~rowsSchema) => { @@ -767,97 +893,31 @@ } } - let restoreEffectCache = async (~withUpload) => { - if withUpload { - // Try to restore cache tables from binary files - let nothingToUploadErrorMessage = "Nothing to upload." 
- - switch await Promise.all2(( - NodeJs.Fs.Promises.readdir(cacheDirPath) - ->Promise.thenResolve(e => Ok(e)) - ->Promise.catch(_ => Promise.resolve(Error(nothingToUploadErrorMessage))), - getConnectedPsqlExec(~pgUser, ~pgHost, ~pgDatabase, ~pgPort), - )) { - | (Ok(entries), Ok(psqlExec)) => { - let cacheFiles = entries->Js.Array2.filter(entry => { - entry->Js.String2.endsWith(".tsv") - }) - - let _ = - await cacheFiles - ->Js.Array2.map(entry => { - let effectName = entry->Js.String2.slice(~from=0, ~to_=-4) // Remove .tsv extension - let table = Internal.makeCacheTable(~effectName) - - sql - ->Postgres.unsafe(makeCreateTableQuery(table, ~pgSchema)) - ->Promise.then(() => { - let inputFile = NodeJs.Path.join(cacheDirPath, entry)->NodeJs.Path.toString - - let command = `${psqlExec} -c 'COPY "${pgSchema}"."${table.tableName}" FROM STDIN WITH (FORMAT text, HEADER);' < ${inputFile}` - - Promise.make( - (resolve, reject) => { - NodeJs.ChildProcess.execWithOptions( - command, - psqlExecOptions, - (~error, ~stdout, ~stderr as _) => { - switch error { - | Value(error) => reject(error) - | Null => resolve(stdout) - } - }, - ) - }, - ) - }) - }) - ->Promise.all - - Logging.info("Successfully uploaded cache.") - } - | (Error(message), _) - | (_, Error(message)) => - if message === nothingToUploadErrorMessage { - Logging.info("No cache found to upload.") - } else { - Logging.error(`Failed to upload cache, continuing without it. ${message}`) - } - } - } - - let cacheTableInfo: array = - await sql->Postgres.unsafe(makeSchemaCacheTableInfoQuery(~pgSchema)) + let loadInitialState = async (): Persistence.initialState => { + let (cache, chains) = await Promise.all2(( + restoreEffectCache(~withUpload=false), + sql + ->Postgres.unsafe( + makeLoadAllQuery(~pgSchema, ~tableName=InternalTable.Chains.table.tableName), + ) + ->(Utils.magic: promise> => promise>), + )) - if withUpload && cacheTableInfo->Utils.Array.notEmpty { - // Integration with other tools like Hasura - switch onNewTables { - | Some(onNewTables) => - await onNewTables( - ~tableNames=cacheTableInfo->Js.Array2.map(info => { - info.tableName - }), - ) - | None => () - } + { + cleanRun: false, + cache, + chains, } - - cacheTableInfo->Js.Array2.map((info): Persistence.effectCacheRecord => { - { - effectName: info.tableName->Js.String2.sliceToEnd(~from=cacheTablePrefixLength), - count: info.count, - } - }) } { isInitialized, initialize, + loadInitialState, loadByFieldOrThrow, loadByIdsOrThrow, setOrThrow, setEffectCacheOrThrow, dumpEffectCache, - restoreEffectCache, } } diff --git a/codegenerator/cli/npm/envio/src/Prometheus.res b/codegenerator/cli/npm/envio/src/Prometheus.res index 4f7a1d9b1..fce6a20a5 100644 --- a/codegenerator/cli/npm/envio/src/Prometheus.res +++ b/codegenerator/cli/npm/envio/src/Prometheus.res @@ -213,9 +213,9 @@ let incrementExecuteBatchDurationCounter = (~duration) => { executeBatchDurationCounter->PromClient.Counter.incMany(duration) } -let setSourceChainHeight = (~blockNumber, ~chain) => { +let setSourceChainHeight = (~blockNumber, ~chainId) => { sourceChainHeight - ->PromClient.Gauge.labels({"chainId": chain->ChainMap.Chain.toString}) + ->PromClient.Gauge.labels({"chainId": chainId}) ->PromClient.Gauge.set(blockNumber) } @@ -389,6 +389,18 @@ module IndexingEndBlock = { } } +module IndexingStartBlock = { + let gauge = SafeGauge.makeOrThrow( + ~name="envio_indexing_start_block", + ~help="The block number to start indexing at. 
(inclusive)", + ~labelSchema=chainIdLabelsSchema, + ) + + let set = (~startBlock, ~chainId) => { + gauge->SafeGauge.handleInt(~labels=chainId, ~value=startBlock) + } +} + let sourceLabelsSchema = S.schema(s => { "source": s.matches(S.string), @@ -574,7 +586,7 @@ module ProgressEventsCount = { }) let gauge = SafeGauge.makeOrThrow( - ~name="envio_progress_events_count", + ~name="envio_events_processed_count", ~help="The number of events processed and reflected in the database.", ~labelSchema=chainIdLabelsSchema, ) diff --git a/codegenerator/cli/npm/envio/src/bindings/Pino.res b/codegenerator/cli/npm/envio/src/bindings/Pino.res index c1e4b966c..ad3fa606b 100644 --- a/codegenerator/cli/npm/envio/src/bindings/Pino.res +++ b/codegenerator/cli/npm/envio/src/bindings/Pino.res @@ -46,7 +46,7 @@ let createPinoMessageWithError = (message, err): pinoMessageBlobWithError => { // serializers type below: `type serializers = {err: Js.Json.t => Js.Json.t}` Utils.magic({ "msg": message, - "err": err, + "err": err->Utils.prettifyExn, }) } diff --git a/codegenerator/cli/npm/envio/src/bindings/Postgres.res b/codegenerator/cli/npm/envio/src/bindings/Postgres.res index 4437cb122..bb143ed78 100644 --- a/codegenerator/cli/npm/envio/src/bindings/Postgres.res +++ b/codegenerator/cli/npm/envio/src/bindings/Postgres.res @@ -89,7 +89,7 @@ type poolConfig = { @module external makeSql: (~config: poolConfig) => sql = "postgres" -@send external beginSql: (sql, sql => array>) => promise = "begin" +@send external beginSql: (sql, sql => promise<'result>) => promise<'result> = "begin" // TODO: can explore this approach (https://forum.rescript-lang.org/t/rfc-support-for-tagged-template-literals/3744) // @send @variadic diff --git a/codegenerator/cli/npm/envio/src/db/EntityHistory.res b/codegenerator/cli/npm/envio/src/db/EntityHistory.res index d76d91878..52856918e 100644 --- a/codegenerator/cli/npm/envio/src/db/EntityHistory.res +++ b/codegenerator/cli/npm/envio/src/db/EntityHistory.res @@ -141,7 +141,7 @@ let makeHistoryRowSchema: S.t<'entity> => S.t> = entitySchem type t<'entity> = { table: table, - createInsertFnQuery: string, + makeInsertFnQuery: (~pgSchema: string) => string, schema: S.t>, // Used for parsing schemaRows: S.t>>, @@ -153,7 +153,7 @@ type entityInternal external castInternal: t<'entity> => t = "%identity" external eval: string => 'a = "eval" -let fromTable = (table: table, ~pgSchema, ~schema: S.t<'entity>): t<'entity> => { +let fromTable = (table: table, ~schema: S.t<'entity>): t<'entity> => { let entity_history_block_timestamp = "entity_history_block_timestamp" let entity_history_chain_id = "entity_history_chain_id" let entity_history_block_number = "entity_history_block_number" @@ -227,19 +227,6 @@ let fromTable = (table: table, ~pgSchema, ~schema: S.t<'entity>): t<'entity> => ) let insertFnName = `"insert_${table.tableName}"` - let historyRowArg = "history_row" - let historyTablePath = `"${pgSchema}"."${historyTableName}"` - let originTablePath = `"${pgSchema}"."${originTableName}"` - - let previousHistoryFieldsAreNullStr = - previousChangeFieldNames - ->Belt.Array.map(fieldName => `${historyRowArg}.${fieldName} IS NULL`) - ->Js.Array2.joinWith(" OR ") - - let currentChangeFieldNamesCommaSeparated = currentChangeFieldNames->Js.Array2.joinWith(", ") - - let dataFieldNamesDoubleQuoted = dataFieldNames->Belt.Array.map(fieldName => `"${fieldName}"`) - let dataFieldNamesCommaSeparated = dataFieldNamesDoubleQuoted->Js.Array2.joinWith(", ") let allFieldNamesDoubleQuoted = Belt.Array.concatMany([ @@ -249,7 +236,21 
@@ let fromTable = (table: table, ~pgSchema, ~schema: S.t<'entity>): t<'entity> => [actionFieldName], ])->Belt.Array.map(fieldName => `"${fieldName}"`) - let createInsertFnQuery = { + let makeInsertFnQuery = (~pgSchema) => { + let historyRowArg = "history_row" + let historyTablePath = `"${pgSchema}"."${historyTableName}"` + let originTablePath = `"${pgSchema}"."${originTableName}"` + + let previousHistoryFieldsAreNullStr = + previousChangeFieldNames + ->Belt.Array.map(fieldName => `${historyRowArg}.${fieldName} IS NULL`) + ->Js.Array2.joinWith(" OR ") + + let currentChangeFieldNamesCommaSeparated = currentChangeFieldNames->Js.Array2.joinWith(", ") + + let dataFieldNamesDoubleQuoted = dataFieldNames->Belt.Array.map(fieldName => `"${fieldName}"`) + let dataFieldNamesCommaSeparated = dataFieldNamesDoubleQuoted->Js.Array2.joinWith(", ") + `CREATE OR REPLACE FUNCTION ${insertFnName}(${historyRowArg} ${historyTablePath}, should_copy_current_entity BOOLEAN) RETURNS void AS $$ DECLARE @@ -315,7 +316,7 @@ $$ LANGUAGE plpgsql;` let schema = makeHistoryRowSchema(schema) - {table, createInsertFnQuery, schema, schemaRows: S.array(schema), insertFn} + {table, makeInsertFnQuery, schema, schemaRows: S.array(schema), insertFn} } type safeReorgBlocks = { diff --git a/codegenerator/cli/npm/envio/src/db/InternalTable.gen.ts b/codegenerator/cli/npm/envio/src/db/InternalTable.gen.ts new file mode 100644 index 000000000..dd5db2425 --- /dev/null +++ b/codegenerator/cli/npm/envio/src/db/InternalTable.gen.ts @@ -0,0 +1,43 @@ +/* TypeScript file generated from InternalTable.res by genType. */ + +/* eslint-disable */ +/* tslint:disable */ + +import type {Json_t as Js_Json_t} from '../../src/Js.shim'; + +import type {t as Address_t} from '../../src/Address.gen'; + +export type EventSyncState_t = { + readonly chain_id: number; + readonly block_number: number; + readonly log_index: number; + readonly block_timestamp: number +}; + +export type RawEvents_t = { + readonly chain_id: number; + readonly event_id: bigint; + readonly event_name: string; + readonly contract_name: string; + readonly block_number: number; + readonly log_index: number; + readonly src_address: Address_t; + readonly block_hash: string; + readonly block_timestamp: number; + readonly block_fields: Js_Json_t; + readonly transaction_fields: Js_Json_t; + readonly params: Js_Json_t +}; + +export type DynamicContractRegistry_t = { + readonly id: string; + readonly chain_id: number; + readonly registering_event_block_number: number; + readonly registering_event_log_index: number; + readonly registering_event_block_timestamp: number; + readonly registering_event_contract_name: string; + readonly registering_event_name: string; + readonly registering_event_src_address: Address_t; + readonly contract_address: Address_t; + readonly contract_name: string +}; diff --git a/codegenerator/cli/npm/envio/src/db/InternalTable.res b/codegenerator/cli/npm/envio/src/db/InternalTable.res new file mode 100644 index 000000000..632259e9e --- /dev/null +++ b/codegenerator/cli/npm/envio/src/db/InternalTable.res @@ -0,0 +1,430 @@ +open Table + +//shorthand for punning +let isPrimaryKey = true +let isNullable = true +let isIndex = true + +module EventSyncState = { + //Used unsafely in DbFunctions.res so just enforcing the naming here + let blockTimestampFieldName = "block_timestamp" + let blockNumberFieldName = "block_number" + let logIndexFieldName = "log_index" + let isPreRegisteringDynamicContractsFieldName = "is_pre_registering_dynamic_contracts" + + // @genType Used for Test 
DB + @genType + type t = { + @as("chain_id") chainId: int, + @as("block_number") blockNumber: int, + @as("log_index") logIndex: int, + @as("block_timestamp") blockTimestamp: int, + } + + let table = mkTable( + "event_sync_state", + ~fields=[ + mkField("chain_id", Integer, ~fieldSchema=S.int, ~isPrimaryKey), + mkField(blockNumberFieldName, Integer, ~fieldSchema=S.int), + mkField(logIndexFieldName, Integer, ~fieldSchema=S.int), + mkField(blockTimestampFieldName, Integer, ~fieldSchema=S.int), + // Keep it in case Hosted Service relies on it to prevent breaking changes + mkField( + isPreRegisteringDynamicContractsFieldName, + Boolean, + ~default="false", + ~fieldSchema=S.bool, + ), + ], + ) + + //We need to update values here, not delete the rows, since restarting without a row + //has a different behaviour to restarting with an initialised row with zero values + let resetCurrentCurrentSyncStateQuery = (~pgSchema) => + `UPDATE "${pgSchema}"."${table.tableName}" + SET ${blockNumberFieldName} = 0, + ${logIndexFieldName} = 0, + ${blockTimestampFieldName} = 0, + ${isPreRegisteringDynamicContractsFieldName} = false;` +} + +module Chains = { + type field = [ + | #id + | #start_block + | #end_block + | #source_block + | #first_event_block + | #buffer_block + | #ready_at + | #events_processed + | #_is_hyper_sync + | #_latest_processed_block + | #_num_batches_fetched + ] + + let fields: array = [ + #id, + #start_block, + #end_block, + #source_block, + #first_event_block, + #buffer_block, + #ready_at, + #events_processed, + #_is_hyper_sync, + #_latest_processed_block, + #_num_batches_fetched, + ] + + type t = { + @as("id") id: int, + @as("start_block") startBlock: int, + @as("end_block") endBlock: Js.null, + @as("source_block") blockHeight: int, + @as("first_event_block") firstEventBlockNumber: Js.null, + @as("buffer_block") latestFetchedBlockNumber: int, + @as("ready_at") + timestampCaughtUpToHeadOrEndblock: Js.null, + @as("events_processed") numEventsProcessed: int, + @as("_latest_processed_block") latestProcessedBlock: Js.null, + @as("_is_hyper_sync") isHyperSync: bool, + @as("_num_batches_fetched") numBatchesFetched: int, + } + + let table = mkTable( + "envio_chains", + ~fields=[ + mkField((#id: field :> string), Integer, ~fieldSchema=S.int, ~isPrimaryKey), + // Values populated from config + mkField((#start_block: field :> string), Integer, ~fieldSchema=S.int), + mkField((#end_block: field :> string), Integer, ~fieldSchema=S.null(S.int), ~isNullable), + // Block number of the latest block that was fetched from the source + mkField((#buffer_block: field :> string), Integer, ~fieldSchema=S.int), + // Block number of the currently active source + mkField((#source_block: field :> string), Integer, ~fieldSchema=S.int), + // Block number of the first event that was processed for this chain + mkField( + (#first_event_block: field :> string), + Integer, + ~fieldSchema=S.null(S.int), + ~isNullable, + ), + // Used to show how much time historical sync has taken, so we need a timezone here (TUI and Hosted Service) + // null during historical sync, set to current time when sync is complete + mkField( + (#ready_at: field :> string), + TimestampWithNullTimezone, + ~fieldSchema=S.null(Utils.Schema.dbDate), + ~isNullable, + ), + mkField((#events_processed: field :> string), Integer, ~fieldSchema=S.int), // TODO: In the future it should reference a table with sources + mkField((#_is_hyper_sync: field :> string), Boolean, ~fieldSchema=S.bool), + // TODO: Make the data more public facing + mkField(
(#_latest_processed_block: field :> string), + Integer, + ~fieldSchema=S.null(S.int), + ~isNullable, + ), + mkField((#_num_batches_fetched: field :> string), Integer, ~fieldSchema=S.int), + ], + ) + + let initialFromConfig = (chainConfig: InternalConfig.chain) => { + { + id: chainConfig.id, + startBlock: chainConfig.startBlock, + endBlock: chainConfig.endBlock->Js.Null.fromOption, + blockHeight: 0, + firstEventBlockNumber: Js.Null.empty, + latestFetchedBlockNumber: -1, + timestampCaughtUpToHeadOrEndblock: Js.Null.empty, + latestProcessedBlock: Js.Null.empty, + isHyperSync: false, + numEventsProcessed: 0, + numBatchesFetched: 0, + } + } + + let makeInitialValuesQuery = (~pgSchema, ~chainConfigs: array) => { + if chainConfigs->Array.length === 0 { + None + } else { + // Create column names list + let columnNames = fields->Belt.Array.map(field => `"${(field :> string)}"`) + + // Create VALUES rows for each chain config + let valuesRows = chainConfigs->Belt.Array.map(chainConfig => { + let initialValues = initialFromConfig(chainConfig) + let values = fields->Belt.Array.map((field: field) => { + let value = + initialValues->(Utils.magic: t => dict)->Js.Dict.get((field :> string)) + switch Js.typeof(value) { + | "object" => "NULL" + | "number" => value->Utils.magic->Belt.Int.toString + | "boolean" => value->Utils.magic ? "true" : "false" + | _ => Js.Exn.raiseError("Invalid envio_chains value type") + } + }) + + `(${values->Js.Array2.joinWith(", ")})` + }) + + Some( + `INSERT INTO "${pgSchema}"."${table.tableName}" (${columnNames->Js.Array2.joinWith(", ")}) +VALUES ${valuesRows->Js.Array2.joinWith(",\n ")};`, + ) + } + } + + // Fields that should be updated on conflict (excluding static config fields) + let updateFields: array = [ + #source_block, + #first_event_block, + #buffer_block, + #ready_at, + #events_processed, + #_is_hyper_sync, + #_latest_processed_block, + #_num_batches_fetched, + ] + + let makeSingleUpdateQuery = (~pgSchema) => { + // Generate SET clauses with parameter placeholders + let setClauses = Belt.Array.mapWithIndex(updateFields, (index, field) => { + let fieldName = (field :> string) + let paramIndex = index + 2 // +2 because $1 is for id in WHERE clause + `"${fieldName}" = $${Belt.Int.toString(paramIndex)}` + }) + + `UPDATE "${pgSchema}"."${table.tableName}" +SET ${setClauses->Js.Array2.joinWith(",\n ")} +WHERE "id" = $1;` + } + + let setValues = (sql, ~pgSchema, ~chainsData: array) => { + let query = makeSingleUpdateQuery(~pgSchema) + + let promises = chainsData->Belt.Array.map(chain => { + let params = [] + + // Push id first (for WHERE clause) + let idValue = chain->(Utils.magic: t => dict)->Js.Dict.get("id") + params->Js.Array2.push(idValue)->ignore + + // Then push all updateable field values (for SET clause) + updateFields->Js.Array2.forEach(field => { + let value = chain->(Utils.magic: t => dict)->Js.Dict.get((field :> string)) + params->Js.Array2.push(value)->ignore + }) + + sql->Postgres.preparedUnsafe(query, params->Obj.magic) + }) + + Promise.all(promises) + } +} + +module PersistedState = { + type t = { + id: int, + envio_version: string, + config_hash: string, + schema_hash: string, + handler_files_hash: string, + abi_files_hash: string, + } + + let table = mkTable( + "persisted_state", + ~fields=[ + mkField("id", Serial, ~fieldSchema=S.int, ~isPrimaryKey), + mkField("envio_version", Text, ~fieldSchema=S.string), + mkField("config_hash", Text, ~fieldSchema=S.string), + mkField("schema_hash", Text, ~fieldSchema=S.string), + mkField("handler_files_hash", Text, 
~fieldSchema=S.string), + mkField("abi_files_hash", Text, ~fieldSchema=S.string), + ], + ) +} + +module EndOfBlockRangeScannedData = { + type t = { + chain_id: int, + block_number: int, + block_hash: string, + } + + let table = mkTable( + "end_of_block_range_scanned_data", + ~fields=[ + mkField("chain_id", Integer, ~fieldSchema=S.int, ~isPrimaryKey), + mkField("block_number", Integer, ~fieldSchema=S.int, ~isPrimaryKey), + mkField("block_hash", Text, ~fieldSchema=S.string), + ], + ) +} + +module RawEvents = { + // @genType Used for Test DB and internal tests + @genType + type t = { + @as("chain_id") chainId: int, + @as("event_id") eventId: bigint, + @as("event_name") eventName: string, + @as("contract_name") contractName: string, + @as("block_number") blockNumber: int, + @as("log_index") logIndex: int, + @as("src_address") srcAddress: Address.t, + @as("block_hash") blockHash: string, + @as("block_timestamp") blockTimestamp: int, + @as("block_fields") blockFields: Js.Json.t, + @as("transaction_fields") transactionFields: Js.Json.t, + params: Js.Json.t, + } + + let schema = S.schema(s => { + chainId: s.matches(S.int), + eventId: s.matches(S.bigint), + eventName: s.matches(S.string), + contractName: s.matches(S.string), + blockNumber: s.matches(S.int), + logIndex: s.matches(S.int), + srcAddress: s.matches(Address.schema), + blockHash: s.matches(S.string), + blockTimestamp: s.matches(S.int), + blockFields: s.matches(S.json(~validate=false)), + transactionFields: s.matches(S.json(~validate=false)), + params: s.matches(S.json(~validate=false)), + }) + + let table = mkTable( + "raw_events", + ~fields=[ + mkField("chain_id", Integer, ~fieldSchema=S.int), + mkField("event_id", Numeric, ~fieldSchema=S.bigint), + mkField("event_name", Text, ~fieldSchema=S.string), + mkField("contract_name", Text, ~fieldSchema=S.string), + mkField("block_number", Integer, ~fieldSchema=S.int), + mkField("log_index", Integer, ~fieldSchema=S.int), + mkField("src_address", Text, ~fieldSchema=Address.schema), + mkField("block_hash", Text, ~fieldSchema=S.string), + mkField("block_timestamp", Integer, ~fieldSchema=S.int), + mkField("block_fields", JsonB, ~fieldSchema=S.json(~validate=false)), + mkField("transaction_fields", JsonB, ~fieldSchema=S.json(~validate=false)), + mkField("params", JsonB, ~fieldSchema=S.json(~validate=false)), + mkField( + "db_write_timestamp", + TimestampWithoutTimezone, + ~default="CURRENT_TIMESTAMP", + ~fieldSchema=S.int, + ), + mkField("serial", Serial, ~isNullable, ~isPrimaryKey, ~fieldSchema=S.null(S.int)), + ], + ) +} + +// View names for Hasura integration +module Views = { + let metaViewName = "_meta" + let chainMetadataViewName = "chain_metadata" + + let makeMetaViewQuery = (~pgSchema) => { + `CREATE VIEW "${pgSchema}"."${metaViewName}" AS + SELECT + "${(#id: Chains.field :> string)}" AS "chainId", + "${(#start_block: Chains.field :> string)}" AS "startBlock", + "${(#end_block: Chains.field :> string)}" AS "endBlock", + "${(#buffer_block: Chains.field :> string)}" AS "bufferBlock", + "${(#ready_at: Chains.field :> string)}" AS "readyAt", + "${(#first_event_block: Chains.field :> string)}" AS "firstEventBlock", + "${(#events_processed: Chains.field :> string)}" AS "eventsProcessed", + ("${(#ready_at: Chains.field :> string)}" IS NOT NULL) AS "isReady" + FROM "${pgSchema}"."${Chains.table.tableName}" + ORDER BY "${(#id: Chains.field :> string)}";` + } + + let makeChainMetadataViewQuery = (~pgSchema) => { + `CREATE VIEW "${pgSchema}"."${chainMetadataViewName}" AS + SELECT + "${(#source_block: 
Chains.field :> string)}" AS "block_height", + "${(#id: Chains.field :> string)}" AS "chain_id", + "${(#end_block: Chains.field :> string)}" AS "end_block", + "${(#first_event_block: Chains.field :> string)}" AS "first_event_block_number", + "${(#_is_hyper_sync: Chains.field :> string)}" AS "is_hyper_sync", + "${(#buffer_block: Chains.field :> string)}" AS "latest_fetched_block_number", + "${(#_latest_processed_block: Chains.field :> string)}" AS "latest_processed_block", + "${(#_num_batches_fetched: Chains.field :> string)}" AS "num_batches_fetched", + "${(#events_processed: Chains.field :> string)}" AS "num_events_processed", + "${(#start_block: Chains.field :> string)}" AS "start_block", + "${(#ready_at: Chains.field :> string)}" AS "timestamp_caught_up_to_head_or_endblock" + FROM "${pgSchema}"."${Chains.table.tableName}";` + } +} + +module DynamicContractRegistry = { + let name = "dynamic_contract_registry" + + let makeId = (~chainId, ~contractAddress) => { + chainId->Belt.Int.toString ++ "-" ++ contractAddress->Address.toString + } + + // @genType Used for Test DB + @genType + type t = { + id: string, + @as("chain_id") chainId: int, + @as("registering_event_block_number") registeringEventBlockNumber: int, + @as("registering_event_log_index") registeringEventLogIndex: int, + @as("registering_event_block_timestamp") registeringEventBlockTimestamp: int, + @as("registering_event_contract_name") registeringEventContractName: string, + @as("registering_event_name") registeringEventName: string, + @as("registering_event_src_address") registeringEventSrcAddress: Address.t, + @as("contract_address") contractAddress: Address.t, + @as("contract_name") contractName: string, + } + + let schema = S.schema(s => { + id: s.matches(S.string), + chainId: s.matches(S.int), + registeringEventBlockNumber: s.matches(S.int), + registeringEventLogIndex: s.matches(S.int), + registeringEventContractName: s.matches(S.string), + registeringEventName: s.matches(S.string), + registeringEventSrcAddress: s.matches(Address.schema), + registeringEventBlockTimestamp: s.matches(S.int), + contractAddress: s.matches(Address.schema), + contractName: s.matches(S.string), + }) + + let rowsSchema = S.array(schema) + + let table = mkTable( + name, + ~fields=[ + mkField("id", Text, ~isPrimaryKey, ~fieldSchema=S.string), + mkField("chain_id", Integer, ~fieldSchema=S.int), + mkField("registering_event_block_number", Integer, ~fieldSchema=S.int), + mkField("registering_event_log_index", Integer, ~fieldSchema=S.int), + mkField("registering_event_block_timestamp", Integer, ~fieldSchema=S.int), + mkField("registering_event_contract_name", Text, ~fieldSchema=S.string), + mkField("registering_event_name", Text, ~fieldSchema=S.string), + mkField("registering_event_src_address", Text, ~fieldSchema=Address.schema), + mkField("contract_address", Text, ~fieldSchema=Address.schema), + mkField("contract_name", Text, ~fieldSchema=S.string), + ], + ) + + let entityHistory = table->EntityHistory.fromTable(~schema) + + external castToInternal: t => Internal.entity = "%identity" + + let config = { + name, + schema, + rowsSchema, + table, + entityHistory, + }->Internal.fromGenericEntityConfig +} diff --git a/codegenerator/cli/npm/envio/src/tui/Tui.res b/codegenerator/cli/npm/envio/src/tui/Tui.res new file mode 100644 index 000000000..8bd2d0a4d --- /dev/null +++ b/codegenerator/cli/npm/envio/src/tui/Tui.res @@ -0,0 +1,196 @@ +open Ink +open Belt + +type params = { + getMetrics: unit => promise, + indexerStartTime: Js.Date.t, + envioAppUrl: string, + 
envioApiToken: option, + envioVersion: option, + ecosystem: InternalConfig.ecosystem, + hasuraUrl: string, + hasuraPassword: string, +} + +module TotalEventsProcessed = { + @react.component + let make = (~totalEventsProcessed) => { + let label = "Events Processed: " + + {label->React.string} + + {`${totalEventsProcessed->TuiData.formatLocaleString}`->React.string} + + + } +} + +module ChainLine = { + @react.component + let make = ( + ~chainId, + ~maxChainIdLenght, + ~dimensions: Hooks.dimensions, + ~progressBlock, + ~bufferBlock, + ~sourceBlock, + ~firstEventBlock, + ~startBlock, + ~endBlock, + ~poweredByHyperSync, + ) => { + switch (progressBlock, bufferBlock, sourceBlock) { + | (Some(progressBlock), Some(bufferBlock), Some(sourceBlock)) => + let toBlock = switch endBlock { + | Some(endBlock) => Pervasives.min(sourceBlock, endBlock) + | None => sourceBlock + } + let firstEventBlock = firstEventBlock->Option.getWithDefault(startBlock) + + let chainsWidth = Pervasives.min(dimensions.columns - 2, 60) + let headerWidth = maxChainIdLenght + 10 // 10 for additional text + + + + {"Chain: "->React.string} + {chainId->React.string} + {" "->React.string} + {poweredByHyperSync ? {"⚡"->React.string} : React.null} + + + + + + {"Blocks: "->React.string} + + {progressBlock->TuiData.formatLocaleString->React.string} + {" / "->React.string} + {toBlock->TuiData.formatLocaleString->React.string} + {switch endBlock { + | Some(_) => {` (End Block)`->React.string} + | None => React.null + }} + + + + + + | (_, _, _) => + + + {"Chain: "->React.string} + {chainId->React.string} + {" "->React.string} + {poweredByHyperSync ? {"⚡"->React.string} : React.null} + + {"Loading progress..."->React.string} + + } + } +} + +module App = { + @react.component + let make = (~params: params) => { + let {envioAppUrl, envioApiToken, envioVersion, ecosystem, getMetrics} = params + + let dimensions = Hooks.useStdoutDimensions() + + let (chains, setChains) = React.useState((): array => []) + let totalEventsProcessed = chains->Array.reduce(0, (acc, chain) => { + switch chain.eventsProcessed { + | Some(count) => acc + count + | None => acc + } + }) + let maxChainIdLenght = chains->Array.reduce(0, (acc, chain) => { + let chainIdLength = chain.chainId->String.length + if chainIdLength > acc { + chainIdLength + } else { + acc + } + }) + + // useEffect to fetch metrics every 500ms + React.useEffect(() => { + let intervalId = Js.Global.setInterval(() => { + getMetrics() + ->Promise.thenResolve( + metricsData => { + let parsedMetrics = TuiData.Metrics.parseMetrics(metricsData) + let chainsFromMetrics = TuiData.Metrics.parseMetricsToChains(parsedMetrics) + setChains(_ => chainsFromMetrics) + }, + ) + ->Promise.catch( + _ => { + Js.log("Error fetching TUI metrics") + Promise.resolve() + }, + ) + ->ignore + }, 500) + + Some( + () => { + Js.Global.clearInterval(intervalId) + }, + ) + }, [getMetrics]) + + + + Array.length > 5 ? 
Tiny : Block} + space=false + /> + + {chains + ->Array.mapWithIndex((i, chain) => { + Int.toString} + chainId={chain.chainId} + maxChainIdLenght={maxChainIdLenght} + progressBlock={chain.progressBlock} + bufferBlock={chain.bufferBlock} + sourceBlock={chain.sourceBlock} + startBlock={chain.startBlock} + endBlock={chain.endBlock} + dimensions + firstEventBlock=None // FIXME: + poweredByHyperSync={chain.poweredByHyperSync} + /> + }) + ->React.array} + + // + + + {"Development Console: "->React.string} + {`${envioAppUrl}/console`->React.string} + + + {"GraphQL Interface: "->React.string} + {params.hasuraUrl->React.string} + // {` (password: ${params.hasuraPassword})`->React.string} FIXME: + + + + } +} + +let start = params => { + let {rerender} = render() + params => { + rerender() + } +} diff --git a/codegenerator/cli/templates/static/codegen/src/ink/bindings/DateFns.res b/codegenerator/cli/npm/envio/src/tui/bindings/DateFns.res similarity index 100% rename from codegenerator/cli/templates/static/codegen/src/ink/bindings/DateFns.res rename to codegenerator/cli/npm/envio/src/tui/bindings/DateFns.res diff --git a/codegenerator/cli/templates/static/codegen/src/ink/bindings/Ink.res b/codegenerator/cli/npm/envio/src/tui/bindings/Ink.res similarity index 95% rename from codegenerator/cli/templates/static/codegen/src/ink/bindings/Ink.res rename to codegenerator/cli/npm/envio/src/tui/bindings/Ink.res index 6dad99675..e87524a76 100644 --- a/codegenerator/cli/templates/static/codegen/src/ink/bindings/Ink.res +++ b/codegenerator/cli/npm/envio/src/tui/bindings/Ink.res @@ -107,9 +107,8 @@ module Newline = { Adds one or more newline characters. Must be used within components. */ - @module("ink") @react.component - external make: (~count: int=?) => React.element = "Newline" + let make = () => {" "->React.string} } module Spacer = { @@ -216,6 +215,15 @@ module Hooks = { } @module("ink") external useFocusManager: unit => focusManager = "useFocusManager" + + type dimensions = { + @as("0") + columns: int, + @as("1") + rows: int, + } + // It doesn't have .default export + let useStdoutDimensions: unit => dimensions = %raw(`require("ink-use-stdout-dimensions")`) } module BigText = { @@ -353,3 +361,8 @@ module Spinner = { @module("ink-spinner") @react.component external make: (@as("type") ~type_: typeOption=?) 
=> React.element = "default" } + +module Table = { + @module("ink-table") @react.component + external make: (~head: array, ~rows: array>) => React.element = "Table" +} diff --git a/codegenerator/cli/templates/static/codegen/src/ink/bindings/Style.res b/codegenerator/cli/npm/envio/src/tui/bindings/Style.res similarity index 100% rename from codegenerator/cli/templates/static/codegen/src/ink/bindings/Style.res rename to codegenerator/cli/npm/envio/src/tui/bindings/Style.res diff --git a/codegenerator/cli/templates/static/codegen/src/ink/components/BufferedProgressBar.res b/codegenerator/cli/npm/envio/src/tui/components/BufferedProgressBar.res similarity index 99% rename from codegenerator/cli/templates/static/codegen/src/ink/components/BufferedProgressBar.res rename to codegenerator/cli/npm/envio/src/tui/components/BufferedProgressBar.res index 72aed5314..3758c9c6f 100644 --- a/codegenerator/cli/templates/static/codegen/src/ink/components/BufferedProgressBar.res +++ b/codegenerator/cli/npm/envio/src/tui/components/BufferedProgressBar.res @@ -1,5 +1,6 @@ open Ink open Belt + @react.component let make = (~loaded, ~buffered=?, ~outOf, ~barWidth=36, ~loadingColor=Style.Secondary) => { let maxCount = barWidth diff --git a/codegenerator/cli/templates/static/codegen/src/ink/components/CustomHooks.res b/codegenerator/cli/npm/envio/src/tui/components/CustomHooks.res similarity index 70% rename from codegenerator/cli/templates/static/codegen/src/ink/components/CustomHooks.res rename to codegenerator/cli/npm/envio/src/tui/components/CustomHooks.res index b6c76afde..36d40901c 100644 --- a/codegenerator/cli/templates/static/codegen/src/ink/components/CustomHooks.res +++ b/codegenerator/cli/npm/envio/src/tui/components/CustomHooks.res @@ -1,4 +1,5 @@ open Belt + module InitApi = { type ecosystem = | @as("evm") Evm | @as("fuel") Fuel type body = { @@ -17,24 +18,22 @@ module InitApi = { rpcNetworks: s.field("rpcNetworks", S.array(S.int)), }) - let makeBody = (~envioVersion, ~envioApiToken, ~config: Config.t) => { + let makeBody = (~envioVersion, ~envioApiToken, ~ecosystem, ~chains: array) => { let hyperSyncNetworks = [] let rpcNetworks = [] - config.chainMap - ->ChainMap.values - ->Array.forEach(({sources, chain}) => { - switch sources->Js.Array2.some(s => s.poweredByHyperSync) { + chains->Array.forEach(({poweredByHyperSync, chainId}) => { + switch poweredByHyperSync { | true => hyperSyncNetworks | false => rpcNetworks } - ->Js.Array2.push(chain->ChainMap.Chain.toChainId) + ->Js.Array2.push(chainId->Belt.Int.fromString->Belt.Option.getExn) ->ignore }) { envioVersion, envioApiToken, - ecosystem: (config.ecosystem :> ecosystem), + ecosystem: (ecosystem: InternalConfig.ecosystem :> ecosystem), hyperSyncNetworks, rpcNetworks, } @@ -70,8 +69,6 @@ module InitApi = { content: s.field("content", S.string), }) - let client = Rest.client(Env.envioAppUrl ++ "/api") - let route = Rest.route(() => { method: Post, path: "/hyperindex/init", @@ -79,12 +76,10 @@ module InitApi = { responses: [s => s.field("messages", S.array(messageSchema))], }) - let getMessages = async (~config) => { - let envioVersion = - PersistedState.getPersistedState()->Result.mapWithDefault(None, p => Some(p.envioVersion)) - let body = makeBody(~envioVersion, ~envioApiToken=Env.envioApiToken, ~config) + let getMessages = async (~envioAppUrl, ~envioApiToken, ~envioVersion, ~chains, ~ecosystem) => { + let body = makeBody(~envioVersion, ~envioApiToken, ~chains, ~ecosystem) - switch await route->Rest.fetch(body, ~client) { + switch await 
route->Rest.fetch(body, ~client=Rest.client(envioAppUrl ++ "/api")) { | exception exn => Error(exn->Obj.magic) | messages => Ok(messages) } @@ -93,15 +88,18 @@ module InitApi = { type request<'ok, 'err> = Data('ok) | Loading | Err('err) -let useMessages = (~config) => { +let useMessages = (~envioAppUrl, ~envioApiToken, ~envioVersion, ~chains, ~ecosystem) => { let (request, setRequest) = React.useState(_ => Loading) React.useEffect0(() => { - InitApi.getMessages(~config) + InitApi.getMessages(~envioAppUrl, ~envioApiToken, ~envioVersion, ~chains, ~ecosystem) ->Promise.thenResolve(res => switch res { | Ok(data) => setRequest(_ => Data(data)) | Error(e) => - Logging.error({"msg": "Failed to load messages from envio server", "err": e->Utils.prettifyExn}) + Logging.warn({ + "msg": "Failed to load messages from envio server", + "err": e->Utils.prettifyExn, + }) setRequest(_ => Err(e)) } ) diff --git a/codegenerator/cli/templates/static/codegen/src/ink/components/Messages.res b/codegenerator/cli/npm/envio/src/tui/components/Messages.res similarity index 81% rename from codegenerator/cli/templates/static/codegen/src/ink/components/Messages.res rename to codegenerator/cli/npm/envio/src/tui/components/Messages.res index b7df3ff25..7e4864a17 100644 --- a/codegenerator/cli/templates/static/codegen/src/ink/components/Messages.res +++ b/codegenerator/cli/npm/envio/src/tui/components/Messages.res @@ -21,8 +21,14 @@ module Notifications = { } @react.component -let make = (~config) => { - let messages = CustomHooks.useMessages(~config) +let make = (~envioAppUrl, ~envioApiToken, ~envioVersion, ~chains, ~ecosystem) => { + let messages = CustomHooks.useMessages( + ~envioAppUrl, + ~envioApiToken, + ~envioVersion, + ~chains, + ~ecosystem, + ) <> {switch messages { | Data([]) | Loading => React.null //Don't show anything while loading or no messages diff --git a/codegenerator/cli/npm/envio/src/tui/components/SyncETA.res b/codegenerator/cli/npm/envio/src/tui/components/SyncETA.res new file mode 100644 index 000000000..34a9a8dc7 --- /dev/null +++ b/codegenerator/cli/npm/envio/src/tui/components/SyncETA.res @@ -0,0 +1,200 @@ +// open Ink +// open Belt + +// // let isIndexerFullySynced = (chains: array) => { +// // chains->Array.reduce(true, (accum, current) => { +// // switch current.progress { +// // | Synced(_) => accum +// // | _ => false +// // } +// // }) +// // } + +// let getTotalRemainingBlocks = (chains: array) => { +// chains->Array.reduce(0, ( +// accum, +// {progress, currentBlockHeight, latestFetchedBlockNumber, endBlock}, +// ) => { +// let finalBlock = switch endBlock { +// | Some(endBlock) => endBlock +// | None => currentBlockHeight +// } +// switch progress { +// | Syncing({latestProcessedBlock}) +// | Synced({latestProcessedBlock}) => +// finalBlock - latestProcessedBlock + accum +// | SearchingForEvents => finalBlock - latestFetchedBlockNumber + accum +// } +// }) +// } + +// let getLatestTimeCaughtUpToHead = ( +// chains: array, +// indexerStartTime: Js.Date.t, +// ) => { +// let latesttimestampCaughtUpToHeadOrEndblockFloat = chains->Array.reduce(0.0, (accum, current) => { +// switch current.progress { +// | Synced({timestampCaughtUpToHeadOrEndblock}) => +// timestampCaughtUpToHeadOrEndblock->Js.Date.valueOf > accum +// ? 
timestampCaughtUpToHeadOrEndblock->Js.Date.valueOf +// : accum +// | Syncing(_) +// | SearchingForEvents => accum +// } +// }) + +// DateFns.formatDistanceWithOptions( +// indexerStartTime, +// latesttimestampCaughtUpToHeadOrEndblockFloat->Js.Date.fromFloat, +// {includeSeconds: true}, +// ) +// } + +// let getTotalBlocksProcessed = (chains: array) => { +// chains->Array.reduce(0, (accum, {progress, latestFetchedBlockNumber}) => { +// switch progress { +// | Syncing({latestProcessedBlock, firstEventBlockNumber}) +// | Synced({latestProcessedBlock, firstEventBlockNumber}) => +// latestProcessedBlock - firstEventBlockNumber + accum +// | SearchingForEvents => latestFetchedBlockNumber + accum +// } +// }) +// } + +// let useShouldDisplayEta = (~chains: array) => { +// let (shouldDisplayEta, setShouldDisplayEta) = React.useState(_ => false) +// React.useEffect(() => { +// //Only compute this while it is not displaying eta +// if !shouldDisplayEta { +// //Each chain should have fetched at least one batch +// let (allChainsHaveFetchedABatch, totalNumBatchesFetched) = chains->Array.reduce((true, 0), ( +// (allChainsHaveFetchedABatch, totalNumBatchesFetched), +// chain, +// ) => { +// ( +// allChainsHaveFetchedABatch && chain.numBatchesFetched >= 1, +// totalNumBatchesFetched + chain.numBatchesFetched, +// ) +// }) + +// //Min num fetched batches is num of chains + 2. All +// // Chains should have fetched at least 1 batch. (They +// // could then be blocked from fetching if they are past +// //the max queue size on first batch) +// // Only display once an additional 2 batches have been fetched to allow +// // eta to realistically stabilize +// let numChains = chains->Array.length +// let minTotalBatches = numChains + 2 +// let hasMinNumBatches = totalNumBatchesFetched >= minTotalBatches + +// let shouldDisplayEta = allChainsHaveFetchedABatch && hasMinNumBatches + +// if shouldDisplayEta { +// setShouldDisplayEta(_ => true) +// } +// } + +// None +// }, [chains]) + +// shouldDisplayEta +// } + +// let useEta = (~chains, ~indexerStartTime) => { +// let shouldDisplayEta = useShouldDisplayEta(~chains) +// let (secondsToSub, setSecondsToSub) = React.useState(_ => 0.) +// let (timeSinceStart, setTimeSinceStart) = React.useState(_ => 0.) + +// React.useEffect2(() => { +// setTimeSinceStart(_ => Js.Date.now() -. indexerStartTime->Js.Date.valueOf) +// setSecondsToSub(_ => 0.) + +// let intervalId = Js.Global.setInterval(() => { +// setSecondsToSub(prev => prev +. 1.) +// }, 1000) + +// Some(() => Js.Global.clearInterval(intervalId)) +// }, (chains, indexerStartTime)) + +// //blocksProcessed/remainingBlocks = timeSoFar/eta +// //eta = (timeSoFar/blocksProcessed) * remainingBlocks + +// let blocksProcessed = getTotalBlocksProcessed(chains)->Int.toFloat +// if shouldDisplayEta && blocksProcessed > 0. { +// let nowDate = Js.Date.now() +// let remainingBlocks = getTotalRemainingBlocks(chains)->Int.toFloat +// let etaFloat = timeSinceStart /. blocksProcessed *. remainingBlocks +// let millisToSub = secondsToSub *. 1000. +// let etaFloat = Pervasives.max(etaFloat -. millisToSub, 0.0) //template this +// let eta = (etaFloat +.
nowDate)->Js.Date.fromFloat +// let interval: DateFns.interval = {start: nowDate->Js.Date.fromFloat, end: eta} +// let duration = DateFns.intervalToDuration(interval) +// let formattedDuration = DateFns.formatDuration( +// duration, +// {format: ["hours", "minutes", "seconds"]}, +// ) +// let outputString = switch formattedDuration { +// | "" => "less than 1 second" +// | formattedDuration => formattedDuration +// } +// Some(outputString) +// } else { +// None +// } +// } + +// module Syncing = { +// @react.component +// let make = (~etaStr) => { +// +// {"Sync Time ETA: "->React.string} +// {etaStr->React.string} +// {" ("->React.string} +// +// +// +// {" in progress"->React.string} +// {")"->React.string} +// +// } +// } + +// module Synced = { +// @react.component +// let make = (~latestTimeCaughtUpToHeadStr) => { +// +// {"Time Synced: "->React.string} +// {`${latestTimeCaughtUpToHeadStr}`->React.string} +// {" ("->React.string} +// {"synced"->React.string} +// {")"->React.string} +// +// } +// } + +// module Calculating = { +// @react.component +// let make = () => { +// +// +// +// +// {" Calculating ETA..."->React.string} +// +// } +// } + +// @react.component +// let make = (~chains, ~indexerStartTime) => { +// let optEta = useEta(~chains, ~indexerStartTime) +// if isIndexerFullySynced(chains) { +// let latestTimeCaughtUpToHeadStr = getLatestTimeCaughtUpToHead(chains, indexerStartTime) +// //TODO add real time +// } else { +// switch optEta { +// | Some(etaStr) => +// | None => +// } +// } +// } + diff --git a/codegenerator/cli/npm/envio/src/tui/components/TuiData.res b/codegenerator/cli/npm/envio/src/tui/components/TuiData.res new file mode 100644 index 000000000..527b15962 --- /dev/null +++ b/codegenerator/cli/npm/envio/src/tui/components/TuiData.res @@ -0,0 +1,184 @@ +type chain = { + chainId: string, + mutable eventsProcessed: option, + mutable progressBlock: option, + mutable bufferBlock: option, + mutable sourceBlock: option, + mutable poweredByHyperSync: bool, + mutable startBlock: int, + mutable endBlock: option, +} + +module Metrics = { + type metric = { + name: string, + value: string, + labels: option>, + } + + type parsing = Comment | Name | LabelKey | LabelValue | Value + + let select = Utils.Set.fromArray([ + "envio_events_processed_count", + "envio_progress_block_number", + "envio_indexing_start_block", + "envio_indexing_end_block", + "envio_indexing_buffer_block_number", + "envio_source_height", + ]) + + // Parses prometheus-style metrics data into an array of metric objects + let parseMetrics = data => { + let metrics = [] + + // Track current metric being built + let currentName = ref("") + let currentValue = ref("") + let currentLabels = ref(None) + let parsing = ref(Name) + let currentLabelKey = ref("") + let currentLabelValue = ref("") + + // Parse character by character + let idx = ref(0) + let lastIdx = data->String.length - 1 + + while idx.contents <= lastIdx { + let char = data->Js.String2.charAt(idx.contents) + idx := idx.contents + 1 + + // On newline, push current metric if valid and reset state + if char === "\n" { + if currentName.contents !== "" && parsing.contents !== Comment { + metrics + ->Js.Array2.push({ + name: currentName.contents, + value: currentValue.contents, + labels: currentLabels.contents, + }) + ->ignore + } + currentName := "" + currentValue := "" + currentLabels := None + parsing := Name + } else { + switch parsing.contents { + | Comment => () // Skip comments until new line + | Name => + // Handle start of comment, value, labels or 
continue building name + if char === "#" && currentName.contents === "" { + parsing := Comment + } else if char === " " { + if select->Utils.Set.has(currentName.contents) { + parsing := Value + } else { + parsing := Comment + } + } else if char === "{" { + if select->Utils.Set.has(currentName.contents) { + parsing := LabelKey + } else { + parsing := Comment + } + } else { + currentName := currentName.contents ++ char + } + | LabelKey => + // Build label key until = is found + if char === "=" { + parsing := LabelValue + } else { + currentLabelKey := currentLabelKey.contents ++ char + } + | LabelValue => + // Build label value until } or , is found + if char === "}" || char === "," { + let labelsDict = switch currentLabels.contents { + | Some(labels) => labels + | None => { + let d = Js.Dict.empty() + currentLabels := Some(d) + d + } + } + labelsDict->Js.Dict.set(currentLabelKey.contents, currentLabelValue.contents) + currentLabelKey := "" + currentLabelValue := "" + + if char === "}" { + parsing := Name + } else { + parsing := LabelKey + } + } else if char !== "\"" { + currentLabelValue := currentLabelValue.contents ++ char + } + | Value => currentValue := currentValue.contents ++ char + } + } + } + + metrics + } + + let parseMetricsToChains = (metrics: array): array => { + // Group metrics by chainId + let chainsMap = Js.Dict.empty() + + metrics->Js.Array2.forEach(metric => { + let labels = metric.labels->Belt.Option.getWithDefault(Js.Dict.empty()) + let chainId = labels->Js.Dict.get("chainId")->Belt.Option.getWithDefault("unknown") + let value = metric.value->Belt.Int.fromString->Belt.Option.getWithDefault(0) + + // Get or create chain entry + let chain = switch chainsMap->Js.Dict.get(chainId) { + | Some(existingChain) => existingChain + | None => { + let newChain = { + chainId, + eventsProcessed: None, + progressBlock: None, + bufferBlock: None, + sourceBlock: None, + poweredByHyperSync: false, + startBlock: 0, + endBlock: None, + } + chainsMap->Js.Dict.set(chainId, newChain) + newChain + } + } + + // Update the appropriate field based on metric name + switch metric.name { + | "envio_events_processed_count" => chain.eventsProcessed = Some(value) + | "envio_progress_block_number" => chain.progressBlock = Some(value) + | "envio_indexing_buffer_block_number" => chain.bufferBlock = Some(value) + | "envio_indexing_start_block" => chain.startBlock = value + | "envio_indexing_end_block" => chain.endBlock = Some(value) + | "envio_source_height" => + if ( + switch chain.sourceBlock { + | Some(existingValue) => existingValue < value + | None => true + } + ) { + chain.sourceBlock = Some(value) + if labels->Js.Dict.get("source") === Some("HyperSync") { + chain.poweredByHyperSync = true + } + } + | _ => () + } + }) + + // Convert map values to array + chainsMap->Js.Dict.values + } +} + +type number +@val external number: int => number = "Number" +@send external toLocaleString: number => string = "toLocaleString" +let formatLocaleString = n => n->number->toLocaleString diff --git a/codegenerator/cli/npm/envio/src/vendored/Rest.res b/codegenerator/cli/npm/envio/src/vendored/Rest.res index 7d73a7ec2..225ad3ef4 100644 --- a/codegenerator/cli/npm/envio/src/vendored/Rest.res +++ b/codegenerator/cli/npm/envio/src/vendored/Rest.res @@ -44,8 +44,14 @@ module Dict = { } } -@inline -let panic = message => Exn.raiseError(Exn.makeError(`[rescript-rest] ${message}`)) +let panic = (message, ~params: option<{..}>=?) 
=> { + let error = Exn.makeError(`[rescript-rest] ${message}`) + switch params { + | Some(params) => (error->Obj.magic)["params"] = params + | None => () + } + Exn.raiseError(error) +} @val external encodeURIComponent: string => string = "encodeURIComponent" @@ -808,6 +814,9 @@ let fetch = (type input response, route: route, input, ~client= | S.Raised(error) => panic( `Failed parsing response at ${error.path->S.Path.toString}. Reason: ${error->S.Error.reason}`, + ~params={ + "response": fetcherResponse, + }, ) } } diff --git a/codegenerator/cli/templates/dynamic/codegen/package.json.hbs b/codegenerator/cli/templates/dynamic/codegen/package.json.hbs index 2f434322d..9c8282f5d 100644 --- a/codegenerator/cli/templates/dynamic/codegen/package.json.hbs +++ b/codegenerator/cli/templates/dynamic/codegen/package.json.hbs @@ -40,15 +40,11 @@ "ts-node": "10.9.1", {{/if}} "@elastic/ecs-pino-format": "1.4.0", - "@rescript/react": "0.12.1", "bignumber.js": "9.1.2", "date-fns": "3.3.1", "dotenv": "16.4.5", "ethers": "6.8.0", "express": "4.19.2", - "ink": "3.2.0", - "ink-big-text": "1.2.0", - "ink-spinner": "4.0.3", "js-sdsl": "4.4.2", "pino": "8.16.1", "postgres": "3.4.1", diff --git a/codegenerator/cli/templates/dynamic/codegen/src/RegisterHandlers.res.hbs b/codegenerator/cli/templates/dynamic/codegen/src/RegisterHandlers.res.hbs index 329bf93c3..80d3dfffa 100644 --- a/codegenerator/cli/templates/dynamic/codegen/src/RegisterHandlers.res.hbs +++ b/codegenerator/cli/templates/dynamic/codegen/src/RegisterHandlers.res.hbs @@ -30,7 +30,7 @@ let registerContractHandlers = ( let contracts = [ {{#each chain_config.codegen_contracts as | contract |}} { - Config.name: "{{contract.name.capitalized}}", + InternalConfig.name: "{{contract.name.capitalized}}", abi: Types.{{contract.name.capitalized}}.abi, addresses: [ {{#each contract.addresses as | address |}} @@ -52,10 +52,12 @@ let registerContractHandlers = ( ] let chain = ChainMap.Chain.makeUnsafe(~chainId={{chain_config.network_config.id}}) { - Config.confirmedBlockThreshold: {{chain_config.network_config.confirmed_block_threshold}}, + InternalConfig.confirmedBlockThreshold: {{chain_config.network_config.confirmed_block_threshold}}, startBlock: {{chain_config.network_config.start_block}}, - endBlock: {{#if chain_config.network_config.end_block}}Some({{chain_config.network_config.end_block}}){{else}}None{{/if}}, - chain, + {{#if chain_config.network_config.end_block}} + endBlock: {{chain_config.network_config.end_block}}, + {{/if}} + id: {{chain_config.network_config.id}}, contracts, sources: {{chain_config.sources_code}} } @@ -98,3 +100,5 @@ let getConfig = () => { | None => registerAllHandlers() } } + +let getConfigWithoutRegisteringHandlers = makeGeneratedConfig diff --git a/codegenerator/cli/templates/dynamic/codegen/src/TestHelpers.res.hbs b/codegenerator/cli/templates/dynamic/codegen/src/TestHelpers.res.hbs index 26625e496..34e17f5cc 100644 --- a/codegenerator/cli/templates/dynamic/codegen/src/TestHelpers.res.hbs +++ b/codegenerator/cli/templates/dynamic/codegen/src/TestHelpers.res.hbs @@ -129,7 +129,7 @@ module EventFunctions = { | Some(chainId) => chainId | None => switch config.defaultChain { - | Some(chainConfig) => chainConfig.chain->ChainMap.Chain.toChainId + | Some(chainConfig) => chainConfig.id | None => Js.Exn.raiseError( "No default chain Id found, please add at least 1 chain to your config.yaml", diff --git a/codegenerator/cli/templates/dynamic/codegen/src/TestHelpers_MockDb.res.hbs 
b/codegenerator/cli/templates/dynamic/codegen/src/TestHelpers_MockDb.res.hbs index 777df31f0..b87b1e4ef 100644 --- a/codegenerator/cli/templates/dynamic/codegen/src/TestHelpers_MockDb.res.hbs +++ b/codegenerator/cli/templates/dynamic/codegen/src/TestHelpers_MockDb.res.hbs @@ -54,9 +54,9 @@ type inMemoryStore = InMemoryStore.t type rec t = { __dbInternal__: inMemoryStore, entities: entities, - rawEvents: storeOperations, - eventSyncState: storeOperations, - dynamicContractRegistry: entityStoreOperations, + rawEvents: storeOperations, + eventSyncState: storeOperations, + dynamicContractRegistry: entityStoreOperations, processEvents: array> => promise, } @@ -271,16 +271,16 @@ let rec makeWithInMemoryStore: InMemoryStore.t => t = (inMemoryStore: InMemorySt ~getStore=db => db ->InMemoryStore.getInMemTable( - ~entityConfig=module(TablesStatic.DynamicContractRegistry)->Entities.entityModToInternal, + ~entityConfig=module(InternalTable.DynamicContractRegistry)->Entities.entityModToInternal, ) ->( Utils.magic: InMemoryTable.Entity.t => InMemoryTable.Entity.t< - TablesStatic.DynamicContractRegistry.t, + InternalTable.DynamicContractRegistry.t, > ), ~makeMockDb=makeWithInMemoryStore, ~getKey=({chainId, contractAddress}) => { - TablesStatic.DynamicContractRegistry.makeId(~chainId, ~contractAddress) + InternalTable.DynamicContractRegistry.makeId(~chainId, ~contractAddress) }, ) @@ -377,7 +377,7 @@ and makeProcessEvents = (mockDb: t, ~chainId=?) => async ( let persistence = { ...config.persistence, storage: makeMockStorage(mockDb), - storageStatus: Ready({cleanRun: false, cache: Js.Dict.empty()}), + storageStatus: Ready({cleanRun: false, cache: Js.Dict.empty(), chains: []}), } let config = { ...config, @@ -428,8 +428,9 @@ and makeProcessEvents = (mockDb: t, ~chainId=?) => async ( and makeMockStorage = (mockDb: t): Persistence.storage => { { isInitialized: () => Js.Exn.raiseError("Not used yet"), - initialize: (~entities as _=?, ~generalTables as _=?, ~enums as _=?) => + initialize: (~chainConfigs as _=?, ~entities as _=?, ~enums as _=?) 
=> Js.Exn.raiseError("Not used yet"), + loadInitialState: () => Js.Exn.raiseError("Not used yet"), loadByIdsOrThrow: ( type item, ~ids, @@ -473,7 +474,6 @@ and makeMockStorage = (mockDb: t): Persistence.storage => { setOrThrow: (~items as _, ~table as _, ~itemSchema as _) => Js.Exn.raiseError("Not used yet"), setEffectCacheOrThrow: (~effect as _, ~items as _, ~initialize as _) => Promise.resolve(), dumpEffectCache: () => Js.Exn.raiseError("Not used yet"), - restoreEffectCache: (~withUpload as _) => Js.Exn.raiseError("Not used yet"), } } and /** diff --git a/codegenerator/cli/templates/dynamic/codegen/src/db/Entities.res.hbs b/codegenerator/cli/templates/dynamic/codegen/src/db/Entities.res.hbs index 463e04da0..ab98ef563 100644 --- a/codegenerator/cli/templates/dynamic/codegen/src/db/Entities.res.hbs +++ b/codegenerator/cli/templates/dynamic/codegen/src/db/Entities.res.hbs @@ -5,7 +5,7 @@ type id = string type internalEntity = Internal.entity module type Entity = { type t - let name: Enums.EntityType.t + let name: string let schema: S.t let rowsSchema: S.t> let table: Table.table @@ -42,7 +42,7 @@ type whereOperations<'entity, 'fieldType> = { {{#each entities as |entity|}} module {{entity.name.capitalized}} = { - let name = {{entity.name.capitalized}} + let name = ({{entity.name.capitalized}} :> string) @genType type t = { {{#each entity.params as | param |}} @@ -104,7 +104,7 @@ module {{entity.name.capitalized}} = { {{/if}} ) - let entityHistory = table->EntityHistory.fromTable(~pgSchema=Env.Db.publicSchema, ~schema) + let entityHistory = table->EntityHistory.fromTable(~schema) external castToInternal: t => Internal.entity = "%identity" } @@ -118,7 +118,7 @@ let userEntities = [ let allEntities = userEntities->Js.Array2.concat( - [module(TablesStatic.DynamicContractRegistry)]->entityModsToInternal, + [module(InternalTable.DynamicContractRegistry)]->entityModsToInternal, ) let byName = diff --git a/codegenerator/cli/templates/dynamic/init_templates/shared/package.json.hbs b/codegenerator/cli/templates/dynamic/init_templates/shared/package.json.hbs index c08fcfd45..1d74676a4 100644 --- a/codegenerator/cli/templates/dynamic/init_templates/shared/package.json.hbs +++ b/codegenerator/cli/templates/dynamic/init_templates/shared/package.json.hbs @@ -21,7 +21,6 @@ }, "devDependencies": { {{#if is_rescript}} - "@rescript/react": "0.12.1", "rescript": "11.1.3", {{/if}} {{#if is_typescript}} diff --git a/codegenerator/cli/templates/static/codegen/rescript.json b/codegenerator/cli/templates/static/codegen/rescript.json index e99f4ead7..2e5a0c638 100644 --- a/codegenerator/cli/templates/static/codegen/rescript.json +++ b/codegenerator/cli/templates/static/codegen/rescript.json @@ -25,11 +25,6 @@ "version": 4 }, "suffix": ".res.js", - "bs-dependencies": [ - "rescript-envsafe", - "rescript-schema", - "@rescript/react", - "envio" - ], + "bs-dependencies": ["rescript-envsafe", "rescript-schema", "envio"], "bsc-flags": ["-open RescriptSchema"] } diff --git a/codegenerator/cli/templates/static/codegen/src/Config.res b/codegenerator/cli/templates/static/codegen/src/Config.res index faebe1923..ca6810ddb 100644 --- a/codegenerator/cli/templates/static/codegen/src/Config.res +++ b/codegenerator/cli/templates/static/codegen/src/Config.res @@ -1,15 +1,5 @@ open Belt -type ecosystem = | @as("evm") Evm | @as("fuel") Fuel - -type contract = { - name: string, - abi: Ethers.abi, - addresses: array, - events: array, - startBlock: option, -} - type syncConfigOptions = { initialBlockInterval?: int, backoffMultiplicative?: 
float, @@ -30,15 +20,6 @@ type syncConfig = { fallbackStallTimeout: int, } -type chainConfig = { - startBlock: int, - endBlock: option, - confirmedBlockThreshold: int, - chain: ChainMap.Chain.t, - contracts: array, - sources: array, -} - type historyFlag = FullHistory | MinHistory type rollbackFlag = RollbackOnReorg | NoRollback type historyConfig = {rollbackFlag: rollbackFlag, historyFlag: historyFlag} @@ -82,10 +63,6 @@ let getSyncConfig = ( let storagePgSchema = Env.Db.publicSchema let codegenPersistence = Persistence.make( ~userEntities=Entities.userEntities, - ~staticTables=Db.allStaticTables, - ~dcRegistryEntityConfig=module( - TablesStatic.DynamicContractRegistry - )->Entities.entityModToInternal, ~allEnums=Enums.allEnums, ~storage=PgStorage.make( ~sql=Db.sql, @@ -106,8 +83,7 @@ let codegenPersistence = Persistence.make( secret: Env.Hasura.secret, }, ~pgSchema=storagePgSchema, - ~allStaticTables=Db.allStaticTables, - ~allEntityTables=Db.allEntityTables, + ~userEntities=Entities.userEntities, ~responseLimit=Env.Hasura.responseLimit, ~schema=Db.schema, ~aggregateEntities=Env.Hasura.aggregateEntities, @@ -140,7 +116,7 @@ let codegenPersistence = Persistence.make( err->Utils.prettifyExn, `EE804: Error tracking new tables`, )->Promise.resolve - }) + }) }, ) } else { @@ -153,27 +129,28 @@ let codegenPersistence = Persistence.make( type t = { historyConfig: historyConfig, isUnorderedMultichainMode: bool, - chainMap: ChainMap.t, - defaultChain: option, - ecosystem: ecosystem, + chainMap: ChainMap.t, + defaultChain: option, + ecosystem: InternalConfig.ecosystem, enableRawEvents: bool, persistence: Persistence.t, addContractNameToContractNameMapping: dict, + maxAddrInPartition: int, } let make = ( ~shouldRollbackOnReorg=true, ~shouldSaveFullHistory=false, ~isUnorderedMultichainMode=false, - ~chains=[], + ~chains: array=[], ~enableRawEvents=false, ~persistence=codegenPersistence, - ~ecosystem=Evm, + ~ecosystem=InternalConfig.Evm, ) => { let chainMap = chains ->Js.Array2.map(n => { - (n.chain, n) + (ChainMap.Chain.makeUnsafe(~chainId=n.id), n) }) ->ChainMap.fromArrayUnsafe @@ -202,6 +179,7 @@ let make = ( persistence, ecosystem, addContractNameToContractNameMapping, + maxAddrInPartition: Env.maxAddrInPartition, } } diff --git a/codegenerator/cli/templates/static/codegen/src/EventProcessing.res b/codegenerator/cli/templates/static/codegen/src/EventProcessing.res index 99578e276..8080ee7d3 100644 --- a/codegenerator/cli/templates/static/codegen/src/EventProcessing.res +++ b/codegenerator/cli/templates/static/codegen/src/EventProcessing.res @@ -72,7 +72,7 @@ let addEventToRawEvents = (eventItem: Internal.eventItem, ~inMemoryStore: InMemo params } - let rawEvent: TablesStatic.RawEvents.t = { + let rawEvent: InternalTable.RawEvents.t = { chainId, eventId, eventName: eventConfig.name, diff --git a/codegenerator/cli/templates/static/codegen/src/IO.res b/codegenerator/cli/templates/static/codegen/src/IO.res index 2f80f34d2..5322145c2 100644 --- a/codegenerator/cli/templates/static/codegen/src/IO.res +++ b/codegenerator/cli/templates/static/codegen/src/IO.res @@ -68,8 +68,8 @@ let executeBatch = async ( ~dbFunction=(sql, items) => { sql->PgStorage.setOrThrow( ~items, - ~table=TablesStatic.RawEvents.table, - ~itemSchema=TablesStatic.RawEvents.schema, + ~table=InternalTable.RawEvents.table, + ~itemSchema=InternalTable.RawEvents.schema, ~pgSchema=Config.storagePgSchema, ) }, @@ -136,13 +136,15 @@ let executeBatch = async ( sql => { let promises = [] if entityHistoryItemsToSet->Utils.Array.notEmpty { - 
promises->Array.push( + promises + ->Js.Array2.pushMany( sql->PgStorage.setEntityHistoryOrThrow( ~entityHistory=entityConfig.entityHistory, ~rows=entityHistoryItemsToSet, ~shouldRemoveInvalidUtf8, ), ) + ->ignore } if entitiesToSet->Utils.Array.notEmpty { if shouldRemoveInvalidUtf8 { @@ -211,30 +213,36 @@ let executeBatch = async ( //valid event identifier, where all rows created after this eventIdentifier should //be deleted let rollbackTables = switch inMemoryStore.rollBackEventIdentifier { - | Some(eventIdentifier) => [ - DbFunctions.EntityHistory.deleteAllEntityHistoryAfterEventIdentifier( - _, - ~isUnorderedMultichainMode=config.isUnorderedMultichainMode, - ~eventIdentifier, - ), - DbFunctions.EndOfBlockRangeScannedData.rollbackEndOfBlockRangeScannedDataForChain( - _, - ~chainId=eventIdentifier.chainId, - ~knownBlockNumber=eventIdentifier.blockNumber, - ), - ] - | None => [] + | Some(eventIdentifier) => + Some( + sql => + Promise.all2(( + sql->DbFunctions.EntityHistory.deleteAllEntityHistoryAfterEventIdentifier( + ~isUnorderedMultichainMode=config.isUnorderedMultichainMode, + ~eventIdentifier, + ), + sql->DbFunctions.EndOfBlockRangeScannedData.rollbackEndOfBlockRangeScannedDataForChain( + ~chainId=eventIdentifier.chainId, + ~knownBlockNumber=eventIdentifier.blockNumber, + ), + )), + ) + | None => None } try { let _ = await Promise.all2(( - sql->Postgres.beginSql(sql => { - Belt.Array.concatMany([ - //Rollback tables need to happen first in the traction - rollbackTables, - [setEventSyncState, setRawEvents], - setEntities, - ])->Belt.Array.map(dbFunc => sql->dbFunc) + sql->Postgres.beginSql(async sql => { + //Rollback tables need to happen first in the transaction + switch rollbackTables { + | Some(rollbackTables) => + let _ = await rollbackTables(sql) + | None => () + } + + await Belt.Array.concatMany([[setEventSyncState, setRawEvents], setEntities]) + ->Belt.Array.map(dbFunc => sql->dbFunc) + ->Promise.all }), // Since effect cache currently doesn't support rollback, // we can run it outside of the transaction for simplicity.
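The executeBatch change above tightens transaction ordering: previously the reorg-rollback queries were concatenated into the same array as the batch writes and raced under a single Promise.all, whereas now the rollback pair is awaited to completion inside the transaction before the event-sync-state, raw-events, and entity writes start in parallel. This is also why the beginSql binding in Postgres.res was retyped to take a `sql => promise<'result>` callback: the callback now sequences its own work instead of handing back a list of queries. A minimal sketch of the same ordering with postgres.js (the library the Postgres.res bindings wrap); `DbOp`, `rollback`, and `writes` are illustrative placeholders, not the generated code:

```typescript
import postgres from "postgres";

// Hypothetical stand-in for the generated write/rollback functions;
// each one receives the transaction-scoped `sql` handle.
type DbOp = (sql: postgres.Sql) => Promise<unknown>;

async function executeBatch(
  sql: postgres.Sql,
  rollback: DbOp | null, // present only when a reorg rollback is pending
  writes: DbOp[], // setEventSyncState, setRawEvents, entity batches...
): Promise<void> {
  await sql.begin(async (sql) => {
    // The rollback deletions must finish before any new rows are written,
    // so they are awaited first instead of being raced with the writes.
    if (rollback) {
      await rollback(sql);
    }
    // The remaining writes are independent, so they run concurrently,
    // while still committing (or aborting) atomically with the rollback.
    await Promise.all(writes.map((write) => write(sql)));
  });
}
```

Awaiting the rollback first removes the ambiguity the old array-concat version allowed, where deletes and inserts touching the same rows could interleave within the transaction.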
diff --git a/codegenerator/cli/templates/static/codegen/src/InMemoryStore.res b/codegenerator/cli/templates/static/codegen/src/InMemoryStore.res index 775606abc..8c709daf1 100644 --- a/codegenerator/cli/templates/static/codegen/src/InMemoryStore.res +++ b/codegenerator/cli/templates/static/codegen/src/InMemoryStore.res @@ -49,8 +49,8 @@ type effectCacheInMemTable = { } type t = { - eventSyncState: InMemoryTable.t, - rawEvents: InMemoryTable.t, + eventSyncState: InMemoryTable.t, + rawEvents: InMemoryTable.t, entities: dict>, effects: dict, rollBackEventIdentifier: option, @@ -110,7 +110,7 @@ let setDcsToStore = ( ) => { let inMemTable = inMemoryStore->getInMemTable( - ~entityConfig=module(TablesStatic.DynamicContractRegistry)->Entities.entityModToInternal, + ~entityConfig=module(InternalTable.DynamicContractRegistry)->Entities.entityModToInternal, ) dcsToStoreByChainId->Utils.Dict.forEachWithKey((chainId, dcs) => { let chainId = chainId->Belt.Int.fromString->Belt.Option.getExn @@ -119,11 +119,11 @@ let setDcsToStore = ( | Config => Js.Exn.raiseError("Config contract should not be in dcsToStore") | DC(data) => data } - let entity: TablesStatic.DynamicContractRegistry.t = { - id: TablesStatic.DynamicContractRegistry.makeId(~chainId, ~contractAddress=dc.address), + let entity: InternalTable.DynamicContractRegistry.t = { + id: InternalTable.DynamicContractRegistry.makeId(~chainId, ~contractAddress=dc.address), chainId, contractAddress: dc.address, - contractType: dc.contractName->(Utils.magic: string => Enums.ContractType.t), + contractName: dc.contractName, registeringEventBlockNumber: dc.startBlock, registeringEventBlockTimestamp: dcData.registeringEventBlockTimestamp, registeringEventLogIndex: dcData.registeringEventLogIndex, @@ -139,7 +139,7 @@ let setDcsToStore = ( logIndex: dcData.registeringEventLogIndex, } inMemTable->InMemoryTable.Entity.set( - Set(entity->TablesStatic.DynamicContractRegistry.castToInternal)->Types.mkEntityUpdate( + Set(entity->InternalTable.DynamicContractRegistry.castToInternal)->Types.mkEntityUpdate( ~eventIdentifier, ~entityId=entity.id, ), diff --git a/codegenerator/cli/templates/static/codegen/src/Index.res b/codegenerator/cli/templates/static/codegen/src/Index.res index eae39769b..52f30e7a1 100644 --- a/codegenerator/cli/templates/static/codegen/src/Index.res +++ b/codegenerator/cli/templates/static/codegen/src/Index.res @@ -178,87 +178,18 @@ type process type mainArgs = Yargs.parsedArgs -let makeAppState = (globalState: GlobalState.t): EnvioInkApp.appState => { - let chains = - globalState.chainManager.chainFetchers - ->ChainMap.values - ->Array.map(cf => { - let {numEventsProcessed, fetchState, numBatchesFetched} = cf - let latestFetchedBlockNumber = Pervasives.max( - FetchState.getLatestFullyFetchedBlock(fetchState).blockNumber, - 0, - ) - let hasProcessedToEndblock = cf->ChainFetcher.hasProcessedToEndblock - let currentBlockHeight = - cf->ChainFetcher.hasProcessedToEndblock - ? cf.chainConfig.endBlock->Option.getWithDefault(cf.currentBlockHeight) - : cf.currentBlockHeight - - let progress: ChainData.progress = if hasProcessedToEndblock { - // If the endblock has been reached then set the progress to synced. 
- // if there's chains that have no events in the block range start->end, - // it's possible there are no events in that block range (ie firstEventBlockNumber = None) - // This ensures TUI still displays synced in this case - let {latestProcessedBlock, timestampCaughtUpToHeadOrEndblock, numEventsProcessed} = cf - - let firstEventBlockNumber = cf->ChainFetcher.getFirstEventBlockNumber - - Synced({ - firstEventBlockNumber: firstEventBlockNumber->Option.getWithDefault(0), - latestProcessedBlock: latestProcessedBlock->Option.getWithDefault(currentBlockHeight), - timestampCaughtUpToHeadOrEndblock: timestampCaughtUpToHeadOrEndblock->Option.getWithDefault( - Js.Date.now()->Js.Date.fromFloat, - ), - numEventsProcessed, - }) - } else { - switch (cf, cf->ChainFetcher.getFirstEventBlockNumber) { - | ( - { - latestProcessedBlock, - timestampCaughtUpToHeadOrEndblock: Some(timestampCaughtUpToHeadOrEndblock), - }, - Some(firstEventBlockNumber), - ) => - let latestProcessedBlock = - latestProcessedBlock->Option.getWithDefault(firstEventBlockNumber) - Synced({ - firstEventBlockNumber, - latestProcessedBlock, - timestampCaughtUpToHeadOrEndblock, - numEventsProcessed, - }) - | ( - {latestProcessedBlock, timestampCaughtUpToHeadOrEndblock: None}, - Some(firstEventBlockNumber), - ) => - let latestProcessedBlock = - latestProcessedBlock->Option.getWithDefault(firstEventBlockNumber) - Syncing({ - firstEventBlockNumber, - latestProcessedBlock, - numEventsProcessed, - }) - | (_, None) => SearchingForEvents - } - } - - ( - { - progress, - currentBlockHeight, - latestFetchedBlockNumber, - numBatchesFetched, - chain: cf.chainConfig.chain, - endBlock: cf.chainConfig.endBlock, - poweredByHyperSync: (cf.sourceManager->SourceManager.getActiveSource).poweredByHyperSync, - }: EnvioInkApp.chainData - ) - }) +let makeAppState = (globalState: GlobalState.t, ~envioVersion): Tui.params => { { - config: globalState.config, + getMetrics: () => { + PromClient.defaultRegister->PromClient.metrics + }, + envioVersion, + envioAppUrl: Env.envioAppUrl, + envioApiToken: Env.envioApiToken, + hasuraUrl: Env.Hasura.url, + hasuraPassword: Env.Hasura.secret, + ecosystem: globalState.config.ecosystem, indexerStartTime: globalState.indexerStartTime, - chains, } } @@ -303,44 +234,40 @@ let main = async () => { | None => Initializing({}) | Some(gsManager) => { let state = gsManager->GlobalStateManager.getState - let appState = state->makeAppState - Active({ - envioVersion, - chains: appState.chains->Js.Array2.map(c => { - let cf = state.chainManager.chainFetchers->ChainMap.get(c.chain) + let chains = + state.chainManager.chainFetchers + ->ChainMap.values + ->Array.map(cf => { + let {fetchState} = cf + let latestFetchedBlockNumber = Pervasives.max( + FetchState.getLatestFullyFetchedBlock(fetchState).blockNumber, + 0, + ) + let currentBlockHeight = + cf->ChainFetcher.hasProcessedToEndblock + ? 
cf.endBlock->Option.getWithDefault(cf.currentBlockHeight) + : cf.currentBlockHeight + { - chainId: c.chain->ChainMap.Chain.toChainId->Js.Int.toFloat, - poweredByHyperSync: c.poweredByHyperSync, - latestFetchedBlockNumber: c.latestFetchedBlockNumber, - currentBlockHeight: c.currentBlockHeight, - numBatchesFetched: c.numBatchesFetched, - endBlock: c.endBlock, - firstEventBlockNumber: switch c.progress { - | SearchingForEvents => None - | Syncing({firstEventBlockNumber}) | Synced({firstEventBlockNumber}) => - Some(firstEventBlockNumber) - }, - latestProcessedBlock: switch c.progress { - | SearchingForEvents => None - | Syncing({latestProcessedBlock}) | Synced({latestProcessedBlock}) => - Some(latestProcessedBlock) - }, - timestampCaughtUpToHeadOrEndblock: switch c.progress { - | SearchingForEvents - | Syncing(_) => - None - | Synced({timestampCaughtUpToHeadOrEndblock}) => - Some(timestampCaughtUpToHeadOrEndblock) - }, - numEventsProcessed: switch c.progress { - | SearchingForEvents => 0 - | Syncing({numEventsProcessed}) - | Synced({numEventsProcessed}) => numEventsProcessed - }, + chainId: cf.chainConfig.id->Js.Int.toFloat, + poweredByHyperSync: ( + cf.sourceManager->SourceManager.getActiveSource + ).poweredByHyperSync, + latestFetchedBlockNumber, + currentBlockHeight, + numBatchesFetched: cf.numBatchesFetched, + endBlock: cf.endBlock, + firstEventBlockNumber: cf->ChainFetcher.getFirstEventBlockNumber, + latestProcessedBlock: cf.latestProcessedBlock, + timestampCaughtUpToHeadOrEndblock: cf.timestampCaughtUpToHeadOrEndblock, + numEventsProcessed: cf.numEventsProcessed, numAddresses: cf.fetchState->FetchState.numAddresses, } - }), - indexerStartTime: appState.indexerStartTime, + }) + Active({ + envioVersion, + chains, + indexerStartTime: state.indexerStartTime, isPreRegisteringDynamicContracts: false, rollbackOnReorg: config.historyConfig.rollbackFlag === RollbackOnReorg, isUnorderedMultichainMode: config.isUnorderedMultichainMode, @@ -352,17 +279,17 @@ let main = async () => { }, ) - await config.persistence->Persistence.init + await config.persistence->Persistence.init(~chainConfigs=config.chainMap->ChainMap.values) - let chainManager = await ChainManager.makeFromDbState(~config) + let chainManager = await ChainManager.makeFromDbState( + ~initialState=config.persistence->Persistence.getInitializedState, + ~config, + ) let globalState = GlobalState.make(~config, ~chainManager, ~shouldUseTui) - let stateUpdatedHook = if shouldUseTui { - let rerender = EnvioInkApp.startApp(makeAppState(globalState)) - Some(globalState => globalState->makeAppState->rerender) - } else { - None + if shouldUseTui { + let _rerender = Tui.start(makeAppState(globalState, ~envioVersion)) } - let gsManager = globalState->GlobalStateManager.make(~stateUpdatedHook?) 
+ let gsManager = globalState->GlobalStateManager.make gsManagerRef := Some(gsManager) gsManager->GlobalStateManager.dispatchTask(NextQuery(CheckAllChains)) /* diff --git a/codegenerator/cli/templates/static/codegen/src/db/Db.res b/codegenerator/cli/templates/static/codegen/src/db/Db.res index 63ea3a4c2..7519f7935 100644 --- a/codegenerator/cli/templates/static/codegen/src/db/Db.res +++ b/codegenerator/cli/templates/static/codegen/src/db/Db.res @@ -20,21 +20,4 @@ let allEntityTables: array = Entities.allEntities->Belt.Array.map(e entityConfig.table }) -let allEntityHistoryTables: array = [] -let allEntityHistory: array< - EntityHistory.t, -> = Entities.allEntities->Belt.Array.map(entityConfig => { - let entityHistory = entityConfig.entityHistory->EntityHistory.castInternal - allEntityHistoryTables->Js.Array2.push(entityHistory.table)->ignore - entityHistory -}) - -let allStaticTables: array = [ - TablesStatic.EventSyncState.table, - TablesStatic.ChainMetadata.table, - TablesStatic.PersistedState.table, - TablesStatic.EndOfBlockRangeScannedData.table, - TablesStatic.RawEvents.table, -] - let schema = Schema.make(allEntityTables) diff --git a/codegenerator/cli/templates/static/codegen/src/db/DbFunctions.res b/codegenerator/cli/templates/static/codegen/src/db/DbFunctions.res index 4a6bd2b15..4516c60ed 100644 --- a/codegenerator/cli/templates/static/codegen/src/db/DbFunctions.res +++ b/codegenerator/cli/templates/static/codegen/src/db/DbFunctions.res @@ -13,45 +13,6 @@ module General = { } } -module ChainMetadata = { - type chainMetadata = { - @as("chain_id") chainId: int, - @as("block_height") blockHeight: int, - @as("start_block") startBlock: int, - // The values below could use `Js.Null.t` instead of `Js.Nullable.t` - // It just needs to be confiremed that the postgres lib never returns - // undefined. 
- @as("end_block") endBlock: Js.Nullable.t, - @as("first_event_block_number") firstEventBlockNumber: Js.Nullable.t, - @as("latest_processed_block") latestProcessedBlock: Js.Nullable.t, - @as("num_events_processed") numEventsProcessed: Js.Nullable.t, - @as("is_hyper_sync") poweredByHyperSync: bool, - @as("num_batches_fetched") numBatchesFetched: int, - @as("latest_fetched_block_number") latestFetchedBlockNumber: int, - @as("timestamp_caught_up_to_head_or_endblock") - timestampCaughtUpToHeadOrEndblock: Js.Nullable.t, - } - - @module("./DbFunctionsImplementation.js") - external batchSetChainMetadata: (Postgres.sql, array) => promise = - "batchSetChainMetadata" - - @module("./DbFunctionsImplementation.js") - external readLatestChainMetadataState: ( - Postgres.sql, - ~chainId: int, - ) => promise> = "readLatestChainMetadataState" - - let batchSetChainMetadataRow = (sql, ~chainMetadataArray: array) => { - sql->batchSetChainMetadata(chainMetadataArray) - } - - let getLatestChainMetadataState = async (sql, ~chainId) => { - let arr = await sql->readLatestChainMetadataState(~chainId) - arr->Belt.Array.get(0) - } -} - module EndOfBlockRangeScannedData = { type endOfBlockRangeScannedData = { @as("chain_id") chainId: int, @@ -91,7 +52,7 @@ module EndOfBlockRangeScannedData = { module EventSyncState = { @genType - type eventSyncState = TablesStatic.EventSyncState.t + type eventSyncState = InternalTable.EventSyncState.t @module("./DbFunctionsImplementation.js") external readLatestSyncedEventOnChainIdArr: ( @@ -109,11 +70,13 @@ module EventSyncState = { } @module("./DbFunctionsImplementation.js") - external batchSet: (Postgres.sql, array) => promise = + external batchSet: (Postgres.sql, array) => promise = "batchSetEventSyncState" let resetEventSyncState = async (): unit => { - let query = TablesStatic.EventSyncState.resetCurrentCurrentSyncStateQuery + let query = InternalTable.EventSyncState.resetCurrentCurrentSyncStateQuery( + ~pgSchema=Db.publicSchema, + ) try await Db.sql->Postgres.unsafe(query) catch { | exn => exn->ErrorHandling.mkLogAndRaise(~msg="Failed reset query: " ++ query) } @@ -143,7 +106,7 @@ module DynamicContractRegistry = { let readAllDynamicContracts = async (sql: Postgres.sql, ~chainId: chainId) => { let json = await sql->readAllDynamicContractsRaw(~chainId) - json->S.parseJsonOrThrow(TablesStatic.DynamicContractRegistry.rowsSchema) + json->S.parseJsonOrThrow(InternalTable.DynamicContractRegistry.rowsSchema) } } diff --git a/codegenerator/cli/templates/static/codegen/src/db/DbFunctionsImplementation.js b/codegenerator/cli/templates/static/codegen/src/db/DbFunctionsImplementation.js index 0efa52d3a..f8e198f6f 100644 --- a/codegenerator/cli/templates/static/codegen/src/db/DbFunctionsImplementation.js +++ b/codegenerator/cli/templates/static/codegen/src/db/DbFunctionsImplementation.js @@ -57,44 +57,6 @@ module.exports.batchSetEventSyncState = (sql, entityDataArray) => { `; }; -module.exports.readLatestChainMetadataState = (sql, chainId) => sql` - SELECT * - FROM ${sql(publicSchema)}.chain_metadata - WHERE chain_id = ${chainId}`; - -module.exports.batchSetChainMetadata = (sql, entityDataArray) => { - return sql` - INSERT INTO ${sql(publicSchema)}.chain_metadata - ${sql( - entityDataArray, - "chain_id", - "start_block", // this is left out of the on conflict below as it only needs to be set once - "end_block", // this is left out of the on conflict below as it only needs to be set once - "block_height", - "first_event_block_number", - "latest_processed_block", - "num_events_processed", - 
"is_hyper_sync", // this is left out of the on conflict below as it only needs to be set once - "num_batches_fetched", - "latest_fetched_block_number", - "timestamp_caught_up_to_head_or_endblock" - )} - ON CONFLICT(chain_id) DO UPDATE - SET - "chain_id" = EXCLUDED."chain_id", - "first_event_block_number" = EXCLUDED."first_event_block_number", - "latest_processed_block" = EXCLUDED."latest_processed_block", - "num_events_processed" = EXCLUDED."num_events_processed", - "num_batches_fetched" = EXCLUDED."num_batches_fetched", - "latest_fetched_block_number" = EXCLUDED."latest_fetched_block_number", - "timestamp_caught_up_to_head_or_endblock" = EXCLUDED."timestamp_caught_up_to_head_or_endblock", - "block_height" = EXCLUDED."block_height";` - .then((res) => {}) - .catch((err) => { - console.log("errored", err); - }); -}; - module.exports.batchDeleteRawEvents = (sql, entityIdArray) => sql` DELETE FROM ${sql(publicSchema)}."raw_events" diff --git a/codegenerator/cli/templates/static/codegen/src/db/Migrations.res b/codegenerator/cli/templates/static/codegen/src/db/Migrations.res index 6422f8cde..74520e880 100644 --- a/codegenerator/cli/templates/static/codegen/src/db/Migrations.res +++ b/codegenerator/cli/templates/static/codegen/src/db/Migrations.res @@ -26,8 +26,13 @@ let runUpMigrations = async ( // Reset is used for db-setup ~reset=false, ) => { + let config = RegisterHandlers.getConfigWithoutRegisteringHandlers() + let exitCode = try { - await Config.codegenPersistence->Persistence.init(~reset) + await config.persistence->Persistence.init( + ~reset, + ~chainConfigs=config.chainMap->ChainMap.values, + ) Success } catch { | _ => Failure diff --git a/codegenerator/cli/templates/static/codegen/src/db/TablesStatic.res b/codegenerator/cli/templates/static/codegen/src/db/TablesStatic.res deleted file mode 100644 index 400de657d..000000000 --- a/codegenerator/cli/templates/static/codegen/src/db/TablesStatic.res +++ /dev/null @@ -1,248 +0,0 @@ -open Table - -//shorthand for punning -let isPrimaryKey = true -let isNullable = true -let isIndex = true - -module EventSyncState = { - //Used unsafely in DbFunctions.res so just enforcing the naming here - let blockTimestampFieldName = "block_timestamp" - let blockNumberFieldName = "block_number" - let logIndexFieldName = "log_index" - let isPreRegisteringDynamicContractsFieldName = "is_pre_registering_dynamic_contracts" - - @genType - type t = { - @as("chain_id") chainId: int, - @as("block_number") blockNumber: int, - @as("log_index") logIndex: int, - @as("block_timestamp") blockTimestamp: int, - } - - let table = mkTable( - PgStorage.eventSyncStateTableName, - ~fields=[ - mkField("chain_id", Integer, ~fieldSchema=S.int, ~isPrimaryKey), - mkField(blockNumberFieldName, Integer, ~fieldSchema=S.int), - mkField(logIndexFieldName, Integer, ~fieldSchema=S.int), - mkField(blockTimestampFieldName, Integer, ~fieldSchema=S.int), - // Keep it in case Hosted Service relies on it to prevent a breaking changes - mkField( - isPreRegisteringDynamicContractsFieldName, - Boolean, - ~default="false", - ~fieldSchema=S.bool, - ), - ], - ) - - //We need to update values here not delet the rows, since restarting without a row - //has a different behaviour to restarting with an initialised row with zero values - let resetCurrentCurrentSyncStateQuery = `UPDATE "${Env.Db.publicSchema}"."${table.tableName}" - SET ${blockNumberFieldName} = 0, - ${logIndexFieldName} = 0, - ${blockTimestampFieldName} = 0, - ${isPreRegisteringDynamicContractsFieldName} = false;` -} - -module ChainMetadata = 
{ - @genType - type t = { - chain_id: int, - start_block: int, - end_block: option, - block_height: int, - first_event_block_number: option, - latest_processed_block: option, - num_events_processed: option, - is_hyper_sync: bool, - num_batches_fetched: int, - latest_fetched_block_number: int, - timestamp_caught_up_to_head_or_endblock: Js.Date.t, - } - - let table = mkTable( - "chain_metadata", - ~fields=[ - mkField("chain_id", Integer, ~fieldSchema=S.int, ~isPrimaryKey), - mkField("start_block", Integer, ~fieldSchema=S.int), - mkField("end_block", Integer, ~fieldSchema=S.null(S.int), ~isNullable), - mkField("block_height", Integer, ~fieldSchema=S.int), - mkField("first_event_block_number", Integer, ~fieldSchema=S.null(S.int), ~isNullable), - mkField("latest_processed_block", Integer, ~fieldSchema=S.null(S.int), ~isNullable), - mkField("num_events_processed", Integer, ~fieldSchema=S.null(S.int), ~isNullable), - mkField("is_hyper_sync", Boolean, ~fieldSchema=S.bool), - mkField("num_batches_fetched", Integer, ~fieldSchema=S.int), - mkField("latest_fetched_block_number", Integer, ~fieldSchema=S.int), - // Used to show how much time historical sync has taken, so we need a timezone here (TUI and Hosted Service) - mkField( - "timestamp_caught_up_to_head_or_endblock", - TimestampWithNullTimezone, - ~fieldSchema=S.null(Utils.Schema.dbDate), - ~isNullable, - ), - ], - ) -} - -module PersistedState = { - @genType - type t = { - id: int, - envio_version: string, - config_hash: string, - schema_hash: string, - handler_files_hash: string, - abi_files_hash: string, - } - - let table = mkTable( - "persisted_state", - ~fields=[ - mkField("id", Serial, ~fieldSchema=S.int, ~isPrimaryKey), - mkField("envio_version", Text, ~fieldSchema=S.string), - mkField("config_hash", Text, ~fieldSchema=S.string), - mkField("schema_hash", Text, ~fieldSchema=S.string), - mkField("handler_files_hash", Text, ~fieldSchema=S.string), - mkField("abi_files_hash", Text, ~fieldSchema=S.string), - ], - ) -} - -module EndOfBlockRangeScannedData = { - @genType - type t = { - chain_id: int, - block_number: int, - block_hash: string, - } - - let table = mkTable( - "end_of_block_range_scanned_data", - ~fields=[ - mkField("chain_id", Integer, ~fieldSchema=S.int, ~isPrimaryKey), - mkField("block_number", Integer, ~fieldSchema=S.int, ~isPrimaryKey), - mkField("block_hash", Text, ~fieldSchema=S.string), - ], - ) -} - -module RawEvents = { - @genType - type t = { - @as("chain_id") chainId: int, - @as("event_id") eventId: bigint, - @as("event_name") eventName: string, - @as("contract_name") contractName: string, - @as("block_number") blockNumber: int, - @as("log_index") logIndex: int, - @as("src_address") srcAddress: Address.t, - @as("block_hash") blockHash: string, - @as("block_timestamp") blockTimestamp: int, - @as("block_fields") blockFields: Js.Json.t, - @as("transaction_fields") transactionFields: Js.Json.t, - params: Js.Json.t, - } - - let schema = S.schema(s => { - chainId: s.matches(S.int), - eventId: s.matches(S.bigint), - eventName: s.matches(S.string), - contractName: s.matches(S.string), - blockNumber: s.matches(S.int), - logIndex: s.matches(S.int), - srcAddress: s.matches(Address.schema), - blockHash: s.matches(S.string), - blockTimestamp: s.matches(S.int), - blockFields: s.matches(S.json(~validate=false)), - transactionFields: s.matches(S.json(~validate=false)), - params: s.matches(S.json(~validate=false)), - }) - - let table = mkTable( - PgStorage.rawEventsTableName, - ~fields=[ - mkField("chain_id", Integer, ~fieldSchema=S.int), 
- mkField("event_id", Numeric, ~fieldSchema=S.bigint), - mkField("event_name", Text, ~fieldSchema=S.string), - mkField("contract_name", Text, ~fieldSchema=S.string), - mkField("block_number", Integer, ~fieldSchema=S.int), - mkField("log_index", Integer, ~fieldSchema=S.int), - mkField("src_address", Text, ~fieldSchema=Address.schema), - mkField("block_hash", Text, ~fieldSchema=S.string), - mkField("block_timestamp", Integer, ~fieldSchema=S.int), - mkField("block_fields", JsonB, ~fieldSchema=S.json(~validate=false)), - mkField("transaction_fields", JsonB, ~fieldSchema=S.json(~validate=false)), - mkField("params", JsonB, ~fieldSchema=S.json(~validate=false)), - mkField( - "db_write_timestamp", - TimestampWithoutTimezone, - ~default="CURRENT_TIMESTAMP", - ~fieldSchema=S.int, - ), - mkField("serial", Serial, ~isNullable, ~isPrimaryKey, ~fieldSchema=S.null(S.int)), - ], - ) -} - -module DynamicContractRegistry = { - let name = Enums.EntityType.DynamicContractRegistry - - let makeId = (~chainId, ~contractAddress) => { - chainId->Belt.Int.toString ++ "-" ++ contractAddress->Address.toString - } - - @genType - type t = { - id: string, - @as("chain_id") chainId: int, - @as("registering_event_block_number") registeringEventBlockNumber: int, - @as("registering_event_log_index") registeringEventLogIndex: int, - @as("registering_event_block_timestamp") registeringEventBlockTimestamp: int, - @as("registering_event_contract_name") registeringEventContractName: string, - @as("registering_event_name") registeringEventName: string, - @as("registering_event_src_address") registeringEventSrcAddress: Address.t, - @as("contract_address") contractAddress: Address.t, - @as("contract_type") contractType: Enums.ContractType.t, - } - - let schema = S.schema(s => { - id: s.matches(S.string), - chainId: s.matches(S.int), - registeringEventBlockNumber: s.matches(S.int), - registeringEventLogIndex: s.matches(S.int), - registeringEventContractName: s.matches(S.string), - registeringEventName: s.matches(S.string), - registeringEventSrcAddress: s.matches(Address.schema), - registeringEventBlockTimestamp: s.matches(S.int), - contractAddress: s.matches(Address.schema), - contractType: s.matches(Enums.ContractType.config.schema), - }) - - let rowsSchema = S.array(schema) - - let table = mkTable( - "dynamic_contract_registry", - ~fields=[ - mkField("id", Text, ~isPrimaryKey, ~fieldSchema=S.string), - mkField("chain_id", Integer, ~fieldSchema=S.int), - mkField("registering_event_block_number", Integer, ~fieldSchema=S.int), - mkField("registering_event_log_index", Integer, ~fieldSchema=S.int), - mkField("registering_event_block_timestamp", Integer, ~fieldSchema=S.int), - mkField("registering_event_contract_name", Text, ~fieldSchema=S.string), - mkField("registering_event_name", Text, ~fieldSchema=S.string), - mkField("registering_event_src_address", Text, ~fieldSchema=Address.schema), - mkField("contract_address", Text, ~fieldSchema=Address.schema), - mkField( - "contract_type", - Custom(Enums.ContractType.config.name), - ~fieldSchema=Enums.ContractType.config.schema, - ), - ], - ) - - let entityHistory = table->EntityHistory.fromTable(~pgSchema=Env.Db.publicSchema, ~schema) - - external castToInternal: t => Internal.entity = "%identity" -} diff --git a/codegenerator/cli/templates/static/codegen/src/eventFetching/ChainFetcher.res b/codegenerator/cli/templates/static/codegen/src/eventFetching/ChainFetcher.res index 9bbd7ebae..b967d5332 100644 --- a/codegenerator/cli/templates/static/codegen/src/eventFetching/ChainFetcher.res +++ 
b/codegenerator/cli/templates/static/codegen/src/eventFetching/ChainFetcher.res
@@ -7,13 +7,12 @@ type processingFilter = {
   isValid: (~fetchState: FetchState.t) => bool,
 }

-type addressToDynContractLookup = dict<TablesStatic.DynamicContractRegistry.t>
 type t = {
   logger: Pino.t,
   fetchState: FetchState.t,
   sourceManager: SourceManager.t,
-  chainConfig: Config.chainConfig,
-  startBlock: int,
+  chainConfig: InternalConfig.chain,
+  endBlock: option<int>,
   //The latest known block of the chain
   currentBlockHeight: int,
   timestampCaughtUpToHeadOrEndblock: option<Js.Date.t>,
@@ -29,9 +28,10 @@ type t = {
 //CONSTRUCTION
 let make = (
-  ~chainConfig: Config.chainConfig,
+  ~chainConfig: InternalConfig.chain,
   ~lastBlockScannedHashes,
-  ~dynamicContracts: array<TablesStatic.DynamicContractRegistry.t>,
+  ~dynamicContracts: array<InternalTable.DynamicContractRegistry.t>,
+  ~resumeBlock,
   ~startBlock,
   ~endBlock,
   ~dbFirstEventBlockNumber,
@@ -42,7 +42,6 @@ let make = (
   ~numEventsProcessed,
   ~numBatchesFetched,
   ~processingFilters,
-  ~maxAddrInPartition,
   ~isInReorgThreshold,
 ): t => {
   // We don't need the router itself, but only validation logic,
@@ -67,7 +66,7 @@ let make = (
         eventConfig.id,
         (),
         ~contractName,
-        ~chain=chainConfig.chain,
+        ~chain=ChainMap.Chain.makeUnsafe(~chainId=chainConfig.id),
         ~eventName=eventConfig.name,
         ~isWildcard,
       )
@@ -115,7 +114,7 @@ let make = (
   dynamicContracts->Array.forEach(dc =>
     contracts->Array.push({
       FetchState.address: dc.contractAddress,
-      contractName: (dc.contractType :> string),
+      contractName: dc.contractName,
      startBlock: dc.registeringEventBlockNumber,
      register: DC({
        registeringEventLogIndex: dc.registeringEventLogIndex,
@@ -127,13 +126,21 @@
     })
   )

+  switch endBlock {
+  | Some(endBlock) => Prometheus.IndexingEndBlock.set(~endBlock, ~chainId=chainConfig.id)
+  | None => ()
+  }
+  if startBlock !== 0 {
+    Prometheus.IndexingStartBlock.set(~startBlock, ~chainId=chainConfig.id)
+  }
+
   let fetchState = FetchState.make(
-    ~maxAddrInPartition,
+    ~maxAddrInPartition=config.maxAddrInPartition,
     ~contracts,
-    ~startBlock,
+    ~startBlock=resumeBlock,
     ~endBlock,
     ~eventConfigs,
-    ~chainId=chainConfig.chain->ChainMap.Chain.toChainId,
+    ~chainId=chainConfig.id,
     ~blockLag=Pervasives.max(
       !(config->Config.shouldRollbackOnReorg) || isInReorgThreshold
        ? 
0 @@ -145,7 +152,7 @@ let make = ( { logger, chainConfig, - startBlock, + endBlock, sourceManager: SourceManager.make( ~sources=chainConfig.sources, ~maxPartitionConcurrency=Env.maxPartitionConcurrency, @@ -162,8 +169,8 @@ let make = ( } } -let makeFromConfig = (chainConfig: Config.chainConfig, ~config, ~maxAddrInPartition) => { - let logger = Logging.createChild(~params={"chainId": chainConfig.chain->ChainMap.Chain.toChainId}) +let makeFromConfig = (chainConfig: InternalConfig.chain, ~config) => { + let logger = Logging.createChild(~params={"chainId": chainConfig.id}) let lastBlockScannedHashes = ReorgDetection.LastBlockScannedHashes.empty( ~confirmedBlockThreshold=chainConfig.confirmedBlockThreshold, ) @@ -172,6 +179,7 @@ let makeFromConfig = (chainConfig: Config.chainConfig, ~config, ~maxAddrInPartit ~chainConfig, ~config, ~startBlock=chainConfig.startBlock, + ~resumeBlock=chainConfig.startBlock, ~endBlock=chainConfig.endBlock, ~lastBlockScannedHashes, ~dbFirstEventBlockNumber=None, @@ -182,7 +190,6 @@ let makeFromConfig = (chainConfig: Config.chainConfig, ~config, ~maxAddrInPartit ~logger, ~processingFilters=None, ~dynamicContracts=[], - ~maxAddrInPartition, ~isInReorgThreshold=false, ) } @@ -191,18 +198,16 @@ let makeFromConfig = (chainConfig: Config.chainConfig, ~config, ~maxAddrInPartit * This function allows a chain fetcher to be created from metadata, in particular this is useful for restarting an indexer and making sure it fetches blocks from the same place. */ let makeFromDbState = async ( - chainConfig: Config.chainConfig, - ~maxAddrInPartition, + chainConfig: InternalConfig.chain, + ~initialChainState: InternalTable.Chains.t, ~isInReorgThreshold, ~config, ~sql=Db.sql, ) => { - let logger = Logging.createChild(~params={"chainId": chainConfig.chain->ChainMap.Chain.toChainId}) - let chainId = chainConfig.chain->ChainMap.Chain.toChainId + let chainId = chainConfig.id + let logger = Logging.createChild(~params={"chainId": chainId}) let latestProcessedEvent = await sql->DbFunctions.EventSyncState.getLatestProcessedEvent(~chainId) - let chainMetadata = await sql->DbFunctions.ChainMetadata.getLatestChainMetadataState(~chainId) - let ( restartBlockNumber: int, restartLogIndex: int, @@ -246,36 +251,6 @@ let makeFromDbState = async ( let dbRecoveredDynamicContracts = await sql->DbFunctions.DynamicContractRegistry.readAllDynamicContracts(~chainId) - let ( - firstEventBlockNumber, - latestProcessedBlockChainMetadata, - numEventsProcessed, - timestampCaughtUpToHeadOrEndblock, - ) = switch chainMetadata { - | Some({ - firstEventBlockNumber, - latestProcessedBlock, - numEventsProcessed, - timestampCaughtUpToHeadOrEndblock, - }) => { - // on restart, reset the events_processed gauge to the previous state - switch numEventsProcessed { - | Value(numEventsProcessed) => - Prometheus.ProgressEventsCount.set(~processedCount=numEventsProcessed, ~chainId) - | Null | Undefined => () // do nothing if no events have been processed yet for this chain - } - ( - firstEventBlockNumber->Js.Nullable.toOption, - latestProcessedBlock->Js.Nullable.toOption, - numEventsProcessed->Js.Nullable.toOption, - Env.updateSyncTimeOnRestart - ? 
None - : timestampCaughtUpToHeadOrEndblock->Js.Nullable.toOption, - ) - } - | None => (None, None, None, None) - } - let endOfBlockRangeScannedData = await sql->DbFunctions.EndOfBlockRangeScannedData.readEndOfBlockRangeScannedDataForChain( ~chainId, @@ -291,21 +266,25 @@ let makeFromDbState = async ( ~confirmedBlockThreshold=chainConfig.confirmedBlockThreshold, ) + Prometheus.ProgressEventsCount.set(~processedCount=initialChainState.numEventsProcessed, ~chainId) + make( ~dynamicContracts=dbRecoveredDynamicContracts, ~chainConfig, - ~startBlock=restartBlockNumber, - ~endBlock=chainConfig.endBlock, + ~resumeBlock=restartBlockNumber, + ~startBlock=initialChainState.startBlock, + ~endBlock=initialChainState.endBlock->Js.Null.toOption, ~config, ~lastBlockScannedHashes, - ~dbFirstEventBlockNumber=firstEventBlockNumber, - ~latestProcessedBlock=latestProcessedBlockChainMetadata, - ~timestampCaughtUpToHeadOrEndblock, - ~numEventsProcessed=numEventsProcessed->Option.getWithDefault(0), + ~dbFirstEventBlockNumber=initialChainState.firstEventBlockNumber->Js.Null.toOption, + ~latestProcessedBlock=initialChainState.latestProcessedBlock->Js.Null.toOption, + ~timestampCaughtUpToHeadOrEndblock=Env.updateSyncTimeOnRestart + ? None + : initialChainState.timestampCaughtUpToHeadOrEndblock->Js.Null.toOption, + ~numEventsProcessed=initialChainState.numEventsProcessed, ~numBatchesFetched=0, ~logger, ~processingFilters, - ~maxAddrInPartition, ~isInReorgThreshold, ) } @@ -467,12 +446,7 @@ let handleQueryResult = ( } fs - ->FetchState.handleQueryResult( - ~query, - ~latestFetchedBlock, - ~newItems, - ~currentBlockHeight, - ) + ->FetchState.handleQueryResult(~query, ~latestFetchedBlock, ~newItems, ~currentBlockHeight) ->Result.map(fetchState => { { ...chainFetcher, @@ -489,7 +463,7 @@ let handleQueryResult = ( Gets the latest item on the front of the queue and returns updated fetcher */ let hasProcessedToEndblock = (self: t) => { - let {latestProcessedBlock, chainConfig: {endBlock}} = self + let {latestProcessedBlock, endBlock} = self switch (latestProcessedBlock, endBlock) { | (Some(latestProcessedBlock), Some(endBlock)) => latestProcessedBlock >= endBlock | _ => false diff --git a/codegenerator/cli/templates/static/codegen/src/eventFetching/ChainManager.res b/codegenerator/cli/templates/static/codegen/src/eventFetching/ChainManager.res index 03c3fa829..01cc876d4 100644 --- a/codegenerator/cli/templates/static/codegen/src/eventFetching/ChainManager.res +++ b/codegenerator/cli/templates/static/codegen/src/eventFetching/ChainManager.res @@ -61,9 +61,8 @@ let getOrderedNextItem = (fetchStates: ChainMap.t): option< }) } -let makeFromConfig = (~config: Config.t, ~maxAddrInPartition=Env.maxAddrInPartition): t => { - let chainFetchers = - config.chainMap->ChainMap.map(ChainFetcher.makeFromConfig(_, ~maxAddrInPartition, ~config)) +let makeFromConfig = (~config: Config.t): t => { + let chainFetchers = config.chainMap->ChainMap.map(ChainFetcher.makeFromConfig(_, ~config)) { chainFetchers, isUnorderedMultichainMode: config.isUnorderedMultichainMode, @@ -71,25 +70,32 @@ let makeFromConfig = (~config: Config.t, ~maxAddrInPartition=Env.maxAddrInPartit } } -let makeFromDbState = async (~config: Config.t, ~maxAddrInPartition=Env.maxAddrInPartition): t => { - // Since now it's possible not to have rows in the history table - // even after the indexer started saving history (entered reorg threshold), - // This rows check might incorrectly return false for recovering the isInReorgThreshold option. - // But this is not a problem. 
There's no history anyways, and the indexer will be able to - // correctly calculate isInReorgThreshold as it starts. - let hasStartedSavingHistory = await Db.sql->DbFunctions.EntityHistory.hasRows - //If we have started saving history, continue to save history - //as regardless of whether we are still in a reorg threshold - let isInReorgThreshold = hasStartedSavingHistory +let makeFromDbState = async (~initialState: Persistence.initialState, ~config: Config.t): t => { + let isInReorgThreshold = if initialState.cleanRun { + false + } else { + // TODO: Move to Persistence.initialState + // Since now it's possible not to have rows in the history table + // even after the indexer started saving history (entered reorg threshold), + // This rows check might incorrectly return false for recovering the isInReorgThreshold option. + // But this is not a problem. There's no history anyways, and the indexer will be able to + // correctly calculate isInReorgThreshold as it starts. + let hasStartedSavingHistory = await Db.sql->DbFunctions.EntityHistory.hasRows + + //If we have started saving history, continue to save history + //as regardless of whether we are still in a reorg threshold + hasStartedSavingHistory + } let chainFetchersArr = - await config.chainMap - ->ChainMap.entries - ->Array.map(async ((chain, chainConfig)) => { + await initialState.chains + ->Array.map(async (initialChainState: InternalTable.Chains.t) => { + let chain = Config.getChain(config, ~chainId=initialChainState.id) + let chainConfig = config.chainMap->ChainMap.get(chain) ( chain, await chainConfig->ChainFetcher.makeFromDbState( - ~maxAddrInPartition, + ~initialChainState, ~isInReorgThreshold, ~config, ), @@ -113,7 +119,10 @@ let getChainFetcher = (self: t, ~chain: ChainMap.Chain.t): ChainFetcher.t => { let setChainFetcher = (self: t, chainFetcher: ChainFetcher.t) => { { ...self, - chainFetchers: self.chainFetchers->ChainMap.set(chainFetcher.chainConfig.chain, chainFetcher), + chainFetchers: self.chainFetchers->ChainMap.set( + ChainMap.Chain.makeUnsafe(~chainId=chainFetcher.chainConfig.id), + chainFetcher, + ), } } @@ -316,8 +325,8 @@ let getSafeReorgBlocks = (self: t): EntityHistory.safeReorgBlocks => { let blockNumbers = [] self.chainFetchers ->ChainMap.values - ->Array.forEach((cf) => { - chainIds->Js.Array2.push(cf.chainConfig.chain->ChainMap.Chain.toChainId)->ignore + ->Array.forEach(cf => { + chainIds->Js.Array2.push(cf.chainConfig.id)->ignore blockNumbers->Js.Array2.push(cf->ChainFetcher.getHighestBlockBelowThreshold)->ignore }) { diff --git a/codegenerator/cli/templates/static/codegen/src/eventFetching/rpc/RpcSource.res b/codegenerator/cli/templates/static/codegen/src/eventFetching/rpc/RpcSource.res index a5feb25a0..4cce599ce 100644 --- a/codegenerator/cli/templates/static/codegen/src/eventFetching/rpc/RpcSource.res +++ b/codegenerator/cli/templates/static/codegen/src/eventFetching/rpc/RpcSource.res @@ -16,17 +16,21 @@ let getKnownBlock = (provider, blockNumber) => } ) -let rec getKnownBlockWithBackoff = async (~provider, ~blockNumber, ~backoffMsOnFailure) => +let rec getKnownBlockWithBackoff = async (~provider, ~sourceName, ~chain, ~blockNumber, ~backoffMsOnFailure) => switch await getKnownBlock(provider, blockNumber) { | exception err => Logging.warn({ "err": err, "msg": `Issue while running fetching batch of events from the RPC. 
Will wait ${backoffMsOnFailure->Belt.Int.toString}ms and try again.`, + "source": sourceName, + "chainId": chain->ChainMap.Chain.toChainId, "type": "EXPONENTIAL_BACKOFF", }) await Time.resolvePromiseAfterDelay(~delayMilliseconds=backoffMsOnFailure) await getKnownBlockWithBackoff( ~provider, + ~sourceName, + ~chain, ~blockNumber, ~backoffMsOnFailure=backoffMsOnFailure * 2, ) @@ -460,10 +464,11 @@ let make = ({sourceFor, syncConfig, url, chain, contracts, eventRouter}: options "err": exn, "msg": `EE1100: Top level promise timeout reached. Please review other errors or warnings in the code. This function will retry in ${(am._retryDelayMillis / 1000) ->Belt.Int.toString} seconds. It is highly likely that your indexer isn't syncing on one or more chains currently. Also take a look at the "suggestedFix" in the metadata of this command`, + "source": name, + "chainId": chain->ChainMap.Chain.toChainId, "metadata": { { "asyncTaskName": "transactionLoader: fetching transaction data - `getTransaction` rpc call", - "caller": "RPC Source", "suggestedFix": "This likely means the RPC url you are using is not responding correctly. Please try another RPC endipoint.", } }, @@ -473,16 +478,17 @@ let make = ({sourceFor, syncConfig, url, chain, contracts, eventRouter}: options let blockLoader = LazyLoader.make( ~loaderFn=blockNumber => - getKnownBlockWithBackoff(~provider, ~backoffMsOnFailure=1000, ~blockNumber), + getKnownBlockWithBackoff(~provider, ~sourceName=name, ~chain, ~backoffMsOnFailure=1000, ~blockNumber), ~onError=(am, ~exn) => { Logging.error({ "err": exn, "msg": `EE1100: Top level promise timeout reached. Please review other errors or warnings in the code. This function will retry in ${(am._retryDelayMillis / 1000) ->Belt.Int.toString} seconds. It is highly likely that your indexer isn't syncing on one or more chains currently. Also take a look at the "suggestedFix" in the metadata of this command`, + "source": name, + "chainId": chain->ChainMap.Chain.toChainId, "metadata": { { "asyncTaskName": "blockLoader: fetching block data - `getBlock` rpc call", - "caller": "RPC Source", "suggestedFix": "This likely means the RPC url you are using is not responding correctly. 
Please try another RPC endipoint.", } }, diff --git a/codegenerator/cli/templates/static/codegen/src/globalState/GlobalState.res b/codegenerator/cli/templates/static/codegen/src/globalState/GlobalState.res index 4aef4a2d5..739a8a9c7 100644 --- a/codegenerator/cli/templates/static/codegen/src/globalState/GlobalState.res +++ b/codegenerator/cli/templates/static/codegen/src/globalState/GlobalState.res @@ -27,7 +27,7 @@ module WriteThrottlers = { ~params={ "context": "Throttler for pruning stale endblock data", "intervalMillis": intervalMillis, - "chain": cfg.chain, + "chain": cfg.id, }, ) Throttler.make(~intervalMillis, ~logger) @@ -163,7 +163,7 @@ let updateChainFetcherCurrentBlockHeight = (chainFetcher: ChainFetcher.t, ~curre if currentBlockHeight > chainFetcher.currentBlockHeight { Prometheus.setSourceChainHeight( ~blockNumber=currentBlockHeight, - ~chain=chainFetcher.chainConfig.chain, + ~chainId=chainFetcher.chainConfig.id, ) {...chainFetcher, currentBlockHeight} } else { @@ -171,31 +171,31 @@ let updateChainFetcherCurrentBlockHeight = (chainFetcher: ChainFetcher.t, ~curre } } -let updateChainMetadataTable = async (cm: ChainManager.t, ~throttler: Throttler.t) => { - let chainMetadataArray: array = +let updateChainMetadataTable = (cm: ChainManager.t, ~throttler: Throttler.t) => { + let chainsData: array = cm.chainFetchers ->ChainMap.values - ->Belt.Array.map(cf => { + ->Belt.Array.map((cf): InternalTable.Chains.t => { let latestFetchedBlock = cf.fetchState->FetchState.getLatestFullyFetchedBlock - let chainMetadata: DbFunctions.ChainMetadata.chainMetadata = { - chainId: cf.chainConfig.chain->ChainMap.Chain.toChainId, + { + id: cf.chainConfig.id, startBlock: cf.chainConfig.startBlock, blockHeight: cf.currentBlockHeight, - //optional fields - endBlock: cf.chainConfig.endBlock->Js.Nullable.fromOption, //this is already optional - firstEventBlockNumber: cf->ChainFetcher.getFirstEventBlockNumber->Js.Nullable.fromOption, - latestProcessedBlock: cf.latestProcessedBlock->Js.Nullable.fromOption, // this is already optional - numEventsProcessed: Value(cf.numEventsProcessed), - poweredByHyperSync: (cf.sourceManager->SourceManager.getActiveSource).poweredByHyperSync, + endBlock: cf.endBlock->Js.Null.fromOption, + firstEventBlockNumber: cf->ChainFetcher.getFirstEventBlockNumber->Js.Null.fromOption, + latestProcessedBlock: cf.latestProcessedBlock->Js.Null.fromOption, + numEventsProcessed: cf.numEventsProcessed, + isHyperSync: (cf.sourceManager->SourceManager.getActiveSource).poweredByHyperSync, numBatchesFetched: cf.numBatchesFetched, latestFetchedBlockNumber: latestFetchedBlock.blockNumber, - timestampCaughtUpToHeadOrEndblock: cf.timestampCaughtUpToHeadOrEndblock->Js.Nullable.fromOption, + timestampCaughtUpToHeadOrEndblock: cf.timestampCaughtUpToHeadOrEndblock->Js.Null.fromOption, } - chainMetadata }) //Don't await this set, it can happen in its own time throttler->Throttler.schedule(() => - Db.sql->DbFunctions.ChainMetadata.batchSetChainMetadataRow(~chainMetadataArray) + Db.sql + ->InternalTable.Chains.setValues(~pgSchema=Db.publicSchema, ~chainsData) + ->Promise.ignoreValue ) } @@ -214,7 +214,7 @@ let checkAndSetSyncedChains = ( let allChainsAtHead = chainManager->ChainManager.isFetchingAtHead //Update the timestampCaughtUpToHeadOrEndblock values let chainFetchers = chainManager.chainFetchers->ChainMap.map(cf => { - let chain = cf.chainConfig.chain + let chain = ChainMap.Chain.makeUnsafe(~chainId=cf.chainConfig.id) // None if the chain wasn't processing. 
// But we still want to update the latest processed block
@@ -373,7 +373,7 @@ let validatePartitionQueryResponse = (
     if currentBlockHeight > chainFetcher.currentBlockHeight {
       Prometheus.SourceHeight.set(
         ~blockNumber=currentBlockHeight,
-        ~chainId=chainFetcher.chainConfig.chain->ChainMap.Chain.toChainId,
+        ~chainId=chainFetcher.chainConfig.id,
         // The currentBlockHeight from response won't necessarily
         // belong to the currently active source.
         // But for simplicity, assume it does.
@@ -907,8 +907,7 @@ let injectedTaskReducer = (
     switch shouldExit {
     | ExitWithSuccess =>
       updateChainMetadataTable(chainManager, ~throttler=writeThrottlers.chainMetaData)
-      ->Promise.thenResolve(_ => dispatchAction(SuccessExit))
-      ->ignore
+      dispatchAction(SuccessExit)
     | NoExit =>
       updateChainMetadataTable(chainManager, ~throttler=writeThrottlers.chainMetaData)->ignore
     }
diff --git a/codegenerator/cli/templates/static/codegen/src/globalState/GlobalStateManager.res b/codegenerator/cli/templates/static/codegen/src/globalState/GlobalStateManager.res
index d50879ad9..a9119baa3 100644
--- a/codegenerator/cli/templates/static/codegen/src/globalState/GlobalStateManager.res
+++ b/codegenerator/cli/templates/static/codegen/src/globalState/GlobalStateManager.res
@@ -11,9 +11,9 @@ module type State = {
 }

 module MakeManager = (S: State) => {
-  type t = {mutable state: S.t, stateUpdatedHook: option<S.t => unit>}
+  type t = {mutable state: S.t}

-  let make = (~stateUpdatedHook: option<S.t => unit>=?, state: S.t) => {state, stateUpdatedHook}
+  let make = (state: S.t) => {state: state}

   let rec dispatchAction = (~stateId=0, self: t, action: S.action) => {
     try {
@@ -23,12 +23,6 @@ module MakeManager = (S: State) => {
       S.invalidatedActionReducer
     }
     let (nextState, nextTasks) = reducer(self.state, action)
-      switch self.stateUpdatedHook {
-      // In ReScript `!==` is shallow equality check rather than `!=`
-      // This is just a check to see if a new object reference was returned
-      | Some(hook) if self.state !== nextState => hook(nextState)
-      | _ => ()
-      }
     self.state = nextState
     nextTasks->Array.forEach(task => dispatchTask(self, task))
   } catch {
diff --git a/codegenerator/cli/templates/static/codegen/src/globalState/GlobalStateManager.resi b/codegenerator/cli/templates/static/codegen/src/globalState/GlobalStateManager.resi
index 20dee4402..d427a52c0 100644
--- a/codegenerator/cli/templates/static/codegen/src/globalState/GlobalStateManager.resi
+++ b/codegenerator/cli/templates/static/codegen/src/globalState/GlobalStateManager.resi
@@ -1,6 +1,6 @@
 type t

-let make: (~stateUpdatedHook: GlobalState.t => unit=?, GlobalState.t) => t
+let make: GlobalState.t => t
 let dispatchAction: (~stateId: int=?, t, GlobalState.action) => unit
 let dispatchTask: (t, GlobalState.task) => unit
 let getState: t => GlobalState.t
diff --git a/codegenerator/cli/templates/static/codegen/src/ink/EnvioInkApp.res b/codegenerator/cli/templates/static/codegen/src/ink/EnvioInkApp.res
deleted file mode 100644
index 926dd288d..000000000
--- a/codegenerator/cli/templates/static/codegen/src/ink/EnvioInkApp.res
+++ /dev/null
@@ -1,67 +0,0 @@
-open Ink
-open Belt
-
-type chainData = ChainData.chainData
-type appState = {
-  chains: array<chainData>,
-  indexerStartTime: Js.Date.t,
-  config: Config.t,
-}
-
-let getTotalNumEventsProcessed = (~chains: array<chainData>) => {
-  chains->Array.reduce(0, (acc, chain) => {
-    acc + chain.progress->ChainData.getNumberOfEventsProccessed
-  })
-}
-
-module TotalEventsProcessed = {
-  @react.component
-  let make = (~totalEventsProcessed) => {
-    let label = "Total Events Processed: "
- 
{label->React.string} - - {`${totalEventsProcessed->ChainData.formatLocaleString}`->React.string} - - - } -} - -module App = { - @react.component - let make = (~appState: appState) => { - let {chains, indexerStartTime, config} = appState - let totalEventsProcessed = getTotalNumEventsProcessed(~chains) - - - {chains - ->Array.mapWithIndex((i, chainData) => { - Int.toString} chainData /> - }) - ->React.array} - - - - - - {"Development Console: "->React.string} - - {`${Env.envioAppUrl}/console`->React.string} - - - - {"GraphQL Endpoint: "->React.string} - - {`${Env.Hasura.url}/v1/graphql`->React.string} - - - - } -} - -let startApp = appState => { - let {rerender} = render() - appState => { - rerender() - } -} diff --git a/codegenerator/cli/templates/static/codegen/src/ink/components/ChainData.res b/codegenerator/cli/templates/static/codegen/src/ink/components/ChainData.res deleted file mode 100644 index 946e9a4e3..000000000 --- a/codegenerator/cli/templates/static/codegen/src/ink/components/ChainData.res +++ /dev/null @@ -1,161 +0,0 @@ -open Ink - -type syncing = { - firstEventBlockNumber: int, - latestProcessedBlock: int, - numEventsProcessed: int, -} -type synced = { - ...syncing, - timestampCaughtUpToHeadOrEndblock: Js.Date.t, -} - -type progress = SearchingForEvents | Syncing(syncing) | Synced(synced) - -let getNumberOfEventsProccessed = (progress: progress) => { - switch progress { - | SearchingForEvents => 0 - | Syncing(syncing) => syncing.numEventsProcessed - | Synced(synced) => synced.numEventsProcessed - } -} -type chainData = { - chain: ChainMap.Chain.t, - poweredByHyperSync: bool, - progress: progress, - latestFetchedBlockNumber: int, - currentBlockHeight: int, - numBatchesFetched: int, - endBlock: option, -} - -let minOfOption: (int, option) => int = (a: int, b: option) => { - switch (a, b) { - | (a, Some(b)) => min(a, b) - | (a, None) => a - } -} - -type number -@val external number: int => number = "Number" -@send external toLocaleString: number => string = "toLocaleString" -let formatLocaleString = n => n->number->toLocaleString - -module BlocksDisplay = { - @react.component - let make = (~latestProcessedBlock, ~currentBlockHeight) => { - - {"blocks: "->React.string} - - - {latestProcessedBlock->formatLocaleString->React.string} - - - {"/"->React.string} - {currentBlockHeight->formatLocaleString->React.string} - - - - } -} - -module SyncBar = { - @react.component - let make = ( - ~chainId, - ~loaded, - ~buffered=?, - ~outOf, - ~loadingColor, - ~poweredByHyperSync=true, - ~isSearching=false, - ) => { - - - {poweredByHyperSync ? {"⚡"->React.string} : React.null} - {"Chain ID: "->React.string} - {chainId->React.int} - {" "->React.string} - - {isSearching - ? 
- - - : } - - } -} - -@react.component -let make = (~chainData: chainData) => { - let { - chain, - progress, - poweredByHyperSync, - latestFetchedBlockNumber, - currentBlockHeight, - endBlock, - } = chainData - let chainId = chain->ChainMap.Chain.toChainId - - let toBlock = minOfOption(currentBlockHeight, endBlock) - - switch progress { - | SearchingForEvents => - - - {"Searching for events..."->React.string} - - - - - - | Syncing({firstEventBlockNumber, latestProcessedBlock, numEventsProcessed}) => - - - - - {"Events Processed: "->React.string} - - {numEventsProcessed->formatLocaleString->React.string} - - - - - - - | Synced({firstEventBlockNumber, latestProcessedBlock, numEventsProcessed}) => - - - - {"Events Processed: "->React.string} - {numEventsProcessed->React.int} - - - - - - - } -} diff --git a/codegenerator/cli/templates/static/codegen/src/ink/components/SyncETA.res b/codegenerator/cli/templates/static/codegen/src/ink/components/SyncETA.res deleted file mode 100644 index f3f83e09b..000000000 --- a/codegenerator/cli/templates/static/codegen/src/ink/components/SyncETA.res +++ /dev/null @@ -1,198 +0,0 @@ -open Ink -open Belt - -let isIndexerFullySynced = (chains: array) => { - chains->Array.reduce(true, (accum, current) => { - switch current.progress { - | Synced(_) => accum - | _ => false - } - }) -} - -let getTotalRemainingBlocks = (chains: array) => { - chains->Array.reduce(0, (accum, {progress, currentBlockHeight, latestFetchedBlockNumber, endBlock}) => { - let finalBlock = switch endBlock { - | Some(endBlock) => endBlock - | None => currentBlockHeight - } - switch progress { - | Syncing({latestProcessedBlock}) - | Synced({latestProcessedBlock}) => - finalBlock - latestProcessedBlock + accum - | SearchingForEvents => finalBlock - latestFetchedBlockNumber + accum - } - }) -} - -let getLatestTimeCaughtUpToHead = ( - chains: array, - indexerStartTime: Js.Date.t, -) => { - let latesttimestampCaughtUpToHeadOrEndblockFloat = chains->Array.reduce(0.0, (accum, current) => { - switch current.progress { - | Synced({timestampCaughtUpToHeadOrEndblock}) => - timestampCaughtUpToHeadOrEndblock->Js.Date.valueOf > accum - ? timestampCaughtUpToHeadOrEndblock->Js.Date.valueOf - : accum - | Syncing(_) - | SearchingForEvents => accum - } - }) - - DateFns.formatDistanceWithOptions( - indexerStartTime, - latesttimestampCaughtUpToHeadOrEndblockFloat->Js.Date.fromFloat, - {includeSeconds: true}, - ) -} - -let getTotalBlocksProcessed = (chains: array) => { - chains->Array.reduce(0, (accum, {progress, latestFetchedBlockNumber}) => { - switch progress { - | Syncing({latestProcessedBlock, firstEventBlockNumber}) - | Synced({latestProcessedBlock, firstEventBlockNumber}) => - latestProcessedBlock - firstEventBlockNumber + accum - | SearchingForEvents => latestFetchedBlockNumber + accum - } - }) -} - -let useShouldDisplayEta = (~chains: array) => { - let (shouldDisplayEta, setShouldDisplayEta) = React.useState(_ => false) - React.useEffect(() => { - //Only compute this while it is not displaying eta - if !shouldDisplayEta { - //Each chain should have fetched at least one batch - let (allChainsHaveFetchedABatch, totalNumBatchesFetched) = chains->Array.reduce((true, 0), ( - (allChainsHaveFetchedABatch, totalNumBatchesFetched), - chain, - ) => { - ( - allChainsHaveFetchedABatch && chain.numBatchesFetched >= 1, - totalNumBatchesFetched + chain.numBatchesFetched, - ) - }) - - //Min num fetched batches is num of chains + 2. All - // Chains should have fetched at least 1 batch. 
(They - // could then be blocked from fetching if they are past - //the max queue size on first batch) - // Only display once an additinal 2 batches have been fetched to allow - // eta to realistically stabalize - let numChains = chains->Array.length - let minTotalBatches = numChains + 2 - let hasMinNumBatches = totalNumBatchesFetched >= minTotalBatches - - let shouldDisplayEta = allChainsHaveFetchedABatch && hasMinNumBatches - - if shouldDisplayEta { - setShouldDisplayEta(_ => true) - } - } - - None - }, [chains]) - - shouldDisplayEta -} - -let useEta = (~chains, ~indexerStartTime) => { - let shouldDisplayEta = useShouldDisplayEta(~chains) - let (secondsToSub, setSecondsToSub) = React.useState(_ => 0.) - let (timeSinceStart, setTimeSinceStart) = React.useState(_ => 0.) - - React.useEffect2(() => { - setTimeSinceStart(_ => Js.Date.now() -. indexerStartTime->Js.Date.valueOf) - setSecondsToSub(_ => 0.) - - let intervalId = Js.Global.setInterval(() => { - setSecondsToSub(prev => prev +. 1.) - }, 1000) - - Some(() => Js.Global.clearInterval(intervalId)) - }, (chains, indexerStartTime)) - - //blocksProcessed/remainingBlocks = timeSoFar/eta - //eta = (timeSoFar/blocksProcessed) * remainingBlocks - - let blocksProcessed = getTotalBlocksProcessed(chains)->Int.toFloat - if shouldDisplayEta && blocksProcessed > 0. { - let nowDate = Js.Date.now() - let remainingBlocks = getTotalRemainingBlocks(chains)->Int.toFloat - let etaFloat = timeSinceStart /. blocksProcessed *. remainingBlocks - let millisToSub = secondsToSub *. 1000. - let etaFloat = Pervasives.max(etaFloat -. millisToSub, 0.0) //template this - let eta = (etaFloat +. nowDate)->Js.Date.fromFloat - let interval: DateFns.interval = {start: nowDate->Js.Date.fromFloat, end: eta} - let duration = DateFns.intervalToDuration(interval) - let formattedDuration = DateFns.formatDuration( - duration, - {format: ["hours", "minutes", "seconds"]}, - ) - let outputString = switch formattedDuration { - | "" => "less than 1 second" - | formattedDuration => formattedDuration - } - Some(outputString) - } else { - None - } -} - -module Syncing = { - @react.component - let make = (~etaStr) => { - - - {"Sync Time ETA: "->React.string} - - {etaStr->React.string} - {" ("->React.string} - - - - {" in progress"->React.string} - {")"->React.string} - - } -} - -module Synced = { - @react.component - let make = (~latestTimeCaughtUpToHeadStr) => { - - {"Time Synced: "->React.string} - {`${latestTimeCaughtUpToHeadStr}`->React.string} - {" ("->React.string} - {"synced"->React.string} - {")"->React.string} - - } -} - -module Calculating = { - @react.component - let make = () => { - - - - - {" Calculating ETA..."->React.string} - - } -} - -@react.component -let make = (~chains, ~indexerStartTime) => { - let optEta = useEta(~chains, ~indexerStartTime) - if isIndexerFullySynced(chains) { - let latestTimeCaughtUpToHeadStr = getLatestTimeCaughtUpToHead(chains, indexerStartTime) - //TODO add real time - } else { - switch optEta { - | Some(etaStr) => - | None => - } - } -} diff --git a/internal_docs/EventFetchers.md b/internal_docs/EventFetchers.md index da583557a..e33e32cab 100644 --- a/internal_docs/EventFetchers.md +++ b/internal_docs/EventFetchers.md @@ -32,7 +32,7 @@ TODO: currently the ChainManager is passed directly to the `EventProcessor` as d classDiagram class ChainFetcher { fetchedEventQueue: ChainEventQueue.t, - chainConfig: Config.chainConfig, + chainConfig: InternalConfig.chain, source: Source.source, startFetchingEvents(): promise diff --git 
a/scenarios/erc20_multichain_factory/test/DynamicContractRecovery_test.res b/scenarios/erc20_multichain_factory/test/DynamicContractRecovery_test.res index bf918ae7c..6643df96b 100644 --- a/scenarios/erc20_multichain_factory/test/DynamicContractRecovery_test.res +++ b/scenarios/erc20_multichain_factory/test/DynamicContractRecovery_test.res @@ -140,9 +140,13 @@ describe("Dynamic contract restart resistance test", () => { } let getFetchingDcAddressesFromDbState = async (~chainId=1, ~sql=?) => { + let chainConfig = config.chainMap->ChainMap.get(ChainMap.Chain.makeUnsafe(~chainId)) + await config.persistence->Persistence.init(~chainConfigs=[chainConfig]) let chainFetcher = await ChainFetcher.makeFromDbState( - config.chainMap->ChainMap.get(ChainMap.Chain.makeUnsafe(~chainId)), - ~maxAddrInPartition=Env.maxAddrInPartition, + chainConfig, + ~initialChainState=(config.persistence->Persistence.getInitializedState).chains + ->Js.Array2.find(chainState => chainState.id === chainId) + ->Option.getExn, ~config, ~isInReorgThreshold=true, ~sql?, @@ -233,51 +237,47 @@ describe("Dynamic contract restart resistance test", () => { ) try await Db.sql->Postgres.beginSql( - sql => [ - ( - async () => { - Assert.deepEqual( - await getFetchingDcAddressesFromDbState(~sql), - dcsBeforeRestart->Js.Array2.map(dc => dc["contract_address"]), - ~message="Should get all addresses on restart", - ) - - // But let's say the indexer crashed before - // the processing of events catch up to the dcs we stored in the db - // In this case on restart we should prune contracts after event_sync_state - let _ = - await sql->Postgres.unsafe(`UPDATE public.event_sync_state SET block_number = 0 WHERE chain_id = 1;`) - - Assert.deepEqual( - await getFetchingDcAddressesFromDbState(~sql), - // This one has - // registering_event_block_number: 0 - // registering_event_log_index: 0 - // So it's not pruned - [Mock.mockDynamicToken1], - ~message="Should keep only the dc up to the event_sync_state", - ) - - Assert.equal( - (await sql - ->Postgres.unsafe(`SELECT * FROM public.dynamic_contract_registry;`)) - ->Array.length, - 1, - ~message="Should clean up pruned dc from db on restart", - ) - Assert.equal( - (await sql - ->Postgres.unsafe(`SELECT * FROM public.dynamic_contract_registry_history;`)) - ->Array.length, - 1, - ~message=`Should clean up pruned dc history from db on restart. 
+ async sql => { + Assert.deepEqual( + await getFetchingDcAddressesFromDbState(~sql), + dcsBeforeRestart->Js.Array2.map(dc => dc["contract_address"]), + ~message="Should get all addresses on restart", + ) + + // But let's say the indexer crashed before + // the processing of events catch up to the dcs we stored in the db + // In this case on restart we should prune contracts after event_sync_state + let _ = + await sql->Postgres.unsafe(`UPDATE public.event_sync_state SET block_number = 0 WHERE chain_id = 1;`) + + Assert.deepEqual( + await getFetchingDcAddressesFromDbState(~sql), + // This one has + // registering_event_block_number: 0 + // registering_event_log_index: 0 + // So it's not pruned + [Mock.mockDynamicToken1], + ~message="Should keep only the dc up to the event_sync_state", + ) + + Assert.equal( + (await sql + ->Postgres.unsafe(`SELECT * FROM public.dynamic_contract_registry;`)) + ->Array.length, + 1, + ~message="Should clean up pruned dc from db on restart", + ) + Assert.equal( + (await sql + ->Postgres.unsafe(`SELECT * FROM public.dynamic_contract_registry_history;`)) + ->Array.length, + 1, + ~message=`Should clean up pruned dc history from db on restart. Note: Without it there's a case when the indexer might crash because of a conflict`, - ) + ) - raise(RollbackTransaction) - } - )(), - ], + raise(RollbackTransaction) + }, ) catch { | RollbackTransaction => () } diff --git a/scenarios/erc20_multichain_factory/test/TestDeleteEntity.res b/scenarios/erc20_multichain_factory/test/TestDeleteEntity.res index 2624dcde2..8607bae09 100644 --- a/scenarios/erc20_multichain_factory/test/TestDeleteEntity.res +++ b/scenarios/erc20_multichain_factory/test/TestDeleteEntity.res @@ -120,10 +120,7 @@ describe("Unsafe delete test", () => { Async.it("Deletes account entity successfully", async () => { //Setup a chainManager with unordered multichain mode to make processing happen //without blocking for the purposes of this test - let chainManager = ChainManager.makeFromConfig( - ~config, - ~maxAddrInPartition=Env.maxAddrInPartition, - ) + let chainManager = ChainManager.makeFromConfig(~config) //Setup initial state stub that will be used for both //initial chain data and reorg chain data diff --git a/scenarios/erc20_multichain_factory/test/TestWhereQuery.res b/scenarios/erc20_multichain_factory/test/TestWhereQuery.res index 9c2b5ae09..dd78bd6ca 100644 --- a/scenarios/erc20_multichain_factory/test/TestWhereQuery.res +++ b/scenarios/erc20_multichain_factory/test/TestWhereQuery.res @@ -114,10 +114,7 @@ describe("Tests where eq queries", () => { Async.it("Where Eq query returns values and removes after inmemory delete", async () => { //Setup a chainManager with unordered multichain mode to make processing happen //without blocking for the purposes of this test - let chainManager = ChainManager.makeFromConfig( - ~config, - ~maxAddrInPartition=Env.maxAddrInPartition, - ) + let chainManager = ChainManager.makeFromConfig(~config) //Setup initial state stub that will be used for both //initial chain data and reorg chain data diff --git a/scenarios/helpers/src/ChainMocking.res b/scenarios/helpers/src/ChainMocking.res index d7dae4770..02f1b212b 100644 --- a/scenarios/helpers/src/ChainMocking.res +++ b/scenarios/helpers/src/ChainMocking.res @@ -115,7 +115,7 @@ module Make = (Indexer: Indexer.S) => { } type t = { - chainConfig: Config.chainConfig, + chainConfig: InternalConfig.chain, blocks: array, maxBlocksReturned: int, blockTimestampInterval: int, @@ -151,7 +151,7 @@ module Make = (Indexer: Indexer.S) => { 
x( ~transactionIndex=i, ~logIndex=i, - ~chainId=self.chainConfig.chain->ChainMap.Chain.toChainId, + ~chainId=self.chainConfig.id, ~blockNumber, ~blockTimestamp, ) @@ -169,7 +169,7 @@ module Make = (Indexer: Indexer.S) => { let log: Internal.eventItem = { eventConfig: (eventConfig :> Internal.eventConfig), event: makeEvent(~blockHash), - chain: self.chainConfig.chain, + chain: ChainMap.Chain.makeUnsafe(~chainId=self.chainConfig.id), timestamp: blockTimestamp, blockNumber, logIndex, diff --git a/scenarios/helpers/src/Indexer.res b/scenarios/helpers/src/Indexer.res index d160b4eb1..331ea4a62 100644 --- a/scenarios/helpers/src/Indexer.res +++ b/scenarios/helpers/src/Indexer.res @@ -72,23 +72,4 @@ module type S = { ) => promise, } } - - module Config: { - type contract = { - name: string, - abi: Ethers.abi, - addresses: array, - events: array, - startBlock: option, - } - - type chainConfig = { - startBlock: int, - endBlock: option, - confirmedBlockThreshold: int, - chain: ChainMap.Chain.t, - contracts: array, - sources: array, - } - } } diff --git a/scenarios/test_codegen/package.json b/scenarios/test_codegen/package.json index a4a19f421..5f3b611d7 100644 --- a/scenarios/test_codegen/package.json +++ b/scenarios/test_codegen/package.json @@ -48,7 +48,6 @@ "ts-node": "^10.9.1" }, "dependencies": { - "@rescript/react": "0.12.1", "bignumber.js": "^9.1.2", "hardhat-abi-exporter": "^2.10.1", "helpers": "workspace:*", diff --git a/scenarios/test_codegen/pnpm-lock.yaml b/scenarios/test_codegen/pnpm-lock.yaml index 01bb2be00..d9064036b 100644 --- a/scenarios/test_codegen/pnpm-lock.yaml +++ b/scenarios/test_codegen/pnpm-lock.yaml @@ -8,15 +8,12 @@ importers: .: dependencies: - '@rescript/react': - specifier: 0.12.1 - version: 0.12.1(react-dom@18.3.1(react@18.2.0))(react@18.2.0) bignumber.js: specifier: ^9.1.2 version: 9.1.2 envio: specifier: file:../../codegenerator/cli/npm/envio - version: file:../../codegenerator/cli/npm/envio(typescript@5.5.4) + version: file:../../codegenerator/cli/npm/envio(react-dom@18.3.1(react@18.2.0))(react@18.2.0)(typescript@5.5.4) hardhat-abi-exporter: specifier: ^2.10.1 version: 2.10.1(hardhat@2.22.5(ts-node@10.9.2(@types/node@18.19.47)(typescript@5.5.4))(typescript@5.5.4)) @@ -99,7 +96,7 @@ importers: dependencies: envio: specifier: file:../../codegenerator/cli/npm/envio - version: file:../../codegenerator/cli/npm/envio(typescript@5.5.4) + version: file:../../codegenerator/cli/npm/envio(react-dom@18.3.1(react@18.2.0))(react@18.2.0)(typescript@5.5.4) rescript: specifier: 11.1.3 version: 11.1.3 @@ -136,9 +133,6 @@ importers: '@envio-dev/hypersync-client': specifier: 0.6.5 version: 0.6.5 - '@rescript/react': - specifier: 0.12.1 - version: 0.12.1(react-dom@18.3.1(react@18.2.0))(react@18.2.0) bignumber.js: specifier: 9.1.2 version: 9.1.2 @@ -150,22 +144,13 @@ importers: version: 16.4.5 envio: specifier: file:/Users/dzakh/code/envio/hyperindex/codegenerator/target/debug/envio/../../../cli/npm/envio - version: file:../../codegenerator/cli/npm/envio(typescript@5.5.4) + version: file:../../codegenerator/cli/npm/envio(react-dom@18.3.1(react@18.2.0))(react@18.2.0)(typescript@5.5.4) ethers: specifier: 6.8.0 version: 6.8.0 express: specifier: 4.19.2 version: 4.19.2 - ink: - specifier: 3.2.0 - version: 3.2.0(react@18.2.0) - ink-big-text: - specifier: 1.2.0 - version: 1.2.0(ink@3.2.0(react@18.2.0))(react@18.2.0) - ink-spinner: - specifier: 4.0.3 - version: 4.0.3(ink@3.2.0(react@18.2.0))(react@18.2.0) js-sdsl: specifier: 4.4.2 version: 4.4.2 @@ -5389,10 +5374,14 @@ snapshots: 
env-paths@2.2.1: {} - envio@file:../../codegenerator/cli/npm/envio(typescript@5.5.4): + envio@file:../../codegenerator/cli/npm/envio(react-dom@18.3.1(react@18.2.0))(react@18.2.0)(typescript@5.5.4): dependencies: '@envio-dev/hypersync-client': 0.6.5 + '@rescript/react': 0.12.1(react-dom@18.3.1(react@18.2.0))(react@18.2.0) bignumber.js: 9.1.2 + ink: 3.2.0(react@18.2.0) + ink-big-text: 1.2.0(ink@3.2.0(react@18.2.0))(react@18.2.0) + ink-spinner: 4.0.3(ink@3.2.0(react@18.2.0))(react@18.2.0) pino: 8.16.1 pino-pretty: 10.2.3 prom-client: 15.0.0 @@ -5400,7 +5389,10 @@ snapshots: rescript-schema: 9.3.0(rescript@11.1.3) viem: 2.21.0(typescript@5.5.4) transitivePeerDependencies: + - '@types/react' - bufferutil + - react + - react-dom - typescript - utf-8-validate - zod diff --git a/scenarios/test_codegen/rescript.json b/scenarios/test_codegen/rescript.json index fe0d69505..484a9b176 100644 --- a/scenarios/test_codegen/rescript.json +++ b/scenarios/test_codegen/rescript.json @@ -29,7 +29,6 @@ "version": 4 }, "bs-dependencies": [ - "@rescript/react", "generated", "rescript-nodejs", "rescript-schema", diff --git a/scenarios/test_codegen/test/ChainManager_test.res b/scenarios/test_codegen/test/ChainManager_test.res index a05cfc858..5b7882e0d 100644 --- a/scenarios/test_codegen/test/ChainManager_test.res +++ b/scenarios/test_codegen/test/ChainManager_test.res @@ -6,7 +6,7 @@ let populateChainQueuesWithRandomEvents = (~runTime=1000, ~maxBlockTime=15, ()) let allEvents = [] let numberOfMockEventsCreated = ref(0) - let chainFetchers = config.chainMap->ChainMap.map(({chain}) => { + let chainFetchers = config.chainMap->ChainMap.map(({id}) => { let getCurrentTimestamp = () => { let timestampMillis = Js.Date.now() // Convert milliseconds to seconds @@ -53,11 +53,11 @@ let populateChainQueuesWithRandomEvents = (~runTime=1000, ~maxBlockTime=15, ()) for logIndex in 0 to numberOfEventsInBatch { let batchItem: Internal.eventItem = { timestamp: currentTime.contents, - chain, + chain: ChainMap.Chain.makeUnsafe(~chainId=id), blockNumber: currentBlockNumber.contents, logIndex, eventConfig: Utils.magic("Mock eventConfig in ChainManager test"), - event: `mock event (chainId)${chain->ChainMap.Chain.toString} - (blockNumber)${currentBlockNumber.contents->string_of_int} - (logIndex)${logIndex->string_of_int} - (timestamp)${currentTime.contents->string_of_int}`->Utils.magic, + event: `mock event (chainId)${id->Int.toString} - (blockNumber)${currentBlockNumber.contents->string_of_int} - (logIndex)${logIndex->string_of_int} - (timestamp)${currentTime.contents->string_of_int}`->Utils.magic, } allEvents->Js.Array2.push(batchItem)->ignore @@ -100,6 +100,7 @@ let populateChainQueuesWithRandomEvents = (~runTime=1000, ~maxBlockTime=15, ()) numEventsProcessed: 0, numBatchesFetched: 0, startBlock: 0, + endBlock: None, fetchState: fetchState.contents, logger: Logging.getLogger(), sourceManager: SourceManager.make( diff --git a/scenarios/test_codegen/test/E2EEthNode_test.res b/scenarios/test_codegen/test/E2EEthNode_test.res index 3eca97648..011b2e36a 100644 --- a/scenarios/test_codegen/test/E2EEthNode_test.res +++ b/scenarios/test_codegen/test/E2EEthNode_test.res @@ -15,10 +15,10 @@ describe("E2E Integration Test", () => { let contracts = await SetupRpcNode.deployContracts() await SetupRpcNode.runBasicGravatarTransactions(contracts.gravatar) - let localChainConfig: Config.chainConfig = { + let localChainConfig: InternalConfig.chain = { let contracts = [ { - Config.name: "Gravatar", + InternalConfig.name: "Gravatar", abi: 
Types.Gravatar.abi, addresses: ["0x5FbDB2315678afecb367f032d93F642f64180aa3"->Address.Evm.fromStringOrThrow], events: [ @@ -41,8 +41,7 @@ describe("E2E Integration Test", () => { { confirmedBlockThreshold: 200, startBlock: 0, - endBlock: None, - chain, + id: 1337, contracts, sources: [ RpcSource.make({ diff --git a/scenarios/test_codegen/test/EventHandler_test.ts b/scenarios/test_codegen/test/EventHandler_test.ts index a3ff623e6..cf95b529b 100644 --- a/scenarios/test_codegen/test/EventHandler_test.ts +++ b/scenarios/test_codegen/test/EventHandler_test.ts @@ -21,7 +21,7 @@ describe("Use Envio test framework to test event handlers", () => { assert.deepEqual(registeredDcs, [ { id: `1-${dcAddress}`, - contract_type: "SimpleNft", + contract_name: "SimpleNft", contract_address: dcAddress, chain_id: 1, registering_event_block_number: 0, @@ -51,7 +51,7 @@ describe("Use Envio test framework to test event handlers", () => { assert.deepEqual(registeredDcs, [ { id: `1-${dcAddress}`, - contract_type: "SimpleNft", + contract_name: "SimpleNft", contract_address: dcAddress, chain_id: 1, registering_event_block_number: 0, @@ -313,7 +313,7 @@ describe("Use Envio test framework to test event handlers", () => { assert.deepEqual(registeredDcs, [ { id: `1-${expectedChecksummedAddress}`, - contract_type: "SimpleNft", + contract_name: "SimpleNft", contract_address: expectedChecksummedAddress, chain_id: 1, registering_event_block_number: 0, diff --git a/scenarios/test_codegen/test/Integration_ts_helpers.res b/scenarios/test_codegen/test/Integration_ts_helpers.res index fa5b0786a..c5c1264ed 100644 --- a/scenarios/test_codegen/test/Integration_ts_helpers.res +++ b/scenarios/test_codegen/test/Integration_ts_helpers.res @@ -1,11 +1,11 @@ @genType.opaque -type chainConfig = Config.chainConfig +type chainConfig = InternalConfig.chain @genType let getLocalChainConfig = (nftFactoryContractAddress): chainConfig => { let contracts = [ { - Config.name: "NftFactory", + InternalConfig.name: "NftFactory", abi: Types.NftFactory.abi, addresses: [nftFactoryContractAddress], events: [(Types.NftFactory.SimpleNftCreated.register() :> Internal.eventConfig)], @@ -30,8 +30,7 @@ let getLocalChainConfig = (nftFactoryContractAddress): chainConfig => { { confirmedBlockThreshold: 200, startBlock: 1, - endBlock: None, - chain, + id: 1337, contracts, sources: [ RpcSource.make({ @@ -77,5 +76,7 @@ let startProcessing = (config, cfg: chainConfig, chainManager: chainManager) => let gsManager = globalState->GlobalStateManager.make - gsManager->GlobalStateManager.dispatchTask(NextQuery(Chain(cfg.chain))) + gsManager->GlobalStateManager.dispatchTask( + NextQuery(Chain(ChainMap.Chain.makeUnsafe(~chainId=cfg.id))), + ) } diff --git a/scenarios/test_codegen/test/SerDe_Test.res b/scenarios/test_codegen/test/SerDe_Test.res index a8a122960..503e8c70d 100644 --- a/scenarios/test_codegen/test/SerDe_Test.res +++ b/scenarios/test_codegen/test/SerDe_Test.res @@ -76,10 +76,12 @@ describe("SerDe Test", () => { ) let setHistory = (sql, row) => - sql->PgStorage.setEntityHistoryOrThrow( - ~entityHistory=Entities.EntityWithAllTypes.entityHistory, - ~rows=[row], - ) + Promise.all( + sql->PgStorage.setEntityHistoryOrThrow( + ~entityHistory=Entities.EntityWithAllTypes.entityHistory, + ~rows=[row], + ), + )->Promise.ignoreValue try await Db.sql->setHistory(entityHistoryItem) catch { | exn => @@ -192,10 +194,12 @@ SELECT * FROM unnest($1::NUMERIC[],$2::NUMERIC(10, 8)[],$3::NUMERIC[],$4::INTEGE ) let setHistory = (sql, row) => - sql->PgStorage.setEntityHistoryOrThrow( - 
~entityHistory=Entities.EntityWithAllNonArrayTypes.entityHistory, - ~rows=[row], - ) + Promise.all( + sql->PgStorage.setEntityHistoryOrThrow( + ~entityHistory=Entities.EntityWithAllNonArrayTypes.entityHistory, + ~rows=[row], + ), + )->Promise.ignoreValue try await Db.sql->setHistory(entityHistoryItem) catch { | exn => diff --git a/scenarios/test_codegen/test/__mocks__/MockConfig.res b/scenarios/test_codegen/test/__mocks__/MockConfig.res index c0d4cb296..3d841ad55 100644 --- a/scenarios/test_codegen/test/__mocks__/MockConfig.res +++ b/scenarios/test_codegen/test/__mocks__/MockConfig.res @@ -4,7 +4,7 @@ let chain1337 = ChainMap.Chain.makeUnsafe(~chainId=1337) let contracts = [ { - Config.name: "Gravatar", + InternalConfig.name: "Gravatar", abi: Types.Gravatar.abi, addresses: ["0x2B2f78c5BF6D9C12Ee1225D5F374aa91204580c3"->Address.Evm.fromStringOrThrow], events: [ @@ -38,11 +38,10 @@ let evmContracts = contracts->Js.Array2.map((contract): Internal.evmContractConf ), }) -let mockChainConfig: Config.chainConfig = { +let mockChainConfig: InternalConfig.chain = { + id: 1337, confirmedBlockThreshold: 200, startBlock: 1, - endBlock: None, - chain: chain1337, contracts, sources: [ RpcSource.make({ diff --git a/scenarios/test_codegen/test/helpers/Mock.res b/scenarios/test_codegen/test/helpers/Mock.res index 7113302cb..1003ac192 100644 --- a/scenarios/test_codegen/test/helpers/Mock.res +++ b/scenarios/test_codegen/test/helpers/Mock.res @@ -36,8 +36,8 @@ module Storage = { type method = [ | #isInitialized | #initialize + | #loadInitialState | #dumpEffectCache - | #restoreEffectCache | #setEffectCacheOrThrow | #loadByIdsOrThrow | #loadByFieldOrThrow @@ -49,10 +49,12 @@ module Storage = { resolveIsInitialized: bool => unit, initializeCalls: array<{ "entities": array, - "generalTables": array, + "chainConfigs": array, "enums": array>, }>, - resolveInitialize: unit => unit, + resolveInitialize: Persistence.initialState => unit, + loadInitialStateCalls: array, + resolveLoadInitialState: Persistence.initialState => unit, loadByIdsOrThrowCalls: array<{"ids": array, "tableName": string}>, loadByFieldOrThrowCalls: array<{ "fieldName": string, @@ -61,7 +63,6 @@ module Storage = { "operator": Persistence.operator, }>, dumpEffectCacheCalls: ref, - restoreEffectCacheCalls: array<{"withUpload": bool}>, storage: Persistence.storage, } @@ -89,8 +90,9 @@ module Storage = { let loadByIdsOrThrowCalls = [] let loadByFieldOrThrowCalls = [] let dumpEffectCacheCalls = ref(0) - let restoreEffectCacheCalls = [] let setEffectCacheOrThrowCalls = ref(0) + let loadInitialStateCalls = [] + let loadInitialStateResolveFns = [] { isInitializedCalls, @@ -98,12 +100,15 @@ module Storage = { loadByIdsOrThrowCalls, loadByFieldOrThrowCalls, dumpEffectCacheCalls, - restoreEffectCacheCalls, + loadInitialStateCalls, + resolveLoadInitialState: (initialState: Persistence.initialState) => { + loadInitialStateResolveFns->Js.Array2.forEach(resolve => resolve(initialState)) + }, resolveIsInitialized: bool => { isInitializedResolveFns->Js.Array2.forEach(resolve => resolve(bool)) }, - resolveInitialize: () => { - initializeResolveFns->Js.Array2.forEach(resolve => resolve()) + resolveInitialize: (initialState: Persistence.initialState) => { + initializeResolveFns->Js.Array2.forEach(resolve => resolve(initialState)) }, storage: { isInitialized: implement(#isInitialized, () => { @@ -112,11 +117,11 @@ module Storage = { isInitializedResolveFns->Js.Array2.push(resolve)->ignore }) }), - initialize: implement(#initialize, (~entities=[], ~generalTables=[], 
~enums=[]) => { + initialize: implement(#initialize, (~chainConfigs=[], ~entities=[], ~enums=[]) => { initializeCalls ->Js.Array2.push({ "entities": entities, - "generalTables": generalTables, + "chainConfigs": chainConfigs, "enums": enums, }) ->ignore @@ -124,14 +129,16 @@ module Storage = { initializeResolveFns->Js.Array2.push(resolve)->ignore }) }), + loadInitialState: implement(#loadInitialState, () => { + loadInitialStateCalls->Js.Array2.push(true)->ignore + Promise.make((resolve, _reject) => { + loadInitialStateResolveFns->Js.Array2.push(resolve)->ignore + }) + }), dumpEffectCache: implement(#dumpEffectCache, () => { dumpEffectCacheCalls := dumpEffectCacheCalls.contents + 1 Promise.resolve() }), - restoreEffectCache: implement(#restoreEffectCache, (~withUpload) => { - restoreEffectCacheCalls->Js.Array2.push({"withUpload": withUpload})->ignore - Promise.resolve([]) - }), setEffectCacheOrThrow: implement(#setEffectCacheOrThrow, ( ~effect as _, ~items as _, @@ -190,13 +197,14 @@ module Storage = { storageStatus: Ready({ cleanRun: false, cache: Js.Dict.empty(), + chains: [], }), } } } @genType -let mockRawEventRow: TablesStatic.RawEvents.t = { +let mockRawEventRow: InternalTable.RawEvents.t = { chainId: 1, eventId: 1234567890n, contractName: "NftFactory", diff --git a/scenarios/test_codegen/test/lib_tests/EntityHistory_test.res b/scenarios/test_codegen/test/lib_tests/EntityHistory_test.res index aa5f7b082..7384ac50b 100644 --- a/scenarios/test_codegen/test/lib_tests/EntityHistory_test.res +++ b/scenarios/test_codegen/test/lib_tests/EntityHistory_test.res @@ -3,10 +3,6 @@ open RescriptMocha //unsafe polymorphic toString binding for any type @send external toStringUnsafe: 'a => string = "toString" -// These are mandatory tables that must be created for every Envio-managed schema. -// The event_sync_state table is used to distinguish Envio-controlled schemas from others. 
-let generalTables = [TablesStatic.EventSyncState.table] - let stripUndefinedFieldsInPlace = (val: 'a): 'a => { let json = val->(Utils.magic: 'a => Js.Json.t) //Hot fix for rescript equality check that removes optional fields @@ -39,7 +35,7 @@ module TestEntity = { fieldB: option, } - let name = "TestEntity"->(Utils.magic: string => Enums.EntityType.t) + let name = "TestEntity" let schema = S.schema(s => { id: s.matches(S.string), fieldA: s.matches(S.int), @@ -56,7 +52,7 @@ module TestEntity = { ], ) - let entityHistory = table->EntityHistory.fromTable(~pgSchema="public", ~schema) + let entityHistory = table->EntityHistory.fromTable(~schema) external castToInternal: t => Internal.entity = "%identity" } @@ -222,7 +218,7 @@ BEGIN END; $$ LANGUAGE plpgsql;` - Assert.equal(expected, TestEntity.entityHistory.createInsertFnQuery) + Assert.equal(expected, TestEntity.entityHistory.makeInsertFnQuery(~pgSchema="public")) }) it("Creates an entity history table", () => { let createQuery = @@ -254,9 +250,9 @@ $$ LANGUAGE plpgsql;` ~pgPort=Env.Db.port, ) try { - await storage.initialize( + let _ = await storage.initialize( + ~chainConfigs=[], ~entities=[module(TestEntity)->Entities.entityModToInternal], - ~generalTables, ~enums=[Persistence.entityHistoryActionEnumConfig->Internal.fromGenericEnumConfig], ) } catch { @@ -265,7 +261,9 @@ $$ LANGUAGE plpgsql;` Assert.fail("Failed setting up tables") } - switch await Db.sql->Postgres.unsafe(TestEntity.entityHistory.createInsertFnQuery) { + switch await Db.sql->Postgres.unsafe( + TestEntity.entityHistory.makeInsertFnQuery(~pgSchema="public"), + ) { | exception exn => Js.log2("createInsertFnQuery exn", exn) Assert.fail("Failed creating insert function") @@ -308,11 +306,15 @@ $$ LANGUAGE plpgsql;` }), } - switch await Db.sql->PgStorage.setEntityHistoryOrThrow( - ~entityHistory=TestEntity.entityHistory, - ~rows=[entityHistoryItem], - ~shouldCopyCurrentEntity=true, - ) { + switch { + await Promise.all( + Db.sql->PgStorage.setEntityHistoryOrThrow( + ~entityHistory=TestEntity.entityHistory, + ~rows=[entityHistoryItem], + ~shouldCopyCurrentEntity=true, + ), + ) + } { | exception exn => Js.log2("insertRow exn", exn) Assert.fail("Failed to insert mock entity history") @@ -355,42 +357,46 @@ $$ LANGUAGE plpgsql;` let currentHistoryItems = await Db.sql->getAllMockEntityHistory Assert.deepEqual(currentHistoryItems, expectedResult) - switch await Db.sql->PgStorage.setEntityHistoryOrThrow( - ~entityHistory=TestEntity.entityHistory, - ~rows=[ - { - entityData: Set({id: "2", fieldA: 1, fieldB: None}), - previous: None, - current: { - chain_id: 1, - block_timestamp: 4, - block_number: 4, - log_index: 6, + switch await Promise.all( + Db.sql->PgStorage.setEntityHistoryOrThrow( + ~entityHistory=TestEntity.entityHistory, + ~rows=[ + { + entityData: Set({id: "2", fieldA: 1, fieldB: None}), + previous: None, + current: { + chain_id: 1, + block_timestamp: 4, + block_number: 4, + log_index: 6, + }, }, - }, - ], - ~shouldCopyCurrentEntity=true, + ], + ~shouldCopyCurrentEntity=true, + ), ) { | exception exn => Js.log2("insertRow exn", exn) Assert.fail("Failed to insert mock entity history") | _ => () } - switch await Db.sql->PgStorage.setEntityHistoryOrThrow( - ~entityHistory=TestEntity.entityHistory, - ~rows=[ - { - entityData: Set({id: "2", fieldA: 3, fieldB: None}), - previous: None, - current: { - chain_id: 1, - block_timestamp: 4, - block_number: 10, - log_index: 6, + switch await Promise.all( + Db.sql->PgStorage.setEntityHistoryOrThrow( + ~entityHistory=TestEntity.entityHistory, + 
~rows=[ + { + entityData: Set({id: "2", fieldA: 3, fieldB: None}), + previous: None, + current: { + chain_id: 1, + block_timestamp: 4, + block_number: 10, + log_index: 6, + }, }, - }, - ], - ~shouldCopyCurrentEntity=true, + ], + ~shouldCopyCurrentEntity=true, + ), ) { | exception exn => Js.log2("insertRow exn", exn) @@ -398,21 +404,23 @@ $$ LANGUAGE plpgsql;` | _ => () } - await Db.sql->PgStorage.setEntityHistoryOrThrow( - ~entityHistory=TestEntity.entityHistory, - ~rows=[ - { - entityData: Set({id: "3", fieldA: 4, fieldB: None}), - previous: None, - current: { - chain_id: 137, - block_timestamp: 4, - block_number: 7, - log_index: 6, + let _ = await Promise.all( + Db.sql->PgStorage.setEntityHistoryOrThrow( + ~entityHistory=TestEntity.entityHistory, + ~rows=[ + { + entityData: Set({id: "3", fieldA: 4, fieldB: None}), + previous: None, + current: { + chain_id: 137, + block_timestamp: 4, + block_number: 7, + log_index: 6, + }, }, - }, - ], - ~shouldCopyCurrentEntity=true, + ], + ~shouldCopyCurrentEntity=true, + ), ) }) @@ -654,13 +662,16 @@ describe("Entity history rollbacks", () => { ~pgHost=Env.Db.host, ~pgPort=Env.Db.port, ) - await storage.initialize( + let _ = await storage.initialize( + ~chainConfigs=[], ~entities=[module(TestEntity)->Entities.entityModToInternal], - ~generalTables, ~enums=[Persistence.entityHistoryActionEnumConfig->Internal.fromGenericEnumConfig], ) - let _ = await Db.sql->Postgres.unsafe(TestEntity.entityHistory.createInsertFnQuery) + let _ = + await Db.sql->Postgres.unsafe( + TestEntity.entityHistory.makeInsertFnQuery(~pgSchema="public"), + ) try await Db.sql->PgStorage.setOrThrow( ~items=[ @@ -677,12 +688,14 @@ describe("Entity history rollbacks", () => { } try await Db.sql->Postgres.beginSql( - sql => [ - sql->PgStorage.setEntityHistoryOrThrow( + sql => + sql + ->PgStorage.setEntityHistoryOrThrow( ~entityHistory=TestEntity.entityHistory, ~rows=Mocks.GnosisBug.historyRows, - ), - ], + ) + ->Promise.all + ->Promise.ignoreValue, ) catch { | exn => Js.log2("insert mock rows exn", exn) @@ -777,12 +790,14 @@ describe("Entity history rollbacks", () => { // set an updated version of its row to get a copied entity history try await Db.sql->Postgres.beginSql( - sql => [ - sql->PgStorage.setEntityHistoryOrThrow( + sql => + sql + ->PgStorage.setEntityHistoryOrThrow( ~entityHistory=TestEntity.entityHistory, ~rows=Mocks.GnosisBug.historyRowsForPrune, - ), - ], + ) + ->Promise.all + ->Promise.ignoreValue, ) catch { | exn => Js.log2("insert mock rows exn", exn) @@ -831,21 +846,26 @@ describe("Entity history rollbacks", () => { ~pgHost=Env.Db.host, ~pgPort=Env.Db.port, ) - await storage.initialize( + let _ = await storage.initialize( + ~chainConfigs=[], ~entities=[module(TestEntity)->Entities.entityModToInternal], - ~generalTables, ~enums=[Persistence.entityHistoryActionEnumConfig->Internal.fromGenericEnumConfig], ) - let _ = await Db.sql->Postgres.unsafe(TestEntity.entityHistory.createInsertFnQuery) + let _ = + await Db.sql->Postgres.unsafe( + TestEntity.entityHistory.makeInsertFnQuery(~pgSchema="public"), + ) try await Db.sql->Postgres.beginSql( - sql => [ - sql->PgStorage.setEntityHistoryOrThrow( + sql => + sql + ->PgStorage.setEntityHistoryOrThrow( ~entityHistory=TestEntity.entityHistory, ~rows=Mocks.historyRows, - ), - ], + ) + ->Promise.all + ->Promise.ignoreValue, ) catch { | exn => Js.log2("insert mock rows exn", exn) @@ -1128,13 +1148,14 @@ describe_skip("Prune performance test", () => { ~pgHost=Env.Db.host, ~pgPort=Env.Db.port, ) - await storage.initialize( + let _ = 
await storage.initialize( ~entities=[module(TestEntity)->Entities.entityModToInternal], - ~generalTables, + ~chainConfigs=[], ~enums=[Persistence.entityHistoryActionEnumConfig->Internal.fromGenericEnumConfig], ) - let _ = await Db.sql->Postgres.unsafe(TestEntity.entityHistory.createInsertFnQuery) + let _ = + await Db.sql->Postgres.unsafe(TestEntity.entityHistory.makeInsertFnQuery(~pgSchema="public")) let rows: array = [] for i in 0 to 1000 { @@ -1158,9 +1179,11 @@ describe_skip("Prune performance test", () => { } try await Db.sql->Postgres.beginSql( - sql => [ - sql->PgStorage.setEntityHistoryOrThrow(~entityHistory=TestEntity.entityHistory, ~rows), - ], + sql => + sql + ->PgStorage.setEntityHistoryOrThrow(~entityHistory=TestEntity.entityHistory, ~rows) + ->Promise.all + ->Promise.ignoreValue, ) catch { | exn => Js.log2("insert mock rows exn", exn) diff --git a/scenarios/test_codegen/test/lib_tests/FetchState_test.res b/scenarios/test_codegen/test/lib_tests/FetchState_test.res index 07bc3fd72..f12d5081f 100644 --- a/scenarios/test_codegen/test/lib_tests/FetchState_test.res +++ b/scenarios/test_codegen/test/lib_tests/FetchState_test.res @@ -3047,3 +3047,136 @@ describe("Dynamic contracts with start blocks", () => { ) }) }) + +describe("FetchState buffer overflow prevention", () => { + it( + "Should limit endBlock when maxQueryBlockNumber < currentBlockHeight to prevent buffer overflow", + () => { + let fetchState = makeInitial() + + // Build up a large queue using public API (handleQueryResult) + // queue.length = 15, targetBufferSize = 10 + // targetBlockIdx = 15 - 10 = 5 + // maxQueryBlockNumber should be the blockNumber at index 5 (which is 15) + let largeQueueEvents = [ + mockEvent(~blockNumber=20), // index 0 + mockEvent(~blockNumber=19), // index 1 + mockEvent(~blockNumber=18), // index 2 + mockEvent(~blockNumber=17), // index 3 + mockEvent(~blockNumber=16), // index 4 + mockEvent(~blockNumber=15), // index 5 <- this should be maxQueryBlockNumber + mockEvent(~blockNumber=14), // index 6 + mockEvent(~blockNumber=13), // index 7 + mockEvent(~blockNumber=12), // index 8 + mockEvent(~blockNumber=11), // index 9 + mockEvent(~blockNumber=10), // index 10 + mockEvent(~blockNumber=9), // index 11 + mockEvent(~blockNumber=8), // index 12 + mockEvent(~blockNumber=7), // index 13 + mockEvent(~blockNumber=6), // index 14 + ] + + let fetchStateWithLargeQueue = + fetchState + ->FetchState.handleQueryResult( + ~query={ + partitionId: "0", + target: Head, + selection: fetchState.normalSelection, + addressesByContractName: Js.Dict.fromArray([("Gravatar", [mockAddress0])]), + fromBlock: 0, + indexingContracts: fetchState.indexingContracts, + }, + ~latestFetchedBlock={blockNumber: 30, blockTimestamp: 30 * 15}, + ~newItems=largeQueueEvents, + ~currentBlockHeight=30, + ) + ->Result.getExn + + // Test case 1: With endBlock set, should be limited by maxQueryBlockNumber + let fetchStateWithEndBlock = {...fetchStateWithLargeQueue, endBlock: Some(25)} + let query1 = + fetchStateWithEndBlock->FetchState.getNextQuery( + ~currentBlockHeight=30, + ~concurrencyLimit=10, + ~targetBufferSize=10, + ~stateId=0, + ) + + switch query1 { + | Ready([q]) => + // The query should have endBlock limited to maxQueryBlockNumber (15) + switch q.target { + | EndBlock({toBlock}) => + Assert.equal( + toBlock, + 15, + ~message="Should limit endBlock to maxQueryBlockNumber (15) when both endBlock and maxQueryBlockNumber are present", + ) + | _ => Assert.fail("Expected EndBlock target when buffer is limited") + } + | _ => 
Assert.fail("Expected Ready query when buffer limiting is active") + } + + // Test case 2: endBlock=None, maxQueryBlockNumber=15 -> Should use Some(15) + let fetchStateNoEndBlock = {...fetchStateWithLargeQueue, endBlock: None} + let query2 = + fetchStateNoEndBlock->FetchState.getNextQuery( + ~currentBlockHeight=30, + ~concurrencyLimit=10, + ~targetBufferSize=10, + ~stateId=0, + ) + + switch query2 { + | Ready([q]) => + switch q.target { + | EndBlock({toBlock}) => + Assert.equal( + toBlock, + 15, + ~message="Should set endBlock to maxQueryBlockNumber (15) when no endBlock was specified", + ) + | _ => + Assert.fail("Expected EndBlock target when buffer limiting is active and no endBlock set") + } + | _ => Assert.fail("Expected Ready query when buffer limiting is active") + } + + // Test case 3: Small queue, no buffer limiting -> Should use Head target + let fetchStateSmallQueue = + fetchState + ->FetchState.handleQueryResult( + ~query={ + partitionId: "0", + target: Head, + selection: fetchState.normalSelection, + addressesByContractName: Js.Dict.fromArray([("Gravatar", [mockAddress0])]), + fromBlock: 0, + indexingContracts: fetchState.indexingContracts, + }, + ~latestFetchedBlock={blockNumber: 10, blockTimestamp: 10 * 15}, + ~newItems=[mockEvent(~blockNumber=5)], + ~currentBlockHeight=10, + ) + ->Result.getExn + + let query3 = + fetchStateSmallQueue->FetchState.getNextQuery( + ~currentBlockHeight=30, + ~concurrencyLimit=10, + ~targetBufferSize=10, + ~stateId=0, + ) + + switch query3 { + | Ready([q]) => + switch q.target { + | Head => Assert.ok(true, ~message="Should use Head target when buffer is not limited") + | _ => Assert.fail("Expected Head target when buffer is not limited") + } + | _ => Assert.fail("Expected Ready query") + } + }, + ) +}) diff --git a/scenarios/test_codegen/test/lib_tests/Persistence_test.res b/scenarios/test_codegen/test/lib_tests/Persistence_test.res index 0c47bfedd..778e4d990 100644 --- a/scenarios/test_codegen/test/lib_tests/Persistence_test.res +++ b/scenarios/test_codegen/test/lib_tests/Persistence_test.res @@ -2,29 +2,15 @@ open RescriptMocha describe("Test Persistence layer init", () => { Async.it("Should initialize the persistence layer without the user entities", async () => { - let storageMock = Mock.Storage.make([#isInitialized, #restoreEffectCache, #initialize]) - - let persistence = Persistence.make( - ~userEntities=[], - ~staticTables=[], - ~dcRegistryEntityConfig=module( - TablesStatic.DynamicContractRegistry - )->Entities.entityModToInternal, - ~allEnums=[], - ~storage=storageMock.storage, - ) + let storageMock = Mock.Storage.make([#isInitialized, #loadInitialState, #initialize]) + + let persistence = Persistence.make(~userEntities=[], ~allEnums=[], ~storage=storageMock.storage) Assert.deepEqual( persistence.allEntities, - [module(TablesStatic.DynamicContractRegistry)->Entities.entityModToInternal], + [module(InternalTable.DynamicContractRegistry)->Entities.entityModToInternal], ~message=`All entities should automatically include the indexer core ones`, ) - Assert.deepEqual( - persistence.staticTables, - [], - // This is not implemented yet and passed via dependencies - ~message=`All static tables should automatically include the indexer core ones`, - ) Assert.deepEqual( persistence.allEnums, [Persistence.entityHistoryActionEnumConfig->Internal.fromGenericEnumConfig], @@ -43,7 +29,7 @@ describe("Test Persistence layer init", () => { ) Assert.deepEqual(storageMock.initializeCalls, [], ~message=`Storage should not be initialized`) - let p = 
persistence->Persistence.init + let p = persistence->Persistence.init(~chainConfigs=[]) Assert.deepEqual( storageMock.isInitializedCalls, @@ -69,43 +55,56 @@ describe("Test Persistence layer init", () => { ) Assert.deepEqual( - storageMock.initializeCalls, - [ - { - "entities": persistence.allEntities, - "generalTables": persistence.staticTables, - "enums": persistence.allEnums, - }, - ], + ( + storageMock.isInitializedCalls->Array.length, + storageMock.initializeCalls, + storageMock.loadInitialStateCalls->Array.length, + ), + ( + 1, + [ + { + "entities": persistence.allEntities, + "chainConfigs": [], + "enums": persistence.allEnums, + }, + ], + 0, + ), ~message=`Should initialize if storage is not initialized`, ) - storageMock.resolveInitialize() + let initialState: Persistence.initialState = { + cleanRun: true, + chains: [], + cache: Js.Dict.empty(), + } + storageMock.resolveInitialize(initialState) let _ = await Promise.resolve() let _ = await Promise.resolve() let _ = await Promise.resolve() Assert.deepEqual( persistence.storageStatus, - Persistence.Ready({cleanRun: true, cache: Js.Dict.empty()}), + Persistence.Ready(initialState), ~message=`Storage status should be ready`, ) // Can resolve the promise now await p - await persistence->Persistence.init + await persistence->Persistence.init(~chainConfigs=[]) Assert.deepEqual( ( storageMock.isInitializedCalls->Array.length, storageMock.initializeCalls->Array.length, - storageMock.restoreEffectCacheCalls, + storageMock.loadInitialStateCalls->Array.length, ), - (1, 1, [{"withUpload": true}]), + (1, 1, 0), ~message=`Calling init the second time shouldn't do anything`, ) - let _p2 = persistence->Persistence.init(~reset=true) + let _p2 = persistence->Persistence.init(~reset=true, ~chainConfigs=[]) Assert.deepEqual( ( storageMock.isInitializedCalls->Array.length, @@ -117,7 +116,7 @@ describe("Test Persistence layer init", () => { 2, { "entities": persistence.allEntities, - "generalTables": persistence.staticTables, + "chainConfigs": [], "enums": persistence.allEnums, }, ), @@ -127,40 +126,38 @@ describe("Test Persistence layer init", () => { }) Async.it("Should skip initialization when storage is already initialized", async () => { - let storageMock = Mock.Storage.make([#isInitialized, #restoreEffectCache]) - - let persistence = Persistence.make( - ~userEntities=[], - ~staticTables=[], - ~dcRegistryEntityConfig=module( - TablesStatic.DynamicContractRegistry - )->Entities.entityModToInternal, - ~allEnums=[], - ~storage=storageMock.storage, - ) + let storageMock = Mock.Storage.make([#isInitialized, #loadInitialState]) + + let persistence = Persistence.make(~userEntities=[], ~allEnums=[], ~storage=storageMock.storage) - let p = persistence->Persistence.init + let p = persistence->Persistence.init(~chainConfigs=[]) // Additional calls to init should not do anything - let _ = persistence->Persistence.init - let _ = persistence->Persistence.init + let _ = persistence->Persistence.init(~chainConfigs=[]) + let _ = persistence->Persistence.init(~chainConfigs=[]) storageMock.resolveIsInitialized(true) let _ = await Promise.resolve() - let _ = await Promise.resolve() + + let initialState: Persistence.initialState = { + cleanRun: false, + chains: [], + cache: Js.Dict.empty(), + } + storageMock.resolveLoadInitialState(initialState) let _ = await Promise.resolve() Assert.deepEqual( persistence.storageStatus, - Persistence.Ready({cleanRun: false, cache: Js.Dict.empty()}), + Persistence.Ready(initialState), ~message=`Storage status should be ready`, ) 
Assert.deepEqual( ( storageMock.isInitializedCalls->Array.length, storageMock.initializeCalls->Array.length, - storageMock.restoreEffectCacheCalls, + storageMock.loadInitialStateCalls->Array.length, ), - (1, 0, [{"withUpload": false}]), + (1, 0, 1), ~message=`Storage should be already initialized without additional initialize calls. Although it should load effect caches metadata.`, ) diff --git a/scenarios/test_codegen/test/lib_tests/PgStorage_test.res b/scenarios/test_codegen/test/lib_tests/PgStorage_test.res index 135c89645..736f3cc31 100644 --- a/scenarios/test_codegen/test/lib_tests/PgStorage_test.res +++ b/scenarios/test_codegen/test/lib_tests/PgStorage_test.res @@ -103,15 +103,30 @@ describe("Test PgStorage SQL generation functions", () => { module(Entities.A)->Entities.entityModToInternal, module(Entities.B)->Entities.entityModToInternal, ] - let generalTables = [TablesStatic.ChainMetadata.table] let enums = [Enums.EntityType.config->Internal.fromGenericEnumConfig] let queries = PgStorage.makeInitializeTransaction( ~pgSchema="test_schema", ~pgUser="postgres", - ~generalTables, ~entities, ~enums, + ~chainConfigs=[ + { + id: 1, + startBlock: 100, + endBlock: 200, + confirmedBlockThreshold: 10, + contracts: [], + sources: [], + }, + { + id: 137, + startBlock: 0, + confirmedBlockThreshold: 200, + contracts: [], + sources: [], + }, + ], ) // Should return exactly 2 queries: main DDL + functions @@ -129,7 +144,11 @@ CREATE SCHEMA "test_schema"; GRANT ALL ON SCHEMA "test_schema" TO "postgres"; GRANT ALL ON SCHEMA "test_schema" TO public; CREATE TYPE "test_schema".ENTITY_TYPE AS ENUM('A', 'B', 'C', 'CustomSelectionTestPass', 'D', 'EntityWithAllNonArrayTypes', 'EntityWithAllTypes', 'EntityWithBigDecimal', 'EntityWithTimestamp', 'Gravatar', 'NftCollection', 'PostgresNumericPrecisionEntityTester', 'Token', 'User', 'dynamic_contract_registry'); -CREATE TABLE IF NOT EXISTS "test_schema"."chain_metadata"("chain_id" INTEGER NOT NULL, "start_block" INTEGER NOT NULL, "end_block" INTEGER, "block_height" INTEGER NOT NULL, "first_event_block_number" INTEGER, "latest_processed_block" INTEGER, "num_events_processed" INTEGER, "is_hyper_sync" BOOLEAN NOT NULL, "num_batches_fetched" INTEGER NOT NULL, "latest_fetched_block_number" INTEGER NOT NULL, "timestamp_caught_up_to_head_or_endblock" TIMESTAMP WITH TIME ZONE NULL, PRIMARY KEY("chain_id")); +CREATE TABLE IF NOT EXISTS "test_schema"."event_sync_state"("chain_id" INTEGER NOT NULL, "block_number" INTEGER NOT NULL, "log_index" INTEGER NOT NULL, "block_timestamp" INTEGER NOT NULL, "is_pre_registering_dynamic_contracts" BOOLEAN DEFAULT false, PRIMARY KEY("chain_id")); +CREATE TABLE IF NOT EXISTS "test_schema"."envio_chains"("id" INTEGER NOT NULL, "start_block" INTEGER NOT NULL, "end_block" INTEGER, "buffer_block" INTEGER NOT NULL, "source_block" INTEGER NOT NULL, "first_event_block" INTEGER, "ready_at" TIMESTAMP WITH TIME ZONE NULL, "events_processed" INTEGER NOT NULL, "_is_hyper_sync" BOOLEAN NOT NULL, "_latest_processed_block" INTEGER, "_num_batches_fetched" INTEGER NOT NULL, PRIMARY KEY("id")); +CREATE TABLE IF NOT EXISTS "test_schema"."persisted_state"("id" SERIAL NOT NULL, "envio_version" TEXT NOT NULL, "config_hash" TEXT NOT NULL, "schema_hash" TEXT NOT NULL, "handler_files_hash" TEXT NOT NULL, "abi_files_hash" TEXT NOT NULL, PRIMARY KEY("id")); +CREATE TABLE IF NOT EXISTS "test_schema"."end_of_block_range_scanned_data"("chain_id" INTEGER NOT NULL, "block_number" INTEGER NOT NULL, "block_hash" TEXT NOT NULL, PRIMARY KEY("chain_id", "block_number")); 
+CREATE TABLE IF NOT EXISTS "test_schema"."raw_events"("chain_id" INTEGER NOT NULL, "event_id" NUMERIC NOT NULL, "event_name" TEXT NOT NULL, "contract_name" TEXT NOT NULL, "block_number" INTEGER NOT NULL, "log_index" INTEGER NOT NULL, "src_address" TEXT NOT NULL, "block_hash" TEXT NOT NULL, "block_timestamp" INTEGER NOT NULL, "block_fields" JSONB NOT NULL, "transaction_fields" JSONB NOT NULL, "params" JSONB NOT NULL, "db_write_timestamp" TIMESTAMP DEFAULT CURRENT_TIMESTAMP, "serial" SERIAL, PRIMARY KEY("serial")); CREATE TABLE IF NOT EXISTS "test_schema"."A"("b_id" TEXT NOT NULL, "id" TEXT NOT NULL, "optionalStringToTestLinkedEntities" TEXT, "db_write_timestamp" TIMESTAMP DEFAULT CURRENT_TIMESTAMP, PRIMARY KEY("id")); CREATE TABLE IF NOT EXISTS "test_schema"."A_history"("entity_history_block_timestamp" INTEGER NOT NULL, "entity_history_chain_id" INTEGER NOT NULL, "entity_history_block_number" INTEGER NOT NULL, "entity_history_log_index" INTEGER NOT NULL, "previous_entity_history_block_timestamp" INTEGER, "previous_entity_history_chain_id" INTEGER, "previous_entity_history_block_number" INTEGER, "previous_entity_history_log_index" INTEGER, "b_id" TEXT, "id" TEXT NOT NULL, "optionalStringToTestLinkedEntities" TEXT, "action" "test_schema".ENTITY_HISTORY_ROW_ACTION NOT NULL, "serial" SERIAL, PRIMARY KEY("entity_history_block_timestamp", "entity_history_chain_id", "entity_history_block_number", "entity_history_log_index", "id")); CREATE TABLE IF NOT EXISTS "test_schema"."B"("c_id" TEXT, "id" TEXT NOT NULL, "db_write_timestamp" TIMESTAMP DEFAULT CURRENT_TIMESTAMP, PRIMARY KEY("id")); @@ -137,7 +156,36 @@ CREATE TABLE IF NOT EXISTS "test_schema"."B_history"("entity_history_block_times CREATE INDEX IF NOT EXISTS "A_b_id" ON "test_schema"."A"("b_id"); CREATE INDEX IF NOT EXISTS "A_history_serial" ON "test_schema"."A_history"("serial"); CREATE INDEX IF NOT EXISTS "B_history_serial" ON "test_schema"."B_history"("serial"); -CREATE INDEX IF NOT EXISTS "A_b_id" ON "test_schema"."A"("b_id");` +CREATE INDEX IF NOT EXISTS "A_b_id" ON "test_schema"."A"("b_id"); +CREATE VIEW "test_schema"."_meta" AS + SELECT + "id" AS "chainId", + "start_block" AS "startBlock", + "end_block" AS "endBlock", + "buffer_block" AS "bufferBlock", + "ready_at" AS "readyAt", + "first_event_block" AS "firstEventBlock", + "events_processed" AS "eventsProcessed", + ("ready_at" IS NOT NULL) AS "isReady" + FROM "test_schema"."envio_chains" + ORDER BY "id"; +CREATE VIEW "test_schema"."chain_metadata" AS + SELECT + "source_block" AS "block_height", + "id" AS "chain_id", + "end_block" AS "end_block", + "first_event_block" AS "first_event_block_number", + "_is_hyper_sync" AS "is_hyper_sync", + "buffer_block" AS "latest_fetched_block_number", + "_latest_processed_block" AS "latest_processed_block", + "_num_batches_fetched" AS "num_batches_fetched", + "events_processed" AS "num_events_processed", + "start_block" AS "start_block", + "ready_at" AS "timestamp_caught_up_to_head_or_endblock" + FROM "test_schema"."envio_chains"; +INSERT INTO "test_schema"."envio_chains" ("id", "start_block", "end_block", "source_block", "first_event_block", "buffer_block", "ready_at", "events_processed", "_is_hyper_sync", "_latest_processed_block", "_num_batches_fetched") +VALUES (1, 100, 200, 0, NULL, -1, NULL, 0, false, NULL, 0), + (137, 0, NULL, 0, NULL, -1, NULL, 0, false, NULL, 0);` Assert.equal( mainQuery, @@ -179,7 +227,38 @@ CREATE INDEX IF NOT EXISTS "A_b_id" ON "test_schema"."A"("b_id");` let expectedMainQuery = `DROP SCHEMA IF EXISTS "test_schema" 
CASCADE; CREATE SCHEMA "test_schema"; GRANT ALL ON SCHEMA "test_schema" TO "postgres"; -GRANT ALL ON SCHEMA "test_schema" TO public;` +GRANT ALL ON SCHEMA "test_schema" TO public; +CREATE TABLE IF NOT EXISTS "test_schema"."event_sync_state"("chain_id" INTEGER NOT NULL, "block_number" INTEGER NOT NULL, "log_index" INTEGER NOT NULL, "block_timestamp" INTEGER NOT NULL, "is_pre_registering_dynamic_contracts" BOOLEAN DEFAULT false, PRIMARY KEY("chain_id")); +CREATE TABLE IF NOT EXISTS "test_schema"."envio_chains"("id" INTEGER NOT NULL, "start_block" INTEGER NOT NULL, "end_block" INTEGER, "buffer_block" INTEGER NOT NULL, "source_block" INTEGER NOT NULL, "first_event_block" INTEGER, "ready_at" TIMESTAMP WITH TIME ZONE NULL, "events_processed" INTEGER NOT NULL, "_is_hyper_sync" BOOLEAN NOT NULL, "_latest_processed_block" INTEGER, "_num_batches_fetched" INTEGER NOT NULL, PRIMARY KEY("id")); +CREATE TABLE IF NOT EXISTS "test_schema"."persisted_state"("id" SERIAL NOT NULL, "envio_version" TEXT NOT NULL, "config_hash" TEXT NOT NULL, "schema_hash" TEXT NOT NULL, "handler_files_hash" TEXT NOT NULL, "abi_files_hash" TEXT NOT NULL, PRIMARY KEY("id")); +CREATE TABLE IF NOT EXISTS "test_schema"."end_of_block_range_scanned_data"("chain_id" INTEGER NOT NULL, "block_number" INTEGER NOT NULL, "block_hash" TEXT NOT NULL, PRIMARY KEY("chain_id", "block_number")); +CREATE TABLE IF NOT EXISTS "test_schema"."raw_events"("chain_id" INTEGER NOT NULL, "event_id" NUMERIC NOT NULL, "event_name" TEXT NOT NULL, "contract_name" TEXT NOT NULL, "block_number" INTEGER NOT NULL, "log_index" INTEGER NOT NULL, "src_address" TEXT NOT NULL, "block_hash" TEXT NOT NULL, "block_timestamp" INTEGER NOT NULL, "block_fields" JSONB NOT NULL, "transaction_fields" JSONB NOT NULL, "params" JSONB NOT NULL, "db_write_timestamp" TIMESTAMP DEFAULT CURRENT_TIMESTAMP, "serial" SERIAL, PRIMARY KEY("serial")); +CREATE VIEW "test_schema"."_meta" AS + SELECT + "id" AS "chainId", + "start_block" AS "startBlock", + "end_block" AS "endBlock", + "buffer_block" AS "bufferBlock", + "ready_at" AS "readyAt", + "first_event_block" AS "firstEventBlock", + "events_processed" AS "eventsProcessed", + ("ready_at" IS NOT NULL) AS "isReady" + FROM "test_schema"."envio_chains" + ORDER BY "id"; +CREATE VIEW "test_schema"."chain_metadata" AS + SELECT + "source_block" AS "block_height", + "id" AS "chain_id", + "end_block" AS "end_block", + "first_event_block" AS "first_event_block_number", + "_is_hyper_sync" AS "is_hyper_sync", + "buffer_block" AS "latest_fetched_block_number", + "_latest_processed_block" AS "latest_processed_block", + "_num_batches_fetched" AS "num_batches_fetched", + "events_processed" AS "num_events_processed", + "start_block" AS "start_block", + "ready_at" AS "timestamp_caught_up_to_head_or_endblock" + FROM "test_schema"."envio_chains";` Assert.equal( mainQuery, @@ -213,7 +292,6 @@ $$ LANGUAGE plpgsql;`, let queries = PgStorage.makeInitializeTransaction( ~pgSchema="public", ~pgUser="postgres", - ~generalTables=[], ~entities, ~enums=[], ) @@ -231,10 +309,41 @@ $$ LANGUAGE plpgsql;`, CREATE SCHEMA "public"; GRANT ALL ON SCHEMA "public" TO "postgres"; GRANT ALL ON SCHEMA "public" TO public; +CREATE TABLE IF NOT EXISTS "public"."event_sync_state"("chain_id" INTEGER NOT NULL, "block_number" INTEGER NOT NULL, "log_index" INTEGER NOT NULL, "block_timestamp" INTEGER NOT NULL, "is_pre_registering_dynamic_contracts" BOOLEAN DEFAULT false, PRIMARY KEY("chain_id")); +CREATE TABLE IF NOT EXISTS "public"."envio_chains"("id" INTEGER NOT NULL, "start_block" INTEGER 
NOT NULL, "end_block" INTEGER, "buffer_block" INTEGER NOT NULL, "source_block" INTEGER NOT NULL, "first_event_block" INTEGER, "ready_at" TIMESTAMP WITH TIME ZONE NULL, "events_processed" INTEGER NOT NULL, "_is_hyper_sync" BOOLEAN NOT NULL, "_latest_processed_block" INTEGER, "_num_batches_fetched" INTEGER NOT NULL, PRIMARY KEY("id")); +CREATE TABLE IF NOT EXISTS "public"."persisted_state"("id" SERIAL NOT NULL, "envio_version" TEXT NOT NULL, "config_hash" TEXT NOT NULL, "schema_hash" TEXT NOT NULL, "handler_files_hash" TEXT NOT NULL, "abi_files_hash" TEXT NOT NULL, PRIMARY KEY("id")); +CREATE TABLE IF NOT EXISTS "public"."end_of_block_range_scanned_data"("chain_id" INTEGER NOT NULL, "block_number" INTEGER NOT NULL, "block_hash" TEXT NOT NULL, PRIMARY KEY("chain_id", "block_number")); +CREATE TABLE IF NOT EXISTS "public"."raw_events"("chain_id" INTEGER NOT NULL, "event_id" NUMERIC NOT NULL, "event_name" TEXT NOT NULL, "contract_name" TEXT NOT NULL, "block_number" INTEGER NOT NULL, "log_index" INTEGER NOT NULL, "src_address" TEXT NOT NULL, "block_hash" TEXT NOT NULL, "block_timestamp" INTEGER NOT NULL, "block_fields" JSONB NOT NULL, "transaction_fields" JSONB NOT NULL, "params" JSONB NOT NULL, "db_write_timestamp" TIMESTAMP DEFAULT CURRENT_TIMESTAMP, "serial" SERIAL, PRIMARY KEY("serial")); CREATE TABLE IF NOT EXISTS "public"."A"("b_id" TEXT NOT NULL, "id" TEXT NOT NULL, "optionalStringToTestLinkedEntities" TEXT, "db_write_timestamp" TIMESTAMP DEFAULT CURRENT_TIMESTAMP, PRIMARY KEY("id")); CREATE TABLE IF NOT EXISTS "public"."A_history"("entity_history_block_timestamp" INTEGER NOT NULL, "entity_history_chain_id" INTEGER NOT NULL, "entity_history_block_number" INTEGER NOT NULL, "entity_history_log_index" INTEGER NOT NULL, "previous_entity_history_block_timestamp" INTEGER, "previous_entity_history_chain_id" INTEGER, "previous_entity_history_block_number" INTEGER, "previous_entity_history_log_index" INTEGER, "b_id" TEXT, "id" TEXT NOT NULL, "optionalStringToTestLinkedEntities" TEXT, "action" "public".ENTITY_HISTORY_ROW_ACTION NOT NULL, "serial" SERIAL, PRIMARY KEY("entity_history_block_timestamp", "entity_history_chain_id", "entity_history_block_number", "entity_history_log_index", "id")); CREATE INDEX IF NOT EXISTS "A_b_id" ON "public"."A"("b_id"); -CREATE INDEX IF NOT EXISTS "A_history_serial" ON "public"."A_history"("serial");` +CREATE INDEX IF NOT EXISTS "A_history_serial" ON "public"."A_history"("serial"); +CREATE VIEW "public"."_meta" AS + SELECT + "id" AS "chainId", + "start_block" AS "startBlock", + "end_block" AS "endBlock", + "buffer_block" AS "bufferBlock", + "ready_at" AS "readyAt", + "first_event_block" AS "firstEventBlock", + "events_processed" AS "eventsProcessed", + ("ready_at" IS NOT NULL) AS "isReady" + FROM "public"."envio_chains" + ORDER BY "id"; +CREATE VIEW "public"."chain_metadata" AS + SELECT + "source_block" AS "block_height", + "id" AS "chain_id", + "end_block" AS "end_block", + "first_event_block" AS "first_event_block_number", + "_is_hyper_sync" AS "is_hyper_sync", + "buffer_block" AS "latest_fetched_block_number", + "_latest_processed_block" AS "latest_processed_block", + "_num_batches_fetched" AS "num_batches_fetched", + "events_processed" AS "num_events_processed", + "start_block" AS "start_block", + "ready_at" AS "timestamp_caught_up_to_head_or_endblock" + FROM "public"."envio_chains";` Assert.equal( mainQuery, @@ -330,8 +439,8 @@ SELECT * FROM unnest($1::NUMERIC[],$2::NUMERIC(10, 8)[],$3::NUMERIC[],$4::INTEGE async () => { let query = 
PgStorage.makeInsertUnnestSetQuery(
          ~pgSchema="test_schema",
-         ~table=TablesStatic.RawEvents.table,
-         ~itemSchema=TablesStatic.RawEvents.schema,
+         ~table=InternalTable.RawEvents.table,
+         ~itemSchema=InternalTable.RawEvents.schema,
          ~isRawEvents=true,
        )
@@ -387,4 +496,176 @@ VALUES($1,$2)ON CONFLICT("id") DO UPDATE SET "c_id" = EXCLUDED."c_id";`
       },
     )
   })
+
+  describe("InternalTable.Chains.makeSingleUpdateQuery", () => {
+    Async.it(
+      "Should create correct SQL for updating chain state",
+      async () => {
+        let query = InternalTable.Chains.makeSingleUpdateQuery(~pgSchema="test_schema")
+
+        let expectedQuery = `UPDATE "test_schema"."envio_chains"
+SET "source_block" = $2,
+    "first_event_block" = $3,
+    "buffer_block" = $4,
+    "ready_at" = $5,
+    "events_processed" = $6,
+    "_is_hyper_sync" = $7,
+    "_latest_processed_block" = $8,
+    "_num_batches_fetched" = $9
+WHERE "id" = $1;`
+
+        Assert.equal(
+          query,
+          expectedQuery,
+          ~message="Should generate correct UPDATE SQL with parameter placeholders",
+        )
+      },
+    )
+  })
+
+  describe("InternalTable.Chains.makeInitialValuesQuery", () => {
+    Async.it(
+      "Should return None for empty chain configs",
+      async () => {
+        let query = InternalTable.Chains.makeInitialValuesQuery(
+          ~pgSchema="test_schema",
+          ~chainConfigs=[],
+        )
+
+        Assert.equal(
+          query,
+          None,
+          ~message="Should return None when no chain configs provided",
+        )
+      },
+    )
+
+    Async.it(
+      "Should create correct SQL for single chain config",
+      async () => {
+        let chainConfig: InternalConfig.chain = {
+          id: 1,
+          startBlock: 100,
+          endBlock: 200,
+          confirmedBlockThreshold: 5,
+          contracts: [],
+          sources: [],
+        }
+
+        let query = InternalTable.Chains.makeInitialValuesQuery(
+          ~pgSchema="test_schema",
+          ~chainConfigs=[chainConfig],
+        )
+
+        let expectedQuery = `INSERT INTO "test_schema"."envio_chains" ("id", "start_block", "end_block", "source_block", "first_event_block", "buffer_block", "ready_at", "events_processed", "_is_hyper_sync", "_latest_processed_block", "_num_batches_fetched")
+VALUES (1, 100, 200, 0, NULL, -1, NULL, 0, false, NULL, 0);`
+
+        Assert.equal(
+          query,
+          Some(expectedQuery),
+          ~message="Should generate correct INSERT VALUES SQL for single chain",
+        )
+      },
+    )
+
+    Async.it(
+      "Should create correct SQL for single chain config with no end block",
+      async () => {
+        let chainConfig: InternalConfig.chain = {
+          id: 1,
+          startBlock: 100,
+          confirmedBlockThreshold: 5,
+          contracts: [],
+          sources: [],
+        }
+
+        let query = InternalTable.Chains.makeInitialValuesQuery(
+          ~pgSchema="public",
+          ~chainConfigs=[chainConfig],
+        )
+
+        let expectedQuery = `INSERT INTO "public"."envio_chains" ("id", "start_block", "end_block", "source_block", "first_event_block", "buffer_block", "ready_at", "events_processed", "_is_hyper_sync", "_latest_processed_block", "_num_batches_fetched")
+VALUES (1, 100, NULL, 0, NULL, -1, NULL, 0, false, NULL, 0);`
+
+        Assert.equal(
+          query,
+          Some(expectedQuery),
+          ~message="Should generate correct INSERT VALUES SQL with NULL end_block",
+        )
+      },
+    )
+
+    Async.it(
+      "Should create correct SQL for multiple chain configs",
+      async () => {
+        let chainConfig1: InternalConfig.chain = {
+          id: 1,
+          startBlock: 100,
+          endBlock: 200,
+          confirmedBlockThreshold: 5,
+          contracts: [],
+          sources: [],
+        }
+
+        let chainConfig2: InternalConfig.chain = {
+          id: 42,
+          startBlock: 500,
+          confirmedBlockThreshold: 0,
+          contracts: [],
+          sources: [],
+        }
+
+        let query = InternalTable.Chains.makeInitialValuesQuery(
+          ~pgSchema="production",
+          ~chainConfigs=[chainConfig1, chainConfig2],
+        )
+
+        let expectedQuery = `INSERT INTO "production"."envio_chains" ("id", "start_block", "end_block", "source_block", "first_event_block", "buffer_block", "ready_at", "events_processed", "_is_hyper_sync", "_latest_processed_block", "_num_batches_fetched")
+VALUES (1, 100, 200, 0, NULL, -1, NULL, 0, false, NULL, 0),
+       (42, 500, NULL, 0, NULL, -1, NULL, 0, false, NULL, 0);`
+
+        Assert.equal(
+          query,
+          Some(expectedQuery),
+          ~message="Should generate correct INSERT VALUES SQL for multiple chains",
+        )
+      },
+    )
+
+    Async.it(
+      "Should use hardcoded values as specified",
+      async () => {
+        let chainConfig: InternalConfig.chain = {
+          id: 1,
+          startBlock: 1000,
+          endBlock: 2000,
+          confirmedBlockThreshold: 10,
+          contracts: [],
+          sources: [],
+        }
+
+        let query =
+          InternalTable.Chains.makeInitialValuesQuery(
+            ~pgSchema="test_schema",
+            ~chainConfigs=[chainConfig],
+          )->Belt.Option.getExn
+
+        // Verify the hardcoded values are correct:
+        // source_block: 0
+        // buffer_block: -1
+        // events_processed: 0
+        // first_event_block: NULL
+        // ready_at: NULL
+        // _is_hyper_sync: false
+        // _num_batches_fetched: 0
+        // _latest_processed_block: NULL
+        Assert.ok(
+          query->Js.String2.includes(
+            "VALUES (1, 1000, 2000, 0, NULL, -1, NULL, 0, false, NULL, 0)",
+          ),
+          ~message="Should contain all hardcoded values as specified",
+        )
+      },
+    )
+  })
 })
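Note on a pattern that recurs throughout these test changes: `PgStorage.setEntityHistoryOrThrow` now returns an array of promises (one per history statement) rather than a single promise, which is why every call site above gained a `Promise.all` wrapper. A minimal sketch of the new calling convention, assuming the same `Db.sql`, `TestEntity`, and `Promise` helpers the tests use (the `writeHistoryRows` name is illustrative, not part of the diff):

// Illustrative sketch only: collects the promises returned by the updated
// setEntityHistoryOrThrow so the whole batch can be awaited as one unit.
let writeHistoryRows = (sql, rows) =>
  sql
  ->PgStorage.setEntityHistoryOrThrow(
    ~entityHistory=TestEntity.entityHistory,
    ~rows,
    ~shouldCopyCurrentEntity=true,
  )
  ->Promise.all
  ->Promise.ignoreValue

Inside `Postgres.beginSql` the same expression serves as the transaction body, matching how the rollback tests above insert their mock history rows.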